# @package _global_
# to execute this experiment run:
# python train.py experiment=focusLiteNN_150
defaults:
  - override /datamodule: focus150.yaml
  - override /model: focusLiteNN.yaml
  - override /callbacks: default.yaml
  - override /logger: many_loggers
  - override /trainer: default.yaml
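# each override above selects a config from one of the repo's Hydra config groups
# (assumed standard layout, e.g. configs/datamodule/focus150.yaml, configs/model/focusLiteNN.yaml)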
# all parameters below will be merged with parameters from default configurations set above
# this allows you to overwrite only specified parameters
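# the same parameters can also be overridden from the command line, for example
# (hypothetical values, shown only to illustrate Hydra's override syntax):
# python train.py experiment=focusLiteNN_150 trainer.max_epochs=50 datamodule.batch_size=64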
# name of the run determines folder name in logs
name: "focusLiteNN_150"
seed: 12345
trainer:
  min_epochs: 1
  max_epochs: 100
model:
  pre_trained: False
  num_channel: 3
  lr: 0.001
  weight_decay: 0.0005
datamodule:
  batch_size: 128
  augmentation: True