Hannes Kuchelmeister committed on
Commit 0ce7464
1 Parent(s): 1fb6fd9

run experiments with relu in cnn

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full change set.
Files changed (50)
  1. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/0/.hydra/config.yaml +81 -0
  2. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/0/.hydra/hydra.yaml +240 -0
  3. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/0/.hydra/overrides.yaml +9 -0
  4. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/0/checkpoints/epoch_068.ckpt +3 -0
  5. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/0/checkpoints/last.ckpt +3 -0
  6. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/0/csv/version_0/hparams.yaml +54 -0
  7. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/0/csv/version_0/metrics.csv +202 -0
  8. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/0/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652255986.908803db74d8.1.0 +3 -0
  9. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/0/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652256202.908803db74d8.1.1 +3 -0
  10. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/0/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml +54 -0
  11. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/1/.hydra/config.yaml +81 -0
  12. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/1/.hydra/hydra.yaml +240 -0
  13. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/1/.hydra/overrides.yaml +9 -0
  14. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/1/checkpoints/epoch_089.ckpt +3 -0
  15. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/1/checkpoints/last.ckpt +3 -0
  16. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/1/csv/version_0/hparams.yaml +54 -0
  17. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/1/csv/version_0/metrics.csv +202 -0
  18. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/1/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652256203.908803db74d8.1.2 +3 -0
  19. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/1/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652256421.908803db74d8.1.3 +3 -0
  20. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/1/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml +54 -0
  21. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/10/.hydra/config.yaml +81 -0
  22. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/10/.hydra/hydra.yaml +240 -0
  23. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/10/.hydra/overrides.yaml +9 -0
  24. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/10/checkpoints/epoch_036.ckpt +3 -0
  25. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/10/checkpoints/last.ckpt +3 -0
  26. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/10/csv/version_0/hparams.yaml +54 -0
  27. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/10/csv/version_0/metrics.csv +202 -0
  28. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/10/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652258020.908803db74d8.1.20 +3 -0
  29. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/10/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652258226.908803db74d8.1.21 +3 -0
  30. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/10/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml +54 -0
  31. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/11/.hydra/config.yaml +81 -0
  32. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/11/.hydra/hydra.yaml +240 -0
  33. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/11/.hydra/overrides.yaml +9 -0
  34. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/11/checkpoints/epoch_098.ckpt +3 -0
  35. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/11/checkpoints/last.ckpt +3 -0
  36. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/11/csv/version_0/hparams.yaml +54 -0
  37. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/11/csv/version_0/metrics.csv +202 -0
  38. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/11/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652258227.908803db74d8.1.22 +3 -0
  39. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/11/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652258424.908803db74d8.1.23 +3 -0
  40. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/11/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml +54 -0
  41. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/12/.hydra/config.yaml +81 -0
  42. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/12/.hydra/hydra.yaml +240 -0
  43. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/12/.hydra/overrides.yaml +9 -0
  44. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/12/checkpoints/epoch_089.ckpt +3 -0
  45. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/12/checkpoints/last.ckpt +3 -0
  46. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/12/csv/version_0/hparams.yaml +54 -0
  47. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/12/csv/version_0/metrics.csv +202 -0
  48. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/12/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652258424.908803db74d8.1.24 +3 -0
  49. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/12/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652258606.908803db74d8.1.25 +3 -0
  50. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/12/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml +54 -0
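The listing above follows Hydra's multirun layout: one numbered subdirectory per Optuna trial, each holding the composed `.hydra` configs, checkpoints, and CSV/TensorBoard logs. The sketch below shows one way to summarize such a sweep directory; it assumes a local checkout of these logs and that PyYAML is installed (the `sweep_dir` path simply mirrors the listing).

```python
# Minimal sketch (assumes PyYAML and a local checkout of these logs):
# print the hyperparameter overrides recorded for each numbered trial.
from pathlib import Path

import yaml

sweep_dir = Path(
    "logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42"
)

for trial_dir in sorted(sweep_dir.iterdir(), key=lambda p: (not p.name.isdigit(), p.name.zfill(4))):
    overrides_file = trial_dir / ".hydra" / "overrides.yaml"
    if not overrides_file.is_file():
        continue  # skip anything that is not a numbered trial directory
    overrides = yaml.safe_load(overrides_file.read_text())
    print(f"trial {trial_dir.name}: {', '.join(overrides)}")
```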
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/0/.hydra/config.yaml ADDED
@@ -0,0 +1,81 @@
+ original_work_dir: ${hydra:runtime.cwd}
+ data_dir: ${original_work_dir}/data
+ print_config: true
+ ignore_warnings: true
+ train: true
+ test: true
+ seed: null
+ name: focusConvMSE_150_hyperparameter_search
+ datamodule:
+   _target_: src.datamodules.focus_datamodule.FocusDataModule
+   data_dir: ${data_dir}/focus150
+   csv_train_file: ${data_dir}/focus150/train_metadata.csv
+   csv_val_file: ${data_dir}/focus150/validation_metadata.csv
+   csv_test_file: ${data_dir}/focus150/test_metadata.csv
+   batch_size: 64
+   num_workers: 0
+   pin_memory: false
+ model:
+   _target_: src.models.focus_conv_module.FocusConvLitModule
+   image_size: 150
+   pool_size: 2
+   conv1_size: 7
+   conv1_channels: 3
+   conv2_size: 7
+   conv2_channels: 16
+   lin1_size: 128
+   lin2_size: 32
+   output_size: 1
+   lr: 0.0019207962356032353
+   weight_decay: 0.0005
+ callbacks:
+   model_checkpoint:
+     _target_: pytorch_lightning.callbacks.ModelCheckpoint
+     monitor: val/mae
+     mode: min
+     save_top_k: 1
+     save_last: true
+     verbose: false
+     dirpath: checkpoints/
+     filename: epoch_{epoch:03d}
+     auto_insert_metric_name: false
+   early_stopping:
+     _target_: pytorch_lightning.callbacks.EarlyStopping
+     monitor: val/mae
+     mode: min
+     patience: 100
+     min_delta: 0
+   model_summary:
+     _target_: pytorch_lightning.callbacks.RichModelSummary
+     max_depth: -1
+   rich_progress_bar:
+     _target_: pytorch_lightning.callbacks.RichProgressBar
+ logger:
+   csv:
+     _target_: pytorch_lightning.loggers.csv_logs.CSVLogger
+     save_dir: .
+     name: csv/
+     prefix: ''
+   mlflow:
+     _target_: pytorch_lightning.loggers.mlflow.MLFlowLogger
+     experiment_name: ${name}
+     tracking_uri: ${original_work_dir}/logs/mlflow/mlruns
+     tags: null
+     save_dir: ./mlruns
+     prefix: ''
+     artifact_location: null
+   tensorboard:
+     _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
+     save_dir: tensorboard/
+     name: null
+     version: ${name}
+     log_graph: false
+     default_hp_metric: true
+     prefix: ''
+ trainer:
+   _target_: pytorch_lightning.Trainer
+   gpus: 1
+   min_epochs: 1
+   max_epochs: 100
+   resume_from_checkpoint: null
+ optimized_metric: val/mae_best
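Each `_target_` entry in this composed config is instantiated at runtime, and the sweeper reads back the metric named by `optimized_metric`. The repository's actual training entry point is not part of this diff; the following is only a minimal sketch of how such a config is typically consumed, assuming hydra-core, pytorch-lightning, and the project's `src` package are importable.

```python
# Minimal sketch, not the repository's actual entry point: how a composed
# config like the one above is typically turned into objects and trained.
import hydra
from omegaconf import DictConfig


@hydra.main(config_path="configs", config_name="train.yaml")
def main(cfg: DictConfig) -> float:
    datamodule = hydra.utils.instantiate(cfg.datamodule)  # FocusDataModule
    model = hydra.utils.instantiate(cfg.model)            # FocusConvLitModule
    callbacks = [hydra.utils.instantiate(cb) for cb in cfg.callbacks.values()]
    loggers = [hydra.utils.instantiate(lg) for lg in cfg.logger.values()]
    trainer = hydra.utils.instantiate(cfg.trainer, callbacks=callbacks, logger=loggers)
    trainer.fit(model, datamodule=datamodule)
    if cfg.test:
        trainer.test(model, datamodule=datamodule, ckpt_path="best")
    # The Optuna sweeper minimizes whatever this function returns.
    return trainer.callback_metrics[cfg.optimized_metric].item()


if __name__ == "__main__":
    main()
```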
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/0/.hydra/hydra.yaml ADDED
@@ -0,0 +1,240 @@
1
+ hydra:
2
+ run:
3
+ dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
4
+ sweep:
5
+ dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
6
+ subdir: ${hydra.job.num}
7
+ launcher:
8
+ _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
9
+ sweeper:
10
+ sampler:
11
+ _target_: optuna.samplers.TPESampler
12
+ seed: 12345
13
+ consider_prior: true
14
+ prior_weight: 1.0
15
+ consider_magic_clip: true
16
+ consider_endpoints: false
17
+ n_startup_trials: 10
18
+ n_ei_candidates: 24
19
+ multivariate: false
20
+ warn_independent_sampling: true
21
+ _target_: hydra_plugins.hydra_optuna_sweeper.optuna_sweeper.OptunaSweeper
22
+ direction: minimize
23
+ storage: null
24
+ study_name: focusConvMSE_150_hyperparameter_search
25
+ n_trials: 20
26
+ n_jobs: 1
27
+ search_space:
28
+ datamodule.batch_size:
29
+ type: categorical
30
+ choices:
31
+ - 64
32
+ - 128
33
+ model.lr:
34
+ type: float
35
+ low: 0.0001
36
+ high: 0.01
37
+ model.conv1_size:
38
+ type: categorical
39
+ choices:
40
+ - 3
41
+ - 5
42
+ - 7
43
+ model.conv1_channels:
44
+ type: categorical
45
+ choices:
46
+ - 3
47
+ - 6
48
+ - 9
49
+ model.conv2_size:
50
+ type: categorical
51
+ choices:
52
+ - 3
53
+ - 5
54
+ - 7
55
+ model.conv2_channels:
56
+ type: categorical
57
+ choices:
58
+ - 6
59
+ - 11
60
+ - 16
61
+ model.lin1_size:
62
+ type: categorical
63
+ choices:
64
+ - 32
65
+ - 72
66
+ - 128
67
+ model.lin2_size:
68
+ type: categorical
69
+ choices:
70
+ - 32
71
+ - 72
72
+ - 128
73
+ help:
74
+ app_name: ${hydra.job.name}
75
+ header: '${hydra.help.app_name} is powered by Hydra.
76
+
77
+ '
78
+ footer: 'Powered by Hydra (https://hydra.cc)
79
+
80
+ Use --hydra-help to view Hydra specific help
81
+
82
+ '
83
+ template: '${hydra.help.header}
84
+
85
+ == Configuration groups ==
86
+
87
+ Compose your configuration from those groups (group=option)
88
+
89
+
90
+ $APP_CONFIG_GROUPS
91
+
92
+
93
+ == Config ==
94
+
95
+ Override anything in the config (foo.bar=value)
96
+
97
+
98
+ $CONFIG
99
+
100
+
101
+ ${hydra.help.footer}
102
+
103
+ '
104
+ hydra_help:
105
+ template: 'Hydra (${hydra.runtime.version})
106
+
107
+ See https://hydra.cc for more info.
108
+
109
+
110
+ == Flags ==
111
+
112
+ $FLAGS_HELP
113
+
114
+
115
+ == Configuration groups ==
116
+
117
+ Compose your configuration from those groups (For example, append hydra/job_logging=disabled
118
+ to command line)
119
+
120
+
121
+ $HYDRA_CONFIG_GROUPS
122
+
123
+
124
+ Use ''--cfg hydra'' to Show the Hydra config.
125
+
126
+ '
127
+ hydra_help: ???
128
+ hydra_logging:
129
+ version: 1
130
+ formatters:
131
+ colorlog:
132
+ (): colorlog.ColoredFormatter
133
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
134
+ handlers:
135
+ console:
136
+ class: logging.StreamHandler
137
+ formatter: colorlog
138
+ stream: ext://sys.stdout
139
+ root:
140
+ level: INFO
141
+ handlers:
142
+ - console
143
+ disable_existing_loggers: false
144
+ job_logging:
145
+ version: 1
146
+ formatters:
147
+ simple:
148
+ format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
149
+ colorlog:
150
+ (): colorlog.ColoredFormatter
151
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
152
+ - %(message)s'
153
+ log_colors:
154
+ DEBUG: purple
155
+ INFO: green
156
+ WARNING: yellow
157
+ ERROR: red
158
+ CRITICAL: red
159
+ handlers:
160
+ console:
161
+ class: logging.StreamHandler
162
+ formatter: colorlog
163
+ stream: ext://sys.stdout
164
+ file:
165
+ class: logging.FileHandler
166
+ formatter: simple
167
+ filename: ${hydra.job.name}.log
168
+ root:
169
+ level: INFO
170
+ handlers:
171
+ - console
172
+ - file
173
+ disable_existing_loggers: false
174
+ env: {}
175
+ searchpath: []
176
+ callbacks: {}
177
+ output_subdir: .hydra
178
+ overrides:
179
+ hydra: []
180
+ task:
181
+ - datamodule.batch_size=64
182
+ - model.lr=0.0019207962356032353
183
+ - model.conv1_size=7
184
+ - model.conv1_channels=3
185
+ - model.conv2_size=7
186
+ - model.conv2_channels=16
187
+ - model.lin1_size=128
188
+ - model.lin2_size=32
189
+ - hparams_search=focusConvMSE_150.yaml
190
+ job:
191
+ name: train
192
+ override_dirname: datamodule.batch_size=64,hparams_search=focusConvMSE_150.yaml,model.conv1_channels=3,model.conv1_size=7,model.conv2_channels=16,model.conv2_size=7,model.lin1_size=128,model.lin2_size=32,model.lr=0.0019207962356032353
193
+ id: '0'
194
+ num: 0
195
+ config_name: train.yaml
196
+ env_set: {}
197
+ env_copy: []
198
+ config:
199
+ override_dirname:
200
+ kv_sep: '='
201
+ item_sep: ','
202
+ exclude_keys: []
203
+ runtime:
204
+ version: 1.1.2
205
+ cwd: /usr/src/app
206
+ config_sources:
207
+ - path: hydra.conf
208
+ schema: pkg
209
+ provider: hydra
210
+ - path: /usr/src/app/configs
211
+ schema: file
212
+ provider: main
213
+ - path: hydra_plugins.hydra_colorlog.conf
214
+ schema: pkg
215
+ provider: hydra-colorlog
216
+ - path: ''
217
+ schema: structured
218
+ provider: schema
219
+ choices:
220
+ local: default.yaml
221
+ hparams_search: focusConvMSE_150.yaml
222
+ debug: null
223
+ experiment: null
224
+ log_dir: default.yaml
225
+ trainer: long.yaml
226
+ logger: many_loggers
227
+ callbacks: default.yaml
228
+ model: focusConv_150.yaml
229
+ datamodule: focus150.yaml
230
+ hydra/env: default
231
+ hydra/callbacks: null
232
+ hydra/job_logging: colorlog
233
+ hydra/hydra_logging: colorlog
234
+ hydra/hydra_help: default
235
+ hydra/help: default
236
+ hydra/sweeper: optuna
237
+ hydra/sweeper/sampler: tpe
238
+ hydra/launcher: basic
239
+ hydra/output: default
240
+ verbose: false
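The `sweeper` block above is the interesting part of this `hydra.yaml`: it records the Optuna TPE sampler settings and the search space used for the 20 trials. As a rough equivalent, the same study could be expressed directly in Optuna as sketched below; the objective body is only a placeholder, since in the real sweep each trial is a full Hydra-launched training run that reports `val/mae_best`.

```python
# Sketch only: the search space and TPE settings encoded by the sweeper block
# above, written as a plain Optuna study. The objective is a placeholder; the
# real sweep trains the model once per trial and minimizes val/mae_best.
import optuna


def objective(trial: optuna.Trial) -> float:
    trial.suggest_categorical("datamodule.batch_size", [64, 128])
    trial.suggest_float("model.lr", 0.0001, 0.01)
    trial.suggest_categorical("model.conv1_size", [3, 5, 7])
    trial.suggest_categorical("model.conv1_channels", [3, 6, 9])
    trial.suggest_categorical("model.conv2_size", [3, 5, 7])
    trial.suggest_categorical("model.conv2_channels", [6, 11, 16])
    trial.suggest_categorical("model.lin1_size", [32, 72, 128])
    trial.suggest_categorical("model.lin2_size", [32, 72, 128])
    return 0.0  # placeholder for the trial's val/mae_best


sampler = optuna.samplers.TPESampler(seed=12345, n_startup_trials=10, n_ei_candidates=24)
study = optuna.create_study(
    study_name="focusConvMSE_150_hyperparameter_search",
    direction="minimize",
    sampler=sampler,
)
study.optimize(objective, n_trials=20)
```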
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/0/.hydra/overrides.yaml ADDED
@@ -0,0 +1,9 @@
+ - datamodule.batch_size=64
+ - model.lr=0.0019207962356032353
+ - model.conv1_size=7
+ - model.conv1_channels=3
+ - model.conv2_size=7
+ - model.conv2_channels=16
+ - model.lin1_size=128
+ - model.lin2_size=32
+ - hparams_search=focusConvMSE_150.yaml
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/0/checkpoints/epoch_068.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:189d66b1fafa63a8cc4274db1945f378beb3537d31578a6401f4a87fbc1618ea
+ size 30180805
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/0/checkpoints/last.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:10dd6b471f4b655ed1a659bea4ba5f0035e498971ce2cdfe9567b2d139558f78
+ size 30180805
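Both checkpoint files are stored through Git LFS, so only the pointer (spec version, object id, size) appears in the diff; the ~30 MB checkpoints themselves live in LFS storage and are fetched with `git lfs pull`. Once fetched they are ordinary Lightning checkpoints. The loading sketch below assumes the project's `src` package is importable and that the module saves its hyperparameters into the checkpoint (Lightning's `save_hyperparameters()` convention); otherwise the constructor arguments must be passed explicitly.

```python
# Sketch: restore the best checkpoint of trial 0 after `git lfs pull`.
# Assumes FocusConvLitModule saved its hyperparameters into the checkpoint.
from src.models.focus_conv_module import FocusConvLitModule

ckpt_path = (
    "logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/"
    "2022-05-11_07-59-42/0/checkpoints/epoch_068.ckpt"
)
model = FocusConvLitModule.load_from_checkpoint(ckpt_path)
model.eval()  # ready for inference on 150x150 focus crops
```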
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/0/csv/version_0/hparams.yaml ADDED
@@ -0,0 +1,54 @@
1
+ trainer:
2
+ _target_: pytorch_lightning.Trainer
3
+ gpus: 1
4
+ min_epochs: 1
5
+ max_epochs: 100
6
+ resume_from_checkpoint: null
7
+ model:
8
+ _target_: src.models.focus_conv_module.FocusConvLitModule
9
+ image_size: 150
10
+ pool_size: 2
11
+ conv1_size: 7
12
+ conv1_channels: 3
13
+ conv2_size: 7
14
+ conv2_channels: 16
15
+ lin1_size: 128
16
+ lin2_size: 32
17
+ output_size: 1
18
+ lr: 0.0019207962356032353
19
+ weight_decay: 0.0005
20
+ datamodule:
21
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
22
+ data_dir: /usr/src/app/data/focus150
23
+ csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
24
+ csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
25
+ csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
26
+ batch_size: 64
27
+ num_workers: 0
28
+ pin_memory: false
29
+ seed: null
30
+ callbacks:
31
+ model_checkpoint:
32
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
33
+ monitor: val/mae
34
+ mode: min
35
+ save_top_k: 1
36
+ save_last: true
37
+ verbose: false
38
+ dirpath: checkpoints/
39
+ filename: epoch_{epoch:03d}
40
+ auto_insert_metric_name: false
41
+ early_stopping:
42
+ _target_: pytorch_lightning.callbacks.EarlyStopping
43
+ monitor: val/mae
44
+ mode: min
45
+ patience: 100
46
+ min_delta: 0
47
+ model_summary:
48
+ _target_: pytorch_lightning.callbacks.RichModelSummary
49
+ max_depth: -1
50
+ rich_progress_bar:
51
+ _target_: pytorch_lightning.callbacks.RichProgressBar
52
+ model/params/total: 2513981
53
+ model/params/trainable: 2513981
54
+ model/params/non_trainable: 0
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/0/csv/version_0/metrics.csv ADDED
@@ -0,0 +1,202 @@
1
+ val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae
2
+ 0.0013214268255978823,0.02995089441537857,0.02995089441537857,0,47,,,,
3
+ ,,,1,47,0.09107564389705658,0.08544210344552994,,
4
+ 0.0008829442667774856,0.024166671559214592,0.024166671559214592,1,95,,,,
5
+ ,,,2,95,0.0011302413186058402,0.02657642588019371,,
6
+ 0.0007275021052919328,0.02201317623257637,0.02201317623257637,2,143,,,,
7
+ ,,,3,143,0.0008497812668792903,0.023298220708966255,,
8
+ 0.001101081375963986,0.027400169521570206,0.02201317623257637,3,191,,,,
9
+ ,,,4,191,0.0007581689860671759,0.022135509178042412,,
10
+ 0.0007860080804675817,0.02276007831096649,0.02201317623257637,4,239,,,,
11
+ ,,,5,239,0.0006350601906888187,0.020468834787607193,,
12
+ 0.0004643478023353964,0.017967579886317253,0.017967579886317253,5,287,,,,
13
+ ,,,6,287,0.0005186209455132484,0.018578730523586273,,
14
+ 0.0004630199691746384,0.017952095717191696,0.017952095717191696,6,335,,,,
15
+ ,,,7,335,0.0005978613044135273,0.019994867965579033,,
16
+ 0.00043752603232860565,0.017555581405758858,0.017555581405758858,7,383,,,,
17
+ ,,,8,383,0.00046152930008247495,0.01768277958035469,,
18
+ 0.0004740311997011304,0.01807774230837822,0.017555581405758858,8,431,,,,
19
+ ,,,9,431,0.00043159123742952943,0.0172695592045784,,
20
+ 0.00043740891851484776,0.017542192712426186,0.017542192712426186,9,479,,,,
21
+ ,,,10,479,0.00042695042793639004,0.01716277375817299,,
22
+ 0.0004299463180359453,0.01744217239320278,0.01744217239320278,10,527,,,,
23
+ ,,,11,527,0.0004290902288630605,0.01717282272875309,,
24
+ 0.0004243432485964149,0.01734023727476597,0.01734023727476597,11,575,,,,
25
+ ,,,12,575,0.0004114969342481345,0.016951708123087883,,
26
+ 0.0004806671349797398,0.018205799162387848,0.01734023727476597,12,623,,,,
27
+ ,,,13,623,0.0004503990348894149,0.017624076455831528,,
28
+ 0.00042711824062280357,0.017376843839883804,0.01734023727476597,13,671,,,,
29
+ ,,,14,671,0.000444843084551394,0.01745457760989666,,
30
+ 0.0004940874059684575,0.018439264968037605,0.01734023727476597,14,719,,,,
31
+ ,,,15,719,0.0004583768895827234,0.017727574333548546,,
32
+ 0.0004883793881163001,0.018309809267520905,0.01734023727476597,15,767,,,,
33
+ ,,,16,767,0.0004375737044028938,0.017398077994585037,,
34
+ 0.00043858421850018203,0.017545394599437714,0.01734023727476597,16,815,,,,
35
+ ,,,17,815,0.0004382661427371204,0.017402121797204018,,
36
+ 0.00044020896893925965,0.017576968297362328,0.01734023727476597,17,863,,,,
37
+ ,,,18,863,0.00041041255462914705,0.01695941388607025,,
38
+ 0.0006364707951433957,0.020539429038763046,0.01734023727476597,18,911,,,,
39
+ ,,,19,911,0.00042282711365260184,0.01713656634092331,,
40
+ 0.00042814717744477093,0.017377782613039017,0.01734023727476597,19,959,,,,
41
+ ,,,20,959,0.0004337873251643032,0.017260679975152016,,
42
+ 0.00043059542076662183,0.0174389835447073,0.01734023727476597,20,1007,,,,
43
+ ,,,21,1007,0.0004411266709212214,0.017336012795567513,,
44
+ 0.0004372385737951845,0.017526034265756607,0.01734023727476597,21,1055,,,,
45
+ ,,,22,1055,0.0004449570260476321,0.017479514703154564,,
46
+ 0.00043040158925578,0.01740269735455513,0.01734023727476597,22,1103,,,,
47
+ ,,,23,1103,0.00041064867400564253,0.016978072002530098,,
48
+ 0.00045276310993358493,0.017816361039876938,0.01734023727476597,23,1151,,,,
49
+ ,,,24,1151,0.00041040507494471967,0.016982730478048325,,
50
+ 0.00043775344965979457,0.017562175169587135,0.01734023727476597,24,1199,,,,
51
+ ,,,25,1199,0.0004101074009668082,0.016966601833701134,,
52
+ 0.0004312329110689461,0.017464062198996544,0.01734023727476597,25,1247,,,,
53
+ ,,,26,1247,0.0004146143910475075,0.017000725492835045,,
54
+ 0.000507558521348983,0.018663497641682625,0.01734023727476597,26,1295,,,,
55
+ ,,,27,1295,0.00041099710506387055,0.017012037336826324,,
56
+ 0.00042953650699928403,0.017396090552210808,0.01734023727476597,27,1343,,,,
57
+ ,,,28,1343,0.000414402165915817,0.01700458489358425,,
58
+ 0.00043241449748165905,0.017450544983148575,0.01734023727476597,28,1391,,,,
59
+ ,,,29,1391,0.00041250884532928467,0.01699022576212883,,
60
+ 0.00043058846495114267,0.017410045489668846,0.01734023727476597,29,1439,,,,
61
+ ,,,30,1439,0.00040791145875118673,0.01693185232579708,,
62
+ 0.0004241449641995132,0.017345231026411057,0.01734023727476597,30,1487,,,,
63
+ ,,,31,1487,0.0004128912405576557,0.0170387364923954,,
64
+ 0.0004274945822544396,0.0173699501901865,0.01734023727476597,31,1535,,,,
65
+ ,,,32,1535,0.00040698234806768596,0.016919052228331566,,
66
+ 0.00044470306602306664,0.017698165029287338,0.01734023727476597,32,1583,,,,
67
+ ,,,33,1583,0.00040801247814670205,0.016948970034718513,,
68
+ 0.0004444575170055032,0.01766776293516159,0.01734023727476597,33,1631,,,,
69
+ ,,,34,1631,0.00041007788968272507,0.016948459669947624,,
70
+ 0.00043741369154304266,0.017553919926285744,0.01734023727476597,34,1679,,,,
71
+ ,,,35,1679,0.0004065565299242735,0.016886629164218903,,
72
+ 0.00042276314343325794,0.01731080189347267,0.01731080189347267,35,1727,,,,
73
+ ,,,36,1727,0.0004089506692253053,0.016967322677373886,,
74
+ 0.00043424853356555104,0.017529159784317017,0.01731080189347267,36,1775,,,,
75
+ ,,,37,1775,0.0004056563775520772,0.016903068870306015,,
76
+ 0.0004245753516443074,0.017352856695652008,0.01731080189347267,37,1823,,,,
77
+ ,,,38,1823,0.0004096078628208488,0.016981760039925575,,
78
+ 0.00043152179569005966,0.017460186034440994,0.01731080189347267,38,1871,,,,
79
+ ,,,39,1871,0.00040643400279805064,0.01691904477775097,,
80
+ 0.0004224393633194268,0.01730339601635933,0.01730339601635933,39,1919,,,,
81
+ ,,,40,1919,0.0004066245164722204,0.0169126745313406,,
82
+ 0.00042398893856443465,0.01735004596412182,0.01730339601635933,40,1967,,,,
83
+ ,,,41,1967,0.000403663027100265,0.01687893271446228,,
84
+ 0.0004223120922688395,0.017319753766059875,0.01730339601635933,41,2015,,,,
85
+ ,,,42,2015,0.00040697352960705757,0.016926294192671776,,
86
+ 0.00042261608177796006,0.01729428581893444,0.01729428581893444,42,2063,,,,
87
+ ,,,43,2063,0.0004045187379233539,0.016874046996235847,,
88
+ 0.0004221730923745781,0.017291538417339325,0.017291538417339325,43,2111,,,,
89
+ ,,,44,2111,0.0004033922450616956,0.016845960170030594,,
90
+ 0.0004268202173989266,0.017393913120031357,0.017291538417339325,44,2159,,,,
91
+ ,,,45,2159,0.0004057279147673398,0.016899917274713516,,
92
+ 0.0004218656104058027,0.01729201339185238,0.017291538417339325,45,2207,,,,
93
+ ,,,46,2207,0.00040545297088101506,0.016911698505282402,,
94
+ 0.00042119991849176586,0.0172905083745718,0.0172905083745718,46,2255,,,,
95
+ ,,,47,2255,0.0004090172878932208,0.016958855092525482,,
96
+ 0.00042206395301036537,0.017269952222704887,0.017269952222704887,47,2303,,,,
97
+ ,,,48,2303,0.0004088839632458985,0.016935525462031364,,
98
+ 0.00047497544437646866,0.018141519278287888,0.017269952222704887,48,2351,,,,
99
+ ,,,49,2351,0.00040828718920238316,0.01695297285914421,,
100
+ 0.00042153429239988327,0.017271660268306732,0.017269952222704887,49,2399,,,,
101
+ ,,,50,2399,0.0004064234090037644,0.01689944602549076,,
102
+ 0.0004246366734150797,0.017359109595417976,0.017269952222704887,50,2447,,,,
103
+ ,,,51,2447,0.0004040885833092034,0.016873857006430626,,
104
+ 0.0004236254026181996,0.017332734540104866,0.017269952222704887,51,2495,,,,
105
+ ,,,52,2495,0.00040933446143753827,0.0169473048299551,,
106
+ 0.00042052214848808944,0.017267471179366112,0.017267471179366112,52,2543,,,,
107
+ ,,,53,2543,0.00040482505573891103,0.016867199912667274,,
108
+ 0.0004211203195154667,0.017269957810640335,0.017267471179366112,53,2591,,,,
109
+ ,,,54,2591,0.00040544485091231763,0.016882458701729774,,
110
+ 0.0004216745146550238,0.01729307323694229,0.017267471179366112,54,2639,,,,
111
+ ,,,55,2639,0.0004069584247190505,0.016943389549851418,,
112
+ 0.0004252626677043736,0.017382489517331123,0.017267471179366112,55,2687,,,,
113
+ ,,,56,2687,0.0004056995967403054,0.016914399340748787,,
114
+ 0.0004203443822916597,0.017267588526010513,0.017267471179366112,56,2735,,,,
115
+ ,,,57,2735,0.0004066229157615453,0.016942813992500305,,
116
+ 0.0004203298594802618,0.01725783199071884,0.01725783199071884,57,2783,,,,
117
+ ,,,58,2783,0.0004078345082234591,0.01694861426949501,,
118
+ 0.0004239369882270694,0.01735486462712288,0.01725783199071884,58,2831,,,,
119
+ ,,,59,2831,0.0004067661939188838,0.016939524561166763,,
120
+ 0.0004247304459568113,0.01736706681549549,0.01725783199071884,59,2879,,,,
121
+ ,,,60,2879,0.0004077294433955103,0.01695873960852623,,
122
+ 0.0004206763405818492,0.01725643314421177,0.01725643314421177,60,2927,,,,
123
+ ,,,61,2927,0.00040598350460641086,0.016905728727579117,,
124
+ 0.00042073047370649874,0.017249858006834984,0.017249858006834984,61,2975,,,,
125
+ ,,,62,2975,0.00040633013122715056,0.016917090862989426,,
126
+ 0.0004259949491824955,0.017397195100784302,0.017249858006834984,62,3023,,,,
127
+ ,,,63,3023,0.00040659026126377285,0.016912169754505157,,
128
+ 0.00042176578426733613,0.017294948920607567,0.017249858006834984,63,3071,,,,
129
+ ,,,64,3071,0.00040871035889722407,0.016957785934209824,,
130
+ 0.00042948423651978374,0.017466848716139793,0.017249858006834984,64,3119,,,,
131
+ ,,,65,3119,0.00040619607898406684,0.016901874914765358,,
132
+ 0.0004205190925858915,0.017265569418668747,0.017249858006834984,65,3167,,,,
133
+ ,,,66,3167,0.00040628656279295683,0.016901008784770966,,
134
+ 0.00042266008676961064,0.017329422757029533,0.017249858006834984,66,3215,,,,
135
+ ,,,67,3215,0.0004058164486195892,0.016901446506381035,,
136
+ 0.00042031798511743546,0.017251981422305107,0.017249858006834984,67,3263,,,,
137
+ ,,,68,3263,0.0004056946199852973,0.016897695139050484,,
138
+ 0.00041988177690654993,0.017225734889507294,0.017225734889507294,68,3311,,,,
139
+ ,,,69,3311,0.00040594657184556127,0.016924357041716576,,
140
+ 0.0004222714342176914,0.01731383427977562,0.017225734889507294,69,3359,,,,
141
+ ,,,70,3359,0.00040589048876427114,0.016935881227254868,,
142
+ 0.0004217163659632206,0.017311586067080498,0.017225734889507294,70,3407,,,,
143
+ ,,,71,3407,0.00040619695209898055,0.01688009686768055,,
144
+ 0.0004228964971844107,0.017341069877147675,0.017225734889507294,71,3455,,,,
145
+ ,,,72,3455,0.00040669829468242824,0.016929443925619125,,
146
+ 0.00042393585317768157,0.01736735738813877,0.017225734889507294,72,3503,,,,
147
+ ,,,73,3503,0.0004054413875564933,0.016895873472094536,,
148
+ 0.0004205110017210245,0.017236987128853798,0.017225734889507294,73,3551,,,,
149
+ ,,,74,3551,0.00040918096783570945,0.016991764307022095,,
150
+ 0.00042254861909896135,0.01731589436531067,0.017225734889507294,74,3599,,,,
151
+ ,,,75,3599,0.0004054117016494274,0.01689889281988144,,
152
+ 0.00041971675818786025,0.0172366164624691,0.017225734889507294,75,3647,,,,
153
+ ,,,76,3647,0.000407597137382254,0.016920411959290504,,
154
+ 0.0004218820540700108,0.017297610640525818,0.017225734889507294,76,3695,,,,
155
+ ,,,77,3695,0.0004070137219969183,0.01692647486925125,,
156
+ 0.0004198694077786058,0.01723865419626236,0.017225734889507294,77,3743,,,,
157
+ ,,,78,3743,0.00040810651262290776,0.016920384019613266,,
158
+ 0.0004219312686473131,0.017285816371440887,0.017225734889507294,78,3791,,,,
159
+ ,,,79,3791,0.00040583129157312214,0.016902318224310875,,
160
+ 0.00042128973291255534,0.017268585041165352,0.017225734889507294,79,3839,,,,
161
+ ,,,80,3839,0.00040777921094559133,0.01694045588374138,,
162
+ 0.00042737109470181167,0.017440415918827057,0.017225734889507294,80,3887,,,,
163
+ ,,,81,3887,0.00040593036101199687,0.01688615418970585,,
164
+ 0.0004200975818093866,0.017234425991773605,0.017225734889507294,81,3935,,,,
165
+ ,,,82,3935,0.0004073919262737036,0.016920192167162895,,
166
+ 0.0004274568345863372,0.017441829666495323,0.017225734889507294,82,3983,,,,
167
+ ,,,83,3983,0.00040686727152206004,0.016899390146136284,,
168
+ 0.00042046577436849475,0.017226122319698334,0.017225734889507294,83,4031,,,,
169
+ ,,,84,4031,0.00040584601811133325,0.016897406429052353,,
170
+ 0.0004206631565466523,0.017240891233086586,0.017225734889507294,84,4079,,,,
171
+ ,,,85,4079,0.00040555562009103596,0.016894273459911346,,
172
+ 0.00042056647362187505,0.01722583919763565,0.017225734889507294,85,4127,,,,
173
+ ,,,86,4127,0.00040819463902153075,0.01693749986588955,,
174
+ 0.00042367636342532933,0.017313098534941673,0.017225734889507294,86,4175,,,,
175
+ ,,,87,4175,0.0004077526682522148,0.016914527863264084,,
176
+ 0.00042057037353515625,0.01724100112915039,0.017225734889507294,87,4223,,,,
177
+ ,,,88,4223,0.0004064103413838893,0.01687958650290966,,
178
+ 0.00042142454185523093,0.017310377210378647,0.017225734889507294,88,4271,,,,
179
+ ,,,89,4271,0.0004058389167767018,0.016901619732379913,,
180
+ 0.00042020328692160547,0.01723584719002247,0.017225734889507294,89,4319,,,,
181
+ ,,,90,4319,0.00040814539534039795,0.01695222221314907,,
182
+ 0.00042191610555164516,0.017275961115956306,0.017225734889507294,90,4367,,,,
183
+ ,,,91,4367,0.0004060538485646248,0.016901034861803055,,
184
+ 0.00043441352318041027,0.017541974782943726,0.017225734889507294,91,4415,,,,
185
+ ,,,92,4415,0.00040564066148363054,0.016894837841391563,,
186
+ 0.0004196066875010729,0.017246531322598457,0.017225734889507294,92,4463,,,,
187
+ ,,,93,4463,0.00040611534495837986,0.016885772347450256,,
188
+ 0.0004206717130728066,0.017277369275689125,0.017225734889507294,93,4511,,,,
189
+ ,,,94,4511,0.0004060569335706532,0.01689089462161064,,
190
+ 0.00042048952309414744,0.017247222363948822,0.017225734889507294,94,4559,,,,
191
+ ,,,95,4559,0.0004050368152093142,0.016897881403565407,,
192
+ 0.00042095466051250696,0.01726377196609974,0.017225734889507294,95,4607,,,,
193
+ ,,,96,4607,0.0004073544405400753,0.01691468246281147,,
194
+ 0.0004210512270219624,0.017280427739024162,0.017225734889507294,96,4655,,,,
195
+ ,,,97,4655,0.0004060851060785353,0.01690417155623436,,
196
+ 0.00042081403080374,0.017260488122701645,0.017225734889507294,97,4703,,,,
197
+ ,,,98,4703,0.0004056034376844764,0.016891250386834145,,
198
+ 0.00042369129369035363,0.017357047647237778,0.017225734889507294,98,4751,,,,
199
+ ,,,99,4751,0.000405541475629434,0.016889140009880066,,
200
+ 0.00042156659765169024,0.017308050766587257,0.017225734889507294,99,4799,,,,
201
+ ,,,100,4799,0.00040594732854515314,0.0168966893106699,,
202
+ ,,,68,4800,,,0.00041280631558038294,0.01698911003768444
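The CSV logger writes one row per validation epoch (the `val/*` columns filled, `train/*` empty), one per training epoch, and a final test row, which is why most cells are blank. A small sketch for pulling the best validation MAE out of such a file is below, assuming pandas is installed; for this trial the minimum `val/mae` falls at epoch 68, matching the `epoch_068.ckpt` checkpoint saved above.

```python
# Sketch, assuming pandas: find the best validation epoch and the test result
# in a metrics.csv written by the CSVLogger configuration above.
import pandas as pd

path = (
    "logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/"
    "2022-05-11_07-59-42/0/csv/version_0/metrics.csv"
)
df = pd.read_csv(path)

val = df.dropna(subset=["val/mae"])
best = val.loc[val["val/mae"].idxmin()]
print(f"best val/mae = {best['val/mae']:.6f} at epoch {int(best['epoch'])}")

test = df.dropna(subset=["test/mae"])
print(test[["epoch", "test/loss", "test/mae"]])
```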
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/0/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652255986.908803db74d8.1.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fb14c8ad73f6ecb4486a874bb4a414ac214a6f2a94b053ddf21f41c7106e6aea
+ size 36796
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/0/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652256202.908803db74d8.1.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c13a9eead19d86aac7cb5cc29f9a02b9000a3cf673e406882ef71a06c3a87d76
+ size 179
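The TensorBoard event files are likewise LFS pointers; once fetched, the scalars they contain can be browsed with `tensorboard --logdir` or read programmatically. The sketch below uses the `EventAccumulator` class shipped with the tensorboard package; the `val/mae` tag name is taken from the metrics logged by these runs and may need adjusting if the TensorBoard tags differ.

```python
# Sketch: read the logged scalars from the run-0 TensorBoard event files
# (after `git lfs pull`). EventAccumulator ships with the tensorboard package.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

run_dir = (
    "logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/"
    "2022-05-11_07-59-42/0/tensorboard/focusConvMSE_150_hyperparameter_search"
)
acc = EventAccumulator(run_dir)
acc.Reload()

print(acc.Tags()["scalars"])          # list of available scalar tags
for event in acc.Scalars("val/mae"):  # step/value pairs for one tag
    print(event.step, event.value)
```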
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/0/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml ADDED
@@ -0,0 +1,54 @@
1
+ trainer:
2
+ _target_: pytorch_lightning.Trainer
3
+ gpus: 1
4
+ min_epochs: 1
5
+ max_epochs: 100
6
+ resume_from_checkpoint: null
7
+ model:
8
+ _target_: src.models.focus_conv_module.FocusConvLitModule
9
+ image_size: 150
10
+ pool_size: 2
11
+ conv1_size: 7
12
+ conv1_channels: 3
13
+ conv2_size: 7
14
+ conv2_channels: 16
15
+ lin1_size: 128
16
+ lin2_size: 32
17
+ output_size: 1
18
+ lr: 0.0019207962356032353
19
+ weight_decay: 0.0005
20
+ datamodule:
21
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
22
+ data_dir: /usr/src/app/data/focus150
23
+ csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
24
+ csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
25
+ csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
26
+ batch_size: 64
27
+ num_workers: 0
28
+ pin_memory: false
29
+ seed: null
30
+ callbacks:
31
+ model_checkpoint:
32
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
33
+ monitor: val/mae
34
+ mode: min
35
+ save_top_k: 1
36
+ save_last: true
37
+ verbose: false
38
+ dirpath: checkpoints/
39
+ filename: epoch_{epoch:03d}
40
+ auto_insert_metric_name: false
41
+ early_stopping:
42
+ _target_: pytorch_lightning.callbacks.EarlyStopping
43
+ monitor: val/mae
44
+ mode: min
45
+ patience: 100
46
+ min_delta: 0
47
+ model_summary:
48
+ _target_: pytorch_lightning.callbacks.RichModelSummary
49
+ max_depth: -1
50
+ rich_progress_bar:
51
+ _target_: pytorch_lightning.callbacks.RichProgressBar
52
+ model/params/total: 2513981
53
+ model/params/trainable: 2513981
54
+ model/params/non_trainable: 0
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/1/.hydra/config.yaml ADDED
@@ -0,0 +1,81 @@
+ original_work_dir: ${hydra:runtime.cwd}
+ data_dir: ${original_work_dir}/data
+ print_config: true
+ ignore_warnings: true
+ train: true
+ test: true
+ seed: null
+ name: focusConvMSE_150_hyperparameter_search
+ datamodule:
+   _target_: src.datamodules.focus_datamodule.FocusDataModule
+   data_dir: ${data_dir}/focus150
+   csv_train_file: ${data_dir}/focus150/train_metadata.csv
+   csv_val_file: ${data_dir}/focus150/validation_metadata.csv
+   csv_test_file: ${data_dir}/focus150/test_metadata.csv
+   batch_size: 64
+   num_workers: 0
+   pin_memory: false
+ model:
+   _target_: src.models.focus_conv_module.FocusConvLitModule
+   image_size: 150
+   pool_size: 2
+   conv1_size: 7
+   conv1_channels: 6
+   conv2_size: 7
+   conv2_channels: 16
+   lin1_size: 128
+   lin2_size: 72
+   output_size: 1
+   lr: 0.0033232883076398404
+   weight_decay: 0.0005
+ callbacks:
+   model_checkpoint:
+     _target_: pytorch_lightning.callbacks.ModelCheckpoint
+     monitor: val/mae
+     mode: min
+     save_top_k: 1
+     save_last: true
+     verbose: false
+     dirpath: checkpoints/
+     filename: epoch_{epoch:03d}
+     auto_insert_metric_name: false
+   early_stopping:
+     _target_: pytorch_lightning.callbacks.EarlyStopping
+     monitor: val/mae
+     mode: min
+     patience: 100
+     min_delta: 0
+   model_summary:
+     _target_: pytorch_lightning.callbacks.RichModelSummary
+     max_depth: -1
+   rich_progress_bar:
+     _target_: pytorch_lightning.callbacks.RichProgressBar
+ logger:
+   csv:
+     _target_: pytorch_lightning.loggers.csv_logs.CSVLogger
+     save_dir: .
+     name: csv/
+     prefix: ''
+   mlflow:
+     _target_: pytorch_lightning.loggers.mlflow.MLFlowLogger
+     experiment_name: ${name}
+     tracking_uri: ${original_work_dir}/logs/mlflow/mlruns
+     tags: null
+     save_dir: ./mlruns
+     prefix: ''
+     artifact_location: null
+   tensorboard:
+     _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
+     save_dir: tensorboard/
+     name: null
+     version: ${name}
+     log_graph: false
+     default_hp_metric: true
+     prefix: ''
+ trainer:
+   _target_: pytorch_lightning.Trainer
+   gpus: 1
+   min_epochs: 1
+   max_epochs: 100
+   resume_from_checkpoint: null
+ optimized_metric: val/mae_best
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/1/.hydra/hydra.yaml ADDED
@@ -0,0 +1,240 @@
1
+ hydra:
2
+ run:
3
+ dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
4
+ sweep:
5
+ dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
6
+ subdir: ${hydra.job.num}
7
+ launcher:
8
+ _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
9
+ sweeper:
10
+ sampler:
11
+ _target_: optuna.samplers.TPESampler
12
+ seed: 12345
13
+ consider_prior: true
14
+ prior_weight: 1.0
15
+ consider_magic_clip: true
16
+ consider_endpoints: false
17
+ n_startup_trials: 10
18
+ n_ei_candidates: 24
19
+ multivariate: false
20
+ warn_independent_sampling: true
21
+ _target_: hydra_plugins.hydra_optuna_sweeper.optuna_sweeper.OptunaSweeper
22
+ direction: minimize
23
+ storage: null
24
+ study_name: focusConvMSE_150_hyperparameter_search
25
+ n_trials: 20
26
+ n_jobs: 1
27
+ search_space:
28
+ datamodule.batch_size:
29
+ type: categorical
30
+ choices:
31
+ - 64
32
+ - 128
33
+ model.lr:
34
+ type: float
35
+ low: 0.0001
36
+ high: 0.01
37
+ model.conv1_size:
38
+ type: categorical
39
+ choices:
40
+ - 3
41
+ - 5
42
+ - 7
43
+ model.conv1_channels:
44
+ type: categorical
45
+ choices:
46
+ - 3
47
+ - 6
48
+ - 9
49
+ model.conv2_size:
50
+ type: categorical
51
+ choices:
52
+ - 3
53
+ - 5
54
+ - 7
55
+ model.conv2_channels:
56
+ type: categorical
57
+ choices:
58
+ - 6
59
+ - 11
60
+ - 16
61
+ model.lin1_size:
62
+ type: categorical
63
+ choices:
64
+ - 32
65
+ - 72
66
+ - 128
67
+ model.lin2_size:
68
+ type: categorical
69
+ choices:
70
+ - 32
71
+ - 72
72
+ - 128
73
+ help:
74
+ app_name: ${hydra.job.name}
75
+ header: '${hydra.help.app_name} is powered by Hydra.
76
+
77
+ '
78
+ footer: 'Powered by Hydra (https://hydra.cc)
79
+
80
+ Use --hydra-help to view Hydra specific help
81
+
82
+ '
83
+ template: '${hydra.help.header}
84
+
85
+ == Configuration groups ==
86
+
87
+ Compose your configuration from those groups (group=option)
88
+
89
+
90
+ $APP_CONFIG_GROUPS
91
+
92
+
93
+ == Config ==
94
+
95
+ Override anything in the config (foo.bar=value)
96
+
97
+
98
+ $CONFIG
99
+
100
+
101
+ ${hydra.help.footer}
102
+
103
+ '
104
+ hydra_help:
105
+ template: 'Hydra (${hydra.runtime.version})
106
+
107
+ See https://hydra.cc for more info.
108
+
109
+
110
+ == Flags ==
111
+
112
+ $FLAGS_HELP
113
+
114
+
115
+ == Configuration groups ==
116
+
117
+ Compose your configuration from those groups (For example, append hydra/job_logging=disabled
118
+ to command line)
119
+
120
+
121
+ $HYDRA_CONFIG_GROUPS
122
+
123
+
124
+ Use ''--cfg hydra'' to Show the Hydra config.
125
+
126
+ '
127
+ hydra_help: ???
128
+ hydra_logging:
129
+ version: 1
130
+ formatters:
131
+ colorlog:
132
+ (): colorlog.ColoredFormatter
133
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
134
+ handlers:
135
+ console:
136
+ class: logging.StreamHandler
137
+ formatter: colorlog
138
+ stream: ext://sys.stdout
139
+ root:
140
+ level: INFO
141
+ handlers:
142
+ - console
143
+ disable_existing_loggers: false
144
+ job_logging:
145
+ version: 1
146
+ formatters:
147
+ simple:
148
+ format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
149
+ colorlog:
150
+ (): colorlog.ColoredFormatter
151
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
152
+ - %(message)s'
153
+ log_colors:
154
+ DEBUG: purple
155
+ INFO: green
156
+ WARNING: yellow
157
+ ERROR: red
158
+ CRITICAL: red
159
+ handlers:
160
+ console:
161
+ class: logging.StreamHandler
162
+ formatter: colorlog
163
+ stream: ext://sys.stdout
164
+ file:
165
+ class: logging.FileHandler
166
+ formatter: simple
167
+ filename: ${hydra.job.name}.log
168
+ root:
169
+ level: INFO
170
+ handlers:
171
+ - console
172
+ - file
173
+ disable_existing_loggers: false
174
+ env: {}
175
+ searchpath: []
176
+ callbacks: {}
177
+ output_subdir: .hydra
178
+ overrides:
179
+ hydra: []
180
+ task:
181
+ - datamodule.batch_size=64
182
+ - model.lr=0.0033232883076398404
183
+ - model.conv1_size=7
184
+ - model.conv1_channels=6
185
+ - model.conv2_size=7
186
+ - model.conv2_channels=16
187
+ - model.lin1_size=128
188
+ - model.lin2_size=72
189
+ - hparams_search=focusConvMSE_150.yaml
190
+ job:
191
+ name: train
192
+ override_dirname: datamodule.batch_size=64,hparams_search=focusConvMSE_150.yaml,model.conv1_channels=6,model.conv1_size=7,model.conv2_channels=16,model.conv2_size=7,model.lin1_size=128,model.lin2_size=72,model.lr=0.0033232883076398404
193
+ id: '1'
194
+ num: 1
195
+ config_name: train.yaml
196
+ env_set: {}
197
+ env_copy: []
198
+ config:
199
+ override_dirname:
200
+ kv_sep: '='
201
+ item_sep: ','
202
+ exclude_keys: []
203
+ runtime:
204
+ version: 1.1.2
205
+ cwd: /usr/src/app
206
+ config_sources:
207
+ - path: hydra.conf
208
+ schema: pkg
209
+ provider: hydra
210
+ - path: /usr/src/app/configs
211
+ schema: file
212
+ provider: main
213
+ - path: hydra_plugins.hydra_colorlog.conf
214
+ schema: pkg
215
+ provider: hydra-colorlog
216
+ - path: ''
217
+ schema: structured
218
+ provider: schema
219
+ choices:
220
+ local: default.yaml
221
+ hparams_search: focusConvMSE_150.yaml
222
+ debug: null
223
+ experiment: null
224
+ log_dir: default.yaml
225
+ trainer: long.yaml
226
+ logger: many_loggers
227
+ callbacks: default.yaml
228
+ model: focusConv_150.yaml
229
+ datamodule: focus150.yaml
230
+ hydra/env: default
231
+ hydra/callbacks: null
232
+ hydra/job_logging: colorlog
233
+ hydra/hydra_logging: colorlog
234
+ hydra/hydra_help: default
235
+ hydra/help: default
236
+ hydra/sweeper: optuna
237
+ hydra/sweeper/sampler: tpe
238
+ hydra/launcher: basic
239
+ hydra/output: default
240
+ verbose: false
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/1/.hydra/overrides.yaml ADDED
@@ -0,0 +1,9 @@
+ - datamodule.batch_size=64
+ - model.lr=0.0033232883076398404
+ - model.conv1_size=7
+ - model.conv1_channels=6
+ - model.conv2_size=7
+ - model.conv2_channels=16
+ - model.lin1_size=128
+ - model.lin2_size=72
+ - hparams_search=focusConvMSE_150.yaml
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/1/checkpoints/epoch_089.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:879a86fed42784323646afd678f82953a2896d092381a63ad218cb45902f2926
+ size 26942533
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/1/checkpoints/last.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:148801bf725a4abee87658dc1a7c88de18371b3fddbe35ef845866dbee54b7c9
+ size 26942533
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/1/csv/version_0/hparams.yaml ADDED
@@ -0,0 +1,54 @@
1
+ trainer:
2
+ _target_: pytorch_lightning.Trainer
3
+ gpus: 1
4
+ min_epochs: 1
5
+ max_epochs: 100
6
+ resume_from_checkpoint: null
7
+ model:
8
+ _target_: src.models.focus_conv_module.FocusConvLitModule
9
+ image_size: 150
10
+ pool_size: 2
11
+ conv1_size: 7
12
+ conv1_channels: 6
13
+ conv2_size: 7
14
+ conv2_channels: 16
15
+ lin1_size: 128
16
+ lin2_size: 72
17
+ output_size: 1
18
+ lr: 0.0033232883076398404
19
+ weight_decay: 0.0005
20
+ datamodule:
21
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
22
+ data_dir: /usr/src/app/data/focus150
23
+ csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
24
+ csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
25
+ csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
26
+ batch_size: 64
27
+ num_workers: 0
28
+ pin_memory: false
29
+ seed: null
30
+ callbacks:
31
+ model_checkpoint:
32
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
33
+ monitor: val/mae
34
+ mode: min
35
+ save_top_k: 1
36
+ save_last: true
37
+ verbose: false
38
+ dirpath: checkpoints/
39
+ filename: epoch_{epoch:03d}
40
+ auto_insert_metric_name: false
41
+ early_stopping:
42
+ _target_: pytorch_lightning.callbacks.EarlyStopping
43
+ monitor: val/mae
44
+ mode: min
45
+ patience: 100
46
+ min_delta: 0
47
+ model_summary:
48
+ _target_: pytorch_lightning.callbacks.RichModelSummary
49
+ max_depth: -1
50
+ rich_progress_bar:
51
+ _target_: pytorch_lightning.callbacks.RichProgressBar
52
+ model/params/total: 2244121
53
+ model/params/trainable: 2244121
54
+ model/params/non_trainable: 0
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/1/csv/version_0/metrics.csv ADDED
@@ -0,0 +1,202 @@
1
+ val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae
2
+ 0.0004535996704362333,0.01774800941348076,0.01774800941348076,0,47,,,,
3
+ ,,,1,47,0.33452099561691284,0.1063535213470459,,
4
+ 0.00042222210322506726,0.017232147976756096,0.017232147976756096,1,95,,,,
5
+ ,,,2,95,0.00065139977959916,0.020092403516173363,,
6
+ 0.0005625737248919904,0.01948234997689724,0.017232147976756096,2,143,,,,
7
+ ,,,3,143,0.00042977032717317343,0.01732071302831173,,
8
+ 0.0005026570870541036,0.01848946511745453,0.017232147976756096,3,191,,,,
9
+ ,,,4,191,0.000453472399385646,0.017680538818240166,,
10
+ 0.000548032287042588,0.019156236201524734,0.017232147976756096,4,239,,,,
11
+ ,,,5,239,0.0004323211614973843,0.017283977940678596,,
12
+ 0.00045324687380343676,0.017743004485964775,0.017232147976756096,5,287,,,,
13
+ ,,,6,287,0.00044829703983850777,0.01752995140850544,,
14
+ 0.0004613034543581307,0.01779683120548725,0.017232147976756096,6,335,,,,
15
+ ,,,7,335,0.0004360831808298826,0.017405197024345398,,
16
+ 0.00044732048991136253,0.017683668062090874,0.017232147976756096,7,383,,,,
17
+ ,,,8,383,0.0004331246018409729,0.01730060763657093,,
18
+ 0.000431973923696205,0.017511965706944466,0.017232147976756096,8,431,,,,
19
+ ,,,9,431,0.0004397957236506045,0.017440319061279297,,
20
+ 0.00044244949822314084,0.017636723816394806,0.017232147976756096,9,479,,,,
21
+ ,,,10,479,0.00042200731695629656,0.017144231125712395,,
22
+ 0.00042235947330482304,0.017297761514782906,0.017232147976756096,10,527,,,,
23
+ ,,,11,527,0.0004329081275500357,0.017299866303801537,,
24
+ 0.0004629742179531604,0.017819033935666084,0.017232147976756096,11,575,,,,
25
+ ,,,12,575,0.0004484818200580776,0.017580410465598106,,
26
+ 0.000428528874181211,0.01741543971002102,0.017232147976756096,12,623,,,,
27
+ ,,,13,623,0.0004227207391522825,0.01709883287549019,,
28
+ 0.0004217610112391412,0.017198406159877777,0.017198406159877777,13,671,,,,
29
+ ,,,14,671,0.00042910518823191524,0.01725650019943714,,
30
+ 0.00044611742487177253,0.017651554197072983,0.017198406159877777,14,719,,,,
31
+ ,,,15,719,0.000429434614488855,0.017244217917323112,,
32
+ 0.0004570444580167532,0.017795482650399208,0.017198406159877777,15,767,,,,
33
+ ,,,16,767,0.0004223276919219643,0.017129184678196907,,
34
+ 0.0004216870293021202,0.01725301891565323,0.017198406159877777,16,815,,,,
35
+ ,,,17,815,0.00041952644824050367,0.01709076203405857,,
36
+ 0.00042376937926746905,0.01729869656264782,0.017198406159877777,17,863,,,,
37
+ ,,,18,863,0.0004137688665650785,0.017030980437994003,,
38
+ 0.0004281677829567343,0.017451129853725433,0.017198406159877777,18,911,,,,
39
+ ,,,19,911,0.00041286065243184566,0.016985513269901276,,
40
+ 0.00042160568409599364,0.017180636525154114,0.017180636525154114,19,959,,,,
41
+ ,,,20,959,0.0004134568152949214,0.017002396285533905,,
42
+ 0.0004333507386036217,0.0175311416387558,0.017180636525154114,20,1007,,,,
43
+ ,,,21,1007,0.00041579484241083264,0.017039744183421135,,
44
+ 0.0004217829555273056,0.017200469970703125,0.017180636525154114,21,1055,,,,
45
+ ,,,22,1055,0.0004110188747290522,0.01696665957570076,,
46
+ 0.00042224262142553926,0.017233354970812798,0.017180636525154114,22,1103,,,,
47
+ ,,,23,1103,0.00041112463804893196,0.01697538048028946,,
48
+ 0.00042250638944096863,0.017304757609963417,0.017180636525154114,23,1151,,,,
49
+ ,,,24,1151,0.0004119329678360373,0.016991790384054184,,
50
+ 0.00042761690565384924,0.017397604882717133,0.017180636525154114,24,1199,,,,
51
+ ,,,25,1199,0.00040832700324244797,0.016920698806643486,,
52
+ 0.0004228346806485206,0.01731877774000168,0.017180636525154114,25,1247,,,,
53
+ ,,,26,1247,0.0004104417166672647,0.016962867230176926,,
54
+ 0.0004228335164953023,0.01726297289133072,0.017180636525154114,26,1295,,,,
55
+ ,,,27,1295,0.0004095648182556033,0.016948098316788673,,
56
+ 0.00042251948616467416,0.01730535924434662,0.017180636525154114,27,1343,,,,
57
+ ,,,28,1343,0.0004080926883034408,0.016906877979636192,,
58
+ 0.0004264191957190633,0.01737121120095253,0.017180636525154114,28,1391,,,,
59
+ ,,,29,1391,0.0004094658652320504,0.01692807674407959,,
60
+ 0.00042779635987244546,0.01744498685002327,0.017180636525154114,29,1439,,,,
61
+ ,,,30,1439,0.00040991013520397246,0.016950607299804688,,
62
+ 0.00042672595009207726,0.0174236036837101,0.017180636525154114,30,1487,,,,
63
+ ,,,31,1487,0.0004106027481611818,0.016968974843621254,,
64
+ 0.0004225205339025706,0.01730540581047535,0.017180636525154114,31,1535,,,,
65
+ ,,,32,1535,0.0004131190071348101,0.017010381445288658,,
66
+ 0.0006186373648233712,0.020344937220215797,0.017180636525154114,32,1583,,,,
67
+ ,,,33,1583,0.001111123594455421,0.020980652421712875,,
68
+ 0.0008090639603324234,0.023039238527417183,0.017180636525154114,33,1631,,,,
69
+ ,,,34,1631,0.0004966562264598906,0.01824203133583069,,
70
+ 0.00042163225589320064,0.01718423329293728,0.017180636525154114,34,1679,,,,
71
+ ,,,35,1679,0.0004594184865709394,0.017710773274302483,,
72
+ 0.0004214858345221728,0.017219513654708862,0.017180636525154114,35,1727,,,,
73
+ ,,,36,1727,0.00042041370761580765,0.017054403200745583,,
74
+ 0.0004276394029147923,0.01739773154258728,0.017180636525154114,36,1775,,,,
75
+ ,,,37,1775,0.00042001644033007324,0.01718241535127163,,
76
+ 0.0004230390477459878,0.017326653003692627,0.017180636525154114,37,1823,,,,
77
+ ,,,38,1823,0.00042714248411357403,0.017278071492910385,,
78
+ 0.0004257058026269078,0.01735416240990162,0.017180636525154114,38,1871,,,,
79
+ ,,,39,1871,0.0004168522427789867,0.017084956169128418,,
80
+ 0.0004345644556451589,0.017513850703835487,0.017180636525154114,39,1919,,,,
81
+ ,,,40,1919,0.0004130973247811198,0.01702546328306198,,
82
+ 0.0004291134828235954,0.017426474019885063,0.017180636525154114,40,1967,,,,
83
+ ,,,41,1967,0.0004107497807126492,0.016967182978987694,,
84
+ 0.0004217565874569118,0.017259564250707626,0.017180636525154114,41,2015,,,,
85
+ ,,,42,2015,0.00040991531568579376,0.01693686842918396,,
86
+ 0.0004282454610802233,0.017409926280379295,0.017180636525154114,42,2063,,,,
87
+ ,,,43,2063,0.0004086632688995451,0.01691603660583496,,
88
+ 0.0004224505100864917,0.017244771122932434,0.017180636525154114,43,2111,,,,
89
+ ,,,44,2111,0.0004086371627636254,0.01692723110318184,,
90
+ 0.0004217490495648235,0.017258893698453903,0.017180636525154114,44,2159,,,,
91
+ ,,,45,2159,0.00040833826642483473,0.01689218543469906,,
92
+ 0.00042307612602598965,0.01732802763581276,0.017180636525154114,45,2207,,,,
93
+ ,,,46,2207,0.00040763855213299394,0.016885746270418167,,
94
+ 0.0004225426819175482,0.017249437049031258,0.017180636525154114,46,2255,,,,
95
+ ,,,47,2255,0.00040763034485280514,0.01688656024634838,,
96
+ 0.0004214778309687972,0.01720612309873104,0.017180636525154114,47,2303,,,,
97
+ ,,,48,2303,0.0004078578785993159,0.01689174957573414,,
98
+ 0.00042148647480644286,0.01720060594379902,0.017180636525154114,48,2351,,,,
99
+ ,,,49,2351,0.0004092213639523834,0.0169429499655962,,
100
+ 0.000423734774813056,0.01735018938779831,0.017180636525154114,49,2399,,,,
101
+ ,,,50,2399,0.00040971703128889203,0.01693892292678356,,
102
+ 0.0004235743544995785,0.0173451267182827,0.017180636525154114,50,2447,,,,
103
+ ,,,51,2447,0.0004115517367608845,0.016954993829131126,,
104
+ 0.0004217598179820925,0.01725984923541546,0.017180636525154114,51,2495,,,,
105
+ ,,,52,2495,0.00040880037704482675,0.016893478110432625,,
106
+ 0.00042174436384812,0.017258474603295326,0.017180636525154114,52,2543,,,,
107
+ ,,,53,2543,0.00040773171349428594,0.01689985580742359,,
108
+ 0.00042467721505090594,0.017376849427819252,0.017180636525154114,53,2591,,,,
109
+ ,,,54,2591,0.00040842589805833995,0.01691640354692936,,
110
+ 0.0004238631809130311,0.017301833257079124,0.017180636525154114,54,2639,,,,
111
+ ,,,55,2639,0.00041089410660788417,0.016986478120088577,,
112
+ 0.00042274288716726005,0.01725892536342144,0.017180636525154114,55,2687,,,,
113
+ ,,,56,2687,0.00040888373041525483,0.0169377401471138,,
114
+ 0.00043978114263154566,0.01760867051780224,0.017180636525154114,56,2735,,,,
115
+ ,,,57,2735,0.0004124894621782005,0.017002500593662262,,
116
+ 0.00042177989962510765,0.01720019057393074,0.017180636525154114,57,2783,,,,
117
+ ,,,58,2783,0.00041114454506896436,0.016989201307296753,,
118
+ 0.0004230675403960049,0.017272833734750748,0.017180636525154114,58,2831,,,,
119
+ ,,,59,2831,0.000407365063438192,0.016907857730984688,,
120
+ 0.0004214845539536327,0.017201505601406097,0.017180636525154114,59,2879,,,,
121
+ ,,,60,2879,0.0004080318030901253,0.01691310480237007,,
122
+ 0.0004241798596922308,0.017311986535787582,0.017180636525154114,60,2927,,,,
123
+ ,,,61,2927,0.00040867255302146077,0.01690124347805977,,
124
+ 0.0004215940134599805,0.01724223420023918,0.017180636525154114,61,2975,,,,
125
+ ,,,62,2975,0.00040876769344322383,0.016911890357732773,,
126
+ 0.00042177378782071173,0.017199620604515076,0.017180636525154114,62,3023,,,,
127
+ ,,,63,3023,0.000407680228818208,0.01688404381275177,,
128
+ 0.00042148909415118396,0.017199493944644928,0.017180636525154114,63,3071,,,,
129
+ ,,,64,3071,0.0004076901532243937,0.016904275864362717,,
130
+ 0.0004215237277094275,0.01718984916806221,0.017180636525154114,64,3119,,,,
131
+ ,,,65,3119,0.00040813785744830966,0.016921481117606163,,
132
+ 0.00042179186129942536,0.017201285809278488,0.017180636525154114,65,3167,,,,
133
+ ,,,66,3167,0.00040804094169288874,0.01689228229224682,,
134
+ 0.00042319647036492825,0.017332376912236214,0.017180636525154114,66,3215,,,,
135
+ ,,,67,3215,0.0004103138926438987,0.016963262110948563,,
136
+ 0.00042742540244944394,0.017393289133906364,0.017180636525154114,67,3263,,,,
137
+ ,,,68,3263,0.00040886428905650973,0.01695343293249607,,
138
+ 0.0004219135153107345,0.017271826043725014,0.017180636525154114,68,3311,,,,
139
+ ,,,69,3311,0.0004091692971996963,0.01694905199110508,,
140
+ 0.00042152637615799904,0.01718928851187229,0.017180636525154114,69,3359,,,,
141
+ ,,,70,3359,0.00041124277049675584,0.01698155514895916,,
142
+ 0.0004270290082786232,0.01742967963218689,0.017180636525154114,70,3407,,,,
143
+ ,,,71,3407,0.0004093978204764426,0.016944659873843193,,
144
+ 0.00042151135858148336,0.017192671075463295,0.017180636525154114,71,3455,,,,
145
+ ,,,72,3455,0.00040966301457956433,0.016939664259552956,,
146
+ 0.000421613862272352,0.01718178577721119,0.017180636525154114,72,3503,,,,
147
+ ,,,73,3503,0.00040795584209263325,0.0168988686054945,,
148
+ 0.00042147625936195254,0.01721218414604664,0.017180636525154114,73,3551,,,,
149
+ ,,,74,3551,0.00040833823732100427,0.016881342977285385,,
150
+ 0.00042156223207712173,0.017237588763237,0.017180636525154114,74,3599,,,,
151
+ ,,,75,3599,0.00040975536103360355,0.016945144161581993,,
152
+ 0.0004225510638207197,0.01730678789317608,0.017180636525154114,75,3647,,,,
153
+ ,,,76,3647,0.0004079305217601359,0.01691216602921486,,
154
+ 0.00042534625390544534,0.017393436282873154,0.017180636525154114,76,3695,,,,
155
+ ,,,77,3695,0.0004077882331330329,0.016901129856705666,,
156
+ 0.0004248854238539934,0.017332661896944046,0.017180636525154114,77,3743,,,,
157
+ ,,,78,3743,0.0004088019486516714,0.016935009509325027,,
158
+ 0.0004235015658196062,0.017342764884233475,0.017180636525154114,78,3791,,,,
159
+ ,,,79,3791,0.000409911124734208,0.01693202182650566,,
160
+ 0.00042179637239314616,0.01726294495165348,0.017180636525154114,79,3839,,,,
161
+ ,,,80,3839,0.00040942468331195414,0.016959629952907562,,
162
+ 0.00043867831118404865,0.017568040639162064,0.017180636525154114,80,3887,,,,
163
+ ,,,81,3887,0.00043616106268018484,0.017389630898833275,,
164
+ 0.0004248438053764403,0.017381129786372185,0.017180636525154114,81,3935,,,,
165
+ ,,,82,3935,0.00041584958671592176,0.017071561887860298,,
166
+ 0.0004217216046527028,0.01725638285279274,0.017180636525154114,82,3983,,,,
167
+ ,,,83,3983,0.0004083994426764548,0.016924062743782997,,
168
+ 0.0004268444317858666,0.017380790784955025,0.017180636525154114,83,4031,,,,
169
+ ,,,84,4031,0.0004073888121638447,0.01690981164574623,,
170
+ 0.00042664416832849383,0.017421938478946686,0.017180636525154114,84,4079,,,,
171
+ ,,,85,4079,0.0004102005623281002,0.016968559473752975,,
172
+ 0.0004215119988657534,0.01722791977226734,0.017180636525154114,85,4127,,,,
173
+ ,,,86,4127,0.0004094345204066485,0.016923023387789726,,
174
+ 0.0004214770451653749,0.017213383689522743,0.017180636525154114,86,4175,,,,
175
+ ,,,87,4175,0.0004084872198291123,0.01689877174794674,,
176
+ 0.0004218939575366676,0.017270436510443687,0.017180636525154114,87,4223,,,,
177
+ ,,,88,4223,0.0004081521765328944,0.016891254112124443,,
178
+ 0.0004217997193336487,0.017263222485780716,0.017180636525154114,88,4271,,,,
179
+ ,,,89,4271,0.00040932235424406826,0.01695392094552517,,
180
+ 0.0004215977096464485,0.01717948541045189,0.01717948541045189,89,4319,,,,
181
+ ,,,90,4319,0.00040721826371736825,0.016875335946679115,,
182
+ 0.0004228277539368719,0.017318502068519592,0.01717948541045189,90,4367,,,,
183
+ ,,,91,4367,0.0004084964457433671,0.01691940240561962,,
184
+ 0.00042701218626461923,0.017384465783834457,0.01717948541045189,91,4415,,,,
185
+ ,,,92,4415,0.00040828666533343494,0.01693566143512726,,
186
+ 0.00042167669744230807,0.017251959070563316,0.01717948541045189,92,4463,,,,
187
+ ,,,93,4463,0.0004104569088667631,0.01696004346013069,,
188
+ 0.0004228699835948646,0.017264559864997864,0.01717948541045189,93,4511,,,,
189
+ ,,,94,4511,0.0004097290802747011,0.01693933829665184,,
190
+ 0.00042360820225439966,0.017293153330683708,0.01717948541045189,94,4559,,,,
191
+ ,,,95,4559,0.00040938195888884366,0.016963394358754158,,
192
+ 0.0004216664528939873,0.01718844473361969,0.01717948541045189,95,4607,,,,
193
+ ,,,96,4607,0.0004086557310074568,0.01693633571267128,,
194
+ 0.00042314501479268074,0.017330540344119072,0.01717948541045189,96,4655,,,,
195
+ ,,,97,4655,0.00040876472485251725,0.016915222629904747,,
196
+ 0.0004218940157443285,0.01720993034541607,0.01717948541045189,97,4703,,,,
197
+ ,,,98,4703,0.00041095513734035194,0.0169514287263155,,
198
+ 0.0004214801883790642,0.017216339707374573,0.01717948541045189,98,4751,,,,
199
+ ,,,99,4751,0.00040956976590678096,0.016954516991972923,,
200
+ 0.00042280496563762426,0.01726171188056469,0.01717948541045189,99,4799,,,,
201
+ ,,,100,4799,0.00040905189234763384,0.016932755708694458,,
202
+ ,,,89,4800,,,0.0004145506536588073,0.01695971190929413
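The metrics.csv above is written by PyTorch Lightning's CSVLogger: rows logged at validation, training, and test time are interleaved, so most cells in a given row are empty and the single test row comes last. A minimal sketch for summarizing one such file, assuming pandas is available, that the header row matches the one used elsewhere in these logs (val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae), and that the file exists locally at the illustrative path below:

# Minimal sketch: summarize one CSVLogger metrics.csv from this sweep.
import pandas as pd

path = ("logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/"
        "2022-05-11_07-59-42/1/csv/version_0/metrics.csv")
df = pd.read_csv(path)

val = df.dropna(subset=["val/mae"])      # one row per validation epoch
train = df.dropna(subset=["train/mae"])  # one row per training epoch
test = df.dropna(subset=["test/mae"])    # single row written after testing

best = val.loc[val["val/mae"].idxmin()]
print(f"best val/mae {best['val/mae']:.6f} at epoch {int(best['epoch'])}")
print(f"test/mae {test['test/mae'].iloc[0]:.6f}")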
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/1/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652256203.908803db74d8.1.2 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:05c94bf2ce61abe5bf3bc16ec3891cf39be1b74c17a73360d521d476710a5024
3
+ size 36796
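The events.out.tfevents and .ckpt entries in this commit are Git LFS pointer files rather than the binaries themselves; each pointer records only the object's sha256 and size, as in the three lines above. A minimal sketch, assuming the real objects have been fetched (for example with git lfs pull), that checks a local file against its pointer:

# Minimal sketch: verify a fetched LFS object against a pointer file like the one above.
import hashlib

def read_pointer(pointer_path):
    fields = dict(line.split(" ", 1) for line in open(pointer_path) if " " in line)
    return fields["oid"].strip().split(":", 1)[1], int(fields["size"])

def verify(pointer_path, object_path):
    oid, size = read_pointer(pointer_path)
    data = open(object_path, "rb").read()
    return len(data) == size and hashlib.sha256(data).hexdigest() == oid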
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/1/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652256421.908803db74d8.1.3 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e88ae1c801a50ee2b518b4dd650f6b2ea80d01b6076b85b16a66ffde0bb7bc70
3
+ size 179
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/1/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml ADDED
@@ -0,0 +1,54 @@
1
+ trainer:
2
+ _target_: pytorch_lightning.Trainer
3
+ gpus: 1
4
+ min_epochs: 1
5
+ max_epochs: 100
6
+ resume_from_checkpoint: null
7
+ model:
8
+ _target_: src.models.focus_conv_module.FocusConvLitModule
9
+ image_size: 150
10
+ pool_size: 2
11
+ conv1_size: 7
12
+ conv1_channels: 6
13
+ conv2_size: 7
14
+ conv2_channels: 16
15
+ lin1_size: 128
16
+ lin2_size: 72
17
+ output_size: 1
18
+ lr: 0.0033232883076398404
19
+ weight_decay: 0.0005
20
+ datamodule:
21
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
22
+ data_dir: /usr/src/app/data/focus150
23
+ csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
24
+ csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
25
+ csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
26
+ batch_size: 64
27
+ num_workers: 0
28
+ pin_memory: false
29
+ seed: null
30
+ callbacks:
31
+ model_checkpoint:
32
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
33
+ monitor: val/mae
34
+ mode: min
35
+ save_top_k: 1
36
+ save_last: true
37
+ verbose: false
38
+ dirpath: checkpoints/
39
+ filename: epoch_{epoch:03d}
40
+ auto_insert_metric_name: false
41
+ early_stopping:
42
+ _target_: pytorch_lightning.callbacks.EarlyStopping
43
+ monitor: val/mae
44
+ mode: min
45
+ patience: 100
46
+ min_delta: 0
47
+ model_summary:
48
+ _target_: pytorch_lightning.callbacks.RichModelSummary
49
+ max_depth: -1
50
+ rich_progress_bar:
51
+ _target_: pytorch_lightning.callbacks.RichProgressBar
52
+ model/params/total: 2244121
53
+ model/params/trainable: 2244121
54
+ model/params/non_trainable: 0
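Because these hparams.yaml files record the full _target_ paths alongside the constructor arguments, the logged model and datamodule can be rebuilt directly with Hydra's instantiate. A minimal sketch, assuming the repository's src package is importable and using the tensorboard copy listed above (the csv copy is identical):

# Minimal sketch: rebuild the logged model from an hparams.yaml.
from omegaconf import OmegaConf
from hydra.utils import instantiate

hparams = OmegaConf.load(
    "logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/"
    "2022-05-11_07-59-42/1/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml"
)
model = instantiate(hparams.model)            # FocusConvLitModule with the recorded sizes and lr
datamodule = instantiate(hparams.datamodule)

n_params = sum(p.numel() for p in model.parameters())
print(n_params)  # should agree with model/params/total recorded above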
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/10/.hydra/config.yaml ADDED
@@ -0,0 +1,81 @@
1
+ original_work_dir: ${hydra:runtime.cwd}
2
+ data_dir: ${original_work_dir}/data
3
+ print_config: true
4
+ ignore_warnings: true
5
+ train: true
6
+ test: true
7
+ seed: null
8
+ name: focusConvMSE_150_hyperparameter_search
9
+ datamodule:
10
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
11
+ data_dir: ${data_dir}/focus150
12
+ csv_train_file: ${data_dir}/focus150/train_metadata.csv
13
+ csv_val_file: ${data_dir}/focus150/validation_metadata.csv
14
+ csv_test_file: ${data_dir}/focus150/test_metadata.csv
15
+ batch_size: 64
16
+ num_workers: 0
17
+ pin_memory: false
18
+ model:
19
+ _target_: src.models.focus_conv_module.FocusConvLitModule
20
+ image_size: 150
21
+ pool_size: 2
22
+ conv1_size: 3
23
+ conv1_channels: 9
24
+ conv2_size: 3
25
+ conv2_channels: 6
26
+ lin1_size: 32
27
+ lin2_size: 72
28
+ output_size: 1
29
+ lr: 0.005087336593525169
30
+ weight_decay: 0.0005
31
+ callbacks:
32
+ model_checkpoint:
33
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
34
+ monitor: val/mae
35
+ mode: min
36
+ save_top_k: 1
37
+ save_last: true
38
+ verbose: false
39
+ dirpath: checkpoints/
40
+ filename: epoch_{epoch:03d}
41
+ auto_insert_metric_name: false
42
+ early_stopping:
43
+ _target_: pytorch_lightning.callbacks.EarlyStopping
44
+ monitor: val/mae
45
+ mode: min
46
+ patience: 100
47
+ min_delta: 0
48
+ model_summary:
49
+ _target_: pytorch_lightning.callbacks.RichModelSummary
50
+ max_depth: -1
51
+ rich_progress_bar:
52
+ _target_: pytorch_lightning.callbacks.RichProgressBar
53
+ logger:
54
+ csv:
55
+ _target_: pytorch_lightning.loggers.csv_logs.CSVLogger
56
+ save_dir: .
57
+ name: csv/
58
+ prefix: ''
59
+ mlflow:
60
+ _target_: pytorch_lightning.loggers.mlflow.MLFlowLogger
61
+ experiment_name: ${name}
62
+ tracking_uri: ${original_work_dir}/logs/mlflow/mlruns
63
+ tags: null
64
+ save_dir: ./mlruns
65
+ prefix: ''
66
+ artifact_location: null
67
+ tensorboard:
68
+ _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
69
+ save_dir: tensorboard/
70
+ name: null
71
+ version: ${name}
72
+ log_graph: false
73
+ default_hp_metric: true
74
+ prefix: ''
75
+ trainer:
76
+ _target_: pytorch_lightning.Trainer
77
+ gpus: 1
78
+ min_epochs: 1
79
+ max_epochs: 100
80
+ resume_from_checkpoint: null
81
+ optimized_metric: val/mae_best
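Each trial's .hydra/config.yaml is the fully composed configuration for that run, and optimized_metric names the value handed back to the sweeper. The sketch below shows a typical consumer of such a config (instantiate datamodule, model, callbacks, and trainer, then fit and read back the monitored metric); it illustrates the pattern and is not necessarily the repository's actual train.py:

# Minimal sketch: consume a composed config like the one above.
from omegaconf import OmegaConf
from hydra.utils import instantiate

cfg = OmegaConf.load(
    "logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/"
    "2022-05-11_07-59-42/10/.hydra/config.yaml"
)
cfg.original_work_dir = "/usr/src/app"  # resolve ${hydra:runtime.cwd} manually outside a Hydra run

datamodule = instantiate(cfg.datamodule)
model = instantiate(cfg.model)
callbacks = [instantiate(cb) for cb in cfg.callbacks.values()]
trainer = instantiate(cfg.trainer, callbacks=callbacks, gpus=0)  # gpus=0 if no GPU is available

trainer.fit(model, datamodule=datamodule)
score = trainer.callback_metrics.get(cfg.optimized_metric)  # val/mae_best, minimized by the sweeper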
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/10/.hydra/hydra.yaml ADDED
@@ -0,0 +1,240 @@
1
+ hydra:
2
+ run:
3
+ dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
4
+ sweep:
5
+ dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
6
+ subdir: ${hydra.job.num}
7
+ launcher:
8
+ _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
9
+ sweeper:
10
+ sampler:
11
+ _target_: optuna.samplers.TPESampler
12
+ seed: 12345
13
+ consider_prior: true
14
+ prior_weight: 1.0
15
+ consider_magic_clip: true
16
+ consider_endpoints: false
17
+ n_startup_trials: 10
18
+ n_ei_candidates: 24
19
+ multivariate: false
20
+ warn_independent_sampling: true
21
+ _target_: hydra_plugins.hydra_optuna_sweeper.optuna_sweeper.OptunaSweeper
22
+ direction: minimize
23
+ storage: null
24
+ study_name: focusConvMSE_150_hyperparameter_search
25
+ n_trials: 20
26
+ n_jobs: 1
27
+ search_space:
28
+ datamodule.batch_size:
29
+ type: categorical
30
+ choices:
31
+ - 64
32
+ - 128
33
+ model.lr:
34
+ type: float
35
+ low: 0.0001
36
+ high: 0.01
37
+ model.conv1_size:
38
+ type: categorical
39
+ choices:
40
+ - 3
41
+ - 5
42
+ - 7
43
+ model.conv1_channels:
44
+ type: categorical
45
+ choices:
46
+ - 3
47
+ - 6
48
+ - 9
49
+ model.conv2_size:
50
+ type: categorical
51
+ choices:
52
+ - 3
53
+ - 5
54
+ - 7
55
+ model.conv2_channels:
56
+ type: categorical
57
+ choices:
58
+ - 6
59
+ - 11
60
+ - 16
61
+ model.lin1_size:
62
+ type: categorical
63
+ choices:
64
+ - 32
65
+ - 72
66
+ - 128
67
+ model.lin2_size:
68
+ type: categorical
69
+ choices:
70
+ - 32
71
+ - 72
72
+ - 128
73
+ help:
74
+ app_name: ${hydra.job.name}
75
+ header: '${hydra.help.app_name} is powered by Hydra.
76
+
77
+ '
78
+ footer: 'Powered by Hydra (https://hydra.cc)
79
+
80
+ Use --hydra-help to view Hydra specific help
81
+
82
+ '
83
+ template: '${hydra.help.header}
84
+
85
+ == Configuration groups ==
86
+
87
+ Compose your configuration from those groups (group=option)
88
+
89
+
90
+ $APP_CONFIG_GROUPS
91
+
92
+
93
+ == Config ==
94
+
95
+ Override anything in the config (foo.bar=value)
96
+
97
+
98
+ $CONFIG
99
+
100
+
101
+ ${hydra.help.footer}
102
+
103
+ '
104
+ hydra_help:
105
+ template: 'Hydra (${hydra.runtime.version})
106
+
107
+ See https://hydra.cc for more info.
108
+
109
+
110
+ == Flags ==
111
+
112
+ $FLAGS_HELP
113
+
114
+
115
+ == Configuration groups ==
116
+
117
+ Compose your configuration from those groups (For example, append hydra/job_logging=disabled
118
+ to command line)
119
+
120
+
121
+ $HYDRA_CONFIG_GROUPS
122
+
123
+
124
+ Use ''--cfg hydra'' to Show the Hydra config.
125
+
126
+ '
127
+ hydra_help: ???
128
+ hydra_logging:
129
+ version: 1
130
+ formatters:
131
+ colorlog:
132
+ (): colorlog.ColoredFormatter
133
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
134
+ handlers:
135
+ console:
136
+ class: logging.StreamHandler
137
+ formatter: colorlog
138
+ stream: ext://sys.stdout
139
+ root:
140
+ level: INFO
141
+ handlers:
142
+ - console
143
+ disable_existing_loggers: false
144
+ job_logging:
145
+ version: 1
146
+ formatters:
147
+ simple:
148
+ format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
149
+ colorlog:
150
+ (): colorlog.ColoredFormatter
151
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
152
+ - %(message)s'
153
+ log_colors:
154
+ DEBUG: purple
155
+ INFO: green
156
+ WARNING: yellow
157
+ ERROR: red
158
+ CRITICAL: red
159
+ handlers:
160
+ console:
161
+ class: logging.StreamHandler
162
+ formatter: colorlog
163
+ stream: ext://sys.stdout
164
+ file:
165
+ class: logging.FileHandler
166
+ formatter: simple
167
+ filename: ${hydra.job.name}.log
168
+ root:
169
+ level: INFO
170
+ handlers:
171
+ - console
172
+ - file
173
+ disable_existing_loggers: false
174
+ env: {}
175
+ searchpath: []
176
+ callbacks: {}
177
+ output_subdir: .hydra
178
+ overrides:
179
+ hydra: []
180
+ task:
181
+ - datamodule.batch_size=64
182
+ - model.lr=0.005087336593525169
183
+ - model.conv1_size=3
184
+ - model.conv1_channels=9
185
+ - model.conv2_size=3
186
+ - model.conv2_channels=6
187
+ - model.lin1_size=32
188
+ - model.lin2_size=72
189
+ - hparams_search=focusConvMSE_150.yaml
190
+ job:
191
+ name: train
192
+ override_dirname: datamodule.batch_size=64,hparams_search=focusConvMSE_150.yaml,model.conv1_channels=9,model.conv1_size=3,model.conv2_channels=6,model.conv2_size=3,model.lin1_size=32,model.lin2_size=72,model.lr=0.005087336593525169
193
+ id: '10'
194
+ num: 10
195
+ config_name: train.yaml
196
+ env_set: {}
197
+ env_copy: []
198
+ config:
199
+ override_dirname:
200
+ kv_sep: '='
201
+ item_sep: ','
202
+ exclude_keys: []
203
+ runtime:
204
+ version: 1.1.2
205
+ cwd: /usr/src/app
206
+ config_sources:
207
+ - path: hydra.conf
208
+ schema: pkg
209
+ provider: hydra
210
+ - path: /usr/src/app/configs
211
+ schema: file
212
+ provider: main
213
+ - path: hydra_plugins.hydra_colorlog.conf
214
+ schema: pkg
215
+ provider: hydra-colorlog
216
+ - path: ''
217
+ schema: structured
218
+ provider: schema
219
+ choices:
220
+ local: default.yaml
221
+ hparams_search: focusConvMSE_150.yaml
222
+ debug: null
223
+ experiment: null
224
+ log_dir: default.yaml
225
+ trainer: long.yaml
226
+ logger: many_loggers
227
+ callbacks: default.yaml
228
+ model: focusConv_150.yaml
229
+ datamodule: focus150.yaml
230
+ hydra/env: default
231
+ hydra/callbacks: null
232
+ hydra/job_logging: colorlog
233
+ hydra/hydra_logging: colorlog
234
+ hydra/hydra_help: default
235
+ hydra/help: default
236
+ hydra/sweeper: optuna
237
+ hydra/sweeper/sampler: tpe
238
+ hydra/launcher: basic
239
+ hydra/output: default
240
+ verbose: false
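The hydra/sweeper block above configures the Optuna sweeper: a TPE sampler with seed 12345, 20 trials, direction minimize, and the categorical/float choices listed under search_space. For readers more used to plain Optuna, a rough equivalent of that search space is sketched below; train_and_return_val_mae_best is a hypothetical helper, and in reality the hydra-optuna-sweeper plugin performs this mapping and launches the Hydra jobs itself:

# Rough plain-Optuna equivalent of the sweeper search space above (illustrative only).
import optuna

def objective(trial: optuna.Trial) -> float:
    overrides = {
        "datamodule.batch_size": trial.suggest_categorical("datamodule.batch_size", [64, 128]),
        "model.lr": trial.suggest_float("model.lr", 1e-4, 1e-2),
        "model.conv1_size": trial.suggest_categorical("model.conv1_size", [3, 5, 7]),
        "model.conv1_channels": trial.suggest_categorical("model.conv1_channels", [3, 6, 9]),
        "model.conv2_size": trial.suggest_categorical("model.conv2_size", [3, 5, 7]),
        "model.conv2_channels": trial.suggest_categorical("model.conv2_channels", [6, 11, 16]),
        "model.lin1_size": trial.suggest_categorical("model.lin1_size", [32, 72, 128]),
        "model.lin2_size": trial.suggest_categorical("model.lin2_size", [32, 72, 128]),
    }
    return train_and_return_val_mae_best(overrides)  # hypothetical: run one trial, return val/mae_best

study = optuna.create_study(direction="minimize", sampler=optuna.samplers.TPESampler(seed=12345))
study.optimize(objective, n_trials=20)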
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/10/.hydra/overrides.yaml ADDED
@@ -0,0 +1,9 @@
1
+ - datamodule.batch_size=64
2
+ - model.lr=0.005087336593525169
3
+ - model.conv1_size=3
4
+ - model.conv1_channels=9
5
+ - model.conv2_size=3
6
+ - model.conv2_channels=6
7
+ - model.lin1_size=32
8
+ - model.lin2_size=72
9
+ - hparams_search=focusConvMSE_150.yaml
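overrides.yaml records exactly the overrides the sweeper chose for trial 10, so the same configuration can be re-composed offline with Hydra's Compose API. A minimal sketch, assuming it is run from the repository root where configs/train.yaml lives; the hparams_search override only configures the sweeper and is omitted here:

# Minimal sketch: re-compose trial 10's config from the recorded overrides.
from hydra import compose, initialize

with initialize(config_path="configs"):
    cfg = compose(
        config_name="train.yaml",
        overrides=[
            "datamodule.batch_size=64",
            "model.lr=0.005087336593525169",
            "model.conv1_size=3",
            "model.conv1_channels=9",
            "model.conv2_size=3",
            "model.conv2_channels=6",
            "model.lin1_size=32",
            "model.lin2_size=72",
        ],
    )
print(cfg.model.lr, cfg.model.lin1_size)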
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/10/checkpoints/epoch_036.ckpt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:59a8364a1659fad87f2624b3c0788ffe0032e17ebdb2ab12a5e90749d64a6e50
3
+ size 2607365
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/10/checkpoints/last.ckpt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5061ed27bdf4cb5e58c74fbf369f9b96285104c3549097f5c0f1b8f649783e2e
3
+ size 2607365
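The epoch_036.ckpt and last.ckpt pointers above refer to ordinary PyTorch Lightning checkpoints (epoch 36 is where this trial's val/mae reaches its minimum, per the metrics.csv below). Once the LFS objects are pulled they can be restored with load_from_checkpoint; the sketch assumes FocusConvLitModule saves its hyperparameters and that FocusDataModule accepts the constructor arguments shown in the configs above:

# Minimal sketch: restore trial 10's best checkpoint and re-run the test split.
import pytorch_lightning as pl
from src.models.focus_conv_module import FocusConvLitModule
from src.datamodules.focus_datamodule import FocusDataModule

ckpt = ("logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/"
        "2022-05-11_07-59-42/10/checkpoints/epoch_036.ckpt")
model = FocusConvLitModule.load_from_checkpoint(ckpt)

datamodule = FocusDataModule(
    data_dir="data/focus150",
    csv_train_file="data/focus150/train_metadata.csv",
    csv_val_file="data/focus150/validation_metadata.csv",
    csv_test_file="data/focus150/test_metadata.csv",
    batch_size=64,
)
pl.Trainer(gpus=0).test(model, datamodule=datamodule)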
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/10/csv/version_0/hparams.yaml ADDED
@@ -0,0 +1,54 @@
1
+ trainer:
2
+ _target_: pytorch_lightning.Trainer
3
+ gpus: 1
4
+ min_epochs: 1
5
+ max_epochs: 100
6
+ resume_from_checkpoint: null
7
+ model:
8
+ _target_: src.models.focus_conv_module.FocusConvLitModule
9
+ image_size: 150
10
+ pool_size: 2
11
+ conv1_size: 3
12
+ conv1_channels: 9
13
+ conv2_size: 3
14
+ conv2_channels: 6
15
+ lin1_size: 32
16
+ lin2_size: 72
17
+ output_size: 1
18
+ lr: 0.005087336593525169
19
+ weight_decay: 0.0005
20
+ datamodule:
21
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
22
+ data_dir: /usr/src/app/data/focus150
23
+ csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
24
+ csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
25
+ csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
26
+ batch_size: 64
27
+ num_workers: 0
28
+ pin_memory: false
29
+ seed: null
30
+ callbacks:
31
+ model_checkpoint:
32
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
33
+ monitor: val/mae
34
+ mode: min
35
+ save_top_k: 1
36
+ save_last: true
37
+ verbose: false
38
+ dirpath: checkpoints/
39
+ filename: epoch_{epoch:03d}
40
+ auto_insert_metric_name: false
41
+ early_stopping:
42
+ _target_: pytorch_lightning.callbacks.EarlyStopping
43
+ monitor: val/mae
44
+ mode: min
45
+ patience: 100
46
+ min_delta: 0
47
+ model_summary:
48
+ _target_: pytorch_lightning.callbacks.RichModelSummary
49
+ max_depth: -1
50
+ rich_progress_bar:
51
+ _target_: pytorch_lightning.callbacks.RichProgressBar
52
+ model/params/total: 216201
53
+ model/params/trainable: 216201
54
+ model/params/non_trainable: 0
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/10/csv/version_0/metrics.csv ADDED
@@ -0,0 +1,202 @@
1
+ val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae
2
+ 0.00043468139483593404,0.017546463757753372,0.017546463757753372,0,47,,,,
3
+ ,,,1,47,0.056779056787490845,0.07177011668682098,,
4
+ 0.0004235544765833765,0.017344487830996513,0.017344487830996513,1,95,,,,
5
+ ,,,2,95,0.00041372200939804316,0.01701633632183075,,
6
+ 0.0004272653313819319,0.017434298992156982,0.017344487830996513,2,143,,,,
7
+ ,,,3,143,0.0004092987219337374,0.016944710165262222,,
8
+ 0.00042198406299576163,0.017276611179113388,0.017276611179113388,3,191,,,,
9
+ ,,,4,191,0.00040964968502521515,0.01691558212041855,,
10
+ 0.0004263336304575205,0.017415478825569153,0.017276611179113388,4,239,,,,
11
+ ,,,5,239,0.0004098546633031219,0.01695418357849121,,
12
+ 0.0004287038173060864,0.017418786883354187,0.017276611179113388,5,287,,,,
13
+ ,,,6,287,0.00040788110345602036,0.016931764781475067,,
14
+ 0.0004215880180709064,0.01717902533710003,0.01717902533710003,6,335,,,,
15
+ ,,,7,335,0.000410393433412537,0.01698286458849907,,
16
+ 0.00042162393219769,0.017246054485440254,0.01717902533710003,7,383,,,,
17
+ ,,,8,383,0.0004082897794432938,0.016899364069104195,,
18
+ 0.0004220792034175247,0.017223235219717026,0.01717902533710003,8,431,,,,
19
+ ,,,9,431,0.0004097525670658797,0.016918806359171867,,
20
+ 0.0004217562673147768,0.01725953444838524,0.01717902533710003,9,479,,,,
21
+ ,,,10,479,0.00040964901563711464,0.016937481239438057,,
22
+ 0.00042254128493368626,0.017249368131160736,0.01717902533710003,10,527,,,,
23
+ ,,,11,527,0.0004101248050574213,0.016971716657280922,,
24
+ 0.00042164206388406456,0.01718549244105816,0.01717902533710003,11,575,,,,
25
+ ,,,12,575,0.0004096446209587157,0.016937173902988434,,
26
+ 0.0004231781349517405,0.01733172871172428,0.01717902533710003,12,623,,,,
27
+ ,,,13,623,0.00040813247323967516,0.016878241673111916,,
28
+ 0.0004218678514007479,0.017268523573875427,0.01717902533710003,13,671,,,,
29
+ ,,,14,671,0.0004125345149077475,0.017032235860824585,,
30
+ 0.0004227508616168052,0.017315374687314034,0.01717902533710003,14,719,,,,
31
+ ,,,15,719,0.0004076789482496679,0.01690765470266342,,
32
+ 0.00042589398799464107,0.01735878922045231,0.01717902533710003,15,767,,,,
33
+ ,,,16,767,0.0004070499853696674,0.016888972371816635,,
34
+ 0.0004214785585645586,0.0172053724527359,0.01717902533710003,16,815,,,,
35
+ ,,,17,815,0.00040906015783548355,0.016918480396270752,,
36
+ 0.0004344239423517138,0.01751185767352581,0.01717902533710003,17,863,,,,
37
+ ,,,18,863,0.00041176454396918416,0.017036093398928642,,
38
+ 0.00042967809713445604,0.017436737194657326,0.01717902533710003,18,911,,,,
39
+ ,,,19,911,0.0004111098241992295,0.016948826611042023,,
40
+ 0.00042188496445305645,0.017209213227033615,0.01717902533710003,19,959,,,,
41
+ ,,,20,959,0.00041171678458340466,0.017007725313305855,,
42
+ 0.0004244951705913991,0.017372041940689087,0.01717902533710003,20,1007,,,,
43
+ ,,,21,1007,0.00040778887341730297,0.016909874975681305,,
44
+ 0.00042465105070732534,0.017376165837049484,0.01717902533710003,21,1055,,,,
45
+ ,,,22,1055,0.0004122427199035883,0.017018109560012817,,
46
+ 0.0004215144435875118,0.017191920429468155,0.01717902533710003,22,1103,,,,
47
+ ,,,23,1103,0.0004087020643055439,0.0169296246021986,,
48
+ 0.00042411283357068896,0.01736144907772541,0.01717902533710003,23,1151,,,,
49
+ ,,,24,1151,0.0004096532065887004,0.01693853735923767,,
50
+ 0.0004222666029818356,0.017293032258749008,0.01717902533710003,24,1199,,,,
51
+ ,,,25,1199,0.00041062405216507614,0.016997206956148148,,
52
+ 0.00042148851207457483,0.01722068525850773,0.01717902533710003,25,1247,,,,
53
+ ,,,26,1247,0.0004095557378605008,0.016948653385043144,,
54
+ 0.0004217678215354681,0.017260538414120674,0.01717902533710003,26,1295,,,,
55
+ ,,,27,1295,0.0004101998929399997,0.016950329765677452,,
56
+ 0.0004274123057257384,0.01739300973713398,0.01717902533710003,27,1343,,,,
57
+ ,,,28,1343,0.00040830261423252523,0.016966771334409714,,
58
+ 0.00042349955765530467,0.0172892976552248,0.01717902533710003,28,1391,,,,
59
+ ,,,29,1391,0.0004114869807381183,0.01695878617465496,,
60
+ 0.0004232345090713352,0.01733372174203396,0.01717902533710003,29,1439,,,,
61
+ ,,,30,1439,0.00040702245314605534,0.016885461285710335,,
62
+ 0.00042155766277574003,0.0171835795044899,0.01717902533710003,30,1487,,,,
63
+ ,,,31,1487,0.0004086740082129836,0.01690738834440708,,
64
+ 0.00042203019256703556,0.01727955788373947,0.01717902533710003,31,1535,,,,
65
+ ,,,32,1535,0.00040975367301143706,0.016935858875513077,,
66
+ 0.00042147867497988045,0.017205258831381798,0.01717902533710003,32,1583,,,,
67
+ ,,,33,1583,0.0004092739836778492,0.016939982771873474,,
68
+ 0.0004217113310005516,0.017255406826734543,0.01717902533710003,33,1631,,,,
69
+ ,,,34,1631,0.00040878428262658417,0.01696481741964817,,
70
+ 0.00042355660116299987,0.017344554886221886,0.01717902533710003,34,1679,,,,
71
+ ,,,35,1679,0.0004086242406629026,0.016926748678088188,,
72
+ 0.0004228296165820211,0.01726279966533184,0.01717902533710003,35,1727,,,,
73
+ ,,,36,1727,0.0004080396902281791,0.016909627243876457,,
74
+ 0.0004215901135466993,0.01717873103916645,0.01717873103916645,36,1775,,,,
75
+ ,,,37,1775,0.000408114428864792,0.016912007704377174,,
76
+ 0.0004220795526634902,0.017282577231526375,0.01717873103916645,37,1823,,,,
77
+ ,,,38,1823,0.00040914167766459286,0.016942134127020836,,
78
+ 0.00042367237620055676,0.017295384779572487,0.01717873103916645,38,1871,,,,
79
+ ,,,39,1871,0.00040878841537050903,0.01690341904759407,,
80
+ 0.00042147585190832615,0.017209647223353386,0.01717873103916645,39,1919,,,,
81
+ ,,,40,1919,0.0004094017203897238,0.016927793622016907,,
82
+ 0.0004240934504196048,0.01736089028418064,0.01717873103916645,40,1967,,,,
83
+ ,,,41,1967,0.00040820211870595813,0.016919946298003197,,
84
+ 0.0004218496906105429,0.01726716011762619,0.01717873103916645,41,2015,,,,
85
+ ,,,42,2015,0.0004096354532521218,0.016962463036179543,,
86
+ 0.0004257959662936628,0.01740378513932228,0.01717873103916645,42,2063,,,,
87
+ ,,,43,2063,0.00040838640416041017,0.01689140498638153,,
88
+ 0.00042955984827131033,0.017475005239248276,0.01717873103916645,43,2111,,,,
89
+ ,,,44,2111,0.00040696581709198654,0.016923272982239723,,
90
+ 0.0004327644419390708,0.01752312108874321,0.01717873103916645,44,2159,,,,
91
+ ,,,45,2159,0.0004092560557182878,0.016950111836194992,,
92
+ 0.0004222602874506265,0.017292698845267296,0.01717873103916645,45,2207,,,,
93
+ ,,,46,2207,0.0004100011719856411,0.016941234469413757,,
94
+ 0.00042233275598846376,0.017296427860856056,0.01717873103916645,46,2255,,,,
95
+ ,,,47,2255,0.00040798625559546053,0.01688951812684536,,
96
+ 0.0004218195681460202,0.01720374822616577,0.01717873103916645,47,2303,,,,
97
+ ,,,48,2303,0.0004083216772414744,0.016924096271395683,,
98
+ 0.0004218708781991154,0.017268748953938484,0.01717873103916645,48,2351,,,,
99
+ ,,,49,2351,0.00040842840098775923,0.016918513923883438,,
100
+ 0.0004219994880259037,0.017277609556913376,0.01717873103916645,49,2399,,,,
101
+ ,,,50,2399,0.0004077427147421986,0.016878319904208183,,
102
+ 0.0004242933355271816,0.01736653968691826,0.01717873103916645,50,2447,,,,
103
+ ,,,51,2447,0.0004088195855729282,0.016926312819123268,,
104
+ 0.00042221896001137793,0.017290499061346054,0.01717873103916645,51,2495,,,,
105
+ ,,,52,2495,0.00040954744326882064,0.016946373507380486,,
106
+ 0.0004219755355734378,0.017216088250279427,0.01717873103916645,52,2543,,,,
107
+ ,,,53,2543,0.00041001453064382076,0.01695138029754162,,
108
+ 0.0004217936657369137,0.017262723296880722,0.01717873103916645,53,2591,,,,
109
+ ,,,54,2591,0.0004082319501321763,0.01693730615079403,,
110
+ 0.00042498315451666713,0.01738462969660759,0.01717873103916645,54,2639,,,,
111
+ ,,,55,2639,0.000408886291552335,0.01691211201250553,,
112
+ 0.0004236290988046676,0.017346875742077827,0.01717873103916645,55,2687,,,,
113
+ ,,,56,2687,0.0004078254278283566,0.016930311918258667,,
114
+ 0.00042486932943575084,0.01738177426159382,0.01717873103916645,56,2735,,,,
115
+ ,,,57,2735,0.0004103595274500549,0.016968172043561935,,
116
+ 0.00042739874334074557,0.017436865717172623,0.01717873103916645,57,2783,,,,
117
+ ,,,58,2783,0.0004080187645740807,0.01689358614385128,,
118
+ 0.0004222442221362144,0.017291851341724396,0.01717873103916645,58,2831,,,,
119
+ ,,,59,2831,0.00040843390161171556,0.016921762377023697,,
120
+ 0.00042379635851830244,0.017299603670835495,0.01717873103916645,59,2879,,,,
121
+ ,,,60,2879,0.00040983682265505195,0.01695006713271141,,
122
+ 0.0004221262352075428,0.01728532463312149,0.01717873103916645,60,2927,,,,
123
+ ,,,61,2927,0.0004084053507540375,0.016897065564990044,,
124
+ 0.0004222682910040021,0.017293117940425873,0.01717873103916645,61,2975,,,,
125
+ ,,,62,2975,0.00040911303949542344,0.016928739845752716,,
126
+ 0.00042747813859023154,0.01739438995718956,0.01717873103916645,62,3023,,,,
127
+ ,,,63,3023,0.00040799641283228993,0.01693059131503105,,
128
+ 0.0004254797531757504,0.01739656738936901,0.01717873103916645,63,3071,,,,
129
+ ,,,64,3071,0.00040981770143844187,0.016943469643592834,,
130
+ 0.00042329620919190347,0.01733586937189102,0.01717873103916645,64,3119,,,,
131
+ ,,,65,3119,0.0004088404239155352,0.01693478226661682,,
132
+ 0.0004217066743876785,0.017254961654543877,0.01717873103916645,65,3167,,,,
133
+ ,,,66,3167,0.00040838937275111675,0.01690654456615448,,
134
+ 0.00042189378291368484,0.017270425334572792,0.01717873103916645,66,3215,,,,
135
+ ,,,67,3215,0.0004081486549694091,0.01690490171313286,,
136
+ 0.0004215087683405727,0.01719331555068493,0.01717873103916645,67,3263,,,,
137
+ ,,,68,3263,0.0004093026218470186,0.016943974420428276,,
138
+ 0.0004223258583806455,0.017296073958277702,0.01717873103916645,68,3311,,,,
139
+ ,,,69,3311,0.00041087312274612486,0.016967173665761948,,
140
+ 0.0004220983537379652,0.017283696681261063,0.01717873103916645,69,3359,,,,
141
+ ,,,70,3359,0.0004078306374140084,0.01692153513431549,,
142
+ 0.0004249326593708247,0.017333967611193657,0.01717873103916645,70,3407,,,,
143
+ ,,,71,3407,0.0004084692045580596,0.01691775768995285,,
144
+ 0.00042621931061148643,0.017413049936294556,0.01717873103916645,71,3455,,,,
145
+ ,,,72,3455,0.0004091760783921927,0.016929099336266518,,
146
+ 0.00042185187339782715,0.017206506803631783,0.01717873103916645,72,3503,,,,
147
+ ,,,73,3503,0.0004083532840013504,0.016920056194067,,
148
+ 0.00042171819950453937,0.01725606620311737,0.01717873103916645,73,3551,,,,
149
+ ,,,74,3551,0.0004093616153113544,0.0169339869171381,,
150
+ 0.00042190300882793963,0.017210641875863075,0.01717873103916645,74,3599,,,,
151
+ ,,,75,3599,0.00040874999831430614,0.016917385160923004,,
152
+ 0.00042252230923622847,0.017305485904216766,0.01717873103916645,75,3647,,,,
153
+ ,,,76,3647,0.00040824984898790717,0.016896480694413185,,
154
+ 0.0004308913485147059,0.017496133223176003,0.01717873103916645,76,3695,,,,
155
+ ,,,77,3695,0.00040839932626113296,0.016898926347494125,,
156
+ 0.0004225029260851443,0.01730460114777088,0.01717873103916645,77,3743,,,,
157
+ ,,,78,3743,0.002482874784618616,0.026759404689073563,,
158
+ 0.00042378046782687306,0.017351597547531128,0.01717873103916645,78,3791,,,,
159
+ ,,,79,3791,0.0004088142595719546,0.016951654106378555,,
160
+ 0.00042419382953085005,0.017312422394752502,0.01717873103916645,79,3839,,,,
161
+ ,,,80,3839,0.0004078223428223282,0.016915207728743553,,
162
+ 0.0004217459645587951,0.01719694398343563,0.01717873103916645,80,3887,,,,
163
+ ,,,81,3887,0.00040778153925202787,0.01690812222659588,,
164
+ 0.0004215148219373077,0.01719183847308159,0.01717873103916645,81,3935,,,,
165
+ ,,,82,3935,0.0004089638823643327,0.016936594620347023,,
166
+ 0.0004224865115247667,0.017246617004275322,0.01717873103916645,82,3983,,,,
167
+ ,,,83,3983,0.0004087591078132391,0.01692243665456772,,
168
+ 0.000431923137512058,0.017474407330155373,0.01717873103916645,83,4031,,,,
169
+ ,,,84,4031,0.00040745321894064546,0.016902605071663857,,
170
+ 0.00042346230475232005,0.017287950962781906,0.01717873103916645,84,4079,,,,
171
+ ,,,85,4079,0.0004094722680747509,0.01699863001704216,,
172
+ 0.00042147800559177995,0.017205914482474327,0.01717873103916645,85,4127,,,,
173
+ ,,,86,4127,0.00040972212445922196,0.016939392313361168,,
174
+ 0.00042261293856427073,0.01730952598154545,0.01717873103916645,86,4175,,,,
175
+ ,,,87,4175,0.0004083102394361049,0.016911597922444344,,
176
+ 0.00042147585190832615,0.01721024513244629,0.01717873103916645,87,4223,,,,
177
+ ,,,88,4223,0.0004092261951882392,0.016951315104961395,,
178
+ 0.00042229812243022025,0.01723654195666313,0.01717873103916645,88,4271,,,,
179
+ ,,,89,4271,0.0004078421916346997,0.016900014132261276,,
180
+ 0.00042452578782103956,0.017322422936558723,0.01717873103916645,89,4319,,,,
181
+ ,,,90,4319,0.0004093227325938642,0.016930103302001953,,
182
+ 0.0004249550402164459,0.017334580421447754,0.01717873103916645,90,4367,,,,
183
+ ,,,91,4367,0.0004070944560226053,0.016889022663235664,,
184
+ 0.00042843769188039005,0.017413679510354996,0.01717873103916645,91,4415,,,,
185
+ ,,,92,4415,0.00041222560685127974,0.016949515789747238,,
186
+ 0.0004228315665386617,0.01731865294277668,0.01717873103916645,92,4463,,,,
187
+ ,,,93,4463,0.00041204693843610585,0.017000366002321243,,
188
+ 0.0004242166760377586,0.017364399507641792,0.01717873103916645,93,4511,,,,
189
+ ,,,94,4511,0.00040950370021164417,0.01694655418395996,,
190
+ 0.00042397831566631794,0.017357541248202324,0.01717873103916645,94,4559,,,,
191
+ ,,,95,4559,0.0004079828504472971,0.016908368095755577,,
192
+ 0.0004339022561907768,0.017538508400321007,0.01717873103916645,95,4607,,,,
193
+ ,,,96,4607,0.0004094525647815317,0.016927026212215424,,
194
+ 0.00042174066766165197,0.017196424305438995,0.01717873103916645,96,4655,,,,
195
+ ,,,97,4655,0.0004097293713130057,0.016963521018624306,,
196
+ 0.00042153807589784265,0.01718698814511299,0.01717873103916645,97,4703,,,,
197
+ ,,,98,4703,0.00041035289177671075,0.016968535259366035,,
198
+ 0.0004218154354020953,0.017264489084482193,0.01717873103916645,98,4751,,,,
199
+ ,,,99,4751,0.0004082560772076249,0.016915328800678253,,
200
+ 0.00042380928061902523,0.017352478578686714,0.01717873103916645,99,4799,,,,
201
+ ,,,100,4799,0.0004081620427314192,0.016912754625082016,,
202
+ ,,,36,4800,,,0.00041454812162555754,0.016959020867943764
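With one metrics.csv per numbered trial directory, the whole 20-trial sweep can be summarized by reading the last val/mae_best value (a running minimum) from each file. A minimal sketch with pandas, assuming the directory layout shown in these logs:

# Minimal sketch: rank the sweep's trials by their final val/mae_best.
from pathlib import Path
import pandas as pd

sweep_dir = Path("logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42")
rows = []
for metrics_file in sorted(sweep_dir.glob("*/csv/version_0/metrics.csv")):
    df = pd.read_csv(metrics_file)
    best = df["val/mae_best"].dropna().iloc[-1]  # running minimum, so the last value is the best
    rows.append({"trial": metrics_file.parts[-4], "val/mae_best": best})

summary = pd.DataFrame(rows).sort_values("val/mae_best")
print(summary.to_string(index=False))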
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/10/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652258020.908803db74d8.1.20 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:65fc2fa44d9b38ea644b7b19735b6a37c9c91c27e480e643d7834b9ec700959d
3
+ size 36796
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/10/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652258226.908803db74d8.1.21 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:917aaa776c94e03aa14863f3b1c47c4d9656a33d0e1740814f71bb4898eff10c
3
+ size 179
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/10/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml ADDED
@@ -0,0 +1,54 @@
1
+ trainer:
2
+ _target_: pytorch_lightning.Trainer
3
+ gpus: 1
4
+ min_epochs: 1
5
+ max_epochs: 100
6
+ resume_from_checkpoint: null
7
+ model:
8
+ _target_: src.models.focus_conv_module.FocusConvLitModule
9
+ image_size: 150
10
+ pool_size: 2
11
+ conv1_size: 3
12
+ conv1_channels: 9
13
+ conv2_size: 3
14
+ conv2_channels: 6
15
+ lin1_size: 32
16
+ lin2_size: 72
17
+ output_size: 1
18
+ lr: 0.005087336593525169
19
+ weight_decay: 0.0005
20
+ datamodule:
21
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
22
+ data_dir: /usr/src/app/data/focus150
23
+ csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
24
+ csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
25
+ csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
26
+ batch_size: 64
27
+ num_workers: 0
28
+ pin_memory: false
29
+ seed: null
30
+ callbacks:
31
+ model_checkpoint:
32
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
33
+ monitor: val/mae
34
+ mode: min
35
+ save_top_k: 1
36
+ save_last: true
37
+ verbose: false
38
+ dirpath: checkpoints/
39
+ filename: epoch_{epoch:03d}
40
+ auto_insert_metric_name: false
41
+ early_stopping:
42
+ _target_: pytorch_lightning.callbacks.EarlyStopping
43
+ monitor: val/mae
44
+ mode: min
45
+ patience: 100
46
+ min_delta: 0
47
+ model_summary:
48
+ _target_: pytorch_lightning.callbacks.RichModelSummary
49
+ max_depth: -1
50
+ rich_progress_bar:
51
+ _target_: pytorch_lightning.callbacks.RichProgressBar
52
+ model/params/total: 216201
53
+ model/params/trainable: 216201
54
+ model/params/non_trainable: 0
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/11/.hydra/config.yaml ADDED
@@ -0,0 +1,81 @@
1
+ original_work_dir: ${hydra:runtime.cwd}
2
+ data_dir: ${original_work_dir}/data
3
+ print_config: true
4
+ ignore_warnings: true
5
+ train: true
6
+ test: true
7
+ seed: null
8
+ name: focusConvMSE_150_hyperparameter_search
9
+ datamodule:
10
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
11
+ data_dir: ${data_dir}/focus150
12
+ csv_train_file: ${data_dir}/focus150/train_metadata.csv
13
+ csv_val_file: ${data_dir}/focus150/validation_metadata.csv
14
+ csv_test_file: ${data_dir}/focus150/test_metadata.csv
15
+ batch_size: 64
16
+ num_workers: 0
17
+ pin_memory: false
18
+ model:
19
+ _target_: src.models.focus_conv_module.FocusConvLitModule
20
+ image_size: 150
21
+ pool_size: 2
22
+ conv1_size: 3
23
+ conv1_channels: 3
24
+ conv2_size: 7
25
+ conv2_channels: 6
26
+ lin1_size: 72
27
+ lin2_size: 128
28
+ output_size: 1
29
+ lr: 0.00010816648299355816
30
+ weight_decay: 0.0005
31
+ callbacks:
32
+ model_checkpoint:
33
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
34
+ monitor: val/mae
35
+ mode: min
36
+ save_top_k: 1
37
+ save_last: true
38
+ verbose: false
39
+ dirpath: checkpoints/
40
+ filename: epoch_{epoch:03d}
41
+ auto_insert_metric_name: false
42
+ early_stopping:
43
+ _target_: pytorch_lightning.callbacks.EarlyStopping
44
+ monitor: val/mae
45
+ mode: min
46
+ patience: 100
47
+ min_delta: 0
48
+ model_summary:
49
+ _target_: pytorch_lightning.callbacks.RichModelSummary
50
+ max_depth: -1
51
+ rich_progress_bar:
52
+ _target_: pytorch_lightning.callbacks.RichProgressBar
53
+ logger:
54
+ csv:
55
+ _target_: pytorch_lightning.loggers.csv_logs.CSVLogger
56
+ save_dir: .
57
+ name: csv/
58
+ prefix: ''
59
+ mlflow:
60
+ _target_: pytorch_lightning.loggers.mlflow.MLFlowLogger
61
+ experiment_name: ${name}
62
+ tracking_uri: ${original_work_dir}/logs/mlflow/mlruns
63
+ tags: null
64
+ save_dir: ./mlruns
65
+ prefix: ''
66
+ artifact_location: null
67
+ tensorboard:
68
+ _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
69
+ save_dir: tensorboard/
70
+ name: null
71
+ version: ${name}
72
+ log_graph: false
73
+ default_hp_metric: true
74
+ prefix: ''
75
+ trainer:
76
+ _target_: pytorch_lightning.Trainer
77
+ gpus: 1
78
+ min_epochs: 1
79
+ max_epochs: 100
80
+ resume_from_checkpoint: null
81
+ optimized_metric: val/mae_best
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/11/.hydra/hydra.yaml ADDED
@@ -0,0 +1,240 @@
1
+ hydra:
2
+ run:
3
+ dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
4
+ sweep:
5
+ dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
6
+ subdir: ${hydra.job.num}
7
+ launcher:
8
+ _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
9
+ sweeper:
10
+ sampler:
11
+ _target_: optuna.samplers.TPESampler
12
+ seed: 12345
13
+ consider_prior: true
14
+ prior_weight: 1.0
15
+ consider_magic_clip: true
16
+ consider_endpoints: false
17
+ n_startup_trials: 10
18
+ n_ei_candidates: 24
19
+ multivariate: false
20
+ warn_independent_sampling: true
21
+ _target_: hydra_plugins.hydra_optuna_sweeper.optuna_sweeper.OptunaSweeper
22
+ direction: minimize
23
+ storage: null
24
+ study_name: focusConvMSE_150_hyperparameter_search
25
+ n_trials: 20
26
+ n_jobs: 1
27
+ search_space:
28
+ datamodule.batch_size:
29
+ type: categorical
30
+ choices:
31
+ - 64
32
+ - 128
33
+ model.lr:
34
+ type: float
35
+ low: 0.0001
36
+ high: 0.01
37
+ model.conv1_size:
38
+ type: categorical
39
+ choices:
40
+ - 3
41
+ - 5
42
+ - 7
43
+ model.conv1_channels:
44
+ type: categorical
45
+ choices:
46
+ - 3
47
+ - 6
48
+ - 9
49
+ model.conv2_size:
50
+ type: categorical
51
+ choices:
52
+ - 3
53
+ - 5
54
+ - 7
55
+ model.conv2_channels:
56
+ type: categorical
57
+ choices:
58
+ - 6
59
+ - 11
60
+ - 16
61
+ model.lin1_size:
62
+ type: categorical
63
+ choices:
64
+ - 32
65
+ - 72
66
+ - 128
67
+ model.lin2_size:
68
+ type: categorical
69
+ choices:
70
+ - 32
71
+ - 72
72
+ - 128
73
+ help:
74
+ app_name: ${hydra.job.name}
75
+ header: '${hydra.help.app_name} is powered by Hydra.
76
+
77
+ '
78
+ footer: 'Powered by Hydra (https://hydra.cc)
79
+
80
+ Use --hydra-help to view Hydra specific help
81
+
82
+ '
83
+ template: '${hydra.help.header}
84
+
85
+ == Configuration groups ==
86
+
87
+ Compose your configuration from those groups (group=option)
88
+
89
+
90
+ $APP_CONFIG_GROUPS
91
+
92
+
93
+ == Config ==
94
+
95
+ Override anything in the config (foo.bar=value)
96
+
97
+
98
+ $CONFIG
99
+
100
+
101
+ ${hydra.help.footer}
102
+
103
+ '
104
+ hydra_help:
105
+ template: 'Hydra (${hydra.runtime.version})
106
+
107
+ See https://hydra.cc for more info.
108
+
109
+
110
+ == Flags ==
111
+
112
+ $FLAGS_HELP
113
+
114
+
115
+ == Configuration groups ==
116
+
117
+ Compose your configuration from those groups (For example, append hydra/job_logging=disabled
118
+ to command line)
119
+
120
+
121
+ $HYDRA_CONFIG_GROUPS
122
+
123
+
124
+ Use ''--cfg hydra'' to Show the Hydra config.
125
+
126
+ '
127
+ hydra_help: ???
128
+ hydra_logging:
129
+ version: 1
130
+ formatters:
131
+ colorlog:
132
+ (): colorlog.ColoredFormatter
133
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
134
+ handlers:
135
+ console:
136
+ class: logging.StreamHandler
137
+ formatter: colorlog
138
+ stream: ext://sys.stdout
139
+ root:
140
+ level: INFO
141
+ handlers:
142
+ - console
143
+ disable_existing_loggers: false
144
+ job_logging:
145
+ version: 1
146
+ formatters:
147
+ simple:
148
+ format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
149
+ colorlog:
150
+ (): colorlog.ColoredFormatter
151
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
152
+ - %(message)s'
153
+ log_colors:
154
+ DEBUG: purple
155
+ INFO: green
156
+ WARNING: yellow
157
+ ERROR: red
158
+ CRITICAL: red
159
+ handlers:
160
+ console:
161
+ class: logging.StreamHandler
162
+ formatter: colorlog
163
+ stream: ext://sys.stdout
164
+ file:
165
+ class: logging.FileHandler
166
+ formatter: simple
167
+ filename: ${hydra.job.name}.log
168
+ root:
169
+ level: INFO
170
+ handlers:
171
+ - console
172
+ - file
173
+ disable_existing_loggers: false
174
+ env: {}
175
+ searchpath: []
176
+ callbacks: {}
177
+ output_subdir: .hydra
178
+ overrides:
179
+ hydra: []
180
+ task:
181
+ - datamodule.batch_size=64
182
+ - model.lr=0.00010816648299355816
183
+ - model.conv1_size=3
184
+ - model.conv1_channels=3
185
+ - model.conv2_size=7
186
+ - model.conv2_channels=6
187
+ - model.lin1_size=72
188
+ - model.lin2_size=128
189
+ - hparams_search=focusConvMSE_150.yaml
190
+ job:
191
+ name: train
192
+ override_dirname: datamodule.batch_size=64,hparams_search=focusConvMSE_150.yaml,model.conv1_channels=3,model.conv1_size=3,model.conv2_channels=6,model.conv2_size=7,model.lin1_size=72,model.lin2_size=128,model.lr=0.00010816648299355816
193
+ id: '11'
194
+ num: 11
195
+ config_name: train.yaml
196
+ env_set: {}
197
+ env_copy: []
198
+ config:
199
+ override_dirname:
200
+ kv_sep: '='
201
+ item_sep: ','
202
+ exclude_keys: []
203
+ runtime:
204
+ version: 1.1.2
205
+ cwd: /usr/src/app
206
+ config_sources:
207
+ - path: hydra.conf
208
+ schema: pkg
209
+ provider: hydra
210
+ - path: /usr/src/app/configs
211
+ schema: file
212
+ provider: main
213
+ - path: hydra_plugins.hydra_colorlog.conf
214
+ schema: pkg
215
+ provider: hydra-colorlog
216
+ - path: ''
217
+ schema: structured
218
+ provider: schema
219
+ choices:
220
+ local: default.yaml
221
+ hparams_search: focusConvMSE_150.yaml
222
+ debug: null
223
+ experiment: null
224
+ log_dir: default.yaml
225
+ trainer: long.yaml
226
+ logger: many_loggers
227
+ callbacks: default.yaml
228
+ model: focusConv_150.yaml
229
+ datamodule: focus150.yaml
230
+ hydra/env: default
231
+ hydra/callbacks: null
232
+ hydra/job_logging: colorlog
233
+ hydra/hydra_logging: colorlog
234
+ hydra/hydra_help: default
235
+ hydra/help: default
236
+ hydra/sweeper: optuna
237
+ hydra/sweeper/sampler: tpe
238
+ hydra/launcher: basic
239
+ hydra/output: default
240
+ verbose: false
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/11/.hydra/overrides.yaml ADDED
@@ -0,0 +1,9 @@
1
+ - datamodule.batch_size=64
2
+ - model.lr=0.00010816648299355816
3
+ - model.conv1_size=3
4
+ - model.conv1_channels=3
5
+ - model.conv2_size=7
6
+ - model.conv2_channels=6
7
+ - model.lin1_size=72
8
+ - model.lin2_size=128
9
+ - hparams_search=focusConvMSE_150.yaml
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/11/checkpoints/epoch_098.ckpt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e7d88e42e353d3c3a02fa8ddc16e9edfd51e5d18f0185778b69ab4e276ee0835
3
+ size 6849093
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/11/checkpoints/last.ckpt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5a3989e00c663b5d037abf748f9ce8c728c930b05b8a4fe9d57a60c4f30b9585
3
+ size 6849093
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/11/csv/version_0/hparams.yaml ADDED
@@ -0,0 +1,54 @@
1
+ trainer:
2
+ _target_: pytorch_lightning.Trainer
3
+ gpus: 1
4
+ min_epochs: 1
5
+ max_epochs: 100
6
+ resume_from_checkpoint: null
7
+ model:
8
+ _target_: src.models.focus_conv_module.FocusConvLitModule
9
+ image_size: 150
10
+ pool_size: 2
11
+ conv1_size: 3
12
+ conv1_channels: 3
13
+ conv2_size: 7
14
+ conv2_channels: 6
15
+ lin1_size: 72
16
+ lin2_size: 128
17
+ output_size: 1
18
+ lr: 0.00010816648299355816
19
+ weight_decay: 0.0005
20
+ datamodule:
21
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
22
+ data_dir: /usr/src/app/data/focus150
23
+ csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
24
+ csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
25
+ csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
26
+ batch_size: 64
27
+ num_workers: 0
28
+ pin_memory: false
29
+ seed: null
30
+ callbacks:
31
+ model_checkpoint:
32
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
33
+ monitor: val/mae
34
+ mode: min
35
+ save_top_k: 1
36
+ save_last: true
37
+ verbose: false
38
+ dirpath: checkpoints/
39
+ filename: epoch_{epoch:03d}
40
+ auto_insert_metric_name: false
41
+ early_stopping:
42
+ _target_: pytorch_lightning.callbacks.EarlyStopping
43
+ monitor: val/mae
44
+ mode: min
45
+ patience: 100
46
+ min_delta: 0
47
+ model_summary:
48
+ _target_: pytorch_lightning.callbacks.RichModelSummary
49
+ max_depth: -1
50
+ rich_progress_bar:
51
+ _target_: pytorch_lightning.callbacks.RichProgressBar
52
+ model/params/total: 569669
53
+ model/params/trainable: 569669
54
+ model/params/non_trainable: 0
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/11/csv/version_0/metrics.csv ADDED
@@ -0,0 +1,202 @@
1
+ val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae
2
+ 0.0004394736315589398,0.01759462058544159,0.01759462058544159,0,47,,,,
3
+ ,,,1,47,0.0009351766784675419,0.023430056869983673,,
4
+ 0.00041906489059329033,0.01721055805683136,0.01721055805683136,1,95,,,,
5
+ ,,,2,95,0.00040893949335440993,0.0169113390147686,,
6
+ 0.00041984315612353384,0.017213858664035797,0.01721055805683136,2,143,,,,
7
+ ,,,3,143,0.0004081485094502568,0.016930682584643364,,
8
+ 0.0004227296158205718,0.01728403940796852,0.01721055805683136,3,191,,,,
9
+ ,,,4,191,0.00040986636304296553,0.016950614750385284,,
10
+ 0.00042270508129149675,0.01736144907772541,0.01721055805683136,4,239,,,,
11
+ ,,,5,239,0.0004075665201526135,0.016925007104873657,,
12
+ 0.0004182030097581446,0.017209336161613464,0.017209336161613464,5,287,,,,
13
+ ,,,6,287,0.0004079578793607652,0.016964636743068695,,
14
+ 0.00041821785271167755,0.01721034198999405,0.017209336161613464,6,335,,,,
15
+ ,,,7,335,0.0004071846487931907,0.016894778236746788,,
16
+ 0.0004181323747616261,0.017227523028850555,0.017209336161613464,7,383,,,,
17
+ ,,,8,383,0.00040658749639987946,0.016897208988666534,,
18
+ 0.0004184022545814514,0.017221981659531593,0.017209336161613464,8,431,,,,
19
+ ,,,9,431,0.00040903923218138516,0.01694357395172119,,
20
+ 0.00043418965651653707,0.01746235601603985,0.017209336161613464,9,479,,,,
21
+ ,,,10,479,0.00040912095573730767,0.016967719420790672,,
22
+ 0.0004263891896698624,0.017350107431411743,0.017209336161613464,10,527,,,,
23
+ ,,,11,527,0.00040970530244521797,0.01693388819694519,,
24
+ 0.00041944460826925933,0.017246125265955925,0.017209336161613464,11,575,,,,
25
+ ,,,12,575,0.0004083856474608183,0.01694868691265583,,
26
+ 0.0004312821547500789,0.01750073954463005,0.017209336161613464,12,623,,,,
27
+ ,,,13,623,0.0004043453373014927,0.01688040979206562,,
28
+ 0.00041922967648133636,0.01723908819258213,0.017209336161613464,13,671,,,,
29
+ ,,,14,671,0.00040992864524014294,0.016984954476356506,,
30
+ 0.0004183636046946049,0.017232075333595276,0.017209336161613464,14,719,,,,
31
+ ,,,15,719,0.0004078464407939464,0.016938133165240288,,
32
+ 0.0004213706706650555,0.017330141738057137,0.017209336161613464,15,767,,,,
33
+ ,,,16,767,0.00040721698314882815,0.01693125255405903,,
34
+ 0.0004258810658939183,0.017421795055270195,0.017209336161613464,16,815,,,,
35
+ ,,,17,815,0.00040840404108166695,0.016969764605164528,,
36
+ 0.0004273841914255172,0.017452670261263847,0.017209336161613464,17,863,,,,
37
+ ,,,18,863,0.0004114188195671886,0.01694268174469471,,
38
+ 0.00042429703171364963,0.01732434518635273,0.017209336161613464,18,911,,,,
39
+ ,,,19,911,0.0004136214265599847,0.017073052003979683,,
40
+ 0.00041961405077017844,0.017272720113396645,0.017209336161613464,19,959,,,,
41
+ ,,,20,959,0.00040746034937910736,0.016918562352657318,,
42
+ 0.0004219464317429811,0.017344538122415543,0.017209336161613464,20,1007,,,,
43
+ ,,,21,1007,0.0004084990650881082,0.016918551176786423,,
44
+ 0.0004194392531644553,0.017226368188858032,0.017209336161613464,21,1055,,,,
45
+ ,,,22,1055,0.00041016514296643436,0.016997957602143288,,
46
+ 0.00042012592894025147,0.01729559525847435,0.017209336161613464,22,1103,,,,
47
+ ,,,23,1103,0.0004045181558467448,0.016872847452759743,,
48
+ 0.00041877696639858186,0.01723659597337246,0.017209336161613464,23,1151,,,,
49
+ ,,,24,1151,0.0004077923658769578,0.016967764124274254,,
50
+ 0.000419879041146487,0.017242688685655594,0.017209336161613464,24,1199,,,,
51
+ ,,,25,1199,0.00040664407424628735,0.01691400445997715,,
52
+ 0.0004241569258738309,0.017392829060554504,0.017209336161613464,25,1247,,,,
53
+ ,,,26,1247,0.0004059354541823268,0.016912179067730904,,
54
+ 0.00042421076796017587,0.017325211316347122,0.017209336161613464,26,1295,,,,
55
+ ,,,27,1295,0.0004058551858179271,0.016905680298805237,,
56
+ 0.00042095675598829985,0.017259234562516212,0.017209336161613464,27,1343,,,,
57
+ ,,,28,1343,0.0004095307376701385,0.01697835512459278,,
58
+ 0.0004286278272047639,0.017470862716436386,0.017209336161613464,28,1391,,,,
59
+ ,,,29,1391,0.0004061052459292114,0.016909321770071983,,
60
+ 0.0004189849423710257,0.01724412851035595,0.017209336161613464,29,1439,,,,
61
+ ,,,30,1439,0.00040587192052043974,0.016914542764425278,,
62
+ 0.00042107870103791356,0.017260584980249405,0.017209336161613464,30,1487,,,,
63
+ ,,,31,1487,0.000406049337470904,0.016903238371014595,,
64
+ 0.0004323205794207752,0.01752062514424324,0.017209336161613464,31,1535,,,,
65
+ ,,,32,1535,0.00040674221236258745,0.016873374581336975,,
66
+ 0.0004220288246870041,0.017342563718557358,0.017209336161613464,32,1583,,,,
67
+ ,,,33,1583,0.00040837537380866706,0.01692577637732029,,
68
+ 0.00041968817822635174,0.01726115681231022,0.017209336161613464,33,1631,,,,
69
+ ,,,34,1631,0.00040623859968036413,0.016888979822397232,,
70
+ 0.0004191771731711924,0.01722089573740959,0.017209336161613464,34,1679,,,,
71
+ ,,,35,1679,0.0004055347526445985,0.016880258917808533,,
72
+ 0.0004193111089989543,0.01721850223839283,0.017209336161613464,35,1727,,,,
73
+ ,,,36,1727,0.0004064968088641763,0.016912607476115227,,
74
+ 0.00042720127385109663,0.017447343096137047,0.017209336161613464,36,1775,,,,
75
+ ,,,37,1775,0.0004063997184857726,0.01690622605383396,,
76
+ 0.00042203973862342536,0.017340166494250298,0.017209336161613464,37,1823,,,,
77
+ ,,,38,1823,0.000408988882554695,0.01694549433887005,,
78
+ 0.0004210990737192333,0.017310941591858864,0.017209336161613464,38,1871,,,,
79
+ ,,,39,1871,0.00040547287790104747,0.01686597242951393,,
80
+ 0.00042055093217641115,0.01723533496260643,0.017209336161613464,39,1919,,,,
81
+ ,,,40,1919,0.000405726459575817,0.016893891617655754,,
82
+ 0.0004202211566735059,0.01727336458861828,0.017209336161613464,40,1967,,,,
83
+ ,,,41,1967,0.000406817503971979,0.01692579872906208,,
84
+ 0.00042087677866220474,0.017299750819802284,0.017209336161613464,41,2015,,,,
85
+ ,,,42,2015,0.00040672384784556925,0.016875851899385452,,
86
+ 0.00041950418381020427,0.017211077734827995,0.017209336161613464,42,2063,,,,
87
+ ,,,43,2063,0.0004062054504174739,0.016895409673452377,,
88
+ 0.0004219987604301423,0.01733498089015484,0.017209336161613464,43,2111,,,,
89
+ ,,,44,2111,0.0004059690982103348,0.016894662752747536,,
90
+ 0.0004282607405912131,0.017461011186242104,0.017209336161613464,44,2159,,,,
91
+ ,,,45,2159,0.00040477659786120057,0.0168693196028471,,
92
+ 0.0004205380682833493,0.01728254184126854,0.017209336161613464,45,2207,,,,
93
+ ,,,46,2207,0.0004059646453242749,0.01687576062977314,,
94
+ 0.0004195549990981817,0.01721327193081379,0.017209336161613464,46,2255,,,,
95
+ ,,,47,2255,0.00040678426739759743,0.016902988776564598,,
96
+ 0.0004232749342918396,0.017365971580147743,0.017209336161613464,47,2303,,,,
97
+ ,,,48,2303,0.00040617489139549434,0.01688697747886181,,
98
+ 0.00042031530756503344,0.0172633808106184,0.017209336161613464,48,2351,,,,
99
+ ,,,49,2351,0.0004060060018673539,0.01685989648103714,,
100
+ 0.00041977816727012396,0.017214851453900337,0.017209336161613464,49,2399,,,,
101
+ ,,,50,2399,0.00040631488081999123,0.016884861513972282,,
102
+ 0.00042017194209620357,0.017245924100279808,0.017209336161613464,50,2447,,,,
103
+ ,,,51,2447,0.0004073091549798846,0.0169228445738554,,
104
+ 0.0004198932438157499,0.017207011580467224,0.017207011580467224,51,2495,,,,
105
+ ,,,52,2495,0.00040664279367774725,0.01688285544514656,,
106
+ 0.0004203504358883947,0.017201559618115425,0.017201559618115425,52,2543,,,,
107
+ ,,,53,2543,0.00040602145600132644,0.016854193061590195,,
108
+ 0.0004201350675430149,0.017222188413143158,0.017201559618115425,53,2591,,,,
109
+ ,,,54,2591,0.00040644078399054706,0.016869615763425827,,
110
+ 0.00042159808799624443,0.017308376729488373,0.017201559618115425,54,2639,,,,
111
+ ,,,55,2639,0.0004067619738634676,0.01691346801817417,,
112
+ 0.0004202201671432704,0.017207473516464233,0.017201559618115425,55,2687,,,,
113
+ ,,,56,2687,0.00040618900675326586,0.016874011605978012,,
114
+ 0.0004205267468933016,0.01719348132610321,0.01719348132610321,56,2735,,,,
115
+ ,,,57,2735,0.00040786725003272295,0.016908960416913033,,
116
+ 0.00042105617467314005,0.01727565936744213,0.01719348132610321,57,2783,,,,
117
+ ,,,58,2783,0.00040669902227818966,0.016872091218829155,,
118
+ 0.00042053242214024067,0.01722707785665989,0.01719348132610321,58,2831,,,,
119
+ ,,,59,2831,0.0004069833958055824,0.016887515783309937,,
120
+ 0.0004210672341287136,0.017203351482748985,0.01719348132610321,59,2879,,,,
121
+ ,,,60,2879,0.000407146435463801,0.016905397176742554,,
122
+ 0.00042082270374521613,0.017188889905810356,0.017188889905810356,60,2927,,,,
123
+ ,,,61,2927,0.00040758875547908247,0.016900332644581795,,
124
+ 0.0004208592581562698,0.017239205539226532,0.017188889905810356,61,2975,,,,
125
+ ,,,62,2975,0.0004076542390976101,0.01691567711532116,,
126
+ 0.0004211909545119852,0.01726185530424118,0.017188889905810356,62,3023,,,,
127
+ ,,,63,3023,0.0004071186122018844,0.016875412315130234,,
128
+ 0.00042141269659623504,0.017273573204874992,0.017188889905810356,63,3071,,,,
129
+ ,,,64,3071,0.00040695411735214293,0.016890009865164757,,
130
+ 0.00042095634853467345,0.017195511609315872,0.017188889905810356,64,3119,,,,
131
+ ,,,65,3119,0.0004070116556249559,0.016871344298124313,,
132
+ 0.0004225668963044882,0.017262568697333336,0.017188889905810356,65,3167,,,,
133
+ ,,,66,3167,0.00040649864240549505,0.016882920637726784,,
134
+ 0.0004211030900478363,0.0172194205224514,0.017188889905810356,66,3215,,,,
135
+ ,,,67,3215,0.00040816894033923745,0.016897454857826233,,
136
+ 0.0004213799547869712,0.017253760248422623,0.017188889905810356,67,3263,,,,
137
+ ,,,68,3263,0.0004068886919412762,0.01687660440802574,,
138
+ 0.0004213257634546608,0.017242353409528732,0.017188889905810356,68,3311,,,,
139
+ ,,,69,3311,0.00040740708936937153,0.016883065924048424,,
140
+ 0.0004224215226713568,0.017309898510575294,0.017188889905810356,69,3359,,,,
141
+ ,,,70,3359,0.0004082007799297571,0.016910336911678314,,
142
+ 0.0004215894441585988,0.017260881140828133,0.017188889905810356,70,3407,,,,
143
+ ,,,71,3407,0.0004078579950146377,0.016899337992072105,,
144
+ 0.0004233350628055632,0.017341619357466698,0.017188889905810356,71,3455,,,,
145
+ ,,,72,3455,0.0004077304038219154,0.016887040808796883,,
146
+ 0.00042134863906539977,0.01720932498574257,0.017188889905810356,72,3503,,,,
147
+ ,,,73,3503,0.0004068168345838785,0.0168687142431736,,
148
+ 0.0004214070795569569,0.0172271691262722,0.017188889905810356,73,3551,,,,
149
+ ,,,74,3551,0.0004071328148711473,0.01687823235988617,,
150
+ 0.00042139433207921684,0.017213845625519753,0.017188889905810356,74,3599,,,,
151
+ ,,,75,3599,0.000407340528909117,0.01686885766685009,,
152
+ 0.00042143251630477607,0.017224188894033432,0.017188889905810356,75,3647,,,,
153
+ ,,,76,3647,0.00040711218025535345,0.016894230619072914,,
154
+ 0.00042148478678427637,0.017233220860362053,0.017188889905810356,76,3695,,,,
155
+ ,,,77,3695,0.0004076219629496336,0.016865206882357597,,
156
+ 0.0004214370856061578,0.01720760017633438,0.017188889905810356,77,3743,,,,
157
+ ,,,78,3743,0.0004075857868883759,0.016889605671167374,,
158
+ 0.00042144660255871713,0.017206992954015732,0.017188889905810356,78,3791,,,,
159
+ ,,,79,3791,0.0004076878249179572,0.016882330179214478,,
160
+ 0.0004217152891214937,0.01719551347196102,0.017188889905810356,79,3839,,,,
161
+ ,,,80,3839,0.00040750508196651936,0.01688593067228794,,
162
+ 0.00042259410838596523,0.01730957254767418,0.017188889905810356,80,3887,,,,
163
+ ,,,81,3887,0.0004073386371601373,0.016866957768797874,,
164
+ 0.00042276069871149957,0.017316414043307304,0.017188889905810356,81,3935,,,,
165
+ ,,,82,3935,0.0004076618643011898,0.016892241314053535,,
166
+ 0.0004223377618473023,0.017297260463237762,0.017188889905810356,82,3983,,,,
167
+ ,,,83,3983,0.0004080832004547119,0.016909297555685043,,
168
+ 0.0004223301075398922,0.017296744510531425,0.017188889905810356,83,4031,,,,
169
+ ,,,84,4031,0.00040758089744485915,0.016870059072971344,,
170
+ 0.0004214837681502104,0.017221003770828247,0.017188889905810356,84,4079,,,,
171
+ ,,,85,4079,0.00040729722240939736,0.016884170472621918,,
172
+ 0.00042154756374657154,0.01723591797053814,0.017188889905810356,85,4127,,,,
173
+ ,,,86,4127,0.0004071864241268486,0.016871938481926918,,
174
+ 0.00042148437933064997,0.017200423404574394,0.017188889905810356,86,4175,,,,
175
+ ,,,87,4175,0.00040779015398584306,0.01691027171909809,,
176
+ 0.0004216297820676118,0.017247051000595093,0.017188889905810356,87,4223,,,,
177
+ ,,,88,4223,0.0004074085736647248,0.01688033528625965,,
178
+ 0.00042225176002830267,0.017292378470301628,0.017188889905810356,88,4271,,,,
179
+ ,,,89,4271,0.0004072744632139802,0.016860222443938255,,
180
+ 0.0004215699154883623,0.017238985747098923,0.017188889905810356,89,4319,,,,
181
+ ,,,90,4319,0.00040759044350124896,0.01689395308494568,,
182
+ 0.00042148042120970786,0.01720346137881279,0.017188889905810356,90,4367,,,,
183
+ ,,,91,4367,0.000407122919568792,0.016904108226299286,,
184
+ 0.00042178231524303555,0.017261847853660583,0.017188889905810356,91,4415,,,,
185
+ ,,,92,4415,0.00040751745109446347,0.016863549128174782,,
186
+ 0.0004230293270666152,0.017326325178146362,0.017188889905810356,92,4463,,,,
187
+ ,,,93,4463,0.0004077217890881002,0.016855353489518166,,
188
+ 0.00042154555558227,0.017234867438673973,0.017188889905810356,93,4511,,,,
189
+ ,,,94,4511,0.00040814257226884365,0.016924481838941574,,
190
+ 0.0004215830995235592,0.01724076084792614,0.017188889905810356,94,4559,,,,
191
+ ,,,95,4559,0.00040729803731665015,0.01686866395175457,,
192
+ 0.00042147969361394644,0.01720438338816166,0.017188889905810356,95,4607,,,,
193
+ ,,,96,4607,0.0004076566838193685,0.016907932236790657,,
194
+ 0.00042162800673395395,0.01724657043814659,0.017188889905810356,96,4655,,,,
195
+ ,,,97,4655,0.0004075334290973842,0.01689138263463974,,
196
+ 0.0004218488174956292,0.017267100512981415,0.017188889905810356,97,4703,,,,
197
+ ,,,98,4703,0.0004077201010659337,0.01687220297753811,,
198
+ 0.0004216317320242524,0.017184168100357056,0.017184168100357056,98,4751,,,,
199
+ ,,,99,4751,0.0004070783033967018,0.016874946653842926,,
200
+ 0.00042147882049903274,0.017205115407705307,0.017184168100357056,99,4799,,,,
201
+ ,,,100,4799,0.0004079008649569005,0.01687534712255001,,
202
+ ,,,98,4800,,,0.00041456366307102144,0.016964180395007133
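For orientation, here is a minimal sketch (not part of the original run) of how a metrics.csv written by the CSVLogger in this sweep can be inspected with pandas. The path below is one assumed run directory from this multirun; the column layout (sparse val/*, train/* rows and a final test row) matches the file above.

```python
# Editorial sketch: inspect one metrics.csv from this sweep with pandas.
# The exact run directory ("11") is an assumption; adjust the path as needed.
import pandas as pd

path = ("logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/"
        "2022-05-11_07-59-42/11/csv/version_0/metrics.csv")
df = pd.read_csv(path)

# Validation rows carry val/loss and val/mae; training rows carry train/*;
# the final row holds the test metrics (all other columns empty there).
val = df.dropna(subset=["val/mae"])
best = val.loc[val["val/mae"].idxmin()]
print(f"best val/mae {best['val/mae']:.6f} at epoch {int(best['epoch'])}")

test = df.dropna(subset=["test/mae"]).iloc[-1]
print(f"test/mae {test['test/mae']:.6f}, test/loss {test['test/loss']:.6f}")
```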
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/11/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652258227.908803db74d8.1.22 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3d4b8b1f5a99e2ac50f8e229067ea98ea02f64abfa01ef399e48a942869c445c
3
+ size 36796
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/11/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652258424.908803db74d8.1.23 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:058766e3c92caf54d86e6ee0954fe590863d41b75702dec64d5a8047b3db49f9
3
+ size 179
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/11/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml ADDED
@@ -0,0 +1,54 @@
1
+ trainer:
2
+ _target_: pytorch_lightning.Trainer
3
+ gpus: 1
4
+ min_epochs: 1
5
+ max_epochs: 100
6
+ resume_from_checkpoint: null
7
+ model:
8
+ _target_: src.models.focus_conv_module.FocusConvLitModule
9
+ image_size: 150
10
+ pool_size: 2
11
+ conv1_size: 3
12
+ conv1_channels: 3
13
+ conv2_size: 7
14
+ conv2_channels: 6
15
+ lin1_size: 72
16
+ lin2_size: 128
17
+ output_size: 1
18
+ lr: 0.00010816648299355816
19
+ weight_decay: 0.0005
20
+ datamodule:
21
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
22
+ data_dir: /usr/src/app/data/focus150
23
+ csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
24
+ csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
25
+ csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
26
+ batch_size: 64
27
+ num_workers: 0
28
+ pin_memory: false
29
+ seed: null
30
+ callbacks:
31
+ model_checkpoint:
32
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
33
+ monitor: val/mae
34
+ mode: min
35
+ save_top_k: 1
36
+ save_last: true
37
+ verbose: false
38
+ dirpath: checkpoints/
39
+ filename: epoch_{epoch:03d}
40
+ auto_insert_metric_name: false
41
+ early_stopping:
42
+ _target_: pytorch_lightning.callbacks.EarlyStopping
43
+ monitor: val/mae
44
+ mode: min
45
+ patience: 100
46
+ min_delta: 0
47
+ model_summary:
48
+ _target_: pytorch_lightning.callbacks.RichModelSummary
49
+ max_depth: -1
50
+ rich_progress_bar:
51
+ _target_: pytorch_lightning.callbacks.RichProgressBar
52
+ model/params/total: 569669
53
+ model/params/trainable: 569669
54
+ model/params/non_trainable: 0
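As a rough illustration of how the hyperparameters recorded above (conv sizes and channels, linear sizes, pool size, lr, weight_decay) typically map onto a small CNN regressor, here is a hedged LightningModule sketch. It is not the project's FocusConvLitModule; the real module's layer order, padding, and normalisation may differ, so its parameter count need not match the model/params/total reported above.

```python
# Illustrative sketch only, wired from the hyperparameters above.
import torch
import torch.nn as nn
import pytorch_lightning as pl


class TinyFocusCNN(pl.LightningModule):
    def __init__(self, image_size=150, pool_size=2, conv1_size=3, conv1_channels=3,
                 conv2_size=7, conv2_channels=6, lin1_size=72, lin2_size=128,
                 output_size=1, lr=1.0816648299355816e-4, weight_decay=5e-4):
        super().__init__()
        self.save_hyperparameters()
        self.features = nn.Sequential(
            nn.Conv2d(3, conv1_channels, conv1_size), nn.ReLU(),
            nn.MaxPool2d(pool_size),
            nn.Conv2d(conv1_channels, conv2_channels, conv2_size), nn.ReLU(),
            nn.MaxPool2d(pool_size),
        )
        # Infer the flattened feature size from a dummy forward pass.
        with torch.no_grad():
            n_flat = self.features(torch.zeros(1, 3, image_size, image_size)).numel()
        self.head = nn.Sequential(
            nn.Flatten(),
            nn.Linear(n_flat, lin1_size), nn.ReLU(),
            nn.Linear(lin1_size, lin2_size), nn.ReLU(),
            nn.Linear(lin2_size, output_size),
        )
        self.criterion = nn.MSELoss()

    def forward(self, x):
        return self.head(self.features(x))

    def training_step(self, batch, batch_idx):
        x, y = batch
        loss = self.criterion(self(x).squeeze(-1), y)
        self.log("train/loss", loss)
        return loss

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(),
                                lr=self.hparams.lr,
                                weight_decay=self.hparams.weight_decay)
```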
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/12/.hydra/config.yaml ADDED
@@ -0,0 +1,81 @@
1
+ original_work_dir: ${hydra:runtime.cwd}
2
+ data_dir: ${original_work_dir}/data
3
+ print_config: true
4
+ ignore_warnings: true
5
+ train: true
6
+ test: true
7
+ seed: null
8
+ name: focusConvMSE_150_hyperparameter_search
9
+ datamodule:
10
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
11
+ data_dir: ${data_dir}/focus150
12
+ csv_train_file: ${data_dir}/focus150/train_metadata.csv
13
+ csv_val_file: ${data_dir}/focus150/validation_metadata.csv
14
+ csv_test_file: ${data_dir}/focus150/test_metadata.csv
15
+ batch_size: 64
16
+ num_workers: 0
17
+ pin_memory: false
18
+ model:
19
+ _target_: src.models.focus_conv_module.FocusConvLitModule
20
+ image_size: 150
21
+ pool_size: 2
22
+ conv1_size: 5
23
+ conv1_channels: 3
24
+ conv2_size: 7
25
+ conv2_channels: 6
26
+ lin1_size: 32
27
+ lin2_size: 128
28
+ output_size: 1
29
+ lr: 0.00010464412462087176
30
+ weight_decay: 0.0005
31
+ callbacks:
32
+ model_checkpoint:
33
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
34
+ monitor: val/mae
35
+ mode: min
36
+ save_top_k: 1
37
+ save_last: true
38
+ verbose: false
39
+ dirpath: checkpoints/
40
+ filename: epoch_{epoch:03d}
41
+ auto_insert_metric_name: false
42
+ early_stopping:
43
+ _target_: pytorch_lightning.callbacks.EarlyStopping
44
+ monitor: val/mae
45
+ mode: min
46
+ patience: 100
47
+ min_delta: 0
48
+ model_summary:
49
+ _target_: pytorch_lightning.callbacks.RichModelSummary
50
+ max_depth: -1
51
+ rich_progress_bar:
52
+ _target_: pytorch_lightning.callbacks.RichProgressBar
53
+ logger:
54
+ csv:
55
+ _target_: pytorch_lightning.loggers.csv_logs.CSVLogger
56
+ save_dir: .
57
+ name: csv/
58
+ prefix: ''
59
+ mlflow:
60
+ _target_: pytorch_lightning.loggers.mlflow.MLFlowLogger
61
+ experiment_name: ${name}
62
+ tracking_uri: ${original_work_dir}/logs/mlflow/mlruns
63
+ tags: null
64
+ save_dir: ./mlruns
65
+ prefix: ''
66
+ artifact_location: null
67
+ tensorboard:
68
+ _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
69
+ save_dir: tensorboard/
70
+ name: null
71
+ version: ${name}
72
+ log_graph: false
73
+ default_hp_metric: true
74
+ prefix: ''
75
+ trainer:
76
+ _target_: pytorch_lightning.Trainer
77
+ gpus: 1
78
+ min_epochs: 1
79
+ max_epochs: 100
80
+ resume_from_checkpoint: null
81
+ optimized_metric: val/mae_best
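The `_target_` fields in the composed config above are meant to be turned into objects with hydra.utils.instantiate. A hedged sketch of a training entry point in that style follows; the repository's actual train script may differ in details such as seeding and logging.

```python
# Sketch of how a composed config like the one above is typically consumed.
import hydra
from omegaconf import DictConfig


@hydra.main(config_path="configs", config_name="train.yaml")
def train(cfg: DictConfig) -> float:
    datamodule = hydra.utils.instantiate(cfg.datamodule)   # FocusDataModule
    model = hydra.utils.instantiate(cfg.model)              # FocusConvLitModule
    callbacks = [hydra.utils.instantiate(c) for c in cfg.callbacks.values()]
    loggers = [hydra.utils.instantiate(lg) for lg in cfg.logger.values()]
    trainer = hydra.utils.instantiate(cfg.trainer, callbacks=callbacks, logger=loggers)

    trainer.fit(model=model, datamodule=datamodule)
    if cfg.get("test"):
        trainer.test(model=model, datamodule=datamodule, ckpt_path="best")

    # The Optuna sweeper minimises the value returned here (optimized_metric: val/mae_best).
    return trainer.callback_metrics[cfg.optimized_metric].item()


if __name__ == "__main__":
    train()
```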
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/12/.hydra/hydra.yaml ADDED
@@ -0,0 +1,240 @@
1
+ hydra:
2
+ run:
3
+ dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
4
+ sweep:
5
+ dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
6
+ subdir: ${hydra.job.num}
7
+ launcher:
8
+ _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
9
+ sweeper:
10
+ sampler:
11
+ _target_: optuna.samplers.TPESampler
12
+ seed: 12345
13
+ consider_prior: true
14
+ prior_weight: 1.0
15
+ consider_magic_clip: true
16
+ consider_endpoints: false
17
+ n_startup_trials: 10
18
+ n_ei_candidates: 24
19
+ multivariate: false
20
+ warn_independent_sampling: true
21
+ _target_: hydra_plugins.hydra_optuna_sweeper.optuna_sweeper.OptunaSweeper
22
+ direction: minimize
23
+ storage: null
24
+ study_name: focusConvMSE_150_hyperparameter_search
25
+ n_trials: 20
26
+ n_jobs: 1
27
+ search_space:
28
+ datamodule.batch_size:
29
+ type: categorical
30
+ choices:
31
+ - 64
32
+ - 128
33
+ model.lr:
34
+ type: float
35
+ low: 0.0001
36
+ high: 0.01
37
+ model.conv1_size:
38
+ type: categorical
39
+ choices:
40
+ - 3
41
+ - 5
42
+ - 7
43
+ model.conv1_channels:
44
+ type: categorical
45
+ choices:
46
+ - 3
47
+ - 6
48
+ - 9
49
+ model.conv2_size:
50
+ type: categorical
51
+ choices:
52
+ - 3
53
+ - 5
54
+ - 7
55
+ model.conv2_channels:
56
+ type: categorical
57
+ choices:
58
+ - 6
59
+ - 11
60
+ - 16
61
+ model.lin1_size:
62
+ type: categorical
63
+ choices:
64
+ - 32
65
+ - 72
66
+ - 128
67
+ model.lin2_size:
68
+ type: categorical
69
+ choices:
70
+ - 32
71
+ - 72
72
+ - 128
73
+ help:
74
+ app_name: ${hydra.job.name}
75
+ header: '${hydra.help.app_name} is powered by Hydra.
76
+
77
+ '
78
+ footer: 'Powered by Hydra (https://hydra.cc)
79
+
80
+ Use --hydra-help to view Hydra specific help
81
+
82
+ '
83
+ template: '${hydra.help.header}
84
+
85
+ == Configuration groups ==
86
+
87
+ Compose your configuration from those groups (group=option)
88
+
89
+
90
+ $APP_CONFIG_GROUPS
91
+
92
+
93
+ == Config ==
94
+
95
+ Override anything in the config (foo.bar=value)
96
+
97
+
98
+ $CONFIG
99
+
100
+
101
+ ${hydra.help.footer}
102
+
103
+ '
104
+ hydra_help:
105
+ template: 'Hydra (${hydra.runtime.version})
106
+
107
+ See https://hydra.cc for more info.
108
+
109
+
110
+ == Flags ==
111
+
112
+ $FLAGS_HELP
113
+
114
+
115
+ == Configuration groups ==
116
+
117
+ Compose your configuration from those groups (For example, append hydra/job_logging=disabled
118
+ to command line)
119
+
120
+
121
+ $HYDRA_CONFIG_GROUPS
122
+
123
+
124
+ Use ''--cfg hydra'' to Show the Hydra config.
125
+
126
+ '
127
+ hydra_help: ???
128
+ hydra_logging:
129
+ version: 1
130
+ formatters:
131
+ colorlog:
132
+ (): colorlog.ColoredFormatter
133
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
134
+ handlers:
135
+ console:
136
+ class: logging.StreamHandler
137
+ formatter: colorlog
138
+ stream: ext://sys.stdout
139
+ root:
140
+ level: INFO
141
+ handlers:
142
+ - console
143
+ disable_existing_loggers: false
144
+ job_logging:
145
+ version: 1
146
+ formatters:
147
+ simple:
148
+ format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
149
+ colorlog:
150
+ (): colorlog.ColoredFormatter
151
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
152
+ - %(message)s'
153
+ log_colors:
154
+ DEBUG: purple
155
+ INFO: green
156
+ WARNING: yellow
157
+ ERROR: red
158
+ CRITICAL: red
159
+ handlers:
160
+ console:
161
+ class: logging.StreamHandler
162
+ formatter: colorlog
163
+ stream: ext://sys.stdout
164
+ file:
165
+ class: logging.FileHandler
166
+ formatter: simple
167
+ filename: ${hydra.job.name}.log
168
+ root:
169
+ level: INFO
170
+ handlers:
171
+ - console
172
+ - file
173
+ disable_existing_loggers: false
174
+ env: {}
175
+ searchpath: []
176
+ callbacks: {}
177
+ output_subdir: .hydra
178
+ overrides:
179
+ hydra: []
180
+ task:
181
+ - datamodule.batch_size=64
182
+ - model.lr=0.00010464412462087176
183
+ - model.conv1_size=5
184
+ - model.conv1_channels=3
185
+ - model.conv2_size=7
186
+ - model.conv2_channels=6
187
+ - model.lin1_size=32
188
+ - model.lin2_size=128
189
+ - hparams_search=focusConvMSE_150.yaml
190
+ job:
191
+ name: train
192
+ override_dirname: datamodule.batch_size=64,hparams_search=focusConvMSE_150.yaml,model.conv1_channels=3,model.conv1_size=5,model.conv2_channels=6,model.conv2_size=7,model.lin1_size=32,model.lin2_size=128,model.lr=0.00010464412462087176
193
+ id: '12'
194
+ num: 12
195
+ config_name: train.yaml
196
+ env_set: {}
197
+ env_copy: []
198
+ config:
199
+ override_dirname:
200
+ kv_sep: '='
201
+ item_sep: ','
202
+ exclude_keys: []
203
+ runtime:
204
+ version: 1.1.2
205
+ cwd: /usr/src/app
206
+ config_sources:
207
+ - path: hydra.conf
208
+ schema: pkg
209
+ provider: hydra
210
+ - path: /usr/src/app/configs
211
+ schema: file
212
+ provider: main
213
+ - path: hydra_plugins.hydra_colorlog.conf
214
+ schema: pkg
215
+ provider: hydra-colorlog
216
+ - path: ''
217
+ schema: structured
218
+ provider: schema
219
+ choices:
220
+ local: default.yaml
221
+ hparams_search: focusConvMSE_150.yaml
222
+ debug: null
223
+ experiment: null
224
+ log_dir: default.yaml
225
+ trainer: long.yaml
226
+ logger: many_loggers
227
+ callbacks: default.yaml
228
+ model: focusConv_150.yaml
229
+ datamodule: focus150.yaml
230
+ hydra/env: default
231
+ hydra/callbacks: null
232
+ hydra/job_logging: colorlog
233
+ hydra/hydra_logging: colorlog
234
+ hydra/hydra_help: default
235
+ hydra/help: default
236
+ hydra/sweeper: optuna
237
+ hydra/sweeper/sampler: tpe
238
+ hydra/launcher: basic
239
+ hydra/output: default
240
+ verbose: false
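Restated in plain Optuna terms, the sweeper block above defines a TPE study (seed 12345, 20 trials, minimizing val/mae_best) over the following search space. This is an illustrative equivalent, not how the Hydra Optuna plugin is actually invoked; run_training in the final comment is a hypothetical objective function.

```python
# Editorial sketch: the same search space expressed directly with Optuna.
import optuna


def suggest(trial: optuna.Trial) -> dict:
    return {
        "datamodule.batch_size": trial.suggest_categorical("datamodule.batch_size", [64, 128]),
        "model.lr": trial.suggest_float("model.lr", 1e-4, 1e-2),
        "model.conv1_size": trial.suggest_categorical("model.conv1_size", [3, 5, 7]),
        "model.conv1_channels": trial.suggest_categorical("model.conv1_channels", [3, 6, 9]),
        "model.conv2_size": trial.suggest_categorical("model.conv2_size", [3, 5, 7]),
        "model.conv2_channels": trial.suggest_categorical("model.conv2_channels", [6, 11, 16]),
        "model.lin1_size": trial.suggest_categorical("model.lin1_size", [32, 72, 128]),
        "model.lin2_size": trial.suggest_categorical("model.lin2_size", [32, 72, 128]),
    }


study = optuna.create_study(
    direction="minimize",
    sampler=optuna.samplers.TPESampler(seed=12345, n_startup_trials=10),
)
# study.optimize(lambda t: run_training(**suggest(t)), n_trials=20)  # run_training is hypothetical
```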
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/12/.hydra/overrides.yaml ADDED
@@ -0,0 +1,9 @@
1
+ - datamodule.batch_size=64
2
+ - model.lr=0.00010464412462087176
3
+ - model.conv1_size=5
4
+ - model.conv1_channels=3
5
+ - model.conv2_size=7
6
+ - model.conv2_channels=6
7
+ - model.lin1_size=32
8
+ - model.lin2_size=128
9
+ - hparams_search=focusConvMSE_150.yaml
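These recorded overrides can be replayed outside of the sweep, for example with Hydra's compose API. The sketch below assumes the layout recorded in hydra.yaml (a configs/ directory at the repository root); the hparams_search override is left out since it mainly configures the sweeper rather than the training job itself.

```python
# Sketch: re-compose the config for trial 12 from its recorded overrides.
from hydra import compose, initialize

with initialize(config_path="configs"):
    cfg = compose(
        config_name="train.yaml",
        overrides=[
            "datamodule.batch_size=64",
            "model.lr=0.00010464412462087176",
            "model.conv1_size=5",
            "model.conv1_channels=3",
            "model.conv2_size=7",
            "model.conv2_channels=6",
            "model.lin1_size=32",
            "model.lin2_size=128",
        ],
    )

print(cfg.model.lr, cfg.datamodule.batch_size)
```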
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/12/checkpoints/epoch_089.ckpt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:92b471db1b1ecede57e850cd224b73ffa430867679c74d2407674b2acd4d7026
3
+ size 2892869
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/12/checkpoints/last.ckpt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d41efaa29aef16549628cfbd51fb4eb6ae1154ac9894b9cbb43073470ca5f592
3
+ size 2892869
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/12/csv/version_0/hparams.yaml ADDED
@@ -0,0 +1,54 @@
1
+ trainer:
2
+ _target_: pytorch_lightning.Trainer
3
+ gpus: 1
4
+ min_epochs: 1
5
+ max_epochs: 100
6
+ resume_from_checkpoint: null
7
+ model:
8
+ _target_: src.models.focus_conv_module.FocusConvLitModule
9
+ image_size: 150
10
+ pool_size: 2
11
+ conv1_size: 5
12
+ conv1_channels: 3
13
+ conv2_size: 7
14
+ conv2_channels: 6
15
+ lin1_size: 32
16
+ lin2_size: 128
17
+ output_size: 1
18
+ lr: 0.00010464412462087176
19
+ weight_decay: 0.0005
20
+ datamodule:
21
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
22
+ data_dir: /usr/src/app/data/focus150
23
+ csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
24
+ csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
25
+ csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
26
+ batch_size: 64
27
+ num_workers: 0
28
+ pin_memory: false
29
+ seed: null
30
+ callbacks:
31
+ model_checkpoint:
32
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
33
+ monitor: val/mae
34
+ mode: min
35
+ save_top_k: 1
36
+ save_last: true
37
+ verbose: false
38
+ dirpath: checkpoints/
39
+ filename: epoch_{epoch:03d}
40
+ auto_insert_metric_name: false
41
+ early_stopping:
42
+ _target_: pytorch_lightning.callbacks.EarlyStopping
43
+ monitor: val/mae
44
+ mode: min
45
+ patience: 100
46
+ min_delta: 0
47
+ model_summary:
48
+ _target_: pytorch_lightning.callbacks.RichModelSummary
49
+ max_depth: -1
50
+ rich_progress_bar:
51
+ _target_: pytorch_lightning.callbacks.RichProgressBar
52
+ model/params/total: 239981
53
+ model/params/trainable: 239981
54
+ model/params/non_trainable: 0
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/12/csv/version_0/metrics.csv ADDED
@@ -0,0 +1,202 @@
1
+ val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae
2
+ 0.0004424896906130016,0.017959481105208397,0.017959481105208397,0,47,,,,
3
+ ,,,1,47,0.0008503751014359295,0.021830741316080093,,
4
+ 0.0004394336137920618,0.017535479739308357,0.017535479739308357,1,95,,,,
5
+ ,,,2,95,0.0004198385577183217,0.017243146896362305,,
6
+ 0.0004256853135302663,0.01749459095299244,0.01749459095299244,2,143,,,,
7
+ ,,,3,143,0.0004169363819528371,0.017174007371068,,
8
+ 0.0004255564708728343,0.017491791397333145,0.017491791397333145,3,191,,,,
9
+ ,,,4,191,0.00041399087058380246,0.017083102837204933,,
10
+ 0.0004244967713020742,0.01736367493867874,0.01736367493867874,4,239,,,,
11
+ ,,,5,239,0.00041221638093702495,0.0170562956482172,,
12
+ 0.0004244090523570776,0.017457807436585426,0.01736367493867874,5,287,,,,
13
+ ,,,6,287,0.0004145925340708345,0.017110759392380714,,
14
+ 0.00042389644659124315,0.017344404011964798,0.017344404011964798,6,335,,,,
15
+ ,,,7,335,0.00041398825123906136,0.017099611461162567,,
16
+ 0.00042261421913281083,0.01734013296663761,0.01734013296663761,7,383,,,,
17
+ ,,,8,383,0.0004125249688513577,0.017082981765270233,,
18
+ 0.00042427514563314617,0.017437094822525978,0.01734013296663761,8,431,,,,
19
+ ,,,9,431,0.00041128185694105923,0.017007360234856606,,
20
+ 0.0004267975455150008,0.017360305413603783,0.01734013296663761,9,479,,,,
21
+ ,,,10,479,0.0004101861850358546,0.017003115266561508,,
22
+ 0.00045065765152685344,0.017889520153403282,0.01734013296663761,10,527,,,,
23
+ ,,,11,527,0.0004174894420430064,0.01712065003812313,,
24
+ 0.00042258965549990535,0.01735387183725834,0.01734013296663761,11,575,,,,
25
+ ,,,12,575,0.0004142852558288723,0.017029745504260063,,
26
+ 0.00042401644168421626,0.017421476542949677,0.01734013296663761,12,623,,,,
27
+ ,,,13,623,0.00041140159009955823,0.017037283629179,,
28
+ 0.00042305735405534506,0.017402898520231247,0.01734013296663761,13,671,,,,
29
+ ,,,14,671,0.0004085058462806046,0.016932420432567596,,
30
+ 0.00042921589920297265,0.01752978377044201,0.01734013296663761,14,719,,,,
31
+ ,,,15,719,0.0004077278426848352,0.016931703314185143,,
32
+ 0.00042201988981105387,0.0173671692609787,0.01734013296663761,15,767,,,,
33
+ ,,,16,767,0.000409798783948645,0.01701532118022442,,
34
+ 0.0004217711684759706,0.01725127175450325,0.01725127175450325,16,815,,,,
35
+ ,,,17,815,0.0004090390575584024,0.016987862065434456,,
36
+ 0.00042004144052043557,0.01728256419301033,0.01725127175450325,17,863,,,,
37
+ ,,,18,863,0.00040869275107979774,0.016960421577095985,,
38
+ 0.0004271664656698704,0.01738048531115055,0.01725127175450325,18,911,,,,
39
+ ,,,19,911,0.0004105813568457961,0.017000408843159676,,
40
+ 0.0004191923653706908,0.017288006842136383,0.01725127175450325,19,959,,,,
41
+ ,,,20,959,0.0004090520378667861,0.016984596848487854,,
42
+ 0.00043419821304269135,0.0175836943089962,0.01725127175450325,20,1007,,,,
43
+ ,,,21,1007,0.0004106829292140901,0.016990289092063904,,
44
+ 0.0004192343621980399,0.017262808978557587,0.01725127175450325,21,1055,,,,
45
+ ,,,22,1055,0.0004120550293009728,0.017018092796206474,,
46
+ 0.0004190890758763999,0.017277225852012634,0.01725127175450325,22,1103,,,,
47
+ ,,,23,1103,0.0004073189338669181,0.01695427857339382,,
48
+ 0.0004191976331640035,0.017253341153264046,0.01725127175450325,23,1151,,,,
49
+ ,,,24,1151,0.00040765252197161317,0.01695452816784382,,
50
+ 0.0004192139022052288,0.017255427315831184,0.01725127175450325,24,1199,,,,
51
+ ,,,25,1199,0.0004080521466676146,0.016956355422735214,,
52
+ 0.0004217189271003008,0.01736193150281906,0.01725127175450325,25,1247,,,,
53
+ ,,,26,1247,0.0004086950793862343,0.016954844817519188,,
54
+ 0.00042492084321565926,0.017329704016447067,0.01725127175450325,26,1295,,,,
55
+ ,,,27,1295,0.0004149794112890959,0.017044488340616226,,
56
+ 0.00042391865281388164,0.017308935523033142,0.01725127175450325,27,1343,,,,
57
+ ,,,28,1343,0.00040839650318957865,0.016959894448518753,,
58
+ 0.0004199695831630379,0.01727968640625477,0.01725127175450325,28,1391,,,,
59
+ ,,,29,1391,0.00040835910476744175,0.016938744112849236,,
60
+ 0.0004201676056254655,0.01728694699704647,0.01725127175450325,29,1439,,,,
61
+ ,,,30,1439,0.0004080582002643496,0.01692156307399273,,
62
+ 0.0004197414964437485,0.017272979021072388,0.01725127175450325,30,1487,,,,
63
+ ,,,31,1487,0.00041038941708393395,0.01699984446167946,,
64
+ 0.0004194212378934026,0.017279094085097313,0.01725127175450325,31,1535,,,,
65
+ ,,,32,1535,0.000405834085540846,0.016923686489462852,,
66
+ 0.00042217891314066947,0.01727912202477455,0.01725127175450325,32,1583,,,,
67
+ ,,,33,1583,0.0004075095057487488,0.016922341659665108,,
68
+ 0.00041999921086244285,0.01731017231941223,0.01725127175450325,33,1631,,,,
69
+ ,,,34,1631,0.0004061452636960894,0.01691046915948391,,
70
+ 0.00042071775533258915,0.017314253374934196,0.01725127175450325,34,1679,,,,
71
+ ,,,35,1679,0.00040677739889360964,0.016929712146520615,,
72
+ 0.00041933328611776233,0.017255181446671486,0.01725127175450325,35,1727,,,,
73
+ ,,,36,1727,0.00040587663534097373,0.016896529123187065,,
74
+ 0.0004256489046383649,0.01743735745549202,0.01725127175450325,36,1775,,,,
75
+ ,,,37,1775,0.00040643554530106485,0.01691404916346073,,
76
+ 0.0004228287434671074,0.017296647652983665,0.01725127175450325,37,1823,,,,
77
+ ,,,38,1823,0.0004090176662430167,0.016972295939922333,,
78
+ 0.000419100106228143,0.017252372577786446,0.01725127175450325,38,1871,,,,
79
+ ,,,39,1871,0.0004071822331752628,0.016918769106268883,,
80
+ 0.00041932176100090146,0.017233381047844887,0.017233381047844887,39,1919,,,,
81
+ ,,,40,1919,0.0004086628614459187,0.016961360350251198,,
82
+ 0.00041972563485614955,0.01726134680211544,0.017233381047844887,40,1967,,,,
83
+ ,,,41,1967,0.0004072812735103071,0.016915565356612206,,
84
+ 0.00041937854257412255,0.01723916456103325,0.017233381047844887,41,2015,,,,
85
+ ,,,42,2015,0.0004068742855452001,0.01692686229944229,,
86
+ 0.00041900400537997484,0.017257582396268845,0.017233381047844887,42,2063,,,,
87
+ ,,,43,2063,0.00040646380512043834,0.016936227679252625,,
88
+ 0.0004213516367599368,0.01732936128973961,0.017233381047844887,43,2111,,,,
89
+ ,,,44,2111,0.0004085975233465433,0.016970692202448845,,
90
+ 0.0004200285184197128,0.017255302518606186,0.017233381047844887,44,2159,,,,
91
+ ,,,45,2159,0.00040601525688543916,0.016925085335969925,,
92
+ 0.00042586761992424726,0.01743488386273384,0.017233381047844887,45,2207,,,,
93
+ ,,,46,2207,0.000405689439503476,0.01689976453781128,,
94
+ 0.0004190520558040589,0.017237048596143723,0.017233381047844887,46,2255,,,,
95
+ ,,,47,2255,0.0004098281206097454,0.01699254848062992,,
96
+ 0.0004210366460029036,0.017268165946006775,0.017233381047844887,47,2303,,,,
97
+ ,,,48,2303,0.00040609779534861445,0.01691155508160591,,
98
+ 0.0004189402097836137,0.01723451167345047,0.017233381047844887,48,2351,,,,
99
+ ,,,49,2351,0.00040664427797310054,0.016927678138017654,,
100
+ 0.00043060703319497406,0.017503641545772552,0.017233381047844887,49,2399,,,,
101
+ ,,,50,2399,0.00040804711170494556,0.016940303146839142,,
102
+ 0.00042062182910740376,0.017256947234272957,0.017233381047844887,50,2447,,,,
103
+ ,,,51,2447,0.0004058278282172978,0.016903702169656754,,
104
+ 0.0004233802028466016,0.01737489551305771,0.017233381047844887,51,2495,,,,
105
+ ,,,52,2495,0.00041140205576084554,0.016979748383164406,,
106
+ 0.00042137218406423926,0.017321409657597542,0.017233381047844887,52,2543,,,,
107
+ ,,,53,2543,0.00040844755130819976,0.016955701634287834,,
108
+ 0.0004214969230815768,0.01727386750280857,0.017233381047844887,53,2591,,,,
109
+ ,,,54,2591,0.000406143837608397,0.016907261684536934,,
110
+ 0.0004194319772068411,0.01724429242312908,0.017233381047844887,54,2639,,,,
111
+ ,,,55,2639,0.0004064734384883195,0.016908807680010796,,
112
+ 0.0004189143073745072,0.017222696915268898,0.017222696915268898,55,2687,,,,
113
+ ,,,56,2687,0.00040622276719659567,0.01691458374261856,,
114
+ 0.00041982135735452175,0.017235537990927696,0.017222696915268898,56,2735,,,,
115
+ ,,,57,2735,0.0004063477972522378,0.016902480274438858,,
116
+ 0.0004189763858448714,0.01722045987844467,0.01722045987844467,57,2783,,,,
117
+ ,,,58,2783,0.00040614878525957465,0.01688353158533573,,
118
+ 0.00042443795246072114,0.017395619302988052,0.01722045987844467,58,2831,,,,
119
+ ,,,59,2831,0.0004078562487848103,0.01692832261323929,,
120
+ 0.000419820164097473,0.017228975892066956,0.01722045987844467,59,2879,,,,
121
+ ,,,60,2879,0.000407998712034896,0.01693573221564293,,
122
+ 0.00042271081474609673,0.017355410382151604,0.01722045987844467,60,2927,,,,
123
+ ,,,61,2927,0.0004067414265591651,0.016900844871997833,,
124
+ 0.0004202548007015139,0.01728091575205326,0.01722045987844467,61,2975,,,,
125
+ ,,,62,2975,0.0004071922739967704,0.016922224313020706,,
126
+ 0.0004189744358882308,0.017214331775903702,0.017214331775903702,62,3023,,,,
127
+ ,,,63,3023,0.00040663970867171884,0.016909807920455933,,
128
+ 0.00042052919161505997,0.017238674685359,0.017214331775903702,63,3071,,,,
129
+ ,,,64,3071,0.00040765723679214716,0.016922272741794586,,
130
+ 0.00042058920371346176,0.017291359603405,0.017214331775903702,64,3119,,,,
131
+ ,,,65,3119,0.00040696037467569113,0.016935555264353752,,
132
+ 0.0004191528423689306,0.01721913367509842,0.017214331775903702,65,3167,,,,
133
+ ,,,66,3167,0.00040656630881130695,0.01690230332314968,,
134
+ 0.0004265015304554254,0.017428452149033546,0.017214331775903702,66,3215,,,,
135
+ ,,,67,3215,0.00040803488809615374,0.01693248189985752,,
136
+ 0.0004194868088234216,0.01722446084022522,0.017214331775903702,67,3263,,,,
137
+ ,,,68,3263,0.000410389038734138,0.017007116228342056,,
138
+ 0.0004289847274776548,0.01746821217238903,0.017214331775903702,68,3311,,,,
139
+ ,,,69,3311,0.00040725848521105945,0.01690066047012806,,
140
+ 0.0004195565124973655,0.01720581389963627,0.01720581389963627,69,3359,,,,
141
+ ,,,70,3359,0.00040736841037869453,0.016895892098546028,,
142
+ 0.00042674760334193707,0.01743222214281559,0.01720581389963627,70,3407,,,,
143
+ ,,,71,3407,0.00040618880302645266,0.016877828165888786,,
144
+ 0.00042028972529806197,0.017265677452087402,0.01720581389963627,71,3455,,,,
145
+ ,,,72,3455,0.0004143154073972255,0.017035480588674545,,
146
+ 0.0004196070658508688,0.017196333035826683,0.017196333035826683,72,3503,,,,
147
+ ,,,73,3503,0.00040715798968449235,0.016886098310351372,,
148
+ 0.00042047613533213735,0.017219020053744316,0.017196333035826683,73,3551,,,,
149
+ ,,,74,3551,0.0004086419357918203,0.016935650259256363,,
150
+ 0.0004258734988979995,0.01736036315560341,0.017196333035826683,74,3599,,,,
151
+ ,,,75,3599,0.0004089963622391224,0.0169577244669199,,
152
+ 0.000419808755395934,0.01720733940601349,0.017196333035826683,75,3647,,,,
153
+ ,,,76,3647,0.0004079870705027133,0.01692424900829792,,
154
+ 0.0004211988707538694,0.017236631363630295,0.017196333035826683,76,3695,,,,
155
+ ,,,77,3695,0.0004064473614562303,0.01687300205230713,,
156
+ 0.0004202329146210104,0.017244501039385796,0.017196333035826683,77,3743,,,,
157
+ ,,,78,3743,0.00040792382787913084,0.016908859834074974,,
158
+ 0.00042076484533026814,0.017216766253113747,0.017196333035826683,78,3791,,,,
159
+ ,,,79,3791,0.00040654526674188673,0.01687348634004593,,
160
+ 0.00042001818655990064,0.017190324142575264,0.017190324142575264,79,3839,,,,
161
+ ,,,80,3839,0.00040627329144626856,0.016884636133909225,,
162
+ 0.0004255044914316386,0.017406614497303963,0.017190324142575264,80,3887,,,,
163
+ ,,,81,3887,0.00040674902265891433,0.016892146319150925,,
164
+ 0.0004211646446492523,0.017285823822021484,0.017190324142575264,81,3935,,,,
165
+ ,,,82,3935,0.0004065226239617914,0.01687990501523018,,
166
+ 0.00042035547085106373,0.01722731627523899,0.017190324142575264,82,3983,,,,
167
+ ,,,83,3983,0.0004084274696651846,0.016938254237174988,,
168
+ 0.0004205346922390163,0.017191216349601746,0.017190324142575264,83,4031,,,,
169
+ ,,,84,4031,0.0004086291301064193,0.01697009801864624,,
170
+ 0.00042040058178827167,0.01720654033124447,0.017190324142575264,84,4079,,,,
171
+ ,,,85,4079,0.0004084391985088587,0.016913630068302155,,
172
+ 0.00042578173452056944,0.017359964549541473,0.017190324142575264,85,4127,,,,
173
+ ,,,86,4127,0.00040733470814302564,0.016919804736971855,,
174
+ 0.00042214125278405845,0.017315171658992767,0.017190324142575264,86,4175,,,,
175
+ ,,,87,4175,0.0004073490563314408,0.01691298373043537,,
176
+ 0.00042060783016495407,0.01719784364104271,0.017190324142575264,87,4223,,,,
177
+ ,,,88,4223,0.0004067633708473295,0.016886699944734573,,
178
+ 0.00042095762910321355,0.01724831573665142,0.017190324142575264,88,4271,,,,
179
+ ,,,89,4271,0.00040765127050690353,0.01691434532403946,,
180
+ 0.0004210338811390102,0.017190048471093178,0.017190048471093178,89,4319,,,,
181
+ ,,,90,4319,0.00040717696538195014,0.01687450148165226,,
182
+ 0.00042310060234740376,0.017341844737529755,0.017190048471093178,90,4367,,,,
183
+ ,,,91,4367,0.0004093144671060145,0.016939982771873474,,
184
+ 0.00042176738497801125,0.017227180302143097,0.017190048471093178,91,4415,,,,
185
+ ,,,92,4415,0.00040769626502878964,0.016885636374354362,,
186
+ 0.0004216250963509083,0.017216544598340988,0.017190048471093178,92,4463,,,,
187
+ ,,,93,4463,0.0004078825586475432,0.016912853345274925,,
188
+ 0.0004210306506138295,0.017209583893418312,0.017190048471093178,93,4511,,,,
189
+ ,,,94,4511,0.00040880130836740136,0.016922330483794212,,
190
+ 0.00042168042273260653,0.017277520149946213,0.017190048471093178,94,4559,,,,
191
+ ,,,95,4559,0.00040731849730946124,0.01686994731426239,,
192
+ 0.0004228450998198241,0.017327819019556046,0.017190048471093178,95,4607,,,,
193
+ ,,,96,4607,0.00040774286026135087,0.01689796894788742,,
194
+ 0.0004211871128063649,0.017213985323905945,0.017190048471093178,96,4655,,,,
195
+ ,,,97,4655,0.00040760886622592807,0.016890835016965866,,
196
+ 0.0004233677464071661,0.017343534156680107,0.017190048471093178,97,4703,,,,
197
+ ,,,98,4703,0.0004082719096913934,0.016921719536185265,,
198
+ 0.0004215212247800082,0.017190946266055107,0.017190048471093178,98,4751,,,,
199
+ ,,,99,4751,0.0004084703396074474,0.01692170463502407,,
200
+ 0.000422388082370162,0.017246762290596962,0.017190048471093178,99,4799,,,,
201
+ ,,,100,4799,0.0004078446072526276,0.01690007373690605,,
202
+ ,,,89,4800,,,0.0004132115573156625,0.01696036010980606
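To compare trials across this multirun, the per-run metrics.csv files can be aggregated; a small sketch follows, assuming the directory layout shown in this commit.

```python
# Sketch: rank all trials in this multirun by their best validation MAE.
import glob
import pandas as pd

rows = []
for path in sorted(glob.glob(
        "logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/"
        "2022-05-11_07-59-42/*/csv/version_0/metrics.csv")):
    df = pd.read_csv(path)
    run = path.split("/")[-4]  # the trial number directory
    rows.append({
        "run": run,
        "best_val_mae": df["val/mae"].min(),
        "test_mae": df["test/mae"].dropna().iloc[-1] if df["test/mae"].notna().any() else None,
    })

summary = pd.DataFrame(rows).sort_values("best_val_mae")
print(summary.to_string(index=False))
```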
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/12/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652258424.908803db74d8.1.24 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b1a170118fa9eb5b9fa39973ca99b2a8826d42d2953a1cb4c630997d4d582107
3
+ size 36796
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/12/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652258606.908803db74d8.1.25 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:17728cc56439e7585742a7a9ff89bfb66394861caffe44f49e891656670569ff
3
+ size 179
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_07-59-42/12/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml ADDED
@@ -0,0 +1,54 @@
1
+ trainer:
2
+ _target_: pytorch_lightning.Trainer
3
+ gpus: 1
4
+ min_epochs: 1
5
+ max_epochs: 100
6
+ resume_from_checkpoint: null
7
+ model:
8
+ _target_: src.models.focus_conv_module.FocusConvLitModule
9
+ image_size: 150
10
+ pool_size: 2
11
+ conv1_size: 5
12
+ conv1_channels: 3
13
+ conv2_size: 7
14
+ conv2_channels: 6
15
+ lin1_size: 32
16
+ lin2_size: 128
17
+ output_size: 1
18
+ lr: 0.00010464412462087176
19
+ weight_decay: 0.0005
20
+ datamodule:
21
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
22
+ data_dir: /usr/src/app/data/focus150
23
+ csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
24
+ csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
25
+ csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
26
+ batch_size: 64
27
+ num_workers: 0
28
+ pin_memory: false
29
+ seed: null
30
+ callbacks:
31
+ model_checkpoint:
32
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
33
+ monitor: val/mae
34
+ mode: min
35
+ save_top_k: 1
36
+ save_last: true
37
+ verbose: false
38
+ dirpath: checkpoints/
39
+ filename: epoch_{epoch:03d}
40
+ auto_insert_metric_name: false
41
+ early_stopping:
42
+ _target_: pytorch_lightning.callbacks.EarlyStopping
43
+ monitor: val/mae
44
+ mode: min
45
+ patience: 100
46
+ min_delta: 0
47
+ model_summary:
48
+ _target_: pytorch_lightning.callbacks.RichModelSummary
49
+ max_depth: -1
50
+ rich_progress_bar:
51
+ _target_: pytorch_lightning.callbacks.RichProgressBar
52
+ model/params/total: 239981
53
+ model/params/trainable: 239981
54
+ model/params/non_trainable: 0