Hannes Kuchelmeister committed
Commit 798f4db · 1 Parent(s): cd2e0ce

add optimization run for cnn

This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
Files changed (50)
  1. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/0/.hydra/config.yaml +81 -0
  2. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/0/.hydra/hydra.yaml +240 -0
  3. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/0/.hydra/overrides.yaml +9 -0
  4. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/0/checkpoints/epoch_077.ckpt +3 -0
  5. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/0/checkpoints/last.ckpt +3 -0
  6. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/0/csv/version_0/hparams.yaml +54 -0
  7. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/0/csv/version_0/metrics.csv +202 -0
  8. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/0/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652258779.30858e6037f6.1.0 +3 -0
  9. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/0/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652258986.30858e6037f6.1.1 +3 -0
  10. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/0/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml +54 -0
  11. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/1/.hydra/config.yaml +81 -0
  12. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/1/.hydra/hydra.yaml +240 -0
  13. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/1/.hydra/overrides.yaml +9 -0
  14. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/1/checkpoints/epoch_098.ckpt +3 -0
  15. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/1/checkpoints/last.ckpt +3 -0
  16. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/1/csv/version_0/hparams.yaml +54 -0
  17. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/1/csv/version_0/metrics.csv +202 -0
  18. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/1/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652258987.30858e6037f6.1.2 +3 -0
  19. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/1/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652259201.30858e6037f6.1.3 +3 -0
  20. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/1/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml +54 -0
  21. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/10/.hydra/config.yaml +81 -0
  22. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/10/.hydra/hydra.yaml +240 -0
  23. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/10/.hydra/overrides.yaml +9 -0
  24. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/10/checkpoints/epoch_092.ckpt +3 -0
  25. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/10/checkpoints/last.ckpt +3 -0
  26. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/10/csv/version_0/hparams.yaml +54 -0
  27. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/10/csv/version_0/metrics.csv +202 -0
  28. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/10/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652260828.30858e6037f6.1.20 +3 -0
  29. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/10/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652261056.30858e6037f6.1.21 +3 -0
  30. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/10/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml +54 -0
  31. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/11/.hydra/config.yaml +81 -0
  32. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/11/.hydra/hydra.yaml +240 -0
  33. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/11/.hydra/overrides.yaml +9 -0
  34. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/11/checkpoints/epoch_096.ckpt +3 -0
  35. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/11/checkpoints/last.ckpt +3 -0
  36. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/11/csv/version_0/hparams.yaml +54 -0
  37. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/11/csv/version_0/metrics.csv +202 -0
  38. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/11/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652261056.30858e6037f6.1.22 +3 -0
  39. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/11/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652261259.30858e6037f6.1.23 +3 -0
  40. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/11/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml +54 -0
  41. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/12/.hydra/config.yaml +81 -0
  42. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/12/.hydra/hydra.yaml +240 -0
  43. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/12/.hydra/overrides.yaml +9 -0
  44. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/12/checkpoints/epoch_054.ckpt +3 -0
  45. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/12/checkpoints/last.ckpt +3 -0
  46. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/12/csv/version_0/hparams.yaml +54 -0
  47. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/12/csv/version_0/metrics.csv +202 -0
  48. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/12/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652261260.30858e6037f6.1.24 +3 -0
  49. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/12/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652261462.30858e6037f6.1.25 +3 -0
  50. logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/12/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml +54 -0
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/0/.hydra/config.yaml ADDED
@@ -0,0 +1,81 @@
+ original_work_dir: ${hydra:runtime.cwd}
+ data_dir: ${original_work_dir}/data
+ print_config: true
+ ignore_warnings: true
+ train: true
+ test: true
+ seed: null
+ name: focusConvMSE_150_hyperparameter_search
+ datamodule:
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
+ data_dir: ${data_dir}/focus150
+ csv_train_file: ${data_dir}/focus150/train_metadata.csv
+ csv_val_file: ${data_dir}/focus150/validation_metadata.csv
+ csv_test_file: ${data_dir}/focus150/test_metadata.csv
+ batch_size: 64
+ num_workers: 0
+ pin_memory: false
+ model:
+ _target_: src.models.focus_conv_module.FocusConvLitModule
+ image_size: 150
+ pool_size: 2
+ conv1_size: 7
+ conv1_channels: 3
+ conv2_size: 7
+ conv2_channels: 16
+ lin1_size: 128
+ lin2_size: 32
+ output_size: 1
+ lr: 0.0019207962356032353
+ weight_decay: 0.0005
+ callbacks:
+ model_checkpoint:
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
+ monitor: val/mae
+ mode: min
+ save_top_k: 1
+ save_last: true
+ verbose: false
+ dirpath: checkpoints/
+ filename: epoch_{epoch:03d}
+ auto_insert_metric_name: false
+ early_stopping:
+ _target_: pytorch_lightning.callbacks.EarlyStopping
+ monitor: val/mae
+ mode: min
+ patience: 100
+ min_delta: 0
+ model_summary:
+ _target_: pytorch_lightning.callbacks.RichModelSummary
+ max_depth: -1
+ rich_progress_bar:
+ _target_: pytorch_lightning.callbacks.RichProgressBar
+ logger:
+ csv:
+ _target_: pytorch_lightning.loggers.csv_logs.CSVLogger
+ save_dir: .
+ name: csv/
+ prefix: ''
+ mlflow:
+ _target_: pytorch_lightning.loggers.mlflow.MLFlowLogger
+ experiment_name: ${name}
+ tracking_uri: ${original_work_dir}/logs/mlflow/mlruns
+ tags: null
+ save_dir: ./mlruns
+ prefix: ''
+ artifact_location: null
+ tensorboard:
+ _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
+ save_dir: tensorboard/
+ name: null
+ version: ${name}
+ log_graph: false
+ default_hp_metric: true
+ prefix: ''
+ trainer:
+ _target_: pytorch_lightning.Trainer
+ gpus: 1
+ min_epochs: 1
+ max_epochs: 100
+ resume_from_checkpoint: null
+ optimized_metric: val/mae_best
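
For reference, a minimal sketch (not part of this commit) of how a Hydra config like the one above is typically consumed in a lightning-hydra-template style project: every node carrying a _target_ key is instantiated, and the value named by optimized_metric (val/mae_best) is returned to the sweeper. The train entry point and its exact structure are assumptions; only the config keys shown in the diff are taken from the commit.

# Sketch only; callbacks and loggers are omitted for brevity.
import hydra
from omegaconf import DictConfig

@hydra.main(config_path="configs", config_name="train.yaml")
def train(cfg: DictConfig) -> float:
    # Each node with a `_target_` key is instantiated from the config.
    datamodule = hydra.utils.instantiate(cfg.datamodule)   # FocusDataModule, batch_size=64
    model = hydra.utils.instantiate(cfg.model)              # FocusConvLitModule, lr from the sweep
    trainer = hydra.utils.instantiate(cfg.trainer)          # pytorch_lightning.Trainer, max_epochs=100
    trainer.fit(model=model, datamodule=datamodule)
    # The sweeper minimizes the metric named by `optimized_metric` (val/mae_best).
    return trainer.callback_metrics[cfg.optimized_metric].item()

if __name__ == "__main__":
    train()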
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/0/.hydra/hydra.yaml ADDED
@@ -0,0 +1,240 @@
1
+ hydra:
2
+ run:
3
+ dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
4
+ sweep:
5
+ dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
6
+ subdir: ${hydra.job.num}
7
+ launcher:
8
+ _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
9
+ sweeper:
10
+ sampler:
11
+ _target_: optuna.samplers.TPESampler
12
+ seed: 12345
13
+ consider_prior: true
14
+ prior_weight: 1.0
15
+ consider_magic_clip: true
16
+ consider_endpoints: false
17
+ n_startup_trials: 10
18
+ n_ei_candidates: 24
19
+ multivariate: false
20
+ warn_independent_sampling: true
21
+ _target_: hydra_plugins.hydra_optuna_sweeper.optuna_sweeper.OptunaSweeper
22
+ direction: minimize
23
+ storage: null
24
+ study_name: focusConvMSE_150_hyperparameter_search
25
+ n_trials: 20
26
+ n_jobs: 1
27
+ search_space:
28
+ datamodule.batch_size:
29
+ type: categorical
30
+ choices:
31
+ - 64
32
+ - 128
33
+ model.lr:
34
+ type: float
35
+ low: 0.0001
36
+ high: 0.01
37
+ model.conv1_size:
38
+ type: categorical
39
+ choices:
40
+ - 3
41
+ - 5
42
+ - 7
43
+ model.conv1_channels:
44
+ type: categorical
45
+ choices:
46
+ - 3
47
+ - 6
48
+ - 9
49
+ model.conv2_size:
50
+ type: categorical
51
+ choices:
52
+ - 3
53
+ - 5
54
+ - 7
55
+ model.conv2_channels:
56
+ type: categorical
57
+ choices:
58
+ - 6
59
+ - 11
60
+ - 16
61
+ model.lin1_size:
62
+ type: categorical
63
+ choices:
64
+ - 32
65
+ - 72
66
+ - 128
67
+ model.lin2_size:
68
+ type: categorical
69
+ choices:
70
+ - 32
71
+ - 72
72
+ - 128
73
+ help:
74
+ app_name: ${hydra.job.name}
75
+ header: '${hydra.help.app_name} is powered by Hydra.
76
+
77
+ '
78
+ footer: 'Powered by Hydra (https://hydra.cc)
79
+
80
+ Use --hydra-help to view Hydra specific help
81
+
82
+ '
83
+ template: '${hydra.help.header}
84
+
85
+ == Configuration groups ==
86
+
87
+ Compose your configuration from those groups (group=option)
88
+
89
+
90
+ $APP_CONFIG_GROUPS
91
+
92
+
93
+ == Config ==
94
+
95
+ Override anything in the config (foo.bar=value)
96
+
97
+
98
+ $CONFIG
99
+
100
+
101
+ ${hydra.help.footer}
102
+
103
+ '
104
+ hydra_help:
105
+ template: 'Hydra (${hydra.runtime.version})
106
+
107
+ See https://hydra.cc for more info.
108
+
109
+
110
+ == Flags ==
111
+
112
+ $FLAGS_HELP
113
+
114
+
115
+ == Configuration groups ==
116
+
117
+ Compose your configuration from those groups (For example, append hydra/job_logging=disabled
118
+ to command line)
119
+
120
+
121
+ $HYDRA_CONFIG_GROUPS
122
+
123
+
124
+ Use ''--cfg hydra'' to Show the Hydra config.
125
+
126
+ '
127
+ hydra_help: ???
128
+ hydra_logging:
129
+ version: 1
130
+ formatters:
131
+ colorlog:
132
+ (): colorlog.ColoredFormatter
133
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
134
+ handlers:
135
+ console:
136
+ class: logging.StreamHandler
137
+ formatter: colorlog
138
+ stream: ext://sys.stdout
139
+ root:
140
+ level: INFO
141
+ handlers:
142
+ - console
143
+ disable_existing_loggers: false
144
+ job_logging:
145
+ version: 1
146
+ formatters:
147
+ simple:
148
+ format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
149
+ colorlog:
150
+ (): colorlog.ColoredFormatter
151
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
152
+ - %(message)s'
153
+ log_colors:
154
+ DEBUG: purple
155
+ INFO: green
156
+ WARNING: yellow
157
+ ERROR: red
158
+ CRITICAL: red
159
+ handlers:
160
+ console:
161
+ class: logging.StreamHandler
162
+ formatter: colorlog
163
+ stream: ext://sys.stdout
164
+ file:
165
+ class: logging.FileHandler
166
+ formatter: simple
167
+ filename: ${hydra.job.name}.log
168
+ root:
169
+ level: INFO
170
+ handlers:
171
+ - console
172
+ - file
173
+ disable_existing_loggers: false
174
+ env: {}
175
+ searchpath: []
176
+ callbacks: {}
177
+ output_subdir: .hydra
178
+ overrides:
179
+ hydra: []
180
+ task:
181
+ - datamodule.batch_size=64
182
+ - model.lr=0.0019207962356032353
183
+ - model.conv1_size=7
184
+ - model.conv1_channels=3
185
+ - model.conv2_size=7
186
+ - model.conv2_channels=16
187
+ - model.lin1_size=128
188
+ - model.lin2_size=32
189
+ - hparams_search=focusConvMSE_150.yaml
190
+ job:
191
+ name: train
192
+ override_dirname: datamodule.batch_size=64,hparams_search=focusConvMSE_150.yaml,model.conv1_channels=3,model.conv1_size=7,model.conv2_channels=16,model.conv2_size=7,model.lin1_size=128,model.lin2_size=32,model.lr=0.0019207962356032353
193
+ id: '0'
194
+ num: 0
195
+ config_name: train.yaml
196
+ env_set: {}
197
+ env_copy: []
198
+ config:
199
+ override_dirname:
200
+ kv_sep: '='
201
+ item_sep: ','
202
+ exclude_keys: []
203
+ runtime:
204
+ version: 1.1.2
205
+ cwd: /usr/src/app
206
+ config_sources:
207
+ - path: hydra.conf
208
+ schema: pkg
209
+ provider: hydra
210
+ - path: /usr/src/app/configs
211
+ schema: file
212
+ provider: main
213
+ - path: hydra_plugins.hydra_colorlog.conf
214
+ schema: pkg
215
+ provider: hydra-colorlog
216
+ - path: ''
217
+ schema: structured
218
+ provider: schema
219
+ choices:
220
+ local: default.yaml
221
+ hparams_search: focusConvMSE_150.yaml
222
+ debug: null
223
+ experiment: null
224
+ log_dir: default.yaml
225
+ trainer: long.yaml
226
+ logger: many_loggers
227
+ callbacks: default.yaml
228
+ model: focusConv_150.yaml
229
+ datamodule: focus150.yaml
230
+ hydra/env: default
231
+ hydra/callbacks: null
232
+ hydra/job_logging: colorlog
233
+ hydra/hydra_logging: colorlog
234
+ hydra/hydra_help: default
235
+ hydra/help: default
236
+ hydra/sweeper: optuna
237
+ hydra/sweeper/sampler: tpe
238
+ hydra/launcher: basic
239
+ hydra/output: default
240
+ verbose: false
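
The sweeper block above fully specifies the search: an Optuna TPE sampler seeded with 12345, 20 trials, minimizing val/mae_best over batch size, learning rate, convolution sizes/channels, and linear layer sizes. As a rough, hedged equivalent outside Hydra, the same search space could be written directly against Optuna's suggest_* API (the objective that launches a training run per trial is assumed and not shown):

# Rough stand-alone equivalent of the hydra-optuna-sweeper search_space above; sketch only.
import optuna

def suggest_overrides(trial: optuna.Trial) -> dict:
    return {
        "datamodule.batch_size": trial.suggest_categorical("datamodule.batch_size", [64, 128]),
        "model.lr": trial.suggest_float("model.lr", 0.0001, 0.01),
        "model.conv1_size": trial.suggest_categorical("model.conv1_size", [3, 5, 7]),
        "model.conv1_channels": trial.suggest_categorical("model.conv1_channels", [3, 6, 9]),
        "model.conv2_size": trial.suggest_categorical("model.conv2_size", [3, 5, 7]),
        "model.conv2_channels": trial.suggest_categorical("model.conv2_channels", [6, 11, 16]),
        "model.lin1_size": trial.suggest_categorical("model.lin1_size", [32, 72, 128]),
        "model.lin2_size": trial.suggest_categorical("model.lin2_size", [32, 72, 128]),
    }

study = optuna.create_study(
    study_name="focusConvMSE_150_hyperparameter_search",
    direction="minimize",
    sampler=optuna.samplers.TPESampler(seed=12345),
)
# study.optimize(objective, n_trials=20)  # objective would run one training job per trial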
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/0/.hydra/overrides.yaml ADDED
@@ -0,0 +1,9 @@
+ - datamodule.batch_size=64
+ - model.lr=0.0019207962356032353
+ - model.conv1_size=7
+ - model.conv1_channels=3
+ - model.conv2_size=7
+ - model.conv2_channels=16
+ - model.lin1_size=128
+ - model.lin2_size=32
+ - hparams_search=focusConvMSE_150.yaml
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/0/checkpoints/epoch_077.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:148cf070013a3c85e637dda2ec42b2a77ef1dc86182417929d969b8b91e74ba4
+ size 30180613
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/0/checkpoints/last.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ee10b8a45132b566b7ea3869abaf24d74f9a2a8c509aecb9e57d0b046ca6acb6
+ size 30180613
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/0/csv/version_0/hparams.yaml ADDED
@@ -0,0 +1,54 @@
+ trainer:
+ _target_: pytorch_lightning.Trainer
+ gpus: 1
+ min_epochs: 1
+ max_epochs: 100
+ resume_from_checkpoint: null
+ model:
+ _target_: src.models.focus_conv_module.FocusConvLitModule
+ image_size: 150
+ pool_size: 2
+ conv1_size: 7
+ conv1_channels: 3
+ conv2_size: 7
+ conv2_channels: 16
+ lin1_size: 128
+ lin2_size: 32
+ output_size: 1
+ lr: 0.0019207962356032353
+ weight_decay: 0.0005
+ datamodule:
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
+ data_dir: /usr/src/app/data/focus150
+ csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
+ csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
+ csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
+ batch_size: 64
+ num_workers: 0
+ pin_memory: false
+ seed: null
+ callbacks:
+ model_checkpoint:
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
+ monitor: val/mae
+ mode: min
+ save_top_k: 1
+ save_last: true
+ verbose: false
+ dirpath: checkpoints/
+ filename: epoch_{epoch:03d}
+ auto_insert_metric_name: false
+ early_stopping:
+ _target_: pytorch_lightning.callbacks.EarlyStopping
+ monitor: val/mae
+ mode: min
+ patience: 100
+ min_delta: 0
+ model_summary:
+ _target_: pytorch_lightning.callbacks.RichModelSummary
+ max_depth: -1
+ rich_progress_bar:
+ _target_: pytorch_lightning.callbacks.RichProgressBar
+ model/params/total: 2513981
+ model/params/trainable: 2513981
+ model/params/non_trainable: 0
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/0/csv/version_0/metrics.csv ADDED
@@ -0,0 +1,202 @@
1
+ val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae
2
+ 0.1529688835144043,0.3840637803077698,0.06533164530992508,0,47,,,,
3
+ ,,,1,47,11.644712448120117,1.8490697145462036,,
4
+ 0.0013088983250781894,0.02986341528594494,0.02986341528594494,1,95,,,,
5
+ ,,,2,95,0.022778350859880447,0.10598094761371613,,
6
+ 0.0004474002053029835,0.017683187499642372,0.017683187499642372,2,143,,,,
7
+ ,,,3,143,0.0005043061682954431,0.01835504360496998,,
8
+ 0.0004419668985065073,0.017632238566875458,0.017632238566875458,3,191,,,,
9
+ ,,,4,191,0.00040777321555651724,0.016878977417945862,,
10
+ 0.00044000372872687876,0.017521467059850693,0.017521467059850693,4,239,,,,
11
+ ,,,5,239,0.00040783037547953427,0.01691857911646366,,
12
+ 0.00044004194205626845,0.01749260537326336,0.01749260537326336,5,287,,,,
13
+ ,,,6,287,0.0004114176845178008,0.016938986256718636,,
14
+ 0.0004512796876952052,0.01757303811609745,0.01749260537326336,6,335,,,,
15
+ ,,,7,335,0.0004048396658618003,0.016843905672430992,,
16
+ 0.0004403846978675574,0.017517533153295517,0.01749260537326336,7,383,,,,
17
+ ,,,8,383,0.0004090000584255904,0.01691933535039425,,
18
+ 0.00043910928070545197,0.0174802727997303,0.0174802727997303,8,431,,,,
19
+ ,,,9,431,0.00041262785089202225,0.017000757157802582,,
20
+ 0.000435756694059819,0.017478039488196373,0.017478039488196373,9,479,,,,
21
+ ,,,10,479,0.000402910343836993,0.016798974946141243,,
22
+ 0.0005029197782278061,0.018827276304364204,0.017478039488196373,10,527,,,,
23
+ ,,,11,527,0.0004071915755048394,0.016882190480828285,,
24
+ 0.0004364804772194475,0.01742841675877571,0.01742841675877571,11,575,,,,
25
+ ,,,12,575,0.0004072989395353943,0.016921985894441605,,
26
+ 0.00043792996439151466,0.01756676472723484,0.01742841675877571,12,623,,,,
27
+ ,,,13,623,0.0004022790817543864,0.016777021810412407,,
28
+ 0.00044144014827907085,0.017657386139035225,0.01742841675877571,13,671,,,,
29
+ ,,,14,671,0.0004025509406346828,0.016774984076619148,,
30
+ 0.0004985100822523236,0.018707212060689926,0.01742841675877571,14,719,,,,
31
+ ,,,15,719,0.0004107572603970766,0.016919907182455063,,
32
+ 0.00043522074702195823,0.01737971045076847,0.01737971045076847,15,767,,,,
33
+ ,,,16,767,0.00040410313522443175,0.016768287867307663,,
34
+ 0.0004362806212157011,0.01753413863480091,0.01737971045076847,16,815,,,,
35
+ ,,,17,815,0.00040319591062143445,0.016793109476566315,,
36
+ 0.0004426381492521614,0.017703624442219734,0.01737971045076847,17,863,,,,
37
+ ,,,18,863,0.0003996533341705799,0.016707172617316246,,
38
+ 0.0004321836750023067,0.017420092597603798,0.01737971045076847,18,911,,,,
39
+ ,,,19,911,0.00040381919825449586,0.016786139458417892,,
40
+ 0.00043258839286863804,0.017349906265735626,0.017349906265735626,19,959,,,,
41
+ ,,,20,959,0.0004018966283183545,0.016743654385209084,,
42
+ 0.0004612189077306539,0.017602281644940376,0.017349906265735626,20,1007,,,,
43
+ ,,,21,1007,0.000407709798309952,0.016833996400237083,,
44
+ 0.0004798996960744262,0.018394676968455315,0.017349906265735626,21,1055,,,,
45
+ ,,,22,1055,0.00040055703721009195,0.01669343002140522,,
46
+ 0.0004313522658776492,0.01741970330476761,0.017349906265735626,22,1103,,,,
47
+ ,,,23,1103,0.0004048779665026814,0.016778070479631424,,
48
+ 0.00043127310345880687,0.017333654686808586,0.017333654686808586,23,1151,,,,
49
+ ,,,24,1151,0.00040623851236887276,0.01683102548122406,,
50
+ 0.00043488387018442154,0.01736687868833542,0.017333654686808586,24,1199,,,,
51
+ ,,,25,1199,0.00039700925117358565,0.01664406806230545,,
52
+ 0.00044637557584792376,0.017416387796401978,0.017333654686808586,25,1247,,,,
53
+ ,,,26,1247,0.00040725062717683613,0.016821909695863724,,
54
+ 0.00044636763050220907,0.017416302114725113,0.017333654686808586,26,1295,,,,
55
+ ,,,27,1295,0.00040715228533372283,0.016875900328159332,,
56
+ 0.00043170692515559494,0.017304137349128723,0.017304137349128723,27,1343,,,,
57
+ ,,,28,1343,0.00041111401515081525,0.016883963719010353,,
58
+ 0.0004428277024999261,0.017378730699419975,0.017304137349128723,28,1391,,,,
59
+ ,,,29,1391,0.0003965034266002476,0.01662992313504219,,
60
+ 0.00044146436266601086,0.017677610740065575,0.017304137349128723,29,1439,,,,
61
+ ,,,30,1439,0.0004144893609918654,0.016923334449529648,,
62
+ 0.0004331316740717739,0.01730620115995407,0.017304137349128723,30,1487,,,,
63
+ ,,,31,1487,0.00040298167732544243,0.016782693564891815,,
64
+ 0.0005577249103225768,0.018985610455274582,0.017304137349128723,31,1535,,,,
65
+ ,,,32,1535,0.00041650841012597084,0.016953209415078163,,
66
+ 0.0004317727289162576,0.017443519085645676,0.017304137349128723,32,1583,,,,
67
+ ,,,33,1583,0.00040156012983061373,0.01666395179927349,,
68
+ 0.0004860552726313472,0.018519103527069092,0.017304137349128723,33,1631,,,,
69
+ ,,,34,1631,0.0004101500380784273,0.016861293464899063,,
70
+ 0.0004471941210795194,0.017407214269042015,0.017304137349128723,34,1679,,,,
71
+ ,,,35,1679,0.0003987945383414626,0.01664213463664055,,
72
+ 0.0004603511479217559,0.0180084016174078,0.017304137349128723,35,1727,,,,
73
+ ,,,36,1727,0.00041239746497012675,0.01687997207045555,,
74
+ 0.0004278428095858544,0.0173155777156353,0.017304137349128723,36,1775,,,,
75
+ ,,,37,1775,0.0004054378950968385,0.016795067116618156,,
76
+ 0.0005004727281630039,0.01811966858804226,0.017304137349128723,37,1823,,,,
77
+ ,,,38,1823,0.000393460359191522,0.016534248366951942,,
78
+ 0.0004358309379313141,0.017527006566524506,0.017304137349128723,38,1871,,,,
79
+ ,,,39,1871,0.0004243403673171997,0.01695549674332142,,
80
+ 0.0004575180064421147,0.01753048785030842,0.017304137349128723,39,1919,,,,
81
+ ,,,40,1919,0.0004104382824152708,0.016843458637595177,,
82
+ 0.0004550634475890547,0.017495280131697655,0.017304137349128723,40,1967,,,,
83
+ ,,,41,1967,0.0004408454697113484,0.017339007928967476,,
84
+ 0.0005757711478509009,0.019313056021928787,0.017304137349128723,41,2015,,,,
85
+ ,,,42,2015,0.0004055813478771597,0.01672944240272045,,
86
+ 0.00042865899740718305,0.01726781390607357,0.01726781390607357,42,2063,,,,
87
+ ,,,43,2063,0.0004354550619609654,0.0172486063092947,,
88
+ 0.0004531650629360229,0.01788659580051899,0.01726781390607357,43,2111,,,,
89
+ ,,,44,2111,0.00039147044299170375,0.016511356458067894,,
90
+ 0.00043888806249015033,0.017572248354554176,0.01726781390607357,44,2159,,,,
91
+ ,,,45,2159,0.000413993519032374,0.016865307465195656,,
92
+ 0.0005192144890315831,0.019070060923695564,0.01726781390607357,45,2207,,,,
93
+ ,,,46,2207,0.00039004735299386084,0.016423476859927177,,
94
+ 0.0006370747578330338,0.02021108567714691,0.01726781390607357,46,2255,,,,
95
+ ,,,47,2255,0.00039929230115376413,0.016609642654657364,,
96
+ 0.000564892019610852,0.01910146325826645,0.01726781390607357,47,2303,,,,
97
+ ,,,48,2303,0.00041608125320635736,0.016893617808818817,,
98
+ 0.00042804412078112364,0.01723213866353035,0.01723213866353035,48,2351,,,,
99
+ ,,,49,2351,0.000393417663872242,0.01645500212907791,,
100
+ 0.0004723197198472917,0.017699791118502617,0.01723213866353035,49,2399,,,,
101
+ ,,,50,2399,0.00040989479748532176,0.016777772456407547,,
102
+ 0.00042921159183606505,0.017366625368595123,0.01723213866353035,50,2447,,,,
103
+ ,,,51,2447,0.00041992601472884417,0.01699262484908104,,
104
+ 0.0004285224131308496,0.017344307154417038,0.01723213866353035,51,2495,,,,
105
+ ,,,52,2495,0.0004249522462487221,0.01703060232102871,,
106
+ 0.000451786007033661,0.017833853140473366,0.01723213866353035,52,2543,,,,
107
+ ,,,53,2543,0.00041280718869529665,0.016791779547929764,,
108
+ 0.0004415402945596725,0.017645012587308884,0.01723213866353035,53,2591,,,,
109
+ ,,,54,2591,0.00040742100100032985,0.0168253593146801,,
110
+ 0.0004285259929019958,0.01734541356563568,0.01723213866353035,54,2639,,,,
111
+ ,,,55,2639,0.0004132221220061183,0.01674467697739601,,
112
+ 0.0004427391686476767,0.017658058553934097,0.01723213866353035,55,2687,,,,
113
+ ,,,56,2687,0.00040023995097726583,0.016600701957941055,,
114
+ 0.0004283359448891133,0.01721980795264244,0.01721980795264244,56,2735,,,,
115
+ ,,,57,2735,0.0004230951890349388,0.017035314813256264,,
116
+ 0.0004301833687350154,0.017382701858878136,0.01721980795264244,57,2783,,,,
117
+ ,,,58,2783,0.0004508436832111329,0.01745504140853882,,
118
+ 0.0004391385882627219,0.01731993816792965,0.01721980795264244,58,2831,,,,
119
+ ,,,59,2831,0.00040320356492884457,0.016686292365193367,,
120
+ 0.000523244496434927,0.019068347290158272,0.01721980795264244,59,2879,,,,
121
+ ,,,60,2879,0.00040534965228289366,0.016673410311341286,,
122
+ 0.0005052355700172484,0.01817459426820278,0.01721980795264244,60,2927,,,,
123
+ ,,,61,2927,0.00042102858424186707,0.0169852115213871,,
124
+ 0.00047358853043988347,0.017716092988848686,0.01721980795264244,61,2975,,,,
125
+ ,,,62,2975,0.00039721388020552695,0.016487877815961838,,
126
+ 0.000443755037849769,0.017675353214144707,0.01721980795264244,62,3023,,,,
127
+ ,,,63,3023,0.00040845508920028806,0.016667738556861877,,
128
+ 0.00043943306081928313,0.017575744539499283,0.01721980795264244,63,3071,,,,
129
+ ,,,64,3071,0.0004208188911434263,0.016886718571186066,,
130
+ 0.0007027420215308666,0.021867714822292328,0.01721980795264244,64,3119,,,,
131
+ ,,,65,3119,0.00039530752110295,0.016518570482730865,,
132
+ 0.000454102351795882,0.017825162038207054,0.01721980795264244,65,3167,,,,
133
+ ,,,66,3167,0.0004605791764333844,0.017464637756347656,,
134
+ 0.0007526285480707884,0.022056609392166138,0.01721980795264244,66,3215,,,,
135
+ ,,,67,3215,0.0004501427465584129,0.017417488619685173,,
136
+ 0.0007369557279162109,0.022328002378344536,0.01721980795264244,67,3263,,,,
137
+ ,,,68,3263,0.0004968887660652399,0.018165135756134987,,
138
+ 0.00042625609785318375,0.0172402523458004,0.01721980795264244,68,3311,,,,
139
+ ,,,69,3311,0.00041561920079402626,0.01673259027302265,,
140
+ 0.00043612942681647837,0.017280401661992073,0.01721980795264244,69,3359,,,,
141
+ ,,,70,3359,0.0005190926603972912,0.018571801483631134,,
142
+ 0.00043081920011900365,0.017234686762094498,0.01721980795264244,70,3407,,,,
143
+ ,,,71,3407,0.00045268103713169694,0.017419906333088875,,
144
+ 0.00043040665332227945,0.017224041745066643,0.01721980795264244,71,3455,,,,
145
+ ,,,72,3455,0.0003981499176006764,0.01648879610002041,,
146
+ 0.00044993922347202897,0.01742035523056984,0.01721980795264244,72,3503,,,,
147
+ ,,,73,3503,0.00039830469177104533,0.016554851084947586,,
148
+ 0.0004597785009536892,0.017572415992617607,0.01721980795264244,73,3551,,,,
149
+ ,,,74,3551,0.0005435345228761435,0.01897130161523819,,
150
+ 0.0004575865459628403,0.017902733758091927,0.01721980795264244,74,3599,,,,
151
+ ,,,75,3599,0.0005293361027725041,0.01863323152065277,,
152
+ 0.0004255521052982658,0.017226573079824448,0.01721980795264244,75,3647,,,,
153
+ ,,,76,3647,0.0004442641802597791,0.017237989231944084,,
154
+ 0.00042534503154456615,0.01723603345453739,0.01721980795264244,76,3695,,,,
155
+ ,,,77,3695,0.00046748609747737646,0.017770767211914062,,
156
+ 0.0004287570482119918,0.017183762043714523,0.017183762043714523,77,3743,,,,
157
+ ,,,78,3743,0.00042641666368581355,0.017017262056469917,,
158
+ 0.0004509416758082807,0.017438925802707672,0.017183762043714523,78,3791,,,,
159
+ ,,,79,3791,0.00040112051647156477,0.016648024320602417,,
160
+ 0.0004406880761962384,0.01730762980878353,0.017183762043714523,79,3839,,,,
161
+ ,,,80,3839,0.0004375083663035184,0.017360324040055275,,
162
+ 0.00043281176476739347,0.017247747629880905,0.017183762043714523,80,3887,,,,
163
+ ,,,81,3887,0.0004426184168551117,0.01713564619421959,,
164
+ 0.0004277283442206681,0.01723349839448929,0.017183762043714523,81,3935,,,,
165
+ ,,,82,3935,0.000426084385253489,0.016905630007386208,,
166
+ 0.00043000010191462934,0.017354467883706093,0.017183762043714523,82,3983,,,,
167
+ ,,,83,3983,0.00045572000090032816,0.01739797554910183,,
168
+ 0.00043460977030918,0.017244888469576836,0.017183762043714523,83,4031,,,,
169
+ ,,,84,4031,0.00042116420809179544,0.016954459249973297,,
170
+ 0.0005070960032753646,0.01883615553379059,0.017183762043714523,84,4079,,,,
171
+ ,,,85,4079,0.00045345089165493846,0.01743008941411972,,
172
+ 0.0004264841554686427,0.017235497012734413,0.017183762043714523,85,4127,,,,
173
+ ,,,86,4127,0.0004648560716304928,0.01765625737607479,,
174
+ 0.0005678170709870756,0.019099337980151176,0.017183762043714523,86,4175,,,,
175
+ ,,,87,4175,0.00042895929072983563,0.01709340326488018,,
176
+ 0.0004432181885931641,0.017339961603283882,0.017183762043714523,87,4223,,,,
177
+ ,,,88,4223,0.0004631428455468267,0.017719121649861336,,
178
+ 0.00043438412831164896,0.01722612790763378,0.017183762043714523,88,4271,,,,
179
+ ,,,89,4271,0.0004017721221316606,0.016546856611967087,,
180
+ 0.0004417546442709863,0.017318138852715492,0.017183762043714523,89,4319,,,,
181
+ ,,,90,4319,0.0004242872819304466,0.016980793327093124,,
182
+ 0.0006290545570664108,0.020024864003062248,0.017183762043714523,90,4367,,,,
183
+ ,,,91,4367,0.0004177122318651527,0.01676446758210659,,
184
+ 0.0004343774344306439,0.01747911237180233,0.017183762043714523,91,4415,,,,
185
+ ,,,92,4415,0.0004466319514904171,0.017268093302845955,,
186
+ 0.0004328473296482116,0.017231525853276253,0.017183762043714523,92,4463,,,,
187
+ ,,,93,4463,0.0004602254484780133,0.017535557970404625,,
188
+ 0.0006055114790797234,0.020362257957458496,0.017183762043714523,93,4511,,,,
189
+ ,,,94,4511,0.0004779639421030879,0.017868654802441597,,
190
+ 0.0004357439756859094,0.017517756670713425,0.017183762043714523,94,4559,,,,
191
+ ,,,95,4559,0.0004313699319027364,0.017171932384371758,,
192
+ 0.0005241190083324909,0.018405811861157417,0.017183762043714523,95,4607,,,,
193
+ ,,,96,4607,0.0004586382710840553,0.01771795004606247,,
194
+ 0.0005834983894601464,0.020048359408974648,0.017183762043714523,96,4655,,,,
195
+ ,,,97,4655,0.0004241660935804248,0.01693611405789852,,
196
+ 0.0007275238167494535,0.022219236940145493,0.017183762043714523,97,4703,,,,
197
+ ,,,98,4703,0.0004737096605822444,0.01778413914144039,,
198
+ 0.0004264401213731617,0.01725609600543976,0.017183762043714523,98,4751,,,,
199
+ ,,,99,4751,0.0004656390519812703,0.017644871026277542,,
200
+ 0.0004286102775949985,0.01736615039408207,0.017183762043714523,99,4799,,,,
201
+ ,,,100,4799,0.00044306073687039316,0.017206551507115364,,
202
+ ,,,77,4800,,,0.00040581796201877296,0.016683727502822876
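
The CSV above interleaves one validation row (val/loss, val/mae, val/mae_best) and one training row (train/loss, train/mae) per epoch, with a single test row at the end. A small sketch for pulling out the best validation epoch of this trial (pandas is assumed to be available; the path is this trial's log directory as committed):

import pandas as pd

df = pd.read_csv(
    "logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/"
    "2022-05-11_08-46-15/0/csv/version_0/metrics.csv"
)
val = df.dropna(subset=["val/mae"])          # training rows leave the val columns empty
best = val.loc[val["val/mae"].idxmin()]
print(int(best["epoch"]), best["val/mae"])   # epoch 77 here, matching checkpoints/epoch_077.ckpt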
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/0/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652258779.30858e6037f6.1.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:105c6d0b896a36822cb57e46336571f3de7abc98162634b3c98820cfda174370
+ size 36796
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/0/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652258986.30858e6037f6.1.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eae9207e3013f9809a7281e6dc8e8206aea0c5ad7c0f6b2127cf4a7e0bc3b9d6
+ size 179
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/0/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml ADDED
@@ -0,0 +1,54 @@
+ trainer:
+ _target_: pytorch_lightning.Trainer
+ gpus: 1
+ min_epochs: 1
+ max_epochs: 100
+ resume_from_checkpoint: null
+ model:
+ _target_: src.models.focus_conv_module.FocusConvLitModule
+ image_size: 150
+ pool_size: 2
+ conv1_size: 7
+ conv1_channels: 3
+ conv2_size: 7
+ conv2_channels: 16
+ lin1_size: 128
+ lin2_size: 32
+ output_size: 1
+ lr: 0.0019207962356032353
+ weight_decay: 0.0005
+ datamodule:
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
+ data_dir: /usr/src/app/data/focus150
+ csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
+ csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
+ csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
+ batch_size: 64
+ num_workers: 0
+ pin_memory: false
+ seed: null
+ callbacks:
+ model_checkpoint:
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
+ monitor: val/mae
+ mode: min
+ save_top_k: 1
+ save_last: true
+ verbose: false
+ dirpath: checkpoints/
+ filename: epoch_{epoch:03d}
+ auto_insert_metric_name: false
+ early_stopping:
+ _target_: pytorch_lightning.callbacks.EarlyStopping
+ monitor: val/mae
+ mode: min
+ patience: 100
+ min_delta: 0
+ model_summary:
+ _target_: pytorch_lightning.callbacks.RichModelSummary
+ max_depth: -1
+ rich_progress_bar:
+ _target_: pytorch_lightning.callbacks.RichProgressBar
+ model/params/total: 2513981
+ model/params/trainable: 2513981
+ model/params/non_trainable: 0
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/1/.hydra/config.yaml ADDED
@@ -0,0 +1,81 @@
+ original_work_dir: ${hydra:runtime.cwd}
+ data_dir: ${original_work_dir}/data
+ print_config: true
+ ignore_warnings: true
+ train: true
+ test: true
+ seed: null
+ name: focusConvMSE_150_hyperparameter_search
+ datamodule:
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
+ data_dir: ${data_dir}/focus150
+ csv_train_file: ${data_dir}/focus150/train_metadata.csv
+ csv_val_file: ${data_dir}/focus150/validation_metadata.csv
+ csv_test_file: ${data_dir}/focus150/test_metadata.csv
+ batch_size: 64
+ num_workers: 0
+ pin_memory: false
+ model:
+ _target_: src.models.focus_conv_module.FocusConvLitModule
+ image_size: 150
+ pool_size: 2
+ conv1_size: 7
+ conv1_channels: 6
+ conv2_size: 7
+ conv2_channels: 16
+ lin1_size: 128
+ lin2_size: 72
+ output_size: 1
+ lr: 0.0033232883076398404
+ weight_decay: 0.0005
+ callbacks:
+ model_checkpoint:
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
+ monitor: val/mae
+ mode: min
+ save_top_k: 1
+ save_last: true
+ verbose: false
+ dirpath: checkpoints/
+ filename: epoch_{epoch:03d}
+ auto_insert_metric_name: false
+ early_stopping:
+ _target_: pytorch_lightning.callbacks.EarlyStopping
+ monitor: val/mae
+ mode: min
+ patience: 100
+ min_delta: 0
+ model_summary:
+ _target_: pytorch_lightning.callbacks.RichModelSummary
+ max_depth: -1
+ rich_progress_bar:
+ _target_: pytorch_lightning.callbacks.RichProgressBar
+ logger:
+ csv:
+ _target_: pytorch_lightning.loggers.csv_logs.CSVLogger
+ save_dir: .
+ name: csv/
+ prefix: ''
+ mlflow:
+ _target_: pytorch_lightning.loggers.mlflow.MLFlowLogger
+ experiment_name: ${name}
+ tracking_uri: ${original_work_dir}/logs/mlflow/mlruns
+ tags: null
+ save_dir: ./mlruns
+ prefix: ''
+ artifact_location: null
+ tensorboard:
+ _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
+ save_dir: tensorboard/
+ name: null
+ version: ${name}
+ log_graph: false
+ default_hp_metric: true
+ prefix: ''
+ trainer:
+ _target_: pytorch_lightning.Trainer
+ gpus: 1
+ min_epochs: 1
+ max_epochs: 100
+ resume_from_checkpoint: null
+ optimized_metric: val/mae_best
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/1/.hydra/hydra.yaml ADDED
@@ -0,0 +1,240 @@
1
+ hydra:
2
+ run:
3
+ dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
4
+ sweep:
5
+ dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
6
+ subdir: ${hydra.job.num}
7
+ launcher:
8
+ _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
9
+ sweeper:
10
+ sampler:
11
+ _target_: optuna.samplers.TPESampler
12
+ seed: 12345
13
+ consider_prior: true
14
+ prior_weight: 1.0
15
+ consider_magic_clip: true
16
+ consider_endpoints: false
17
+ n_startup_trials: 10
18
+ n_ei_candidates: 24
19
+ multivariate: false
20
+ warn_independent_sampling: true
21
+ _target_: hydra_plugins.hydra_optuna_sweeper.optuna_sweeper.OptunaSweeper
22
+ direction: minimize
23
+ storage: null
24
+ study_name: focusConvMSE_150_hyperparameter_search
25
+ n_trials: 20
26
+ n_jobs: 1
27
+ search_space:
28
+ datamodule.batch_size:
29
+ type: categorical
30
+ choices:
31
+ - 64
32
+ - 128
33
+ model.lr:
34
+ type: float
35
+ low: 0.0001
36
+ high: 0.01
37
+ model.conv1_size:
38
+ type: categorical
39
+ choices:
40
+ - 3
41
+ - 5
42
+ - 7
43
+ model.conv1_channels:
44
+ type: categorical
45
+ choices:
46
+ - 3
47
+ - 6
48
+ - 9
49
+ model.conv2_size:
50
+ type: categorical
51
+ choices:
52
+ - 3
53
+ - 5
54
+ - 7
55
+ model.conv2_channels:
56
+ type: categorical
57
+ choices:
58
+ - 6
59
+ - 11
60
+ - 16
61
+ model.lin1_size:
62
+ type: categorical
63
+ choices:
64
+ - 32
65
+ - 72
66
+ - 128
67
+ model.lin2_size:
68
+ type: categorical
69
+ choices:
70
+ - 32
71
+ - 72
72
+ - 128
73
+ help:
74
+ app_name: ${hydra.job.name}
75
+ header: '${hydra.help.app_name} is powered by Hydra.
76
+
77
+ '
78
+ footer: 'Powered by Hydra (https://hydra.cc)
79
+
80
+ Use --hydra-help to view Hydra specific help
81
+
82
+ '
83
+ template: '${hydra.help.header}
84
+
85
+ == Configuration groups ==
86
+
87
+ Compose your configuration from those groups (group=option)
88
+
89
+
90
+ $APP_CONFIG_GROUPS
91
+
92
+
93
+ == Config ==
94
+
95
+ Override anything in the config (foo.bar=value)
96
+
97
+
98
+ $CONFIG
99
+
100
+
101
+ ${hydra.help.footer}
102
+
103
+ '
104
+ hydra_help:
105
+ template: 'Hydra (${hydra.runtime.version})
106
+
107
+ See https://hydra.cc for more info.
108
+
109
+
110
+ == Flags ==
111
+
112
+ $FLAGS_HELP
113
+
114
+
115
+ == Configuration groups ==
116
+
117
+ Compose your configuration from those groups (For example, append hydra/job_logging=disabled
118
+ to command line)
119
+
120
+
121
+ $HYDRA_CONFIG_GROUPS
122
+
123
+
124
+ Use ''--cfg hydra'' to Show the Hydra config.
125
+
126
+ '
127
+ hydra_help: ???
128
+ hydra_logging:
129
+ version: 1
130
+ formatters:
131
+ colorlog:
132
+ (): colorlog.ColoredFormatter
133
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
134
+ handlers:
135
+ console:
136
+ class: logging.StreamHandler
137
+ formatter: colorlog
138
+ stream: ext://sys.stdout
139
+ root:
140
+ level: INFO
141
+ handlers:
142
+ - console
143
+ disable_existing_loggers: false
144
+ job_logging:
145
+ version: 1
146
+ formatters:
147
+ simple:
148
+ format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
149
+ colorlog:
150
+ (): colorlog.ColoredFormatter
151
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
152
+ - %(message)s'
153
+ log_colors:
154
+ DEBUG: purple
155
+ INFO: green
156
+ WARNING: yellow
157
+ ERROR: red
158
+ CRITICAL: red
159
+ handlers:
160
+ console:
161
+ class: logging.StreamHandler
162
+ formatter: colorlog
163
+ stream: ext://sys.stdout
164
+ file:
165
+ class: logging.FileHandler
166
+ formatter: simple
167
+ filename: ${hydra.job.name}.log
168
+ root:
169
+ level: INFO
170
+ handlers:
171
+ - console
172
+ - file
173
+ disable_existing_loggers: false
174
+ env: {}
175
+ searchpath: []
176
+ callbacks: {}
177
+ output_subdir: .hydra
178
+ overrides:
179
+ hydra: []
180
+ task:
181
+ - datamodule.batch_size=64
182
+ - model.lr=0.0033232883076398404
183
+ - model.conv1_size=7
184
+ - model.conv1_channels=6
185
+ - model.conv2_size=7
186
+ - model.conv2_channels=16
187
+ - model.lin1_size=128
188
+ - model.lin2_size=72
189
+ - hparams_search=focusConvMSE_150.yaml
190
+ job:
191
+ name: train
192
+ override_dirname: datamodule.batch_size=64,hparams_search=focusConvMSE_150.yaml,model.conv1_channels=6,model.conv1_size=7,model.conv2_channels=16,model.conv2_size=7,model.lin1_size=128,model.lin2_size=72,model.lr=0.0033232883076398404
193
+ id: '1'
194
+ num: 1
195
+ config_name: train.yaml
196
+ env_set: {}
197
+ env_copy: []
198
+ config:
199
+ override_dirname:
200
+ kv_sep: '='
201
+ item_sep: ','
202
+ exclude_keys: []
203
+ runtime:
204
+ version: 1.1.2
205
+ cwd: /usr/src/app
206
+ config_sources:
207
+ - path: hydra.conf
208
+ schema: pkg
209
+ provider: hydra
210
+ - path: /usr/src/app/configs
211
+ schema: file
212
+ provider: main
213
+ - path: hydra_plugins.hydra_colorlog.conf
214
+ schema: pkg
215
+ provider: hydra-colorlog
216
+ - path: ''
217
+ schema: structured
218
+ provider: schema
219
+ choices:
220
+ local: default.yaml
221
+ hparams_search: focusConvMSE_150.yaml
222
+ debug: null
223
+ experiment: null
224
+ log_dir: default.yaml
225
+ trainer: long.yaml
226
+ logger: many_loggers
227
+ callbacks: default.yaml
228
+ model: focusConv_150.yaml
229
+ datamodule: focus150.yaml
230
+ hydra/env: default
231
+ hydra/callbacks: null
232
+ hydra/job_logging: colorlog
233
+ hydra/hydra_logging: colorlog
234
+ hydra/hydra_help: default
235
+ hydra/help: default
236
+ hydra/sweeper: optuna
237
+ hydra/sweeper/sampler: tpe
238
+ hydra/launcher: basic
239
+ hydra/output: default
240
+ verbose: false
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/1/.hydra/overrides.yaml ADDED
@@ -0,0 +1,9 @@
+ - datamodule.batch_size=64
+ - model.lr=0.0033232883076398404
+ - model.conv1_size=7
+ - model.conv1_channels=6
+ - model.conv2_size=7
+ - model.conv2_channels=16
+ - model.lin1_size=128
+ - model.lin2_size=72
+ - hparams_search=focusConvMSE_150.yaml
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/1/checkpoints/epoch_098.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:231c8023cd63d10ae210643e3036cfd03da5c6063f86478624915801679c6cd9
+ size 26942341
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/1/checkpoints/last.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:db174425fed1210301968f40b2a1145a84ae7e2c806780057ae15c3920332e85
+ size 26942341
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/1/csv/version_0/hparams.yaml ADDED
@@ -0,0 +1,54 @@
+ trainer:
+ _target_: pytorch_lightning.Trainer
+ gpus: 1
+ min_epochs: 1
+ max_epochs: 100
+ resume_from_checkpoint: null
+ model:
+ _target_: src.models.focus_conv_module.FocusConvLitModule
+ image_size: 150
+ pool_size: 2
+ conv1_size: 7
+ conv1_channels: 6
+ conv2_size: 7
+ conv2_channels: 16
+ lin1_size: 128
+ lin2_size: 72
+ output_size: 1
+ lr: 0.0033232883076398404
+ weight_decay: 0.0005
+ datamodule:
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
+ data_dir: /usr/src/app/data/focus150
+ csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
+ csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
+ csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
+ batch_size: 64
+ num_workers: 0
+ pin_memory: false
+ seed: null
+ callbacks:
+ model_checkpoint:
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
+ monitor: val/mae
+ mode: min
+ save_top_k: 1
+ save_last: true
+ verbose: false
+ dirpath: checkpoints/
+ filename: epoch_{epoch:03d}
+ auto_insert_metric_name: false
+ early_stopping:
+ _target_: pytorch_lightning.callbacks.EarlyStopping
+ monitor: val/mae
+ mode: min
+ patience: 100
+ min_delta: 0
+ model_summary:
+ _target_: pytorch_lightning.callbacks.RichModelSummary
+ max_depth: -1
+ rich_progress_bar:
+ _target_: pytorch_lightning.callbacks.RichProgressBar
+ model/params/total: 2244121
+ model/params/trainable: 2244121
+ model/params/non_trainable: 0
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/1/csv/version_0/metrics.csv ADDED
@@ -0,0 +1,202 @@
1
+ val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae
2
+ 0.5275669097900391,0.6093315482139587,0.02514241449534893,0,47,,,,
3
+ ,,,1,47,53.885658264160156,3.7185118198394775,,
4
+ 0.035016387701034546,0.14811477065086365,0.02514241449534893,1,95,,,,
5
+ ,,,2,95,0.1827768236398697,0.33221203088760376,,
6
+ 0.015374592505395412,0.10036610811948776,0.02514241449534893,2,143,,,,
7
+ ,,,3,143,0.026036100462079048,0.13029147684574127,,
8
+ 0.008924655616283417,0.07514648139476776,0.02514241449534893,3,191,,,,
9
+ ,,,4,191,0.012186992913484573,0.08838197588920593,,
10
+ 0.004836302250623703,0.05557500198483467,0.02514241449534893,4,239,,,,
11
+ ,,,5,239,0.006325086113065481,0.06380344182252884,,
12
+ 0.002920554718002677,0.04324944317340851,0.02514241449534893,5,287,,,,
13
+ ,,,6,287,0.0030073849484324455,0.043991029262542725,,
14
+ 0.0022141921799629927,0.03829417750239372,0.02514241449534893,6,335,,,,
15
+ ,,,7,335,0.001855208189226687,0.034627366811037064,,
16
+ 0.0029016362968832254,0.04380028322339058,0.02514241449534893,7,383,,,,
17
+ ,,,8,383,0.0017762924544513226,0.034229826182127,,
18
+ 0.001324483659118414,0.03088465705513954,0.02514241449534893,8,431,,,,
19
+ ,,,9,431,0.0011471427278593183,0.027331626042723656,,
20
+ 0.0009637662442401052,0.0246257521212101,0.0246257521212101,9,479,,,,
21
+ ,,,10,479,0.0008773336885496974,0.024108557030558586,,
22
+ 0.000991585198789835,0.02442190796136856,0.02442190796136856,10,527,,,,
23
+ ,,,11,527,0.0012166403466835618,0.02808286063373089,,
24
+ 0.0007171589531935751,0.022364649921655655,0.022364649921655655,11,575,,,,
25
+ ,,,12,575,0.0007882644422352314,0.022753547877073288,,
26
+ 0.0008671012474223971,0.022602954879403114,0.022364649921655655,12,623,,,,
27
+ ,,,13,623,0.0007994149927981198,0.02283688448369503,,
28
+ 0.000643746112473309,0.021279098466038704,0.021279098466038704,13,671,,,,
29
+ ,,,14,671,0.000657115422654897,0.020935196429491043,,
30
+ 0.0006250638398341835,0.020756086334586143,0.020756086334586143,14,719,,,,
31
+ ,,,15,719,0.0008711983100511134,0.02367435209453106,,
32
+ 0.0012114711571484804,0.029617929831147194,0.020756086334586143,15,767,,,,
33
+ ,,,16,767,0.0007185650174506009,0.021673697978258133,,
34
+ 0.0009525752975605428,0.023560674861073494,0.020756086334586143,16,815,,,,
35
+ ,,,17,815,0.0009485357441008091,0.024565501138567924,,
36
+ 0.0006448689964599907,0.020190082490444183,0.020190082490444183,17,863,,,,
37
+ ,,,18,863,0.000712849257979542,0.021669510751962662,,
38
+ 0.0007498672348447144,0.02114405296742916,0.020190082490444183,18,911,,,,
39
+ ,,,19,911,0.0005719225737266243,0.01969040185213089,,
40
+ 0.0006509821978397667,0.021996501833200455,0.020190082490444183,19,959,,,,
41
+ ,,,20,959,0.0010296448599547148,0.02595364674925804,,
42
+ 0.0008699737372808158,0.024952147156000137,0.020190082490444183,20,1007,,,,
43
+ ,,,21,1007,0.0006586618255823851,0.02074377052485943,,
44
+ 0.000791403348557651,0.021877620369195938,0.020190082490444183,21,1055,,,,
45
+ ,,,22,1055,0.0010672039352357388,0.026486895978450775,,
46
+ 0.0005704872310161591,0.020514430478215218,0.020190082490444183,22,1103,,,,
47
+ ,,,23,1103,0.0007771493983455002,0.022452669218182564,,
48
+ 0.0006115568103268743,0.019935334101319313,0.019935334101319313,23,1151,,,,
49
+ ,,,24,1151,0.0007553790346719325,0.022343099117279053,,
50
+ 0.0005780598730780184,0.019660718739032745,0.019660718739032745,24,1199,,,,
51
+ ,,,25,1199,0.0006169310072436929,0.020330922678112984,,
52
+ 0.0010343262692913413,0.026613466441631317,0.019660718739032745,25,1247,,,,
53
+ ,,,26,1247,0.0006857134867459536,0.021199919283390045,,
54
+ 0.0012351988116279244,0.028455765917897224,0.019660718739032745,26,1295,,,,
55
+ ,,,27,1295,0.0006879523862153292,0.02121087722480297,,
56
+ 0.0007609054446220398,0.021991929039359093,0.019660718739032745,27,1343,,,,
57
+ ,,,28,1343,0.0006570351542904973,0.02099188044667244,,
58
+ 0.000528292148374021,0.01951993629336357,0.01951993629336357,28,1391,,,,
59
+ ,,,29,1391,0.0006471765809692442,0.020628664642572403,,
60
+ 0.0005257359589450061,0.019237522035837173,0.019237522035837173,29,1439,,,,
61
+ ,,,30,1439,0.0005726320669054985,0.019669175148010254,,
62
+ 0.0005382749368436635,0.019626064226031303,0.019237522035837173,30,1487,,,,
63
+ ,,,31,1487,0.0006941626779735088,0.021432876586914062,,
64
+ 0.0008383388048969209,0.023213475942611694,0.019237522035837173,31,1535,,,,
65
+ ,,,32,1535,0.000621735758613795,0.020383620634675026,,
66
+ 0.0007140219677239656,0.022019553929567337,0.019237522035837173,32,1583,,,,
67
+ ,,,33,1583,0.000715982518158853,0.02162073366343975,,
68
+ 0.001332955784164369,0.030138226225972176,0.019237522035837173,33,1631,,,,
69
+ ,,,34,1631,0.0006747125880792737,0.021204832941293716,,
70
+ 0.0011111361673101783,0.027148135006427765,0.019237522035837173,34,1679,,,,
71
+ ,,,35,1679,0.0009053927497006953,0.02433197759091854,,
72
+ 0.0005094181396998465,0.01906736195087433,0.01906736195087433,35,1727,,,,
73
+ ,,,36,1727,0.0005264222272671759,0.018955666571855545,,
74
+ 0.0005467374576255679,0.019594207406044006,0.01906736195087433,36,1775,,,,
75
+ ,,,37,1775,0.0007064650999382138,0.02146984450519085,,
76
+ 0.0006757000810466707,0.021248893812298775,0.01906736195087433,37,1823,,,,
77
+ ,,,38,1823,0.0004875431477557868,0.018311936408281326,,
78
+ 0.0005100793787278235,0.019004156813025475,0.019004156813025475,38,1871,,,,
79
+ ,,,39,1871,0.0005283526261337101,0.01893465593457222,,
80
+ 0.0009307982982136309,0.02480236627161503,0.019004156813025475,39,1919,,,,
81
+ ,,,40,1919,0.0005279034958221018,0.018864721059799194,,
82
+ 0.0008235661080107093,0.02327311784029007,0.019004156813025475,40,1967,,,,
83
+ ,,,41,1967,0.0006446486804634333,0.02054503560066223,,
84
+ 0.0005123321316204965,0.019014237448573112,0.019004156813025475,41,2015,,,,
85
+ ,,,42,2015,0.0007002244819886982,0.021364932879805565,,
86
+ 0.0005031232722103596,0.018889565020799637,0.018889565020799637,42,2063,,,,
87
+ ,,,43,2063,0.0004827312659472227,0.018181219696998596,,
88
+ 0.001130367978475988,0.027670804411172867,0.018889565020799637,43,2111,,,,
89
+ ,,,44,2111,0.0005814875476062298,0.01955241896212101,,
90
+ 0.000524190254509449,0.019084710627794266,0.018889565020799637,44,2159,,,,
91
+ ,,,45,2159,0.0005363462259992957,0.01889832317829132,,
92
+ 0.0006214368622750044,0.0202583447098732,0.018889565020799637,45,2207,,,,
93
+ ,,,46,2207,0.000492732971906662,0.018247883766889572,,
94
+ 0.0010672998614609241,0.02638295292854309,0.018889565020799637,46,2255,,,,
95
+ ,,,47,2255,0.0005116350948810577,0.018509697169065475,,
96
+ 0.0008521674899384379,0.0234022606164217,0.018889565020799637,47,2303,,,,
97
+ ,,,48,2303,0.0005453287740238011,0.01907530054450035,,
98
+ 0.0005633384571410716,0.019727090373635292,0.018889565020799637,48,2351,,,,
99
+ ,,,49,2351,0.0005102154682390392,0.018462389707565308,,
100
+ 0.0005005526472814381,0.018745388835668564,0.018745388835668564,49,2399,,,,
101
+ ,,,50,2399,0.0004720031574834138,0.017778053879737854,,
102
+ 0.0005969546618871391,0.019920984283089638,0.018745388835668564,50,2447,,,,
103
+ ,,,51,2447,0.00045170169323682785,0.01754944585263729,,
104
+ 0.0006072758696973324,0.020039290189743042,0.018745388835668564,51,2495,,,,
105
+ ,,,52,2495,0.0006239523063413799,0.02022615075111389,,
106
+ 0.0006018414860591292,0.019971538335084915,0.018745388835668564,52,2543,,,,
107
+ ,,,53,2543,0.0004767236241605133,0.017919551581144333,,
108
+ 0.0005064652650617063,0.01886296272277832,0.018745388835668564,53,2591,,,,
109
+ ,,,54,2591,0.0004878145700786263,0.01792169362306595,,
110
+ 0.0006775048677809536,0.021302856504917145,0.018745388835668564,54,2639,,,,
111
+ ,,,55,2639,0.0006160226184874773,0.020086171105504036,,
112
+ 0.0006067649810574949,0.020029881969094276,0.018745388835668564,55,2687,,,,
113
+ ,,,56,2687,0.0004929049755446613,0.01815544068813324,,
114
+ 0.0004856096929870546,0.01846287213265896,0.01846287213265896,56,2735,,,,
115
+ ,,,57,2735,0.0004832090635318309,0.017951903864741325,,
116
+ 0.0004980151425115764,0.018671030178666115,0.01846287213265896,57,2783,,,,
117
+ ,,,58,2783,0.0004376235883682966,0.017234988510608673,,
118
+ 0.0004785487544722855,0.01831466518342495,0.01831466518342495,58,2831,,,,
119
+ ,,,59,2831,0.00047893947339616716,0.017942199483513832,,
120
+ 0.0012450891081243753,0.029026294127106667,0.01831466518342495,59,2879,,,,
121
+ ,,,60,2879,0.000540503824595362,0.01890811324119568,,
122
+ 0.0005021556280553341,0.018527239561080933,0.01831466518342495,60,2927,,,,
123
+ ,,,61,2927,0.00045101947034709156,0.017281025648117065,,
124
+ 0.0009486160124652088,0.024779850617051125,0.01831466518342495,61,2975,,,,
125
+ ,,,62,2975,0.0004714902024716139,0.017731236293911934,,
126
+ 0.0004714909882750362,0.018174635246396065,0.018174635246396065,62,3023,,,,
127
+ ,,,63,3023,0.00044276448898017406,0.01719634234905243,,
128
+ 0.0006917946739122272,0.021217238157987595,0.018174635246396065,63,3071,,,,
129
+ ,,,64,3071,0.0004324156616348773,0.01711345836520195,,
130
+ 0.0005978466360829771,0.020045902580022812,0.018174635246396065,64,3119,,,,
131
+ ,,,65,3119,0.0004004494985565543,0.0164721067994833,,
132
+ 0.0004856165614910424,0.018378695473074913,0.018174635246396065,65,3167,,,,
133
+ ,,,66,3167,0.00042870952165685594,0.017112910747528076,,
134
+ 0.0004739972064271569,0.01815548911690712,0.01815548911690712,66,3215,,,,
135
+ ,,,67,3215,0.0004309695214033127,0.01689757965505123,,
136
+ 0.0005372599116526544,0.019116533920168877,0.01815548911690712,67,3263,,,,
137
+ ,,,68,3263,0.00044553211773745716,0.01721913553774357,,
138
+ 0.0006342271226458251,0.020581241697072983,0.01815548911690712,68,3311,,,,
139
+ ,,,69,3311,0.00040322632412426174,0.01641654595732689,,
140
+ 0.0004660649283323437,0.0180069487541914,0.0180069487541914,69,3359,,,,
141
+ ,,,70,3359,0.0005176242557354271,0.018430711701512337,,
142
+ 0.0006550574908033013,0.02065545879304409,0.0180069487541914,70,3407,,,,
143
+ ,,,71,3407,0.0004612793563865125,0.017452673986554146,,
144
+ 0.0005298222531564534,0.018815303221344948,0.0180069487541914,71,3455,,,,
145
+ ,,,72,3455,0.00042226904770359397,0.016783911734819412,,
146
+ 0.0005669915699400008,0.01938854157924652,0.0180069487541914,72,3503,,,,
147
+ ,,,73,3503,0.00040137104224413633,0.0164727121591568,,
148
+ 0.000472719082608819,0.018005898222327232,0.018005898222327232,73,3551,,,,
149
+ ,,,74,3551,0.00046363542787730694,0.01753677800297737,,
150
+ 0.00045780380605719984,0.01780862919986248,0.01780862919986248,74,3599,,,,
151
+ ,,,75,3599,0.0004112946626264602,0.016557691618800163,,
152
+ 0.0007896430906839669,0.022685136646032333,0.01780862919986248,75,3647,,,,
153
+ ,,,76,3647,0.0004247884498909116,0.016840221360325813,,
154
+ 0.000585844973102212,0.019704436883330345,0.01780862919986248,76,3695,,,,
155
+ ,,,77,3695,0.0004378467856440693,0.01698942482471466,,
156
+ 0.0004729386419057846,0.018055276945233345,0.01780862919986248,77,3743,,,,
157
+ ,,,78,3743,0.00048724949010647833,0.017840662971138954,,
158
+ 0.00048680519103072584,0.018200617283582687,0.01780862919986248,78,3791,,,,
159
+ ,,,79,3791,0.00038546969881281257,0.016008595004677773,,
160
+ 0.0004997012438252568,0.018347330391407013,0.01780862919986248,79,3839,,,,
161
+ ,,,80,3839,0.0004130962479393929,0.01657581701874733,,
162
+ 0.0006214969325810671,0.020249323919415474,0.01780862919986248,80,3887,,,,
163
+ ,,,81,3887,0.00037212911411188543,0.015849240124225616,,
164
+ 0.0005635043489746749,0.019386647269129753,0.01780862919986248,81,3935,,,,
165
+ ,,,82,3935,0.0004050152492709458,0.01637003943324089,,
166
+ 0.0004842673079110682,0.018157681450247765,0.01780862919986248,82,3983,,,,
167
+ ,,,83,3983,0.0003964641073253006,0.01618850976228714,,
168
+ 0.0005478051607497036,0.01914801448583603,0.01780862919986248,83,4031,,,,
169
+ ,,,84,4031,0.0004142601101193577,0.01642853580415249,,
170
+ 0.0006480533047579229,0.02065194770693779,0.01780862919986248,84,4079,,,,
171
+ ,,,85,4079,0.0004538017965387553,0.017401060089468956,,
172
+ 0.0004542766255326569,0.017664361745119095,0.017664361745119095,85,4127,,,,
173
+ ,,,86,4127,0.00038512187893502414,0.015987012535333633,,
174
+ 0.0005613107932731509,0.019350778311491013,0.017664361745119095,86,4175,,,,
175
+ ,,,87,4175,0.00036956972326152027,0.01566312089562416,,
176
+ 0.0004435292212292552,0.01750669814646244,0.01750669814646244,87,4223,,,,
177
+ ,,,88,4223,0.00037626290577463806,0.01578366942703724,,
178
+ 0.0004419768520165235,0.01747036539018154,0.01747036539018154,88,4271,,,,
179
+ ,,,89,4271,0.0003800286795012653,0.01586548425257206,,
180
+ 0.0006269440054893494,0.020266743376851082,0.01747036539018154,89,4319,,,,
181
+ ,,,90,4319,0.00043421026202850044,0.01677519641816616,,
182
+ 0.000440193951362744,0.017425496131181717,0.017425496131181717,90,4367,,,,
183
+ ,,,91,4367,0.00037983679794706404,0.01583649218082428,,
184
+ 0.0004414564464241266,0.017488256096839905,0.017425496131181717,91,4415,,,,
185
+ ,,,92,4415,0.0003485319612082094,0.015215429477393627,,
186
+ 0.0005353452870622277,0.018850816413760185,0.017425496131181717,92,4463,,,,
187
+ ,,,93,4463,0.0003943751798942685,0.01622661203145981,,
188
+ 0.0005373007734306157,0.019013861194252968,0.017425496131181717,93,4511,,,,
189
+ ,,,94,4511,0.00039776595076546073,0.016172820702195168,,
190
+ 0.0005660614697262645,0.019260402768850327,0.017425496131181717,94,4559,,,,
191
+ ,,,95,4559,0.0004270143690519035,0.016735894605517387,,
192
+ 0.0004480798670556396,0.017444100230932236,0.017425496131181717,95,4607,,,,
193
+ ,,,96,4607,0.00043075362918898463,0.01675921492278576,,
194
+ 0.0008359761559404433,0.023616880178451538,0.017425496131181717,96,4655,,,,
195
+ ,,,97,4655,0.0003729037125594914,0.015669796615839005,,
196
+ 0.00043515325523912907,0.017283106222748756,0.017283106222748756,97,4703,,,,
197
+ ,,,98,4703,0.00040442668250761926,0.016322694718837738,,
198
+ 0.0004255669191479683,0.017137357965111732,0.017137357965111732,98,4751,,,,
199
+ ,,,99,4751,0.0004089919093530625,0.01632435992360115,,
200
+ 0.00045027551823295653,0.017563797533512115,0.017137357965111732,99,4799,,,,
201
+ ,,,100,4799,0.00037797895492985845,0.015829505398869514,,
202
+ ,,,98,4800,,,0.00040623891982249916,0.016629530116915703
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/1/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652258987.30858e6037f6.1.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:343c1ec626f19d39fd3a5de90cfa7843b2ea8704413aa03a0e6ed84a0be6c596
+ size 36796
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/1/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652259201.30858e6037f6.1.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4be5aeef2bda1e93200e6afbaf48701a3c34977c047f445b9af14497d097b5ba
+ size 179
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/1/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml ADDED
@@ -0,0 +1,54 @@
1
+ trainer:
2
+ _target_: pytorch_lightning.Trainer
3
+ gpus: 1
4
+ min_epochs: 1
5
+ max_epochs: 100
6
+ resume_from_checkpoint: null
7
+ model:
8
+ _target_: src.models.focus_conv_module.FocusConvLitModule
9
+ image_size: 150
10
+ pool_size: 2
11
+ conv1_size: 7
12
+ conv1_channels: 6
13
+ conv2_size: 7
14
+ conv2_channels: 16
15
+ lin1_size: 128
16
+ lin2_size: 72
17
+ output_size: 1
18
+ lr: 0.0033232883076398404
19
+ weight_decay: 0.0005
20
+ datamodule:
21
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
22
+ data_dir: /usr/src/app/data/focus150
23
+ csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
24
+ csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
25
+ csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
26
+ batch_size: 64
27
+ num_workers: 0
28
+ pin_memory: false
29
+ seed: null
30
+ callbacks:
31
+ model_checkpoint:
32
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
33
+ monitor: val/mae
34
+ mode: min
35
+ save_top_k: 1
36
+ save_last: true
37
+ verbose: false
38
+ dirpath: checkpoints/
39
+ filename: epoch_{epoch:03d}
40
+ auto_insert_metric_name: false
41
+ early_stopping:
42
+ _target_: pytorch_lightning.callbacks.EarlyStopping
43
+ monitor: val/mae
44
+ mode: min
45
+ patience: 100
46
+ min_delta: 0
47
+ model_summary:
48
+ _target_: pytorch_lightning.callbacks.RichModelSummary
49
+ max_depth: -1
50
+ rich_progress_bar:
51
+ _target_: pytorch_lightning.callbacks.RichProgressBar
52
+ model/params/total: 2244121
53
+ model/params/trainable: 2244121
54
+ model/params/non_trainable: 0
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/10/.hydra/config.yaml ADDED
@@ -0,0 +1,81 @@
1
+ original_work_dir: ${hydra:runtime.cwd}
2
+ data_dir: ${original_work_dir}/data
3
+ print_config: true
4
+ ignore_warnings: true
5
+ train: true
6
+ test: true
7
+ seed: null
8
+ name: focusConvMSE_150_hyperparameter_search
9
+ datamodule:
10
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
11
+ data_dir: ${data_dir}/focus150
12
+ csv_train_file: ${data_dir}/focus150/train_metadata.csv
13
+ csv_val_file: ${data_dir}/focus150/validation_metadata.csv
14
+ csv_test_file: ${data_dir}/focus150/test_metadata.csv
15
+ batch_size: 64
16
+ num_workers: 0
17
+ pin_memory: false
18
+ model:
19
+ _target_: src.models.focus_conv_module.FocusConvLitModule
20
+ image_size: 150
21
+ pool_size: 2
22
+ conv1_size: 3
23
+ conv1_channels: 9
24
+ conv2_size: 3
25
+ conv2_channels: 6
26
+ lin1_size: 32
27
+ lin2_size: 72
28
+ output_size: 1
29
+ lr: 0.005087336593525169
30
+ weight_decay: 0.0005
31
+ callbacks:
32
+ model_checkpoint:
33
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
34
+ monitor: val/mae
35
+ mode: min
36
+ save_top_k: 1
37
+ save_last: true
38
+ verbose: false
39
+ dirpath: checkpoints/
40
+ filename: epoch_{epoch:03d}
41
+ auto_insert_metric_name: false
42
+ early_stopping:
43
+ _target_: pytorch_lightning.callbacks.EarlyStopping
44
+ monitor: val/mae
45
+ mode: min
46
+ patience: 100
47
+ min_delta: 0
48
+ model_summary:
49
+ _target_: pytorch_lightning.callbacks.RichModelSummary
50
+ max_depth: -1
51
+ rich_progress_bar:
52
+ _target_: pytorch_lightning.callbacks.RichProgressBar
53
+ logger:
54
+ csv:
55
+ _target_: pytorch_lightning.loggers.csv_logs.CSVLogger
56
+ save_dir: .
57
+ name: csv/
58
+ prefix: ''
59
+ mlflow:
60
+ _target_: pytorch_lightning.loggers.mlflow.MLFlowLogger
61
+ experiment_name: ${name}
62
+ tracking_uri: ${original_work_dir}/logs/mlflow/mlruns
63
+ tags: null
64
+ save_dir: ./mlruns
65
+ prefix: ''
66
+ artifact_location: null
67
+ tensorboard:
68
+ _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
69
+ save_dir: tensorboard/
70
+ name: null
71
+ version: ${name}
72
+ log_graph: false
73
+ default_hp_metric: true
74
+ prefix: ''
75
+ trainer:
76
+ _target_: pytorch_lightning.Trainer
77
+ gpus: 1
78
+ min_epochs: 1
79
+ max_epochs: 100
80
+ resume_from_checkpoint: null
81
+ optimized_metric: val/mae_best
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/10/.hydra/hydra.yaml ADDED
@@ -0,0 +1,240 @@
1
+ hydra:
2
+ run:
3
+ dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
4
+ sweep:
5
+ dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
6
+ subdir: ${hydra.job.num}
7
+ launcher:
8
+ _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
9
+ sweeper:
10
+ sampler:
11
+ _target_: optuna.samplers.TPESampler
12
+ seed: 12345
13
+ consider_prior: true
14
+ prior_weight: 1.0
15
+ consider_magic_clip: true
16
+ consider_endpoints: false
17
+ n_startup_trials: 10
18
+ n_ei_candidates: 24
19
+ multivariate: false
20
+ warn_independent_sampling: true
21
+ _target_: hydra_plugins.hydra_optuna_sweeper.optuna_sweeper.OptunaSweeper
22
+ direction: minimize
23
+ storage: null
24
+ study_name: focusConvMSE_150_hyperparameter_search
25
+ n_trials: 20
26
+ n_jobs: 1
27
+ search_space:
28
+ datamodule.batch_size:
29
+ type: categorical
30
+ choices:
31
+ - 64
32
+ - 128
33
+ model.lr:
34
+ type: float
35
+ low: 0.0001
36
+ high: 0.01
37
+ model.conv1_size:
38
+ type: categorical
39
+ choices:
40
+ - 3
41
+ - 5
42
+ - 7
43
+ model.conv1_channels:
44
+ type: categorical
45
+ choices:
46
+ - 3
47
+ - 6
48
+ - 9
49
+ model.conv2_size:
50
+ type: categorical
51
+ choices:
52
+ - 3
53
+ - 5
54
+ - 7
55
+ model.conv2_channels:
56
+ type: categorical
57
+ choices:
58
+ - 6
59
+ - 11
60
+ - 16
61
+ model.lin1_size:
62
+ type: categorical
63
+ choices:
64
+ - 32
65
+ - 72
66
+ - 128
67
+ model.lin2_size:
68
+ type: categorical
69
+ choices:
70
+ - 32
71
+ - 72
72
+ - 128
73
+ help:
74
+ app_name: ${hydra.job.name}
75
+ header: '${hydra.help.app_name} is powered by Hydra.
76
+
77
+ '
78
+ footer: 'Powered by Hydra (https://hydra.cc)
79
+
80
+ Use --hydra-help to view Hydra specific help
81
+
82
+ '
83
+ template: '${hydra.help.header}
84
+
85
+ == Configuration groups ==
86
+
87
+ Compose your configuration from those groups (group=option)
88
+
89
+
90
+ $APP_CONFIG_GROUPS
91
+
92
+
93
+ == Config ==
94
+
95
+ Override anything in the config (foo.bar=value)
96
+
97
+
98
+ $CONFIG
99
+
100
+
101
+ ${hydra.help.footer}
102
+
103
+ '
104
+ hydra_help:
105
+ template: 'Hydra (${hydra.runtime.version})
106
+
107
+ See https://hydra.cc for more info.
108
+
109
+
110
+ == Flags ==
111
+
112
+ $FLAGS_HELP
113
+
114
+
115
+ == Configuration groups ==
116
+
117
+ Compose your configuration from those groups (For example, append hydra/job_logging=disabled
118
+ to command line)
119
+
120
+
121
+ $HYDRA_CONFIG_GROUPS
122
+
123
+
124
+ Use ''--cfg hydra'' to Show the Hydra config.
125
+
126
+ '
127
+ hydra_help: ???
128
+ hydra_logging:
129
+ version: 1
130
+ formatters:
131
+ colorlog:
132
+ (): colorlog.ColoredFormatter
133
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
134
+ handlers:
135
+ console:
136
+ class: logging.StreamHandler
137
+ formatter: colorlog
138
+ stream: ext://sys.stdout
139
+ root:
140
+ level: INFO
141
+ handlers:
142
+ - console
143
+ disable_existing_loggers: false
144
+ job_logging:
145
+ version: 1
146
+ formatters:
147
+ simple:
148
+ format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
149
+ colorlog:
150
+ (): colorlog.ColoredFormatter
151
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
152
+ - %(message)s'
153
+ log_colors:
154
+ DEBUG: purple
155
+ INFO: green
156
+ WARNING: yellow
157
+ ERROR: red
158
+ CRITICAL: red
159
+ handlers:
160
+ console:
161
+ class: logging.StreamHandler
162
+ formatter: colorlog
163
+ stream: ext://sys.stdout
164
+ file:
165
+ class: logging.FileHandler
166
+ formatter: simple
167
+ filename: ${hydra.job.name}.log
168
+ root:
169
+ level: INFO
170
+ handlers:
171
+ - console
172
+ - file
173
+ disable_existing_loggers: false
174
+ env: {}
175
+ searchpath: []
176
+ callbacks: {}
177
+ output_subdir: .hydra
178
+ overrides:
179
+ hydra: []
180
+ task:
181
+ - datamodule.batch_size=64
182
+ - model.lr=0.005087336593525169
183
+ - model.conv1_size=3
184
+ - model.conv1_channels=9
185
+ - model.conv2_size=3
186
+ - model.conv2_channels=6
187
+ - model.lin1_size=32
188
+ - model.lin2_size=72
189
+ - hparams_search=focusConvMSE_150.yaml
190
+ job:
191
+ name: train
192
+ override_dirname: datamodule.batch_size=64,hparams_search=focusConvMSE_150.yaml,model.conv1_channels=9,model.conv1_size=3,model.conv2_channels=6,model.conv2_size=3,model.lin1_size=32,model.lin2_size=72,model.lr=0.005087336593525169
193
+ id: '10'
194
+ num: 10
195
+ config_name: train.yaml
196
+ env_set: {}
197
+ env_copy: []
198
+ config:
199
+ override_dirname:
200
+ kv_sep: '='
201
+ item_sep: ','
202
+ exclude_keys: []
203
+ runtime:
204
+ version: 1.1.2
205
+ cwd: /usr/src/app
206
+ config_sources:
207
+ - path: hydra.conf
208
+ schema: pkg
209
+ provider: hydra
210
+ - path: /usr/src/app/configs
211
+ schema: file
212
+ provider: main
213
+ - path: hydra_plugins.hydra_colorlog.conf
214
+ schema: pkg
215
+ provider: hydra-colorlog
216
+ - path: ''
217
+ schema: structured
218
+ provider: schema
219
+ choices:
220
+ local: default.yaml
221
+ hparams_search: focusConvMSE_150.yaml
222
+ debug: null
223
+ experiment: null
224
+ log_dir: default.yaml
225
+ trainer: long.yaml
226
+ logger: many_loggers
227
+ callbacks: default.yaml
228
+ model: focusConv_150.yaml
229
+ datamodule: focus150.yaml
230
+ hydra/env: default
231
+ hydra/callbacks: null
232
+ hydra/job_logging: colorlog
233
+ hydra/hydra_logging: colorlog
234
+ hydra/hydra_help: default
235
+ hydra/help: default
236
+ hydra/sweeper: optuna
237
+ hydra/sweeper/sampler: tpe
238
+ hydra/launcher: basic
239
+ hydra/output: default
240
+ verbose: false
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/10/.hydra/overrides.yaml ADDED
@@ -0,0 +1,9 @@
+ - datamodule.batch_size=64
+ - model.lr=0.005087336593525169
+ - model.conv1_size=3
+ - model.conv1_channels=9
+ - model.conv2_size=3
+ - model.conv2_channels=6
+ - model.lin1_size=32
+ - model.lin2_size=72
+ - hparams_search=focusConvMSE_150.yaml
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/10/checkpoints/epoch_092.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6cfc365c77d2521680e93f07bed74816864dcd921df861b6c7fc3e8ac566a528
+ size 2607173
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/10/checkpoints/last.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:353dc96af323e2754879f17a013fbe4801b1bd4d509ab9d9e86c0f2d0df17472
+ size 2607173
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/10/csv/version_0/hparams.yaml ADDED
@@ -0,0 +1,54 @@
1
+ trainer:
2
+ _target_: pytorch_lightning.Trainer
3
+ gpus: 1
4
+ min_epochs: 1
5
+ max_epochs: 100
6
+ resume_from_checkpoint: null
7
+ model:
8
+ _target_: src.models.focus_conv_module.FocusConvLitModule
9
+ image_size: 150
10
+ pool_size: 2
11
+ conv1_size: 3
12
+ conv1_channels: 9
13
+ conv2_size: 3
14
+ conv2_channels: 6
15
+ lin1_size: 32
16
+ lin2_size: 72
17
+ output_size: 1
18
+ lr: 0.005087336593525169
19
+ weight_decay: 0.0005
20
+ datamodule:
21
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
22
+ data_dir: /usr/src/app/data/focus150
23
+ csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
24
+ csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
25
+ csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
26
+ batch_size: 64
27
+ num_workers: 0
28
+ pin_memory: false
29
+ seed: null
30
+ callbacks:
31
+ model_checkpoint:
32
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
33
+ monitor: val/mae
34
+ mode: min
35
+ save_top_k: 1
36
+ save_last: true
37
+ verbose: false
38
+ dirpath: checkpoints/
39
+ filename: epoch_{epoch:03d}
40
+ auto_insert_metric_name: false
41
+ early_stopping:
42
+ _target_: pytorch_lightning.callbacks.EarlyStopping
43
+ monitor: val/mae
44
+ mode: min
45
+ patience: 100
46
+ min_delta: 0
47
+ model_summary:
48
+ _target_: pytorch_lightning.callbacks.RichModelSummary
49
+ max_depth: -1
50
+ rich_progress_bar:
51
+ _target_: pytorch_lightning.callbacks.RichProgressBar
52
+ model/params/total: 216201
53
+ model/params/trainable: 216201
54
+ model/params/non_trainable: 0
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/10/csv/version_0/metrics.csv ADDED
@@ -0,0 +1,202 @@
1
+ val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae
2
+ 0.297128289937973,0.38335299491882324,0.08328600227832794,0,47,,,,
3
+ ,,,1,47,40.01511764526367,3.1360795497894287,,
4
+ 0.00691829202696681,0.06787092983722687,0.06787092983722687,1,95,,,,
5
+ ,,,2,95,0.08132766932249069,0.18770506978034973,,
6
+ 0.0030725460965186357,0.043565649539232254,0.043565649539232254,2,143,,,,
7
+ ,,,3,143,0.003296283073723316,0.04497074335813522,,
8
+ 0.002312231110408902,0.037668559700250626,0.037668559700250626,3,191,,,,
9
+ ,,,4,191,0.0024263542145490646,0.03879760950803757,,
10
+ 0.0020216782577335835,0.035504769533872604,0.035504769533872604,4,239,,,,
11
+ ,,,5,239,0.0020707871299237013,0.03605496138334274,,
12
+ 0.001803099294193089,0.03344641253352165,0.03344641253352165,5,287,,,,
13
+ ,,,6,287,0.0018249934073537588,0.034145988523960114,,
14
+ 0.0015667366096749902,0.03155108541250229,0.03155108541250229,6,335,,,,
15
+ ,,,7,335,0.0016111411387100816,0.03227376192808151,,
16
+ 0.0014411286683753133,0.030259162187576294,0.030259162187576294,7,383,,,,
17
+ ,,,8,383,0.0014112954959273338,0.030364587903022766,,
18
+ 0.001279135118238628,0.029262246564030647,0.029262246564030647,8,431,,,,
19
+ ,,,9,431,0.0012712676543742418,0.029058385640382767,,
20
+ 0.0014408566057682037,0.02985064685344696,0.029262246564030647,9,479,,,,
21
+ ,,,10,479,0.0011692742118611932,0.028139259666204453,,
22
+ 0.0010777359129860997,0.026678768917918205,0.026678768917918205,10,527,,,,
23
+ ,,,11,527,0.0010590862948447466,0.026911111548542976,,
24
+ 0.0010190429165959358,0.026789167895913124,0.026678768917918205,11,575,,,,
25
+ ,,,12,575,0.0009614312439225614,0.025751642882823944,,
26
+ 0.0009529078961350024,0.02568790689110756,0.02568790689110756,12,623,,,,
27
+ ,,,13,623,0.0009066589409485459,0.025140710175037384,,
28
+ 0.000915870419703424,0.025255456566810608,0.025255456566810608,13,671,,,,
29
+ ,,,14,671,0.0008375730249099433,0.02426665462553501,,
30
+ 0.0009238356724381447,0.025964440777897835,0.025255456566810608,14,719,,,,
31
+ ,,,15,719,0.0008069746545515954,0.023811794817447662,,
32
+ 0.0008757266332395375,0.025355642661452293,0.025255456566810608,15,767,,,,
33
+ ,,,16,767,0.0007426062948070467,0.022953763604164124,,
34
+ 0.0008445768035016954,0.023457424715161324,0.023457424715161324,16,815,,,,
35
+ ,,,17,815,0.0007195461075752974,0.022516882047057152,,
36
+ 0.0007374058477580547,0.022849686443805695,0.022849686443805695,17,863,,,,
37
+ ,,,18,863,0.0006567381205968559,0.021802464500069618,,
38
+ 0.0007077977643348277,0.022693727165460587,0.022693727165460587,18,911,,,,
39
+ ,,,19,911,0.0006642473163083196,0.02182561531662941,,
40
+ 0.0007514710887335241,0.022226471453905106,0.022226471453905106,19,959,,,,
41
+ ,,,20,959,0.0006159473559819162,0.021135929971933365,,
42
+ 0.0006898191059008241,0.02177397534251213,0.02177397534251213,20,1007,,,,
43
+ ,,,21,1007,0.0005949742626398802,0.02071426622569561,,
44
+ 0.000646718661300838,0.021527979522943497,0.021527979522943497,21,1055,,,,
45
+ ,,,22,1055,0.0005545819876715541,0.02010226994752884,,
46
+ 0.0006690772715955973,0.022260967642068863,0.021527979522943497,22,1103,,,,
47
+ ,,,23,1103,0.0005510245100595057,0.019918637350201607,,
48
+ 0.0006288495496846735,0.02157101035118103,0.021527979522943497,23,1151,,,,
49
+ ,,,24,1151,0.0005404771654866636,0.019692620262503624,,
50
+ 0.0005939495749771595,0.0208309106528759,0.0208309106528759,24,1199,,,,
51
+ ,,,25,1199,0.0005010634777136147,0.019118448719382286,,
52
+ 0.0006160447373986244,0.021308191120624542,0.0208309106528759,25,1247,,,,
53
+ ,,,26,1247,0.0004856302111875266,0.01876657083630562,,
54
+ 0.0006083704647608101,0.020363669842481613,0.020363669842481613,26,1295,,,,
55
+ ,,,27,1295,0.0005066324374638498,0.018962891772389412,,
56
+ 0.0006204916862770915,0.02024492248892784,0.02024492248892784,27,1343,,,,
57
+ ,,,28,1343,0.00046793121146038175,0.018299954012036324,,
58
+ 0.0005573905655182898,0.019922440871596336,0.019922440871596336,28,1391,,,,
59
+ ,,,29,1391,0.00044876415631733835,0.017899714410305023,,
60
+ 0.0005943257128819823,0.020701762288808823,0.019922440871596336,29,1439,,,,
61
+ ,,,30,1439,0.000458199909189716,0.0179173294454813,,
62
+ 0.0005497335223481059,0.01967148669064045,0.01967148669064045,30,1487,,,,
63
+ ,,,31,1487,0.00043762134737335145,0.017593257129192352,,
64
+ 0.0006309315213002264,0.021029993891716003,0.01967148669064045,31,1535,,,,
65
+ ,,,32,1535,0.00043793627992272377,0.0174094270914793,,
66
+ 0.000539357599336654,0.0193520225584507,0.0193520225584507,32,1583,,,,
67
+ ,,,33,1583,0.0004333304532337934,0.017360197380185127,,
68
+ 0.0005120911519043148,0.018871715292334557,0.018871715292334557,33,1631,,,,
69
+ ,,,34,1631,0.0004008531104773283,0.016732648015022278,,
70
+ 0.0005218895385041833,0.018841875717043877,0.018841875717043877,34,1679,,,,
71
+ ,,,35,1679,0.00038889585994184017,0.01643705740571022,,
72
+ 0.0004955498734489083,0.01850888691842556,0.01850888691842556,35,1727,,,,
73
+ ,,,36,1727,0.00041668149060569704,0.016752639785408974,,
74
+ 0.0004906250978820026,0.018399285152554512,0.018399285152554512,36,1775,,,,
75
+ ,,,37,1775,0.00037922398769296706,0.016120539978146553,,
76
+ 0.0004966743290424347,0.018477337434887886,0.018399285152554512,37,1823,,,,
77
+ ,,,38,1823,0.000409803818911314,0.016566643491387367,,
78
+ 0.0004842205671593547,0.01818913407623768,0.01818913407623768,38,1871,,,,
79
+ ,,,39,1871,0.00037369230994954705,0.015994809567928314,,
80
+ 0.00047722840099595487,0.01805187202990055,0.01805187202990055,39,1919,,,,
81
+ ,,,40,1919,0.0003939098969567567,0.01620776392519474,,
82
+ 0.0004883329966105521,0.018200663849711418,0.01805187202990055,40,1967,,,,
83
+ ,,,41,1967,0.00037465605419129133,0.015753736719489098,,
84
+ 0.0005813867319375277,0.019705940037965775,0.01805187202990055,41,2015,,,,
85
+ ,,,42,2015,0.00037237259675748646,0.01578802429139614,,
86
+ 0.00048035706277005374,0.017833486199378967,0.017833486199378967,42,2063,,,,
87
+ ,,,43,2063,0.00035546981962397695,0.015316423960030079,,
88
+ 0.00046007975470274687,0.017606763169169426,0.017606763169169426,43,2111,,,,
89
+ ,,,44,2111,0.0003548930981196463,0.015290375798940659,,
90
+ 0.0004768681537825614,0.01788400299847126,0.017606763169169426,44,2159,,,,
91
+ ,,,45,2159,0.00036000547697767615,0.015402588061988354,,
92
+ 0.00047660141717642546,0.017773112282156944,0.017606763169169426,45,2207,,,,
93
+ ,,,46,2207,0.00036259525222703815,0.015470572747290134,,
94
+ 0.0004637573438230902,0.017751259729266167,0.017606763169169426,46,2255,,,,
95
+ ,,,47,2255,0.00035445764660835266,0.015216158702969551,,
96
+ 0.0005082394927740097,0.01797080785036087,0.017606763169169426,47,2303,,,,
97
+ ,,,48,2303,0.0003874969552271068,0.015887649729847908,,
98
+ 0.0004757147398777306,0.017772117629647255,0.017606763169169426,48,2351,,,,
99
+ ,,,49,2351,0.00034212280297651887,0.01486652996391058,,
100
+ 0.0005294601432979107,0.019027674570679665,0.017606763169169426,49,2399,,,,
101
+ ,,,50,2399,0.0003533226845320314,0.015159578062593937,,
102
+ 0.00047022895887494087,0.017878921702504158,0.017606763169169426,50,2447,,,,
103
+ ,,,51,2447,0.0003633022424764931,0.015364286489784718,,
104
+ 0.0004938418278470635,0.018388651311397552,0.017606763169169426,51,2495,,,,
105
+ ,,,52,2495,0.0004122417012695223,0.016270458698272705,,
106
+ 0.00046376671525649726,0.01728753373026848,0.01728753373026848,52,2543,,,,
107
+ ,,,53,2543,0.00034436502028256655,0.014873036183416843,,
108
+ 0.0004671674105338752,0.01785762794315815,0.01728753373026848,53,2591,,,,
109
+ ,,,54,2591,0.00033432827331125736,0.014644384384155273,,
110
+ 0.000451440253527835,0.017187360674142838,0.017187360674142838,54,2639,,,,
111
+ ,,,55,2639,0.00033150071976706386,0.014589235186576843,,
112
+ 0.0004798794398084283,0.017389385029673576,0.017187360674142838,55,2687,,,,
113
+ ,,,56,2687,0.00036632531555369496,0.01539022009819746,,
114
+ 0.0004537151544354856,0.017134176567196846,0.017134176567196846,56,2735,,,,
115
+ ,,,57,2735,0.000349498848663643,0.014891334809362888,,
116
+ 0.0004600228276103735,0.017688928171992302,0.017134176567196846,57,2783,,,,
117
+ ,,,58,2783,0.00033381255343556404,0.01461023185402155,,
118
+ 0.00051453075138852,0.018726876005530357,0.017134176567196846,58,2831,,,,
119
+ ,,,59,2831,0.00034784406307153404,0.014819341711699963,,
120
+ 0.0005097354296594858,0.01771176978945732,0.017134176567196846,59,2879,,,,
121
+ ,,,60,2879,0.00035473270690999925,0.015000062063336372,,
122
+ 0.00043879548320546746,0.017043637111783028,0.017043637111783028,60,2927,,,,
123
+ ,,,61,2927,0.00033864876604638994,0.014578519389033318,,
124
+ 0.0004886853275820613,0.01813986338675022,0.017043637111783028,61,2975,,,,
125
+ ,,,62,2975,0.000348740431945771,0.014804967679083347,,
126
+ 0.0004828817618545145,0.018005890771746635,0.017043637111783028,62,3023,,,,
127
+ ,,,63,3023,0.0003484741901047528,0.01485227607190609,,
128
+ 0.0005082758725620806,0.018634019419550896,0.017043637111783028,63,3071,,,,
129
+ ,,,64,3071,0.0003519932215567678,0.014947101473808289,,
130
+ 0.0004358866426628083,0.01697085238993168,0.01697085238993168,64,3119,,,,
131
+ ,,,65,3119,0.00032828605617396533,0.014392907731235027,,
132
+ 0.0006732946494594216,0.021323291584849358,0.01697085238993168,65,3167,,,,
133
+ ,,,66,3167,0.000360648991772905,0.015026014298200607,,
134
+ 0.0004951555747538805,0.01833316497504711,0.01697085238993168,66,3215,,,,
135
+ ,,,67,3215,0.0004139290249440819,0.016335349529981613,,
136
+ 0.0004732554079964757,0.01728786714375019,0.01697085238993168,67,3263,,,,
137
+ ,,,68,3263,0.00034444956691004336,0.014759093523025513,,
138
+ 0.0007570147281512618,0.021798543632030487,0.01697085238993168,68,3311,,,,
139
+ ,,,69,3311,0.00034858830622397363,0.014735118485987186,,
140
+ 0.0005190008087083697,0.01884130761027336,0.01697085238993168,69,3359,,,,
141
+ ,,,70,3359,0.00036528645432554185,0.01494115125387907,,
142
+ 0.0004827289085369557,0.01739480346441269,0.01697085238993168,70,3407,,,,
143
+ ,,,71,3407,0.0003466542111709714,0.014748144894838333,,
144
+ 0.0004802520852535963,0.017185028642416,0.01697085238993168,71,3455,,,,
145
+ ,,,72,3455,0.0003102464834228158,0.013857228681445122,,
146
+ 0.0011698635062202811,0.028825512155890465,0.01697085238993168,72,3503,,,,
147
+ ,,,73,3503,0.00040557308238931,0.015728285536170006,,
148
+ 0.00047476834151893854,0.017674533650279045,0.01697085238993168,73,3551,,,,
149
+ ,,,74,3551,0.000504529511090368,0.017795048654079437,,
150
+ 0.00045395735651254654,0.01744324341416359,0.01697085238993168,74,3599,,,,
151
+ ,,,75,3599,0.0002941991260740906,0.013535694219172001,,
152
+ 0.00047264594468288124,0.01756073534488678,0.01697085238993168,75,3647,,,,
153
+ ,,,76,3647,0.00032232890953309834,0.014125063084065914,,
154
+ 0.0005077887326478958,0.01841810718178749,0.01697085238993168,76,3695,,,,
155
+ ,,,77,3695,0.00031050245161168277,0.013849079608917236,,
156
+ 0.0004400684265419841,0.017130820080637932,0.01697085238993168,77,3743,,,,
157
+ ,,,78,3743,0.00037385697942227125,0.015117947943508625,,
158
+ 0.00048583323950879276,0.017336329445242882,0.01697085238993168,78,3791,,,,
159
+ ,,,79,3791,0.0002798529458232224,0.013143634423613548,,
160
+ 0.0006713546463288367,0.02037055976688862,0.01697085238993168,79,3839,,,,
161
+ ,,,80,3839,0.0003314435889478773,0.014346498996019363,,
162
+ 0.00044516820344142616,0.017203398048877716,0.01697085238993168,80,3887,,,,
163
+ ,,,81,3887,0.0003526363580022007,0.014847283251583576,,
164
+ 0.0004415729199536145,0.017022131010890007,0.01697085238993168,81,3935,,,,
165
+ ,,,82,3935,0.00030380903626792133,0.013673552311956882,,
166
+ 0.00044664027518592775,0.017137445509433746,0.01697085238993168,82,3983,,,,
167
+ ,,,83,3983,0.0002919149410445243,0.013349147513508797,,
168
+ 0.0004899780033156276,0.01782706007361412,0.01697085238993168,83,4031,,,,
169
+ ,,,84,4031,0.000373672490241006,0.015108526684343815,,
170
+ 0.00044448766857385635,0.017205312848091125,0.01697085238993168,84,4079,,,,
171
+ ,,,85,4079,0.00028202898101881146,0.01317006815224886,,
172
+ 0.0004958384670317173,0.018177904188632965,0.01697085238993168,85,4127,,,,
173
+ ,,,86,4127,0.0003403614682611078,0.014474233612418175,,
174
+ 0.00046640002983622253,0.01725357584655285,0.01697085238993168,86,4175,,,,
175
+ ,,,87,4175,0.00029584148433059454,0.013471836224198341,,
176
+ 0.0005176477716304362,0.017849940806627274,0.01697085238993168,87,4223,,,,
177
+ ,,,88,4223,0.0003505153290461749,0.014671403914690018,,
178
+ 0.0006972982664592564,0.021950330585241318,0.01697085238993168,88,4271,,,,
179
+ ,,,89,4271,0.0003013134701177478,0.0135008180513978,,
180
+ 0.0005737725296057761,0.0197175070643425,0.01697085238993168,89,4319,,,,
181
+ ,,,90,4319,0.00033211393747478724,0.014318034052848816,,
182
+ 0.0009465614566579461,0.0245533287525177,0.01697085238993168,90,4367,,,,
183
+ ,,,91,4367,0.0003604537050705403,0.014873224310576916,,
184
+ 0.0006661396473646164,0.02043437957763672,0.01697085238993168,91,4415,,,,
185
+ ,,,92,4415,0.00046966286026872694,0.01709653064608574,,
186
+ 0.00043424832983873785,0.0167109202593565,0.0167109202593565,92,4463,,,,
187
+ ,,,93,4463,0.0003162614593748003,0.013947393745183945,,
188
+ 0.0005107529577799141,0.01851806789636612,0.0167109202593565,93,4511,,,,
189
+ ,,,94,4511,0.0003644294338300824,0.014973732642829418,,
190
+ 0.0005079001421108842,0.017823534086346626,0.0167109202593565,94,4559,,,,
191
+ ,,,95,4559,0.0003134273283649236,0.013791141100227833,,
192
+ 0.0004811722319573164,0.017365051433444023,0.0167109202593565,95,4607,,,,
193
+ ,,,96,4607,0.00034785032039508224,0.014716319739818573,,
194
+ 0.0005111803184263408,0.018264373764395714,0.0167109202593565,96,4655,,,,
195
+ ,,,97,4655,0.00028366665355861187,0.013138112612068653,,
196
+ 0.0005661779432557523,0.018661348149180412,0.0167109202593565,97,4703,,,,
197
+ ,,,98,4703,0.0003329300379846245,0.014331191778182983,,
198
+ 0.0005402230308391154,0.018148407340049744,0.0167109202593565,98,4751,,,,
199
+ ,,,99,4751,0.0003975348954554647,0.015554054640233517,,
200
+ 0.0005257187876850367,0.01901872642338276,0.0167109202593565,99,4799,,,,
201
+ ,,,100,4799,0.0002697853196877986,0.012862459756433964,,
202
+ ,,,92,4800,,,0.0003916280984412879,0.01619379036128521
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/10/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652260828.30858e6037f6.1.20 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8d365bf7c1bfd9c1ea488651a73552b8d264d1aeb6ec18b2df2c9d192e53c55a
+ size 36796
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/10/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652261056.30858e6037f6.1.21 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:80f13afc38eb08ee2a23ccc1af49ba27ef606ff13b302347cde7f73393956b6f
+ size 179
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/10/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml ADDED
@@ -0,0 +1,54 @@
1
+ trainer:
2
+ _target_: pytorch_lightning.Trainer
3
+ gpus: 1
4
+ min_epochs: 1
5
+ max_epochs: 100
6
+ resume_from_checkpoint: null
7
+ model:
8
+ _target_: src.models.focus_conv_module.FocusConvLitModule
9
+ image_size: 150
10
+ pool_size: 2
11
+ conv1_size: 3
12
+ conv1_channels: 9
13
+ conv2_size: 3
14
+ conv2_channels: 6
15
+ lin1_size: 32
16
+ lin2_size: 72
17
+ output_size: 1
18
+ lr: 0.005087336593525169
19
+ weight_decay: 0.0005
20
+ datamodule:
21
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
22
+ data_dir: /usr/src/app/data/focus150
23
+ csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
24
+ csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
25
+ csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
26
+ batch_size: 64
27
+ num_workers: 0
28
+ pin_memory: false
29
+ seed: null
30
+ callbacks:
31
+ model_checkpoint:
32
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
33
+ monitor: val/mae
34
+ mode: min
35
+ save_top_k: 1
36
+ save_last: true
37
+ verbose: false
38
+ dirpath: checkpoints/
39
+ filename: epoch_{epoch:03d}
40
+ auto_insert_metric_name: false
41
+ early_stopping:
42
+ _target_: pytorch_lightning.callbacks.EarlyStopping
43
+ monitor: val/mae
44
+ mode: min
45
+ patience: 100
46
+ min_delta: 0
47
+ model_summary:
48
+ _target_: pytorch_lightning.callbacks.RichModelSummary
49
+ max_depth: -1
50
+ rich_progress_bar:
51
+ _target_: pytorch_lightning.callbacks.RichProgressBar
52
+ model/params/total: 216201
53
+ model/params/trainable: 216201
54
+ model/params/non_trainable: 0
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/11/.hydra/config.yaml ADDED
@@ -0,0 +1,81 @@
1
+ original_work_dir: ${hydra:runtime.cwd}
2
+ data_dir: ${original_work_dir}/data
3
+ print_config: true
4
+ ignore_warnings: true
5
+ train: true
6
+ test: true
7
+ seed: null
8
+ name: focusConvMSE_150_hyperparameter_search
9
+ datamodule:
10
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
11
+ data_dir: ${data_dir}/focus150
12
+ csv_train_file: ${data_dir}/focus150/train_metadata.csv
13
+ csv_val_file: ${data_dir}/focus150/validation_metadata.csv
14
+ csv_test_file: ${data_dir}/focus150/test_metadata.csv
15
+ batch_size: 64
16
+ num_workers: 0
17
+ pin_memory: false
18
+ model:
19
+ _target_: src.models.focus_conv_module.FocusConvLitModule
20
+ image_size: 150
21
+ pool_size: 2
22
+ conv1_size: 3
23
+ conv1_channels: 9
24
+ conv2_size: 3
25
+ conv2_channels: 6
26
+ lin1_size: 32
27
+ lin2_size: 72
28
+ output_size: 1
29
+ lr: 0.00010666027901645324
30
+ weight_decay: 0.0005
31
+ callbacks:
32
+ model_checkpoint:
33
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
34
+ monitor: val/mae
35
+ mode: min
36
+ save_top_k: 1
37
+ save_last: true
38
+ verbose: false
39
+ dirpath: checkpoints/
40
+ filename: epoch_{epoch:03d}
41
+ auto_insert_metric_name: false
42
+ early_stopping:
43
+ _target_: pytorch_lightning.callbacks.EarlyStopping
44
+ monitor: val/mae
45
+ mode: min
46
+ patience: 100
47
+ min_delta: 0
48
+ model_summary:
49
+ _target_: pytorch_lightning.callbacks.RichModelSummary
50
+ max_depth: -1
51
+ rich_progress_bar:
52
+ _target_: pytorch_lightning.callbacks.RichProgressBar
53
+ logger:
54
+ csv:
55
+ _target_: pytorch_lightning.loggers.csv_logs.CSVLogger
56
+ save_dir: .
57
+ name: csv/
58
+ prefix: ''
59
+ mlflow:
60
+ _target_: pytorch_lightning.loggers.mlflow.MLFlowLogger
61
+ experiment_name: ${name}
62
+ tracking_uri: ${original_work_dir}/logs/mlflow/mlruns
63
+ tags: null
64
+ save_dir: ./mlruns
65
+ prefix: ''
66
+ artifact_location: null
67
+ tensorboard:
68
+ _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
69
+ save_dir: tensorboard/
70
+ name: null
71
+ version: ${name}
72
+ log_graph: false
73
+ default_hp_metric: true
74
+ prefix: ''
75
+ trainer:
76
+ _target_: pytorch_lightning.Trainer
77
+ gpus: 1
78
+ min_epochs: 1
79
+ max_epochs: 100
80
+ resume_from_checkpoint: null
81
+ optimized_metric: val/mae_best
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/11/.hydra/hydra.yaml ADDED
@@ -0,0 +1,240 @@
1
+ hydra:
2
+ run:
3
+ dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
4
+ sweep:
5
+ dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
6
+ subdir: ${hydra.job.num}
7
+ launcher:
8
+ _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
9
+ sweeper:
10
+ sampler:
11
+ _target_: optuna.samplers.TPESampler
12
+ seed: 12345
13
+ consider_prior: true
14
+ prior_weight: 1.0
15
+ consider_magic_clip: true
16
+ consider_endpoints: false
17
+ n_startup_trials: 10
18
+ n_ei_candidates: 24
19
+ multivariate: false
20
+ warn_independent_sampling: true
21
+ _target_: hydra_plugins.hydra_optuna_sweeper.optuna_sweeper.OptunaSweeper
22
+ direction: minimize
23
+ storage: null
24
+ study_name: focusConvMSE_150_hyperparameter_search
25
+ n_trials: 20
26
+ n_jobs: 1
27
+ search_space:
28
+ datamodule.batch_size:
29
+ type: categorical
30
+ choices:
31
+ - 64
32
+ - 128
33
+ model.lr:
34
+ type: float
35
+ low: 0.0001
36
+ high: 0.01
37
+ model.conv1_size:
38
+ type: categorical
39
+ choices:
40
+ - 3
41
+ - 5
42
+ - 7
43
+ model.conv1_channels:
44
+ type: categorical
45
+ choices:
46
+ - 3
47
+ - 6
48
+ - 9
49
+ model.conv2_size:
50
+ type: categorical
51
+ choices:
52
+ - 3
53
+ - 5
54
+ - 7
55
+ model.conv2_channels:
56
+ type: categorical
57
+ choices:
58
+ - 6
59
+ - 11
60
+ - 16
61
+ model.lin1_size:
62
+ type: categorical
63
+ choices:
64
+ - 32
65
+ - 72
66
+ - 128
67
+ model.lin2_size:
68
+ type: categorical
69
+ choices:
70
+ - 32
71
+ - 72
72
+ - 128
73
+ help:
74
+ app_name: ${hydra.job.name}
75
+ header: '${hydra.help.app_name} is powered by Hydra.
76
+
77
+ '
78
+ footer: 'Powered by Hydra (https://hydra.cc)
79
+
80
+ Use --hydra-help to view Hydra specific help
81
+
82
+ '
83
+ template: '${hydra.help.header}
84
+
85
+ == Configuration groups ==
86
+
87
+ Compose your configuration from those groups (group=option)
88
+
89
+
90
+ $APP_CONFIG_GROUPS
91
+
92
+
93
+ == Config ==
94
+
95
+ Override anything in the config (foo.bar=value)
96
+
97
+
98
+ $CONFIG
99
+
100
+
101
+ ${hydra.help.footer}
102
+
103
+ '
104
+ hydra_help:
105
+ template: 'Hydra (${hydra.runtime.version})
106
+
107
+ See https://hydra.cc for more info.
108
+
109
+
110
+ == Flags ==
111
+
112
+ $FLAGS_HELP
113
+
114
+
115
+ == Configuration groups ==
116
+
117
+ Compose your configuration from those groups (For example, append hydra/job_logging=disabled
118
+ to command line)
119
+
120
+
121
+ $HYDRA_CONFIG_GROUPS
122
+
123
+
124
+ Use ''--cfg hydra'' to Show the Hydra config.
125
+
126
+ '
127
+ hydra_help: ???
128
+ hydra_logging:
129
+ version: 1
130
+ formatters:
131
+ colorlog:
132
+ (): colorlog.ColoredFormatter
133
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
134
+ handlers:
135
+ console:
136
+ class: logging.StreamHandler
137
+ formatter: colorlog
138
+ stream: ext://sys.stdout
139
+ root:
140
+ level: INFO
141
+ handlers:
142
+ - console
143
+ disable_existing_loggers: false
144
+ job_logging:
145
+ version: 1
146
+ formatters:
147
+ simple:
148
+ format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
149
+ colorlog:
150
+ (): colorlog.ColoredFormatter
151
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
152
+ - %(message)s'
153
+ log_colors:
154
+ DEBUG: purple
155
+ INFO: green
156
+ WARNING: yellow
157
+ ERROR: red
158
+ CRITICAL: red
159
+ handlers:
160
+ console:
161
+ class: logging.StreamHandler
162
+ formatter: colorlog
163
+ stream: ext://sys.stdout
164
+ file:
165
+ class: logging.FileHandler
166
+ formatter: simple
167
+ filename: ${hydra.job.name}.log
168
+ root:
169
+ level: INFO
170
+ handlers:
171
+ - console
172
+ - file
173
+ disable_existing_loggers: false
174
+ env: {}
175
+ searchpath: []
176
+ callbacks: {}
177
+ output_subdir: .hydra
178
+ overrides:
179
+ hydra: []
180
+ task:
181
+ - datamodule.batch_size=64
182
+ - model.lr=0.00010666027901645324
183
+ - model.conv1_size=3
184
+ - model.conv1_channels=9
185
+ - model.conv2_size=3
186
+ - model.conv2_channels=6
187
+ - model.lin1_size=32
188
+ - model.lin2_size=72
189
+ - hparams_search=focusConvMSE_150.yaml
190
+ job:
191
+ name: train
192
+ override_dirname: datamodule.batch_size=64,hparams_search=focusConvMSE_150.yaml,model.conv1_channels=9,model.conv1_size=3,model.conv2_channels=6,model.conv2_size=3,model.lin1_size=32,model.lin2_size=72,model.lr=0.00010666027901645324
193
+ id: '11'
194
+ num: 11
195
+ config_name: train.yaml
196
+ env_set: {}
197
+ env_copy: []
198
+ config:
199
+ override_dirname:
200
+ kv_sep: '='
201
+ item_sep: ','
202
+ exclude_keys: []
203
+ runtime:
204
+ version: 1.1.2
205
+ cwd: /usr/src/app
206
+ config_sources:
207
+ - path: hydra.conf
208
+ schema: pkg
209
+ provider: hydra
210
+ - path: /usr/src/app/configs
211
+ schema: file
212
+ provider: main
213
+ - path: hydra_plugins.hydra_colorlog.conf
214
+ schema: pkg
215
+ provider: hydra-colorlog
216
+ - path: ''
217
+ schema: structured
218
+ provider: schema
219
+ choices:
220
+ local: default.yaml
221
+ hparams_search: focusConvMSE_150.yaml
222
+ debug: null
223
+ experiment: null
224
+ log_dir: default.yaml
225
+ trainer: long.yaml
226
+ logger: many_loggers
227
+ callbacks: default.yaml
228
+ model: focusConv_150.yaml
229
+ datamodule: focus150.yaml
230
+ hydra/env: default
231
+ hydra/callbacks: null
232
+ hydra/job_logging: colorlog
233
+ hydra/hydra_logging: colorlog
234
+ hydra/hydra_help: default
235
+ hydra/help: default
236
+ hydra/sweeper: optuna
237
+ hydra/sweeper/sampler: tpe
238
+ hydra/launcher: basic
239
+ hydra/output: default
240
+ verbose: false
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/11/.hydra/overrides.yaml ADDED
@@ -0,0 +1,9 @@
+ - datamodule.batch_size=64
+ - model.lr=0.00010666027901645324
+ - model.conv1_size=3
+ - model.conv1_channels=9
+ - model.conv2_size=3
+ - model.conv2_channels=6
+ - model.lin1_size=32
+ - model.lin2_size=72
+ - hparams_search=focusConvMSE_150.yaml
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/11/checkpoints/epoch_096.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d753864d06e9755c117f3e5d565dddd3b63a1ad9ac16e48524c48610e97fdc7d
+ size 2607173
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/11/checkpoints/last.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f7ae63f78f34f933c0a1a259bed5c43efe1d0e31c1c5f90693c29d215edd5bb9
+ size 2607173
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/11/csv/version_0/hparams.yaml ADDED
@@ -0,0 +1,54 @@
1
+ trainer:
2
+ _target_: pytorch_lightning.Trainer
3
+ gpus: 1
4
+ min_epochs: 1
5
+ max_epochs: 100
6
+ resume_from_checkpoint: null
7
+ model:
8
+ _target_: src.models.focus_conv_module.FocusConvLitModule
9
+ image_size: 150
10
+ pool_size: 2
11
+ conv1_size: 3
12
+ conv1_channels: 9
13
+ conv2_size: 3
14
+ conv2_channels: 6
15
+ lin1_size: 32
16
+ lin2_size: 72
17
+ output_size: 1
18
+ lr: 0.00010666027901645324
19
+ weight_decay: 0.0005
20
+ datamodule:
21
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
22
+ data_dir: /usr/src/app/data/focus150
23
+ csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
24
+ csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
25
+ csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
26
+ batch_size: 64
27
+ num_workers: 0
28
+ pin_memory: false
29
+ seed: null
30
+ callbacks:
31
+ model_checkpoint:
32
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
33
+ monitor: val/mae
34
+ mode: min
35
+ save_top_k: 1
36
+ save_last: true
37
+ verbose: false
38
+ dirpath: checkpoints/
39
+ filename: epoch_{epoch:03d}
40
+ auto_insert_metric_name: false
41
+ early_stopping:
42
+ _target_: pytorch_lightning.callbacks.EarlyStopping
43
+ monitor: val/mae
44
+ mode: min
45
+ patience: 100
46
+ min_delta: 0
47
+ model_summary:
48
+ _target_: pytorch_lightning.callbacks.RichModelSummary
49
+ max_depth: -1
50
+ rich_progress_bar:
51
+ _target_: pytorch_lightning.callbacks.RichProgressBar
52
+ model/params/total: 216201
53
+ model/params/trainable: 216201
54
+ model/params/non_trainable: 0
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/11/csv/version_0/metrics.csv ADDED
@@ -0,0 +1,202 @@
1
+ val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae
2
+ 0.00044237475958652794,0.01763676293194294,0.01763676293194294,0,47,,,,
3
+ ,,,1,47,0.0008919340325519443,0.02283974178135395,,
4
+ 0.0004388249944895506,0.017558621242642403,0.017558621242642403,1,95,,,,
5
+ ,,,2,95,0.00041984900599345565,0.017178816720843315,,
6
+ 0.0004397322190925479,0.017526816576719284,0.017526816576719284,2,143,,,,
7
+ ,,,3,143,0.0004107994318474084,0.01695815660059452,,
8
+ 0.00043804896995425224,0.01748722419142723,0.01748722419142723,3,191,,,,
9
+ ,,,4,191,0.0004020490450784564,0.016791781410574913,,
10
+ 0.00043852656381204724,0.017491716891527176,0.01748722419142723,4,239,,,,
11
+ ,,,5,239,0.000397334253648296,0.016678616404533386,,
12
+ 0.0004380833124741912,0.01745373010635376,0.01745373010635376,5,287,,,,
13
+ ,,,6,287,0.00038863052031956613,0.0164650846272707,,
14
+ 0.00043738019303418696,0.017546452581882477,0.01745373010635376,6,335,,,,
15
+ ,,,7,335,0.000383852660888806,0.01634947769343853,,
16
+ 0.00043664168333634734,0.017448099330067635,0.017448099330067635,7,383,,,,
17
+ ,,,8,383,0.0003788373142015189,0.01623653620481491,,
18
+ 0.0004376796423457563,0.017421182245016098,0.017421182245016098,8,431,,,,
19
+ ,,,9,431,0.00037341867573559284,0.016121070832014084,,
20
+ 0.00044727211934514344,0.017729932442307472,0.017421182245016098,9,479,,,,
21
+ ,,,10,479,0.0003732156765181571,0.01604853942990303,,
22
+ 0.0004424943763297051,0.017517900094389915,0.017421182245016098,10,527,,,,
23
+ ,,,11,527,0.0003708217409439385,0.016067948192358017,,
24
+ 0.0004376410215627402,0.017501866444945335,0.017421182245016098,11,575,,,,
25
+ ,,,12,575,0.0003657707420643419,0.015891214832663536,,
26
+ 0.0004350252856966108,0.017448827624320984,0.017421182245016098,12,623,,,,
27
+ ,,,13,623,0.0003640395007096231,0.015868620947003365,,
28
+ 0.00043666621786542237,0.017442811280488968,0.017421182245016098,13,671,,,,
29
+ ,,,14,671,0.0003595789894461632,0.015750009566545486,,
30
+ 0.00044520292431116104,0.017667440697550774,0.017421182245016098,14,719,,,,
31
+ ,,,15,719,0.00036389240995049477,0.015874886885285378,,
32
+ 0.0004397429001983255,0.017566094174981117,0.017421182245016098,15,767,,,,
33
+ ,,,16,767,0.0003602604556363076,0.015761734917759895,,
34
+ 0.0004458787734620273,0.017593514174222946,0.017421182245016098,16,815,,,,
35
+ ,,,17,815,0.00035641301656141877,0.015685390681028366,,
36
+ 0.00044106703717261553,0.01759248971939087,0.017421182245016098,17,863,,,,
37
+ ,,,18,863,0.0003560411569196731,0.015664445236325264,,
38
+ 0.0004394661809783429,0.017517846077680588,0.017421182245016098,18,911,,,,
39
+ ,,,19,911,0.00035435426980257034,0.015622190199792385,,
40
+ 0.00044928002171218395,0.017760854214429855,0.017421182245016098,19,959,,,,
41
+ ,,,20,959,0.0003515574790071696,0.015560849569737911,,
42
+ 0.00044077957863919437,0.017543144524097443,0.017421182245016098,20,1007,,,,
43
+ ,,,21,1007,0.00035550742177292705,0.01562040951102972,,
44
+ 0.0004409919201862067,0.0176087599247694,0.017421182245016098,21,1055,,,,
45
+ ,,,22,1055,0.00035260809818282723,0.015567011199891567,,
46
+ 0.0004459311894606799,0.017727011814713478,0.017421182245016098,22,1103,,,,
47
+ ,,,23,1103,0.00035820493940263987,0.015736034139990807,,
48
+ 0.0004374788550194353,0.01750698871910572,0.017421182245016098,23,1151,,,,
49
+ ,,,24,1151,0.0003549072134774178,0.01560882292687893,,
50
+ 0.0004391085240058601,0.017550522461533546,0.017421182245016098,24,1199,,,,
51
+ ,,,25,1199,0.00035063253017142415,0.015513729304075241,,
52
+ 0.000438952527474612,0.017544757574796677,0.017421182245016098,25,1247,,,,
53
+ ,,,26,1247,0.0003477166756056249,0.015469828620553017,,
54
+ 0.00043714186176657677,0.017448635771870613,0.017421182245016098,26,1295,,,,
55
+ ,,,27,1295,0.0003475948760751635,0.015468760393559933,,
56
+ 0.0004422964411787689,0.01761581562459469,0.017421182245016098,27,1343,,,,
57
+ ,,,28,1343,0.0003525547799654305,0.015561463311314583,,
58
+ 0.0004402338236104697,0.017498398199677467,0.017421182245016098,28,1391,,,,
59
+ ,,,29,1391,0.00035199683043174446,0.01558825746178627,,
60
+ 0.0004425983061082661,0.01753808930516243,0.017421182245016098,29,1439,,,,
61
+ ,,,30,1439,0.0003504078194964677,0.015529691241681576,,
62
+ 0.0004498255148064345,0.0177548136562109,0.017421182245016098,30,1487,,,,
63
+ ,,,31,1487,0.0003467368660494685,0.015429956838488579,,
64
+ 0.00043997683678753674,0.01754026673734188,0.017421182245016098,31,1535,,,,
65
+ ,,,32,1535,0.0003500457387417555,0.015520870685577393,,
66
+ 0.0004362169711384922,0.01746012642979622,0.017421182245016098,32,1583,,,,
67
+ ,,,33,1583,0.00034992044675163925,0.015574321150779724,,
68
+ 0.0004356371646281332,0.017432082444429398,0.017421182245016098,33,1631,,,,
69
+ ,,,34,1631,0.00034979439806193113,0.015500788576900959,,
70
+ 0.00043828008347190917,0.017479071393609047,0.017421182245016098,34,1679,,,,
71
+ ,,,35,1679,0.00035535931237973273,0.01569623127579689,,
72
+ 0.00043588149128481746,0.017438696697354317,0.017421182245016098,35,1727,,,,
73
+ ,,,36,1727,0.0003519091405905783,0.01559762004762888,,
74
+ 0.0004409944813232869,0.017549559473991394,0.017421182245016098,36,1775,,,,
75
+ ,,,37,1775,0.0003526139771565795,0.015582408756017685,,
76
+ 0.0004371838294900954,0.017497200518846512,0.017421182245016098,37,1823,,,,
77
+ ,,,38,1823,0.0003546958032529801,0.01561814360320568,,
78
+ 0.0004526934935711324,0.017803113907575607,0.017421182245016098,38,1871,,,,
79
+ ,,,39,1871,0.0003539781318977475,0.015633786097168922,,
80
+ 0.0004371951217763126,0.017500780522823334,0.017421182245016098,39,1919,,,,
81
+ ,,,40,1919,0.00035463363747112453,0.015674641355872154,,
82
+ 0.0004323917964939028,0.01740151271224022,0.01740151271224022,40,1967,,,,
83
+ ,,,41,1967,0.00035178018151782453,0.015560634434223175,,
84
+ 0.0004428534011822194,0.017604516819119453,0.01740151271224022,41,2015,,,,
85
+ ,,,42,2015,0.00035143198329024017,0.015515097416937351,,
86
+ 0.0004349216178525239,0.017420772463083267,0.01740151271224022,42,2063,,,,
87
+ ,,,43,2063,0.0003609397099353373,0.015795975923538208,,
88
+ 0.0004396806180011481,0.017549678683280945,0.01740151271224022,43,2111,,,,
89
+ ,,,44,2111,0.00035500028752721846,0.015640176832675934,,
90
+ 0.00043076532892882824,0.01737154833972454,0.01737154833972454,44,2159,,,,
91
+ ,,,45,2159,0.0003549921966623515,0.015620765276253223,,
92
+ 0.0004312520322855562,0.017368266358971596,0.017368266358971596,45,2207,,,,
93
+ ,,,46,2207,0.00035388051765039563,0.01562121044844389,,
94
+ 0.0004354195261839777,0.01744888536632061,0.017368266358971596,46,2255,,,,
95
+ ,,,47,2255,0.00035340539761818945,0.015559324994683266,,
96
+ 0.00043299648677930236,0.017353488132357597,0.017353488132357597,47,2303,,,,
97
+ ,,,48,2303,0.00035770077374763787,0.01564095914363861,,
98
+ 0.00043391904910095036,0.017364034429192543,0.017353488132357597,48,2351,,,,
99
+ ,,,49,2351,0.000359859608579427,0.015786558389663696,,
100
+ 0.0004285445320419967,0.017320256680250168,0.017320256680250168,49,2399,,,,
101
+ ,,,50,2399,0.00035796070005744696,0.015735318884253502,,
102
+ 0.000430954503826797,0.017241472378373146,0.017241472378373146,50,2447,,,,
103
+ ,,,51,2447,0.00035373438731767237,0.015651464462280273,,
104
+ 0.00042562352609820664,0.01719299890100956,0.01719299890100956,51,2495,,,,
105
+ ,,,52,2495,0.0003564102225936949,0.01572144776582718,,
106
+ 0.00042932856013067067,0.017329717054963112,0.01719299890100956,52,2543,,,,
107
+ ,,,53,2543,0.00035671371733769774,0.01566898636519909,,
108
+ 0.0004429467662703246,0.01733720861375332,0.01719299890100956,53,2591,,,,
109
+ ,,,54,2591,0.0003536145086400211,0.015626167878508568,,
110
+ 0.0004243775620125234,0.01720307022333145,0.01719299890100956,54,2639,,,,
111
+ ,,,55,2639,0.0003565653460100293,0.01568603329360485,,
112
+ 0.0004212360945530236,0.017128337174654007,0.017128337174654007,55,2687,,,,
113
+ ,,,56,2687,0.0003542162012308836,0.01563875377178192,,
114
+ 0.00043175890459679067,0.017251433804631233,0.017128337174654007,56,2735,,,,
115
+ ,,,57,2735,0.0003585127124097198,0.015710385516285896,,
116
+ 0.0004701023281086236,0.017810514196753502,0.017128337174654007,57,2783,,,,
117
+ ,,,58,2783,0.0003605375241022557,0.015799447894096375,,
118
+ 0.0004310261574573815,0.01735604554414749,0.017128337174654007,58,2831,,,,
119
+ ,,,59,2831,0.0003615531895775348,0.015817413106560707,,
120
+ 0.00042210734682157636,0.017165636643767357,0.017128337174654007,59,2879,,,,
121
+ ,,,60,2879,0.0003666633856482804,0.015920042991638184,,
122
+ 0.0004298300773371011,0.01733965240418911,0.017128337174654007,60,2927,,,,
123
+ ,,,61,2927,0.0003627555852290243,0.015824392437934875,,
124
+ 0.0004292436642572284,0.01719118095934391,0.017128337174654007,61,2975,,,,
125
+ ,,,62,2975,0.0003614154411479831,0.015812227502465248,,
126
+ 0.0004273036611266434,0.01727384701371193,0.017128337174654007,62,3023,,,,
127
+ ,,,63,3023,0.0003659140784293413,0.015878338366746902,,
128
+ 0.0004190914041828364,0.017106348648667336,0.017106348648667336,63,3071,,,,
129
+ ,,,64,3071,0.00036473284126259387,0.01589711755514145,,
130
+ 0.00041871817666105926,0.017070185393095016,0.017070185393095016,64,3119,,,,
131
+ ,,,65,3119,0.00036075201933272183,0.01581686921417713,,
132
+ 0.00042548280907794833,0.01724153570830822,0.017070185393095016,65,3167,,,,
133
+ ,,,66,3167,0.00035935506457462907,0.01573988050222397,,
134
+ 0.00041766479262150824,0.017069552093744278,0.017069552093744278,66,3215,,,,
135
+ ,,,67,3215,0.00036625820212066174,0.01589464582502842,,
136
+ 0.00042810666491277516,0.017292682081460953,0.017069552093744278,67,3263,,,,
137
+ ,,,68,3263,0.00036182126495987177,0.015805834904313087,,
138
+ 0.0004180409014225006,0.01710260473191738,0.017069552093744278,68,3311,,,,
139
+ ,,,69,3311,0.0003723218687810004,0.01602661982178688,,
140
+ 0.00041894338210113347,0.01713237166404724,0.017069552093744278,69,3359,,,,
141
+ ,,,70,3359,0.0003704174014274031,0.016009429469704628,,
142
+ 0.0004207020392641425,0.01707346737384796,0.017069552093744278,70,3407,,,,
143
+ ,,,71,3407,0.0003666004922706634,0.015945591032505035,,
144
+ 0.0004166274156887084,0.01707972027361393,0.017069552093744278,71,3455,,,,
145
+ ,,,72,3455,0.00036974326940253377,0.015979308634996414,,
146
+ 0.0004175134818069637,0.017124274745583534,0.017069552093744278,72,3503,,,,
147
+ ,,,73,3503,0.0003729758318513632,0.0160704106092453,,
148
+ 0.0004161228716839105,0.017069727182388306,0.017069552093744278,73,3551,,,,
149
+ ,,,74,3551,0.0003703076217789203,0.016026459634304047,,
150
+ 0.0004261505091562867,0.017154613509774208,0.017069552093744278,74,3599,,,,
151
+ ,,,75,3599,0.00036900219856761396,0.016032619401812553,,
152
+ 0.0004146775754634291,0.01702982932329178,0.01702982932329178,75,3647,,,,
153
+ ,,,76,3647,0.0003677821368910372,0.015983235090970993,,
154
+ 0.00042938877595588565,0.01724373735487461,0.01702982932329178,76,3695,,,,
155
+ ,,,77,3695,0.00037143495865166187,0.016052111983299255,,
156
+ 0.00041174268699251115,0.016967177391052246,0.016967177391052246,77,3743,,,,
157
+ ,,,78,3743,0.0003717253857757896,0.016055332496762276,,
158
+ 0.000437425245763734,0.01727815344929695,0.016967177391052246,78,3791,,,,
159
+ ,,,79,3791,0.0003686454438138753,0.015944086015224457,,
160
+ 0.0004134521586820483,0.016999950632452965,0.016967177391052246,79,3839,,,,
161
+ ,,,80,3839,0.00037249684100970626,0.01605980098247528,,
162
+ 0.0004120038647670299,0.01692464016377926,0.01692464016377926,80,3887,,,,
163
+ ,,,81,3887,0.00036835609353147447,0.015996821224689484,,
164
+ 0.0004343365435488522,0.017368201166391373,0.01692464016377926,81,3935,,,,
165
+ ,,,82,3935,0.00036728716804645956,0.01591765135526657,,
166
+ 0.00042008497985079885,0.017036356031894684,0.01692464016377926,82,3983,,,,
167
+ ,,,83,3983,0.0003696488856803626,0.0160061102360487,,
168
+ 0.0004190867766737938,0.01708969473838806,0.01692464016377926,83,4031,,,,
169
+ ,,,84,4031,0.0003737302031368017,0.016063334420323372,,
170
+ 0.00042142876191064715,0.01709361933171749,0.01692464016377926,84,4079,,,,
171
+ ,,,85,4079,0.00037238094955682755,0.016087690368294716,,
172
+ 0.00041377212619408965,0.016958927735686302,0.01692464016377926,85,4127,,,,
173
+ ,,,86,4127,0.0003751576296053827,0.01610795222222805,,
174
+ 0.00041242982842959464,0.016963772475719452,0.01692464016377926,86,4175,,,,
175
+ ,,,87,4175,0.00037346957833506167,0.016124632209539413,,
176
+ 0.00041769869858399034,0.017061550170183182,0.01692464016377926,87,4223,,,,
177
+ ,,,88,4223,0.00038180570118129253,0.01632540114223957,,
178
+ 0.0004111539456062019,0.016972210258245468,0.01692464016377926,88,4271,,,,
179
+ ,,,89,4271,0.0003764932043850422,0.016198858618736267,,
180
+ 0.0004117168427910656,0.016982154920697212,0.01692464016377926,89,4319,,,,
181
+ ,,,90,4319,0.00037736829835921526,0.016201643273234367,,
182
+ 0.0004112584865652025,0.01694490946829319,0.01692464016377926,90,4367,,,,
183
+ ,,,91,4367,0.00037434030673466623,0.016127511858940125,,
184
+ 0.0004176913353148848,0.01709410361945629,0.01692464016377926,91,4415,,,,
185
+ ,,,92,4415,0.0003782199346460402,0.01617586612701416,,
186
+ 0.0004172335029579699,0.017082450911402702,0.01692464016377926,92,4463,,,,
187
+ ,,,93,4463,0.00037703843554481864,0.016223471611738205,,
188
+ 0.000413818983361125,0.01702585071325302,0.01692464016377926,93,4511,,,,
189
+ ,,,94,4511,0.0003798868565354496,0.016270410269498825,,
190
+ 0.00041081098606809974,0.01695484295487404,0.01692464016377926,94,4559,,,,
191
+ ,,,95,4559,0.00038088674773462117,0.016313616186380386,,
192
+ 0.0004101528902538121,0.016961613669991493,0.01692464016377926,95,4607,,,,
193
+ ,,,96,4607,0.00037924019852653146,0.016253754496574402,,
194
+ 0.0004076159675605595,0.016890889033675194,0.016890889033675194,96,4655,,,,
195
+ ,,,97,4655,0.0003762643027585,0.016185859218239784,,
196
+ 0.0004093820753041655,0.01693614013493061,0.016890889033675194,97,4703,,,,
197
+ ,,,98,4703,0.0003816284879576415,0.016296468675136566,,
198
+ 0.000412202876759693,0.01700555719435215,0.016890889033675194,98,4751,,,,
199
+ ,,,99,4751,0.0003795750089921057,0.01622914709150791,,
200
+ 0.00040874816477298737,0.01693073660135269,0.016890889033675194,99,4799,,,,
201
+ ,,,100,4799,0.0003787344612646848,0.01624605990946293,,
202
+ ,,,96,4800,,,0.00038821963244117796,0.01644648052752018
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/11/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652261056.30858e6037f6.1.22 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e08a0e13bde9f02bd751de487bc17833724a07d7263ed1b8d31c16f61ab883d4
3
+ size 36796
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/11/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652261259.30858e6037f6.1.23 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ba7c9f1202cea565887ac261a554252c72ac6cc36cc097d2f6d8ce790b256d23
3
+ size 179
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/11/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml ADDED
@@ -0,0 +1,54 @@
1
+ trainer:
2
+ _target_: pytorch_lightning.Trainer
3
+ gpus: 1
4
+ min_epochs: 1
5
+ max_epochs: 100
6
+ resume_from_checkpoint: null
7
+ model:
8
+ _target_: src.models.focus_conv_module.FocusConvLitModule
9
+ image_size: 150
10
+ pool_size: 2
11
+ conv1_size: 3
12
+ conv1_channels: 9
13
+ conv2_size: 3
14
+ conv2_channels: 6
15
+ lin1_size: 32
16
+ lin2_size: 72
17
+ output_size: 1
18
+ lr: 0.00010666027901645324
19
+ weight_decay: 0.0005
20
+ datamodule:
21
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
22
+ data_dir: /usr/src/app/data/focus150
23
+ csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
24
+ csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
25
+ csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
26
+ batch_size: 64
27
+ num_workers: 0
28
+ pin_memory: false
29
+ seed: null
30
+ callbacks:
31
+ model_checkpoint:
32
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
33
+ monitor: val/mae
34
+ mode: min
35
+ save_top_k: 1
36
+ save_last: true
37
+ verbose: false
38
+ dirpath: checkpoints/
39
+ filename: epoch_{epoch:03d}
40
+ auto_insert_metric_name: false
41
+ early_stopping:
42
+ _target_: pytorch_lightning.callbacks.EarlyStopping
43
+ monitor: val/mae
44
+ mode: min
45
+ patience: 100
46
+ min_delta: 0
47
+ model_summary:
48
+ _target_: pytorch_lightning.callbacks.RichModelSummary
49
+ max_depth: -1
50
+ rich_progress_bar:
51
+ _target_: pytorch_lightning.callbacks.RichProgressBar
52
+ model/params/total: 216201
53
+ model/params/trainable: 216201
54
+ model/params/non_trainable: 0
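Each trial's hparams.yaml is a plain OmegaConf dump, so the logged model and datamodule can be rebuilt from it through their _target_ entries. A minimal sketch, assuming the repository's src.models and src.datamodules packages are importable:

```python
# Hypothetical sketch: rebuild a trial from its logged hparams.yaml.
# Assumes src.models.focus_conv_module and src.datamodules.focus_datamodule
# from this repository are on the PYTHONPATH.
from hydra.utils import instantiate
from omegaconf import OmegaConf

hparams = OmegaConf.load("hparams.yaml")       # the file shown above
model = instantiate(hparams.model)             # -> FocusConvLitModule(image_size=150, ...)
datamodule = instantiate(hparams.datamodule)   # -> FocusDataModule(...)
print(type(model).__name__, hparams.model.lr, hparams["model/params/total"])
```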
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/12/.hydra/config.yaml ADDED
@@ -0,0 +1,81 @@
1
+ original_work_dir: ${hydra:runtime.cwd}
2
+ data_dir: ${original_work_dir}/data
3
+ print_config: true
4
+ ignore_warnings: true
5
+ train: true
6
+ test: true
7
+ seed: null
8
+ name: focusConvMSE_150_hyperparameter_search
9
+ datamodule:
10
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
11
+ data_dir: ${data_dir}/focus150
12
+ csv_train_file: ${data_dir}/focus150/train_metadata.csv
13
+ csv_val_file: ${data_dir}/focus150/validation_metadata.csv
14
+ csv_test_file: ${data_dir}/focus150/test_metadata.csv
15
+ batch_size: 64
16
+ num_workers: 0
17
+ pin_memory: false
18
+ model:
19
+ _target_: src.models.focus_conv_module.FocusConvLitModule
20
+ image_size: 150
21
+ pool_size: 2
22
+ conv1_size: 3
23
+ conv1_channels: 9
24
+ conv2_size: 3
25
+ conv2_channels: 6
26
+ lin1_size: 32
27
+ lin2_size: 72
28
+ output_size: 1
29
+ lr: 0.005913730093886128
30
+ weight_decay: 0.0005
31
+ callbacks:
32
+ model_checkpoint:
33
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
34
+ monitor: val/mae
35
+ mode: min
36
+ save_top_k: 1
37
+ save_last: true
38
+ verbose: false
39
+ dirpath: checkpoints/
40
+ filename: epoch_{epoch:03d}
41
+ auto_insert_metric_name: false
42
+ early_stopping:
43
+ _target_: pytorch_lightning.callbacks.EarlyStopping
44
+ monitor: val/mae
45
+ mode: min
46
+ patience: 100
47
+ min_delta: 0
48
+ model_summary:
49
+ _target_: pytorch_lightning.callbacks.RichModelSummary
50
+ max_depth: -1
51
+ rich_progress_bar:
52
+ _target_: pytorch_lightning.callbacks.RichProgressBar
53
+ logger:
54
+ csv:
55
+ _target_: pytorch_lightning.loggers.csv_logs.CSVLogger
56
+ save_dir: .
57
+ name: csv/
58
+ prefix: ''
59
+ mlflow:
60
+ _target_: pytorch_lightning.loggers.mlflow.MLFlowLogger
61
+ experiment_name: ${name}
62
+ tracking_uri: ${original_work_dir}/logs/mlflow/mlruns
63
+ tags: null
64
+ save_dir: ./mlruns
65
+ prefix: ''
66
+ artifact_location: null
67
+ tensorboard:
68
+ _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
69
+ save_dir: tensorboard/
70
+ name: null
71
+ version: ${name}
72
+ log_graph: false
73
+ default_hp_metric: true
74
+ prefix: ''
75
+ trainer:
76
+ _target_: pytorch_lightning.Trainer
77
+ gpus: 1
78
+ min_epochs: 1
79
+ max_epochs: 100
80
+ resume_from_checkpoint: null
81
+ optimized_metric: val/mae_best
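The .hydra/config.yaml above is the fully composed configuration for trial 12 (note the sampled model.lr). A training entrypoint only has to instantiate its nodes, fit, and hand the optimized metric back to the sweeper; the following is a hypothetical sketch, not the repository's actual train script:

```python
# Hypothetical sketch of a Hydra entrypoint consuming a composed config like
# the one above; the repository's real src/train.py may differ in detail.
import hydra
from hydra.utils import instantiate
from omegaconf import DictConfig


@hydra.main(config_path="configs", config_name="train")  # train.yaml in configs/
def main(cfg: DictConfig) -> float:
    datamodule = instantiate(cfg.datamodule)
    model = instantiate(cfg.model)
    trainer = instantiate(cfg.trainer)
    trainer.fit(model=model, datamodule=datamodule)
    # Returning the monitored metric (here val/mae_best, assuming the
    # LightningModule logs it) is what lets the Optuna sweeper minimize it.
    return trainer.callback_metrics[cfg.optimized_metric].item()


if __name__ == "__main__":
    main()
```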
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/12/.hydra/hydra.yaml ADDED
@@ -0,0 +1,240 @@
1
+ hydra:
2
+ run:
3
+ dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
4
+ sweep:
5
+ dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
6
+ subdir: ${hydra.job.num}
7
+ launcher:
8
+ _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
9
+ sweeper:
10
+ sampler:
11
+ _target_: optuna.samplers.TPESampler
12
+ seed: 12345
13
+ consider_prior: true
14
+ prior_weight: 1.0
15
+ consider_magic_clip: true
16
+ consider_endpoints: false
17
+ n_startup_trials: 10
18
+ n_ei_candidates: 24
19
+ multivariate: false
20
+ warn_independent_sampling: true
21
+ _target_: hydra_plugins.hydra_optuna_sweeper.optuna_sweeper.OptunaSweeper
22
+ direction: minimize
23
+ storage: null
24
+ study_name: focusConvMSE_150_hyperparameter_search
25
+ n_trials: 20
26
+ n_jobs: 1
27
+ search_space:
28
+ datamodule.batch_size:
29
+ type: categorical
30
+ choices:
31
+ - 64
32
+ - 128
33
+ model.lr:
34
+ type: float
35
+ low: 0.0001
36
+ high: 0.01
37
+ model.conv1_size:
38
+ type: categorical
39
+ choices:
40
+ - 3
41
+ - 5
42
+ - 7
43
+ model.conv1_channels:
44
+ type: categorical
45
+ choices:
46
+ - 3
47
+ - 6
48
+ - 9
49
+ model.conv2_size:
50
+ type: categorical
51
+ choices:
52
+ - 3
53
+ - 5
54
+ - 7
55
+ model.conv2_channels:
56
+ type: categorical
57
+ choices:
58
+ - 6
59
+ - 11
60
+ - 16
61
+ model.lin1_size:
62
+ type: categorical
63
+ choices:
64
+ - 32
65
+ - 72
66
+ - 128
67
+ model.lin2_size:
68
+ type: categorical
69
+ choices:
70
+ - 32
71
+ - 72
72
+ - 128
73
+ help:
74
+ app_name: ${hydra.job.name}
75
+ header: '${hydra.help.app_name} is powered by Hydra.
76
+
77
+ '
78
+ footer: 'Powered by Hydra (https://hydra.cc)
79
+
80
+ Use --hydra-help to view Hydra specific help
81
+
82
+ '
83
+ template: '${hydra.help.header}
84
+
85
+ == Configuration groups ==
86
+
87
+ Compose your configuration from those groups (group=option)
88
+
89
+
90
+ $APP_CONFIG_GROUPS
91
+
92
+
93
+ == Config ==
94
+
95
+ Override anything in the config (foo.bar=value)
96
+
97
+
98
+ $CONFIG
99
+
100
+
101
+ ${hydra.help.footer}
102
+
103
+ '
104
+ hydra_help:
105
+ template: 'Hydra (${hydra.runtime.version})
106
+
107
+ See https://hydra.cc for more info.
108
+
109
+
110
+ == Flags ==
111
+
112
+ $FLAGS_HELP
113
+
114
+
115
+ == Configuration groups ==
116
+
117
+ Compose your configuration from those groups (For example, append hydra/job_logging=disabled
118
+ to command line)
119
+
120
+
121
+ $HYDRA_CONFIG_GROUPS
122
+
123
+
124
+ Use ''--cfg hydra'' to Show the Hydra config.
125
+
126
+ '
127
+ hydra_help: ???
128
+ hydra_logging:
129
+ version: 1
130
+ formatters:
131
+ colorlog:
132
+ (): colorlog.ColoredFormatter
133
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
134
+ handlers:
135
+ console:
136
+ class: logging.StreamHandler
137
+ formatter: colorlog
138
+ stream: ext://sys.stdout
139
+ root:
140
+ level: INFO
141
+ handlers:
142
+ - console
143
+ disable_existing_loggers: false
144
+ job_logging:
145
+ version: 1
146
+ formatters:
147
+ simple:
148
+ format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
149
+ colorlog:
150
+ (): colorlog.ColoredFormatter
151
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
152
+ - %(message)s'
153
+ log_colors:
154
+ DEBUG: purple
155
+ INFO: green
156
+ WARNING: yellow
157
+ ERROR: red
158
+ CRITICAL: red
159
+ handlers:
160
+ console:
161
+ class: logging.StreamHandler
162
+ formatter: colorlog
163
+ stream: ext://sys.stdout
164
+ file:
165
+ class: logging.FileHandler
166
+ formatter: simple
167
+ filename: ${hydra.job.name}.log
168
+ root:
169
+ level: INFO
170
+ handlers:
171
+ - console
172
+ - file
173
+ disable_existing_loggers: false
174
+ env: {}
175
+ searchpath: []
176
+ callbacks: {}
177
+ output_subdir: .hydra
178
+ overrides:
179
+ hydra: []
180
+ task:
181
+ - datamodule.batch_size=64
182
+ - model.lr=0.005913730093886128
183
+ - model.conv1_size=3
184
+ - model.conv1_channels=9
185
+ - model.conv2_size=3
186
+ - model.conv2_channels=6
187
+ - model.lin1_size=32
188
+ - model.lin2_size=72
189
+ - hparams_search=focusConvMSE_150.yaml
190
+ job:
191
+ name: train
192
+ override_dirname: datamodule.batch_size=64,hparams_search=focusConvMSE_150.yaml,model.conv1_channels=9,model.conv1_size=3,model.conv2_channels=6,model.conv2_size=3,model.lin1_size=32,model.lin2_size=72,model.lr=0.005913730093886128
193
+ id: '12'
194
+ num: 12
195
+ config_name: train.yaml
196
+ env_set: {}
197
+ env_copy: []
198
+ config:
199
+ override_dirname:
200
+ kv_sep: '='
201
+ item_sep: ','
202
+ exclude_keys: []
203
+ runtime:
204
+ version: 1.1.2
205
+ cwd: /usr/src/app
206
+ config_sources:
207
+ - path: hydra.conf
208
+ schema: pkg
209
+ provider: hydra
210
+ - path: /usr/src/app/configs
211
+ schema: file
212
+ provider: main
213
+ - path: hydra_plugins.hydra_colorlog.conf
214
+ schema: pkg
215
+ provider: hydra-colorlog
216
+ - path: ''
217
+ schema: structured
218
+ provider: schema
219
+ choices:
220
+ local: default.yaml
221
+ hparams_search: focusConvMSE_150.yaml
222
+ debug: null
223
+ experiment: null
224
+ log_dir: default.yaml
225
+ trainer: long.yaml
226
+ logger: many_loggers
227
+ callbacks: default.yaml
228
+ model: focusConv_150.yaml
229
+ datamodule: focus150.yaml
230
+ hydra/env: default
231
+ hydra/callbacks: null
232
+ hydra/job_logging: colorlog
233
+ hydra/hydra_logging: colorlog
234
+ hydra/hydra_help: default
235
+ hydra/help: default
236
+ hydra/sweeper: optuna
237
+ hydra/sweeper/sampler: tpe
238
+ hydra/launcher: basic
239
+ hydra/output: default
240
+ verbose: false
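The sweeper block in this hydra.yaml configures hydra-optuna-sweeper with a seeded TPE sampler, 20 trials, and a mixed categorical/float search space over the datamodule and model options. For illustration only, a stand-alone Optuna study with the same sampler and search space could be sketched as follows (the actual sweep was driven by Hydra's multirun, not this script):

```python
# Illustration of the search space above as a raw Optuna study.
# The objective body (training the model and returning val/mae_best) is a stub.
import optuna


def objective(trial: optuna.Trial) -> float:
    trial.suggest_categorical("datamodule.batch_size", [64, 128])
    trial.suggest_float("model.lr", 1e-4, 1e-2)
    trial.suggest_categorical("model.conv1_size", [3, 5, 7])
    trial.suggest_categorical("model.conv1_channels", [3, 6, 9])
    trial.suggest_categorical("model.conv2_size", [3, 5, 7])
    trial.suggest_categorical("model.conv2_channels", [6, 11, 16])
    trial.suggest_categorical("model.lin1_size", [32, 72, 128])
    trial.suggest_categorical("model.lin2_size", [32, 72, 128])
    raise NotImplementedError("train with these settings and return val/mae_best")


study = optuna.create_study(
    study_name="focusConvMSE_150_hyperparameter_search",
    direction="minimize",
    sampler=optuna.samplers.TPESampler(seed=12345, n_startup_trials=10, n_ei_candidates=24),
)
# study.optimize(objective, n_trials=20)
```

With n_startup_trials=10 out of 20 trials, half of the budget is spent on random exploration before the TPE model starts guiding the sampling.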
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/12/.hydra/overrides.yaml ADDED
@@ -0,0 +1,9 @@
1
+ - datamodule.batch_size=64
2
+ - model.lr=0.005913730093886128
3
+ - model.conv1_size=3
4
+ - model.conv1_channels=9
5
+ - model.conv2_size=3
6
+ - model.conv2_channels=6
7
+ - model.lin1_size=32
8
+ - model.lin2_size=72
9
+ - hparams_search=focusConvMSE_150.yaml
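overrides.yaml records the exact overrides the sweeper picked for this trial. Assuming the repository's configs/ directory is available, the same configuration could be re-composed outside the multirun with Hydra's Compose API, for example:

```python
# Hypothetical sketch: re-compose this trial's configuration with the recorded
# overrides. Assumes the repository's configs/ directory (with train.yaml).
from hydra import compose, initialize

overrides = [
    "datamodule.batch_size=64",
    "model.lr=0.005913730093886128",
    "model.conv1_size=3",
    "model.conv1_channels=9",
    "model.conv2_size=3",
    "model.conv2_channels=6",
    "model.lin1_size=32",
    "model.lin2_size=72",
    # the hparams_search=focusConvMSE_150.yaml entry is omitted here;
    # it only configures the sweeper itself.
]

with initialize(config_path="configs"):
    cfg = compose(config_name="train", overrides=overrides)

print(cfg.model.lr)  # 0.005913730093886128
```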
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/12/checkpoints/epoch_054.ckpt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2f01bf299f40f500b6e6a9deb78ff638c1f9001b64a98321cabc2f2d073403dc
3
+ size 2607173
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/12/checkpoints/last.ckpt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:367613a9be5bc652ae90deee8568616cdde3a8bda44d6cad26b48011be6dd664
3
+ size 2607173
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/12/csv/version_0/hparams.yaml ADDED
@@ -0,0 +1,54 @@
1
+ trainer:
2
+ _target_: pytorch_lightning.Trainer
3
+ gpus: 1
4
+ min_epochs: 1
5
+ max_epochs: 100
6
+ resume_from_checkpoint: null
7
+ model:
8
+ _target_: src.models.focus_conv_module.FocusConvLitModule
9
+ image_size: 150
10
+ pool_size: 2
11
+ conv1_size: 3
12
+ conv1_channels: 9
13
+ conv2_size: 3
14
+ conv2_channels: 6
15
+ lin1_size: 32
16
+ lin2_size: 72
17
+ output_size: 1
18
+ lr: 0.005913730093886128
19
+ weight_decay: 0.0005
20
+ datamodule:
21
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
22
+ data_dir: /usr/src/app/data/focus150
23
+ csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
24
+ csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
25
+ csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
26
+ batch_size: 64
27
+ num_workers: 0
28
+ pin_memory: false
29
+ seed: null
30
+ callbacks:
31
+ model_checkpoint:
32
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
33
+ monitor: val/mae
34
+ mode: min
35
+ save_top_k: 1
36
+ save_last: true
37
+ verbose: false
38
+ dirpath: checkpoints/
39
+ filename: epoch_{epoch:03d}
40
+ auto_insert_metric_name: false
41
+ early_stopping:
42
+ _target_: pytorch_lightning.callbacks.EarlyStopping
43
+ monitor: val/mae
44
+ mode: min
45
+ patience: 100
46
+ min_delta: 0
47
+ model_summary:
48
+ _target_: pytorch_lightning.callbacks.RichModelSummary
49
+ max_depth: -1
50
+ rich_progress_bar:
51
+ _target_: pytorch_lightning.callbacks.RichProgressBar
52
+ model/params/total: 216201
53
+ model/params/trainable: 216201
54
+ model/params/non_trainable: 0
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/12/csv/version_0/metrics.csv ADDED
@@ -0,0 +1,202 @@
1
+ val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae
2
+ 0.6214974522590637,0.7471162676811218,0.051749784499406815,0,47,,,,
3
+ ,,,1,47,54.98261642456055,3.5676584243774414,,
4
+ 0.024626893922686577,0.11367996037006378,0.051749784499406815,1,95,,,,
5
+ ,,,2,95,0.1092231273651123,0.24053218960762024,,
6
+ 0.0046891882084310055,0.04559226334095001,0.04559226334095001,2,143,,,,
7
+ ,,,3,143,0.006611417979001999,0.059760935604572296,,
8
+ 0.0017719838069751859,0.03023330681025982,0.03023330681025982,3,191,,,,
9
+ ,,,4,191,0.001903047552332282,0.03377484902739525,,
10
+ 0.00097160495352,0.02466624043881893,0.02466624043881893,4,239,,,,
11
+ ,,,5,239,0.0009447758784517646,0.024573080241680145,,
12
+ 0.0007414190913550556,0.02164529822766781,0.02164529822766781,5,287,,,,
13
+ ,,,6,287,0.0006642084335908294,0.02101486176252365,,
14
+ 0.000648503249976784,0.02049391344189644,0.02049391344189644,6,335,,,,
15
+ ,,,7,335,0.0005618844297714531,0.01974571868777275,,
16
+ 0.000583428714890033,0.01970207691192627,0.01970207691192627,7,383,,,,
17
+ ,,,8,383,0.0005463646375574172,0.019522203132510185,,
18
+ 0.0005508114118129015,0.01937088742852211,0.01937088742852211,8,431,,,,
19
+ ,,,9,431,0.0005175085971131921,0.018993718549609184,,
20
+ 0.0005528865149244666,0.019159140065312386,0.019159140065312386,9,479,,,,
21
+ ,,,10,479,0.0004887341056019068,0.018611760810017586,,
22
+ 0.0005367018165998161,0.019745565950870514,0.019159140065312386,10,527,,,,
23
+ ,,,11,527,0.00048508282634429634,0.01857646182179451,,
24
+ 0.0005495632067322731,0.01913735456764698,0.01913735456764698,11,575,,,,
25
+ ,,,12,575,0.00047233805526047945,0.018383866176009178,,
26
+ 0.0005554938688874245,0.018981562927365303,0.018981562927365303,12,623,,,,
27
+ ,,,13,623,0.00048019466339610517,0.018446439877152443,,
28
+ 0.0005659571033902466,0.020243771374225616,0.018981562927365303,13,671,,,,
29
+ ,,,14,671,0.00047845090739428997,0.018349003046751022,,
30
+ 0.000490667182020843,0.018612753599882126,0.018612753599882126,14,719,,,,
31
+ ,,,15,719,0.00046140895574353635,0.01810799166560173,,
32
+ 0.0006346730515360832,0.019904447719454765,0.018612753599882126,15,767,,,,
33
+ ,,,16,767,0.0005025239079259336,0.01880612224340439,,
34
+ 0.0004915434983558953,0.018407391384243965,0.018407391384243965,16,815,,,,
35
+ ,,,17,815,0.0004911725991405547,0.018423493951559067,,
36
+ 0.0005361050134524703,0.019668009132146835,0.018407391384243965,17,863,,,,
37
+ ,,,18,863,0.0004812282568309456,0.018325747922062874,,
38
+ 0.0005612117820419371,0.019221989437937737,0.018407391384243965,18,911,,,,
39
+ ,,,19,911,0.0005307355895638466,0.018876703456044197,,
40
+ 0.0004970837035216391,0.018836118280887604,0.018407391384243965,19,959,,,,
41
+ ,,,20,959,0.00047979317605495453,0.018256444483995438,,
42
+ 0.000561133841983974,0.01906445249915123,0.018407391384243965,20,1007,,,,
43
+ ,,,21,1007,0.0004872012650594115,0.018274514004588127,,
44
+ 0.00048561973380856216,0.0185296181589365,0.018407391384243965,21,1055,,,,
45
+ ,,,22,1055,0.0004583315458148718,0.017928218469023705,,
46
+ 0.0006291944300755858,0.020076945424079895,0.018407391384243965,22,1103,,,,
47
+ ,,,23,1103,0.00044877041364088655,0.017807049676775932,,
48
+ 0.0004673828952945769,0.018109111115336418,0.018109111115336418,23,1151,,,,
49
+ ,,,24,1151,0.00043755475780926645,0.017662618309259415,,
50
+ 0.0004707105690613389,0.018253739923238754,0.018109111115336418,24,1199,,,,
51
+ ,,,25,1199,0.00045014385250397027,0.017761170864105225,,
52
+ 0.0004957315977662802,0.018719036132097244,0.018109111115336418,25,1247,,,,
53
+ ,,,26,1247,0.0004227583121974021,0.01726197823882103,,
54
+ 0.0005920422845520079,0.01969858631491661,0.018109111115336418,26,1295,,,,
55
+ ,,,27,1295,0.0004672000359278172,0.018030516803264618,,
56
+ 0.00047983432887122035,0.018343815580010414,0.018109111115336418,27,1343,,,,
57
+ ,,,28,1343,0.000487330777104944,0.018348004668951035,,
58
+ 0.0005549840861931443,0.019585980102419853,0.018109111115336418,28,1391,,,,
59
+ ,,,29,1391,0.00043545971857383847,0.017473947256803513,,
60
+ 0.00047767863725312054,0.018152261152863503,0.018109111115336418,29,1439,,,,
61
+ ,,,30,1439,0.0004693268856499344,0.01803048700094223,,
62
+ 0.0005322074866853654,0.018905622884631157,0.018109111115336418,30,1487,,,,
63
+ ,,,31,1487,0.00046647689305245876,0.017939887940883636,,
64
+ 0.00046985011431388557,0.018060753121972084,0.018060753121972084,31,1535,,,,
65
+ ,,,32,1535,0.00043252212344668806,0.017400292679667473,,
66
+ 0.0005201464518904686,0.018805604428052902,0.018060753121972084,32,1583,,,,
67
+ ,,,33,1583,0.00045904898433946073,0.01784016378223896,,
68
+ 0.000468980724690482,0.018035506829619408,0.018035506829619408,33,1631,,,,
69
+ ,,,34,1631,0.00044194370275363326,0.01747795194387436,,
70
+ 0.000503936258610338,0.018557855859398842,0.018035506829619408,34,1679,,,,
71
+ ,,,35,1679,0.0004662636492867023,0.017913851886987686,,
72
+ 0.00046571611892431974,0.017930952832102776,0.017930952832102776,35,1727,,,,
73
+ ,,,36,1727,0.0005562683800235391,0.019239461049437523,,
74
+ 0.0004745067853946239,0.018086634576320648,0.017930952832102776,36,1775,,,,
75
+ ,,,37,1775,0.0004486727702897042,0.01754455454647541,,
76
+ 0.00046081989421509206,0.017846237868070602,0.017846237868070602,37,1823,,,,
77
+ ,,,38,1823,0.0004972383030690253,0.018314693123102188,,
78
+ 0.0004835955915041268,0.018191315233707428,0.017846237868070602,38,1871,,,,
79
+ ,,,39,1871,0.0004679520206991583,0.017779584974050522,,
80
+ 0.0005488524911925197,0.019241761416196823,0.017846237868070602,39,1919,,,,
81
+ ,,,40,1919,0.00043179592466913164,0.017186377197504044,,
82
+ 0.0004880795895587653,0.018342090770602226,0.017846237868070602,40,1967,,,,
83
+ ,,,41,1967,0.0004102094972040504,0.017018727958202362,,
84
+ 0.0005965570453554392,0.019890347495675087,0.017846237868070602,41,2015,,,,
85
+ ,,,42,2015,0.0004457918112166226,0.017404114827513695,,
86
+ 0.0006263802060857415,0.02037600241601467,0.017846237868070602,42,2063,,,,
87
+ ,,,43,2063,0.0005017647054046392,0.018403148278594017,,
88
+ 0.000503115588799119,0.018492616713047028,0.017846237868070602,43,2111,,,,
89
+ ,,,44,2111,0.0007137061911635101,0.02132233791053295,,
90
+ 0.0005553578375838697,0.01921975240111351,0.017846237868070602,44,2159,,,,
91
+ ,,,45,2159,0.00042427596054039896,0.01704493537545204,,
92
+ 0.0004870813281740993,0.018283123150467873,0.017846237868070602,45,2207,,,,
93
+ ,,,46,2207,0.000430095853516832,0.017181141301989555,,
94
+ 0.00046227785060182214,0.01781376823782921,0.01781376823782921,46,2255,,,,
95
+ ,,,47,2255,0.0004148777516093105,0.016909994184970856,,
96
+ 0.0005123030859977007,0.018533220514655113,0.01781376823782921,47,2303,,,,
97
+ ,,,48,2303,0.0004378673038445413,0.017237214371562004,,
98
+ 0.0005502207786776125,0.019131461158394814,0.01781376823782921,48,2351,,,,
99
+ ,,,49,2351,0.00048156894627027214,0.01796054095029831,,
100
+ 0.0005581609439104795,0.01946868561208248,0.01781376823782921,49,2399,,,,
101
+ ,,,50,2399,0.0005329966079443693,0.018821362406015396,,
102
+ 0.0004844588693231344,0.018173998221755028,0.01781376823782921,50,2447,,,,
103
+ ,,,51,2447,0.00044822730706073344,0.017376136034727097,,
104
+ 0.0006340101244859397,0.020407659932971,0.01781376823782921,51,2495,,,,
105
+ ,,,52,2495,0.0004119820077903569,0.016853779554367065,,
106
+ 0.000485608063172549,0.01826108619570732,0.01781376823782921,52,2543,,,,
107
+ ,,,53,2543,0.0008662648615427315,0.023273319005966187,,
108
+ 0.0011644745245575905,0.027415774762630463,0.01781376823782921,53,2591,,,,
109
+ ,,,54,2591,0.0006078333826735616,0.02011641301214695,,
110
+ 0.00045688680256716907,0.017704259604215622,0.017704259604215622,54,2639,,,,
111
+ ,,,55,2639,0.00045504223089665174,0.01755187287926674,,
112
+ 0.0004869333060923964,0.018115131184458733,0.017704259604215622,55,2687,,,,
113
+ ,,,56,2687,0.0005906501319259405,0.019412167370319366,,
114
+ 0.0005366867408156395,0.018784988671541214,0.017704259604215622,56,2735,,,,
115
+ ,,,57,2735,0.0005746602546423674,0.01938503608107567,,
116
+ 0.0005071545601822436,0.018510816618800163,0.017704259604215622,57,2783,,,,
117
+ ,,,58,2783,0.0004790645616594702,0.01777295581996441,,
118
+ 0.000738093804102391,0.022203614935278893,0.017704259604215622,58,2831,,,,
119
+ ,,,59,2831,0.0006338849198073149,0.02024722658097744,,
120
+ 0.0005185737973079085,0.018660763278603554,0.017704259604215622,59,2879,,,,
121
+ ,,,60,2879,0.00046805432066321373,0.017542803660035133,,
122
+ 0.0004629670293070376,0.01779290847480297,0.017704259604215622,60,2927,,,,
123
+ ,,,61,2927,0.0004429595428518951,0.017300989478826523,,
124
+ 0.0004779730224981904,0.017974132671952248,0.017704259604215622,61,2975,,,,
125
+ ,,,62,2975,0.0004106912820134312,0.0167682021856308,,
126
+ 0.0006325808935798705,0.020507998764514923,0.017704259604215622,62,3023,,,,
127
+ ,,,63,3023,0.00041176777449436486,0.01666209101676941,,
128
+ 0.0005173715180717409,0.018566908314824104,0.017704259604215622,63,3071,,,,
129
+ ,,,64,3071,0.00039108272176235914,0.016448594629764557,,
130
+ 0.0005050273030065,0.018393220379948616,0.017704259604215622,64,3119,,,,
131
+ ,,,65,3119,0.0004669825721066445,0.017649026587605476,,
132
+ 0.0005565526662394404,0.019429463893175125,0.017704259604215622,65,3167,,,,
133
+ ,,,66,3167,0.00046827681944705546,0.0176194217056036,,
134
+ 0.000834873178973794,0.0233157929033041,0.017704259604215622,66,3215,,,,
135
+ ,,,67,3215,0.0005472868215292692,0.018929388374090195,,
136
+ 0.0005160450236871839,0.018566986545920372,0.017704259604215622,67,3263,,,,
137
+ ,,,68,3263,0.0008321892819367349,0.022697987034916878,,
138
+ 0.0006538699381053448,0.020600052550435066,0.017704259604215622,68,3311,,,,
139
+ ,,,69,3311,0.0009006947511807084,0.023422785103321075,,
140
+ 0.0013923393562436104,0.029745005071163177,0.017704259604215622,69,3359,,,,
141
+ ,,,70,3359,0.00045100782881490886,0.017245667055249214,,
142
+ 0.0007816018187440932,0.02291208878159523,0.017704259604215622,70,3407,,,,
143
+ ,,,71,3407,0.000576465972699225,0.01917918212711811,,
144
+ 0.0005393499159254134,0.0188626516610384,0.017704259604215622,71,3455,,,,
145
+ ,,,72,3455,0.0004668397014029324,0.01751832105219364,,
146
+ 0.0006855791434645653,0.02113465406000614,0.017704259604215622,72,3503,,,,
147
+ ,,,73,3503,0.0005413642502389848,0.018528936430811882,,
148
+ 0.0004608482413459569,0.017831595614552498,0.017704259604215622,73,3551,,,,
149
+ ,,,74,3551,0.0013023515930399299,0.026685724034905434,,
150
+ 0.0004951225128024817,0.01832485757768154,0.017704259604215622,74,3599,,,,
151
+ ,,,75,3599,0.0005071918130852282,0.018212169408798218,,
152
+ 0.0005755555466748774,0.019464295357465744,0.017704259604215622,75,3647,,,,
153
+ ,,,76,3647,0.0004369583330117166,0.017049778252840042,,
154
+ 0.0005622752942144871,0.019192274659872055,0.017704259604215622,76,3695,,,,
155
+ ,,,77,3695,0.00040735132643021643,0.016620483249425888,,
156
+ 0.0005157670238986611,0.018762199208140373,0.017704259604215622,77,3743,,,,
157
+ ,,,78,3743,0.0006046847556717694,0.019482189789414406,,
158
+ 0.0004655899538192898,0.01782340183854103,0.017704259604215622,78,3791,,,,
159
+ ,,,79,3791,0.00040650140726938844,0.016597220674157143,,
160
+ 0.0005471227923408151,0.019175032153725624,0.017704259604215622,79,3839,,,,
161
+ ,,,80,3839,0.0003822071885224432,0.016074717044830322,,
162
+ 0.0004813236591871828,0.018047591671347618,0.017704259604215622,80,3887,,,,
163
+ ,,,81,3887,0.0004354139673523605,0.016902578994631767,,
164
+ 0.0006498921429738402,0.020716410130262375,0.017704259604215622,81,3935,,,,
165
+ ,,,82,3935,0.0003958173911087215,0.016273755580186844,,
166
+ 0.00048002219409681857,0.0180223286151886,0.017704259604215622,82,3983,,,,
167
+ ,,,83,3983,0.00042998467688448727,0.016923092305660248,,
168
+ 0.0005463417037390172,0.019057003781199455,0.017704259604215622,83,4031,,,,
169
+ ,,,84,4031,0.0004717276315204799,0.017400458455085754,,
170
+ 0.0005609734798781574,0.019295504316687584,0.017704259604215622,84,4079,,,,
171
+ ,,,85,4079,0.00041594228241592646,0.016663821414113045,,
172
+ 0.00046886541531421244,0.017819806933403015,0.017704259604215622,85,4127,,,,
173
+ ,,,86,4127,0.0004251051286701113,0.01660815067589283,,
174
+ 0.0005137126427143812,0.01848694309592247,0.017704259604215622,86,4175,,,,
175
+ ,,,87,4175,0.0004317341954447329,0.016820469871163368,,
176
+ 0.0006222209194675088,0.020034074783325195,0.017704259604215622,87,4223,,,,
177
+ ,,,88,4223,0.0004450256528798491,0.016990093514323235,,
178
+ 0.000551124569028616,0.019276859238743782,0.017704259604215622,88,4271,,,,
179
+ ,,,89,4271,0.00045740167843177915,0.017099594697356224,,
180
+ 0.0004813903651665896,0.018022313714027405,0.017704259604215622,89,4319,,,,
181
+ ,,,90,4319,0.0003664001415017992,0.01567823812365532,,
182
+ 0.0005113662919029593,0.01844743825495243,0.017704259604215622,90,4367,,,,
183
+ ,,,91,4367,0.0003943891788367182,0.01618720404803753,,
184
+ 0.000544743612408638,0.018945906311273575,0.017704259604215622,91,4415,,,,
185
+ ,,,92,4415,0.0003673900500871241,0.015821335837244987,,
186
+ 0.0006192549481056631,0.020074626430869102,0.017704259604215622,92,4463,,,,
187
+ ,,,93,4463,0.00040770453051663935,0.016325173899531364,,
188
+ 0.000523777271155268,0.018667496740818024,0.017704259604215622,93,4511,,,,
189
+ ,,,94,4511,0.0006110144895501435,0.019296737387776375,,
190
+ 0.0005517604877240956,0.019026534631848335,0.017704259604215622,94,4559,,,,
191
+ ,,,95,4559,0.00047071711742319167,0.017456240952014923,,
192
+ 0.0004667578905355185,0.017799997702240944,0.017704259604215622,95,4607,,,,
193
+ ,,,96,4607,0.0003633774758782238,0.015501819550991058,,
194
+ 0.0005113615188747644,0.018396787345409393,0.017704259604215622,96,4655,,,,
195
+ ,,,97,4655,0.0003719012893270701,0.015829036012291908,,
196
+ 0.0004747837665490806,0.017793558537960052,0.017704259604215622,97,4703,,,,
197
+ ,,,98,4703,0.00040522500057704747,0.016278425231575966,,
198
+ 0.00049640575889498,0.018105769529938698,0.017704259604215622,98,4751,,,,
199
+ ,,,99,4751,0.0003585604135878384,0.01551875565201044,,
200
+ 0.00047644731239415705,0.017881296575069427,0.017704259604215622,99,4799,,,,
201
+ ,,,100,4799,0.0004027250688523054,0.01629386469721794,,
202
+ ,,,54,4800,,,0.0004482637159526348,0.01756158284842968
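The metrics.csv written by the CSVLogger mixes validation rows (val/* filled, train/* empty), training rows, and a final test row, so each row uses a disjoint subset of the columns. A small pandas sketch for summarising such a file:

```python
# Summarise a CSVLogger metrics file like the one above.
import pandas as pd

df = pd.read_csv("metrics.csv")
val = df.dropna(subset=["val/mae"])            # rows written at validation time
best = val.loc[val["val/mae"].idxmin()]
print(f"best val/mae={best['val/mae']:.6f} at epoch {int(best['epoch'])} (step {int(best['step'])})")
print(f"val/mae_best (running minimum)={val['val/mae_best'].iloc[-1]:.6f}")
print(f"test/mae={df['test/mae'].dropna().iloc[-1]:.6f}")
```

val/mae_best is the running minimum tracked during training; it is the quantity the sweeper's optimized_metric refers to.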
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/12/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652261260.30858e6037f6.1.24 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9137efd367e3e2e0d9e52b5da2c431a80b981d0e1cabfb89f6e2f8ae5c78b95f
3
+ size 36796
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/12/tensorboard/focusConvMSE_150_hyperparameter_search/events.out.tfevents.1652261462.30858e6037f6.1.25 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3bb27a3692d9f5607bd3bc3cf104b7ca506ac3e72f617df880daf78b18f67664
3
+ size 179
logs/experiments/multiruns/focusConvMSE_150_hyperparameter_search/2022-05-11_08-46-15/12/tensorboard/focusConvMSE_150_hyperparameter_search/hparams.yaml ADDED
@@ -0,0 +1,54 @@
1
+ trainer:
2
+ _target_: pytorch_lightning.Trainer
3
+ gpus: 1
4
+ min_epochs: 1
5
+ max_epochs: 100
6
+ resume_from_checkpoint: null
7
+ model:
8
+ _target_: src.models.focus_conv_module.FocusConvLitModule
9
+ image_size: 150
10
+ pool_size: 2
11
+ conv1_size: 3
12
+ conv1_channels: 9
13
+ conv2_size: 3
14
+ conv2_channels: 6
15
+ lin1_size: 32
16
+ lin2_size: 72
17
+ output_size: 1
18
+ lr: 0.005913730093886128
19
+ weight_decay: 0.0005
20
+ datamodule:
21
+ _target_: src.datamodules.focus_datamodule.FocusDataModule
22
+ data_dir: /usr/src/app/data/focus150
23
+ csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
24
+ csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
25
+ csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
26
+ batch_size: 64
27
+ num_workers: 0
28
+ pin_memory: false
29
+ seed: null
30
+ callbacks:
31
+ model_checkpoint:
32
+ _target_: pytorch_lightning.callbacks.ModelCheckpoint
33
+ monitor: val/mae
34
+ mode: min
35
+ save_top_k: 1
36
+ save_last: true
37
+ verbose: false
38
+ dirpath: checkpoints/
39
+ filename: epoch_{epoch:03d}
40
+ auto_insert_metric_name: false
41
+ early_stopping:
42
+ _target_: pytorch_lightning.callbacks.EarlyStopping
43
+ monitor: val/mae
44
+ mode: min
45
+ patience: 100
46
+ min_delta: 0
47
+ model_summary:
48
+ _target_: pytorch_lightning.callbacks.RichModelSummary
49
+ max_depth: -1
50
+ rich_progress_bar:
51
+ _target_: pytorch_lightning.callbacks.RichProgressBar
52
+ model/params/total: 216201
53
+ model/params/trainable: 216201
54
+ model/params/non_trainable: 0