Commit aa73104 by Hannes Kuchelmeister (parent: 798f4db)

run fc models hyperparameter tuning again on MSE model
This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full change set.

Changed files:
- configs/hparams_search/{focusMAE_150_optuna.yaml → focusMSE_150_optuna.yaml} +2 -2
- configs/model/focus150.yaml +1 -1
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/0/.hydra/config.yaml +77 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/0/.hydra/hydra.yaml +225 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/0/.hydra/overrides.yaml +6 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/0/checkpoints/epoch_085.ckpt +3 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/0/checkpoints/last.ckpt +3 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/0/csv/version_0/hparams.yaml +50 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/0/csv/version_0/metrics.csv +202 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/0/tensorboard/focusMSE_150_hyperparameter_search/events.out.tfevents.1652264215.873c0dcdd84d.1.0 +3 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/0/tensorboard/focusMSE_150_hyperparameter_search/events.out.tfevents.1652264689.873c0dcdd84d.1.1 +3 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/0/tensorboard/focusMSE_150_hyperparameter_search/hparams.yaml +50 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/1/.hydra/config.yaml +77 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/1/.hydra/hydra.yaml +225 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/1/.hydra/overrides.yaml +6 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/1/checkpoints/epoch_049.ckpt +3 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/1/checkpoints/last.ckpt +3 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/1/csv/version_0/hparams.yaml +50 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/1/csv/version_0/metrics.csv +202 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/1/tensorboard/focusMSE_150_hyperparameter_search/events.out.tfevents.1652264690.873c0dcdd84d.1.2 +3 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/1/tensorboard/focusMSE_150_hyperparameter_search/events.out.tfevents.1652265463.873c0dcdd84d.1.3 +3 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/1/tensorboard/focusMSE_150_hyperparameter_search/hparams.yaml +50 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/10/.hydra/config.yaml +77 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/10/.hydra/hydra.yaml +225 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/10/.hydra/overrides.yaml +6 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/10/checkpoints/epoch_043.ckpt +3 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/10/checkpoints/last.ckpt +3 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/10/csv/version_0/hparams.yaml +50 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/10/csv/version_0/metrics.csv +202 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/10/tensorboard/focusMSE_150_hyperparameter_search/events.out.tfevents.1652269144.873c0dcdd84d.1.20 +3 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/10/tensorboard/focusMSE_150_hyperparameter_search/events.out.tfevents.1652269613.873c0dcdd84d.1.21 +3 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/10/tensorboard/focusMSE_150_hyperparameter_search/hparams.yaml +50 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/11/.hydra/config.yaml +77 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/11/.hydra/hydra.yaml +225 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/11/.hydra/overrides.yaml +6 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/11/checkpoints/epoch_098.ckpt +3 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/11/checkpoints/last.ckpt +3 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/11/csv/version_0/hparams.yaml +50 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/11/csv/version_0/metrics.csv +202 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/11/tensorboard/focusMSE_150_hyperparameter_search/events.out.tfevents.1652269614.873c0dcdd84d.1.22 +3 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/11/tensorboard/focusMSE_150_hyperparameter_search/events.out.tfevents.1652269798.873c0dcdd84d.1.23 +3 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/11/tensorboard/focusMSE_150_hyperparameter_search/hparams.yaml +50 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/12/.hydra/config.yaml +77 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/12/.hydra/hydra.yaml +225 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/12/.hydra/overrides.yaml +6 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/12/checkpoints/epoch_048.ckpt +3 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/12/checkpoints/last.ckpt +3 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/12/csv/version_0/hparams.yaml +50 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/12/csv/version_0/metrics.csv +202 -0
- logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/12/tensorboard/focusMSE_150_hyperparameter_search/events.out.tfevents.1652269799.873c0dcdd84d.1.24 +3 -0
configs/hparams_search/{focusMAE_150_optuna.yaml → focusMSE_150_optuna.yaml}
RENAMED
@@ -12,7 +12,7 @@ defaults:
 # make sure this is the correct name of some metric logged in lightning module!
 optimized_metric: "val/mae_best"
 
-name: "
+name: "focusMSE_150_hyperparameter_search"
 
 # here we define Optuna hyperparameter search
 # it optimizes for value returned from function with @hydra.main decorator
@@ -35,7 +35,7 @@ hydra:
 direction: minimize
 
 # total number of runs that will be executed
-n_trials:
+n_trials: 20
 
 # choose Optuna hyperparameter sampler
 # docs: https://optuna.readthedocs.io/en/stable/reference/samplers.html
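As the overrides files later in this diff show, the sweep was launched through Hydra's multirun mode with hparams_search=focusMSE_150_optuna.yaml, and the resolved Optuna sweeper settings are recorded per trial in the .hydra/hydra.yaml files below. For orientation only, here is a minimal standalone Optuna sketch of the same search (TPE sampler seeded with 12345, direction minimize, 20 trials, and the batch-size/learning-rate/layer-size space from this config); run_trial is a hypothetical placeholder, not a function from this repository.

# Standalone sketch of the configured sweep; assumes only that Optuna is installed.
# run_trial() is a hypothetical stand-in for "train the FC model, return validation MAE".
import optuna


def run_trial(batch_size: int, lr: float, lin1: int, lin2: int, lin3: int) -> float:
    raise NotImplementedError  # placeholder: train the model and return val MAE


def objective(trial: optuna.Trial) -> float:
    batch_size = trial.suggest_categorical("datamodule.batch_size", [64, 128])
    lr = trial.suggest_float("model.lr", 0.0001, 0.2)
    lin1 = trial.suggest_categorical("model.lin1_size", [64, 128, 256, 512, 1024])
    lin2 = trial.suggest_categorical("model.lin2_size", [64, 128, 256, 512, 1024])
    lin3 = trial.suggest_categorical("model.lin3_size", [64, 128, 256, 512, 1024])
    return run_trial(batch_size, lr, lin1, lin2, lin3)


study = optuna.create_study(
    direction="minimize", sampler=optuna.samplers.TPESampler(seed=12345)
)
study.optimize(objective, n_trials=20)

In the project itself no objective function is written by hand: as the config comment above notes, the sweeper minimizes the value of optimized_metric ("val/mae_best") returned from the function decorated with @hydra.main.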
configs/model/focus150.yaml
CHANGED
@@ -1,4 +1,4 @@
-_target_: src.models.focus_module.
+_target_: src.models.focus_module.FocusMSELitModule
 
 input_size: 67500
 lin1_size: 256
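The implementation of src.models.focus_module.FocusMSELitModule is not part of this diff. As a rough orientation only, a fully connected LightningModule with the constructor arguments from configs/model/focus150.yaml and the metric names that appear in the logs below (train/loss, val/mae, val/mae_best, test/mae) might be sketched as follows; the layer layout, activations, and optimizer are assumptions, not the repository's code.

# Hedged sketch only: the real FocusMSELitModule is not shown in this commit.
# Constructor arguments and logged metric names are taken from the configs/logs in this diff.
import torch
from pytorch_lightning import LightningModule
from torchmetrics import MeanAbsoluteError, MinMetric


class FocusMSELitModuleSketch(LightningModule):
    def __init__(self, input_size=67500, lin1_size=256, lin2_size=256,
                 lin3_size=256, output_size=1, lr=0.001, weight_decay=0.0005):
        super().__init__()
        self.save_hyperparameters(logger=False)
        self.model = torch.nn.Sequential(            # activations are assumed, not confirmed
            torch.nn.Linear(input_size, lin1_size), torch.nn.ReLU(),
            torch.nn.Linear(lin1_size, lin2_size), torch.nn.ReLU(),
            torch.nn.Linear(lin2_size, lin3_size), torch.nn.ReLU(),
            torch.nn.Linear(lin3_size, output_size),
        )
        self.criterion = torch.nn.MSELoss()          # the "MSE model" of the commit message
        self.train_mae = MeanAbsoluteError()
        self.val_mae = MeanAbsoluteError()
        self.test_mae = MeanAbsoluteError()
        self.val_mae_best = MinMetric()              # logged as val/mae_best

    def forward(self, x):
        return self.model(x.flatten(start_dim=1))

    def step(self, batch):
        x, y = batch
        preds = self.forward(x).squeeze(-1)
        return self.criterion(preds, y), preds, y

    def training_step(self, batch, batch_idx):
        loss, preds, targets = self.step(batch)
        self.log("train/loss", loss)
        self.log("train/mae", self.train_mae(preds, targets))
        return loss

    def validation_step(self, batch, batch_idx):
        loss, preds, targets = self.step(batch)
        self.log("val/loss", loss)
        self.log("val/mae", self.val_mae(preds, targets))

    def validation_epoch_end(self, outputs):
        self.val_mae_best.update(self.val_mae.compute())
        self.log("val/mae_best", self.val_mae_best.compute(), on_epoch=True)
        self.val_mae.reset()

    def test_step(self, batch, batch_idx):
        loss, preds, targets = self.step(batch)
        self.log("test/loss", loss)
        self.log("test/mae", self.test_mae(preds, targets))

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(),
                                lr=self.hparams.lr,
                                weight_decay=self.hparams.weight_decay)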
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/0/.hydra/config.yaml
ADDED
@@ -0,0 +1,77 @@
original_work_dir: ${hydra:runtime.cwd}
data_dir: ${original_work_dir}/data
print_config: true
ignore_warnings: true
train: true
test: true
seed: null
name: focusMSE_150_hyperparameter_search
datamodule:
  _target_: src.datamodules.focus_datamodule.FocusDataModule
  data_dir: ${data_dir}/focus150
  csv_train_file: ${data_dir}/focus150/train_metadata.csv
  csv_val_file: ${data_dir}/focus150/validation_metadata.csv
  csv_test_file: ${data_dir}/focus150/test_metadata.csv
  batch_size: 64
  num_workers: 0
  pin_memory: false
model:
  _target_: src.models.focus_module.FocusMSELitModule
  input_size: 67500
  lin1_size: 512
  lin2_size: 512
  lin3_size: 1024
  output_size: 1
  lr: 0.03686537045425119
  weight_decay: 0.0005
callbacks:
  model_checkpoint:
    _target_: pytorch_lightning.callbacks.ModelCheckpoint
    monitor: val/mae
    mode: min
    save_top_k: 1
    save_last: true
    verbose: false
    dirpath: checkpoints/
    filename: epoch_{epoch:03d}
    auto_insert_metric_name: false
  early_stopping:
    _target_: pytorch_lightning.callbacks.EarlyStopping
    monitor: val/mae
    mode: min
    patience: 100
    min_delta: 0
  model_summary:
    _target_: pytorch_lightning.callbacks.RichModelSummary
    max_depth: -1
  rich_progress_bar:
    _target_: pytorch_lightning.callbacks.RichProgressBar
logger:
  csv:
    _target_: pytorch_lightning.loggers.csv_logs.CSVLogger
    save_dir: .
    name: csv/
    prefix: ''
  mlflow:
    _target_: pytorch_lightning.loggers.mlflow.MLFlowLogger
    experiment_name: ${name}
    tracking_uri: ${original_work_dir}/logs/mlflow/mlruns
    tags: null
    save_dir: ./mlruns
    prefix: ''
    artifact_location: null
  tensorboard:
    _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
    save_dir: tensorboard/
    name: null
    version: ${name}
    log_graph: false
    default_hp_metric: true
    prefix: ''
trainer:
  _target_: pytorch_lightning.Trainer
  gpus: 1
  min_epochs: 1
  max_epochs: 100
  resume_from_checkpoint: null
optimized_metric: val/mae_best
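Each trial directory stores the composed config shown above. In a lightning-hydra-template-style project, a config like this is typically consumed by instantiating every _target_ with hydra.utils.instantiate; the following is a hedged sketch of such a training entry point (the repository's actual train.py may differ), returning the optimized_metric that the Optuna sweeper minimizes.

# Sketch of how a composed config like the one above is usually turned into objects.
# Not the repository's actual train.py; assumes Hydra 1.1 and PyTorch Lightning are installed.
import hydra
from omegaconf import DictConfig


@hydra.main(config_path="configs", config_name="train.yaml")
def train(cfg: DictConfig) -> float:
    datamodule = hydra.utils.instantiate(cfg.datamodule)
    model = hydra.utils.instantiate(cfg.model)
    callbacks = [hydra.utils.instantiate(cb) for cb in cfg.callbacks.values()]
    loggers = [hydra.utils.instantiate(lg) for lg in cfg.logger.values()]
    trainer = hydra.utils.instantiate(cfg.trainer, callbacks=callbacks, logger=loggers)

    trainer.fit(model=model, datamodule=datamodule)
    if cfg.get("test"):
        trainer.test(model=model, datamodule=datamodule, ckpt_path="best")

    # the sweeper minimizes the value returned here (cfg.optimized_metric = "val/mae_best")
    return trainer.callback_metrics[cfg.optimized_metric].item()


if __name__ == "__main__":
    train()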
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/0/.hydra/hydra.yaml
ADDED
@@ -0,0 +1,225 @@
hydra:
  run:
    dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
  sweep:
    dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
    subdir: ${hydra.job.num}
  launcher:
    _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
  sweeper:
    sampler:
      _target_: optuna.samplers.TPESampler
      seed: 12345
      consider_prior: true
      prior_weight: 1.0
      consider_magic_clip: true
      consider_endpoints: false
      n_startup_trials: 10
      n_ei_candidates: 24
      multivariate: false
      warn_independent_sampling: true
    _target_: hydra_plugins.hydra_optuna_sweeper.optuna_sweeper.OptunaSweeper
    direction: minimize
    storage: null
    study_name: focusMAE_150_hyperparameter_search
    n_trials: 20
    n_jobs: 1
    search_space:
      datamodule.batch_size:
        type: categorical
        choices:
        - 64
        - 128
      model.lr:
        type: float
        low: 0.0001
        high: 0.2
      model.lin1_size:
        type: categorical
        choices:
        - 64
        - 128
        - 256
        - 512
        - 1024
      model.lin2_size:
        type: categorical
        choices:
        - 64
        - 128
        - 256
        - 512
        - 1024
      model.lin3_size:
        type: categorical
        choices:
        - 64
        - 128
        - 256
        - 512
        - 1024
  help:
    app_name: ${hydra.job.name}
    header: '${hydra.help.app_name} is powered by Hydra.

      '
    footer: 'Powered by Hydra (https://hydra.cc)

      Use --hydra-help to view Hydra specific help

      '
    template: '${hydra.help.header}

      == Configuration groups ==

      Compose your configuration from those groups (group=option)


      $APP_CONFIG_GROUPS


      == Config ==

      Override anything in the config (foo.bar=value)


      $CONFIG


      ${hydra.help.footer}

      '
  hydra_help:
    template: 'Hydra (${hydra.runtime.version})

      See https://hydra.cc for more info.


      == Flags ==

      $FLAGS_HELP


      == Configuration groups ==

      Compose your configuration from those groups (For example, append hydra/job_logging=disabled
      to command line)


      $HYDRA_CONFIG_GROUPS


      Use ''--cfg hydra'' to Show the Hydra config.

      '
    hydra_help: ???
  hydra_logging:
    version: 1
    formatters:
      colorlog:
        (): colorlog.ColoredFormatter
        format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
    handlers:
      console:
        class: logging.StreamHandler
        formatter: colorlog
        stream: ext://sys.stdout
    root:
      level: INFO
      handlers:
      - console
    disable_existing_loggers: false
  job_logging:
    version: 1
    formatters:
      simple:
        format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
      colorlog:
        (): colorlog.ColoredFormatter
        format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
          - %(message)s'
        log_colors:
          DEBUG: purple
          INFO: green
          WARNING: yellow
          ERROR: red
          CRITICAL: red
    handlers:
      console:
        class: logging.StreamHandler
        formatter: colorlog
        stream: ext://sys.stdout
      file:
        class: logging.FileHandler
        formatter: simple
        filename: ${hydra.job.name}.log
    root:
      level: INFO
      handlers:
      - console
      - file
    disable_existing_loggers: false
  env: {}
  searchpath: []
  callbacks: {}
  output_subdir: .hydra
  overrides:
    hydra: []
    task:
    - datamodule.batch_size=64
    - model.lr=0.03686537045425119
    - model.lin1_size=512
    - model.lin2_size=512
    - model.lin3_size=1024
    - hparams_search=focusMSE_150_optuna.yaml
  job:
    name: train
    override_dirname: datamodule.batch_size=64,hparams_search=focusMSE_150_optuna.yaml,model.lin1_size=512,model.lin2_size=512,model.lin3_size=1024,model.lr=0.03686537045425119
    id: '0'
    num: 0
    config_name: train.yaml
    env_set: {}
    env_copy: []
    config:
      override_dirname:
        kv_sep: '='
        item_sep: ','
        exclude_keys: []
  runtime:
    version: 1.1.2
    cwd: /usr/src/app
    config_sources:
    - path: hydra.conf
      schema: pkg
      provider: hydra
    - path: /usr/src/app/configs
      schema: file
      provider: main
    - path: hydra_plugins.hydra_colorlog.conf
      schema: pkg
      provider: hydra-colorlog
    - path: ''
      schema: structured
      provider: schema
    choices:
      local: default.yaml
      hparams_search: focusMSE_150_optuna.yaml
      debug: null
      experiment: null
      log_dir: default.yaml
      trainer: long.yaml
      logger: many_loggers
      callbacks: default.yaml
      model: focus150.yaml
      datamodule: focus150.yaml
      hydra/env: default
      hydra/callbacks: null
      hydra/job_logging: colorlog
      hydra/hydra_logging: colorlog
      hydra/hydra_help: default
      hydra/help: default
      hydra/sweeper: optuna
      hydra/sweeper/sampler: tpe
      hydra/launcher: basic
      hydra/output: default
  verbose: false
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/0/.hydra/overrides.yaml
ADDED
@@ -0,0 +1,6 @@
- datamodule.batch_size=64
- model.lr=0.03686537045425119
- model.lin1_size=512
- model.lin2_size=512
- model.lin3_size=1024
- hparams_search=focusMSE_150_optuna.yaml
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/0/checkpoints/epoch_085.ckpt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0d1a67b4da401a35ee406ad6561f4174a114de6526bee3b873b5d92ba0a1fdbb
size 424279302
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/0/checkpoints/last.ckpt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2f8026d168a4cd95f76889a1b00f191fa5f6d69af2cd941fb628eb5c8f27f2e1
size 424279302
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/0/csv/version_0/hparams.yaml
ADDED
@@ -0,0 +1,50 @@
trainer:
  _target_: pytorch_lightning.Trainer
  gpus: 1
  min_epochs: 1
  max_epochs: 100
  resume_from_checkpoint: null
model:
  _target_: src.models.focus_module.FocusMSELitModule
  input_size: 67500
  lin1_size: 512
  lin2_size: 512
  lin3_size: 1024
  output_size: 1
  lr: 0.03686537045425119
  weight_decay: 0.0005
datamodule:
  _target_: src.datamodules.focus_datamodule.FocusDataModule
  data_dir: /usr/src/app/data/focus150
  csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
  csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
  csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
  batch_size: 64
  num_workers: 0
  pin_memory: false
seed: null
callbacks:
  model_checkpoint:
    _target_: pytorch_lightning.callbacks.ModelCheckpoint
    monitor: val/mae
    mode: min
    save_top_k: 1
    save_last: true
    verbose: false
    dirpath: checkpoints/
    filename: epoch_{epoch:03d}
    auto_insert_metric_name: false
  early_stopping:
    _target_: pytorch_lightning.callbacks.EarlyStopping
    monitor: val/mae
    mode: min
    patience: 100
    min_delta: 0
  model_summary:
    _target_: pytorch_lightning.callbacks.RichModelSummary
    max_depth: -1
  rich_progress_bar:
    _target_: pytorch_lightning.callbacks.RichProgressBar
model/params/total: 35353601
model/params/trainable: 35353601
model/params/non_trainable: 0
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/0/csv/version_0/metrics.csv
ADDED
@@ -0,0 +1,202 @@
val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae
0.03579968959093094,0.16353444755077362,0.017112186178565025,0,47,,,,
,,,1,47,14.495015144348145,1.6559021472930908,,
0.023028317838907242,0.09784077852964401,0.017112186178565025,1,95,,,,
… (alternating per-epoch validation and training rows continue through epoch 99; 202 rows in total) …
0.00042423405102454126,0.017363552004098892,0.017112186178565025,99,4799,,,,
,,,100,4799,0.00041067838901653886,0.016979962587356567,,
,,,85,4800,,,0.0004141924437135458,0.016994459554553032
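The CSVLogger output above interleaves one row per validation epoch, one row per training epoch, and a final test row, using the columns from the header line. A small pandas sketch (pandas is assumed to be available; the path is this trial's metrics file) for pulling out the best validation MAE and the test metrics:

# Sketch for inspecting one trial's CSVLogger output (columns as in the header row above).
import pandas as pd

path = ("logs/experiments/multiruns/focusMSE_150_hyperparameter_search/"
        "2022-05-11_10-16-51/0/csv/version_0/metrics.csv")
df = pd.read_csv(path)

best_val = df["val/mae"].min()                      # best per-epoch validation MAE
best_epoch = int(df.loc[df["val/mae"].idxmin(), "epoch"])
test_row = df[df["test/mae"].notna()].iloc[-1]      # the single test row written after fit

print(f"best val/mae {best_val:.5f} at epoch {best_epoch}")
print(f"test/mae {test_row['test/mae']:.5f}, test/loss {test_row['test/loss']:.5f}")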
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/0/tensorboard/focusMSE_150_hyperparameter_search/events.out.tfevents.1652264215.873c0dcdd84d.1.0
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:613ad06cbaa099d07f604eba1300752a37ddd419e75954000e164a25d8149a1f
size 36562
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/0/tensorboard/focusMSE_150_hyperparameter_search/events.out.tfevents.1652264689.873c0dcdd84d.1.1
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:22f1f73d3be37cf9435b838b6ac99c5a22fd7c04aeba0f54f7adae2e842ad9a7
size 179
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/0/tensorboard/focusMSE_150_hyperparameter_search/hparams.yaml
ADDED
@@ -0,0 +1,50 @@
trainer:
  _target_: pytorch_lightning.Trainer
  gpus: 1
  min_epochs: 1
  max_epochs: 100
  resume_from_checkpoint: null
model:
  _target_: src.models.focus_module.FocusMSELitModule
  input_size: 67500
  lin1_size: 512
  lin2_size: 512
  lin3_size: 1024
  output_size: 1
  lr: 0.03686537045425119
  weight_decay: 0.0005
datamodule:
  _target_: src.datamodules.focus_datamodule.FocusDataModule
  data_dir: /usr/src/app/data/focus150
  csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
  csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
  csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
  batch_size: 64
  num_workers: 0
  pin_memory: false
seed: null
callbacks:
  model_checkpoint:
    _target_: pytorch_lightning.callbacks.ModelCheckpoint
    monitor: val/mae
    mode: min
    save_top_k: 1
    save_last: true
    verbose: false
    dirpath: checkpoints/
    filename: epoch_{epoch:03d}
    auto_insert_metric_name: false
  early_stopping:
    _target_: pytorch_lightning.callbacks.EarlyStopping
    monitor: val/mae
    mode: min
    patience: 100
    min_delta: 0
  model_summary:
    _target_: pytorch_lightning.callbacks.RichModelSummary
    max_depth: -1
  rich_progress_bar:
    _target_: pytorch_lightning.callbacks.RichProgressBar
model/params/total: 35353601
model/params/trainable: 35353601
model/params/non_trainable: 0
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/1/.hydra/config.yaml
ADDED
@@ -0,0 +1,77 @@
original_work_dir: ${hydra:runtime.cwd}
data_dir: ${original_work_dir}/data
print_config: true
ignore_warnings: true
train: true
test: true
seed: null
name: focusMSE_150_hyperparameter_search
datamodule:
  _target_: src.datamodules.focus_datamodule.FocusDataModule
  data_dir: ${data_dir}/focus150
  csv_train_file: ${data_dir}/focus150/train_metadata.csv
  csv_val_file: ${data_dir}/focus150/validation_metadata.csv
  csv_test_file: ${data_dir}/focus150/test_metadata.csv
  batch_size: 64
  num_workers: 0
  pin_memory: false
model:
  _target_: src.models.focus_module.FocusMSELitModule
  input_size: 67500
  lin1_size: 1024
  lin2_size: 64
  lin3_size: 128
  output_size: 1
  lr: 0.12853081804637867
  weight_decay: 0.0005
callbacks:
  model_checkpoint:
    _target_: pytorch_lightning.callbacks.ModelCheckpoint
    monitor: val/mae
    mode: min
    save_top_k: 1
    save_last: true
    verbose: false
    dirpath: checkpoints/
    filename: epoch_{epoch:03d}
    auto_insert_metric_name: false
  early_stopping:
    _target_: pytorch_lightning.callbacks.EarlyStopping
    monitor: val/mae
    mode: min
    patience: 100
    min_delta: 0
  model_summary:
    _target_: pytorch_lightning.callbacks.RichModelSummary
    max_depth: -1
  rich_progress_bar:
    _target_: pytorch_lightning.callbacks.RichProgressBar
logger:
  csv:
    _target_: pytorch_lightning.loggers.csv_logs.CSVLogger
    save_dir: .
    name: csv/
    prefix: ''
  mlflow:
    _target_: pytorch_lightning.loggers.mlflow.MLFlowLogger
    experiment_name: ${name}
    tracking_uri: ${original_work_dir}/logs/mlflow/mlruns
    tags: null
    save_dir: ./mlruns
    prefix: ''
    artifact_location: null
  tensorboard:
    _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
    save_dir: tensorboard/
    name: null
    version: ${name}
    log_graph: false
    default_hp_metric: true
    prefix: ''
trainer:
  _target_: pytorch_lightning.Trainer
  gpus: 1
  min_epochs: 1
  max_epochs: 100
  resume_from_checkpoint: null
optimized_metric: val/mae_best
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/1/.hydra/hydra.yaml
ADDED
@@ -0,0 +1,225 @@
hydra:
  run:
    dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
  sweep:
    dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
    subdir: ${hydra.job.num}
  launcher:
    _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
  sweeper:
    sampler:
      _target_: optuna.samplers.TPESampler
      seed: 12345
      consider_prior: true
      prior_weight: 1.0
      consider_magic_clip: true
      consider_endpoints: false
      n_startup_trials: 10
      n_ei_candidates: 24
      multivariate: false
      warn_independent_sampling: true
    _target_: hydra_plugins.hydra_optuna_sweeper.optuna_sweeper.OptunaSweeper
    direction: minimize
    storage: null
    study_name: focusMAE_150_hyperparameter_search
    n_trials: 20
    n_jobs: 1
    search_space:
      datamodule.batch_size:
        type: categorical
        choices:
        - 64
        - 128
      model.lr:
        type: float
        low: 0.0001
        high: 0.2
      model.lin1_size:
        type: categorical
        choices:
        - 64
        - 128
        - 256
        - 512
        - 1024
      model.lin2_size:
        type: categorical
        choices:
        - 64
        - 128
        - 256
        - 512
        - 1024
      model.lin3_size:
        type: categorical
        choices:
        - 64
        - 128
        - 256
        - 512
        - 1024
  help:
    app_name: ${hydra.job.name}
    header: '${hydra.help.app_name} is powered by Hydra.

      '
    footer: 'Powered by Hydra (https://hydra.cc)

      Use --hydra-help to view Hydra specific help

      '
    template: '${hydra.help.header}

      == Configuration groups ==

      Compose your configuration from those groups (group=option)


      $APP_CONFIG_GROUPS


      == Config ==

      Override anything in the config (foo.bar=value)


      $CONFIG


      ${hydra.help.footer}

      '
  hydra_help:
    template: 'Hydra (${hydra.runtime.version})

      See https://hydra.cc for more info.


      == Flags ==

      $FLAGS_HELP


      == Configuration groups ==

      Compose your configuration from those groups (For example, append hydra/job_logging=disabled
      to command line)


      $HYDRA_CONFIG_GROUPS


      Use ''--cfg hydra'' to Show the Hydra config.

      '
    hydra_help: ???
  hydra_logging:
    version: 1
    formatters:
      colorlog:
        (): colorlog.ColoredFormatter
        format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
    handlers:
      console:
        class: logging.StreamHandler
        formatter: colorlog
        stream: ext://sys.stdout
    root:
      level: INFO
      handlers:
      - console
    disable_existing_loggers: false
  job_logging:
    version: 1
    formatters:
      simple:
        format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
      colorlog:
        (): colorlog.ColoredFormatter
        format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
          - %(message)s'
        log_colors:
          DEBUG: purple
          INFO: green
          WARNING: yellow
          ERROR: red
          CRITICAL: red
    handlers:
      console:
        class: logging.StreamHandler
        formatter: colorlog
        stream: ext://sys.stdout
      file:
        class: logging.FileHandler
        formatter: simple
        filename: ${hydra.job.name}.log
    root:
      level: INFO
      handlers:
      - console
      - file
    disable_existing_loggers: false
  env: {}
  searchpath: []
  callbacks: {}
  output_subdir: .hydra
  overrides:
    hydra: []
    task:
    - datamodule.batch_size=64
    - model.lr=0.12853081804637867
    - model.lin1_size=1024
    - model.lin2_size=64
    - model.lin3_size=128
    - hparams_search=focusMSE_150_optuna.yaml
  job:
    name: train
    override_dirname: datamodule.batch_size=64,hparams_search=focusMSE_150_optuna.yaml,model.lin1_size=1024,model.lin2_size=64,model.lin3_size=128,model.lr=0.12853081804637867
    id: '1'
    num: 1
    config_name: train.yaml
    env_set: {}
    env_copy: []
    config:
      override_dirname:
        kv_sep: '='
        item_sep: ','
        exclude_keys: []
  runtime:
    version: 1.1.2
    cwd: /usr/src/app
    config_sources:
    - path: hydra.conf
      schema: pkg
      provider: hydra
    - path: /usr/src/app/configs
      schema: file
      provider: main
    - path: hydra_plugins.hydra_colorlog.conf
      schema: pkg
      provider: hydra-colorlog
    - path: ''
      schema: structured
      provider: schema
    choices:
      local: default.yaml
      hparams_search: focusMSE_150_optuna.yaml
      debug: null
      experiment: null
      log_dir: default.yaml
      trainer: long.yaml
      logger: many_loggers
      callbacks: default.yaml
      model: focus150.yaml
      datamodule: focus150.yaml
      hydra/env: default
      hydra/callbacks: null
      hydra/job_logging: colorlog
      hydra/hydra_logging: colorlog
      hydra/hydra_help: default
      hydra/help: default
      hydra/sweeper: optuna
      hydra/sweeper/sampler: tpe
      hydra/launcher: basic
      hydra/output: default
  verbose: false
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/1/.hydra/overrides.yaml
ADDED
@@ -0,0 +1,6 @@
- datamodule.batch_size=64
- model.lr=0.12853081804637867
- model.lin1_size=1024
- model.lin2_size=64
- model.lin3_size=128
- hparams_search=focusMSE_150_optuna.yaml
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/1/checkpoints/epoch_049.ckpt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9c8f5479483df358de7778d658f799a478cdc26771d61665578994929815ec66
size 830399430
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/1/checkpoints/last.ckpt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ca6de7b9a8ee06537a18c398fca86749ed2c4979b90b00a0cb455c0b08537d03
size 830399430
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/1/csv/version_0/hparams.yaml
ADDED
@@ -0,0 +1,50 @@
trainer:
  _target_: pytorch_lightning.Trainer
  gpus: 1
  min_epochs: 1
  max_epochs: 100
  resume_from_checkpoint: null
model:
  _target_: src.models.focus_module.FocusMSELitModule
  input_size: 67500
  lin1_size: 1024
  lin2_size: 64
  lin3_size: 128
  output_size: 1
  lr: 0.12853081804637867
  weight_decay: 0.0005
datamodule:
  _target_: src.datamodules.focus_datamodule.FocusDataModule
  data_dir: /usr/src/app/data/focus150
  csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
  csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
  csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
  batch_size: 64
  num_workers: 0
  pin_memory: false
seed: null
callbacks:
  model_checkpoint:
    _target_: pytorch_lightning.callbacks.ModelCheckpoint
    monitor: val/mae
    mode: min
    save_top_k: 1
    save_last: true
    verbose: false
    dirpath: checkpoints/
    filename: epoch_{epoch:03d}
    auto_insert_metric_name: false
  early_stopping:
    _target_: pytorch_lightning.callbacks.EarlyStopping
    monitor: val/mae
    mode: min
    patience: 100
    min_delta: 0
  model_summary:
    _target_: pytorch_lightning.callbacks.RichModelSummary
    max_depth: -1
  rich_progress_bar:
    _target_: pytorch_lightning.callbacks.RichProgressBar
model/params/total: 69197505
model/params/trainable: 69197505
model/params/non_trainable: 0
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/1/csv/version_0/metrics.csv
ADDED
@@ -0,0 +1,202 @@
val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae
0.00682297395542264,0.07781428098678589,0.026419755071401596,0,47,,,,
,,,1,47,1.5195387601852417,0.4864364564418793,,
0.0004452606663107872,0.017664406448602676,0.017664406448602676,1,95,,,,
… (alternating per-epoch validation and training rows continue; this truncated view ends at epoch 41) …
0.0004240644339006394,0.01730835810303688,0.017184488475322723,40,1967,,,,
,,,41,1967,0.0004097933997400105,0.01698816940188408,,
|
84 |
+
0.0004217067325953394,0.017254965379834175,0.017184488475322723,41,2015,,,,
|
85 |
+
,,,42,2015,0.00040887080831453204,0.016934873536229134,,
|
86 |
+
0.000421824719524011,0.017204202711582184,0.017184488475322723,42,2063,,,,
|
87 |
+
,,,43,2063,0.00040849976358003914,0.016925718635320663,,
|
88 |
+
0.0004219799884594977,0.01721641607582569,0.017184488475322723,43,2111,,,,
|
89 |
+
,,,44,2111,0.00041056069312617183,0.01697554439306259,,
|
90 |
+
0.00042241415940225124,0.017300428822636604,0.017184488475322723,44,2159,,,,
|
91 |
+
,,,45,2159,0.000408981170039624,0.016921591013669968,,
|
92 |
+
0.00042149514774791896,0.017197266221046448,0.017184488475322723,45,2207,,,,
|
93 |
+
,,,46,2207,0.00041037669871002436,0.01701531745493412,,
|
94 |
+
0.0004256282118149102,0.017399990931153297,0.017184488475322723,46,2255,,,,
|
95 |
+
,,,47,2255,0.000409190688515082,0.0169337410479784,,
|
96 |
+
0.0004261268477421254,0.0173643846064806,0.017184488475322723,47,2303,,,,
|
97 |
+
,,,48,2303,0.0004181844706181437,0.017103074118494987,,
|
98 |
+
0.0004442270437721163,0.017631689086556435,0.017184488475322723,48,2351,,,,
|
99 |
+
,,,49,2351,0.0004198215901851654,0.01715872995555401,,
|
100 |
+
0.00042159619624726474,0.017179273068904877,0.017179273068904877,49,2399,,,,
|
101 |
+
,,,50,2399,0.00041070699808187783,0.016972435638308525,,
|
102 |
+
0.00042488242615945637,0.01738210767507553,0.017179273068904877,50,2447,,,,
|
103 |
+
,,,51,2447,0.00041301778401248157,0.017010163515806198,,
|
104 |
+
0.0004215243679936975,0.01723073050379753,0.017179273068904877,51,2495,,,,
|
105 |
+
,,,52,2495,0.0004114630864933133,0.017010388895869255,,
|
106 |
+
0.0004465823294594884,0.017676856368780136,0.017179273068904877,52,2543,,,,
|
107 |
+
,,,53,2543,0.00041402687202207744,0.017015185207128525,,
|
108 |
+
0.0004225925076752901,0.01730862632393837,0.017179273068904877,53,2591,,,,
|
109 |
+
,,,54,2591,0.00041468386189080775,0.016993489116430283,,
|
110 |
+
0.0004244353622198105,0.017370430752635002,0.017179273068904877,54,2639,,,,
|
111 |
+
,,,55,2639,0.0004096245102118701,0.016910377889871597,,
|
112 |
+
0.00042158912401646376,0.017241565510630608,0.017179273068904877,55,2687,,,,
|
113 |
+
,,,56,2687,0.0004149657324887812,0.01703481189906597,,
|
114 |
+
0.00042784333345480263,0.017445219680666924,0.017179273068904877,56,2735,,,,
|
115 |
+
,,,57,2735,0.0004104162217117846,0.01693139038980007,,
|
116 |
+
0.0004259246925357729,0.017406649887561798,0.017179273068904877,57,2783,,,,
|
117 |
+
,,,58,2783,0.00041017591138370335,0.016951419413089752,,
|
118 |
+
0.0004439505864866078,0.017628714442253113,0.017179273068904877,58,2831,,,,
|
119 |
+
,,,59,2831,0.00041116963257081807,0.01698685996234417,,
|
120 |
+
0.00042210216633975506,0.017224734649062157,0.017179273068904877,59,2879,,,,
|
121 |
+
,,,60,2879,0.000424183119321242,0.017129113897681236,,
|
122 |
+
0.00043270725291222334,0.017522327601909637,0.017179273068904877,60,2927,,,,
|
123 |
+
,,,61,2927,0.0004093537572771311,0.016944479197263718,,
|
124 |
+
0.0004273414087947458,0.017435764893889427,0.017179273068904877,61,2975,,,,
|
125 |
+
,,,62,2975,0.00041107513243332505,0.016985327005386353,,
|
126 |
+
0.0004215409280732274,0.017233965918421745,0.017179273068904877,62,3023,,,,
|
127 |
+
,,,63,3023,0.0004128917644266039,0.017018217593431473,,
|
128 |
+
0.00042469974141567945,0.01732744835317135,0.017179273068904877,63,3071,,,,
|
129 |
+
,,,64,3071,0.00041337363654747605,0.017026126384735107,,
|
130 |
+
0.0004269485652912408,0.01738307811319828,0.017179273068904877,64,3119,,,,
|
131 |
+
,,,65,3119,0.0004217892710585147,0.017096083611249924,,
|
132 |
+
0.00046458179713226855,0.017839977517724037,0.017179273068904877,65,3167,,,,
|
133 |
+
,,,66,3167,0.00043729785829782486,0.017409132793545723,,
|
134 |
+
0.00042537128319963813,0.01739402860403061,0.017179273068904877,66,3215,,,,
|
135 |
+
,,,67,3215,0.00041837882599793375,0.01710938848555088,,
|
136 |
+
0.00042147625936195254,0.017212117090821266,0.017179273068904877,67,3263,,,,
|
137 |
+
,,,68,3263,0.0004119124496355653,0.017021844163537025,,
|
138 |
+
0.00042168371146544814,0.017190419137477875,0.017179273068904877,68,3311,,,,
|
139 |
+
,,,69,3311,0.00041857027099467814,0.017116108909249306,,
|
140 |
+
0.00042692740680649877,0.01738261803984642,0.017179273068904877,69,3359,,,,
|
141 |
+
,,,70,3359,0.00041257808334194124,0.017003631219267845,,
|
142 |
+
0.0004623394343070686,0.017810652032494545,0.017179273068904877,70,3407,,,,
|
143 |
+
,,,71,3407,0.0004188423336017877,0.017128678038716316,,
|
144 |
+
0.0004283777088858187,0.017454883083701134,0.017179273068904877,71,3455,,,,
|
145 |
+
,,,72,3455,0.0004153030167799443,0.01704205758869648,,
|
146 |
+
0.0004220851114951074,0.017282908782362938,0.017179273068904877,72,3503,,,,
|
147 |
+
,,,73,3503,0.0004162245895713568,0.01709717884659767,,
|
148 |
+
0.0004426346567925066,0.017614305019378662,0.017179273068904877,73,3551,,,,
|
149 |
+
,,,74,3551,0.0004165142890997231,0.017087774351239204,,
|
150 |
+
0.00042220077011734247,0.01728951185941696,0.017179273068904877,74,3599,,,,
|
151 |
+
,,,75,3599,0.0004265747556928545,0.01724381372332573,,
|
152 |
+
0.00042415643110871315,0.017362695187330246,0.017179273068904877,75,3647,,,,
|
153 |
+
,,,76,3647,0.00041404718649573624,0.01702992059290409,,
|
154 |
+
0.00044414715375751257,0.017653649672865868,0.017179273068904877,76,3695,,,,
|
155 |
+
,,,77,3695,0.0004164548299741,0.017070643603801727,,
|
156 |
+
0.0004233243816997856,0.017282860353589058,0.017179273068904877,77,3743,,,,
|
157 |
+
,,,78,3743,0.00041260532452724874,0.017008282244205475,,
|
158 |
+
0.00042865649447776377,0.017459772527217865,0.017179273068904877,78,3791,,,,
|
159 |
+
,,,79,3791,0.0004107530112378299,0.016971420496702194,,
|
160 |
+
0.0004251468344591558,0.01738865301012993,0.017179273068904877,79,3839,,,,
|
161 |
+
,,,80,3839,0.0004320554144214839,0.017288552597165108,,
|
162 |
+
0.000425597740104422,0.017399294301867485,0.017179273068904877,80,3887,,,,
|
163 |
+
,,,81,3887,0.0004141622921451926,0.017067447304725647,,
|
164 |
+
0.0004246118478477001,0.017375139519572258,0.017179273068904877,81,3935,,,,
|
165 |
+
,,,82,3935,0.0004259358101990074,0.01719009131193161,,
|
166 |
+
0.0004423320642672479,0.017610928043723106,0.017179273068904877,82,3983,,,,
|
167 |
+
,,,83,3983,0.0004292238154448569,0.01726389490067959,,
|
168 |
+
0.0004549343138933182,0.017766691744327545,0.017179273068904877,83,4031,,,,
|
169 |
+
,,,84,4031,0.00042488810140639544,0.01724543794989586,,
|
170 |
+
0.0004223826399538666,0.017241187393665314,0.017179273068904877,84,4079,,,,
|
171 |
+
,,,85,4079,0.0004182246048003435,0.017111994326114655,,
|
172 |
+
0.0004421582561917603,0.017633754760026932,0.017179273068904877,85,4127,,,,
|
173 |
+
,,,86,4127,0.0004144125559832901,0.017016030848026276,,
|
174 |
+
0.0004214845539536327,0.017201540991663933,0.017179273068904877,86,4175,,,,
|
175 |
+
,,,87,4175,0.0004190489707980305,0.01718851923942566,,
|
176 |
+
0.00044335509301163256,0.017622247338294983,0.017179273068904877,87,4223,,,,
|
177 |
+
,,,88,4223,0.0004233765648677945,0.01723206415772438,,
|
178 |
+
0.0004255291714798659,0.017349720001220703,0.017179273068904877,88,4271,,,,
|
179 |
+
,,,89,4271,0.00041971352766267955,0.01712927222251892,,
|
180 |
+
0.0004495760949794203,0.017686018720269203,0.017179273068904877,89,4319,,,,
|
181 |
+
,,,90,4319,0.0004192318592686206,0.01714130863547325,,
|
182 |
+
0.0004797343863174319,0.018079420551657677,0.017179273068904877,90,4367,,,,
|
183 |
+
,,,91,4367,0.0004233880899846554,0.017168143764138222,,
|
184 |
+
0.0004248766927048564,0.017332419753074646,0.017179273068904877,91,4415,,,,
|
185 |
+
,,,92,4415,0.00042652132106013596,0.0171901173889637,,
|
186 |
+
0.00042941837455146015,0.017472682520747185,0.017179273068904877,92,4463,,,,
|
187 |
+
,,,93,4463,0.000412994297221303,0.016988445073366165,,
|
188 |
+
0.00047174590872600675,0.018054025247693062,0.017179273068904877,93,4511,,,,
|
189 |
+
,,,94,4511,0.0004168211598880589,0.017037231475114822,,
|
190 |
+
0.0004494131135288626,0.017684444785118103,0.017179273068904877,94,4559,,,,
|
191 |
+
,,,95,4559,0.00042177760042250156,0.017126820981502533,,
|
192 |
+
0.0004286507028155029,0.017459675669670105,0.017179273068904877,95,4607,,,,
|
193 |
+
,,,96,4607,0.00042839429806917906,0.017229337245225906,,
|
194 |
+
0.0004395277064759284,0.01757838763296604,0.017179273068904877,96,4655,,,,
|
195 |
+
,,,97,4655,0.000431840744568035,0.017288153991103172,,
|
196 |
+
0.00042150012450292706,0.01719571091234684,0.017179273068904877,97,4703,,,,
|
197 |
+
,,,98,4703,0.0004324063775129616,0.017345041036605835,,
|
198 |
+
0.00044369680108502507,0.017625968903303146,0.017179273068904877,98,4751,,,,
|
199 |
+
,,,99,4751,0.000430561340181157,0.017358092591166496,,
|
200 |
+
0.0004354063712526113,0.017557810992002487,0.017179273068904877,99,4799,,,,
|
201 |
+
,,,100,4799,0.0004391666443552822,0.01735127903521061,,
|
202 |
+
,,,49,4800,,,0.00041455012978985906,0.016959508880972862
|
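The metrics.csv files added in this sweep interleave three kinds of rows from PyTorch Lightning's CSVLogger: validation rows (val/loss, val/mae, val/mae_best populated), training rows (train/loss, train/mae populated), and a single final test row. Below is a minimal sketch for pulling the best validation MAE out of the file above, assuming pandas is available; the script name is hypothetical and not part of this commit.

# inspect_metrics.py -- hypothetical helper, not a file in this commit
import pandas as pd

path = ("logs/experiments/multiruns/focusMSE_150_hyperparameter_search/"
        "2022-05-11_10-16-51/1/csv/version_0/metrics.csv")
df = pd.read_csv(path)

# Rows that carry validation metrics have val/mae filled in.
val_rows = df.dropna(subset=["val/mae"])
best = val_rows.loc[val_rows["val/mae"].idxmin()]
print(f"best val/mae {best['val/mae']:.6f} at epoch {int(best['epoch'])}")

# The test metrics appear once, in the final row where test/mae is populated.
print(df.dropna(subset=["test/mae"])[["test/loss", "test/mae"]].iloc[-1])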
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/1/tensorboard/focusMSE_150_hyperparameter_search/events.out.tfevents.1652264690.873c0dcdd84d.1.2
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:767925bffb0af36304f3cdb4978f58d2ee7a2da809a023e54a965d7011512c56
+size 36562
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/1/tensorboard/focusMSE_150_hyperparameter_search/events.out.tfevents.1652265463.873c0dcdd84d.1.3
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6fa3b3e87f761bdbadf4133646e0f1be5cfd9a6cfc97e822831f80b34c78311d
+size 179
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/1/tensorboard/focusMSE_150_hyperparameter_search/hparams.yaml
ADDED
@@ -0,0 +1,50 @@
1 |
+
trainer:
|
2 |
+
_target_: pytorch_lightning.Trainer
|
3 |
+
gpus: 1
|
4 |
+
min_epochs: 1
|
5 |
+
max_epochs: 100
|
6 |
+
resume_from_checkpoint: null
|
7 |
+
model:
|
8 |
+
_target_: src.models.focus_module.FocusMSELitModule
|
9 |
+
input_size: 67500
|
10 |
+
lin1_size: 1024
|
11 |
+
lin2_size: 64
|
12 |
+
lin3_size: 128
|
13 |
+
output_size: 1
|
14 |
+
lr: 0.12853081804637867
|
15 |
+
weight_decay: 0.0005
|
16 |
+
datamodule:
|
17 |
+
_target_: src.datamodules.focus_datamodule.FocusDataModule
|
18 |
+
data_dir: /usr/src/app/data/focus150
|
19 |
+
csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
|
20 |
+
csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
|
21 |
+
csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
|
22 |
+
batch_size: 64
|
23 |
+
num_workers: 0
|
24 |
+
pin_memory: false
|
25 |
+
seed: null
|
26 |
+
callbacks:
|
27 |
+
model_checkpoint:
|
28 |
+
_target_: pytorch_lightning.callbacks.ModelCheckpoint
|
29 |
+
monitor: val/mae
|
30 |
+
mode: min
|
31 |
+
save_top_k: 1
|
32 |
+
save_last: true
|
33 |
+
verbose: false
|
34 |
+
dirpath: checkpoints/
|
35 |
+
filename: epoch_{epoch:03d}
|
36 |
+
auto_insert_metric_name: false
|
37 |
+
early_stopping:
|
38 |
+
_target_: pytorch_lightning.callbacks.EarlyStopping
|
39 |
+
monitor: val/mae
|
40 |
+
mode: min
|
41 |
+
patience: 100
|
42 |
+
min_delta: 0
|
43 |
+
model_summary:
|
44 |
+
_target_: pytorch_lightning.callbacks.RichModelSummary
|
45 |
+
max_depth: -1
|
46 |
+
rich_progress_bar:
|
47 |
+
_target_: pytorch_lightning.callbacks.RichProgressBar
|
48 |
+
model/params/total: 69197505
|
49 |
+
model/params/trainable: 69197505
|
50 |
+
model/params/non_trainable: 0
|
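The parameter totals logged above (69197505 trainable for lin1=1024, lin2=64, lin3=128) are consistent with a fully connected head of shape 67500 -> 1024 -> 64 -> 128 -> 1 in which every hidden Linear layer is followed by a BatchNorm1d; that layer layout is an assumption here, since the FocusMSELitModule source is not part of this diff. A quick arithmetic check:

# Hypothetical sanity check of model/params/total.
# Assumes Linear + BatchNorm1d per hidden layer; the module source is not shown in this commit.
sizes = [67500, 1024, 64, 128, 1]
linear = sum(i * o + o for i, o in zip(sizes, sizes[1:]))  # weights + biases
batchnorm = sum(2 * n for n in sizes[1:-1])                # gamma + beta per BatchNorm1d
print(linear + batchnorm)  # 69197505, matching the logged total

The same arithmetic with sizes [67500, 512, 128, 64, 1] gives 34635905, matching the totals logged for run 10 further below.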
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/10/.hydra/config.yaml
ADDED
@@ -0,0 +1,77 @@
1 |
+
original_work_dir: ${hydra:runtime.cwd}
|
2 |
+
data_dir: ${original_work_dir}/data
|
3 |
+
print_config: true
|
4 |
+
ignore_warnings: true
|
5 |
+
train: true
|
6 |
+
test: true
|
7 |
+
seed: null
|
8 |
+
name: focusMSE_150_hyperparameter_search
|
9 |
+
datamodule:
|
10 |
+
_target_: src.datamodules.focus_datamodule.FocusDataModule
|
11 |
+
data_dir: ${data_dir}/focus150
|
12 |
+
csv_train_file: ${data_dir}/focus150/train_metadata.csv
|
13 |
+
csv_val_file: ${data_dir}/focus150/validation_metadata.csv
|
14 |
+
csv_test_file: ${data_dir}/focus150/test_metadata.csv
|
15 |
+
batch_size: 64
|
16 |
+
num_workers: 0
|
17 |
+
pin_memory: false
|
18 |
+
model:
|
19 |
+
_target_: src.models.focus_module.FocusMSELitModule
|
20 |
+
input_size: 67500
|
21 |
+
lin1_size: 512
|
22 |
+
lin2_size: 128
|
23 |
+
lin3_size: 64
|
24 |
+
output_size: 1
|
25 |
+
lr: 0.02282695974132741
|
26 |
+
weight_decay: 0.0005
|
27 |
+
callbacks:
|
28 |
+
model_checkpoint:
|
29 |
+
_target_: pytorch_lightning.callbacks.ModelCheckpoint
|
30 |
+
monitor: val/mae
|
31 |
+
mode: min
|
32 |
+
save_top_k: 1
|
33 |
+
save_last: true
|
34 |
+
verbose: false
|
35 |
+
dirpath: checkpoints/
|
36 |
+
filename: epoch_{epoch:03d}
|
37 |
+
auto_insert_metric_name: false
|
38 |
+
early_stopping:
|
39 |
+
_target_: pytorch_lightning.callbacks.EarlyStopping
|
40 |
+
monitor: val/mae
|
41 |
+
mode: min
|
42 |
+
patience: 100
|
43 |
+
min_delta: 0
|
44 |
+
model_summary:
|
45 |
+
_target_: pytorch_lightning.callbacks.RichModelSummary
|
46 |
+
max_depth: -1
|
47 |
+
rich_progress_bar:
|
48 |
+
_target_: pytorch_lightning.callbacks.RichProgressBar
|
49 |
+
logger:
|
50 |
+
csv:
|
51 |
+
_target_: pytorch_lightning.loggers.csv_logs.CSVLogger
|
52 |
+
save_dir: .
|
53 |
+
name: csv/
|
54 |
+
prefix: ''
|
55 |
+
mlflow:
|
56 |
+
_target_: pytorch_lightning.loggers.mlflow.MLFlowLogger
|
57 |
+
experiment_name: ${name}
|
58 |
+
tracking_uri: ${original_work_dir}/logs/mlflow/mlruns
|
59 |
+
tags: null
|
60 |
+
save_dir: ./mlruns
|
61 |
+
prefix: ''
|
62 |
+
artifact_location: null
|
63 |
+
tensorboard:
|
64 |
+
_target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
|
65 |
+
save_dir: tensorboard/
|
66 |
+
name: null
|
67 |
+
version: ${name}
|
68 |
+
log_graph: false
|
69 |
+
default_hp_metric: true
|
70 |
+
prefix: ''
|
71 |
+
trainer:
|
72 |
+
_target_: pytorch_lightning.Trainer
|
73 |
+
gpus: 1
|
74 |
+
min_epochs: 1
|
75 |
+
max_epochs: 100
|
76 |
+
resume_from_checkpoint: null
|
77 |
+
optimized_metric: val/mae_best
|
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/10/.hydra/hydra.yaml
ADDED
@@ -0,0 +1,225 @@
1 |
+
hydra:
|
2 |
+
run:
|
3 |
+
dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
|
4 |
+
sweep:
|
5 |
+
dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
|
6 |
+
subdir: ${hydra.job.num}
|
7 |
+
launcher:
|
8 |
+
_target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
|
9 |
+
sweeper:
|
10 |
+
sampler:
|
11 |
+
_target_: optuna.samplers.TPESampler
|
12 |
+
seed: 12345
|
13 |
+
consider_prior: true
|
14 |
+
prior_weight: 1.0
|
15 |
+
consider_magic_clip: true
|
16 |
+
consider_endpoints: false
|
17 |
+
n_startup_trials: 10
|
18 |
+
n_ei_candidates: 24
|
19 |
+
multivariate: false
|
20 |
+
warn_independent_sampling: true
|
21 |
+
_target_: hydra_plugins.hydra_optuna_sweeper.optuna_sweeper.OptunaSweeper
|
22 |
+
direction: minimize
|
23 |
+
storage: null
|
24 |
+
study_name: focusMAE_150_hyperparameter_search
|
25 |
+
n_trials: 20
|
26 |
+
n_jobs: 1
|
27 |
+
search_space:
|
28 |
+
datamodule.batch_size:
|
29 |
+
type: categorical
|
30 |
+
choices:
|
31 |
+
- 64
|
32 |
+
- 128
|
33 |
+
model.lr:
|
34 |
+
type: float
|
35 |
+
low: 0.0001
|
36 |
+
high: 0.2
|
37 |
+
model.lin1_size:
|
38 |
+
type: categorical
|
39 |
+
choices:
|
40 |
+
- 64
|
41 |
+
- 128
|
42 |
+
- 256
|
43 |
+
- 512
|
44 |
+
- 1024
|
45 |
+
model.lin2_size:
|
46 |
+
type: categorical
|
47 |
+
choices:
|
48 |
+
- 64
|
49 |
+
- 128
|
50 |
+
- 256
|
51 |
+
- 512
|
52 |
+
- 1024
|
53 |
+
model.lin3_size:
|
54 |
+
type: categorical
|
55 |
+
choices:
|
56 |
+
- 64
|
57 |
+
- 128
|
58 |
+
- 256
|
59 |
+
- 512
|
60 |
+
- 1024
|
61 |
+
help:
|
62 |
+
app_name: ${hydra.job.name}
|
63 |
+
header: '${hydra.help.app_name} is powered by Hydra.
|
64 |
+
|
65 |
+
'
|
66 |
+
footer: 'Powered by Hydra (https://hydra.cc)
|
67 |
+
|
68 |
+
Use --hydra-help to view Hydra specific help
|
69 |
+
|
70 |
+
'
|
71 |
+
template: '${hydra.help.header}
|
72 |
+
|
73 |
+
== Configuration groups ==
|
74 |
+
|
75 |
+
Compose your configuration from those groups (group=option)
|
76 |
+
|
77 |
+
|
78 |
+
$APP_CONFIG_GROUPS
|
79 |
+
|
80 |
+
|
81 |
+
== Config ==
|
82 |
+
|
83 |
+
Override anything in the config (foo.bar=value)
|
84 |
+
|
85 |
+
|
86 |
+
$CONFIG
|
87 |
+
|
88 |
+
|
89 |
+
${hydra.help.footer}
|
90 |
+
|
91 |
+
'
|
92 |
+
hydra_help:
|
93 |
+
template: 'Hydra (${hydra.runtime.version})
|
94 |
+
|
95 |
+
See https://hydra.cc for more info.
|
96 |
+
|
97 |
+
|
98 |
+
== Flags ==
|
99 |
+
|
100 |
+
$FLAGS_HELP
|
101 |
+
|
102 |
+
|
103 |
+
== Configuration groups ==
|
104 |
+
|
105 |
+
Compose your configuration from those groups (For example, append hydra/job_logging=disabled
|
106 |
+
to command line)
|
107 |
+
|
108 |
+
|
109 |
+
$HYDRA_CONFIG_GROUPS
|
110 |
+
|
111 |
+
|
112 |
+
Use ''--cfg hydra'' to Show the Hydra config.
|
113 |
+
|
114 |
+
'
|
115 |
+
hydra_help: ???
|
116 |
+
hydra_logging:
|
117 |
+
version: 1
|
118 |
+
formatters:
|
119 |
+
colorlog:
|
120 |
+
(): colorlog.ColoredFormatter
|
121 |
+
format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
|
122 |
+
handlers:
|
123 |
+
console:
|
124 |
+
class: logging.StreamHandler
|
125 |
+
formatter: colorlog
|
126 |
+
stream: ext://sys.stdout
|
127 |
+
root:
|
128 |
+
level: INFO
|
129 |
+
handlers:
|
130 |
+
- console
|
131 |
+
disable_existing_loggers: false
|
132 |
+
job_logging:
|
133 |
+
version: 1
|
134 |
+
formatters:
|
135 |
+
simple:
|
136 |
+
format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
|
137 |
+
colorlog:
|
138 |
+
(): colorlog.ColoredFormatter
|
139 |
+
format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
|
140 |
+
- %(message)s'
|
141 |
+
log_colors:
|
142 |
+
DEBUG: purple
|
143 |
+
INFO: green
|
144 |
+
WARNING: yellow
|
145 |
+
ERROR: red
|
146 |
+
CRITICAL: red
|
147 |
+
handlers:
|
148 |
+
console:
|
149 |
+
class: logging.StreamHandler
|
150 |
+
formatter: colorlog
|
151 |
+
stream: ext://sys.stdout
|
152 |
+
file:
|
153 |
+
class: logging.FileHandler
|
154 |
+
formatter: simple
|
155 |
+
filename: ${hydra.job.name}.log
|
156 |
+
root:
|
157 |
+
level: INFO
|
158 |
+
handlers:
|
159 |
+
- console
|
160 |
+
- file
|
161 |
+
disable_existing_loggers: false
|
162 |
+
env: {}
|
163 |
+
searchpath: []
|
164 |
+
callbacks: {}
|
165 |
+
output_subdir: .hydra
|
166 |
+
overrides:
|
167 |
+
hydra: []
|
168 |
+
task:
|
169 |
+
- datamodule.batch_size=64
|
170 |
+
- model.lr=0.02282695974132741
|
171 |
+
- model.lin1_size=512
|
172 |
+
- model.lin2_size=128
|
173 |
+
- model.lin3_size=64
|
174 |
+
- hparams_search=focusMSE_150_optuna.yaml
|
175 |
+
job:
|
176 |
+
name: train
|
177 |
+
override_dirname: datamodule.batch_size=64,hparams_search=focusMSE_150_optuna.yaml,model.lin1_size=512,model.lin2_size=128,model.lin3_size=64,model.lr=0.02282695974132741
|
178 |
+
id: '10'
|
179 |
+
num: 10
|
180 |
+
config_name: train.yaml
|
181 |
+
env_set: {}
|
182 |
+
env_copy: []
|
183 |
+
config:
|
184 |
+
override_dirname:
|
185 |
+
kv_sep: '='
|
186 |
+
item_sep: ','
|
187 |
+
exclude_keys: []
|
188 |
+
runtime:
|
189 |
+
version: 1.1.2
|
190 |
+
cwd: /usr/src/app
|
191 |
+
config_sources:
|
192 |
+
- path: hydra.conf
|
193 |
+
schema: pkg
|
194 |
+
provider: hydra
|
195 |
+
- path: /usr/src/app/configs
|
196 |
+
schema: file
|
197 |
+
provider: main
|
198 |
+
- path: hydra_plugins.hydra_colorlog.conf
|
199 |
+
schema: pkg
|
200 |
+
provider: hydra-colorlog
|
201 |
+
- path: ''
|
202 |
+
schema: structured
|
203 |
+
provider: schema
|
204 |
+
choices:
|
205 |
+
local: default.yaml
|
206 |
+
hparams_search: focusMSE_150_optuna.yaml
|
207 |
+
debug: null
|
208 |
+
experiment: null
|
209 |
+
log_dir: default.yaml
|
210 |
+
trainer: long.yaml
|
211 |
+
logger: many_loggers
|
212 |
+
callbacks: default.yaml
|
213 |
+
model: focus150.yaml
|
214 |
+
datamodule: focus150.yaml
|
215 |
+
hydra/env: default
|
216 |
+
hydra/callbacks: null
|
217 |
+
hydra/job_logging: colorlog
|
218 |
+
hydra/hydra_logging: colorlog
|
219 |
+
hydra/hydra_help: default
|
220 |
+
hydra/help: default
|
221 |
+
hydra/sweeper: optuna
|
222 |
+
hydra/sweeper/sampler: tpe
|
223 |
+
hydra/launcher: basic
|
224 |
+
hydra/output: default
|
225 |
+
verbose: false
|
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/10/.hydra/overrides.yaml
ADDED
@@ -0,0 +1,6 @@
+- datamodule.batch_size=64
+- model.lr=0.02282695974132741
+- model.lin1_size=512
+- model.lin2_size=128
+- model.lin3_size=64
+- hparams_search=focusMSE_150_optuna.yaml
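These six overrides are what distinguish trial 10 from the rest of the sweep. A hedged sketch of how they could be replayed as a single, non-sweep run follows, assuming the project is launched as python train.py from the repository root (the Hydra job name above is train, but the actual entry-point file is not shown in this diff) and that PyYAML is installed:

# replay_trial.py -- hypothetical helper, not a file in this commit
import yaml

overrides_path = ("logs/experiments/multiruns/focusMSE_150_hyperparameter_search/"
                  "2022-05-11_10-16-51/10/.hydra/overrides.yaml")
with open(overrides_path) as f:
    overrides = yaml.safe_load(f)  # a plain list of "key=value" strings

# hparams_search only configures the Optuna sweeper, so drop it for a single run.
overrides = [o for o in overrides if not o.startswith("hparams_search=")]
print("python train.py " + " ".join(overrides))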
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/10/checkpoints/epoch_043.ckpt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7bc822279067d6f10eaa6dff668f513ed40faabc28a94f51394d6ba8121ec717
+size 415656134
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/10/checkpoints/last.ckpt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bb2f89fc7acdb3af5ed3f4ff3363fff06cb42b24771474a2ec33c9dfbad7d448
+size 415656134
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/10/csv/version_0/hparams.yaml
ADDED
@@ -0,0 +1,50 @@
1 |
+
trainer:
|
2 |
+
_target_: pytorch_lightning.Trainer
|
3 |
+
gpus: 1
|
4 |
+
min_epochs: 1
|
5 |
+
max_epochs: 100
|
6 |
+
resume_from_checkpoint: null
|
7 |
+
model:
|
8 |
+
_target_: src.models.focus_module.FocusMSELitModule
|
9 |
+
input_size: 67500
|
10 |
+
lin1_size: 512
|
11 |
+
lin2_size: 128
|
12 |
+
lin3_size: 64
|
13 |
+
output_size: 1
|
14 |
+
lr: 0.02282695974132741
|
15 |
+
weight_decay: 0.0005
|
16 |
+
datamodule:
|
17 |
+
_target_: src.datamodules.focus_datamodule.FocusDataModule
|
18 |
+
data_dir: /usr/src/app/data/focus150
|
19 |
+
csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
|
20 |
+
csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
|
21 |
+
csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
|
22 |
+
batch_size: 64
|
23 |
+
num_workers: 0
|
24 |
+
pin_memory: false
|
25 |
+
seed: null
|
26 |
+
callbacks:
|
27 |
+
model_checkpoint:
|
28 |
+
_target_: pytorch_lightning.callbacks.ModelCheckpoint
|
29 |
+
monitor: val/mae
|
30 |
+
mode: min
|
31 |
+
save_top_k: 1
|
32 |
+
save_last: true
|
33 |
+
verbose: false
|
34 |
+
dirpath: checkpoints/
|
35 |
+
filename: epoch_{epoch:03d}
|
36 |
+
auto_insert_metric_name: false
|
37 |
+
early_stopping:
|
38 |
+
_target_: pytorch_lightning.callbacks.EarlyStopping
|
39 |
+
monitor: val/mae
|
40 |
+
mode: min
|
41 |
+
patience: 100
|
42 |
+
min_delta: 0
|
43 |
+
model_summary:
|
44 |
+
_target_: pytorch_lightning.callbacks.RichModelSummary
|
45 |
+
max_depth: -1
|
46 |
+
rich_progress_bar:
|
47 |
+
_target_: pytorch_lightning.callbacks.RichProgressBar
|
48 |
+
model/params/total: 34635905
|
49 |
+
model/params/trainable: 34635905
|
50 |
+
model/params/non_trainable: 0
|
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/10/csv/version_0/metrics.csv
ADDED
@@ -0,0 +1,202 @@
1 |
+
val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae
|
2 |
+
0.0011281021870672703,0.02711985632777214,0.018362004309892654,0,47,,,,
|
3 |
+
,,,1,47,0.07122854888439178,0.11967164278030396,,
|
4 |
+
0.0006400587735697627,0.02036621980369091,0.018362004309892654,1,95,,,,
|
5 |
+
,,,2,95,0.0007135855848900974,0.021475350484251976,,
|
6 |
+
0.0005043616984039545,0.018463686108589172,0.018362004309892654,2,143,,,,
|
7 |
+
,,,3,143,0.0005328346160240471,0.01880355179309845,,
|
8 |
+
0.0006693615578114986,0.02085697092115879,0.018362004309892654,3,191,,,,
|
9 |
+
,,,4,191,0.0004978328361175954,0.018373282626271248,,
|
10 |
+
0.0007864921935833991,0.02248457446694374,0.018362004309892654,4,239,,,,
|
11 |
+
,,,5,239,0.000463975768070668,0.017843009904026985,,
|
12 |
+
0.0005851046880707145,0.01973997801542282,0.018362004309892654,5,287,,,,
|
13 |
+
,,,6,287,0.00047062025987543166,0.017921416088938713,,
|
14 |
+
0.0004586242139339447,0.01787249930202961,0.01787249930202961,6,335,,,,
|
15 |
+
,,,7,335,0.0004447203245945275,0.017580008134245872,,
|
16 |
+
0.0005991319194436073,0.019893821328878403,0.01787249930202961,7,383,,,,
|
17 |
+
,,,8,383,0.0004421287449076772,0.01751449517905712,,
|
18 |
+
0.0004540012450888753,0.017811132594943047,0.017811132594943047,8,431,,,,
|
19 |
+
,,,9,431,0.00043958041351288557,0.017420684918761253,,
|
20 |
+
0.0004406502121128142,0.017605790868401527,0.017605790868401527,9,479,,,,
|
21 |
+
,,,10,479,0.0004327886563260108,0.017311563715338707,,
|
22 |
+
0.000434703950304538,0.017524829134345055,0.017524829134345055,10,527,,,,
|
23 |
+
,,,11,527,0.00043747934978455305,0.01745450496673584,,
|
24 |
+
0.0012795940274372697,0.02870207652449608,0.017524829134345055,11,575,,,,
|
25 |
+
,,,12,575,0.0004264127928763628,0.017225079238414764,,
|
26 |
+
0.00043065566569566727,0.01739424280822277,0.01739424280822277,12,623,,,,
|
27 |
+
,,,13,623,0.0004278869309928268,0.017278237268328667,,
|
28 |
+
0.0004774496774189174,0.018153764307498932,0.01739424280822277,13,671,,,,
|
29 |
+
,,,14,671,0.0004159110540058464,0.01704791747033596,,
|
30 |
+
0.0005076391389593482,0.018503086641430855,0.01739424280822277,14,719,,,,
|
31 |
+
,,,15,719,0.00042174156988039613,0.017164118587970734,,
|
32 |
+
0.00048621100722812116,0.018323227763175964,0.01739424280822277,15,767,,,,
|
33 |
+
,,,16,767,0.0004153060435783118,0.016995010897517204,,
|
34 |
+
0.0004438474425114691,0.017588619142770767,0.01739424280822277,16,815,,,,
|
35 |
+
,,,17,815,0.00041301408782601357,0.017021803185343742,,
|
36 |
+
0.0004369286762084812,0.017464973032474518,0.01739424280822277,17,863,,,,
|
37 |
+
,,,18,863,0.0004131711320951581,0.017033221200108528,,
|
38 |
+
0.0004275658284313977,0.017399858683347702,0.01739424280822277,18,911,,,,
|
39 |
+
,,,19,911,0.0004071082512382418,0.01692000776529312,,
|
40 |
+
0.000422586192144081,0.017316868528723717,0.017316868528723717,19,959,,,,
|
41 |
+
,,,20,959,0.0004086386179551482,0.01696924865245819,,
|
42 |
+
0.00042186310747638345,0.017238253727555275,0.017238253727555275,20,1007,,,,
|
43 |
+
,,,21,1007,0.0004147801664657891,0.017028864473104477,,
|
44 |
+
0.00042030459735542536,0.017246495932340622,0.017238253727555275,21,1055,,,,
|
45 |
+
,,,22,1055,0.00040855054976418614,0.016924137249588966,,
|
46 |
+
0.0004246641183272004,0.01738802343606949,0.017238253727555275,22,1103,,,,
|
47 |
+
,,,23,1103,0.0004072301962878555,0.016901714727282524,,
|
48 |
+
0.000422256242018193,0.017278073355555534,0.017238253727555275,23,1151,,,,
|
49 |
+
,,,24,1151,0.00041184201836586,0.01701800338923931,,
|
50 |
+
0.00042004804708994925,0.017205307260155678,0.017205307260155678,24,1199,,,,
|
51 |
+
,,,25,1199,0.000407969142543152,0.016927435994148254,,
|
52 |
+
0.00042051286436617374,0.017214380204677582,0.017205307260155678,25,1247,,,,
|
53 |
+
,,,26,1247,0.0004065701214130968,0.016892217099666595,,
|
54 |
+
0.00042125879554077983,0.017213856801390648,0.017205307260155678,26,1295,,,,
|
55 |
+
,,,27,1295,0.00041026173857972026,0.01697658561170101,,
|
56 |
+
0.000424766301875934,0.01739085093140602,0.017205307260155678,27,1343,,,,
|
57 |
+
,,,28,1343,0.0004091080045327544,0.016964141279459,,
|
58 |
+
0.00042140070581808686,0.01719926856458187,0.01719926856458187,28,1391,,,,
|
59 |
+
,,,29,1391,0.0004161283723078668,0.017023712396621704,,
|
60 |
+
0.0004209254402667284,0.017274359241127968,0.01719926856458187,29,1439,,,,
|
61 |
+
,,,30,1439,0.00041239967686124146,0.017025787383317947,,
|
62 |
+
0.0004426456580404192,0.017637990415096283,0.01719926856458187,30,1487,,,,
|
63 |
+
,,,31,1487,0.0004088685673195869,0.016913358122110367,,
|
64 |
+
0.0004217410460114479,0.0172589048743248,0.01719926856458187,31,1535,,,,
|
65 |
+
,,,32,1535,0.0004142217803746462,0.017029695212841034,,
|
66 |
+
0.0004275539831724018,0.01739603467285633,0.01719926856458187,32,1583,,,,
|
67 |
+
,,,33,1583,0.0004113833128940314,0.01700104959309101,,
|
68 |
+
0.00043005688348785043,0.017443425953388214,0.01719926856458187,33,1631,,,,
|
69 |
+
,,,34,1631,0.00041530554881319404,0.01700584590435028,,
|
70 |
+
0.00042668095557019114,0.017422690987586975,0.01719926856458187,34,1679,,,,
|
71 |
+
,,,35,1679,0.0004085572436451912,0.016955584287643433,,
|
72 |
+
0.0004221853450872004,0.01728866435587406,0.01719926856458187,35,1727,,,,
|
73 |
+
,,,36,1727,0.0004117787757422775,0.016962070018053055,,
|
74 |
+
0.0004216516681481153,0.017249267548322678,0.01719926856458187,36,1775,,,,
|
75 |
+
,,,37,1775,0.0004093268944416195,0.016944412142038345,,
|
76 |
+
0.0004246116732247174,0.01737513206899166,0.01719926856458187,37,1823,,,,
|
77 |
+
,,,38,1823,0.00041178453830070794,0.016976770013570786,,
|
78 |
+
0.000426341692218557,0.01736942119896412,0.01719926856458187,38,1871,,,,
|
79 |
+
,,,39,1871,0.0004271950456313789,0.01719795912504196,,
|
80 |
+
0.0004223192809149623,0.017295747995376587,0.01719926856458187,39,1919,,,,
|
81 |
+
,,,40,1919,0.0004120529629290104,0.01702703721821308,,
|
82 |
+
0.0004214943328406662,0.01722288876771927,0.01719926856458187,40,1967,,,,
|
83 |
+
,,,41,1967,0.0004114086041226983,0.017010381445288658,,
|
84 |
+
0.00046932350960560143,0.01789957657456398,0.01719926856458187,41,2015,,,,
|
85 |
+
,,,42,2015,0.0004145224520470947,0.017043476924300194,,
|
86 |
+
0.0004214814689476043,0.01720321923494339,0.01719926856458187,42,2063,,,,
|
87 |
+
,,,43,2063,0.0004201580013614148,0.01709224097430706,,
|
88 |
+
0.00042157727875746787,0.017180560156702995,0.017180560156702995,43,2111,,,,
|
89 |
+
,,,44,2111,0.00041081535164266825,0.01698470115661621,,
|
90 |
+
0.0004437507886905223,0.017649756744503975,0.017180560156702995,44,2159,,,,
|
91 |
+
,,,45,2159,0.0004155017377343029,0.017049914225935936,,
|
92 |
+
0.00042982824379578233,0.017439404502511024,0.017180560156702995,45,2207,,,,
|
93 |
+
,,,46,2207,0.0004167530278209597,0.01704738847911358,,
|
94 |
+
0.00043227922287769616,0.017479993402957916,0.017180560156702995,46,2255,,,,
|
95 |
+
,,,47,2255,0.0004328438371885568,0.017372192814946175,,
|
96 |
+
0.00042793824104592204,0.017403818666934967,0.017180560156702995,47,2303,,,,
|
97 |
+
,,,48,2303,0.00041387981036677957,0.01704348623752594,,
|
98 |
+
0.0004214852233417332,0.017201198264956474,0.017180560156702995,48,2351,,,,
|
99 |
+
,,,49,2351,0.00041564772254787385,0.017086055129766464,,
|
100 |
+
0.0004368383961264044,0.01757523976266384,0.017180560156702995,49,2399,,,,
|
101 |
+
,,,50,2399,0.00041277805576100945,0.017023010179400444,,
|
102 |
+
0.0004244837909936905,0.017371734604239464,0.017180560156702995,50,2447,,,,
|
103 |
+
,,,51,2447,0.0004103230603504926,0.016944022849202156,,
|
104 |
+
0.0004223459691274911,0.017239198088645935,0.017180560156702995,51,2495,,,,
|
105 |
+
,,,52,2495,0.00041400373447686434,0.01701498217880726,,
|
106 |
+
0.0004284657188691199,0.017414217814803123,0.017180560156702995,52,2543,,,,
|
107 |
+
,,,53,2543,0.00041939469520002604,0.017096364870667458,,
|
108 |
+
0.0004338828148320317,0.01750408485531807,0.017180560156702995,53,2591,,,,
|
109 |
+
,,,54,2591,0.00041186335147358477,0.017002707347273827,,
|
110 |
+
0.0004290273936931044,0.017424874007701874,0.017180560156702995,54,2639,,,,
|
111 |
+
,,,55,2639,0.0004174927598796785,0.01711188815534115,,
|
112 |
+
0.0004430662957020104,0.017619077116250992,0.017180560156702995,55,2687,,,,
|
113 |
+
,,,56,2687,0.00041559492819942534,0.01708925887942314,,
|
114 |
+
0.0004280649882275611,0.01744927652180195,0.017180560156702995,56,2735,,,,
|
115 |
+
,,,57,2735,0.0004113404138479382,0.0169532410800457,,
|
116 |
+
0.0004236065724398941,0.01729309745132923,0.017180560156702995,57,2783,,,,
|
117 |
+
,,,58,2783,0.00041848039836622775,0.017124412581324577,,
|
118 |
+
0.0004215107182972133,0.01719283126294613,0.017180560156702995,58,2831,,,,
|
119 |
+
,,,59,2831,0.00041483351378701627,0.01702684536576271,,
|
120 |
+
0.0004237977263983339,0.01735212653875351,0.017180560156702995,59,2879,,,,
|
121 |
+
,,,60,2879,0.0004151337780058384,0.017060047015547752,,
|
122 |
+
0.0004566407878883183,0.017790043726563454,0.017180560156702995,60,2927,,,,
|
123 |
+
,,,61,2927,0.0004122813988942653,0.017023755237460136,,
|
124 |
+
0.00044156599324196577,0.01760227046906948,0.017180560156702995,61,2975,,,,
|
125 |
+
,,,62,2975,0.000420794531237334,0.01707250438630581,,
|
126 |
+
0.00042823690455406904,0.017409758642315865,0.017180560156702995,62,3023,,,,
|
127 |
+
,,,63,3023,0.0004184271383564919,0.017092760652303696,,
|
128 |
+
0.0004263119481038302,0.017415020614862442,0.017180560156702995,63,3071,,,,
|
129 |
+
,,,64,3071,0.00042030634358525276,0.01711852476000786,,
|
130 |
+
0.0004218357789795846,0.017266089096665382,0.017180560156702995,64,3119,,,,
|
131 |
+
,,,65,3119,0.00042133103124797344,0.017166176810860634,,
|
132 |
+
0.00042552468948997557,0.017349611967802048,0.017180560156702995,65,3167,,,,
|
133 |
+
,,,66,3167,0.00041496226913295686,0.017062131315469742,,
|
134 |
+
0.00042211145046167076,0.017284464091062546,0.017180560156702995,66,3215,,,,
|
135 |
+
,,,67,3215,0.0004231124185025692,0.01719653606414795,,
|
136 |
+
0.0004277771804481745,0.01744399592280388,0.017180560156702995,67,3263,,,,
|
137 |
+
,,,68,3263,0.00041238151607103646,0.01699051633477211,,
|
138 |
+
0.000423470395617187,0.017288245260715485,0.017180560156702995,68,3311,,,,
|
139 |
+
,,,69,3311,0.00041538989171385765,0.017079917713999748,,
|
140 |
+
0.00042151554953306913,0.017191661521792412,0.017180560156702995,69,3359,,,,
|
141 |
+
,,,70,3359,0.00041306010098196566,0.017040593549609184,,
|
142 |
+
0.0004215479420963675,0.01718522049486637,0.017180560156702995,70,3407,,,,
|
143 |
+
,,,71,3407,0.00041025778045877814,0.0169714093208313,,
|
144 |
+
0.0004282475565560162,0.01745256409049034,0.017180560156702995,71,3455,,,,
|
145 |
+
,,,72,3455,0.00042295167804695666,0.017161324620246887,,
|
146 |
+
0.00042303104419261217,0.01732635870575905,0.017180560156702995,72,3503,,,,
|
147 |
+
,,,73,3503,0.0004189040046185255,0.01712844893336296,,
|
148 |
+
0.0004250160127412528,0.017385443672537804,0.017180560156702995,73,3551,,,,
|
149 |
+
,,,74,3551,0.0004222102288622409,0.017171140760183334,,
|
150 |
+
0.00043932299013249576,0.01760365255177021,0.017180560156702995,74,3599,,,,
|
151 |
+
,,,75,3599,0.00042064933222718537,0.017108803614974022,,
|
152 |
+
0.0004228642792440951,0.017264315858483315,0.017180560156702995,75,3647,,,,
|
153 |
+
,,,76,3647,0.00041850193520076573,0.017119674012064934,,
|
154 |
+
0.00042154884431511164,0.017185058444738388,0.017180560156702995,76,3695,,,,
|
155 |
+
,,,77,3695,0.00041348932427354157,0.017014658078551292,,
|
156 |
+
0.00045598464203067124,0.017744679003953934,0.017180560156702995,77,3743,,,,
|
157 |
+
,,,78,3743,0.00041206987225450575,0.017023805528879166,,
|
158 |
+
0.00045128242345526814,0.017702238634228706,0.017180560156702995,78,3791,,,,
|
159 |
+
,,,79,3791,0.00041889146086759865,0.01709752343595028,,
|
160 |
+
0.0004223989089950919,0.017242060974240303,0.017180560156702995,79,3839,,,,
|
161 |
+
,,,80,3839,0.0004256273969076574,0.017267677932977676,,
|
162 |
+
0.00043138538603670895,0.01750338263809681,0.017180560156702995,80,3887,,,,
|
163 |
+
,,,81,3887,0.00042529244092293084,0.01723828725516796,,
|
164 |
+
0.0004400289326440543,0.017611360177397728,0.017180560156702995,81,3935,,,,
|
165 |
+
,,,82,3935,0.00041474730824120343,0.017081473022699356,,
|
166 |
+
0.00044159142998978496,0.017627907916903496,0.017180560156702995,82,3983,,,,
|
167 |
+
,,,83,3983,0.0004136357456445694,0.0170455239713192,,
|
168 |
+
0.0004229248152114451,0.017322320491075516,0.017180560156702995,83,4031,,,,
|
169 |
+
,,,84,4031,0.0004242396680638194,0.017158087342977524,,
|
170 |
+
0.00044080588850192726,0.017593512311577797,0.017180560156702995,84,4079,,,,
|
171 |
+
,,,85,4079,0.0004195038345642388,0.01711467280983925,,
|
172 |
+
0.0004435201990418136,0.01762404851615429,0.017180560156702995,85,4127,,,,
|
173 |
+
,,,86,4127,0.0004196020890958607,0.017128098756074905,,
|
174 |
+
0.00042513245716691017,0.01733938604593277,0.017180560156702995,86,4175,,,,
|
175 |
+
,,,87,4175,0.0004222663410473615,0.01720554381608963,,
|
176 |
+
0.0004291928780730814,0.017427941784262657,0.017180560156702995,87,4223,,,,
|
177 |
+
,,,88,4223,0.0004286100738681853,0.01728161796927452,,
|
178 |
+
0.0004272959486115724,0.017390556633472443,0.017180560156702995,88,4271,,,,
|
179 |
+
,,,89,4271,0.00042584165930747986,0.01722331903874874,,
|
180 |
+
0.0004219522525090724,0.01727449893951416,0.017180560156702995,89,4319,,,,
|
181 |
+
,,,90,4319,0.0004145670682191849,0.01706518419086933,,
|
182 |
+
0.0004433141730260104,0.017621798440814018,0.017180560156702995,90,4367,,,,
|
183 |
+
,,,91,4367,0.0004169942403677851,0.01704483851790428,,
|
184 |
+
0.0004240768030285835,0.017360413447022438,0.017180560156702995,91,4415,,,,
|
185 |
+
,,,92,4415,0.0004216157831251621,0.01713530346751213,,
|
186 |
+
0.0004245628952048719,0.017323505133390427,0.017180560156702995,92,4463,,,,
|
187 |
+
,,,93,4463,0.000410627166274935,0.016985256224870682,,
|
188 |
+
0.00042714871233329177,0.01743203215301037,0.017180560156702995,93,4511,,,,
|
189 |
+
,,,94,4511,0.00041893869638442993,0.017081163823604584,,
|
190 |
+
0.000421605771407485,0.017243782058358192,0.017180560156702995,94,4559,,,,
|
191 |
+
,,,95,4559,0.00041459358180873096,0.01703556440770626,,
|
192 |
+
0.0004268198972567916,0.0173802487552166,0.017180560156702995,95,4607,,,,
|
193 |
+
,,,96,4607,0.0004316757258493453,0.01725691556930542,,
|
194 |
+
0.0004286697367206216,0.017418138682842255,0.017180560156702995,96,4655,,,,
|
195 |
+
,,,97,4655,0.00041463717934675515,0.017060644924640656,,
|
196 |
+
0.00043312174966558814,0.017528032884001732,0.017180560156702995,97,4703,,,,
|
197 |
+
,,,98,4703,0.00041683894232846797,0.017048858106136322,,
|
198 |
+
0.000422937999246642,0.017322828993201256,0.017180560156702995,98,4751,,,,
|
199 |
+
,,,99,4751,0.00041537146898917854,0.017093045637011528,,
|
200 |
+
0.00042151304660364985,0.01719224825501442,0.017180560156702995,99,4799,,,,
|
201 |
+
,,,100,4799,0.0004299595020711422,0.017297277227044106,,
|
202 |
+
,,,43,4800,,,0.0004145443090237677,0.016961054876446724
|
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/10/tensorboard/focusMSE_150_hyperparameter_search/events.out.tfevents.1652269144.873c0dcdd84d.1.20
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c00daa4f58e92ab574674a70f1b075d9c379d5cbeb969e4cb75ba1b4aabf667f
+size 36562
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/10/tensorboard/focusMSE_150_hyperparameter_search/events.out.tfevents.1652269613.873c0dcdd84d.1.21
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e06eaf4e3cbac0974f6ab78be7cd5acdbaef589ee053332cfdc763e04e21772
+size 179
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/10/tensorboard/focusMSE_150_hyperparameter_search/hparams.yaml
ADDED
@@ -0,0 +1,50 @@
1 |
+
trainer:
|
2 |
+
_target_: pytorch_lightning.Trainer
|
3 |
+
gpus: 1
|
4 |
+
min_epochs: 1
|
5 |
+
max_epochs: 100
|
6 |
+
resume_from_checkpoint: null
|
7 |
+
model:
|
8 |
+
_target_: src.models.focus_module.FocusMSELitModule
|
9 |
+
input_size: 67500
|
10 |
+
lin1_size: 512
|
11 |
+
lin2_size: 128
|
12 |
+
lin3_size: 64
|
13 |
+
output_size: 1
|
14 |
+
lr: 0.02282695974132741
|
15 |
+
weight_decay: 0.0005
|
16 |
+
datamodule:
|
17 |
+
_target_: src.datamodules.focus_datamodule.FocusDataModule
|
18 |
+
data_dir: /usr/src/app/data/focus150
|
19 |
+
csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
|
20 |
+
csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
|
21 |
+
csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
|
22 |
+
batch_size: 64
|
23 |
+
num_workers: 0
|
24 |
+
pin_memory: false
|
25 |
+
seed: null
|
26 |
+
callbacks:
|
27 |
+
model_checkpoint:
|
28 |
+
_target_: pytorch_lightning.callbacks.ModelCheckpoint
|
29 |
+
monitor: val/mae
|
30 |
+
mode: min
|
31 |
+
save_top_k: 1
|
32 |
+
save_last: true
|
33 |
+
verbose: false
|
34 |
+
dirpath: checkpoints/
|
35 |
+
filename: epoch_{epoch:03d}
|
36 |
+
auto_insert_metric_name: false
|
37 |
+
early_stopping:
|
38 |
+
_target_: pytorch_lightning.callbacks.EarlyStopping
|
39 |
+
monitor: val/mae
|
40 |
+
mode: min
|
41 |
+
patience: 100
|
42 |
+
min_delta: 0
|
43 |
+
model_summary:
|
44 |
+
_target_: pytorch_lightning.callbacks.RichModelSummary
|
45 |
+
max_depth: -1
|
46 |
+
rich_progress_bar:
|
47 |
+
_target_: pytorch_lightning.callbacks.RichProgressBar
|
48 |
+
model/params/total: 34635905
|
49 |
+
model/params/trainable: 34635905
|
50 |
+
model/params/non_trainable: 0
|
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/11/.hydra/config.yaml
ADDED
@@ -0,0 +1,77 @@
1 |
+
original_work_dir: ${hydra:runtime.cwd}
|
2 |
+
data_dir: ${original_work_dir}/data
|
3 |
+
print_config: true
|
4 |
+
ignore_warnings: true
|
5 |
+
train: true
|
6 |
+
test: true
|
7 |
+
seed: null
|
8 |
+
name: focusMSE_150_hyperparameter_search
|
9 |
+
datamodule:
|
10 |
+
_target_: src.datamodules.focus_datamodule.FocusDataModule
|
11 |
+
data_dir: ${data_dir}/focus150
|
12 |
+
csv_train_file: ${data_dir}/focus150/train_metadata.csv
|
13 |
+
csv_val_file: ${data_dir}/focus150/validation_metadata.csv
|
14 |
+
csv_test_file: ${data_dir}/focus150/test_metadata.csv
|
15 |
+
batch_size: 128
|
16 |
+
num_workers: 0
|
17 |
+
pin_memory: false
|
18 |
+
model:
|
19 |
+
_target_: src.models.focus_module.FocusMSELitModule
|
20 |
+
input_size: 67500
|
21 |
+
lin1_size: 64
|
22 |
+
lin2_size: 64
|
23 |
+
lin3_size: 1024
|
24 |
+
output_size: 1
|
25 |
+
lr: 0.19688938412353008
|
26 |
+
weight_decay: 0.0005
|
27 |
+
callbacks:
|
28 |
+
model_checkpoint:
|
29 |
+
_target_: pytorch_lightning.callbacks.ModelCheckpoint
|
30 |
+
monitor: val/mae
|
31 |
+
mode: min
|
32 |
+
save_top_k: 1
|
33 |
+
save_last: true
|
34 |
+
verbose: false
|
35 |
+
dirpath: checkpoints/
|
36 |
+
filename: epoch_{epoch:03d}
|
37 |
+
auto_insert_metric_name: false
|
38 |
+
early_stopping:
|
39 |
+
_target_: pytorch_lightning.callbacks.EarlyStopping
|
40 |
+
monitor: val/mae
|
41 |
+
mode: min
|
42 |
+
patience: 100
|
43 |
+
min_delta: 0
|
44 |
+
model_summary:
|
45 |
+
_target_: pytorch_lightning.callbacks.RichModelSummary
|
46 |
+
max_depth: -1
|
47 |
+
rich_progress_bar:
|
48 |
+
_target_: pytorch_lightning.callbacks.RichProgressBar
|
49 |
+
logger:
|
50 |
+
csv:
|
51 |
+
_target_: pytorch_lightning.loggers.csv_logs.CSVLogger
|
52 |
+
save_dir: .
|
53 |
+
name: csv/
|
54 |
+
prefix: ''
|
55 |
+
mlflow:
|
56 |
+
_target_: pytorch_lightning.loggers.mlflow.MLFlowLogger
|
57 |
+
experiment_name: ${name}
|
58 |
+
tracking_uri: ${original_work_dir}/logs/mlflow/mlruns
|
59 |
+
tags: null
|
60 |
+
save_dir: ./mlruns
|
61 |
+
prefix: ''
|
62 |
+
artifact_location: null
|
63 |
+
tensorboard:
|
64 |
+
_target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
|
65 |
+
save_dir: tensorboard/
|
66 |
+
name: null
|
67 |
+
version: ${name}
|
68 |
+
log_graph: false
|
69 |
+
default_hp_metric: true
|
70 |
+
prefix: ''
|
71 |
+
trainer:
|
72 |
+
_target_: pytorch_lightning.Trainer
|
73 |
+
gpus: 1
|
74 |
+
min_epochs: 1
|
75 |
+
max_epochs: 100
|
76 |
+
resume_from_checkpoint: null
|
77 |
+
optimized_metric: val/mae_best
|
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/11/.hydra/hydra.yaml
ADDED
@@ -0,0 +1,225 @@
1 |
+
hydra:
|
2 |
+
run:
|
3 |
+
dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
|
4 |
+
sweep:
|
5 |
+
dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
|
6 |
+
subdir: ${hydra.job.num}
|
7 |
+
launcher:
|
8 |
+
_target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
|
9 |
+
sweeper:
|
10 |
+
sampler:
|
11 |
+
_target_: optuna.samplers.TPESampler
|
12 |
+
seed: 12345
|
13 |
+
consider_prior: true
|
14 |
+
prior_weight: 1.0
|
15 |
+
consider_magic_clip: true
|
16 |
+
consider_endpoints: false
|
17 |
+
n_startup_trials: 10
|
18 |
+
n_ei_candidates: 24
|
19 |
+
multivariate: false
|
20 |
+
warn_independent_sampling: true
|
21 |
+
_target_: hydra_plugins.hydra_optuna_sweeper.optuna_sweeper.OptunaSweeper
|
22 |
+
direction: minimize
|
23 |
+
storage: null
|
24 |
+
study_name: focusMAE_150_hyperparameter_search
|
25 |
+
n_trials: 20
|
26 |
+
n_jobs: 1
|
27 |
+
search_space:
|
28 |
+
datamodule.batch_size:
|
29 |
+
type: categorical
|
30 |
+
choices:
|
31 |
+
- 64
|
32 |
+
- 128
|
33 |
+
model.lr:
|
34 |
+
type: float
|
35 |
+
low: 0.0001
|
36 |
+
high: 0.2
|
37 |
+
model.lin1_size:
|
38 |
+
type: categorical
|
39 |
+
choices:
|
40 |
+
- 64
|
41 |
+
- 128
|
42 |
+
- 256
|
43 |
+
- 512
|
44 |
+
- 1024
|
45 |
+
model.lin2_size:
|
46 |
+
type: categorical
|
47 |
+
choices:
|
48 |
+
- 64
|
49 |
+
- 128
|
50 |
+
- 256
|
51 |
+
- 512
|
52 |
+
- 1024
|
53 |
+
model.lin3_size:
|
54 |
+
type: categorical
|
55 |
+
choices:
|
56 |
+
- 64
|
57 |
+
- 128
|
58 |
+
- 256
|
59 |
+
- 512
|
60 |
+
- 1024
|
61 |
+
help:
|
62 |
+
app_name: ${hydra.job.name}
|
63 |
+
header: '${hydra.help.app_name} is powered by Hydra.
|
64 |
+
|
65 |
+
'
|
66 |
+
footer: 'Powered by Hydra (https://hydra.cc)
|
67 |
+
|
68 |
+
+Use --hydra-help to view Hydra specific help
+
+'
+template: '${hydra.help.header}
+
+== Configuration groups ==
+
+Compose your configuration from those groups (group=option)
+
+
+$APP_CONFIG_GROUPS
+
+
+== Config ==
+
+Override anything in the config (foo.bar=value)
+
+
+$CONFIG
+
+
+${hydra.help.footer}
+
+'
+hydra_help:
+template: 'Hydra (${hydra.runtime.version})
+
+See https://hydra.cc for more info.
+
+
+== Flags ==
+
+$FLAGS_HELP
+
+
+== Configuration groups ==
+
+Compose your configuration from those groups (For example, append hydra/job_logging=disabled
+to command line)
+
+
+$HYDRA_CONFIG_GROUPS
+
+
+Use ''--cfg hydra'' to Show the Hydra config.
+
+'
+hydra_help: ???
+hydra_logging:
+version: 1
+formatters:
+colorlog:
+(): colorlog.ColoredFormatter
+format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
+handlers:
+console:
+class: logging.StreamHandler
+formatter: colorlog
+stream: ext://sys.stdout
+root:
+level: INFO
+handlers:
+- console
+disable_existing_loggers: false
+job_logging:
+version: 1
+formatters:
+simple:
+format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
+colorlog:
+(): colorlog.ColoredFormatter
+format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
+- %(message)s'
+log_colors:
+DEBUG: purple
+INFO: green
+WARNING: yellow
+ERROR: red
+CRITICAL: red
+handlers:
+console:
+class: logging.StreamHandler
+formatter: colorlog
+stream: ext://sys.stdout
+file:
+class: logging.FileHandler
+formatter: simple
+filename: ${hydra.job.name}.log
+root:
+level: INFO
+handlers:
+- console
+- file
+disable_existing_loggers: false
+env: {}
+searchpath: []
+callbacks: {}
+output_subdir: .hydra
+overrides:
+hydra: []
+task:
+- datamodule.batch_size=128
+- model.lr=0.19688938412353008
+- model.lin1_size=64
+- model.lin2_size=64
+- model.lin3_size=1024
+- hparams_search=focusMSE_150_optuna.yaml
+job:
+name: train
+override_dirname: datamodule.batch_size=128,hparams_search=focusMSE_150_optuna.yaml,model.lin1_size=64,model.lin2_size=64,model.lin3_size=1024,model.lr=0.19688938412353008
+id: '11'
+num: 11
+config_name: train.yaml
+env_set: {}
+env_copy: []
+config:
+override_dirname:
+kv_sep: '='
+item_sep: ','
+exclude_keys: []
+runtime:
+version: 1.1.2
+cwd: /usr/src/app
+config_sources:
+- path: hydra.conf
+schema: pkg
+provider: hydra
+- path: /usr/src/app/configs
+schema: file
+provider: main
+- path: hydra_plugins.hydra_colorlog.conf
+schema: pkg
+provider: hydra-colorlog
+- path: ''
+schema: structured
+provider: schema
+choices:
+local: default.yaml
+hparams_search: focusMSE_150_optuna.yaml
+debug: null
+experiment: null
+log_dir: default.yaml
+trainer: long.yaml
+logger: many_loggers
+callbacks: default.yaml
+model: focus150.yaml
+datamodule: focus150.yaml
+hydra/env: default
+hydra/callbacks: null
+hydra/job_logging: colorlog
+hydra/hydra_logging: colorlog
+hydra/hydra_help: default
+hydra/help: default
+hydra/sweeper: optuna
+hydra/sweeper/sampler: tpe
+hydra/launcher: basic
+hydra/output: default
+verbose: false
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/11/.hydra/overrides.yaml
ADDED
@@ -0,0 +1,6 @@
+- datamodule.batch_size=128
+- model.lr=0.19688938412353008
+- model.lin1_size=64
+- model.lin2_size=64
+- model.lin3_size=1024
+- hparams_search=focusMSE_150_optuna.yaml
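
Note: the overrides file above records exactly the hyperparameters Optuna picked for this trial. A minimal sketch of replaying them as a normal single run follows; it assumes the project is launched via "python train.py" (the Hydra job name in the config above is "train"), which is not shown in this diff.

# Hedged sketch: replay the overrides Hydra recorded for this trial as a single run.
# The "train.py" entry-point name is an assumption; adjust it to the real entry point.
import subprocess
import yaml

overrides_path = (
    "logs/experiments/multiruns/focusMSE_150_hyperparameter_search/"
    "2022-05-11_10-16-51/11/.hydra/overrides.yaml"
)

with open(overrides_path) as f:
    overrides = yaml.safe_load(f)  # a plain list like ["datamodule.batch_size=128", ...]

# Drop the sweep config so replaying one trial does not launch another multirun.
overrides = [o for o in overrides if not o.startswith("hparams_search=")]

subprocess.run(["python", "train.py", *overrides], check=True)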
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/11/checkpoints/epoch_098.ckpt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8c0a19b5d8688d755f205d2eba8f8010c2dc17e6916f25abc002f83260e5b1f7
+size 52758214
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/11/checkpoints/last.ckpt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c3c8a391618a1ed3b28e8e73feae52295983eb55f34652d8a6d7edaef22db77c
+size 52758214
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/11/csv/version_0/hparams.yaml
ADDED
@@ -0,0 +1,50 @@
+trainer:
+_target_: pytorch_lightning.Trainer
+gpus: 1
+min_epochs: 1
+max_epochs: 100
+resume_from_checkpoint: null
+model:
+_target_: src.models.focus_module.FocusMSELitModule
+input_size: 67500
+lin1_size: 64
+lin2_size: 64
+lin3_size: 1024
+output_size: 1
+lr: 0.19688938412353008
+weight_decay: 0.0005
+datamodule:
+_target_: src.datamodules.focus_datamodule.FocusDataModule
+data_dir: /usr/src/app/data/focus150
+csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
+csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
+csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
+batch_size: 128
+num_workers: 0
+pin_memory: false
+seed: null
+callbacks:
+model_checkpoint:
+_target_: pytorch_lightning.callbacks.ModelCheckpoint
+monitor: val/mae
+mode: min
+save_top_k: 1
+save_last: true
+verbose: false
+dirpath: checkpoints/
+filename: epoch_{epoch:03d}
+auto_insert_metric_name: false
+early_stopping:
+_target_: pytorch_lightning.callbacks.EarlyStopping
+monitor: val/mae
+mode: min
+patience: 100
+min_delta: 0
+model_summary:
+_target_: pytorch_lightning.callbacks.RichModelSummary
+max_depth: -1
+rich_progress_bar:
+_target_: pytorch_lightning.callbacks.RichProgressBar
+model/params/total: 4394113
+model/params/trainable: 4394113
+model/params/non_trainable: 0
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/11/csv/version_0/metrics.csv
ADDED
@@ -0,0 +1,202 @@
+val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae
[... 200 per-epoch train/validation metric rows (epochs 0-99); best val/mae 0.017187954857945442 at epoch 98 ...]
+,,,98,2400,,,0.00041453648009337485,0.016969287768006325
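
Note on the metrics layout: the CSVLogger writes validation and training metrics on alternating rows, leaves unlogged columns empty, and appends a single test row at the end. A minimal sketch for summarising one trial (assuming pandas is installed) follows.

# Hedged sketch: summarise a trial's metrics.csv written by the Lightning CSVLogger.
import pandas as pd

metrics_path = (
    "logs/experiments/multiruns/focusMSE_150_hyperparameter_search/"
    "2022-05-11_10-16-51/11/csv/version_0/metrics.csv"
)
df = pd.read_csv(metrics_path)

val_rows = df.dropna(subset=["val/mae"])    # rows carrying validation metrics
test_rows = df.dropna(subset=["test/mae"])  # the single final test row
best = val_rows.loc[val_rows["val/mae"].idxmin()]

print(f"best val/mae {best['val/mae']:.6f} at epoch {int(best['epoch'])}")
print(f"test mae {test_rows['test/mae'].iloc[0]:.6f}")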
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/11/tensorboard/focusMSE_150_hyperparameter_search/events.out.tfevents.1652269614.873c0dcdd84d.1.22
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9c06fa891f50c70f34904e6b5b3191cb31052f5cd8a3d211d2bc162bd2aa9d03
+size 36541
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/11/tensorboard/focusMSE_150_hyperparameter_search/events.out.tfevents.1652269798.873c0dcdd84d.1.23
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:59b96bc5fa0752fdbd915526f3a3c2fe76204ee3c2ab0ced41fb4590390209c2
+size 179
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/11/tensorboard/focusMSE_150_hyperparameter_search/hparams.yaml
ADDED
@@ -0,0 +1,50 @@
+trainer:
+_target_: pytorch_lightning.Trainer
+gpus: 1
+min_epochs: 1
+max_epochs: 100
+resume_from_checkpoint: null
+model:
+_target_: src.models.focus_module.FocusMSELitModule
+input_size: 67500
+lin1_size: 64
+lin2_size: 64
+lin3_size: 1024
+output_size: 1
+lr: 0.19688938412353008
+weight_decay: 0.0005
+datamodule:
+_target_: src.datamodules.focus_datamodule.FocusDataModule
+data_dir: /usr/src/app/data/focus150
+csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
+csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
+csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
+batch_size: 128
+num_workers: 0
+pin_memory: false
+seed: null
+callbacks:
+model_checkpoint:
+_target_: pytorch_lightning.callbacks.ModelCheckpoint
+monitor: val/mae
+mode: min
+save_top_k: 1
+save_last: true
+verbose: false
+dirpath: checkpoints/
+filename: epoch_{epoch:03d}
+auto_insert_metric_name: false
+early_stopping:
+_target_: pytorch_lightning.callbacks.EarlyStopping
+monitor: val/mae
+mode: min
+patience: 100
+min_delta: 0
+model_summary:
+_target_: pytorch_lightning.callbacks.RichModelSummary
+max_depth: -1
+rich_progress_bar:
+_target_: pytorch_lightning.callbacks.RichProgressBar
+model/params/total: 4394113
+model/params/trainable: 4394113
+model/params/non_trainable: 0
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/12/.hydra/config.yaml
ADDED
@@ -0,0 +1,77 @@
+original_work_dir: ${hydra:runtime.cwd}
+data_dir: ${original_work_dir}/data
+print_config: true
+ignore_warnings: true
+train: true
+test: true
+seed: null
+name: focusMSE_150_hyperparameter_search
+datamodule:
+_target_: src.datamodules.focus_datamodule.FocusDataModule
+data_dir: ${data_dir}/focus150
+csv_train_file: ${data_dir}/focus150/train_metadata.csv
+csv_val_file: ${data_dir}/focus150/validation_metadata.csv
+csv_test_file: ${data_dir}/focus150/test_metadata.csv
+batch_size: 128
+num_workers: 0
+pin_memory: false
+model:
+_target_: src.models.focus_module.FocusMSELitModule
+input_size: 67500
+lin1_size: 64
+lin2_size: 512
+lin3_size: 1024
+output_size: 1
+lr: 0.06633213937592236
+weight_decay: 0.0005
+callbacks:
+model_checkpoint:
+_target_: pytorch_lightning.callbacks.ModelCheckpoint
+monitor: val/mae
+mode: min
+save_top_k: 1
+save_last: true
+verbose: false
+dirpath: checkpoints/
+filename: epoch_{epoch:03d}
+auto_insert_metric_name: false
+early_stopping:
+_target_: pytorch_lightning.callbacks.EarlyStopping
+monitor: val/mae
+mode: min
+patience: 100
+min_delta: 0
+model_summary:
+_target_: pytorch_lightning.callbacks.RichModelSummary
+max_depth: -1
+rich_progress_bar:
+_target_: pytorch_lightning.callbacks.RichProgressBar
+logger:
+csv:
+_target_: pytorch_lightning.loggers.csv_logs.CSVLogger
+save_dir: .
+name: csv/
+prefix: ''
+mlflow:
+_target_: pytorch_lightning.loggers.mlflow.MLFlowLogger
+experiment_name: ${name}
+tracking_uri: ${original_work_dir}/logs/mlflow/mlruns
+tags: null
+save_dir: ./mlruns
+prefix: ''
+artifact_location: null
+tensorboard:
+_target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
+save_dir: tensorboard/
+name: null
+version: ${name}
+log_graph: false
+default_hp_metric: true
+prefix: ''
+trainer:
+_target_: pytorch_lightning.Trainer
+gpus: 1
+min_epochs: 1
+max_epochs: 100
+resume_from_checkpoint: null
+optimized_metric: val/mae_best
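
Note on the model block above: the actual network is defined in src.models.focus_module.FocusMSELitModule, which is not part of this diff. The logged parameter totals (4,394,113 for lin sizes 64/64/1024 and 4,882,881 for 64/512/1024) are consistent with a Linear -> BatchNorm1d -> ReLU block per hidden layer, so the sketch below assumes that layout; it is illustrative only.

# Hedged sketch of the fully connected regressor implied by the model config above.
import torch
from torch import nn

def build_focus_mlp(input_size=67500, lin1_size=64, lin2_size=512,
                    lin3_size=1024, output_size=1) -> nn.Sequential:
    return nn.Sequential(
        nn.Flatten(),  # 3x150x150 focus patch -> 67500 features
        nn.Linear(input_size, lin1_size), nn.BatchNorm1d(lin1_size), nn.ReLU(),
        nn.Linear(lin1_size, lin2_size), nn.BatchNorm1d(lin2_size), nn.ReLU(),
        nn.Linear(lin2_size, lin3_size), nn.BatchNorm1d(lin3_size), nn.ReLU(),
        nn.Linear(lin3_size, output_size),  # single focus-distance output
    )

model = build_focus_mlp()
print(sum(p.numel() for p in model.parameters()))   # 4882881, matching the logged total
print(model(torch.randn(2, 3, 150, 150)).shape)     # torch.Size([2, 1])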
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/12/.hydra/hydra.yaml
ADDED
@@ -0,0 +1,225 @@
+hydra:
+run:
+dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
+sweep:
+dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
+subdir: ${hydra.job.num}
+launcher:
+_target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
+sweeper:
+sampler:
+_target_: optuna.samplers.TPESampler
+seed: 12345
+consider_prior: true
+prior_weight: 1.0
+consider_magic_clip: true
+consider_endpoints: false
+n_startup_trials: 10
+n_ei_candidates: 24
+multivariate: false
+warn_independent_sampling: true
+_target_: hydra_plugins.hydra_optuna_sweeper.optuna_sweeper.OptunaSweeper
+direction: minimize
+storage: null
+study_name: focusMAE_150_hyperparameter_search
+n_trials: 20
+n_jobs: 1
+search_space:
+datamodule.batch_size:
+type: categorical
+choices:
+- 64
+- 128
+model.lr:
+type: float
+low: 0.0001
+high: 0.2
+model.lin1_size:
+type: categorical
+choices:
+- 64
+- 128
+- 256
+- 512
+- 1024
+model.lin2_size:
+type: categorical
+choices:
+- 64
+- 128
+- 256
+- 512
+- 1024
+model.lin3_size:
+type: categorical
+choices:
+- 64
+- 128
+- 256
+- 512
+- 1024
+help:
+app_name: ${hydra.job.name}
+header: '${hydra.help.app_name} is powered by Hydra.
+
+'
+footer: 'Powered by Hydra (https://hydra.cc)
+
+Use --hydra-help to view Hydra specific help
+
+'
+template: '${hydra.help.header}
+
+== Configuration groups ==
+
+Compose your configuration from those groups (group=option)
+
+
+$APP_CONFIG_GROUPS
+
+
+== Config ==
+
+Override anything in the config (foo.bar=value)
+
+
+$CONFIG
+
+
+${hydra.help.footer}
+
+'
+hydra_help:
+template: 'Hydra (${hydra.runtime.version})
+
+See https://hydra.cc for more info.
+
+
+== Flags ==
+
+$FLAGS_HELP
+
+
+== Configuration groups ==
+
+Compose your configuration from those groups (For example, append hydra/job_logging=disabled
+to command line)
+
+
+$HYDRA_CONFIG_GROUPS
+
+
+Use ''--cfg hydra'' to Show the Hydra config.
+
+'
+hydra_help: ???
+hydra_logging:
+version: 1
+formatters:
+colorlog:
+(): colorlog.ColoredFormatter
+format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
+handlers:
+console:
+class: logging.StreamHandler
+formatter: colorlog
+stream: ext://sys.stdout
+root:
+level: INFO
+handlers:
+- console
+disable_existing_loggers: false
+job_logging:
+version: 1
+formatters:
+simple:
+format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
+colorlog:
+(): colorlog.ColoredFormatter
+format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
+- %(message)s'
+log_colors:
+DEBUG: purple
+INFO: green
+WARNING: yellow
+ERROR: red
+CRITICAL: red
+handlers:
+console:
+class: logging.StreamHandler
+formatter: colorlog
+stream: ext://sys.stdout
+file:
+class: logging.FileHandler
+formatter: simple
+filename: ${hydra.job.name}.log
+root:
+level: INFO
+handlers:
+- console
+- file
+disable_existing_loggers: false
+env: {}
+searchpath: []
+callbacks: {}
+output_subdir: .hydra
+overrides:
+hydra: []
+task:
+- datamodule.batch_size=128
+- model.lr=0.06633213937592236
+- model.lin1_size=64
+- model.lin2_size=512
+- model.lin3_size=1024
+- hparams_search=focusMSE_150_optuna.yaml
+job:
+name: train
+override_dirname: datamodule.batch_size=128,hparams_search=focusMSE_150_optuna.yaml,model.lin1_size=64,model.lin2_size=512,model.lin3_size=1024,model.lr=0.06633213937592236
+id: '12'
+num: 12
+config_name: train.yaml
+env_set: {}
+env_copy: []
+config:
+override_dirname:
+kv_sep: '='
+item_sep: ','
+exclude_keys: []
+runtime:
+version: 1.1.2
+cwd: /usr/src/app
+config_sources:
+- path: hydra.conf
+schema: pkg
+provider: hydra
+- path: /usr/src/app/configs
+schema: file
+provider: main
+- path: hydra_plugins.hydra_colorlog.conf
+schema: pkg
+provider: hydra-colorlog
+- path: ''
+schema: structured
+provider: schema
+choices:
+local: default.yaml
+hparams_search: focusMSE_150_optuna.yaml
+debug: null
+experiment: null
+log_dir: default.yaml
+trainer: long.yaml
+logger: many_loggers
+callbacks: default.yaml
+model: focus150.yaml
+datamodule: focus150.yaml
+hydra/env: default
+hydra/callbacks: null
+hydra/job_logging: colorlog
+hydra/hydra_logging: colorlog
+hydra/hydra_help: default
+hydra/help: default
+hydra/sweeper: optuna
+hydra/sweeper/sampler: tpe
+hydra/launcher: basic
+hydra/output: default
+verbose: false
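
Note on the sweeper block above: the hydra-optuna-sweeper drives the actual search, but the declared search space maps directly onto plain Optuna suggest calls. A minimal, illustrative sketch follows; run_trial() is a hypothetical stand-in for launching one training run and returning the optimized metric (val/mae_best).

# Hedged sketch of the search space under hydra.sweeper.search_space, written as a
# plain Optuna study (TPE sampler, seed 12345, 20 trials, direction minimize).
import optuna

def objective(trial: optuna.Trial) -> float:
    overrides = {
        "datamodule.batch_size": trial.suggest_categorical("datamodule.batch_size", [64, 128]),
        "model.lr": trial.suggest_float("model.lr", 0.0001, 0.2),
        "model.lin1_size": trial.suggest_categorical("model.lin1_size", [64, 128, 256, 512, 1024]),
        "model.lin2_size": trial.suggest_categorical("model.lin2_size", [64, 128, 256, 512, 1024]),
        "model.lin3_size": trial.suggest_categorical("model.lin3_size", [64, 128, 256, 512, 1024]),
    }
    return run_trial(overrides)  # hypothetical helper: train once, return val/mae_best

study = optuna.create_study(
    direction="minimize",
    sampler=optuna.samplers.TPESampler(seed=12345),
)
study.optimize(objective, n_trials=20)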
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/12/.hydra/overrides.yaml
ADDED
@@ -0,0 +1,6 @@
+- datamodule.batch_size=128
+- model.lr=0.06633213937592236
+- model.lin1_size=64
+- model.lin2_size=512
+- model.lin3_size=1024
+- hparams_search=focusMSE_150_optuna.yaml
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/12/checkpoints/epoch_048.ckpt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d6d8ae30a6688e9377c2fccb1ae9f35400cbbd02cedbb3b43d0f349a9a674be2
+size 58627014
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/12/checkpoints/last.ckpt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8980f7385315390f23d30b063e15ee869b6de2dd1e6fc31a4412b74c574f5d7e
+size 58627014
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/12/csv/version_0/hparams.yaml
ADDED
@@ -0,0 +1,50 @@
+trainer:
+_target_: pytorch_lightning.Trainer
+gpus: 1
+min_epochs: 1
+max_epochs: 100
+resume_from_checkpoint: null
+model:
+_target_: src.models.focus_module.FocusMSELitModule
+input_size: 67500
+lin1_size: 64
+lin2_size: 512
+lin3_size: 1024
+output_size: 1
+lr: 0.06633213937592236
+weight_decay: 0.0005
+datamodule:
+_target_: src.datamodules.focus_datamodule.FocusDataModule
+data_dir: /usr/src/app/data/focus150
+csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
+csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
+csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
+batch_size: 128
+num_workers: 0
+pin_memory: false
+seed: null
+callbacks:
+model_checkpoint:
+_target_: pytorch_lightning.callbacks.ModelCheckpoint
+monitor: val/mae
+mode: min
+save_top_k: 1
+save_last: true
+verbose: false
+dirpath: checkpoints/
+filename: epoch_{epoch:03d}
+auto_insert_metric_name: false
+early_stopping:
+_target_: pytorch_lightning.callbacks.EarlyStopping
+monitor: val/mae
+mode: min
+patience: 100
+min_delta: 0
+model_summary:
+_target_: pytorch_lightning.callbacks.RichModelSummary
+max_depth: -1
+rich_progress_bar:
+_target_: pytorch_lightning.callbacks.RichProgressBar
+model/params/total: 4882881
+model/params/trainable: 4882881
+model/params/non_trainable: 0
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/12/csv/version_0/metrics.csv
ADDED
@@ -0,0 +1,202 @@
+val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae
[... 200 per-epoch train/validation metric rows (epochs 0-99); best val/mae 0.017180705443024635 at epoch 48 ...]
+,,,48,2400,,,0.00040467947837896645,0.016861222684383392
logs/experiments/multiruns/focusMSE_150_hyperparameter_search/2022-05-11_10-16-51/12/tensorboard/focusMSE_150_hyperparameter_search/events.out.tfevents.1652269799.873c0dcdd84d.1.24
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6025942fe8d477c83f3be6f4030a29074789581b7f1327f9fb13ddbe1d40a51c
+size 36541