Hannes Kuchelmeister committed
Commit · 1f561a0
1 Parent(s): 7e40cd9
add runs for first experiments
Browse files
- configs/hparams_search/focusMSE_150_optuna.yaml +3 -2
- logs/experiments/runs/focusMAE_150/2022-03-10_13-27-16/.hydra/config.yaml +70 -0
- logs/experiments/runs/focusMAE_150/2022-03-10_13-27-16/.hydra/hydra.yaml +171 -0
- logs/experiments/runs/focusMAE_150/2022-03-10_13-27-16/.hydra/overrides.yaml +2 -0
- logs/experiments/runs/focusMAE_150/2022-03-10_13-27-16/csv/version_0/hparams.yaml +52 -0
- logs/experiments/runs/focusMAE_150/2022-03-10_13-27-16/csv/version_0/metrics.csv +202 -0
- logs/experiments/runs/focusMAE_150/2022-03-10_13-27-16/tensorboard/focusMAE_150/events.out.tfevents.1646918838.57666f6070c3.1.0 +3 -0
- logs/experiments/runs/focusMAE_150/2022-03-10_13-27-16/tensorboard/focusMAE_150/events.out.tfevents.1646919273.57666f6070c3.1.1 +3 -0
- logs/experiments/runs/focusMAE_150/2022-03-10_13-27-16/tensorboard/focusMAE_150/hparams.yaml +52 -0
- logs/experiments/runs/focusMAE_75/2022-03-10_14-07-57/.hydra/config.yaml +70 -0
- logs/experiments/runs/focusMAE_75/2022-03-10_14-07-57/.hydra/hydra.yaml +171 -0
- logs/experiments/runs/focusMAE_75/2022-03-10_14-07-57/.hydra/overrides.yaml +2 -0
- logs/experiments/runs/focusMAE_75/2022-03-10_14-07-57/csv/version_0/hparams.yaml +52 -0
- logs/experiments/runs/focusMAE_75/2022-03-10_14-07-57/csv/version_0/metrics.csv +202 -0
- logs/experiments/runs/focusMAE_75/2022-03-10_14-07-57/tensorboard/focusMAE_75/events.out.tfevents.1646921279.d007913868f5.1.0 +3 -0
- logs/experiments/runs/focusMAE_75/2022-03-10_14-07-57/tensorboard/focusMAE_75/events.out.tfevents.1646921340.d007913868f5.1.1 +3 -0
- logs/experiments/runs/focusMAE_75/2022-03-10_14-07-57/tensorboard/focusMAE_75/hparams.yaml +52 -0
- logs/experiments/runs/focusMSE_150/2022-03-10_13-11-24/.hydra/config.yaml +70 -0
- logs/experiments/runs/focusMSE_150/2022-03-10_13-11-24/.hydra/hydra.yaml +171 -0
- logs/experiments/runs/focusMSE_150/2022-03-10_13-11-24/.hydra/overrides.yaml +2 -0
- logs/experiments/runs/focusMSE_150/2022-03-10_13-11-24/csv/version_0/hparams.yaml +52 -0
- logs/experiments/runs/focusMSE_150/2022-03-10_13-11-24/csv/version_0/metrics.csv +202 -0
- logs/experiments/runs/focusMSE_150/2022-03-10_13-11-24/tensorboard/focusMSE_150/events.out.tfevents.1646917887.ea2217bd45f5.1.0 +3 -0
- logs/experiments/runs/focusMSE_150/2022-03-10_13-11-24/tensorboard/focusMSE_150/events.out.tfevents.1646918355.ea2217bd45f5.1.1 +3 -0
- logs/experiments/runs/focusMSE_150/2022-03-10_13-11-24/tensorboard/focusMSE_150/hparams.yaml +52 -0
configs/hparams_search/focusMSE_150_optuna.yaml
CHANGED
@@ -14,6 +14,9 @@ optimized_metric: "val/mae_best"
 
 name: "focusMSE_150_hyperparameter_search"
 
+model:
+  _target_: src.models.focus_module.FocusMSELitModule
+
 # here we define Optuna hyperparameter search
 # it optimizes for value returned from function with @hydra.main decorator
 # docs: https://hydra.cc/docs/next/plugins/optuna_sweeper
@@ -49,8 +52,6 @@ hydra:
       datamodule.batch_size:
         type: categorical
         choices: [64, 128]
-      model:
-        _target_: src.models.focus_module.FocusMSELitModule
       model.lr:
         type: float
         low: 0.0001
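For readers unfamiliar with this setup: the comments in the config describe how the sweep works, and the sketch below illustrates it under the assumption that the project follows the usual Hydra + Optuna pattern (the entry-point and placeholder names are not taken from this repository). The Optuna sweeper optimizes whatever float the @hydra.main-decorated function returns, so the training entry point has to return the value of optimized_metric ("val/mae_best").

import hydra
from omegaconf import DictConfig


@hydra.main(config_path="configs/", config_name="train.yaml")
def main(config: DictConfig) -> float:
    # ... instantiate datamodule, model and trainer from `config` and fit ...
    # The float returned here is the objective the Optuna sweeper minimizes;
    # with this search config it should be the logged "val/mae_best" value.
    val_mae_best = 0.0  # placeholder for the real metric
    return val_mae_best


if __name__ == "__main__":
    main()

A search would then be launched in multirun mode, for example with "python train.py -m hparams_search=focusMSE_150_optuna" (the script name is assumed from the job name train recorded in the hydra.yaml files below).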
logs/experiments/runs/focusMAE_150/2022-03-10_13-27-16/.hydra/config.yaml
ADDED
@@ -0,0 +1,70 @@
original_work_dir: ${hydra:runtime.cwd}
data_dir: ${original_work_dir}/data
print_config: true
ignore_warnings: true
train: true
test: true
seed: 12345
name: focusMAE_150
datamodule:
  _target_: src.datamodules.focus_datamodule.FocusDataModule
  data_dir: ${data_dir}/focus150
  csv_file: ${data_dir}/focus150/metadata.csv
  batch_size: 128
  train_val_test_split_percentage:
  - 0.7
  - 0.15
  - 0.15
  num_workers: 0
  pin_memory: false
model:
  _target_: src.models.focus_module.FocusLitModule
  input_size: 67500
  lin1_size: 128
  lin2_size: 256
  lin3_size: 64
  output_size: 1
  lr: 0.001
  weight_decay: 0.0005
callbacks:
  model_checkpoint:
    _target_: pytorch_lightning.callbacks.ModelCheckpoint
    monitor: val/mae
    mode: min
    save_top_k: 1
    save_last: true
    verbose: false
    dirpath: checkpoints/
    filename: epoch_{epoch:03d}
    auto_insert_metric_name: false
  early_stopping:
    _target_: pytorch_lightning.callbacks.EarlyStopping
    monitor: val/mae
    mode: min
    patience: 100
    min_delta: 0
  model_summary:
    _target_: pytorch_lightning.callbacks.RichModelSummary
    max_depth: -1
  rich_progress_bar:
    _target_: pytorch_lightning.callbacks.RichProgressBar
logger:
  csv:
    _target_: pytorch_lightning.loggers.csv_logs.CSVLogger
    save_dir: .
    name: csv/
    prefix: ''
  tensorboard:
    _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
    save_dir: tensorboard/
    name: null
    version: ${name}
    log_graph: false
    default_hp_metric: true
    prefix: ''
trainer:
  _target_: pytorch_lightning.Trainer
  gpus: 0
  min_epochs: 1
  max_epochs: 100
  resume_from_checkpoint: null
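As a rough, non-authoritative sketch of how such a config is consumed: Hydra builds each block that carries a _target_ by importing that class and passing the remaining keys as constructor arguments. Keys that interpolate ${hydra:runtime.cwd} only resolve inside a running Hydra app, so the example below sticks to the model block.

from hydra.utils import instantiate
from omegaconf import OmegaConf

cfg = OmegaConf.load(
    "logs/experiments/runs/focusMAE_150/2022-03-10_13-27-16/.hydra/config.yaml"
)

# Builds src.models.focus_module.FocusLitModule(input_size=67500, lin1_size=128, ...)
model = instantiate(cfg.model)

# cfg.datamodule and cfg.trainer work the same way, but the datamodule paths use
# ${hydra:runtime.cwd}, which is only available while a Hydra job is running.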
logs/experiments/runs/focusMAE_150/2022-03-10_13-27-16/.hydra/hydra.yaml
ADDED
@@ -0,0 +1,171 @@
1 |
+
hydra:
|
2 |
+
run:
|
3 |
+
dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
|
4 |
+
sweep:
|
5 |
+
dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
|
6 |
+
subdir: ${hydra.job.num}
|
7 |
+
launcher:
|
8 |
+
_target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
|
9 |
+
sweeper:
|
10 |
+
_target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper
|
11 |
+
max_batch_size: null
|
12 |
+
help:
|
13 |
+
app_name: ${hydra.job.name}
|
14 |
+
header: '${hydra.help.app_name} is powered by Hydra.
|
15 |
+
|
16 |
+
'
|
17 |
+
footer: 'Powered by Hydra (https://hydra.cc)
|
18 |
+
|
19 |
+
Use --hydra-help to view Hydra specific help
|
20 |
+
|
21 |
+
'
|
22 |
+
template: '${hydra.help.header}
|
23 |
+
|
24 |
+
== Configuration groups ==
|
25 |
+
|
26 |
+
Compose your configuration from those groups (group=option)
|
27 |
+
|
28 |
+
|
29 |
+
$APP_CONFIG_GROUPS
|
30 |
+
|
31 |
+
|
32 |
+
== Config ==
|
33 |
+
|
34 |
+
Override anything in the config (foo.bar=value)
|
35 |
+
|
36 |
+
|
37 |
+
$CONFIG
|
38 |
+
|
39 |
+
|
40 |
+
${hydra.help.footer}
|
41 |
+
|
42 |
+
'
|
43 |
+
hydra_help:
|
44 |
+
template: 'Hydra (${hydra.runtime.version})
|
45 |
+
|
46 |
+
See https://hydra.cc for more info.
|
47 |
+
|
48 |
+
|
49 |
+
== Flags ==
|
50 |
+
|
51 |
+
$FLAGS_HELP
|
52 |
+
|
53 |
+
|
54 |
+
== Configuration groups ==
|
55 |
+
|
56 |
+
Compose your configuration from those groups (For example, append hydra/job_logging=disabled
|
57 |
+
to command line)
|
58 |
+
|
59 |
+
|
60 |
+
$HYDRA_CONFIG_GROUPS
|
61 |
+
|
62 |
+
|
63 |
+
Use ''--cfg hydra'' to Show the Hydra config.
|
64 |
+
|
65 |
+
'
|
66 |
+
hydra_help: ???
|
67 |
+
hydra_logging:
|
68 |
+
version: 1
|
69 |
+
formatters:
|
70 |
+
colorlog:
|
71 |
+
(): colorlog.ColoredFormatter
|
72 |
+
format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
|
73 |
+
handlers:
|
74 |
+
console:
|
75 |
+
class: logging.StreamHandler
|
76 |
+
formatter: colorlog
|
77 |
+
stream: ext://sys.stdout
|
78 |
+
root:
|
79 |
+
level: INFO
|
80 |
+
handlers:
|
81 |
+
- console
|
82 |
+
disable_existing_loggers: false
|
83 |
+
job_logging:
|
84 |
+
version: 1
|
85 |
+
formatters:
|
86 |
+
simple:
|
87 |
+
format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
|
88 |
+
colorlog:
|
89 |
+
(): colorlog.ColoredFormatter
|
90 |
+
format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
|
91 |
+
- %(message)s'
|
92 |
+
log_colors:
|
93 |
+
DEBUG: purple
|
94 |
+
INFO: green
|
95 |
+
WARNING: yellow
|
96 |
+
ERROR: red
|
97 |
+
CRITICAL: red
|
98 |
+
handlers:
|
99 |
+
console:
|
100 |
+
class: logging.StreamHandler
|
101 |
+
formatter: colorlog
|
102 |
+
stream: ext://sys.stdout
|
103 |
+
file:
|
104 |
+
class: logging.FileHandler
|
105 |
+
formatter: simple
|
106 |
+
filename: ${hydra.job.name}.log
|
107 |
+
root:
|
108 |
+
level: INFO
|
109 |
+
handlers:
|
110 |
+
- console
|
111 |
+
- file
|
112 |
+
disable_existing_loggers: false
|
113 |
+
env: {}
|
114 |
+
searchpath: []
|
115 |
+
callbacks: {}
|
116 |
+
output_subdir: .hydra
|
117 |
+
overrides:
|
118 |
+
hydra: []
|
119 |
+
task:
|
120 |
+
- trainer.gpus=0
|
121 |
+
- experiment=focusMAE_150
|
122 |
+
job:
|
123 |
+
name: train
|
124 |
+
override_dirname: experiment=focusMAE_150,trainer.gpus=0
|
125 |
+
id: ???
|
126 |
+
num: ???
|
127 |
+
config_name: train.yaml
|
128 |
+
env_set: {}
|
129 |
+
env_copy: []
|
130 |
+
config:
|
131 |
+
override_dirname:
|
132 |
+
kv_sep: '='
|
133 |
+
item_sep: ','
|
134 |
+
exclude_keys: []
|
135 |
+
runtime:
|
136 |
+
version: 1.1.1
|
137 |
+
cwd: /usr/src/app
|
138 |
+
config_sources:
|
139 |
+
- path: hydra.conf
|
140 |
+
schema: pkg
|
141 |
+
provider: hydra
|
142 |
+
- path: /usr/src/app/configs
|
143 |
+
schema: file
|
144 |
+
provider: main
|
145 |
+
- path: hydra_plugins.hydra_colorlog.conf
|
146 |
+
schema: pkg
|
147 |
+
provider: hydra-colorlog
|
148 |
+
- path: ''
|
149 |
+
schema: structured
|
150 |
+
provider: schema
|
151 |
+
choices:
|
152 |
+
local: default.yaml
|
153 |
+
hparams_search: null
|
154 |
+
debug: null
|
155 |
+
experiment: focusMAE_150
|
156 |
+
log_dir: default.yaml
|
157 |
+
trainer: default.yaml
|
158 |
+
logger: many_loggers
|
159 |
+
callbacks: default.yaml
|
160 |
+
model: focus150.yaml
|
161 |
+
datamodule: focus150.yaml
|
162 |
+
hydra/env: default
|
163 |
+
hydra/callbacks: null
|
164 |
+
hydra/job_logging: colorlog
|
165 |
+
hydra/hydra_logging: colorlog
|
166 |
+
hydra/hydra_help: default
|
167 |
+
hydra/help: default
|
168 |
+
hydra/sweeper: basic
|
169 |
+
hydra/launcher: basic
|
170 |
+
hydra/output: default
|
171 |
+
verbose: false
|
logs/experiments/runs/focusMAE_150/2022-03-10_13-27-16/.hydra/overrides.yaml
ADDED
@@ -0,0 +1,2 @@
- trainer.gpus=0
- experiment=focusMAE_150
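The overrides file simply records the command-line overrides the run was launched with; together with the job name train in hydra.yaml this corresponds to an invocation along the lines of "python train.py trainer.gpus=0 experiment=focusMAE_150" (the exact script name is an assumption, Hydra only logs the overrides themselves).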
logs/experiments/runs/focusMAE_150/2022-03-10_13-27-16/csv/version_0/hparams.yaml
ADDED
@@ -0,0 +1,52 @@
1 |
+
trainer:
|
2 |
+
_target_: pytorch_lightning.Trainer
|
3 |
+
gpus: 0
|
4 |
+
min_epochs: 1
|
5 |
+
max_epochs: 100
|
6 |
+
resume_from_checkpoint: null
|
7 |
+
model:
|
8 |
+
_target_: src.models.focus_module.FocusLitModule
|
9 |
+
input_size: 67500
|
10 |
+
lin1_size: 128
|
11 |
+
lin2_size: 256
|
12 |
+
lin3_size: 64
|
13 |
+
output_size: 1
|
14 |
+
lr: 0.001
|
15 |
+
weight_decay: 0.0005
|
16 |
+
datamodule:
|
17 |
+
_target_: src.datamodules.focus_datamodule.FocusDataModule
|
18 |
+
data_dir: /usr/src/app/data/focus150
|
19 |
+
csv_file: /usr/src/app/data/focus150/metadata.csv
|
20 |
+
batch_size: 128
|
21 |
+
train_val_test_split_percentage:
|
22 |
+
- 0.7
|
23 |
+
- 0.15
|
24 |
+
- 0.15
|
25 |
+
num_workers: 0
|
26 |
+
pin_memory: false
|
27 |
+
seed: 12345
|
28 |
+
callbacks:
|
29 |
+
model_checkpoint:
|
30 |
+
_target_: pytorch_lightning.callbacks.ModelCheckpoint
|
31 |
+
monitor: val/mae
|
32 |
+
mode: min
|
33 |
+
save_top_k: 1
|
34 |
+
save_last: true
|
35 |
+
verbose: false
|
36 |
+
dirpath: checkpoints/
|
37 |
+
filename: epoch_{epoch:03d}
|
38 |
+
auto_insert_metric_name: false
|
39 |
+
early_stopping:
|
40 |
+
_target_: pytorch_lightning.callbacks.EarlyStopping
|
41 |
+
monitor: val/mae
|
42 |
+
mode: min
|
43 |
+
patience: 100
|
44 |
+
min_delta: 0
|
45 |
+
model_summary:
|
46 |
+
_target_: pytorch_lightning.callbacks.RichModelSummary
|
47 |
+
max_depth: -1
|
48 |
+
rich_progress_bar:
|
49 |
+
_target_: pytorch_lightning.callbacks.RichProgressBar
|
50 |
+
model/params/total: 8690561
|
51 |
+
model/params/trainable: 8690561
|
52 |
+
model/params/non_trainable: 0
|
logs/experiments/runs/focusMAE_150/2022-03-10_13-27-16/csv/version_0/metrics.csv
ADDED
@@ -0,0 +1,202 @@
1 |
+
val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae
|
2 |
+
1.70963454246521,1.70963454246521,1.70963454246521,0,13,,,,
|
3 |
+
,,,0,13,2.007235288619995,2.007235288619995,,
|
4 |
+
1.6157541275024414,1.6157541275024414,1.6157541275024414,1,27,,,,
|
5 |
+
,,,1,27,1.7426551580429077,1.7426551580429077,,
|
6 |
+
1.4816635847091675,1.4816635847091675,1.4816635847091675,2,41,,,,
|
7 |
+
,,,2,41,1.593840479850769,1.593840479850769,,
|
8 |
+
1.4852845668792725,1.4852845668792725,1.4816635847091675,3,55,,,,
|
9 |
+
,,,3,55,1.4891314506530762,1.4891314506530762,,
|
10 |
+
1.4809471368789673,1.4809471368789673,1.4809471368789673,4,69,,,,
|
11 |
+
,,,4,69,1.4238530397415161,1.4238530397415161,,
|
12 |
+
1.5266541242599487,1.5266541242599487,1.4809471368789673,5,83,,,,
|
13 |
+
,,,5,83,1.37730872631073,1.37730872631073,,
|
14 |
+
1.438697099685669,1.438697099685669,1.438697099685669,6,97,,,,
|
15 |
+
,,,6,97,1.3616222143173218,1.3616222143173218,,
|
16 |
+
1.5415898561477661,1.5415898561477661,1.438697099685669,7,111,,,,
|
17 |
+
,,,7,111,1.2936798334121704,1.2936798334121704,,
|
18 |
+
1.5531002283096313,1.5531002283096313,1.438697099685669,8,125,,,,
|
19 |
+
,,,8,125,1.240011215209961,1.240011215209961,,
|
20 |
+
1.4295929670333862,1.4295929670333862,1.4295929670333862,9,139,,,,
|
21 |
+
,,,9,139,1.2200006246566772,1.2200006246566772,,
|
22 |
+
1.44516921043396,1.44516921043396,1.4295929670333862,10,153,,,,
|
23 |
+
,,,10,153,1.1742688417434692,1.1742688417434692,,
|
24 |
+
1.6427208185195923,1.6427208185195923,1.4295929670333862,11,167,,,,
|
25 |
+
,,,11,167,1.1714191436767578,1.1714191436767578,,
|
26 |
+
1.4477492570877075,1.4477492570877075,1.4295929670333862,12,181,,,,
|
27 |
+
,,,12,181,1.1195671558380127,1.1195671558380127,,
|
28 |
+
1.490426778793335,1.490426778793335,1.4295929670333862,13,195,,,,
|
29 |
+
,,,13,195,1.0833103656768799,1.0833103656768799,,
|
30 |
+
1.9021984338760376,1.9021984338760376,1.4295929670333862,14,209,,,,
|
31 |
+
,,,14,209,1.0909991264343262,1.0909991264343262,,
|
32 |
+
1.595913052558899,1.595913052558899,1.4295929670333862,15,223,,,,
|
33 |
+
,,,15,223,1.043480634689331,1.043480634689331,,
|
34 |
+
1.3689438104629517,1.3689438104629517,1.3689438104629517,16,237,,,,
|
35 |
+
,,,16,237,1.0076253414154053,1.0076253414154053,,
|
36 |
+
1.5466759204864502,1.5466759204864502,1.3689438104629517,17,251,,,,
|
37 |
+
,,,17,251,0.990264356136322,0.990264356136322,,
|
38 |
+
1.581822156906128,1.581822156906128,1.3689438104629517,18,265,,,,
|
39 |
+
,,,18,265,1.0111500024795532,1.0111500024795532,,
|
40 |
+
1.2208435535430908,1.2208435535430908,1.2208435535430908,19,279,,,,
|
41 |
+
,,,19,279,0.9487050771713257,0.9487050771713257,,
|
42 |
+
2.6602675914764404,2.6602675914764404,1.2208435535430908,20,293,,,,
|
43 |
+
,,,20,293,0.907093346118927,0.907093346118927,,
|
44 |
+
1.520420789718628,1.520420789718628,1.2208435535430908,21,307,,,,
|
45 |
+
,,,21,307,0.9430396556854248,0.9430396556854248,,
|
46 |
+
1.2865092754364014,1.2865092754364014,1.2208435535430908,22,321,,,,
|
47 |
+
,,,22,321,0.8749445080757141,0.8749445080757141,,
|
48 |
+
1.143312692642212,1.143312692642212,1.143312692642212,23,335,,,,
|
49 |
+
,,,23,335,0.8877695798873901,0.8877695798873901,,
|
50 |
+
1.333938717842102,1.333938717842102,1.143312692642212,24,349,,,,
|
51 |
+
,,,24,349,0.8808080554008484,0.8808080554008484,,
|
52 |
+
1.8297902345657349,1.8297902345657349,1.143312692642212,25,363,,,,
|
53 |
+
,,,25,363,0.8456642627716064,0.8456642627716064,,
|
54 |
+
2.77978777885437,2.77978777885437,1.143312692642212,26,377,,,,
|
55 |
+
,,,26,377,0.8254135251045227,0.8254135251045227,,
|
56 |
+
1.6757293939590454,1.6757293939590454,1.143312692642212,27,391,,,,
|
57 |
+
,,,27,391,0.8141047954559326,0.8141047954559326,,
|
58 |
+
1.4983477592468262,1.4983477592468262,1.143312692642212,28,405,,,,
|
59 |
+
,,,28,405,0.8685102462768555,0.8685102462768555,,
|
60 |
+
1.5529721975326538,1.5529721975326538,1.143312692642212,29,419,,,,
|
61 |
+
,,,29,419,0.8511419296264648,0.8511419296264648,,
|
62 |
+
1.3197510242462158,1.3197510242462158,1.143312692642212,30,433,,,,
|
63 |
+
,,,30,433,0.761264979839325,0.761264979839325,,
|
64 |
+
1.2458007335662842,1.2458007335662842,1.143312692642212,31,447,,,,
|
65 |
+
,,,31,447,0.7972810864448547,0.7972810864448547,,
|
66 |
+
1.5999006032943726,1.5999006032943726,1.143312692642212,32,461,,,,
|
67 |
+
,,,32,461,0.8531007170677185,0.8531007170677185,,
|
68 |
+
1.3021399974822998,1.3021399974822998,1.143312692642212,33,475,,,,
|
69 |
+
,,,33,475,0.8291800618171692,0.8291800618171692,,
|
70 |
+
1.3474700450897217,1.3474700450897217,1.143312692642212,34,489,,,,
|
71 |
+
,,,34,489,0.7812468409538269,0.7812468409538269,,
|
72 |
+
1.3034154176712036,1.3034154176712036,1.143312692642212,35,503,,,,
|
73 |
+
,,,35,503,0.7328928112983704,0.7328928112983704,,
|
74 |
+
1.952927589416504,1.952927589416504,1.143312692642212,36,517,,,,
|
75 |
+
,,,36,517,0.7546676397323608,0.7546676397323608,,
|
76 |
+
1.555153250694275,1.555153250694275,1.143312692642212,37,531,,,,
|
77 |
+
,,,37,531,0.7127090692520142,0.7127090692520142,,
|
78 |
+
1.0832419395446777,1.0832419395446777,1.0832419395446777,38,545,,,,
|
79 |
+
,,,38,545,0.8288543820381165,0.8288543820381165,,
|
80 |
+
2.4613094329833984,2.4613094329833984,1.0832419395446777,39,559,,,,
|
81 |
+
,,,39,559,0.7924514412879944,0.7924514412879944,,
|
82 |
+
1.3246917724609375,1.3246917724609375,1.0832419395446777,40,573,,,,
|
83 |
+
,,,40,573,0.7566817402839661,0.7566817402839661,,
|
84 |
+
1.2893974781036377,1.2893974781036377,1.0832419395446777,41,587,,,,
|
85 |
+
,,,41,587,0.6893467307090759,0.6893467307090759,,
|
86 |
+
1.1597803831100464,1.1597803831100464,1.0832419395446777,42,601,,,,
|
87 |
+
,,,42,601,0.6807125806808472,0.6807125806808472,,
|
88 |
+
1.1498645544052124,1.1498645544052124,1.0832419395446777,43,615,,,,
|
89 |
+
,,,43,615,0.694456160068512,0.694456160068512,,
|
90 |
+
1.55463445186615,1.55463445186615,1.0832419395446777,44,629,,,,
|
91 |
+
,,,44,629,0.6738094687461853,0.6738094687461853,,
|
92 |
+
1.5154918432235718,1.5154918432235718,1.0832419395446777,45,643,,,,
|
93 |
+
,,,45,643,0.684533953666687,0.684533953666687,,
|
94 |
+
1.0856791734695435,1.0856791734695435,1.0832419395446777,46,657,,,,
|
95 |
+
,,,46,657,0.6889572739601135,0.6889572739601135,,
|
96 |
+
1.5208189487457275,1.5208189487457275,1.0832419395446777,47,671,,,,
|
97 |
+
,,,47,671,0.6947969794273376,0.6947969794273376,,
|
98 |
+
1.4656461477279663,1.4656461477279663,1.0832419395446777,48,685,,,,
|
99 |
+
,,,48,685,0.6566392183303833,0.6566392183303833,,
|
100 |
+
2.202251434326172,2.202251434326172,1.0832419395446777,49,699,,,,
|
101 |
+
,,,49,699,0.6721158027648926,0.6721158027648926,,
|
102 |
+
1.3756095170974731,1.3756095170974731,1.0832419395446777,50,713,,,,
|
103 |
+
,,,50,713,0.6253563761711121,0.6253563761711121,,
|
104 |
+
2.442418336868286,2.442418336868286,1.0832419395446777,51,727,,,,
|
105 |
+
,,,51,727,0.7150925397872925,0.7150925397872925,,
|
106 |
+
1.4418970346450806,1.4418970346450806,1.0832419395446777,52,741,,,,
|
107 |
+
,,,52,741,0.7422524094581604,0.7422524094581604,,
|
108 |
+
1.7076001167297363,1.7076001167297363,1.0832419395446777,53,755,,,,
|
109 |
+
,,,53,755,0.7061676979064941,0.7061676979064941,,
|
110 |
+
1.399764060974121,1.399764060974121,1.0832419395446777,54,769,,,,
|
111 |
+
,,,54,769,0.6557216644287109,0.6557216644287109,,
|
112 |
+
1.1570920944213867,1.1570920944213867,1.0832419395446777,55,783,,,,
|
113 |
+
,,,55,783,0.7129756808280945,0.7129756808280945,,
|
114 |
+
1.3638644218444824,1.3638644218444824,1.0832419395446777,56,797,,,,
|
115 |
+
,,,56,797,0.6134735941886902,0.6134735941886902,,
|
116 |
+
1.9775629043579102,1.9775629043579102,1.0832419395446777,57,811,,,,
|
117 |
+
,,,57,811,0.670224666595459,0.670224666595459,,
|
118 |
+
1.3626362085342407,1.3626362085342407,1.0832419395446777,58,825,,,,
|
119 |
+
,,,58,825,0.6249423623085022,0.6249423623085022,,
|
120 |
+
1.2672889232635498,1.2672889232635498,1.0832419395446777,59,839,,,,
|
121 |
+
,,,59,839,0.6824653148651123,0.6824653148651123,,
|
122 |
+
1.2925689220428467,1.2925689220428467,1.0832419395446777,60,853,,,,
|
123 |
+
,,,60,853,0.6416271328926086,0.6416271328926086,,
|
124 |
+
1.6543433666229248,1.6543433666229248,1.0832419395446777,61,867,,,,
|
125 |
+
,,,61,867,0.5988612771034241,0.5988612771034241,,
|
126 |
+
1.5130512714385986,1.5130512714385986,1.0832419395446777,62,881,,,,
|
127 |
+
,,,62,881,0.6541032791137695,0.6541032791137695,,
|
128 |
+
1.1520020961761475,1.1520020961761475,1.0832419395446777,63,895,,,,
|
129 |
+
,,,63,895,0.6345020532608032,0.6345020532608032,,
|
130 |
+
1.9048707485198975,1.9048707485198975,1.0832419395446777,64,909,,,,
|
131 |
+
,,,64,909,0.636435329914093,0.636435329914093,,
|
132 |
+
1.581538438796997,1.581538438796997,1.0832419395446777,65,923,,,,
|
133 |
+
,,,65,923,0.622707724571228,0.622707724571228,,
|
134 |
+
1.6562321186065674,1.6562321186065674,1.0832419395446777,66,937,,,,
|
135 |
+
,,,66,937,0.6414665579795837,0.6414665579795837,,
|
136 |
+
1.4086389541625977,1.4086389541625977,1.0832419395446777,67,951,,,,
|
137 |
+
,,,67,951,0.6318880915641785,0.6318880915641785,,
|
138 |
+
1.2168550491333008,1.2168550491333008,1.0832419395446777,68,965,,,,
|
139 |
+
,,,68,965,0.6464003920555115,0.6464003920555115,,
|
140 |
+
2.1686744689941406,2.1686744689941406,1.0832419395446777,69,979,,,,
|
141 |
+
,,,69,979,0.6119938492774963,0.6119938492774963,,
|
142 |
+
2.084082841873169,2.084082841873169,1.0832419395446777,70,993,,,,
|
143 |
+
,,,70,993,0.6001316905021667,0.6001316905021667,,
|
144 |
+
1.9719725847244263,1.9719725847244263,1.0832419395446777,71,1007,,,,
|
145 |
+
,,,71,1007,0.5742686986923218,0.5742686986923218,,
|
146 |
+
3.2189762592315674,3.2189762592315674,1.0832419395446777,72,1021,,,,
|
147 |
+
,,,72,1021,0.5388190150260925,0.5388190150260925,,
|
148 |
+
0.8793362975120544,0.8793362975120544,0.8793362975120544,73,1035,,,,
|
149 |
+
,,,73,1035,0.5448437929153442,0.5448437929153442,,
|
150 |
+
1.2742916345596313,1.2742916345596313,0.8793362975120544,74,1049,,,,
|
151 |
+
,,,74,1049,0.5663913488388062,0.5663913488388062,,
|
152 |
+
1.617637276649475,1.617637276649475,0.8793362975120544,75,1063,,,,
|
153 |
+
,,,75,1063,0.6346433162689209,0.6346433162689209,,
|
154 |
+
1.2505650520324707,1.2505650520324707,0.8793362975120544,76,1077,,,,
|
155 |
+
,,,76,1077,0.5933310985565186,0.5933310985565186,,
|
156 |
+
1.376041293144226,1.376041293144226,0.8793362975120544,77,1091,,,,
|
157 |
+
,,,77,1091,0.5717899203300476,0.5717899203300476,,
|
158 |
+
1.656374216079712,1.656374216079712,0.8793362975120544,78,1105,,,,
|
159 |
+
,,,78,1105,0.5698116421699524,0.5698116421699524,,
|
160 |
+
1.9593688249588013,1.9593688249588013,0.8793362975120544,79,1119,,,,
|
161 |
+
,,,79,1119,0.6212266683578491,0.6212266683578491,,
|
162 |
+
1.4396153688430786,1.4396153688430786,0.8793362975120544,80,1133,,,,
|
163 |
+
,,,80,1133,0.6101555228233337,0.6101555228233337,,
|
164 |
+
1.7409688234329224,1.7409688234329224,0.8793362975120544,81,1147,,,,
|
165 |
+
,,,81,1147,0.6487681269645691,0.6487681269645691,,
|
166 |
+
1.89703369140625,1.89703369140625,0.8793362975120544,82,1161,,,,
|
167 |
+
,,,82,1161,0.609033465385437,0.609033465385437,,
|
168 |
+
2.194291591644287,2.194291591644287,0.8793362975120544,83,1175,,,,
|
169 |
+
,,,83,1175,0.6064599752426147,0.6064599752426147,,
|
170 |
+
1.2685073614120483,1.2685073614120483,0.8793362975120544,84,1189,,,,
|
171 |
+
,,,84,1189,0.5855345129966736,0.5855345129966736,,
|
172 |
+
1.1140916347503662,1.1140916347503662,0.8793362975120544,85,1203,,,,
|
173 |
+
,,,85,1203,0.5416260361671448,0.5416260361671448,,
|
174 |
+
2.2013204097747803,2.2013204097747803,0.8793362975120544,86,1217,,,,
|
175 |
+
,,,86,1217,0.5250001549720764,0.5250001549720764,,
|
176 |
+
1.21493399143219,1.21493399143219,0.8793362975120544,87,1231,,,,
|
177 |
+
,,,87,1231,0.6097092032432556,0.6097092032432556,,
|
178 |
+
1.5442334413528442,1.5442334413528442,0.8793362975120544,88,1245,,,,
|
179 |
+
,,,88,1245,0.6045495867729187,0.6045495867729187,,
|
180 |
+
1.273796796798706,1.273796796798706,0.8793362975120544,89,1259,,,,
|
181 |
+
,,,89,1259,0.6249043345451355,0.6249043345451355,,
|
182 |
+
2.43229603767395,2.43229603767395,0.8793362975120544,90,1273,,,,
|
183 |
+
,,,90,1273,0.6364671587944031,0.6364671587944031,,
|
184 |
+
1.0207698345184326,1.0207698345184326,0.8793362975120544,91,1287,,,,
|
185 |
+
,,,91,1287,0.5632449388504028,0.5632449388504028,,
|
186 |
+
1.738028645515442,1.738028645515442,0.8793362975120544,92,1301,,,,
|
187 |
+
,,,92,1301,0.5265096426010132,0.5265096426010132,,
|
188 |
+
1.4397560358047485,1.4397560358047485,0.8793362975120544,93,1315,,,,
|
189 |
+
,,,93,1315,0.5585227608680725,0.5585227608680725,,
|
190 |
+
2.3328046798706055,2.3328046798706055,0.8793362975120544,94,1329,,,,
|
191 |
+
,,,94,1329,0.5990500450134277,0.5990500450134277,,
|
192 |
+
1.0311558246612549,1.0311558246612549,0.8793362975120544,95,1343,,,,
|
193 |
+
,,,95,1343,0.5296130180358887,0.5296130180358887,,
|
194 |
+
2.6496644020080566,2.6496644020080566,0.8793362975120544,96,1357,,,,
|
195 |
+
,,,96,1357,0.5015162229537964,0.5015162229537964,,
|
196 |
+
1.847678780555725,1.847678780555725,0.8793362975120544,97,1371,,,,
|
197 |
+
,,,97,1371,0.5349109768867493,0.5349109768867493,,
|
198 |
+
2.1562821865081787,2.1562821865081787,0.8793362975120544,98,1385,,,,
|
199 |
+
,,,98,1385,0.5484206080436707,0.5484206080436707,,
|
200 |
+
1.597854733467102,1.597854733467102,0.8793362975120544,99,1399,,,,
|
201 |
+
,,,99,1399,0.5277596712112427,0.5277596712112427,,
|
202 |
+
,,,74,1036,,,0.8433762192726135,0.8433762192726135
|
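The CSV above comes from PyTorch Lightning's CSVLogger; validation, training and test metrics are written on separate rows, so NaN handling is needed when reading it. A minimal sketch, assuming pandas is available and using the column names from the header row:

import pandas as pd

metrics = pd.read_csv(
    "logs/experiments/runs/focusMAE_150/2022-03-10_13-27-16/csv/version_0/metrics.csv"
)

best_val_mae = metrics["val/mae"].min()                  # best validation MAE over all epochs
final_test_mae = metrics["test/mae"].dropna().iloc[-1]   # test MAE, logged once at the end
print(f"best val/mae: {best_val_mae:.4f}, test/mae: {final_test_mae:.4f}")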
logs/experiments/runs/focusMAE_150/2022-03-10_13-27-16/tensorboard/focusMAE_150/events.out.tfevents.1646918838.57666f6070c3.1.0
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fa1f29a3299ef49b1f8346cc162d7305ee13b0123dc31a3f022f9ae0de60b8da
size 36394
logs/experiments/runs/focusMAE_150/2022-03-10_13-27-16/tensorboard/focusMAE_150/events.out.tfevents.1646919273.57666f6070c3.1.1
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c90146dcee4ab8b557e67fbe2813230e7c23e11f70254bea583781776100efa0
size 179
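Both tfevents files are tracked with Git LFS, so the commit only adds the pointer files shown above (spec version, object id and size in bytes); the actual TensorBoard event data is fetched separately, e.g. with git lfs pull.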
logs/experiments/runs/focusMAE_150/2022-03-10_13-27-16/tensorboard/focusMAE_150/hparams.yaml
ADDED
@@ -0,0 +1,52 @@
1 |
+
trainer:
|
2 |
+
_target_: pytorch_lightning.Trainer
|
3 |
+
gpus: 0
|
4 |
+
min_epochs: 1
|
5 |
+
max_epochs: 100
|
6 |
+
resume_from_checkpoint: null
|
7 |
+
model:
|
8 |
+
_target_: src.models.focus_module.FocusLitModule
|
9 |
+
input_size: 67500
|
10 |
+
lin1_size: 128
|
11 |
+
lin2_size: 256
|
12 |
+
lin3_size: 64
|
13 |
+
output_size: 1
|
14 |
+
lr: 0.001
|
15 |
+
weight_decay: 0.0005
|
16 |
+
datamodule:
|
17 |
+
_target_: src.datamodules.focus_datamodule.FocusDataModule
|
18 |
+
data_dir: /usr/src/app/data/focus150
|
19 |
+
csv_file: /usr/src/app/data/focus150/metadata.csv
|
20 |
+
batch_size: 128
|
21 |
+
train_val_test_split_percentage:
|
22 |
+
- 0.7
|
23 |
+
- 0.15
|
24 |
+
- 0.15
|
25 |
+
num_workers: 0
|
26 |
+
pin_memory: false
|
27 |
+
seed: 12345
|
28 |
+
callbacks:
|
29 |
+
model_checkpoint:
|
30 |
+
_target_: pytorch_lightning.callbacks.ModelCheckpoint
|
31 |
+
monitor: val/mae
|
32 |
+
mode: min
|
33 |
+
save_top_k: 1
|
34 |
+
save_last: true
|
35 |
+
verbose: false
|
36 |
+
dirpath: checkpoints/
|
37 |
+
filename: epoch_{epoch:03d}
|
38 |
+
auto_insert_metric_name: false
|
39 |
+
early_stopping:
|
40 |
+
_target_: pytorch_lightning.callbacks.EarlyStopping
|
41 |
+
monitor: val/mae
|
42 |
+
mode: min
|
43 |
+
patience: 100
|
44 |
+
min_delta: 0
|
45 |
+
model_summary:
|
46 |
+
_target_: pytorch_lightning.callbacks.RichModelSummary
|
47 |
+
max_depth: -1
|
48 |
+
rich_progress_bar:
|
49 |
+
_target_: pytorch_lightning.callbacks.RichProgressBar
|
50 |
+
model/params/total: 8690561
|
51 |
+
model/params/trainable: 8690561
|
52 |
+
model/params/non_trainable: 0
|
logs/experiments/runs/focusMAE_75/2022-03-10_14-07-57/.hydra/config.yaml
ADDED
@@ -0,0 +1,70 @@
original_work_dir: ${hydra:runtime.cwd}
data_dir: ${original_work_dir}/data
print_config: true
ignore_warnings: true
train: true
test: true
seed: 12345
name: focusMAE_75
datamodule:
  _target_: src.datamodules.focus_datamodule.FocusDataModule
  data_dir: ${data_dir}/focus
  csv_file: ${data_dir}/focus/metadata.csv
  batch_size: 128
  train_val_test_split_percentage:
  - 0.7
  - 0.15
  - 0.15
  num_workers: 0
  pin_memory: false
model:
  _target_: src.models.focus_module.FocusLitModule
  input_size: 16875
  lin1_size: 128
  lin2_size: 256
  lin3_size: 64
  output_size: 1
  lr: 0.001
  weight_decay: 0.0005
callbacks:
  model_checkpoint:
    _target_: pytorch_lightning.callbacks.ModelCheckpoint
    monitor: val/mae
    mode: min
    save_top_k: 1
    save_last: true
    verbose: false
    dirpath: checkpoints/
    filename: epoch_{epoch:03d}
    auto_insert_metric_name: false
  early_stopping:
    _target_: pytorch_lightning.callbacks.EarlyStopping
    monitor: val/mae
    mode: min
    patience: 100
    min_delta: 0
  model_summary:
    _target_: pytorch_lightning.callbacks.RichModelSummary
    max_depth: -1
  rich_progress_bar:
    _target_: pytorch_lightning.callbacks.RichProgressBar
logger:
  csv:
    _target_: pytorch_lightning.loggers.csv_logs.CSVLogger
    save_dir: .
    name: csv/
    prefix: ''
  tensorboard:
    _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
    save_dir: tensorboard/
    name: null
    version: ${name}
    log_graph: false
    default_hp_metric: true
    prefix: ''
trainer:
  _target_: pytorch_lightning.Trainer
  gpus: 0
  min_epochs: 1
  max_epochs: 100
  resume_from_checkpoint: null
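Compared with the focusMAE_150 run above, this configuration points the datamodule at ${data_dir}/focus and reduces the model's input_size from 67500 to 16875, which is consistent with flattened RGB inputs of 150x150x3 = 67500 versus 75x75x3 = 16875 values (the crop-size reading is an inference from the run names, not something stated in the logs).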
logs/experiments/runs/focusMAE_75/2022-03-10_14-07-57/.hydra/hydra.yaml
ADDED
@@ -0,0 +1,171 @@
1 |
+
hydra:
|
2 |
+
run:
|
3 |
+
dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
|
4 |
+
sweep:
|
5 |
+
dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
|
6 |
+
subdir: ${hydra.job.num}
|
7 |
+
launcher:
|
8 |
+
_target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
|
9 |
+
sweeper:
|
10 |
+
_target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper
|
11 |
+
max_batch_size: null
|
12 |
+
help:
|
13 |
+
app_name: ${hydra.job.name}
|
14 |
+
header: '${hydra.help.app_name} is powered by Hydra.
|
15 |
+
|
16 |
+
'
|
17 |
+
footer: 'Powered by Hydra (https://hydra.cc)
|
18 |
+
|
19 |
+
Use --hydra-help to view Hydra specific help
|
20 |
+
|
21 |
+
'
|
22 |
+
template: '${hydra.help.header}
|
23 |
+
|
24 |
+
== Configuration groups ==
|
25 |
+
|
26 |
+
Compose your configuration from those groups (group=option)
|
27 |
+
|
28 |
+
|
29 |
+
$APP_CONFIG_GROUPS
|
30 |
+
|
31 |
+
|
32 |
+
== Config ==
|
33 |
+
|
34 |
+
Override anything in the config (foo.bar=value)
|
35 |
+
|
36 |
+
|
37 |
+
$CONFIG
|
38 |
+
|
39 |
+
|
40 |
+
${hydra.help.footer}
|
41 |
+
|
42 |
+
'
|
43 |
+
hydra_help:
|
44 |
+
template: 'Hydra (${hydra.runtime.version})
|
45 |
+
|
46 |
+
See https://hydra.cc for more info.
|
47 |
+
|
48 |
+
|
49 |
+
== Flags ==
|
50 |
+
|
51 |
+
$FLAGS_HELP
|
52 |
+
|
53 |
+
|
54 |
+
== Configuration groups ==
|
55 |
+
|
56 |
+
Compose your configuration from those groups (For example, append hydra/job_logging=disabled
|
57 |
+
to command line)
|
58 |
+
|
59 |
+
|
60 |
+
$HYDRA_CONFIG_GROUPS
|
61 |
+
|
62 |
+
|
63 |
+
Use ''--cfg hydra'' to Show the Hydra config.
|
64 |
+
|
65 |
+
'
|
66 |
+
hydra_help: ???
|
67 |
+
hydra_logging:
|
68 |
+
version: 1
|
69 |
+
formatters:
|
70 |
+
colorlog:
|
71 |
+
(): colorlog.ColoredFormatter
|
72 |
+
format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
|
73 |
+
handlers:
|
74 |
+
console:
|
75 |
+
class: logging.StreamHandler
|
76 |
+
formatter: colorlog
|
77 |
+
stream: ext://sys.stdout
|
78 |
+
root:
|
79 |
+
level: INFO
|
80 |
+
handlers:
|
81 |
+
- console
|
82 |
+
disable_existing_loggers: false
|
83 |
+
job_logging:
|
84 |
+
version: 1
|
85 |
+
formatters:
|
86 |
+
simple:
|
87 |
+
format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
|
88 |
+
colorlog:
|
89 |
+
(): colorlog.ColoredFormatter
|
90 |
+
format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
|
91 |
+
- %(message)s'
|
92 |
+
log_colors:
|
93 |
+
DEBUG: purple
|
94 |
+
INFO: green
|
95 |
+
WARNING: yellow
|
96 |
+
ERROR: red
|
97 |
+
CRITICAL: red
|
98 |
+
handlers:
|
99 |
+
console:
|
100 |
+
class: logging.StreamHandler
|
101 |
+
formatter: colorlog
|
102 |
+
stream: ext://sys.stdout
|
103 |
+
file:
|
104 |
+
class: logging.FileHandler
|
105 |
+
formatter: simple
|
106 |
+
filename: ${hydra.job.name}.log
|
107 |
+
root:
|
108 |
+
level: INFO
|
109 |
+
handlers:
|
110 |
+
- console
|
111 |
+
- file
|
112 |
+
disable_existing_loggers: false
|
113 |
+
env: {}
|
114 |
+
searchpath: []
|
115 |
+
callbacks: {}
|
116 |
+
output_subdir: .hydra
|
117 |
+
overrides:
|
118 |
+
hydra: []
|
119 |
+
task:
|
120 |
+
- trainer.gpus=0
|
121 |
+
- experiment=focusMAE_75
|
122 |
+
job:
|
123 |
+
name: train
|
124 |
+
override_dirname: experiment=focusMAE_75,trainer.gpus=0
|
125 |
+
id: ???
|
126 |
+
num: ???
|
127 |
+
config_name: train.yaml
|
128 |
+
env_set: {}
|
129 |
+
env_copy: []
|
130 |
+
config:
|
131 |
+
override_dirname:
|
132 |
+
kv_sep: '='
|
133 |
+
item_sep: ','
|
134 |
+
exclude_keys: []
|
135 |
+
runtime:
|
136 |
+
version: 1.1.1
|
137 |
+
cwd: /usr/src/app
|
138 |
+
config_sources:
|
139 |
+
- path: hydra.conf
|
140 |
+
schema: pkg
|
141 |
+
provider: hydra
|
142 |
+
- path: /usr/src/app/configs
|
143 |
+
schema: file
|
144 |
+
provider: main
|
145 |
+
- path: hydra_plugins.hydra_colorlog.conf
|
146 |
+
schema: pkg
|
147 |
+
provider: hydra-colorlog
|
148 |
+
- path: ''
|
149 |
+
schema: structured
|
150 |
+
provider: schema
|
151 |
+
choices:
|
152 |
+
local: default.yaml
|
153 |
+
hparams_search: null
|
154 |
+
debug: null
|
155 |
+
experiment: focusMAE_75
|
156 |
+
log_dir: default.yaml
|
157 |
+
trainer: default.yaml
|
158 |
+
logger: many_loggers
|
159 |
+
callbacks: default.yaml
|
160 |
+
model: focus.yaml
|
161 |
+
datamodule: focus.yaml
|
162 |
+
hydra/env: default
|
163 |
+
hydra/callbacks: null
|
164 |
+
hydra/job_logging: colorlog
|
165 |
+
hydra/hydra_logging: colorlog
|
166 |
+
hydra/hydra_help: default
|
167 |
+
hydra/help: default
|
168 |
+
hydra/sweeper: basic
|
169 |
+
hydra/launcher: basic
|
170 |
+
hydra/output: default
|
171 |
+
verbose: false
|
logs/experiments/runs/focusMAE_75/2022-03-10_14-07-57/.hydra/overrides.yaml
ADDED
@@ -0,0 +1,2 @@
- trainer.gpus=0
- experiment=focusMAE_75
logs/experiments/runs/focusMAE_75/2022-03-10_14-07-57/csv/version_0/hparams.yaml
ADDED
@@ -0,0 +1,52 @@
1 |
+
trainer:
|
2 |
+
_target_: pytorch_lightning.Trainer
|
3 |
+
gpus: 0
|
4 |
+
min_epochs: 1
|
5 |
+
max_epochs: 100
|
6 |
+
resume_from_checkpoint: null
|
7 |
+
model:
|
8 |
+
_target_: src.models.focus_module.FocusLitModule
|
9 |
+
input_size: 16875
|
10 |
+
lin1_size: 128
|
11 |
+
lin2_size: 256
|
12 |
+
lin3_size: 64
|
13 |
+
output_size: 1
|
14 |
+
lr: 0.001
|
15 |
+
weight_decay: 0.0005
|
16 |
+
datamodule:
|
17 |
+
_target_: src.datamodules.focus_datamodule.FocusDataModule
|
18 |
+
data_dir: /usr/src/app/data/focus
|
19 |
+
csv_file: /usr/src/app/data/focus/metadata.csv
|
20 |
+
batch_size: 128
|
21 |
+
train_val_test_split_percentage:
|
22 |
+
- 0.7
|
23 |
+
- 0.15
|
24 |
+
- 0.15
|
25 |
+
num_workers: 0
|
26 |
+
pin_memory: false
|
27 |
+
seed: 12345
|
28 |
+
callbacks:
|
29 |
+
model_checkpoint:
|
30 |
+
_target_: pytorch_lightning.callbacks.ModelCheckpoint
|
31 |
+
monitor: val/mae
|
32 |
+
mode: min
|
33 |
+
save_top_k: 1
|
34 |
+
save_last: true
|
35 |
+
verbose: false
|
36 |
+
dirpath: checkpoints/
|
37 |
+
filename: epoch_{epoch:03d}
|
38 |
+
auto_insert_metric_name: false
|
39 |
+
early_stopping:
|
40 |
+
_target_: pytorch_lightning.callbacks.EarlyStopping
|
41 |
+
monitor: val/mae
|
42 |
+
mode: min
|
43 |
+
patience: 100
|
44 |
+
min_delta: 0
|
45 |
+
model_summary:
|
46 |
+
_target_: pytorch_lightning.callbacks.RichModelSummary
|
47 |
+
max_depth: -1
|
48 |
+
rich_progress_bar:
|
49 |
+
_target_: pytorch_lightning.callbacks.RichProgressBar
|
50 |
+
model/params/total: 2210561
|
51 |
+
model/params/trainable: 2210561
|
52 |
+
model/params/non_trainable: 0
|
logs/experiments/runs/focusMAE_75/2022-03-10_14-07-57/csv/version_0/metrics.csv
ADDED
@@ -0,0 +1,202 @@
1 |
+
val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae
|
2 |
+
2.3446929454803467,2.3446929454803467,1.9883708953857422,0,3,,,,
|
3 |
+
,,,0,3,2.3477797508239746,2.3477797508239746,,
|
4 |
+
1.951300024986267,1.951300024986267,1.951300024986267,1,7,,,,
|
5 |
+
,,,1,7,2.1104788780212402,2.1104788780212402,,
|
6 |
+
1.7989541292190552,1.7989541292190552,1.7989541292190552,2,11,,,,
|
7 |
+
,,,2,11,1.9923278093338013,1.9923278093338013,,
|
8 |
+
1.7231148481369019,1.7231148481369019,1.7231148481369019,3,15,,,,
|
9 |
+
,,,3,15,1.8969764709472656,1.8969764709472656,,
|
10 |
+
1.6877669095993042,1.6877669095993042,1.6877669095993042,4,19,,,,
|
11 |
+
,,,4,19,1.8385252952575684,1.8385252952575684,,
|
12 |
+
1.6997063159942627,1.6997063159942627,1.6877669095993042,5,23,,,,
|
13 |
+
,,,5,23,1.7758193016052246,1.7758193016052246,,
|
14 |
+
1.6767044067382812,1.6767044067382812,1.6767044067382812,6,27,,,,
|
15 |
+
,,,6,27,1.6879072189331055,1.6879072189331055,,
|
16 |
+
1.683190107345581,1.683190107345581,1.6767044067382812,7,31,,,,
|
17 |
+
,,,7,31,1.626947045326233,1.626947045326233,,
|
18 |
+
1.807665228843689,1.807665228843689,1.6767044067382812,8,35,,,,
|
19 |
+
,,,8,35,1.5883736610412598,1.5883736610412598,,
|
20 |
+
1.7391133308410645,1.7391133308410645,1.6767044067382812,9,39,,,,
|
21 |
+
,,,9,39,1.5447884798049927,1.5447884798049927,,
|
22 |
+
1.652360200881958,1.652360200881958,1.652360200881958,10,43,,,,
|
23 |
+
,,,10,43,1.5287734270095825,1.5287734270095825,,
|
24 |
+
1.5586925745010376,1.5586925745010376,1.5586925745010376,11,47,,,,
|
25 |
+
,,,11,47,1.4492356777191162,1.4492356777191162,,
|
26 |
+
1.6158185005187988,1.6158185005187988,1.5586925745010376,12,51,,,,
|
27 |
+
,,,12,51,1.4094675779342651,1.4094675779342651,,
|
28 |
+
1.645415186882019,1.645415186882019,1.5586925745010376,13,55,,,,
|
29 |
+
,,,13,55,1.3940879106521606,1.3940879106521606,,
|
30 |
+
1.7324628829956055,1.7324628829956055,1.5586925745010376,14,59,,,,
|
31 |
+
,,,14,59,1.354192852973938,1.354192852973938,,
|
32 |
+
1.7086338996887207,1.7086338996887207,1.5586925745010376,15,63,,,,
|
33 |
+
,,,15,63,1.284730076789856,1.284730076789856,,
|
34 |
+
1.8367301225662231,1.8367301225662231,1.5586925745010376,16,67,,,,
|
35 |
+
,,,16,67,1.275136947631836,1.275136947631836,,
|
36 |
+
1.8464158773422241,1.8464158773422241,1.5586925745010376,17,71,,,,
|
37 |
+
,,,17,71,1.2658562660217285,1.2658562660217285,,
|
38 |
+
1.6237857341766357,1.6237857341766357,1.5586925745010376,18,75,,,,
|
39 |
+
,,,18,75,1.2015395164489746,1.2015395164489746,,
|
40 |
+
1.5630764961242676,1.5630764961242676,1.5586925745010376,19,79,,,,
|
41 |
+
,,,19,79,1.150350570678711,1.150350570678711,,
|
42 |
+
1.7092353105545044,1.7092353105545044,1.5586925745010376,20,83,,,,
|
43 |
+
,,,20,83,1.1231166124343872,1.1231166124343872,,
|
44 |
+
1.9794765710830688,1.9794765710830688,1.5586925745010376,21,87,,,,
|
45 |
+
,,,21,87,1.0373468399047852,1.0373468399047852,,
|
46 |
+
2.015226364135742,2.015226364135742,1.5586925745010376,22,91,,,,
|
47 |
+
,,,22,91,1.082269549369812,1.082269549369812,,
|
48 |
+
1.472211241722107,1.472211241722107,1.472211241722107,23,95,,,,
|
49 |
+
,,,23,95,1.0530732870101929,1.0530732870101929,,
|
50 |
+
1.9599570035934448,1.9599570035934448,1.472211241722107,24,99,,,,
|
51 |
+
,,,24,99,1.014898419380188,1.014898419380188,,
|
52 |
+
1.7242220640182495,1.7242220640182495,1.472211241722107,25,103,,,,
|
53 |
+
,,,25,103,1.005287766456604,1.005287766456604,,
|
54 |
+
1.5354392528533936,1.5354392528533936,1.472211241722107,26,107,,,,
|
55 |
+
,,,26,107,0.997562825679779,0.997562825679779,,
|
56 |
+
1.5698376893997192,1.5698376893997192,1.472211241722107,27,111,,,,
|
57 |
+
,,,27,111,0.9320324659347534,0.9320324659347534,,
|
58 |
+
1.5753498077392578,1.5753498077392578,1.472211241722107,28,115,,,,
|
59 |
+
,,,28,115,0.9440838694572449,0.9440838694572449,,
|
60 |
+
1.5038267374038696,1.5038267374038696,1.472211241722107,29,119,,,,
|
61 |
+
,,,29,119,0.9454466700553894,0.9454466700553894,,
|
62 |
+
1.5622373819351196,1.5622373819351196,1.472211241722107,30,123,,,,
|
63 |
+
,,,30,123,0.922053337097168,0.922053337097168,,
|
64 |
+
1.9844919443130493,1.9844919443130493,1.472211241722107,31,127,,,,
|
65 |
+
,,,31,127,0.8263501524925232,0.8263501524925232,,
|
66 |
+
1.4059313535690308,1.4059313535690308,1.4059313535690308,32,131,,,,
|
67 |
+
,,,32,131,0.8468357920646667,0.8468357920646667,,
|
68 |
+
1.625054121017456,1.625054121017456,1.4059313535690308,33,135,,,,
|
69 |
+
,,,33,135,0.8854445815086365,0.8854445815086365,,
|
70 |
+
1.4018893241882324,1.4018893241882324,1.4018893241882324,34,139,,,,
|
71 |
+
,,,34,139,0.7997727990150452,0.7997727990150452,,
|
72 |
+
1.2538477182388306,1.2538477182388306,1.2538477182388306,35,143,,,,
|
73 |
+
,,,35,143,0.7486986517906189,0.7486986517906189,,
|
74 |
+
1.824753999710083,1.824753999710083,1.2538477182388306,36,147,,,,
|
75 |
+
,,,36,147,0.7691783308982849,0.7691783308982849,,
|
76 |
+
1.6562089920043945,1.6562089920043945,1.2538477182388306,37,151,,,,
|
77 |
+
,,,37,151,0.7924345135688782,0.7924345135688782,,
|
78 |
+
1.2863292694091797,1.2863292694091797,1.2538477182388306,38,155,,,,
|
79 |
+
,,,38,155,0.7809156179428101,0.7809156179428101,,
|
80 |
+
1.6022897958755493,1.6022897958755493,1.2538477182388306,39,159,,,,
|
81 |
+
,,,39,159,0.79623943567276,0.79623943567276,,
|
82 |
+
1.3245476484298706,1.3245476484298706,1.2538477182388306,40,163,,,,
|
83 |
+
,,,40,163,0.7981110215187073,0.7981110215187073,,
|
84 |
+
1.3914467096328735,1.3914467096328735,1.2538477182388306,41,167,,,,
|
85 |
+
,,,41,167,0.7772411704063416,0.7772411704063416,,
|
86 |
+
1.6755483150482178,1.6755483150482178,1.2538477182388306,42,171,,,,
|
87 |
+
,,,42,171,0.7428296208381653,0.7428296208381653,,
|
88 |
+
1.9794198274612427,1.9794198274612427,1.2538477182388306,43,175,,,,
|
89 |
+
,,,43,175,0.7463895082473755,0.7463895082473755,,
|
90 |
+
1.906415343284607,1.906415343284607,1.2538477182388306,44,179,,,,
|
91 |
+
,,,44,179,0.672076404094696,0.672076404094696,,
|
92 |
+
1.1878875494003296,1.1878875494003296,1.1878875494003296,45,183,,,,
|
93 |
+
,,,45,183,0.7175173759460449,0.7175173759460449,,
|
94 |
+
1.7174195051193237,1.7174195051193237,1.1878875494003296,46,187,,,,
|
95 |
+
,,,46,187,0.7588973045349121,0.7588973045349121,,
|
96 |
+
1.3446967601776123,1.3446967601776123,1.1878875494003296,47,191,,,,
|
97 |
+
,,,47,191,0.7565988302230835,0.7565988302230835,,
|
98 |
+
2.1821930408477783,2.1821930408477783,1.1878875494003296,48,195,,,,
|
99 |
+
,,,48,195,0.7117477059364319,0.7117477059364319,,
|
100 |
+
2.298060894012451,2.298060894012451,1.1878875494003296,49,199,,,,
|
101 |
+
,,,49,199,0.7183989882469177,0.7183989882469177,,
|
102 |
+
1.236243724822998,1.236243724822998,1.1878875494003296,50,203,,,,
|
103 |
+
,,,50,203,0.6358188390731812,0.6358188390731812,,
|
104 |
+
1.5621458292007446,1.5621458292007446,1.1878875494003296,51,207,,,,
|
105 |
+
,,,51,207,0.6559919118881226,0.6559919118881226,,
|
106 |
+
1.4248673915863037,1.4248673915863037,1.1878875494003296,52,211,,,,
|
107 |
+
,,,52,211,0.6091839671134949,0.6091839671134949,,
|
108 |
+
2.0800745487213135,2.0800745487213135,1.1878875494003296,53,215,,,,
|
109 |
+
,,,53,215,0.7509962916374207,0.7509962916374207,,
|
110 |
+
1.3738770484924316,1.3738770484924316,1.1878875494003296,54,219,,,,
|
111 |
+
,,,54,219,0.7524438500404358,0.7524438500404358,,
|
112 |
+
2.065077066421509,2.065077066421509,1.1878875494003296,55,223,,,,
|
113 |
+
,,,55,223,0.7347822189331055,0.7347822189331055,,
|
114 |
+
1.2348603010177612,1.2348603010177612,1.1878875494003296,56,227,,,,
|
115 |
+
,,,56,227,0.66017085313797,0.66017085313797,,
|
116 |
+
1.8173937797546387,1.8173937797546387,1.1878875494003296,57,231,,,,
|
117 |
+
,,,57,231,0.611933171749115,0.611933171749115,,
|
118 |
+
1.4586138725280762,1.4586138725280762,1.1878875494003296,58,235,,,,
|
119 |
+
,,,58,235,0.6272565722465515,0.6272565722465515,,
|
120 |
+
1.2043694257736206,1.2043694257736206,1.1878875494003296,59,239,,,,
|
121 |
+
,,,59,239,0.6116037964820862,0.6116037964820862,,
|
122 |
+
1.0817078351974487,1.0817078351974487,1.0817078351974487,60,243,,,,
|
123 |
+
,,,60,243,0.6100186705589294,0.6100186705589294,,
|
124 |
+
1.31791090965271,1.31791090965271,1.0817078351974487,61,247,,,,
|
125 |
+
,,,61,247,0.6260210871696472,0.6260210871696472,,
|
126 |
+
1.8742070198059082,1.8742070198059082,1.0817078351974487,62,251,,,,
|
127 |
+
,,,62,251,0.7127287983894348,0.7127287983894348,,
|
128 |
+
2.079773187637329,2.079773187637329,1.0817078351974487,63,255,,,,
|
129 |
+
,,,63,255,0.6411363482475281,0.6411363482475281,,
|
130 |
+
3.3381476402282715,3.3381476402282715,1.0817078351974487,64,259,,,,
|
131 |
+
,,,64,259,0.6192998886108398,0.6192998886108398,,
|
132 |
+
1.348172903060913,1.348172903060913,1.0817078351974487,65,263,,,,
|
133 |
+
,,,65,263,0.6750313639640808,0.6750313639640808,,
|
134 |
+
1.891974687576294,1.891974687576294,1.0817078351974487,66,267,,,,
|
135 |
+
,,,66,267,0.6221553087234497,0.6221553087234497,,
|
136 |
+
1.0347270965576172,1.0347270965576172,1.0347270965576172,67,271,,,,
|
137 |
+
,,,67,271,0.6063169836997986,0.6063169836997986,,
|
138 |
+
1.1459691524505615,1.1459691524505615,1.0347270965576172,68,275,,,,
|
139 |
+
,,,68,275,0.5792394280433655,0.5792394280433655,,
|
140 |
+
1.4413127899169922,1.4413127899169922,1.0347270965576172,69,279,,,,
|
141 |
+
,,,69,279,0.6165463924407959,0.6165463924407959,,
|
142 |
+
1.4486117362976074,1.4486117362976074,1.0347270965576172,70,283,,,,
|
143 |
+
,,,70,283,0.542451024055481,0.542451024055481,,
|
144 |
+
1.410490870475769,1.410490870475769,1.0347270965576172,71,287,,,,
|
145 |
+
,,,71,287,0.5221225619316101,0.5221225619316101,,
|
146 |
+
1.8628666400909424,1.8628666400909424,1.0347270965576172,72,291,,,,
|
147 |
+
,,,72,291,0.5991109609603882,0.5991109609603882,,
|
148 |
+
1.0799005031585693,1.0799005031585693,1.0347270965576172,73,295,,,,
|
149 |
+
,,,73,295,0.5070008039474487,0.5070008039474487,,
|
150 |
+
1.3997790813446045,1.3997790813446045,1.0347270965576172,74,299,,,,
|
151 |
+
,,,74,299,0.6225135326385498,0.6225135326385498,,
|
152 |
+
1.1575437784194946,1.1575437784194946,1.0347270965576172,75,303,,,,
|
153 |
+
,,,75,303,0.5841628313064575,0.5841628313064575,,
|
154 |
+
1.1143656969070435,1.1143656969070435,1.0347270965576172,76,307,,,,
|
155 |
+
,,,76,307,0.5086714625358582,0.5086714625358582,,
|
156 |
+
1.901267647743225,1.901267647743225,1.0347270965576172,77,311,,,,
|
157 |
+
,,,77,311,0.47735270857810974,0.47735270857810974,,
|
158 |
+
1.4064610004425049,1.4064610004425049,1.0347270965576172,78,315,,,,
|
159 |
+
,,,78,315,0.6010031700134277,0.6010031700134277,,
|
160 |
+
1.6694703102111816,1.6694703102111816,1.0347270965576172,79,319,,,,
|
161 |
+
,,,79,319,0.6306296586990356,0.6306296586990356,,
|
162 |
+
1.4535300731658936,1.4535300731658936,1.0347270965576172,80,323,,,,
|
163 |
+
,,,80,323,0.5416246652603149,0.5416246652603149,,
|
164 |
+
1.0869840383529663,1.0869840383529663,1.0347270965576172,81,327,,,,
|
165 |
+
,,,81,327,0.5715169310569763,0.5715169310569763,,
|
166 |
+
1.6695533990859985,1.6695533990859985,1.0347270965576172,82,331,,,,
|
167 |
+
,,,82,331,0.5542420744895935,0.5542420744895935,,
|
168 |
+
1.5430047512054443,1.5430047512054443,1.0347270965576172,83,335,,,,
|
169 |
+
,,,83,335,0.49806609749794006,0.49806609749794006,,
|
170 |
+
1.2416027784347534,1.2416027784347534,1.0347270965576172,84,339,,,,
|
171 |
+
,,,84,339,0.4860212206840515,0.4860212206840515,,
|
172 |
+
1.0733833312988281,1.0733833312988281,1.0347270965576172,85,343,,,,
|
173 |
+
,,,85,343,0.5044563412666321,0.5044563412666321,,
|
174 |
+
1.5053118467330933,1.5053118467330933,1.0347270965576172,86,347,,,,
|
175 |
+
,,,86,347,0.444906085729599,0.444906085729599,,
|
176 |
+
1.2467906475067139,1.2467906475067139,1.0347270965576172,87,351,,,,
|
177 |
+
,,,87,351,0.48637983202934265,0.48637983202934265,,
|
178 |
+
0.7707281708717346,0.7707281708717346,0.7707281708717346,88,355,,,,
|
179 |
+
,,,88,355,0.44943007826805115,0.44943007826805115,,
|
180 |
+
0.9066965579986572,0.9066965579986572,0.7707281708717346,89,359,,,,
|
181 |
+
,,,89,359,0.5219848155975342,0.5219848155975342,,
|
182 |
+
1.6973118782043457,1.6973118782043457,0.7707281708717346,90,363,,,,
|
183 |
+
,,,90,363,0.4625515043735504,0.4625515043735504,,
|
184 |
+
0.8749558329582214,0.8749558329582214,0.7707281708717346,91,367,,,,
|
185 |
+
,,,91,367,0.5775365829467773,0.5775365829467773,,
|
186 |
+
0.9708791971206665,0.9708791971206665,0.7707281708717346,92,371,,,,
|
187 |
+
,,,92,371,0.5470483303070068,0.5470483303070068,,
|
188 |
+
1.5438796281814575,1.5438796281814575,0.7707281708717346,93,375,,,,
|
189 |
+
,,,93,375,0.5146293044090271,0.5146293044090271,,
|
190 |
+
1.0273939371109009,1.0273939371109009,0.7707281708717346,94,379,,,,
|
191 |
+
,,,94,379,0.4896579682826996,0.4896579682826996,,
|
192 |
+
1.036439061164856,1.036439061164856,0.7707281708717346,95,383,,,,
|
193 |
+
,,,95,383,0.4825593829154968,0.4825593829154968,,
|
194 |
+
1.0776950120925903,1.0776950120925903,0.7707281708717346,96,387,,,,
|
195 |
+
,,,96,387,0.5524967312812805,0.5524967312812805,,
|
196 |
+
1.5079691410064697,1.5079691410064697,0.7707281708717346,97,391,,,,
|
197 |
+
,,,97,391,0.5014370679855347,0.5014370679855347,,
|
198 |
+
1.6152366399765015,1.6152366399765015,0.7707281708717346,98,395,,,,
|
199 |
+
,,,98,395,0.5738958716392517,0.5738958716392517,,
|
200 |
+
1.5969935655593872,1.5969935655593872,0.7707281708717346,99,399,,,,
|
201 |
+
,,,99,399,0.5723943710327148,0.5723943710327148,,
|
202 |
+
,,,89,356,,,0.7502508759498596,0.7502508759498596
|
logs/experiments/runs/focusMAE_75/2022-03-10_14-07-57/tensorboard/focusMAE_75/events.out.tfevents.1646921279.d007913868f5.1.0
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:dab129823f73d6dbea323d40d5138a0f358985cd96e556ecb0b656840990f4e6
size 36227
logs/experiments/runs/focusMAE_75/2022-03-10_14-07-57/tensorboard/focusMAE_75/events.out.tfevents.1646921340.d007913868f5.1.1
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7d580f2ba81a596cc79f374b538ed9c0c72cc5d2c0a9415f5c1f664dd857185e
size 179
logs/experiments/runs/focusMAE_75/2022-03-10_14-07-57/tensorboard/focusMAE_75/hparams.yaml
ADDED
@@ -0,0 +1,52 @@
trainer:
  _target_: pytorch_lightning.Trainer
  gpus: 0
  min_epochs: 1
  max_epochs: 100
  resume_from_checkpoint: null
model:
  _target_: src.models.focus_module.FocusLitModule
  input_size: 16875
  lin1_size: 128
  lin2_size: 256
  lin3_size: 64
  output_size: 1
  lr: 0.001
  weight_decay: 0.0005
datamodule:
  _target_: src.datamodules.focus_datamodule.FocusDataModule
  data_dir: /usr/src/app/data/focus
  csv_file: /usr/src/app/data/focus/metadata.csv
  batch_size: 128
  train_val_test_split_percentage:
  - 0.7
  - 0.15
  - 0.15
  num_workers: 0
  pin_memory: false
seed: 12345
callbacks:
  model_checkpoint:
    _target_: pytorch_lightning.callbacks.ModelCheckpoint
    monitor: val/mae
    mode: min
    save_top_k: 1
    save_last: true
    verbose: false
    dirpath: checkpoints/
    filename: epoch_{epoch:03d}
    auto_insert_metric_name: false
  early_stopping:
    _target_: pytorch_lightning.callbacks.EarlyStopping
    monitor: val/mae
    mode: min
    patience: 100
    min_delta: 0
  model_summary:
    _target_: pytorch_lightning.callbacks.RichModelSummary
    max_depth: -1
  rich_progress_bar:
    _target_: pytorch_lightning.callbacks.RichProgressBar
model/params/total: 2210561
model/params/trainable: 2210561
model/params/non_trainable: 0
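This hparams.yaml is a plain OmegaConf dump of the run's configuration, so the logged trainer, model and datamodule can be rebuilt with hydra.utils.instantiate. A minimal sketch (not part of the commit), assuming the repository's src package is importable and the path is adjusted to your checkout:

# Re-instantiate the objects described by the logged hparams.yaml.
from omegaconf import OmegaConf
import hydra

cfg = OmegaConf.load(
    "logs/experiments/runs/focusMAE_75/2022-03-10_14-07-57/tensorboard/focusMAE_75/hparams.yaml"
)
model = hydra.utils.instantiate(cfg.model)            # FocusLitModule(input_size=16875, ...)
datamodule = hydra.utils.instantiate(cfg.datamodule)  # FocusDataModule with the 0.7/0.15/0.15 split
trainer = hydra.utils.instantiate(cfg.trainer)        # pytorch_lightning.Trainer(gpus=0, ...)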
logs/experiments/runs/focusMSE_150/2022-03-10_13-11-24/.hydra/config.yaml
ADDED
@@ -0,0 +1,70 @@
original_work_dir: ${hydra:runtime.cwd}
data_dir: ${original_work_dir}/data
print_config: true
ignore_warnings: true
train: true
test: true
seed: 12345
name: focusMSE_150
datamodule:
  _target_: src.datamodules.focus_datamodule.FocusDataModule
  data_dir: ${data_dir}/focus150
  csv_file: ${data_dir}/focus150/metadata.csv
  batch_size: 128
  train_val_test_split_percentage:
  - 0.7
  - 0.15
  - 0.15
  num_workers: 0
  pin_memory: false
model:
  _target_: src.models.focus_module.FocusMSELitModule
  input_size: 67500
  lin1_size: 128
  lin2_size: 256
  lin3_size: 64
  output_size: 1
  lr: 0.001
  weight_decay: 0.0005
callbacks:
  model_checkpoint:
    _target_: pytorch_lightning.callbacks.ModelCheckpoint
    monitor: val/mae
    mode: min
    save_top_k: 1
    save_last: true
    verbose: false
    dirpath: checkpoints/
    filename: epoch_{epoch:03d}
    auto_insert_metric_name: false
  early_stopping:
    _target_: pytorch_lightning.callbacks.EarlyStopping
    monitor: val/mae
    mode: min
    patience: 100
    min_delta: 0
  model_summary:
    _target_: pytorch_lightning.callbacks.RichModelSummary
    max_depth: -1
  rich_progress_bar:
    _target_: pytorch_lightning.callbacks.RichProgressBar
logger:
  csv:
    _target_: pytorch_lightning.loggers.csv_logs.CSVLogger
    save_dir: .
    name: csv/
    prefix: ''
  tensorboard:
    _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
    save_dir: tensorboard/
    name: null
    version: ${name}
    log_graph: false
    default_hp_metric: true
    prefix: ''
trainer:
  _target_: pytorch_lightning.Trainer
  gpus: 0
  min_epochs: 1
  max_epochs: 100
  resume_from_checkpoint: null
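The input_size of 67500 in this config is consistent with flattened 150x150 RGB crops, and the 16875 used by the focusMAE_75 runs with 75x75 RGB crops; the channel count of 3 is an inference from the run names, not stated in the commit. A quick check:

# Sanity check of the flattened input sizes used by the two experiment families
# (assumes 3-channel images; crop sizes are inferred from the focus150 / focus75 names).
assert 150 * 150 * 3 == 67500  # focusMSE_150 / focusMAE_150 runs
assert 75 * 75 * 3 == 16875    # focusMAE_75 runs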
logs/experiments/runs/focusMSE_150/2022-03-10_13-11-24/.hydra/hydra.yaml
ADDED
@@ -0,0 +1,171 @@
hydra:
  run:
    dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
  sweep:
    dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
    subdir: ${hydra.job.num}
  launcher:
    _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
  sweeper:
    _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper
    max_batch_size: null
  help:
    app_name: ${hydra.job.name}
    header: '${hydra.help.app_name} is powered by Hydra.

      '
    footer: 'Powered by Hydra (https://hydra.cc)

      Use --hydra-help to view Hydra specific help

      '
    template: '${hydra.help.header}

      == Configuration groups ==

      Compose your configuration from those groups (group=option)


      $APP_CONFIG_GROUPS


      == Config ==

      Override anything in the config (foo.bar=value)


      $CONFIG


      ${hydra.help.footer}

      '
  hydra_help:
    template: 'Hydra (${hydra.runtime.version})

      See https://hydra.cc for more info.


      == Flags ==

      $FLAGS_HELP


      == Configuration groups ==

      Compose your configuration from those groups (For example, append hydra/job_logging=disabled
      to command line)


      $HYDRA_CONFIG_GROUPS


      Use ''--cfg hydra'' to Show the Hydra config.

      '
    hydra_help: ???
  hydra_logging:
    version: 1
    formatters:
      colorlog:
        (): colorlog.ColoredFormatter
        format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
    handlers:
      console:
        class: logging.StreamHandler
        formatter: colorlog
        stream: ext://sys.stdout
    root:
      level: INFO
      handlers:
      - console
    disable_existing_loggers: false
  job_logging:
    version: 1
    formatters:
      simple:
        format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
      colorlog:
        (): colorlog.ColoredFormatter
        format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
          - %(message)s'
        log_colors:
          DEBUG: purple
          INFO: green
          WARNING: yellow
          ERROR: red
          CRITICAL: red
    handlers:
      console:
        class: logging.StreamHandler
        formatter: colorlog
        stream: ext://sys.stdout
      file:
        class: logging.FileHandler
        formatter: simple
        filename: ${hydra.job.name}.log
    root:
      level: INFO
      handlers:
      - console
      - file
    disable_existing_loggers: false
  env: {}
  searchpath: []
  callbacks: {}
  output_subdir: .hydra
  overrides:
    hydra: []
    task:
    - trainer.gpus=0
    - experiment=focusMSE_150
  job:
    name: train
    override_dirname: experiment=focusMSE_150,trainer.gpus=0
    id: ???
    num: ???
    config_name: train.yaml
    env_set: {}
    env_copy: []
    config:
      override_dirname:
        kv_sep: '='
        item_sep: ','
        exclude_keys: []
  runtime:
    version: 1.1.1
    cwd: /usr/src/app
    config_sources:
    - path: hydra.conf
      schema: pkg
      provider: hydra
    - path: /usr/src/app/configs
      schema: file
      provider: main
    - path: hydra_plugins.hydra_colorlog.conf
      schema: pkg
      provider: hydra-colorlog
    - path: ''
      schema: structured
      provider: schema
    choices:
      local: default.yaml
      hparams_search: null
      debug: null
      experiment: focusMSE_150
      log_dir: default.yaml
      trainer: default.yaml
      logger: many_loggers
      callbacks: default.yaml
      model: focus150.yaml
      datamodule: focus150.yaml
      hydra/env: default
      hydra/callbacks: null
      hydra/job_logging: colorlog
      hydra/hydra_logging: colorlog
      hydra/hydra_help: default
      hydra/help: default
      hydra/sweeper: basic
      hydra/launcher: basic
      hydra/output: default
  verbose: false
logs/experiments/runs/focusMSE_150/2022-03-10_13-11-24/.hydra/overrides.yaml
ADDED
@@ -0,0 +1,2 @@
- trainer.gpus=0
- experiment=focusMSE_150
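These two overrides are what Hydra applied on top of configs/train.yaml for this run (the job metadata above records config_name: train.yaml and job name: train). A minimal sketch (not part of the commit) of recomposing the same config offline with Hydra's compose API, assuming it is run from a script at the repository root so that the relative configs/ path resolves:

# Rebuild the run's composed config from the recorded overrides.
from hydra import initialize, compose

with initialize(config_path="configs"):
    cfg = compose(config_name="train", overrides=["trainer.gpus=0", "experiment=focusMSE_150"])
print(cfg.model["_target_"])  # src.models.focus_module.FocusMSELitModule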
logs/experiments/runs/focusMSE_150/2022-03-10_13-11-24/csv/version_0/hparams.yaml
ADDED
@@ -0,0 +1,52 @@
trainer:
  _target_: pytorch_lightning.Trainer
  gpus: 0
  min_epochs: 1
  max_epochs: 100
  resume_from_checkpoint: null
model:
  _target_: src.models.focus_module.FocusMSELitModule
  input_size: 67500
  lin1_size: 128
  lin2_size: 256
  lin3_size: 64
  output_size: 1
  lr: 0.001
  weight_decay: 0.0005
datamodule:
  _target_: src.datamodules.focus_datamodule.FocusDataModule
  data_dir: /usr/src/app/data/focus150
  csv_file: /usr/src/app/data/focus150/metadata.csv
  batch_size: 128
  train_val_test_split_percentage:
  - 0.7
  - 0.15
  - 0.15
  num_workers: 0
  pin_memory: false
seed: 12345
callbacks:
  model_checkpoint:
    _target_: pytorch_lightning.callbacks.ModelCheckpoint
    monitor: val/mae
    mode: min
    save_top_k: 1
    save_last: true
    verbose: false
    dirpath: checkpoints/
    filename: epoch_{epoch:03d}
    auto_insert_metric_name: false
  early_stopping:
    _target_: pytorch_lightning.callbacks.EarlyStopping
    monitor: val/mae
    mode: min
    patience: 100
    min_delta: 0
  model_summary:
    _target_: pytorch_lightning.callbacks.RichModelSummary
    max_depth: -1
  rich_progress_bar:
    _target_: pytorch_lightning.callbacks.RichProgressBar
model/params/total: 8690561
model/params/trainable: 8690561
model/params/non_trainable: 0
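The reported 8,690,561 parameters match a 67500-128-256-64-1 MLP in which each hidden Linear layer is followed by BatchNorm1d; that block layout is an assumption (it is the lightning-hydra-template's usual SimpleDenseNet pattern), not something stated in this commit. A quick check:

# Rough parameter-count check (assumes Linear + BatchNorm1d blocks on the hidden layers).
sizes = [67500, 128, 256, 64, 1]
linear = sum(i * o + o for i, o in zip(sizes, sizes[1:]))  # weights + biases
batchnorm = sum(2 * o for o in sizes[1:-1])                # gamma + beta per hidden layer
print(linear + batchnorm)  # 8690561; with 16875 inputs the same layout gives 2210561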
logs/experiments/runs/focusMSE_150/2022-03-10_13-11-24/csv/version_0/metrics.csv
ADDED
@@ -0,0 +1,202 @@
val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae
3.9090185165405273,1.5504119396209717,1.5504119396209717,0,13,,,,
,,,0,13,6.341573715209961,2.017768621444702,,
3.6448776721954346,1.4998838901519775,1.4998838901519775,1,27,,,,
,,,1,27,4.650768280029297,1.7403218746185303,,
3.3892288208007812,1.448609709739685,1.448609709739685,2,41,,,,
,,,2,41,3.730588912963867,1.57280433177948,,
3.3106086254119873,1.4690450429916382,1.448609709739685,3,55,,,,
,,,3,55,3.1740732192993164,1.4695543050765991,,
3.184706926345825,1.4420390129089355,1.4420390129089355,4,69,,,,
,,,4,69,2.9497790336608887,1.4106614589691162,,
3.998544216156006,1.6342074871063232,1.4420390129089355,5,83,,,,
,,,5,83,2.7456185817718506,1.3634170293807983,,
3.440368413925171,1.4773958921432495,1.4420390129089355,6,97,,,,
,,,6,97,2.7184715270996094,1.3397141695022583,,
3.394071578979492,1.4805296659469604,1.4420390129089355,7,111,,,,
,,,7,111,2.5637476444244385,1.313742756843567,,
3.4773972034454346,1.477279782295227,1.4420390129089355,8,125,,,,
,,,8,125,2.3757271766662598,1.2380567789077759,,
3.17680025100708,1.4259464740753174,1.4259464740753174,9,139,,,,
,,,9,139,2.223348617553711,1.200273036956787,,
3.9194443225860596,1.6226718425750732,1.4259464740753174,10,153,,,,
,,,10,153,2.09822940826416,1.1589058637619019,,
4.0389323234558105,1.5795488357543945,1.4259464740753174,11,167,,,,
,,,11,167,1.9324151277542114,1.1033174991607666,,
6.046604156494141,2.0167133808135986,1.4259464740753174,12,181,,,,
,,,12,181,1.851693034172058,1.068273901939392,,
3.3430557250976562,1.4696037769317627,1.4259464740753174,13,195,,,,
,,,13,195,1.7571626901626587,1.040431261062622,,
2.7428488731384277,1.3240962028503418,1.3240962028503418,14,209,,,,
,,,14,209,1.627685308456421,1.0066763162612915,,
2.6862003803253174,1.281518816947937,1.281518816947937,15,223,,,,
,,,15,223,1.398155689239502,0.9247750043869019,,
7.1135783195495605,2.202608108520508,1.281518816947937,16,237,,,,
,,,16,237,1.4556622505187988,0.9270832538604736,,
3.4653751850128174,1.507302165031433,1.281518816947937,17,251,,,,
,,,17,251,1.2802749872207642,0.875998854637146,,
2.2044589519500732,1.1742725372314453,1.1742725372314453,18,265,,,,
,,,18,265,1.12702476978302,0.8251340985298157,,
3.1551482677459717,1.4677238464355469,1.1742725372314453,19,279,,,,
,,,19,279,1.106533169746399,0.7953476905822754,,
4.245253086090088,1.6653687953948975,1.1742725372314453,20,293,,,,
,,,20,293,1.1528575420379639,0.8221195936203003,,
4.991257667541504,1.8378537893295288,1.1742725372314453,21,307,,,,
,,,21,307,1.1880255937576294,0.8374914526939392,,
2.4945051670074463,1.2524726390838623,1.1742725372314453,22,321,,,,
,,,22,321,1.0432500839233398,0.7769076228141785,,
4.141057014465332,1.6804609298706055,1.1742725372314453,23,335,,,,
,,,23,335,0.9234874844551086,0.7330545783042908,,
2.198812961578369,1.1750437021255493,1.1742725372314453,24,349,,,,
,,,24,349,0.9312455654144287,0.7370762825012207,,
1.9213582277297974,1.089828610420227,1.089828610420227,25,363,,,,
,,,25,363,0.7556602954864502,0.6408432126045227,,
2.1550352573394775,1.1508697271347046,1.089828610420227,26,377,,,,
,,,26,377,0.670892059803009,0.6174144148826599,,
2.9721758365631104,1.3731743097305298,1.089828610420227,27,391,,,,
,,,27,391,0.7417620420455933,0.6429144144058228,,
6.025863170623779,2.092292070388794,1.089828610420227,28,405,,,,
,,,28,405,0.7650352716445923,0.6578378677368164,,
6.8636674880981445,2.222670793533325,1.089828610420227,29,419,,,,
,,,29,419,0.7481452226638794,0.6476733088493347,,
3.4918134212493896,1.5204269886016846,1.089828610420227,30,433,,,,
,,,30,433,0.7707064747810364,0.6658239960670471,,
7.086388111114502,2.2070538997650146,1.089828610420227,31,447,,,,
,,,31,447,0.8360738754272461,0.7002860903739929,,
5.1271138191223145,1.8868597745895386,1.089828610420227,32,461,,,,
,,,32,461,0.927767813205719,0.7268351316452026,,
5.0161824226379395,1.7668359279632568,1.089828610420227,33,475,,,,
,,,33,475,0.7505946755409241,0.6515080332756042,,
1.7053743600845337,1.017513394355774,1.017513394355774,34,489,,,,
,,,34,489,0.6850369572639465,0.6343504190444946,,
2.0719754695892334,1.1400641202926636,1.017513394355774,35,503,,,,
,,,35,503,0.5105685591697693,0.5408263206481934,,
3.0538406372070312,1.4398294687271118,1.017513394355774,36,517,,,,
,,,36,517,0.5933236479759216,0.5576853156089783,,
8.8101806640625,2.609063148498535,1.017513394355774,37,531,,,,
,,,37,531,0.5839020013809204,0.5745837092399597,,
2.9417495727539062,1.3778327703475952,1.017513394355774,38,545,,,,
,,,38,545,0.6017018556594849,0.594438374042511,,
2.4010508060455322,1.274417519569397,1.017513394355774,39,559,,,,
,,,39,559,0.5342427492141724,0.5524643063545227,,
2.542184829711914,1.312879204750061,1.017513394355774,40,573,,,,
,,,40,573,0.6568379998207092,0.6159998178482056,,
3.9658477306365967,1.4965797662734985,1.017513394355774,41,587,,,,
,,,41,587,0.5906515121459961,0.5734637379646301,,
6.082784175872803,2.115468740463257,1.017513394355774,42,601,,,,
,,,42,601,0.529127299785614,0.5510073900222778,,
1.9308276176452637,1.086243987083435,1.017513394355774,43,615,,,,
,,,43,615,0.5599197745323181,0.5588628649711609,,
1.990827202796936,1.1503171920776367,1.017513394355774,44,629,,,,
,,,44,629,0.41495001316070557,0.4850643575191498,,
2.708662271499634,1.2828508615493774,1.017513394355774,45,643,,,,
,,,45,643,0.436250776052475,0.48865383863449097,,
2.721993923187256,1.2890279293060303,1.017513394355774,46,657,,,,
,,,46,657,0.4007551968097687,0.47556832432746887,,
1.3520567417144775,0.9081746935844421,0.9081746935844421,47,671,,,,
,,,47,671,0.4117766320705414,0.47841882705688477,,
3.64544939994812,1.5458178520202637,0.9081746935844421,48,685,,,,
,,,48,685,0.3564106523990631,0.4509842097759247,,
8.827122688293457,2.577078342437744,0.9081746935844421,49,699,,,,
,,,49,699,0.44367292523384094,0.506059467792511,,
2.90381121635437,1.3913854360580444,0.9081746935844421,50,713,,,,
,,,50,713,0.47677552700042725,0.5219942927360535,,
2.1751604080200195,1.1605476140975952,0.9081746935844421,51,727,,,,
,,,51,727,0.4805523157119751,0.5307119488716125,,
3.5105037689208984,1.32558274269104,0.9081746935844421,52,741,,,,
,,,52,741,0.42093318700790405,0.5029696822166443,,
2.082195520401001,1.129480004310608,0.9081746935844421,53,755,,,,
,,,53,755,0.4829401671886444,0.5233399271965027,,
1.5603581666946411,0.9781850576400757,0.9081746935844421,54,769,,,,
,,,54,769,0.42465946078300476,0.5025721788406372,,
1.5590522289276123,0.9613149762153625,0.9081746935844421,55,783,,,,
,,,55,783,0.37251123785972595,0.46525320410728455,,
1.938509225845337,1.086552381515503,0.9081746935844421,56,797,,,,
,,,56,797,0.35386478900909424,0.44945764541625977,,
3.0849618911743164,1.4383236169815063,0.9081746935844421,57,811,,,,
,,,57,811,0.4274559020996094,0.49673670530319214,,
3.21571946144104,1.4552662372589111,0.9081746935844421,58,825,,,,
,,,58,825,0.4684374928474426,0.5275321006774902,,
2.3003997802734375,1.2248640060424805,0.9081746935844421,59,839,,,,
,,,59,839,0.43541961908340454,0.49734723567962646,,
2.299614906311035,1.2438257932662964,0.9081746935844421,60,853,,,,
,,,60,853,0.3669969141483307,0.4487251043319702,,
3.650754928588867,1.663870930671692,0.9081746935844421,61,867,,,,
,,,61,867,0.3067220449447632,0.4131874740123749,,
3.624985694885254,1.5658018589019775,0.9081746935844421,62,881,,,,
,,,62,881,0.4012928009033203,0.4891420304775238,,
1.2930291891098022,0.8807826042175293,0.8807826042175293,63,895,,,,
,,,63,895,0.40934282541275024,0.49816712737083435,,
1.9793566465377808,1.1115094423294067,0.8807826042175293,64,909,,,,
,,,64,909,0.3540283441543579,0.45071592926979065,,
4.07856559753418,1.6394165754318237,0.8807826042175293,65,923,,,,
,,,65,923,0.3702802062034607,0.46419090032577515,,
3.3407633304595947,1.5669021606445312,0.8807826042175293,66,937,,,,
,,,66,937,0.3463621735572815,0.4454110860824585,,
7.587801933288574,2.2921195030212402,0.8807826042175293,67,951,,,,
,,,67,951,0.38096702098846436,0.469892680644989,,
3.2650105953216553,1.4884666204452515,0.8807826042175293,68,965,,,,
,,,68,965,0.39275798201560974,0.4615195691585541,,
2.0768086910247803,1.145806074142456,0.8807826042175293,69,979,,,,
,,,69,979,0.3411720097064972,0.443414568901062,,
2.1344869136810303,1.1639031171798706,0.8807826042175293,70,993,,,,
,,,70,993,0.2638455033302307,0.3928419351577759,,
2.1884186267852783,1.2349659204483032,0.8807826042175293,71,1007,,,,
,,,71,1007,0.3292909264564514,0.43765053153038025,,
1.3673959970474243,0.9001149535179138,0.8807826042175293,72,1021,,,,
,,,72,1021,0.2578433156013489,0.38283589482307434,,
0.9036887884140015,0.7182144522666931,0.7182144522666931,73,1035,,,,
,,,73,1035,0.23022013902664185,0.3615632653236389,,
2.78726863861084,1.3210660219192505,0.7182144522666931,74,1049,,,,
,,,74,1049,0.2424551397562027,0.37832528352737427,,
2.135455369949341,1.0961676836013794,0.7182144522666931,75,1063,,,,
,,,75,1063,0.27062898874282837,0.4086984097957611,,
1.3522340059280396,0.875568151473999,0.7182144522666931,76,1077,,,,
,,,76,1077,0.3379851281642914,0.42751795053482056,,
3.483870029449463,1.5274254083633423,0.7182144522666931,77,1091,,,,
,,,77,1091,0.33448725938796997,0.42318886518478394,,
1.4907900094985962,0.9487781524658203,0.7182144522666931,78,1105,,,,
,,,78,1105,0.3691990375518799,0.4627363383769989,,
1.8306686878204346,1.0534369945526123,0.7182144522666931,79,1119,,,,
,,,79,1119,0.35426458716392517,0.460135817527771,,
3.3229048252105713,1.4640874862670898,0.7182144522666931,80,1133,,,,
,,,80,1133,0.3671959340572357,0.465082049369812,,
3.7974352836608887,1.6002452373504639,0.7182144522666931,81,1147,,,,
,,,81,1147,0.46021077036857605,0.5084742903709412,,
2.3504397869110107,1.2490290403366089,0.7182144522666931,82,1161,,,,
,,,82,1161,0.39738380908966064,0.4730401933193207,,
2.0983028411865234,1.1787508726119995,0.7182144522666931,83,1175,,,,
,,,83,1175,0.36762285232543945,0.4602110683917999,,
1.65043306350708,1.020263433456421,0.7182144522666931,84,1189,,,,
,,,84,1189,0.3967060446739197,0.47652655839920044,,
1.8674767017364502,1.0809354782104492,0.7182144522666931,85,1203,,,,
,,,85,1203,0.33385640382766724,0.44490671157836914,,
1.4506281614303589,0.9466513991355896,0.7182144522666931,86,1217,,,,
,,,86,1217,0.313031405210495,0.4242140054702759,,
2.475145101547241,1.2705174684524536,0.7182144522666931,87,1231,,,,
,,,87,1231,0.2501091957092285,0.3806353807449341,,
3.22990345954895,1.4544631242752075,0.7182144522666931,88,1245,,,,
,,,88,1245,0.3327494263648987,0.4411628544330597,,
3.468653917312622,1.5193942785263062,0.7182144522666931,89,1259,,,,
,,,89,1259,0.3415555953979492,0.44830793142318726,,
2.5945775508880615,1.2907638549804688,0.7182144522666931,90,1273,,,,
,,,90,1273,0.34335044026374817,0.44188740849494934,,
2.3023581504821777,1.259584665298462,0.7182144522666931,91,1287,,,,
,,,91,1287,0.31369662284851074,0.43098536133766174,,
4.970507621765137,1.8774722814559937,0.7182144522666931,92,1301,,,,
,,,92,1301,0.31328335404396057,0.4350607395172119,,
1.1587599515914917,0.8233600854873657,0.7182144522666931,93,1315,,,,
,,,93,1315,0.229990154504776,0.3717668950557709,,
4.081864833831787,1.7696021795272827,0.7182144522666931,94,1329,,,,
,,,94,1329,0.2942165434360504,0.40740966796875,,
1.090889811515808,0.8003377318382263,0.7182144522666931,95,1343,,,,
,,,95,1343,0.22536863386631012,0.3575226664543152,,
6.081254959106445,2.0787765979766846,0.7182144522666931,96,1357,,,,
,,,96,1357,0.1982411891222,0.34187421202659607,,
3.200557231903076,1.4187779426574707,0.7182144522666931,97,1371,,,,
,,,97,1371,0.22740033268928528,0.366495281457901,,
3.3464701175689697,1.5527931451797485,0.7182144522666931,98,1385,,,,
,,,98,1385,0.22232940793037415,0.36230015754699707,,
1.4440816640853882,0.9454975724220276,0.7182144522666931,99,1399,,,,
,,,99,1399,0.23534300923347473,0.3648931086063385,,
,,,74,1036,,,0.7441176176071167,0.6261149048805237
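Because Lightning's CSVLogger writes validation, training and test metrics on separate rows, most cells in this file are empty. A minimal sketch (not part of the commit) of pulling the per-epoch validation curve and the final test row with pandas, using the path added in this commit:

# Load the sparse CSVLogger output shown above and split it by metric type.
import pandas as pd

df = pd.read_csv("logs/experiments/runs/focusMSE_150/2022-03-10_13-11-24/csv/version_0/metrics.csv")
val = df.dropna(subset=["val/mae"])[["epoch", "val/loss", "val/mae", "val/mae_best"]]
test = df.dropna(subset=["test/mae"])[["test/loss", "test/mae"]]
print(val.tail(), test, sep="\n")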
logs/experiments/runs/focusMSE_150/2022-03-10_13-11-24/tensorboard/focusMSE_150/events.out.tfevents.1646917887.ea2217bd45f5.1.0
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3bfb83b097a2d37657f2cc3465bea334fb829a6ca56a55cdc1ff6e75bf283560
size 36397
logs/experiments/runs/focusMSE_150/2022-03-10_13-11-24/tensorboard/focusMSE_150/events.out.tfevents.1646918355.ea2217bd45f5.1.1
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5ece23ba7cc361c66cfa82beccae470d577095e6f78f81a3ffea56bba19d501c
size 179
logs/experiments/runs/focusMSE_150/2022-03-10_13-11-24/tensorboard/focusMSE_150/hparams.yaml
ADDED
@@ -0,0 +1,52 @@
trainer:
  _target_: pytorch_lightning.Trainer
  gpus: 0
  min_epochs: 1
  max_epochs: 100
  resume_from_checkpoint: null
model:
  _target_: src.models.focus_module.FocusMSELitModule
  input_size: 67500
  lin1_size: 128
  lin2_size: 256
  lin3_size: 64
  output_size: 1
  lr: 0.001
  weight_decay: 0.0005
datamodule:
  _target_: src.datamodules.focus_datamodule.FocusDataModule
  data_dir: /usr/src/app/data/focus150
  csv_file: /usr/src/app/data/focus150/metadata.csv
  batch_size: 128
  train_val_test_split_percentage:
  - 0.7
  - 0.15
  - 0.15
  num_workers: 0
  pin_memory: false
seed: 12345
callbacks:
  model_checkpoint:
    _target_: pytorch_lightning.callbacks.ModelCheckpoint
    monitor: val/mae
    mode: min
    save_top_k: 1
    save_last: true
    verbose: false
    dirpath: checkpoints/
    filename: epoch_{epoch:03d}
    auto_insert_metric_name: false
  early_stopping:
    _target_: pytorch_lightning.callbacks.EarlyStopping
    monitor: val/mae
    mode: min
    patience: 100
    min_delta: 0
  model_summary:
    _target_: pytorch_lightning.callbacks.RichModelSummary
    max_depth: -1
  rich_progress_bar:
    _target_: pytorch_lightning.callbacks.RichProgressBar
model/params/total: 8690561
model/params/trainable: 8690561
model/params/non_trainable: 0