Hannes Kuchelmeister committed
Commit 9855451
Parent(s): 2833379

rerun and add logs for focusMAE_150

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the complete change set.
- logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/.hydra/config.yaml +76 -0
- logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/.hydra/hydra.yaml +170 -0
- logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/.hydra/overrides.yaml +1 -0
- logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/checkpoints/epoch_098.ckpt +3 -0
- logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/checkpoints/last.ckpt +3 -0
- logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/csv/version_0/hparams.yaml +50 -0
- logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/csv/version_0/metrics.csv +202 -0
- logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/tensorboard/focusMAE_150/events.out.tfevents.1647960661.e7026f50b4d2.1.0 +3 -0
- logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/tensorboard/focusMAE_150/events.out.tfevents.1647960944.e7026f50b4d2.1.1 +3 -0
- logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/tensorboard/focusMAE_150/hparams.yaml +50 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/meta.yaml +15 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/metrics/epoch +201 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/metrics/test/loss +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/metrics/test/mae +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/metrics/train/loss +100 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/metrics/train/mae +100 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/metrics/val/loss +100 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/metrics/val/mae +100 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/metrics/val/mae_best +100 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/early_stopping/_target_ +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/early_stopping/min_delta +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/early_stopping/mode +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/early_stopping/monitor +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/early_stopping/patience +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_checkpoint/_target_ +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_checkpoint/auto_insert_metric_name +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_checkpoint/dirpath +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_checkpoint/filename +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_checkpoint/mode +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_checkpoint/monitor +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_checkpoint/save_last +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_checkpoint/save_top_k +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_checkpoint/verbose +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_summary/_target_ +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_summary/max_depth +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/rich_progress_bar/_target_ +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/datamodule/_target_ +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/datamodule/batch_size +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/datamodule/csv_test_file +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/datamodule/csv_train_file +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/datamodule/csv_val_file +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/datamodule/data_dir +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/datamodule/num_workers +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/datamodule/pin_memory +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/model/_target_ +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/model/input_size +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/model/lin1_size +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/model/lin2_size +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/model/lin3_size +1 -0
- logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/model/lr +1 -0
logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/.hydra/config.yaml
ADDED
@@ -0,0 +1,76 @@
original_work_dir: ${hydra:runtime.cwd}
data_dir: ${original_work_dir}/data
print_config: true
ignore_warnings: true
train: true
test: true
seed: 12345
name: focusMAE_150
datamodule:
  _target_: src.datamodules.focus_datamodule.FocusDataModule
  data_dir: ${data_dir}/focus150
  csv_train_file: ${data_dir}/focus150/train_metadata.csv
  csv_val_file: ${data_dir}/focus150/validation_metadata.csv
  csv_test_file: ${data_dir}/focus150/test_metadata.csv
  batch_size: 128
  num_workers: 0
  pin_memory: false
model:
  _target_: src.models.focus_module.FocusLitModule
  input_size: 67500
  lin1_size: 128
  lin2_size: 256
  lin3_size: 64
  output_size: 1
  lr: 0.001
  weight_decay: 0.0005
callbacks:
  model_checkpoint:
    _target_: pytorch_lightning.callbacks.ModelCheckpoint
    monitor: val/mae
    mode: min
    save_top_k: 1
    save_last: true
    verbose: false
    dirpath: checkpoints/
    filename: epoch_{epoch:03d}
    auto_insert_metric_name: false
  early_stopping:
    _target_: pytorch_lightning.callbacks.EarlyStopping
    monitor: val/mae
    mode: min
    patience: 100
    min_delta: 0
  model_summary:
    _target_: pytorch_lightning.callbacks.RichModelSummary
    max_depth: -1
  rich_progress_bar:
    _target_: pytorch_lightning.callbacks.RichProgressBar
logger:
  csv:
    _target_: pytorch_lightning.loggers.csv_logs.CSVLogger
    save_dir: .
    name: csv/
    prefix: ''
  mlflow:
    _target_: pytorch_lightning.loggers.mlflow.MLFlowLogger
    experiment_name: ${name}
    tracking_uri: ${original_work_dir}/logs/mlflow/mlruns
    tags: null
    save_dir: ./mlruns
    prefix: ''
    artifact_location: null
  tensorboard:
    _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
    save_dir: tensorboard/
    name: null
    version: ${name}
    log_graph: false
    default_hp_metric: true
    prefix: ''
trainer:
  _target_: pytorch_lightning.Trainer
  gpus: 1
  min_epochs: 1
  max_epochs: 100
  resume_from_checkpoint: null
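A minimal, untested sketch of how the saved config above could be turned back into live objects with Hydra's instantiate utility. It assumes the script runs from the repository root of the project that defines src.datamodules and src.models; the ${hydra:runtime.cwd} resolver only exists inside a running Hydra app, so original_work_dir is pinned by hand.

# Sketch (assumption: run from the project root, data/ laid out as in the config)
from omegaconf import OmegaConf
from hydra.utils import instantiate

cfg = OmegaConf.load(
    "logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/.hydra/config.yaml"
)
# ${hydra:runtime.cwd} is not resolvable outside a Hydra app, so fix it manually.
cfg.original_work_dir = "."

datamodule = instantiate(cfg.datamodule)   # FocusDataModule, batch_size=128
model = instantiate(cfg.model)             # FocusLitModule (MLP regressor, lr=1e-3)
print(model)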
logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/.hydra/hydra.yaml
ADDED
@@ -0,0 +1,170 @@
hydra:
  run:
    dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
  sweep:
    dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
    subdir: ${hydra.job.num}
  launcher:
    _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
  sweeper:
    _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper
    max_batch_size: null
  help:
    app_name: ${hydra.job.name}
    header: '${hydra.help.app_name} is powered by Hydra.

      '
    footer: 'Powered by Hydra (https://hydra.cc)

      Use --hydra-help to view Hydra specific help

      '
    template: '${hydra.help.header}

      == Configuration groups ==

      Compose your configuration from those groups (group=option)


      $APP_CONFIG_GROUPS


      == Config ==

      Override anything in the config (foo.bar=value)


      $CONFIG


      ${hydra.help.footer}

      '
  hydra_help:
    template: 'Hydra (${hydra.runtime.version})

      See https://hydra.cc for more info.


      == Flags ==

      $FLAGS_HELP


      == Configuration groups ==

      Compose your configuration from those groups (For example, append hydra/job_logging=disabled
      to command line)


      $HYDRA_CONFIG_GROUPS


      Use ''--cfg hydra'' to Show the Hydra config.

      '
    hydra_help: ???
  hydra_logging:
    version: 1
    formatters:
      colorlog:
        (): colorlog.ColoredFormatter
        format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
    handlers:
      console:
        class: logging.StreamHandler
        formatter: colorlog
        stream: ext://sys.stdout
    root:
      level: INFO
      handlers:
      - console
    disable_existing_loggers: false
  job_logging:
    version: 1
    formatters:
      simple:
        format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
      colorlog:
        (): colorlog.ColoredFormatter
        format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
          - %(message)s'
        log_colors:
          DEBUG: purple
          INFO: green
          WARNING: yellow
          ERROR: red
          CRITICAL: red
    handlers:
      console:
        class: logging.StreamHandler
        formatter: colorlog
        stream: ext://sys.stdout
      file:
        class: logging.FileHandler
        formatter: simple
        filename: ${hydra.job.name}.log
    root:
      level: INFO
      handlers:
      - console
      - file
    disable_existing_loggers: false
  env: {}
  searchpath: []
  callbacks: {}
  output_subdir: .hydra
  overrides:
    hydra: []
    task:
    - experiment=focusMAE_150
  job:
    name: train
    override_dirname: experiment=focusMAE_150
    id: ???
    num: ???
    config_name: train.yaml
    env_set: {}
    env_copy: []
    config:
      override_dirname:
        kv_sep: '='
        item_sep: ','
        exclude_keys: []
  runtime:
    version: 1.1.1
    cwd: /usr/src/app
    config_sources:
    - path: hydra.conf
      schema: pkg
      provider: hydra
    - path: /usr/src/app/configs
      schema: file
      provider: main
    - path: hydra_plugins.hydra_colorlog.conf
      schema: pkg
      provider: hydra-colorlog
    - path: ''
      schema: structured
      provider: schema
    choices:
      local: default.yaml
      hparams_search: null
      debug: null
      experiment: focusMAE_150
      log_dir: default.yaml
      trainer: default.yaml
      logger: many_loggers
      callbacks: default.yaml
      model: focus150.yaml
      datamodule: focus150.yaml
      hydra/env: default
      hydra/callbacks: null
      hydra/job_logging: colorlog
      hydra/hydra_logging: colorlog
      hydra/hydra_help: default
      hydra/help: default
      hydra/sweeper: basic
      hydra/launcher: basic
      hydra/output: default
  verbose: false
logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/.hydra/overrides.yaml
ADDED
@@ -0,0 +1 @@
- experiment=focusMAE_150
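The single override recorded above is enough to re-compose this run's configuration programmatically. A sketch under the assumption that the project's configs/ directory (with train.yaml, as listed under config_sources in hydra.yaml) sits next to the calling script and that Hydra 1.1 is installed:

# Sketch (assumptions noted above; behaviour may differ in other Hydra versions)
from hydra import initialize, compose
from omegaconf import OmegaConf

with initialize(config_path="configs"):
    cfg = compose(config_name="train.yaml", overrides=["experiment=focusMAE_150"])

print(OmegaConf.to_yaml(cfg))  # should mirror the .hydra/config.yaml of this run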
logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/checkpoints/epoch_098.ckpt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b00dc349ed49fe19fb35c15ab867a3c0fb9a8392435b7db8d4ab1515f0a79bda
size 104307526
logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/checkpoints/last.ckpt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:77afcd98cd8b984690546481dfc8db2db99f147387371d2314dea637dbef65bb
size 104307526
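Both .ckpt files are git-lfs pointers, so the actual weights have to be fetched with git lfs pull before use. A hedged sketch of restoring the best checkpoint (epoch_098.ckpt); it assumes FocusLitModule calls self.save_hyperparameters(), which is what lets load_from_checkpoint rebuild the module without extra arguments:

# Sketch (assumption: weights fetched via git-lfs, hparams stored in the checkpoint)
import torch
from src.models.focus_module import FocusLitModule

ckpt_path = (
    "logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/"
    "checkpoints/epoch_098.ckpt"
)
model = FocusLitModule.load_from_checkpoint(ckpt_path, map_location="cpu")
model.eval()

# One focus150 sample is a flattened 150x150 RGB image: 150*150*3 = 67500 inputs.
x = torch.randn(1, 67500)
with torch.no_grad():
    print(model(x))  # predicted focus value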
logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/csv/version_0/hparams.yaml
ADDED
@@ -0,0 +1,50 @@
trainer:
  _target_: pytorch_lightning.Trainer
  gpus: 1
  min_epochs: 1
  max_epochs: 100
  resume_from_checkpoint: null
model:
  _target_: src.models.focus_module.FocusLitModule
  input_size: 67500
  lin1_size: 128
  lin2_size: 256
  lin3_size: 64
  output_size: 1
  lr: 0.001
  weight_decay: 0.0005
datamodule:
  _target_: src.datamodules.focus_datamodule.FocusDataModule
  data_dir: /usr/src/app/data/focus150
  csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
  csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
  csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
  batch_size: 128
  num_workers: 0
  pin_memory: false
seed: 12345
callbacks:
  model_checkpoint:
    _target_: pytorch_lightning.callbacks.ModelCheckpoint
    monitor: val/mae
    mode: min
    save_top_k: 1
    save_last: true
    verbose: false
    dirpath: checkpoints/
    filename: epoch_{epoch:03d}
    auto_insert_metric_name: false
  early_stopping:
    _target_: pytorch_lightning.callbacks.EarlyStopping
    monitor: val/mae
    mode: min
    patience: 100
    min_delta: 0
  model_summary:
    _target_: pytorch_lightning.callbacks.RichModelSummary
    max_depth: -1
  rich_progress_bar:
    _target_: pytorch_lightning.callbacks.RichProgressBar
model/params/total: 8690561
model/params/trainable: 8690561
model/params/non_trainable: 0
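The hyperparameters above imply a small MLP regressor. The exact layer layout is an assumption (the source of FocusLitModule is not part of this commit), but the reported total of 8,690,561 parameters is only reached if each hidden Linear layer is followed by a BatchNorm1d: the Linear layers alone contribute 8,689,665 parameters and the three BatchNorm layers add the remaining 896. A hedged reconstruction:

# Hypothetical reconstruction of the network implied by the hparams above.
# 8,690,561 = (67500*128+128) + 256 + (128*256+256) + 512 + (256*64+64) + 128 + (64*1+1)
import torch.nn as nn

def build_focus_mlp(input_size=67500, lin1=128, lin2=256, lin3=64, output_size=1):
    return nn.Sequential(
        nn.Linear(input_size, lin1), nn.BatchNorm1d(lin1), nn.ReLU(),
        nn.Linear(lin1, lin2), nn.BatchNorm1d(lin2), nn.ReLU(),
        nn.Linear(lin2, lin3), nn.BatchNorm1d(lin3), nn.ReLU(),
        nn.Linear(lin3, output_size),
    )

net = build_focus_mlp()
print(sum(p.numel() for p in net.parameters()))  # 8690561, matching the summary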
logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/csv/version_0/metrics.csv
ADDED
@@ -0,0 +1,202 @@
1 |
+
val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae
|
2 |
+
0.08075811713933945,0.08075811713933945,0.019537409767508507,0,23,,,,
|
3 |
+
,,,0,23,0.11430194973945618,0.11430194973945618,,
|
4 |
+
0.04046476259827614,0.04046476259827614,0.019537409767508507,1,47,,,,
|
5 |
+
,,,1,47,0.040808238089084625,0.040808238089084625,,
|
6 |
+
0.040150225162506104,0.040150225162506104,0.019537409767508507,2,71,,,,
|
7 |
+
,,,2,71,0.029285309836268425,0.029285309836268425,,
|
8 |
+
0.030771758407354355,0.030771758407354355,0.019537409767508507,3,95,,,,
|
9 |
+
,,,3,95,0.02604392170906067,0.02604392170906067,,
|
10 |
+
0.03223102539777756,0.03223102539777756,0.019537409767508507,4,119,,,,
|
11 |
+
,,,4,119,0.02451612800359726,0.02451612800359726,,
|
12 |
+
0.028316136449575424,0.028316136449575424,0.019537409767508507,5,143,,,,
|
13 |
+
,,,5,143,0.023934854194521904,0.023934854194521904,,
|
14 |
+
0.026469340547919273,0.026469340547919273,0.019537409767508507,6,167,,,,
|
15 |
+
,,,6,167,0.021004604175686836,0.021004604175686836,,
|
16 |
+
0.03178180009126663,0.03178180009126663,0.019537409767508507,7,191,,,,
|
17 |
+
,,,7,191,0.021777832880616188,0.021777832880616188,,
|
18 |
+
0.03219511732459068,0.03219511732459068,0.019537409767508507,8,215,,,,
|
19 |
+
,,,8,215,0.022210093215107918,0.022210093215107918,,
|
20 |
+
0.023771945387125015,0.023771945387125015,0.019537409767508507,9,239,,,,
|
21 |
+
,,,9,239,0.021187154576182365,0.021187154576182365,,
|
22 |
+
0.024343175813555717,0.024343175813555717,0.019537409767508507,10,263,,,,
|
23 |
+
,,,10,263,0.020405923947691917,0.020405923947691917,,
|
24 |
+
0.021602997556328773,0.021602997556328773,0.019537409767508507,11,287,,,,
|
25 |
+
,,,11,287,0.018974581733345985,0.018974581733345985,,
|
26 |
+
0.02062990330159664,0.02062990330159664,0.019537409767508507,12,311,,,,
|
27 |
+
,,,12,311,0.01965630240738392,0.01965630240738392,,
|
28 |
+
0.023217681795358658,0.023217681795358658,0.019537409767508507,13,335,,,,
|
29 |
+
,,,13,335,0.018277371302247047,0.018277371302247047,,
|
30 |
+
0.020541978999972343,0.020541978999972343,0.019537409767508507,14,359,,,,
|
31 |
+
,,,14,359,0.017642932012677193,0.017642932012677193,,
|
32 |
+
0.021371176466345787,0.021371174603700638,0.019537409767508507,15,383,,,,
|
33 |
+
,,,15,383,0.0175026785582304,0.0175026785582304,,
|
34 |
+
0.020673882216215134,0.020673882216215134,0.019537409767508507,16,407,,,,
|
35 |
+
,,,16,407,0.01821238547563553,0.01821238547563553,,
|
36 |
+
0.019376106560230255,0.019376106560230255,0.019376106560230255,17,431,,,,
|
37 |
+
,,,17,431,0.017036771401762962,0.017036771401762962,,
|
38 |
+
0.02217184379696846,0.02217184379696846,0.019376106560230255,18,455,,,,
|
39 |
+
,,,18,455,0.017412351444363594,0.017412351444363594,,
|
40 |
+
0.022242296487092972,0.022242296487092972,0.019376106560230255,19,479,,,,
|
41 |
+
,,,19,479,0.016911355778574944,0.016911357641220093,,
|
42 |
+
0.02578875795006752,0.02578875795006752,0.019376106560230255,20,503,,,,
|
43 |
+
,,,20,503,0.01665208861231804,0.016652090474963188,,
|
44 |
+
0.027041783556342125,0.027041783556342125,0.019376106560230255,21,527,,,,
|
45 |
+
,,,21,527,0.01642896607518196,0.01642896607518196,,
|
46 |
+
0.030874773859977722,0.030874773859977722,0.019376106560230255,22,551,,,,
|
47 |
+
,,,22,551,0.01637101173400879,0.01637101173400879,,
|
48 |
+
0.03502318263053894,0.03502318263053894,0.019376106560230255,23,575,,,,
|
49 |
+
,,,23,575,0.01573258265852928,0.01573258265852928,,
|
50 |
+
0.02622189372777939,0.02622189372777939,0.019376106560230255,24,599,,,,
|
51 |
+
,,,24,599,0.016381485387682915,0.016381485387682915,,
|
52 |
+
0.02501680888235569,0.02501680888235569,0.019376106560230255,25,623,,,,
|
53 |
+
,,,25,623,0.016217797994613647,0.016217797994613647,,
|
54 |
+
0.024789953604340553,0.024789953604340553,0.019376106560230255,26,647,,,,
|
55 |
+
,,,26,647,0.016656070947647095,0.016656070947647095,,
|
56 |
+
0.025172732770442963,0.025172732770442963,0.019376106560230255,27,671,,,,
|
57 |
+
,,,27,671,0.016199888661503792,0.016199888661503792,,
|
58 |
+
0.023537959903478622,0.023537959903478622,0.019376106560230255,28,695,,,,
|
59 |
+
,,,28,695,0.016015876084566116,0.016015876084566116,,
|
60 |
+
0.019337965175509453,0.019337965175509453,0.019337965175509453,29,719,,,,
|
61 |
+
,,,29,719,0.016817055642604828,0.016817055642604828,,
|
62 |
+
0.026026232168078423,0.026026232168078423,0.019337965175509453,30,743,,,,
|
63 |
+
,,,30,743,0.016505641862750053,0.016505641862750053,,
|
64 |
+
0.035468939691782,0.035468939691782,0.019337965175509453,31,767,,,,
|
65 |
+
,,,31,767,0.016737060621380806,0.016737060621380806,,
|
66 |
+
0.03180081024765968,0.03180081024765968,0.019337965175509453,32,791,,,,
|
67 |
+
,,,32,791,0.016735482960939407,0.016735482960939407,,
|
68 |
+
0.04579645022749901,0.04579645022749901,0.019337965175509453,33,815,,,,
|
69 |
+
,,,33,815,0.015658926218748093,0.015658926218748093,,
|
70 |
+
0.019655367359519005,0.019655367359519005,0.019337965175509453,34,839,,,,
|
71 |
+
,,,34,839,0.015745699405670166,0.015745699405670166,,
|
72 |
+
0.02635784260928631,0.02635784260928631,0.019337965175509453,35,863,,,,
|
73 |
+
,,,35,863,0.015349706634879112,0.015349706634879112,,
|
74 |
+
0.02536688931286335,0.02536688931286335,0.019337965175509453,36,887,,,,
|
75 |
+
,,,36,887,0.01588103175163269,0.01588103175163269,,
|
76 |
+
0.02131745219230652,0.02131745219230652,0.019337965175509453,37,911,,,,
|
77 |
+
,,,37,911,0.01630052737891674,0.01630052737891674,,
|
78 |
+
0.02174573950469494,0.02174573950469494,0.019337965175509453,38,935,,,,
|
79 |
+
,,,38,935,0.015571440570056438,0.015571440570056438,,
|
80 |
+
0.027131935581564903,0.027131935581564903,0.019337965175509453,39,959,,,,
|
81 |
+
,,,39,959,0.015568464063107967,0.015568464063107967,,
|
82 |
+
0.019006643444299698,0.019006643444299698,0.019006643444299698,40,983,,,,
|
83 |
+
,,,40,983,0.015044812113046646,0.015044812113046646,,
|
84 |
+
0.024117305874824524,0.024117305874824524,0.019006643444299698,41,1007,,,,
|
85 |
+
,,,41,1007,0.014684031717479229,0.014684031717479229,,
|
86 |
+
0.021185727789998055,0.021185727789998055,0.019006643444299698,42,1031,,,,
|
87 |
+
,,,42,1031,0.015303990803658962,0.015303990803658962,,
|
88 |
+
0.02778777852654457,0.02778777852654457,0.019006643444299698,43,1055,,,,
|
89 |
+
,,,43,1055,0.014946572482585907,0.014946572482585907,,
|
90 |
+
0.024881377816200256,0.024881377816200256,0.019006643444299698,44,1079,,,,
|
91 |
+
,,,44,1079,0.015413569286465645,0.015413569286465645,,
|
92 |
+
0.02411677874624729,0.02411677874624729,0.019006643444299698,45,1103,,,,
|
93 |
+
,,,45,1103,0.015456431545317173,0.015456431545317173,,
|
94 |
+
0.0276863444596529,0.0276863444596529,0.019006643444299698,46,1127,,,,
|
95 |
+
,,,46,1127,0.01552390493452549,0.01552390493452549,,
|
96 |
+
0.02184155024588108,0.02184155024588108,0.019006643444299698,47,1151,,,,
|
97 |
+
,,,47,1151,0.015580768696963787,0.015580768696963787,,
|
98 |
+
0.023243313655257225,0.023243313655257225,0.019006643444299698,48,1175,,,,
|
99 |
+
,,,48,1175,0.014817407354712486,0.014817407354712486,,
|
100 |
+
0.055002447217702866,0.055002447217702866,0.019006643444299698,49,1199,,,,
|
101 |
+
,,,49,1199,0.015064268372952938,0.015064268372952938,,
|
102 |
+
0.026484446600079536,0.026484446600079536,0.019006643444299698,50,1223,,,,
|
103 |
+
,,,50,1223,0.014896915294229984,0.014896915294229984,,
|
104 |
+
0.02357296086847782,0.02357296086847782,0.019006643444299698,51,1247,,,,
|
105 |
+
,,,51,1247,0.015501330606639385,0.015501330606639385,,
|
106 |
+
0.03747256472706795,0.03747256472706795,0.019006643444299698,52,1271,,,,
|
107 |
+
,,,52,1271,0.015489108860492706,0.015489108860492706,,
|
108 |
+
0.02912999503314495,0.02912999503314495,0.019006643444299698,53,1295,,,,
|
109 |
+
,,,53,1295,0.015344677492976189,0.015344677492976189,,
|
110 |
+
0.024664100259542465,0.024664100259542465,0.019006643444299698,54,1319,,,,
|
111 |
+
,,,54,1319,0.014608598314225674,0.014608598314225674,,
|
112 |
+
0.023538075387477875,0.023538075387477875,0.019006643444299698,55,1343,,,,
|
113 |
+
,,,55,1343,0.014865655452013016,0.014865657314658165,,
|
114 |
+
0.02696441486477852,0.02696441486477852,0.019006643444299698,56,1367,,,,
|
115 |
+
,,,56,1367,0.014775116927921772,0.014775116927921772,,
|
116 |
+
0.025587955489754677,0.025587955489754677,0.019006643444299698,57,1391,,,,
|
117 |
+
,,,57,1391,0.015271090902388096,0.015271090902388096,,
|
118 |
+
0.03694944828748703,0.03694944828748703,0.019006643444299698,58,1415,,,,
|
119 |
+
,,,58,1415,0.015313581563532352,0.015313581563532352,,
|
120 |
+
0.030968736857175827,0.030968736857175827,0.019006643444299698,59,1439,,,,
|
121 |
+
,,,59,1439,0.014887236058712006,0.014887236058712006,,
|
122 |
+
0.021242544054985046,0.021242544054985046,0.019006643444299698,60,1463,,,,
|
123 |
+
,,,60,1463,0.015105010010302067,0.015105010010302067,,
|
124 |
+
0.021730512380599976,0.021730512380599976,0.019006643444299698,61,1487,,,,
|
125 |
+
,,,61,1487,0.014574754983186722,0.014574754983186722,,
|
126 |
+
0.0256672166287899,0.0256672166287899,0.019006643444299698,62,1511,,,,
|
127 |
+
,,,62,1511,0.014818240888416767,0.014818240888416767,,
|
128 |
+
0.02034403569996357,0.02034403569996357,0.019006643444299698,63,1535,,,,
|
129 |
+
,,,63,1535,0.01398531999439001,0.01398531999439001,,
|
130 |
+
0.029021065682172775,0.029021065682172775,0.019006643444299698,64,1559,,,,
|
131 |
+
,,,64,1559,0.014784194529056549,0.014784194529056549,,
|
132 |
+
0.06704822927713394,0.06704822927713394,0.019006643444299698,65,1583,,,,
|
133 |
+
,,,65,1583,0.014572982676327229,0.014572983607649803,,
|
134 |
+
0.01925153099000454,0.01925153099000454,0.019006643444299698,66,1607,,,,
|
135 |
+
,,,66,1607,0.015125010162591934,0.015125010162591934,,
|
136 |
+
0.02019408345222473,0.02019408345222473,0.019006643444299698,67,1631,,,,
|
137 |
+
,,,67,1631,0.014631032943725586,0.014631032943725586,,
|
138 |
+
0.02392183616757393,0.02392183616757393,0.019006643444299698,68,1655,,,,
|
139 |
+
,,,68,1655,0.015108865685760975,0.015108865685760975,,
|
140 |
+
0.061501890420913696,0.061501890420913696,0.019006643444299698,69,1679,,,,
|
141 |
+
,,,69,1679,0.015544437803328037,0.015544437803328037,,
|
142 |
+
0.0218352098017931,0.0218352098017931,0.019006643444299698,70,1703,,,,
|
143 |
+
,,,70,1703,0.015843627974390984,0.015843627974390984,,
|
144 |
+
0.02737290970981121,0.02737290970981121,0.019006643444299698,71,1727,,,,
|
145 |
+
,,,71,1727,0.015063464641571045,0.015063464641571045,,
|
146 |
+
0.03726331517100334,0.03726331517100334,0.019006643444299698,72,1751,,,,
|
147 |
+
,,,72,1751,0.015330381691455841,0.015330381691455841,,
|
148 |
+
0.019790584221482277,0.019790584221482277,0.019006643444299698,73,1775,,,,
|
149 |
+
,,,73,1775,0.015188358724117279,0.015188358724117279,,
|
150 |
+
0.04075721278786659,0.04075721278786659,0.019006643444299698,74,1799,,,,
|
151 |
+
,,,74,1799,0.014918417669832706,0.014918417669832706,,
|
152 |
+
0.021603049710392952,0.021603049710392952,0.019006643444299698,75,1823,,,,
|
153 |
+
,,,75,1823,0.014973972924053669,0.014973972924053669,,
|
154 |
+
0.02636726014316082,0.02636726014316082,0.019006643444299698,76,1847,,,,
|
155 |
+
,,,76,1847,0.014545914717018604,0.014545914717018604,,
|
156 |
+
0.03517881780862808,0.03517881780862808,0.019006643444299698,77,1871,,,,
|
157 |
+
,,,77,1871,0.015504920855164528,0.015504920855164528,,
|
158 |
+
0.04071342945098877,0.04071342945098877,0.019006643444299698,78,1895,,,,
|
159 |
+
,,,78,1895,0.015443055890500546,0.015443055890500546,,
|
160 |
+
0.02397548221051693,0.02397548221051693,0.019006643444299698,79,1919,,,,
|
161 |
+
,,,79,1919,0.015571530908346176,0.015571530908346176,,
|
162 |
+
0.026645613834261894,0.026645613834261894,0.019006643444299698,80,1943,,,,
|
163 |
+
,,,80,1943,0.014650711789727211,0.014650711789727211,,
|
164 |
+
0.02794080786406994,0.02794080786406994,0.019006643444299698,81,1967,,,,
|
165 |
+
,,,81,1967,0.01609613746404648,0.01609613746404648,,
|
166 |
+
0.019264165312051773,0.019264165312051773,0.019006643444299698,82,1991,,,,
|
167 |
+
,,,82,1991,0.016010327264666557,0.016010327264666557,,
|
168 |
+
0.019411463290452957,0.019411463290452957,0.019006643444299698,83,2015,,,,
|
169 |
+
,,,83,2015,0.015484006144106388,0.015484006144106388,,
|
170 |
+
0.020912759006023407,0.020912759006023407,0.019006643444299698,84,2039,,,,
|
171 |
+
,,,84,2039,0.015166169963777065,0.015166169963777065,,
|
172 |
+
0.027308855205774307,0.027308855205774307,0.019006643444299698,85,2063,,,,
|
173 |
+
,,,85,2063,0.015094844624400139,0.015094844624400139,,
|
174 |
+
0.019831309095025063,0.019831309095025063,0.019006643444299698,86,2087,,,,
|
175 |
+
,,,86,2087,0.015731262043118477,0.015731262043118477,,
|
176 |
+
0.03225191310048103,0.03225191310048103,0.019006643444299698,87,2111,,,,
|
177 |
+
,,,87,2111,0.014846889302134514,0.014846890233457088,,
|
178 |
+
0.023427240550518036,0.023427240550518036,0.019006643444299698,88,2135,,,,
|
179 |
+
,,,88,2135,0.01492029707878828,0.01492029707878828,,
|
180 |
+
0.030728697776794434,0.030728697776794434,0.019006643444299698,89,2159,,,,
|
181 |
+
,,,89,2159,0.015150352381169796,0.015150352381169796,,
|
182 |
+
0.01833338290452957,0.01833338290452957,0.01833338290452957,90,2183,,,,
|
183 |
+
,,,90,2183,0.01481334213167429,0.01481334213167429,,
|
184 |
+
0.021319905295968056,0.021319905295968056,0.01833338290452957,91,2207,,,,
|
185 |
+
,,,91,2207,0.01449230220168829,0.01449230220168829,,
|
186 |
+
0.021426469087600708,0.021426469087600708,0.01833338290452957,92,2231,,,,
|
187 |
+
,,,92,2231,0.014970287680625916,0.014970287680625916,,
|
188 |
+
0.02710602432489395,0.02710602432489395,0.01833338290452957,93,2255,,,,
|
189 |
+
,,,93,2255,0.014930694364011288,0.014930694364011288,,
|
190 |
+
0.02309969626367092,0.02309969626367092,0.01833338290452957,94,2279,,,,
|
191 |
+
,,,94,2279,0.014549686573445797,0.014549686573445797,,
|
192 |
+
0.023076578974723816,0.023076578974723816,0.01833338290452957,95,2303,,,,
|
193 |
+
,,,95,2303,0.014175274409353733,0.014175274409353733,,
|
194 |
+
0.027082040905952454,0.027082040905952454,0.01833338290452957,96,2327,,,,
|
195 |
+
,,,96,2327,0.014505917206406593,0.014505917206406593,,
|
196 |
+
0.02360316924750805,0.02360316924750805,0.01833338290452957,97,2351,,,,
|
197 |
+
,,,97,2351,0.015143108554184437,0.015143108554184437,,
|
198 |
+
0.01816936768591404,0.01816936768591404,0.01816936768591404,98,2375,,,,
|
199 |
+
,,,98,2375,0.015485869720578194,0.015485869720578194,,
|
200 |
+
0.023721860721707344,0.023721860721707344,0.01816936768591404,99,2399,,,,
|
201 |
+
,,,99,2399,0.015308608300983906,0.015308608300983906,,
|
202 |
+
,,,99,2376,,,0.018659403547644615,0.018659403547644615
|
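The CSVLogger file above interleaves validation rows and training rows (the other columns are empty in each case), so per-epoch values are easiest to read after grouping. A short sketch, assuming pandas is available and the run directory is intact:

# Sketch: summarise metrics.csv with pandas
import pandas as pd

df = pd.read_csv(
    "logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/csv/version_0/metrics.csv"
)
per_epoch = df.groupby("epoch")[["train/mae", "val/mae"]].mean()
print(per_epoch.tail())
print("best val/mae:", df["val/mae_best"].dropna().iloc[-1])  # 0.01816936768591404
print("test mae:", df["test/mae"].dropna().iloc[-1])          # 0.018659403547644615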
logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/tensorboard/focusMAE_150/events.out.tfevents.1647960661.e7026f50b4d2.1.0
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1aaf3193c67865db2b213d08cb1ac2818576edad5d4d79081bd0fe7a70b3bd2d
size 36541
logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/tensorboard/focusMAE_150/events.out.tfevents.1647960944.e7026f50b4d2.1.1
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e2785824c084cad959c2ca9bb05a6b412d7f86339ff53c769134c67e86443ee9
size 179
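The two event files are also git-lfs pointers; once fetched, they can be inspected without starting TensorBoard. A sketch using the event accumulator that ships with the tensorboard package (the listed scalar tags are an assumption based on the other log files):

# Sketch: read the TensorBoard event files of this run
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

run_dir = "logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/tensorboard/focusMAE_150"
acc = EventAccumulator(run_dir)
acc.Reload()

print(acc.Tags()["scalars"])               # e.g. ['train/loss', 'val/mae', ...]
for event in acc.Scalars("val/mae")[-3:]:  # last few validation points
    print(event.step, event.value)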
logs/experiments/runs/focusMAE_150/2022-03-22_14-50-59/tensorboard/focusMAE_150/hparams.yaml
ADDED
@@ -0,0 +1,50 @@
trainer:
  _target_: pytorch_lightning.Trainer
  gpus: 1
  min_epochs: 1
  max_epochs: 100
  resume_from_checkpoint: null
model:
  _target_: src.models.focus_module.FocusLitModule
  input_size: 67500
  lin1_size: 128
  lin2_size: 256
  lin3_size: 64
  output_size: 1
  lr: 0.001
  weight_decay: 0.0005
datamodule:
  _target_: src.datamodules.focus_datamodule.FocusDataModule
  data_dir: /usr/src/app/data/focus150
  csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
  csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
  csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
  batch_size: 128
  num_workers: 0
  pin_memory: false
seed: 12345
callbacks:
  model_checkpoint:
    _target_: pytorch_lightning.callbacks.ModelCheckpoint
    monitor: val/mae
    mode: min
    save_top_k: 1
    save_last: true
    verbose: false
    dirpath: checkpoints/
    filename: epoch_{epoch:03d}
    auto_insert_metric_name: false
  early_stopping:
    _target_: pytorch_lightning.callbacks.EarlyStopping
    monitor: val/mae
    mode: min
    patience: 100
    min_delta: 0
  model_summary:
    _target_: pytorch_lightning.callbacks.RichModelSummary
    max_depth: -1
  rich_progress_bar:
    _target_: pytorch_lightning.callbacks.RichProgressBar
model/params/total: 8690561
model/params/trainable: 8690561
model/params/non_trainable: 0
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/meta.yaml
ADDED
@@ -0,0 +1,15 @@
artifact_uri: /usr/src/app/logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/artifacts
end_time: 1647960944217
entry_point_name: ''
experiment_id: '2'
lifecycle_stage: active
name: ''
run_id: 783dae2360d64b5bacdcea58ff55bc5b
run_uuid: 783dae2360d64b5bacdcea58ff55bc5b
source_name: ''
source_type: 4
source_version: ''
start_time: 1647960661955
status: 3
tags: []
user_id: unknown
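The same run can be queried through the MLflow file store instead of reading the raw metric files below. A sketch, assuming mlflow is installed and the script runs from the project root so that logs/mlflow/mlruns resolves:

# Sketch: inspect run 783dae2360d64b5bacdcea58ff55bc5b via the MLflow client
from mlflow.tracking import MlflowClient

client = MlflowClient(tracking_uri="file:./logs/mlflow/mlruns")
run = client.get_run("783dae2360d64b5bacdcea58ff55bc5b")
print(run.data.params["model/lr"], run.data.metrics["test/mae"])

history = client.get_metric_history("783dae2360d64b5bacdcea58ff55bc5b", "val/mae")
print(len(history), history[-1].value)  # 100 points, last one from epoch 99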
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/metrics/epoch
ADDED
@@ -0,0 +1,201 @@
1 |
+
1647960668957 0 23
|
2 |
+
1647960669426 0 23
|
3 |
+
1647960670884 1 47
|
4 |
+
1647960672656 1 47
|
5 |
+
1647960674064 2 71
|
6 |
+
1647960675608 2 71
|
7 |
+
1647960676932 3 95
|
8 |
+
1647960678432 3 95
|
9 |
+
1647960679778 4 119
|
10 |
+
1647960681272 4 119
|
11 |
+
1647960682671 5 143
|
12 |
+
1647960684259 5 143
|
13 |
+
1647960685677 6 167
|
14 |
+
1647960687273 6 167
|
15 |
+
1647960688650 7 191
|
16 |
+
1647960689928 7 191
|
17 |
+
1647960691363 8 215
|
18 |
+
1647960692664 8 215
|
19 |
+
1647960694056 9 239
|
20 |
+
1647960695619 9 239
|
21 |
+
1647960696961 10 263
|
22 |
+
1647960698266 10 263
|
23 |
+
1647960699646 11 287
|
24 |
+
1647960701181 11 287
|
25 |
+
1647960702564 12 311
|
26 |
+
1647960704060 12 311
|
27 |
+
1647960705429 13 335
|
28 |
+
1647960706898 13 335
|
29 |
+
1647960708305 14 359
|
30 |
+
1647960709859 14 359
|
31 |
+
1647960711231 15 383
|
32 |
+
1647960712444 15 383
|
33 |
+
1647960713824 16 407
|
34 |
+
1647960715117 16 407
|
35 |
+
1647960716533 17 431
|
36 |
+
1647960718092 17 431
|
37 |
+
1647960719480 18 455
|
38 |
+
1647960720867 18 455
|
39 |
+
1647960722235 19 479
|
40 |
+
1647960724308 19 479
|
41 |
+
1647960725803 20 503
|
42 |
+
1647960727185 20 503
|
43 |
+
1647960728577 21 527
|
44 |
+
1647960729911 21 527
|
45 |
+
1647960731302 22 551
|
46 |
+
1647960732554 22 551
|
47 |
+
1647960733912 23 575
|
48 |
+
1647960735198 23 575
|
49 |
+
1647960736593 24 599
|
50 |
+
1647960737997 24 599
|
51 |
+
1647960739366 25 623
|
52 |
+
1647960740684 25 623
|
53 |
+
1647960742077 26 647
|
54 |
+
1647960743324 26 647
|
55 |
+
1647960744669 27 671
|
56 |
+
1647960746324 27 671
|
57 |
+
1647960747688 28 695
|
58 |
+
1647960748973 28 695
|
59 |
+
1647960750381 29 719
|
60 |
+
1647960752144 29 719
|
61 |
+
1647960753543 30 743
|
62 |
+
1647960754860 30 743
|
63 |
+
1647960756220 31 767
|
64 |
+
1647960757561 31 767
|
65 |
+
1647960758886 32 791
|
66 |
+
1647960760168 32 791
|
67 |
+
1647960761559 33 815
|
68 |
+
1647960762801 33 815
|
69 |
+
1647960764236 34 839
|
70 |
+
1647960765576 34 839
|
71 |
+
1647960766962 35 863
|
72 |
+
1647960768457 35 863
|
73 |
+
1647960769840 36 887
|
74 |
+
1647960771014 36 887
|
75 |
+
1647960772424 37 911
|
76 |
+
1647960773782 37 911
|
77 |
+
1647960775143 38 935
|
78 |
+
1647960776704 38 935
|
79 |
+
1647960778077 39 959
|
80 |
+
1647960779348 39 959
|
81 |
+
1647960780731 40 983
|
82 |
+
1647960782359 40 983
|
83 |
+
1647960783759 41 1007
|
84 |
+
1647960785085 41 1007
|
85 |
+
1647960786461 42 1031
|
86 |
+
1647960787879 42 1031
|
87 |
+
1647960789302 43 1055
|
88 |
+
1647960790523 43 1055
|
89 |
+
1647960791923 44 1079
|
90 |
+
1647960793179 44 1079
|
91 |
+
1647960794559 45 1103
|
92 |
+
1647960795885 45 1103
|
93 |
+
1647960797255 46 1127
|
94 |
+
1647960798563 46 1127
|
95 |
+
1647960799912 47 1151
|
96 |
+
1647960801194 47 1151
|
97 |
+
1647960802556 48 1175
|
98 |
+
1647960803807 48 1175
|
99 |
+
1647960805190 49 1199
|
100 |
+
1647960806568 49 1199
|
101 |
+
1647960807961 50 1223
|
102 |
+
1647960809301 50 1223
|
103 |
+
1647960810721 51 1247
|
104 |
+
1647960811862 51 1247
|
105 |
+
1647960813292 52 1271
|
106 |
+
1647960814821 52 1271
|
107 |
+
1647960816253 53 1295
|
108 |
+
1647960817595 53 1295
|
109 |
+
1647960818989 54 1319
|
110 |
+
1647960820268 54 1319
|
111 |
+
1647960821650 55 1343
|
112 |
+
1647960823006 55 1343
|
113 |
+
1647960824375 56 1367
|
114 |
+
1647960825721 56 1367
|
115 |
+
1647960827076 57 1391
|
116 |
+
1647960828266 57 1391
|
117 |
+
1647960829601 58 1415
|
118 |
+
1647960830850 58 1415
|
119 |
+
1647960832209 59 1439
|
120 |
+
1647960833552 59 1439
|
121 |
+
1647960834897 60 1463
|
122 |
+
1647960836207 60 1463
|
123 |
+
1647960837669 61 1487
|
124 |
+
1647960838967 61 1487
|
125 |
+
1647960840343 62 1511
|
126 |
+
1647960841649 62 1511
|
127 |
+
1647960843016 63 1535
|
128 |
+
1647960844432 63 1535
|
129 |
+
1647960845824 64 1559
|
130 |
+
1647960847072 64 1559
|
131 |
+
1647960848416 65 1583
|
132 |
+
1647960849703 65 1583
|
133 |
+
1647960851088 66 1607
|
134 |
+
1647960852451 66 1607
|
135 |
+
1647960853829 67 1631
|
136 |
+
1647960855178 67 1631
|
137 |
+
1647960856599 68 1655
|
138 |
+
1647960857960 68 1655
|
139 |
+
1647960859348 69 1679
|
140 |
+
1647960860592 69 1679
|
141 |
+
1647960862007 70 1703
|
142 |
+
1647960863340 70 1703
|
143 |
+
1647960864712 71 1727
|
144 |
+
1647960866049 71 1727
|
145 |
+
1647960867444 72 1751
|
146 |
+
1647960868609 72 1751
|
147 |
+
1647960870010 73 1775
|
148 |
+
1647960871289 73 1775
|
149 |
+
1647960872709 74 1799
|
150 |
+
1647960874115 74 1799
|
151 |
+
1647960875500 75 1823
|
152 |
+
1647960876909 75 1823
|
153 |
+
1647960878282 76 1847
|
154 |
+
1647960879626 76 1847
|
155 |
+
1647960881074 77 1871
|
156 |
+
1647960882373 77 1871
|
157 |
+
1647960883756 78 1895
|
158 |
+
1647960885097 78 1895
|
159 |
+
1647960886498 79 1919
|
160 |
+
1647960887684 79 1919
|
161 |
+
1647960889088 80 1943
|
162 |
+
1647960890349 80 1943
|
163 |
+
1647960891777 81 1967
|
164 |
+
1647960893129 81 1967
|
165 |
+
1647960894503 82 1991
|
166 |
+
1647960895868 82 1991
|
167 |
+
1647960897295 83 2015
|
168 |
+
1647960898636 83 2015
|
169 |
+
1647960900020 84 2039
|
170 |
+
1647960901280 84 2039
|
171 |
+
1647960902640 85 2063
|
172 |
+
1647960903956 85 2063
|
173 |
+
1647960905319 86 2087
|
174 |
+
1647960906633 86 2087
|
175 |
+
1647960908043 87 2111
|
176 |
+
1647960909247 87 2111
|
177 |
+
1647960910635 88 2135
|
178 |
+
1647960911963 88 2135
|
179 |
+
1647960913352 89 2159
|
180 |
+
1647960914635 89 2159
|
181 |
+
1647960916005 90 2183
|
182 |
+
1647960917698 90 2183
|
183 |
+
1647960919105 91 2207
|
184 |
+
1647960920409 91 2207
|
185 |
+
1647960921814 92 2231
|
186 |
+
1647960923273 92 2231
|
187 |
+
1647960924694 93 2255
|
188 |
+
1647960926030 93 2255
|
189 |
+
1647960927440 94 2279
|
190 |
+
1647960928610 94 2279
|
191 |
+
1647960930025 95 2303
|
192 |
+
1647960931279 95 2303
|
193 |
+
1647960932652 96 2327
|
194 |
+
1647960934037 96 2327
|
195 |
+
1647960935381 97 2351
|
196 |
+
1647960936706 97 2351
|
197 |
+
1647960938088 98 2375
|
198 |
+
1647960939585 98 2375
|
199 |
+
1647960940996 99 2399
|
200 |
+
1647960942373 99 2399
|
201 |
+
1647960944126 99 2376
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/metrics/test/loss
ADDED
@@ -0,0 +1 @@
1647960944126 0.018659403547644615 2376
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/metrics/test/mae
ADDED
@@ -0,0 +1 @@
1647960944126 0.018659403547644615 2376
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/metrics/train/loss
ADDED
@@ -0,0 +1,100 @@
1 |
+
1647960669426 0.11430194973945618 23
|
2 |
+
1647960672656 0.040808238089084625 47
|
3 |
+
1647960675608 0.029285309836268425 71
|
4 |
+
1647960678432 0.02604392170906067 95
|
5 |
+
1647960681272 0.02451612800359726 119
|
6 |
+
1647960684259 0.023934854194521904 143
|
7 |
+
1647960687273 0.021004604175686836 167
|
8 |
+
1647960689928 0.021777832880616188 191
|
9 |
+
1647960692664 0.022210093215107918 215
|
10 |
+
1647960695619 0.021187154576182365 239
|
11 |
+
1647960698266 0.020405923947691917 263
|
12 |
+
1647960701181 0.018974581733345985 287
|
13 |
+
1647960704060 0.01965630240738392 311
|
14 |
+
1647960706898 0.018277371302247047 335
|
15 |
+
1647960709859 0.017642932012677193 359
|
16 |
+
1647960712444 0.0175026785582304 383
|
17 |
+
1647960715117 0.01821238547563553 407
|
18 |
+
1647960718092 0.017036771401762962 431
|
19 |
+
1647960720867 0.017412351444363594 455
|
20 |
+
1647960724308 0.016911355778574944 479
|
21 |
+
1647960727185 0.01665208861231804 503
|
22 |
+
1647960729911 0.01642896607518196 527
|
23 |
+
1647960732554 0.01637101173400879 551
|
24 |
+
1647960735198 0.01573258265852928 575
|
25 |
+
1647960737997 0.016381485387682915 599
|
26 |
+
1647960740684 0.016217797994613647 623
|
27 |
+
1647960743324 0.016656070947647095 647
|
28 |
+
1647960746324 0.016199888661503792 671
|
29 |
+
1647960748973 0.016015876084566116 695
|
30 |
+
1647960752144 0.016817055642604828 719
|
31 |
+
1647960754860 0.016505641862750053 743
|
32 |
+
1647960757561 0.016737060621380806 767
|
33 |
+
1647960760168 0.016735482960939407 791
|
34 |
+
1647960762801 0.015658926218748093 815
|
35 |
+
1647960765576 0.015745699405670166 839
|
36 |
+
1647960768457 0.015349706634879112 863
|
37 |
+
1647960771014 0.01588103175163269 887
|
38 |
+
1647960773782 0.01630052737891674 911
|
39 |
+
1647960776704 0.015571440570056438 935
|
40 |
+
1647960779348 0.015568464063107967 959
|
41 |
+
1647960782359 0.015044812113046646 983
|
42 |
+
1647960785085 0.014684031717479229 1007
|
43 |
+
1647960787879 0.015303990803658962 1031
|
44 |
+
1647960790523 0.014946572482585907 1055
|
45 |
+
1647960793179 0.015413569286465645 1079
|
46 |
+
1647960795885 0.015456431545317173 1103
|
47 |
+
1647960798563 0.01552390493452549 1127
|
48 |
+
1647960801194 0.015580768696963787 1151
|
49 |
+
1647960803807 0.014817407354712486 1175
|
50 |
+
1647960806568 0.015064268372952938 1199
|
51 |
+
1647960809301 0.014896915294229984 1223
|
52 |
+
1647960811862 0.015501330606639385 1247
|
53 |
+
1647960814821 0.015489108860492706 1271
|
54 |
+
1647960817595 0.015344677492976189 1295
|
55 |
+
1647960820268 0.014608598314225674 1319
|
56 |
+
1647960823006 0.014865655452013016 1343
|
57 |
+
1647960825721 0.014775116927921772 1367
|
58 |
+
1647960828266 0.015271090902388096 1391
|
59 |
+
1647960830850 0.015313581563532352 1415
|
60 |
+
1647960833552 0.014887236058712006 1439
|
61 |
+
1647960836207 0.015105010010302067 1463
|
62 |
+
1647960838967 0.014574754983186722 1487
|
63 |
+
1647960841649 0.014818240888416767 1511
|
64 |
+
1647960844432 0.01398531999439001 1535
|
65 |
+
1647960847072 0.014784194529056549 1559
|
66 |
+
1647960849703 0.014572982676327229 1583
|
67 |
+
1647960852451 0.015125010162591934 1607
|
68 |
+
1647960855178 0.014631032943725586 1631
|
69 |
+
1647960857960 0.015108865685760975 1655
|
70 |
+
1647960860592 0.015544437803328037 1679
|
71 |
+
1647960863340 0.015843627974390984 1703
|
72 |
+
1647960866049 0.015063464641571045 1727
|
73 |
+
1647960868609 0.015330381691455841 1751
|
74 |
+
1647960871289 0.015188358724117279 1775
|
75 |
+
1647960874115 0.014918417669832706 1799
|
76 |
+
1647960876909 0.014973972924053669 1823
|
77 |
+
1647960879626 0.014545914717018604 1847
|
78 |
+
1647960882373 0.015504920855164528 1871
|
79 |
+
1647960885097 0.015443055890500546 1895
|
80 |
+
1647960887684 0.015571530908346176 1919
|
81 |
+
1647960890349 0.014650711789727211 1943
|
82 |
+
1647960893129 0.01609613746404648 1967
|
83 |
+
1647960895868 0.016010327264666557 1991
|
84 |
+
1647960898636 0.015484006144106388 2015
|
85 |
+
1647960901280 0.015166169963777065 2039
|
86 |
+
1647960903956 0.015094844624400139 2063
|
87 |
+
1647960906633 0.015731262043118477 2087
|
88 |
+
1647960909247 0.014846889302134514 2111
|
89 |
+
1647960911963 0.01492029707878828 2135
|
90 |
+
1647960914635 0.015150352381169796 2159
|
91 |
+
1647960917698 0.01481334213167429 2183
|
92 |
+
1647960920409 0.01449230220168829 2207
|
93 |
+
1647960923273 0.014970287680625916 2231
|
94 |
+
1647960926030 0.014930694364011288 2255
|
95 |
+
1647960928610 0.014549686573445797 2279
|
96 |
+
1647960931279 0.014175274409353733 2303
|
97 |
+
1647960934037 0.014505917206406593 2327
|
98 |
+
1647960936706 0.015143108554184437 2351
|
99 |
+
1647960939585 0.015485869720578194 2375
|
100 |
+
1647960942373 0.015308608300983906 2399
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/metrics/train/mae
ADDED
@@ -0,0 +1,100 @@
1 |
+
1647960669426 0.11430194973945618 23
|
2 |
+
1647960672656 0.040808238089084625 47
|
3 |
+
1647960675608 0.029285309836268425 71
|
4 |
+
1647960678432 0.02604392170906067 95
|
5 |
+
1647960681272 0.02451612800359726 119
|
6 |
+
1647960684259 0.023934854194521904 143
|
7 |
+
1647960687273 0.021004604175686836 167
|
8 |
+
1647960689928 0.021777832880616188 191
|
9 |
+
1647960692664 0.022210093215107918 215
|
10 |
+
1647960695619 0.021187154576182365 239
|
11 |
+
1647960698266 0.020405923947691917 263
|
12 |
+
1647960701181 0.018974581733345985 287
|
13 |
+
1647960704060 0.01965630240738392 311
|
14 |
+
1647960706898 0.018277371302247047 335
|
15 |
+
1647960709859 0.017642932012677193 359
|
16 |
+
1647960712444 0.0175026785582304 383
|
17 |
+
1647960715117 0.01821238547563553 407
|
18 |
+
1647960718092 0.017036771401762962 431
|
19 |
+
1647960720867 0.017412351444363594 455
|
20 |
+
1647960724308 0.016911357641220093 479
|
21 |
+
1647960727185 0.016652090474963188 503
|
22 |
+
1647960729911 0.01642896607518196 527
|
23 |
+
1647960732554 0.01637101173400879 551
|
24 |
+
1647960735198 0.01573258265852928 575
|
25 |
+
1647960737997 0.016381485387682915 599
|
26 |
+
1647960740684 0.016217797994613647 623
|
27 |
+
1647960743324 0.016656070947647095 647
|
28 |
+
1647960746324 0.016199888661503792 671
|
29 |
+
1647960748973 0.016015876084566116 695
|
30 |
+
1647960752144 0.016817055642604828 719
|
31 |
+
1647960754860 0.016505641862750053 743
|
32 |
+
1647960757561 0.016737060621380806 767
|
33 |
+
1647960760168 0.016735482960939407 791
|
34 |
+
1647960762801 0.015658926218748093 815
|
35 |
+
1647960765576 0.015745699405670166 839
|
36 |
+
1647960768457 0.015349706634879112 863
|
37 |
+
1647960771014 0.01588103175163269 887
|
38 |
+
1647960773782 0.01630052737891674 911
|
39 |
+
1647960776704 0.015571440570056438 935
|
40 |
+
1647960779348 0.015568464063107967 959
|
41 |
+
1647960782359 0.015044812113046646 983
|
42 |
+
1647960785085 0.014684031717479229 1007
|
43 |
+
1647960787879 0.015303990803658962 1031
|
44 |
+
1647960790523 0.014946572482585907 1055
|
45 |
+
1647960793179 0.015413569286465645 1079
|
46 |
+
1647960795885 0.015456431545317173 1103
|
47 |
+
1647960798563 0.01552390493452549 1127
|
48 |
+
1647960801194 0.015580768696963787 1151
|
49 |
+
1647960803807 0.014817407354712486 1175
|
50 |
+
1647960806568 0.015064268372952938 1199
|
51 |
+
1647960809301 0.014896915294229984 1223
|
52 |
+
1647960811862 0.015501330606639385 1247
|
53 |
+
1647960814821 0.015489108860492706 1271
|
54 |
+
1647960817595 0.015344677492976189 1295
|
55 |
+
1647960820268 0.014608598314225674 1319
|
56 |
+
1647960823006 0.014865657314658165 1343
|
57 |
+
1647960825721 0.014775116927921772 1367
|
58 |
+
1647960828266 0.015271090902388096 1391
|
59 |
+
1647960830850 0.015313581563532352 1415
|
60 |
+
1647960833552 0.014887236058712006 1439
|
61 |
+
1647960836207 0.015105010010302067 1463
|
62 |
+
1647960838967 0.014574754983186722 1487
|
63 |
+
1647960841649 0.014818240888416767 1511
|
64 |
+
1647960844432 0.01398531999439001 1535
|
65 |
+
1647960847072 0.014784194529056549 1559
|
66 |
+
1647960849703 0.014572983607649803 1583
|
67 |
+
1647960852451 0.015125010162591934 1607
|
68 |
+
1647960855178 0.014631032943725586 1631
|
69 |
+
1647960857960 0.015108865685760975 1655
|
70 |
+
1647960860592 0.015544437803328037 1679
|
71 |
+
1647960863340 0.015843627974390984 1703
|
72 |
+
1647960866049 0.015063464641571045 1727
|
73 |
+
1647960868609 0.015330381691455841 1751
|
74 |
+
1647960871289 0.015188358724117279 1775
|
75 |
+
1647960874115 0.014918417669832706 1799
|
76 |
+
1647960876909 0.014973972924053669 1823
|
77 |
+
1647960879626 0.014545914717018604 1847
|
78 |
+
1647960882373 0.015504920855164528 1871
|
79 |
+
1647960885097 0.015443055890500546 1895
|
80 |
+
1647960887684 0.015571530908346176 1919
|
81 |
+
1647960890349 0.014650711789727211 1943
|
82 |
+
1647960893129 0.01609613746404648 1967
|
83 |
+
1647960895868 0.016010327264666557 1991
|
84 |
+
1647960898636 0.015484006144106388 2015
|
85 |
+
1647960901280 0.015166169963777065 2039
|
86 |
+
1647960903956 0.015094844624400139 2063
|
87 |
+
1647960906633 0.015731262043118477 2087
|
88 |
+
1647960909247 0.014846890233457088 2111
|
89 |
+
1647960911963 0.01492029707878828 2135
|
90 |
+
1647960914635 0.015150352381169796 2159
|
91 |
+
1647960917698 0.01481334213167429 2183
|
92 |
+
1647960920409 0.01449230220168829 2207
|
93 |
+
1647960923273 0.014970287680625916 2231
|
94 |
+
1647960926030 0.014930694364011288 2255
|
95 |
+
1647960928610 0.014549686573445797 2279
|
96 |
+
1647960931279 0.014175274409353733 2303
|
97 |
+
1647960934037 0.014505917206406593 2327
|
98 |
+
1647960936706 0.015143108554184437 2351
|
99 |
+
1647960939585 0.015485869720578194 2375
|
100 |
+
1647960942373 0.015308608300983906 2399
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/metrics/val/loss
ADDED
@@ -0,0 +1,100 @@
1 |
+
1647960668957 0.08075811713933945 23
|
2 |
+
1647960670884 0.04046476259827614 47
|
3 |
+
1647960674064 0.040150225162506104 71
|
4 |
+
1647960676932 0.030771758407354355 95
|
5 |
+
1647960679778 0.03223102539777756 119
|
6 |
+
1647960682671 0.028316136449575424 143
|
7 |
+
1647960685677 0.026469340547919273 167
|
8 |
+
1647960688650 0.03178180009126663 191
|
9 |
+
1647960691363 0.03219511732459068 215
|
10 |
+
1647960694056 0.023771945387125015 239
|
11 |
+
1647960696961 0.024343175813555717 263
|
12 |
+
1647960699646 0.021602997556328773 287
|
13 |
+
1647960702564 0.02062990330159664 311
|
14 |
+
1647960705429 0.023217681795358658 335
|
15 |
+
1647960708305 0.020541978999972343 359
|
16 |
+
1647960711231 0.021371176466345787 383
|
17 |
+
1647960713824 0.020673882216215134 407
|
18 |
+
1647960716533 0.019376106560230255 431
|
19 |
+
1647960719480 0.02217184379696846 455
|
20 |
+
1647960722235 0.022242296487092972 479
|
21 |
+
1647960725803 0.02578875795006752 503
|
22 |
+
1647960728577 0.027041783556342125 527
|
23 |
+
1647960731302 0.030874773859977722 551
|
24 |
+
1647960733912 0.03502318263053894 575
|
25 |
+
1647960736593 0.02622189372777939 599
|
26 |
+
1647960739366 0.02501680888235569 623
|
27 |
+
1647960742077 0.024789953604340553 647
|
28 |
+
1647960744669 0.025172732770442963 671
|
29 |
+
1647960747688 0.023537959903478622 695
|
30 |
+
1647960750381 0.019337965175509453 719
|
31 |
+
1647960753543 0.026026232168078423 743
|
32 |
+
1647960756220 0.035468939691782 767
|
33 |
+
1647960758886 0.03180081024765968 791
|
34 |
+
1647960761559 0.04579645022749901 815
|
35 |
+
1647960764236 0.019655367359519005 839
|
36 |
+
1647960766962 0.02635784260928631 863
|
37 |
+
1647960769840 0.02536688931286335 887
|
38 |
+
1647960772424 0.02131745219230652 911
|
39 |
+
1647960775143 0.02174573950469494 935
|
40 |
+
1647960778077 0.027131935581564903 959
|
41 |
+
1647960780731 0.019006643444299698 983
|
42 |
+
1647960783759 0.024117305874824524 1007
|
43 |
+
1647960786461 0.021185727789998055 1031
|
44 |
+
1647960789302 0.02778777852654457 1055
|
45 |
+
1647960791923 0.024881377816200256 1079
|
46 |
+
1647960794559 0.02411677874624729 1103
|
47 |
+
1647960797255 0.0276863444596529 1127
|
48 |
+
1647960799912 0.02184155024588108 1151
|
49 |
+
1647960802556 0.023243313655257225 1175
|
50 |
+
1647960805190 0.055002447217702866 1199
|
51 |
+
1647960807961 0.026484446600079536 1223
|
52 |
+
1647960810721 0.02357296086847782 1247
|
53 |
+
1647960813292 0.03747256472706795 1271
|
54 |
+
1647960816253 0.02912999503314495 1295
|
55 |
+
1647960818989 0.024664100259542465 1319
|
56 |
+
1647960821650 0.023538075387477875 1343
|
57 |
+
1647960824375 0.02696441486477852 1367
|
58 |
+
1647960827076 0.025587955489754677 1391
|
59 |
+
1647960829601 0.03694944828748703 1415
|
60 |
+
1647960832209 0.030968736857175827 1439
|
61 |
+
1647960834897 0.021242544054985046 1463
|
62 |
+
1647960837669 0.021730512380599976 1487
|
63 |
+
1647960840343 0.0256672166287899 1511
|
64 |
+
1647960843016 0.02034403569996357 1535
|
65 |
+
1647960845824 0.029021065682172775 1559
|
66 |
+
1647960848416 0.06704822927713394 1583
|
67 |
+
1647960851088 0.01925153099000454 1607
|
68 |
+
1647960853829 0.02019408345222473 1631
|
69 |
+
1647960856599 0.02392183616757393 1655
|
70 |
+
1647960859348 0.061501890420913696 1679
|
71 |
+
1647960862007 0.0218352098017931 1703
|
72 |
+
1647960864712 0.02737290970981121 1727
|
73 |
+
1647960867444 0.03726331517100334 1751
|
74 |
+
1647960870010 0.019790584221482277 1775
|
75 |
+
1647960872709 0.04075721278786659 1799
|
76 |
+
1647960875500 0.021603049710392952 1823
|
77 |
+
1647960878282 0.02636726014316082 1847
|
78 |
+
1647960881074 0.03517881780862808 1871
|
79 |
+
1647960883756 0.04071342945098877 1895
|
80 |
+
1647960886498 0.02397548221051693 1919
|
81 |
+
1647960889088 0.026645613834261894 1943
|
82 |
+
1647960891777 0.02794080786406994 1967
|
83 |
+
1647960894503 0.019264165312051773 1991
|
84 |
+
1647960897295 0.019411463290452957 2015
|
85 |
+
1647960900020 0.020912759006023407 2039
|
86 |
+
1647960902640 0.027308855205774307 2063
|
87 |
+
1647960905319 0.019831309095025063 2087
|
88 |
+
1647960908043 0.03225191310048103 2111
|
89 |
+
1647960910635 0.023427240550518036 2135
|
90 |
+
1647960913352 0.030728697776794434 2159
|
91 |
+
1647960916005 0.01833338290452957 2183
|
92 |
+
1647960919105 0.021319905295968056 2207
|
93 |
+
1647960921814 0.021426469087600708 2231
|
94 |
+
1647960924694 0.02710602432489395 2255
|
95 |
+
1647960927440 0.02309969626367092 2279
|
96 |
+
1647960930025 0.023076578974723816 2303
|
97 |
+
1647960932652 0.027082040905952454 2327
|
98 |
+
1647960935381 0.02360316924750805 2351
|
99 |
+
1647960938088 0.01816936768591404 2375
|
100 |
+
1647960940996 0.023721860721707344 2399
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/metrics/val/mae
ADDED
@@ -0,0 +1,100 @@
1 |
+
1647960668957 0.08075811713933945 23
|
2 |
+
1647960670884 0.04046476259827614 47
|
3 |
+
1647960674064 0.040150225162506104 71
|
4 |
+
1647960676932 0.030771758407354355 95
|
5 |
+
1647960679778 0.03223102539777756 119
|
6 |
+
1647960682671 0.028316136449575424 143
|
7 |
+
1647960685677 0.026469340547919273 167
|
8 |
+
1647960688650 0.03178180009126663 191
|
9 |
+
1647960691363 0.03219511732459068 215
|
10 |
+
1647960694056 0.023771945387125015 239
|
11 |
+
1647960696961 0.024343175813555717 263
|
12 |
+
1647960699646 0.021602997556328773 287
|
13 |
+
1647960702564 0.02062990330159664 311
|
14 |
+
1647960705429 0.023217681795358658 335
|
15 |
+
1647960708305 0.020541978999972343 359
|
16 |
+
1647960711231 0.021371174603700638 383
|
17 |
+
1647960713824 0.020673882216215134 407
|
18 |
+
1647960716533 0.019376106560230255 431
|
19 |
+
1647960719480 0.02217184379696846 455
|
20 |
+
1647960722235 0.022242296487092972 479
|
21 |
+
1647960725803 0.02578875795006752 503
|
22 |
+
1647960728577 0.027041783556342125 527
|
23 |
+
1647960731302 0.030874773859977722 551
|
24 |
+
1647960733912 0.03502318263053894 575
|
25 |
+
1647960736593 0.02622189372777939 599
|
26 |
+
1647960739366 0.02501680888235569 623
|
27 |
+
1647960742077 0.024789953604340553 647
|
28 |
+
1647960744669 0.025172732770442963 671
|
29 |
+
1647960747688 0.023537959903478622 695
|
30 |
+
1647960750381 0.019337965175509453 719
|
31 |
+
1647960753543 0.026026232168078423 743
|
32 |
+
1647960756220 0.035468939691782 767
|
33 |
+
1647960758886 0.03180081024765968 791
|
34 |
+
1647960761559 0.04579645022749901 815
|
35 |
+
1647960764236 0.019655367359519005 839
|
36 |
+
1647960766962 0.02635784260928631 863
|
37 |
+
1647960769840 0.02536688931286335 887
|
38 |
+
1647960772424 0.02131745219230652 911
|
39 |
+
1647960775143 0.02174573950469494 935
|
40 |
+
1647960778077 0.027131935581564903 959
|
41 |
+
1647960780731 0.019006643444299698 983
|
42 |
+
1647960783759 0.024117305874824524 1007
|
43 |
+
1647960786461 0.021185727789998055 1031
|
44 |
+
1647960789302 0.02778777852654457 1055
|
45 |
+
1647960791923 0.024881377816200256 1079
|
46 |
+
1647960794559 0.02411677874624729 1103
|
47 |
+
1647960797255 0.0276863444596529 1127
|
48 |
+
1647960799912 0.02184155024588108 1151
|
49 |
+
1647960802556 0.023243313655257225 1175
|
50 |
+
1647960805190 0.055002447217702866 1199
|
51 |
+
1647960807961 0.026484446600079536 1223
|
52 |
+
1647960810721 0.02357296086847782 1247
|
53 |
+
1647960813292 0.03747256472706795 1271
|
54 |
+
1647960816253 0.02912999503314495 1295
|
55 |
+
1647960818989 0.024664100259542465 1319
|
56 |
+
1647960821650 0.023538075387477875 1343
|
57 |
+
1647960824375 0.02696441486477852 1367
|
58 |
+
1647960827076 0.025587955489754677 1391
|
59 |
+
1647960829601 0.03694944828748703 1415
|
60 |
+
1647960832209 0.030968736857175827 1439
|
61 |
+
1647960834897 0.021242544054985046 1463
|
62 |
+
1647960837669 0.021730512380599976 1487
|
63 |
+
1647960840343 0.0256672166287899 1511
|
64 |
+
1647960843016 0.02034403569996357 1535
|
65 |
+
1647960845824 0.029021065682172775 1559
|
66 |
+
1647960848416 0.06704822927713394 1583
|
67 |
+
1647960851088 0.01925153099000454 1607
|
68 |
+
1647960853829 0.02019408345222473 1631
|
69 |
+
1647960856599 0.02392183616757393 1655
|
70 |
+
1647960859348 0.061501890420913696 1679
|
71 |
+
1647960862007 0.0218352098017931 1703
|
72 |
+
1647960864712 0.02737290970981121 1727
|
73 |
+
1647960867444 0.03726331517100334 1751
|
74 |
+
1647960870010 0.019790584221482277 1775
|
75 |
+
1647960872709 0.04075721278786659 1799
|
76 |
+
1647960875500 0.021603049710392952 1823
|
77 |
+
1647960878282 0.02636726014316082 1847
|
78 |
+
1647960881074 0.03517881780862808 1871
|
79 |
+
1647960883756 0.04071342945098877 1895
|
80 |
+
1647960886498 0.02397548221051693 1919
|
81 |
+
1647960889088 0.026645613834261894 1943
|
82 |
+
1647960891777 0.02794080786406994 1967
|
83 |
+
1647960894503 0.019264165312051773 1991
|
84 |
+
1647960897295 0.019411463290452957 2015
|
85 |
+
1647960900020 0.020912759006023407 2039
|
86 |
+
1647960902640 0.027308855205774307 2063
|
87 |
+
1647960905319 0.019831309095025063 2087
|
88 |
+
1647960908043 0.03225191310048103 2111
|
89 |
+
1647960910635 0.023427240550518036 2135
|
90 |
+
1647960913352 0.030728697776794434 2159
|
91 |
+
1647960916005 0.01833338290452957 2183
|
92 |
+
1647960919105 0.021319905295968056 2207
|
93 |
+
1647960921814 0.021426469087600708 2231
|
94 |
+
1647960924694 0.02710602432489395 2255
|
95 |
+
1647960927440 0.02309969626367092 2279
|
96 |
+
1647960930025 0.023076578974723816 2303
|
97 |
+
1647960932652 0.027082040905952454 2327
|
98 |
+
1647960935381 0.02360316924750805 2351
|
99 |
+
1647960938088 0.01816936768591404 2375
|
100 |
+
1647960940996 0.023721860721707344 2399
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/metrics/val/mae_best
ADDED
@@ -0,0 +1,100 @@
1 |
+
1647960668957 0.019537409767508507 23
|
2 |
+
1647960670884 0.019537409767508507 47
|
3 |
+
1647960674064 0.019537409767508507 71
|
4 |
+
1647960676932 0.019537409767508507 95
|
5 |
+
1647960679778 0.019537409767508507 119
|
6 |
+
1647960682671 0.019537409767508507 143
|
7 |
+
1647960685677 0.019537409767508507 167
|
8 |
+
1647960688650 0.019537409767508507 191
|
9 |
+
1647960691363 0.019537409767508507 215
|
10 |
+
1647960694056 0.019537409767508507 239
|
11 |
+
1647960696961 0.019537409767508507 263
|
12 |
+
1647960699646 0.019537409767508507 287
|
13 |
+
1647960702564 0.019537409767508507 311
|
14 |
+
1647960705429 0.019537409767508507 335
|
15 |
+
1647960708305 0.019537409767508507 359
|
16 |
+
1647960711231 0.019537409767508507 383
|
17 |
+
1647960713824 0.019537409767508507 407
|
18 |
+
1647960716533 0.019376106560230255 431
|
19 |
+
1647960719480 0.019376106560230255 455
|
20 |
+
1647960722235 0.019376106560230255 479
|
21 |
+
1647960725803 0.019376106560230255 503
|
22 |
+
1647960728577 0.019376106560230255 527
|
23 |
+
1647960731302 0.019376106560230255 551
|
24 |
+
1647960733912 0.019376106560230255 575
|
25 |
+
1647960736593 0.019376106560230255 599
|
26 |
+
1647960739366 0.019376106560230255 623
|
27 |
+
1647960742077 0.019376106560230255 647
|
28 |
+
1647960744669 0.019376106560230255 671
|
29 |
+
1647960747688 0.019376106560230255 695
|
30 |
+
1647960750381 0.019337965175509453 719
|
31 |
+
1647960753543 0.019337965175509453 743
|
32 |
+
1647960756220 0.019337965175509453 767
|
33 |
+
1647960758886 0.019337965175509453 791
|
34 |
+
1647960761559 0.019337965175509453 815
|
35 |
+
1647960764236 0.019337965175509453 839
|
36 |
+
1647960766962 0.019337965175509453 863
|
37 |
+
1647960769840 0.019337965175509453 887
|
38 |
+
1647960772424 0.019337965175509453 911
|
39 |
+
1647960775143 0.019337965175509453 935
|
40 |
+
1647960778077 0.019337965175509453 959
|
41 |
+
1647960780731 0.019006643444299698 983
|
42 |
+
1647960783759 0.019006643444299698 1007
|
43 |
+
1647960786461 0.019006643444299698 1031
|
44 |
+
1647960789302 0.019006643444299698 1055
|
45 |
+
1647960791923 0.019006643444299698 1079
|
46 |
+
1647960794559 0.019006643444299698 1103
|
47 |
+
1647960797255 0.019006643444299698 1127
|
48 |
+
1647960799912 0.019006643444299698 1151
|
49 |
+
1647960802556 0.019006643444299698 1175
|
50 |
+
1647960805190 0.019006643444299698 1199
|
51 |
+
1647960807961 0.019006643444299698 1223
|
52 |
+
1647960810721 0.019006643444299698 1247
|
53 |
+
1647960813292 0.019006643444299698 1271
|
54 |
+
1647960816253 0.019006643444299698 1295
|
55 |
+
1647960818989 0.019006643444299698 1319
|
56 |
+
1647960821650 0.019006643444299698 1343
|
57 |
+
1647960824375 0.019006643444299698 1367
|
58 |
+
1647960827076 0.019006643444299698 1391
|
59 |
+
1647960829601 0.019006643444299698 1415
|
60 |
+
1647960832209 0.019006643444299698 1439
|
61 |
+
1647960834897 0.019006643444299698 1463
|
62 |
+
1647960837669 0.019006643444299698 1487
|
63 |
+
1647960840343 0.019006643444299698 1511
|
64 |
+
1647960843016 0.019006643444299698 1535
|
65 |
+
1647960845824 0.019006643444299698 1559
|
66 |
+
1647960848416 0.019006643444299698 1583
|
67 |
+
1647960851088 0.019006643444299698 1607
|
68 |
+
1647960853829 0.019006643444299698 1631
|
69 |
+
1647960856599 0.019006643444299698 1655
|
70 |
+
1647960859348 0.019006643444299698 1679
|
71 |
+
1647960862007 0.019006643444299698 1703
|
72 |
+
1647960864712 0.019006643444299698 1727
|
73 |
+
1647960867444 0.019006643444299698 1751
|
74 |
+
1647960870010 0.019006643444299698 1775
|
75 |
+
1647960872709 0.019006643444299698 1799
|
76 |
+
1647960875500 0.019006643444299698 1823
|
77 |
+
1647960878282 0.019006643444299698 1847
|
78 |
+
1647960881074 0.019006643444299698 1871
|
79 |
+
1647960883756 0.019006643444299698 1895
|
80 |
+
1647960886498 0.019006643444299698 1919
|
81 |
+
1647960889088 0.019006643444299698 1943
|
82 |
+
1647960891777 0.019006643444299698 1967
|
83 |
+
1647960894503 0.019006643444299698 1991
|
84 |
+
1647960897295 0.019006643444299698 2015
|
85 |
+
1647960900020 0.019006643444299698 2039
|
86 |
+
1647960902640 0.019006643444299698 2063
|
87 |
+
1647960905319 0.019006643444299698 2087
|
88 |
+
1647960908043 0.019006643444299698 2111
|
89 |
+
1647960910635 0.019006643444299698 2135
|
90 |
+
1647960913352 0.019006643444299698 2159
|
91 |
+
1647960916005 0.01833338290452957 2183
|
92 |
+
1647960919105 0.01833338290452957 2207
|
93 |
+
1647960921814 0.01833338290452957 2231
|
94 |
+
1647960924694 0.01833338290452957 2255
|
95 |
+
1647960927440 0.01833338290452957 2279
|
96 |
+
1647960930025 0.01833338290452957 2303
|
97 |
+
1647960932652 0.01833338290452957 2327
|
98 |
+
1647960935381 0.01833338290452957 2351
|
99 |
+
1647960938088 0.01816936768591404 2375
|
100 |
+
1647960940996 0.01816936768591404 2399
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/early_stopping/_target_
ADDED
@@ -0,0 +1 @@
1 |
+
pytorch_lightning.callbacks.EarlyStopping
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/early_stopping/min_delta
ADDED
@@ -0,0 +1 @@
1 |
+
0
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/early_stopping/mode
ADDED
@@ -0,0 +1 @@
1 |
+
min
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/early_stopping/monitor
ADDED
@@ -0,0 +1 @@
1 |
+
val/mae
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/early_stopping/patience
ADDED
@@ -0,0 +1 @@
1 |
+
100
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_checkpoint/_target_
ADDED
@@ -0,0 +1 @@
1 |
+
pytorch_lightning.callbacks.ModelCheckpoint
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_checkpoint/auto_insert_metric_name
ADDED
@@ -0,0 +1 @@
1 |
+
False
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_checkpoint/dirpath
ADDED
@@ -0,0 +1 @@
1 |
+
checkpoints/
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_checkpoint/filename
ADDED
@@ -0,0 +1 @@
1 |
+
epoch_{epoch:03d}
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_checkpoint/mode
ADDED
@@ -0,0 +1 @@
1 |
+
min
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_checkpoint/monitor
ADDED
@@ -0,0 +1 @@
1 |
+
val/mae
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_checkpoint/save_last
ADDED
@@ -0,0 +1 @@
1 |
+
True
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_checkpoint/save_top_k
ADDED
@@ -0,0 +1 @@
1 |
+
1
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_checkpoint/verbose
ADDED
@@ -0,0 +1 @@
1 |
+
False
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_summary/_target_
ADDED
@@ -0,0 +1 @@
1 |
+
pytorch_lightning.callbacks.RichModelSummary
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/model_summary/max_depth
ADDED
@@ -0,0 +1 @@
1 |
+
-1
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/callbacks/rich_progress_bar/_target_
ADDED
@@ -0,0 +1 @@
1 |
+
pytorch_lightning.callbacks.RichProgressBar
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/datamodule/_target_
ADDED
@@ -0,0 +1 @@
1 |
+
src.datamodules.focus_datamodule.FocusDataModule
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/datamodule/batch_size
ADDED
@@ -0,0 +1 @@
1 |
+
128
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/datamodule/csv_test_file
ADDED
@@ -0,0 +1 @@
1 |
+
/usr/src/app/data/focus150/test_metadata.csv
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/datamodule/csv_train_file
ADDED
@@ -0,0 +1 @@
1 |
+
/usr/src/app/data/focus150/train_metadata.csv
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/datamodule/csv_val_file
ADDED
@@ -0,0 +1 @@
1 |
+
/usr/src/app/data/focus150/validation_metadata.csv
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/datamodule/data_dir
ADDED
@@ -0,0 +1 @@
1 |
+
/usr/src/app/data/focus150
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/datamodule/num_workers
ADDED
@@ -0,0 +1 @@
1 |
+
0
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/datamodule/pin_memory
ADDED
@@ -0,0 +1 @@
1 |
+
False
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/model/_target_
ADDED
@@ -0,0 +1 @@
1 |
+
src.models.focus_module.FocusLitModule
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/model/input_size
ADDED
@@ -0,0 +1 @@
1 |
+
67500
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/model/lin1_size
ADDED
@@ -0,0 +1 @@
1 |
+
128
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/model/lin2_size
ADDED
@@ -0,0 +1 @@
1 |
+
256
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/model/lin3_size
ADDED
@@ -0,0 +1 @@
1 |
+
64
|
logs/mlflow/mlruns/2/783dae2360d64b5bacdcea58ff55bc5b/params/model/lr
ADDED
@@ -0,0 +1 @@
1 |
+
0.001
|