Hannes Kuchelmeister committed
Commit 1fb6fd9
1 Parent(s): c722be2

change conv model to use activation function and rerun a training run

This view is limited to 50 files because it contains too many changes. See raw diff.
- logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/.hydra/config.yaml +80 -0
- logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/.hydra/hydra.yaml +170 -0
- logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/.hydra/overrides.yaml +1 -0
- logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/checkpoints/epoch_075.ckpt +3 -0
- logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/checkpoints/last.ckpt +3 -0
- logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/csv/version_0/hparams.yaml +54 -0
- logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/csv/version_0/metrics.csv +202 -0
- logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/tensorboard/focusConvMSE_150/events.out.tfevents.1652255185.b0402e0214ff.1.0 +3 -0
- logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/tensorboard/focusConvMSE_150/events.out.tfevents.1652255403.b0402e0214ff.1.1 +3 -0
- logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/tensorboard/focusConvMSE_150/hparams.yaml +54 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/meta.yaml +15 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/metrics/epoch +201 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/metrics/test/loss +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/metrics/test/mae +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/metrics/train/loss +100 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/metrics/train/mae +100 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/metrics/val/loss +100 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/metrics/val/mae +100 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/metrics/val/mae_best +100 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/early_stopping/_target_ +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/early_stopping/min_delta +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/early_stopping/mode +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/early_stopping/monitor +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/early_stopping/patience +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_checkpoint/_target_ +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_checkpoint/auto_insert_metric_name +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_checkpoint/dirpath +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_checkpoint/filename +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_checkpoint/mode +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_checkpoint/monitor +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_checkpoint/save_last +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_checkpoint/save_top_k +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_checkpoint/verbose +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_summary/_target_ +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_summary/max_depth +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/rich_progress_bar/_target_ +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/datamodule/_target_ +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/datamodule/batch_size +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/datamodule/csv_test_file +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/datamodule/csv_train_file +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/datamodule/csv_val_file +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/datamodule/data_dir +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/datamodule/num_workers +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/datamodule/pin_memory +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/model/_target_ +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/model/conv1_channels +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/model/conv1_size +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/model/conv2_channels +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/model/conv2_size +1 -0
- logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/model/image_size +1 -0
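
The commit message says the convolutional model was changed to use an activation function before the training run was repeated; the model source itself (src/models/focus_conv_module.py) is not part of this 50-file view, only its logs are. As a rough, hypothetical sketch of the kind of change described, with layer sizes copied from the hyperparameters recorded in the configs below, and a 3-channel input and ReLU activation assumed:

# Hypothetical sketch only: the real src.models.focus_conv_module.FocusConvLitModule
# is not shown in this diff. Sizes mirror the logged hparams (image_size=150,
# conv1 6x5x5, conv2 16x5x5, pool_size=2, lin1=100, lin2=80, output=1).
import torch
from torch import nn


class ConvRegressorSketch(nn.Module):
    def __init__(self, image_size=150, conv1_channels=6, conv1_size=5,
                 conv2_channels=16, conv2_size=5, pool_size=2,
                 lin1_size=100, lin2_size=80, output_size=1):
        super().__init__()
        self.conv1 = nn.Conv2d(3, conv1_channels, conv1_size)
        self.conv2 = nn.Conv2d(conv1_channels, conv2_channels, conv2_size)
        self.pool = nn.MaxPool2d(pool_size)
        self.act = nn.ReLU()  # the activation the commit message refers to (assumed)

        # Spatial size after two conv+pool stages, valid padding assumed.
        s = (image_size - conv1_size + 1) // pool_size
        s = (s - conv2_size + 1) // pool_size

        self.lin1 = nn.Linear(conv2_channels * s * s, lin1_size)
        self.lin2 = nn.Linear(lin1_size, lin2_size)
        self.out = nn.Linear(lin2_size, output_size)

    def forward(self, x):
        x = self.pool(self.act(self.conv1(x)))
        x = self.pool(self.act(self.conv2(x)))
        x = torch.flatten(x, 1)
        x = self.act(self.lin1(x))
        x = self.act(self.lin2(x))
        return self.out(x)
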
logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/.hydra/config.yaml
ADDED
@@ -0,0 +1,80 @@
+original_work_dir: ${hydra:runtime.cwd}
+data_dir: ${original_work_dir}/data
+print_config: true
+ignore_warnings: true
+train: true
+test: true
+seed: 12345
+name: focusConvMSE_150
+datamodule:
+  _target_: src.datamodules.focus_datamodule.FocusDataModule
+  data_dir: ${data_dir}/focus150
+  csv_train_file: ${data_dir}/focus150/train_metadata.csv
+  csv_val_file: ${data_dir}/focus150/validation_metadata.csv
+  csv_test_file: ${data_dir}/focus150/test_metadata.csv
+  batch_size: 128
+  num_workers: 0
+  pin_memory: false
+model:
+  _target_: src.models.focus_conv_module.FocusConvLitModule
+  image_size: 150
+  pool_size: 2
+  conv1_size: 5
+  conv1_channels: 6
+  conv2_size: 5
+  conv2_channels: 16
+  lin1_size: 100
+  lin2_size: 80
+  output_size: 1
+  lr: 0.001
+  weight_decay: 0.0005
+callbacks:
+  model_checkpoint:
+    _target_: pytorch_lightning.callbacks.ModelCheckpoint
+    monitor: val/mae
+    mode: min
+    save_top_k: 1
+    save_last: true
+    verbose: false
+    dirpath: checkpoints/
+    filename: epoch_{epoch:03d}
+    auto_insert_metric_name: false
+  early_stopping:
+    _target_: pytorch_lightning.callbacks.EarlyStopping
+    monitor: val/mae
+    mode: min
+    patience: 100
+    min_delta: 0
+  model_summary:
+    _target_: pytorch_lightning.callbacks.RichModelSummary
+    max_depth: -1
+  rich_progress_bar:
+    _target_: pytorch_lightning.callbacks.RichProgressBar
+logger:
+  csv:
+    _target_: pytorch_lightning.loggers.csv_logs.CSVLogger
+    save_dir: .
+    name: csv/
+    prefix: ''
+  mlflow:
+    _target_: pytorch_lightning.loggers.mlflow.MLFlowLogger
+    experiment_name: ${name}
+    tracking_uri: ${original_work_dir}/logs/mlflow/mlruns
+    tags: null
+    save_dir: ./mlruns
+    prefix: ''
+    artifact_location: null
+  tensorboard:
+    _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
+    save_dir: tensorboard/
+    name: null
+    version: ${name}
+    log_graph: false
+    default_hp_metric: true
+    prefix: ''
+trainer:
+  _target_: pytorch_lightning.Trainer
+  gpus: 1
+  min_epochs: 1
+  max_epochs: 100
+  resume_from_checkpoint: null

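Everything needed to rebuild the run's objects is captured in this saved Hydra config. A minimal sketch of re-instantiating the model and datamodule from it, assuming the repository's src package is importable and hydra/omegaconf are installed (the interpolated original_work_dir has to be pinned manually outside a Hydra app):

# Sketch: rebuild the logged objects from the saved .hydra/config.yaml above.
from omegaconf import OmegaConf
import hydra

cfg = OmegaConf.load(
    "logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/.hydra/config.yaml"
)
# ${hydra:runtime.cwd} is only resolvable inside a Hydra app, so pin it here (assumption).
cfg.original_work_dir = "/usr/src/app"

model = hydra.utils.instantiate(cfg.model)            # -> FocusConvLitModule
datamodule = hydra.utils.instantiate(cfg.datamodule)  # -> FocusDataModule
print(type(model).__name__, type(datamodule).__name__)
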
logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/.hydra/hydra.yaml
ADDED
@@ -0,0 +1,170 @@
+hydra:
+  run:
+    dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
+  sweep:
+    dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S}
+    subdir: ${hydra.job.num}
+  launcher:
+    _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
+  sweeper:
+    _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper
+    max_batch_size: null
+  help:
+    app_name: ${hydra.job.name}
+    header: '${hydra.help.app_name} is powered by Hydra.
+
+      '
+    footer: 'Powered by Hydra (https://hydra.cc)
+
+      Use --hydra-help to view Hydra specific help
+
+      '
+    template: '${hydra.help.header}
+
+      == Configuration groups ==
+
+      Compose your configuration from those groups (group=option)
+
+
+      $APP_CONFIG_GROUPS
+
+
+      == Config ==
+
+      Override anything in the config (foo.bar=value)
+
+
+      $CONFIG
+
+
+      ${hydra.help.footer}
+
+      '
+  hydra_help:
+    template: 'Hydra (${hydra.runtime.version})
+
+      See https://hydra.cc for more info.
+
+
+      == Flags ==
+
+      $FLAGS_HELP
+
+
+      == Configuration groups ==
+
+      Compose your configuration from those groups (For example, append hydra/job_logging=disabled
+      to command line)
+
+
+      $HYDRA_CONFIG_GROUPS
+
+
+      Use ''--cfg hydra'' to Show the Hydra config.
+
+      '
+    hydra_help: ???
+  hydra_logging:
+    version: 1
+    formatters:
+      colorlog:
+        (): colorlog.ColoredFormatter
+        format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
+    handlers:
+      console:
+        class: logging.StreamHandler
+        formatter: colorlog
+        stream: ext://sys.stdout
+    root:
+      level: INFO
+      handlers:
+      - console
+    disable_existing_loggers: false
+  job_logging:
+    version: 1
+    formatters:
+      simple:
+        format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
+      colorlog:
+        (): colorlog.ColoredFormatter
+        format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
+          - %(message)s'
+        log_colors:
+          DEBUG: purple
+          INFO: green
+          WARNING: yellow
+          ERROR: red
+          CRITICAL: red
+    handlers:
+      console:
+        class: logging.StreamHandler
+        formatter: colorlog
+        stream: ext://sys.stdout
+      file:
+        class: logging.FileHandler
+        formatter: simple
+        filename: ${hydra.job.name}.log
+    root:
+      level: INFO
+      handlers:
+      - console
+      - file
+    disable_existing_loggers: false
+  env: {}
+  searchpath: []
+  callbacks: {}
+  output_subdir: .hydra
+  overrides:
+    hydra: []
+    task:
+    - experiment=focusConvMSE_150
+  job:
+    name: train
+    override_dirname: experiment=focusConvMSE_150
+    id: ???
+    num: ???
+    config_name: train.yaml
+    env_set: {}
+    env_copy: []
+    config:
+      override_dirname:
+        kv_sep: '='
+        item_sep: ','
+        exclude_keys: []
+  runtime:
+    version: 1.1.2
+    cwd: /usr/src/app
+    config_sources:
+    - path: hydra.conf
+      schema: pkg
+      provider: hydra
+    - path: /usr/src/app/configs
+      schema: file
+      provider: main
+    - path: hydra_plugins.hydra_colorlog.conf
+      schema: pkg
+      provider: hydra-colorlog
+    - path: ''
+      schema: structured
+      provider: schema
+    choices:
+      local: default.yaml
+      hparams_search: null
+      debug: null
+      experiment: focusConvMSE_150
+      log_dir: default.yaml
+      trainer: default.yaml
+      logger: many_loggers
+      callbacks: default.yaml
+      model: focusConv_150.yaml
+      datamodule: focus150.yaml
+      hydra/env: default
+      hydra/callbacks: null
+      hydra/job_logging: colorlog
+      hydra/hydra_logging: colorlog
+      hydra/hydra_help: default
+      hydra/help: default
+      hydra/sweeper: basic
+      hydra/launcher: basic
+      hydra/output: default
+  verbose: false

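The hydra.yaml above records the job metadata (config_name: train.yaml, a single experiment=focusConvMSE_150 override, Hydra 1.1.2), and the overrides file below stores that override verbatim. A hedged sketch of re-composing the same configuration with Hydra's compose API; the configs/ directory location is an assumption based on the config_sources entry:

# Sketch: re-compose the run's config from the recorded override (Hydra 1.1.x style).
from hydra import compose, initialize

with initialize(config_path="configs"):  # /usr/src/app/configs in the recorded run
    cfg = compose(config_name="train", overrides=["experiment=focusConvMSE_150"])
    print(cfg.name, cfg.model["_target_"])
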
logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/.hydra/overrides.yaml
ADDED
@@ -0,0 +1 @@
+- experiment=focusConvMSE_150

logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/checkpoints/epoch_075.ckpt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1388bc7e1659b1d7365261d6b7561f73b08cd759cc24b94028603d9cccfa82d7
+size 22354437

logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/checkpoints/last.ckpt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:28a726f99e423ff51a8a6660f08a299d3bd4d24d038ab02d088118246133fa32
+size 22354437

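Both checkpoints are committed as Git LFS pointers (about 22 MB each), so the weights themselves have to be fetched with git lfs pull before use. After that, restoring the best epoch is standard PyTorch Lightning; a sketch, with the class path taken from the configs in this commit:

# Sketch: load the best checkpoint of this run (requires `git lfs pull` first).
from src.models.focus_conv_module import FocusConvLitModule  # class path from the config

ckpt_path = (
    "logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/"
    "checkpoints/epoch_075.ckpt"
)
model = FocusConvLitModule.load_from_checkpoint(ckpt_path)
model.eval()  # ready for inference on focus-150 crops
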
logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/csv/version_0/hparams.yaml
ADDED
@@ -0,0 +1,54 @@
+trainer:
+  _target_: pytorch_lightning.Trainer
+  gpus: 1
+  min_epochs: 1
+  max_epochs: 100
+  resume_from_checkpoint: null
+model:
+  _target_: src.models.focus_conv_module.FocusConvLitModule
+  image_size: 150
+  pool_size: 2
+  conv1_size: 5
+  conv1_channels: 6
+  conv2_size: 5
+  conv2_channels: 16
+  lin1_size: 100
+  lin2_size: 80
+  output_size: 1
+  lr: 0.001
+  weight_decay: 0.0005
+datamodule:
+  _target_: src.datamodules.focus_datamodule.FocusDataModule
+  data_dir: /usr/src/app/data/focus150
+  csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
+  csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
+  csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
+  batch_size: 128
+  num_workers: 0
+  pin_memory: false
+seed: 12345
+callbacks:
+  model_checkpoint:
+    _target_: pytorch_lightning.callbacks.ModelCheckpoint
+    monitor: val/mae
+    mode: min
+    save_top_k: 1
+    save_last: true
+    verbose: false
+    dirpath: checkpoints/
+    filename: epoch_{epoch:03d}
+    auto_insert_metric_name: false
+  early_stopping:
+    _target_: pytorch_lightning.callbacks.EarlyStopping
+    monitor: val/mae
+    mode: min
+    patience: 100
+    min_delta: 0
+  model_summary:
+    _target_: pytorch_lightning.callbacks.RichModelSummary
+    max_depth: -1
+  rich_progress_bar:
+    _target_: pytorch_lightning.callbacks.RichProgressBar
+model/params/total: 1861789
+model/params/trainable: 1861789
+model/params/non_trainable: 0

logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/csv/version_0/metrics.csv
ADDED
@@ -0,0 +1,202 @@
1 |
+
val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae
|
2 |
+
0.00044489698484539986,0.017681343480944633,0.017681343480944633,0,23,,,,
|
3 |
+
,,,1,23,0.008887404575943947,0.04211921989917755,,
|
4 |
+
0.00043419786379672587,0.017488993704319,0.017488993704319,1,47,,,,
|
5 |
+
,,,2,47,0.00041541943210177124,0.016959283500909805,,
|
6 |
+
0.00043711316538974643,0.017531242221593857,0.017488993704319,2,71,,,,
|
7 |
+
,,,3,71,0.0004184250719845295,0.01708288863301277,,
|
8 |
+
0.0004245176096446812,0.01731405407190323,0.01731405407190323,3,95,,,,
|
9 |
+
,,,4,95,0.00041017148760147393,0.016914093866944313,,
|
10 |
+
0.0004187379090581089,0.01721024699509144,0.01721024699509144,4,119,,,,
|
11 |
+
,,,5,119,0.000404045800678432,0.01681438460946083,,
|
12 |
+
0.0004181358963251114,0.017218105494976044,0.01721024699509144,5,143,,,,
|
13 |
+
,,,6,143,0.00040396256372332573,0.016813334077596664,,
|
14 |
+
0.00041875889291986823,0.017209554091095924,0.017209554091095924,6,167,,,,
|
15 |
+
,,,7,167,0.00040354690281674266,0.0168131273239851,,
|
16 |
+
0.0004179416282568127,0.017206067219376564,0.017206067219376564,7,191,,,,
|
17 |
+
,,,8,191,0.0004028608382213861,0.01682186685502529,,
|
18 |
+
0.0004198983369860798,0.01723366603255272,0.017206067219376564,8,215,,,,
|
19 |
+
,,,9,215,0.0004042572109028697,0.016848096624016762,,
|
20 |
+
0.00042069426854141057,0.017255961894989014,0.017206067219376564,9,239,,,,
|
21 |
+
,,,10,239,0.00040450014057569206,0.0168466754257679,,
|
22 |
+
0.00041716272244229913,0.0172098558396101,0.017206067219376564,10,263,,,,
|
23 |
+
,,,11,263,0.00040507156518287957,0.01684463769197464,,
|
24 |
+
0.00041914221947081387,0.017262130975723267,0.017206067219376564,11,287,,,,
|
25 |
+
,,,12,287,0.000402468052925542,0.01681079901754856,,
|
26 |
+
0.0004204396391287446,0.01725824549794197,0.017206067219376564,12,311,,,,
|
27 |
+
,,,13,311,0.0004039796185679734,0.016856476664543152,,
|
28 |
+
0.0004188572056591511,0.01725197769701481,0.017206067219376564,13,335,,,,
|
29 |
+
,,,14,335,0.0004027166869491339,0.016827981919050217,,
|
30 |
+
0.0004182940174359828,0.01724371127784252,0.017206067219376564,14,359,,,,
|
31 |
+
,,,15,359,0.0004023931105621159,0.016813194379210472,,
|
32 |
+
0.0004178732051514089,0.01721191592514515,0.017206067219376564,15,383,,,,
|
33 |
+
,,,16,383,0.00040235259803012013,0.016825344413518906,,
|
34 |
+
0.00041836718446575105,0.017206646502017975,0.017206067219376564,16,407,,,,
|
35 |
+
,,,17,407,0.00040349853225052357,0.01684238761663437,,
|
36 |
+
0.0004207307065371424,0.017287323251366615,0.017206067219376564,17,431,,,,
|
37 |
+
,,,18,431,0.00040197305497713387,0.016820793971419334,,
|
38 |
+
0.0004213627544231713,0.01729772984981537,0.017206067219376564,18,455,,,,
|
39 |
+
,,,19,455,0.00040126233943738043,0.016783758997917175,,
|
40 |
+
0.0004186522855889052,0.01721062697470188,0.017206067219376564,19,479,,,,
|
41 |
+
,,,20,479,0.00040246162097901106,0.01681712083518505,,
|
42 |
+
0.0004174558271188289,0.01723051443696022,0.017206067219376564,20,503,,,,
|
43 |
+
,,,21,503,0.0004025833623018116,0.016804421320557594,,
|
44 |
+
0.0004171860055066645,0.017213787883520126,0.017206067219376564,21,527,,,,
|
45 |
+
,,,22,527,0.0004038927727378905,0.016853438690304756,,
|
46 |
+
0.0004291511722840369,0.017394769936800003,0.017206067219376564,22,551,,,,
|
47 |
+
,,,23,551,0.0004055915051139891,0.01690523698925972,,
|
48 |
+
0.0004276209219824523,0.017381761223077774,0.017206067219376564,23,575,,,,
|
49 |
+
,,,24,575,0.00040367263136431575,0.01687566190958023,,
|
50 |
+
0.00042920836131088436,0.017432034015655518,0.017206067219376564,24,599,,,,
|
51 |
+
,,,25,599,0.0004039823543280363,0.016855429857969284,,
|
52 |
+
0.00041709901415742934,0.017218973487615585,0.017206067219376564,25,623,,,,
|
53 |
+
,,,26,623,0.00040261808317154646,0.016842041164636612,,
|
54 |
+
0.00041712800157256424,0.01721384935081005,0.017206067219376564,26,647,,,,
|
55 |
+
,,,27,647,0.00040108486427925527,0.016801705583930016,,
|
56 |
+
0.0004201784322503954,0.017252901569008827,0.017206067219376564,27,671,,,,
|
57 |
+
,,,28,671,0.00040309198084287345,0.016849083825945854,,
|
58 |
+
0.00042025925358757377,0.017253605648875237,0.017206067219376564,28,695,,,,
|
59 |
+
,,,29,695,0.00040161918150261045,0.016825392842292786,,
|
60 |
+
0.00042653869604691863,0.017367159947752953,0.017206067219376564,29,719,,,,
|
61 |
+
,,,30,719,0.00040069606620818377,0.016792459413409233,,
|
62 |
+
0.0004173555935267359,0.017220433801412582,0.017206067219376564,30,743,,,,
|
63 |
+
,,,31,743,0.0004031160206068307,0.016852673143148422,,
|
64 |
+
0.0004176373186055571,0.017215099185705185,0.017206067219376564,31,767,,,,
|
65 |
+
,,,32,767,0.00040285923751071095,0.01686094142496586,,
|
66 |
+
0.00041881194920279086,0.017244061455130577,0.017206067219376564,32,791,,,,
|
67 |
+
,,,33,791,0.00040129502303898335,0.016810236498713493,,
|
68 |
+
0.0004195523215457797,0.017238574102520943,0.017206067219376564,33,815,,,,
|
69 |
+
,,,34,815,0.00040117636672221124,0.016804233193397522,,
|
70 |
+
0.00042345101246610284,0.017323631793260574,0.017206067219376564,34,839,,,,
|
71 |
+
,,,35,839,0.00040232783067040145,0.01682218350470066,,
|
72 |
+
0.0004176231741439551,0.01721755787730217,0.017206067219376564,35,863,,,,
|
73 |
+
,,,36,863,0.0004037619801238179,0.016855504363775253,,
|
74 |
+
0.0004179279785603285,0.017217906191945076,0.017206067219376564,36,887,,,,
|
75 |
+
,,,37,887,0.00040249721496365964,0.01684076339006424,,
|
76 |
+
0.00041897609480656683,0.017223307862877846,0.017206067219376564,37,911,,,,
|
77 |
+
,,,38,911,0.0004052222357131541,0.01687397062778473,,
|
78 |
+
0.0004206788435112685,0.017246928066015244,0.017206067219376564,38,935,,,,
|
79 |
+
,,,39,935,0.00040305015863850713,0.016854874789714813,,
|
80 |
+
0.0004184516437817365,0.017216704785823822,0.017206067219376564,39,959,,,,
|
81 |
+
,,,40,959,0.0004014472069684416,0.016809824854135513,,
|
82 |
+
0.00041768301161937416,0.017213817685842514,0.017206067219376564,40,983,,,,
|
83 |
+
,,,41,983,0.000400997610995546,0.016799842938780785,,
|
84 |
+
0.0004178591480012983,0.017215833067893982,0.017206067219376564,41,1007,,,,
|
85 |
+
,,,42,1007,0.000402215460781008,0.0168254766613245,,
|
86 |
+
0.00041770999087020755,0.017225690186023712,0.017206067219376564,42,1031,,,,
|
87 |
+
,,,43,1031,0.0004027055692858994,0.01682671345770359,,
|
88 |
+
0.00041899512871168554,0.017214616760611534,0.017206067219376564,43,1055,,,,
|
89 |
+
,,,44,1055,0.0004011373966932297,0.016789918765425682,,
|
90 |
+
0.0004203356511425227,0.01723295822739601,0.017206067219376564,44,1079,,,,
|
91 |
+
,,,45,1079,0.00040034711128100753,0.016774697229266167,,
|
92 |
+
0.00042004435090348125,0.01722612977027893,0.017206067219376564,45,1103,,,,
|
93 |
+
,,,46,1103,0.00040031064418144524,0.01679287850856781,,
|
94 |
+
0.00041753757977858186,0.017204368487000465,0.017204368487000465,46,1127,,,,
|
95 |
+
,,,47,1127,0.000403077166993171,0.01682465337216854,,
|
96 |
+
0.0004210607730783522,0.017243025824427605,0.017204368487000465,47,1151,,,,
|
97 |
+
,,,48,1151,0.0004017871688120067,0.01679857075214386,,
|
98 |
+
0.00041864137165248394,0.017214054241776466,0.017204368487000465,48,1175,,,,
|
99 |
+
,,,49,1175,0.00040093358256854117,0.016798079013824463,,
|
100 |
+
0.0004264790622983128,0.01733502931892872,0.017204368487000465,49,1199,,,,
|
101 |
+
,,,50,1199,0.0004013919678982347,0.016820937395095825,,
|
102 |
+
0.0004184645658824593,0.017207222059369087,0.017204368487000465,50,1223,,,,
|
103 |
+
,,,51,1223,0.00040150817949324846,0.016808636486530304,,
|
104 |
+
0.0004173447086941451,0.01720462366938591,0.017204368487000465,51,1247,,,,
|
105 |
+
,,,52,1247,0.0004022222419735044,0.016811121255159378,,
|
106 |
+
0.000419566233176738,0.017218230292201042,0.017204368487000465,52,1271,,,,
|
107 |
+
,,,53,1271,0.000399966724216938,0.01677583158016205,,
|
108 |
+
0.0004171229084022343,0.017202723771333694,0.017202723771333694,53,1295,,,,
|
109 |
+
,,,54,1295,0.00040028776857070625,0.01678842306137085,,
|
110 |
+
0.00041871971916407347,0.0172096136957407,0.017202723771333694,54,1319,,,,
|
111 |
+
,,,55,1319,0.00040193041786551476,0.016813410446047783,,
|
112 |
+
0.0004283871385268867,0.01736305095255375,0.017202723771333694,55,1343,,,,
|
113 |
+
,,,56,1343,0.00040868757059797645,0.016957229003310204,,
|
114 |
+
0.000419788237195462,0.01721891388297081,0.017202723771333694,56,1367,,,,
|
115 |
+
,,,57,1367,0.0004008015093859285,0.016789279878139496,,
|
116 |
+
0.0004191590123809874,0.01725078746676445,0.017202723771333694,57,1391,,,,
|
117 |
+
,,,58,1391,0.00040126245585270226,0.01676764152944088,,
|
118 |
+
0.00041842248174361885,0.017202183604240417,0.017202183604240417,58,1415,,,,
|
119 |
+
,,,59,1415,0.0004007353272754699,0.016795895993709564,,
|
120 |
+
0.000417087459936738,0.017204228788614273,0.017202183604240417,59,1439,,,,
|
121 |
+
,,,60,1439,0.0004064509994350374,0.01689194142818451,,
|
122 |
+
0.00041705917101353407,0.01720697060227394,0.017202183604240417,60,1463,,,,
|
123 |
+
,,,61,1463,0.000401376630179584,0.01680450327694416,,
|
124 |
+
0.000417282892158255,0.017197739332914352,0.017197739332914352,61,1487,,,,
|
125 |
+
,,,62,1487,0.00040122854989022017,0.016796989366412163,,
|
126 |
+
0.0004169566964264959,0.017204908654093742,0.017197739332914352,62,1511,,,,
|
127 |
+
,,,63,1511,0.0004019147309008986,0.016819288954138756,,
|
128 |
+
0.0004252276266925037,0.017327692359685898,0.017197739332914352,63,1535,,,,
|
129 |
+
,,,64,1535,0.0004008215619251132,0.01679813303053379,,
|
130 |
+
0.000418249168433249,0.017201870679855347,0.017197739332914352,64,1559,,,,
|
131 |
+
,,,65,1559,0.0004022548964712769,0.01681801863014698,,
|
132 |
+
0.0004171164473518729,0.01720881089568138,0.017197739332914352,65,1583,,,,
|
133 |
+
,,,66,1583,0.0004007138195447624,0.016796132549643517,,
|
134 |
+
0.00041693495586514473,0.017197327688336372,0.017197327688336372,66,1607,,,,
|
135 |
+
,,,67,1607,0.0004024529771413654,0.01681518740952015,,
|
136 |
+
0.0004169866442680359,0.01720130629837513,0.017197327688336372,67,1631,,,,
|
137 |
+
,,,68,1631,0.0004029108677059412,0.01683821901679039,,
|
138 |
+
0.00041698256973177195,0.01720035821199417,0.017197327688336372,68,1655,,,,
|
139 |
+
,,,69,1655,0.000400969001930207,0.0168012622743845,,
|
140 |
+
0.00042014766950160265,0.017233533784747124,0.017197327688336372,69,1679,,,,
|
141 |
+
,,,70,1679,0.00040110640111379325,0.016795136034488678,,
|
142 |
+
0.0004175274516455829,0.017218707129359245,0.017197327688336372,70,1703,,,,
|
143 |
+
,,,71,1703,0.00040186152909882367,0.016808127984404564,,
|
144 |
+
0.000420175027102232,0.017234478145837784,0.017197327688336372,71,1727,,,,
|
145 |
+
,,,72,1727,0.0004029869451187551,0.016820866614580154,,
|
146 |
+
0.00042154971743002534,0.017290374264121056,0.017197327688336372,72,1751,,,,
|
147 |
+
,,,73,1751,0.0004018393810838461,0.016819657757878304,,
|
148 |
+
0.0004168307932559401,0.017197584733366966,0.017197327688336372,73,1775,,,,
|
149 |
+
,,,74,1775,0.0004033347067888826,0.016849622130393982,,
|
150 |
+
0.0004174465429969132,0.01719658449292183,0.01719658449292183,74,1799,,,,
|
151 |
+
,,,75,1799,0.00040207285201177,0.016806870698928833,,
|
152 |
+
0.0004178695671726018,0.017195407301187515,0.017195407301187515,75,1823,,,,
|
153 |
+
,,,76,1823,0.00040633161552250385,0.01690552569925785,,
|
154 |
+
0.00041818569297902286,0.017232319340109825,0.017195407301187515,76,1847,,,,
|
155 |
+
,,,77,1847,0.0004016806778963655,0.016822833567857742,,
|
156 |
+
0.000421318196458742,0.017266282811760902,0.017195407301187515,77,1871,,,,
|
157 |
+
,,,78,1871,0.000402224191930145,0.016823161393404007,,
|
158 |
+
0.0004192620690446347,0.017222236841917038,0.017195407301187515,78,1895,,,,
|
159 |
+
,,,79,1895,0.00040338438702747226,0.016830578446388245,,
|
160 |
+
0.0004178639210294932,0.01719973422586918,0.017195407301187515,79,1919,,,,
|
161 |
+
,,,80,1919,0.00040178862400352955,0.01681240275502205,,
|
162 |
+
0.0004176554793957621,0.01719573885202408,0.017195407301187515,80,1943,,,,
|
163 |
+
,,,81,1943,0.00040252183680422604,0.01684090495109558,,
|
164 |
+
0.0004202329437248409,0.017268674448132515,0.017195407301187515,81,1967,,,,
|
165 |
+
,,,82,1967,0.0004018725885543972,0.016818096861243248,,
|
166 |
+
0.0004182016127742827,0.017230616882443428,0.017195407301187515,82,1991,,,,
|
167 |
+
,,,83,1991,0.0004029826959595084,0.016847999766469002,,
|
168 |
+
0.0004177166847512126,0.0171958077698946,0.017195407301187515,83,2015,,,,
|
169 |
+
,,,84,2015,0.0004030916898045689,0.016841478645801544,,
|
170 |
+
0.0004229063633829355,0.017308201640844345,0.017195407301187515,84,2039,,,,
|
171 |
+
,,,85,2039,0.00040160163189284503,0.016811050474643707,,
|
172 |
+
0.00041915738256648183,0.017223507165908813,0.017195407301187515,85,2063,,,,
|
173 |
+
,,,86,2063,0.00040224529220722616,0.016822297126054764,,
|
174 |
+
0.00041956276982091367,0.017231978476047516,0.017195407301187515,86,2087,,,,
|
175 |
+
,,,87,2087,0.00040121664642356336,0.016798874363303185,,
|
176 |
+
0.0004179154057055712,0.017200246453285217,0.017195407301187515,87,2111,,,,
|
177 |
+
,,,88,2111,0.00040274919592775404,0.016815654933452606,,
|
178 |
+
0.0004178369417786598,0.017202293500304222,0.017195407301187515,88,2135,,,,
|
179 |
+
,,,89,2135,0.000401629222324118,0.01680818572640419,,
|
180 |
+
0.0004194552602712065,0.01723109930753708,0.017195407301187515,89,2159,,,,
|
181 |
+
,,,90,2159,0.000402590143494308,0.016822924837470055,,
|
182 |
+
0.00041843776125460863,0.017202844843268394,0.017195407301187515,90,2183,,,,
|
183 |
+
,,,91,2183,0.00040649992297403514,0.01691357046365738,,
|
184 |
+
0.00042038620449602604,0.017252525314688683,0.017195407301187515,91,2207,,,,
|
185 |
+
,,,92,2207,0.00040353540680371225,0.01685115322470665,,
|
186 |
+
0.0004189791507087648,0.017222154885530472,0.017195407301187515,92,2231,,,,
|
187 |
+
,,,93,2231,0.0004033587174490094,0.016858424991369247,,
|
188 |
+
0.00041752157267183065,0.01719973050057888,0.017195407301187515,93,2255,,,,
|
189 |
+
,,,94,2255,0.00040301019907929003,0.016829803586006165,,
|
190 |
+
0.0004262323200237006,0.017374495044350624,0.017195407301187515,94,2279,,,,
|
191 |
+
,,,95,2279,0.0004019465995952487,0.01682070828974247,,
|
192 |
+
0.00041839166078716516,0.01721331663429737,0.017195407301187515,95,2303,,,,
|
193 |
+
,,,96,2303,0.0004020459018647671,0.01681322604417801,,
|
194 |
+
0.0004176109214313328,0.017199618741869926,0.017195407301187515,96,2327,,,,
|
195 |
+
,,,97,2327,0.00040511813131161034,0.01687493920326233,,
|
196 |
+
0.00041778298327699304,0.01720503717660904,0.017195407301187515,97,2351,,,,
|
197 |
+
,,,98,2351,0.0004028994881082326,0.016828566789627075,,
|
198 |
+
0.00042330913129262626,0.017334815114736557,0.017195407301187515,98,2375,,,,
|
199 |
+
,,,99,2375,0.0004031210264656693,0.016830336302518845,,
|
200 |
+
0.0004220327246002853,0.017304159700870514,0.017195407301187515,99,2399,,,,
|
201 |
+
,,,100,2399,0.00040244770934805274,0.016806816682219505,,
|
202 |
+
,,,75,2400,,,0.00040832016384229064,0.016924427822232246
|
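
The CSV logger interleaves validation, training, and (once, at the end) test rows, leaving the other columns empty on each line. A small pandas sketch for pulling the headline numbers out of this file:

# Sketch: summarize the CSV metrics logged for this run.
import pandas as pd

df = pd.read_csv(
    "logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/"
    "csv/version_0/metrics.csv"
)

val = df.dropna(subset=["val/mae"])      # rows written at validation time
best = val.loc[val["val/mae"].idxmin()]
print(f"best val/mae {best['val/mae']:.6f} at epoch {int(best['epoch'])}")
print("test/mae", df["test/mae"].dropna().iloc[0])  # logged once after training
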
logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/tensorboard/focusConvMSE_150/events.out.tfevents.1652255185.b0402e0214ff.1.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9bd92aca0f9c4d373460bf639abe012dd0330c4f55ea5f8dca6f06c805b269ea
+size 36778

logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/tensorboard/focusConvMSE_150/events.out.tfevents.1652255403.b0402e0214ff.1.1
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ea41b41edb32583dddae423d78d80d14c8dd7b6bfd4208c7f508633d3e304788
+size 179

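The TensorBoard event files are LFS-tracked as well; once pulled, they can be read programmatically instead of through the TensorBoard UI. A sketch using TensorBoard's event accumulator; the exact scalar tag names are assumptions based on the metric names used elsewhere in this run:

# Sketch: read logged scalars from this run's TensorBoard event files.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator(
    "logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/"
    "tensorboard/focusConvMSE_150"
)
acc.Reload()
print(acc.Tags()["scalars"])           # available scalar tags
for event in acc.Scalars("val/mae"):   # tag name assumed from the other logs
    print(event.step, event.value)
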
logs/experiments/runs/focusConvMSE_150/2022-05-11_07-46-18/tensorboard/focusConvMSE_150/hparams.yaml
ADDED
@@ -0,0 +1,54 @@
+trainer:
+  _target_: pytorch_lightning.Trainer
+  gpus: 1
+  min_epochs: 1
+  max_epochs: 100
+  resume_from_checkpoint: null
+model:
+  _target_: src.models.focus_conv_module.FocusConvLitModule
+  image_size: 150
+  pool_size: 2
+  conv1_size: 5
+  conv1_channels: 6
+  conv2_size: 5
+  conv2_channels: 16
+  lin1_size: 100
+  lin2_size: 80
+  output_size: 1
+  lr: 0.001
+  weight_decay: 0.0005
+datamodule:
+  _target_: src.datamodules.focus_datamodule.FocusDataModule
+  data_dir: /usr/src/app/data/focus150
+  csv_train_file: /usr/src/app/data/focus150/train_metadata.csv
+  csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv
+  csv_test_file: /usr/src/app/data/focus150/test_metadata.csv
+  batch_size: 128
+  num_workers: 0
+  pin_memory: false
+seed: 12345
+callbacks:
+  model_checkpoint:
+    _target_: pytorch_lightning.callbacks.ModelCheckpoint
+    monitor: val/mae
+    mode: min
+    save_top_k: 1
+    save_last: true
+    verbose: false
+    dirpath: checkpoints/
+    filename: epoch_{epoch:03d}
+    auto_insert_metric_name: false
+  early_stopping:
+    _target_: pytorch_lightning.callbacks.EarlyStopping
+    monitor: val/mae
+    mode: min
+    patience: 100
+    min_delta: 0
+  model_summary:
+    _target_: pytorch_lightning.callbacks.RichModelSummary
+    max_depth: -1
+  rich_progress_bar:
+    _target_: pytorch_lightning.callbacks.RichProgressBar
+model/params/total: 1861789
+model/params/trainable: 1861789
+model/params/non_trainable: 0

logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/meta.yaml
ADDED
@@ -0,0 +1,15 @@
+artifact_uri: /usr/src/app/logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/artifacts
+end_time: 1652255403056
+entry_point_name: ''
+experiment_id: '1'
+lifecycle_stage: active
+name: ''
+run_id: 32404d6b69574a41967cc1061badf333
+run_uuid: 32404d6b69574a41967cc1061badf333
+source_name: ''
+source_type: 4
+source_version: ''
+start_time: 1652255185021
+status: 3
+tags: []
+user_id: unknown

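The same run is mirrored in the file-based MLflow store whose meta.yaml is shown above. A sketch of reading its metrics back with the MLflow client, using the run_id from meta.yaml and the tracking path from the Hydra config:

# Sketch: query this run from the file-based MLflow store committed here.
import mlflow
from mlflow.tracking import MlflowClient

mlflow.set_tracking_uri("logs/mlflow/mlruns")  # tracking_uri recorded in the config
client = MlflowClient()

run = client.get_run("32404d6b69574a41967cc1061badf333")  # run_id from meta.yaml
print("test/mae", run.data.metrics["test/mae"])

history = client.get_metric_history(run.info.run_id, "val/mae")
print("best val/mae", min(m.value for m in history))
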
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/metrics/epoch
ADDED
@@ -0,0 +1,201 @@
1 |
+
1652255226110 0 23
|
2 |
+
1652255226202 1 23
|
3 |
+
1652255227893 1 47
|
4 |
+
1652255228182 2 47
|
5 |
+
1652255229874 2 71
|
6 |
+
1652255230002 3 71
|
7 |
+
1652255231701 3 95
|
8 |
+
1652255231893 4 95
|
9 |
+
1652255233631 4 119
|
10 |
+
1652255233787 5 119
|
11 |
+
1652255235420 5 143
|
12 |
+
1652255235552 6 143
|
13 |
+
1652255237173 6 167
|
14 |
+
1652255237352 7 167
|
15 |
+
1652255238968 7 191
|
16 |
+
1652255239115 8 191
|
17 |
+
1652255240765 8 215
|
18 |
+
1652255240888 9 215
|
19 |
+
1652255242504 9 239
|
20 |
+
1652255242636 10 239
|
21 |
+
1652255244265 10 263
|
22 |
+
1652255244379 11 263
|
23 |
+
1652255245973 11 287
|
24 |
+
1652255246102 12 287
|
25 |
+
1652255247776 12 311
|
26 |
+
1652255247898 13 311
|
27 |
+
1652255249510 13 335
|
28 |
+
1652255249632 14 335
|
29 |
+
1652255251271 14 359
|
30 |
+
1652255251399 15 359
|
31 |
+
1652255253051 15 383
|
32 |
+
1652255253181 16 383
|
33 |
+
1652255254846 16 407
|
34 |
+
1652255254965 17 407
|
35 |
+
1652255256590 17 431
|
36 |
+
1652255256717 18 431
|
37 |
+
1652255258332 18 455
|
38 |
+
1652255258459 19 455
|
39 |
+
1652255260077 19 479
|
40 |
+
1652255260271 20 479
|
41 |
+
1652255261916 20 503
|
42 |
+
1652255262075 21 503
|
43 |
+
1652255263678 21 527
|
44 |
+
1652255263807 22 527
|
45 |
+
1652255265470 22 551
|
46 |
+
1652255265593 23 551
|
47 |
+
1652255267224 23 575
|
48 |
+
1652255267353 24 575
|
49 |
+
1652255269012 24 599
|
50 |
+
1652255269307 25 599
|
51 |
+
1652255270889 25 623
|
52 |
+
1652255271008 26 623
|
53 |
+
1652255272597 26 647
|
54 |
+
1652255272724 27 647
|
55 |
+
1652255274348 27 671
|
56 |
+
1652255274477 28 671
|
57 |
+
1652255276098 28 695
|
58 |
+
1652255276265 29 695
|
59 |
+
1652255277921 29 719
|
60 |
+
1652255278053 30 719
|
61 |
+
1652255279693 30 743
|
62 |
+
1652255279822 31 743
|
63 |
+
1652255281456 31 767
|
64 |
+
1652255281578 32 767
|
65 |
+
1652255283271 32 791
|
66 |
+
1652255283406 33 791
|
67 |
+
1652255285074 33 815
|
68 |
+
1652255285208 34 815
|
69 |
+
1652255286848 34 839
|
70 |
+
1652255286968 35 839
|
71 |
+
1652255288645 35 863
|
72 |
+
1652255288774 36 863
|
73 |
+
1652255290391 36 887
|
74 |
+
1652255290528 37 887
|
75 |
+
1652255292238 37 911
|
76 |
+
1652255292354 38 911
|
77 |
+
1652255293971 38 935
|
78 |
+
1652255294100 39 935
|
79 |
+
1652255295705 39 959
|
80 |
+
1652255295832 40 959
|
81 |
+
1652255297458 40 983
|
82 |
+
1652255297579 41 983
|
83 |
+
1652255299252 41 1007
|
84 |
+
1652255299387 42 1007
|
85 |
+
1652255301015 42 1031
|
86 |
+
1652255301145 43 1031
|
87 |
+
1652255302770 43 1055
|
88 |
+
1652255302894 44 1055
|
89 |
+
1652255304540 44 1079
|
90 |
+
1652255304673 45 1079
|
91 |
+
1652255306350 45 1103
|
92 |
+
1652255306511 46 1103
|
93 |
+
1652255308183 46 1127
|
94 |
+
1652255308369 47 1127
|
95 |
+
1652255310043 47 1151
|
96 |
+
1652255310173 48 1151
|
97 |
+
1652255311742 48 1175
|
98 |
+
1652255311868 49 1175
|
99 |
+
1652255313531 49 1199
|
100 |
+
1652255313710 50 1199
|
101 |
+
1652255315319 50 1223
|
102 |
+
1652255315450 51 1223
|
103 |
+
1652255317051 51 1247
|
104 |
+
1652255317182 52 1247
|
105 |
+
1652255318807 52 1271
|
106 |
+
1652255318925 53 1271
|
107 |
+
1652255320529 53 1295
|
108 |
+
1652255320700 54 1295
|
109 |
+
1652255322361 54 1319
|
110 |
+
1652255322494 55 1319
|
111 |
+
1652255324102 55 1343
|
112 |
+
1652255324269 56 1343
|
113 |
+
1652255325912 56 1367
|
114 |
+
1652255326041 57 1367
|
115 |
+
1652255327692 57 1391
|
116 |
+
1652255327819 58 1391
|
117 |
+
1652255329496 58 1415
|
118 |
+
1652255329661 59 1415
|
119 |
+
1652255331243 59 1439
|
120 |
+
1652255331378 60 1439
|
121 |
+
1652255333003 60 1463
|
122 |
+
1652255333132 61 1463
|
123 |
+
1652255334794 61 1487
|
124 |
+
1652255334951 62 1487
|
125 |
+
1652255336636 62 1511
|
126 |
+
1652255336765 63 1511
|
127 |
+
1652255338433 63 1535
|
128 |
+
1652255338563 64 1535
|
129 |
+
1652255340211 64 1559
|
130 |
+
1652255340354 65 1559
|
131 |
+
1652255341996 65 1583
|
132 |
+
1652255342123 66 1583
|
133 |
+
1652255343809 66 1607
|
134 |
+
1652255343981 67 1607
|
135 |
+
1652255345630 67 1631
|
136 |
+
1652255345751 68 1631
|
137 |
+
1652255347390 68 1655
|
138 |
+
1652255347515 69 1655
|
139 |
+
1652255349125 69 1679
|
140 |
+
1652255349254 70 1679
|
141 |
+
1652255350905 70 1703
|
142 |
+
1652255351054 71 1703
|
143 |
+
1652255352731 71 1727
|
144 |
+
1652255352857 72 1727
|
145 |
+
1652255354455 72 1751
|
146 |
+
1652255354583 73 1751
|
147 |
+
1652255356226 73 1775
|
148 |
+
1652255356373 74 1775
|
149 |
+
1652255358061 74 1799
|
150 |
+
1652255358273 75 1799
|
151 |
+
1652255359907 75 1823
|
152 |
+
1652255360079 76 1823
|
153 |
+
1652255361669 76 1847
|
154 |
+
1652255361795 77 1847
|
155 |
+
1652255363448 77 1871
|
156 |
+
1652255363580 78 1871
|
157 |
+
1652255365177 78 1895
|
158 |
+
1652255365307 79 1895
|
159 |
+
1652255366993 79 1919
|
160 |
+
1652255367125 80 1919
|
161 |
+
1652255368727 80 1943
|
162 |
+
1652255368853 81 1943
|
163 |
+
1652255370458 81 1967
|
164 |
+
1652255370587 82 1967
|
165 |
+
1652255372211 82 1991
|
166 |
+
1652255372343 83 1991
|
167 |
+
1652255374023 83 2015
|
168 |
+
1652255374158 84 2015
|
169 |
+
1652255375773 84 2039
|
170 |
+
1652255375904 85 2039
|
171 |
+
1652255377505 85 2063
|
172 |
+
1652255377640 86 2063
|
173 |
+
1652255379283 86 2087
|
174 |
+
1652255379409 87 2087
|
175 |
+
1652255381074 87 2111
|
176 |
+
1652255381209 88 2111
|
177 |
+
1652255382841 88 2135
|
178 |
+
1652255383016 89 2135
|
179 |
+
1652255384667 89 2159
|
180 |
+
1652255384822 90 2159
|
181 |
+
1652255386412 90 2183
|
182 |
+
1652255386547 91 2183
|
183 |
+
1652255388223 91 2207
|
184 |
+
1652255388373 92 2207
|
185 |
+
1652255390024 92 2231
|
186 |
+
1652255390155 93 2231
|
187 |
+
1652255391780 93 2255
|
188 |
+
1652255391910 94 2255
|
189 |
+
1652255393529 94 2279
|
190 |
+
1652255393645 95 2279
|
191 |
+
1652255395287 95 2303
|
192 |
+
1652255395414 96 2303
|
193 |
+
1652255397053 96 2327
|
194 |
+
1652255397182 97 2327
|
195 |
+
1652255398829 97 2351
|
196 |
+
1652255398978 98 2351
|
197 |
+
1652255400607 98 2375
|
198 |
+
1652255400734 99 2375
|
199 |
+
1652255402340 99 2399
|
200 |
+
1652255402545 100 2399
|
201 |
+
1652255403011 75 2400
|
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/metrics/test/loss
ADDED
@@ -0,0 +1 @@
+1652255403011 0.00040832016384229064 2400

logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/metrics/test/mae
ADDED
@@ -0,0 +1 @@
+1652255403011 0.016924427822232246 2400

logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/metrics/train/loss
ADDED
@@ -0,0 +1,100 @@
1 |
+
1652255226202 0.008887404575943947 23
|
2 |
+
1652255228182 0.00041541943210177124 47
|
3 |
+
1652255230002 0.0004184250719845295 71
|
4 |
+
1652255231893 0.00041017148760147393 95
|
5 |
+
1652255233787 0.000404045800678432 119
|
6 |
+
1652255235552 0.00040396256372332573 143
|
7 |
+
1652255237352 0.00040354690281674266 167
|
8 |
+
1652255239115 0.0004028608382213861 191
|
9 |
+
1652255240888 0.0004042572109028697 215
|
10 |
+
1652255242636 0.00040450014057569206 239
|
11 |
+
1652255244379 0.00040507156518287957 263
|
12 |
+
1652255246102 0.000402468052925542 287
|
13 |
+
1652255247898 0.0004039796185679734 311
|
14 |
+
1652255249632 0.0004027166869491339 335
|
15 |
+
1652255251399 0.0004023931105621159 359
|
16 |
+
1652255253181 0.00040235259803012013 383
|
17 |
+
1652255254965 0.00040349853225052357 407
|
18 |
+
1652255256717 0.00040197305497713387 431
|
19 |
+
1652255258459 0.00040126233943738043 455
|
20 |
+
1652255260271 0.00040246162097901106 479
|
21 |
+
1652255262075 0.0004025833623018116 503
|
22 |
+
1652255263807 0.0004038927727378905 527
|
23 |
+
1652255265593 0.0004055915051139891 551
|
24 |
+
1652255267353 0.00040367263136431575 575
|
25 |
+
1652255269307 0.0004039823543280363 599
|
26 |
+
1652255271008 0.00040261808317154646 623
|
27 |
+
1652255272724 0.00040108486427925527 647
|
28 |
+
1652255274477 0.00040309198084287345 671
|
29 |
+
1652255276265 0.00040161918150261045 695
|
30 |
+
1652255278053 0.00040069606620818377 719
|
31 |
+
1652255279822 0.0004031160206068307 743
|
32 |
+
1652255281578 0.00040285923751071095 767
|
33 |
+
1652255283406 0.00040129502303898335 791
|
34 |
+
1652255285208 0.00040117636672221124 815
|
35 |
+
1652255286968 0.00040232783067040145 839
|
36 |
+
1652255288774 0.0004037619801238179 863
|
37 |
+
1652255290528 0.00040249721496365964 887
|
38 |
+
1652255292354 0.0004052222357131541 911
|
39 |
+
1652255294100 0.00040305015863850713 935
|
40 |
+
1652255295832 0.0004014472069684416 959
|
41 |
+
1652255297579 0.000400997610995546 983
|
42 |
+
1652255299387 0.000402215460781008 1007
|
43 |
+
1652255301145 0.0004027055692858994 1031
|
44 |
+
1652255302894 0.0004011373966932297 1055
|
45 |
+
1652255304673 0.00040034711128100753 1079
|
46 |
+
1652255306511 0.00040031064418144524 1103
|
47 |
+
1652255308369 0.000403077166993171 1127
|
48 |
+
1652255310173 0.0004017871688120067 1151
|
49 |
+
1652255311868 0.00040093358256854117 1175
|
50 |
+
1652255313710 0.0004013919678982347 1199
|
51 |
+
1652255315450 0.00040150817949324846 1223
|
52 |
+
1652255317182 0.0004022222419735044 1247
|
53 |
+
1652255318925 0.000399966724216938 1271
|
54 |
+
1652255320700 0.00040028776857070625 1295
|
55 |
+
1652255322494 0.00040193041786551476 1319
|
56 |
+
1652255324269 0.00040868757059797645 1343
|
57 |
+
1652255326041 0.0004008015093859285 1367
|
58 |
+
1652255327819 0.00040126245585270226 1391
|
59 |
+
1652255329661 0.0004007353272754699 1415
|
60 |
+
1652255331378 0.0004064509994350374 1439
|
61 |
+
1652255333132 0.000401376630179584 1463
|
62 |
+
1652255334951 0.00040122854989022017 1487
|
63 |
+
1652255336765 0.0004019147309008986 1511
|
64 |
+
1652255338563 0.0004008215619251132 1535
|
65 |
+
1652255340354 0.0004022548964712769 1559
|
66 |
+
1652255342123 0.0004007138195447624 1583
|
67 |
+
1652255343981 0.0004024529771413654 1607
|
68 |
+
1652255345751 0.0004029108677059412 1631
|
69 |
+
1652255347515 0.000400969001930207 1655
|
70 |
+
1652255349254 0.00040110640111379325 1679
|
71 |
+
1652255351054 0.00040186152909882367 1703
|
72 |
+
1652255352857 0.0004029869451187551 1727
|
73 |
+
1652255354583 0.0004018393810838461 1751
|
74 |
+
1652255356373 0.0004033347067888826 1775
|
75 |
+
1652255358273 0.00040207285201177 1799
|
76 |
+
1652255360079 0.00040633161552250385 1823
|
77 |
+
1652255361795 0.0004016806778963655 1847
|
78 |
+
1652255363580 0.000402224191930145 1871
|
79 |
+
1652255365307 0.00040338438702747226 1895
|
80 |
+
1652255367125 0.00040178862400352955 1919
|
81 |
+
1652255368853 0.00040252183680422604 1943
|
82 |
+
1652255370587 0.0004018725885543972 1967
|
83 |
+
1652255372343 0.0004029826959595084 1991
|
84 |
+
1652255374158 0.0004030916898045689 2015
|
85 |
+
1652255375904 0.00040160163189284503 2039
|
86 |
+
1652255377640 0.00040224529220722616 2063
|
87 |
+
1652255379409 0.00040121664642356336 2087
|
88 |
+
1652255381209 0.00040274919592775404 2111
|
89 |
+
1652255383016 0.000401629222324118 2135
|
90 |
+
1652255384822 0.000402590143494308 2159
|
91 |
+
1652255386547 0.00040649992297403514 2183
|
92 |
+
1652255388373 0.00040353540680371225 2207
|
93 |
+
1652255390155 0.0004033587174490094 2231
|
94 |
+
1652255391910 0.00040301019907929003 2255
|
95 |
+
1652255393645 0.0004019465995952487 2279
|
96 |
+
1652255395414 0.0004020459018647671 2303
|
97 |
+
1652255397182 0.00040511813131161034 2327
|
98 |
+
1652255398978 0.0004028994881082326 2351
|
99 |
+
1652255400734 0.0004031210264656693 2375
|
100 |
+
1652255402545 0.00040244770934805274 2399
|
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/metrics/train/mae
ADDED
@@ -0,0 +1,100 @@
1 |
+
1652255226202 0.04211921989917755 23
|
2 |
+
1652255228182 0.016959283500909805 47
|
3 |
+
1652255230002 0.01708288863301277 71
|
4 |
+
1652255231893 0.016914093866944313 95
|
5 |
+
1652255233787 0.01681438460946083 119
|
6 |
+
1652255235552 0.016813334077596664 143
|
7 |
+
1652255237352 0.0168131273239851 167
|
8 |
+
1652255239115 0.01682186685502529 191
|
9 |
+
1652255240888 0.016848096624016762 215
|
10 |
+
1652255242636 0.0168466754257679 239
|
11 |
+
1652255244379 0.01684463769197464 263
|
12 |
+
1652255246102 0.01681079901754856 287
|
13 |
+
1652255247898 0.016856476664543152 311
|
14 |
+
1652255249632 0.016827981919050217 335
|
15 |
+
1652255251399 0.016813194379210472 359
|
16 |
+
1652255253181 0.016825344413518906 383
|
17 |
+
1652255254965 0.01684238761663437 407
|
18 |
+
1652255256717 0.016820793971419334 431
|
19 |
+
1652255258459 0.016783758997917175 455
|
20 |
+
1652255260271 0.01681712083518505 479
|
21 |
+
1652255262075 0.016804421320557594 503
|
22 |
+
1652255263807 0.016853438690304756 527
|
23 |
+
1652255265593 0.01690523698925972 551
|
24 |
+
1652255267353 0.01687566190958023 575
|
25 |
+
1652255269307 0.016855429857969284 599
|
26 |
+
1652255271008 0.016842041164636612 623
|
27 |
+
1652255272724 0.016801705583930016 647
|
28 |
+
1652255274477 0.016849083825945854 671
|
29 |
+
1652255276265 0.016825392842292786 695
|
30 |
+
1652255278053 0.016792459413409233 719
|
31 |
+
1652255279822 0.016852673143148422 743
|
32 |
+
1652255281578 0.01686094142496586 767
|
33 |
+
1652255283406 0.016810236498713493 791
|
34 |
+
1652255285208 0.016804233193397522 815
|
35 |
+
1652255286968 0.01682218350470066 839
|
36 |
+
1652255288774 0.016855504363775253 863
|
37 |
+
1652255290528 0.01684076339006424 887
|
38 |
+
1652255292354 0.01687397062778473 911
|
39 |
+
1652255294100 0.016854874789714813 935
|
40 |
+
1652255295832 0.016809824854135513 959
|
41 |
+
1652255297579 0.016799842938780785 983
|
42 |
+
1652255299387 0.0168254766613245 1007
|
43 |
+
1652255301145 0.01682671345770359 1031
|
44 |
+
1652255302894 0.016789918765425682 1055
|
45 |
+
1652255304673 0.016774697229266167 1079
|
46 |
+
1652255306511 0.01679287850856781 1103
|
47 |
+
1652255308369 0.01682465337216854 1127
|
48 |
+
1652255310173 0.01679857075214386 1151
|
49 |
+
1652255311868 0.016798079013824463 1175
|
50 |
+
1652255313710 0.016820937395095825 1199
|
51 |
+
1652255315450 0.016808636486530304 1223
|
52 |
+
1652255317182 0.016811121255159378 1247
|
53 |
+
1652255318925 0.01677583158016205 1271
|
54 |
+
1652255320700 0.01678842306137085 1295
|
55 |
+
1652255322494 0.016813410446047783 1319
|
56 |
+
1652255324269 0.016957229003310204 1343
|
57 |
+
1652255326041 0.016789279878139496 1367
|
58 |
+
1652255327819 0.01676764152944088 1391
|
59 |
+
1652255329661 0.016795895993709564 1415
|
60 |
+
1652255331378 0.01689194142818451 1439
|
61 |
+
1652255333132 0.01680450327694416 1463
|
62 |
+
1652255334951 0.016796989366412163 1487
|
63 |
+
1652255336765 0.016819288954138756 1511
|
64 |
+
1652255338563 0.01679813303053379 1535
|
65 |
+
1652255340354 0.01681801863014698 1559
|
66 |
+
1652255342123 0.016796132549643517 1583
|
67 |
+
1652255343981 0.01681518740952015 1607
|
68 |
+
1652255345751 0.01683821901679039 1631
|
69 |
+
1652255347515 0.0168012622743845 1655
|
70 |
+
1652255349254 0.016795136034488678 1679
|
71 |
+
1652255351054 0.016808127984404564 1703
|
72 |
+
1652255352857 0.016820866614580154 1727
|
73 |
+
1652255354583 0.016819657757878304 1751
|
74 |
+
1652255356373 0.016849622130393982 1775
|
75 |
+
1652255358273 0.016806870698928833 1799
|
76 |
+
1652255360079 0.01690552569925785 1823
|
77 |
+
1652255361795 0.016822833567857742 1847
|
78 |
+
1652255363580 0.016823161393404007 1871
|
79 |
+
1652255365307 0.016830578446388245 1895
|
80 |
+
1652255367125 0.01681240275502205 1919
|
81 |
+
1652255368853 0.01684090495109558 1943
|
82 |
+
1652255370587 0.016818096861243248 1967
|
83 |
+
1652255372343 0.016847999766469002 1991
|
84 |
+
1652255374158 0.016841478645801544 2015
|
85 |
+
1652255375904 0.016811050474643707 2039
|
86 |
+
1652255377640 0.016822297126054764 2063
|
87 |
+
1652255379409 0.016798874363303185 2087
|
88 |
+
1652255381209 0.016815654933452606 2111
|
89 |
+
1652255383016 0.01680818572640419 2135
|
90 |
+
1652255384822 0.016822924837470055 2159
|
91 |
+
1652255386547 0.01691357046365738 2183
|
92 |
+
1652255388373 0.01685115322470665 2207
|
93 |
+
1652255390155 0.016858424991369247 2231
|
94 |
+
1652255391910 0.016829803586006165 2255
|
95 |
+
1652255393645 0.01682070828974247 2279
|
96 |
+
1652255395414 0.01681322604417801 2303
|
97 |
+
1652255397182 0.01687493920326233 2327
|
98 |
+
1652255398978 0.016828566789627075 2351
|
99 |
+
1652255400734 0.016830336302518845 2375
|
100 |
+
1652255402545 0.016806816682219505 2399
|
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/metrics/val/loss
ADDED
@@ -0,0 +1,100 @@
1 |
+
1652255226110 0.00044489698484539986 23
|
2 |
+
1652255227893 0.00043419786379672587 47
|
3 |
+
1652255229874 0.00043711316538974643 71
|
4 |
+
1652255231701 0.0004245176096446812 95
|
5 |
+
1652255233631 0.0004187379090581089 119
|
6 |
+
1652255235420 0.0004181358963251114 143
|
7 |
+
1652255237173 0.00041875889291986823 167
|
8 |
+
1652255238968 0.0004179416282568127 191
|
9 |
+
1652255240765 0.0004198983369860798 215
|
10 |
+
1652255242504 0.00042069426854141057 239
|
11 |
+
1652255244265 0.00041716272244229913 263
|
12 |
+
1652255245973 0.00041914221947081387 287
|
13 |
+
1652255247776 0.0004204396391287446 311
|
14 |
+
1652255249510 0.0004188572056591511 335
|
15 |
+
1652255251271 0.0004182940174359828 359
|
16 |
+
1652255253051 0.0004178732051514089 383
|
17 |
+
1652255254846 0.00041836718446575105 407
|
18 |
+
1652255256590 0.0004207307065371424 431
|
19 |
+
1652255258332 0.0004213627544231713 455
|
20 |
+
1652255260077 0.0004186522855889052 479
|
21 |
+
1652255261916 0.0004174558271188289 503
|
22 |
+
1652255263678 0.0004171860055066645 527
|
23 |
+
1652255265470 0.0004291511722840369 551
|
24 |
+
1652255267224 0.0004276209219824523 575
|
25 |
+
1652255269012 0.00042920836131088436 599
|
26 |
+
1652255270889 0.00041709901415742934 623
|
27 |
+
1652255272597 0.00041712800157256424 647
|
28 |
+
1652255274348 0.0004201784322503954 671
|
29 |
+
1652255276098 0.00042025925358757377 695
|
30 |
+
1652255277921 0.00042653869604691863 719
|
31 |
+
1652255279693 0.0004173555935267359 743
|
32 |
+
1652255281456 0.0004176373186055571 767
|
33 |
+
1652255283271 0.00041881194920279086 791
|
34 |
+
1652255285074 0.0004195523215457797 815
|
35 |
+
1652255286848 0.00042345101246610284 839
|
36 |
+
1652255288645 0.0004176231741439551 863
|
37 |
+
1652255290391 0.0004179279785603285 887
|
38 |
+
1652255292238 0.00041897609480656683 911
|
39 |
+
1652255293971 0.0004206788435112685 935
|
40 |
+
1652255295705 0.0004184516437817365 959
|
41 |
+
1652255297458 0.00041768301161937416 983
|
42 |
+
1652255299252 0.0004178591480012983 1007
|
43 |
+
1652255301015 0.00041770999087020755 1031
|
44 |
+
1652255302770 0.00041899512871168554 1055
|
45 |
+
1652255304540 0.0004203356511425227 1079
|
46 |
+
1652255306350 0.00042004435090348125 1103
|
47 |
+
1652255308183 0.00041753757977858186 1127
|
48 |
+
1652255310043 0.0004210607730783522 1151
|
49 |
+
1652255311742 0.00041864137165248394 1175
|
50 |
+
1652255313531 0.0004264790622983128 1199
|
51 |
+
1652255315319 0.0004184645658824593 1223
|
52 |
+
1652255317051 0.0004173447086941451 1247
|
53 |
+
1652255318807 0.000419566233176738 1271
|
54 |
+
1652255320529 0.0004171229084022343 1295
|
55 |
+
1652255322361 0.00041871971916407347 1319
|
56 |
+
1652255324102 0.0004283871385268867 1343
|
57 |
+
1652255325912 0.000419788237195462 1367
|
58 |
+
1652255327692 0.0004191590123809874 1391
|
59 |
+
1652255329496 0.00041842248174361885 1415
|
60 |
+
1652255331243 0.000417087459936738 1439
|
61 |
+
1652255333003 0.00041705917101353407 1463
|
62 |
+
1652255334794 0.000417282892158255 1487
|
63 |
+
1652255336636 0.0004169566964264959 1511
|
64 |
+
1652255338433 0.0004252276266925037 1535
|
65 |
+
1652255340211 0.000418249168433249 1559
|
66 |
+
1652255341996 0.0004171164473518729 1583
|
67 |
+
1652255343809 0.00041693495586514473 1607
|
68 |
+
1652255345630 0.0004169866442680359 1631
|
69 |
+
1652255347390 0.00041698256973177195 1655
|
70 |
+
1652255349125 0.00042014766950160265 1679
|
71 |
+
1652255350905 0.0004175274516455829 1703
|
72 |
+
1652255352731 0.000420175027102232 1727
|
73 |
+
1652255354455 0.00042154971743002534 1751
|
74 |
+
1652255356226 0.0004168307932559401 1775
|
75 |
+
1652255358061 0.0004174465429969132 1799
|
76 |
+
1652255359907 0.0004178695671726018 1823
|
77 |
+
1652255361669 0.00041818569297902286 1847
|
78 |
+
1652255363448 0.000421318196458742 1871
|
79 |
+
1652255365177 0.0004192620690446347 1895
|
80 |
+
1652255366993 0.0004178639210294932 1919
|
81 |
+
1652255368727 0.0004176554793957621 1943
|
82 |
+
1652255370458 0.0004202329437248409 1967
|
83 |
+
1652255372211 0.0004182016127742827 1991
|
84 |
+
1652255374023 0.0004177166847512126 2015
|
85 |
+
1652255375773 0.0004229063633829355 2039
|
86 |
+
1652255377505 0.00041915738256648183 2063
|
87 |
+
1652255379283 0.00041956276982091367 2087
|
88 |
+
1652255381074 0.0004179154057055712 2111
|
89 |
+
1652255382841 0.0004178369417786598 2135
|
90 |
+
1652255384667 0.0004194552602712065 2159
|
91 |
+
1652255386412 0.00041843776125460863 2183
|
92 |
+
1652255388223 0.00042038620449602604 2207
|
93 |
+
1652255390024 0.0004189791507087648 2231
|
94 |
+
1652255391780 0.00041752157267183065 2255
|
95 |
+
1652255393529 0.0004262323200237006 2279
|
96 |
+
1652255395287 0.00041839166078716516 2303
|
97 |
+
1652255397053 0.0004176109214313328 2327
|
98 |
+
1652255398829 0.00041778298327699304 2351
|
99 |
+
1652255400607 0.00042330913129262626 2375
|
100 |
+
1652255402340 0.0004220327246002853 2399
|
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/metrics/val/mae
ADDED
@@ -0,0 +1,100 @@
+1652255226110 0.017681343480944633 23
+1652255227893 0.017488993704319 47
+1652255229874 0.017531242221593857 71
+1652255231701 0.01731405407190323 95
+1652255233631 0.01721024699509144 119
+1652255235420 0.017218105494976044 143
+1652255237173 0.017209554091095924 167
+1652255238968 0.017206067219376564 191
+1652255240765 0.01723366603255272 215
+1652255242504 0.017255961894989014 239
+1652255244265 0.0172098558396101 263
+1652255245973 0.017262130975723267 287
+1652255247776 0.01725824549794197 311
+1652255249510 0.01725197769701481 335
+1652255251271 0.01724371127784252 359
+1652255253051 0.01721191592514515 383
+1652255254846 0.017206646502017975 407
+1652255256590 0.017287323251366615 431
+1652255258332 0.01729772984981537 455
+1652255260077 0.01721062697470188 479
+1652255261916 0.01723051443696022 503
+1652255263678 0.017213787883520126 527
+1652255265470 0.017394769936800003 551
+1652255267224 0.017381761223077774 575
+1652255269012 0.017432034015655518 599
+1652255270889 0.017218973487615585 623
+1652255272597 0.01721384935081005 647
+1652255274348 0.017252901569008827 671
+1652255276098 0.017253605648875237 695
+1652255277921 0.017367159947752953 719
+1652255279693 0.017220433801412582 743
+1652255281456 0.017215099185705185 767
+1652255283271 0.017244061455130577 791
+1652255285074 0.017238574102520943 815
+1652255286848 0.017323631793260574 839
+1652255288645 0.01721755787730217 863
+1652255290391 0.017217906191945076 887
+1652255292238 0.017223307862877846 911
+1652255293971 0.017246928066015244 935
+1652255295705 0.017216704785823822 959
+1652255297458 0.017213817685842514 983
+1652255299252 0.017215833067893982 1007
+1652255301015 0.017225690186023712 1031
+1652255302770 0.017214616760611534 1055
+1652255304540 0.01723295822739601 1079
+1652255306350 0.01722612977027893 1103
+1652255308183 0.017204368487000465 1127
+1652255310043 0.017243025824427605 1151
+1652255311742 0.017214054241776466 1175
+1652255313531 0.01733502931892872 1199
+1652255315319 0.017207222059369087 1223
+1652255317051 0.01720462366938591 1247
+1652255318807 0.017218230292201042 1271
+1652255320529 0.017202723771333694 1295
+1652255322361 0.0172096136957407 1319
+1652255324102 0.01736305095255375 1343
+1652255325912 0.01721891388297081 1367
+1652255327692 0.01725078746676445 1391
+1652255329496 0.017202183604240417 1415
+1652255331243 0.017204228788614273 1439
+1652255333003 0.01720697060227394 1463
+1652255334794 0.017197739332914352 1487
+1652255336636 0.017204908654093742 1511
+1652255338433 0.017327692359685898 1535
+1652255340211 0.017201870679855347 1559
+1652255341996 0.01720881089568138 1583
+1652255343809 0.017197327688336372 1607
+1652255345630 0.01720130629837513 1631
+1652255347390 0.01720035821199417 1655
+1652255349125 0.017233533784747124 1679
+1652255350905 0.017218707129359245 1703
+1652255352731 0.017234478145837784 1727
+1652255354455 0.017290374264121056 1751
+1652255356226 0.017197584733366966 1775
+1652255358061 0.01719658449292183 1799
+1652255359907 0.017195407301187515 1823
+1652255361669 0.017232319340109825 1847
+1652255363448 0.017266282811760902 1871
+1652255365177 0.017222236841917038 1895
+1652255366993 0.01719973422586918 1919
+1652255368727 0.01719573885202408 1943
+1652255370458 0.017268674448132515 1967
+1652255372211 0.017230616882443428 1991
+1652255374023 0.0171958077698946 2015
+1652255375773 0.017308201640844345 2039
+1652255377505 0.017223507165908813 2063
+1652255379283 0.017231978476047516 2087
+1652255381074 0.017200246453285217 2111
+1652255382841 0.017202293500304222 2135
+1652255384667 0.01723109930753708 2159
+1652255386412 0.017202844843268394 2183
+1652255388223 0.017252525314688683 2207
+1652255390024 0.017222154885530472 2231
+1652255391780 0.01719973050057888 2255
+1652255393529 0.017374495044350624 2279
+1652255395287 0.01721331663429737 2303
+1652255397053 0.017199618741869926 2327
+1652255398829 0.01720503717660904 2351
+1652255400607 0.017334815114736557 2375
+1652255402340 0.017304159700870514 2399
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/metrics/val/mae_best
ADDED
@@ -0,0 +1,100 @@
+1652255226110 0.017681343480944633 23
+1652255227893 0.017488993704319 47
+1652255229874 0.017488993704319 71
+1652255231701 0.01731405407190323 95
+1652255233631 0.01721024699509144 119
+1652255235420 0.01721024699509144 143
+1652255237173 0.017209554091095924 167
+1652255238968 0.017206067219376564 191
+1652255240765 0.017206067219376564 215
+1652255242504 0.017206067219376564 239
+1652255244265 0.017206067219376564 263
+1652255245973 0.017206067219376564 287
+1652255247776 0.017206067219376564 311
+1652255249510 0.017206067219376564 335
+1652255251271 0.017206067219376564 359
+1652255253051 0.017206067219376564 383
+1652255254846 0.017206067219376564 407
+1652255256590 0.017206067219376564 431
+1652255258332 0.017206067219376564 455
+1652255260077 0.017206067219376564 479
+1652255261916 0.017206067219376564 503
+1652255263678 0.017206067219376564 527
+1652255265470 0.017206067219376564 551
+1652255267224 0.017206067219376564 575
+1652255269012 0.017206067219376564 599
+1652255270889 0.017206067219376564 623
+1652255272597 0.017206067219376564 647
+1652255274348 0.017206067219376564 671
+1652255276098 0.017206067219376564 695
+1652255277921 0.017206067219376564 719
+1652255279693 0.017206067219376564 743
+1652255281456 0.017206067219376564 767
+1652255283271 0.017206067219376564 791
+1652255285074 0.017206067219376564 815
+1652255286848 0.017206067219376564 839
+1652255288645 0.017206067219376564 863
+1652255290391 0.017206067219376564 887
+1652255292238 0.017206067219376564 911
+1652255293971 0.017206067219376564 935
+1652255295705 0.017206067219376564 959
+1652255297458 0.017206067219376564 983
+1652255299252 0.017206067219376564 1007
+1652255301015 0.017206067219376564 1031
+1652255302770 0.017206067219376564 1055
+1652255304540 0.017206067219376564 1079
+1652255306350 0.017206067219376564 1103
+1652255308183 0.017204368487000465 1127
+1652255310043 0.017204368487000465 1151
+1652255311742 0.017204368487000465 1175
+1652255313531 0.017204368487000465 1199
+1652255315319 0.017204368487000465 1223
+1652255317051 0.017204368487000465 1247
+1652255318807 0.017204368487000465 1271
+1652255320529 0.017202723771333694 1295
+1652255322361 0.017202723771333694 1319
+1652255324102 0.017202723771333694 1343
+1652255325912 0.017202723771333694 1367
+1652255327692 0.017202723771333694 1391
+1652255329496 0.017202183604240417 1415
+1652255331243 0.017202183604240417 1439
+1652255333003 0.017202183604240417 1463
+1652255334794 0.017197739332914352 1487
+1652255336636 0.017197739332914352 1511
+1652255338433 0.017197739332914352 1535
+1652255340211 0.017197739332914352 1559
+1652255341996 0.017197739332914352 1583
+1652255343809 0.017197327688336372 1607
+1652255345630 0.017197327688336372 1631
+1652255347390 0.017197327688336372 1655
+1652255349125 0.017197327688336372 1679
+1652255350905 0.017197327688336372 1703
+1652255352731 0.017197327688336372 1727
+1652255354455 0.017197327688336372 1751
+1652255356226 0.017197327688336372 1775
+1652255358061 0.01719658449292183 1799
+1652255359907 0.017195407301187515 1823
+1652255361669 0.017195407301187515 1847
+1652255363448 0.017195407301187515 1871
+1652255365177 0.017195407301187515 1895
+1652255366993 0.017195407301187515 1919
+1652255368727 0.017195407301187515 1943
+1652255370458 0.017195407301187515 1967
+1652255372211 0.017195407301187515 1991
+1652255374023 0.017195407301187515 2015
+1652255375773 0.017195407301187515 2039
+1652255377505 0.017195407301187515 2063
+1652255379283 0.017195407301187515 2087
+1652255381074 0.017195407301187515 2111
+1652255382841 0.017195407301187515 2135
+1652255384667 0.017195407301187515 2159
+1652255386412 0.017195407301187515 2183
+1652255388223 0.017195407301187515 2207
+1652255390024 0.017195407301187515 2231
+1652255391780 0.017195407301187515 2255
+1652255393529 0.017195407301187515 2279
+1652255395287 0.017195407301187515 2303
+1652255397053 0.017195407301187515 2327
+1652255398829 0.017195407301187515 2351
+1652255400607 0.017195407301187515 2375
+1652255402340 0.017195407301187515 2399
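The val/mae_best series above appears to be the running minimum of val/mae: it stays flat except at the epochs where a new best validation MAE is reached (e.g. steps 1127, 1295, and 1823). Under that assumption, it can be reproduced from the raw val/mae file with the helper sketched earlier:

from itertools import accumulate

mae = [value for _, value, _ in read_mlflow_metric(f"{run_dir}/metrics/val/mae")]
# Running minimum; this should match the val/mae_best values logged above.
mae_best = list(accumulate(mae, min))
assert mae_best[-1] == min(mae)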
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/early_stopping/_target_
ADDED
@@ -0,0 +1 @@
+pytorch_lightning.callbacks.EarlyStopping
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/early_stopping/min_delta
ADDED
@@ -0,0 +1 @@
+0
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/early_stopping/mode
ADDED
@@ -0,0 +1 @@
+min
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/early_stopping/monitor
ADDED
@@ -0,0 +1 @@
+val/mae
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/early_stopping/patience
ADDED
@@ -0,0 +1 @@
+100
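The five early_stopping entries above are a flattened Hydra config for PyTorch Lightning's EarlyStopping callback. Put back together, they correspond to roughly this instantiation (a sketch; in the run itself the callback is built via Hydra from the `_target_` field):

from pytorch_lightning.callbacks import EarlyStopping

# Stop when val/mae has not improved (mode="min") by at least min_delta=0
# for patience=100 consecutive validation checks.
early_stopping = EarlyStopping(
    monitor="val/mae",
    mode="min",
    min_delta=0,
    patience=100,
)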
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_checkpoint/_target_
ADDED
@@ -0,0 +1 @@
+pytorch_lightning.callbacks.ModelCheckpoint
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_checkpoint/auto_insert_metric_name
ADDED
@@ -0,0 +1 @@
+False
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_checkpoint/dirpath
ADDED
@@ -0,0 +1 @@
+checkpoints/
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_checkpoint/filename
ADDED
@@ -0,0 +1 @@
+epoch_{epoch:03d}
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_checkpoint/mode
ADDED
@@ -0,0 +1 @@
+min
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_checkpoint/monitor
ADDED
@@ -0,0 +1 @@
+val/mae
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_checkpoint/save_last
ADDED
@@ -0,0 +1 @@
+True
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_checkpoint/save_top_k
ADDED
@@ -0,0 +1 @@
+1
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_checkpoint/verbose
ADDED
@@ -0,0 +1 @@
+False
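The model_checkpoint parameters map onto PyTorch Lightning's ModelCheckpoint callback in the same way: keep the single best checkpoint by val/mae, always write last.ckpt, and name files epoch_NNN. A sketch with the logged values:

from pytorch_lightning.callbacks import ModelCheckpoint

model_checkpoint = ModelCheckpoint(
    dirpath="checkpoints/",
    filename="epoch_{epoch:03d}",
    monitor="val/mae",
    mode="min",
    save_last=True,               # also keep last.ckpt
    save_top_k=1,                 # only the single best checkpoint by val/mae
    auto_insert_metric_name=False,
    verbose=False,
)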
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_summary/_target_
ADDED
@@ -0,0 +1 @@
+pytorch_lightning.callbacks.RichModelSummary
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/model_summary/max_depth
ADDED
@@ -0,0 +1 @@
+-1
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/callbacks/rich_progress_bar/_target_
ADDED
@@ -0,0 +1 @@
+pytorch_lightning.callbacks.RichProgressBar
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/datamodule/_target_
ADDED
@@ -0,0 +1 @@
+src.datamodules.focus_datamodule.FocusDataModule
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/datamodule/batch_size
ADDED
@@ -0,0 +1 @@
+128
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/datamodule/csv_test_file
ADDED
@@ -0,0 +1 @@
+/usr/src/app/data/focus150/test_metadata.csv
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/datamodule/csv_train_file
ADDED
@@ -0,0 +1 @@
+/usr/src/app/data/focus150/train_metadata.csv
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/datamodule/csv_val_file
ADDED
@@ -0,0 +1 @@
+/usr/src/app/data/focus150/validation_metadata.csv
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/datamodule/data_dir
ADDED
@@ -0,0 +1 @@
+/usr/src/app/data/focus150
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/datamodule/num_workers
ADDED
@@ -0,0 +1 @@
+0
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/datamodule/pin_memory
ADDED
@@ -0,0 +1 @@
+False
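The datamodule parameters form a Hydra node with a `_target_`, so the data module can be rebuilt from exactly these values. A minimal sketch, assuming (as the flattened keys suggest) that they are the constructor arguments of FocusDataModule; the class itself is not shown in this diff:

from hydra.utils import instantiate

datamodule_cfg = {
    "_target_": "src.datamodules.focus_datamodule.FocusDataModule",
    "data_dir": "/usr/src/app/data/focus150",
    "csv_train_file": "/usr/src/app/data/focus150/train_metadata.csv",
    "csv_val_file": "/usr/src/app/data/focus150/validation_metadata.csv",
    "csv_test_file": "/usr/src/app/data/focus150/test_metadata.csv",
    "batch_size": 128,
    "num_workers": 0,
    "pin_memory": False,
}

# instantiate() resolves _target_ and passes the remaining keys as kwargs.
datamodule = instantiate(datamodule_cfg)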
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/model/_target_
ADDED
@@ -0,0 +1 @@
+src.models.focus_conv_module.FocusConvLitModule
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/model/conv1_channels
ADDED
@@ -0,0 +1 @@
+6
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/model/conv1_size
ADDED
@@ -0,0 +1 @@
+5
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/model/conv2_channels
ADDED
@@ -0,0 +1 @@
+16
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/model/conv2_size
ADDED
@@ -0,0 +1 @@
+5
logs/mlflow/mlruns/1/32404d6b69574a41967cc1061badf333/params/model/image_size
ADDED
@@ -0,0 +1 @@
+150