diff --git a/configs/experiment/focusConvReLU_MSE_150.yaml b/configs/experiment/focusConvReLU_MSE_150.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e87e187e0f40df17f5b2b10b702bb20c1db542dc --- /dev/null +++ b/configs/experiment/focusConvReLU_MSE_150.yaml @@ -0,0 +1,40 @@ +# @package _global_ + +# to execute this experiment run: +# python train.py experiment=focusConvReLU_MSE_150 + +defaults: + - override /datamodule: focus150.yaml + - override /model: focusConvReLU_150.yaml + - override /callbacks: default.yaml + - override /logger: many_loggers + - override /trainer: default.yaml + +# all parameters below will be merged with parameters from default configurations set above +# this allows you to overwrite only specified parameters + +# name of the run determines folder name in logs +name: "focusConvReLU_MSE_150" +seed: 12345 + +trainer: + min_epochs: 1 + max_epochs: 100 + +model: + image_size: 150 + pool_size: 2 + conv1_size: 3 + conv1_channels: 9 + conv2_size: 7 + conv2_channels: 6 + lin1_size: 32 + lin2_size: 72 + output_size: 1 + lr: 0.001 + weight_decay: 0.0005 + +datamodule: + batch_size: 64 + augmentation: True + \ No newline at end of file diff --git a/configs/model/focusConvReLU_150.yaml b/configs/model/focusConvReLU_150.yaml new file mode 100644 index 0000000000000000000000000000000000000000..18ded84580b16724620df74716dfb985a44f47e8 --- /dev/null +++ b/configs/model/focusConvReLU_150.yaml @@ -0,0 +1,13 @@ +_target_: src.models.focus_conv_module.FocusConvReLULitModule + +image_size: 150 +pool_size: 2 +conv1_size: 5 +conv1_channels: 6 +conv2_size: 5 +conv2_channels: 16 +lin1_size: 100 +lin2_size: 80 +output_size: 1 +lr: 0.001 +weight_decay: 0.0005 diff --git a/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-32-57/.hydra/config.yaml b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-32-57/.hydra/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..c1eb13c8d3022a0de4ef30fe1c6d18dd30de7410 --- /dev/null +++ b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-32-57/.hydra/config.yaml @@ -0,0 +1,81 @@ +original_work_dir: ${hydra:runtime.cwd} +data_dir: ${original_work_dir}/data +print_config: true +ignore_warnings: true +train: true +test: true +seed: 12345 +name: focusConvReLU_MSE_150 +datamodule: + _target_: src.datamodules.focus_datamodule.FocusDataModule + data_dir: ${data_dir}/focus150 + csv_train_file: ${data_dir}/focus150/train_metadata.csv + csv_val_file: ${data_dir}/focus150/validation_metadata.csv + csv_test_file: ${data_dir}/focus150/test_metadata.csv + batch_size: 64 + num_workers: 0 + pin_memory: false + augmentation: true +model: + _target_: src.models.focus_conv_module.FocusConvReLULitModule + image_size: 150 + pool_size: 2 + conv1_size: 3 + conv1_channels: 9 + conv2_size: 7 + conv2_channels: 6 + lin1_size: 32 + lin2_size: 72 + output_size: 1 + lr: 0.001 + weight_decay: 0.0005 +callbacks: + model_checkpoint: + _target_: pytorch_lightning.callbacks.ModelCheckpoint + monitor: val/mae + mode: min + save_top_k: 1 + save_last: true + verbose: false + dirpath: checkpoints/ + filename: epoch_{epoch:03d} + auto_insert_metric_name: false + early_stopping: + _target_: pytorch_lightning.callbacks.EarlyStopping + monitor: val/mae + mode: min + patience: 100 + min_delta: 0 + model_summary: + _target_: pytorch_lightning.callbacks.RichModelSummary + max_depth: -1 + rich_progress_bar: + _target_: pytorch_lightning.callbacks.RichProgressBar +logger: + csv: + _target_: pytorch_lightning.loggers.csv_logs.CSVLogger +
save_dir: . + name: csv/ + prefix: '' + mlflow: + _target_: pytorch_lightning.loggers.mlflow.MLFlowLogger + experiment_name: ${name} + tracking_uri: ${original_work_dir}/logs/mlflow/mlruns + tags: null + save_dir: ./mlruns + prefix: '' + artifact_location: null + tensorboard: + _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger + save_dir: tensorboard/ + name: null + version: ${name} + log_graph: false + default_hp_metric: true + prefix: '' +trainer: + _target_: pytorch_lightning.Trainer + gpus: 1 + min_epochs: 1 + max_epochs: 100 + resume_from_checkpoint: null diff --git a/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-32-57/.hydra/hydra.yaml b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-32-57/.hydra/hydra.yaml new file mode 100644 index 0000000000000000000000000000000000000000..4e7172e5d6f585f39ad66e7ae6fa130750dd4107 --- /dev/null +++ b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-32-57/.hydra/hydra.yaml @@ -0,0 +1,170 @@ +hydra: + run: + dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S} + sweep: + dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S} + subdir: ${hydra.job.num} + launcher: + _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher + sweeper: + _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper + max_batch_size: null + help: + app_name: ${hydra.job.name} + header: '${hydra.help.app_name} is powered by Hydra. + + ' + footer: 'Powered by Hydra (https://hydra.cc) + + Use --hydra-help to view Hydra specific help + + ' + template: '${hydra.help.header} + + == Configuration groups == + + Compose your configuration from those groups (group=option) + + + $APP_CONFIG_GROUPS + + + == Config == + + Override anything in the config (foo.bar=value) + + + $CONFIG + + + ${hydra.help.footer} + + ' + hydra_help: + template: 'Hydra (${hydra.runtime.version}) + + See https://hydra.cc for more info. + + + == Flags == + + $FLAGS_HELP + + + == Configuration groups == + + Compose your configuration from those groups (For example, append hydra/job_logging=disabled + to command line) + + + $HYDRA_CONFIG_GROUPS + + + Use ''--cfg hydra'' to Show the Hydra config. + + ' + hydra_help: ??? + hydra_logging: + version: 1 + formatters: + colorlog: + (): colorlog.ColoredFormatter + format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' + handlers: + console: + class: logging.StreamHandler + formatter: colorlog + stream: ext://sys.stdout + root: + level: INFO + handlers: + - console + disable_existing_loggers: false + job_logging: + version: 1 + formatters: + simple: + format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' + colorlog: + (): colorlog.ColoredFormatter + format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] + - %(message)s' + log_colors: + DEBUG: purple + INFO: green + WARNING: yellow + ERROR: red + CRITICAL: red + handlers: + console: + class: logging.StreamHandler + formatter: colorlog + stream: ext://sys.stdout + file: + class: logging.FileHandler + formatter: simple + filename: ${hydra.job.name}.log + root: + level: INFO + handlers: + - console + - file + disable_existing_loggers: false + env: {} + searchpath: [] + callbacks: {} + output_subdir: .hydra + overrides: + hydra: [] + task: + - experiment=focusConvReLU_MSE_150 + job: + name: train + override_dirname: experiment=focusConvReLU_MSE_150 + id: ??? + num: ??? 
+ config_name: train.yaml + env_set: {} + env_copy: [] + config: + override_dirname: + kv_sep: '=' + item_sep: ',' + exclude_keys: [] + runtime: + version: 1.1.2 + cwd: /usr/src/app + config_sources: + - path: hydra.conf + schema: pkg + provider: hydra + - path: /usr/src/app/configs + schema: file + provider: main + - path: hydra_plugins.hydra_colorlog.conf + schema: pkg + provider: hydra-colorlog + - path: '' + schema: structured + provider: schema + choices: + local: default.yaml + hparams_search: null + debug: null + experiment: focusConvReLU_MSE_150 + log_dir: default.yaml + trainer: default.yaml + logger: many_loggers + callbacks: default.yaml + model: focusConvReLU_150.yaml + datamodule: focus150.yaml + hydra/env: default + hydra/callbacks: null + hydra/job_logging: colorlog + hydra/hydra_logging: colorlog + hydra/hydra_help: default + hydra/help: default + hydra/sweeper: basic + hydra/launcher: basic + hydra/output: default + verbose: false diff --git a/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-32-57/.hydra/overrides.yaml b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-32-57/.hydra/overrides.yaml new file mode 100644 index 0000000000000000000000000000000000000000..06579d09c760c0414d849ecee6d6efc4e4f076b7 --- /dev/null +++ b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-32-57/.hydra/overrides.yaml @@ -0,0 +1 @@ +- experiment=focusConvReLU_MSE_150 diff --git a/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-33-41/.hydra/config.yaml b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-33-41/.hydra/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..c1eb13c8d3022a0de4ef30fe1c6d18dd30de7410 --- /dev/null +++ b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-33-41/.hydra/config.yaml @@ -0,0 +1,81 @@ +original_work_dir: ${hydra:runtime.cwd} +data_dir: ${original_work_dir}/data +print_config: true +ignore_warnings: true +train: true +test: true +seed: 12345 +name: focusConvReLU_MSE_150 +datamodule: + _target_: src.datamodules.focus_datamodule.FocusDataModule + data_dir: ${data_dir}/focus150 + csv_train_file: ${data_dir}/focus150/train_metadata.csv + csv_val_file: ${data_dir}/focus150/validation_metadata.csv + csv_test_file: ${data_dir}/focus150/test_metadata.csv + batch_size: 64 + num_workers: 0 + pin_memory: false + augmentation: true +model: + _target_: src.models.focus_conv_module.FocusConvReLULitModule + image_size: 150 + pool_size: 2 + conv1_size: 3 + conv1_channels: 9 + conv2_size: 7 + conv2_channels: 6 + lin1_size: 32 + lin2_size: 72 + output_size: 1 + lr: 0.001 + weight_decay: 0.0005 +callbacks: + model_checkpoint: + _target_: pytorch_lightning.callbacks.ModelCheckpoint + monitor: val/mae + mode: min + save_top_k: 1 + save_last: true + verbose: false + dirpath: checkpoints/ + filename: epoch_{epoch:03d} + auto_insert_metric_name: false + early_stopping: + _target_: pytorch_lightning.callbacks.EarlyStopping + monitor: val/mae + mode: min + patience: 100 + min_delta: 0 + model_summary: + _target_: pytorch_lightning.callbacks.RichModelSummary + max_depth: -1 + rich_progress_bar: + _target_: pytorch_lightning.callbacks.RichProgressBar +logger: + csv: + _target_: pytorch_lightning.loggers.csv_logs.CSVLogger + save_dir: . 
+ name: csv/ + prefix: '' + mlflow: + _target_: pytorch_lightning.loggers.mlflow.MLFlowLogger + experiment_name: ${name} + tracking_uri: ${original_work_dir}/logs/mlflow/mlruns + tags: null + save_dir: ./mlruns + prefix: '' + artifact_location: null + tensorboard: + _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger + save_dir: tensorboard/ + name: null + version: ${name} + log_graph: false + default_hp_metric: true + prefix: '' +trainer: + _target_: pytorch_lightning.Trainer + gpus: 1 + min_epochs: 1 + max_epochs: 100 + resume_from_checkpoint: null diff --git a/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-33-41/.hydra/hydra.yaml b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-33-41/.hydra/hydra.yaml new file mode 100644 index 0000000000000000000000000000000000000000..09f79944a46515f7e6c8f9e15327b0bfcc5f724f --- /dev/null +++ b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-33-41/.hydra/hydra.yaml @@ -0,0 +1,170 @@ +hydra: + run: + dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S} + sweep: + dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S} + subdir: ${hydra.job.num} + launcher: + _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher + sweeper: + _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper + max_batch_size: null + help: + app_name: ${hydra.job.name} + header: '${hydra.help.app_name} is powered by Hydra. + + ' + footer: 'Powered by Hydra (https://hydra.cc) + + Use --hydra-help to view Hydra specific help + + ' + template: '${hydra.help.header} + + == Configuration groups == + + Compose your configuration from those groups (group=option) + + + $APP_CONFIG_GROUPS + + + == Config == + + Override anything in the config (foo.bar=value) + + + $CONFIG + + + ${hydra.help.footer} + + ' + hydra_help: + template: 'Hydra (${hydra.runtime.version}) + + See https://hydra.cc for more info. + + + == Flags == + + $FLAGS_HELP + + + == Configuration groups == + + Compose your configuration from those groups (For example, append hydra/job_logging=disabled + to command line) + + + $HYDRA_CONFIG_GROUPS + + + Use ''--cfg hydra'' to Show the Hydra config. + + ' + hydra_help: ??? + hydra_logging: + version: 1 + formatters: + colorlog: + (): colorlog.ColoredFormatter + format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' + handlers: + console: + class: logging.StreamHandler + formatter: colorlog + stream: ext://sys.stdout + root: + level: INFO + handlers: + - console + disable_existing_loggers: false + job_logging: + version: 1 + formatters: + simple: + format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' + colorlog: + (): colorlog.ColoredFormatter + format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] + - %(message)s' + log_colors: + DEBUG: purple + INFO: green + WARNING: yellow + ERROR: red + CRITICAL: red + handlers: + console: + class: logging.StreamHandler + formatter: colorlog + stream: ext://sys.stdout + file: + class: logging.FileHandler + formatter: simple + filename: ${hydra.job.name}.log + root: + level: INFO + handlers: + - console + - file + disable_existing_loggers: false + env: {} + searchpath: [] + callbacks: {} + output_subdir: .hydra + overrides: + hydra: [] + task: + - experiment=focusConvReLU_MSE_150.yaml + job: + name: train + override_dirname: experiment=focusConvReLU_MSE_150.yaml + id: ??? + num: ??? 
+ config_name: train.yaml + env_set: {} + env_copy: [] + config: + override_dirname: + kv_sep: '=' + item_sep: ',' + exclude_keys: [] + runtime: + version: 1.1.2 + cwd: /usr/src/app + config_sources: + - path: hydra.conf + schema: pkg + provider: hydra + - path: /usr/src/app/configs + schema: file + provider: main + - path: hydra_plugins.hydra_colorlog.conf + schema: pkg + provider: hydra-colorlog + - path: '' + schema: structured + provider: schema + choices: + local: default.yaml + hparams_search: null + debug: null + experiment: focusConvReLU_MSE_150.yaml + log_dir: default.yaml + trainer: default.yaml + logger: many_loggers + callbacks: default.yaml + model: focusConvReLU_150.yaml + datamodule: focus150.yaml + hydra/env: default + hydra/callbacks: null + hydra/job_logging: colorlog + hydra/hydra_logging: colorlog + hydra/hydra_help: default + hydra/help: default + hydra/sweeper: basic + hydra/launcher: basic + hydra/output: default + verbose: false diff --git a/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-33-41/.hydra/overrides.yaml b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-33-41/.hydra/overrides.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0623216da8d7056900f9eeaf47133e860b1ebcc5 --- /dev/null +++ b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-33-41/.hydra/overrides.yaml @@ -0,0 +1 @@ +- experiment=focusConvReLU_MSE_150.yaml diff --git a/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-34-32/.hydra/config.yaml b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-34-32/.hydra/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..c1eb13c8d3022a0de4ef30fe1c6d18dd30de7410 --- /dev/null +++ b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-34-32/.hydra/config.yaml @@ -0,0 +1,81 @@ +original_work_dir: ${hydra:runtime.cwd} +data_dir: ${original_work_dir}/data +print_config: true +ignore_warnings: true +train: true +test: true +seed: 12345 +name: focusConvReLU_MSE_150 +datamodule: + _target_: src.datamodules.focus_datamodule.FocusDataModule + data_dir: ${data_dir}/focus150 + csv_train_file: ${data_dir}/focus150/train_metadata.csv + csv_val_file: ${data_dir}/focus150/validation_metadata.csv + csv_test_file: ${data_dir}/focus150/test_metadata.csv + batch_size: 64 + num_workers: 0 + pin_memory: false + augmentation: true +model: + _target_: src.models.focus_conv_module.FocusConvReLULitModule + image_size: 150 + pool_size: 2 + conv1_size: 3 + conv1_channels: 9 + conv2_size: 7 + conv2_channels: 6 + lin1_size: 32 + lin2_size: 72 + output_size: 1 + lr: 0.001 + weight_decay: 0.0005 +callbacks: + model_checkpoint: + _target_: pytorch_lightning.callbacks.ModelCheckpoint + monitor: val/mae + mode: min + save_top_k: 1 + save_last: true + verbose: false + dirpath: checkpoints/ + filename: epoch_{epoch:03d} + auto_insert_metric_name: false + early_stopping: + _target_: pytorch_lightning.callbacks.EarlyStopping + monitor: val/mae + mode: min + patience: 100 + min_delta: 0 + model_summary: + _target_: pytorch_lightning.callbacks.RichModelSummary + max_depth: -1 + rich_progress_bar: + _target_: pytorch_lightning.callbacks.RichProgressBar +logger: + csv: + _target_: pytorch_lightning.loggers.csv_logs.CSVLogger + save_dir: . 
+ name: csv/ + prefix: '' + mlflow: + _target_: pytorch_lightning.loggers.mlflow.MLFlowLogger + experiment_name: ${name} + tracking_uri: ${original_work_dir}/logs/mlflow/mlruns + tags: null + save_dir: ./mlruns + prefix: '' + artifact_location: null + tensorboard: + _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger + save_dir: tensorboard/ + name: null + version: ${name} + log_graph: false + default_hp_metric: true + prefix: '' +trainer: + _target_: pytorch_lightning.Trainer + gpus: 1 + min_epochs: 1 + max_epochs: 100 + resume_from_checkpoint: null diff --git a/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-34-32/.hydra/hydra.yaml b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-34-32/.hydra/hydra.yaml new file mode 100644 index 0000000000000000000000000000000000000000..09f79944a46515f7e6c8f9e15327b0bfcc5f724f --- /dev/null +++ b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-34-32/.hydra/hydra.yaml @@ -0,0 +1,170 @@ +hydra: + run: + dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S} + sweep: + dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S} + subdir: ${hydra.job.num} + launcher: + _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher + sweeper: + _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper + max_batch_size: null + help: + app_name: ${hydra.job.name} + header: '${hydra.help.app_name} is powered by Hydra. + + ' + footer: 'Powered by Hydra (https://hydra.cc) + + Use --hydra-help to view Hydra specific help + + ' + template: '${hydra.help.header} + + == Configuration groups == + + Compose your configuration from those groups (group=option) + + + $APP_CONFIG_GROUPS + + + == Config == + + Override anything in the config (foo.bar=value) + + + $CONFIG + + + ${hydra.help.footer} + + ' + hydra_help: + template: 'Hydra (${hydra.runtime.version}) + + See https://hydra.cc for more info. + + + == Flags == + + $FLAGS_HELP + + + == Configuration groups == + + Compose your configuration from those groups (For example, append hydra/job_logging=disabled + to command line) + + + $HYDRA_CONFIG_GROUPS + + + Use ''--cfg hydra'' to Show the Hydra config. + + ' + hydra_help: ??? + hydra_logging: + version: 1 + formatters: + colorlog: + (): colorlog.ColoredFormatter + format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' + handlers: + console: + class: logging.StreamHandler + formatter: colorlog + stream: ext://sys.stdout + root: + level: INFO + handlers: + - console + disable_existing_loggers: false + job_logging: + version: 1 + formatters: + simple: + format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' + colorlog: + (): colorlog.ColoredFormatter + format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] + - %(message)s' + log_colors: + DEBUG: purple + INFO: green + WARNING: yellow + ERROR: red + CRITICAL: red + handlers: + console: + class: logging.StreamHandler + formatter: colorlog + stream: ext://sys.stdout + file: + class: logging.FileHandler + formatter: simple + filename: ${hydra.job.name}.log + root: + level: INFO + handlers: + - console + - file + disable_existing_loggers: false + env: {} + searchpath: [] + callbacks: {} + output_subdir: .hydra + overrides: + hydra: [] + task: + - experiment=focusConvReLU_MSE_150.yaml + job: + name: train + override_dirname: experiment=focusConvReLU_MSE_150.yaml + id: ??? + num: ??? 
+ config_name: train.yaml + env_set: {} + env_copy: [] + config: + override_dirname: + kv_sep: '=' + item_sep: ',' + exclude_keys: [] + runtime: + version: 1.1.2 + cwd: /usr/src/app + config_sources: + - path: hydra.conf + schema: pkg + provider: hydra + - path: /usr/src/app/configs + schema: file + provider: main + - path: hydra_plugins.hydra_colorlog.conf + schema: pkg + provider: hydra-colorlog + - path: '' + schema: structured + provider: schema + choices: + local: default.yaml + hparams_search: null + debug: null + experiment: focusConvReLU_MSE_150.yaml + log_dir: default.yaml + trainer: default.yaml + logger: many_loggers + callbacks: default.yaml + model: focusConvReLU_150.yaml + datamodule: focus150.yaml + hydra/env: default + hydra/callbacks: null + hydra/job_logging: colorlog + hydra/hydra_logging: colorlog + hydra/hydra_help: default + hydra/help: default + hydra/sweeper: basic + hydra/launcher: basic + hydra/output: default + verbose: false diff --git a/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-34-32/.hydra/overrides.yaml b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-34-32/.hydra/overrides.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0623216da8d7056900f9eeaf47133e860b1ebcc5 --- /dev/null +++ b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-34-32/.hydra/overrides.yaml @@ -0,0 +1 @@ +- experiment=focusConvReLU_MSE_150.yaml diff --git a/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/.hydra/config.yaml b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/.hydra/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..c1eb13c8d3022a0de4ef30fe1c6d18dd30de7410 --- /dev/null +++ b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/.hydra/config.yaml @@ -0,0 +1,81 @@ +original_work_dir: ${hydra:runtime.cwd} +data_dir: ${original_work_dir}/data +print_config: true +ignore_warnings: true +train: true +test: true +seed: 12345 +name: focusConvReLU_MSE_150 +datamodule: + _target_: src.datamodules.focus_datamodule.FocusDataModule + data_dir: ${data_dir}/focus150 + csv_train_file: ${data_dir}/focus150/train_metadata.csv + csv_val_file: ${data_dir}/focus150/validation_metadata.csv + csv_test_file: ${data_dir}/focus150/test_metadata.csv + batch_size: 64 + num_workers: 0 + pin_memory: false + augmentation: true +model: + _target_: src.models.focus_conv_module.FocusConvReLULitModule + image_size: 150 + pool_size: 2 + conv1_size: 3 + conv1_channels: 9 + conv2_size: 7 + conv2_channels: 6 + lin1_size: 32 + lin2_size: 72 + output_size: 1 + lr: 0.001 + weight_decay: 0.0005 +callbacks: + model_checkpoint: + _target_: pytorch_lightning.callbacks.ModelCheckpoint + monitor: val/mae + mode: min + save_top_k: 1 + save_last: true + verbose: false + dirpath: checkpoints/ + filename: epoch_{epoch:03d} + auto_insert_metric_name: false + early_stopping: + _target_: pytorch_lightning.callbacks.EarlyStopping + monitor: val/mae + mode: min + patience: 100 + min_delta: 0 + model_summary: + _target_: pytorch_lightning.callbacks.RichModelSummary + max_depth: -1 + rich_progress_bar: + _target_: pytorch_lightning.callbacks.RichProgressBar +logger: + csv: + _target_: pytorch_lightning.loggers.csv_logs.CSVLogger + save_dir: . 
+ name: csv/ + prefix: '' + mlflow: + _target_: pytorch_lightning.loggers.mlflow.MLFlowLogger + experiment_name: ${name} + tracking_uri: ${original_work_dir}/logs/mlflow/mlruns + tags: null + save_dir: ./mlruns + prefix: '' + artifact_location: null + tensorboard: + _target_: pytorch_lightning.loggers.tensorboard.TensorBoardLogger + save_dir: tensorboard/ + name: null + version: ${name} + log_graph: false + default_hp_metric: true + prefix: '' +trainer: + _target_: pytorch_lightning.Trainer + gpus: 1 + min_epochs: 1 + max_epochs: 100 + resume_from_checkpoint: null diff --git a/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/.hydra/hydra.yaml b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/.hydra/hydra.yaml new file mode 100644 index 0000000000000000000000000000000000000000..09f79944a46515f7e6c8f9e15327b0bfcc5f724f --- /dev/null +++ b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/.hydra/hydra.yaml @@ -0,0 +1,170 @@ +hydra: + run: + dir: logs/experiments/runs/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S} + sweep: + dir: logs/experiments/multiruns/${name}/${now:%Y-%m-%d}_${now:%H-%M-%S} + subdir: ${hydra.job.num} + launcher: + _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher + sweeper: + _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper + max_batch_size: null + help: + app_name: ${hydra.job.name} + header: '${hydra.help.app_name} is powered by Hydra. + + ' + footer: 'Powered by Hydra (https://hydra.cc) + + Use --hydra-help to view Hydra specific help + + ' + template: '${hydra.help.header} + + == Configuration groups == + + Compose your configuration from those groups (group=option) + + + $APP_CONFIG_GROUPS + + + == Config == + + Override anything in the config (foo.bar=value) + + + $CONFIG + + + ${hydra.help.footer} + + ' + hydra_help: + template: 'Hydra (${hydra.runtime.version}) + + See https://hydra.cc for more info. + + + == Flags == + + $FLAGS_HELP + + + == Configuration groups == + + Compose your configuration from those groups (For example, append hydra/job_logging=disabled + to command line) + + + $HYDRA_CONFIG_GROUPS + + + Use ''--cfg hydra'' to Show the Hydra config. + + ' + hydra_help: ??? + hydra_logging: + version: 1 + formatters: + colorlog: + (): colorlog.ColoredFormatter + format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s' + handlers: + console: + class: logging.StreamHandler + formatter: colorlog + stream: ext://sys.stdout + root: + level: INFO + handlers: + - console + disable_existing_loggers: false + job_logging: + version: 1 + formatters: + simple: + format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s' + colorlog: + (): colorlog.ColoredFormatter + format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] + - %(message)s' + log_colors: + DEBUG: purple + INFO: green + WARNING: yellow + ERROR: red + CRITICAL: red + handlers: + console: + class: logging.StreamHandler + formatter: colorlog + stream: ext://sys.stdout + file: + class: logging.FileHandler + formatter: simple + filename: ${hydra.job.name}.log + root: + level: INFO + handlers: + - console + - file + disable_existing_loggers: false + env: {} + searchpath: [] + callbacks: {} + output_subdir: .hydra + overrides: + hydra: [] + task: + - experiment=focusConvReLU_MSE_150.yaml + job: + name: train + override_dirname: experiment=focusConvReLU_MSE_150.yaml + id: ??? + num: ??? 
+ config_name: train.yaml + env_set: {} + env_copy: [] + config: + override_dirname: + kv_sep: '=' + item_sep: ',' + exclude_keys: [] + runtime: + version: 1.1.2 + cwd: /usr/src/app + config_sources: + - path: hydra.conf + schema: pkg + provider: hydra + - path: /usr/src/app/configs + schema: file + provider: main + - path: hydra_plugins.hydra_colorlog.conf + schema: pkg + provider: hydra-colorlog + - path: '' + schema: structured + provider: schema + choices: + local: default.yaml + hparams_search: null + debug: null + experiment: focusConvReLU_MSE_150.yaml + log_dir: default.yaml + trainer: default.yaml + logger: many_loggers + callbacks: default.yaml + model: focusConvReLU_150.yaml + datamodule: focus150.yaml + hydra/env: default + hydra/callbacks: null + hydra/job_logging: colorlog + hydra/hydra_logging: colorlog + hydra/hydra_help: default + hydra/help: default + hydra/sweeper: basic + hydra/launcher: basic + hydra/output: default + verbose: false diff --git a/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/.hydra/overrides.yaml b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/.hydra/overrides.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0623216da8d7056900f9eeaf47133e860b1ebcc5 --- /dev/null +++ b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/.hydra/overrides.yaml @@ -0,0 +1 @@ +- experiment=focusConvReLU_MSE_150.yaml diff --git a/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/checkpoints/epoch_096.ckpt b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/checkpoints/epoch_096.ckpt new file mode 100644 index 0000000000000000000000000000000000000000..f06772aca34cd17dadac024dd3a143ef4c423fe2 --- /dev/null +++ b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/checkpoints/epoch_096.ckpt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c3e9159964f903b3063b0a2170eb2baa7d138b219689f46620567a0b821cbf98 +size 2612879 diff --git a/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/checkpoints/last.ckpt b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/checkpoints/last.ckpt new file mode 100644 index 0000000000000000000000000000000000000000..fd6237ca847e76546c4fffdbfa9dee611fc6401b --- /dev/null +++ b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/checkpoints/last.ckpt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3d046d8e08e9ab7ff46290261d0f10a025a04c4c43ff48de76df4a91d77ee0f6 +size 2612879 diff --git a/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/csv/version_0/hparams.yaml b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/csv/version_0/hparams.yaml new file mode 100644 index 0000000000000000000000000000000000000000..aa4f86c17417fb210d8dda721de1afbf27448da0 --- /dev/null +++ b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/csv/version_0/hparams.yaml @@ -0,0 +1,55 @@ +trainer: + _target_: pytorch_lightning.Trainer + gpus: 1 + min_epochs: 1 + max_epochs: 100 + resume_from_checkpoint: null +model: + _target_: src.models.focus_conv_module.FocusConvReLULitModule + image_size: 150 + pool_size: 2 + conv1_size: 3 + conv1_channels: 9 + conv2_size: 7 + conv2_channels: 6 + lin1_size: 32 + lin2_size: 72 + output_size: 1 + lr: 0.001 + weight_decay: 0.0005 +datamodule: + _target_: src.datamodules.focus_datamodule.FocusDataModule + data_dir: /usr/src/app/data/focus150 + csv_train_file: /usr/src/app/data/focus150/train_metadata.csv + 
csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv + csv_test_file: /usr/src/app/data/focus150/test_metadata.csv + batch_size: 64 + num_workers: 0 + pin_memory: false + augmentation: true +seed: 12345 +callbacks: + model_checkpoint: + _target_: pytorch_lightning.callbacks.ModelCheckpoint + monitor: val/mae + mode: min + save_top_k: 1 + save_last: true + verbose: false + dirpath: checkpoints/ + filename: epoch_{epoch:03d} + auto_insert_metric_name: false + early_stopping: + _target_: pytorch_lightning.callbacks.EarlyStopping + monitor: val/mae + mode: min + patience: 100 + min_delta: 0 + model_summary: + _target_: pytorch_lightning.callbacks.RichModelSummary + max_depth: -1 + rich_progress_bar: + _target_: pytorch_lightning.callbacks.RichProgressBar +model/params/total: 216231 +model/params/trainable: 216231 +model/params/non_trainable: 0 diff --git a/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/csv/version_0/metrics.csv b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/csv/version_0/metrics.csv new file mode 100644 index 0000000000000000000000000000000000000000..eca42b6788d6c4e28fb353bfc61b257a4a3fd092 --- /dev/null +++ b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/csv/version_0/metrics.csv @@ -0,0 +1,202 @@ +val/loss,val/mae,val/mae_best,epoch,step,train/loss,train/mae,test/loss,test/mae +0.0010634043719619513,0.02607358619570732,0.02607358619570732,0,47,,,, +,,,1,47,0.015712032094597816,0.07011217623949051,, +0.0008477172814309597,0.023356162011623383,0.023356162011623383,1,95,,,, +,,,2,95,0.0006998287281021476,0.021280117332935333,, +0.0006586583331227303,0.02111729420721531,0.02111729420721531,2,143,,,, +,,,3,143,0.0006262350943870842,0.020297976210713387,, +0.0006095922435633838,0.020395344123244286,0.020395344123244286,3,191,,,, +,,,4,191,0.0006112757255323231,0.020039746537804604,, +0.000579743820708245,0.02008357085287571,0.02008357085287571,4,239,,,, +,,,5,239,0.0005892604240216315,0.019787773489952087,, +0.000539788044989109,0.019109351560473442,0.019109351560473442,5,287,,,, +,,,6,287,0.0004991526948288083,0.018337024375796318,, +0.0005701080663129687,0.020062454044818878,0.019109351560473442,6,335,,,, +,,,7,335,0.00046538031892850995,0.017815634608268738,, +0.0005549467750824988,0.018724771216511726,0.018724771216511726,7,383,,,, +,,,8,383,0.0004903865046799183,0.017987234517931938,, +0.0005555949755944312,0.01976374350488186,0.018724771216511726,8,431,,,, +,,,9,431,0.0004449250118341297,0.017149975523352623,, +0.0004789295489899814,0.018088672310113907,0.018088672310113907,9,479,,,, +,,,10,479,0.0004655240918509662,0.01764928176999092,, +0.0005822633975185454,0.02009824477136135,0.018088672310113907,10,527,,,, +,,,11,527,0.00044383766362443566,0.017262466251850128,, +0.00048447673907503486,0.017433589324355125,0.017433589324355125,11,575,,,, +,,,12,575,0.0004356526769697666,0.017195068299770355,, +0.00046181678771972656,0.018075214698910713,0.017433589324355125,12,623,,,, +,,,13,623,0.00041521640378050506,0.01676551066339016,, +0.00041472347220405936,0.016927972435951233,0.016927972435951233,13,671,,,, +,,,14,671,0.0003966042131651193,0.016418082639575005,, +0.0005150702781975269,0.01795283891260624,0.016927972435951233,14,719,,,, +,,,15,719,0.0004069829301442951,0.016572223976254463,, +0.00043978836038149893,0.017694905400276184,0.016927972435951233,15,767,,,, +,,,16,767,0.0004001975175924599,0.016431566327810287,, +0.0004524872056208551,0.017309539020061493,0.016927972435951233,16,815,,,, 
+,,,17,815,0.000411732355132699,0.016624923795461655,, +0.0004895858000963926,0.017853353172540665,0.016927972435951233,17,863,,,, +,,,18,863,0.0003962284536100924,0.016285978257656097,, +0.00048630539095029235,0.017320722341537476,0.016927972435951233,18,911,,,, +,,,19,911,0.0003755573998205364,0.015845887362957,, +0.00045636019785888493,0.017117636278271675,0.016927972435951233,19,959,,,, +,,,20,959,0.00040089385583996773,0.016440516337752342,, +0.00042457759263925254,0.01685478165745735,0.01685478165745735,20,1007,,,, +,,,21,1007,0.00037927881930954754,0.015909109264612198,, +0.0005504836444742978,0.01946953497827053,0.01685478165745735,21,1055,,,, +,,,22,1055,0.00037079042522236705,0.015598192811012268,, +0.0005828228895552456,0.019640492275357246,0.01685478165745735,22,1103,,,, +,,,23,1103,0.0003636741894297302,0.015478463843464851,, +0.000536997162271291,0.01877320185303688,0.01685478165745735,23,1151,,,, +,,,24,1151,0.0003473551769275218,0.014973259530961514,, +0.0004296917177271098,0.016905883327126503,0.01685478165745735,24,1199,,,, +,,,25,1199,0.0003502711479086429,0.014955243095755577,, +0.000369079876691103,0.015234655700623989,0.015234655700623989,25,1247,,,, +,,,26,1247,0.0003374406951479614,0.014734053052961826,, +0.0005687376251444221,0.019466156139969826,0.015234655700623989,26,1295,,,, +,,,27,1295,0.0003077377623412758,0.013931847177445889,, +0.0003877778071910143,0.01577661745250225,0.015234655700623989,27,1343,,,, +,,,28,1343,0.00030867080204188824,0.013890136033296585,, +0.0007391179096885026,0.021662525832653046,0.015234655700623989,28,1391,,,, +,,,29,1391,0.0003139588516205549,0.013945831917226315,, +0.0002891223703045398,0.013201462104916573,0.013201462104916573,29,1439,,,, +,,,30,1439,0.00029595510568469763,0.013475497253239155,, +0.00136762042529881,0.03221268951892853,0.013201462104916573,30,1487,,,, +,,,31,1487,0.0002615793200675398,0.012566673569381237,, +0.0005622635362669826,0.018861766904592514,0.013201462104916573,31,1535,,,, +,,,32,1535,0.0002453128108754754,0.012109467759728432,, +0.0002613224496599287,0.012555716559290886,0.012555716559290886,32,1583,,,, +,,,33,1583,0.0002475597430020571,0.01209949143230915,, +0.00026621491997502744,0.01243590097874403,0.01243590097874403,33,1631,,,, +,,,34,1631,0.00021561248286161572,0.011168932542204857,, +0.0003965417272411287,0.015400874428451061,0.01243590097874403,34,1679,,,, +,,,35,1679,0.0002476295630913228,0.012050420045852661,, +0.0006301697576418519,0.019847549498081207,0.01243590097874403,35,1727,,,, +,,,36,1727,0.00022967555560171604,0.011594988405704498,, +0.0004612059856299311,0.01633538119494915,0.01243590097874403,36,1775,,,, +,,,37,1775,0.0002333874726900831,0.011586652137339115,, +0.00025420173187740147,0.012520545162260532,0.01243590097874403,37,1823,,,, +,,,38,1823,0.00023667479399591684,0.01181836612522602,, +0.0003383368020877242,0.015009582974016666,0.01243590097874403,38,1871,,,, +,,,39,1871,0.0001987472496693954,0.010663148015737534,, +0.0003864422906190157,0.016114825382828712,0.01243590097874403,39,1919,,,, +,,,40,1919,0.00019275647355243564,0.010579877533018589,, +0.00022850626555737108,0.011395555920898914,0.011395555920898914,40,1967,,,, +,,,41,1967,0.00020515960932243615,0.010919580236077309,, +0.00036217307206243277,0.015877339988946915,0.011395555920898914,41,2015,,,, +,,,42,2015,0.00020770113042090088,0.010937352664768696,, +0.00026963415439240634,0.012874774634838104,0.011395555920898914,42,2063,,,, +,,,43,2063,0.00020995769591536373,0.010980397462844849,, 
+0.0003081822069361806,0.01374065037816763,0.011395555920898914,43,2111,,,, +,,,44,2111,0.0001873430737759918,0.010383875109255314,, +0.0008588534547016025,0.02455970272421837,0.011395555920898914,44,2159,,,, +,,,45,2159,0.0001887517428258434,0.010248820297420025,, +0.00025588576681911945,0.012964467518031597,0.011395555920898914,45,2207,,,, +,,,46,2207,0.00017797888722270727,0.009944379329681396,, +0.0012111393734812737,0.030586877837777138,0.011395555920898914,46,2255,,,, +,,,47,2255,0.00017462909454479814,0.009952726773917675,, +0.0005991487414576113,0.01970861665904522,0.011395555920898914,47,2303,,,, +,,,48,2303,0.00018134394485969096,0.010111919604241848,, +0.00027215175214223564,0.012582853436470032,0.011395555920898914,48,2351,,,, +,,,49,2351,0.0002078687393805012,0.010890776291489601,, +0.0002326530375285074,0.011895284987986088,0.011395555920898914,49,2399,,,, +,,,50,2399,0.00016190602036658674,0.009552615694701672,, +0.00034161240910179913,0.014539946801960468,0.011395555920898914,50,2447,,,, +,,,51,2447,0.0001574503257870674,0.009343606419861317,, +0.0002047311281785369,0.010513107292354107,0.010513107292354107,51,2495,,,, +,,,52,2495,0.00017259498417843133,0.00977871473878622,, +0.0002189291553804651,0.011585352011024952,0.010513107292354107,52,2543,,,, +,,,53,2543,0.00016215378127526492,0.009527333080768585,, +0.0008305492228828371,0.025230778381228447,0.010513107292354107,53,2591,,,, +,,,54,2591,0.00017192678933497518,0.009975402615964413,, +0.0003245118132326752,0.01448488887399435,0.010513107292354107,54,2639,,,, +,,,55,2639,0.00014025233394932002,0.008768484927713871,, +0.00043747638119384646,0.016850745305418968,0.010513107292354107,55,2687,,,, +,,,56,2687,0.00014388358977157623,0.008977909572422504,, +0.0003352710627950728,0.014916492626070976,0.010513107292354107,56,2735,,,, +,,,57,2735,0.00013614425552077591,0.008777343668043613,, +0.00017761132039595395,0.009772807359695435,0.009772807359695435,57,2783,,,, +,,,58,2783,0.00014166149776428938,0.008911573328077793,, +0.0002451064356137067,0.012413904070854187,0.009772807359695435,58,2831,,,, +,,,59,2831,0.00014307026867754757,0.008944649249315262,, +0.00045808948925696313,0.017842639237642288,0.009772807359695435,59,2879,,,, +,,,60,2879,0.00012790328764822334,0.008465489372611046,, +0.00016568403225392103,0.009426051750779152,0.009426051750779152,60,2927,,,, +,,,61,2927,0.00013250892516225576,0.008481234312057495,, +0.0002005849382840097,0.011247269809246063,0.009426051750779152,61,2975,,,, +,,,62,2975,0.00012225286627653986,0.008217735216021538,, +0.0004337916907388717,0.017680322751402855,0.009426051750779152,62,3023,,,, +,,,63,3023,0.00013184384442865849,0.008489103056490421,, +0.00043202925007790327,0.016938434913754463,0.009426051750779152,63,3071,,,, +,,,64,3071,0.0001139832820626907,0.00794004462659359,, +0.0006070298259146512,0.020868053659796715,0.009426051750779152,64,3119,,,, +,,,65,3119,0.00012551495456136763,0.00839218683540821,, +0.00125686835963279,0.03198228031396866,0.009426051750779152,65,3167,,,, +,,,66,3167,0.00013683686847798526,0.008686883375048637,, +0.0003458949795458466,0.01481217984110117,0.009426051750779152,66,3215,,,, +,,,67,3215,0.0001252585934707895,0.008334343321621418,, +0.00023582404537592083,0.01217477023601532,0.009426051750779152,67,3263,,,, +,,,68,3263,0.0001234338415088132,0.008174673654139042,, +0.00043027440551668406,0.0175019484013319,0.009426051750779152,68,3311,,,, +,,,69,3311,0.0001386790390824899,0.008791987784206867,, 
+0.00018619702314026654,0.01059352234005928,0.009426051750779152,69,3359,,,, +,,,70,3359,0.000133068097056821,0.008551678620278835,, +0.0004644407599698752,0.017586909234523773,0.009426051750779152,70,3407,,,, +,,,71,3407,0.0001232567592523992,0.008277542889118195,, +0.00025989525602199137,0.013158340007066727,0.009426051750779152,71,3455,,,, +,,,72,3455,0.00010765787737909704,0.007683832198381424,, +0.000271034223260358,0.013016403652727604,0.009426051750779152,72,3503,,,, +,,,73,3503,0.00010420197213534266,0.007514127530157566,, +0.00015595319564454257,0.00941098015755415,0.00941098015755415,73,3551,,,, +,,,74,3551,0.00011743942741304636,0.008030056953430176,, +0.00018362083937972784,0.010173797607421875,0.00941098015755415,74,3599,,,, +,,,75,3599,0.00013075751485303044,0.008549727499485016,, +0.0001723619207041338,0.010034985840320587,0.00941098015755415,75,3647,,,, +,,,76,3647,0.00012383297143969685,0.008334793150424957,, +0.00026339536998420954,0.01230852585285902,0.00941098015755415,76,3695,,,, +,,,77,3695,0.00012997408339288086,0.00849801953881979,, +0.0022478969767689705,0.043913356959819794,0.00941098015755415,77,3743,,,, +,,,78,3743,0.0001171917756437324,0.00794607400894165,, +0.00043688094592653215,0.01787818782031536,0.00941098015755415,78,3791,,,, +,,,79,3791,0.0001384491624776274,0.008685767650604248,, +0.000253666948992759,0.012860489077866077,0.00941098015755415,79,3839,,,, +,,,80,3839,0.00011071155313402414,0.007812082301825285,, +0.0007027353858575225,0.02276649698615074,0.00941098015755415,80,3887,,,, +,,,81,3887,0.00012242942466400564,0.008264751173555851,, +0.00036985089536756277,0.015199720859527588,0.00941098015755415,81,3935,,,, +,,,82,3935,0.00012077093560947105,0.008274727500975132,, +0.00042630801908671856,0.016393927857279778,0.00941098015755415,82,3983,,,, +,,,83,3983,0.00011708390229614452,0.008091365918517113,, +0.00018512520182412118,0.010693005286157131,0.00941098015755415,83,4031,,,, +,,,84,4031,0.00013511900033336133,0.008610659278929234,, +0.0006041005253791809,0.02147362008690834,0.00941098015755415,84,4079,,,, +,,,85,4079,0.00010951827425742522,0.007766522467136383,, +0.0005739088519476354,0.019695602357387543,0.00941098015755415,85,4127,,,, +,,,86,4127,0.00011793604062404484,0.008045573718845844,, +0.00020085058349650353,0.010959284380078316,0.00941098015755415,86,4175,,,, +,,,87,4175,0.00011460384848760441,0.007821462117135525,, +0.00035394675796851516,0.014944094233214855,0.00941098015755415,87,4223,,,, +,,,88,4223,0.00012705886911135167,0.008322164416313171,, +0.001467306399717927,0.03422271087765694,0.00941098015755415,88,4271,,,, +,,,89,4271,0.00011695512512233108,0.008126244880259037,, +0.0003908797516487539,0.015736214816570282,0.00941098015755415,89,4319,,,, +,,,90,4319,0.00011259406892349944,0.007857552729547024,, +0.0001805946958484128,0.009996251203119755,0.00941098015755415,90,4367,,,, +,,,91,4367,0.00011745867959689349,0.008035344071686268,, +0.00035339672467671335,0.015099494718015194,0.00941098015755415,91,4415,,,, +,,,92,4415,0.00013093209418002516,0.008491233922541142,, +0.00040678223012946546,0.015847915783524513,0.00941098015755415,92,4463,,,, +,,,93,4463,0.00013953317829873413,0.008733570575714111,, +0.00018399060354568064,0.010453518480062485,0.00941098015755415,93,4511,,,, +,,,94,4511,0.00012743366823997349,0.008426659740507603,, +0.0005399276851676404,0.018864858895540237,0.00941098015755415,94,4559,,,, +,,,95,4559,0.00011808565614046529,0.008046725764870644,, 
+0.00029979468672536314,0.01469734963029623,0.00941098015755415,95,4607,,,, +,,,96,4607,0.00015584049106109887,0.009322610683739185,, +0.00013096585462335497,0.00869279820472002,0.00869279820472002,96,4655,,,, +,,,97,4655,0.00013836825382895768,0.008852057158946991,, +0.00021977233700454235,0.011555126868188381,0.00869279820472002,97,4703,,,, +,,,98,4703,0.00012112926197005436,0.008111957460641861,, +0.00024704227689653635,0.01307331956923008,0.00869279820472002,98,4751,,,, +,,,99,4751,0.00012230231368448585,0.008170234970748425,, +0.0003437100094743073,0.01433614082634449,0.00869279820472002,99,4799,,,, +,,,100,4799,0.00011741576599888504,0.008008270524442196,, +,,,96,4800,,,0.00014260406896937639,0.008947640657424927 diff --git a/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/tensorboard/focusConvReLU_MSE_150/events.out.tfevents.1652279809.c49d1a46d86b.1.0 b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/tensorboard/focusConvReLU_MSE_150/events.out.tfevents.1652279809.c49d1a46d86b.1.0 new file mode 100644 index 0000000000000000000000000000000000000000..ccca9215c52c11fa14ccde7af6973eb915b2d0f6 --- /dev/null +++ b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/tensorboard/focusConvReLU_MSE_150/events.out.tfevents.1652279809.c49d1a46d86b.1.0 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7c87544f737c7a20d4e44d813742d70c0b9acd226f353912b880bbdbf298495a +size 36870 diff --git a/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/tensorboard/focusConvReLU_MSE_150/events.out.tfevents.1652280195.c49d1a46d86b.1.1 b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/tensorboard/focusConvReLU_MSE_150/events.out.tfevents.1652280195.c49d1a46d86b.1.1 new file mode 100644 index 0000000000000000000000000000000000000000..dd02245ffda599fe8115cd4d0b899e2571726aef --- /dev/null +++ b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/tensorboard/focusConvReLU_MSE_150/events.out.tfevents.1652280195.c49d1a46d86b.1.1 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7bdc9d2c64d369ea43ddafc00d36574863f5c53b08e1a856563afc76f942d11c +size 179 diff --git a/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/tensorboard/focusConvReLU_MSE_150/hparams.yaml b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/tensorboard/focusConvReLU_MSE_150/hparams.yaml new file mode 100644 index 0000000000000000000000000000000000000000..aa4f86c17417fb210d8dda721de1afbf27448da0 --- /dev/null +++ b/logs/experiments/runs/focusConvReLU_MSE_150/2022-05-11_14-36-47/tensorboard/focusConvReLU_MSE_150/hparams.yaml @@ -0,0 +1,55 @@ +trainer: + _target_: pytorch_lightning.Trainer + gpus: 1 + min_epochs: 1 + max_epochs: 100 + resume_from_checkpoint: null +model: + _target_: src.models.focus_conv_module.FocusConvReLULitModule + image_size: 150 + pool_size: 2 + conv1_size: 3 + conv1_channels: 9 + conv2_size: 7 + conv2_channels: 6 + lin1_size: 32 + lin2_size: 72 + output_size: 1 + lr: 0.001 + weight_decay: 0.0005 +datamodule: + _target_: src.datamodules.focus_datamodule.FocusDataModule + data_dir: /usr/src/app/data/focus150 + csv_train_file: /usr/src/app/data/focus150/train_metadata.csv + csv_val_file: /usr/src/app/data/focus150/validation_metadata.csv + csv_test_file: /usr/src/app/data/focus150/test_metadata.csv + batch_size: 64 + num_workers: 0 + pin_memory: false + augmentation: true +seed: 12345 +callbacks: + model_checkpoint: + _target_: pytorch_lightning.callbacks.ModelCheckpoint + 
monitor: val/mae + mode: min + save_top_k: 1 + save_last: true + verbose: false + dirpath: checkpoints/ + filename: epoch_{epoch:03d} + auto_insert_metric_name: false + early_stopping: + _target_: pytorch_lightning.callbacks.EarlyStopping + monitor: val/mae + mode: min + patience: 100 + min_delta: 0 + model_summary: + _target_: pytorch_lightning.callbacks.RichModelSummary + max_depth: -1 + rich_progress_bar: + _target_: pytorch_lightning.callbacks.RichProgressBar +model/params/total: 216231 +model/params/trainable: 216231 +model/params/non_trainable: 0 diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/meta.yaml b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/meta.yaml new file mode 100644 index 0000000000000000000000000000000000000000..84b590c4b2e419271e216e350620c4ce74a4b112 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/meta.yaml @@ -0,0 +1,15 @@ +artifact_uri: /usr/src/app/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/artifacts +end_time: 1652280195396 +entry_point_name: '' +experiment_id: '11' +lifecycle_stage: active +name: '' +run_id: fc9d4c9c610e4ae9a51aeec7e544684b +run_uuid: fc9d4c9c610e4ae9a51aeec7e544684b +source_name: '' +source_type: 4 +source_version: '' +start_time: 1652279809740 +status: 3 +tags: [] +user_id: unknown diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/epoch b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/epoch new file mode 100644 index 0000000000000000000000000000000000000000..99539011071ab57287a36b60c1c0ada797c77f17 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/epoch @@ -0,0 +1,201 @@ +1652279820704 0 47 +1652279820749 1 47 +1652279824357 1 95 +1652279824405 2 95 +1652279828044 2 143 +1652279828096 3 143 +1652279831676 3 191 +1652279831720 4 191 +1652279835446 4 239 +1652279835495 5 239 +1652279839271 5 287 +1652279839319 6 287 +1652279843079 6 335 +1652279843119 7 335 +1652279846830 7 383 +1652279846878 8 383 +1652279850614 8 431 +1652279850653 9 431 +1652279854390 9 479 +1652279854434 10 479 +1652279858241 10 527 +1652279858281 11 527 +1652279862079 11 575 +1652279862126 12 575 +1652279865967 12 623 +1652279866012 13 623 +1652279869769 13 671 +1652279869817 14 671 +1652279873629 14 719 +1652279873668 15 719 +1652279877456 15 767 +1652279877494 16 767 +1652279881261 16 815 +1652279881296 17 815 +1652279885044 17 863 +1652279885082 18 863 +1652279888829 18 911 +1652279888864 19 911 +1652279892527 19 959 +1652279892566 20 959 +1652279896237 20 1007 +1652279896290 21 1007 +1652279900002 21 1055 +1652279900037 22 1055 +1652279903680 22 1103 +1652279903717 23 1103 +1652279907516 23 1151 +1652279907559 24 1151 +1652279911251 24 1199 +1652279911367 25 1199 +1652279915084 25 1247 +1652279915129 26 1247 +1652279918947 26 1295 +1652279918986 27 1295 +1652279922767 27 1343 +1652279922806 28 1343 +1652279926483 28 1391 +1652279926523 29 1391 +1652279930329 29 1439 +1652279930377 30 1439 +1652279934121 30 1487 +1652279934160 31 1487 +1652279937833 31 1535 +1652279937872 32 1535 +1652279941623 32 1583 +1652279941677 33 1583 +1652279945268 33 1631 +1652279945317 34 1631 +1652279949005 34 1679 +1652279949040 35 1679 +1652279952772 35 1727 +1652279952812 36 1727 +1652279956496 36 1775 +1652279956535 37 1775 +1652279960298 37 1823 +1652279960344 38 1823 +1652279964073 38 1871 +1652279964110 39 1871 +1652279967844 39 1919 +1652279967883 40 1919 +1652279971446 40 1967 +1652279971491 41 1967 +1652279975126 41 2015 +1652279975165 42 
2015 +1652279978811 42 2063 +1652279978850 43 2063 +1652279982539 43 2111 +1652279982578 44 2111 +1652279986321 44 2159 +1652279986365 45 2159 +1652279990134 45 2207 +1652279990170 46 2207 +1652279993983 46 2255 +1652279994023 47 2255 +1652279997788 47 2303 +1652279997827 48 2303 +1652280001609 48 2351 +1652280001649 49 2351 +1652280005406 49 2399 +1652280005492 50 2399 +1652280009144 50 2447 +1652280009187 51 2447 +1652280012811 51 2495 +1652280012862 52 2495 +1652280016584 52 2543 +1652280016623 53 2543 +1652280020312 53 2591 +1652280020351 54 2591 +1652280024138 54 2639 +1652280024177 55 2639 +1652280027939 55 2687 +1652280027981 56 2687 +1652280031799 56 2735 +1652280031839 57 2735 +1652280035376 57 2783 +1652280035425 58 2783 +1652280039147 58 2831 +1652280039190 59 2831 +1652280043008 59 2879 +1652280043047 60 2879 +1652280046735 60 2927 +1652280046780 61 2927 +1652280050532 61 2975 +1652280050567 62 2975 +1652280054457 62 3023 +1652280054496 63 3023 +1652280058279 63 3071 +1652280058318 64 3071 +1652280062047 64 3119 +1652280062086 65 3119 +1652280065810 65 3167 +1652280065848 66 3167 +1652280069610 66 3215 +1652280069646 67 3215 +1652280073394 67 3263 +1652280073440 68 3263 +1652280077179 68 3311 +1652280077219 69 3311 +1652280081010 69 3359 +1652280081049 70 3359 +1652280084783 70 3407 +1652280084823 71 3407 +1652280088575 71 3455 +1652280088615 72 3455 +1652280092368 72 3503 +1652280092408 73 3503 +1652280096159 73 3551 +1652280096210 74 3551 +1652280099894 74 3599 +1652280099978 75 3599 +1652280103580 75 3647 +1652280103622 76 3647 +1652280107354 76 3695 +1652280107393 77 3695 +1652280111162 77 3743 +1652280111203 78 3743 +1652280114987 78 3791 +1652280115112 79 3791 +1652280118870 79 3839 +1652280118906 80 3839 +1652280122629 80 3887 +1652280122669 81 3887 +1652280126475 81 3935 +1652280126515 82 3935 +1652280130198 82 3983 +1652280130237 83 3983 +1652280134070 83 4031 +1652280134111 84 4031 +1652280137810 84 4079 +1652280137849 85 4079 +1652280141642 85 4127 +1652280141678 86 4127 +1652280145396 86 4175 +1652280145432 87 4175 +1652280149183 87 4223 +1652280149222 88 4223 +1652280152927 88 4271 +1652280152969 89 4271 +1652280156721 89 4319 +1652280156762 90 4319 +1652280160468 90 4367 +1652280160504 91 4367 +1652280164338 91 4415 +1652280164378 92 4415 +1652280168035 92 4463 +1652280168071 93 4463 +1652280171787 93 4511 +1652280171823 94 4511 +1652280175551 94 4559 +1652280175591 95 4559 +1652280179385 95 4607 +1652280179426 96 4607 +1652280183124 96 4655 +1652280183168 97 4655 +1652280186940 97 4703 +1652280186980 98 4703 +1652280190746 98 4751 +1652280190782 99 4751 +1652280194552 99 4799 +1652280194640 100 4799 +1652280195352 96 4800 diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/test/loss b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/test/loss new file mode 100644 index 0000000000000000000000000000000000000000..2dbf668358482024c27f66022e93f39c7481cc86 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/test/loss @@ -0,0 +1 @@ +1652280195352 0.00014260406896937639 4800 diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/test/mae b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/test/mae new file mode 100644 index 0000000000000000000000000000000000000000..62fe30ffa22a9dfc2bac22cc893e87c6359ee139 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/test/mae @@ -0,0 +1 @@ +1652280195352 0.008947640657424927 4800 diff --git 
a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/train/loss b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/train/loss new file mode 100644 index 0000000000000000000000000000000000000000..673f5e7ed66384854c274cde81c41a7b1c608382 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/train/loss @@ -0,0 +1,100 @@ +1652279820749 0.015712032094597816 47 +1652279824405 0.0006998287281021476 95 +1652279828096 0.0006262350943870842 143 +1652279831720 0.0006112757255323231 191 +1652279835495 0.0005892604240216315 239 +1652279839319 0.0004991526948288083 287 +1652279843119 0.00046538031892850995 335 +1652279846878 0.0004903865046799183 383 +1652279850653 0.0004449250118341297 431 +1652279854434 0.0004655240918509662 479 +1652279858281 0.00044383766362443566 527 +1652279862126 0.0004356526769697666 575 +1652279866012 0.00041521640378050506 623 +1652279869817 0.0003966042131651193 671 +1652279873668 0.0004069829301442951 719 +1652279877494 0.0004001975175924599 767 +1652279881296 0.000411732355132699 815 +1652279885082 0.0003962284536100924 863 +1652279888864 0.0003755573998205364 911 +1652279892566 0.00040089385583996773 959 +1652279896290 0.00037927881930954754 1007 +1652279900037 0.00037079042522236705 1055 +1652279903717 0.0003636741894297302 1103 +1652279907559 0.0003473551769275218 1151 +1652279911367 0.0003502711479086429 1199 +1652279915129 0.0003374406951479614 1247 +1652279918986 0.0003077377623412758 1295 +1652279922806 0.00030867080204188824 1343 +1652279926523 0.0003139588516205549 1391 +1652279930377 0.00029595510568469763 1439 +1652279934160 0.0002615793200675398 1487 +1652279937872 0.0002453128108754754 1535 +1652279941677 0.0002475597430020571 1583 +1652279945317 0.00021561248286161572 1631 +1652279949040 0.0002476295630913228 1679 +1652279952812 0.00022967555560171604 1727 +1652279956535 0.0002333874726900831 1775 +1652279960344 0.00023667479399591684 1823 +1652279964110 0.0001987472496693954 1871 +1652279967883 0.00019275647355243564 1919 +1652279971491 0.00020515960932243615 1967 +1652279975165 0.00020770113042090088 2015 +1652279978850 0.00020995769591536373 2063 +1652279982578 0.0001873430737759918 2111 +1652279986365 0.0001887517428258434 2159 +1652279990170 0.00017797888722270727 2207 +1652279994023 0.00017462909454479814 2255 +1652279997827 0.00018134394485969096 2303 +1652280001649 0.0002078687393805012 2351 +1652280005492 0.00016190602036658674 2399 +1652280009187 0.0001574503257870674 2447 +1652280012862 0.00017259498417843133 2495 +1652280016623 0.00016215378127526492 2543 +1652280020351 0.00017192678933497518 2591 +1652280024177 0.00014025233394932002 2639 +1652280027981 0.00014388358977157623 2687 +1652280031839 0.00013614425552077591 2735 +1652280035425 0.00014166149776428938 2783 +1652280039190 0.00014307026867754757 2831 +1652280043047 0.00012790328764822334 2879 +1652280046780 0.00013250892516225576 2927 +1652280050567 0.00012225286627653986 2975 +1652280054496 0.00013184384442865849 3023 +1652280058318 0.0001139832820626907 3071 +1652280062086 0.00012551495456136763 3119 +1652280065848 0.00013683686847798526 3167 +1652280069646 0.0001252585934707895 3215 +1652280073440 0.0001234338415088132 3263 +1652280077219 0.0001386790390824899 3311 +1652280081049 0.000133068097056821 3359 +1652280084823 0.0001232567592523992 3407 +1652280088615 0.00010765787737909704 3455 +1652280092408 0.00010420197213534266 3503 +1652280096210 0.00011743942741304636 3551 +1652280099978 0.00013075751485303044 3599 +1652280103622 
0.00012383297143969685 3647 +1652280107393 0.00012997408339288086 3695 +1652280111203 0.0001171917756437324 3743 +1652280115112 0.0001384491624776274 3791 +1652280118906 0.00011071155313402414 3839 +1652280122669 0.00012242942466400564 3887 +1652280126515 0.00012077093560947105 3935 +1652280130237 0.00011708390229614452 3983 +1652280134111 0.00013511900033336133 4031 +1652280137849 0.00010951827425742522 4079 +1652280141678 0.00011793604062404484 4127 +1652280145432 0.00011460384848760441 4175 +1652280149222 0.00012705886911135167 4223 +1652280152969 0.00011695512512233108 4271 +1652280156762 0.00011259406892349944 4319 +1652280160504 0.00011745867959689349 4367 +1652280164378 0.00013093209418002516 4415 +1652280168071 0.00013953317829873413 4463 +1652280171823 0.00012743366823997349 4511 +1652280175591 0.00011808565614046529 4559 +1652280179426 0.00015584049106109887 4607 +1652280183168 0.00013836825382895768 4655 +1652280186980 0.00012112926197005436 4703 +1652280190782 0.00012230231368448585 4751 +1652280194640 0.00011741576599888504 4799 diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/train/mae b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/train/mae new file mode 100644 index 0000000000000000000000000000000000000000..290c3fc81687902eca1b930b6b8359c32f74fe37 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/train/mae @@ -0,0 +1,100 @@ +1652279820749 0.07011217623949051 47 +1652279824405 0.021280117332935333 95 +1652279828096 0.020297976210713387 143 +1652279831720 0.020039746537804604 191 +1652279835495 0.019787773489952087 239 +1652279839319 0.018337024375796318 287 +1652279843119 0.017815634608268738 335 +1652279846878 0.017987234517931938 383 +1652279850653 0.017149975523352623 431 +1652279854434 0.01764928176999092 479 +1652279858281 0.017262466251850128 527 +1652279862126 0.017195068299770355 575 +1652279866012 0.01676551066339016 623 +1652279869817 0.016418082639575005 671 +1652279873668 0.016572223976254463 719 +1652279877494 0.016431566327810287 767 +1652279881296 0.016624923795461655 815 +1652279885082 0.016285978257656097 863 +1652279888864 0.015845887362957 911 +1652279892566 0.016440516337752342 959 +1652279896290 0.015909109264612198 1007 +1652279900037 0.015598192811012268 1055 +1652279903717 0.015478463843464851 1103 +1652279907559 0.014973259530961514 1151 +1652279911367 0.014955243095755577 1199 +1652279915129 0.014734053052961826 1247 +1652279918986 0.013931847177445889 1295 +1652279922806 0.013890136033296585 1343 +1652279926523 0.013945831917226315 1391 +1652279930377 0.013475497253239155 1439 +1652279934160 0.012566673569381237 1487 +1652279937872 0.012109467759728432 1535 +1652279941677 0.01209949143230915 1583 +1652279945317 0.011168932542204857 1631 +1652279949040 0.012050420045852661 1679 +1652279952812 0.011594988405704498 1727 +1652279956535 0.011586652137339115 1775 +1652279960344 0.01181836612522602 1823 +1652279964110 0.010663148015737534 1871 +1652279967883 0.010579877533018589 1919 +1652279971491 0.010919580236077309 1967 +1652279975165 0.010937352664768696 2015 +1652279978850 0.010980397462844849 2063 +1652279982578 0.010383875109255314 2111 +1652279986365 0.010248820297420025 2159 +1652279990170 0.009944379329681396 2207 +1652279994023 0.009952726773917675 2255 +1652279997827 0.010111919604241848 2303 +1652280001649 0.010890776291489601 2351 +1652280005492 0.009552615694701672 2399 +1652280009187 0.009343606419861317 2447 +1652280012862 0.00977871473878622 2495 
+1652280016623 0.009527333080768585 2543 +1652280020351 0.009975402615964413 2591 +1652280024177 0.008768484927713871 2639 +1652280027981 0.008977909572422504 2687 +1652280031839 0.008777343668043613 2735 +1652280035425 0.008911573328077793 2783 +1652280039190 0.008944649249315262 2831 +1652280043047 0.008465489372611046 2879 +1652280046780 0.008481234312057495 2927 +1652280050567 0.008217735216021538 2975 +1652280054496 0.008489103056490421 3023 +1652280058318 0.00794004462659359 3071 +1652280062086 0.00839218683540821 3119 +1652280065848 0.008686883375048637 3167 +1652280069646 0.008334343321621418 3215 +1652280073440 0.008174673654139042 3263 +1652280077219 0.008791987784206867 3311 +1652280081049 0.008551678620278835 3359 +1652280084823 0.008277542889118195 3407 +1652280088615 0.007683832198381424 3455 +1652280092408 0.007514127530157566 3503 +1652280096210 0.008030056953430176 3551 +1652280099978 0.008549727499485016 3599 +1652280103622 0.008334793150424957 3647 +1652280107393 0.00849801953881979 3695 +1652280111203 0.00794607400894165 3743 +1652280115112 0.008685767650604248 3791 +1652280118906 0.007812082301825285 3839 +1652280122669 0.008264751173555851 3887 +1652280126515 0.008274727500975132 3935 +1652280130237 0.008091365918517113 3983 +1652280134111 0.008610659278929234 4031 +1652280137849 0.007766522467136383 4079 +1652280141678 0.008045573718845844 4127 +1652280145432 0.007821462117135525 4175 +1652280149222 0.008322164416313171 4223 +1652280152969 0.008126244880259037 4271 +1652280156762 0.007857552729547024 4319 +1652280160504 0.008035344071686268 4367 +1652280164378 0.008491233922541142 4415 +1652280168071 0.008733570575714111 4463 +1652280171823 0.008426659740507603 4511 +1652280175591 0.008046725764870644 4559 +1652280179426 0.009322610683739185 4607 +1652280183168 0.008852057158946991 4655 +1652280186980 0.008111957460641861 4703 +1652280190782 0.008170234970748425 4751 +1652280194640 0.008008270524442196 4799 diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/val/loss b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/val/loss new file mode 100644 index 0000000000000000000000000000000000000000..15a4c3a9c6d9d243b4f5807570546ccddcbfbeab --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/val/loss @@ -0,0 +1,100 @@ +1652279820704 0.0010634043719619513 47 +1652279824357 0.0008477172814309597 95 +1652279828044 0.0006586583331227303 143 +1652279831676 0.0006095922435633838 191 +1652279835446 0.000579743820708245 239 +1652279839271 0.000539788044989109 287 +1652279843079 0.0005701080663129687 335 +1652279846830 0.0005549467750824988 383 +1652279850614 0.0005555949755944312 431 +1652279854390 0.0004789295489899814 479 +1652279858241 0.0005822633975185454 527 +1652279862079 0.00048447673907503486 575 +1652279865967 0.00046181678771972656 623 +1652279869769 0.00041472347220405936 671 +1652279873629 0.0005150702781975269 719 +1652279877456 0.00043978836038149893 767 +1652279881261 0.0004524872056208551 815 +1652279885044 0.0004895858000963926 863 +1652279888829 0.00048630539095029235 911 +1652279892527 0.00045636019785888493 959 +1652279896237 0.00042457759263925254 1007 +1652279900002 0.0005504836444742978 1055 +1652279903680 0.0005828228895552456 1103 +1652279907516 0.000536997162271291 1151 +1652279911251 0.0004296917177271098 1199 +1652279915084 0.000369079876691103 1247 +1652279918947 0.0005687376251444221 1295 +1652279922767 0.0003877778071910143 1343 +1652279926483 0.0007391179096885026 1391 
+1652279930329 0.0002891223703045398 1439 +1652279934121 0.00136762042529881 1487 +1652279937833 0.0005622635362669826 1535 +1652279941623 0.0002613224496599287 1583 +1652279945268 0.00026621491997502744 1631 +1652279949005 0.0003965417272411287 1679 +1652279952772 0.0006301697576418519 1727 +1652279956496 0.0004612059856299311 1775 +1652279960298 0.00025420173187740147 1823 +1652279964073 0.0003383368020877242 1871 +1652279967844 0.0003864422906190157 1919 +1652279971446 0.00022850626555737108 1967 +1652279975126 0.00036217307206243277 2015 +1652279978811 0.00026963415439240634 2063 +1652279982539 0.0003081822069361806 2111 +1652279986321 0.0008588534547016025 2159 +1652279990134 0.00025588576681911945 2207 +1652279993983 0.0012111393734812737 2255 +1652279997788 0.0005991487414576113 2303 +1652280001609 0.00027215175214223564 2351 +1652280005406 0.0002326530375285074 2399 +1652280009144 0.00034161240910179913 2447 +1652280012811 0.0002047311281785369 2495 +1652280016584 0.0002189291553804651 2543 +1652280020312 0.0008305492228828371 2591 +1652280024138 0.0003245118132326752 2639 +1652280027939 0.00043747638119384646 2687 +1652280031799 0.0003352710627950728 2735 +1652280035376 0.00017761132039595395 2783 +1652280039147 0.0002451064356137067 2831 +1652280043008 0.00045808948925696313 2879 +1652280046735 0.00016568403225392103 2927 +1652280050532 0.0002005849382840097 2975 +1652280054457 0.0004337916907388717 3023 +1652280058279 0.00043202925007790327 3071 +1652280062047 0.0006070298259146512 3119 +1652280065810 0.00125686835963279 3167 +1652280069610 0.0003458949795458466 3215 +1652280073394 0.00023582404537592083 3263 +1652280077179 0.00043027440551668406 3311 +1652280081010 0.00018619702314026654 3359 +1652280084783 0.0004644407599698752 3407 +1652280088575 0.00025989525602199137 3455 +1652280092368 0.000271034223260358 3503 +1652280096159 0.00015595319564454257 3551 +1652280099894 0.00018362083937972784 3599 +1652280103580 0.0001723619207041338 3647 +1652280107354 0.00026339536998420954 3695 +1652280111162 0.0022478969767689705 3743 +1652280114987 0.00043688094592653215 3791 +1652280118870 0.000253666948992759 3839 +1652280122629 0.0007027353858575225 3887 +1652280126475 0.00036985089536756277 3935 +1652280130198 0.00042630801908671856 3983 +1652280134070 0.00018512520182412118 4031 +1652280137810 0.0006041005253791809 4079 +1652280141642 0.0005739088519476354 4127 +1652280145396 0.00020085058349650353 4175 +1652280149183 0.00035394675796851516 4223 +1652280152927 0.001467306399717927 4271 +1652280156721 0.0003908797516487539 4319 +1652280160468 0.0001805946958484128 4367 +1652280164338 0.00035339672467671335 4415 +1652280168035 0.00040678223012946546 4463 +1652280171787 0.00018399060354568064 4511 +1652280175551 0.0005399276851676404 4559 +1652280179385 0.00029979468672536314 4607 +1652280183124 0.00013096585462335497 4655 +1652280186940 0.00021977233700454235 4703 +1652280190746 0.00024704227689653635 4751 +1652280194552 0.0003437100094743073 4799 diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/val/mae b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/val/mae new file mode 100644 index 0000000000000000000000000000000000000000..d5eef4a08fa2af2ec859a2a2b5a49376cbb685f1 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/val/mae @@ -0,0 +1,100 @@ +1652279820704 0.02607358619570732 47 +1652279824357 0.023356162011623383 95 +1652279828044 0.02111729420721531 143 +1652279831676 0.020395344123244286 191 +1652279835446 
0.02008357085287571 239 +1652279839271 0.019109351560473442 287 +1652279843079 0.020062454044818878 335 +1652279846830 0.018724771216511726 383 +1652279850614 0.01976374350488186 431 +1652279854390 0.018088672310113907 479 +1652279858241 0.02009824477136135 527 +1652279862079 0.017433589324355125 575 +1652279865967 0.018075214698910713 623 +1652279869769 0.016927972435951233 671 +1652279873629 0.01795283891260624 719 +1652279877456 0.017694905400276184 767 +1652279881261 0.017309539020061493 815 +1652279885044 0.017853353172540665 863 +1652279888829 0.017320722341537476 911 +1652279892527 0.017117636278271675 959 +1652279896237 0.01685478165745735 1007 +1652279900002 0.01946953497827053 1055 +1652279903680 0.019640492275357246 1103 +1652279907516 0.01877320185303688 1151 +1652279911251 0.016905883327126503 1199 +1652279915084 0.015234655700623989 1247 +1652279918947 0.019466156139969826 1295 +1652279922767 0.01577661745250225 1343 +1652279926483 0.021662525832653046 1391 +1652279930329 0.013201462104916573 1439 +1652279934121 0.03221268951892853 1487 +1652279937833 0.018861766904592514 1535 +1652279941623 0.012555716559290886 1583 +1652279945268 0.01243590097874403 1631 +1652279949005 0.015400874428451061 1679 +1652279952772 0.019847549498081207 1727 +1652279956496 0.01633538119494915 1775 +1652279960298 0.012520545162260532 1823 +1652279964073 0.015009582974016666 1871 +1652279967844 0.016114825382828712 1919 +1652279971446 0.011395555920898914 1967 +1652279975126 0.015877339988946915 2015 +1652279978811 0.012874774634838104 2063 +1652279982539 0.01374065037816763 2111 +1652279986321 0.02455970272421837 2159 +1652279990134 0.012964467518031597 2207 +1652279993983 0.030586877837777138 2255 +1652279997788 0.01970861665904522 2303 +1652280001609 0.012582853436470032 2351 +1652280005406 0.011895284987986088 2399 +1652280009144 0.014539946801960468 2447 +1652280012811 0.010513107292354107 2495 +1652280016584 0.011585352011024952 2543 +1652280020312 0.025230778381228447 2591 +1652280024138 0.01448488887399435 2639 +1652280027939 0.016850745305418968 2687 +1652280031799 0.014916492626070976 2735 +1652280035376 0.009772807359695435 2783 +1652280039147 0.012413904070854187 2831 +1652280043008 0.017842639237642288 2879 +1652280046735 0.009426051750779152 2927 +1652280050532 0.011247269809246063 2975 +1652280054457 0.017680322751402855 3023 +1652280058279 0.016938434913754463 3071 +1652280062047 0.020868053659796715 3119 +1652280065810 0.03198228031396866 3167 +1652280069610 0.01481217984110117 3215 +1652280073394 0.01217477023601532 3263 +1652280077179 0.0175019484013319 3311 +1652280081010 0.01059352234005928 3359 +1652280084783 0.017586909234523773 3407 +1652280088575 0.013158340007066727 3455 +1652280092368 0.013016403652727604 3503 +1652280096159 0.00941098015755415 3551 +1652280099894 0.010173797607421875 3599 +1652280103580 0.010034985840320587 3647 +1652280107354 0.01230852585285902 3695 +1652280111162 0.043913356959819794 3743 +1652280114987 0.01787818782031536 3791 +1652280118870 0.012860489077866077 3839 +1652280122629 0.02276649698615074 3887 +1652280126475 0.015199720859527588 3935 +1652280130198 0.016393927857279778 3983 +1652280134070 0.010693005286157131 4031 +1652280137810 0.02147362008690834 4079 +1652280141642 0.019695602357387543 4127 +1652280145396 0.010959284380078316 4175 +1652280149183 0.014944094233214855 4223 +1652280152927 0.03422271087765694 4271 +1652280156721 0.015736214816570282 4319 +1652280160468 0.009996251203119755 4367 +1652280164338 0.015099494718015194 4415 
+1652280168035 0.015847915783524513 4463 +1652280171787 0.010453518480062485 4511 +1652280175551 0.018864858895540237 4559 +1652280179385 0.01469734963029623 4607 +1652280183124 0.00869279820472002 4655 +1652280186940 0.011555126868188381 4703 +1652280190746 0.01307331956923008 4751 +1652280194552 0.01433614082634449 4799 diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/val/mae_best b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/val/mae_best new file mode 100644 index 0000000000000000000000000000000000000000..a1c5105ac57a49ae3d96747b7b95f72fcf3a829e --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/metrics/val/mae_best @@ -0,0 +1,100 @@ +1652279820704 0.02607358619570732 47 +1652279824357 0.023356162011623383 95 +1652279828044 0.02111729420721531 143 +1652279831676 0.020395344123244286 191 +1652279835446 0.02008357085287571 239 +1652279839271 0.019109351560473442 287 +1652279843079 0.019109351560473442 335 +1652279846830 0.018724771216511726 383 +1652279850614 0.018724771216511726 431 +1652279854390 0.018088672310113907 479 +1652279858241 0.018088672310113907 527 +1652279862079 0.017433589324355125 575 +1652279865967 0.017433589324355125 623 +1652279869769 0.016927972435951233 671 +1652279873629 0.016927972435951233 719 +1652279877456 0.016927972435951233 767 +1652279881261 0.016927972435951233 815 +1652279885044 0.016927972435951233 863 +1652279888829 0.016927972435951233 911 +1652279892527 0.016927972435951233 959 +1652279896237 0.01685478165745735 1007 +1652279900002 0.01685478165745735 1055 +1652279903680 0.01685478165745735 1103 +1652279907516 0.01685478165745735 1151 +1652279911251 0.01685478165745735 1199 +1652279915084 0.015234655700623989 1247 +1652279918947 0.015234655700623989 1295 +1652279922767 0.015234655700623989 1343 +1652279926483 0.015234655700623989 1391 +1652279930329 0.013201462104916573 1439 +1652279934121 0.013201462104916573 1487 +1652279937833 0.013201462104916573 1535 +1652279941623 0.012555716559290886 1583 +1652279945268 0.01243590097874403 1631 +1652279949005 0.01243590097874403 1679 +1652279952772 0.01243590097874403 1727 +1652279956496 0.01243590097874403 1775 +1652279960298 0.01243590097874403 1823 +1652279964073 0.01243590097874403 1871 +1652279967844 0.01243590097874403 1919 +1652279971446 0.011395555920898914 1967 +1652279975126 0.011395555920898914 2015 +1652279978811 0.011395555920898914 2063 +1652279982539 0.011395555920898914 2111 +1652279986321 0.011395555920898914 2159 +1652279990134 0.011395555920898914 2207 +1652279993983 0.011395555920898914 2255 +1652279997788 0.011395555920898914 2303 +1652280001609 0.011395555920898914 2351 +1652280005406 0.011395555920898914 2399 +1652280009144 0.011395555920898914 2447 +1652280012811 0.010513107292354107 2495 +1652280016584 0.010513107292354107 2543 +1652280020312 0.010513107292354107 2591 +1652280024138 0.010513107292354107 2639 +1652280027939 0.010513107292354107 2687 +1652280031799 0.010513107292354107 2735 +1652280035376 0.009772807359695435 2783 +1652280039147 0.009772807359695435 2831 +1652280043008 0.009772807359695435 2879 +1652280046735 0.009426051750779152 2927 +1652280050532 0.009426051750779152 2975 +1652280054457 0.009426051750779152 3023 +1652280058279 0.009426051750779152 3071 +1652280062047 0.009426051750779152 3119 +1652280065810 0.009426051750779152 3167 +1652280069610 0.009426051750779152 3215 +1652280073394 0.009426051750779152 3263 +1652280077179 0.009426051750779152 3311 +1652280081010 0.009426051750779152 3359 
+1652280084783 0.009426051750779152 3407 +1652280088575 0.009426051750779152 3455 +1652280092368 0.009426051750779152 3503 +1652280096159 0.00941098015755415 3551 +1652280099894 0.00941098015755415 3599 +1652280103580 0.00941098015755415 3647 +1652280107354 0.00941098015755415 3695 +1652280111162 0.00941098015755415 3743 +1652280114987 0.00941098015755415 3791 +1652280118870 0.00941098015755415 3839 +1652280122629 0.00941098015755415 3887 +1652280126475 0.00941098015755415 3935 +1652280130198 0.00941098015755415 3983 +1652280134070 0.00941098015755415 4031 +1652280137810 0.00941098015755415 4079 +1652280141642 0.00941098015755415 4127 +1652280145396 0.00941098015755415 4175 +1652280149183 0.00941098015755415 4223 +1652280152927 0.00941098015755415 4271 +1652280156721 0.00941098015755415 4319 +1652280160468 0.00941098015755415 4367 +1652280164338 0.00941098015755415 4415 +1652280168035 0.00941098015755415 4463 +1652280171787 0.00941098015755415 4511 +1652280175551 0.00941098015755415 4559 +1652280179385 0.00941098015755415 4607 +1652280183124 0.00869279820472002 4655 +1652280186940 0.00869279820472002 4703 +1652280190746 0.00869279820472002 4751 +1652280194552 0.00869279820472002 4799 diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/early_stopping/_target_ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/early_stopping/_target_ new file mode 100644 index 0000000000000000000000000000000000000000..8195616d84e953852598636569b94503b7bee4ed --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/early_stopping/_target_ @@ -0,0 +1 @@ +pytorch_lightning.callbacks.EarlyStopping \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/early_stopping/min_delta b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/early_stopping/min_delta new file mode 100644 index 0000000000000000000000000000000000000000..c227083464fb9af8955c90d2924774ee50abb547 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/early_stopping/min_delta @@ -0,0 +1 @@ +0 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/early_stopping/mode b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/early_stopping/mode new file mode 100644 index 0000000000000000000000000000000000000000..5e8bc13c8f96ac2839228b82df8d777025967134 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/early_stopping/mode @@ -0,0 +1 @@ +min \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/early_stopping/monitor b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/early_stopping/monitor new file mode 100644 index 0000000000000000000000000000000000000000..124fae9ca472dccb8951663f7c53de5830ab2eb1 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/early_stopping/monitor @@ -0,0 +1 @@ +val/mae \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/early_stopping/patience b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/early_stopping/patience new file mode 100644 index 0000000000000000000000000000000000000000..105d7d9ad3afc7bb78a0dec4d829880831605dfb --- /dev/null +++ 
b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/early_stopping/patience @@ -0,0 +1 @@ +100 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/_target_ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/_target_ new file mode 100644 index 0000000000000000000000000000000000000000..ad4a9f4df0538440c8cc70ee0595b4501fb92a0a --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/_target_ @@ -0,0 +1 @@ +pytorch_lightning.callbacks.ModelCheckpoint \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/auto_insert_metric_name b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/auto_insert_metric_name new file mode 100644 index 0000000000000000000000000000000000000000..c1f22fbc23bb6ee67824843d6685826db10313d3 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/auto_insert_metric_name @@ -0,0 +1 @@ +False \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/dirpath b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/dirpath new file mode 100644 index 0000000000000000000000000000000000000000..1a53453ab9aa9e36c85aba110789aa9440de1850 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/dirpath @@ -0,0 +1 @@ +checkpoints/ \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/filename b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/filename new file mode 100644 index 0000000000000000000000000000000000000000..ed842244beff29c7cb59c151bbe7e5afb36a24da --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/filename @@ -0,0 +1 @@ +epoch_{epoch:03d} \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/mode b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/mode new file mode 100644 index 0000000000000000000000000000000000000000..5e8bc13c8f96ac2839228b82df8d777025967134 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/mode @@ -0,0 +1 @@ +min \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/monitor b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/monitor new file mode 100644 index 0000000000000000000000000000000000000000..124fae9ca472dccb8951663f7c53de5830ab2eb1 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/monitor @@ -0,0 +1 @@ +val/mae \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/save_last b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/save_last new file mode 100644 index 0000000000000000000000000000000000000000..4791ed5559bd77f54e1520025768e2b368705876 --- /dev/null +++ 
b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/save_last @@ -0,0 +1 @@ +True \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/save_top_k b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/save_top_k new file mode 100644 index 0000000000000000000000000000000000000000..56a6051ca2b02b04ef92d5150c9ef600403cb1de --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/save_top_k @@ -0,0 +1 @@ +1 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/verbose b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/verbose new file mode 100644 index 0000000000000000000000000000000000000000..c1f22fbc23bb6ee67824843d6685826db10313d3 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_checkpoint/verbose @@ -0,0 +1 @@ +False \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_summary/_target_ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_summary/_target_ new file mode 100644 index 0000000000000000000000000000000000000000..c9accc51d650a565e61197d65f8ab38141cb0777 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_summary/_target_ @@ -0,0 +1 @@ +pytorch_lightning.callbacks.RichModelSummary \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_summary/max_depth b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_summary/max_depth new file mode 100644 index 0000000000000000000000000000000000000000..d7d17fcbef95ca19081c4cc5e97cbc592cc7081f --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/model_summary/max_depth @@ -0,0 +1 @@ +-1 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/rich_progress_bar/_target_ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/rich_progress_bar/_target_ new file mode 100644 index 0000000000000000000000000000000000000000..ecbec9ada7c5f20145ac8b2bf628af8173aa792d --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/callbacks/rich_progress_bar/_target_ @@ -0,0 +1 @@ +pytorch_lightning.callbacks.RichProgressBar \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/_target_ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/_target_ new file mode 100644 index 0000000000000000000000000000000000000000..c1d87c70e7c00cfb1adcb0e2891cb57d03d242e2 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/_target_ @@ -0,0 +1 @@ +src.datamodules.focus_datamodule.FocusDataModule \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/augmentation b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/augmentation new file mode 100644 index 0000000000000000000000000000000000000000..4791ed5559bd77f54e1520025768e2b368705876 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/augmentation @@ -0,0 +1 @@ 
+True \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/batch_size b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/batch_size new file mode 100644 index 0000000000000000000000000000000000000000..4b6f9c39e5c757bf387d465c53026b336dd8b96c --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/batch_size @@ -0,0 +1 @@ +64 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/csv_test_file b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/csv_test_file new file mode 100644 index 0000000000000000000000000000000000000000..69910129138d6d6714086863ffa853b10503abc6 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/csv_test_file @@ -0,0 +1 @@ +/usr/src/app/data/focus150/test_metadata.csv \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/csv_train_file b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/csv_train_file new file mode 100644 index 0000000000000000000000000000000000000000..6a397043cbb33603a2180ffea75f9c81916274ec --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/csv_train_file @@ -0,0 +1 @@ +/usr/src/app/data/focus150/train_metadata.csv \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/csv_val_file b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/csv_val_file new file mode 100644 index 0000000000000000000000000000000000000000..2a4ead115d4267a7d21708c8c9bb5c051bf4d3d4 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/csv_val_file @@ -0,0 +1 @@ +/usr/src/app/data/focus150/validation_metadata.csv \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/data_dir b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/data_dir new file mode 100644 index 0000000000000000000000000000000000000000..b815b619ac8a58b21b05404aa1ebc935578e6539 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/data_dir @@ -0,0 +1 @@ +/usr/src/app/data/focus150 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/num_workers b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/num_workers new file mode 100644 index 0000000000000000000000000000000000000000..c227083464fb9af8955c90d2924774ee50abb547 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/num_workers @@ -0,0 +1 @@ +0 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/pin_memory b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/pin_memory new file mode 100644 index 0000000000000000000000000000000000000000..c1f22fbc23bb6ee67824843d6685826db10313d3 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/datamodule/pin_memory @@ -0,0 +1 @@ +False \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/_target_ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/_target_ new file mode 100644 index 
0000000000000000000000000000000000000000..2facac8548c30bc3be1b3721fef7c1428c440965 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/_target_ @@ -0,0 +1 @@ +src.models.focus_conv_module.FocusConvReLULitModule \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/conv1_channels b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/conv1_channels new file mode 100644 index 0000000000000000000000000000000000000000..f11c82a4cb6cc2e8f3bdf52b5cdeaad4d5bb214e --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/conv1_channels @@ -0,0 +1 @@ +9 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/conv1_size b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/conv1_size new file mode 100644 index 0000000000000000000000000000000000000000..e440e5c842586965a7fb77deda2eca68612b1f53 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/conv1_size @@ -0,0 +1 @@ +3 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/conv2_channels b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/conv2_channels new file mode 100644 index 0000000000000000000000000000000000000000..62f9457511f879886bb7728c986fe10b0ece6bcb --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/conv2_channels @@ -0,0 +1 @@ +6 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/conv2_size b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/conv2_size new file mode 100644 index 0000000000000000000000000000000000000000..c7930257dfef505fd996e1d6f22f2f35149990d0 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/conv2_size @@ -0,0 +1 @@ +7 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/image_size b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/image_size new file mode 100644 index 0000000000000000000000000000000000000000..4701cc793111aafe3a1e6f48d82e5b8c8a38943e --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/image_size @@ -0,0 +1 @@ +150 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/lin1_size b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/lin1_size new file mode 100644 index 0000000000000000000000000000000000000000..1758dddccea2b3b02d21228a0d06a45a35c0d861 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/lin1_size @@ -0,0 +1 @@ +32 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/lin2_size b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/lin2_size new file mode 100644 index 0000000000000000000000000000000000000000..9cd72aa941214d4cb8522dda34eb12cf878e3e8b --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/lin2_size @@ -0,0 +1 @@ +72 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/lr b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/lr new file mode 100644 index 0000000000000000000000000000000000000000..eb5a1db868251c6a5c775b49efde91a5ec3205df 
--- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/lr @@ -0,0 +1 @@ +0.001 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/output_size b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/output_size new file mode 100644 index 0000000000000000000000000000000000000000..56a6051ca2b02b04ef92d5150c9ef600403cb1de --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/output_size @@ -0,0 +1 @@ +1 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/params/non_trainable b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/params/non_trainable new file mode 100644 index 0000000000000000000000000000000000000000..c227083464fb9af8955c90d2924774ee50abb547 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/params/non_trainable @@ -0,0 +1 @@ +0 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/params/total b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/params/total new file mode 100644 index 0000000000000000000000000000000000000000..839fb2f4d80e3d891188af1141fb74a15c9032e1 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/params/total @@ -0,0 +1 @@ +216231 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/params/trainable b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/params/trainable new file mode 100644 index 0000000000000000000000000000000000000000..839fb2f4d80e3d891188af1141fb74a15c9032e1 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/params/trainable @@ -0,0 +1 @@ +216231 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/pool_size b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/pool_size new file mode 100644 index 0000000000000000000000000000000000000000..d8263ee9860594d2806b0dfd1bfd17528b0ba2a4 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/pool_size @@ -0,0 +1 @@ +2 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/weight_decay b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/weight_decay new file mode 100644 index 0000000000000000000000000000000000000000..752f26d69b0a2d03af05585d38e9e9cdd94d78ea --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/model/weight_decay @@ -0,0 +1 @@ +0.0005 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/seed b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/seed new file mode 100644 index 0000000000000000000000000000000000000000..bd41cba781d8349272bf3eb92568285b411c027c --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/seed @@ -0,0 +1 @@ +12345 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/trainer/_target_ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/trainer/_target_ new file mode 100644 index 0000000000000000000000000000000000000000..6b23728d56f799f2fedf10e2776b353c78e47498 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/trainer/_target_ 
@@ -0,0 +1 @@ +pytorch_lightning.Trainer \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/trainer/gpus b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/trainer/gpus new file mode 100644 index 0000000000000000000000000000000000000000..56a6051ca2b02b04ef92d5150c9ef600403cb1de --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/trainer/gpus @@ -0,0 +1 @@ +1 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/trainer/max_epochs b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/trainer/max_epochs new file mode 100644 index 0000000000000000000000000000000000000000..105d7d9ad3afc7bb78a0dec4d829880831605dfb --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/trainer/max_epochs @@ -0,0 +1 @@ +100 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/trainer/min_epochs b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/trainer/min_epochs new file mode 100644 index 0000000000000000000000000000000000000000..56a6051ca2b02b04ef92d5150c9ef600403cb1de --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/trainer/min_epochs @@ -0,0 +1 @@ +1 \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/trainer/resume_from_checkpoint b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/trainer/resume_from_checkpoint new file mode 100644 index 0000000000000000000000000000000000000000..4af18322e32f3dd19579c80e26e4a306ad11e049 --- /dev/null +++ b/logs/mlflow/mlruns/11/fc9d4c9c610e4ae9a51aeec7e544684b/params/trainer/resume_from_checkpoint @@ -0,0 +1 @@ +None \ No newline at end of file diff --git a/logs/mlflow/mlruns/11/meta.yaml b/logs/mlflow/mlruns/11/meta.yaml new file mode 100644 index 0000000000000000000000000000000000000000..b2a96ae08c28163dbf5b6da46872f02f85c046f9 --- /dev/null +++ b/logs/mlflow/mlruns/11/meta.yaml @@ -0,0 +1,4 @@ +artifact_location: /usr/src/app/logs/mlflow/mlruns/11 +experiment_id: '11' +lifecycle_stage: active +name: focusConvReLU_MSE_150 diff --git a/src/models/focus_conv_module.py b/src/models/focus_conv_module.py index 3a17723ae951f812d07c98625d8687bb573d3e99..4e5d396d077fa4c3c52783b4b3bcf8945be17b20 100644 --- a/src/models/focus_conv_module.py +++ b/src/models/focus_conv_module.py @@ -44,6 +44,48 @@ class SimpleConvNet(nn.Module): return x +class SimpleConvReLUNet(nn.Module): + def __init__(self, hparams): + super().__init__() + + pool_size = hparams["pool_size"] # 2 + conv1_size = hparams["conv1_size"] # 5 + conv1_out = hparams["conv1_channels"] # 6 + conv2_size = hparams["conv2_size"] # 5 + conv2_out = hparams["conv2_channels"] # 16 + size_img = hparams["image_size"] # 150 + + lin1_size = hparams["lin1_size"] # 100 + lin2_size = hparams["lin2_size"] # 80 + output_size = hparams["output_size"] # 1 + + size_img -= conv1_size - 1 + size_img = int((size_img) / pool_size) + size_img -= conv2_size - 1 + size_img = int(size_img / pool_size) + + self.model = nn.Sequential( + nn.Conv2d(3, conv1_out, conv1_size), + nn.BatchNorm2d(conv1_out), + nn.ReLU(), + nn.MaxPool2d(pool_size, pool_size), + nn.Conv2d(conv1_out, conv2_out, conv2_size), + nn.BatchNorm2d(conv2_out), + nn.ReLU(), + nn.MaxPool2d(pool_size, pool_size), + nn.Flatten(), + nn.Linear(conv2_out * size_img * size_img, lin1_size), + nn.ReLU(), + nn.Linear(lin1_size, lin2_size), + nn.ReLU(), + 
nn.Linear(lin2_size, output_size), + ) + + def forward(self, x): + x = self.model(x) + return x + + class FocusConvLitModule(LightningModule): """ Example of LightningModule for MNIST classification. @@ -173,3 +215,134 @@ class FocusConvLitModule(LightningModule): lr=self.hparams.lr, weight_decay=self.hparams.weight_decay, ) + + +class FocusConvReLULitModule(LightningModule): + """ + LightningModule for focus height regression with a small convolutional network (SimpleConvReLUNet). + + A LightningModule organizes your PyTorch code into 5 sections: + - Computations (init). + - Train loop (training_step) + - Validation loop (validation_step) + - Test loop (test_step) + - Optimizers (configure_optimizers) + + Read the docs: + https://pytorch-lightning.readthedocs.io/en/latest/common/lightning_module.html + """ + + def __init__( + self, + image_size: int = 150, + pool_size: int = 2, + conv1_size: int = 5, + conv1_channels: int = 6, + conv2_size: int = 5, + conv2_channels: int = 16, + lin1_size: int = 100, + lin2_size: int = 80, + output_size: int = 1, + lr: float = 0.001, + weight_decay: float = 0.0005, + ): + super().__init__() + + # this line allows to access init params with 'self.hparams' attribute + # it also ensures init params will be stored in ckpt + self.save_hyperparameters(logger=False) + + self.model = SimpleConvReLUNet(hparams=self.hparams) + + # loss function + self.criterion = torch.nn.MSELoss() + + # use separate metric instance for train, val and test step + # to ensure a proper reduction over the epoch + self.train_mae = MeanAbsoluteError() + self.val_mae = MeanAbsoluteError() + self.test_mae = MeanAbsoluteError() + + # for logging best so far validation MAE + self.val_mae_best = MinMetric() + + def forward(self, x: torch.Tensor): + return self.model(x) + + def step(self, batch: Any): + x = batch["image"] + y = batch["focus_height"] + logits = self.forward(x) + loss = self.criterion(logits, y.unsqueeze(1)) + preds = torch.squeeze(logits) + return loss, preds, y + + def training_step(self, batch: Any, batch_idx: int): + loss, preds, targets = self.step(batch) + + # log train metrics + mae = self.train_mae(preds, targets) + self.log("train/loss", loss, on_step=False, on_epoch=True, prog_bar=False) + self.log("train/mae", mae, on_step=False, on_epoch=True, prog_bar=True) + + # we can return here dict with any tensors + # and then read it in some callback or in `training_epoch_end()` below + # remember to always return loss from `training_step()` or else + # backpropagation will fail! 
+ return {"loss": loss, "preds": preds, "targets": targets} + + def training_epoch_end(self, outputs: List[Any]): + # `outputs` is a list of dicts returned from `training_step()` + pass + + def validation_step(self, batch: Any, batch_idx: int): + loss, preds, targets = self.step(batch) + + # log val metrics + mae = self.val_mae(preds, targets) + self.log("val/loss", loss, on_step=False, on_epoch=True, prog_bar=False) + self.log("val/mae", mae, on_step=False, on_epoch=True, prog_bar=True) + + return {"loss": loss, "preds": preds, "targets": targets} + + def validation_epoch_end(self, outputs: List[Any]): + mae = self.val_mae.compute() # get val MAE from current epoch + self.val_mae_best.update(mae) + self.log( + "val/mae_best", self.val_mae_best.compute(), on_epoch=True, prog_bar=True + ) + + def test_step(self, batch: Any, batch_idx: int): + loss, preds, targets = self.step(batch) + + # log test metrics + mae = self.test_mae(preds, targets) + self.log("test/loss", loss, on_step=False, on_epoch=True) + self.log("test/mae", mae, on_step=False, on_epoch=True) + + return {"loss": loss, "preds": preds, "targets": targets} + + def test_epoch_end(self, outputs: List[Any]): + # `outputs` is a list of dicts returned from `test_step()` + pass + + def on_epoch_end(self): + # reset metrics at the end of every epoch + self.train_mae.reset() + self.test_mae.reset() + self.val_mae.reset() + + def configure_optimizers(self): + """Choose what optimizers and learning-rate schedulers to use in your optimization. + + Normally you'd need one. But in the case of GANs or similar you might + have multiple. + + See examples here: + https://pytorch-lightning.readthedocs.io/en/latest/common/lightning_module.html#configure-optimizers + """ + return torch.optim.Adam( + params=self.parameters(), + lr=self.hparams.lr, + weight_decay=self.hparams.weight_decay, + )
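
Note on the new SimpleConvReLUNet: the trickiest part of the constructor is the size_img arithmetic that determines how many inputs reach the first linear layer. Below is a minimal, standalone sanity-check sketch (not part of the diff above), assuming the hyperparameter values recorded under params/model for this run (image_size 150, pool_size 2, conv1_size 3, conv1_channels 9, conv2_size 7, conv2_channels 6, lin1_size 32, lin2_size 72, output_size 1); the batch size of 4 in the dummy forward pass is illustrative only.

import torch

from src.models.focus_conv_module import FocusConvReLULitModule

hparams = dict(
    image_size=150, pool_size=2,
    conv1_size=3, conv1_channels=9,
    conv2_size=7, conv2_channels=6,
    lin1_size=32, lin2_size=72, output_size=1,
)

# Mirror the size arithmetic in SimpleConvReLUNet.__init__: a valid (unpadded)
# convolution shrinks each spatial side by kernel_size - 1, and max-pooling
# then divides it by pool_size (floored).
size_img = hparams["image_size"]        # 150
size_img -= hparams["conv1_size"] - 1   # 148
size_img //= hparams["pool_size"]       # 74
size_img -= hparams["conv2_size"] - 1   # 68
size_img //= hparams["pool_size"]       # 34
flat_features = hparams["conv2_channels"] * size_img * size_img
print(flat_features)  # 6 * 34 * 34 = 6936 inputs to the first linear layer

# Dummy forward pass: 4 RGB crops of 150x150 pixels -> one focus value each.
model = FocusConvReLULitModule(**hparams, lr=0.001, weight_decay=0.0005)
model.eval()
x = torch.randn(4, 3, 150, 150)
with torch.no_grad():
    y = model(x)
print(y.shape)  # torch.Size([4, 1])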