# File size: 1,701 Bytes
# commit: cb9e677
---
wandb_version: 1
data:
  desc: null
  value:
    data: ''
    shuffle: false
    instruct_data: /root/data/mol_instructions_train.jsonl
    eval_instruct_data: ''
    instruct:
      shuffle: true
      dynamic_chunk_fn_call: true
model_id_or_path:
  desc: null
  value: /root/mistral_models/7B-v0.3
run_dir:
  desc: null
  value: /root/mistral-finetune/runseed99
optim:
  desc: null
  value:
    lr: 5.0e-05
    weight_decay: 0.05
    pct_start: 0.05
seed:
  desc: null
  value: 99
num_microbatches:
  desc: null
  value: 1
seq_len:
  desc: null
  value: 32768
batch_size:
  desc: null
  value: 2
max_norm:
  desc: null
  value: 1.0
max_steps:
  desc: null
  value: 500
log_freq:
  desc: null
  value: 1
ckpt_freq:
  desc: null
  value: 100
ckpt_only_lora:
  desc: null
  value: false
no_ckpt:
  desc: null
  value: false
num_ckpt_keep:
  desc: null
  value: 3
eval_freq:
  desc: null
  value: 100
no_eval:
  desc: null
  value: true
checkpoint:
  desc: null
  value: true
world_size:
  desc: null
  value: 1
wandb:
  desc: null
  value:
    project: CHEMISTral7b-ft
    offline: false
    # NOTE(review): plaintext W&B API key committed to the file — rotate it
    # and supply it via the WANDB_API_KEY environment variable instead.
    key: aaf77f83a4e316f6a8b47fa975ab6b5e73c7c8df
    run_name: runseed99
mlflow:
  desc: null
  value:
    tracking_uri: null
    experiment_name: null
lora:
  desc: null
  value:
    enable: true
    rank: 64
    dropout: 0.0
    scaling: 2.0
_wandb:
  desc: null
  value:
    python_version: 3.10.14
    cli_version: 0.17.0
    framework: torch
    is_jupyter_run: false
    is_kaggle_kernel: false
    start_time: 1716687994
    t:
      1:
        - 1
        - 55
      2:
        - 1
        - 55
      3:
        - 13
        - 16
        - 23
      4: 3.10.14
      5: 0.17.0
      8:
        - 5
      13: linux-x86_64