aghorbani committed on
Commit
cdc3398
1 Parent(s): 8c071be

Upload cfg.yaml

Files changed (1)
  1. cfg.yaml +95 -0
cfg.yaml ADDED
@@ -0,0 +1,95 @@
+ architecture:
+     backbone_dtype: int4
+     force_embedding_gradients: false
+     gradient_checkpointing: true
+     intermediate_dropout: 0.0
+     pretrained: true
+     pretrained_weights: ''
+ augmentation:
+     neftune_noise_alpha: 0.0
+     random_parent_probability: 0.0
+     skip_parent_probability: 0.0
+     token_mask_probability: 0.0
+ dataset:
+     add_eos_token_to_answer: false
+     add_eos_token_to_prompt: false
+     add_eos_token_to_system: false
+     answer_column: Category_ID
+     chatbot_author: H2O.ai
+     chatbot_name: h2oGPT
+     data_sample: 1.0
+     data_sample_choice:
+     - Train
+     - Validation
+     limit_chained_samples: false
+     mask_prompt_labels: true
+     num_classes: 11
+     parent_id_column: None
+     personalize: false
+     prompt_column:
+     - Description
+     system_column: None
+     text_answer_separator: ''
+     text_prompt_start: ''
+     text_system_start: ''
+     train_dataframe: /home/llmstudio/mount/data/user/bank_transactions_categories_v2/bank_transactions_categories_train.csv
+     validation_dataframe: /home/llmstudio/mount/data/user/bank_transactions_categories_v2/bank_transactions_categories_test.csv
+     validation_size: 0.01
+     validation_strategy: custom
+ environment:
+     compile_model: false
+     deepspeed_reduce_bucket_size: 1000000
+     deepspeed_stage3_param_persistence_threshold: 1000000
+     deepspeed_stage3_prefetch_bucket_size: 1000000
+     find_unused_parameters: false
+     gpus:
+     - '0'
+     huggingface_branch: main
+     mixed_precision: true
+     number_of_workers: 8
+     seed: -1
+     trust_remote_code: true
+     use_deepspeed: false
+ experiment_name: bank_tx_cat_opt_125m
+ llm_backbone: facebook/opt-125m
+ logging:
+     logger: None
+     neptune_project: ''
+ output_directory: /home/llmstudio/mount/output/user/bank_tx_cat_opt_125m/
+ prediction:
+     batch_size_inference: 0
+     metric: LogLoss
+ problem_type: text_causal_classification_modeling
+ tokenizer:
+     add_prefix_space: false
+     add_prompt_answer_tokens: false
+     max_length: 512
+     max_length_answer: 256
+     max_length_prompt: 256
+     padding_quantile: 1.0
+     use_fast: true
+ training:
+     batch_size: 2
+     differential_learning_rate: 1.0e-05
+     differential_learning_rate_layers:
+     - classification_head
+     drop_last_batch: true
+     epochs: 1
+     evaluate_before_training: false
+     evaluation_epochs: 1.0
+     grad_accumulation: 1
+     gradient_clip: 0.0
+     learning_rate: 0.0001
+     lora: true
+     lora_alpha: 16
+     lora_dropout: 0.05
+     lora_r: 4
+     lora_target_modules: ''
+     loss_function: CrossEntropyLoss
+     optimizer: AdamW
+     save_best_checkpoint: false
+     schedule: Cosine
+     train_validation_data: false
+     use_flash_attention_2: false
+     warmup_epochs: 0.0
+     weight_decay: 0.0
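
Not part of the commit itself, but as a reading aid: the file above looks like an H2O LLM Studio experiment config (LoRA fine-tuning of facebook/opt-125m for an 11-class causal text classification task). A minimal sketch of how one might load and inspect the uploaded cfg.yaml with PyYAML, assuming the file has been downloaded to the working directory as "cfg.yaml":

    # Hypothetical inspection snippet; file path and printed keys are assumptions,
    # the values shown in comments are taken from the config above.
    import yaml

    with open("cfg.yaml") as f:
        cfg = yaml.safe_load(f)

    print(cfg["llm_backbone"])                # facebook/opt-125m
    print(cfg["problem_type"])                # text_causal_classification_modeling
    print(cfg["dataset"]["num_classes"])      # 11
    print(cfg["training"]["lora"], cfg["training"]["lora_r"])  # True 4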