asahi417 committed
Commit 69299ee
1 Parent(s): be9590d
training_scripts/finetune_t5.py CHANGED
@@ -196,6 +196,8 @@ def test(
     label = [i[dataset_column_label] for i in dataset_instance]
     data = [i[dataset_column_text] for i in dataset_instance]
     model_path = f'{output_dir}/best_model'
+    if not os.path.exists(model_path):
+        model_path = os.path.basename(model_name)
     prediction_file = f"{model_path}/prediction.{os.path.basename(dataset)}.{dataset_name}.txt"
     metric_file = f"{model_path}/metric.{os.path.basename(dataset)}.{dataset_name}.json"
     metric = get_metric(
@@ -221,12 +223,15 @@ def upload(
     logging.info('uploading to huggingface')
     output_dir = f'ckpt/{os.path.basename(model_name)}.{os.path.basename(dataset)}.{dataset_name}'
     args = {'use_auth_token': use_auth_token, 'organization': model_organization}
-    # model = load_model(model_name=f'{output_dir}/best_model')
-    # tokenizer = transformers.AutoTokenizer.from_pretrained(model_name, use_auth_token=use_auth_token)
-    # model.push_to_hub(model_alias, **args)
-    # tokenizer.push_to_hub(model_alias, **args)
+    model_path = f'{output_dir}/best_model'
+    if not os.path.exists(model_path):
+        model_path = os.path.basename(model_name)
+    model = load_model(model_name=model_path)
+    tokenizer = transformers.AutoTokenizer.from_pretrained(model_name, use_auth_token=use_auth_token)
+    model.push_to_hub(model_alias, **args)
+    tokenizer.push_to_hub(model_alias, **args)
     repo = Repository(model_alias, f'{model_organization}/{model_alias}')
-    for i in glob(f'{output_dir}/best_model/*'):
+    for i in glob(f'{model_path}/*'):
         if not os.path.exists(f'{model_alias}/{os.path.basename(i)}'):
             copyfile(i, f'{model_alias}/{os.path.basename(i)}')
     dataset_instance = load_dataset(dataset, dataset_name, split='validation', use_auth_token=use_auth_token)
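
Note: the substantive change to finetune_t5.py is a checkpoint-path fallback (applied in both test() and upload()) plus the re-enabled Hub push. Below is a minimal sketch of that fallback pattern, assuming the surrounding variables (output_dir, model_name, model_alias, model_organization, use_auth_token) are as in the diff; resolve_model_path is a hypothetical name used for illustration, and load_model is the helper defined elsewhere in this script.

```python
import os

def resolve_model_path(output_dir: str, model_name: str) -> str:
    # Hypothetical helper mirroring the commit's fallback logic:
    # prefer the locally fine-tuned checkpoint, otherwise fall back
    # to the base model (e.g. when the run used --skip-train).
    model_path = f'{output_dir}/best_model'
    if not os.path.exists(model_path):
        model_path = os.path.basename(model_name)
    return model_path

# Usage mirroring upload() above (load_model comes from finetune_t5.py):
# model = load_model(model_name=resolve_model_path(output_dir, model_name))
# tokenizer = transformers.AutoTokenizer.from_pretrained(model_name, use_auth_token=use_auth_token)
# model.push_to_hub(model_alias, use_auth_token=use_auth_token, organization=model_organization)
# tokenizer.push_to_hub(model_alias, use_auth_token=use_auth_token, organization=model_organization)
```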
training_scripts/script.sh CHANGED
@@ -1,24 +1,17 @@
 
 # training
 ## en_2022
-[Done] python finetune_t5.py --dataset-name en_2022 --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-en-2022 --model-organization cardiffnlp --use-auth-token
+python finetune_t5.py --dataset-name en_2022 --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-en-2022 --model-organization cardiffnlp --use-auth-token
+python finetune_t5.py -m cardiffnlp/mt5-small-tweet-topic-multi-en-2022 --dataset-name en --skip-train --skip-validate --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-en-2022 --model-organization cardiffnlp --use-auth-token
+python finetune_t5.py -m cardiffnlp/mt5-small-tweet-topic-multi-en-2022 --dataset-name ja --skip-train --skip-validate --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-en-2022 --model-organization cardiffnlp --use-auth-token
+python finetune_t5.py -m cardiffnlp/mt5-small-tweet-topic-multi-en-2022 --dataset-name gr --skip-train --skip-validate --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-en-2022 --model-organization cardiffnlp --use-auth-token
+python finetune_t5.py -m cardiffnlp/mt5-small-tweet-topic-multi-en-2022 --dataset-name es --skip-train --skip-validate --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-en-2022 --model-organization cardiffnlp --use-auth-token
+
 ## single
-python finetune_t5.py --dataset-name es --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-es --model-organization cardiffnlp --use-auth-token
-[done] python finetune_t5.py --dataset-name en --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-en --model-organization cardiffnlp --use-auth-token
+python finetune_t5.py --dataset-name en --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-en --model-organization cardiffnlp --use-auth-token
 python finetune_t5.py --dataset-name ja --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-ja --model-organization cardiffnlp --use-auth-token
 python finetune_t5.py --dataset-name gr --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-gr --model-organization cardiffnlp --use-auth-token
-# mix
-[Done] python finetune_t5.py --dataset-name mix --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-mix --model-organization cardiffnlp --use-auth-token
-
-
-# Zero-shot
-python finetune_t5.py -m cardiffnlp/mt5-small-tweet-topic-multi-en-2022 --dataset-name en --skip-train --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-en-2022 --model-organization cardiffnlp --use-auth-token
-python finetune_t5.py -m cardiffnlp/mt5-small-tweet-topic-multi-en-2022 --dataset-name en --skip-train --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-en-2022 --model-organization cardiffnlp --use-auth-token
-python finetune_t5.py -m cardiffnlp/mt5-small-tweet-topic-multi-en-2022 --dataset-name en --skip-train --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-en-2022 --model-organization cardiffnlp --use-auth-token
-python finetune_t5.py -m cardiffnlp/mt5-small-tweet-topic-multi-en-2022 --dataset-name en --skip-train --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-en-2022 --model-organization cardiffnlp --use-auth-token
-
-# mix
-python finetune_t5.py --dataset-name mix --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-mix --model-organization cardiffnlp --use-auth-token
+python finetune_t5.py --dataset-name es --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-es --model-organization cardiffnlp --use-auth-token
 
 
 # continuous
@@ -27,3 +20,14 @@ python finetune_t5.py -m cardiffnlp/mt5-small-tweet-topic-multi-en-2022 --datase
 python finetune_t5.py -m cardiffnlp/mt5-small-tweet-topic-multi-en-2022 --dataset-name ja --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-en-2022-ja --model-organization cardiffnlp --use-auth-token
 python finetune_t5.py -m cardiffnlp/mt5-small-tweet-topic-multi-en-2022 --dataset-name gr --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-en-2022-gr --model-organization cardiffnlp --use-auth-token
 
+# mix
+python finetune_t5.py --dataset-name mix --low-cpu-mem-usage --model-alias mt5-small-tweet-topic-multi-mix --model-organization cardiffnlp --use-auth-token
+
+
+
+
+
+# Zero-shot
+
+
+
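
Note: the zero-shot block now added at the top of script.sh evaluates the en_2022 model on each language split with --skip-train --skip-validate. A small sketch that regenerates those commands, using only the flags already shown in the diff above:

```python
# Regenerate the zero-shot evaluation commands from the script.sh diff.
base_model = "cardiffnlp/mt5-small-tweet-topic-multi-en-2022"
for lang in ("en", "ja", "gr", "es"):
    print(
        f"python finetune_t5.py -m {base_model} --dataset-name {lang} "
        "--skip-train --skip-validate --low-cpu-mem-usage "
        "--model-alias mt5-small-tweet-topic-multi-en-2022 "
        "--model-organization cardiffnlp --use-auth-token"
    )
```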