chuubjak committed
Commit 0b4dd13
1 Parent(s): 8839fc2

Saving train state of step 40

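This commit stores a full Accelerate train state rather than a bare model export: the `distillation.log` diff below shows `accelerate.checkpointing` writing model weights, optimizer, scheduler, per-dataloader sampler states, and RNG states into `checkpoint-40-epoch-4/`. As a rough sketch of how such a state is typically produced (assumed usage; this is not the repository's actual training script, and the toy model/optimizer are placeholders):

```python
# Minimal sketch of saving a train state with Accelerate (assumed, not the
# repo's actual script). Accelerator.save_state() writes model.safetensors,
# optimizer.bin, scheduler.bin, sampler*.bin and random_states_0.pkl --
# the same files this commit updates under checkpoint-40-epoch-4/.
import torch
from accelerate import Accelerator

model = torch.nn.Linear(4, 4)                      # stand-in for the student model
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-4)

accelerator = Accelerator()
model, optimizer = accelerator.prepare(model, optimizer)

step, epoch = 40, 4                                # from the checkpoint directory name
accelerator.save_state(f"output/checkpoint-{step}-epoch-{epoch}")
```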
checkpoint-40-epoch-4/model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b029dd2ccb30e92e6066f9eddda94088be2d59812663b42f260d6754899a0709
+oid sha256:344bd2afb9c8d495d0a14fd82190591ea06f1c11c136d2c9be4ffd6dc0edd042
 size 1711916448
checkpoint-40-epoch-4/optimizer.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:62631a72759f0738d2f1477d7011ba2a2f6afb95e84a8366b8b2f28e60c0d65a
+oid sha256:e82a26e4f6b02ddf89b012fe1c02b38e81153d660a31df46c01cb555738ee3d4
 size 962406586
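Both weight files are Git LFS pointers, so the diff only swaps the `oid sha256:` content hash; the binaries themselves live in LFS storage, and the unchanged `size` fields show the new payloads are byte-for-byte the same length. A small sketch for checking a downloaded file against its pointer hash (the local path is illustrative):

```python
# Sketch: verify a fetched file against the LFS pointer's oid above.
import hashlib

def lfs_sha256(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file through SHA-256, as git-lfs does for its oid."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# For a correct download this should print the new pointer oid:
# 344bd2afb9c8d495d0a14fd82190591ea06f1c11c136d2c9be4ffd6dc0edd042
print(lfs_sha256("checkpoint-40-epoch-4/model.safetensors"))  # path assumed local
```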
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "biodatlab/distill-whisper-th-medium",
+  "_name_or_path": "distil-whisper/training/distil-whisper-th-medium-4dec",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
   "apply_spec_augment": true,
@@ -43,7 +43,7 @@
   "num_mel_bins": 80,
   "pad_token_id": 50257,
   "scale_embedding": false,
-  "torch_dtype": "float32",
+  "torch_dtype": "float16",
   "transformers_version": "4.38.1",
   "use_cache": true,
   "use_weighted_layer_sum": false,
distillation.log CHANGED
@@ -6,8 +6,8 @@ The following values were not passed to `accelerate launch` and had defaults use
 To avoid this warning pass in values for each of the problematic parameters or run `accelerate config`.
 /home/hzchng/ai-voicebot/.venv/lib/python3.10/site-packages/accelerate/accelerator.py:401: UserWarning: `log_with=[]` was passed but no supported trackers are currently installed.
   warnings.warn(f"`log_with={log_with}` was passed but no supported trackers are currently installed.")
-05/31/2024 01:36:41 - WARNING - __main__ - Process rank: 0, device: cuda:0, n_gpu: 1, distributed training: False, 16-bits training: False
-05/31/2024 01:36:41 - INFO - __main__ - Training/evaluation parameters DistillationTrainingArguments(
+05/31/2024 02:23:34 - WARNING - __main__ - Process rank: 0, device: cuda:0, n_gpu: 1, distributed training: False, 16-bits training: False
+05/31/2024 02:23:34 - INFO - __main__ - Training/evaluation parameters DistillationTrainingArguments(
 _n_gpu=1,
 accelerator_config={'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True},
 adafactor=False,
@@ -80,7 +80,7 @@ local_rank=0,
 log_level=passive,
 log_level_replica=warning,
 log_on_each_node=True,
-logging_dir=/home/hzchng/ai-voicebot/lib/whisper/output/runs/May31_01-36-40_ai-voicebot-training,
+logging_dir=/home/hzchng/ai-voicebot/lib/whisper/output/runs/May31_02-23-33_ai-voicebot-training,
 logging_first_step=False,
 logging_nan_inf_filter=True,
 logging_steps=0.1,
@@ -138,16 +138,7 @@ warmup_ratio=0.1,
 warmup_steps=10,
 weight_decay=0.0,
 )
-
-
-
-
-
-
-
-
-
-
+
 /home/hzchng/ai-voicebot/.venv/lib/python3.10/site-packages/huggingface_hub/file_download.py:1132: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. If you want to force a new download, use `force_download=True`.
   warnings.warn(
 loading configuration file config.json from cache at /home/hzchng/.cache/huggingface/hub/models--biodatlab--distill-whisper-th-medium/snapshots/e756107875a3905f3c4535cdca0d499cb56d2232/config.json
@@ -1021,7 +1012,7 @@ Generate config GenerationConfig {
 }
 }
 
-05/31/2024 01:36:54 - INFO - __main__ - Disabling gradient checkpointing in the decoder since it's incompatible with `freeze_embed_positions`.
+05/31/2024 02:23:45 - INFO - __main__ - Disabling gradient checkpointing in the decoder since it's incompatible with `freeze_embed_positions`.
 Feature extractor saved in /home/hzchng/ai-voicebot/lib/whisper/output/preprocessor_config.json
 tokenizer config file saved in /home/hzchng/ai-voicebot/lib/whisper/output/tokenizer_config.json
 Special tokens file saved in /home/hzchng/ai-voicebot/lib/whisper/output/special_tokens_map.json
@@ -2684,600 +2675,619 @@ Processor WhisperProcessor:
 "processor_class": "WhisperProcessor"
 }
 
-05/31/2024 01:36:58 - INFO - __main__ - eval_steps is not set, evaluating at the end of each epoch
-05/31/2024 01:36:59 - INFO - __main__ - ***** Running training *****
-05/31/2024 01:36:59 - INFO - __main__ - Num examples = 320
-05/31/2024 01:36:59 - INFO - __main__ - Num epochs = 5
-05/31/2024 01:36:59 - INFO - __main__ - Instantaneous batch size per device = 8
-05/31/2024 01:36:59 - INFO - __main__ - Gradient accumulation steps = 1
-05/31/2024 01:36:59 - INFO - __main__ - Total train batch size (w. parallel & distributed) = 8
-05/31/2024 01:36:59 - INFO - __main__ - Total optimization steps = 40
-
-To disable this warning, you can either:
-  - Avoid using `tokenizers` before the fork if possible
-  - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
-`use_cache = True` is incompatible with gradient checkpointing. Setting `use_cache = False`...
-
-To disable this warning, you can either:
-  - Avoid using `tokenizers` before the fork if possible
-  - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
-
-To disable this warning, you can either:
-  - Avoid using `tokenizers` before the fork if possible
-  - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
-
-To disable this warning, you can either:
-  - Avoid using `tokenizers` before the fork if possible
-  - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
-
-To disable this warning, you can either:
-  - Avoid using `tokenizers` before the fork if possible
-  - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
-
-05/31/2024 01:37:47 - WARNING - accelerate.utils.other - Removed shared tensor {'proj_out.weight'} while saving. This should be OK, but check by verifying that you don't receive any warning while reloading
-05/31/2024 01:37:50 - INFO - accelerate.checkpointing - Model weights saved in /home/hzchng/ai-voicebot/lib/whisper/output/checkpoint-40-epoch-4/model.safetensors
-05/31/2024 01:37:50 - WARNING - accelerate.utils.other - Removed shared tensor {'proj_out.weight'} while saving. This should be OK, but check by verifying that you don't receive any warning while reloading
-05/31/2024 01:37:55 - INFO - accelerate.checkpointing - Model weights saved in /home/hzchng/ai-voicebot/lib/whisper/output/checkpoint-40-epoch-4/model_1.safetensors
-05/31/2024 01:37:58 - INFO - accelerate.checkpointing - Optimizer state saved in /home/hzchng/ai-voicebot/lib/whisper/output/checkpoint-40-epoch-4/optimizer.bin
-05/31/2024 01:37:58 - INFO - accelerate.checkpointing - Scheduler state saved in /home/hzchng/ai-voicebot/lib/whisper/output/checkpoint-40-epoch-4/scheduler.bin
-05/31/2024 01:37:58 - INFO - accelerate.checkpointing - Sampler state for dataloader 0 saved in /home/hzchng/ai-voicebot/lib/whisper/output/checkpoint-40-epoch-4/sampler.bin
-05/31/2024 01:37:58 - INFO - accelerate.checkpointing - Sampler state for dataloader 1 saved in /home/hzchng/ai-voicebot/lib/whisper/output/checkpoint-40-epoch-4/sampler_1.bin
-05/31/2024 01:37:58 - INFO - accelerate.checkpointing - Sampler state for dataloader 2 saved in /home/hzchng/ai-voicebot/lib/whisper/output/checkpoint-40-epoch-4/sampler_2.bin
-05/31/2024 01:37:58 - INFO - accelerate.checkpointing - Sampler state for dataloader 3 saved in /home/hzchng/ai-voicebot/lib/whisper/output/checkpoint-40-epoch-4/sampler_3.bin
-05/31/2024 01:37:58 - INFO - accelerate.checkpointing - Sampler state for dataloader 4 saved in /home/hzchng/ai-voicebot/lib/whisper/output/checkpoint-40-epoch-4/sampler_4.bin
-05/31/2024 01:37:58 - INFO - accelerate.checkpointing - Random states saved in /home/hzchng/ai-voicebot/lib/whisper/output/checkpoint-40-epoch-4/random_states_0.pkl
+05/31/2024 02:23:47 - INFO - __main__ - eval_steps is not set, evaluating at the end of each epoch
+05/31/2024 02:23:48 - INFO - __main__ - ***** Running training *****
+05/31/2024 02:23:48 - INFO - __main__ - Num examples = 320
+05/31/2024 02:23:48 - INFO - __main__ - Num epochs = 5
+05/31/2024 02:23:48 - INFO - __main__ - Instantaneous batch size per device = 8
+05/31/2024 02:23:48 - INFO - __main__ - Gradient accumulation steps = 1
+05/31/2024 02:23:48 - INFO - __main__ - Total train batch size (w. parallel & distributed) = 8
+05/31/2024 02:23:48 - INFO - __main__ - Total optimization steps = 40
+
+05/31/2024 02:24:35 - WARNING - accelerate.utils.other - Removed shared tensor {'proj_out.weight'} while saving. This should be OK, but check by verifying that you don't receive any warning while reloading
+05/31/2024 02:24:45 - INFO - accelerate.checkpointing - Model weights saved in /home/hzchng/ai-voicebot/lib/whisper/output/checkpoint-40-epoch-4/model.safetensors
+05/31/2024 02:24:45 - WARNING - accelerate.utils.other - Removed shared tensor {'proj_out.weight'} while saving. This should be OK, but check by verifying that you don't receive any warning while reloading
+05/31/2024 02:24:57 - INFO - accelerate.checkpointing - Model weights saved in /home/hzchng/ai-voicebot/lib/whisper/output/checkpoint-40-epoch-4/model_1.safetensors
+05/31/2024 02:25:03 - INFO - accelerate.checkpointing - Optimizer state saved in /home/hzchng/ai-voicebot/lib/whisper/output/checkpoint-40-epoch-4/optimizer.bin
+05/31/2024 02:25:03 - INFO - accelerate.checkpointing - Scheduler state saved in /home/hzchng/ai-voicebot/lib/whisper/output/checkpoint-40-epoch-4/scheduler.bin
+05/31/2024 02:25:03 - INFO - accelerate.checkpointing - Sampler state for dataloader 0 saved in /home/hzchng/ai-voicebot/lib/whisper/output/checkpoint-40-epoch-4/sampler.bin
+05/31/2024 02:25:03 - INFO - accelerate.checkpointing - Sampler state for dataloader 1 saved in /home/hzchng/ai-voicebot/lib/whisper/output/checkpoint-40-epoch-4/sampler_1.bin
+05/31/2024 02:25:03 - INFO - accelerate.checkpointing - Sampler state for dataloader 2 saved in /home/hzchng/ai-voicebot/lib/whisper/output/checkpoint-40-epoch-4/sampler_2.bin
+05/31/2024 02:25:03 - INFO - accelerate.checkpointing - Sampler state for dataloader 3 saved in /home/hzchng/ai-voicebot/lib/whisper/output/checkpoint-40-epoch-4/sampler_3.bin
+05/31/2024 02:25:03 - INFO - accelerate.checkpointing - Sampler state for dataloader 4 saved in /home/hzchng/ai-voicebot/lib/whisper/output/checkpoint-40-epoch-4/sampler_4.bin
+05/31/2024 02:25:03 - INFO - accelerate.checkpointing - Random states saved in /home/hzchng/ai-voicebot/lib/whisper/output/checkpoint-40-epoch-4/random_states_0.pkl
 [... several hundred blank progress-bar lines added/removed; elided ...]
-Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.
-Non-default generation parameters: {'max_length': 448, 'begin_suppress_tokens': [220, 50257]}
-Configuration saved in /home/hzchng/ai-voicebot/lib/whisper/output/config.json
-Configuration saved in /home/hzchng/ai-voicebot/lib/whisper/output/generation_config.json
-Model weights saved in /home/hzchng/ai-voicebot/lib/whisper/output/model.safetensors
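One way to act on the repeated `Removed shared tensor {'proj_out.weight'}` warning above is to reload the saved weights and confirm the tensor set looks as expected: in Whisper, `proj_out.weight` is tied to the decoder token embedding, so it is dropped from the safetensors file at save time and re-tied when the model is reconstructed. A minimal check, assuming the checkpoint has been fetched locally:

```python
# Minimal sketch: inspect the saved state dict (path assumed local).
from safetensors.torch import load_file

state = load_file("checkpoint-40-epoch-4/model.safetensors")
# The shared tensor was dropped at save time, per the log warning:
print("proj_out.weight" in state)                    # expected: False
# Its source, the tied decoder embedding, should still be present:
print("model.decoder.embed_tokens.weight" in state)  # expected: True
print(len(state), "tensors loaded")
```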