mohamedsaeed823 committed on
Commit
9c29448
1 Parent(s): fbd3316

Training in progress, epoch 0

config.json ADDED
@@ -0,0 +1,109 @@
+ {
+   "_name_or_path": "apple/mobilevit-small",
+   "architectures": [
+     "MobileViTForImageClassification"
+   ],
+   "aspp_dropout_prob": 0.1,
+   "aspp_out_channels": 256,
+   "atrous_rates": [
+     6,
+     12,
+     18
+   ],
+   "attention_probs_dropout_prob": 0.0,
+   "classifier_dropout_prob": 0.1,
+   "conv_kernel_size": 3,
+   "expand_ratio": 4.0,
+   "hidden_act": "silu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_sizes": [
+     144,
+     192,
+     240
+   ],
+   "id2label": {
+     "0": "ain",
+     "1": "aleff",
+     "10": "haa",
+     "11": "jeem",
+     "12": "kaaf",
+     "13": "khaa",
+     "14": "laam",
+     "15": "meem",
+     "16": "none",
+     "17": "nun",
+     "18": "ra",
+     "19": "saad",
+     "2": "bb",
+     "20": "seen",
+     "21": "sheen",
+     "22": "ta",
+     "23": "taa",
+     "24": "thaa",
+     "25": "thal",
+     "26": "waw",
+     "27": "ya",
+     "28": "zay",
+     "3": "dal",
+     "4": "dha",
+     "5": "dhad",
+     "6": "fa",
+     "7": "gaaf",
+     "8": "ghain",
+     "9": "ha"
+   },
+   "image_size": 256,
+   "initializer_range": 0.02,
+   "label2id": {
+     "ain": "0",
+     "aleff": "1",
+     "bb": "2",
+     "dal": "3",
+     "dha": "4",
+     "dhad": "5",
+     "fa": "6",
+     "gaaf": "7",
+     "ghain": "8",
+     "ha": "9",
+     "haa": "10",
+     "jeem": "11",
+     "kaaf": "12",
+     "khaa": "13",
+     "laam": "14",
+     "meem": "15",
+     "none": "16",
+     "nun": "17",
+     "ra": "18",
+     "saad": "19",
+     "seen": "20",
+     "sheen": "21",
+     "ta": "22",
+     "taa": "23",
+     "thaa": "24",
+     "thal": "25",
+     "waw": "26",
+     "ya": "27",
+     "zay": "28"
+   },
+   "layer_norm_eps": 1e-05,
+   "mlp_ratio": 2.0,
+   "model_type": "mobilevit",
+   "neck_hidden_sizes": [
+     16,
+     32,
+     64,
+     96,
+     128,
+     160,
+     640
+   ],
+   "num_attention_heads": 4,
+   "num_channels": 3,
+   "output_stride": 32,
+   "patch_size": 2,
+   "problem_type": "single_label_classification",
+   "qkv_bias": true,
+   "semantic_loss_ignore_index": 255,
+   "torch_dtype": "float32",
+   "transformers_version": "4.41.2"
+ }
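
The config above describes a MobileViT-small image classifier fine-tuned for the 29 Arabic sign-language letter classes listed in id2label. As a minimal usage sketch, assuming the files in this commit are available in a local directory (the path ./mobilevit-arsl-checkpoint and the sample image are placeholders, not part of this commit):

from PIL import Image
from transformers import MobileViTForImageClassification, MobileViTImageProcessor

# Placeholder path: a local copy of the files added in this commit.
checkpoint_dir = "./mobilevit-arsl-checkpoint"
processor = MobileViTImageProcessor.from_pretrained(checkpoint_dir)
model = MobileViTForImageClassification.from_pretrained(checkpoint_dir)

image = Image.open("sample_sign.jpg")                  # placeholder input image
inputs = processor(images=image, return_tensors="pt")
logits = model(**inputs).logits                        # shape (1, 29), one score per label
predicted_id = logits.argmax(-1).item()
print(model.config.id2label[predicted_id])             # e.g. "aleff"

The printed label is one of the 29 names mapped in id2label/label2id above.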
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9555f7f10830f888a435bd707d13c5a0526e58344e613770e795a03a6c95e64c
+ size 19920796
preprocessor_config.json ADDED
@@ -0,0 +1,32 @@
+ {
+   "_valid_processor_keys": [
+     "images",
+     "segmentation_maps",
+     "do_resize",
+     "size",
+     "resample",
+     "do_rescale",
+     "rescale_factor",
+     "do_center_crop",
+     "crop_size",
+     "do_flip_channel_order",
+     "return_tensors",
+     "data_format",
+     "input_data_format"
+   ],
+   "crop_size": {
+     "height": 256,
+     "width": 256
+   },
+   "do_center_crop": true,
+   "do_flip_channel_order": true,
+   "do_flip_channels": true,
+   "do_rescale": true,
+   "do_resize": true,
+   "image_processor_type": "MobileViTImageProcessor",
+   "resample": 2,
+   "rescale_factor": 0.00392156862745098,
+   "size": {
+     "shortest_edge": 288
+   }
+ }
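
Per this preprocessor config, MobileViTImageProcessor resizes the shortest edge to 288 with bilinear resampling (resample=2 is PIL's BILINEAR), center-crops to 256x256, rescales pixel values by 1/255, and flips the channel order from RGB to BGR; no mean/std normalization is applied. A rough manual equivalent, for illustration only (torchvision and the sample image path are assumptions, not part of this commit):

from PIL import Image
from torchvision import transforms

# Approximates MobileViTImageProcessor with the settings above.
manual = transforms.Compose([
    transforms.Resize(288, interpolation=transforms.InterpolationMode.BILINEAR),  # size.shortest_edge = 288
    transforms.CenterCrop(256),  # crop_size = 256 x 256
    transforms.ToTensor(),       # rescale_factor = 1/255
])

image = Image.open("sample_sign.jpg").convert("RGB")   # placeholder image
pixel_values = manual(image).flip(0).unsqueeze(0)      # flip channels RGB -> BGR, add batch dim
print(pixel_values.shape)                              # torch.Size([1, 3, 256, 256])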
runs/Jun27_19-55-15_c8024b46ff04/events.out.tfevents.1719518120.c8024b46ff04.1869.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:52368af54bcec2af0e71b817902c0091d18fe7ee373d3582d00ce7336ead2add
+ size 7837
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:58ed18f0e28e11afcdcc48be081ab09686da816374f98aa2c99146e10b4dd968
+ size 5112