Commit 61b01b6 (parent: e0e7dc4), committed by Leommm-byte

added the fp16 diffusers format
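model_index.json (below) identifies this checkpoint as an AuraFlowPipeline exported in the diffusers multifolder layout, and the commit message notes the weights are fp16. A minimal, illustrative way to consume it — the repository id is a placeholder, not part of this commit; assumes diffusers >= 0.30.0 and a CUDA device:

import torch
from diffusers import AuraFlowPipeline

# Load the fp16 diffusers-format checkpoint added by this commit.
# "user/auraflow-fp16" is a placeholder repo id.
pipe = AuraFlowPipeline.from_pretrained(
    "user/auraflow-fp16",
    torch_dtype=torch.float16,
).to("cuda")

image = pipe(
    "a watercolor lighthouse at dawn",
    num_inference_steps=25,
).images[0]
image.save("out.png")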
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+*.json filter=lfs diff=lfs merge=lfs -text
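With the new *.json rule, every JSON config in this commit is stored as a Git LFS pointer (the version/oid/size stanzas in the hunks below) rather than as plain text. Downloads through the Hub still resolve the pointers to the real files; a minimal check, with a placeholder repo id:

import json
from huggingface_hub import hf_hub_download

# hf_hub_download resolves the LFS pointer and returns the actual JSON file.
path = hf_hub_download(repo_id="user/auraflow-fp16", filename="model_index.json")
with open(path) as f:
    print(json.load(f)["_class_name"])  # expected: "AuraFlowPipeline"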
model_index.json CHANGED
@@ -1,24 +1,3 @@
-{
-  "_class_name": "AuraFlowPipeline",
-  "_diffusers_version": "0.30.0.dev0",
-  "scheduler": [
-    "diffusers",
-    "FlowMatchEulerDiscreteScheduler"
-  ],
-  "text_encoder": [
-    "transformers",
-    "UMT5EncoderModel"
-  ],
-  "tokenizer": [
-    "transformers",
-    "LlamaTokenizerFast"
-  ],
-  "transformer": [
-    "diffusers",
-    "AuraFlowTransformer2DModel"
-  ],
-  "vae": [
-    "diffusers",
-    "AutoencoderKL"
-  ]
-}
+version https://git-lfs.github.com/spec/v1
+oid sha256:9aaf3ef19004133b0af46cc4d4d8abcfed6675dc454e4decf3306ae2e8100be9
+size 442
scheduler/scheduler_config.json CHANGED
@@ -1,6 +1,3 @@
-{
-  "_class_name": "FlowMatchEulerDiscreteScheduler",
-  "_diffusers_version": "0.30.0.dev0",
-  "num_train_timesteps": 1000,
-  "shift": 1.73
-}
+version https://git-lfs.github.com/spec/v1
+oid sha256:b8b39187f682f8f6b89f790657fac7c56ca3b12404b6b1b73757b0b6896afe36
+size 148
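For reference, the scheduler config that the pointer above now stands in for declares a FlowMatchEulerDiscreteScheduler with num_train_timesteps=1000 and shift=1.73; it can be rebuilt directly from those two values (sketch, assuming diffusers >= 0.30):

from diffusers import FlowMatchEulerDiscreteScheduler

# Reconstruct the scheduler from the values shown in the removed JSON above.
scheduler = FlowMatchEulerDiscreteScheduler(num_train_timesteps=1000, shift=1.73)
print(scheduler.config.shift)  # 1.73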
text_encoder/config.json CHANGED
@@ -1,34 +1,3 @@
-{
-  "_name_or_path": "EleutherAI/pile-t5-xl",
-  "architectures": [
-    "UMT5EncoderModel"
-  ],
-  "classifier_dropout": 0.0,
-  "d_ff": 5120,
-  "d_kv": 64,
-  "d_model": 2048,
-  "decoder_start_token_id": 0,
-  "dense_act_fn": "gelu_new",
-  "dropout_rate": 0.1,
-  "eos_token_id": 2,
-  "feed_forward_proj": "gated-gelu",
-  "initializer_factor": 1.0,
-  "is_encoder_decoder": true,
-  "is_gated_act": true,
-  "layer_norm_epsilon": 1e-06,
-  "model_type": "umt5",
-  "num_decoder_layers": 24,
-  "num_heads": 32,
-  "num_layers": 24,
-  "output_past": true,
-  "pad_token_id": 0,
-  "relative_attention_max_distance": 128,
-  "relative_attention_num_buckets": 32,
-  "scalable_attention": true,
-  "tie_word_embeddings": false,
-  "tokenizer_class": "LlamaTokenizerFast",
-  "torch_dtype": "float16",
-  "transformers_version": "4.42.3",
-  "use_cache": true,
-  "vocab_size": 32128
-}
+version https://git-lfs.github.com/spec/v1
+oid sha256:224a6c4cd7a26e45d76825effc1c4bccc279e99f07462a657e5ec730635b768d
+size 894
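The removed config above shows the text encoder is a UMT5 encoder derived from EleutherAI/pile-t5-xl, already stored in float16. If only the text encoder is needed, it loads from its subfolder; a sketch with a placeholder repo id, assuming transformers >= 4.42:

import torch
from transformers import UMT5EncoderModel

# Load just the fp16 UMT5 text encoder; the subfolder name comes from model_index.json.
text_encoder = UMT5EncoderModel.from_pretrained(
    "user/auraflow-fp16",
    subfolder="text_encoder",
    torch_dtype=torch.float16,
)
print(text_encoder.config.d_model)  # 2048, matching the config above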
tokenizer/added_tokens.json CHANGED
@@ -1,102 +1,3 @@
-{
-  "<extra_id_0>": 32099,
-  "<extra_id_10>": 32089,
-  "<extra_id_11>": 32088,
-  "<extra_id_12>": 32087,
-  "<extra_id_13>": 32086,
-  "<extra_id_14>": 32085,
-  "<extra_id_15>": 32084,
-  "<extra_id_16>": 32083,
-  "<extra_id_17>": 32082,
-  "<extra_id_18>": 32081,
-  "<extra_id_19>": 32080,
-  "<extra_id_1>": 32098,
-  "<extra_id_20>": 32079,
-  "<extra_id_21>": 32078,
-  "<extra_id_22>": 32077,
-  "<extra_id_23>": 32076,
-  "<extra_id_24>": 32075,
-  "<extra_id_25>": 32074,
-  "<extra_id_26>": 32073,
-  "<extra_id_27>": 32072,
-  "<extra_id_28>": 32071,
-  "<extra_id_29>": 32070,
-  "<extra_id_2>": 32097,
-  "<extra_id_30>": 32069,
-  "<extra_id_31>": 32068,
-  "<extra_id_32>": 32067,
-  "<extra_id_33>": 32066,
-  "<extra_id_34>": 32065,
-  "<extra_id_35>": 32064,
-  "<extra_id_36>": 32063,
-  "<extra_id_37>": 32062,
-  "<extra_id_38>": 32061,
-  "<extra_id_39>": 32060,
-  "<extra_id_3>": 32096,
-  "<extra_id_40>": 32059,
-  "<extra_id_41>": 32058,
-  "<extra_id_42>": 32057,
-  "<extra_id_43>": 32056,
-  "<extra_id_44>": 32055,
-  "<extra_id_45>": 32054,
-  "<extra_id_46>": 32053,
-  "<extra_id_47>": 32052,
-  "<extra_id_48>": 32051,
-  "<extra_id_49>": 32050,
-  "<extra_id_4>": 32095,
-  "<extra_id_50>": 32049,
-  "<extra_id_51>": 32048,
-  "<extra_id_52>": 32047,
-  "<extra_id_53>": 32046,
-  "<extra_id_54>": 32045,
-  "<extra_id_55>": 32044,
-  "<extra_id_56>": 32043,
-  "<extra_id_57>": 32042,
-  "<extra_id_58>": 32041,
-  "<extra_id_59>": 32040,
-  "<extra_id_5>": 32094,
-  "<extra_id_60>": 32039,
-  "<extra_id_61>": 32038,
-  "<extra_id_62>": 32037,
-  "<extra_id_63>": 32036,
-  "<extra_id_64>": 32035,
-  "<extra_id_65>": 32034,
-  "<extra_id_66>": 32033,
-  "<extra_id_67>": 32032,
-  "<extra_id_68>": 32031,
-  "<extra_id_69>": 32030,
-  "<extra_id_6>": 32093,
-  "<extra_id_70>": 32029,
-  "<extra_id_71>": 32028,
-  "<extra_id_72>": 32027,
-  "<extra_id_73>": 32026,
-  "<extra_id_74>": 32025,
-  "<extra_id_75>": 32024,
-  "<extra_id_76>": 32023,
-  "<extra_id_77>": 32022,
-  "<extra_id_78>": 32021,
-  "<extra_id_79>": 32020,
-  "<extra_id_7>": 32092,
-  "<extra_id_80>": 32019,
-  "<extra_id_81>": 32018,
-  "<extra_id_82>": 32017,
-  "<extra_id_83>": 32016,
-  "<extra_id_84>": 32015,
-  "<extra_id_85>": 32014,
-  "<extra_id_86>": 32013,
-  "<extra_id_87>": 32012,
-  "<extra_id_88>": 32011,
-  "<extra_id_89>": 32010,
-  "<extra_id_8>": 32091,
-  "<extra_id_90>": 32009,
-  "<extra_id_91>": 32008,
-  "<extra_id_92>": 32007,
-  "<extra_id_93>": 32006,
-  "<extra_id_94>": 32005,
-  "<extra_id_95>": 32004,
-  "<extra_id_96>": 32003,
-  "<extra_id_97>": 32002,
-  "<extra_id_98>": 32001,
-  "<extra_id_99>": 32000,
-  "<extra_id_9>": 32090
-}
+version https://git-lfs.github.com/spec/v1
+oid sha256:e5dbed56da19e70eb766a51b841e788eccade864d03a40e58219fad7bfe2d2e1
+size 2695
tokenizer/special_tokens_map.json CHANGED
@@ -1,132 +1,3 @@
-{
-  "additional_special_tokens": [
-    "<extra_id_99>",
-    "<extra_id_98>",
-    "<extra_id_97>",
-    "<extra_id_96>",
-    "<extra_id_95>",
-    "<extra_id_94>",
-    "<extra_id_93>",
-    "<extra_id_92>",
-    "<extra_id_91>",
-    "<extra_id_90>",
-    "<extra_id_89>",
-    "<extra_id_88>",
-    "<extra_id_87>",
-    "<extra_id_86>",
-    "<extra_id_85>",
-    "<extra_id_84>",
-    "<extra_id_83>",
-    "<extra_id_82>",
-    "<extra_id_81>",
-    "<extra_id_80>",
-    "<extra_id_79>",
-    "<extra_id_78>",
-    "<extra_id_77>",
-    "<extra_id_76>",
-    "<extra_id_75>",
-    "<extra_id_74>",
-    "<extra_id_73>",
-    "<extra_id_72>",
-    "<extra_id_71>",
-    "<extra_id_70>",
-    "<extra_id_69>",
-    "<extra_id_68>",
-    "<extra_id_67>",
-    "<extra_id_66>",
-    "<extra_id_65>",
-    "<extra_id_64>",
-    "<extra_id_63>",
-    "<extra_id_62>",
-    "<extra_id_61>",
-    "<extra_id_60>",
-    "<extra_id_59>",
-    "<extra_id_58>",
-    "<extra_id_57>",
-    "<extra_id_56>",
-    "<extra_id_55>",
-    "<extra_id_54>",
-    "<extra_id_53>",
-    "<extra_id_52>",
-    "<extra_id_51>",
-    "<extra_id_50>",
-    "<extra_id_49>",
-    "<extra_id_48>",
-    "<extra_id_47>",
-    "<extra_id_46>",
-    "<extra_id_45>",
-    "<extra_id_44>",
-    "<extra_id_43>",
-    "<extra_id_42>",
-    "<extra_id_41>",
-    "<extra_id_40>",
-    "<extra_id_39>",
-    "<extra_id_38>",
-    "<extra_id_37>",
-    "<extra_id_36>",
-    "<extra_id_35>",
-    "<extra_id_34>",
-    "<extra_id_33>",
-    "<extra_id_32>",
-    "<extra_id_31>",
-    "<extra_id_30>",
-    "<extra_id_29>",
-    "<extra_id_28>",
-    "<extra_id_27>",
-    "<extra_id_26>",
-    "<extra_id_25>",
-    "<extra_id_24>",
-    "<extra_id_23>",
-    "<extra_id_22>",
-    "<extra_id_21>",
-    "<extra_id_20>",
-    "<extra_id_19>",
-    "<extra_id_18>",
-    "<extra_id_17>",
-    "<extra_id_16>",
-    "<extra_id_15>",
-    "<extra_id_14>",
-    "<extra_id_13>",
-    "<extra_id_12>",
-    "<extra_id_11>",
-    "<extra_id_10>",
-    "<extra_id_9>",
-    "<extra_id_8>",
-    "<extra_id_7>",
-    "<extra_id_6>",
-    "<extra_id_5>",
-    "<extra_id_4>",
-    "<extra_id_3>",
-    "<extra_id_2>",
-    "<extra_id_1>",
-    "<extra_id_0>"
-  ],
-  "bos_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
-}
+version https://git-lfs.github.com/spec/v1
+oid sha256:d5778f2b223bd0ed5c85556c4a6f57d008af83362d9f1267c4d41804ca733d07
+size 2809
tokenizer/tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer/tokenizer_config.json CHANGED
@@ -1,945 +1,3 @@
1
- {
2
- "add_bos_token": false,
3
- "add_eos_token": true,
4
- "add_prefix_space": true,
5
- "added_tokens_decoder": {
6
- "0": {
7
- "content": "<unk>",
8
- "lstrip": false,
9
- "normalized": false,
10
- "rstrip": false,
11
- "single_word": false,
12
- "special": true
13
- },
14
- "1": {
15
- "content": "<s>",
16
- "lstrip": false,
17
- "normalized": false,
18
- "rstrip": false,
19
- "single_word": false,
20
- "special": true
21
- },
22
- "2": {
23
- "content": "</s>",
24
- "lstrip": false,
25
- "normalized": false,
26
- "rstrip": false,
27
- "single_word": false,
28
- "special": true
29
- },
30
- "32000": {
31
- "content": "<extra_id_99>",
32
- "lstrip": false,
33
- "normalized": false,
34
- "rstrip": false,
35
- "single_word": false,
36
- "special": true
37
- },
38
- "32001": {
39
- "content": "<extra_id_98>",
40
- "lstrip": false,
41
- "normalized": false,
42
- "rstrip": false,
43
- "single_word": false,
44
- "special": true
45
- },
46
- "32002": {
47
- "content": "<extra_id_97>",
48
- "lstrip": false,
49
- "normalized": false,
50
- "rstrip": false,
51
- "single_word": false,
52
- "special": true
53
- },
54
- "32003": {
55
- "content": "<extra_id_96>",
56
- "lstrip": false,
57
- "normalized": false,
58
- "rstrip": false,
59
- "single_word": false,
60
- "special": true
61
- },
62
- "32004": {
63
- "content": "<extra_id_95>",
64
- "lstrip": false,
65
- "normalized": false,
66
- "rstrip": false,
67
- "single_word": false,
68
- "special": true
69
- },
70
- "32005": {
71
- "content": "<extra_id_94>",
72
- "lstrip": false,
73
- "normalized": false,
74
- "rstrip": false,
75
- "single_word": false,
76
- "special": true
77
- },
78
- "32006": {
79
- "content": "<extra_id_93>",
80
- "lstrip": false,
81
- "normalized": false,
82
- "rstrip": false,
83
- "single_word": false,
84
- "special": true
85
- },
86
- "32007": {
87
- "content": "<extra_id_92>",
88
- "lstrip": false,
89
- "normalized": false,
90
- "rstrip": false,
91
- "single_word": false,
92
- "special": true
93
- },
94
- "32008": {
95
- "content": "<extra_id_91>",
96
- "lstrip": false,
97
- "normalized": false,
98
- "rstrip": false,
99
- "single_word": false,
100
- "special": true
101
- },
102
- "32009": {
103
- "content": "<extra_id_90>",
104
- "lstrip": false,
105
- "normalized": false,
106
- "rstrip": false,
107
- "single_word": false,
108
- "special": true
109
- },
110
- "32010": {
111
- "content": "<extra_id_89>",
112
- "lstrip": false,
113
- "normalized": false,
114
- "rstrip": false,
115
- "single_word": false,
116
- "special": true
117
- },
118
- "32011": {
119
- "content": "<extra_id_88>",
120
- "lstrip": false,
121
- "normalized": false,
122
- "rstrip": false,
123
- "single_word": false,
124
- "special": true
125
- },
126
- "32012": {
127
- "content": "<extra_id_87>",
128
- "lstrip": false,
129
- "normalized": false,
130
- "rstrip": false,
131
- "single_word": false,
132
- "special": true
133
- },
134
- "32013": {
135
- "content": "<extra_id_86>",
136
- "lstrip": false,
137
- "normalized": false,
138
- "rstrip": false,
139
- "single_word": false,
140
- "special": true
141
- },
142
- "32014": {
143
- "content": "<extra_id_85>",
144
- "lstrip": false,
145
- "normalized": false,
146
- "rstrip": false,
147
- "single_word": false,
148
- "special": true
149
- },
150
- "32015": {
151
- "content": "<extra_id_84>",
152
- "lstrip": false,
153
- "normalized": false,
154
- "rstrip": false,
155
- "single_word": false,
156
- "special": true
157
- },
158
- "32016": {
159
- "content": "<extra_id_83>",
160
- "lstrip": false,
161
- "normalized": false,
162
- "rstrip": false,
163
- "single_word": false,
164
- "special": true
165
- },
166
- "32017": {
167
- "content": "<extra_id_82>",
168
- "lstrip": false,
169
- "normalized": false,
170
- "rstrip": false,
171
- "single_word": false,
172
- "special": true
173
- },
174
- "32018": {
175
- "content": "<extra_id_81>",
176
- "lstrip": false,
177
- "normalized": false,
178
- "rstrip": false,
179
- "single_word": false,
180
- "special": true
181
- },
182
- "32019": {
183
- "content": "<extra_id_80>",
184
- "lstrip": false,
185
- "normalized": false,
186
- "rstrip": false,
187
- "single_word": false,
188
- "special": true
189
- },
190
- "32020": {
191
- "content": "<extra_id_79>",
192
- "lstrip": false,
193
- "normalized": false,
194
- "rstrip": false,
195
- "single_word": false,
196
- "special": true
197
- },
198
- "32021": {
199
- "content": "<extra_id_78>",
200
- "lstrip": false,
201
- "normalized": false,
202
- "rstrip": false,
203
- "single_word": false,
204
- "special": true
205
- },
206
- "32022": {
207
- "content": "<extra_id_77>",
208
- "lstrip": false,
209
- "normalized": false,
210
- "rstrip": false,
211
- "single_word": false,
212
- "special": true
213
- },
214
- "32023": {
215
- "content": "<extra_id_76>",
216
- "lstrip": false,
217
- "normalized": false,
218
- "rstrip": false,
219
- "single_word": false,
220
- "special": true
221
- },
222
- "32024": {
223
- "content": "<extra_id_75>",
224
- "lstrip": false,
225
- "normalized": false,
226
- "rstrip": false,
227
- "single_word": false,
228
- "special": true
229
- },
230
- "32025": {
231
- "content": "<extra_id_74>",
232
- "lstrip": false,
233
- "normalized": false,
234
- "rstrip": false,
235
- "single_word": false,
236
- "special": true
237
- },
238
- "32026": {
239
- "content": "<extra_id_73>",
240
- "lstrip": false,
241
- "normalized": false,
242
- "rstrip": false,
243
- "single_word": false,
244
- "special": true
245
- },
246
- "32027": {
247
- "content": "<extra_id_72>",
248
- "lstrip": false,
249
- "normalized": false,
250
- "rstrip": false,
251
- "single_word": false,
252
- "special": true
253
- },
254
- "32028": {
255
- "content": "<extra_id_71>",
256
- "lstrip": false,
257
- "normalized": false,
258
- "rstrip": false,
259
- "single_word": false,
260
- "special": true
261
- },
262
- "32029": {
263
- "content": "<extra_id_70>",
264
- "lstrip": false,
265
- "normalized": false,
266
- "rstrip": false,
267
- "single_word": false,
268
- "special": true
269
- },
270
- "32030": {
271
- "content": "<extra_id_69>",
272
- "lstrip": false,
273
- "normalized": false,
274
- "rstrip": false,
275
- "single_word": false,
276
- "special": true
277
- },
278
- "32031": {
279
- "content": "<extra_id_68>",
280
- "lstrip": false,
281
- "normalized": false,
282
- "rstrip": false,
283
- "single_word": false,
284
- "special": true
285
- },
286
- "32032": {
287
- "content": "<extra_id_67>",
288
- "lstrip": false,
289
- "normalized": false,
290
- "rstrip": false,
291
- "single_word": false,
292
- "special": true
293
- },
294
- "32033": {
295
- "content": "<extra_id_66>",
296
- "lstrip": false,
297
- "normalized": false,
298
- "rstrip": false,
299
- "single_word": false,
300
- "special": true
301
- },
302
- "32034": {
303
- "content": "<extra_id_65>",
304
- "lstrip": false,
305
- "normalized": false,
306
- "rstrip": false,
307
- "single_word": false,
308
- "special": true
309
- },
310
- "32035": {
311
- "content": "<extra_id_64>",
312
- "lstrip": false,
313
- "normalized": false,
314
- "rstrip": false,
315
- "single_word": false,
316
- "special": true
317
- },
318
- "32036": {
319
- "content": "<extra_id_63>",
320
- "lstrip": false,
321
- "normalized": false,
322
- "rstrip": false,
323
- "single_word": false,
324
- "special": true
325
- },
326
- "32037": {
327
- "content": "<extra_id_62>",
328
- "lstrip": false,
329
- "normalized": false,
330
- "rstrip": false,
331
- "single_word": false,
332
- "special": true
333
- },
334
- "32038": {
335
- "content": "<extra_id_61>",
336
- "lstrip": false,
337
- "normalized": false,
338
- "rstrip": false,
339
- "single_word": false,
340
- "special": true
341
- },
342
- "32039": {
343
- "content": "<extra_id_60>",
344
- "lstrip": false,
345
- "normalized": false,
346
- "rstrip": false,
347
- "single_word": false,
348
- "special": true
349
- },
350
- "32040": {
351
- "content": "<extra_id_59>",
352
- "lstrip": false,
353
- "normalized": false,
354
- "rstrip": false,
355
- "single_word": false,
356
- "special": true
357
- },
358
- "32041": {
359
- "content": "<extra_id_58>",
360
- "lstrip": false,
361
- "normalized": false,
362
- "rstrip": false,
363
- "single_word": false,
364
- "special": true
365
- },
366
- "32042": {
367
- "content": "<extra_id_57>",
368
- "lstrip": false,
369
- "normalized": false,
370
- "rstrip": false,
371
- "single_word": false,
372
- "special": true
373
- },
374
- "32043": {
375
- "content": "<extra_id_56>",
376
- "lstrip": false,
377
- "normalized": false,
378
- "rstrip": false,
379
- "single_word": false,
380
- "special": true
381
- },
382
- "32044": {
383
- "content": "<extra_id_55>",
384
- "lstrip": false,
385
- "normalized": false,
386
- "rstrip": false,
387
- "single_word": false,
388
- "special": true
389
- },
390
- "32045": {
391
- "content": "<extra_id_54>",
392
- "lstrip": false,
393
- "normalized": false,
394
- "rstrip": false,
395
- "single_word": false,
396
- "special": true
397
- },
398
- "32046": {
399
- "content": "<extra_id_53>",
400
- "lstrip": false,
401
- "normalized": false,
402
- "rstrip": false,
403
- "single_word": false,
404
- "special": true
405
- },
406
- "32047": {
407
- "content": "<extra_id_52>",
408
- "lstrip": false,
409
- "normalized": false,
410
- "rstrip": false,
411
- "single_word": false,
412
- "special": true
413
- },
414
- "32048": {
415
- "content": "<extra_id_51>",
416
- "lstrip": false,
417
- "normalized": false,
418
- "rstrip": false,
419
- "single_word": false,
420
- "special": true
421
- },
422
- "32049": {
423
- "content": "<extra_id_50>",
424
- "lstrip": false,
425
- "normalized": false,
426
- "rstrip": false,
427
- "single_word": false,
428
- "special": true
429
- },
430
- "32050": {
431
- "content": "<extra_id_49>",
432
- "lstrip": false,
433
- "normalized": false,
434
- "rstrip": false,
435
- "single_word": false,
436
- "special": true
437
- },
438
- "32051": {
439
- "content": "<extra_id_48>",
440
- "lstrip": false,
441
- "normalized": false,
442
- "rstrip": false,
443
- "single_word": false,
444
- "special": true
445
- },
446
- "32052": {
447
- "content": "<extra_id_47>",
448
- "lstrip": false,
449
- "normalized": false,
450
- "rstrip": false,
451
- "single_word": false,
452
- "special": true
453
- },
454
- "32053": {
455
- "content": "<extra_id_46>",
456
- "lstrip": false,
457
- "normalized": false,
458
- "rstrip": false,
459
- "single_word": false,
460
- "special": true
461
- },
462
- "32054": {
463
- "content": "<extra_id_45>",
464
- "lstrip": false,
465
- "normalized": false,
466
- "rstrip": false,
467
- "single_word": false,
468
- "special": true
469
- },
470
- "32055": {
471
- "content": "<extra_id_44>",
472
- "lstrip": false,
473
- "normalized": false,
474
- "rstrip": false,
475
- "single_word": false,
476
- "special": true
477
- },
478
- "32056": {
479
- "content": "<extra_id_43>",
480
- "lstrip": false,
481
- "normalized": false,
482
- "rstrip": false,
483
- "single_word": false,
484
- "special": true
485
- },
486
- "32057": {
487
- "content": "<extra_id_42>",
488
- "lstrip": false,
489
- "normalized": false,
490
- "rstrip": false,
491
- "single_word": false,
492
- "special": true
493
- },
494
- "32058": {
495
- "content": "<extra_id_41>",
496
- "lstrip": false,
497
- "normalized": false,
498
- "rstrip": false,
499
- "single_word": false,
500
- "special": true
501
- },
502
- "32059": {
503
- "content": "<extra_id_40>",
504
- "lstrip": false,
505
- "normalized": false,
506
- "rstrip": false,
507
- "single_word": false,
508
- "special": true
509
- },
510
- "32060": {
511
- "content": "<extra_id_39>",
512
- "lstrip": false,
513
- "normalized": false,
514
- "rstrip": false,
515
- "single_word": false,
516
- "special": true
517
- },
518
- "32061": {
519
- "content": "<extra_id_38>",
520
- "lstrip": false,
521
- "normalized": false,
522
- "rstrip": false,
523
- "single_word": false,
524
- "special": true
525
- },
526
- "32062": {
527
- "content": "<extra_id_37>",
528
- "lstrip": false,
529
- "normalized": false,
530
- "rstrip": false,
531
- "single_word": false,
532
- "special": true
533
- },
534
- "32063": {
535
- "content": "<extra_id_36>",
536
- "lstrip": false,
537
- "normalized": false,
538
- "rstrip": false,
539
- "single_word": false,
540
- "special": true
541
- },
542
- "32064": {
543
- "content": "<extra_id_35>",
544
- "lstrip": false,
545
- "normalized": false,
546
- "rstrip": false,
547
- "single_word": false,
548
- "special": true
549
- },
550
- "32065": {
551
- "content": "<extra_id_34>",
552
- "lstrip": false,
553
- "normalized": false,
554
- "rstrip": false,
555
- "single_word": false,
556
- "special": true
557
- },
558
- "32066": {
559
- "content": "<extra_id_33>",
560
- "lstrip": false,
561
- "normalized": false,
562
- "rstrip": false,
563
- "single_word": false,
564
- "special": true
565
- },
566
- "32067": {
567
- "content": "<extra_id_32>",
568
- "lstrip": false,
569
- "normalized": false,
570
- "rstrip": false,
571
- "single_word": false,
572
- "special": true
573
- },
574
- "32068": {
575
- "content": "<extra_id_31>",
576
- "lstrip": false,
577
- "normalized": false,
578
- "rstrip": false,
579
- "single_word": false,
580
- "special": true
581
- },
582
- "32069": {
583
- "content": "<extra_id_30>",
584
- "lstrip": false,
585
- "normalized": false,
586
- "rstrip": false,
587
- "single_word": false,
588
- "special": true
589
- },
590
- "32070": {
591
- "content": "<extra_id_29>",
592
- "lstrip": false,
593
- "normalized": false,
594
- "rstrip": false,
595
- "single_word": false,
596
- "special": true
597
- },
598
- "32071": {
599
- "content": "<extra_id_28>",
600
- "lstrip": false,
601
- "normalized": false,
602
- "rstrip": false,
603
- "single_word": false,
604
- "special": true
605
- },
606
- "32072": {
607
- "content": "<extra_id_27>",
608
- "lstrip": false,
609
- "normalized": false,
610
- "rstrip": false,
611
- "single_word": false,
612
- "special": true
613
- },
614
- "32073": {
615
- "content": "<extra_id_26>",
616
- "lstrip": false,
617
- "normalized": false,
618
- "rstrip": false,
619
- "single_word": false,
620
- "special": true
621
- },
622
- "32074": {
623
- "content": "<extra_id_25>",
624
- "lstrip": false,
625
- "normalized": false,
626
- "rstrip": false,
627
- "single_word": false,
628
- "special": true
629
- },
630
- "32075": {
631
- "content": "<extra_id_24>",
632
- "lstrip": false,
633
- "normalized": false,
634
- "rstrip": false,
635
- "single_word": false,
636
- "special": true
637
- },
638
- "32076": {
639
- "content": "<extra_id_23>",
640
- "lstrip": false,
641
- "normalized": false,
642
- "rstrip": false,
643
- "single_word": false,
644
- "special": true
645
- },
646
- "32077": {
647
- "content": "<extra_id_22>",
648
- "lstrip": false,
649
- "normalized": false,
650
- "rstrip": false,
651
- "single_word": false,
652
- "special": true
653
- },
654
- "32078": {
655
- "content": "<extra_id_21>",
656
- "lstrip": false,
657
- "normalized": false,
658
- "rstrip": false,
659
- "single_word": false,
660
- "special": true
661
- },
662
- "32079": {
663
- "content": "<extra_id_20>",
664
- "lstrip": false,
665
- "normalized": false,
666
- "rstrip": false,
667
- "single_word": false,
668
- "special": true
669
- },
670
- "32080": {
671
- "content": "<extra_id_19>",
672
- "lstrip": false,
673
- "normalized": false,
674
- "rstrip": false,
675
- "single_word": false,
676
- "special": true
677
- },
678
- "32081": {
679
- "content": "<extra_id_18>",
680
- "lstrip": false,
681
- "normalized": false,
682
- "rstrip": false,
683
- "single_word": false,
684
- "special": true
685
- },
686
- "32082": {
687
- "content": "<extra_id_17>",
688
- "lstrip": false,
689
- "normalized": false,
690
- "rstrip": false,
691
- "single_word": false,
692
- "special": true
693
- },
694
- "32083": {
695
- "content": "<extra_id_16>",
696
- "lstrip": false,
697
- "normalized": false,
698
- "rstrip": false,
699
- "single_word": false,
700
- "special": true
701
- },
702
- "32084": {
703
- "content": "<extra_id_15>",
704
- "lstrip": false,
705
- "normalized": false,
706
- "rstrip": false,
707
- "single_word": false,
708
- "special": true
709
- },
710
- "32085": {
711
- "content": "<extra_id_14>",
712
- "lstrip": false,
713
- "normalized": false,
714
- "rstrip": false,
715
- "single_word": false,
716
- "special": true
717
- },
718
- "32086": {
719
- "content": "<extra_id_13>",
720
- "lstrip": false,
721
- "normalized": false,
722
- "rstrip": false,
723
- "single_word": false,
724
- "special": true
725
- },
726
- "32087": {
727
- "content": "<extra_id_12>",
728
- "lstrip": false,
729
- "normalized": false,
730
- "rstrip": false,
731
- "single_word": false,
732
- "special": true
733
- },
734
- "32088": {
735
- "content": "<extra_id_11>",
736
- "lstrip": false,
737
- "normalized": false,
738
- "rstrip": false,
739
- "single_word": false,
740
- "special": true
741
- },
742
- "32089": {
743
- "content": "<extra_id_10>",
744
- "lstrip": false,
745
- "normalized": false,
746
- "rstrip": false,
747
- "single_word": false,
748
- "special": true
749
- },
750
- "32090": {
751
- "content": "<extra_id_9>",
752
- "lstrip": false,
753
- "normalized": false,
754
- "rstrip": false,
755
- "single_word": false,
756
- "special": true
757
- },
758
- "32091": {
759
- "content": "<extra_id_8>",
760
- "lstrip": false,
761
- "normalized": false,
762
- "rstrip": false,
763
- "single_word": false,
764
- "special": true
765
- },
766
- "32092": {
767
- "content": "<extra_id_7>",
768
- "lstrip": false,
769
- "normalized": false,
770
- "rstrip": false,
771
- "single_word": false,
772
- "special": true
773
- },
774
- "32093": {
775
- "content": "<extra_id_6>",
776
- "lstrip": false,
777
- "normalized": false,
778
- "rstrip": false,
779
- "single_word": false,
780
- "special": true
781
- },
782
- "32094": {
783
- "content": "<extra_id_5>",
784
- "lstrip": false,
785
- "normalized": false,
786
- "rstrip": false,
787
- "single_word": false,
788
- "special": true
789
- },
790
- "32095": {
791
- "content": "<extra_id_4>",
792
- "lstrip": false,
793
- "normalized": false,
794
- "rstrip": false,
795
- "single_word": false,
796
- "special": true
797
- },
798
- "32096": {
799
- "content": "<extra_id_3>",
800
- "lstrip": false,
801
- "normalized": false,
802
- "rstrip": false,
803
- "single_word": false,
804
- "special": true
805
- },
806
- "32097": {
807
- "content": "<extra_id_2>",
808
- "lstrip": false,
809
- "normalized": false,
810
- "rstrip": false,
811
- "single_word": false,
812
- "special": true
813
- },
814
- "32098": {
815
- "content": "<extra_id_1>",
816
- "lstrip": false,
817
- "normalized": false,
818
- "rstrip": false,
819
- "single_word": false,
820
- "special": true
821
- },
822
- "32099": {
823
- "content": "<extra_id_0>",
824
- "lstrip": false,
825
- "normalized": false,
826
- "rstrip": false,
827
- "single_word": false,
828
- "special": true
829
- }
830
- },
831
- "additional_special_tokens": [
832
- "<extra_id_99>",
833
- "<extra_id_98>",
834
- "<extra_id_97>",
835
- "<extra_id_96>",
836
- "<extra_id_95>",
837
- "<extra_id_94>",
838
- "<extra_id_93>",
839
- "<extra_id_92>",
840
- "<extra_id_91>",
841
- "<extra_id_90>",
842
- "<extra_id_89>",
843
- "<extra_id_88>",
844
- "<extra_id_87>",
845
- "<extra_id_86>",
846
- "<extra_id_85>",
847
- "<extra_id_84>",
848
- "<extra_id_83>",
849
- "<extra_id_82>",
850
- "<extra_id_81>",
851
- "<extra_id_80>",
852
- "<extra_id_79>",
853
- "<extra_id_78>",
854
- "<extra_id_77>",
855
- "<extra_id_76>",
856
- "<extra_id_75>",
857
- "<extra_id_74>",
858
- "<extra_id_73>",
859
- "<extra_id_72>",
860
- "<extra_id_71>",
861
- "<extra_id_70>",
862
- "<extra_id_69>",
863
- "<extra_id_68>",
864
- "<extra_id_67>",
865
- "<extra_id_66>",
866
- "<extra_id_65>",
867
- "<extra_id_64>",
868
- "<extra_id_63>",
869
- "<extra_id_62>",
870
- "<extra_id_61>",
871
- "<extra_id_60>",
872
- "<extra_id_59>",
873
- "<extra_id_58>",
874
- "<extra_id_57>",
875
- "<extra_id_56>",
876
- "<extra_id_55>",
877
- "<extra_id_54>",
878
- "<extra_id_53>",
879
- "<extra_id_52>",
880
- "<extra_id_51>",
881
- "<extra_id_50>",
882
- "<extra_id_49>",
883
- "<extra_id_48>",
884
- "<extra_id_47>",
885
- "<extra_id_46>",
886
- "<extra_id_45>",
887
- "<extra_id_44>",
888
- "<extra_id_43>",
889
- "<extra_id_42>",
890
- "<extra_id_41>",
891
- "<extra_id_40>",
892
- "<extra_id_39>",
893
- "<extra_id_38>",
894
- "<extra_id_37>",
895
- "<extra_id_36>",
896
- "<extra_id_35>",
897
- "<extra_id_34>",
898
- "<extra_id_33>",
899
- "<extra_id_32>",
900
- "<extra_id_31>",
901
- "<extra_id_30>",
902
- "<extra_id_29>",
903
- "<extra_id_28>",
904
- "<extra_id_27>",
905
- "<extra_id_26>",
906
- "<extra_id_25>",
907
- "<extra_id_24>",
908
- "<extra_id_23>",
909
- "<extra_id_22>",
910
- "<extra_id_21>",
911
- "<extra_id_20>",
912
- "<extra_id_19>",
913
- "<extra_id_18>",
914
- "<extra_id_17>",
915
- "<extra_id_16>",
916
- "<extra_id_15>",
917
- "<extra_id_14>",
918
- "<extra_id_13>",
919
- "<extra_id_12>",
920
- "<extra_id_11>",
921
- "<extra_id_10>",
922
- "<extra_id_9>",
923
- "<extra_id_8>",
924
- "<extra_id_7>",
925
- "<extra_id_6>",
926
- "<extra_id_5>",
927
- "<extra_id_4>",
928
- "<extra_id_3>",
929
- "<extra_id_2>",
930
- "<extra_id_1>",
931
- "<extra_id_0>"
932
- ],
933
- "bos_token": "<s>",
934
- "clean_up_tokenization_spaces": false,
935
- "eos_token": "</s>",
936
- "legacy": false,
937
- "model_max_length": 512,
938
- "pad_token": "<s>",
939
- "padding_side": "right",
940
- "sp_model_kwargs": {},
941
- "spaces_between_special_tokens": false,
942
- "tokenizer_class": "LlamaTokenizer",
943
- "unk_token": "<unk>",
944
- "use_default_system_prompt": false
945
- }
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ec9914159b77d054342be92763d738effd0a79bbe225afe024fb80a9ae0f2a36
3
+ size 21923
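The tokenizer files above are now LFS pointers as well, but they still describe a standard Llama-style SentencePiece tokenizer with 100 <extra_id_*> sentinel tokens (ids 32000-32099). Loading it is unchanged; a sketch with a placeholder repo id:

from transformers import AutoTokenizer

# The Hub resolves the LFS-stored tokenizer files transparently.
tok = AutoTokenizer.from_pretrained("user/auraflow-fp16", subfolder="tokenizer")
print(tok.convert_tokens_to_ids("<extra_id_0>"))  # 32099, per added_tokens.json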
transformer/config.json CHANGED
@@ -1,15 +1,3 @@
-{
-  "_class_name": "AuraFlowTransformer2DModel",
-  "_diffusers_version": "0.30.0.dev0",
-  "attention_head_dim": 256,
-  "caption_projection_dim": 3072,
-  "in_channels": 4,
-  "joint_attention_dim": 2048,
-  "num_attention_heads": 12,
-  "num_mmdit_layers": 4,
-  "num_single_dit_layers": 32,
-  "out_channels": 4,
-  "patch_size": 2,
-  "pos_embed_max_size": 4096,
-  "sample_size": 64
-}
+version https://git-lfs.github.com/spec/v1
+oid sha256:1dc4921751a0610e5b88efe67e4d070ef01947d738b4d56a72c156f3c7a0ab10
+size 394
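The transformer config above describes an AuraFlowTransformer2DModel with 4 MMDiT blocks and 32 single-DiT blocks; its fp16 weights are sharded across two safetensors files stitched together by the weight-map index in the next diff (total_size 13,685,753,856 bytes, roughly 6.8B parameters at 2 bytes per fp16 weight). Loading the component on its own, as a sketch with a placeholder repo id and diffusers >= 0.30:

import torch
from diffusers import AuraFlowTransformer2DModel

# Load only the fp16 transformer; the sharded files are resolved via the
# weight-map index shown in the following diff.
transformer = AuraFlowTransformer2DModel.from_pretrained(
    "user/auraflow-fp16",
    subfolder="transformer",
    torch_dtype=torch.float16,
)
print(sum(p.numel() for p in transformer.parameters()))  # roughly 6.8e9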
transformer/diffusion_pytorch_model.safetensors.index.json CHANGED
@@ -1,338 +1,3 @@
1
- {
2
- "metadata": {
3
- "total_size": 13685753856
4
- },
5
- "weight_map": {
6
- "context_embedder.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
7
- "joint_transformer_blocks.0.attn.add_k_proj.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
8
- "joint_transformer_blocks.0.attn.add_q_proj.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
9
- "joint_transformer_blocks.0.attn.add_v_proj.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
10
- "joint_transformer_blocks.0.attn.to_add_out.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
11
- "joint_transformer_blocks.0.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
12
- "joint_transformer_blocks.0.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
13
- "joint_transformer_blocks.0.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
14
- "joint_transformer_blocks.0.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
15
- "joint_transformer_blocks.0.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
16
- "joint_transformer_blocks.0.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
17
- "joint_transformer_blocks.0.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
18
- "joint_transformer_blocks.0.ff_context.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
19
- "joint_transformer_blocks.0.ff_context.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
20
- "joint_transformer_blocks.0.ff_context.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
21
- "joint_transformer_blocks.0.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
22
- "joint_transformer_blocks.0.norm1_context.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
23
- "joint_transformer_blocks.1.attn.add_k_proj.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
24
- "joint_transformer_blocks.1.attn.add_q_proj.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
25
- "joint_transformer_blocks.1.attn.add_v_proj.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
26
- "joint_transformer_blocks.1.attn.to_add_out.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
27
- "joint_transformer_blocks.1.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
28
- "joint_transformer_blocks.1.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
29
- "joint_transformer_blocks.1.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
30
- "joint_transformer_blocks.1.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
31
- "joint_transformer_blocks.1.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
32
- "joint_transformer_blocks.1.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
33
- "joint_transformer_blocks.1.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
34
- "joint_transformer_blocks.1.ff_context.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
35
- "joint_transformer_blocks.1.ff_context.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
36
- "joint_transformer_blocks.1.ff_context.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
37
- "joint_transformer_blocks.1.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
38
- "joint_transformer_blocks.1.norm1_context.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
39
- "joint_transformer_blocks.2.attn.add_k_proj.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
40
- "joint_transformer_blocks.2.attn.add_q_proj.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
41
- "joint_transformer_blocks.2.attn.add_v_proj.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
42
- "joint_transformer_blocks.2.attn.to_add_out.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
43
- "joint_transformer_blocks.2.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
44
- "joint_transformer_blocks.2.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
45
- "joint_transformer_blocks.2.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
46
- "joint_transformer_blocks.2.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
47
- "joint_transformer_blocks.2.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
48
- "joint_transformer_blocks.2.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
49
- "joint_transformer_blocks.2.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
50
- "joint_transformer_blocks.2.ff_context.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
51
- "joint_transformer_blocks.2.ff_context.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
52
- "joint_transformer_blocks.2.ff_context.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
53
- "joint_transformer_blocks.2.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
54
- "joint_transformer_blocks.2.norm1_context.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
55
- "joint_transformer_blocks.3.attn.add_k_proj.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
56
- "joint_transformer_blocks.3.attn.add_q_proj.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
57
- "joint_transformer_blocks.3.attn.add_v_proj.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
58
- "joint_transformer_blocks.3.attn.to_add_out.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
59
- "joint_transformer_blocks.3.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
60
- "joint_transformer_blocks.3.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
61
- "joint_transformer_blocks.3.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
62
- "joint_transformer_blocks.3.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
63
- "joint_transformer_blocks.3.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
64
- "joint_transformer_blocks.3.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
65
- "joint_transformer_blocks.3.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
66
- "joint_transformer_blocks.3.ff_context.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
67
- "joint_transformer_blocks.3.ff_context.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
68
- "joint_transformer_blocks.3.ff_context.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
69
- "joint_transformer_blocks.3.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
70
- "joint_transformer_blocks.3.norm1_context.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
71
- "norm_out.linear.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
72
- "pos_embed.pos_embed": "diffusion_pytorch_model-00001-of-00002.safetensors",
73
- "pos_embed.proj.bias": "diffusion_pytorch_model-00001-of-00002.safetensors",
74
- "pos_embed.proj.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
75
- "proj_out.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
76
- "register_tokens": "diffusion_pytorch_model-00001-of-00002.safetensors",
77
- "single_transformer_blocks.0.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
78
- "single_transformer_blocks.0.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
79
- "single_transformer_blocks.0.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
80
- "single_transformer_blocks.0.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
81
- "single_transformer_blocks.0.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
82
- "single_transformer_blocks.0.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
83
- "single_transformer_blocks.0.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
84
- "single_transformer_blocks.0.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
85
- "single_transformer_blocks.1.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
86
- "single_transformer_blocks.1.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
87
- "single_transformer_blocks.1.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
88
- "single_transformer_blocks.1.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
89
- "single_transformer_blocks.1.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
90
- "single_transformer_blocks.1.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
91
- "single_transformer_blocks.1.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
92
- "single_transformer_blocks.1.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
93
- "single_transformer_blocks.10.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
94
- "single_transformer_blocks.10.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
95
- "single_transformer_blocks.10.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
96
- "single_transformer_blocks.10.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
97
- "single_transformer_blocks.10.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
98
- "single_transformer_blocks.10.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
99
- "single_transformer_blocks.10.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
100
- "single_transformer_blocks.10.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
101
- "single_transformer_blocks.11.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
102
- "single_transformer_blocks.11.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
103
- "single_transformer_blocks.11.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
104
- "single_transformer_blocks.11.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
105
- "single_transformer_blocks.11.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
106
- "single_transformer_blocks.11.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
107
- "single_transformer_blocks.11.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
108
- "single_transformer_blocks.11.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
109
- "single_transformer_blocks.12.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
110
- "single_transformer_blocks.12.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
111
- "single_transformer_blocks.12.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
112
- "single_transformer_blocks.12.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
113
- "single_transformer_blocks.12.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
114
- "single_transformer_blocks.12.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
115
- "single_transformer_blocks.12.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
116
- "single_transformer_blocks.12.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
117
- "single_transformer_blocks.13.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
118
- "single_transformer_blocks.13.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
119
- "single_transformer_blocks.13.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
120
- "single_transformer_blocks.13.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
121
- "single_transformer_blocks.13.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
122
- "single_transformer_blocks.13.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
123
- "single_transformer_blocks.13.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
124
- "single_transformer_blocks.13.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
125
- "single_transformer_blocks.14.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
126
- "single_transformer_blocks.14.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
127
- "single_transformer_blocks.14.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
128
- "single_transformer_blocks.14.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
129
- "single_transformer_blocks.14.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
130
- "single_transformer_blocks.14.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
131
- "single_transformer_blocks.14.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
132
- "single_transformer_blocks.14.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
133
- "single_transformer_blocks.15.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
134
- "single_transformer_blocks.15.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
135
- "single_transformer_blocks.15.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
136
- "single_transformer_blocks.15.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
137
- "single_transformer_blocks.15.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
138
- "single_transformer_blocks.15.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
139
- "single_transformer_blocks.15.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
140
- "single_transformer_blocks.15.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
141
- "single_transformer_blocks.16.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
142
- "single_transformer_blocks.16.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
143
- "single_transformer_blocks.16.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
144
- "single_transformer_blocks.16.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
145
- "single_transformer_blocks.16.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
146
- "single_transformer_blocks.16.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
147
- "single_transformer_blocks.16.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
148
- "single_transformer_blocks.16.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
149
- "single_transformer_blocks.17.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
150
- "single_transformer_blocks.17.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
151
- "single_transformer_blocks.17.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
152
- "single_transformer_blocks.17.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
153
- "single_transformer_blocks.17.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
154
- "single_transformer_blocks.17.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
155
- "single_transformer_blocks.17.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
156
- "single_transformer_blocks.17.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
157
- "single_transformer_blocks.18.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
158
- "single_transformer_blocks.18.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
159
- "single_transformer_blocks.18.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
160
- "single_transformer_blocks.18.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
161
- "single_transformer_blocks.18.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
162
- "single_transformer_blocks.18.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
163
- "single_transformer_blocks.18.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
164
- "single_transformer_blocks.18.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
165
- "single_transformer_blocks.19.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
166
- "single_transformer_blocks.19.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
167
- "single_transformer_blocks.19.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
168
- "single_transformer_blocks.19.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
169
- "single_transformer_blocks.19.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
170
- "single_transformer_blocks.19.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
171
- "single_transformer_blocks.19.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
172
- "single_transformer_blocks.19.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
173
- "single_transformer_blocks.2.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
174
- "single_transformer_blocks.2.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
175
- "single_transformer_blocks.2.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
176
- "single_transformer_blocks.2.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
177
- "single_transformer_blocks.2.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
178
- "single_transformer_blocks.2.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
179
- "single_transformer_blocks.2.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
180
- "single_transformer_blocks.2.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
181
- "single_transformer_blocks.20.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
182
- "single_transformer_blocks.20.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
183
- "single_transformer_blocks.20.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
184
- "single_transformer_blocks.20.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
185
- "single_transformer_blocks.20.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
186
- "single_transformer_blocks.20.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
187
- "single_transformer_blocks.20.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
188
- "single_transformer_blocks.20.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
189
- "single_transformer_blocks.21.attn.to_k.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
190
- "single_transformer_blocks.21.attn.to_out.0.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
191
- "single_transformer_blocks.21.attn.to_q.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
192
- "single_transformer_blocks.21.attn.to_v.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
193
- "single_transformer_blocks.21.ff.linear_1.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
194
- "single_transformer_blocks.21.ff.linear_2.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
195
- "single_transformer_blocks.21.ff.out_projection.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
196
- "single_transformer_blocks.21.norm1.linear.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
197
- "single_transformer_blocks.22.attn.to_k.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
198
- "single_transformer_blocks.22.attn.to_out.0.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
199
- "single_transformer_blocks.22.attn.to_q.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
200
- "single_transformer_blocks.22.attn.to_v.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
201
- "single_transformer_blocks.22.ff.linear_1.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
202
- "single_transformer_blocks.22.ff.linear_2.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
203
- "single_transformer_blocks.22.ff.out_projection.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
204
- "single_transformer_blocks.22.norm1.linear.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
205
- "single_transformer_blocks.23.attn.to_k.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
206
- "single_transformer_blocks.23.attn.to_out.0.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
207
- "single_transformer_blocks.23.attn.to_q.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
208
- "single_transformer_blocks.23.attn.to_v.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
209
- "single_transformer_blocks.23.ff.linear_1.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
210
- "single_transformer_blocks.23.ff.linear_2.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
211
- "single_transformer_blocks.23.ff.out_projection.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
212
- "single_transformer_blocks.23.norm1.linear.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
213
- "single_transformer_blocks.24.attn.to_k.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
214
- "single_transformer_blocks.24.attn.to_out.0.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
215
- "single_transformer_blocks.24.attn.to_q.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
216
- "single_transformer_blocks.24.attn.to_v.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
217
- "single_transformer_blocks.24.ff.linear_1.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
218
- "single_transformer_blocks.24.ff.linear_2.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
219
- "single_transformer_blocks.24.ff.out_projection.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
220
- "single_transformer_blocks.24.norm1.linear.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
221
- "single_transformer_blocks.25.attn.to_k.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
222
- "single_transformer_blocks.25.attn.to_out.0.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
223
- "single_transformer_blocks.25.attn.to_q.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
224
- "single_transformer_blocks.25.attn.to_v.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
225
- "single_transformer_blocks.25.ff.linear_1.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
226
- "single_transformer_blocks.25.ff.linear_2.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
227
- "single_transformer_blocks.25.ff.out_projection.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
228
- "single_transformer_blocks.25.norm1.linear.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
229
- "single_transformer_blocks.26.attn.to_k.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
230
- "single_transformer_blocks.26.attn.to_out.0.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
231
- "single_transformer_blocks.26.attn.to_q.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
232
- "single_transformer_blocks.26.attn.to_v.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
233
- "single_transformer_blocks.26.ff.linear_1.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
234
- "single_transformer_blocks.26.ff.linear_2.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
235
- "single_transformer_blocks.26.ff.out_projection.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
236
- "single_transformer_blocks.26.norm1.linear.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
237
- "single_transformer_blocks.27.attn.to_k.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
238
- "single_transformer_blocks.27.attn.to_out.0.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
239
- "single_transformer_blocks.27.attn.to_q.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
240
- "single_transformer_blocks.27.attn.to_v.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
241
- "single_transformer_blocks.27.ff.linear_1.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
242
- "single_transformer_blocks.27.ff.linear_2.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
243
- "single_transformer_blocks.27.ff.out_projection.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
244
- "single_transformer_blocks.27.norm1.linear.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
245
- "single_transformer_blocks.28.attn.to_k.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
246
- "single_transformer_blocks.28.attn.to_out.0.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
247
- "single_transformer_blocks.28.attn.to_q.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
248
- "single_transformer_blocks.28.attn.to_v.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
249
- "single_transformer_blocks.28.ff.linear_1.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
250
- "single_transformer_blocks.28.ff.linear_2.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
251
- "single_transformer_blocks.28.ff.out_projection.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
252
- "single_transformer_blocks.28.norm1.linear.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
253
- "single_transformer_blocks.29.attn.to_k.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
254
- "single_transformer_blocks.29.attn.to_out.0.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
255
- "single_transformer_blocks.29.attn.to_q.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
256
- "single_transformer_blocks.29.attn.to_v.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
257
- "single_transformer_blocks.29.ff.linear_1.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
258
- "single_transformer_blocks.29.ff.linear_2.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
259
- "single_transformer_blocks.29.ff.out_projection.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
260
- "single_transformer_blocks.29.norm1.linear.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
261
- "single_transformer_blocks.3.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
262
- "single_transformer_blocks.3.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
263
- "single_transformer_blocks.3.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
264
- "single_transformer_blocks.3.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
265
- "single_transformer_blocks.3.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
266
- "single_transformer_blocks.3.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
267
- "single_transformer_blocks.3.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
268
- "single_transformer_blocks.3.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
269
- "single_transformer_blocks.30.attn.to_k.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
270
- "single_transformer_blocks.30.attn.to_out.0.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
271
- "single_transformer_blocks.30.attn.to_q.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
272
- "single_transformer_blocks.30.attn.to_v.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
273
- "single_transformer_blocks.30.ff.linear_1.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
274
- "single_transformer_blocks.30.ff.linear_2.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
275
- "single_transformer_blocks.30.ff.out_projection.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
276
- "single_transformer_blocks.30.norm1.linear.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
277
- "single_transformer_blocks.31.attn.to_k.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
278
- "single_transformer_blocks.31.attn.to_out.0.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
279
- "single_transformer_blocks.31.attn.to_q.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
280
- "single_transformer_blocks.31.attn.to_v.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
281
- "single_transformer_blocks.31.ff.linear_1.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
282
- "single_transformer_blocks.31.ff.linear_2.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
283
- "single_transformer_blocks.31.ff.out_projection.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
284
- "single_transformer_blocks.31.norm1.linear.weight": "diffusion_pytorch_model-00002-of-00002.safetensors",
285
- "single_transformer_blocks.4.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
286
- "single_transformer_blocks.4.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
287
- "single_transformer_blocks.4.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
288
- "single_transformer_blocks.4.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
289
- "single_transformer_blocks.4.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
290
- "single_transformer_blocks.4.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
291
- "single_transformer_blocks.4.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
292
- "single_transformer_blocks.4.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
293
- "single_transformer_blocks.5.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
294
- "single_transformer_blocks.5.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
295
- "single_transformer_blocks.5.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
296
- "single_transformer_blocks.5.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
297
- "single_transformer_blocks.5.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
298
- "single_transformer_blocks.5.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
299
- "single_transformer_blocks.5.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
300
- "single_transformer_blocks.5.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
301
- "single_transformer_blocks.6.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
302
- "single_transformer_blocks.6.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
303
- "single_transformer_blocks.6.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
304
- "single_transformer_blocks.6.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
305
- "single_transformer_blocks.6.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
306
- "single_transformer_blocks.6.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
307
- "single_transformer_blocks.6.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
308
- "single_transformer_blocks.6.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
309
- "single_transformer_blocks.7.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
310
- "single_transformer_blocks.7.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
311
- "single_transformer_blocks.7.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
312
- "single_transformer_blocks.7.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
313
- "single_transformer_blocks.7.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
314
- "single_transformer_blocks.7.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
315
- "single_transformer_blocks.7.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
316
- "single_transformer_blocks.7.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
317
- "single_transformer_blocks.8.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
318
- "single_transformer_blocks.8.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
319
- "single_transformer_blocks.8.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
320
- "single_transformer_blocks.8.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
321
- "single_transformer_blocks.8.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
322
- "single_transformer_blocks.8.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
323
- "single_transformer_blocks.8.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
324
- "single_transformer_blocks.8.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
325
- "single_transformer_blocks.9.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
326
- "single_transformer_blocks.9.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
327
- "single_transformer_blocks.9.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
328
- "single_transformer_blocks.9.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
329
- "single_transformer_blocks.9.ff.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
330
- "single_transformer_blocks.9.ff.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
331
- "single_transformer_blocks.9.ff.out_projection.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
332
- "single_transformer_blocks.9.norm1.linear.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
333
- "time_step_proj.linear_1.bias": "diffusion_pytorch_model-00001-of-00002.safetensors",
334
- "time_step_proj.linear_1.weight": "diffusion_pytorch_model-00001-of-00002.safetensors",
335
- "time_step_proj.linear_2.bias": "diffusion_pytorch_model-00001-of-00002.safetensors",
336
- "time_step_proj.linear_2.weight": "diffusion_pytorch_model-00001-of-00002.safetensors"
337
- }
338
- }
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:48383d9002dc3c600d7c277007bc835c659fd7ee0037a888c8ac153a18d065d1
3
+ size 36346
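
For readers following the weight map above: each entry pairs a tensor name with the shard file that stores it (two shards here, diffusion_pytorch_model-00001-of-00002.safetensors and -00002-of-00002.safetensors), and this commit replaces the index file itself with the git-lfs pointer shown (oid plus size). Below is a minimal, hypothetical sketch of looking a tensor up in that map once the real JSON has been materialized; the index path follows the usual diffusers naming and is an assumption, not something stated in the diff.

```python
import json

# Minimal sketch (not part of this repo): resolve which shard file stores a
# given tensor, using the "weight_map" whose entries appear in the hunk above.
# The path below follows the usual diffusers sharded-checkpoint naming and is
# an assumption; since this commit stores the index as a git-lfs pointer,
# fetch the real file first (e.g. `git lfs pull` or via huggingface_hub).
INDEX_PATH = "transformer/diffusion_pytorch_model.safetensors.index.json"

with open(INDEX_PATH) as f:
    index = json.load(f)

weight_map = index["weight_map"]  # tensor name -> shard filename
name = "single_transformer_blocks.21.attn.to_q.weight"
print(name, "->", weight_map[name])
# Per the diff above, this should print:
# ... -> diffusion_pytorch_model-00002-of-00002.safetensors
```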
 
vae/config.json CHANGED
@@ -1,37 +1,3 @@
1
- {
2
- "_class_name": "AutoencoderKL",
3
- "_diffusers_version": "0.30.0.dev0",
4
- "_name_or_path": "stabilityai/sdxl-vae",
5
- "act_fn": "silu",
6
- "block_out_channels": [
7
- 128,
8
- 256,
9
- 512,
10
- 512
11
- ],
12
- "down_block_types": [
13
- "DownEncoderBlock2D",
14
- "DownEncoderBlock2D",
15
- "DownEncoderBlock2D",
16
- "DownEncoderBlock2D"
17
- ],
18
- "force_upcast": true,
19
- "in_channels": 3,
20
- "latent_channels": 4,
21
- "latents_mean": null,
22
- "latents_std": null,
23
- "layers_per_block": 2,
24
- "norm_num_groups": 32,
25
- "out_channels": 3,
26
- "sample_size": 1024,
27
- "scaling_factor": 0.13025,
28
- "shift_factor": null,
29
- "up_block_types": [
30
- "UpDecoderBlock2D",
31
- "UpDecoderBlock2D",
32
- "UpDecoderBlock2D",
33
- "UpDecoderBlock2D"
34
- ],
35
- "use_post_quant_conv": true,
36
- "use_quant_conv": true
37
- }
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3fcd6fbd109f3333891fa360787d88fcfac0eb47d85635c01979a9755aa7fb81
3
+ size 815
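
As with the other configs in this commit, vae/config.json is swapped for a git-lfs pointer; the AutoencoderKL settings listed above (SDXL VAE, scaling_factor 0.13025) are presumably unchanged behind it, only their storage moves to LFS. A minimal usage sketch follows, with a placeholder repo id; it assumes the repo keeps the standard diffusers folder layout so the LFS-backed config resolves transparently when downloaded from the Hub.

```python
import torch
from diffusers import AutoencoderKL

# Minimal sketch, assuming a standard diffusers repo layout on the Hub; the
# repo id is a placeholder. from_pretrained (via huggingface_hub) downloads
# the real vae/config.json even though git stores it as an LFS pointer.
REPO_ID = "your-namespace/your-model"  # placeholder (assumption)

vae = AutoencoderKL.from_pretrained(
    REPO_ID,
    subfolder="vae",
    torch_dtype=torch.float16,  # matches the fp16 export added in this commit
)
print(vae.config.scaling_factor)  # 0.13025, per the config shown above
```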