Commit 8ef6229 (verified), committed by owenzhangzhengzhong
1 Parent(s): 9d25231

Upload model files

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ unet/fbaeb7b1-d931-11ef-9035-f0a65413afc2 filter=lfs diff=lfs merge=lfs -text
feature_extractor/preprocessor_config.json ADDED
@@ -0,0 +1,27 @@
+ {
+   "crop_size": {
+     "height": 224,
+     "width": 224
+   },
+   "do_center_crop": true,
+   "do_convert_rgb": true,
+   "do_normalize": true,
+   "do_rescale": true,
+   "do_resize": true,
+   "image_mean": [
+     0.48145466,
+     0.4578275,
+     0.40821073
+   ],
+   "image_processor_type": "CLIPImageProcessor",
+   "image_std": [
+     0.26862954,
+     0.26130258,
+     0.27577711
+   ],
+   "resample": 3,
+   "rescale_factor": 0.00392156862745098,
+   "size": {
+     "shortest_edge": 224
+   }
+ }
model_index.json ADDED
@@ -0,0 +1,37 @@
+ {
+   "_class_name": "OnnxStableDiffusionPipeline",
+   "_diffusers_version": "0.27.2",
+   "feature_extractor": [
+     "transformers",
+     "CLIPImageProcessor"
+   ],
+   "requires_safety_checker": true,
+   "safety_checker": [
+     "diffusers",
+     "OnnxRuntimeModel"
+   ],
+   "scheduler": [
+     "diffusers",
+     "PNDMScheduler"
+   ],
+   "text_encoder": [
+     "diffusers",
+     "OnnxRuntimeModel"
+   ],
+   "tokenizer": [
+     "transformers",
+     "CLIPTokenizer"
+   ],
+   "unet": [
+     "diffusers",
+     "OnnxRuntimeModel"
+   ],
+   "vae_decoder": [
+     "diffusers",
+     "OnnxRuntimeModel"
+   ],
+   "vae_encoder": [
+     "diffusers",
+     "OnnxRuntimeModel"
+   ]
+ }
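Note: model_index.json is what diffusers reads to pick a class for each pipeline component (ONNX runtime wrappers for the UNet, VAE, text encoder, and safety checker; CLIP tokenizer and image processor from transformers; a PNDM scheduler). A minimal loading sketch follows; the local path and execution provider are assumptions, not something this commit specifies.

# Minimal sketch, assuming a local checkout at "./sd15-onnx" and the
# diffusers + onnxruntime packages installed.
from diffusers import OnnxStableDiffusionPipeline

pipe = OnnxStableDiffusionPipeline.from_pretrained(
    "./sd15-onnx",
    provider="CPUExecutionProvider",  # assumption; choose the provider for your hardware
)

# Each entry in model_index.json shows up as a loaded component here.
for name, component in pipe.components.items():
    print(name, type(component).__name__)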
safety_checker/model.onnx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a36e94cb4b46e9ed1e791c59ce831d2773e6f348db5722a4c538634b4adf7728
+ size 608396240
scheduler/scheduler_config.json ADDED
@@ -0,0 +1,15 @@
+ {
+   "_class_name": "PNDMScheduler",
+   "_diffusers_version": "0.27.2",
+   "beta_end": 0.012,
+   "beta_schedule": "scaled_linear",
+   "beta_start": 0.00085,
+   "clip_sample": false,
+   "num_train_timesteps": 1000,
+   "prediction_type": "epsilon",
+   "set_alpha_to_one": false,
+   "skip_prk_steps": true,
+   "steps_offset": 1,
+   "timestep_spacing": "leading",
+   "trained_betas": null
+ }
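Note: this config pins the noise schedule (scaled_linear betas from 0.00085 to 0.012 over 1000 training timesteps, "leading" timestep spacing). A short sketch of instantiating the scheduler standalone and inspecting its inference timestep grid; the local path is a placeholder.

# Hedged sketch: build the PNDM scheduler from this config on its own.
# "./sd15-onnx" is a placeholder path to a local checkout.
from diffusers import PNDMScheduler

scheduler = PNDMScheduler.from_pretrained("./sd15-onnx", subfolder="scheduler")

scheduler.set_timesteps(num_inference_steps=30)
print(scheduler.timesteps[:5])  # first few denoising timesteps ("leading" spacing)
print(scheduler.config.beta_schedule, scheduler.config.num_train_timesteps)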
text_encoder/model.onnx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2b2064aafacab7abdae3f61b32a213d8c3c5a62a786b0c90552a3c61c80d4141
+ size 246323634
tokenizer/merges.txt ADDED
The diff for this file is too large to render.
 
tokenizer/special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "bos_token": {
+     "content": "<|startoftext|>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "<|endoftext|>",
+   "unk_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer/tokenizer_config.json ADDED
@@ -0,0 +1,30 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "49406": {
+       "content": "<|startoftext|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "49407": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<|startoftext|>",
+   "clean_up_tokenization_spaces": true,
+   "do_lower_case": true,
+   "eos_token": "<|endoftext|>",
+   "errors": "replace",
+   "model_max_length": 77,
+   "pad_token": "<|endoftext|>",
+   "tokenizer_class": "CLIPTokenizer",
+   "unk_token": "<|endoftext|>"
+ }
tokenizer/vocab.json ADDED
The diff for this file is too large to render.
 
unet/fbaeb7b1-d931-11ef-9035-f0a65413afc2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:75b8a725f8ab6569fab6689e78ddc110285dfea1eed58db0847944f6ce298970
+ size 1718976000
unet/model.onnx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:db187d050352ff506f8bcdcfc34e1e793ac17596b94492991bcf9efa0e7ec401
+ size 595435
unet/nodes.txt ADDED
@@ -0,0 +1,128 @@
+ /down_blocks.0/attentions.0/transformer_blocks.0/attn1/to_out.0/Add
+ /down_blocks.0/attentions.0/transformer_blocks.0/attn1/to_out.0/MatMul
+ /down_blocks.0/attentions.0/transformer_blocks.0/attn2/to_out.0/Add
+ /down_blocks.0/attentions.0/transformer_blocks.0/attn2/to_out.0/MatMul
+ /down_blocks.0/attentions.0/transformer_blocks.0/ff/net.0/proj/Add
+ /down_blocks.0/attentions.0/transformer_blocks.0/ff/net.0/proj/MatMul
+ /down_blocks.0/attentions.0/transformer_blocks.0/ff/net.2/Add
+ /down_blocks.0/attentions.0/transformer_blocks.0/ff/net.2/MatMul
+ /down_blocks.0/attentions.1/transformer_blocks.0/attn1/to_out.0/Add
+ /down_blocks.0/attentions.1/transformer_blocks.0/attn1/to_out.0/MatMul
+ /down_blocks.0/attentions.1/transformer_blocks.0/attn2/to_out.0/Add
+ /down_blocks.0/attentions.1/transformer_blocks.0/attn2/to_out.0/MatMul
+ /down_blocks.0/attentions.1/transformer_blocks.0/ff/net.0/proj/Add
+ /down_blocks.0/attentions.1/transformer_blocks.0/ff/net.0/proj/MatMul
+ /down_blocks.0/attentions.1/transformer_blocks.0/ff/net.2/Add
+ /down_blocks.0/attentions.1/transformer_blocks.0/ff/net.2/MatMul
+ /down_blocks.1/attentions.0/transformer_blocks.0/attn1/to_out.0/Add
+ /down_blocks.1/attentions.0/transformer_blocks.0/attn1/to_out.0/MatMul
+ /down_blocks.1/attentions.0/transformer_blocks.0/attn2/to_out.0/Add
+ /down_blocks.1/attentions.0/transformer_blocks.0/attn2/to_out.0/MatMul
+ /down_blocks.1/attentions.0/transformer_blocks.0/ff/net.0/proj/Add
+ /down_blocks.1/attentions.0/transformer_blocks.0/ff/net.0/proj/MatMul
+ /down_blocks.1/attentions.0/transformer_blocks.0/ff/net.2/Add
+ /down_blocks.1/attentions.0/transformer_blocks.0/ff/net.2/MatMul
+ /down_blocks.1/attentions.1/transformer_blocks.0/attn1/to_out.0/Add
+ /down_blocks.1/attentions.1/transformer_blocks.0/attn1/to_out.0/MatMul
+ /down_blocks.1/attentions.1/transformer_blocks.0/attn2/to_out.0/Add
+ /down_blocks.1/attentions.1/transformer_blocks.0/attn2/to_out.0/MatMul
+ /down_blocks.1/attentions.1/transformer_blocks.0/ff/net.0/proj/Add
+ /down_blocks.1/attentions.1/transformer_blocks.0/ff/net.0/proj/MatMul
+ /down_blocks.1/attentions.1/transformer_blocks.0/ff/net.2/Add
+ /down_blocks.1/attentions.1/transformer_blocks.0/ff/net.2/MatMul
+ /down_blocks.2/attentions.0/transformer_blocks.0/attn1/to_out.0/Add
+ /down_blocks.2/attentions.0/transformer_blocks.0/attn1/to_out.0/MatMul
+ /down_blocks.2/attentions.0/transformer_blocks.0/attn2/to_out.0/Add
+ /down_blocks.2/attentions.0/transformer_blocks.0/attn2/to_out.0/MatMul
+ /down_blocks.2/attentions.0/transformer_blocks.0/ff/net.0/proj/Add
+ /down_blocks.2/attentions.0/transformer_blocks.0/ff/net.0/proj/MatMul
+ /down_blocks.2/attentions.0/transformer_blocks.0/ff/net.2/Add
+ /down_blocks.2/attentions.0/transformer_blocks.0/ff/net.2/MatMul
+ /down_blocks.2/attentions.1/transformer_blocks.0/attn1/to_out.0/Add
+ /down_blocks.2/attentions.1/transformer_blocks.0/attn1/to_out.0/MatMul
+ /down_blocks.2/attentions.1/transformer_blocks.0/attn2/to_out.0/Add
+ /down_blocks.2/attentions.1/transformer_blocks.0/attn2/to_out.0/MatMul
+ /down_blocks.2/attentions.1/transformer_blocks.0/ff/net.0/proj/Add
+ /down_blocks.2/attentions.1/transformer_blocks.0/ff/net.0/proj/MatMul
+ /down_blocks.2/attentions.1/transformer_blocks.0/ff/net.2/Add
+ /down_blocks.2/attentions.1/transformer_blocks.0/ff/net.2/MatMul
+ /mid_block/attentions.0/transformer_blocks.0/attn1/to_out.0/Add
+ /mid_block/attentions.0/transformer_blocks.0/attn1/to_out.0/MatMul
+ /mid_block/attentions.0/transformer_blocks.0/attn2/to_out.0/Add
+ /mid_block/attentions.0/transformer_blocks.0/attn2/to_out.0/MatMul
+ /mid_block/attentions.0/transformer_blocks.0/ff/net.0/proj/Add
+ /mid_block/attentions.0/transformer_blocks.0/ff/net.0/proj/MatMul
+ /mid_block/attentions.0/transformer_blocks.0/ff/net.2/Add
+ /mid_block/attentions.0/transformer_blocks.0/ff/net.2/MatMul
+ /up_blocks.1/attentions.0/transformer_blocks.0/attn1/to_out.0/Add
+ /up_blocks.1/attentions.0/transformer_blocks.0/attn1/to_out.0/MatMul
+ /up_blocks.1/attentions.0/transformer_blocks.0/attn2/to_out.0/Add
+ /up_blocks.1/attentions.0/transformer_blocks.0/attn2/to_out.0/MatMul
+ /up_blocks.1/attentions.0/transformer_blocks.0/ff/net.0/proj/Add
+ /up_blocks.1/attentions.0/transformer_blocks.0/ff/net.0/proj/MatMul
+ /up_blocks.1/attentions.0/transformer_blocks.0/ff/net.2/Add
+ /up_blocks.1/attentions.0/transformer_blocks.0/ff/net.2/MatMul
+ /up_blocks.1/attentions.1/transformer_blocks.0/attn1/to_out.0/Add
+ /up_blocks.1/attentions.1/transformer_blocks.0/attn1/to_out.0/MatMul
+ /up_blocks.1/attentions.1/transformer_blocks.0/attn2/to_out.0/Add
+ /up_blocks.1/attentions.1/transformer_blocks.0/attn2/to_out.0/MatMul
+ /up_blocks.1/attentions.1/transformer_blocks.0/ff/net.0/proj/Add
+ /up_blocks.1/attentions.1/transformer_blocks.0/ff/net.0/proj/MatMul
+ /up_blocks.1/attentions.1/transformer_blocks.0/ff/net.2/Add
+ /up_blocks.1/attentions.1/transformer_blocks.0/ff/net.2/MatMul
+ /up_blocks.1/attentions.2/transformer_blocks.0/attn1/to_out.0/Add
+ /up_blocks.1/attentions.2/transformer_blocks.0/attn1/to_out.0/MatMul
+ /up_blocks.1/attentions.2/transformer_blocks.0/attn2/to_out.0/Add
+ /up_blocks.1/attentions.2/transformer_blocks.0/attn2/to_out.0/MatMul
+ /up_blocks.1/attentions.2/transformer_blocks.0/ff/net.0/proj/Add
+ /up_blocks.1/attentions.2/transformer_blocks.0/ff/net.0/proj/MatMul
+ /up_blocks.1/attentions.2/transformer_blocks.0/ff/net.2/Add
+ /up_blocks.1/attentions.2/transformer_blocks.0/ff/net.2/MatMul
+ /up_blocks.2/attentions.0/transformer_blocks.0/attn1/to_out.0/Add
+ /up_blocks.2/attentions.0/transformer_blocks.0/attn1/to_out.0/MatMul
+ /up_blocks.2/attentions.0/transformer_blocks.0/attn2/to_out.0/Add
+ /up_blocks.2/attentions.0/transformer_blocks.0/attn2/to_out.0/MatMul
+ /up_blocks.2/attentions.0/transformer_blocks.0/ff/net.0/proj/Add
+ /up_blocks.2/attentions.0/transformer_blocks.0/ff/net.0/proj/MatMul
+ /up_blocks.2/attentions.0/transformer_blocks.0/ff/net.2/Add
+ /up_blocks.2/attentions.0/transformer_blocks.0/ff/net.2/MatMul
+ /up_blocks.2/attentions.1/transformer_blocks.0/attn1/to_out.0/Add
+ /up_blocks.2/attentions.1/transformer_blocks.0/attn1/to_out.0/MatMul
+ /up_blocks.2/attentions.1/transformer_blocks.0/attn2/to_out.0/Add
+ /up_blocks.2/attentions.1/transformer_blocks.0/attn2/to_out.0/MatMul
+ /up_blocks.2/attentions.1/transformer_blocks.0/ff/net.0/proj/Add
+ /up_blocks.2/attentions.1/transformer_blocks.0/ff/net.0/proj/MatMul
+ /up_blocks.2/attentions.1/transformer_blocks.0/ff/net.2/Add
+ /up_blocks.2/attentions.1/transformer_blocks.0/ff/net.2/MatMul
+ /up_blocks.2/attentions.2/transformer_blocks.0/attn1/to_out.0/Add
+ /up_blocks.2/attentions.2/transformer_blocks.0/attn1/to_out.0/MatMul
+ /up_blocks.2/attentions.2/transformer_blocks.0/attn2/to_out.0/Add
+ /up_blocks.2/attentions.2/transformer_blocks.0/attn2/to_out.0/MatMul
+ /up_blocks.2/attentions.2/transformer_blocks.0/ff/net.0/proj/Add
+ /up_blocks.2/attentions.2/transformer_blocks.0/ff/net.0/proj/MatMul
+ /up_blocks.2/attentions.2/transformer_blocks.0/ff/net.2/Add
+ /up_blocks.2/attentions.2/transformer_blocks.0/ff/net.2/MatMul
+ /up_blocks.3/attentions.0/transformer_blocks.0/attn1/to_out.0/Add
+ /up_blocks.3/attentions.0/transformer_blocks.0/attn1/to_out.0/MatMul
+ /up_blocks.3/attentions.0/transformer_blocks.0/attn2/to_out.0/Add
+ /up_blocks.3/attentions.0/transformer_blocks.0/attn2/to_out.0/MatMul
+ /up_blocks.3/attentions.0/transformer_blocks.0/ff/net.0/proj/Add
+ /up_blocks.3/attentions.0/transformer_blocks.0/ff/net.0/proj/MatMul
+ /up_blocks.3/attentions.0/transformer_blocks.0/ff/net.2/Add
+ /up_blocks.3/attentions.0/transformer_blocks.0/ff/net.2/MatMul
+ /up_blocks.3/attentions.1/transformer_blocks.0/attn1/to_out.0/Add
+ /up_blocks.3/attentions.1/transformer_blocks.0/attn1/to_out.0/MatMul
+ /up_blocks.3/attentions.1/transformer_blocks.0/attn2/to_out.0/Add
+ /up_blocks.3/attentions.1/transformer_blocks.0/attn2/to_out.0/MatMul
+ /up_blocks.3/attentions.1/transformer_blocks.0/ff/net.0/proj/Add
+ /up_blocks.3/attentions.1/transformer_blocks.0/ff/net.0/proj/MatMul
+ /up_blocks.3/attentions.1/transformer_blocks.0/ff/net.2/Add
+ /up_blocks.3/attentions.1/transformer_blocks.0/ff/net.2/MatMul
+ /up_blocks.3/attentions.2/transformer_blocks.0/attn1/to_out.0/Add
+ /up_blocks.3/attentions.2/transformer_blocks.0/attn1/to_out.0/MatMul
+ /up_blocks.3/attentions.2/transformer_blocks.0/attn2/to_out.0/Add
+ /up_blocks.3/attentions.2/transformer_blocks.0/attn2/to_out.0/MatMul
+ /up_blocks.3/attentions.2/transformer_blocks.0/ff/net.0/proj/Add
+ /up_blocks.3/attentions.2/transformer_blocks.0/ff/net.0/proj/MatMul
+ /up_blocks.3/attentions.2/transformer_blocks.0/ff/net.2/Add
+ /up_blocks.3/attentions.2/transformer_blocks.0/ff/net.2/MatMul
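Note: nodes.txt lists the attention output-projection and feed-forward MatMul/Add nodes in the UNet graph; the commit does not say how it was produced or how it is consumed. A hedged sketch of regenerating a similar list with the onnx package follows; the name filter is an assumption inferred from the entries above.

# Hedged sketch: rebuild a node list like unet/nodes.txt from the UNet graph.
# The filter (attention to_out and ff projection Add/MatMul nodes) is an
# assumption inferred from the names in nodes.txt, not documented here.
import onnx

# unet/model.onnx keeps its weights in the external blob unet/fbaeb7b1-..., so
# skip loading tensor data when only the graph structure is needed.
model = onnx.load("unet/model.onnx", load_external_data=False)

names = sorted(
    node.name
    for node in model.graph.node
    if node.op_type in ("Add", "MatMul")
    and (
        ("/attn" in node.name and "/to_out." in node.name)
        or "/ff/net." in node.name
    )
)
print(len(names))
for name in names:
    print(name)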
vae_decoder/model.onnx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:743d234cab741b5adfb6dd9c435af1d0261927eea412281e23484def847e95c5
+ size 99039531
vae_encoder/model.onnx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1a21051b05342daf05d7f58c9c98d60262cc177c38587e8bffc541d40fcc63ef
+ size 68392099
verinfo.txt ADDED
@@ -0,0 +1,13 @@
+ Model: Stable Diffusion 1.5
+ Base Model: https://huggingface.co/stable-diffusion-v1-5/stable-diffusion-v1-5
+ license: unknown
+
+ Version: 1.0
+ Optimizations: MHA, GEMM RS, FP32 Cast
+ last updated: 01/23/2025
+
+ Inference Parameters
+ Scheduler: DDIM
+ Width x Height: 512x512
+ Inference Steps: 30
+ Guidance Scale: 7.5
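Note: verinfo.txt records the intended inference settings (DDIM, 512x512, 30 steps, guidance 7.5) even though scheduler/scheduler_config.json ships a PNDM scheduler. A hedged end-to-end sketch using those settings follows; the local path, execution provider, and prompt are placeholders.

# Hedged end-to-end sketch using the settings listed in verinfo.txt.
# "./sd15-onnx", the execution provider, and the prompt are placeholders.
from diffusers import DDIMScheduler, OnnxStableDiffusionPipeline

pipe = OnnxStableDiffusionPipeline.from_pretrained(
    "./sd15-onnx",
    provider="CPUExecutionProvider",  # assumption; pick the provider for your hardware
)

# verinfo.txt says DDIM while the repo ships a PNDM config, so swap the
# scheduler but reuse its noise-schedule settings.
pipe.scheduler = DDIMScheduler.from_config(pipe.scheduler.config)

image = pipe(
    "a photo of an astronaut riding a horse",  # placeholder prompt
    width=512,
    height=512,
    num_inference_steps=30,
    guidance_scale=7.5,
).images[0]
image.save("out.png")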