artificialguybr committed
Commit 2c1bcda • Parent(s): 9f69e3b
Update app.py
app.py CHANGED
@@ -9,12 +9,12 @@ import copy
 import secrets
 from pathlib import Path
 
-tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen-VL-Chat-Int4"
-config = AutoConfig.from_pretrained("Qwen/Qwen-VL-Chat-Int4", trust_remote_code=True)
+tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen-VL-Chat-Int4")
+config = AutoConfig.from_pretrained("Qwen/Qwen-VL-Chat-Int4", trust_remote_code=True, torch_dtype=torch.float16)
 #config.quantization_config["use_exllama"] = True
 config.quantization_config["disable_exllama"] = False
 config.quantization_config["exllama_config"] = {"version":2}
-model = AutoModelForCausalLM.
+model = AutoModelForCausalLM.from_config(config, trust_remote_code=True, torch_dtype=torch.float16)
 
 BOX_TAG_PATTERN = r"<box>([\s\S]*?)</box>"
 PUNCTUATION = "!?。"#$%&'()*+,-/:;<=>@[\]^_`{|}~⦅⦆「」、、〃》「」『』【】〔〕〖〗〘〙〚〛〜〝〞〟〰〾〿–—‘’‛“”„‟…‧﹏."
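For reference, a minimal self-contained sketch of the loading sequence the updated app.py sets up. The imports, the trust_remote_code flag on the tokenizer, the device_map, and the from_pretrained call at the end are assumptions added here for completeness, not part of the commit; the commit itself calls AutoModelForCausalLM.from_config, which only instantiates the architecture and does not load checkpoint weights.

# Sketch of the loading pattern shown in the diff; names and extra
# arguments beyond what the commit touches are assumptions.
import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

MODEL_ID = "Qwen/Qwen-VL-Chat-Int4"

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, trust_remote_code=True)

config = AutoConfig.from_pretrained(MODEL_ID, trust_remote_code=True)
# Route the GPTQ Int4 weights through the ExLlama v2 kernels, as in the diff.
config.quantization_config["disable_exllama"] = False
config.quantization_config["exllama_config"] = {"version": 2}

# from_config only builds the architecture; from_pretrained (an assumption
# here, not what the commit uses) also downloads and loads the quantized weights.
model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID,
    config=config,
    trust_remote_code=True,
    torch_dtype=torch.float16,
    device_map="auto",
).eval()

Depending on the installed transformers version, the same kernel selection can usually also be expressed by passing GPTQConfig(bits=4, exllama_config={"version": 2}) as quantization_config to from_pretrained instead of mutating config.quantization_config in place.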