Spaces:
Sleeping
Sleeping
safihaider
committed on
Commit
•
3f9ab1d
1
Parent(s):
f60145e
model change
Browse files
- ChatController.py +1 -1
- ChatService.py +2 -2
ChatController.py
CHANGED
@@ -9,7 +9,7 @@ app = Flask(__name__)
|
|
9 |
CORS(app)
|
10 |
|
11 |
chatService = ChatService()
|
12 |
-
chatService.load_model("collinear-ai/
|
13 |
|
14 |
@app.route("/chat", methods=['POST'])
|
15 |
@cross_origin(origin='*')
|
|
|
9 |
CORS(app)
|
10 |
|
11 |
chatService = ChatService()
|
12 |
+
chatService.load_model("collinear-ai/LLaMA-2-7B-chat-csr")
|
13 |
|
14 |
@app.route("/chat", methods=['POST'])
|
15 |
@cross_origin(origin='*')
|
ChatService.py
CHANGED
@@ -1,7 +1,7 @@
|
|
1 |
import os
|
2 |
import torch
|
3 |
import transformers
|
4 |
-
|
5 |
|
6 |
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
|
7 |
|
@@ -19,7 +19,7 @@ class ChatService:
|
|
19 |
gpu_count = torch.cuda.device_count()
|
20 |
print('gpu_count', gpu_count)
|
21 |
|
22 |
-
|
23 |
|
24 |
tokenizer = transformers.AutoTokenizer.from_pretrained(model_name)
|
25 |
pipeline = transformers.pipeline(
|
|
|
1 |
import os
|
2 |
import torch
|
3 |
import transformers
|
4 |
+
from huggingface_hub import login
|
5 |
|
6 |
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
|
7 |
|
|
|
19 |
gpu_count = torch.cuda.device_count()
|
20 |
print('gpu_count', gpu_count)
|
21 |
|
22 |
+
login("hf_********************************")  # SECURITY: a real Hugging Face user access token was hardcoded here and committed. It is permanently exposed in the repository history — revoke it immediately in the Hugging Face account settings and pass the replacement via an environment variable (e.g. login(os.environ["HF_TOKEN"])) instead of a literal.
|
23 |
|
24 |
tokenizer = transformers.AutoTokenizer.from_pretrained(model_name)
|
25 |
pipeline = transformers.pipeline(
|