abrakjamson committed
Commit 95b2105
Parent(s): a4c7dff
getting mistral model
app.py CHANGED
@@ -3,11 +3,15 @@ import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
 from repeng import ControlVector, ControlModel
 import gradio as gr
+from huggingface_hub import login
 
 # Initialize model and tokenizer
-mistral_path = "
+mistral_path = "mistralai/Mistral-7B-Instruct-v0.3" # Update this path as needed
 
-
+access_token = os.getenv("mistralaccesstoken")
+login(access_token)
+
+tokenizer = AutoTokenizer.from_pretrained(mistral_path)
 #tokenizer = AutoTokenizer.from_pretrained("E:/language_models/models/mistral")
 tokenizer.pad_token_id = 0
 
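For context, below is a minimal sketch of what the model-loading section of app.py could look like after this commit. It is an illustration, not the file itself: it assumes `import os` appears above the hunk (the diff only shows lines 3 onward, and the added code calls `os.getenv`), and that the Hub token is stored as a Space secret named `mistralaccesstoken`, the name read in the added line.

import os

from huggingface_hub import login
from transformers import AutoTokenizer

# Mistral-7B-Instruct-v0.3 is a gated repository, so authenticate with the
# Hugging Face Hub before downloading. "mistralaccesstoken" is the Space
# secret name this commit reads; login() will fail if it is unset or invalid.
mistral_path = "mistralai/Mistral-7B-Instruct-v0.3"
access_token = os.getenv("mistralaccesstoken")
login(access_token)

# Load the tokenizer from the Hub instead of the old local path.
tokenizer = AutoTokenizer.from_pretrained(mistral_path)
tokenizer.pad_token_id = 0

The commented-out line referencing "E:/language_models/models/mistral" is the previous local-path load kept for reference; the switch to a Hub repo ID is what requires the login step above.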