Update app.py
app.py
CHANGED
@@ -18,10 +18,10 @@ from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
 from langchain.llms.huggingface_pipeline import HuggingFacePipeline
 from huggingface_hub import login
 
-hf_token = os.environ.get("HF_TOKEN")
-print("-- hftoken1,",hf_token)
+# hf_token = os.environ.get("HF_TOKEN")
+# print("-- hftoken1,",hf_token)
 # Log in with your token (optional if already logged in via CLI)
-login(token=hf_token)
+# login(token=hf_token)
 
 # Load the model and tokenizer
 model_name = "meta-llama/Llama-3.2-1B-Instruct"
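For context, a minimal sketch of how the imports around this hunk typically fit together in an app.py like this one. This is not the author's exact file: the if-guard, max_new_tokens value, and variable names text_gen/llm are illustrative assumptions. Commenting out the explicit login() can still work on a Space because huggingface_hub also picks up the HF_TOKEN environment variable (e.g. a Space secret) on its own.

import os

from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
from langchain.llms.huggingface_pipeline import HuggingFacePipeline
from huggingface_hub import login

# Optional explicit login; the commit above comments this out, e.g. when the
# Space is already authenticated via the HF_TOKEN secret in its environment.
hf_token = os.environ.get("HF_TOKEN")
if hf_token:
    login(token=hf_token)

# Load the model and tokenizer (gated repo: the account behind the token
# needs access to the Llama 3.2 weights).
model_name = "meta-llama/Llama-3.2-1B-Instruct"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Wrap a transformers text-generation pipeline so LangChain can drive it.
# max_new_tokens=256 is an illustrative value, not taken from the diff.
text_gen = pipeline("text-generation", model=model, tokenizer=tokenizer, max_new_tokens=256)
llm = HuggingFacePipeline(pipeline=text_gen)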