Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,4 +1,5 @@
|
|
1 |
import streamlit as st
|
|
|
2 |
import numpy as np
|
3 |
import re
|
4 |
import tempfile
|
@@ -17,8 +18,9 @@ from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
|
|
17 |
from langchain.llms.huggingface_pipeline import HuggingFacePipeline
|
18 |
from huggingface_hub import login
|
19 |
|
|
|
20 |
# Log in with your token (optional if already logged in via CLI)
|
21 |
-
login(token=
|
22 |
|
23 |
# Load the model and tokenizer
|
24 |
model_name = "meta-llama/Llama-3.2-1B-Instruct"
|
|
|
1 |
import streamlit as st
|
2 |
+
import os
|
3 |
import numpy as np
|
4 |
import re
|
5 |
import tempfile
|
|
|
18 |
from langchain.llms.huggingface_pipeline import HuggingFacePipeline
|
19 |
from huggingface_hub import login
|
20 |
|
21 |
+
hf_token = os.getenv("HF_API_TOKEN")
|
22 |
# Log in with your token (optional if already logged in via CLI)
|
23 |
+
login(token=hf_token)
|
24 |
|
25 |
# Load the model and tokenizer
|
26 |
model_name = "meta-llama/Llama-3.2-1B-Instruct"
|