Commit: updating index
app.py CHANGED
@@ -21,7 +21,9 @@ def load_model():
     model = Llama(model_path, embedding=True)
 
     st.success("Loaded NLP model from Hugging Face!")  # 👈 Show a success message
-
+    apikey = st.secrets["apikey"]
+    pc = Pinecone(api_key=apikey)
+    index = pc.Index("law")
 
     # pc = Pinecone(api_key=api_key)
     # index = pc.Index("law")
@@ -54,14 +56,12 @@ def load_model():
     # template = prompt_template
     # prompt = PromptTemplate.from_template(template)
 
-    return model, llm
+    return model, llm, index
 
 
 st.title("Please ask your question on Lithuanian rules for foreigners.")
-model,llm = load_model()
-
-pc = Pinecone(api_key=apikey)
-index = pc.Index("law")
+model,llm, index = load_model()
+
 question = st.text_input("Enter your question:")
 
 if question != "":
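Net effect of the commit: the Pinecone client and the "law" index are now created inside load_model() itself, with the API key read from Streamlit secrets, and the index is returned alongside the model and llm instead of being built at module level after the call. For orientation, here is a minimal sketch of app.py after the change; the @st.cache_resource decorator, the model_path value, and the llm placeholder are assumptions added for illustration, since the diff does not show them.

import streamlit as st
from llama_cpp import Llama
from pinecone import Pinecone


@st.cache_resource  # assumed: cache so the model and index are built once, not on every rerun
def load_model():
    model_path = "model.gguf"  # hypothetical path; the real value is not shown in the diff
    model = Llama(model_path, embedding=True)

    st.success("Loaded NLP model from Hugging Face!")

    # New in this commit: create the Pinecone client and index inside the loader,
    # reading the key stored under "apikey" in Streamlit secrets (.streamlit/secrets.toml).
    apikey = st.secrets["apikey"]
    pc = Pinecone(api_key=apikey)
    index = pc.Index("law")

    llm = None  # LLM setup is elided in the diff (only commented-out PromptTemplate code is visible)

    return model, llm, index


st.title("Please ask your question on Lithuanian rules for foreigners.")
model, llm, index = load_model()  # the index now comes from the loader
question = st.text_input("Enter your question:")

if question != "":
    pass  # query handling is outside the changed lines

Assuming load_model() really is cached, this matters because Streamlit re-executes the whole script on every interaction: the Pinecone client and index are now constructed once per process rather than on every rerun, which is the usual reason to fold them into the cached loader.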