Update app.py
app.py CHANGED
@@ -4,10 +4,14 @@
 import os
 import uuid
 import json
+
 import gradio as gr
+
 from openai import OpenAI
+
 from langchain_community.embeddings.sentence_transformer import SentenceTransformerEmbeddings
 from langchain_community.vectorstores import Chroma
+
 from huggingface_hub import CommitScheduler
 from pathlib import Path
 
@@ -15,8 +19,8 @@ from pathlib import Path
 anyscale_api_key = userdata.get('anyscale_api_key') # Ensure to set this environment variable
 
 client = OpenAI(
-    base_url="https://api.endpoints.anyscale.com/v1",
-    api_key=anyscale_api_key
+    #base_url="https://api.endpoints.anyscale.com/v1",
+    api_key=os.environ["anyscale_api_key"]
 )
 
 # Define the embedding model and the vectorstore
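Note on the key lookup: the unchanged context line above still calls userdata.get(...), which belongs to Colab's google.colab.userdata helper and is not available on a Space, so unless userdata is defined in the unshown lines 1-3 that assignment will not work outside Colab. A minimal sketch of reading the secret purely from the environment (the secret name anyscale_api_key comes from this diff; the explicit missing-key check is an addition, not part of the commit):

import os

from openai import OpenAI

# Assumption: the key is stored as a Space secret named "anyscale_api_key";
# Spaces expose secrets to the running app as environment variables.
anyscale_api_key = os.getenv("anyscale_api_key")
if anyscale_api_key is None:
    raise RuntimeError("Missing 'anyscale_api_key' secret / environment variable.")

client = OpenAI(
    # base_url="https://api.endpoints.anyscale.com/v1",  # left commented out, as in this commit
    api_key=anyscale_api_key,
)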
@@ -40,6 +44,14 @@ retriever = vectorstore_persisted.as_retriever(
 log_file = Path("logs/") / f"data_{uuid.uuid4()}.json"
 log_folder = log_file.parent
 
+scheduler = CommitScheduler(
+    repo_id="finsight-qna",
+    repo_type="dataset",
+    folder_path=log_folder,
+    path_in_repo="data",
+    every=2
+)
+
 # Define the Q&A system message
 qna_system_message = """
 You are an assistant to a financial technology firm who answers user queries on 10-K reports from various industry players which contain detailed information about financial performance, risk factors, market trends, and strategic initiatives.
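CommitScheduler runs a background job that regularly pushes the contents of folder_path to a Hub repo, so every log file written under logs/ ends up in the dataset under data/. A minimal sketch of the same setup, with two assumptions flagged: the repo id is normally namespaced (your-username/finsight-qna is a placeholder; only the bare name appears in this commit) and the repo is kept private because it stores raw user queries:

from pathlib import Path

from huggingface_hub import CommitScheduler

log_folder = Path("logs/")

scheduler = CommitScheduler(
    repo_id="your-username/finsight-qna",  # placeholder namespace; adjust to the real dataset repo
    repo_type="dataset",
    folder_path=log_folder,                # local folder that is synced to the Hub
    path_in_repo="data",                   # files land under data/ in the dataset repo
    every=2,                               # commit interval in minutes
    private=True,                          # assumption: keep user logs out of public view
)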
@@ -94,14 +106,14 @@ def predict(user_input, company):
     print(prediction)
 
     # Log both the inputs and outputs to a local log file
-
-
-
-
-
-
-
-
+    with scheduler.lock:
+        with log_file.open("a") as f:
+            f.write(json.dumps({
+                'user_input': user_input,
+                'retrieved_context': context_for_query,
+                'model_response': prediction
+            }))
+            f.write("\n")
 
     return prediction
 
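The with scheduler.lock block matters because the scheduler commits from a background thread; holding the lock while appending keeps it from uploading a half-written line. A sketch of the same append as a standalone helper (log_interaction is a hypothetical name and not part of this commit; each record is one JSON object per line):

import json
from pathlib import Path

from huggingface_hub import CommitScheduler


def log_interaction(scheduler: CommitScheduler, log_file: Path,
                    user_input: str, context_for_query: str, prediction: str) -> None:
    # Take the scheduler's lock so the background commit never sees a partial write.
    with scheduler.lock:
        with log_file.open("a") as f:
            f.write(json.dumps({
                "user_input": user_input,
                "retrieved_context": context_for_query,
                "model_response": prediction,
            }))
            f.write("\n")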