Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
@@ -23,4 +23,23 @@ import tempfile
|
|
23 |
# API Key
|
24 |
os.environ["GROQ_API_KEY"] = st.secrets.get("GROQ_API_KEY", "")
|
25 |
|
26 |
-
# Initialize LLM
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# API Key
# Expose the Groq API key from Streamlit secrets to any library that reads it
# from the environment. Falls back to "" when the secret is not configured.
os.environ.update({"GROQ_API_KEY": st.secrets.get("GROQ_API_KEY", "")})
# Initialize LLM
class LLMCallbackHandler(BaseCallbackHandler):
    """LangChain callback that appends LLM prompt/response events to a JSONL log.

    Each event is written as one JSON object per line containing an ``event``
    tag, the text payload, and an ISO-8601 timestamp.
    """

    def __init__(self, log_path: Path):
        # Destination JSONL file. It is (re)opened in append mode for every
        # event rather than held open, so log lines survive script reruns.
        self.log_path = log_path

    def _log_event(self, event: str, text: str) -> None:
        """Append a single {event, text, timestamp} record to the log file."""
        record = {
            "event": event,
            "text": text,
            "timestamp": datetime.now().isoformat(),
        }
        with self.log_path.open("a", encoding="utf-8") as file:
            file.write(json.dumps(record) + "\n")

    def on_llm_start(self, serialized, prompts, **kwargs):
        # NOTE(review): only the first prompt is logged; confirm callers never
        # send batched prompts before relying on this log for auditing.
        self._log_event("llm_start", prompts[0])

    def on_llm_end(self, response: LLMResult, **kwargs):
        # Last generation of the last batch entry — the model's final answer.
        generation = response.generations[-1][-1].message.content
        self._log_event("llm_end", generation)
# Shared Groq chat model: deterministic output (temperature=0), capped at 200
# tokens, with every prompt/response appended to prompts.jsonl by the
# LLMCallbackHandler defined above.
llm = ChatGroq(
    model_name="groq/llama-3.3-70b-versatile",
    temperature=0,
    max_tokens=200,
    callbacks=[LLMCallbackHandler(Path("prompts.jsonl"))],
)