Update Model and Generation logic
chatbot/chains.py  +5 -0
chatbot/chains.py  CHANGED
@@ -22,8 +22,13 @@ groq_llm = ChatGroq(model="llama-3.3-70b-versatile", api_key=os.environ.get("GRO
 contextualize_q_chain = contextualize_q_prompt | groq_llm | StrOutputParser()
 
 gemini_llm = ChatGoogleGenerativeAI(
+<<<<<<< HEAD
     model=os.getenv("MODEL"),
     google_api_key=os.getenv("GOOGLE_API_KEY"),
+=======
+    model="gemini-2.5-flash-preview-05-20",
+    google_api_key=os.environ.get("GOOGLE_API_KEY"),
+>>>>>>> 93689ff4f23ab7484a10e8e0885ccb5bbe212925
     temperature=0.5,
 )
 structured_llm = gemini_llm.with_structured_output(OutputFormat)
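Note that the committed file still contains the Git conflict markers (<<<<<<< HEAD, =======, >>>>>>>), so importing chatbot/chains.py will raise a SyntaxError until the conflict is resolved. The following is a minimal sketch of one possible resolution, not part of this commit: it assumes the env-driven configuration from HEAD should take precedence, keeping the hard-coded model id from the other branch only as a fallback default.

# Hypothetical resolution of the conflicted block (an assumption, not what was committed):
# read the model name from the MODEL env var, falling back to the model id
# that the incoming branch hard-coded.
import os

from langchain_google_genai import ChatGoogleGenerativeAI

gemini_llm = ChatGoogleGenerativeAI(
    model=os.getenv("MODEL", "gemini-2.5-flash-preview-05-20"),
    google_api_key=os.getenv("GOOGLE_API_KEY"),
    temperature=0.5,
)

# OutputFormat is defined elsewhere in chatbot/chains.py, as shown in the diff context.
structured_llm = gemini_llm.with_structured_output(OutputFormat)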