added reset feature
- app.py +3 -0
- app_modules/llm_inference.py +3 -0
app.py
CHANGED
@@ -67,6 +67,9 @@ def predict(message, history):
         item = (element[0] or "", element[1] or "")
         chat_history.append(item)
 
+    if not chat_history:
+        qa_chain.reset()
+
     q = Queue()
     result = Queue()
     t = Thread(target=task, args=(message, chat_history, q, result))
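In context, the new guard runs near the top of the Gradio `predict` handler: an empty incoming `history` (e.g. after the user clears the chat) now discards the cached chain before the worker thread is started. A minimal, self-contained sketch of that flow, where `_FakeChain` and the `task` body are placeholder stand-ins for the real objects defined elsewhere in `app.py`:

```python
from queue import Queue
from threading import Thread


class _FakeChain:
    """Placeholder for the real qa_chain (an LLMInference subclass in app.py)."""

    def reset(self):
        print("chain reset")


def task(message, chat_history, q, result):
    """Placeholder worker; the real task runs the chain and streams tokens."""
    result.put(f"echo: {message}")


qa_chain = _FakeChain()


def predict(message, history):
    # Rebuild chat_history as (user, assistant) tuples, replacing None with "".
    chat_history = []
    for element in history:
        item = (element[0] or "", element[1] or "")
        chat_history.append(item)

    # New in this commit: an empty history means a fresh conversation,
    # so drop the cached chain before starting the worker thread.
    if not chat_history:
        qa_chain.reset()

    q = Queue()
    result = Queue()
    t = Thread(target=task, args=(message, chat_history, q, result))
    t.start()
    t.join()
    return result.get()


print(predict("Hello", []))  # empty history triggers qa_chain.reset()
```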
app_modules/llm_inference.py
CHANGED
@@ -28,6 +28,9 @@ class LLMInference(metaclass=abc.ABCMeta):
 
         return self.chain
 
+    def reset(self) -> None:
+        self.chain = None
+
     def run_chain(self, chain, inputs, callbacks: Optional[List] = []):
         result = chain.invoke(inputs, {"callbacks": callbacks})
         if "text" in result:
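The effect of `reset()` is simply to drop the cached chain so the next request builds a fresh one. A minimal sketch of that caching pattern, assuming a lazy `get_chain`/`create_chain` pair (those names are illustrative; the real abstract class defines its own builder elsewhere):

```python
import abc


class LLMInference(metaclass=abc.ABCMeta):
    """Sketch of the chain-caching pattern that reset() clears."""

    def __init__(self):
        self.chain = None

    @abc.abstractmethod
    def create_chain(self):
        """Build the underlying chain; the real abstract method's name is assumed."""

    def get_chain(self):
        # Hypothetical lazy accessor: build the chain once and cache it.
        if self.chain is None:
            self.chain = self.create_chain()
        return self.chain

    def reset(self) -> None:
        # New in this commit: forget the cached chain so the next
        # get_chain() call rebuilds it from scratch.
        self.chain = None


class EchoInference(LLMInference):
    """Toy subclass; a real one would construct a LangChain QA chain."""

    def create_chain(self):
        return object()  # stand-in for a real chain


inf = EchoInference()
first = inf.get_chain()
inf.reset()
assert inf.get_chain() is not first  # a fresh chain is built after reset()
```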