Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -93,14 +93,14 @@ def ollama_func(command):
     else:
         return "No supported command."
 
-@spaces.GPU()
 def launch():
     global OLLAMA_SERVICE_THREAD
     OLLAMA_SERVICE_THREAD = threading.Thread(target=ollama_service_thread)
     OLLAMA_SERVICE_THREAD.start()
     print("Giving ollama serve a moment")
     time.sleep(10)
-
+
+@spaces.GPU()
 async def stream_chat(message: str, history: list, model: str, temperature: float, max_new_tokens: int, top_p: float, top_k: int, penalty: float):
     print(f"message: {message}")
     conversation = []
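For context, a minimal sketch of the ZeroGPU pattern this commit follows, assuming the usual behavior of the Hugging Face spaces package: @spaces.GPU() requests a GPU only for calls to the decorated function, so it goes on the handler that actually runs inference (stream_chat) rather than on launch(), which just starts the ollama server. All function names below other than spaces.GPU are illustrative, not taken from app.py.

import spaces

def start_backend():
    # CPU-only setup (e.g. starting a background service) stays undecorated,
    # so it does not hold a GPU allocation.
    print("starting backend...")

@spaces.GPU()  # a GPU is attached only while a call to this function runs
def run_inference(prompt: str) -> str:
    # GPU-backed generation would go here; this stub just echoes the prompt.
    return f"response to: {prompt}"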