theailearner committed (verified)
Commit 78708c8 · 1 Parent(s): d4dcafc

Create app.py

Files changed (1)
  1. app.py +36 -0
app.py ADDED
@@ -0,0 +1,36 @@
+ import sys
+ from ollama import Client
+ import gradio as gr
+
+ host_url = sys.argv[1] if len(sys.argv) > 1 else "http://localhost:11434/"
+
+ client = Client(host=host_url)
+
+ model_list = client.list()
+ model_names = [model['model'] for model in model_list['models']]
+
+ def chat_ollama(user_input, history, Model):
+     stream = client.chat(
+         model=Model,
+         messages=[
+             {
+                 'role': 'user',
+                 'content': user_input
+             },
+         ],
+         stream=True,
+     )
+
+     partial_message = ""
+     for chunk in stream:
+         if len(chunk['message']['content']) != 0:
+             partial_message = partial_message + chunk['message']['content']
+             yield partial_message
+
+ with gr.Blocks(title="Ollama Chat", fill_height=True) as demo:
+     gr.Markdown("# Ollama Chat")
+     model_list = gr.Dropdown(model_names, value="llama3.1:latest", label="Model", info="Model to chat with")
+     gr.ChatInterface(chat_ollama, additional_inputs=model_list)
+
+ if __name__ == "__main__":
+     demo.launch()
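Usage note: the script accepts an optional Ollama host URL as its first command-line argument and otherwise falls back to http://localhost:11434/, so it can be started against a remote server with something like `python app.py http://<ollama-host>:11434/`; the `<ollama-host>` value here is only an illustrative placeholder.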