ositamiles committed on
Commit bde644c · verified · 1 Parent(s): cf6a2b1

Create app.py

Files changed (1)
  1. app.py +60 -0
app.py ADDED
@@ -0,0 +1,60 @@
+ import streamlit as st
+ from flowise import Flowise, PredictionData
+ import json
+
+ # Flowise app base url
+ base_url = "https://startrz-devi.hf.space/api/v1/prediction"
+
+ # Chatflow/Agentflow ID
+ flow_id = "e54adffc-ae77-42e5-9fc0-c4584e081093"
+
+ # Show title and description.
+ st.title("Devi Research")
+ st.write(
+     "This is a simple chatbot that uses the Flowise Python SDK."
+ )
+
+ # Create a Flowise client.
+ client = Flowise(base_url=base_url)
+
+ # Create a session state variable to store the chat messages. This ensures that the
+ # messages persist across reruns.
+ if "messages" not in st.session_state:
+     st.session_state.messages = []
+
+ # Display the existing chat messages via `st.chat_message`.
+ for message in st.session_state.messages:
+     with st.chat_message(message["role"]):
+         st.markdown(message["content"])
+
+ # Stream a Flowise prediction and yield only non-empty token events.
+ def generate_response(prompt: str):
+     completion = client.create_prediction(
+         PredictionData(
+             chatflowId=flow_id,
+             question=prompt,
+             streaming=True
+         )
+     )
+
+     for chunk in completion:
+         # Each chunk is a JSON string with 'event' and 'data' fields.
+         parsed_chunk = json.loads(chunk)
+         if parsed_chunk['event'] == 'token' and parsed_chunk['data'] != '':
+             yield str(parsed_chunk['data'])
+
+ # Create a chat input field to allow the user to enter a message. This will display
+ # automatically at the bottom of the page.
+ if prompt := st.chat_input("What is up?"):
+
+     # Store and display the current prompt.
+     st.session_state.messages.append({"role": "user", "content": prompt})
+     with st.chat_message("user"):
+         st.markdown(prompt)
+
+     # Stream the response to the chat using `st.write_stream`, then store it in
+     # session state.
+     with st.chat_message("assistant"):
+         response = generate_response(prompt)
+         full_response = st.write_stream(response)
+         st.session_state.messages.append({"role": "assistant", "content": full_response})