Joshua Sundance Bailey committed on
Commit 991fc4e
• 1 Parent(s): c21a491
.pre-commit-config.yaml CHANGED
@@ -48,10 +48,10 @@ repos:
     rev: v3.1.0
     hooks:
       - id: add-trailing-comma
-  - repo: https://github.com/dannysepler/rm_unneeded_f_str
-    rev: v0.2.0
-    hooks:
-      - id: rm-unneeded-f-str
+  #- repo: https://github.com/dannysepler/rm_unneeded_f_str
+  #  rev: v0.2.0
+  #  hooks:
+  #    - id: rm-unneeded-f-str
   - repo: https://github.com/psf/black
     rev: 23.9.1
     hooks:
.streamlit/config.toml ADDED
@@ -0,0 +1,6 @@
+[theme]
+primaryColor="#F63366"
+backgroundColor="#FFFFFF"
+secondaryBackgroundColor="#F0F2F6"
+textColor="#262730"
+font="sans serif"
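The keys above belong to Streamlit's [theme] section, and Streamlit loads .streamlit/config.toml automatically when the app starts from the project root. As a quick sanity check, the values can be read back inside a running app with st.get_option; a minimal sketch (the st.write line is illustrative and not part of this commit):

import streamlit as st

# Read theme values back from the loaded config; Streamlit picks up
# .streamlit/config.toml from the working directory on startup.
primary = st.get_option("theme.primaryColor")
background = st.get_option("theme.backgroundColor")
st.write(f"primaryColor={primary}, backgroundColor={background}")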
AI_chatbot/app.py ADDED
@@ -0,0 +1,203 @@
+from datetime import datetime
+
+import streamlit as st
+from langchain import LLMChain
+from langchain.callbacks.base import BaseCallbackHandler
+from langchain.callbacks.tracers.langchain import wait_for_all_tracers
+from langchain.callbacks.tracers.run_collector import RunCollectorCallbackHandler
+from langchain.chat_models import ChatOpenAI
+from langchain.memory import StreamlitChatMessageHistory, ConversationBufferMemory
+from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
+from langchain.schema.runnable import RunnableConfig
+from langsmith import Client
+from streamlit_feedback import streamlit_feedback
+
+st.set_page_config(
+    page_title="Chat LangSmith",
+    page_icon="🦜",
+)
+
+
+def get_llm_chain(system_prompt: str, memory: ConversationBufferMemory) -> LLMChain:
+    """Return a basic LLMChain with memory."""
+    prompt = ChatPromptTemplate.from_messages(
+        [
+            (
+                "system",
+                system_prompt + "\nIt's currently {time}.",
+            ),
+            MessagesPlaceholder(variable_name="chat_history"),
+            ("human", "{input}"),
+        ],
+    ).partial(time=lambda: str(datetime.now()))
+    llm = ChatOpenAI(temperature=0.7, streaming=True)
+    return LLMChain(prompt=prompt, llm=llm, memory=memory)
+
+
+client = Client()
+
+
+# "# Chat🦜🛠️"
+# Initialize State
+if "trace_link" not in st.session_state:
+    st.session_state.trace_link = None
+if "run_id" not in st.session_state:
+    st.session_state.run_id = None
+st.sidebar.markdown(
+    """
+# Menu
+""",
+)
+
+_DEFAULT_SYSTEM_PROMPT = "You are a helpful chatbot."
+
+system_prompt = st.sidebar.text_area(
+    "Custom Instructions",
+    _DEFAULT_SYSTEM_PROMPT,
+    help="Custom instructions to provide the language model to determine style, personality, etc.",
+)
+system_prompt = system_prompt.strip().replace("{", "{{").replace("}", "}}")
+memory = ConversationBufferMemory(
+    chat_memory=StreamlitChatMessageHistory(key="langchain_messages"),
+    return_messages=True,
+    memory_key="chat_history",
+)
+
+chain = get_llm_chain(system_prompt, memory)
+
+if st.sidebar.button("Clear message history"):
+    print("Clearing message history")
+    memory.clear()
+    st.session_state.trace_link = None
+    st.session_state.run_id = None
+
+
+# Display chat messages from history on app rerun
+# NOTE: This won't be necessary for Streamlit 1.26+, you can just pass the type directly
+# https://github.com/streamlit/streamlit/pull/7094
+def _get_openai_type(msg):
+    if msg.type == "human":
+        return "user"
+    if msg.type == "ai":
+        return "assistant"
+    return msg.role if msg.type == "chat" else msg.type
+
+
+for msg in st.session_state.langchain_messages:
+    streamlit_type = _get_openai_type(msg)
+    avatar = "🦜" if streamlit_type == "assistant" else None
+    with st.chat_message(streamlit_type, avatar=avatar):
+        st.markdown(msg.content)
+
+if st.session_state.trace_link:
+    st.sidebar.markdown(
+        f'<a href="{st.session_state.trace_link}" target="_blank"><button>Latest Trace: 🛠️</button></a>',
+        unsafe_allow_html=True,
+    )
+
+
+class StreamHandler(BaseCallbackHandler):
+    def __init__(self, container, initial_text=""):
+        self.container = container
+        self.text = initial_text
+
+    def on_llm_new_token(self, token: str, **kwargs) -> None:
+        self.text += token
+        self.container.markdown(self.text)
+
+
+run_collector = RunCollectorCallbackHandler()
+
+
+def _reset_feedback():
+    st.session_state.feedback_update = None
+    st.session_state.feedback = None
+
+
+if prompt := st.chat_input(placeholder="Ask me a question!"):
+    st.chat_message("user").write(prompt)
+    _reset_feedback()
+
+    with st.chat_message("assistant", avatar="🦜"):
+        message_placeholder = st.empty()
+        stream_handler = StreamHandler(message_placeholder)
+        runnable_config = RunnableConfig(
+            callbacks=[run_collector, stream_handler],
+            tags=["Streamlit Chat"],
+        )
+        full_response = chain.invoke({"input": prompt}, config=runnable_config)["text"]
+        message_placeholder.markdown(full_response)
+
+        run = run_collector.traced_runs[0]
+        run_collector.traced_runs = []
+        st.session_state.run_id = run.id
+        wait_for_all_tracers()
+        url = client.read_run(run.id).url
+        st.session_state.trace_link = url
+
+# Simple feedback section
+# Optionally add a thumbs up/down button for feedback
+if st.session_state.get("run_id"):
+    feedback = streamlit_feedback(
+        feedback_type="thumbs",
+        key=f"feedback_{st.session_state.run_id}",
+    )
+    scores = {"👍": 1, "👎": 0}
+    if feedback:
+        score = scores[feedback["score"]]
+        feedback = client.create_feedback(
+            st.session_state.run_id,
+            "user_score",
+            score=score,
+        )
+        st.session_state.feedback = {"feedback_id": str(feedback.id), "score": score}
+
+# Prompt for more information, if feedback was submitted
+if st.session_state.get("feedback"):
+    feedback = st.session_state.get("feedback")
+    feedback_id = feedback["feedback_id"]
+    score = feedback["score"]
+    if score == 0:
+        if correction := st.text_input(
+            label="What would the correct or preferred response have been?",
+            key=f"correction_{feedback_id}",
+        ):
+            st.session_state.feedback_update = {
+                "correction": {"desired": correction},
+                "feedback_id": feedback_id,
+            }
+    elif score == 1:
+        if comment := st.text_input(
+            label="Anything else you'd like to add about this response?",
+            key=f"comment_{feedback_id}",
+        ):
+            st.session_state.feedback_update = {
+                "comment": comment,
+                "feedback_id": feedback_id,
+            }
+    # Update the feedback if additional information was provided
+    if st.session_state.get("feedback_update"):
+        feedback_update = st.session_state.get("feedback_update")
+        feedback_id = feedback_update.pop("feedback_id")
+        client.update_feedback(feedback_id, **feedback_update)
+        # Clear the comment or correction box
+        _reset_feedback()
+
+# # Feedback section
+# if st.session_state.get("last_run"):
+#     run_url = client.read_run(st.session_state.last_run).url
+#     st.sidebar.markdown(f"[Latest Trace: 🛠️]({run_url})")
+#     feedback = streamlit_feedback(
+#         feedback_type="faces",
+#         optional_text_label="[Optional] Please provide an explanation",
+#         key=f"feedback_{st.session_state.last_run}",
+#     )
+#     if feedback:
+#         scores = {"😀": 1, "🙂": 0.75, "😐": 0.5, "🙁": 0.25, "😞": 0}
+#         client.create_feedback(
+#             st.session_state.last_run,
+#             feedback["type"],
+#             score=scores[feedback["score"]],
+#             comment=feedback.get("text", None),
+#         )
+#         st.toast("Feedback recorded!", icon="📝")
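Two details of AI_chatbot/app.py above are easy to miss: the user-supplied system prompt has its literal braces doubled so ChatPromptTemplate does not treat them as template variables, and the thumbs feedback returned by streamlit-feedback is an emoji that gets mapped to a numeric score before it is sent to LangSmith via client.create_feedback. A minimal standalone sketch of those two steps (the helper names below are illustrative; the app performs these steps inline):

# Sketch of two steps from AI_chatbot/app.py; helper names are illustrative.

def escape_braces(text: str) -> str:
    # ChatPromptTemplate parses "{name}" as a variable, so literal braces in a
    # user-supplied system prompt are doubled to keep them literal.
    return text.strip().replace("{", "{{").replace("}", "}}")


def thumbs_to_score(feedback: dict) -> int:
    # streamlit_feedback(feedback_type="thumbs") returns a dict whose "score"
    # field holds the emoji the user clicked; map it to 1/0 for LangSmith.
    return {"👍": 1, "👎": 0}[feedback["score"]]


if __name__ == "__main__":
    assert escape_braces(" Reply in {JSON} ") == "Reply in {{JSON}}"
    assert thumbs_to_score({"type": "thumbs", "score": "👍"}) == 1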
Dockerfile CHANGED
@@ -4,7 +4,8 @@ RUN adduser --uid 1001 --disabled-password --gecos '' appuser
 USER 1001
 
 ENV PYTHONDONTWRITEBYTECODE=1 \
-    PYTHONUNBUFFERED=1
+    PYTHONUNBUFFERED=1 \
+    PATH="/home/appuser/.local/bin:$PATH"
 
 RUN pip install --user --upgrade pip
 COPY ./requirements.txt /home/appuser/app/requirements.txt
docker-compose.yml CHANGED
@@ -11,4 +11,4 @@ services:
     volumes:
       - .:/home/appuser/app:rw
     working_dir: /home/appuser/app/
-    entrypoint: ["streamlit", "run", "${APP}", "-w"]
+    entrypoint: ["python", "-m", "streamlit", "run", "${APP}", "--server.port", "8000", "--server.enableCORS", "false", "--server.address", "0.0.0.0"]
requirements.txt CHANGED
@@ -3,4 +3,5 @@ langchain
 langsmith
 openai
 streamlit
+streamlit-feedback
 tiktoken