eagle0504 committed on
Commit
362e436
·
verified ·
1 Parent(s): b401972

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +89 -0
app.py ADDED
@@ -0,0 +1,89 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import os
3
+ from together import Together
4
+
5
+
# Module-level Together API client shared by all requests in this app.
# NOTE(review): os.environ[...] raises KeyError at import time if
# TOGETHER_API_KEY is unset — intentional fail-fast; confirm deployment sets it.
client = Together(api_key=os.environ["TOGETHER_API_KEY"])
7
+
8
+
9
def call_llama(prompt: str) -> str:
    """
    Query the Llama-3-8b chat model with a single user prompt.

    Args:
        prompt (str): Text to send as the user's message.

    Returns:
        str: The assistant's reply text from the model.
    """
    # Wrap the prompt as a single-turn chat conversation.
    chat_messages = [
        {
            "role": "user",
            "content": prompt,
        }
    ]

    # Ask the hosted Llama-3-8b chat model for a completion.
    completion = client.chat.completions.create(
        model="meta-llama/Llama-3-8b-chat-hf",
        messages=chat_messages,
    )

    # The first choice carries the assistant's message.
    return completion.choices[0].message.content
36
+
37
+
38
# Sidebar: usage instructions plus a control to reset the conversation.
with st.sidebar:
    with st.expander("Instruction Manual"):
        st.markdown("""
        ## Meta Llama3 🦙 Chatbot

        This Streamlit app allows you to chat with Meta's Llama3 model.

        ### How to Use:
        1. **Input**: Type your prompt into the chat input box labeled "What is up?".
        2. **Response**: The app will display a response from Llama3.
        3. **Chat History**: Previous conversations will be shown on the app.

        ### Credits:
        - **Developer**: Yiqiao Yin | [Site](https://www.y-yin.io/) | [LinkedIn](https://www.linkedin.com/in/yiqiaoyin/) | [YouTube](https://youtube.com/YiqiaoYin/)

        Enjoy chatting with Meta's Llama3 model!
        """)

    # Clear the stored transcript and immediately rerun the script so the
    # emptied history is reflected in the UI.
    if st.button("Clear Session"):
        st.session_state.messages = []
        # Fix: st.experimental_rerun() was deprecated and removed in
        # Streamlit 1.37+; st.rerun() is the supported replacement (and is
        # available alongside the chat APIs this app already uses).
        st.rerun()
60
+
61
+
62
st.title("Meta Llama3 🦙")


# Persist the conversation across reruns; Streamlit re-executes the whole
# script on every user interaction.
if "messages" not in st.session_state:
    st.session_state.messages = []


# Replay the stored transcript so earlier turns remain visible after a rerun.
for entry in st.session_state.messages:
    with st.chat_message(entry["role"]):
        st.markdown(entry["content"])


# Handle a new user turn, if one was submitted.
user_text = st.chat_input("What is up?")
if user_text:
    # Echo the user's message into the chat area and record it.
    st.chat_message("user").markdown(user_text)
    st.session_state.messages.append({"role": "user", "content": user_text})

    # NOTE(review): only the latest prompt is sent to the model — prior turns
    # are displayed but not forwarded, so the bot has no conversational memory.
    reply = call_llama(user_text)

    # Show the assistant's reply and record it for future reruns.
    with st.chat_message("assistant"):
        st.markdown(reply)
    st.session_state.messages.append({"role": "assistant", "content": reply})