eagle0504 committed
Commit 89cf3f4 · verified · 1 Parent(s): b7de47e

Update app.py

Files changed (1): app.py +3 -32
app.py CHANGED
@@ -2,37 +2,11 @@ import streamlit as st
 import os
 from together import Together
 
-
-client = Together(api_key=os.environ["TOGETHER_API_KEY"])
+from utils.helper import *
 
 
-def call_llama(prompt: str) -> str:
-    """
-    Send a prompt to the Llama model and return the response.
-    Args:
-        prompt (str): The input prompt to send to the Llama model.
-
-    Returns:
-        str: The response from the Llama model.
-    """
-
-    # Create a completion request with the prompt
-    response = client.chat.completions.create(
-
-        # Use the Llama-3-8b-chat-hf model
-        model="meta-llama/Llama-3-8b-chat-hf",
-
-        # Define the prompt as a user message
-        messages=[
-            {
-                "role": "user",
-                "content": prompt  # Use the input prompt
-            }
-        ],
-    )
-
-    # Return the content of the first response message
-    return response.choices[0].message.content
+st.set_page_config(layout="wide")
+st.title("Meta Llama3 🦙")
 
 
 with st.sidebar:
@@ -59,9 +33,6 @@ with st.sidebar:
         st.experimental_rerun()
 
 
-st.title("Meta Llama3 🦙")
-
-
 # Initialize chat history
 if "messages" not in st.session_state:
     st.session_state.messages = []
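
The commit removes the Together client setup and call_llama() from app.py and adds `from utils.helper import *`, so the helper code presumably now lives in utils/helper.py. That module is not part of this diff; a minimal sketch of what it would contain, reusing the removed code largely verbatim, might look like this:

# utils/helper.py -- hypothetical sketch; this module is not shown in the commit.
# Assumes the Together client and call_llama() were moved here unchanged.
import os

from together import Together

# Read the API key from the environment, as app.py did before this commit.
client = Together(api_key=os.environ["TOGETHER_API_KEY"])


def call_llama(prompt: str) -> str:
    """Send a prompt to the Llama model and return the response text."""
    response = client.chat.completions.create(
        # Same model the removed code used
        model="meta-llama/Llama-3-8b-chat-hf",
        # Pass the prompt as a single user message
        messages=[{"role": "user", "content": prompt}],
    )
    # Return the content of the first response message
    return response.choices[0].message.content

With a module like that in place, app.py can keep calling call_llama(prompt) in its chat loop exactly as before, since the wildcard import brings the function (and the client it depends on) into scope.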