Mr-Vicky-01 committed on
Commit
6fc35ba
1 Parent(s): 11114b2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -10
app.py CHANGED
@@ -10,6 +10,8 @@ import base64
10
  # Load environment variables
11
  load_dotenv()
12
 
 
 
13
  # Configure the Llama index settings
14
  Settings.llm = HuggingFaceInferenceAPI(
15
  model_name="meta-llama/Meta-Llama-3-8B-Instruct",
@@ -58,17 +60,16 @@ def handle_query(query):
58
  )
59
  ]
60
  text_qa_template = ChatPromptTemplate.from_messages(chat_text_qa_msgs)
61
- query_engine = index.as_query_engine(text_qa_template=text_qa_template, streaming=True)
62
  answer = query_engine.query(query)
63
- yield answer.print_response_stream()
64
 
65
 
66
- # if hasattr(answer, 'response'):
67
- # return answer.response
68
- # elif isinstance(answer, dict) and 'response' in answer:
69
- # return answer['response']
70
- # else:
71
- # return "Sorry, I couldn't find an answer."
72
 
73
 
74
  # Streamlit app initialization
@@ -80,7 +81,7 @@ if 'messages' not in st.session_state:
80
  st.session_state.messages = [{'role': 'assistant', "content": 'Hello! Upload a PDF and ask me anything about its content.'}]
81
 
82
  for message in st.session_state.messages:
83
- with st.chat_message(message['role']):
84
  st.write(message['content'])
85
 
86
  with st.sidebar:
@@ -103,7 +104,7 @@ user_prompt = st.chat_input("Ask me anything about the content of the PDF:")
103
 
104
  if user_prompt and uploaded_file:
105
  st.session_state.messages.append({'role': 'user', "content": user_prompt})
106
- with st.chat_message("user", avatar="👽"):
107
  st.write(user_prompt)
108
 
109
  if st.session_state.messages[-1]["role"] != "assistant":
 
10
  # Load environment variables
11
  load_dotenv()
12
 
13
+ icons = {"assistant": "👽", "user": "👦🏻"}
14
+
15
  # Configure the Llama index settings
16
  Settings.llm = HuggingFaceInferenceAPI(
17
  model_name="meta-llama/Meta-Llama-3-8B-Instruct",
 
60
  )
61
  ]
62
  text_qa_template = ChatPromptTemplate.from_messages(chat_text_qa_msgs)
63
+ query_engine = index.as_query_engine(text_qa_template=text_qa_template)
64
  answer = query_engine.query(query)
 
65
 
66
 
67
+ if hasattr(answer, 'response'):
68
+ return answer.response
69
+ elif isinstance(answer, dict) and 'response' in answer:
70
+ return answer['response']
71
+ else:
72
+ return "Sorry, I couldn't find an answer."
73
 
74
 
75
  # Streamlit app initialization
 
81
  st.session_state.messages = [{'role': 'assistant', "content": 'Hello! Upload a PDF and ask me anything about its content.'}]
82
 
83
  for message in st.session_state.messages:
84
+ with st.chat_message(message['role'], avatar=icon[message['role']]):
85
  st.write(message['content'])
86
 
87
  with st.sidebar:
 
104
 
105
  if user_prompt and uploaded_file:
106
  st.session_state.messages.append({'role': 'user', "content": user_prompt})
107
+ with st.chat_message("user", avatar="👦🏻"):
108
  st.write(user_prompt)
109
 
110
  if st.session_state.messages[-1]["role"] != "assistant":