Update app.py
app.py CHANGED
@@ -147,11 +147,31 @@ def search_arxiv(query):
 def perform_ai_lookup(q):
     start = time.time()
     client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
+    # Perform a RAG-based search
     r = client.predict(q,20,"Semantic Search","mistralai/Mixtral-8x7B-Instruct-v0.1",api_name="/update_with_rag_md")
     refs = r[0]
+    # Ask model for answer
     r2 = client.predict(q,"mistralai/Mixtral-8x7B-Instruct-v0.1",True,api_name="/ask_llm")
     result = f"### π {q}\n\n{r2}\n\n{refs}"
+    # Speak results
     speech_synthesis_html(r2)
+
+    # Attempt to speak summaries and titles from refs
+    # Assuming refs contain a set of references in Markdown with possible titles.
+    # We'll just re-speak refs as "summaries".
+    summaries_text = "Here are the summaries from the references: " + refs.replace('"','')
+    speech_synthesis_html(summaries_text)
+
+    # Extract titles from refs (looking for markdown links [Title](URL))
+    titles = []
+    for line in refs.split('\n'):
+        m = re.search(r"\[([^\]]+)\]", line)
+        if m:
+            titles.append(m.group(1))
+    if titles:
+        titles_text = "Here are the titles of the papers: " + ", ".join(titles)
+        speech_synthesis_html(titles_text)
+
     st.markdown(result)
     elapsed = time.time()-start
     st.write(f"Elapsed: {elapsed:.2f} s")
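Note that the added title-extraction code calls re.search, so app.py also needs an import re near the top if it does not already have one. As a quick illustration of what the new loop does, here is a minimal standalone sketch; the sample reference list below is hypothetical and only mimics the Markdown-link shape the code expects; it is not real output from the Space.

import re

# Hypothetical reference list in the Markdown-link shape the loop expects.
sample_refs = (
    "1. [Paper Title One](https://arxiv.org/abs/0000.00001) - short summary.\n"
    "2. [Paper Title Two](https://arxiv.org/abs/0000.00002) - another summary.\n"
)

titles = []
for line in sample_refs.split('\n'):
    # Same pattern as in perform_ai_lookup: capture the text inside the first [...] on each line.
    m = re.search(r"\[([^\]]+)\]", line)
    if m:
        titles.append(m.group(1))

print(titles)  # ['Paper Title One', 'Paper Title Two']

If a line contains several bracketed segments, only the first is captured, which is fine for a one-reference-per-line list.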
@@ -266,7 +286,9 @@ def display_file_manager():
 def main():
     st.sidebar.markdown("### 🚲BikeAIπ Multi-Agent Research AI")
     tab_main = st.radio("Action:",["🎤 Voice Input","📸 Media Gallery","π Search ArXiv","π File Editor"],horizontal=True)
-
+
+    # Changed model order and default:
+    model_choice = st.sidebar.radio("AI Model:", ["Arxiv","GPT-4o","Claude-3","GPT+Claude+Arxiv"], index=0)
 
     # Speech-to-Text component placeholder (example)
     mycomponent = components.declare_component("mycomponent", path="mycomponent")
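For clarity, the effect of the new selector on its own: index=0 pre-selects the first option, so the app now defaults to the Arxiv-only path unless the user picks another model. A minimal sketch of just that widget, with the rest of the app omitted:

import streamlit as st

# index=0 pre-selects the first option, making "Arxiv" the default model path.
model_choice = st.sidebar.radio(
    "AI Model:",
    ["Arxiv", "GPT-4o", "Claude-3", "GPT+Claude+Arxiv"],
    index=0,
)
st.sidebar.write(f"Selected: {model_choice}")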
@@ -277,7 +299,12 @@ def main():
             process_with_gpt(user_input)
         elif model_choice == "Claude-3":
             process_with_claude(user_input)
+        elif model_choice == "Arxiv":
+            # Just Arxiv on its own, full column, speak results
+            st.subheader("Arxiv Only Results:")
+            perform_ai_lookup(user_input)
         else:
+            # GPT+Claude+Arxiv
             col1,col2,col3=st.columns(3)
             with col1:
                 st.subheader("GPT-4o Omni:")
@@ -304,7 +331,11 @@
             process_with_gpt(user_text)
         elif model_choice == "Claude-3":
             process_with_claude(user_text)
+        elif model_choice == "Arxiv":
+            st.subheader("Arxiv Only Results:")
+            perform_ai_lookup(user_text)
         else:
+            # GPT+Claude+Arxiv
             col1,col2,col3=st.columns(3)
             with col1:
                 st.subheader("GPT-4o Omni:")
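The same four-way routing now appears twice in main(), once for the voice-input path and once for the text-input path. A possible follow-up, not part of this commit, would be to factor it into a single helper. A minimal sketch, assuming the app's existing process_with_gpt, process_with_claude and perform_ai_lookup functions are in scope and Streamlit is imported as st; the contents of col2 and col3 are outside the context shown in this diff, so they are only indicated by a comment:

import streamlit as st

def route_query(q, model_choice):
    # Dispatch a query to the selected model path (sketch only).
    if model_choice == "GPT-4o":
        process_with_gpt(q)
    elif model_choice == "Claude-3":
        process_with_claude(q)
    elif model_choice == "Arxiv":
        # Arxiv on its own, full column; perform_ai_lookup also speaks the results.
        st.subheader("Arxiv Only Results:")
        perform_ai_lookup(q)
    else:
        # "GPT+Claude+Arxiv": three columns, as in the existing layout.
        col1, col2, col3 = st.columns(3)
        with col1:
            st.subheader("GPT-4o Omni:")
            # ... GPT output here; col2 and col3 would hold the Claude-3 and
            # Arxiv outputs (those lines fall outside this diff's context).

Both call sites would then reduce to route_query(user_input, model_choice) and route_query(user_text, model_choice).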
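For reference, the two Gradio endpoints that perform_ai_lookup depends on can also be exercised outside Streamlit. Below is a minimal sketch that reuses the Space name, endpoint names and argument order exactly as they appear in the diff; whether the Space still exposes these endpoints with this signature is an assumption, and the query string is a made-up placeholder.

from gradio_client import Client

client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")

query = "example query about retrieval-augmented generation"  # placeholder

# RAG search over ArXiv; perform_ai_lookup reads the reference Markdown from r[0].
r = client.predict(query, 20, "Semantic Search",
                   "mistralai/Mixtral-8x7B-Instruct-v0.1",
                   api_name="/update_with_rag_md")
refs = r[0]

# Direct LLM answer to the same query.
answer = client.predict(query,
                        "mistralai/Mixtral-8x7B-Instruct-v0.1",
                        True,
                        api_name="/ask_llm")

print(answer)
print(refs)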