adhinojosa committed
Commit 4f8ab06
1 Parent(s): b750de1

incorporate st.write_stream

Files changed (3)
  1. __pycache__/model.cpython-310.pyc +0 -0
  2. app.py +35 -2
  3. requirements.txt +1 -1
__pycache__/model.cpython-310.pyc ADDED
Binary file (335 Bytes).
 
app.py CHANGED
@@ -1,8 +1,41 @@
 from model import model
-from vectorstore import similarity_search
+#from vectorstore import similarity_search
+import time
+import numpy as np
+import pandas as pd
 import streamlit as st
 
+def stream_data(prompt):
+    for _ in model(prompt,_stream):
+        yield str(_)
+
+if "model" not in st.session_state:
+    st.session_state["model"] = "TheBloke/Mistral-7B-OpenOrca-GGUF"
+
+# Initialize chat history
+if "messages" not in st.session_state:
+    st.session_state.messages = []
+
+# Display chat messages from history on app rerun
+for message in st.session_state.messages:
+    with st.chat_message(message["role"]):
+        st.markdown(message["content"])
+
+
+if prompt := st.chat_input("Pregúntale a Don Simón"):
+    # Add user message to chat history
+    st.session_state.messages.append({"role": "user", "content": prompt})
+    # Display user message in chat message container
+    with st.chat_message("user"):
+        st.markdown(prompt)
+
+    with st.chat_message("assistant"):
+        stream = model("Hola",max_new_tokens=25, stream=True)
+        response = st.write_stream(stream)
+        #st.session_state.messages.append({"role": "assistant", "content": response})
+
 
+'''
 text= st.text_area("Pregúntale a Don Simón")
 
 if text:
@@ -10,5 +43,5 @@ if text:
     answer=""
     for _ in model(text,max_new_tokens=25, stream=True):
         answer= answer+str(_)
-    print(answer)
+    print(answer)'''
 
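For reference, st.write_stream is the piece this commit introduces on the assistant side: it consumes a generator (or any iterable of text chunks), renders each chunk in the app as it arrives, and returns the concatenated text so the finished reply can be stored in the chat history. A minimal sketch of that pattern, separate from the committed code above (the chunk_response generator and its fixed reply are illustrative stand-ins, not part of this repository):

    import time
    import streamlit as st

    def chunk_response():
        # Stand-in for the model's token stream.
        for word in "Hola, soy Don Simón.".split():
            yield word + " "
            time.sleep(0.05)

    with st.chat_message("assistant"):
        # Renders chunks as they arrive and returns the full string.
        response = st.write_stream(chunk_response())

    # Persist the completed reply so it survives Streamlit reruns.
    if "messages" not in st.session_state:
        st.session_state["messages"] = []
    st.session_state["messages"].append({"role": "assistant", "content": response})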
 
requirements.txt CHANGED
@@ -1,4 +1,4 @@
-streamlit==1.30.0
+streamlit==1.31.1
 pymupdf==1.23.18
 chromadb==0.4.22
 langchain==0.1.3
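The pin above goes with the app change: st.write_stream ships with Streamlit 1.31 and is not available under the previous streamlit==1.30.0 pin. A quick local sanity check (illustrative only):

    import streamlit as st
    print(st.__version__)               # expected: 1.31.1
    print(hasattr(st, "write_stream"))  # expected: True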