fixing rag
Files changed:
- data/meta10k.txt +0 -0
- qa.py +3 -15
data/meta10k.txt (ADDED)
The diff for this file is too large to render. See raw diff.
qa.py (CHANGED)

@@ -19,23 +19,11 @@ text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=10
 
 @cl.on_chat_start
 async def on_chat_start():
-    files = None
 
-
-    while files == None:
-        files = await cl.AskFileMessage(
-            content="Please upload a text file to begin!",
-            accept=["text/plain"],
-            max_size_mb=20,
-            timeout=180,
-        ).send()
-
-    file = files[0]
-
-    msg = cl.Message(content=f"Processing `{file.name}`...", disable_feedback=True)
+    msg = cl.Message(content=f"Reading Meta-10k... Please wait.", disable_feedback=True)
     await msg.send()
 
-    with open(file.path, "r", encoding="utf-8") as f:
+    with open("data/meta10k.txt", "r", encoding="utf-8") as f:
         text = f.read()
 
     # Split the text into chunks
@@ -69,7 +57,7 @@ async def on_chat_start():
     )
 
     # Let the user know that the system is ready
-    msg.content = f"Processing `{file.name}` done. You can now ask questions!"
+    msg.content = f"Processing Meta10K done. You can now ask questions!"
     await msg.update()
 
     cl.user_session.set("chain", chain)
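For orientation, below is a minimal sketch of what on_chat_start looks like after this commit: the AskFileMessage upload loop is gone and the app reads the bundled data/meta10k.txt instead. The embedding, vector-store, and chain construction between the two hunks is not part of this diff, so build_chain below is a hypothetical placeholder for it; the chunk_overlap value is an assumption (the hunk header truncates it), and the RecursiveCharacterTextSplitter import path may differ by LangChain version.

# Sketch only: build_chain is a hypothetical stand-in for code not shown in this diff.
import chainlit as cl
from langchain.text_splitter import RecursiveCharacterTextSplitter

# Referenced in the hunk header; the exact chunk_overlap value is cut off there.
text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)


@cl.on_chat_start
async def on_chat_start():
    # No more upload prompt: tell the user the bundled 10-K is being read.
    msg = cl.Message(content="Reading Meta-10k... Please wait.", disable_feedback=True)
    await msg.send()

    with open("data/meta10k.txt", "r", encoding="utf-8") as f:
        text = f.read()

    # Split the text into chunks
    chunks = text_splitter.split_text(text)

    # Embeddings, vector store, and retrieval chain are built here in the real file;
    # build_chain stands in for that elided code.
    chain = build_chain(chunks)

    # Let the user know that the system is ready
    msg.content = "Processing Meta10K done. You can now ask questions!"
    await msg.update()

    cl.user_session.set("chain", chain)

The app is then started as usual with: chainlit run qa.py -w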