Build error
Update app.py
app.py (CHANGED)
@@ -16,7 +16,8 @@ from prompts import (
     PREFIX,
 )
 client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
-
+reponame="Omnibus/tmp"
+save_data=f'https://huggingface.co/datasets/{reponame}/raw/main/'
 token_self = os.environ['HF_TOKEN']
 api=HfApi(token=token_self)
 
@@ -188,14 +189,14 @@ def summarize(inp,history,data=None):
         yield "", [(inp,f'{mes}\n{new_history}')]
         resp = run_gpt(
             COMPRESS_DATA_PROMPT,
-            stop_tokens=[
+            stop_tokens=[],
             max_tokens=2048,
             seed=seed,
             purpose=purpose,
             task=task,
             knowledge=new_history,
             history=hist,
-        )
+        )
         new_history = resp
         print (resp)
         out+=resp
@@ -204,7 +205,7 @@ def summarize(inp,history,data=None):
     #history = "preliminary result: {}\n".format(resp)
     #yield "", (inp,f'{mes}\n{history}')
     print ("final" + resp)
-    out_hist = "result
+    out_hist = "result:\n{}".format(resp)
     #return history
     yield "", [(inp,out_hist)]
 
@@ -297,26 +298,32 @@ def find_rss():
     api.upload_file(
         path_or_fileobj="tmp1.json",
         path_in_repo=f"/rss/{timename}.json",
-        repo_id=
+        repo_id=reponame,
+        #repo_id=save_data.split('datasets/',1)[1].split('/raw',1)[0],
         token=token_self,
         repo_type="dataset",
     )
     yield out_box,[(None,'Sources are loaded. You can ask a question about them now.')]
 
+def load_data():
+    f_ist = (api.list_repo_files(repo_id=reponame, repo_type="dataset"))
+    f_ist.sort(reverse=True)
+    print(f_ist)
+    r = requests.get(f'{save_data}/{f_ist[0]}')
+    lod = json.loads(r.text)
+    return lod
 with gr.Blocks() as app:
     cb = gr.Chatbot(height=500)
     with gr.Row():
-
-
+        inst = gr.Textbox(label="Instructions")
+        sub_btn=gr.Button("Submit")
     with gr.Row():
-
-
-        r_btn=gr.Button("read")
+        load_btn = gr.Button("Load RSS")
+        u_btn=gr.Button("Update [RSS Data]")
     with gr.Row():
         out_json = gr.JSON()
         fil = gr.Textbox()
-
-
-
-    #btn.click(get_rss,rss,out_json)
+    load_btn.click(load_data,None,out_json)
+    u_btn.click(find_rss,None,[out_json,cb])
+    sub_btn.click(summarize,[inst,cb,out_json],[inst,cb])
 app.launch()
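The new retrieval path is easiest to see in isolation. Below is a minimal sketch of the same pattern the commit adds with `load_data()`, assuming the `Omnibus/tmp` dataset repo is public and already contains at least one timestamp-named `rss/<time>.json` snapshot uploaded by `find_rss()`. The filtering to `rss/*.json` and the empty-repo guard are additions for robustness here, not part of the Space's own code.

```python
import json
import requests
from huggingface_hub import HfApi

reponame = "Omnibus/tmp"  # dataset repo used by the Space (from the diff)
save_data = f"https://huggingface.co/datasets/{reponame}/raw/main/"

api = HfApi()  # a token is only needed if the dataset repo is private


def load_latest_rss():
    # List every file in the dataset repo; snapshots are named rss/<timestamp>.json,
    # so a reverse lexicographic sort puts the most recent one first.
    files = [
        f
        for f in api.list_repo_files(repo_id=reponame, repo_type="dataset")
        if f.startswith("rss/") and f.endswith(".json")
    ]
    files.sort(reverse=True)
    if not files:
        return []  # nothing uploaded yet

    # Fetch the raw JSON over HTTP instead of cloning the repo.
    r = requests.get(f"{save_data}{files[0]}")
    r.raise_for_status()
    return json.loads(r.text)


if __name__ == "__main__":
    print(load_latest_rss())
```

In the Space itself the same lookup is bound to the UI with `load_btn.click(load_data,None,out_json)`, so the newest snapshot is rendered in the `gr.JSON` panel, while `u_btn` regenerates the data via `find_rss` and `sub_btn` feeds it to `summarize`.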