Update app.py
app.py
CHANGED
@@ -422,10 +422,101 @@ examples = [
 "Seamless Web Assembly Embedding"]
 ]
 
-
-
-
-
-
-
-
+def parse_action(line):
+    # Expect a line of the form "action: NAME=INPUT"; drop the prefix and split on the first "=".
+    action_name, action_input = line.removeprefix("action: ").split("=", 1)
+    action_input = action_input.strip()
+    return action_name, action_input
+
+def get_file_tree(path):
+    """
+    Recursively explores a directory and returns a nested dictionary representing its file tree.
+    """
+    tree = {}
+    for item in os.listdir(path):
+        item_path = os.path.join(path, item)
+        if os.path.isdir(item_path):
+            tree[item] = get_file_tree(item_path)
+        else:
+            tree[item] = None
+    return tree
+
+def display_file_tree(tree, indent=0):
+    """
+    Returns an indented string representation of the file tree.
+    """
+    lines = []
+    for name, subtree in tree.items():
+        lines.append(f"{'  ' * indent}{name}")
+        if subtree is not None:
+            lines.append(display_file_tree(subtree, indent + 1))
+    return "\n".join(lines)
+
+def project_explorer(path):
+    """
+    Returns the file tree of a given path as text for display in the Gradio app.
+    """
+    tree = get_file_tree(path)
+    return display_file_tree(tree)
+
+def chat_app_logic(message, history, purpose, agent_name, sys_prompt, temperature, max_new_tokens, top_p, repetition_penalty):
+    """
+    Handles the chat app logic, including running the model and displaying the output.
+    """
+    if message:
+        # Run the model and get the response
+        response = generate(
+            prompt=message,
+            history=history,
+            agent_name=agent_name,
+            sys_prompt=sys_prompt,
+            temperature=temperature,
+            max_new_tokens=max_new_tokens,
+            top_p=top_p,
+            repetition_penalty=repetition_penalty,
+        )
+        history.append((message, response))
+        return history
+
+    return history
+
+def main():
+    with gr.Blocks() as demo:
+        gr.Markdown("## FragMixt")
+        gr.Markdown("### Agents w/ Agents")
+
+        # Chat Interface
+        chatbot = gr.Chatbot(show_label=False, show_share_button=False, show_copy_button=True, likeable=True, layout="panel")
+        chatbot.load(examples)
+
+        # Input Components
+        message = gr.Textbox(label="Enter your message", placeholder="Ask me anything!")
+        purpose = gr.Textbox(label="Purpose", placeholder="What is the purpose of this interaction?")
+        agent_name = gr.Dropdown(label="Agents", choices=[s for s in agents], value=agents[0], interactive=True)
+        sys_prompt = gr.Textbox(label="System Prompt", max_lines=1, interactive=True)
+        temperature = gr.Slider(label="Temperature", value=0.9, minimum=0.0, maximum=1.0, step=0.05, interactive=True, info="Higher values produce more diverse outputs")
+        max_new_tokens = gr.Slider(label="Max new tokens", value=1048*10, minimum=0, maximum=1048*10, step=64, interactive=True, info="The maximum number of new tokens")
+        top_p = gr.Slider(label="Top-p (nucleus sampling)", value=0.90, minimum=0.0, maximum=1, step=0.05, interactive=True, info="Higher values sample more low-probability tokens")
+        repetition_penalty = gr.Slider(label="Repetition penalty", value=1.2, minimum=1.0, maximum=2.0, step=0.05, interactive=True, info="Penalize repeated tokens")
+
+        # Button to submit the message
+        submit_button = gr.Button(value="Send")
+
+        # Project Explorer Tab
+        with gr.Tab("Project Explorer"):
+            project_path = gr.Textbox(label="Project Path", placeholder="/path/to/project")
+            explore_button = gr.Button(value="Explore")
+            project_output = gr.Textbox(label="File Tree", lines=20)
+
+        # Chat App Logic Tab
+        with gr.Tab("Chat App"):
+            history = gr.State([])
+
+            # Connect components to the chat app logic
+            submit_button.click(chat_app_logic, inputs=[message, history, purpose, agent_name, sys_prompt, temperature, max_new_tokens, top_p, repetition_penalty], outputs=chatbot)
+            message.submit(chat_app_logic, inputs=[message, history, purpose, agent_name, sys_prompt, temperature, max_new_tokens, top_p, repetition_penalty], outputs=chatbot)
+
+        # Connect components to the project explorer
+        explore_button.click(project_explorer, inputs=project_path, outputs=project_output)
+
+        demo.launch(show_api=False)
+
+if __name__ == "__main__":
+    main()