import streamlit as st

from graph import EssayWriter
from language_options import language_options
from crew import *

import os
import traceback
import base64
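

# Make sure Graphviz's `dot` binary is available; it is assumed to be needed by
# graph.py when rendering the workflow image shown in the second tab.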
if os.system("which dot") != 0:
    os.system("apt-get update && apt-get install -y graphviz")


st.markdown(
    """
    <h1 style="text-align: center; white-space: nowrap; font-size: 2.5em;">
        Multi-Agent Essay Writing Assistant
    </h1>
    """,
    unsafe_allow_html=True
)
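

# Session state: chat history, the cached agent graph ("app"), and a chat_active
# flag updated during agent initialization.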
if "messages" not in st.session_state:
    st.session_state["messages"] = [{"role": "assistant", "content": "Hello! How can I assist you today?"}]

if "app" not in st.session_state:
    st.session_state["app"] = None

if "chat_active" not in st.session_state:
    st.session_state["chat_active"] = True
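

# Sidebar: usage notes, essay settings (length and language), and references.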
with st.sidebar:
    st.subheader("📝 Note:")
    st.info(
        "\n\n 1. This app uses the 'gpt-4o-mini-2024-07-18' model."
        "\n\n 2. Writing essays may take some time, approximately 1-2 minutes."
    )

    openai_key = st.secrets.get("OPENAI_API_KEY", "")

    st.divider()

    st.subheader("⚙️🛠️ Configure Essay Settings:")
    essay_length = st.number_input(
        "Select Essay Length (words):",
        min_value=150,
        max_value=500,
        value=250,
        step=50
    )

    # Sort once and reuse the sorted list so the default index matches the displayed options.
    language_names = sorted(language_options.keys())
    selected_language = st.selectbox("Choose Language:", language_names, index=language_names.index("English"))

    st.divider()

    st.subheader("📖 References:")
    st.markdown(
        "[1. Multi-Agent System with CrewAI and LangChain](https://discuss.streamlit.io/t/new-project-i-have-build-a-multi-agent-system-with-crewai-and-langchain/84002)",
        unsafe_allow_html=True
    )
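

# Create the EssayWriter agent graph once and cache it in st.session_state so it
# is not rebuilt on every Streamlit rerun.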
def initialize_agents():
    if not openai_key:
        st.error("⚠️ OpenAI API key is missing! Please provide a valid key through Hugging Face Secrets.")
        st.session_state["chat_active"] = True
        return None

    os.environ["OPENAI_API_KEY"] = openai_key
    try:
        if "app" in st.session_state and st.session_state["app"] is not None:
            return st.session_state["app"]

        essay_writer = EssayWriter()
        st.session_state["app"] = essay_writer
        st.session_state["chat_active"] = False

        return essay_writer
    except Exception as e:
        st.error(f"❌ Error initializing agents: {e}")
        st.session_state["chat_active"] = True
        return None
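

# Initialize the agents on startup (or reuse the cached instance).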
if st.session_state["app"] is None:
    st.session_state["app"] = initialize_agents()

if st.session_state["app"] is None:
    st.error("⚠️ Failed to initialize agents. Please check your API key and restart the app.")

app = st.session_state["app"]
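

# Post-processing helpers applied to each generated essay.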
def enforce_word_limit(text, limit):
    """Truncate the essay to at most `limit` words."""
    words = text.split()
    return " ".join(words[:limit])


def detect_unexpected_english(text, selected_language):
    """Detect unintended English words when another language is selected."""
    if selected_language == "English":
        return False

    common_english_words = {
        "the", "is", "and", "in", "on", "at", "to", "for", "of", "by",
        "it", "that", "this", "was", "he", "she", "they", "we", "you", "i",
    }
    words = text.split()
    english_count = sum(1 for word in words if word.lower() in common_english_words)
    return english_count > 5
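

# Build a word-budgeted prompt, run the agent graph, and post-process the result.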
def generate_response(topic, length, selected_language):
    if not app or not hasattr(app, "graph"):
        st.error("⚠️ Agents are not initialized. Please check the system or restart the app.")
        return {"response": "Error: Agents not initialized."}

    # Split the word budget across introduction, body, and conclusion.
    intro_limit = max(40, length // 6)
    body_limit = max(80, length - intro_limit - (length // 6))
    conclusion_limit = length - (intro_limit + body_limit)
    num_sections = min(4, max(2, length // 150))

    refined_prompt = f"""
    Write a structured and concise essay on "{topic}" in {selected_language}.

    **Word Limit:** {length} words. **DO NOT exceed or fall short.**
    **Language:** Only {selected_language}. **No English unless explicitly requested.**

    **Essay Structure:**
    - **Title:** Max 10 words.
    - **Introduction ({intro_limit} words max)**: Introduce topic, thesis, and key points.
    - **Main Body ({body_limit} words max, {num_sections} sections)**:
      - Each section has **one key idea**.
      - Follow **strict section word limits**.
      - Avoid redundancy.
    - **Conclusion ({conclusion_limit} words max)**:
      - Summarize key points **without repeating introduction**.
      - End with a strong closing statement.

    **Rules:**
    - **STOP at exactly {length} words**.
    - **Do not repeat conclusion**.
    """

    response = app.graph.invoke(input={
        "topic": topic,
        "length": length,
        "prompt": refined_prompt,
        "language": selected_language,
        # English averages roughly 1.3 tokens per word; 1.5x leaves headroom for other languages.
        "max_tokens": int(length * 1.5)
    })

    essay_text = enforce_word_limit(response.get("essay", ""), length)

    if detect_unexpected_english(essay_text, selected_language):
        return {"response": f"⚠️ Warning: English detected in {selected_language} essay. Try regenerating."}

    return {"essay": essay_text}


tab1, tab2 = st.tabs(["📜 Essay Generation", "📊 Workflow Viz"])
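

# Tab 1: chat-style essay generation with preview, editing, and downloads.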
with tab1:
    if "messages" not in st.session_state:
        st.session_state["messages"] = [{"role": "assistant", "content": "Hello! How can I assist you today?"}]

    for message in st.session_state["messages"]:
        with st.chat_message(message["role"]):
            st.markdown(message["content"], unsafe_allow_html=True)

    topic = st.text_input("📝 Provide an essay topic:", value="Write an essay on the cultural diversity of India")

    st.write("")
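
    # When the button is pressed: record the topic, run the agent graph, then show
    # a preview, an editable copy, and download options for the result.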
    if st.button("🚀 Generate Essay"):
        if topic and topic.strip():
            if not any(msg["content"] == topic for msg in st.session_state["messages"]):
                st.session_state["messages"].append({"role": "user", "content": topic})

            with st.spinner("⏳ Generating your essay..."):
                response = None
                if app:
                    response = generate_response(topic, essay_length, selected_language)
                else:
                    st.error("⚠️ Agents are not initialized. Please check the system or restart the app.")

            if response and "essay" in response:
                essay = response["essay"]

                assistant_response = f"Here is your {essay_length}-word essay preview and the download link."
                st.session_state["messages"].append({"role": "assistant", "content": assistant_response})
                st.chat_message("assistant").markdown(assistant_response)

                col1, col2 = st.columns(2)

                with col1:
                    st.markdown(f"### 📝 Essay Preview ({essay_length} words)")

                    essay_parts = essay.split("\n\n")
                    for part in essay_parts:
                        if part.startswith("## "):
                            st.markdown(f"#### {part[3:]}")
                        elif part.startswith("### "):
                            st.markdown(f"**{part[4:]}**")
                        else:
                            st.markdown(part)

                with col2:
                    st.markdown("### ✍️ Edit Your Essay:")

                    edited_essay = st.text_area("Edit Here:", value=essay, height=300)
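
                    # Download options for the edited essay (TXT and PDF).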
                    save_col1, save_col2 = st.columns(2)

                    with save_col1:
                        # A direct download button; a nested st.button here would reset
                        # the outer "Generate Essay" state on rerun, so its download
                        # link would never appear.
                        st.download_button(
                            label="⬇️ Download TXT",
                            data=edited_essay,
                            file_name="edited_essay.txt",
                            mime="text/plain",
                        )

                    with save_col2:
                        from fpdf import FPDF

                        try:
                            # FPDF's built-in fonts are Latin-1 only; essays in non-Latin
                            # scripts may need a Unicode TTF registered via pdf.add_font().
                            pdf = FPDF()
                            pdf.set_auto_page_break(auto=True, margin=15)
                            pdf.add_page()
                            pdf.set_font("Arial", size=12)

                            for line in edited_essay.split("\n"):
                                pdf.multi_cell(0, 10, txt=line, align="L")

                            pdf.output("edited_essay.pdf")

                            with open("edited_essay.pdf", "rb") as file:
                                st.download_button(label="⬇️ Download PDF", data=file, file_name="edited_essay.pdf", mime="application/pdf")
                        except Exception as pdf_error:
                            st.warning(f"⚠️ Could not build the PDF: {pdf_error}")

                # If the agent graph produced its own PDF, offer it as a download link.
                pdf_name = response.get("pdf_name")
                if pdf_name and os.path.exists(pdf_name):
                    with open(pdf_name, "rb") as pdf_file:
                        b64 = base64.b64encode(pdf_file.read()).decode()
                        href = f"<a href='data:application/octet-stream;base64,{b64}' download='{pdf_name}'>📄 Click here to download the original PDF</a>"
                        st.markdown(href, unsafe_allow_html=True)

            elif response:
                st.markdown(response["response"])
                st.session_state["messages"].append({"role": "assistant", "content": response["response"]})
            else:
                st.error("⚠️ No response received. Please try again.")
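

# Tab 2: display the pre-rendered workflow graph, if present.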
with tab2:
    try:
        graph_path = "/tmp/graph.png"
        if os.path.exists(graph_path):
            st.image(graph_path, caption="Multi-Agent Essay Writer Workflow Visualization", use_container_width=True)
        else:
            st.warning("⚠️ Workflow graph not found at /tmp/graph.png. Please run `graph.py` to regenerate it.")
    except Exception:
        st.error("❌ An error occurred while generating the workflow visualization.")
        st.text_area("Error Details:", traceback.format_exc(), height=500)


st.markdown(
    """
    <div style="text-align: center; font-size: 14px; color: #555; padding-top: 200px; margin-top: 200px;">
        <strong>Acknowledgement:</strong> This app is based on Mesut Duman's work:
        <a href="https://github.com/mesutdmn/Autonomous-Multi-Agent-Systems-with-CrewAI-Essay-Writer/tree/main"
           target="_blank" style="color: #007BFF; text-decoration: none;">
            CrewAI Essay Writer
        </a>
    </div>
    """,
    unsafe_allow_html=True,
)
|