Update app.py
app.py CHANGED
@@ -88,116 +88,11 @@ def main():
 
         st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
 
-    htm_files = glob.glob("*.htm")
-    for file in htm_files:
-        st.sidebar.mark
-
-
-
-
-User
-show full code listing
-ChatGPT
-Here is the full Python script:
-
-python
-Copy code
-import streamlit as st
-import openai
-import os
-import base64
-import glob
-import json
-from xml.etree import ElementTree as ET
-from datetime import datetime
-from dotenv import load_dotenv
-from openai import ChatCompletion
-
-load_dotenv()
-
-openai.api_key = os.getenv('OPENAI_KEY')
-
-def chat_with_model(prompts):
-    model = "gpt-3.5-turbo"
-
-    conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
-    conversation.extend([{'role': 'user', 'content': prompt} for prompt in prompts])
-
-    response = openai.ChatCompletion.create(model=model, messages=conversation)
-    return response['choices'][0]['message']['content']
-
-def generate_filename(prompt):
-    safe_date_time = datetime.now().strftime("%m_%d_%H_%M")
-    safe_prompt = "".join(x for x in prompt if x.isalnum())[:50]
-    return f"{safe_date_time}_{safe_prompt}.htm"
-
-def create_file(filename, prompt, response):
-    with open(filename, 'w') as file:
-        file.write(f"<h1>Prompt:</h1> <p>{prompt}</p> <h1>Response:</h1> <p>{response}</p>")
-
-def get_table_download_link(file_path):
-    with open(file_path, 'r') as file:
-        data = file.read()
-    b64 = base64.b64encode(data.encode()).decode()
-    href = f'<a href="data:file/htm;base64,{b64}" target="_blank" download="{os.path.basename(file_path)}">{os.path.basename(file_path)}</a>'
-    return href
-
-def CompressXML(xml_text):
-    tree = ET.ElementTree(ET.fromstring(xml_text))
-    for elem in tree.iter():
-        if isinstance(elem.tag, ET.Comment):
-            elem.getparent().remove(elem)
-    return ET.tostring(tree.getroot(), encoding='unicode')
-
-def read_file_content(file):
-    if file.type == "application/json":
-        content = json.load(file)
-        return str(content)
-    elif file.type == "text/html":
-        content = BeautifulSoup(file, "html.parser")
-        return content.text
-    elif file.type == "application/xml" or file.type == "text/xml":
-        xml_text = file.getvalue().decode()
-        compressed_text = CompressXML(xml_text)
-        return compressed_text
-    elif file.type == "text/plain":
-        return file.getvalue().decode()
-    else:
-        return ""
-
-
-def main():
-    st.title("Chat with AI")
-
-    prompts = ['']
-
-    user_prompt = st.text_area("Your question:", '', height=120)
-    uploaded_file = st.file_uploader("Choose a file", type=["xml", "json", "htm", "txt"])
-
-    if user_prompt:
-        prompts.append(user_prompt)
-
-    if uploaded_file is not None:
-        file_content = read_file_content(uploaded_file)
-        prompts.append(file_content)
-
-    if st.button('Chat'):
-        st.write('Chatting with GPT-3...')
-        response = chat_with_model(prompts)
-        st.write('Response:')
-        st.write(response)
-
-        filename = generate_filename(user_prompt)
-        create_file(filename, user_prompt, response)
-
-        st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
-
     htm_files = glob.glob("*.htm")
     for file in htm_files:
         st.sidebar.markdown(get_table_download_link(file), unsafe_allow_html=True)
         if st.sidebar.button(f"Delete {file}"):
             os.remove(file)
-            st.experimental_rerun()
 
 if __name__ == "__main__":
     main()
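
For context on the hunk above: the retained sidebar loop depends on get_table_download_link(), which embeds each .htm file into an anchor tag as a base64 data URI so the browser can download it without a separate endpoint. Below is a minimal, self-contained sketch of that pattern outside Streamlit; the helper name make_download_link and the filename example.htm are illustrative only, not part of the commit.

import base64
import os

def make_download_link(file_path):
    # Hypothetical stand-in for get_table_download_link(): read the file,
    # base64-encode its contents, and embed them in an <a> tag as a data URI
    # so clicking the link downloads the file directly.
    with open(file_path, "r") as f:
        data = f.read()
    b64 = base64.b64encode(data.encode()).decode()
    name = os.path.basename(file_path)
    return f'<a href="data:file/htm;base64,{b64}" download="{name}">{name}</a>'

if __name__ == "__main__":
    # Illustrative file; in app.py the filenames come from glob.glob("*.htm").
    with open("example.htm", "w") as f:
        f.write("<h1>Prompt:</h1> <p>hello</p>")
    print(make_download_link("example.htm"))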