awacke1 commited on
Commit
16504a0
·
1 Parent(s): d7e561a

Update backupapp.py

Browse files
Files changed (1) hide show
  1. backupapp.py +30 -21
backupapp.py CHANGED
@@ -1,19 +1,18 @@
1
  import streamlit as st
2
  import openai
3
  import os
4
-
5
- from streamlit_chat import message
 
6
  from dotenv import load_dotenv
7
  from openai import ChatCompletion
8
 
9
  load_dotenv()
10
 
11
  openai.api_key = os.getenv('OPENAI_KEY')
12
- # API keys are created at https://platform.openai.com/account/api-keys — never commit live auth codes or secrets to source control
13
 
14
- # Define a function to chat with the model
15
  def chat_with_model(prompts):
16
- model = "gpt-3.5-turbo" # change this to the model you're using
17
 
18
  conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
19
  conversation.extend([{'role': 'user', 'content': prompt} for prompt in prompts])
@@ -21,21 +20,22 @@ def chat_with_model(prompts):
21
  response = openai.ChatCompletion.create(model=model, messages=conversation)
22
  return response['choices'][0]['message']['content']
23
 
24
- load_dotenv('api_key.env')
25
-
26
- def generate_response(prompt):
27
- completion=openai.Completion.create(
28
- engine='text-davinci-003',
29
- prompt=prompt,
30
- max_tokens=1024,
31
- n=1,
32
- stop=None,
33
- temperature=0.6,
34
- )
35
- message=completion.choices[0].text
36
- return message
37
-
38
- # Streamlit App
 
39
  def main():
40
  st.title("Chat with AI")
41
 
@@ -54,5 +54,14 @@ def main():
54
  st.write('Response:')
55
  st.write(response)
56
 
 
 
 
 
 
 
 
 
 
57
  if __name__ == "__main__":
58
- main()
 
1
  import streamlit as st
2
  import openai
3
  import os
4
+ import base64
5
+ import glob
6
+ from datetime import datetime
7
  from dotenv import load_dotenv
8
  from openai import ChatCompletion
9
 
10
  load_dotenv()
11
 
12
  openai.api_key = os.getenv('OPENAI_KEY')
 
13
 
 
14
def chat_with_model(prompts):
    """Send the collected user prompts to the chat model and return its reply text.

    Each entry in *prompts* becomes one user message, preceded by a fixed
    system message; the first choice's message content is returned.
    """
    model = "gpt-3.5-turbo"

    messages = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
    for prompt in prompts:
        messages.append({'role': 'user', 'content': prompt})

    response = openai.ChatCompletion.create(model=model, messages=messages)
    return response['choices'][0]['message']['content']
22
 
23
def generate_filename(prompt):
    """Build a timestamped, filesystem-safe Markdown filename from *prompt*.

    The prompt is reduced to its alphanumeric characters (capped at 50)
    and prefixed with the current local time, e.g.
    ``2024_01_31_12_00_00_HelloWorld.md``.
    """
    stamp = datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
    # Strip everything that isn't alphanumeric so the name is safe on any OS.
    slug = "".join(filter(str.isalnum, prompt))[:50]
    return f"{stamp}_{slug}.md"
27
+
28
def create_file(filename, prompt, response):
    """Persist a prompt/response pair to *filename* as Markdown.

    The file is written with an explicit UTF-8 encoding: model responses
    routinely contain non-ASCII text, and relying on the platform default
    codec (e.g. cp1252 on Windows) can raise UnicodeEncodeError.
    """
    with open(filename, 'w', encoding='utf-8') as file:
        file.write(f"Prompt: {prompt}\n\nResponse: {response}")
31
+
32
def get_table_download_link(file_path):
    """Return an HTML anchor that downloads *file_path* via a base64 data URI.

    The file is read as raw bytes, so the downloaded payload reproduces the
    file exactly and no text-decoding step (which could fail on non-ASCII
    content under a non-UTF-8 default codec) is involved.
    """
    with open(file_path, 'rb') as file:
        b64 = base64.b64encode(file.read()).decode()
    href = f'<a href="data:file/txt;base64,{b64}" download="{os.path.basename(file_path)}">Download Response File</a>'
    return href
38
+
39
  def main():
40
  st.title("Chat with AI")
41
 
 
54
  st.write('Response:')
55
  st.write(response)
56
 
57
+ filename = generate_filename(user_prompt)
58
+ create_file(filename, user_prompt, response)
59
+
60
+ st.markdown(get_table_download_link(filename), unsafe_allow_html=True)
61
+
62
+ md_files = glob.glob("*.md")
63
+ for file in md_files:
64
+ st.markdown(get_table_download_link(file), unsafe_allow_html=True)
65
+
66
  if __name__ == "__main__":
67
+ main()