# Streamlit-ChatGPT / backupapp.py
# (header reconstructed from Hugging Face file-page chrome: author awacke1,
#  commit 02063af, "Create backupapp.py", 1.66 kB)
import streamlit as st
import openai
import os
from streamlit_chat import message
from dotenv import load_dotenv
from openai import ChatCompletion
# Load environment variables from the default '.env' file, then read the
# OpenAI key into the module-level client configuration.
load_dotenv()
openai.api_key = os.getenv('OPENAI_KEY')
# API keys: create and manage at https://platform.openai.com/account/api-keys
# SECURITY(review): the original comment here embedded a live OAuth callback
# URL containing an authorization code and state token — such values must
# never be committed to source control; redacted.
# Define a function to chat with the model
def chat_with_model(prompts):
model = "gpt-3.5-turbo" # change this to the model you're using
conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
conversation.extend([{'role': 'user', 'content': prompt} for prompt in prompts])
response = openai.ChatCompletion.create(model=model, messages=conversation)
return response['choices'][0]['message']['content']
# Second env load from a dedicated key file.  NOTE(review): openai.api_key was
# already assigned from OPENAI_KEY at import time above, so variables loaded
# here do not retroactively update it — confirm whether this call is needed.
load_dotenv('api_key.env')
def generate_response(prompt):
    """Return a completion for *prompt* from the legacy text-davinci-003 engine.

    Parameters
    ----------
    prompt : str
        Free-form text to complete.

    Returns
    -------
    str
        Text of the first completion choice.
    """
    completion = openai.Completion.create(
        engine='text-davinci-003',
        prompt=prompt,
        max_tokens=1024,
        n=1,           # single completion
        stop=None,
        temperature=0.6,
    )
    # Renamed from 'message': the old local name shadowed the `message`
    # function imported from streamlit_chat at module level.
    text = completion.choices[0].text
    return text
# Streamlit App
def main():
    """Streamlit entry point: a single-page chat UI backed by chat_with_model."""
    st.title("Chat with AI")

    # Canned starter prompts; the user's question, if any, is appended below
    # so every click sends the full prompt list to the model.
    prompts = ['Hows the weather?', 'Tell me a joke.', 'What is the meaning of life?']

    user_prompt = st.text_input("Your question:", '')
    if user_prompt:
        prompts.append(user_prompt)

    if st.button('Chat'):
        st.write('Chatting with GPT-3...')
        reply = chat_with_model(prompts)
        st.write('Response:')
        st.write(reply)
# Standard script guard: launch the Streamlit app only when run directly.
if __name__ == "__main__":
    main()