import os, re
from flask import Flask, request, jsonify, make_response
from flask_cors import CORS
from time import sleep
from openai import OpenAI
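# Configuration expected in the environment (all read below): OPENAI_API_KEY,
# OPENAI_ASSISTANT_ID, OPENAI_ADDITIONAL_INSTRUCTION and MAX_RESPONSE_LENGTH;
# CODE_VERSION is optional and is only reported by the health-check endpoint.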
# Read configuration from the environment and set up the OpenAI client
print("OpenAI:\t\t"+os.environ['OPENAI_API_KEY'])
# Connect to the assistant
openai_client = OpenAI(api_key=os.environ['OPENAI_API_KEY'])
openai_assistant = openai_client.beta.assistants.retrieve(assistant_id=os.environ['OPENAI_ASSISTANT_ID'])
openai_assistant_id = openai_assistant.id
openai_thread_id = ""
openai_additional_instruction = os.environ['OPENAI_ADDITIONAL_INSTRUCTION']
max_response_length = os.environ['MAX_RESPONSE_LENGTH']
app = Flask(__name__)
CORS(app)
# Function to create a thread
def create_thread():
    openai_thread = openai_client.beta.threads.create()
    return openai_thread.id
# Function to create a message in a given thread
def create_message(thread_id, user_message):
    thread_message = openai_client.beta.threads.messages.create(
        thread_id,
        role='user',
        content=user_message,
    )
    return thread_message
# Function to retrieve a message from a given thread
def retrieve_message(thread_id, message_id):
    message = openai_client.beta.threads.messages.retrieve(
        message_id=message_id,
        thread_id=thread_id,
    )
    return message
# Function to run the thread
def run_thread(thread_id, assistant_id):
    run = openai_client.beta.threads.runs.create_and_poll(
        thread_id=thread_id,
        additional_instructions=openai_additional_instruction,
        assistant_id=assistant_id,
    )
    return run
# Function to check the status of the run
def run_status(run):
    return run.status
# Function to clear a thread
def delete_thread(thread_id):
    return openai_client.beta.threads.delete(thread_id)
def delete_message(message_id, thread_id):
    deleted_message = openai_client.beta.threads.messages.delete(
        message_id=message_id,
        thread_id=thread_id,
    )
    return deleted_message.id
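# The question-answering flow below follows the Assistants API pattern: add the
# user's question as a message on the thread, start a run with create_and_poll
# (which blocks until the run reaches a terminal state), then list the thread's
# messages and pick out the assistant reply that belongs to that run.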
# Handles general Q&A against the LLM
def process_query(query, thread_id):
    retval = {"answer": ""}
    query = "Answer the user's question in " + str(max_response_length) + " words or less: '" + query + "'"
    new_message = create_message(thread_id, query)
    run = run_thread(thread_id, openai_assistant_id)
    messages = openai_client.beta.threads.messages.list(thread_id=thread_id)
    for message in messages:
        if message.run_id == run.id and message.role == 'assistant':
            # pull the answer text out of the assistant's message
            answer = str(message.content[0].text.value)
            # strip any source-citation markers (e.g. 【...】) from the response, if present
            regex_pattern = r"【.*?】"
            scrubbed_answer = re.sub(regex_pattern, '', answer)
            retval = {"answer": scrubbed_answer}
    return retval
# Handles POST requests to this service
@app.route('/query', methods=['POST'])
def handle_query():
    print(request)
    print(request.json)
    data = request.json
    query = data['prompt']
    openai_thread_id = data['thread']
    print("Assistant:\t\t "+openai_assistant.id)
    print("Thread:\t\t"+openai_thread_id)
    answer = ''
    # reuse the caller's thread id, or create a new thread if none was supplied
    if openai_thread_id == "":
        print("Creating a new thread ")
        # create the thread
        openai_thread_id = create_thread()
        print("New thread:\t"+openai_thread_id)
    result = process_query(query, openai_thread_id)
    answer = result['answer']
    serialized_result = {
        "answer": answer,
        "matchedContext": "",
        "conversationPayload": "",
        "thread": openai_thread_id
    }
    print(serialized_result['answer'])
    return jsonify(serialized_result), 200
@app.route('/')
def hello():
    version = os.environ.get("CODE_VERSION")
    return jsonify({"status": "Healthy", "version": version}), 200
if __name__ == "__main__":
    app.run(host="0.0.0.0", port=15002)
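# Example client call (a sketch: it assumes the service is running locally on
# port 15002 and that the "requests" package is installed; the prompt text is
# only a placeholder):
#
#   import requests
#   resp = requests.post(
#       "http://localhost:15002/query",
#       json={"prompt": "What does this assistant do?", "thread": ""},
#   )
#   body = resp.json()
#   # body["thread"] holds the thread id; send it back on the next request
#   # to continue the same conversation instead of starting a new thread.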