Update app.py
app.py CHANGED
@@ -4,17 +4,19 @@ from pydantic import BaseModel
 from fastapi.responses import JSONResponse
 import requests
 import json
-
+import openai
+import time

 class Text(BaseModel):
     content: str = ""


 app = FastAPI()
-
+key = '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'
+openai.api_key = key
 headers = {
     'Content-Type': 'application/json',
-    'Authorization': 'Bearer ' +
+    'Authorization': 'Bearer ' + key
 }

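The new module-level key is used in two ways in this commit: it is handed to the SDK via openai.api_key, and it is interpolated into the Authorization header for the direct REST calls made with requests (the /embeddings handler that appears as context in the next hunk builds https://api.openai.com/v1/embeddings for exactly that). Below is a minimal sketch of such a REST call, assuming the standard embeddings request shape; the embed_text helper and the model name are illustrative and not part of app.py, and `headers` refers to the dict defined above.

import json
import requests

def embed_text(text: str) -> list:
    # Hypothetical helper (not in this commit): shows how the module-level
    # `headers` dict with the Bearer token would be used for a direct REST call.
    payload = {'model': 'text-embedding-ada-002', 'input': text}
    resp = requests.post('https://api.openai.com/v1/embeddings',
                         headers=headers, data=json.dumps(payload))
    resp.raise_for_status()
    return resp.json()['data'][0]['embedding']
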
@@ -125,6 +127,36 @@ def chat_gpt_ep(content: Text = None):
     return JSONResponse(content=content)


+@app.post("/chatgptstream")
+def chat_gpt_stream_ep(content: Text = None):
+    start_time = time.time()
+    obj = json.loads(content.content)
+    response = openai.Completion.create(
+        model='gpt-3.5-turbo',
+        messages=obj['messages'],
+        stream=True,  # this time, we set stream=True
+    )
+    # create variables to collect the stream of chunks
+    collected_chunks = []
+    collected_messages = []
+    # iterate through the stream of events
+    for chunk in response:
+        chunk_time = time.time() - start_time  # calculate the time delay of the chunk
+        collected_chunks.append(chunk)  # save the event response
+        chunk_message = chunk['choices'][0]['delta']  # extract the message
+        collected_messages.append(chunk_message)  # save the message
+        print(f"Message received {chunk_time:.2f} seconds after request: {chunk_message}")  # print the delay and text
+
+    # print the time delay and text received
+    print(f"Full response received {chunk_time:.2f} seconds after request")
+    full_reply_content = ''.join([m.get('content', '') for m in collected_messages])
+    print(f"Full conversation received: {full_reply_content}")
+
+    content = {'content': full_reply_content}
+    print('content:', content)
+    return JSONResponse(content=content)
+
+
 @app.post("/embeddings")
 def embeddings_ep(content: Text = None):
     url = 'https://api.openai.com/v1/embeddings'
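One caveat about the new handler: gpt-3.5-turbo with a messages list is served by the chat-completions API, and in the pre-1.0 openai SDK imported here that call is openai.ChatCompletion.create; openai.Completion.create does not accept a messages argument, so the endpoint as committed would fail at request time. Below is a minimal sketch of the streaming call with that one substitution, keeping the same accumulation logic; it is a sketch against the pre-1.0 SDK, not a drop-in patch for this commit.

import time
import openai

def stream_chat(messages: list) -> str:
    # Sketch of the same streaming loop as chat_gpt_stream_ep, using the
    # chat-completions entry point of the pre-1.0 openai SDK.
    start_time = time.time()
    response = openai.ChatCompletion.create(
        model='gpt-3.5-turbo',
        messages=messages,
        stream=True,
    )
    collected_messages = []
    for chunk in response:
        chunk_time = time.time() - start_time    # delay of this chunk
        delta = chunk['choices'][0]['delta']     # incremental piece of the reply
        collected_messages.append(delta)
        print(f"Message received {chunk_time:.2f} seconds after request: {delta}")
    return ''.join(m.get('content', '') for m in collected_messages)

When calling the endpoint, note that the content field of the request body is itself a JSON string wrapping the messages array, because the handler does json.loads(content.content). A usage example against a local server (host and port are assumptions):

import json
import requests

body = {'content': json.dumps({'messages': [{'role': 'user', 'content': 'Hello!'}]})}
r = requests.post('http://localhost:8000/chatgptstream', json=body)
print(r.json()['content'])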