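"""Bing Chat API server.

A FastAPI app that wraps Bing Chat conversations and exposes them over HTTP:
GET `/models`, POST `/create`, POST `/chat`, and POST `/chat/completions`
(streamed as server-sent events), each also mirrored under the `/v1` prefix.
"""
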
import uvicorn
from fastapi import FastAPI
from pydantic import BaseModel, Field
from sse_starlette.sse import EventSourceResponse

from conversations import (
    ConversationConnector,
    ConversationCreator,
    MessageComposer,
)


class ChatAPIApp:
    def __init__(self):
        self.app = FastAPI(
            docs_url="/",
            title="Bing Chat API",
            swagger_ui_parameters={"defaultModelsExpandDepth": -1},
            version="1.0",
        )
        self.setup_routes()

    # Three conversation styles, each with an online and an offline (no web search) variant.
    def get_available_models(self):
        self.available_models = [
            {
                "id": "precise",
                "description": "Bing (Precise): Concise and straightforward.",
            },
            {
                "id": "balanced",
                "description": "Bing (Balanced): Informative and friendly.",
            },
            {
                "id": "creative",
                "description": "Bing (Creative): Original and imaginative.",
            },
            {
                "id": "precise-offline",
                "description": "Bing (Precise): (No Internet) Concise and straightforward.",
            },
            {
                "id": "balanced-offline",
                "description": "Bing (Balanced): (No Internet) Informative and friendly.",
            },
            {
                "id": "creative-offline",
                "description": "Bing (Creative): (No Internet) Original and imaginative.",
            },
        ]
        return self.available_models

    class CreateConversationSessionPostItem(BaseModel):
        model: str = Field(
            default="precise",
            description="(str) `precise`, `balanced`, `creative`, `precise-offline`, `balanced-offline`, `creative-offline`",
        )

    # Create a new conversation session and return the credentials
    # needed to chat in it via the `/chat` endpoint.
    def create_conversation_session(self, item: CreateConversationSessionPostItem):
        creator = ConversationCreator()
        creator.create()
        return {
            "model": item.model,
            "sec_access_token": creator.sec_access_token,
            "client_id": creator.client_id,
            "conversation_id": creator.conversation_id,
        }

    class ChatPostItem(BaseModel):
        prompt: str = Field(
            default="Hello, who are you?",
            description="(str) Prompt",
        )
        model: str = Field(
            default="precise",
            description="(str) `precise`, `balanced`, `creative`, `precise-offline`, `balanced-offline`, `creative-offline`",
        )
        sec_access_token: str = Field(
            default="",
            description="(str) Sec Access Token",
        )
        client_id: str = Field(
            default="",
            description="(str) Client ID",
        )
        conversation_id: str = Field(
            default="",
            description="(str) Conversation ID",
        )
        invocation_id: int = Field(
            default=0,
            description="(int) Invocation ID",
        )

    # Stream a reply in an existing conversation session as server-sent events.
    def chat(self, item: ChatPostItem):
        connector = ConversationConnector(
            conversation_style=item.model,
            sec_access_token=item.sec_access_token,
            client_id=item.client_id,
            conversation_id=item.conversation_id,
            invocation_id=item.invocation_id,
        )
        return EventSourceResponse(
            connector.stream_chat(prompt=item.prompt, yield_output=True),
            media_type="text/event-stream",
        )

    class ChatCompletionsPostItem(BaseModel):
        model: str = Field(
            default="precise",
            description="(str) `precise`, `balanced`, `creative`, `precise-offline`, `balanced-offline`, `creative-offline`",
        )
        messages: list = Field(
            default=[{"role": "user", "content": "Hello, who are you?"}],
            description="(list) Messages",
        )

    # OpenAI-style endpoint: create a fresh conversation per request,
    # merge the message list into a single prompt, and stream the reply.
    def chat_completions(self, item: ChatCompletionsPostItem):
        creator = ConversationCreator()
        creator.create()
        connector = ConversationConnector(
            conversation_style=item.model,
            sec_access_token=creator.sec_access_token,
            client_id=creator.client_id,
            conversation_id=creator.conversation_id,
            invocation_id=0,
        )
        message_composer = MessageComposer()
        prompt = message_composer.merge(item.messages)
        return EventSourceResponse(
            connector.stream_chat(prompt=prompt, yield_output=True),
            media_type="text/event-stream",
        )

    def setup_routes(self):
        # Register each route both at the root and under the `/v1` prefix.
        for prefix in ["", "/v1"]:
            self.app.get(
                prefix + "/models",
                summary="Get available models",
            )(self.get_available_models)
            self.app.post(
                prefix + "/create",
                summary="Create a conversation session",
            )(self.create_conversation_session)
            self.app.post(
                prefix + "/chat",
                summary="Chat in conversation session",
            )(self.chat)
            self.app.post(
                prefix + "/chat/completions",
                summary="Chat completions in conversation session",
            )(self.chat_completions)


app = ChatAPIApp().app

if __name__ == "__main__":
    uvicorn.run("__main__:app", host="0.0.0.0", port=22222, reload=True)
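# A minimal client sketch (not part of the server): assuming the server is running
# on localhost:22222 as configured above, `/chat/completions` can be consumed as an
# SSE stream with `requests`. The exact payload of each event depends on
# ConversationConnector.stream_chat in `conversations.py`, which is not shown here.
#
#   import requests
#
#   with requests.post(
#       "http://localhost:22222/chat/completions",
#       json={
#           "model": "precise",
#           "messages": [{"role": "user", "content": "Hello, who are you?"}],
#       },
#       stream=True,
#   ) as response:
#       for line in response.iter_lines(decode_unicode=True):
#           if line:
#               print(line)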