import logging
from typing import Any, List, Optional
import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models import BaseChatModel
from langchain_core.messages import AIMessage, BaseMessage
from langchain_core.outputs import ChatGeneration, ChatResult
from langchain_core.pydantic_v1 import Field, SecretStr
logger = logging.getLogger(__name__)


class ChatLaaS(BaseChatModel):
    """Chat model wrapper around the Wanted LaaS preset chat-completions API."""

    laas_api_base: Optional[str] = Field(
        default="https://api-laas.wanted.co.kr/api/preset", alias="base_url"
    )
    laas_api_key: Optional[SecretStr] = Field(default=None, alias="api_key")
    laas_project: Optional[str] = Field(default=None, alias="project")
    laas_hash: Optional[str] = Field(default=None, alias="hash")
    timeout: Optional[float] = Field(default=60.0)

    # Map LangChain message types to the role names expected by the LaaS API.
    _ROLE_MAP = {
        "human": "user",
        "ai": "assistant",
    }

    @property
    def _llm_type(self) -> str:
        """Return type of chat model."""
        return "laas-chat"

    @classmethod
    def is_lc_serializable(cls) -> bool:
        """Return whether this model can be serialized by LangChain."""
        return False

    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        try:
            body = {
                "hash": self.laas_hash,
                "messages": [
                    {
                        "role": self._ROLE_MAP.get(msg.type, msg.type),
                        "content": msg.content,
                    }
                    for msg in messages
                    if msg.content.strip()  # Skip empty or whitespace-only messages.
                ],
                **kwargs,
            }
            logger.debug("LaaS request body: %s", body)

            headers = {
                "Content-Type": "application/json",
                "apiKey": self.laas_api_key.get_secret_value(),
                "project": self.laas_project,
            }
            response = requests.post(
                f"{self.laas_api_base}/chat/completions",
                headers=headers,
                json=body,
                timeout=self.timeout,
            )
            response.raise_for_status()
            data = response.json()

            # Extract the generated message from the API response.
            content = data["choices"][0]["message"]["content"]
            message = AIMessage(id=data["id"], content=content)
            generation = ChatGeneration(message=message)
            return ChatResult(generations=[generation])
        except Exception as e:
            logger.error("LaaS chat completion request failed: %s", e)
            raise
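

# Minimal usage sketch: shows how ChatLaaS could be instantiated and invoked.
# The API key, project name, and preset hash below are illustrative placeholders,
# not real credentials; the field names are the aliases declared on the model.
if __name__ == "__main__":
    from langchain_core.messages import HumanMessage

    chat = ChatLaaS(
        api_key="YOUR_LAAS_API_KEY",  # placeholder
        project="YOUR_PROJECT",       # placeholder
        hash="YOUR_PRESET_HASH",      # placeholder
    )
    result = chat.invoke([HumanMessage(content="Hello!")])
    print(result.content)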