import os
from typing import Optional

from dotenv import load_dotenv
from langchain_openai import ChatOpenAI

# import inspect
# load_dotenv(os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), '.env'))

# Load environment variables (e.g. OPENAI_API_KEY) from a .env file in the working directory.
load_dotenv()
class MyChatOpenAI:
    """Thin factory around ChatOpenAI that normalizes model names and reads the API key from the environment."""

    @classmethod
    def from_model(
        cls,
        model: str = 'gpt-4o-mini',
        *,
        langsmith_project: str = 'default',  # accepted for compatibility, not used in this snippet
        temperature: float = 0.7,
        max_tokens: Optional[int] = 4096,
        max_retries: int = 1,
        **kwargs
    ) -> ChatOpenAI:
        # Accept common spellings/casings of the supported model names.
        normalized = model.replace('-', '').lower()
        if normalized == 'gpt4o':
            model = 'gpt-4o'
        elif normalized == 'gpt4omini':
            model = 'gpt-4o-mini'
        else:
            raise ValueError(
                f"Model {model} is currently not supported. "
                "Supported models are: ['gpt-4o', 'gpt-4o-mini']"
            )
        return ChatOpenAI(
            openai_api_key=os.getenv("OPENAI_API_KEY"),
            model=model,
            temperature=temperature,
            max_tokens=max_tokens,
            max_retries=max_retries,
            **kwargs
        )
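

# --- Usage sketch (not part of the original file; illustrative assumption) ---
# A minimal example of how the factory above could be called. It assumes
# OPENAI_API_KEY is available via the environment or the .env file loaded
# above, and that a live call to the OpenAI API is acceptable at run time.
if __name__ == "__main__":
    llm = MyChatOpenAI.from_model("GPT4o-mini", temperature=0.2, max_tokens=256)
    reply = llm.invoke("Say hello in one short sentence.")
    print(reply.content)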