# carotis_chatbot/llm.py
from openai import OpenAI

# The client reads the OPENAI_API_KEY environment variable by default.
client = OpenAI()


class BaseLLM:
    def __init__(self, model):
        self.model = model

    def get_response(self, system_prompt, query):
        # Subclasses implement the actual call to their backing model.
        raise NotImplementedError


class OpenAILLM(BaseLLM):
    def __init__(self, model):
        super().__init__(model)

    def get_response(self, system_prompt, query, **kwargs):
        # Extra keyword arguments (e.g. temperature) are passed straight
        # through to the Chat Completions API.
        response = client.chat.completions.create(
            model=self.model,
            messages=[
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": query},
            ],
            **kwargs,
        )
        return response.choices[0].message.content
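

# Usage sketch (not part of the original module): assumes OPENAI_API_KEY is set
# in the environment; the model name and prompts below are placeholders for
# illustration, not values taken from this repository.
if __name__ == "__main__":
    llm = OpenAILLM("gpt-4o-mini")
    answer = llm.get_response(
        system_prompt="You are a helpful assistant.",
        query="Summarise what this chatbot can do in one sentence.",
        temperature=0.2,
    )
    print(answer)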