from openai import OpenAI

client = OpenAI()


class BaseLLM:
    """Minimal interface that concrete LLM backends implement."""

    def __init__(self, model):
        self.model = model

    def get_response(self, system_prompt, query):
        raise NotImplementedError


class OpenAILLM(BaseLLM):
    """LLM backend that calls the OpenAI Chat Completions API."""

    def __init__(self, model):
        super().__init__(model)

    def get_response(self, system_prompt, query, **kwargs):
        # Send the system prompt and user query as a two-message chat
        # and return only the assistant's text content. Extra keyword
        # arguments (e.g. temperature) are forwarded to the API call.
        response = client.chat.completions.create(
            model=self.model,
            messages=[
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": query},
            ],
            **kwargs,
        )
        return response.choices[0].message.content
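
# A minimal usage sketch of the classes above. The model name "gpt-4o-mini"
# and the prompts are illustrative assumptions, not prescribed by the code;
# any chat-completions model and OPENAI_API_KEY set in the environment work.
llm = OpenAILLM(model="gpt-4o-mini")
answer = llm.get_response(
    system_prompt="You are a concise assistant.",
    query="Explain what a Python context manager does in one sentence.",
    temperature=0.2,  # forwarded via **kwargs to chat.completions.create
)
print(answer)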