from openai import OpenAI
import os
from functools import lru_cache
from retry import retry


@retry(tries=3)
def chat_with_model(prompt, model, open_router_key=None, openai_api_key=None, max_tokens=4000, temperature=0,top_p=0):
    """Send a single-turn user prompt to a chat model and return the reply text.

    Routes the request through OpenRouter when ``open_router_key`` is given;
    otherwise falls back to the OpenAI API with ``openai_api_key``. Retried up
    to 3 times on any exception via the ``retry`` decorator.

    Args:
        prompt: The user message to send.
        model: Model identifier understood by the chosen backend.
        open_router_key: OpenRouter API key (takes precedence when set).
        openai_api_key: OpenAI API key (used only if no OpenRouter key).
        max_tokens: Completion token cap.
        temperature: Sampling temperature.
        top_p: Nucleus-sampling parameter.

    Returns:
        The assistant message content of the first choice.

    Raises:
        ValueError: If neither API key is supplied.
    """
    # Guard first: fail fast before constructing any client.
    if not (open_router_key or openai_api_key):
        raise ValueError("Either open_router_key or openai_api_key must be provided.")

    # OpenRouter wins when both keys are present.
    if open_router_key:
        client = OpenAI(base_url="https://openrouter.ai/api/v1", api_key=open_router_key)
    else:
        client = OpenAI(api_key=openai_api_key)

    completion = client.chat.completions.create(
        model=model,
        messages=[{"role": "user", "content": prompt}],
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
    )
    return completion.choices[0].message.content


@lru_cache(maxsize=10000)
@retry(tries=3)
def embed(text, openai_api_key=None):
    """Return the embedding vector for ``text`` from OpenAI's
    ``text-embedding-3-large`` model.

    Results are memoized per ``(text, openai_api_key)`` pair (up to 10000
    entries); the underlying API call is retried up to 3 times. Note that
    ``lru_cache`` never caches raised exceptions, so failed calls are retried
    on subsequent invocations.

    Args:
        text: The string to embed.
        openai_api_key: OpenAI API key; required.

    Returns:
        The embedding as a list of floats.

    Raises:
        ValueError: If ``openai_api_key`` is not provided.
    """
    if not openai_api_key:
        raise ValueError("openai_api_key must be provided.")

    client = OpenAI(api_key=openai_api_key)
    result = client.embeddings.create(input=[text], model="text-embedding-3-large")
    return result.data[0].embedding