import os
import shutil

import google.generativeai as genai
from fastapi import FastAPI, File, UploadFile

# from groq import AsyncGroq  # only needed if the commented-out Groq endpoint below is re-enabled


# client = AsyncGroq(
#     api_key=os.environ["GROQ_API_KEY"],  # read the key from the environment, never hard-code it
# )

# SYSTEM_PROMPT = """
# You are an assistant; help people!
# """

app = FastAPI()

# Read the Gemini API key from the environment instead of hard-coding it
# (GEMINI_API_KEY is an example name; export it before starting the server).
genai.configure(api_key=os.environ["GEMINI_API_KEY"])


@app.post("/upload-image/{prompt}")
async def upload_image(prompt: str, file: UploadFile = File(...)):
    os.makedirs("uploads", exist_ok=True)


    file_location = f"uploads/{file.filename}"
    with open(file_location, "wb") as buffer:
        shutil.copyfileobj(file.file, buffer)

    myfile = genai.upload_file(file_location)
    
    model = genai.GenerativeModel("gemini-1.5-pro-latest")
    result = model.generate_content(
       [myfile, "\n\n", prompt]
    )

    return result.text
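
# Example request for the endpoint above (a sketch; assumes the app is served
# locally with `uvicorn main:app --port 8000` and that example.jpg exists):
#   curl -X POST "http://localhost:8000/upload-image/Describe%20this%20image" \
#        -F "file=@example.jpg"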



# @app.post("/get_response")
# async def read_root(messages: list[dict]):
#     messages.insert(0, {
#             "role": "system",
#             "content": SYSTEM_PROMPT
#         }
#     )
#     chat_completion = await client.chat.completions.create(
#         messages=messages,
#         model="llama3-70b-8192",
#     )
#     return chat_completion.choices[0].message.content
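
# Example payload for the endpoint above if it is re-enabled (a sketch of the
# OpenAI-style message format that Groq's chat completions API expects):
#   [{"role": "user", "content": "Hello!"}]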