import os
import requests
#from google.colab import userdata # Secure storage for API keys in Colab
import gradio as gr
# Get the Groq API key securely (use userdata in Colab)
groq_api_key = os.getenv("GROQ_API_KEY") # Store using userdata.set("GROQ_API_KEY", "your_api_key")
if not groq_api_key:
    raise ValueError("GROQ_API_KEY not found! Set it as an environment variable (or via userdata.set('GROQ_API_KEY', 'your_api_key') in Colab)")
# Define the URL for the Groq API endpoint
url = "https://api.groq.com/openai/v1/chat/completions"
# Set the headers for the API request
headers = {
"Authorization": f"Bearer {groq_api_key}"
}
def chat_with_groq(user_input):
    # Build the request body with the model name and the user's message
    body = {
        "model": "deepseek-r1-distill-qwen-32b",
        "messages": [
            {"role": "user", "content": user_input}
        ]
    }
    # Send a POST request to the Groq API
    response = requests.post(url, headers=headers, json=body)
    # Check if the request was successful
    if response.status_code == 200:
        return response.json()['choices'][0]['message']['content']
    else:
        # Return a single string so Gradio can display the error details
        return f"Error: {response.json()}"
interface = gr.Interface(
    fn=chat_with_groq,
    inputs=gr.Textbox(placeholder="Ask me anything..."),
    outputs=gr.Textbox(),
    title="Chatbot using Groq (deepseek-r1-distill-qwen-32b)",
    description="Type your question below and get a response powered by Groq's DeepSeek-R1-Distill-Qwen-32B model"
)

if __name__ == "__main__":
    interface.launch()
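# Usage sketch (assumptions: the script is saved as app.py and the key is
# exported in the shell; both names are illustrative):
#
#   export GROQ_API_KEY="your_api_key"
#   python app.py
#
# Gradio then serves the interface locally (http://127.0.0.1:7860 by default).
#
# For reference, a successful response from the OpenAI-compatible
# /chat/completions endpoint is shaped roughly like this (trimmed to the
# fields read above; the values are made up):
#
#   {"choices": [{"message": {"role": "assistant", "content": "Hello!"}}]}
#
# chat_with_groq() extracts choices[0]['message']['content'], i.e. the
# assistant's reply text from the first choice.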