import gradio as gr
import requests
import os
import json
import google.generativeai as genai
from bs4 import BeautifulSoup
#from groq import Groq

# Load environment variables
genai.configure(api_key=os.environ["geminiapikey"])
read_key = os.environ.get('HF_TOKEN', None)
cx = "77f1602c0ff764edb"
custom_css = """
#md {
    height: 400px;
    font-size: 30px;
    background: #121212;
    padding: 20px;
    color: white;
    border: 1px solid white;
}
"""
#api_key = os.getenv('groq')
google_api_key = os.getenv('google_search')
API_URL = "https://blavken-flowiseblav.hf.space/api/v1/prediction/fbc118dc-ec00-4b59-acff-600648958be3"

def query(payload):
    # Forward the payload to the Flowise prediction endpoint and return the parsed JSON
    response = requests.post(API_URL, json=payload)
    return response.json()
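# Note: the exact response schema depends on the Flowise flow behind API_URL;
# querys() below assumes the JSON contains a "text" field with the generated answer.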

def querys(payloads):
    output = query({
        "question": f"{payloads}",
    })
    # query() returns a plain dict, not a response object, so there is no .text attribute;
    # pull the answer out of the "text" field and fall back to the raw JSON if it is missing.
    if isinstance(output, dict):
        return output.get("text", json.dumps(output, ensure_ascii=False))
    return str(output)

# very simple (and extremely fast) web search
def websearch(prompt):
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
    }
    # Query the Google Custom Search JSON API; requests URL-encodes the parameters
    url = "https://www.googleapis.com/customsearch/v1"
    params = {"key": google_api_key, "cx": cx, "q": prompt}
    response = requests.get(url, headers=headers, params=params)
    data = response.json()  # process the JSON data directly
    # Extract the snippet text from the results
    items = data.get('items', [])
    results = [item['snippet'] for item in items]
    result_text = '\n'.join(results)
    # Build the final prompt and let Gemini formulate the answer
    # (German: "answer briefly and concisely, answer in German, you can find the answer here: ...")
    search_query = f"{prompt} antworte kurz und knapp. antworte auf deutsch. du findest die antwort hier: {result_text}"
    result = predict(search_query)
    return result
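# websearch() is not wired to the UI below (the button calls querys); it is kept as an
# alternative handler that answers via Google Custom Search plus Gemini.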

def predict(prompt):
    generation_config = {
        "temperature": 0.4,
        "top_p": 0.95,
        "top_k": 40,
        "max_output_tokens": 8192,
        "response_mime_type": "text/plain",
    }
    model = genai.GenerativeModel(
        model_name="gemini-2.0-flash-exp",
        generation_config=generation_config,
    )
    chat_session = model.start_chat(
        history=[]
    )
    # "antworte immer auf deutsch" = "always answer in German"
    response = chat_session.send_message(f"{prompt}\n antworte immer auf deutsch")
    response_value = response.candidates[0].content.parts[0].text
    return response_value
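# Illustrative direct call: predict("Wie spät ist es in Tokio?") returns the model's
# German-language answer as a plain string; websearch() uses it the same way.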

# Create the Gradio interface
with gr.Blocks(css=custom_css) as demo:
    with gr.Row():
        #details_output = gr.Markdown(label="answer", elem_id="md")
        details_output = gr.Textbox(label="Ausgabe", value="\n\n\n\n")
    with gr.Row():
        ort_input = gr.Textbox(label="prompt", placeholder="ask anything...")
        #audio_input = gr.Microphone(type="filepath")
    with gr.Row():
        button = gr.Button("Senden")

    # Connect the button to the handler: the prompt goes to querys, the answer to the textbox
    button.click(fn=querys, inputs=ort_input, outputs=details_output)

# Launch the Gradio application
demo.launch()
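# When hosted as a Hugging Face Space, demo.launch() starts the app automatically;
# running the script locally assumes the geminiapikey and google_search env vars are set.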