import os
import gradio as gr
import requests
import json
import logging
import random
import time
from datetime import datetime

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# API configuration
API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.3"
HEADERS = {"Authorization": f"Bearer {os.getenv('HUGGINGFACEHUB_API_TOKEN')}"}
def get_timestamp():
    """Generate a timestamp for making responses unique."""
    return int(time.time() * 1000)
def query(payload):
    """Make an API request with retry logic."""
    max_retries = 3
    for attempt in range(max_retries):
        try:
            response = requests.post(API_URL, headers=HEADERS, json=payload)
            response.raise_for_status()
            return response.json()
        except requests.exceptions.RequestException as e:
            if attempt == max_retries - 1:
                logger.error(f"API request failed after {max_retries} attempts: {e}")
                raise
            time.sleep(1)
def get_dynamic_prompt(base_prompt, input_text):
    """Generate dynamic prompts with variations."""
    styles = [
        "analytical and scholarly",
        "practical and applicable",
        "historical and contextual",
        "theological and doctrinal"
    ]
    perspectives = [
        "modern interpretation",
        "historical context",
        "cross-cultural significance",
        "contemporary relevance"
    ]
    return (
        f"{base_prompt} Consider this from a {random.choice(styles)} approach, "
        f"focusing on {random.choice(perspectives)}: {input_text}"
    )
def generate_exegesis(passage):
    if not passage.strip():
        return "Please enter a Bible passage."
    prompt = get_dynamic_prompt(
        "<s>[INST] As a Bible Scholar, provide an exegesis with original language analysis and cultural context for:",
        passage
    )
    try:
        response = query({"inputs": f"{prompt} [timestamp: {get_timestamp()}]"})
        if isinstance(response, list) and response:
            return response[0]["generated_text"].split("[/INST]")[-1].strip()
        return "Error: Invalid response from model."
    except Exception as e:
        logger.error(f"Exegesis Error: {e}")
        return "Generation failed. Please try again."

# Similar modifications for other functions...
def ask_any_questions(question):
    if not question.strip():
        return "Please enter a question."
    prompt = get_dynamic_prompt(
        "<s>[INST] As a Bible Scholar, provide a comprehensive answer with scriptural references for:",
        question
    )
    try:
        response = query({"inputs": f"{prompt} [timestamp: {get_timestamp()}]"})
        if isinstance(response, list) and response:
            return response[0]["generated_text"].split("[/INST]")[-1].strip()
        return "Error: Invalid response from model."
    except Exception as e:
        logger.error(f"Question Error: {e}")
        return "Generation failed. Please try again."
# Updated interface with better styling
css = """
.gradio-container {
    font-family: 'Arial', sans-serif;
}
.gr-button {
    background-color: #2e5090 !important;
}
"""
# Gradio interface definitions with improved styling
exegesis_demo = gr.Interface(
    fn=generate_exegesis,
    inputs=gr.Textbox(
        label="Enter Bible Passage",
        placeholder="e.g., John 3:16",
        lines=2
    ),
    outputs=gr.Textbox(label="Exegesis Commentary", lines=10),
    title="JR Study Bible",
    description="Enter a Bible passage to receive insightful exegesis commentary",
    css=css
)
# ... rest of your Gradio interface code ...
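# A minimal sketch of the elided interface code, assuming the Q&A function is
# wired up the same way as the exegesis interface and that the two demos are
# combined into the `bible_app` object launched below. The name `question_demo`,
# the tab names, and the placeholder text are assumptions, not from the original.
question_demo = gr.Interface(
    fn=ask_any_questions,
    inputs=gr.Textbox(
        label="Ask a Bible Question",
        placeholder="e.g., What does John 3:16 teach about eternal life?",
        lines=2
    ),
    outputs=gr.Textbox(label="Answer", lines=10),
    title="JR Study Bible",
    description="Ask any question to receive a comprehensive, scripturally referenced answer",
    css=css
)

# Combine the individual interfaces into a single tabbed app
bible_app = gr.TabbedInterface(
    [exegesis_demo, question_demo],
    tab_names=["Exegesis", "Questions"]
)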
# Launch with sharing enabled for Hugging Face Spaces
if __name__ == "__main__":
    bible_app.launch(share=True)