File size: 3,782 Bytes
5cc33fe
59b0b83
31d4cfe
 
1300b3c
b31abbf
1e88c0f
b31abbf
59ef385
1e88c0f
 
 
1300b3c
1e88c0f
 
 
59b0b83
1e88c0f
 
 
a6a015a
1e88c0f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
b31abbf
1e88c0f
4d779a7
0ff902b
59b0b83
6a87828
b31abbf
1e88c0f
 
 
 
 
 
4d779a7
1e88c0f
4d779a7
1e88c0f
 
 
51c18be
1e88c0f
0ff902b
e208d2d
 
b31abbf
1e88c0f
 
 
 
 
 
4d779a7
1e88c0f
4d779a7
1e88c0f
 
 
51c18be
1e88c0f
 
 
 
 
 
 
 
 
b0064be
1e88c0f
2ae9b86
0ff902b
1e88c0f
 
 
 
 
 
59b0b83
1e88c0f
 
 
9b09a86
1e88c0f
59b0b83
1e88c0f
9b09a86
1e88c0f
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
import os
import gradio as gr
import requests
import json
import logging
import random
import time
from datetime import datetime

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# API configuration: serverless HF Inference endpoint for Mistral-7B-Instruct.
API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.3"
# NOTE(review): if HUGGINGFACEHUB_API_TOKEN is unset, os.getenv returns None and
# this header becomes "Bearer None" — every request will 401. Consider failing
# fast at startup instead; confirm how the deployment injects the token.
HEADERS = {"Authorization": f"Bearer {os.getenv('HUGGINGFACEHUB_API_TOKEN')}"}

def get_timestamp():
    """Return the current epoch time in whole milliseconds.

    Appended to prompts so repeated identical requests are not served a
    cached completion by the inference endpoint.
    """
    seconds = time.time()
    return int(seconds * 1000)

def query(payload):
    """POST *payload* to the HF Inference API and return the decoded JSON.

    Retries up to 3 times on any requests-level failure (connection error,
    timeout, or HTTP error status) with exponential backoff, then re-raises
    the last exception so callers can surface a failure message.

    Parameters
    ----------
    payload : dict
        JSON-serializable request body, e.g. ``{"inputs": "..."}``.

    Returns
    -------
    The JSON-decoded response (the HF API returns a list on success).
    """
    max_retries = 3
    for attempt in range(max_retries):
        try:
            # A timeout is mandatory: requests has NO default timeout, so a
            # hung connection would otherwise block the Gradio worker forever.
            response = requests.post(API_URL, headers=HEADERS, json=payload, timeout=60)
            response.raise_for_status()
            return response.json()
        except requests.exceptions.RequestException as e:
            if attempt == max_retries - 1:
                logger.error(f"API request failed after {max_retries} attempts: {e}")
                raise
            # Exponential backoff (1s, 2s) — gives a cold model time to load.
            time.sleep(2 ** attempt)

def get_dynamic_prompt(base_prompt, input_text):
    """Build a prompt that varies its analytical angle between calls.

    A randomly chosen style and perspective are woven into the instruction
    so the model is nudged toward a fresh answer each time the same text
    is submitted.
    """
    style_options = (
        "analytical and scholarly",
        "practical and applicable",
        "historical and contextual",
        "theological and doctrinal",
    )
    perspective_options = (
        "modern interpretation",
        "historical context",
        "cross-cultural significance",
        "contemporary relevance",
    )
    # Draw style first, then perspective (matches left-to-right evaluation).
    style = random.choice(style_options)
    perspective = random.choice(perspective_options)
    return (
        f"{base_prompt} Consider this from a {style} approach, "
        f"focusing on {perspective}: {input_text}"
    )

def generate_exegesis(passage):
    """Generate exegesis commentary for a Bible passage via the HF API.

    Parameters
    ----------
    passage : str
        Passage reference or text, e.g. "John 3:16".

    Returns
    -------
    str
        The model's commentary, or a user-facing error message.
    """
    # Guard clause: blank / whitespace-only input never reaches the API.
    if not passage.strip():
        return "Please enter a Bible passage."

    prompt = get_dynamic_prompt(
        "<s>[INST] As a Bible Scholar, provide an exegesis with original language analysis and cultural context for:",
        passage
    )
    try:
        # The timestamp salts the input so identical passages are not
        # answered from an endpoint-side cache.
        response = query({"inputs": f"{prompt} [timestamp: {get_timestamp()}]"})
        if isinstance(response, list) and response:
            # The model echoes the prompt; keep only the text after [/INST].
            return response[0]["generated_text"].split("[/INST]")[-1].strip()
        return "Error: Invalid response from model."
    except Exception as e:
        # logger.exception preserves the full traceback, unlike logger.error.
        logger.exception(f"Exegesis Error: {e}")
        return "Generation failed. Please try again."

# Open-ended Q&A endpoint; mirrors the structure of generate_exegesis.
def ask_any_questions(question):
    """Answer a Bible-related question via the HF API.

    Parameters
    ----------
    question : str
        Free-form question text.

    Returns
    -------
    str
        The model's answer, or a user-facing error message.
    """
    # Guard clause: blank / whitespace-only input never reaches the API.
    if not question.strip():
        return "Please enter a question."

    prompt = get_dynamic_prompt(
        "<s>[INST] As a Bible Scholar, provide a comprehensive answer with scriptural references for:",
        question
    )
    try:
        # The timestamp salts the input so identical questions are not
        # answered from an endpoint-side cache.
        response = query({"inputs": f"{prompt} [timestamp: {get_timestamp()}]"})
        if isinstance(response, list) and response:
            # The model echoes the prompt; keep only the text after [/INST].
            return response[0]["generated_text"].split("[/INST]")[-1].strip()
        return "Error: Invalid response from model."
    except Exception as e:
        # logger.exception preserves the full traceback, unlike logger.error.
        logger.exception(f"Question Error: {e}")
        return "Generation failed. Please try again."

# Custom CSS injected into the Gradio app: a sans-serif base font for the
# whole container, and a navy brand color forced onto all Gradio buttons.
css = """
.gradio-container {
    font-family: 'Arial', sans-serif;
}
.gr-button {
    background-color: #2e5090 !important;
}
"""

# Gradio interface definitions with improved styling
exegesis_demo = gr.Interface(
    fn=generate_exegesis,
    inputs=gr.Textbox(
        label="Enter Bible Passage",
        placeholder="e.g., John 3:16",
        lines=2
    ),
    outputs=gr.Textbox(label="Exegesis Commentary", lines=10),
    title="JR Study Bible",
    description="Enter a Bible passage to receive insightful exegesis commentary",
    css=css
)

# Remaining interfaces (Q&A, combined app) are expected to be defined here.

# Launch with sharing enabled for Hugging Face Spaces.
# NOTE(review): `bible_app` is not defined anywhere in the visible code —
# presumably it is built in the omitted section above (e.g. a TabbedInterface
# over exegesis_demo and the Q&A interface). As written, this raises
# NameError at launch; confirm before deploying.
if __name__ == "__main__":
    bible_app.launch(share=True)