Josebert commited on
Commit
1e88c0f
·
verified ·
1 Parent(s): b31abbf

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +81 -125
app.py CHANGED
@@ -4,156 +4,112 @@ import requests
4
  import json
5
  import logging
6
  import random
 
7
  from datetime import datetime
8
- from cachetools import TTLCache
9
 
10
- # Add cache with 1-hour expiration
11
- response_cache = TTLCache(maxsize=100, ttl=3600)
 
12
 
13
- # ...existing code...
 
 
14
 
15
- def get_cached_response(key, prompt_func):
16
- cache_key = f"{key}_{datetime.now().strftime('%Y%m%d%H')}"
17
- if cache_key in response_cache:
18
- return response_cache[cache_key]
19
- response = prompt_func()
20
- response_cache[cache_key] = response
21
- return response
22
 
23
- def generate_dynamic_prompt(base_prompt, variations):
24
- modifiers = [
25
- "Include specific examples and applications.",
26
- "Provide practical insights and modern relevance.",
27
- "Consider different interpretations and perspectives.",
28
- "Draw connections across different biblical passages.",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
29
  ]
30
- return f"{base_prompt} {random.choice(modifiers)} {random.choice(variations)}"
31
 
32
  def generate_exegesis(passage):
33
  if not passage.strip():
34
  return "Please enter a Bible passage."
35
 
36
- variations = [
37
- "Analyze the historical context and cultural significance.",
38
- "Examine the linguistic nuances and theological implications.",
39
- "Focus on the practical applications and modern relevance.",
40
- "Consider the passage's role in the broader biblical narrative."
41
- ]
42
-
43
- def get_response():
44
- prompt = generate_dynamic_prompt(
45
- f"""<s>[INST] As a professional Bible Scholar, provide a detailed exegesis of: {passage}.""",
46
- variations
47
- )
48
- response = query({"inputs": prompt})
49
  if isinstance(response, list) and response:
50
- return response[0]["generated_text"].split("Exegesis:", 1)[-1].strip()
51
  return "Error: Invalid response from model."
52
-
53
- return get_cached_response(f"exegesis_{passage}", get_response)
 
54
 
 
55
  def ask_any_questions(question):
56
  if not question.strip():
57
  return "Please enter a question."
58
 
59
- variations = [
60
- "Consider multiple perspectives and interpretations.",
61
- "Provide practical applications and modern context.",
62
- "Include relevant cross-references and supporting verses.",
63
- "Examine the historical and cultural context."
64
- ]
65
-
66
- def get_response():
67
- prompt = generate_dynamic_prompt(
68
- f"""<s>[INST] As a Bible Scholar, answer this question: {question}.""",
69
- variations
70
- )
71
- response = query({"inputs": prompt})
72
- if isinstance(response, list) and response:
73
- return response[0]["generated_text"].split("Answer:", 1)[-1].strip()
74
- return "Error: Invalid response from model."
75
-
76
- return get_cached_response(f"question_{question}", get_response)
77
-
78
- def generate_sermon(topic):
79
- if not topic.strip():
80
- return "Please enter a topic."
81
-
82
- variations = [
83
- "Structure the sermon with clear main points and supporting scripture.",
84
- "Include real-life applications and contemporary examples.",
85
- "Incorporate both Old and New Testament perspectives.",
86
- "Focus on practical implementation and spiritual growth."
87
- ]
88
-
89
- def get_response():
90
- prompt = generate_dynamic_prompt(
91
- f"""<s>[INST] As a Pastor and Bible Scholar, create an engaging sermon about: {topic}.""",
92
- variations
93
- )
94
- response = query({"inputs": prompt})
95
  if isinstance(response, list) and response:
96
- return response[0]["generated_text"].split("Sermon:", 1)[-1].strip()
97
  return "Error: Invalid response from model."
98
-
99
- return get_cached_response(f"sermon_{topic}", get_response)
 
100
 
101
- def keyword_search(keyword):
102
- if not keyword.strip():
103
- return "Please enter a keyword."
104
-
105
- variations = [
106
- "Include context and interpretation for each passage.",
107
- "Focus on both literal and thematic occurrences.",
108
- "Consider related terms and concepts.",
109
- "Examine different biblical contexts where this concept appears."
110
- ]
111
-
112
- def get_response():
113
- prompt = generate_dynamic_prompt(
114
- f"""<s>[INST] As a Bible Scholar, find and analyze passages containing: {keyword}.""",
115
- variations
116
- )
117
- response = query({"inputs": prompt})
118
- if isinstance(response, list) and response:
119
- return response[0]["generated_text"].split("Search Results:", 1)[-1].strip()
120
- return "Error: Invalid response from model."
121
-
122
- return get_cached_response(f"search_{keyword}", get_response)
123
 
124
- # Gradio interface definitions
125
  exegesis_demo = gr.Interface(
126
  fn=generate_exegesis,
127
- inputs=gr.Textbox(label="Enter Bible Passage for Exegesis", placeholder="e.g., John 3:16"),
128
- outputs=gr.Textbox(label="Exegesis Commentary"),
 
 
 
 
129
  title="JR Study Bible",
130
- description="Enter a Bible passage to receive insightful exegesis commentary.")
131
-
132
- lookup_demo = gr.Interface(
133
- fn=ask_any_questions,
134
- inputs=gr.Textbox(label="Ask Any Bible Question", placeholder="e.g., What does John 3:16 mean?"),
135
- outputs=gr.Textbox(label="Answer"),
136
- title="Bible Question Answering",
137
- description="Enter a Bible-related question to receive a detailed answer.")
138
-
139
- sermon_demo = gr.Interface(
140
- fn=generate_sermon,
141
- inputs=gr.Textbox(label="Generate Sermon", placeholder="e.g., Faith"),
142
- outputs=gr.Textbox(label="Sermon"),
143
- title="Bible Sermon Generator",
144
- description="Enter a topic to generate a detailed sermon.")
145
-
146
- keyword_search_demo = gr.Interface(
147
- fn=keyword_search,
148
- inputs=gr.Textbox(label="Keyword Search", placeholder="e.g., love"),
149
- outputs=gr.Textbox(label="Search Results"),
150
- title="Bible Keyword Search",
151
- description="Enter a keyword to search in the Bible.")
152
 
153
- # Combine all interfaces into one app
154
- bible_app = gr.TabbedInterface(
155
- [exegesis_demo, lookup_demo, sermon_demo, keyword_search_demo],
156
- ["Exegesis", "Question Answering", "Sermon Generator", "Keyword Search"])
157
 
 
158
  if __name__ == "__main__":
159
- bible_app.launch()
 
4
  import json
5
  import logging
6
  import random
7
+ import time
8
  from datetime import datetime
 
9
 
10
+ # Configure logging
11
+ logging.basicConfig(level=logging.INFO)
12
+ logger = logging.getLogger(__name__)
13
 
14
+ # API configuration
15
+ API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.3"
16
+ HEADERS = {"Authorization": f"Bearer {os.getenv('HUGGINGFACEHUB_API_TOKEN')}"}
17
 
18
+ def get_timestamp():
19
+ """Generate a timestamp for making responses unique"""
20
+ return int(time.time() * 1000)
 
 
 
 
21
 
22
+ def query(payload):
23
+ """Make API request with retry logic"""
24
+ max_retries = 3
25
+ for attempt in range(max_retries):
26
+ try:
27
+ response = requests.post(API_URL, headers=HEADERS, json=payload)
28
+ response.raise_for_status()
29
+ return response.json()
30
+ except requests.exceptions.RequestException as e:
31
+ if attempt == max_retries - 1:
32
+ logger.error(f"API request failed after {max_retries} attempts: {e}")
33
+ raise
34
+ time.sleep(1)
35
+
36
+ def get_dynamic_prompt(base_prompt, input_text):
37
+ """Generate dynamic prompts with variations"""
38
+ styles = [
39
+ "analytical and scholarly",
40
+ "practical and applicable",
41
+ "historical and contextual",
42
+ "theological and doctrinal"
43
+ ]
44
+ perspectives = [
45
+ "modern interpretation",
46
+ "historical context",
47
+ "cross-cultural significance",
48
+ "contemporary relevance"
49
  ]
50
+ return f"{base_prompt} Consider this from a {random.choice(styles)} approach, focusing on {random.choice(perspectives)}: {input_text}"
51
 
52
  def generate_exegesis(passage):
53
  if not passage.strip():
54
  return "Please enter a Bible passage."
55
 
56
+ prompt = get_dynamic_prompt(
57
+ "<s>[INST] As a Bible Scholar, provide an exegesis with original language analysis and cultural context for:",
58
+ passage
59
+ )
60
+ try:
61
+ response = query({"inputs": f"{prompt} [timestamp: {get_timestamp()}]"})
 
 
 
 
 
 
 
62
  if isinstance(response, list) and response:
63
+ return response[0]["generated_text"].split("[/INST]")[-1].strip()
64
  return "Error: Invalid response from model."
65
+ except Exception as e:
66
+ logger.error(f"Exegesis Error: {e}")
67
+ return f"Generation failed. Please try again."
68
 
69
+ # Similar modifications for other functions...
70
  def ask_any_questions(question):
71
  if not question.strip():
72
  return "Please enter a question."
73
 
74
+ prompt = get_dynamic_prompt(
75
+ "<s>[INST] As a Bible Scholar, provide a comprehensive answer with scriptural references for:",
76
+ question
77
+ )
78
+ try:
79
+ response = query({"inputs": f"{prompt} [timestamp: {get_timestamp()}]"})
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
80
  if isinstance(response, list) and response:
81
+ return response[0]["generated_text"].split("[/INST]")[-1].strip()
82
  return "Error: Invalid response from model."
83
+ except Exception as e:
84
+ logger.error(f"Question Error: {e}")
85
+ return f"Generation failed. Please try again."
86
 
87
+ # Updated interface with better styling
88
+ css = """
89
+ .gradio-container {
90
+ font-family: 'Arial', sans-serif;
91
+ }
92
+ .gr-button {
93
+ background-color: #2e5090 !important;
94
+ }
95
+ """
 
 
 
 
 
 
 
 
 
 
 
 
 
96
 
97
+ # Gradio interface definitions with improved styling
98
  exegesis_demo = gr.Interface(
99
  fn=generate_exegesis,
100
+ inputs=gr.Textbox(
101
+ label="Enter Bible Passage",
102
+ placeholder="e.g., John 3:16",
103
+ lines=2
104
+ ),
105
+ outputs=gr.Textbox(label="Exegesis Commentary", lines=10),
106
  title="JR Study Bible",
107
+ description="Enter a Bible passage to receive insightful exegesis commentary",
108
+ css=css
109
+ )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
110
 
111
+ # ... rest of your Gradio interface code ...
 
 
 
112
 
113
+ # Launch with sharing enabled for Hugging Face Spaces
114
  if __name__ == "__main__":
115
+ bible_app.launch(share=True)