Josebert committed on
Commit
b31abbf
·
verified ·
1 Parent(s): bd99dec

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +85 -53
app.py CHANGED
@@ -3,91 +3,123 @@ import gradio as gr
3
  import requests
4
  import json
5
  import logging
 
 
 
6
 
7
# Retrieve the API token from secrets
api_token = os.getenv("API_TOKEN")
if not api_token:
    raise ValueError("API token not found. Make sure 'API_TOKEN' is set in the Secrets.")

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Use the token in your request headers
API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.3"
HEADERS = {"Authorization": f"Bearer {api_token}"}
 
 
 
 
19
 
20
def query(payload, timeout=60):
    """POST *payload* to the Hugging Face Inference API and return parsed JSON.

    Args:
        payload: JSON-serializable request body, e.g. {"inputs": prompt}.
        timeout: Seconds to wait for the API before giving up. The original
            call had no timeout, so a stalled API request hung the app forever.

    Returns:
        The decoded JSON response (a list on success; the HF API returns a
        dict with an "error" key on failure, which callers treat as invalid).
    """
    response = requests.post(API_URL, headers=HEADERS, json=payload, timeout=timeout)
    return response.json()
 
 
 
 
 
23
 
24
def generate_exegesis(passage):
    """Generate a detailed exegesis of a Bible passage via the inference API.

    Args:
        passage: Verse reference or verse text supplied by the user.

    Returns:
        The model's exegesis text, or a human-readable error message.
    """
    if not passage.strip():
        return "Please enter a Bible passage."
    prompt = f"""<s>[INST] You are a professional Bible Scholar. Provide a detailed exegesis of the following biblical verse, including: The original Greek text and transliteration with word-by-word analysis and meanings, historical and cultural context, and theological significance for: {passage} [/INST] Exegesis:</s>"""
    try:
        response = query({"inputs": prompt})
        if isinstance(response, list) and response:
            generated_text = response[0]["generated_text"]
            marker = "Exegesis:"
            # The model echoes the prompt; keep only the text after the marker.
            if marker in generated_text:
                return generated_text.split(marker, 1)[1].strip()
            return generated_text
        return "Error: Invalid response from model."
    except Exception as e:
        logger.error(f"Generation Error: {e}")
        return f"Generation Error: {e}"
40
 
41
def ask_any_questions(question):
    """Answer a Bible-related question via the inference API.

    Args:
        question: Free-form question text supplied by the user.

    Returns:
        The model's answer text, or a human-readable error message.
    """
    if not question.strip():
        return "Please enter a question."
    prompt = f"""<s>[INST] You are a professional Bible Scholar. Provide a detailed answer to the following question, including: Relevant Bible verses, their explanations, and theological significance for: {question} [/INST] Answer:</s>"""
    try:
        response = query({"inputs": prompt})
        if isinstance(response, list) and response:
            answer_text = response[0]["generated_text"]
            marker = "Answer:"
            # The model echoes the prompt; keep only the text after the marker.
            if marker in answer_text:
                return answer_text.split(marker, 1)[1].strip()
            return answer_text
        return "Error: Invalid response from model."
    except Exception as e:
        logger.error(f"Generation Error: {e}")
        return f"Generation Error: {e}"
57
 
58
def generate_sermon(topic):
    """Generate a sermon on a topic via the inference API.

    Args:
        topic: Sermon topic supplied by the user.

    Returns:
        The model's sermon text, or a human-readable error message.
    """
    if not topic.strip():
        return "Please enter a topic."
    prompt = f"""<s>[INST] You are a highly knowledgeable Bible Scholar and Pastor. Generate a detailed sermon on the following topic: {topic}. Include relevant Bible verses and theological insights. [/INST] Sermon:</s>"""
    try:
        response = query({"inputs": prompt})
        if isinstance(response, list) and response:
            sermon_text = response[0]["generated_text"]
            marker = "Sermon:"
            # The model echoes the prompt; keep only the text after the marker.
            if marker in sermon_text:
                return sermon_text.split(marker, 1)[1].strip()
            return sermon_text
        return "Error: Invalid response from model."
    except Exception as e:
        logger.error(f"Generation Error: {e}")
        return f"Generation Error: {e}"
74
 
75
def keyword_search(keyword):
    """Find Bible passages containing a keyword via the inference API.

    Args:
        keyword: Search term supplied by the user.

    Returns:
        The model's search results, or a human-readable error message.
    """
    if not keyword.strip():
        return "Please enter a keyword."
    prompt = f"""<s>[INST] You are a Bible Scholar. Find relevant Bible passages containing the keyword: {keyword} [/INST] Search Results:</s>"""
    try:
        response = query({"inputs": prompt})
        if isinstance(response, list) and response:
            search_results = response[0]["generated_text"]
            marker = "Search Results:"
            # The model echoes the prompt; keep only the text after the marker.
            if marker in search_results:
                return search_results.split(marker, 1)[1].strip()
            return search_results
        return "Error: Invalid response from model."
    except Exception as e:
        logger.error(f"Generation Error: {e}")
        return f"Generation Error: {e}"
91
 
92
  # Gradio interface definitions
93
  exegesis_demo = gr.Interface(
 
3
import requests
import json
import logging
import random
from datetime import datetime
from cachetools import TTLCache  # NOTE(review): third-party; must be in requirements.txt

# Responses expire after one hour; get_cached_response also keys entries by
# the current hour, so identical requests are re-generated at most hourly.
response_cache = TTLCache(maxsize=100, ttl=3600)
 
 
12
 
13
+ # ...existing code...
 
 
14
 
15
def get_cached_response(key, prompt_func):
    """Return a cached response for *key*, computing it via *prompt_func* on a miss.

    The cache key embeds the current hour (YYYYMMDDHH), so entries rotate
    hourly in addition to the TTLCache's own expiry.

    Args:
        key: Stable identifier for the request (e.g. "exegesis_<passage>").
        prompt_func: Zero-argument callable producing the response on a miss.

    Returns:
        The cached or freshly computed response.
    """
    cache_key = f"{key}_{datetime.now().strftime('%Y%m%d%H')}"
    if cache_key in response_cache:
        return response_cache[cache_key]
    response = prompt_func()
    # Don't cache failures: an "Error: ..." reply would otherwise keep being
    # served for up to an hour even after the API recovers.
    if not (isinstance(response, str) and response.startswith("Error")):
        response_cache[cache_key] = response
    return response
22
 
23
def generate_dynamic_prompt(base_prompt, variations):
    """Build a prompt by appending one random modifier and one random variation.

    Args:
        base_prompt: The fixed instruction text to extend.
        variations: Caller-supplied variation sentences to pick from.

    Returns:
        "<base_prompt> <modifier> <variation>" with both extras chosen at random.
    """
    modifiers = [
        "Include specific examples and applications.",
        "Provide practical insights and modern relevance.",
        "Consider different interpretations and perspectives.",
        "Draw connections across different biblical passages.",
    ]
    chosen_modifier = random.choice(modifiers)
    chosen_variation = random.choice(variations)
    return " ".join((base_prompt, chosen_modifier, chosen_variation))
31
 
32
def generate_exegesis(passage):
    """Generate a detailed, hourly-cached exegesis for a Bible passage.

    Args:
        passage: Verse reference or verse text supplied by the user.

    Returns:
        The model's exegesis text, or a human-readable error message.
    """
    if not passage.strip():
        return "Please enter a Bible passage."

    variations = [
        "Analyze the historical context and cultural significance.",
        "Examine the linguistic nuances and theological implications.",
        "Focus on the practical applications and modern relevance.",
        "Consider the passage's role in the broader biblical narrative."
    ]

    def get_response():
        # Close the [INST] tag and seed the marker: generate_dynamic_prompt
        # only appends modifier sentences, so without this suffix the
        # instruction block is never closed and "Exegesis:" never appears,
        # making the split below return the echoed prompt instead of just
        # the model's completion.
        prompt = generate_dynamic_prompt(
            f"""<s>[INST] As a professional Bible Scholar, provide a detailed exegesis of: {passage}.""",
            variations
        ) + " [/INST] Exegesis:</s>"
        response = query({"inputs": prompt})
        if isinstance(response, list) and response:
            return response[0]["generated_text"].split("Exegesis:", 1)[-1].strip()
        return "Error: Invalid response from model."

    return get_cached_response(f"exegesis_{passage}", get_response)
 
54
 
55
def ask_any_questions(question):
    """Answer a Bible-related question, with hourly caching.

    Args:
        question: Free-form question text supplied by the user.

    Returns:
        The model's answer text, or a human-readable error message.
    """
    if not question.strip():
        return "Please enter a question."

    variations = [
        "Consider multiple perspectives and interpretations.",
        "Provide practical applications and modern context.",
        "Include relevant cross-references and supporting verses.",
        "Examine the historical and cultural context."
    ]

    def get_response():
        # Close the [INST] tag and seed the "Answer:" marker — the dynamic
        # prompt helper never closes the instruction block, so without this
        # the split below would return the echoed prompt.
        prompt = generate_dynamic_prompt(
            f"""<s>[INST] As a Bible Scholar, answer this question: {question}.""",
            variations
        ) + " [/INST] Answer:</s>"
        response = query({"inputs": prompt})
        if isinstance(response, list) and response:
            return response[0]["generated_text"].split("Answer:", 1)[-1].strip()
        return "Error: Invalid response from model."

    return get_cached_response(f"question_{question}", get_response)
 
77
 
78
def generate_sermon(topic):
    """Generate an engaging sermon on a topic, with hourly caching.

    Args:
        topic: Sermon topic supplied by the user.

    Returns:
        The model's sermon text, or a human-readable error message.
    """
    if not topic.strip():
        return "Please enter a topic."

    variations = [
        "Structure the sermon with clear main points and supporting scripture.",
        "Include real-life applications and contemporary examples.",
        "Incorporate both Old and New Testament perspectives.",
        "Focus on practical implementation and spiritual growth."
    ]

    def get_response():
        # Close the [INST] tag and seed the "Sermon:" marker — the dynamic
        # prompt helper never closes the instruction block, so without this
        # the split below would return the echoed prompt.
        prompt = generate_dynamic_prompt(
            f"""<s>[INST] As a Pastor and Bible Scholar, create an engaging sermon about: {topic}.""",
            variations
        ) + " [/INST] Sermon:</s>"
        response = query({"inputs": prompt})
        if isinstance(response, list) and response:
            return response[0]["generated_text"].split("Sermon:", 1)[-1].strip()
        return "Error: Invalid response from model."

    return get_cached_response(f"sermon_{topic}", get_response)
 
100
 
101
def keyword_search(keyword):
    """Find and analyze Bible passages containing a keyword, with hourly caching.

    Args:
        keyword: Search term supplied by the user.

    Returns:
        The model's search results, or a human-readable error message.
    """
    if not keyword.strip():
        return "Please enter a keyword."

    variations = [
        "Include context and interpretation for each passage.",
        "Focus on both literal and thematic occurrences.",
        "Consider related terms and concepts.",
        "Examine different biblical contexts where this concept appears."
    ]

    def get_response():
        # Close the [INST] tag and seed the "Search Results:" marker — the
        # dynamic prompt helper never closes the instruction block, so
        # without this the split below would return the echoed prompt.
        prompt = generate_dynamic_prompt(
            f"""<s>[INST] As a Bible Scholar, find and analyze passages containing: {keyword}.""",
            variations
        ) + " [/INST] Search Results:</s>"
        response = query({"inputs": prompt})
        if isinstance(response, list) and response:
            return response[0]["generated_text"].split("Search Results:", 1)[-1].strip()
        return "Error: Invalid response from model."

    return get_cached_response(f"search_{keyword}", get_response)
 
123
 
124
  # Gradio interface definitions
125
  exegesis_demo = gr.Interface(