Threatthriver committed on
Commit a585fa1 (parent: d35e677)

Update app.py

Files changed (1):
  1. app.py +0 -23
app.py CHANGED
@@ -1,8 +1,6 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
 import logging
-import json
-import os
 
 # Initialize the InferenceClient with the model ID from Hugging Face
 client = InferenceClient(model="HuggingFaceH4/zephyr-7b-beta")
@@ -14,27 +12,12 @@ logging.basicConfig(
     format='%(asctime)s - %(levelname)s - %(message)s',
 )
 
-API_KEYS_FILE = 'api_keys.json'
-
-def load_api_keys():
-    """Load API keys from the storage."""
-    if os.path.exists(API_KEYS_FILE):
-        with open(API_KEYS_FILE, 'r') as f:
-            return json.load(f)
-    return {}
-
-def authenticate(api_key: str):
-    """Authenticates the API key by checking against stored keys."""
-    api_keys = load_api_keys()
-    return api_key in api_keys.values()
-
 def log_conversation(user_message, bot_response):
     """Logs the conversation between the user and the AI."""
     logging.info(f"User: {user_message}")
     logging.info(f"Bot: {bot_response}")
 
 def respond(
-    api_key: str,
     message: str,
     history: list[tuple[str, str]],
     system_message: str,
@@ -46,11 +29,6 @@ def respond(
 ):
     """Generates a response from the AI model based on the user's message and chat history."""
 
-    # Authenticate the API key
-    if not authenticate(api_key):
-        yield "Invalid API key. Access denied."
-        return
-
     # Prepare the conversation history for the API call
    messages = [{"role": "system", "content": system_message}]
 
@@ -103,7 +81,6 @@ def respond(
 demo = gr.ChatInterface(
     fn=respond,
     additional_inputs=[
-        gr.Textbox(value="", label="API Key", lines=1, type="password"),
         gr.Textbox(value="You are a friendly Chatbot.", label="System Message", lines=2),
         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max New Tokens"),
         gr.Slider(minimum=0.1, maximum=2.0, value=0.7, step=0.1, label="Temperature"),
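
For reference, below is a minimal sketch of app.py as it stands after this commit, reassembled from the visible hunks. The parameters hidden by the elided hunks (max_tokens, temperature, top_p), the Top-p slider, the logging level, the streaming body built on client.chat_completion, and the demo.launch() entry point are assumptions and are marked as such in comments; they are not confirmed by the diff.

# Sketch of app.py after this commit; assumptions are marked in comments.
import gradio as gr
from huggingface_hub import InferenceClient
import logging

# Initialize the InferenceClient with the model ID from Hugging Face
client = InferenceClient(model="HuggingFaceH4/zephyr-7b-beta")

# Configure logging (the format string is from the diff; the level is assumed)
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
)

def log_conversation(user_message, bot_response):
    """Logs the conversation between the user and the AI."""
    logging.info(f"User: {user_message}")
    logging.info(f"Bot: {bot_response}")

def respond(
    message: str,
    history: list[tuple[str, str]],
    system_message: str,
    max_tokens: int,       # assumed: hidden by an elided hunk
    temperature: float,    # assumed: hidden by an elided hunk
    top_p: float,          # assumed: hidden by an elided hunk
):
    """Generates a response from the AI model based on the user's message and chat history."""
    # Prepare the conversation history for the API call
    messages = [{"role": "system", "content": system_message}]
    for user_msg, bot_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if bot_msg:
            messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": message})

    # Assumed implementation of the elided body: stream tokens from the hosted model
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content or ""
        response += token
        yield response

    log_conversation(message, response)

demo = gr.ChatInterface(
    fn=respond,
    additional_inputs=[
        gr.Textbox(value="You are a friendly Chatbot.", label="System Message", lines=2),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max New Tokens"),
        gr.Slider(minimum=0.1, maximum=2.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p"),  # assumed: cut off in the diff
    ],
)

if __name__ == "__main__":
    demo.launch()  # assumed entry point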