Uhhy committed
Commit 4c925e3 · verified · 1 Parent(s): 78ca21d

Update app.py

Files changed (1):
  1. app.py +6 -2
app.py CHANGED

@@ -57,7 +57,7 @@ class ModelManager:
             return {"model": Llama.from_pretrained(repo_id=model_config['repo_id'], filename=model_config['filename']), "name": model_config['name']}
         except Exception as e:
             print(f"Error loading model {model_config['name']}: {e}")
-            return None
+            pass
 
     def load_all_models(self):
         if self.loaded:
@@ -77,6 +77,7 @@ class ModelManager:
             return models
         except Exception as e:
             print(f"Error loading models: {e}")
+            pass
             return []
 
 model_manager = ModelManager()
@@ -121,6 +122,7 @@ async def generate_model_response(model, inputs, top_k, top_p, temperature):
         return responses
     except Exception as e:
         print(f"Error generating model response: {e}")
+        pass
         return []
 
 @app.post("/generate")
@@ -128,7 +130,7 @@ async def generate(request: ChatRequest):
     try:
         if not global_data['models']:
             raise HTTPException(status_code=500, detail="Models not loaded")
-
+
         model = global_data['models'][0]['model']
         inputs = normalize_input(request.message)
         responses = await generate_model_response(model, inputs, request.top_k, request.top_p, request.temperature)
@@ -140,6 +142,7 @@ async def generate(request: ChatRequest):
         }
     except Exception as e:
         print(f"Error in generate endpoint: {e}")
+        pass
         return {"error": str(e)}
 
 @app.api_route("/{method_name:path}", methods=["GET", "POST", "PUT", "DELETE", "PATCH"])
@@ -149,6 +152,7 @@ async def handle_request(method_name: str, request: Request):
         return {"message": "Request handled successfully", "body": body}
     except Exception as e:
         print(f"Error handling request: {e}")
+        pass
         return {"error": str(e)}
 
 if __name__ == "__main__":
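
For context, every hunk above converges on the same log-and-fall-through shape: print the exception, add a no-op `pass`, then return a fallback value so callers never see a raised error. Below is a minimal, self-contained sketch of that pattern; the `fake_model` stub and the body of the try block are illustrative placeholders, not the actual code in app.py.

import asyncio

# Stub standing in for a loaded llama_cpp model; in app.py the real object
# comes from global_data['models'][0]['model'].
def fake_model(prompt, **kwargs):
    raise RuntimeError("model backend unavailable")

async def generate_model_response(model, inputs, top_k, top_p, temperature):
    # Same shape as the handlers after this commit: log the exception,
    # fall through the no-op `pass`, and return an empty fallback so the
    # caller only ever sees a normal value, never the exception itself.
    try:
        return [model(inputs, top_k=top_k, top_p=top_p, temperature=temperature)]
    except Exception as e:
        print(f"Error generating model response: {e}")
        pass
        return []

print(asyncio.run(generate_model_response(fake_model, "hello", 40, 0.95, 0.7)))  # prints the error, then []

Running the sketch prints the error message followed by an empty list, the same kind of fallback the endpoints above surface as [] or {"error": ...}.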