Custom Handler
Browse files- handler.py +25 -0
- requirements.txt +2 -0
handler.py
ADDED
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Dict, List, Any
|
2 |
+
from lmdeploy import pipeline
|
3 |
+
from lmdeploy.vl import load_image
|
4 |
+
from lmdeploy.messages import TurbomindEngineConfig
|
5 |
+
|
6 |
+
class EndpointHandler:
    """Custom Hugging Face inference-endpoint handler backed by an lmdeploy
    TurboMind pipeline.

    The model is loaded once in ``__init__``; each request is served by
    calling the instance with the request payload dict.
    """

    def __init__(self, path):
        """Preload the model at endpoint start-up.

        Args:
            path: Filesystem path or hub id of the model weights to serve.
        """
        # HF-format weights, single GPU (tensor parallelism tp=1).
        backend_config = TurbomindEngineConfig(
            model_name="deepseek-ai/deepseek-coder-33b-instruct",
            model_format='hf',
            tp=1,
        )
        self.pipe = pipeline(f"{path}", backend_config=backend_config, log_level='INFO')

    def __call__(self, data: Dict[str, Any]) -> Dict[str, Any]:
        """Serve one inference request.

        Args:
            data: Request payload; expects a non-empty string under the
                'query' key.

        Returns:
            ``{'response': <generated text>}`` on success, or
            ``{'error': <message>}`` when no query was supplied.
        """
        query = data.get('query')
        if not query:
            # Fix: the original returned a *list* here, contradicting the
            # declared Dict return type and producing an inconsistent
            # serialized payload shape; return a dict on both paths.
            return {'error': 'No query provided'}

        # Fix: pipeline() called with a list of prompts returns a list of
        # response objects; the original accessed `.text` on the list
        # itself, which raises AttributeError. Take the single response.
        responses = self.pipe([query])
        return {'response': responses[0].text}
|
requirements.txt
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
lmdeploy
|
2 |
+
timm
|