krisoei committed on
Commit 1264bba · verified · 1 Parent(s): 110e4b6

Create handler.py

Files changed (1)
  1. handler.py +29 -0
handler.py ADDED
@@ -0,0 +1,29 @@
+ from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
+ import torch
+
+ class EndpointHandler:
+     def __init__(self, path=""):
+         self.tokenizer = AutoTokenizer.from_pretrained(path)
+         self.model = AutoModelForCausalLM.from_pretrained(path, torch_dtype=torch.float16, device_map="auto")
+         self.pipe = pipeline(
+             "text-generation",
+             model=self.model,
+             tokenizer=self.tokenizer,
+             max_new_tokens=512,
+             do_sample=True,
+             temperature=0.7,
+             top_p=0.95,
+         )
+
+     def __call__(self, data):
+         prompt = data.get("inputs", "")
+         if not prompt:
+             return {"error": "No input provided"}
+
+         # Generate response
+         response = self.pipe(prompt)[0]['generated_text']
+
+         # Remove the original prompt from the response
+         response = response[len(prompt):].strip()
+
+         return {"generated_text": response}
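For reference, a handler like this is exercised with a JSON-style payload whose "inputs" field carries the prompt. The snippet below is a minimal local smoke test sketch, not part of this commit: the import of EndpointHandler from handler.py, the path "." and the example prompt are assumptions, and it presumes the model and tokenizer files live alongside handler.py with a GPU available for device_map="auto".

# Minimal local smoke test for the handler above (a sketch, not part of the commit).
# Assumes handler.py sits next to the model repository files (config, tokenizer,
# weights) and that a CUDA device with fp16 support is available.
from handler import EndpointHandler

handler = EndpointHandler(path=".")  # "." is a placeholder for the repository root

# The request body arrives as a dict; "inputs" carries the prompt string.
payload = {"inputs": "Write a one-sentence summary of what a custom handler does."}

result = handler(payload)
print(result["generated_text"])

Because __call__ strips the echoed prompt before returning, the printed text contains only the newly generated continuation.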