fix handler bug
handler.py  CHANGED  +1 −13
@@ -6,7 +6,7 @@ from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
 class EndpointHandler:
     def __init__(self, path=""):
         # load the model
-        tokenizer =
+        tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-128k-instruct")
         model = AutoModelForCausalLM.from_pretrained(
             "microsoft/Phi-3-mini-128k-instruct",
             device_map="cuda",
@@ -19,18 +19,6 @@ class EndpointHandler:
     def __call__(self, data: Any) -> List[List[Dict[str, float]]]:
         inputs = data.pop("inputs", data)
         parameters = data.pop("parameters", None)
-
-        # Print parameters for debugging
-        print("Parameters before cleaning:", parameters)
-
-        # Remove unwanted keys from parameters
-        if parameters is not None:
-            for key in ['stop_sequences', 'watermark', 'stop']:
-                if key in parameters:
-                    del parameters[key]
-
-        # Print parameters after cleaning
-        print("Parameters after cleaning:", parameters)
 
         # pass inputs with all kwargs in data
         if parameters is not None:
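Context on the fix: before this commit, line 9 of handler.py read "tokenizer =" with no right-hand side, a syntax error that prevented the handler module from loading at all. The commit completes the tokenizer assignment and also drops the debugging block in __call__ that printed the parameters and stripped the stop_sequences, watermark, and stop keys before the call. Below is a minimal sketch of how the fixed handler plausibly fits together, following the standard Hugging Face Inference Endpoints custom-handler pattern; only the lines visible in the hunks above come from the commit, and the torch_dtype, trust_remote_code, and self.pipeline lines are assumptions filling in parts the diff does not show.

from typing import Any, Dict, List

from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline


class EndpointHandler:
    def __init__(self, path=""):
        # load the model (the fixed line: the tokenizer assignment previously
        # had no right-hand side)
        tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-128k-instruct")
        model = AutoModelForCausalLM.from_pretrained(
            "microsoft/Phi-3-mini-128k-instruct",
            device_map="cuda",
            torch_dtype="auto",       # assumption: not visible in the hunks
            trust_remote_code=True,   # assumption: not visible in the hunks
        )
        # assumption: model and tokenizer are wrapped in a text-generation
        # pipeline, matching the pipeline import in the hunk header
        self.pipeline = pipeline("text-generation", model=model, tokenizer=tokenizer)

    def __call__(self, data: Any) -> List[List[Dict[str, float]]]:
        inputs = data.pop("inputs", data)
        parameters = data.pop("parameters", None)

        # pass inputs with all kwargs in data
        if parameters is not None:
            prediction = self.pipeline(inputs, **parameters)
        else:
            prediction = self.pipeline(inputs)
        return prediction

A request payload such as {"inputs": "Hello", "parameters": {"max_new_tokens": 50}} would flow through unchanged. Note that with the cleaning block removed, keys like stop_sequences, watermark, and stop are no longer filtered out, so TGI-style parameters now reach the pipeline call as-is.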