to cpu
inference.py  +1 -1
inference.py
CHANGED
@@ -33,7 +33,7 @@ def load_model(
 def prepare_input(
     messages: List[Dict[str, str]],
     tokenizer: any,
-    device: str = "
+    device: str = "cpu"
 ) -> torch.Tensor:
     """
     Prepare input for the model by applying chat template and tokenization.
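The diff only touches the signature: the default for device changes to "cpu". The function body is not shown, so the sketch below is an assumption of what prepare_input might look like after this commit, based on the docstring and the standard transformers apply_chat_template API; it is not code taken from this repository.

from typing import Dict, List

import torch


def prepare_input(
    messages: List[Dict[str, str]],
    tokenizer: any,  # annotation kept as written in the diff (builtin any, not typing.Any)
    device: str = "cpu",
) -> torch.Tensor:
    """
    Prepare input for the model by applying chat template and tokenization.
    """
    # Hypothetical body: render the chat messages with the tokenizer's chat
    # template and tokenize them in one step, returning a tensor of token ids.
    input_ids = tokenizer.apply_chat_template(
        messages,
        add_generation_prompt=True,
        return_tensors="pt",
    )
    # With the new default, the tensor stays on the CPU unless a caller
    # passes an explicit device such as "cuda".
    return input_ids.to(device)

If the previous default pointed at a GPU device, callers that relied on it now need to pass device explicitly to keep GPU placement.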