Upload main.py
main.py
CHANGED
@@ -53,7 +53,7 @@ def main():
     # Load the model
     llm = Llama(
         model_path=model_path,
-        n_ctx=
+        n_ctx=572, # Set the maximum context length
         max_tokens=max_tokens # Control the maximum number of tokens generated in the response
     )
 
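For context, the sketch below shows how this loading pattern is typically wired up with llama-cpp-python; the model path, prompt, and token limit are placeholders, not values taken from this Space. In llama-cpp-python, n_ctx is a constructor argument that sets the context window (prompt plus completion tokens), while max_tokens is normally supplied to the generation call rather than to Llama() itself.

```python
# Minimal sketch of the pattern in main.py, assuming llama-cpp-python.
# The model path, prompt, and max_tokens value are placeholders.
from llama_cpp import Llama

model_path = "model.gguf"  # placeholder GGUF model file
max_tokens = 256           # placeholder cap on generated tokens

# Load the model. n_ctx is the context window (prompt + completion),
# so it should be at least the prompt length plus max_tokens.
llm = Llama(
    model_path=model_path,
    n_ctx=572,  # the value introduced in this commit
)

# max_tokens is passed at generation time, not to the Llama() constructor.
output = llm("Q: What does n_ctx control? A:", max_tokens=max_tokens)
print(output["choices"][0]["text"])
```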