File size: 308 Bytes
f97ff82 614af73 f97ff82 56d90b7 f97ff82 614af73 f97ff82 |
1 2 3 4 5 6 7 8 9 10 11 |
"""Minimal interactive chat REPL over a local GGUF model via ctransformers."""
from ctransformers import AutoModelForCausalLM

# Load the quantized model once at startup; path is relative to the CWD.
llm = AutoModelForCausalLM.from_pretrained("chat.gguf")

while True:
    ask = input("Enter a Question (Q for quit): ")
    if ask.lower() == "q":  # accept "q" or "Q" without a double comparison
        break
    # Generate a completion; max_new_tokens caps runaway generations.
    ans = llm(ask, max_new_tokens=1024)
    # The model returns only the continuation, so echo the prompt before it.
    print(ask + ans)
print("Goodbye!")