# File size: 307 Bytes
# f97ff82 bc15c02 f97ff82 046e869 f97ff82 a731dd6 f97ff82
# (web-viewer artifacts preserved as comments: file size, blame hashes, line-number gutter)
from ctransformers import AutoModelForCausalLM


def main() -> None:
    """Run a minimal interactive chat REPL over a local GGUF model.

    Loads ``chat.gguf`` via ctransformers, then loops: read a question
    from stdin, generate up to 1024 new tokens, and print the question
    followed by the model's completion. Entering ``q``/``Q`` (whitespace
    and case ignored) exits the loop.
    """
    # Model file is resolved relative to the working directory —
    # assumes chat.gguf is present there (TODO confirm with deployment).
    llm = AutoModelForCausalLM.from_pretrained("chat.gguf")
    while True:
        ask = input("Enter a Question (Q for quit): ")
        # Normalize once instead of comparing against both "q" and "Q";
        # strip() tolerates accidental surrounding whitespace.
        if ask.strip().lower() == "q":
            break
        ans = llm(ask, max_new_tokens=1024)
        # The completion continues the prompt, so print them concatenated.
        print(ask + ans)
    print("Goodbye!")


if __name__ == "__main__":
    # Guard so importing this module does not start the REPL.
    main()