```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# Minimal single-turn chat demo for the shellchat-v1 checkpoint.
model_name = "shellchat-v1"

# NOTE(review): trust_remote_code=True executes Python shipped inside the
# model repository at load time — only keep this for a trusted checkpoint.
model = AutoModelForCausalLM.from_pretrained(model_name, trust_remote_code=True).to("cuda")
tokenizer = AutoTokenizer.from_pretrained(model_name)

# One user query with an empty conversation history (first turn).
query = "hello world!"
history = []
# chat() is provided by the checkpoint's remote code, so its exact signature
# and return value are model-specific — presumably the assistant reply
# (and possibly updated history); verify against the checkpoint's docs.
response = model.chat(query, history, tokenizer)
```