yugamj committed on
Commit aeea180 · verified · 1 Parent(s): 5745b4f

removed max_len param from model.gen

Files changed (1): app.py (+2 −2)
app.py CHANGED
@@ -28,7 +28,7 @@ def infer_1(inp, chat_history):
     inp_tok = tokenizer(inp_1, return_tensors="pt")
     X = inp_tok["input_ids"].to(device)
     a = inp_tok["attention_mask"].to(device)
-    output = model.generate(X, attention_mask=a, max_length=100, num_beams=5,top_k=50)
+    output = model.generate(X, attention_mask=a)
     output = tokenizer.decode(output[0])
     #Remove the user input part seq
     output = output[len(inp_1):]
@@ -91,7 +91,7 @@ training_data = pd.read_csv('ques_ans_v5.csv', index_col = 0)
 #Launch with gradio
 with gr.Blocks() as demo:
     gr.HTML(project_heading)
-    #gr.Markdown(value = "This Chatbot is Based on a fine-tuned version of 'GPT2-Small'. Primarily the text from Robert Kiyosaki's book, \"Rich Dad Poor Dad\" was processed and used in training.")
+
     gr.Markdown(value = project_des)

     chatbot = gr.Chatbot(label = "Trained on scripts from \"Rich Dad Poor Dad\"") #value = [['Hey there User', 'Hey there CB']]
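Note on the change: with the explicit max_length, num_beams, and top_k arguments removed, model.generate falls back to the defaults in the model's generation config (for GPT-2 that typically means greedy decoding with a short length cap). If the previous behavior is still wanted without re-adding the kwargs to every call, one option is to set the defaults once on the model. The sketch below is illustrative only and is not part of this commit; it reuses the model, tokenizer, device, and inp_1 objects from app.py.

# Illustrative sketch, not part of this commit: restore the removed generation
# settings as model-level defaults instead of passing them on every call.
from transformers import GenerationConfig

model.generation_config = GenerationConfig(
    max_new_tokens=100,                   # plays roughly the role of the old max_length=100
    num_beams=5,                          # beam search, as in the removed call
    top_k=50,                             # as in the removed call (only effective if do_sample=True)
    pad_token_id=tokenizer.eos_token_id,  # GPT-2 defines no pad token; avoids a generate() warning
)

inp_tok = tokenizer(inp_1, return_tensors="pt")
output = model.generate(
    inp_tok["input_ids"].to(device),
    attention_mask=inp_tok["attention_mask"].to(device),
)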