# Finance_chatbot / app.py
# Import libraries
import torch
import gradio as gr
from transformers import GPT2LMHeadModel, GPT2Tokenizer
# Load the fine-tuned model and move it to the best available device
device = "cuda" if torch.cuda.is_available() else "mps" if torch.backends.mps.is_available() else "cpu"
model = torch.load('finance_chatbot_gpt2_complete_model.pt', map_location=torch.device('cpu'))
model = model.to(device)
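# Put the model in eval mode so dropout is disabled during generation
# (assumes the pickled object is a standard torch.nn.Module such as GPT2LMHeadModel)
model.eval()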
# Load the GPT-2 tokenizer and register the special tokens used during fine-tuning
tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
tokenizer.add_special_tokens({"pad_token": "<pad>",
                              "bos_token": "<startofstring>",
                              "eos_token": "<endofstring>"})
tokenizer.add_tokens(["<bot>:"])
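# The special tokens above must match the ones used when the model was fine-tuned;
# the saved model is assumed to already contain the correspondingly resized token
# embeddings, so no resize_token_embeddings() call is made here.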
# Inference function: wraps the user input in the training prompt format,
# generates a continuation, and returns only the model's reply
def infer(inp, history):
    # Wrap the user message in the prompt format used during training
    inp = "<startofstring>" + inp + "<bot>:"
    inp_tok = tokenizer(inp, return_tensors="pt")
    X = inp_tok["input_ids"].to(device)
    a = inp_tok["attention_mask"].to(device)
    output = model.generate(X, attention_mask=a)
    output = tokenizer.decode(output[0])
    # generate() echoes the prompt, so strip the leading prompt text from the decoded output
    return output[len(inp):]
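# Quick local sanity check (hypothetical prompt), e.g.:
#   print(infer("How can I start earning passive income?", history=[]))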
# Launch the Gradio chat interface
gr.ChatInterface(
    infer,
    chatbot=gr.Chatbot(height=300),
    textbox=gr.Textbox(placeholder="Type Here", container=False, scale=10),
    title="Finance Chatbot Based on Rich Dad Poor Dad",
    description="This chatbot is based on a fine-tuned version of GPT-2, trained on popular quotes from Robert Kiyosaki's book 'Rich Dad Poor Dad' and a summary of the book.",
    theme="soft",
examples=["What do you want to earn more passive income?", "What is the result of people working all their lives for someone else?", "What tells the story of how a person handles money?"],
    cache_examples=True,
    retry_btn=None,
    undo_btn="Delete Previous",
    clear_btn="Clear",
).launch()