SusiePHaltmann committed
Commit b087f1f
Parent: a6b1def

Update app.py

Files changed (1)
  1. app.py +30 -5
app.py CHANGED
@@ -1,9 +1,34 @@
 import gradio as gr
+from transformers import AutoModelForCausalLM, AutoTokenizer
 
+# Load the model and tokenizer
+model_name = "CatGPT"  # Replace with the exact model name if necessary
+model = AutoModelForCausalLM.from_pretrained(model_name)
+tokenizer = AutoTokenizer.from_pretrained(model_name)
+
+# Define the chat function
 def chat(input_text):
-    inputs = tokenizer.encode(input_text, return_tensors="pt")
-    outputs = model.generate(inputs)
-    response = tokenizer.decode(outputs[0])
-    return response
+    try:
+        # Tokenize the input
+        inputs = tokenizer(input_text, return_tensors="pt")
+
+        # Generate a response from the model
+        outputs = model.generate(**inputs, max_length=150)
+
+        # Decode and return the response
+        response = tokenizer.decode(outputs[0], skip_special_tokens=True)
+        return response
+
+    except Exception as e:
+        return f"An error occurred: {str(e)}"
+
+# Create the Gradio interface
+iface = gr.Interface(fn=chat,
+                     inputs=gr.inputs.Textbox(lines=7, label="Enter your message"),
+                     outputs=gr.outputs.Textbox(label="Response"),
+                     title="CatGPT - Chatbot",
+                     description="Chat with CatGPT, a fun and intelligent chatbot!")
 
-gr.Interface(fn=chat, inputs="text", outputs="text").launch()
+# Launch the interface
+if __name__ == "__main__":
+    iface.launch()
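
The gr.inputs.Textbox and gr.outputs.Textbox components used in the new interface come from Gradio's legacy namespace, which was deprecated in Gradio 3.x and removed in 4.x. A minimal sketch of the equivalent interface construction against the current component API, assuming Gradio 4.x and reusing the chat function defined in the diff above:

# Equivalent interface construction for Gradio 4.x: components such as
# gr.Textbox are passed directly, since the gr.inputs / gr.outputs
# namespaces no longer exist.
iface = gr.Interface(
    fn=chat,  # chat function defined in app.py above
    inputs=gr.Textbox(lines=7, label="Enter your message"),
    outputs=gr.Textbox(label="Response"),
    title="CatGPT - Chatbot",
    description="Chat with CatGPT, a fun and intelligent chatbot!",
)

if __name__ == "__main__":
    iface.launch()

Separately, max_length=150 in model.generate caps the combined length of the prompt and the generated tokens; max_new_tokens=150 is the usual choice when only the length of the reply should be limited.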