Update app.py
Browse files
app.py
CHANGED
@@ -4,6 +4,7 @@ import os, sys
|
|
4 |
from transformers import AutoTokenizer, AutoModelForCausalLM, GenerationConfig, pipeline
|
5 |
import torch
|
6 |
import spaces
|
|
|
7 |
|
8 |
# Define the model repository
|
9 |
# REPO_NAME = 'schuler/experimental-JP47D20'
|
@@ -108,8 +109,10 @@ def respond(
|
|
108 |
"""
|
109 |
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
|
110 |
"""
|
|
|
111 |
status_text = \
|
112 |
f"This chat uses the {REPO_NAME} model with {model.get_memory_footprint() / 1e6:.2f} MB memory footprint. " + \
|
|
|
113 |
f"You may ask questions such as 'What is biology?' or 'What is the human body?'"
|
114 |
|
115 |
"""
|
|
|
4 |
from transformers import AutoTokenizer, AutoModelForCausalLM, GenerationConfig, pipeline
|
5 |
import torch
|
6 |
import spaces
|
7 |
+
import psutil
|
8 |
|
9 |
# Define the model repository
|
10 |
# REPO_NAME = 'schuler/experimental-JP47D20'
|
|
|
109 |
"""
|
110 |
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
|
111 |
"""
|
112 |
+
cpu_usage = psutil.cpu_percent(interval=1)
|
113 |
status_text = \
|
114 |
f"This chat uses the {REPO_NAME} model with {model.get_memory_footprint() / 1e6:.2f} MB memory footprint. " + \
|
115 |
+
f"Current CPU usage is {cpu_usage:.2f}%. " + \
|
116 |
f"You may ask questions such as 'What is biology?' or 'What is the human body?'"
|
117 |
|
118 |
"""
|