yijin928's picture
Upload 309 files
6af0efe verified
raw
history blame
429 Bytes
from transformers import pipeline
from PIL import Image
import logging
# Threshold on the top classification score: values at or below SCORE are
# treated as safe content; values strictly above it are flagged as NSFW.
SCORE = 0.965
# Silence transformers' informational/warning output (e.g. model-loading
# messages); only errors will be logged.
logging.getLogger('transformers').setLevel(logging.ERROR)
def nsfw_image(img_path: str, model_path: str) -> bool:
    """Return True if the image at *img_path* is classified as NSFW.

    Parameters
    ----------
    img_path : str
        Filesystem path of the image to check.
    model_path : str
        Path or model identifier accepted by ``transformers.pipeline``
        for the ``"image-classification"`` task.

    Returns
    -------
    bool
        True when the top prediction's score is strictly greater than
        the module-level ``SCORE`` threshold, False otherwise.
    """
    # Build the classifier before opening the image: pipeline construction
    # does not depend on the image, and this keeps the file handle's
    # lifetime limited to the actual inference call.
    # NOTE(review): the pipeline is rebuilt on every call, which is
    # expensive — consider caching it per model_path at the call site.
    predict = pipeline("image-classification", model=model_path)
    with Image.open(img_path) as img:
        result = predict(img)
    # NOTE(review): only the top score is inspected, not its label —
    # confirm the model's label semantics match this assumption.
    return result[0]["score"] > SCORE