VamooseBambel committed
Commit
61a2aa3
1 Parent(s): 00f2e78

Update app.py

Files changed (1)
app.py: +7 -7
app.py CHANGED
@@ -7,11 +7,11 @@ import numpy as np
 from nsfw_detector import NSFWDetector, create_error_image
 from PIL import Image
 import time
-import logging
+# import logging
 from threading import Timer
 
-logging.basicConfig(level=logging.INFO)
-logger = logging.getLogger(__name__)
+# logging.basicConfig(level=logging.INFO)
+# logger = logging.getLogger(__name__)
 
 # Global variables
 global_model = None
@@ -81,7 +81,7 @@ def unload_model():
     global global_model, last_use_time
     current_time = time.time()
     if last_use_time and (current_time - last_use_time) >= TIMEOUT_SECONDS:
-        logger.info("Unloading model due to inactivity...")
+        # logger.info("Unloading model due to inactivity...")
         global_model = None
         torch.cuda.empty_cache()
         return "Model unloaded due to inactivity"
@@ -104,7 +104,7 @@ def generate_image(prompt, height, width, guidance_scale, pag_guidance_scale, nu
 
         # Load model if needed
         if global_model is None:
-            logger.info("Loading model...")
+            # logger.info("Loading model...")
             global_model = SanaPipeline("configs/sana_config/1024ms/Sana_1600M_img1024.yaml")
             global_model.from_pretrained("hf://Efficient-Large-Model/Sana_1600M_1024px/checkpoints/Sana_1600M_1024px.pth")
 
@@ -134,11 +134,11 @@ def generate_image(prompt, height, width, guidance_scale, pag_guidance_scale, nu
         if category == "SAFE":
            return image
         else:
-            logger.warning(f"NSFW content detected ({category} with {confidence:.2f}% confidence)")
+            # logger.warning(f"NSFW content detected ({category} with {confidence:.2f}% confidence)")
             return create_error_image()
 
     except Exception as e:
-        logger.error(f"Error in generate_image: {str(e)}")
+        # logger.error(f"Error in generate_image: {str(e)}")
         raise gr.Error(f"Generation failed: {str(e)}")
 
 # Gradio Interface
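
For context on the code these hunks touch: app.py loads the Sana pipeline lazily on the first request, records a last-use timestamp, and unloads the model (clearing the CUDA cache) once an inactivity timeout elapses; this commit only comments out the log calls inside that flow. Below is a minimal sketch of the same lazy-load / idle-unload pattern, assuming the threading.Timer is re-armed after each use; the TIMEOUT_SECONDS value, the load_pipeline() placeholder, and the ensure_model() helper are illustrative and not taken from the diff.

import time
import torch
from threading import Timer

TIMEOUT_SECONDS = 300  # assumed idle window; the real value is defined outside these hunks
global_model = None
last_use_time = None

def load_pipeline():
    # Placeholder for the SanaPipeline setup shown in the diff
    # (SanaPipeline(config_yaml) followed by .from_pretrained(checkpoint)).
    return object()

def unload_model():
    # Mirrors unload_model() in the diff: drop the pipeline and release cached
    # GPU memory once the idle window has passed since the last use.
    global global_model, last_use_time
    if last_use_time and (time.time() - last_use_time) >= TIMEOUT_SECONDS:
        global_model = None
        torch.cuda.empty_cache()

def ensure_model():
    # Hypothetical helper: load on first use, stamp the last-use time, and arm
    # a one-shot timer that may unload the model after the timeout.
    global global_model, last_use_time
    if global_model is None:
        global_model = load_pipeline()
    last_use_time = time.time()
    Timer(TIMEOUT_SECONDS, unload_model).start()
    return global_model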