Spaces: Running on Zero
try lisa
app.py CHANGED
@@ -20,14 +20,10 @@ import gradio as gr
 
 import torch
 import torch.nn.functional as F
-import transformers
 from PIL import Image
 import numpy as np
 import time
 import threading
-import subprocess
-import sys
-import importlib
 
 from ncut_pytorch.backbone import extract_features, load_model
 from ncut_pytorch.backbone import MODEL_DICT, LAYER_DICT, RES_DICT
@@ -546,45 +542,54 @@ def run_fn(
     images = torch.stack(images)
 
 
+    if is_lisa:
+        import subprocess
+        import sys
+        import importlib
+        gr.Warning("LISA model is not compatible with the current version of transformers. Please contact the LISA and Llava authors for an update.")
+        gr.Warning("This is a dirty patch for the LISA model: switching to the old version of transformers.")
+        gr.Warning("Not guaranteed to work.")
+        # LISA and Llava are not compatible with the current version of transformers
+        # please contact the authors for an update
+        # this is a dirty patch for the LISA model
 
+        # pre-import the SD3 pipeline before transformers is unloaded
+        from diffusers import StableDiffusion3Pipeline
 
+        # unload the currently imported transformers modules
+        for module in list(sys.modules.keys()):
+            if "transformers" in module:
+                del sys.modules[module]
 
 
+        def install_transformers_version(version, target_dir):
+            """Install a specific version of transformers into a target directory."""
+            if not os.path.exists(target_dir):
+                os.makedirs(target_dir)
 
+            # install the pinned version into the target directory with pip
+            # subprocess.check_call([sys.executable, '-m', 'pip', 'install', f'transformers=={version}', '-t', target_dir])
+            os.system(f"{sys.executable} -m pip install transformers=={version} -t {target_dir} >> /dev/null 2>&1")
 
+        target_dir = '/tmp/lisa_transformers_v433'
+        if not os.path.exists(target_dir):
+            install_transformers_version('4.33.0', target_dir)
 
+        # add the pinned version's path to sys.path
+        sys.path.insert(0, target_dir)
 
+        transformers = importlib.import_module("transformers")
 
+    if not is_lisa:
+        import subprocess
+        import sys
+        import importlib
+        # remove the pinned transformers path from sys.path
 
+        if "/tmp/lisa_transformers_v433" in sys.path:
+            sys.path.remove("/tmp/lisa_transformers_v433")
 
+        transformers = importlib.import_module("transformers")
 
 
 
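The block added above hot-swaps the `transformers` package at call time: it installs a pinned copy into a private directory, drops the already-imported modules from `sys.modules`, prepends that directory to `sys.path`, and re-imports. Below is a minimal standalone sketch of the same trick; the version number, target directory, and helper names are illustrative placeholders, not values taken from the app.

```python
# Sketch of the runtime version-swap trick used in the patch above.
# PINNED_VERSION and TARGET_DIR are illustrative placeholders.
import importlib
import os
import subprocess
import sys
from types import ModuleType

PINNED_VERSION = "4.33.0"
TARGET_DIR = "/tmp/transformers_pinned"


def install_pinned(version: str, target_dir: str) -> None:
    """Install a specific transformers version into an isolated directory."""
    os.makedirs(target_dir, exist_ok=True)
    subprocess.check_call(
        [sys.executable, "-m", "pip", "install", f"transformers=={version}", "-t", target_dir]
    )


def purge_loaded_transformers() -> None:
    """Drop cached transformers modules so the next import re-resolves from sys.path."""
    for name in list(sys.modules):
        if name == "transformers" or name.startswith("transformers."):
            del sys.modules[name]


def swap_to_pinned() -> ModuleType:
    """Switch to the pinned copy, installing it on first use."""
    if not os.path.isdir(TARGET_DIR):
        install_pinned(PINNED_VERSION, TARGET_DIR)
    purge_loaded_transformers()
    if TARGET_DIR not in sys.path:
        sys.path.insert(0, TARGET_DIR)
    return importlib.import_module("transformers")


def swap_back() -> ModuleType:
    """Switch back to the normally installed copy."""
    purge_loaded_transformers()
    if TARGET_DIR in sys.path:
        sys.path.remove(TARGET_DIR)
    return importlib.import_module("transformers")
```

Unlike the substring check in the patch (`if "transformers" in module`), the sketch only purges the `transformers` namespace itself, which avoids unloading unrelated modules whose names happen to contain the word.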
@@ -1293,6 +1298,7 @@ with demo:
     gr.Markdown("- **Nyström** Normalized Cut is a new approximation algorithm developed for large-scale graph cuts; a large graph of a million nodes can be processed in under 10s (CPU) or 2s (GPU).")
     gr.Markdown("- **spectral-tSNE** visualization is a new method to visualize the high-dimensional eigenvector space with a 3D RGB cube. Color is aligned across images; color indicates distance in representation.")
 
+
     with gr.Row():
         with gr.Column():
             gr.Markdown("##### This demo is for `ncut-pytorch`, [Documentation](https://ncut-pytorch.readthedocs.io/) ")
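For context, the two bullets rendered above correspond to the core ncut-pytorch workflow: run the Nyström-approximated Normalized Cut on backbone features, then color the eigenvectors with spectral-tSNE. A rough sketch is below, based on the `NCUT` and `rgb_from_tsne_3d` entry points described in the ncut-pytorch documentation; treat the exact signatures, shapes, and parameter values as assumptions to be checked against the linked docs.

```python
# Rough sketch of the ncut-pytorch pipeline the demo wraps.
# Shapes, num_eig, and the exact API signatures are assumptions; see the docs linked above.
import torch
from ncut_pytorch import NCUT, rgb_from_tsne_3d

# Stand-in for backbone features: 20 images, 32x32 patches, 768-dim tokens.
features = torch.rand(20, 32, 32, 768)
nodes = features.reshape(-1, 768)  # one graph node per patch, pooled across images

# Nystrom-approximated Normalized Cut: eigenvectors of the patch graph.
eigvectors, eigvalues = NCUT(num_eig=30, device="cpu").fit_transform(nodes)

# spectral-tSNE coloring: map eigenvectors into a 3D RGB cube, aligned across images.
_, rgb = rgb_from_tsne_3d(eigvectors)
rgb_per_image = rgb.reshape(20, 32, 32, 3)  # per-patch colors for visualization
```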