huzey committed on
Commit
c15986b
1 Parent(s): 38ab9d8
Files changed (1) hide show
  1. app.py +31 -31
app.py CHANGED
@@ -546,45 +546,45 @@ def run_fn(
546
  images = torch.stack(images)
547
 
548
 
549
- if is_lisa:
550
- # LISA and Llava is not compatible with the current version of transformers
551
- # please contact the author for update
552
- # this is a dirty patch for the LISA model
553
 
554
- # pre-import the SD3 pipeline
555
- from diffusers import StableDiffusion3Pipeline
556
 
557
- # unloading the current transformers
558
- for module in list(sys.modules.keys()):
559
- if "transformers" in module:
560
- del sys.modules[module]
561
 
562
 
563
- def install_transformers_version(version, target_dir):
564
- """Install a specific version of transformers to a target directory."""
565
- if not os.path.exists(target_dir):
566
- os.makedirs(target_dir)
567
 
568
- # Use subprocess to run the pip command
569
- # subprocess.check_call([sys.executable, '-m', 'pip', 'install', f'transformers=={version}', '-t', target_dir])
570
- os.system(f"{sys.executable} -m pip install transformers=={version} -t {target_dir} >> /dev/null 2>&1")
571
 
572
- target_dir = '/tmp/lisa_transformers_v433'
573
- if not os.path.exists(target_dir):
574
- install_transformers_version('4.33.0', target_dir)
575
 
576
- # Add the new version path to sys.path
577
- sys.path.insert(0, target_dir)
578
 
579
- transformers = importlib.import_module("transformers")
580
 
581
- if not is_lisa:
582
- # remove the LISA model from the sys.path
583
 
584
- if "/tmp/lisa_transformers_v433" in sys.path:
585
- sys.path.remove("/tmp/lisa_transformers_v433")
586
 
587
- transformers = importlib.import_module("transformers")
588
 
589
 
590
 
@@ -1299,9 +1299,9 @@ with demo:
1299
  with gr.Column():
1300
  gr.Markdown("###### Running out of GPU? Try [Demo](https://ncut-pytorch.readthedocs.io/en/latest/demo/) hosted at UPenn")
1301
 
1302
- # for local development
1303
- if os.path.exists("/hf_token.txt"):
1304
- os.environ["HF_ACCESS_TOKEN"] = open("/hf_token.txt").read().strip()
1305
 
1306
  if DOWNLOAD_ALL_MODELS_DATASETS:
1307
  from ncut_pytorch.backbone import download_all_models
 
546
  images = torch.stack(images)
547
 
548
 
549
+ # if is_lisa:
550
+ # # LISA and Llava is not compatible with the current version of transformers
551
+ # # please contact the author for update
552
+ # # this is a dirty patch for the LISA model
553
 
554
+ # # pre-import the SD3 pipeline
555
+ # from diffusers import StableDiffusion3Pipeline
556
 
557
+ # # unloading the current transformers
558
+ # for module in list(sys.modules.keys()):
559
+ # if "transformers" in module:
560
+ # del sys.modules[module]
561
 
562
 
563
+ # def install_transformers_version(version, target_dir):
564
+ # """Install a specific version of transformers to a target directory."""
565
+ # if not os.path.exists(target_dir):
566
+ # os.makedirs(target_dir)
567
 
568
+ # # Use subprocess to run the pip command
569
+ # # subprocess.check_call([sys.executable, '-m', 'pip', 'install', f'transformers=={version}', '-t', target_dir])
570
+ # os.system(f"{sys.executable} -m pip install transformers=={version} -t {target_dir} >> /dev/null 2>&1")
571
 
572
+ # target_dir = '/tmp/lisa_transformers_v433'
573
+ # if not os.path.exists(target_dir):
574
+ # install_transformers_version('4.33.0', target_dir)
575
 
576
+ # # Add the new version path to sys.path
577
+ # sys.path.insert(0, target_dir)
578
 
579
+ # transformers = importlib.import_module("transformers")
580
 
581
+ # if not is_lisa:
582
+ # # remove the LISA model from the sys.path
583
 
584
+ # if "/tmp/lisa_transformers_v433" in sys.path:
585
+ # sys.path.remove("/tmp/lisa_transformers_v433")
586
 
587
+ # transformers = importlib.import_module("transformers")
588
 
589
 
590
 
 
1299
  with gr.Column():
1300
  gr.Markdown("###### Running out of GPU? Try [Demo](https://ncut-pytorch.readthedocs.io/en/latest/demo/) hosted at UPenn")
1301
 
1302
+ # # for local development
1303
+ # if os.path.exists("/hf_token.txt"):
1304
+ # os.environ["HF_ACCESS_TOKEN"] = open("/hf_token.txt").read().strip()
1305
 
1306
  if DOWNLOAD_ALL_MODELS_DATASETS:
1307
  from ncut_pytorch.backbone import download_all_models