asahi417 committed on
Commit
bea31cf
·
verified ·
1 Parent(s): cac69c8

Upload folder using huggingface_hub

Browse files
Files changed (1) hide show
  1. app.py +0 -15
app.py CHANGED
@@ -1,23 +1,8 @@
1
- import os
2
- import subprocess
3
-
4
  import torch
5
  import gradio as gr
6
  from clip_interrogator import Config, Interrogator
7
 
8
 
9
- CACHE_URLS = [
10
- 'https://huggingface.co/pharma/ci-preprocess/resolve/main/ViT-H-14_laion2b_s32b_b79k_artists.pkl',
11
- 'https://huggingface.co/pharma/ci-preprocess/resolve/main/ViT-H-14_laion2b_s32b_b79k_flavors.pkl',
12
- 'https://huggingface.co/pharma/ci-preprocess/resolve/main/ViT-H-14_laion2b_s32b_b79k_mediums.pkl',
13
- 'https://huggingface.co/pharma/ci-preprocess/resolve/main/ViT-H-14_laion2b_s32b_b79k_movements.pkl',
14
- 'https://huggingface.co/pharma/ci-preprocess/resolve/main/ViT-H-14_laion2b_s32b_b79k_trendings.pkl',
15
- ]
16
- os.makedirs('cache', exist_ok=True)
17
- for url in CACHE_URLS:
18
- subprocess.run(['wget', url, '-P', 'cache'], stdout=subprocess.PIPE).stdout.decode('utf-8')
19
-
20
-
21
  config = Config()
22
  config.device = 'cuda' if torch.cuda.is_available() else 'cpu'
23
  config.blip_offload = False if torch.cuda.is_available() else True
 
 
 
 
# app.py — CLIP Interrogator demo setup (post-commit state).
# NOTE(review): this commit removed the startup step that pre-downloaded
# ViT-H-14 preprocess .pkl caches via wget/subprocess; the script now
# only builds the interrogator Config.
import torch
import gradio as gr
from clip_interrogator import Config, Interrogator

# Probe the hardware exactly once; both settings below derive from it.
_cuda_available = torch.cuda.is_available()

config = Config()
# Run on the GPU when one is present, otherwise fall back to CPU.
config.device = 'cuda' if _cuda_available else 'cpu'
# Offload BLIP only on CPU-only hosts — presumably to reduce memory
# pressure there; confirm against clip_interrogator's Config docs.
config.blip_offload = not _cuda_available