import os
import requests
import json
import urllib.request
from requests.exceptions import RequestException
from tqdm import tqdm
from concurrent.futures import ThreadPoolExecutor, as_completed
CACHE_DIR = os.getenv("CACHE_DIR")  # must be set; the cache paths below are built from it
CACHE_JSON_PATH = os.path.join(CACHE_DIR, "cached_films.json")
MAX_WORKERS = 4 # Adjust the number of threads for concurrent downloads
def get_system_proxies():
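    """Return the system's HTTP/HTTPS proxy settings as a requests-style dict."""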
    try:
        proxies = urllib.request.getproxies()
        print("System proxies:", proxies)
        return {
            "http": proxies.get("http"),
            # Fall back to the HTTP proxy if no HTTPS proxy is configured.
            "https": proxies.get("https") or proxies.get("http")
        }
    except Exception as e:
        print(f"Error getting system proxies: {e}")
        return {}
def download_file_chunk(url, headers, proxies, start, end):
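    """Download a single byte range of the file and return its raw content."""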
    # Work on a copy of the shared headers so concurrent workers don't
    # overwrite each other's Range header.
    range_headers = dict(headers, Range=f"bytes={start}-{end}")
    response = requests.get(url, headers=range_headers, proxies=proxies, stream=True)
    response.raise_for_status()
    return response.content
def download_and_cache_file(file_url, token, cache_path, proxies=None):
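    """Download file_url in parallel byte-range chunks and write it to cache_path.

    Returns True on success and False on failure.
    """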
print(f"Downloading file from URL: {file_url} to {cache_path} with proxies: {proxies}")
headers = {'Authorization': f'Bearer {token}'}
try:
response = requests.head(file_url, headers=headers, proxies=proxies)
response.raise_for_status()
total_size = int(response.headers.get('content-length', 0))
os.makedirs(os.path.dirname(cache_path), exist_ok=True)
chunk_size = total_size // MAX_WORKERS
ranges = [(i, min(i + chunk_size, total_size) - 1) for i in range(0, total_size, chunk_size)]
with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
futures = [executor.submit(download_file_chunk, file_url, headers, proxies, start, end) for start, end in ranges]
with open(cache_path, 'wb') as f, tqdm(total=total_size, unit='B', unit_scale=True, desc=cache_path) as pbar:
for future in as_completed(futures):
chunk = future.result()
f.write(chunk)
pbar.update(len(chunk))
print(f'File cached to {cache_path} successfully.')
update_cache_json(file_url, cache_path)
return True
except RequestException as e:
print(f"Error downloading file: {e}")
except IOError as e:
print(f"Error writing file {cache_path}: {e}")
return False
def update_cache_json(file_url, cache_path):
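    """Record the cached file in the JSON index, keyed by its file name."""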
    cache_data = {}
    if os.path.exists(CACHE_JSON_PATH):
        with open(CACHE_JSON_PATH, 'r') as json_file:
            cache_data = json.load(json_file)
    film_title = os.path.basename(cache_path)
    cache_data[film_title] = cache_path
    with open(CACHE_JSON_PATH, 'w') as json_file:
        json.dump(cache_data, json_file, indent=2)
    print(f'Updated cache JSON: {CACHE_JSON_PATH} with {film_title}: {cache_path}')
def get_file_structure(repo, token, path="", proxies=None):
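    """List the files under `path` in the given Hugging Face repo via its tree API."""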
api_url = f"https://huggingface.co/api/models/{repo}/tree/main/{path}"
headers = {'Authorization': f'Bearer {token}'}
print(f"Fetching file structure from URL: {api_url} with proxies: {proxies}")
try:
response = requests.get(api_url, headers=headers, proxies=proxies)
response.raise_for_status()
return response.json()
except RequestException as e:
print(f"Error fetching file structure: {e}")
return []
def write_file_structure_to_json(file_structure, file_path):
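    """Dump the fetched file structure to a JSON file for later inspection."""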
    try:
        with open(file_path, 'w') as json_file:
            json.dump(file_structure, json_file, indent=2)
        print(f'File structure written to {file_path}')
    except IOError as e:
        print(f"Error writing file structure to JSON: {e}")
if __name__ == "__main__":
file_url = "https://huggingface.co/Unicone-Studio/jellyfin_media/resolve/main/films/Funky%20Monkey%202004/Funky%20Monkey%20(2004)%20Web-dl%201080p.mp4"
token = os.getenv("TOKEN")
cache_path = os.path.join(CACHE_DIR, "films/Funky Monkey 2004/Funky Monkey (2004) Web-dl 1080p.mp4")
proxy = get_system_proxies()
download_and_cache_file(file_url, token, cache_path, proxies=proxy)
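    # Optional example: use the listing helpers above to fetch the repo's file
    # tree and save it alongside the cache index. The "films" path and the
    # output filename are illustrative choices.
    file_structure = get_file_structure("Unicone-Studio/jellyfin_media", token, path="films", proxies=proxy)
    write_file_structure_to_json(file_structure, os.path.join(CACHE_DIR, "file_structure.json"))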