# Hugging Face Space: GGUF metadata updater
import os
import shutil
import subprocess

import gradio as gr
from apscheduler.schedulers.background import BackgroundScheduler
from gradio_huggingfacehub_search import HuggingfaceHubSearch
from huggingface_hub import HfApi, snapshot_download

# Token for the scheduled self-restart job (set in the Space's secrets).
# May be None locally; restart_space() would then fail with an auth error.
HF_TOKEN = os.environ.get("HF_TOKEN")
def process_model(
    model_id: str,
    file_path: str,
    file_path_dropdown: str,
    key: str,
    value: str,
    oauth_token: gr.OAuthToken | None,
):
    """Download one GGUF file from the Hub, rewrite a metadata key, re-upload it.

    Args:
        model_id: Hub repo id ("user/name") that holds the GGUF file.
        file_path: Path of the file inside the repo (free-text fallback).
        file_path_dropdown: Path chosen from the auto-populated dropdown;
            takes precedence over ``file_path`` when set.
        key: Metadata key to update.
        value: New value for the metadata key.
        oauth_token: OAuth token of the logged-in Gradio user.

    Returns:
        A status string; errors are reported as "Something went wrong: ...".

    Raises:
        ValueError: If the user is not logged in.
    """
    # Guard both "no token object at all" and "token object without a token" —
    # the original dereferenced oauth_token.token first and crashed when
    # oauth_token itself was None (logged-out user).
    if oauth_token is None or oauth_token.token is None:
        raise ValueError("You must be logged in to use gguf-metadata-updater")
    print(f"Model ID: {model_id}")
    print(f"file_path: {file_path}")
    print(f"file_path_dropdown: {file_path_dropdown}")
    print(f"key: {key}")
    print(f"value: {value}")
    model_name = model_id.split("/")[-1]
    # The dropdown (populated from the repo tree) wins over the free-text box.
    file_in_repo = file_path_dropdown if file_path_dropdown else file_path
    print(f"FILE_PATH: {file_in_repo}")
    try:
        api = HfApi(token=oauth_token.token)
        # Bug fix: derive the display name from the path actually used,
        # not from the free-text box (they differ when the dropdown is used).
        file_name = file_in_repo.split("/")[-1]
        print("Starting download")
        api.snapshot_download(
            repo_id=model_id,
            allow_patterns=file_in_repo,
            local_dir=model_name,
        )
        print("Model downloaded successfully!")
        # Run the llama.cpp helper with an argv list (shell=False) so
        # user-supplied key/value cannot inject shell commands, and
        # check=True so a failed rewrite doesn't silently re-upload
        # an unchanged file.
        subprocess.run(
            [
                "python3",
                "llama.cpp/gguf-py/scripts/gguf_set_metadata.py",
                f"{model_name}/{file_in_repo}",
                key,
                value,
                "--force",
            ],
            check=True,
        )
        print(f"Model metadata {key} updated to {value} successfully!")
        print("Reuploading file")
        # Upload the modified gguf file(s) back to the source repo.
        api.upload_folder(
            folder_path=model_name,
            commit_message=f"Updating {file_name} metadata {key} to {value}",
            repo_id=model_id,
            allow_patterns=["*.gguf"],
        )
        print("Uploaded successfully!")
        return "Processing complete"
    except Exception as e:
        return f"Something went wrong: {e}"
    finally:
        # Always remove the local download, even on failure — the original
        # only cleaned up on success and leaked disk on errors.
        shutil.rmtree(model_name, ignore_errors=True)
with gr.Blocks() as demo:
    # Fixed message typo: the app is the "updater", not "updated".
    gr.Markdown("You must be logged in to use GGUF metadata updater.")
    gr.LoginButton(min_width=250)

    model_id = HuggingfaceHubSearch(
        label="Hub Model ID",
        placeholder="Search for model id on Huggingface",
        search_type="model",
    )
    # Free-text path entry; swapped for the dropdown once a repo is chosen.
    file_path = gr.Textbox(lines=1, label="File path")
    file_path_dropdown = gr.Dropdown(["None"], label="File", visible=False)
    key = gr.Textbox(lines=1, label="Key")
    value = gr.Textbox(lines=1, label="Value")

    iface = gr.Interface(
        fn=process_model,
        inputs=[model_id, file_path, file_path_dropdown, key, value],
        outputs=[
            gr.Markdown(label="output"),
        ],
        title="Update metadata for a GGUF file",
        description="The space takes an HF repo, a file within that repo, a metadata key, and new metadata value to update it to.",
        api_name=False,
    )

    def update_file_path(model_id: str):
        """List the repo's .gguf files and swap the textbox for a dropdown.

        Receives the *string* value of the search box (the original annotated
        it as HuggingfaceHubSearch, but change events pass the value).
        Falls back to the free-text box when the repo can't be listed.
        """
        try:
            api = HfApi()
            files = []
            for entry in api.list_repo_tree(
                repo_id=model_id,
                recursive=True,
            ):
                # Skip non-first shards of multi-part GGUFs
                # (e.g. "...-00002-of-00005.gguf"); keep only the
                # "...0001-of-..." entry.
                if "of-0000" in entry.path and "0001-of-" not in entry.path:
                    continue
                if entry.path.endswith("gguf"):
                    files.append(entry.path)
            return gr.update(visible=False), gr.update(visible=True, choices=files)
        except Exception:
            # Repo not listable (bad id, gated, private): keep the textbox.
            return gr.update(visible=True), gr.update(visible=False)

    model_id.change(
        fn=update_file_path, inputs=model_id, outputs=[file_path, file_path_dropdown]
    )
def restart_space():
    """Factory-reboot this Space through the Hub API (run on a schedule)."""
    # NOTE(review): repo_id ends in "-updated" while the login message says
    # "gguf-metadata-updater" — confirm this is the actual Space id.
    api = HfApi()
    api.restart_space(
        repo_id="bartowski/gguf-metadata-updated",
        token=HF_TOKEN,
        factory_reboot=True,
    )
# Periodically restart the Space: every 21600 s (6 h).
background_scheduler = BackgroundScheduler()
background_scheduler.add_job(restart_space, "interval", seconds=21600)
background_scheduler.start()

# Launch the interface: one job at a time, with a small waiting queue.
demo.queue(default_concurrency_limit=1, max_size=5).launch(
    debug=True, show_api=False
)