import gradio as gr
import subprocess
import os
from huggingface_hub import HfApi, snapshot_download
from gradio_huggingfacehub_search import HuggingfaceHubSearch
from apscheduler.schedulers.background import BackgroundScheduler
HF_TOKEN = os.environ.get("HF_TOKEN")
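
# HF_TOKEN is only used by the scheduled Space restart at the bottom of this file;
# downloads and uploads in process_model use the logged-in user's OAuth token.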
def process_model(
    model_id: str,
    file_path: str,
    key: str,
    value: str,
    oauth_token: gr.OAuthToken | None,
):
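    """Download one file from model_id, set a single GGUF metadata key with
    llama.cpp's gguf_set_metadata.py, and upload the modified file back to the repo."""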
    if oauth_token is None or oauth_token.token is None:
        raise ValueError("You must be logged in to use gguf-metadata-updater")
    api = HfApi(token=oauth_token.token)

    MODEL_NAME = model_id.split("/")[-1]
    FILE_NAME = file_path.split("/")[-1]

    # Download only the requested file into a local folder named after the model
    api.snapshot_download(
        repo_id=model_id,
        allow_patterns=file_path,
        local_dir=f"{MODEL_NAME}",
    )
    print("Model downloaded successfully!")

    # Rewrite the metadata key in place with llama.cpp's gguf_set_metadata.py script
    metadata_update = f"python llama.cpp/gguf-py/scripts/gguf_set_metadata.py {MODEL_NAME}/{file_path} {key} {value}"
    subprocess.run(metadata_update, shell=True, check=True)
    print(f"Model metadata {key} updated to {value} successfully!")

    # Upload the modified GGUF file(s) back to the source repo
    api.upload_folder(
        folder_path=MODEL_NAME,
        repo_id=model_id,
        allow_patterns=["*.gguf"],
    )
    print("Uploaded successfully!")

    return "Processing complete."
with gr.Blocks() as demo:
    gr.Markdown("You must be logged in to use the GGUF metadata updater.")
    gr.LoginButton(min_width=250)

    model_id = HuggingfaceHubSearch(
        label="Hub Model ID",
        placeholder="Search for a model id on Hugging Face",
        search_type="model",
    )
    file_path = gr.Textbox(lines=1, label="File path")
    key = gr.Textbox(lines=1, label="Key")
    value = gr.Textbox(lines=1, label="Value")
    iface = gr.Interface(
        fn=process_model,
        inputs=[model_id, file_path, key, value],
        # process_model returns a single status string, so expose a single output component
        outputs=gr.Markdown(label="output"),
        title="Update metadata for a GGUF file",
        description="This Space takes an HF repo, a file within that repo, a metadata key, and a new metadata value to update it to.",
        api_name=False,
    )
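
# Restart this Space from scratch; invoked periodically by the scheduler below.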
def restart_space():
    HfApi().restart_space(
        repo_id="bartowski/gguf-metadata-updater", token=HF_TOKEN, factory_reboot=True
    )
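
# Run restart_space every 21600 seconds (6 hours) in a background thread.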
scheduler = BackgroundScheduler()
scheduler.add_job(restart_space, "interval", seconds=21600)
scheduler.start()
# Launch the interface
demo.queue(default_concurrency_limit=1, max_size=5).launch(debug=True, show_api=False)