# File size: 1,925 Bytes
# 465af14 c147e35 465af14 0382281 c147e35 d956a72 c147e35 465af14 0382281 c147e35 0382281 73178f8 0382281 c147e35 0382281 465af14
# NOTE: the two lines above and the original line-number gutter are extraction
# artifacts from a page capture; commented out so the module parses.
import re
import json
import time
from huggingface_hub import HfApi
def current_seconds_time():
    """Return the current Unix time, rounded to the nearest whole second."""
    now = time.time()
    return round(now)
def form_file_name(model_name, commit_id, inference_function):
    """Build the predictions JSON filename for a model/commit/function triple.

    Slashes in *model_name* (e.g. "org/model" repo ids) are replaced with
    underscores so the result is a valid flat filename.
    """
    # str.replace is the idiomatic (and faster) choice for a literal,
    # single-character substitution; the regex machinery of re.sub is overkill.
    safe_name = model_name.replace("/", "_")
    return f"predictions_{safe_name}_{commit_id}_{inference_function}.json"
def update_model_queue(repo_id, model_name, commit_id, inference_function, status):
    """Write a placeholder predictions file recording a model's queue status
    and upload it asynchronously to a Hugging Face dataset repo.

    Args:
        repo_id: target dataset repository id on the Hub.
        model_name: model identifier (slashes are flattened in the filename).
        commit_id: model commit the predictions will correspond to.
        inference_function: name of the inference function being queued.
        status: one of "queued", "in_progress", "failed (online)".

    Raises:
        ValueError: if *status* is not a recognized queue state.
    """
    # `assert` is stripped under `python -O`; validate input explicitly.
    if status not in ("queued", "in_progress", "failed (online)"):
        raise ValueError(f"invalid status: {status!r}")
    api = HfApi()
    timestamp = current_seconds_time()
    predictions_filename = form_file_name(model_name, commit_id, inference_function)
    predictions_object = {
        "model_name": model_name,
        "predictions": [[""]],  # placeholder until real predictions arrive
        "commit_id": commit_id,
        "inference_function": inference_function,
        "last_updated_timestamp": timestamp,
        "status": status,
    }
    with open(predictions_filename, "w") as f:
        json.dump(predictions_object, f)
    # Fire-and-forget upload; the returned future is intentionally not awaited
    # (the original also discarded it).
    api.upload_file(
        path_or_fileobj=predictions_filename,
        path_in_repo=predictions_filename,
        repo_id=repo_id,
        repo_type="dataset",
        run_as_future=True,
    )
def upload_predictions(repo_id, predictions, model_name, commit_id, inference_function):
    """Serialize completed *predictions* to a local JSON file and upload it
    asynchronously to a Hugging Face dataset repo with status "completed".

    Args:
        repo_id: target dataset repository id on the Hub.
        predictions: JSON-serializable predictions payload.
        model_name: model identifier (slashes are flattened in the filename).
        commit_id: model commit the predictions correspond to.
        inference_function: name of the inference function that produced them.
    """
    api = HfApi()
    timestamp = current_seconds_time()
    predictions_filename = form_file_name(model_name, commit_id, inference_function)
    predictions_object = {
        "model_name": model_name,
        "predictions": predictions,
        "commit_id": commit_id,
        "inference_function": inference_function,
        "last_updated_timestamp": timestamp,
        "status": "completed",
    }
    with open(predictions_filename, "w") as f:
        json.dump(predictions_object, f)
    # Fire-and-forget upload; the returned future is intentionally not awaited
    # (the original bound it to an unused local).
    api.upload_file(
        path_or_fileobj=predictions_filename,
        path_in_repo=predictions_filename,
        repo_id=repo_id,
        repo_type="dataset",
        run_as_future=True,
    )
|