{
"os": "Linux-4.18.0-513.24.1.el8_9.x86_64-x86_64-with-glibc2.28",
"python": "3.11.4",
"heartbeatAt": "2024-09-17T11:27:27.284628",
"startedAt": "2024-09-17T11:27:26.786345",
"docker": null,
"cuda": null,
"args": [],
"state": "running",
"program": "<python with no main file>",
"codePathLocal": null,
"host": "codon-gpu-006.ebi.ac.uk",
"username": "amrufai",
"executable": "/nfs/production/literature/amina-mardiyyah/envs/llm-prompt/bin/python",
"cpu_count": 48,
"cpu_count_logical": 48,
"cpu_freq": {
"current": 2758.33775,
"min": 0.0,
"max": 0.0
},
"cpu_freq_per_core": [
{
"current": 1157.924,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
},
{
"current": 2800.0,
"min": 0.0,
"max": 0.0
}
],
"disk": {
"/": {
"total": 47.760292053222656,
"used": 15.848060607910156
}
},
"gpu": "NVIDIA A100 80GB PCIe",
"gpu_count": 1,
"gpu_devices": [
{
"name": "NVIDIA A100 80GB PCIe",
"memory_total": 85899345920
}
],
"memory": {
"total": 502.8375930786133
}
}