{"model": "1TuanPham/T-Llama", "base_model": "", "revision": "main", "private": false, "precision": "bfloat16", "params": 6.849, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-03-29T19:36:08Z", "model_type": "\ud83d\udd36 : fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null}