{"model": "Arc53/docsgpt-7b-mistral", "base_model": "", "revision": "main", "private": false, "precision": "float16", "params": 7.242, "architectures": "MistralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-03-12T14:08:22Z", "model_type": "\ud83d\udd36 : fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null} |