{"model": "AmeerH/Mistral-Math-2x7b-mix", "base_model": "Mistral 7B", "revision": "main", "private": false, "precision": "float16", "params": 12.879, "architectures": "MixtralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-03-22T12:12:27Z", "model_type": "\ud83e\udd1d : base merges and moerges", "job_id": -1, "job_start_time": null} |