SaylorTwift (HF staff) committed
Commit 4dc8b65 (1 parent: 3c5a238)

Upload /notadib/Mistral-7B-Instruct-v0.2-attention-sparsity-30_eval_request_False_float16_Original.json with huggingface_hub
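The commit message states the request file was pushed with huggingface_hub. A minimal sketch of such an upload is shown below; the target repo_id, local path, and commit message are assumptions for illustration, not details taken from this commit.

# Sketch: upload an eval request JSON to a dataset repo with huggingface_hub.
# The repo_id below is assumed; replace it with the actual requests dataset.
from huggingface_hub import HfApi

api = HfApi()  # uses the token stored by `huggingface-cli login` by default
api.upload_file(
    path_or_fileobj="notadib/Mistral-7B-Instruct-v0.2-attention-sparsity-30_eval_request_False_float16_Original.json",
    path_in_repo="notadib/Mistral-7B-Instruct-v0.2-attention-sparsity-30_eval_request_False_float16_Original.json",
    repo_id="<requests-dataset-repo>",  # assumed placeholder
    repo_type="dataset",
    commit_message="Upload eval request with huggingface_hub",
)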

notadib/Mistral-7B-Instruct-v0.2-attention-sparsity-30_eval_request_False_float16_Original.json CHANGED
@@ -1 +1,15 @@
- {"model": "notadib/Mistral-7B-Instruct-v0.2-attention-sparsity-30", "base_model": "Mistral-Instruct-v0.2", "revision": "main", "private": false, "precision": "float16", "params": 7.242, "architectures": "MistralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-01-28T16:25:03Z", "model_type": "\ud83d\udcac : chat models (RLHF, DPO, IFT, ...)", "job_id": -1, "job_start_time": null}
+ {
+     "model": "notadib/Mistral-7B-Instruct-v0.2-attention-sparsity-30",
+     "base_model": "Mistral-Instruct-v0.2",
+     "revision": "main",
+     "private": false,
+     "precision": "float16",
+     "params": 7.242,
+     "architectures": "MistralForCausalLM",
+     "weight_type": "Original",
+     "status": "RUNNING",
+     "submitted_time": "2024-01-28T16:25:03Z",
+     "model_type": "\ud83d\udcac : chat models (RLHF, DPO, IFT, ...)",
+     "job_id": "1564137",
+     "job_start_time": "2024-01-28T16:30:33.127185"
+ }
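The change flips the request from PENDING to RUNNING and records a job id and start time. A small sketch of reading the updated file back, assuming it is the plain JSON object shown in the diff above:

# Sketch: load the eval request and inspect its scheduling fields.
# Field names match the diff above; the local path is assumed.
import json
from datetime import datetime

path = "notadib/Mistral-7B-Instruct-v0.2-attention-sparsity-30_eval_request_False_float16_Original.json"
with open(path, encoding="utf-8") as f:
    request = json.load(f)

print(request["status"])    # "RUNNING" after this commit
print(request["job_id"])    # "1564137"
print(datetime.fromisoformat(request["job_start_time"]))  # 2024-01-28 16:30:33.127185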