Commit 1b39687 by automerger
1 parent: 5df1e4f

Upload folder using huggingface_hub
README.md CHANGED
@@ -6,37 +6,31 @@ tags:
 - lazymergekit
 - automerger
 base_model:
-- yam-peleg/Experiment26-7B
 - mayacinka/yam-jom-7B
 ---
 
 # Experiment26Yam-7B
 
 Experiment26Yam-7B is an automated merge created by [Maxime Labonne](https://huggingface.co/mlabonne) using the following configuration.
-* [yam-peleg/Experiment26-7B](https://huggingface.co/yam-peleg/Experiment26-7B)
 * [mayacinka/yam-jom-7B](https://huggingface.co/mayacinka/yam-jom-7B)
 
 ## 🧩 Configuration
 
 ```yaml
-slices:
-  - sources:
-      - model: yam-peleg/Experiment26-7B
-        layer_range: [0, 32]
-      - model: mayacinka/yam-jom-7B
-        layer_range: [0, 32]
-merge_method: slerp
-base_model: yam-peleg/Experiment26-7B
+models:
+  - model: rwitz/experiment26-truthy-iter-0
+    # No parameters necessary for base model
+  - model: mayacinka/yam-jom-7B
+    parameters:
+      density: 0.53
+      weight: 0.6
+merge_method: dare_ties
+base_model: rwitz/experiment26-truthy-iter-0
 parameters:
-  t:
-    - filter: self_attn
-      value: [0, 0.5, 0.3, 0.7, 1]
-    - filter: mlp
-      value: [1, 0.5, 0.7, 0.3, 0]
-    - value: 0.5
+  int8_mask: true
 dtype: bfloat16
 random_seed: 0
-```
+```
 
 ## 💻 Usage
 
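A minimal sketch of the kind of example the truncated `## 💻 Usage` section usually carries: loading the merged checkpoint with 🤗 Transformers. The repository id `automerger/Experiment26Yam-7B`, the prompt, and the generation settings are assumptions for illustration, not part of this commit.

```python
# Sketch only: load the merged model and generate a short completion.
# The repo id below is assumed from the model name; adjust as needed.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "automerger/Experiment26Yam-7B"  # hypothetical repository id

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # matches the card's dtype: bfloat16
    device_map="auto",
)

prompt = "Explain what a model merge is in one sentence."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```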
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "yam-peleg/Experiment26-7B",
+  "_name_or_path": "rwitz/experiment26-truthy-iter-0",
   "architectures": [
     "MistralForCausalLM"
   ],
@@ -20,7 +20,7 @@
   "sliding_window": 4096,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.38.2",
+  "transformers_version": "4.39.0",
   "use_cache": true,
   "vocab_size": 32000
 }
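Only two fields change in `config.json`: `_name_or_path` now points at the new base model and `transformers_version` moves from 4.38.2 to 4.39.0; the architecture stays `MistralForCausalLM` with a 32000-token vocabulary. A small sketch (repository id assumed, as above) for checking those fields without downloading the weights:

```python
# Sketch only: fetch just the configuration and print the fields that
# this commit leaves in place. Repo id is an assumption.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("automerger/Experiment26Yam-7B")

print(config.architectures)   # ['MistralForCausalLM']
print(config.torch_dtype)     # torch.bfloat16
print(config.sliding_window)  # 4096
print(config.vocab_size)      # 32000
```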
mergekit_config.yml CHANGED
@@ -1,19 +1,14 @@
 
-slices:
-  - sources:
-      - model: yam-peleg/Experiment26-7B
-        layer_range: [0, 32]
-      - model: mayacinka/yam-jom-7B
-        layer_range: [0, 32]
-merge_method: slerp
-base_model: yam-peleg/Experiment26-7B
+models:
+  - model: rwitz/experiment26-truthy-iter-0
+    # No parameters necessary for base model
+  - model: mayacinka/yam-jom-7B
+    parameters:
+      density: 0.53
+      weight: 0.6
+merge_method: dare_ties
+base_model: rwitz/experiment26-truthy-iter-0
 parameters:
-  t:
-    - filter: self_attn
-      value: [0, 0.5, 0.3, 0.7, 1]
-    - filter: mlp
-      value: [1, 0.5, 0.7, 0.3, 0]
-    - value: 0.5
+  int8_mask: true
 dtype: bfloat16
 random_seed: 0
-
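The new `mergekit_config.yml` mirrors the README: a DARE-TIES merge that applies `mayacinka/yam-jom-7B` with `density: 0.53` and `weight: 0.6` on top of the base model `rwitz/experiment26-truthy-iter-0`, with `int8_mask` enabled. A hedged sketch for sanity-checking the file with PyYAML before handing it to mergekit; the local path is an assumption.

```python
# Sketch only: parse the merge config and echo the fields this commit sets.
# Assumes the file has been downloaded next to this script.
import yaml

with open("mergekit_config.yml") as f:
    cfg = yaml.safe_load(f)

print(cfg["merge_method"])  # dare_ties
print(cfg["base_model"])    # rwitz/experiment26-truthy-iter-0
print(cfg["parameters"])    # {'int8_mask': True}
for entry in cfg["models"]:
    # the base-model entry carries no extra parameters
    print(entry["model"], entry.get("parameters", {}))
```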
 
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2f1f59151807fbb860ea337343eec278d99aa8cfe4f7c65f3b52ba7d8f979c65
+oid sha256:b80409a92aaa659bc0c16a92f7a2745d4adcd1b89fb992a8f35e37cf8d9cf942
 size 9942981696
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:06b8d18694dbde1b5dab910c5c6832593fbf0697b55b9681ef2173a436d9c711
+oid sha256:9713367b9ddbc47fc19d43848a55e08cf885a6ad65afc89e926e5391a1a5f8cb
 size 4540516344
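Both weight shards are stored as Git LFS pointers, so only their `oid sha256:` digests change while the reported sizes stay identical. A minimal sketch (local filename assumed) for verifying a downloaded shard against the digest recorded in this commit:

```python
# Sketch only: recompute a shard's SHA-256 and compare it with the digest
# from the updated LFS pointer above. The filename is assumed to be local.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "b80409a92aaa659bc0c16a92f7a2745d4adcd1b89fb992a8f35e37cf8d9cf942"
actual = sha256_of("model-00001-of-00002.safetensors")
print("OK" if actual == expected else "MISMATCH")
```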