kmfoda committed
Commit 92272a1 · verified · Parent: df137c1

Outer Step 1. Inner Step 764. Batch Size 452

Files changed (2):
  1. config.json +7 -7
  2. model.safetensors +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "distributed/optimized-gpt2-1b",
+  "_name_or_path": "kmfoda/gpt2-1b-miner-2",
   "activation_function": "gelu_new",
   "all_reduce_scores": {
     "0": "NON_PARTICIPATING",
@@ -268,18 +268,18 @@
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
   "block_list": [
-    5068675,
-    5068678,
-    5068682,
-    5068685,
-    5068689
+    5068729,
+    5068733,
+    5068735,
+    5068740,
+    5068744
   ],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step": 762,
+  "inner_step": 764,
   "inner_steps": 0,
   "last_allreduce_block": 5063053,
   "layer_norm_epsilon": 1e-05,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3c32a4ed907fba35b3fff16e54a1c89ffa046a5be7a0e8f44fb83f0b7d4db97b
+oid sha256:9681423bf1903ecdf7123f628aeb842a12f8439eb7862107e870491b7592ff78
 size 4040701744
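The model.safetensors change only swaps the Git LFS pointer: the oid is the SHA-256 of the new file contents and the size is its byte length. A minimal sketch of verifying a locally downloaded weights file against this commit's pointer, assuming the file already sits in the working directory:

# Minimal sketch: verify a local model.safetensors against the LFS
# pointer recorded in this commit (oid = sha256 of contents, size in bytes).
import hashlib
import os

EXPECTED_OID = "9681423bf1903ecdf7123f628aeb842a12f8439eb7862107e870491b7592ff78"
EXPECTED_SIZE = 4040701744
path = "model.safetensors"  # assumption: file already downloaded locally

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"

sha256 = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha256.update(chunk)

assert sha256.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("model.safetensors matches the LFS pointer in this commit")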