tensor-tech committed on
Commit 7b3f75b (verified)
1 parent: 8e5b712

Run 3. Outer Step 7. Inner Step 1.

Files changed (2)
  1. config.json +6 -5
  2. inner_optimizer.pt +1 -1
config.json CHANGED
@@ -268,17 +268,18 @@
  "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
  },
  "block_list": [
- 5386688,
- 5386694,
- 5386699,
- 5386705
+ 5386737,
+ 5386743,
+ 5386748,
+ 5386754,
+ 5386759
  ],
  "block_size": 1024,
  "bos_token_id": 50256,
  "embd_pdrop": 0.1,
  "eos_token_id": 50256,
  "initializer_range": 0.02,
- "inner_step": 0,
+ "inner_step": 1,
  "inner_steps": 0,
  "last_allreduce_block": 5373127,
  "layer_norm_epsilon": 1e-05,
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:7116c15a48b00abfc4254295c06a77f3a78481ebcb31df3bacea2fc2f2faa33a
+ oid sha256:594320553679816d6d2755b4085ea6199269e570e53bff7074b398d46acda040
  size 8081782026
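
The inner_optimizer.pt entry is a Git LFS pointer, so the commit swaps only the sha256 oid while the payload size stays 8081782026 bytes. A minimal sketch of checking a downloaded checkpoint against the new oid, assuming the file is already on disk (the local filename is illustrative; the pointer layout shown above is the standard git-lfs v1 format):

# Sketch: verify a downloaded inner_optimizer.pt against the oid recorded
# in the LFS pointer from this commit. The expected hash is copied from the
# "+ oid sha256:..." line above.
import hashlib

expected = "594320553679816d6d2755b4085ea6199269e570e53bff7074b398d46acda040"

h = hashlib.sha256()
with open("inner_optimizer.pt", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

assert h.hexdigest() == expected, "checkpoint does not match the LFS pointer oid"
print("inner_optimizer.pt matches oid sha256:" + expected)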