tensor-tech committed
Commit c9e0b8f · verified · 1 Parent(s): 31491df

Run 3. Outer Step 8. Inner Step 92.

Files changed (1):
config.json +6 -6
config.json CHANGED
@@ -268,18 +268,18 @@
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
   "block_list": [
-    5398140,
-    5398144,
-    5398148,
-    5398152,
-    5398156
+    5398188,
+    5398193,
+    5398196,
+    5398201,
+    5398205
   ],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step": 90,
+  "inner_step": 92,
   "inner_steps": 0,
   "last_allreduce_block": 5392742,
   "layer_norm_epsilon": 1e-05,