rafaym committed
Commit a46df02 · 1 Parent(s): be94d83

Delete checkpoint_5000 (1)/content

checkpoint_5000 (1)/content/distilgpt2-finetuned-causal/checkpoint-5000/config.json DELETED
@@ -1,46 +0,0 @@
- {
-   "_name_or_path": "distilgpt2",
-   "_num_labels": 1,
-   "activation_function": "gelu_new",
-   "architectures": [
-     "GPT2LMHeadModel"
-   ],
-   "attn_pdrop": 0.1,
-   "bos_token_id": 50256,
-   "embd_pdrop": 0.1,
-   "eos_token_id": 50256,
-   "id2label": {
-     "0": "LABEL_0"
-   },
-   "initializer_range": 0.02,
-   "label2id": {
-     "LABEL_0": 0
-   },
-   "layer_norm_epsilon": 1e-05,
-   "model_type": "gpt2",
-   "n_ctx": 1024,
-   "n_embd": 768,
-   "n_head": 12,
-   "n_inner": null,
-   "n_layer": 6,
-   "n_positions": 1024,
-   "reorder_and_upcast_attn": false,
-   "resid_pdrop": 0.1,
-   "scale_attn_by_inverse_layer_idx": false,
-   "scale_attn_weights": true,
-   "summary_activation": null,
-   "summary_first_dropout": 0.1,
-   "summary_proj_to_labels": true,
-   "summary_type": "cls_index",
-   "summary_use_proj": true,
-   "task_specific_params": {
-     "text-generation": {
-       "do_sample": true,
-       "max_length": 50
-     }
-   },
-   "torch_dtype": "float32",
-   "transformers_version": "4.30.2",
-   "use_cache": true,
-   "vocab_size": 50257
- }
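For reference, the deleted config.json is a stock distilgpt2 configuration: 6 transformer layers, 12 attention heads, 768-dim embeddings, 1024-token context. A minimal sketch of rebuilding that architecture with transformers — every value comes from the config above, nothing here is specific to this repo:

```python
from transformers import GPT2Config, GPT2LMHeadModel

# Rebuild the architecture described by the deleted config.json:
# distilgpt2 is GPT-2 with 6 transformer layers instead of 12.
config = GPT2Config(
    vocab_size=50257,
    n_positions=1024,
    n_embd=768,
    n_layer=6,
    n_head=12,
    activation_function="gelu_new",
    bos_token_id=50256,
    eos_token_id=50256,
)
model = GPT2LMHeadModel(config)  # random weights; the trained ones lived in pytorch_model.bin
```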
checkpoint_5000 (1)/content/distilgpt2-finetuned-causal/checkpoint-5000/generation_config.json DELETED
@@ -1,6 +0,0 @@
- {
-   "_from_model_config": true,
-   "bos_token_id": 50256,
-   "eos_token_id": 50256,
-   "transformers_version": "4.30.2"
- }
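Recent transformers releases split decoding defaults out of config.json into this separate file; here only the sequence-boundary token ids were set. A minimal sketch of the equivalent object:

```python
from transformers import GenerationConfig

# Equivalent of the deleted generation_config.json: only the
# bos/eos token ids were pinned; everything else is the default.
gen_config = GenerationConfig(bos_token_id=50256, eos_token_id=50256)
```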
checkpoint_5000 (1)/content/distilgpt2-finetuned-causal/checkpoint-5000/merges.txt DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint_5000 (1)/content/distilgpt2-finetuned-causal/checkpoint-5000/optimizer.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:c3e7c7d24f3636847ad37d9dcd9849213590bfe627d32e1880beefe57ae1e771
- size 655345093
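The binary files in this diff are stored as Git LFS pointers, not the tensors themselves: three text lines giving the spec version, a sha256 object id, and the blob size in bytes. The 655,345,093-byte optimizer state is roughly twice the 327,674,773-byte model below, which is what AdamW's two float32 moment buffers per parameter would predict. A minimal sketch of parsing such a pointer (hypothetical helper, not part of any library):

```python
def parse_lfs_pointer(text: str) -> dict:
    """Split a Git LFS pointer file into its 'key value' fields."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = parse_lfs_pointer(
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:c3e7c7d24f3636847ad37d9dcd9849213590bfe627d32e1880beefe57ae1e771\n"
    "size 655345093\n"
)
print(pointer["size"])  # "655345093" (~625 MiB)
```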
checkpoint_5000 (1)/content/distilgpt2-finetuned-causal/checkpoint-5000/pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:95b5d445272ce5124b381aa4a9526a0744b9dc9a5da02166678acd8aeb6a6367
- size 327674773
checkpoint_5000 (1)/content/distilgpt2-finetuned-causal/checkpoint-5000/rng_state.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:0d52ad5b46c2d84d3d57dd9503e47782a2d05ff2a3bbfb945a7eae7dec648d30
- size 14575
checkpoint_5000 (1)/content/distilgpt2-finetuned-causal/checkpoint-5000/scheduler.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:5113844dd81345e733f760270707efba2267b48bd813dd002144c6b864e7f337
- size 627
checkpoint_5000 (1)/content/distilgpt2-finetuned-causal/checkpoint-5000/special_tokens_map.json DELETED
@@ -1,6 +0,0 @@
- {
-   "bos_token": "<|endoftext|>",
-   "eos_token": "<|endoftext|>",
-   "pad_token": "<|endoftext|>",
-   "unk_token": "<|endoftext|>"
- }
checkpoint_5000 (1)/content/distilgpt2-finetuned-causal/checkpoint-5000/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint_5000 (1)/content/distilgpt2-finetuned-causal/checkpoint-5000/tokenizer_config.json DELETED
@@ -1,9 +0,0 @@
- {
-   "add_prefix_space": false,
-   "bos_token": "<|endoftext|>",
-   "clean_up_tokenization_spaces": true,
-   "eos_token": "<|endoftext|>",
-   "model_max_length": 1024,
-   "tokenizer_class": "GPT2Tokenizer",
-   "unk_token": "<|endoftext|>"
- }
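Together with vocab.json, merges.txt, special_tokens_map.json, and tokenizer.json above, this file defines a stock GPT-2 byte-level BPE tokenizer. A sketch of how such a checkpoint directory would be loaded (the path is the one from this diff and no longer exists):

```python
from transformers import GPT2Tokenizer

# Load the tokenizer files that lived alongside the checkpoint.
# GPT-2 ships no dedicated pad token, which is why the deleted
# special_tokens_map.json reused <|endoftext|> (id 50256) for
# bos/eos/pad/unk alike.
tokenizer = GPT2Tokenizer.from_pretrained(
    "distilgpt2-finetuned-causal/checkpoint-5000"  # deleted path, for illustration only
)
```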
checkpoint_5000 (1)/content/distilgpt2-finetuned-causal/checkpoint-5000/trainer_state.json DELETED
@@ -1,44 +0,0 @@
- {
-   "best_metric": null,
-   "best_model_checkpoint": null,
-   "epoch": 2.765486725663717,
-   "global_step": 5000,
-   "is_hyper_param_search": false,
-   "is_local_process_zero": true,
-   "is_world_process_zero": true,
-   "log_history": [
-     {
-       "epoch": 1.0,
-       "learning_rate": 1.5015040558432561e-05,
-       "loss": 4.2302,
-       "step": 1807
-     },
-     {
-       "epoch": 1.0,
-       "eval_loss": 4.0081586837768555,
-       "eval_runtime": 33.5458,
-       "eval_samples_per_second": 3435.628,
-       "eval_steps_per_second": 13.444,
-       "step": 1808
-     },
-     {
-       "epoch": 2.0,
-       "learning_rate": 5.020077553310694e-06,
-       "loss": 4.0653,
-       "step": 3614
-     },
-     {
-       "epoch": 2.0,
-       "eval_loss": 3.9669103622436523,
-       "eval_runtime": 33.7689,
-       "eval_samples_per_second": 3412.936,
-       "eval_steps_per_second": 13.355,
-       "step": 3616
-     }
-   ],
-   "max_steps": 5424,
-   "num_train_epochs": 3,
-   "total_flos": 3918208057933824.0,
-   "trial_name": null,
-   "trial_params": null
- }
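The numbers in this state file are internally consistent: 1808 optimizer steps per epoch gives 5000 / 1808 ≈ 2.765487 for the recorded epoch, and max_steps 5424 = 3 × 1808 matches num_train_epochs = 3. Eval loss was still falling (4.008 → 3.967), so training had not plateaued at this checkpoint. A sketch of pulling that history back out of the file (assuming it still existed on disk):

```python
import json

# Read the Trainer's bookkeeping and extract the eval-loss curve.
with open("checkpoint-5000/trainer_state.json") as f:  # deleted path, illustrative
    state = json.load(f)

eval_losses = [(e["epoch"], e["eval_loss"])
               for e in state["log_history"] if "eval_loss" in e]
print(eval_losses)           # [(1.0, 4.008...), (2.0, 3.966...)]
print(state["global_step"])  # 5000 of max_steps 5424
```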
checkpoint_5000 (1)/content/distilgpt2-finetuned-causal/checkpoint-5000/training_args.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:e008d35b83974f323bb79f5386e08bec58fd48fd28bd7077782fb62391feda3d
- size 3963
checkpoint_5000 (1)/content/distilgpt2-finetuned-causal/checkpoint-5000/vocab.json DELETED
The diff for this file is too large to render. See raw diff