t10gyal committed
Commit 1f1b915
1 Parent(s): 619c9b0

Training in progress, epoch 1
config.json CHANGED
@@ -1,13 +1,13 @@
 {
-  "_name_or_path": "distilbert-base-uncased",
-  "activation": "gelu",
+  "_name_or_path": "gpt2",
+  "activation_function": "gelu_new",
   "architectures": [
-    "DistilBertForTokenClassification"
+    "GPT2ForTokenClassification"
   ],
-  "attention_dropout": 0.1,
-  "dim": 768,
-  "dropout": 0.1,
-  "hidden_dim": 3072,
+  "attn_pdrop": 0.1,
+  "bos_token_id": 50256,
+  "embd_pdrop": 0.1,
+  "eos_token_id": 50256,
   "id2label": {
     "0": "O",
     "1": "B-corporation",
@@ -39,16 +39,31 @@
     "I-product": 12,
     "O": 0
   },
-  "max_position_embeddings": 512,
-  "model_type": "distilbert",
-  "n_heads": 12,
-  "n_layers": 6,
-  "pad_token_id": 0,
-  "qa_dropout": 0.1,
-  "seq_classif_dropout": 0.2,
-  "sinusoidal_pos_embds": false,
-  "tie_weights_": true,
+  "layer_norm_epsilon": 1e-05,
+  "model_type": "gpt2",
+  "n_ctx": 1024,
+  "n_embd": 768,
+  "n_head": 12,
+  "n_inner": null,
+  "n_layer": 12,
+  "n_positions": 1024,
+  "reorder_and_upcast_attn": false,
+  "resid_pdrop": 0.1,
+  "scale_attn_by_inverse_layer_idx": false,
+  "scale_attn_weights": true,
+  "summary_activation": null,
+  "summary_first_dropout": 0.1,
+  "summary_proj_to_labels": true,
+  "summary_type": "cls_index",
+  "summary_use_proj": true,
+  "task_specific_params": {
+    "text-generation": {
+      "do_sample": true,
+      "max_length": 50
+    }
+  },
   "torch_dtype": "float32",
   "transformers_version": "4.31.0",
-  "vocab_size": 30522
+  "use_cache": true,
+  "vocab_size": 50257
 }
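In short, this commit swaps the DistilBERT backbone for GPT-2: the encoder-specific keys (`dim`, `n_heads`, `n_layers`, `hidden_dim`) give way to GPT-2's (`n_embd`, `n_head`, `n_layer`, `n_positions`), the context window grows from 512 to 1024, the vocabulary from 30522 WordPiece tokens to 50257 BPE tokens, and the 13-label NER mapping is carried over unchanged. A config like the new one falls out of loading GPT-2 with a token-classification head; a minimal sketch (the output directory name is illustrative, and the full 13-entry label map is truncated in the rendered diff, so it is not reproduced here):

```python
from transformers import AutoConfig, AutoModelForTokenClassification

# 13 NER classes, matching the id2label/label2id blocks above
# (only the first and last entries are visible in the rendered diff).
config = AutoConfig.from_pretrained("gpt2", num_labels=13)
model = AutoModelForTokenClassification.from_pretrained("gpt2", config=config)
model.save_pretrained("checkpoint")  # writes a config.json like the one above
```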
merges.txt ADDED
The diff for this file is too large to render. See raw diff
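GPT-2's byte-level BPE tokenizer stores its vocabulary in vocab.json and its merge rules in merges.txt, neither of which DistilBERT's WordPiece tokenizer uses, so both arrive as new files in this commit (vocab.json is added at the bottom). Saving the tokenizer produces them; a minimal sketch:

```python
from transformers import GPT2Tokenizer

tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
# Writes vocab.json and merges.txt alongside the tokenizer config files.
tokenizer.save_pretrained("checkpoint")
```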
 
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:21efa693efa9ab265fc7b6a99c39f27fbd14974360151e29c631b2c67bf69c50
-size 265526309
+oid sha256:1960b4cb2c76a2a97f2ee1249565339235361fb03683bca32a556b6e9ee70aee
+size 497847653
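The checkpoint roughly doubles because GPT-2 base carries about twice as many parameters as DistilBERT. Since `torch_dtype` is float32 (4 bytes per weight), dividing each file size by 4 recovers the approximate parameter counts, which line up with the well-known ~66M (distilbert-base-uncased) and ~124M (gpt2) figures; the small remainder is the classification head plus serialization overhead. A quick check:

```python
# 4 bytes per float32 weight; serialization overhead is ignored.
for name, size_bytes in [("DistilBERT", 265_526_309), ("GPT-2", 497_847_653)]:
    print(f"{name}: ~{size_bytes / 4 / 1e6:.1f}M parameters")
# DistilBERT: ~66.4M parameters
# GPT-2: ~124.5M parameters
```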
runs/Jul25_06-52-54_68551202228a/events.out.tfevents.1690267978.68551202228a.23702.9 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e5a1a5e82be65fa5e1e0e2d4fb4a5f495e3ab116b547591f6abe4d472c2b00ae
+size 4919
runs/Jul25_06-54-36_68551202228a/events.out.tfevents.1690268079.68551202228a.23702.10 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d630001c55ed1ec84d23d23ebe18ee7772d0309830f70277368350268bcda54f
+size 5391
special_tokens_map.json CHANGED
@@ -1,7 +1,6 @@
 {
-  "cls_token": "[CLS]",
-  "mask_token": "[MASK]",
-  "pad_token": "[PAD]",
-  "sep_token": "[SEP]",
-  "unk_token": "[UNK]"
+  "bos_token": "<|endoftext|>",
+  "eos_token": "<|endoftext|>",
+  "pad_token": "<|endoftext|>",
+  "unk_token": "<|endoftext|>"
 }
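GPT-2 defines no [CLS]/[SEP]/[MASK]/[PAD] tokens; its single special token `<|endoftext|>` (id 50256, matching `bos_token_id`/`eos_token_id` in the new config) serves as bos, eos, and unk. Mapping it to `pad_token` as well is the usual workaround so that batches can be padded. A minimal sketch of how such a map is typically produced (not necessarily this repo's exact training code):

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")
# GPT-2 ships without a pad token; reuse <|endoftext|> so batching works.
tokenizer.pad_token = tokenizer.eos_token  # "<|endoftext|>"
tokenizer.save_pretrained("checkpoint")  # writes special_tokens_map.json as above
```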
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,13 +1,9 @@
 {
+  "add_prefix_space": true,
+  "bos_token": "<|endoftext|>",
   "clean_up_tokenization_spaces": true,
-  "cls_token": "[CLS]",
-  "do_lower_case": true,
-  "mask_token": "[MASK]",
-  "model_max_length": 512,
-  "pad_token": "[PAD]",
-  "sep_token": "[SEP]",
-  "strip_accents": null,
-  "tokenize_chinese_chars": true,
-  "tokenizer_class": "DistilBertTokenizer",
-  "unk_token": "[UNK]"
+  "eos_token": "<|endoftext|>",
+  "model_max_length": 1024,
+  "tokenizer_class": "GPT2Tokenizer",
+  "unk_token": "<|endoftext|>"
 }
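The most consequential new key is `add_prefix_space: true`. Token-classification datasets are usually pre-split into words, and the fast GPT-2 tokenizer refuses pretokenized input (`is_split_into_words=True`) unless it was instantiated with `add_prefix_space=True`, because byte-level BPE otherwise encodes word-initial and word-internal tokens differently. `model_max_length` also grows from 512 to 1024 to match GPT-2's context window. A minimal sketch:

```python
from transformers import AutoTokenizer

# Required for pretokenized (word-split) input with the fast GPT-2 tokenizer.
tokenizer = AutoTokenizer.from_pretrained("gpt2", add_prefix_space=True)
enc = tokenizer(["HuggingFace", "is", "great"], is_split_into_words=True)
print(enc.word_ids())  # sub-token -> word index, used to align NER labels
```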
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a7e8b92a6e3c3349e29eac1f2bd4e2fe96be174af9d5cfaf7cc211f796d4cb90
+oid sha256:da0016bb450c3a6f53e23fbc5fd9b4c20a4e06c7da7a3474ef49600eec9c8db0
 size 3963
vocab.json ADDED
The diff for this file is too large to render. See raw diff