tiagoblima committed on
Commit
3884010
1 Parent(s): f9ebb58

Training in progress, step 500

Browse files
Files changed (4) hide show
  1. config.json +1 -1
  2. model.safetensors +1 -1
  3. tokenizer.json +2 -16
  4. training_args.bin +2 -2
config.json CHANGED
@@ -26,7 +26,7 @@
26
  "relative_attention_max_distance": 128,
27
  "relative_attention_num_buckets": 32,
28
  "torch_dtype": "float32",
29
- "transformers_version": "4.35.2",
30
  "use_cache": true,
31
  "vocab_size": 32128
32
  }
 
26
  "relative_attention_max_distance": 128,
27
  "relative_attention_num_buckets": 32,
28
  "torch_dtype": "float32",
29
+ "transformers_version": "4.37.0.dev0",
30
  "use_cache": true,
31
  "vocab_size": 32128
32
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:c0eb796d6113f37b0f3b0667955858bf5d07a75b2886a733f32fe4583fc70b92
3
  size 3101246448
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cdcdbb05dd500840cb592cdf6e26e12e91975b0a6d694c7ee269422e81d8400e
3
  size 3101246448
tokenizer.json CHANGED
@@ -1,21 +1,7 @@
1
  {
2
  "version": "1.0",
3
- "truncation": {
4
- "direction": "Right",
5
- "max_length": 30,
6
- "strategy": "LongestFirst",
7
- "stride": 0
8
- },
9
- "padding": {
10
- "strategy": {
11
- "Fixed": 30
12
- },
13
- "direction": "Right",
14
- "pad_to_multiple_of": null,
15
- "pad_id": 0,
16
- "pad_type_id": 0,
17
- "pad_token": "<pad>"
18
- },
19
  "added_tokens": [
20
  {
21
  "id": 0,
 
1
  {
2
  "version": "1.0",
3
+ "truncation": null,
4
+ "padding": null,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
  "added_tokens": [
6
  {
7
  "id": 0,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:c677650e95687cc0f4cf12719f96d14685142ebdae22df7f06b65eaba968806a
3
- size 4728
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1439ab4b7d1959910657eca3a857a861070805dd4a2ef75b6efee15cc0e2e41a
3
+ size 4856