Vasanth committed on
Commit
a02f685
·
1 Parent(s): f10f41c

Training in progress, epoch 1

Browse files
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "bert-base-uncased",
3
  "architectures": [
4
  "BertForTokenClassification"
5
  ],
@@ -11,42 +11,26 @@
11
  "hidden_size": 768,
12
  "id2label": {
13
  "0": "O",
14
- "1": "B-geo",
15
- "2": "B-gpe",
16
- "3": "B-per",
17
- "4": "I-geo",
18
- "5": "B-org",
19
- "6": "I-org",
20
- "7": "B-tim",
21
- "8": "B-art",
22
- "9": "I-art",
23
- "10": "I-per",
24
- "11": "I-gpe",
25
- "12": "I-tim",
26
- "13": "B-nat",
27
- "14": "B-eve",
28
- "15": "I-eve",
29
- "16": "I-nat"
30
  },
31
  "initializer_range": 0.02,
32
  "intermediate_size": 3072,
33
  "label2id": {
34
- "B-art": 8,
35
- "B-eve": 14,
36
- "B-geo": 1,
37
- "B-gpe": 2,
38
- "B-nat": 13,
39
- "B-org": 5,
40
- "B-per": 3,
41
- "B-tim": 7,
42
- "I-art": 9,
43
- "I-eve": 15,
44
- "I-geo": 4,
45
- "I-gpe": 11,
46
- "I-nat": 16,
47
- "I-org": 6,
48
- "I-per": 10,
49
- "I-tim": 12,
50
  "O": 0
51
  },
52
  "layer_norm_eps": 1e-12,
@@ -57,8 +41,8 @@
57
  "pad_token_id": 0,
58
  "position_embedding_type": "absolute",
59
  "torch_dtype": "float32",
60
- "transformers_version": "4.30.2",
61
  "type_vocab_size": 2,
62
  "use_cache": true,
63
- "vocab_size": 30522
64
  }
 
1
  {
2
+ "_name_or_path": "bert-base-cased",
3
  "architectures": [
4
  "BertForTokenClassification"
5
  ],
 
11
  "hidden_size": 768,
12
  "id2label": {
13
  "0": "O",
14
+ "1": "B-PER",
15
+ "2": "I-PER",
16
+ "3": "B-ORG",
17
+ "4": "I-ORG",
18
+ "5": "B-LOC",
19
+ "6": "I-LOC",
20
+ "7": "B-MISC",
21
+ "8": "I-MISC"
 
 
 
 
 
 
 
 
22
  },
23
  "initializer_range": 0.02,
24
  "intermediate_size": 3072,
25
  "label2id": {
26
+ "B-LOC": 5,
27
+ "B-MISC": 7,
28
+ "B-ORG": 3,
29
+ "B-PER": 1,
30
+ "I-LOC": 6,
31
+ "I-MISC": 8,
32
+ "I-ORG": 4,
33
+ "I-PER": 2,
 
 
 
 
 
 
 
 
34
  "O": 0
35
  },
36
  "layer_norm_eps": 1e-12,
 
41
  "pad_token_id": 0,
42
  "position_embedding_type": "absolute",
43
  "torch_dtype": "float32",
44
+ "transformers_version": "4.31.0",
45
  "type_vocab_size": 2,
46
  "use_cache": true,
47
+ "vocab_size": 28996
48
  }
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:d49cf36f4c080b112f008c1f087f8deddeda9ebdee4a3ae8b48be7eb4363a35b
3
- size 435691053
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f611cdbaf93915dc7fcb44bc59d25265bc42f29fa690fe54aad2f8d802af69fd
3
+ size 430974121
runs/Aug01_11-46-27_8993e2eed947/events.out.tfevents.1690890511.8993e2eed947.166.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b9be1fded2c7ca89a11fb14c71134837092e9075f81aadb0cd37e651de8b7a41
3
+ size 5316
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,7 +1,7 @@
1
  {
2
  "clean_up_tokenization_spaces": true,
3
  "cls_token": "[CLS]",
4
- "do_lower_case": true,
5
  "mask_token": "[MASK]",
6
  "model_max_length": 512,
7
  "pad_token": "[PAD]",
 
1
  {
2
  "clean_up_tokenization_spaces": true,
3
  "cls_token": "[CLS]",
4
+ "do_lower_case": false,
5
  "mask_token": "[MASK]",
6
  "model_max_length": 512,
7
  "pad_token": "[PAD]",
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:7644b53cdc2f147c444819e3ea6e782cf3362aa7eed5eadacf98f626370f668f
3
- size 3899
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1f99847ebd768960269066ae23ad70b35ac0fb175fbc5701cb3d47edff181e50
3
+ size 3963
vocab.txt CHANGED
The diff for this file is too large to render. See raw diff