TusharGoel committed on
Commit
99874f2
1 Parent(s): af9ace2

Upload 7 files

Files changed (3)
  1. config.json +1 -1
  2. pytorch_model.bin +1 -1
  3. tokenizer_config.json +0 -7
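To inspect the files from this commit locally, the download can be pinned to the commit id shown above. A minimal sketch using `huggingface_hub`; the repo id `TusharGoel/LayoutLMv2-finetuned-docvqa` is assumed from the committer name and the previous config value, and the short commit id may need to be expanded to the full SHA:

```python
from huggingface_hub import snapshot_download

# Assumed repo id for this repository; adjust if the actual id differs.
repo_id = "TusharGoel/LayoutLMv2-finetuned-docvqa"

# Pin to the commit in this page's header. `revision` accepts a branch, tag,
# or commit hash; the full 40-character SHA may be required instead of the
# short id shown here.
local_dir = snapshot_download(repo_id=repo_id, revision="99874f2")
print(local_dir)
```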
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "TusharGoel/LayoutLMv2-finetuned-docvqa",
+  "_name_or_path": "tiennvcs/layoutlmv2-base-uncased-finetuned-docvqa",
   "architectures": [
     "LayoutLMv2ForQuestionAnswering"
   ],
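`_name_or_path` is informational metadata recording which checkpoint the config was derived from; `from_pretrained()` resolves weights by the repo id you pass, not by this field, so the change above should not affect loading. A minimal loading sketch, assuming the repo id used above:

```python
from transformers import LayoutLMv2Config, LayoutLMv2ForQuestionAnswering

repo_id = "TusharGoel/LayoutLMv2-finetuned-docvqa"  # assumed repo id

# The model class comes from "architectures", not from _name_or_path.
# Note: LayoutLMv2 needs detectron2 installed for its visual backbone.
config = LayoutLMv2Config.from_pretrained(repo_id)
model = LayoutLMv2ForQuestionAnswering.from_pretrained(repo_id, config=config)
print(model.config._name_or_path)  # overwritten with repo_id at load time
```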
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d1720f058ab4cb777adba15e6d698e7a3212293bbe6cef67531f640e187121f4
+oid sha256:61dce98ddcb0c697fe833dcb50f8391e69d23a8b8705c10d67ef3457a25c3e59
 size 802214001
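`pytorch_model.bin` is stored via Git LFS, so only the pointer (object id and byte size) changes in this diff. A small sketch for checking a downloaded copy against the new pointer, assuming a hypothetical local path:

```python
import hashlib

# Values taken from the updated LFS pointer in this commit.
EXPECTED_SHA256 = "61dce98ddcb0c697fe833dcb50f8391e69d23a8b8705c10d67ef3457a25c3e59"
EXPECTED_SIZE = 802214001  # bytes


def verify_lfs_object(path: str) -> bool:
    """Return True if the file at `path` matches the pointer's oid and size."""
    digest = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
            size += len(chunk)
    return digest.hexdigest() == EXPECTED_SHA256 and size == EXPECTED_SIZE


# Hypothetical local path (e.g. inside the directory returned by snapshot_download).
print(verify_lfs_object("pytorch_model.bin"))
```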
tokenizer_config.json CHANGED
@@ -53,11 +53,9 @@
   "do_basic_tokenize": true,
   "do_lower_case": true,
   "mask_token": "[MASK]",
-  "max_length": 512,
   "model_max_length": 512,
   "never_split": null,
   "only_label_first_subword": true,
-  "pad_to_multiple_of": null,
   "pad_token": "[PAD]",
   "pad_token_box": [
     0,
@@ -66,8 +64,6 @@
     0
   ],
   "pad_token_label": -100,
-  "pad_token_type_id": 0,
-  "padding_side": "right",
   "sep_token": "[SEP]",
   "sep_token_box": [
     1000,
@@ -75,11 +71,8 @@
     1000,
     1000
   ],
-  "stride": 0,
   "strip_accents": null,
   "tokenize_chinese_chars": true,
   "tokenizer_class": "LayoutLMv2Tokenizer",
-  "truncation_side": "right",
-  "truncation_strategy": "longest_first",
   "unk_token": "[UNK]"
 }
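Most of the removed keys (`max_length`, `stride`, `truncation_strategy`, `pad_to_multiple_of`) correspond to arguments the tokenizer accepts at encoding time, and the others (`padding_side`, `truncation_side`, `pad_token_type_id`) appear to match the usual class defaults, so encoding behaviour should be unchanged. A minimal encoding sketch, assuming the repo id used above and hypothetical example inputs (question, OCR words, 0-1000 normalized boxes):

```python
from transformers import LayoutLMv2Tokenizer

tokenizer = LayoutLMv2Tokenizer.from_pretrained(
    "TusharGoel/LayoutLMv2-finetuned-docvqa"  # assumed repo id
)

# Hypothetical inputs: a DocVQA-style question, OCR words, and their boxes.
question = "What is the invoice number?"
words = ["Invoice", "No.", "12345"]
boxes = [[82, 70, 210, 95], [215, 70, 260, 95], [265, 70, 350, 95]]

# The settings dropped from tokenizer_config.json can be passed per call.
encoding = tokenizer(
    question,
    words,
    boxes=boxes,
    max_length=512,
    truncation="longest_first",
    stride=0,
    padding="max_length",
    return_tensors="pt",
)
print(encoding["input_ids"].shape)  # expected: (1, 512)
```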
 