danielhanchen committed
Commit
fe67d89
1 Parent(s): 117a0a1

Upload tokenizer

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
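
The added line routes tokenizer.json through Git LFS like the other large artifacts. A minimal sketch for verifying the attribute from a local checkout (assumes git and git-lfs are installed and the command runs at the repository root):

import subprocess

# Ask git which filter .gitattributes assigns to tokenizer.json.
out = subprocess.run(
    ["git", "check-attr", "filter", "--", "tokenizer.json"],
    capture_output=True, text=True, check=True,
).stdout
print(out.strip())  # expected once this commit applies: "tokenizer.json: filter: lfs"
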
special_tokens_map.json CHANGED
@@ -12,12 +12,5 @@
     "normalized": false,
     "rstrip": false,
     "single_word": false
-  },
-  "pad_token": {
-    "content": "<|finetune_right_pad_id|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
   }
 }
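
This hunk drops the pad_token entry, so a freshly loaded tokenizer reports no pad token. A minimal sketch of the observable effect, assuming transformers is installed; "user/repo" is a placeholder for this repository's Hub id:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("user/repo")  # placeholder repo id
print(tok.pad_token)  # None after this commit; previously "<|finetune_right_pad_id|>"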
 
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
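
The file itself can still be inspected locally; a minimal sketch using huggingface_hub ("user/repo" again stands in for this repository's Hub id):

from huggingface_hub import hf_hub_download

# Fetch just tokenizer.json from the Hub and report the local cache path.
path = hf_hub_download("user/repo", "tokenizer.json")  # placeholder repo id
print(path)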
 
tokenizer_config.json CHANGED
@@ -2058,7 +2058,5 @@
     "attention_mask"
   ],
   "model_max_length": 131072,
-  "pad_token": "<|finetune_right_pad_id|>",
-  "padding_side": "left",
   "tokenizer_class": "PreTrainedTokenizerFast"
 }
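
With pad_token and padding_side removed from the config, batched padding no longer works out of the box. A minimal sketch of restoring it at load time; the token string mirrors the one removed above, and both settings are assumptions about the caller's needs, not part of this commit:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("user/repo")   # placeholder repo id
tok.pad_token = "<|finetune_right_pad_id|>"        # re-add the token this commit removed
tok.padding_side = "left"                          # the side the old config pinned
enc = tok(["short", "a slightly longer input"], padding=True)
print([len(ids) for ids in enc["input_ids"]])      # equal lengths once padding is set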
 