# bert-fa-pos-lscp-500k / preprocessor / tokenizer_config.yaml
name: wordpiece_tokenizer
config_type: preprocessor

# Sequence length, truncation, and padding behavior
max_length: 512
truncation_strategy: longest_first
truncation_direction: right
stride: 0
padding_strategy: longest
padding_direction: right
pad_to_multiple_of: 0
pad_token_type_id: 0

# Special tokens and the WordPiece continuation prefix
unk_token: '[UNK]'
sep_token: '[SEP]'
pad_token: '[PAD]'
cls_token: '[CLS]'
mask_token: '[MASK]'
wordpieces_prefix: '##'

# Vocabulary / training hyperparameters
vocab_size: 42000
min_frequency: 2
limit_alphabet: 1000
initial_alphabet: []
show_progress: true
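
For context, the sketch below shows roughly how these fields map onto a WordPiece setup built with the Hugging Face `tokenizers` library; the assumption that Hezar's `wordpiece_tokenizer` wraps that library is mine, and the corpus path and sample sentence are placeholders, not part of this repository.

```python
# Minimal sketch (not Hezar's own code): build, train, and configure a WordPiece
# tokenizer with the hyperparameters from tokenizer_config.yaml, using the
# Hugging Face `tokenizers` library. "corpus.txt" is a placeholder.
from tokenizers import Tokenizer
from tokenizers.models import WordPiece
from tokenizers.pre_tokenizers import Whitespace
from tokenizers.trainers import WordPieceTrainer

special_tokens = ["[PAD]", "[UNK]", "[CLS]", "[SEP]", "[MASK]"]

# Model and trainer mirror unk_token, wordpieces_prefix, vocab_size,
# min_frequency, limit_alphabet, initial_alphabet, and show_progress.
tokenizer = Tokenizer(WordPiece(unk_token="[UNK]"))
tokenizer.pre_tokenizer = Whitespace()
trainer = WordPieceTrainer(
    vocab_size=42000,
    min_frequency=2,
    limit_alphabet=1000,
    initial_alphabet=[],
    special_tokens=special_tokens,
    continuing_subword_prefix="##",
    show_progress=True,
)
tokenizer.train(files=["corpus.txt"], trainer=trainer)  # placeholder corpus

# Truncation mirrors max_length, truncation_strategy, truncation_direction, stride.
tokenizer.enable_truncation(
    max_length=512,
    stride=0,
    strategy="longest_first",
    direction="right",
)
# Padding mirrors padding_strategy/direction and pad_token_type_id; leaving
# length unset pads each batch to its longest sequence ("longest" strategy).
tokenizer.enable_padding(
    direction="right",
    pad_token="[PAD]",
    pad_type_id=0,
)

encoded = tokenizer.encode("یک جملهٔ نمونه")  # sample Persian sentence
print(encoded.tokens)
```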