tamasheq-99-2 / vocab.json
{
"[PAD]": 43,
"[UNK]": 42,
"|": 31,
"ء": 16,
"آ": 13,
"أ": 17,
"ؤ": 7,
"إ": 29,
"ئ": 40,
"ا": 41,
"ب": 5,
"ة": 3,
"ت": 12,
"ث": 10,
"ج": 6,
"ح": 2,
"خ": 37,
"د": 38,
"ذ": 39,
"ر": 20,
"ز": 32,
"س": 30,
"ش": 18,
"ص": 26,
"ض": 23,
"ط": 19,
"ظ": 27,
"ع": 28,
"غ": 14,
"ف": 25,
"ق": 35,
"ك": 33,
"ل": 11,
"م": 1,
"ن": 8,
"ه": 15,
"و": 22,
"ى": 4,
"ي": 36,
"ٱ": 24,
"چ": 0,
"ڤ": 21,
"ک": 34,
"ی": 9
}
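
A minimal usage sketch, under the assumption that this file is a character-level CTC vocabulary of the kind consumed by transformers' Wav2Vec2CTCTokenizer (the [PAD] and [UNK] entries and the "|" word-delimiter symbol follow that convention; the class choice and the sample string are illustrative, not confirmed by the repo):

# Assumption: vocab.json is the file shown above, saved locally.
from transformers import Wav2Vec2CTCTokenizer

tokenizer = Wav2Vec2CTCTokenizer(
    "vocab.json",               # character-to-id mapping shown above
    unk_token="[UNK]",          # id 42 in this vocab
    pad_token="[PAD]",          # id 43, also used as the CTC blank
    word_delimiter_token="|",   # id 31, stands in for spaces
)

# Each character is mapped to its id; characters missing from the
# vocabulary fall back to [UNK].
print(tokenizer("مرحبا").input_ids)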