llama : fix bpe tokenize from byte (#2889)
This commit is contained in:
parent d9151e6f57
commit 3730134776
1 changed file with 8 additions and 2 deletions
llama.cpp | 10 ++++++++--
@@ -3366,9 +3366,15 @@ struct llm_tokenizer_bpe {
                     std::string byte_str(1, *j);
                     auto token_multibyte = vocab.token_to_id.find(byte_str);
                     if (token_multibyte == vocab.token_to_id.end()) {
-                        fprintf(stderr,"ERROR: byte not found in vocab: '%s'\n", byte_str.c_str());
+                        try {
+                            llama_token token_byte = llama_byte_to_token(vocab, *j);
+                            output.push_back(token_byte);
+                        } catch (const std::out_of_range & err) {
+                            fprintf(stderr,"ERROR: byte not found in vocab: '%s'\n", byte_str.c_str());
+                        }
+                    } else {
+                        output.push_back((*token_multibyte).second);
                     }
-                    output.push_back((*token_multibyte).second);
                 }
             } else {
                 output.push_back((*token).second);
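For context, the patched path falls back to resolving the raw byte through `llama_byte_to_token` when the single-byte string is missing from `vocab.token_to_id`, and only prints the error if that lookup also throws, instead of unconditionally dereferencing a possibly-end iterator. Below is a minimal, self-contained sketch of that control flow; the `token_to_id` map, the `byte_to_token` helper, and the token IDs are made-up stand-ins for the real `llama_vocab` internals, not the library's API.

```cpp
#include <cstdio>
#include <map>
#include <stdexcept>
#include <string>
#include <vector>

using token_id = int;

// Stand-in for vocab.token_to_id: only plain ASCII entries here.
static const std::map<std::string, token_id> token_to_id = {
    {"a", 1}, {"b", 2},
};

// Stand-in for llama_byte_to_token: maps a raw byte to its <0xNN>
// byte token and throws std::out_of_range if that token is absent.
static token_id byte_to_token(unsigned char byte) {
    char buf[8];
    snprintf(buf, sizeof(buf), "<0x%02X>", static_cast<unsigned>(byte));
    static const std::map<std::string, token_id> byte_tokens = {
        {"<0xE2>", 100}, {"<0x82>", 101}, {"<0xAC>", 102},
    };
    return byte_tokens.at(buf); // std::map::at throws on a missing key
}

int main() {
    std::string word = "a\xE2\x82\xAC"; // 'a' followed by the UTF-8 bytes of '€'
    std::vector<token_id> output;

    for (auto j = word.begin(); j != word.end(); ++j) {
        std::string byte_str(1, *j);
        auto it = token_to_id.find(byte_str);
        if (it == token_to_id.end()) {
            // Fallback path added by the patch: try the byte token
            // before reporting an error.
            try {
                output.push_back(byte_to_token(static_cast<unsigned char>(*j)));
            } catch (const std::out_of_range &) {
                fprintf(stderr, "ERROR: byte not found in vocab: '%s'\n", byte_str.c_str());
            }
        } else {
            output.push_back(it->second);
        }
    }

    for (token_id t : output) {
        printf("%d ", t);
    }
    printf("\n"); // expected: 1 100 101 102
    return 0;
}
```

The try/catch serves the same purpose as in the patch: a byte with no byte token still produces the error message, while a byte that does have one is emitted as a token instead of dereferencing `token_to_id.end()`.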