Commit 5fe89c0

Revert "Tokenizer: fixed decode of special tokens during init stage (#1823)"
This reverts commit 966b6fc.
1 parent 34400b7 commit 5fe89c0

1 file changed: +3 -3 lines changed

src/cpp/src/tokenizer.cpp (+3 -3)
@@ -309,11 +309,11 @@ class Tokenizer::TokenizerImpl {
 
         // Unset/-1 token causes exception in SentencePiece detokenization.
         if (m_pad_token_id != -1 && m_pad_token.empty())
-            m_pad_token = decode(std::vector{m_pad_token_id}, {ov::genai::skip_special_tokens(false)});
+            m_pad_token = decode(std::vector{m_pad_token_id}, {ov::genai::add_special_tokens(true)});
         if (m_bos_token_id != -1 && m_bos_token.empty())
-            m_bos_token = decode(std::vector{m_bos_token_id}, {ov::genai::skip_special_tokens(false)});
+            m_bos_token = decode(std::vector{m_bos_token_id}, {ov::genai::add_special_tokens(true)});
         if (m_eos_token_id != -1 && m_eos_token.empty())
-            m_eos_token = decode(std::vector{m_eos_token_id}, {ov::genai::skip_special_tokens(false)});
+            m_eos_token = decode(std::vector{m_eos_token_id}, {ov::genai::add_special_tokens(true)});
         // Initialize detokenizer's cache to save time later.
         decode({1, 33, 199, 42, 42});
     }
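For context, below is a minimal, self-contained sketch (not part of this commit) of how the two decode() variants the diff toggles between look through the public ov::genai::Tokenizer API. The tokenizer directory path is a placeholder, and the sketch only illustrates the call shapes; it does not claim which flag PR #1823 or this revert considers correct.

// Minimal sketch, not part of this commit. Assumes an ov::genai::Tokenizer
// built from a models directory; "/path/to/tokenizer_dir" is a placeholder.
#include "openvino/genai/tokenizer.hpp"

#include <iostream>
#include <string>
#include <vector>

int main() {
    ov::genai::Tokenizer tokenizer("/path/to/tokenizer_dir");  // placeholder path

    int64_t bos_id = tokenizer.get_bos_token_id();

    // Variant removed by this revert (introduced in PR #1823): pass
    // skip_special_tokens(false) so the detokenizer keeps special tokens.
    std::string with_skip_flag =
        tokenizer.decode(std::vector<int64_t>{bos_id}, {ov::genai::skip_special_tokens(false)});

    // Variant restored by this revert: the pre-#1823 call that passed
    // add_special_tokens(true) to decode() instead.
    std::string with_add_flag =
        tokenizer.decode(std::vector<int64_t>{bos_id}, {ov::genai::add_special_tokens(true)});

    std::cout << with_skip_flag << "\n" << with_add_flag << "\n";
    return 0;
}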

0 commit comments