
Commit 9f1c8b8

Commit message: fix
Parent: 3af64f4

File tree

1 file changed (0 additions, 14 deletions)

paddlenlp/transformers/mamba/tokenizer.py

Lines changed: 0 additions & 14 deletions
@@ -101,20 +101,6 @@ def __init__(
             unk_token=unk_token,
         )
 
-        # NOTE: add special tokens to the vocab
-        value = kwargs.pop("added_tokens_decoder", {})
-        additional_special_tokens = []
-        for _, token_kwargs in value.items():
-            if isinstance(token_kwargs, AddedToken):
-                content = token_kwargs
-            else:
-                content = AddedToken(**token_kwargs)
-            additional_special_tokens.append(content)
-        if len(additional_special_tokens) > 0:
-            self._build_special_tokens_map_extended(
-                additional_special_tokens=additional_special_tokens,
-            )
-
         self._vocab_file = vocab_file
         self._merges_file = merges_file
         self.max_length = max_length if max_length is not None else int(1e12)
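
For context, the deleted block normalized an `added_tokens_decoder` mapping (token id -> token spec, given either as an `AddedToken` or as a plain dict of its fields) into a list of `AddedToken` objects and registered them as additional special tokens. The sketch below mirrors that normalization step in isolation; the `AddedToken` dataclass and the `collect_additional_special_tokens` helper are stand-ins for illustration, not the actual paddlenlp APIs removed by this commit.

from dataclasses import dataclass
from typing import Dict, Union

@dataclass
class AddedToken:
    # Stand-in for paddlenlp's AddedToken; only the fields used here.
    content: str
    lstrip: bool = False
    rstrip: bool = False
    single_word: bool = False
    special: bool = False

def collect_additional_special_tokens(
    added_tokens_decoder: Dict[int, Union[AddedToken, dict]]
) -> list:
    # Mirror of the removed logic: normalize every entry to an AddedToken.
    additional_special_tokens = []
    for _, token_kwargs in added_tokens_decoder.items():
        if isinstance(token_kwargs, AddedToken):
            content = token_kwargs
        else:
            content = AddedToken(**token_kwargs)
        additional_special_tokens.append(content)
    return additional_special_tokens

# Example: entries may arrive as dicts, e.g. loaded from a tokenizer config file.
tokens = collect_additional_special_tokens(
    {50256: {"content": "<|endoftext|>", "special": True}}
)
print(tokens)  # [AddedToken(content='<|endoftext|>', ..., special=True)]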
