diff --git a/flair/embeddings/transformer.py b/flair/embeddings/transformer.py
index 8635825eec..552bad798a 100644
--- a/flair/embeddings/transformer.py
+++ b/flair/embeddings/transformer.py
@@ -1139,8 +1139,8 @@ def is_supported_t5_model(config: PretrainedConfig) -> bool:
 
         # If we use a context separator, add a new special token
         self.use_context_separator = use_context_separator
         if use_context_separator:
-            self.tokenizer.add_special_tokens({"additional_special_tokens": [SENTENCE_BOUNDARY_TAG]})
-            transformer_model.resize_token_embeddings(len(self.tokenizer))
+            added = self.tokenizer.add_special_tokens({"additional_special_tokens": [SENTENCE_BOUNDARY_TAG]})
+            transformer_model.resize_token_embeddings(transformer_model.config.vocab_size + added)
 
         super().__init__(**self.to_args())
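
For context, a minimal standalone sketch of the pattern this patch adopts (not Flair code; the checkpoint name and special token below are illustrative). tokenizer.add_special_tokens returns how many tokens were actually added (0 if the token already existed), so growing the embedding matrix from the model's own config.vocab_size by that count never shrinks it, whereas resizing to len(tokenizer) can shrink the matrix when a checkpoint ships with more embedding rows than the tokenizer has vocabulary entries:

    from transformers import AutoModel, AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
    model = AutoModel.from_pretrained("bert-base-uncased")

    old_size = model.get_input_embeddings().num_embeddings

    # Returns the number of tokens newly added to the vocabulary
    # (0 if "[CTX-SEP]" were already a known token).
    added = tokenizer.add_special_tokens({"additional_special_tokens": ["[CTX-SEP]"]})

    # Grow the embedding matrix by exactly `added` rows instead of forcing
    # it to len(tokenizer), which may be smaller than the current matrix.
    model.resize_token_embeddings(model.config.vocab_size + added)

    assert model.get_input_embeddings().num_embeddings == old_size + added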