@@ -314,15 +314,16 @@ def from_pretrained(
             __tokenizer = tokenizer.tokenizer
             # Add padding side as well
             __tokenizer.padding_side = "right"
-            # Check bos, eos, pad, unk tokens
-            tokens = ["bos_token", "eos_token", "pad_token", "unk_token"]
-            for token in tokens:
-                if hasattr(__tokenizer, token) and not hasattr(tokenizer, token):
-                    _args = {"__tokenizer": __tokenizer, "tokenizer": tokenizer}
-                    exec(f"tokenizer.{token} = __tokenizer.{token}", _args)
-                    exec(f"tokenizer.{token}_id = __tokenizer.{token}_id", _args)
-                pass
-            pass
+            # Check bos, eos, pad tokens
+            if hasattr(tokenizer, "bos_token"):
+                tokenizer.bos_token    = tokenizer.tokenizer.bos_token
+                tokenizer.bos_token_id = tokenizer.tokenizer.bos_token_id
+            if hasattr(tokenizer, "eos_token"):
+                tokenizer.eos_token    = tokenizer.tokenizer.eos_token
+                tokenizer.eos_token_id = tokenizer.tokenizer.eos_token_id
+            if hasattr(tokenizer, "pad_token"):
+                tokenizer.pad_token    = tokenizer.tokenizer.pad_token
+                tokenizer.pad_token_id = tokenizer.tokenizer.pad_token_id
         pass
         model, tokenizer = patch_tokenizer(model, tokenizer)
         model = post_patch_loss_function(model)
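
For context, a minimal self-contained sketch of the pattern the new hunk applies: a wrapper object (e.g. a processor) mirrors the bos/eos/pad special tokens from the tokenizer it wraps using plain hasattr-guarded assignment instead of exec(). This is the loop-equivalent of the unrolled assignments above; the class names here (InnerTokenizer, Wrapper) are hypothetical, not from this repo.

# Hypothetical stand-in for the wrapped HF tokenizer
class InnerTokenizer:
    bos_token, bos_token_id = "<s>", 1
    eos_token, eos_token_id = "</s>", 2
    pad_token, pad_token_id = "</s>", 2

# Hypothetical stand-in for the wrapper that exposes .tokenizer
class Wrapper:
    bos_token = eos_token = pad_token = None
    bos_token_id = eos_token_id = pad_token_id = None
    def __init__(self, inner):
        self.tokenizer = inner

tokenizer = Wrapper(InnerTokenizer())
# Copy each special token and its id from the inner tokenizer,
# but only when the wrapper actually exposes that attribute.
for name in ("bos_token", "eos_token", "pad_token"):
    if hasattr(tokenizer, name):
        setattr(tokenizer, name, getattr(tokenizer.tokenizer, name))
        setattr(tokenizer, name + "_id", getattr(tokenizer.tokenizer, name + "_id"))

assert tokenizer.eos_token == "</s>" and tokenizer.eos_token_id == 2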