
Commit e4b335b

Minor changes 1

Signed-off-by: Dipankar Sarkar <[email protected]>

1 parent ef01192 commit e4b335b

File tree

2 files changed: +3 −2 lines changed

QEfficient/transformers/models/modeling_auto.py

Lines changed: 1 addition & 1 deletion
@@ -1415,7 +1415,7 @@ def __init__(
         self.num_layers = model.config.num_hidden_layers
         self.continuous_batching = continuous_batching
         self.model.qaic_config = qaic_config
-        self.pretrained_model_name_or_path = kwargs.pop("pretrained_model_name_or_path", None)
+        self.pretrained_model_name_or_path = kwargs.get("pretrained_model_name_or_path", None)
         self.model, transformed = SpDTransform.apply(self.model, qaic_config, **kwargs)
         self.is_tlm = transformed
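Note on this change: switching kwargs.pop to kwargs.get means __init__ reads pretrained_model_name_or_path without removing it, so the key is still present in the **kwargs forwarded to SpDTransform.apply on the following line. A minimal sketch of the difference (not repository code; the dictionary value is a placeholder):

    # Minimal sketch, assuming a kwargs dict like the one passed to __init__;
    # "org/model" is a made-up placeholder value.
    kwargs = {"pretrained_model_name_or_path": "org/model"}

    # pop() returns the value and removes the key, so later consumers of
    # **kwargs no longer see it.
    value = kwargs.pop("pretrained_model_name_or_path", None)
    assert "pretrained_model_name_or_path" not in kwargs

    # get() returns the value but leaves the key in place, so it is still
    # forwarded along with the remaining keyword arguments.
    kwargs = {"pretrained_model_name_or_path": "org/model"}
    value = kwargs.get("pretrained_model_name_or_path", None)
    assert "pretrained_model_name_or_path" in kwargs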

QEfficient/transformers/models/pytorch_transforms.py

Lines changed: 2 additions & 1 deletion
@@ -503,7 +503,7 @@ class SpDTransform:
     @classmethod
     def apply(cls, model: nn.Module, qaic_config: Optional[dict] = None, **kwargs) -> Tuple[nn.Module, bool]:
         transformed = False
-
+        pretrained_model_name_or_path_temp = kwargs.pop("pretrained_model_name_or_path", None)
         if qaic_config is None or (speculative_model_type := qaic_config.get("speculative_model_type")) is None:
             return model, transformed
         elif speculative_model_type not in (
@@ -525,6 +525,7 @@ def apply(cls, model: nn.Module, qaic_config: Optional[dict] = None, **kwargs) -> Tuple[nn.Module, bool]:
             raise NotImplementedError(
                 f"model class {model_class} does not yet support returning multiple logits to keep."
             )
+        kwargs["pretrained_model_name_or_path"] = pretrained_model_name_or_path_temp
         return model, transformed
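Note on this change: SpDTransform.apply now pops pretrained_model_name_or_path at the start and writes it back into kwargs just before returning. A plausible reading, not confirmed by this diff alone, is that code in the elided middle of the method forwards **kwargs somewhere that does not accept this argument, so the value is held aside and restored afterwards. A minimal sketch of the pop-and-restore idiom under that assumption (the helper name in the comment is hypothetical):

    # Minimal sketch, not repository code; load_speculative_model is a
    # hypothetical stand-in for whatever consumes **kwargs inside apply().
    def apply_sketch(model, **kwargs):
        # Hold the value aside so downstream calls taking **kwargs do not
        # receive an unexpected pretrained_model_name_or_path argument.
        name_or_path = kwargs.pop("pretrained_model_name_or_path", None)

        # ... elided body, e.g. load_speculative_model(**kwargs) ...

        # Restore the key before returning so later uses of this dict still see it.
        kwargs["pretrained_model_name_or_path"] = name_or_path
        return model, False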
