We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
1 parent 66b41d4 · commit 63bc9eb (Copy full SHA for 63bc9eb)
QEfficient/cloud/finetune.py
@@ -110,7 +110,7 @@ def main(**kwargs):
110
# Enable gradient checkpointing
111
if train_config.gradient_checkpointing:
112
# Note: below attribute and method is only available in HuggingFace Transformer models.
113
- if model.supports_gradient_checkpointing:
+ if hasattr(model, "supports_gradient_checkpointing") and model.supports_gradient_checkpointing:
114
model.gradient_checkpointing_enable(gradient_checkpointing_kwargs={"preserve_rng_state": False})
115
else:
116
raise RuntimeError("Given model doesn't support gradient checkpointing. Please disable it and run it.")
0 commit comments