Skip to content

Commit b1980b2

Browse files
Revert "Make dynamism code robust to NotImplementedException (pytorch#148823)"
This reverts commit 6057641. Reverted pytorch#148823 on behalf of https://github.com/ZainRizvi due to Sorry but this is breaking internally, see D71042206 for details. To validate your fixes internally before relanding, you can follow the instructions here: https://fburl.com/fixing-ghfirst-reverts (see comment on pytorch#148823)
1 parent 38c5cf9 commit b1980b2

File tree

2 files changed

+7
-50
lines changed

2 files changed

+7
-50
lines changed

test/fx/test_dynamism.py

Lines changed: 0 additions & 36 deletions
Original file line number | Diff line number | Diff line change
@@ -110,42 +110,6 @@ def forward(self, x):
110110
}
111111
self.assertEqual(result, expected)
112112

113-
def test_property_not_implemented(self):
114-
class ModuleWithNotImplementedProperty(torch.nn.Module):
115-
def __init__(self, x, y):
116-
super().__init__()
117-
self.linear = torch.nn.Linear(x, y)
118-
119-
@property
120-
def not_implemented_property(self):
121-
raise NotImplementedError("This property is not implemented")
122-
123-
module1 = ModuleWithNotImplementedProperty(10, 10)
124-
module2 = ModuleWithNotImplementedProperty(10, 10)
125-
126-
result = track_dynamism_across_examples(
127-
[
128-
{"self": module1},
129-
{"self": module2},
130-
]
131-
)
132-
133-
expected = {
134-
"self": {
135-
"L['self']['_modules']['linear']['_parameters']['weight']": (
136-
False,
137-
False,
138-
),
139-
"L['self']['_modules']['linear']['_parameters']['bias']": (False,),
140-
"L['self']['_modules']['linear']['bias']": (False,),
141-
"L['self']['_modules']['linear']['in_features']": (False,),
142-
"L['self']['_modules']['linear']['out_features']": (False,),
143-
"L['self']['_modules']['linear']['weight']": (False, False),
144-
}
145-
}
146-
147-
self.assertEqual(result, expected)
148-
149113

150114
if __name__ == "__main__":
151115
run_tests()

torch/fx/experimental/_dynamism.py

Lines changed: 7 additions & 14 deletions
Original file line number | Diff line number | Diff line change
@@ -29,21 +29,14 @@ def module_to_nested_dict(module: torch.nn.Module) -> dict[str, Any]:
2929
self_dict["_modules"] = {}
3030

3131
for attr_name in dir(module):
32-
try:
33-
if not attr_name.startswith("_") and not callable(
34-
getattr(module, attr_name)
32+
if not attr_name.startswith("_") and not callable(getattr(module, attr_name)):
33+
attr_value = getattr(module, attr_name)
34+
if (
35+
not isinstance(attr_value, torch.nn.Module)
36+
and isinstance(attr_value, (int, float, torch.Tensor))
37+
and type(attr_value) is not bool
3538
):
36-
attr_value = getattr(module, attr_name)
37-
if (
38-
not isinstance(attr_value, torch.nn.Module)
39-
and isinstance(attr_value, (int, float, torch.Tensor))
40-
and type(attr_value) is not bool
41-
):
42-
self_dict[attr_name] = attr_value
43-
except NotImplementedError:
44-
# Skip attributes that raise NotImplementedError since they won't
45-
# contain any dynamism anyways.
46-
continue
39+
self_dict[attr_name] = attr_value
4740

4841
for name, param in module.named_parameters(recurse=False):
4942
self_dict["_parameters"][name] = param

0 commit comments

Comments (0)