Skip to content

Commit fb1a806

Browse files
Raymond Yang and houseroad
Raymond Yang
authored and committed
Fix wrongly handled attribute in MVN and test generating scripts (onnx#1877)
* fix * Add attribute * update * Resolve comments
1 parent b22041c commit fb1a806

File tree

9 files changed

+27
-20
lines changed

9 files changed

+27
-20
lines changed

docs/Changelog.md

+2-2
Original file line numberDiff line numberDiff line change
@@ -8761,8 +8761,8 @@ This version of the operator has been available since version 9 of the default O
87618761
#### Attributes
87628762

87638763
<dl>
8764-
<dt><tt>axes</tt> : list of ints</dt>
8765-
<dd>A list of integers, along which to reduce. The default is to reduce over all the dimensions of the input tensor. Use [0,2,3] (without C axis for N-D cases) for calculating means and variances along channels. Two variables with the same C-coordinate are associated with the same mean and variance.</dd>
8764+
<dt><tt>axes</tt> : list of ints (default is ['0', '2', '3'])</dt>
8765+
<dd>A list of integers, along which to reduce. The default is to calculate along axes [0,2,3] for computing the mean and variance along each channel. Two variables with the same C-coordinate are associated with the same mean and variance.</dd>
87668766
</dl>
87678767

87688768
#### Inputs

docs/Operators.md

+4-4
Original file line numberDiff line numberDiff line change
@@ -6945,8 +6945,8 @@ This version of the operator has been available since version 9 of the default O
69456945
#### Attributes
69466946

69476947
<dl>
6948-
<dt><tt>axes</tt> : list of ints</dt>
6949-
<dd>A list of integers, along which to reduce. The default is to reduce over all the dimensions of the input tensor. Use [0,2,3] (without C axis for N-D cases) for calculating means and variances along channels. Two variables with the same C-coordinate are associated with the same mean and variance.</dd>
6948+
<dt><tt>axes</tt> : list of ints (default is ['0', '2', '3'])</dt>
6949+
<dd>A list of integers, along which to reduce. The default is to calculate along axes [0,2,3] for computing the mean and variance along each channel. Two variables with the same C-coordinate are associated with the same mean and variance.</dd>
69506950
</dl>
69516951

69526952
#### Inputs
@@ -6998,10 +6998,10 @@ input_data = np.array([[[[0.8439683], [0.5665144], [0.05836735]],
69986998
[[0.69248444], [0.54119414], [0.07513223]]]], dtype=np.float32)
69996999

70007000
# Calculate expected output data
7001-
data_mean = np.mean(input_data, axis=(0, 1, 2, 3), keepdims=1)
7001+
data_mean = np.mean(input_data, axis=(0, 2, 3), keepdims=1)
70027002
data_mean_squared = np.power(data_mean, 2)
70037003
data_squared = np.power(input_data, 2)
7004-
data_squared_mean = np.mean(data_squared, axis=(0, 1, 2, 3), keepdims=1)
7004+
data_squared_mean = np.mean(data_squared, axis=(0, 2, 3), keepdims=1)
70057005
std = np.sqrt(data_squared_mean - data_mean_squared)
70067006
expected_output = (input_data - data_mean) / (std + 1e-9)
70077007

docs/TestCoverage.md

+2-2
Original file line numberDiff line numberDiff line change
@@ -3691,10 +3691,10 @@ input_data = np.array([[[[0.8439683], [0.5665144], [0.05836735]],
36913691
[[0.69248444], [0.54119414], [0.07513223]]]], dtype=np.float32)
36923692

36933693
# Calculate expected output data
3694-
data_mean = np.mean(input_data, axis=(0, 1, 2, 3), keepdims=1)
3694+
data_mean = np.mean(input_data, axis=(0, 2, 3), keepdims=1)
36953695
data_mean_squared = np.power(data_mean, 2)
36963696
data_squared = np.power(input_data, 2)
3697-
data_squared_mean = np.mean(data_squared, axis=(0, 1, 2, 3), keepdims=1)
3697+
data_squared_mean = np.mean(data_squared, axis=(0, 2, 3), keepdims=1)
36983698
std = np.sqrt(data_squared_mean - data_mean_squared)
36993699
expected_output = (input_data - data_mean) / (std + 1e-9)
37003700

onnx/backend/test/case/node/__init__.py

+7-1
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ def function_expand_helper(node, # type: NodeProto
2929
node_list = []
3030
input_names_map = dict()
3131
output_names_map = dict()
32-
attribute_map = node.attribute
32+
attribute_map = dict((a.name, a) for a in node.attribute)
3333

3434
for idx in range(len(function_proto.input)):
3535
input_names_map[function_proto.input[idx]] = node.input[idx] \
@@ -77,6 +77,12 @@ def function_testcase_helper(node, name): # type: (NodeProto, Text) -> List[Nod
7777
return []
7878
function_proto = schema.function_body # type: ignore
7979

80+
for attr in schema.attributes:
81+
if attr in [a.name for a in node.attribute]:
82+
continue
83+
if schema.attributes[attr].default_value:
84+
node.attribute.extend([schema.attributes[attr].default_value])
85+
8086
# function_proto.attributes
8187
node_list = function_expand_helper(node, function_proto, op_prefix)
8288
return node_list

onnx/backend/test/case/node/meanvariancenormalization.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -31,10 +31,10 @@ def export(): # type: () -> None
3131
[[0.69248444], [0.54119414], [0.07513223]]]], dtype=np.float32)
3232

3333
# Calculate expected output data
34-
data_mean = np.mean(input_data, axis=(0, 1, 2, 3), keepdims=1)
34+
data_mean = np.mean(input_data, axis=(0, 2, 3), keepdims=1)
3535
data_mean_squared = np.power(data_mean, 2)
3636
data_squared = np.power(input_data, 2)
37-
data_squared_mean = np.mean(data_squared, axis=(0, 1, 2, 3), keepdims=1)
37+
data_squared_mean = np.mean(data_squared, axis=(0, 2, 3), keepdims=1)
3838
std = np.sqrt(data_squared_mean - data_mean_squared)
3939
expected_output = (input_data - data_mean) / (std + 1e-9)
4040

onnx/backend/test/data/node/test_mvn/model.onnx

+2-2
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
 backend-test:g
1+
 backend-test:g
22
!
33
XY"MeanVarianceNormalizationtest_mvnZ
44
X
@@ -12,4 +12,4 @@
1212

1313

1414

15-
B
15+
B
Binary file not shown.
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
BYJlY�~?��>E�������W�����"� �X?�u�?uª?[m�2��EG�Y�>�Ŀ�å>�ƞ?&��?T���+Q��.h�?�h>�hk?H�Q?�-����?S*�= ���
1+
BYJl�d�?�;�>:�ſr����d���>D��>6nQ?��[?:���#rc�vyH�E3U?> ����,?�UD?�Pi?�ҿ�o����?��>�2�?�ϗ?��m����=msþ���

onnx/defs/nn/defs.cc

+7-6
Original file line numberDiff line numberDiff line change
@@ -1776,6 +1776,8 @@ static const char* mvn_ver9_doc = R"DOC(
17761776
on the input tensor X using formula: <br/> ``` (X-EX)/sqrt(E(X-EX)^2) ```
17771777
)DOC";
17781778

1779+
static std::vector<int64_t> mvn_default_axes = {0, 2, 3};
1780+
17791781
ONNX_OPERATOR_SET_SCHEMA(
17801782
MeanVarianceNormalization,
17811783
9,
@@ -1785,13 +1787,12 @@ ONNX_OPERATOR_SET_SCHEMA(
17851787
.Output(0, "Y", "Output tensor", "T")
17861788
.Attr(
17871789
"axes",
1788-
"A list of integers, along which to reduce. The default is to reduce over "
1789-
"all the dimensions of the input tensor. Use [0,2,3] (without C axis for "
1790-
"N-D cases) for calculating means and variances along channels. Two "
1791-
"variables with the same C-coordinate are associated "
1792-
"with the same mean and variance.",
1790+
"A list of integers, along which to reduce. The default is to "
1791+
"caculate along axes [0,2,3] for calculating mean and variance "
1792+
"along each channel. Two variables with the same C-coordinate "
1793+
"are associated with the same mean and variance.",
17931794
AttributeProto::INTS,
1794-
OPTIONAL)
1795+
mvn_default_axes)
17951796
.TypeConstraint(
17961797
"T",
17971798
{"tensor(float16)", "tensor(float)", "tensor(double)"},

0 commit comments

Comments
 (0)