Skip to content

Commit bf63b17

Browse files
qcdipankar and quic-dhirajku
authored and committed
Dependency package upgrade (quic#407)
Upgrading onnx, onnxruntime, onnxscript and protobuf. Also updating transformers to 4.52.3. 1. onnx==1.18.0 2. onnxruntime==1.22 3. onnxscript==0.2.5 4. protobuf==6.31.0 --------- Signed-off-by: Dipankar Sarkar <[email protected]> Signed-off-by: Dhiraj Kumar Sah <[email protected]>
1 parent e3f5ab4 commit bf63b17

File tree

2 files changed

+10
-9
lines changed

2 files changed

+10
-9
lines changed

pyproject.toml

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -28,11 +28,11 @@ dependencies = [
2828
"multidict==6.0.4",
2929
"urllib3<2",
3030
"sentencepiece==0.2.0",
31-
"onnx==1.16.0",
32-
"onnxruntime==1.16.3",
31+
"onnx==1.18.0",
32+
"onnxruntime==1.22",
3333
"numpy==1.26.4",
34-
"protobuf==3.20.2",
35-
"onnxscript==0.1.0.dev20240327",
34+
"protobuf==6.31.0",
35+
"onnxscript==0.2.5",
3636
"pillow===10.4.0",
3737
"sympy",
3838
"tensorboard",

tests/peft/test_peft_onnx_transforms.py

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -46,18 +46,19 @@ def test_adapter_weights_to_inputs_transform():
4646

4747
out_onnx, transformed = AdapterWeightsToInputsTransform.apply(test_onnx, adapter_name=adapter_name)
4848
assert transformed
49+
4950
assert (
5051
onnx.printer.to_text(out_onnx)
5152
== textwrap.dedent("""
5253
<
5354
ir_version: 8,
5455
opset_import: ["" : 17]
5556
>
56-
test_adapter_weights (float[n,32] input, float[32,32] layer1.weight, float[32,32] layer2.weight) => (float[n,32] output, float[32,32] layer1.weight_RetainedState, float[32,32] layer2.weight_RetainedState) {
57-
layer1output = MatMul (input, layer1.weight)
58-
output = MatMul (layer1output, layer2.weight)
59-
layer1.weight_RetainedState = Identity (layer1.weight)
60-
layer2.weight_RetainedState = Identity (layer2.weight)
57+
test_adapter_weights (float[n,32] input, float[32,32] "layer1.weight", float[32,32] "layer2.weight") => (float[n,32] output, float[32,32] "layer1.weight_RetainedState", float[32,32] "layer2.weight_RetainedState") {
58+
layer1output = MatMul (input, "layer1.weight")
59+
output = MatMul (layer1output, "layer2.weight")
60+
["layer1.weight_identity"] "layer1.weight_RetainedState" = Identity ("layer1.weight")
61+
["layer2.weight_identity"] "layer2.weight_RetainedState" = Identity ("layer2.weight")
6162
}
6263
""").strip()
6364
)

0 commit comments

Comments
 (0)