From 38f4193cfed18e529018da120fc3f389b003ad46 Mon Sep 17 00:00:00 2001
From: Zheng-Bicheng
Date: Thu, 16 May 2024 18:06:38 +0800
Subject: [PATCH] update reduce_logsumexp

---
 paddle2onnx/mapper/tensor/reduce_logsumexp.cc |  57 +++++++--
 paddle2onnx/mapper/tensor/reduce_logsumexp.h  |  13 +-
 tests/test_all.py                             | 114 -----------------
 tests/test_any.py                             | 110 -----------------
 tests/test_auto_scan_layer_norm.py            |   1 +
 tests/test_auto_scan_logsumexp.py             |   2 +-
 tests/test_auto_scan_reduce_ops.py            |   8 +-
 tests/test_logsumexp.py                       | 115 ------------------
 8 files changed, 53 insertions(+), 367 deletions(-)
 delete mode 100644 tests/test_all.py
 delete mode 100644 tests/test_any.py
 delete mode 100644 tests/test_logsumexp.py

diff --git a/paddle2onnx/mapper/tensor/reduce_logsumexp.cc b/paddle2onnx/mapper/tensor/reduce_logsumexp.cc
index 92f701465..a902ff46a 100644
--- a/paddle2onnx/mapper/tensor/reduce_logsumexp.cc
+++ b/paddle2onnx/mapper/tensor/reduce_logsumexp.cc
@@ -23,43 +23,74 @@ int32_t ReduceLogSumExpMapper::GetMinOpset(bool verbose) {
   return op_version;
 }
 
-void ReduceLogSumExpMapper::Opset7() {
+void ReduceLogSumExpMapper::Opset18() {
+  GetAttr("keepdim", &keep_dim_);
+  GetAttr("reduce_all", &reduce_all_);
+  GetAttr("axis", &dim_);
+
   auto x_info = GetInput("X");
-  auto out_info = GetOutput("Out");
-  std::string axis_name = "axis";
-  if (IsAttrVar(axis_name)) {
-    auto info = GetAttrVar(axis_name);
-    TryGetValue(info[0], &dim_);
+  std::string dims;
+  if (!reduce_all_) {
+    dims = helper_->Constant(ONNX_NAMESPACE::TensorProto::INT64, dim_);
   } else {
-    GetAttr(axis_name, &dim_);
+    dims = helper_->Constant(ONNX_NAMESPACE::TensorProto::INT64, Arange(0, x_info[0].Rank()));
+  }
+
+  std::string input_name = x_info[0].name;
+  auto input_tpye = x_info[0].dtype;
+  if (x_info[0].dtype == P2ODataType::BOOL) {
+    input_name = helper_->AutoCast(input_name, input_tpye, P2ODataType::INT32);
+    input_tpye = P2ODataType::INT32;
   }
+  auto reduce_node = helper_->MakeNode("ReduceLogSumExp", {input_name, dims});
+
+  // Add attribute
+  AddAttribute(reduce_node, "keepdims", static_cast<int64_t>(keep_dim_));
+  auto out_node_name = reduce_node->output(0);
   bool reduce_all_axes = dim_.size() == x_info[0].Rank();
   if (reduce_all_) {
     reduce_all_axes = true;
   }
+  if (!keep_dim_ && reduce_all_axes) {
+    out_node_name = helper_->Reshape(out_node_name, {-1});
+  }
+  auto out_info = GetOutput("Out");
+  helper_->AutoCast(out_node_name, out_info[0].name, input_tpye, out_info[0].dtype);
+}
+
+void ReduceLogSumExpMapper::Opset11() {
+  GetAttr("keepdim", &keep_dim_);
+  GetAttr("reduce_all", &reduce_all_);
+  GetAttr("axis", &dim_);
+  auto x_info = GetInput("X");
+  auto out_info = GetOutput("Out");
   std::string input_name = x_info[0].name;
-  if (OpType() == "reduce_prod" && x_info[0].dtype == P2ODataType::FP64) {
-    input_name = helper_->AutoCast(x_info[0].name, P2ODataType::FP64, P2ODataType::FP32);
+  auto input_tpye = x_info[0].dtype;
+  if (x_info[0].dtype == P2ODataType::BOOL) {
+    input_name = helper_->AutoCast(input_name, input_tpye, P2ODataType::INT32);
+    input_tpye = P2ODataType::INT32;
   }
   auto reduce_node = helper_->MakeNode("ReduceLogSumExp", {input_name});
-
+  // Add attribute
   if (!reduce_all_) {
     AddAttribute(reduce_node, "axes", dim_);
   } else {
     AddAttribute(reduce_node, "axes", Arange(0, x_info[0].Rank()));
   }
   AddAttribute(reduce_node, "keepdims", static_cast<int64_t>(keep_dim_));
+
   auto out = reduce_node->output(0);
-  if (OpType() == "reduce_prod" && x_info[0].dtype == P2ODataType::FP64) {
-    out = helper_->AutoCast(reduce_node->output(0), P2ODataType::FP32,
-                            P2ODataType::FP64);
+  bool reduce_all_axes = dim_.size() == x_info[0].Rank();
+  if (reduce_all_) {
+    reduce_all_axes = true;
   }
   if (!keep_dim_ && reduce_all_axes) {
     out = helper_->Reshape(out, {-1});
   }
-  helper_->AutoCast(out, out_info[0].name, x_info[0].dtype, out_info[0].dtype);
+  helper_->AutoCast(out, out_info[0].name, input_tpye, out_info[0].dtype);
 }
 
 }  // namespace paddle2onnx
diff --git a/paddle2onnx/mapper/tensor/reduce_logsumexp.h b/paddle2onnx/mapper/tensor/reduce_logsumexp.h
index 182b775fc..96098932e 100755
--- a/paddle2onnx/mapper/tensor/reduce_logsumexp.h
+++ b/paddle2onnx/mapper/tensor/reduce_logsumexp.h
@@ -25,17 +25,10 @@ class ReduceLogSumExpMapper : public Mapper {
   ReduceLogSumExpMapper(const PaddleParser& p, OnnxHelper* helper,
                         int64_t block_id, int64_t op_id)
       : Mapper(p, helper, block_id, op_id) {
-    if (OpType() == "logsumexp") {
-      GetAttr("keepdim", &keep_dim_);
-      GetAttr("reduce_all", &reduce_all_);
-    } else {
-      GetAttr("keep_dim", &keep_dim_);
-      GetAttr("reduce_all", &reduce_all_);
-      GetAttr("in_dtype", &in_dtype_);
-      GetAttr("out_dtype", &out_dtype_);
-    }
   }
-  void Opset7();
+
+  void Opset18() override;
+  void Opset11();
 
   int32_t GetMinOpset(bool verbose = false);
diff --git a/tests/test_all.py b/tests/test_all.py
deleted file mode 100644
index d63b5bab4..000000000
--- a/tests/test_all.py
+++ /dev/null
@@ -1,114 +0,0 @@
-# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import paddle
-from onnxbase import APIOnnx
-from onnxbase import randtool
-
-
-class Net(paddle.nn.Layer):
-    """
-    simple Net
-    """
-
-    def __init__(self, axis=None, keepdim=False):
-        super(Net, self).__init__()
-        self.axis = axis
-        self.keepdim = keepdim
-
-    def forward(self, inputs):
-        """
-        forward
-        """
-        x = paddle.all(inputs, axis=self.axis, keepdim=self.keepdim)
-        return x
-
-
-def test_all_11():
-    """
-    api: paddle.all
-    op version: 11
-    """
-    op = Net()
-    op.eval()
-    # net, name, ver_list, delta=1e-6, rtol=1e-5
-    obj = APIOnnx(op, 'all', [11])
-    obj.set_input_data(
-        "input_data",
-        paddle.to_tensor(randtool("float", -1, 1, [3, 10]).astype('bool')))
-    obj.run()
-
-
-def test_all_12():
-    """
-    api: paddle.all
-    op version: 12
-    """
-    op = Net()
-    op.eval()
-    # net, name, ver_list, delta=1e-6, rtol=1e-5
-    obj = APIOnnx(op, 'all', [12])
-    obj.set_input_data(
-        "input_data",
-        paddle.to_tensor(randtool("float", -1, 1, [3, 10]).astype('bool')))
-    obj.run()
-
-
-def test_all_keepdim():
-    """
-    api: paddle.all
-    op version: 12
-    """
-    op = Net(keepdim=True)
-    op.eval()
-    # net, name, ver_list, delta=1e-6, rtol=1e-5
-    obj = APIOnnx(op, 'all', [12])
-    obj.set_input_data(
-        "input_data",
-        paddle.to_tensor(randtool("float", -1, 1, [4, 3, 10]).astype('bool')))
-    obj.run()
-
-
-def test_all_axis():
-    """
-    api: paddle.all
-    op version: 12
-    """
-    op = Net(axis=1)
-    op.eval()
-    # net, name, ver_list, delta=1e-6, rtol=1e-5
-    obj = APIOnnx(op, 'all', [12])
-    obj.set_input_data(
-        "input_data",
-        paddle.to_tensor(randtool("float", -1, 1, [4, 3, 10]).astype('bool')))
-    obj.run()
-
-
-def test_all_axis_keepdim():
-    """
-    api: paddle.all
-    op version: 12
-    """
-    op = Net(axis=1, keepdim=True)
-    op.eval()
-    # net, name, ver_list, delta=1e-6, rtol=1e-5
-    obj = APIOnnx(op, 'all', [12])
-    obj.set_input_data(
-        "input_data",
-        paddle.to_tensor(randtool("float", -1, 1, [4, 3, 10]).astype('bool')))
-    obj.run()
-
-
-if __name__ == "__main__":
-    test_all_11()
\ No newline at end of file
diff --git a/tests/test_any.py b/tests/test_any.py
deleted file mode 100644
index 4bc7b0459..000000000
--- a/tests/test_any.py
+++ /dev/null
@@ -1,110 +0,0 @@
-# Copyright (c) 2021 PaddlePaddle Authors. any Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import paddle
-from onnxbase import APIOnnx
-from onnxbase import randtool
-
-
-class Net(paddle.nn.Layer):
-    """
-    simple Net
-    """
-
-    def __init__(self, axis=None, keepdim=False):
-        super(Net, self).__init__()
-        self.axis = axis
-        self.keepdim = keepdim
-
-    def forward(self, inputs):
-        """
-        forward
-        """
-        x = paddle.any(inputs, axis=self.axis, keepdim=self.keepdim)
-        return x
-
-
-def test_any_11():
-    """
-    api: paddle.any
-    op version: 11
-    """
-    op = Net()
-    op.eval()
-    # net, name, ver_list, delta=1e-6, rtol=1e-5
-    obj = APIOnnx(op, 'any', [11])
-    obj.set_input_data(
-        "input_data",
-        paddle.to_tensor(randtool("float", -1, 1, [3, 10]).astype('bool')))
-    obj.run()
-
-
-def test_any_12():
-    """
-    api: paddle.any
-    op version: 12
-    """
-    op = Net()
-    op.eval()
-    # net, name, ver_list, delta=1e-6, rtol=1e-5
-    obj = APIOnnx(op, 'any', [12])
-    obj.set_input_data(
-        "input_data",
-        paddle.to_tensor(randtool("float", -1, 1, [3, 10]).astype('bool')))
-    obj.run()
-
-
-def test_any_keepdim():
-    """
-    api: paddle.any
-    op version: 12
-    """
-    op = Net(keepdim=True)
-    op.eval()
-    # net, name, ver_list, delta=1e-6, rtol=1e-5
-    obj = APIOnnx(op, 'any', [12])
-    obj.set_input_data(
-        "input_data",
-        paddle.to_tensor(randtool("float", -1, 1, [4, 3, 10]).astype('bool')))
-    obj.run()
-
-
-def test_any_axis():
-    """
-    api: paddle.any
-    op version: 12
-    """
-    op = Net(axis=1)
-    op.eval()
-    # net, name, ver_list, delta=1e-6, rtol=1e-5
-    obj = APIOnnx(op, 'any', [12])
-    obj.set_input_data(
-        "input_data",
-        paddle.to_tensor(randtool("float", -1, 1, [4, 3, 10]).astype('bool')))
-    obj.run()
-
-
-def test_any_axis_keepdim():
-    """
-    api: paddle.any
-    op version: 12
-    """
-    op = Net(axis=1, keepdim=True)
-    op.eval()
-    # net, name, ver_list, delta=1e-6, rtol=1e-5
-    obj = APIOnnx(op, 'any', [12])
-    obj.set_input_data(
-        "input_data",
-        paddle.to_tensor(randtool("float", -1, 1, [4, 3, 10]).astype('bool')))
-    obj.run()
diff --git a/tests/test_auto_scan_layer_norm.py b/tests/test_auto_scan_layer_norm.py
index ba416d2d4..0dadef66f 100755
--- a/tests/test_auto_scan_layer_norm.py
+++ b/tests/test_auto_scan_layer_norm.py
@@ -68,6 +68,7 @@ def sample_convert_config(self, draw):
             input_shape[4] = 10
 
         axis = draw(st.integers(min_value=1, max_value=len(input_shape) - 1))
+        # axis_type = draw(st.sampled_from(["int", "list"]))
         axis_type = draw(st.sampled_from(["int", "list"]))
         if axis_type == "int":
             normalized_shape = input_shape[-1]
diff --git a/tests/test_auto_scan_logsumexp.py b/tests/test_auto_scan_logsumexp.py
index bf125dd89..2750e3898 100644
--- a/tests/test_auto_scan_logsumexp.py
+++ b/tests/test_auto_scan_logsumexp.py
@@ -56,7 +56,7 @@ def sample_convert_config(self, draw):
            "op_names": ["logsumexp"],
            "test_data_shapes": [input_shape],
            "test_data_types": [[dtype]],
-           "opset_version": [7, 9, 15],
+           "opset_version": [11, 13, 18],
            "input_spec_shape": [],
            "axis": axis,
            "keepdim": keepdim,
diff --git a/tests/test_auto_scan_reduce_ops.py b/tests/test_auto_scan_reduce_ops.py
index cefa7fb72..223d67d16 100755
--- a/tests/test_auto_scan_reduce_ops.py
+++ b/tests/test_auto_scan_reduce_ops.py
@@ -21,11 +21,11 @@
 import random
 
 op_api_map = {
-    # "reduce_max": paddle.max,
+    "reduce_max": paddle.max,
     "reduce_min": paddle.min,
-    # "reduce_mean": paddle.mean,
-    # "reduce_sum": paddle.sum,
-    # "reduce_prod": paddle.prod,
+    "reduce_mean": paddle.mean,
+    "reduce_sum": paddle.sum,
+    "reduce_prod": paddle.prod,
 }
 
 opset_version_map = {
diff --git a/tests/test_logsumexp.py b/tests/test_logsumexp.py
deleted file mode 100644
index fe02d51d2..000000000
--- a/tests/test_logsumexp.py
+++ /dev/null
@@ -1,115 +0,0 @@
-# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import paddle
-from onnxbase import APIOnnx
-from onnxbase import randtool
-
-
-class Net(paddle.nn.Layer):
-    """
-    simple Net
-    """
-
-    def __init__(self, keepdim=False, axis=None):
-        super(Net, self).__init__()
-        self.keepdim = keepdim
-        self.axis = axis
-
-    def forward(self, inputs):
-        """
-        forward
-        """
-        x = paddle.logsumexp(inputs, keepdim=self.keepdim, axis=self.axis)
-        return x
-
-
-def test_logsumexp_10():
-    """
-    api: paddle.logsumexp
-    op version: 10
-    """
-    op = Net()
-    op.eval()
-    # net, name, ver_list, delta=1e-6, rtol=1e-5
-    obj = APIOnnx(op, 'logsumexp', [10])
-    obj.set_input_data(
-        "input_data",
-        paddle.to_tensor(
-            randtool("float", -1, 1, [3, 3, 10]).astype('float32')))
-    obj.run()
-
-
-def test_logsumexp_11():
-    """
-    api: paddle.logsumexp
-    op version: 11
-    """
-    op = Net()
-    op.eval()
-    # net, name, ver_list, delta=1e-6, rtol=1e-5
-    obj = APIOnnx(op, 'logsumexp', [11])
-    obj.set_input_data(
-        "input_data",
-        paddle.to_tensor(
-            randtool("float", -1, 1, [3, 3, 10]).astype('float32')))
-    obj.run()
-
-
-def test_logsumexp_12():
-    """
-    api: paddle.logsumexp
-    op version: 12
-    """
-    op = Net()
-    op.eval()
-    # net, name, ver_list, delta=1e-6, rtol=1e-5
-    obj = APIOnnx(op, 'logsumexp', [12])
-    obj.set_input_data(
-        "input_data",
-        paddle.to_tensor(
-            randtool("float", -1, 1, [3, 3, 10]).astype('float32')))
-    obj.run()
-
-
-def test_logsumexp_keepdim():
-    """
-    api: paddle.logsumexp
-    op version: 12
-    """
-    op = Net(keepdim=True)
-    op.eval()
-    # net, name, ver_list, delta=1e-6, rtol=1e-5
-    obj = APIOnnx(op, 'logsumexp', [12])
-    obj.set_input_data(
-        "input_data",
-        paddle.to_tensor(
-            randtool("float", -1, 1, [3, 3, 10]).astype('float32')))
-    obj.run()
-
-
-def test_logsumexp_axis():
-    """
-    api: paddle.logsumexp
-    op version: 12
-    """
-    op = Net(axis=2)
-    op.eval()
-    # net, name, ver_list, delta=1e-6, rtol=1e-5
-    obj = APIOnnx(op, 'logsumexp', [12])
-    obj.set_input_data(
-        "input_data",
-        paddle.to_tensor(
-            randtool("float", -1, 1, [3, 3, 10]).astype('float32')))
-    obj.run()
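
For manually exercising the new Opset11/Opset18 mappers outside the auto-scan suite, the sketch below mirrors the style of the removed tests/test_logsumexp.py. It is a minimal sketch, not part of the patch: it assumes the onnxbase.APIOnnx and randtool helpers behave as they did in those removed tests, LogSumExpNet and check_logsumexp_export are hypothetical names, and the opset list follows the [11, 13, 18] values now used in test_auto_scan_logsumexp.py.

import paddle
from onnxbase import APIOnnx
from onnxbase import randtool


class LogSumExpNet(paddle.nn.Layer):
    """Minimal wrapper around paddle.logsumexp for export checks."""

    def __init__(self, axis=None, keepdim=False):
        super(LogSumExpNet, self).__init__()
        self.axis = axis
        self.keepdim = keepdim

    def forward(self, inputs):
        return paddle.logsumexp(inputs, axis=self.axis, keepdim=self.keepdim)


def check_logsumexp_export():
    # Reduce over one axis with keepdim=True, at the opsets targeted by the new mappers.
    op = LogSumExpNet(axis=1, keepdim=True)
    op.eval()
    obj = APIOnnx(op, 'logsumexp', [11, 13, 18])
    obj.set_input_data(
        "input_data",
        paddle.to_tensor(
            randtool("float", -1, 1, [3, 3, 10]).astype('float32')))
    obj.run()


if __name__ == "__main__":
    check_logsumexp_export()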