diff --git a/modules/dnn/src/layers/convolution_layer.cpp b/modules/dnn/src/layers/convolution_layer.cpp
index e291d6ef6d..6bb8994a3c 100644
--- a/modules/dnn/src/layers/convolution_layer.cpp
+++ b/modules/dnn/src/layers/convolution_layer.cpp
@@ -1325,19 +1325,6 @@ public:
         const int group = numOutput / outGroupCn;
         if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
         {
-            if (padMode.empty()) {
-                for (int i = 0; i < adjust_pads.size(); i++) {
-                    if (pads_end[i] < adjust_pads[i])
-                        return false;
-                }
-            } else if (padMode == "SAME") {
-                for (int i = 0; i < adjust_pads.size(); i++) {
-                    if (kernel_size[i] < pads_begin[i] + 1 + adjust_pads[i])
-                        return false;
-                }
-            } else if (padMode == "VALID")
-                return false;
-
             return group == 1;
         }
 
@@ -2042,20 +2029,16 @@ public:
             ieWeights = std::make_shared<ngraph::op::Constant>(ngraph::element::f32, kernel_shape, newWeights.data);
         }
         std::vector<size_t> paddings_end;
-        if (padMode.empty())
-        {
-            for (int i = 0; i < pads_end.size(); i++) {
-                paddings_end.push_back(pads_end[i] - adjust_pads[i]);
-            }
-        }
-        else if (padMode == "SAME")
+        if (padMode == "SAME")
         {
             for (int i = 0; i < pads_begin.size(); i++) {
                 paddings_end.push_back(kernel_size[i] - pads_begin[i] - 1 - adjust_pads[i]);
             }
+            adjust_pads = std::vector<size_t>(pads_begin.size(), 0);
         } else {
             paddings_end = pads_end;
         }
 
+        ngraph::op::PadType pad_type = padMode == "VALID" ? ngraph::op::PadType::VALID : ngraph::op::PadType::EXPLICIT;
         auto deconv = std::make_shared<ngraph::op::v1::ConvolutionBackpropData>(
             ieInpNode,
@@ -2063,7 +2046,10 @@ public:
             ngraph::Strides(strides),
             ngraph::CoordinateDiff(std::vector<std::ptrdiff_t>(pads_begin.begin(), pads_begin.end())),
             ngraph::CoordinateDiff(std::vector<std::ptrdiff_t>(paddings_end.begin(), paddings_end.end())),
-            ngraph::Strides(dilations));
+            ngraph::Strides(dilations),
+            pad_type,
+            ngraph::CoordinateDiff(std::vector<std::ptrdiff_t>(adjust_pads.begin(), adjust_pads.end())));
+
         if (hasBias() || fusedBias)
         {
             std::vector<size_t> shape(deconv->get_shape().size(), 1);