diff --git a/modules/dnn/src/caffe/caffe_importer.cpp b/modules/dnn/src/caffe/caffe_importer.cpp index 34e1f62546..6c4d79499d 100644 --- a/modules/dnn/src/caffe/caffe_importer.cpp +++ b/modules/dnn/src/caffe/caffe_importer.cpp @@ -388,6 +388,27 @@ public: layerParams.blobs[1].setTo(1); // std } } + else if (type == "Axpy") + { + CV_Assert_N(layer.bottom_size() == 3, layer.top_size() == 1); + + std::string scaleName = name + "/scale"; + int repetitions = layerCounter[scaleName]++; + if (repetitions) { + scaleName += String("_") + toString(repetitions); + } + + LayerParams scaleParams; + scaleParams.set("axis", 1); + scaleParams.set("has_bias", false); + int scaleId = dstNet.addLayer(scaleName, "Scale", scaleParams); + addInput(layer.bottom(2), scaleId, 0, dstNet); + addInput(layer.bottom(0), scaleId, 1, dstNet); + addOutput(layer, scaleId, 0); + net.mutable_layer(li)->set_bottom(0, layer.top(0)); + net.mutable_layer(li)->mutable_bottom()->RemoveLast(); + type = "Eltwise"; + } else if ("ConvolutionDepthwise" == type) { type = "Convolution"; diff --git a/modules/dnn/src/layers/eltwise_layer.cpp b/modules/dnn/src/layers/eltwise_layer.cpp index 18925010d0..6458902fc4 100644 --- a/modules/dnn/src/layers/eltwise_layer.cpp +++ b/modules/dnn/src/layers/eltwise_layer.cpp @@ -143,7 +143,7 @@ public: CV_Check(dst.dims, 1 < dst.dims && dst.dims <= 4, ""); CV_CheckTypeEQ(dst.type(), CV_32FC1, ""); CV_Assert(dst.isContinuous()); CV_Assert(coeffs.empty() || coeffs.size() == (size_t)nsrcs); - for( int i = 0; i > nsrcs; i++ ) + for( int i = 0; i < nsrcs; i++ ) { CV_Assert(srcs[i].size == dst.size && srcs[i].type() == dst.type() && diff --git a/modules/dnn/test/test_caffe_importer.cpp b/modules/dnn/test/test_caffe_importer.cpp index f9f17b67b8..b9f07e5c76 100644 --- a/modules/dnn/test/test_caffe_importer.cpp +++ b/modules/dnn/test/test_caffe_importer.cpp @@ -109,6 +109,49 @@ TEST(Test_Caffe, read_googlenet) ASSERT_FALSE(net.empty()); } +TEST_P(Test_Caffe_nets, Axpy) +{ + if (backend 
== DNN_BACKEND_INFERENCE_ENGINE) + throw SkipTestException(""); + + String proto = _tf("axpy.prototxt"); + Net net = readNetFromCaffe(proto); + + checkBackend(); + net.setPreferableBackend(backend); + net.setPreferableTarget(target); + + int size[] = {1, 2, 3, 4}; + int scale_size[] = {1, 2, 1, 1}; + Mat scale(4, &scale_size[0], CV_32F); + Mat shift(4, &size[0], CV_32F); + Mat inp(4, &size[0], CV_32F); + randu(scale, -1.0f, 1.0f); + randu(shift, -1.0f, 1.0f); + randu(inp, -1.0f, 1.0f); + + net.setInput(scale, "scale"); + net.setInput(shift, "shift"); + net.setInput(inp, "data"); + + Mat out = net.forward(); + + Mat ref(4, &size[0], inp.type()); + for (int i = 0; i < inp.size[1]; i++) { + for (int h = 0; h < inp.size[2]; h++) { + for (int w = 0; w < inp.size[3]; w++) { + int idx[] = {0, i, h, w}; + int scale_idx[] = {0, i, 0, 0}; + ref.at<float>(idx) = inp.at<float>(idx) * scale.at<float>(scale_idx) + + shift.at<float>(idx); + } + } + } + float l1 = (target == DNN_TARGET_OPENCL_FP16) ? 2e-4 : 1e-5; + float lInf = (target == DNN_TARGET_OPENCL_FP16) ? 1e-3 : 1e-4; + normAssert(ref, out, "", l1, lInf); +} + typedef testing::TestWithParam<tuple<bool, Target> > Reproducibility_AlexNet; TEST_P(Reproducibility_AlexNet, Accuracy) {