diff --git a/modules/dnn/src/layers/blank_layer.cpp b/modules/dnn/src/layers/blank_layer.cpp
index 1ed17bb357..0794eff9af 100644
--- a/modules/dnn/src/layers/blank_layer.cpp
+++ b/modules/dnn/src/layers/blank_layer.cpp
@@ -40,6 +40,7 @@
 //
 //M*/
 #include "../precomp.hpp"
+#include "../op_inf_engine.hpp"
 
 namespace cv
 {
@@ -53,6 +54,12 @@ public:
         setParamsFrom(params);
     }
 
+    virtual bool supportBackend(int backendId) CV_OVERRIDE
+    {
+        return backendId == DNN_BACKEND_DEFAULT ||
+               backendId == DNN_BACKEND_INFERENCE_ENGINE && haveInfEngine();
+    }
+
     bool getMemoryShapes(const std::vector<MatShape> &inputs,
                          const int requiredOutputs,
                          std::vector<MatShape> &outputs,
@@ -104,6 +111,19 @@ public:
             if (outputs[i].data != inputs[i]->data)
                 inputs[i]->copyTo(outputs[i]);
     }
+
+    virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
+    {
+#ifdef HAVE_INF_ENGINE
+        InferenceEngine::LayerParams lp;
+        lp.name = name;
+        lp.type = "Split";
+        lp.precision = InferenceEngine::Precision::FP32;
+        std::shared_ptr<InferenceEngine::SplitLayer> ieLayer(new InferenceEngine::SplitLayer(lp));
+        return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
+#endif  // HAVE_INF_ENGINE
+        return Ptr<BackendNode>();
+    }
 };
 
 Ptr<Layer> BlankLayer::create(const LayerParams& params)
diff --git a/modules/dnn/src/layers/normalize_bbox_layer.cpp b/modules/dnn/src/layers/normalize_bbox_layer.cpp
index 580b6b3e06..5e8ed65157 100644
--- a/modules/dnn/src/layers/normalize_bbox_layer.cpp
+++ b/modules/dnn/src/layers/normalize_bbox_layer.cpp
@@ -42,6 +42,7 @@
 
 #include "../precomp.hpp"
 #include "layers_common.hpp"
+#include "../op_inf_engine.hpp"
 
 namespace cv { namespace dnn {
 
@@ -60,6 +61,13 @@ public:
         CV_Assert(pnorm > 0);
     }
 
+    virtual bool supportBackend(int backendId) CV_OVERRIDE
+    {
+        return backendId == DNN_BACKEND_DEFAULT ||
+               backendId == DNN_BACKEND_INFERENCE_ENGINE && haveInfEngine() &&
+               pnorm == 2 && !blobs.empty();
+    }
+
     bool getMemoryShapes(const std::vector<MatShape> &inputs,
                          const int requiredOutputs,
                          std::vector<MatShape> &outputs,
@@ -228,6 +236,28 @@ public:
         }
     }
 
+    virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
+    {
+#ifdef HAVE_INF_ENGINE
+        InferenceEngine::LayerParams lp;
+        lp.name = name;
+        lp.type = "Normalize";
+        lp.precision = InferenceEngine::Precision::FP32;
+        std::shared_ptr<InferenceEngine::CNNLayer> ieLayer(new InferenceEngine::CNNLayer(lp));
+
+        CV_Assert(!blobs.empty());
+
+        ieLayer->params["eps"] = format("%f", epsilon);
+        ieLayer->params["across_spatial"] = acrossSpatial ? "1" : "0";
+        ieLayer->params["channel_shared"] = blobs[0].total() == 1 ? "1" : "0";
+
+        const int numChannels = blobs[0].total();
+        ieLayer->blobs["weights"] = wrapToInfEngineBlob(blobs[0], {numChannels}, InferenceEngine::Layout::C);
+        return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
+#endif  // HAVE_INF_ENGINE
+        return Ptr<BackendNode>();
+    }
+
 private:
     int startAxis, endAxis;
 };
diff --git a/modules/dnn/src/op_inf_engine.cpp b/modules/dnn/src/op_inf_engine.cpp
index 1514573095..129ed94a5d 100644
--- a/modules/dnn/src/op_inf_engine.cpp
+++ b/modules/dnn/src/op_inf_engine.cpp
@@ -18,6 +18,11 @@ namespace cv { namespace dnn {
 
 #ifdef HAVE_INF_ENGINE
 
+static int infEngineVersion()
+{
+    return std::atoi(InferenceEngine::GetInferenceEngineVersion()->buildNumber);
+}
+
 InfEngineBackendNode::InfEngineBackendNode(const InferenceEngine::CNNLayerPtr& _layer)
     : BackendNode(DNN_BACKEND_INFERENCE_ENGINE), layer(_layer) {}
 
@@ -58,9 +63,23 @@ static InferenceEngine::DataPtr wrapToInfEngineDataNode(const Mat& m, const std::string& name = "")
 {
     std::vector<size_t> reversedShape(&m.size[0], &m.size[0] + m.dims);
     std::reverse(reversedShape.begin(), reversedShape.end());
-    return InferenceEngine::DataPtr(
-        new InferenceEngine::Data(name, reversedShape, InferenceEngine::Precision::FP32)
-    );
+    if (infEngineVersion() > 5855)
+    {
+        InferenceEngine::Layout l = InferenceEngine::Layout::ANY;
+        if (m.dims == 4)
+            l = InferenceEngine::Layout::NCHW;
+        else if (m.dims == 2)
+            l = InferenceEngine::Layout::NC;
+        return InferenceEngine::DataPtr(
+            new InferenceEngine::Data(name, reversedShape, InferenceEngine::Precision::FP32, l)
+        );
+    }
+    else
+    {
+        return InferenceEngine::DataPtr(
+            new InferenceEngine::Data(name, reversedShape, InferenceEngine::Precision::FP32)
+        );
+    }
 }
 
 InferenceEngine::TBlob<float>::Ptr wrapToInfEngineBlob(const Mat& m, const std::vector<size_t>& shape,
@@ -336,10 +355,9 @@ void InfEngineBackendNet::initPlugin(InferenceEngine::ICNNNetwork& net)
     InferenceEngine::StatusCode status;
     InferenceEngine::ResponseDesc resp;
 
-    const InferenceEngine::Version* v = InferenceEngine::GetInferenceEngineVersion();
     plugin = InferenceEngine::PluginDispatcher({""}).getSuitablePlugin(targetDevice);
-    if (std::atoi(v->buildNumber) > 5855)
+    if (infEngineVersion() > 5855 && targetDevice == InferenceEngine::TargetDevice::eCPU)
     {
 #ifdef _WIN32
         InferenceEngine::IExtensionPtr extension =