From 0687cffe2163a16210421f6e5d9260e38abdbc8a Mon Sep 17 00:00:00 2001
From: Liubov Batanina
Date: Thu, 23 Jan 2020 15:32:16 +0300
Subject: [PATCH] Support logSoftMax

---
 modules/dnn/src/layers/softmax_layer.cpp | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/modules/dnn/src/layers/softmax_layer.cpp b/modules/dnn/src/layers/softmax_layer.cpp
index 75e31006de..d7ffef0bbf 100644
--- a/modules/dnn/src/layers/softmax_layer.cpp
+++ b/modules/dnn/src/layers/softmax_layer.cpp
@@ -92,7 +92,8 @@ public:
     {
         return backendId == DNN_BACKEND_OPENCV ||
                (backendId == DNN_BACKEND_HALIDE && haveHalide() && axisRaw == 1) ||
-               ((backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 || backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH) && haveInfEngine() && !logSoftMax);
+               backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH ||
+               (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && haveInfEngine() && !logSoftMax);
     }
 
 #ifdef HAVE_OPENCL
@@ -330,6 +331,9 @@ public:
         auto& ieInpNode = nodes[0].dynamicCast<InfEngineNgraphNode>()->node;
         int axis = clamp(axisRaw, ieInpNode->get_shape().size());
         auto softmax = std::make_shared<ngraph::op::v1::Softmax>(ieInpNode, axis);
+        if (logSoftMax)
+            return Ptr<BackendNode>(new InfEngineNgraphNode(std::make_shared<ngraph::op::v0::Log>(softmax)));
+
         return Ptr<BackendNode>(new InfEngineNgraphNode(softmax));
     }
 #endif // HAVE_DNN_NGRAPH
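Note (commentary, not part of the patch): for the nGraph backend the change implements logSoftMax by appending a Log node to the Softmax node, i.e. the built graph computes log(softmax(x)). The following standalone C++ sketch illustrates that composition and checks it against the numerically stable log-softmax form x[i] - max(x) - log(sum_j exp(x[j] - max(x))); it uses only the standard library, and the input values are made up for illustration:

    // Compares log(softmax(x)), which is what Log(Softmax(x)) computes in
    // the patched ngraph path, with the fused, numerically stable
    // log-softmax expression x[i] - max(x) - log(sum_j exp(x[j] - max(x))).
    #include <algorithm>
    #include <cmath>
    #include <cstdio>
    #include <vector>

    int main()
    {
        const std::vector<double> x = {1.0, 2.0, 3.0, 4.0};  // illustrative logits

        // Shared subexpressions: the max-shift and the normalizing sum.
        const double m = *std::max_element(x.begin(), x.end());
        double sum = 0.0;
        for (double v : x)
            sum += std::exp(v - m);

        for (size_t i = 0; i < x.size(); ++i)
        {
            const double logOfSoftmax = std::log(std::exp(x[i] - m) / sum); // Log applied after Softmax
            const double logSoftmax   = x[i] - m - std::log(sum);           // fused stable form
            std::printf("%zu: %.9f  %.9f\n", i, logOfSoftmax, logSoftmax);
        }
        return 0;
    }

The two columns agree up to floating-point rounding. Fused log-softmax is generally the more robust formulation for extreme logits, while the patch takes the simpler route of composing the two existing nGraph ops.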