diff --git a/cmake/OpenCVDetectInferenceEngine.cmake b/cmake/OpenCVDetectInferenceEngine.cmake
index 8656e851f4..5a4ddc6470 100644
--- a/cmake/OpenCVDetectInferenceEngine.cmake
+++ b/cmake/OpenCVDetectInferenceEngine.cmake
@@ -87,9 +87,9 @@ endif()
 
 if(INF_ENGINE_TARGET)
   if(NOT INF_ENGINE_RELEASE)
-    message(WARNING "InferenceEngine version have not been set, 2018R5 will be used by default. Set INF_ENGINE_RELEASE variable if you experience build errors.")
+    message(WARNING "InferenceEngine version have not been set, 2019R1 will be used by default. Set INF_ENGINE_RELEASE variable if you experience build errors.")
   endif()
-  set(INF_ENGINE_RELEASE "2018050000" CACHE STRING "Force IE version, should be in form YYYYAABBCC (e.g. 2018R2.0.2 -> 2018020002)")
+  set(INF_ENGINE_RELEASE "2019010000" CACHE STRING "Force IE version, should be in form YYYYAABBCC (e.g. 2018R2.0.2 -> 2018020002)")
   set_target_properties(${INF_ENGINE_TARGET} PROPERTIES
     INTERFACE_COMPILE_DEFINITIONS "HAVE_INF_ENGINE=1;INF_ENGINE_RELEASE=${INF_ENGINE_RELEASE}"
   )
diff --git a/modules/dnn/src/dnn.cpp b/modules/dnn/src/dnn.cpp
index 2246c6dc44..fc46db8938 100644
--- a/modules/dnn/src/dnn.cpp
+++ b/modules/dnn/src/dnn.cpp
@@ -1637,7 +1637,7 @@ struct Net::Impl
                  preferableTarget == DNN_TARGET_MYRIAD ||
                  preferableTarget == DNN_TARGET_FPGA) && !fused)
             {
-#if INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R5)
+#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)
                 for (const std::string& name : {"weights", "biases"})
                 {
                     auto it = ieNode->layer.getParameters().find(name);
diff --git a/modules/dnn/src/layers/normalize_bbox_layer.cpp b/modules/dnn/src/layers/normalize_bbox_layer.cpp
index 09385714a8..8760cad33b 100644
--- a/modules/dnn/src/layers/normalize_bbox_layer.cpp
+++ b/modules/dnn/src/layers/normalize_bbox_layer.cpp
@@ -290,7 +290,7 @@ public:
             weights = wrapToInfEngineBlob(blobs[0], {(size_t)numChannels}, InferenceEngine::Layout::C);
             l.getParameters()["channel_shared"] = blobs[0].total() == 1;
         }
-#if INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R5)
+#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)
         l.getParameters()["weights"] = weights;
 #else
         l.addConstantData("weights", weights);
diff --git a/modules/dnn/src/op_inf_engine.cpp b/modules/dnn/src/op_inf_engine.cpp
index ac55fdfea7..788a15e2f4 100644
--- a/modules/dnn/src/op_inf_engine.cpp
+++ b/modules/dnn/src/op_inf_engine.cpp
@@ -130,7 +130,7 @@ void InfEngineBackendNet::init(int targetId)
     for (int id : unconnectedLayersIds)
     {
         InferenceEngine::Builder::OutputLayer outLayer("myconv1");
-#if INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R5)
+#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)
         // Inference Engine determines network precision by ports.
         InferenceEngine::Precision p = (targetId == DNN_TARGET_MYRIAD ||
                                         targetId == DNN_TARGET_OPENCL_FP16) ?
@@ -188,7 +188,7 @@ void InfEngineBackendNet::init(int targetId)
 
 void InfEngineBackendNet::addLayer(InferenceEngine::Builder::Layer& layer)
 {
-#if INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R5)
+#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)
     // Add weights to network and connect them after input blobs.
     std::map<std::string, InferenceEngine::Parameter>& params = layer.getParameters();
     std::vector<int> blobsIds;
@@ -229,7 +229,7 @@ void InfEngineBackendNet::addLayer(InferenceEngine::Builder::Layer& layer)
     CV_Assert(layers.insert({layerName, id}).second);
     unconnectedLayersIds.insert(id);
 
-#if INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R5)
+#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)
     // By default, all the weights are connected to last ports ids.
     for (int i = 0; i < blobsIds.size(); ++i)
     {
@@ -903,7 +903,7 @@ InferenceEngine::Blob::Ptr convertFp16(const InferenceEngine::Blob::Ptr& blob)
 void addConstantData(const std::string& name, InferenceEngine::Blob::Ptr data,
                      InferenceEngine::Builder::Layer& l)
 {
-#if INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R5)
+#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)
     l.getParameters()[name] = data;
 #else
     l.addConstantData(name, data);
diff --git a/modules/dnn/src/op_inf_engine.hpp b/modules/dnn/src/op_inf_engine.hpp
index 8a661f3a13..48ee07e745 100644
--- a/modules/dnn/src/op_inf_engine.hpp
+++ b/modules/dnn/src/op_inf_engine.hpp
@@ -27,10 +27,11 @@
 #define INF_ENGINE_RELEASE_2018R3 2018030000
 #define INF_ENGINE_RELEASE_2018R4 2018040000
 #define INF_ENGINE_RELEASE_2018R5 2018050000
+#define INF_ENGINE_RELEASE_2019R1 2019010000
 
 #ifndef INF_ENGINE_RELEASE
-#warning("IE version have not been provided via command-line. Using 2018R5 by default")
-#define INF_ENGINE_RELEASE INF_ENGINE_RELEASE_2018R5
+#warning("IE version have not been provided via command-line. Using 2019R1 by default")
+#define INF_ENGINE_RELEASE INF_ENGINE_RELEASE_2019R1
 #endif
 
 #define INF_ENGINE_VER_MAJOR_GT(ver) (((INF_ENGINE_RELEASE) / 10000) > ((ver) / 10000))
diff --git a/modules/dnn/test/test_backends.cpp b/modules/dnn/test/test_backends.cpp
index 8f8c42ced0..24c0c8ce94 100644
--- a/modules/dnn/test/test_backends.cpp
+++ b/modules/dnn/test/test_backends.cpp
@@ -289,7 +289,7 @@ TEST_P(DNNTestNetwork, OpenFace)
 #if INF_ENGINE_VER_MAJOR_EQ(2018050000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
         throw SkipTestException("Test is disabled for Myriad targets");
-#elif INF_ENGINE_VER_MAJOR_GT(2018050000)
+#elif INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD &&
         getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
     )
diff --git a/modules/dnn/test/test_darknet_importer.cpp b/modules/dnn/test/test_darknet_importer.cpp
index 080fbaee68..34ecda0e7e 100644
--- a/modules/dnn/test/test_darknet_importer.cpp
+++ b/modules/dnn/test/test_darknet_importer.cpp
@@ -267,7 +267,7 @@ public:
 
 TEST_P(Test_Darknet_nets, YoloVoc)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
         throw SkipTestException("Test is disabled");
 #endif
diff --git a/modules/dnn/test/test_halide_layers.cpp b/modules/dnn/test/test_halide_layers.cpp
index 67d209f362..4c378c78fc 100644
--- a/modules/dnn/test/test_halide_layers.cpp
+++ b/modules/dnn/test/test_halide_layers.cpp
@@ -169,7 +169,7 @@ TEST_P(Deconvolution, Accuracy)
         throw SkipTestException("Test is disabled for OpenVINO 2018R4");
 #endif
 
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD
         && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
         && inChannels == 6 && outChannels == 4 && group == 1
@@ -351,7 +351,7 @@ TEST_P(MaxPooling, Accuracy)
         throw SkipTestException("Problems with output dimension in OpenVINO 2018R5");
 #endif
 
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD
         && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
         && (stride == Size(1, 1) || stride == Size(2, 2))
@@ -561,7 +561,7 @@ TEST_P(ReLU, Accuracy)
     float negativeSlope = get<0>(GetParam());
     Backend backendId = get<0>(get<1>(GetParam()));
     Target targetId = get<1>(get<1>(GetParam()));
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE
             && negativeSlope < 0
     )
@@ -589,7 +589,7 @@ TEST_P(NoParamActivation, Accuracy)
     LayerParams lp;
     lp.type = get<0>(GetParam());
     lp.name = "testLayer";
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE
             && lp.type == "AbsVal"
     )
@@ -688,7 +688,7 @@ TEST_P(Concat, Accuracy)
         throw SkipTestException("Test is disabled for Myriad target"); // crash
 #endif
 
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_CPU
             && inSize == Vec3i(1, 4, 5) && numChannels == Vec3i(1, 6, 2)
     )
@@ -769,7 +769,7 @@ TEST_P(Eltwise, Accuracy)
         throw SkipTestException("Test is disabled for Myriad target");
 #endif
 
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && numConv > 1)
         throw SkipTestException("Test is disabled for DLIE backend");
 #endif
diff --git a/modules/dnn/test/test_layers.cpp b/modules/dnn/test/test_layers.cpp
index aff0a349a1..6ce89e3033 100644
--- a/modules/dnn/test/test_layers.cpp
+++ b/modules/dnn/test/test_layers.cpp
@@ -236,7 +236,7 @@ TEST_P(Test_Caffe_layers, Dropout)
 
 TEST_P(Test_Caffe_layers, Concat)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
         throw SkipTestException("Test is disabled for Myriad targets");
 #endif
@@ -247,7 +247,7 @@ TEST_P(Test_Caffe_layers, Concat)
 
 TEST_P(Test_Caffe_layers, Fused_Concat)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE)
         throw SkipTestException("Test is disabled for DLIE due negative_slope parameter");
 #endif
diff --git a/modules/dnn/test/test_onnx_importer.cpp b/modules/dnn/test/test_onnx_importer.cpp
index 531a60fad1..0a9ce6141a 100644
--- a/modules/dnn/test/test_onnx_importer.cpp
+++ b/modules/dnn/test/test_onnx_importer.cpp
@@ -319,7 +319,7 @@ TEST_P(Test_ONNX_nets, ResNet50v1)
 
 TEST_P(Test_ONNX_nets, ResNet101_DUC_HDC)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE)
         throw SkipTestException("Test is disabled for DLIE targets");
 #endif
diff --git a/modules/dnn/test/test_tf_importer.cpp b/modules/dnn/test/test_tf_importer.cpp
index 395a965ada..31f0c748a6 100644
--- a/modules/dnn/test/test_tf_importer.cpp
+++ b/modules/dnn/test/test_tf_importer.cpp
@@ -140,7 +140,7 @@ TEST_P(Test_TensorFlow_layers, padding)
 
 TEST_P(Test_TensorFlow_layers, padding_same)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE)
         throw SkipTestException("Test is disabled for DLIE");
 #endif
@@ -197,7 +197,7 @@ TEST_P(Test_TensorFlow_layers, pooling)
 TEST_P(Test_TensorFlow_layers, ave_pool_same)
 {
     // Reference output values are in range [-0.519531, 0.112976]
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
         && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
     )
@@ -241,7 +241,7 @@ TEST_P(Test_TensorFlow_layers, reshape)
 
 TEST_P(Test_TensorFlow_layers, flatten)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE)
         throw SkipTestException("Test is disabled for DLIE");
 #endif
@@ -257,7 +257,7 @@ TEST_P(Test_TensorFlow_layers, flatten)
 
 TEST_P(Test_TensorFlow_layers, unfused_flatten)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE)
         throw SkipTestException("Test is disabled for DLIE");
 #endif
@@ -279,7 +279,7 @@ TEST_P(Test_TensorFlow_layers, leaky_relu)
 
 TEST_P(Test_TensorFlow_layers, l2_normalize)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
         && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
     )
@@ -587,7 +587,7 @@ TEST_P(Test_TensorFlow_layers, fp16_weights)
 
 TEST_P(Test_TensorFlow_layers, fp16_padding_same)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE)
         throw SkipTestException("Test is disabled for DLIE");
 #endif
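
Note on the version guards touched above: the sketch below shows how the encoded YYYYAABBCC values evaluate. INF_ENGINE_VER_MAJOR_GT is copied from the op_inf_engine.hpp hunk; INF_ENGINE_VER_MAJOR_GE is assumed here to be the analogous ">=" comparison (its definition is not part of this patch), and the example program is illustrative only.

    // Sketch only: GT is taken from op_inf_engine.hpp above; GE is an assumed ">=" analogue.
    #define INF_ENGINE_RELEASE_2018R5 2018050000
    #define INF_ENGINE_RELEASE_2019R1 2019010000
    #define INF_ENGINE_RELEASE INF_ENGINE_RELEASE_2019R1  // the new default chosen by this patch

    #define INF_ENGINE_VER_MAJOR_GT(ver) (((INF_ENGINE_RELEASE) / 10000) > ((ver) / 10000))
    #define INF_ENGINE_VER_MAJOR_GE(ver) (((INF_ENGINE_RELEASE) / 10000) >= ((ver) / 10000))  // assumed

    int main()
    {
        // Dividing by 10000 keeps only the YYYYAA "major" part: 2019010000 -> 201901.
        static_assert(INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R5), "201901 > 201805");
        static_assert(INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1), "201901 >= 201901");
        // GT(ver) is false when building against ver itself; GE(ver) reads as "ver or newer".
        static_assert(!INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2019R1), "201901 > 201901 is false");
        return 0;
    }

For the releases listed in the header, GE(2019R1) selects the same builds as the old GT(2018R5); the GE form simply names the first release that takes the new code path.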