From 413c05650420090143bdb7d6214b983577331fbb Mon Sep 17 00:00:00 2001
From: Anatoliy Talamanov
Date: Wed, 17 Feb 2021 14:43:18 +0300
Subject: [PATCH] Merge pull request #19533 from
 TolyaTalamanov:at/async-requests-hotfix

[G-API] Async infer request hotfix

* Fix hanging on empty roi list

* Prevent possible data race

* Clean up
---
 modules/gapi/src/backends/ie/giebackend.cpp  |  47 ++++---
 .../gapi/test/infer/gapi_infer_ie_test.cpp   | 120 ++++++++++++++++--
 2 files changed, 141 insertions(+), 26 deletions(-)

diff --git a/modules/gapi/src/backends/ie/giebackend.cpp b/modules/gapi/src/backends/ie/giebackend.cpp
index bc5a16ba70..949c803270 100644
--- a/modules/gapi/src/backends/ie/giebackend.cpp
+++ b/modules/gapi/src/backends/ie/giebackend.cpp
@@ -293,12 +293,12 @@ public:
     }
 
     // Syntax sugar
-    cv::GShape      inShape(int i) const;
-    const cv::Mat&  inMat  (std::size_t input) const;
+    cv::GShape      inShape(std::size_t input) const;
+    const cv::Mat&  inMat  (std::size_t input) const;
     const cv::MediaFrame& inFrame(std::size_t input) const;
 
     cv::Mat&        outMatR(std::size_t idx);
-    cv::GRunArgP    output (int idx);
+    cv::GRunArgP    output (std::size_t idx);
 
     const IEUnit &uu;
     cv::gimpl::GIslandExecutable::IOutput &out;
@@ -367,7 +367,7 @@ const cv::GArgs& IECallContext::inArgs() const {
     return m_args;
 }
 
-cv::GShape IECallContext::inShape(int i) const {
+cv::GShape IECallContext::inShape(std::size_t i) const {
     return m_in_shapes[i];
 }
 
@@ -383,7 +383,7 @@ cv::Mat& IECallContext::outMatR(std::size_t idx) {
     return *cv::util::get<cv::Mat*>(m_results.at(idx));
 }
 
-cv::GRunArgP IECallContext::output(int idx) {
+cv::GRunArgP IECallContext::output(std::size_t idx) {
     return m_output_objs[idx].second;
 };
 
@@ -518,16 +518,17 @@ cv::gimpl::ie::GIEExecutable::GIEExecutable(const ade::Graph &g,
 void cv::gimpl::ie::GIEExecutable::run(cv::gimpl::GIslandExecutable::IInput  &in,
                                        cv::gimpl::GIslandExecutable::IOutput &out) {
     // General alghoritm:
-    //     1. Get input message from IInput
+    //     1. Since only single async request is supported
+    //        wait until it is over and start collecting new data.
     //     2. Collect island inputs/outputs.
     //     3. Create kernel context. (Every kernel has his own context.)
-    //     4. Since only single async request is supported
-    //        wait until it is over and run kernel.
-    //        (At this point, an asynchronous request will be started.)
-    //     5. Without waiting for the completion of the asynchronous request
-    //        started by kernel go to the next frame (1)
+    //     4. Go to the next frame without waiting until the async request is over (1)
     //
-    //     6. If graph is compiled in non-streaming mode, wait until request is over.
+    //     5. If graph is compiled in non-streaming mode, wait until request is over.
+
+    // (1) To prevent data race on the IOutput object, need to wait
+    //     for async request callback, which post outputs and only after that get new data.
+    m_sync.wait();
 
     std::vector<InObj>  input_objs;
    std::vector<OutObj> output_objs;
@@ -538,9 +539,6 @@ void cv::gimpl::ie::GIEExecutable::run(cv::gimpl::GIslandExecutable::IInput  &in,
 
     if (cv::util::holds_alternative<cv::gimpl::EndOfStream>(in_msg))
     {
-        // (1) Since kernel is executing asynchronously
-        // need to wait until the previous is over
-        m_sync.wait();
        out.post(cv::gimpl::EndOfStream{});
         return;
     }
@@ -570,9 +568,6 @@ void cv::gimpl::ie::GIEExecutable::run(cv::gimpl::GIslandExecutable::IInput  &in,
                                               std::move(input_objs),
                                               std::move(output_objs));
 
-    // (4) Only single async request is supported now,
-    //     so need to wait until the previous is over.
-    m_sync.wait();
     // (5) Run the kernel and start handle next frame.
     const auto &kk = giem.metadata(this_nh).get<IECallable>();
     // FIXME: Running just a single node now.
@@ -859,6 +854,14 @@ struct InferList: public cv::detail::KernelTag {
             // by some resetInternalData(), etc? (Probably at the GExecutor level)
         }
 
+        // NB: If list of roi is empty need to post output data anyway.
+        if (in_roi_vec.empty()) {
+            for (auto i : ade::util::iota(ctx->uu.params.num_out)) {
+                ctx->out.post(ctx->output(i));
+            }
+            return;
+        }
+
         for (auto&& rc : in_roi_vec) {
             // NB: Only single async request is supported now,
             // so need to wait until previos iteration is over.
@@ -984,6 +987,14 @@ struct InferList2: public cv::detail::KernelTag {
             // by some resetInternalData(), etc? (Probably at the GExecutor level)
         }
 
+        // NB: If list of roi is empty need to post output data anyway.
+        if (list_size == 0u) {
+            for (auto i : ade::util::iota(ctx->uu.params.num_out)) {
+                ctx->out.post(ctx->output(i));
+            }
+            return;
+        }
+
         for (const auto &list_idx : ade::util::iota(list_size)) {
             // NB: Only single async request is supported now,
             // so need to wait until previos iteration is over.
diff --git a/modules/gapi/test/infer/gapi_infer_ie_test.cpp b/modules/gapi/test/infer/gapi_infer_ie_test.cpp
index 4611c3be9e..93571e5fd1 100644
--- a/modules/gapi/test/infer/gapi_infer_ie_test.cpp
+++ b/modules/gapi/test/infer/gapi_infer_ie_test.cpp
@@ -1112,10 +1112,7 @@ TEST(InferList, TestStreamingInfer)
 
     // Load IE network, initialize input data using that.
     cv::Mat in_mat;
-    std::vector<cv::Mat> ie_ages;
-    std::vector<cv::Mat> ie_genders;
-    std::vector<cv::Mat> gapi_ages;
-    std::vector<cv::Mat> gapi_genders;
+    std::vector<cv::Mat> ie_ages, ie_genders, gapi_ages, gapi_genders;
 
     std::vector<cv::Rect> roi_list = {
         cv::Rect(cv::Point{64, 60}, cv::Size{ 96, 96}),
@@ -1206,10 +1203,7 @@ TEST(Infer2, TestStreamingInfer)
 
     // Load IE network, initialize input data using that.
     cv::Mat in_mat;
-    std::vector<cv::Mat> ie_ages;
-    std::vector<cv::Mat> ie_genders;
-    std::vector<cv::Mat> gapi_ages;
-    std::vector<cv::Mat> gapi_genders;
+    std::vector<cv::Mat> ie_ages, ie_genders, gapi_ages, gapi_genders;
 
     std::vector<cv::Rect> roi_list = {
         cv::Rect(cv::Point{64, 60}, cv::Size{ 96, 96}),
@@ -1286,6 +1280,116 @@ TEST(Infer2, TestStreamingInfer)
     pipeline.stop();
 }
 
+TEST(InferEmptyList, TestStreamingInfer)
+{
+    initTestDataPath();
+    initDLDTDataPath();
+
+    std::string filepath = findDataFile("cv/video/768x576.avi");
+
+    cv::gapi::ie::detail::ParamDesc params;
+    params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
+    params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
+    params.device_id = "CPU";
+
+    // Load IE network, initialize input data using that.
+    cv::Mat in_mat;
+    std::vector<cv::Mat> ie_ages, ie_genders, gapi_ages, gapi_genders;
+
+    // NB: Empty list of roi
+    std::vector<cv::Rect> roi_list;
+
+    using AGInfo = std::tuple<cv::GMat, cv::GMat>;
+    G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
+
+    cv::GMat in;
+    cv::GArray<cv::Rect> roi;
+    cv::GArray<cv::GMat> age, gender;
+
+    std::tie(age, gender) = cv::gapi::infer<AgeGender>(roi, in);
+    cv::GComputation comp(cv::GIn(in, roi), cv::GOut(age, gender));
+
+    auto pp = cv::gapi::ie::Params<AgeGender> {
+        params.model_path, params.weights_path, params.device_id
+    }.cfgOutputLayers({ "age_conv3", "prob" });
+
+
+    std::size_t num_frames = 0u;
+    std::size_t max_frames = 1u;
+
+    cv::VideoCapture cap;
+    cap.open(filepath);
+    if (!cap.isOpened())
+        throw SkipTestException("Video file can not be opened");
+
+    cap >> in_mat;
+    auto pipeline = comp.compileStreaming(cv::compile_args(cv::gapi::networks(pp)));
+    pipeline.setSource(
+            cv::gin(cv::gapi::wip::make_src<cv::gapi::wip::GCaptureSource>(filepath), roi_list));
+
+    pipeline.start();
+    while (num_frames < max_frames && pipeline.pull(cv::gout(gapi_ages, gapi_genders)))
+    {
+        EXPECT_TRUE(gapi_ages.empty());
+        EXPECT_TRUE(gapi_genders.empty());
+    }
+}
+
+TEST(Infer2EmptyList, TestStreamingInfer)
+{
+    initTestDataPath();
+    initDLDTDataPath();
+
+    std::string filepath = findDataFile("cv/video/768x576.avi");
+
+    cv::gapi::ie::detail::ParamDesc params;
+    params.model_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.xml");
+    params.weights_path = findDataFile(SUBDIR + "age-gender-recognition-retail-0013.bin");
+    params.device_id = "CPU";
+
+    // Load IE network, initialize input data using that.
+    cv::Mat in_mat;
+    std::vector<cv::Mat> ie_ages, ie_genders, gapi_ages, gapi_genders;
+
+    // NB: Empty list of roi
+    std::vector<cv::Rect> roi_list;
+
+    using AGInfo = std::tuple<cv::GMat, cv::GMat>;
+    G_API_NET(AgeGender, <AGInfo(cv::GMat)>, "test-age-gender");
+
+    cv::GArray<cv::Rect> rr;
+    cv::GMat in;
+    cv::GArray<cv::GMat> age, gender;
+    std::tie(age, gender) = cv::gapi::infer2<AgeGender>(in, rr);
+
+    cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));
+
+    auto pp = cv::gapi::ie::Params<AgeGender> {
+        params.model_path, params.weights_path, params.device_id
+    }.cfgOutputLayers({ "age_conv3", "prob" });
+
+
+    std::size_t num_frames = 0u;
+    std::size_t max_frames = 1u;
+
+    cv::VideoCapture cap;
+    cap.open(filepath);
+    if (!cap.isOpened())
+        throw SkipTestException("Video file can not be opened");
+
+    cap >> in_mat;
+    auto pipeline = comp.compileStreaming(cv::compile_args(cv::gapi::networks(pp)));
+    pipeline.setSource(
+            cv::gin(cv::gapi::wip::make_src<cv::gapi::wip::GCaptureSource>(filepath), roi_list));
+
+    pipeline.start();
+    while (num_frames < max_frames && pipeline.pull(cv::gout(gapi_ages, gapi_genders)))
+    {
+        EXPECT_TRUE(gapi_ages.empty());
+        EXPECT_TRUE(gapi_genders.empty());
+    }
+}
+
 } // namespace opencv_test
 
 #endif // HAVE_INF_ENGINE
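
Note on the synchronization pattern above: the hotfix removes the two scattered m_sync.wait() calls and places a single wait at the top of GIEExecutable::run(), and the comment marked (1) explains why: the async request's completion callback posts the island outputs, and only after that does run() start collecting new input, so the IOutput object is never touched from two sides at once. The sketch below is illustrative only and models that wait/release handshake with a plain mutex and condition variable; the SyncPrim name and its wait()/release() interface are assumptions for illustration, not the actual G-API types.

// Illustrative sketch: a mutex + condition_variable handshake that models
// waiting for the previous async request before collecting new data.
// SyncPrim, wait() and release() are assumed names, not OpenCV G-API code.
#include <condition_variable>
#include <iostream>
#include <mutex>
#include <thread>
#include <vector>

class SyncPrim {
public:
    // Blocks until the completion callback of the previous request fires.
    void wait() {
        std::unique_lock<std::mutex> lk(m_mutex);
        m_cv.wait(lk, [this]{ return m_done; });
        m_done = false;               // re-arm for the next request
    }
    // Called from the completion callback after the outputs were posted.
    void release() {
        {
            std::lock_guard<std::mutex> lk(m_mutex);
            m_done = true;
        }
        m_cv.notify_one();
    }
private:
    std::mutex m_mutex;
    std::condition_variable m_cv;
    bool m_done = true;               // no request is in flight initially
};

int main() {
    SyncPrim sync;
    std::vector<std::thread> callbacks;

    for (int frame = 0; frame < 3; ++frame) {
        // (1) Wait until the previous request has posted its outputs...
        sync.wait();
        // ...then collect new inputs and start the next async request.
        callbacks.emplace_back([&sync, frame]{
            // Emulated completion callback: post outputs first, release last.
            std::cout << "posted outputs for frame " << frame << "\n";
            sync.release();
        });
    }

    sync.wait();                      // non-streaming epilogue: wait until the last request is over
    for (auto &t : callbacks) t.join();
    return 0;
}

The empty-ROI hunks in InferList/InferList2 are independent of this primitive: when the ROI list is empty they post the (empty) outputs and return before any request is started, which is exactly the behaviour the two new *EmptyList streaming tests check.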