Merge remote-tracking branch 'upstream/3.4' into merge-3.4

commit dd1494eebf

3rdparty/protobuf/CMakeLists.txt (vendored, 5 changes)
@@ -153,6 +153,11 @@ set_target_properties(libprotobuf
    ARCHIVE_OUTPUT_DIRECTORY ${3P_LIBRARY_OUTPUT_PATH}
)

if(ANDROID)
  # https://github.com/opencv/opencv/issues/17282
  target_link_libraries(libprotobuf INTERFACE "-landroid" "-llog")
endif()

get_protobuf_version(Protobuf_VERSION "${PROTOBUF_ROOT}/src")
set(Protobuf_VERSION ${Protobuf_VERSION} CACHE INTERNAL "" FORCE)
@@ -3,7 +3,14 @@
# installation/package
#
# Parameters:
#   MKL_WITH_TBB
#   MKL_ROOT_DIR / ENV{MKLROOT}
#   MKL_INCLUDE_DIR
#   MKL_LIBRARIES
#   MKL_USE_SINGLE_DYNAMIC_LIBRARY - use single dynamic library mkl_rt.lib / libmkl_rt.so
#   MKL_WITH_TBB / MKL_WITH_OPENMP
#
# Extra:
#   MKL_LIB_FIND_PATHS
#
# On return this will define:
#
@@ -13,12 +20,6 @@
#   MKL_LIBRARIES - MKL libraries that are used by OpenCV
#
macro(mkl_find_lib VAR NAME DIRS)
    find_path(${VAR} ${NAME} ${DIRS} NO_DEFAULT_PATH)
    set(${VAR} ${${VAR}}/${NAME})
    unset(${VAR} CACHE)
endmacro()

macro(mkl_fail)
    set(HAVE_MKL OFF)
    set(MKL_ROOT_DIR "${MKL_ROOT_DIR}" CACHE PATH "Path to MKL directory")
@@ -39,43 +40,50 @@ macro(get_mkl_version VERSION_FILE)
    set(MKL_VERSION_STR "${MKL_VERSION_MAJOR}.${MKL_VERSION_MINOR}.${MKL_VERSION_UPDATE}" CACHE STRING "MKL version" FORCE)
endmacro()

OCV_OPTION(MKL_USE_SINGLE_DYNAMIC_LIBRARY "Use MKL Single Dynamic Library through mkl_rt.lib / libmkl_rt.so" OFF)
OCV_OPTION(MKL_WITH_TBB "Use MKL with TBB multithreading" OFF)#ON IF WITH_TBB)
OCV_OPTION(MKL_WITH_OPENMP "Use MKL with OpenMP multithreading" OFF)#ON IF WITH_OPENMP)
if(NOT DEFINED MKL_USE_MULTITHREAD)
    OCV_OPTION(MKL_WITH_TBB "Use MKL with TBB multithreading" OFF)#ON IF WITH_TBB)
    OCV_OPTION(MKL_WITH_OPENMP "Use MKL with OpenMP multithreading" OFF)#ON IF WITH_OPENMP)
if(NOT MKL_ROOT_DIR AND DEFINED MKL_INCLUDE_DIR AND EXISTS "${MKL_INCLUDE_DIR}/mkl.h")
    file(TO_CMAKE_PATH "${MKL_INCLUDE_DIR}" MKL_INCLUDE_DIR)
    get_filename_component(MKL_ROOT_DIR "${MKL_INCLUDE_DIR}/.." ABSOLUTE)
endif()
if(NOT MKL_ROOT_DIR)
    file(TO_CMAKE_PATH "${MKL_ROOT_DIR}" mkl_root_paths)
    if(DEFINED ENV{MKLROOT})
        file(TO_CMAKE_PATH "$ENV{MKLROOT}" path)
        list(APPEND mkl_root_paths "${path}")
    endif()

    if(WITH_MKL AND NOT mkl_root_paths)
        if(WIN32)
            set(ProgramFilesx86 "ProgramFiles(x86)")
            file(TO_CMAKE_PATH "$ENV{${ProgramFilesx86}}" path)
            list(APPEND mkl_root_paths ${path}/IntelSWTools/compilers_and_libraries/windows/mkl)
        endif()
        if(UNIX)
            list(APPEND mkl_root_paths "/opt/intel/mkl")
        endif()
    endif()

    find_path(MKL_ROOT_DIR include/mkl.h PATHS ${mkl_root_paths})
endif()
#check current MKL_ROOT_DIR
if(NOT MKL_ROOT_DIR OR NOT EXISTS "${MKL_ROOT_DIR}/include/mkl.h")
    set(mkl_root_paths "${MKL_ROOT_DIR}")
    if(DEFINED ENV{MKLROOT})
        list(APPEND mkl_root_paths "$ENV{MKLROOT}")
    endif()

    if(WITH_MKL AND NOT mkl_root_paths)
        if(WIN32)
            set(ProgramFilesx86 "ProgramFiles(x86)")
            list(APPEND mkl_root_paths $ENV{${ProgramFilesx86}}/IntelSWTools/compilers_and_libraries/windows/mkl)
        endif()
        if(UNIX)
            list(APPEND mkl_root_paths "/opt/intel/mkl")
        endif()
    endif()

    find_path(MKL_ROOT_DIR include/mkl.h PATHS ${mkl_root_paths})
    mkl_fail()
endif()

set(MKL_INCLUDE_DIRS "${MKL_ROOT_DIR}/include" CACHE PATH "Path to MKL include directory")
set(MKL_INCLUDE_DIR "${MKL_ROOT_DIR}/include" CACHE PATH "Path to MKL include directory")

if(NOT MKL_ROOT_DIR
    OR NOT EXISTS "${MKL_ROOT_DIR}"
    OR NOT EXISTS "${MKL_INCLUDE_DIRS}"
    OR NOT EXISTS "${MKL_INCLUDE_DIRS}/mkl_version.h"
    OR NOT EXISTS "${MKL_INCLUDE_DIR}"
    OR NOT EXISTS "${MKL_INCLUDE_DIR}/mkl_version.h"
)
    mkl_fail()
    mkl_fail()
endif()

get_mkl_version(${MKL_INCLUDE_DIRS}/mkl_version.h)
get_mkl_version(${MKL_INCLUDE_DIR}/mkl_version.h)
#determine arch
if(CMAKE_CXX_SIZEOF_DATA_PTR EQUAL 8)
@@ -95,52 +103,66 @@ else()
    set(MKL_ARCH_SUFFIX "c")
endif()

if(MKL_VERSION_STR VERSION_GREATER "11.3.0" OR MKL_VERSION_STR VERSION_EQUAL "11.3.0")
    set(mkl_lib_find_paths
        ${MKL_ROOT_DIR}/lib)
    foreach(MKL_ARCH ${MKL_ARCH_LIST})
        list(APPEND mkl_lib_find_paths
            ${MKL_ROOT_DIR}/lib/${MKL_ARCH}
            ${MKL_ROOT_DIR}/../tbb/lib/${MKL_ARCH}
            ${MKL_ROOT_DIR}/${MKL_ARCH})
    endforeach()
    set(mkl_lib_find_paths ${MKL_LIB_FIND_PATHS} ${MKL_ROOT_DIR}/lib)
    foreach(MKL_ARCH ${MKL_ARCH_LIST})
        list(APPEND mkl_lib_find_paths
            ${MKL_ROOT_DIR}/lib/${MKL_ARCH}
            ${MKL_ROOT_DIR}/${MKL_ARCH}
        )
    endforeach()

    set(mkl_lib_list "mkl_intel_${MKL_ARCH_SUFFIX}")
if(MKL_USE_SINGLE_DYNAMIC_LIBRARY AND NOT (MKL_VERSION_STR VERSION_LESS "10.3.0"))

    if(MKL_WITH_TBB)
        list(APPEND mkl_lib_list mkl_tbb_thread tbb)
    elseif(MKL_WITH_OPENMP)
        if(MSVC)
            list(APPEND mkl_lib_list mkl_intel_thread libiomp5md)
        else()
            list(APPEND mkl_lib_list mkl_gnu_thread)
        endif()
    # https://software.intel.com/content/www/us/en/develop/articles/a-new-linking-model-single-dynamic-library-mkl_rt-since-intel-mkl-103.html
    set(mkl_lib_list "mkl_rt")

elseif(NOT (MKL_VERSION_STR VERSION_LESS "11.3.0"))

    foreach(MKL_ARCH ${MKL_ARCH_LIST})
        list(APPEND mkl_lib_find_paths
            ${MKL_ROOT_DIR}/../tbb/lib/${MKL_ARCH}
        )
    endforeach()

    set(mkl_lib_list "mkl_intel_${MKL_ARCH_SUFFIX}")

    if(MKL_WITH_TBB)
        list(APPEND mkl_lib_list mkl_tbb_thread tbb)
    elseif(MKL_WITH_OPENMP)
        if(MSVC)
            list(APPEND mkl_lib_list mkl_intel_thread libiomp5md)
        else()
            list(APPEND mkl_lib_list mkl_sequential)
            list(APPEND mkl_lib_list mkl_gnu_thread)
        endif()
    else()
        list(APPEND mkl_lib_list mkl_sequential)
    endif()

    list(APPEND mkl_lib_list mkl_core)
    list(APPEND mkl_lib_list mkl_core)
else()
    message(STATUS "MKL version ${MKL_VERSION_STR} is not supported")
    mkl_fail()
    message(STATUS "MKL version ${MKL_VERSION_STR} is not supported")
    mkl_fail()
endif()
set(MKL_LIBRARIES "")
|
||||
foreach(lib ${mkl_lib_list})
|
||||
find_library(${lib} NAMES ${lib} ${lib}_dll HINTS ${mkl_lib_find_paths})
|
||||
mark_as_advanced(${lib})
|
||||
if(NOT ${lib})
|
||||
mkl_fail()
|
||||
if(NOT MKL_LIBRARIES)
|
||||
set(MKL_LIBRARIES "")
|
||||
foreach(lib ${mkl_lib_list})
|
||||
set(lib_var_name MKL_LIBRARY_${lib})
|
||||
find_library(${lib_var_name} NAMES ${lib} ${lib}_dll HINTS ${mkl_lib_find_paths})
|
||||
mark_as_advanced(${lib_var_name})
|
||||
if(NOT ${lib_var_name})
|
||||
mkl_fail()
|
||||
endif()
|
||||
list(APPEND MKL_LIBRARIES ${${lib}})
|
||||
endforeach()
|
||||
list(APPEND MKL_LIBRARIES ${${lib_var_name}})
|
||||
endforeach()
|
||||
endif()
|
||||
|
||||
message(STATUS "Found MKL ${MKL_VERSION_STR} at: ${MKL_ROOT_DIR}")
|
||||
set(HAVE_MKL ON)
|
||||
set(MKL_ROOT_DIR "${MKL_ROOT_DIR}" CACHE PATH "Path to MKL directory")
|
||||
set(MKL_INCLUDE_DIRS "${MKL_INCLUDE_DIRS}" CACHE PATH "Path to MKL include directory")
|
||||
set(MKL_LIBRARIES "${MKL_LIBRARIES}" CACHE STRING "MKL libraries")
|
||||
if(UNIX AND NOT MKL_LIBRARIES_DONT_HACK)
|
||||
set(MKL_INCLUDE_DIRS "${MKL_INCLUDE_DIR}")
|
||||
set(MKL_LIBRARIES "${MKL_LIBRARIES}")
|
||||
if(UNIX AND NOT MKL_USE_SINGLE_DYNAMIC_LIBRARY AND NOT MKL_LIBRARIES_DONT_HACK)
|
||||
#it's ugly but helps to avoid cyclic lib problem
|
||||
set(MKL_LIBRARIES ${MKL_LIBRARIES} ${MKL_LIBRARIES} ${MKL_LIBRARIES} "-lpthread" "-lm" "-ldl")
|
||||
endif()
|
||||
|
||||
@@ -1670,6 +1670,7 @@ typedef CirclesGridFinderParameters CirclesGridFinderParameters2;
- **CALIB_CB_CLUSTERING** uses a special algorithm for grid detection. It is more robust to
perspective distortions but much more sensitive to background clutter.
@param blobDetector feature detector that finds blobs like dark circles on light background.
                    If `blobDetector` is NULL then `image` represents a Point2f array of candidates.
@param parameters struct for finding circles in a grid pattern.

The function attempts to determine whether the input image contains a grid of circles. If it is, the
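As a quick illustration of the candidates-based mode documented above — a minimal sketch, assuming the Python bindings map a float32 Nx1x2 array to CV_32FC2 (the point values are arbitrary, so the grid will simply not be found):

```python
import numpy as np
import cv2 as cv

# Pre-detected blob centers passed instead of an image; with blobDetector=None
# findCirclesGrid treats the CV_32FC2 input as the candidate list.
candidates = np.array([[166.5, 107], [146, 236], [147, 92], [184, 162]],
                      np.float32).reshape(-1, 1, 2)
found, centers = cv.findCirclesGrid(candidates, (4, 9),
                                    flags=cv.CALIB_CB_ASYMMETRIC_GRID,
                                    blobDetector=None)
# with too few candidates this returns found == False
```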
@@ -2178,13 +2178,6 @@ void drawChessboardCorners( InputOutputArray image, Size patternSize,
        }
    }
}

static int quiet_error(int /*status*/, const char* /*func_name*/,
                       const char* /*err_msg*/, const char* /*file_name*/,
                       int /*line*/, void* /*userdata*/)
{
    return 0;
}

bool findCirclesGrid( InputArray _image, Size patternSize,
                      OutputArray _centers, int flags, const Ptr<FeatureDetector> &blobDetector,
                      const CirclesGridFinderParameters& parameters_)
@@ -2197,15 +2190,22 @@ bool findCirclesGrid( InputArray _image, Size patternSize,
    bool isSymmetricGrid = (flags & CALIB_CB_SYMMETRIC_GRID ) ? true : false;
    CV_Assert(isAsymmetricGrid ^ isSymmetricGrid);

    Mat image = _image.getMat();
    std::vector<Point2f> centers;

    std::vector<KeyPoint> keypoints;
    blobDetector->detect(image, keypoints);
    std::vector<Point2f> points;
    for (size_t i = 0; i < keypoints.size(); i++)
    if (blobDetector)
    {
        points.push_back (keypoints[i].pt);
        std::vector<KeyPoint> keypoints;
        blobDetector->detect(_image, keypoints);
        for (size_t i = 0; i < keypoints.size(); i++)
        {
            points.push_back(keypoints[i].pt);
        }
    }
    else
    {
        CV_CheckTypeEQ(_image.type(), CV_32FC2, "blobDetector must be provided or image must contain a Point2f array (std::vector<Point2f>) with candidates");
        _image.copyTo(points);
    }

    if(flags & CALIB_CB_ASYMMETRIC_GRID)
@@ -2221,64 +2221,59 @@ bool findCirclesGrid( InputArray _image, Size patternSize,
        return !centers.empty();
    }

    bool isValid = false;
    const int attempts = 2;
    const size_t minHomographyPoints = 4;
    Mat H;
    for (int i = 0; i < attempts; i++)
    {
        centers.clear();
        CirclesGridFinder boxFinder(patternSize, points, parameters);
        bool isFound = false;
#define BE_QUIET 1
#if BE_QUIET
        void* oldCbkData;
        ErrorCallback oldCbk = redirectError(quiet_error, 0, &oldCbkData); // FIXIT not thread safe
#endif
        try
        {
            isFound = boxFinder.findHoles();
        }
        catch (const cv::Exception &)
        {

        }
#if BE_QUIET
        redirectError(oldCbk, oldCbkData);
#endif
        if (isFound)
        {
            switch(parameters.gridType)
        centers.clear();
        CirclesGridFinder boxFinder(patternSize, points, parameters);
        try
        {
            case CirclesGridFinderParameters::SYMMETRIC_GRID:
                boxFinder.getHoles(centers);
                break;
            case CirclesGridFinderParameters::ASYMMETRIC_GRID:
                boxFinder.getAsymmetricHoles(centers);
                break;
            default:
                CV_Error(Error::StsBadArg, "Unknown pattern type");
            bool isFound = boxFinder.findHoles();
            if (isFound)
            {
                switch(parameters.gridType)
                {
                case CirclesGridFinderParameters::SYMMETRIC_GRID:
                    boxFinder.getHoles(centers);
                    break;
                case CirclesGridFinderParameters::ASYMMETRIC_GRID:
                    boxFinder.getAsymmetricHoles(centers);
                    break;
                default:
                    CV_Error(Error::StsBadArg, "Unknown pattern type");
                }

                isValid = true;
                break; // done, return result
            }
        }
        catch (const cv::Exception& e)
        {
            CV_UNUSED(e);
            CV_LOG_DEBUG(NULL, "findCirclesGrid2: attempt=" << i << ": " << e.what());
            // nothing, next attempt
        }

        if (i != 0)
            boxFinder.getHoles(centers);
        if (i != attempts - 1)
        {
            Mat orgPointsMat;
            transform(centers, orgPointsMat, H.inv());
            convertPointsFromHomogeneous(orgPointsMat, centers);
            if (centers.size() < minHomographyPoints)
                break;
            H = CirclesGridFinder::rectifyGrid(boxFinder.getDetectedGridSize(), centers, points, points);
        }
        Mat(centers).copyTo(_centers);
        return true;
    }
}

        boxFinder.getHoles(centers);
        if (i != attempts - 1)
        {
            if (centers.size() < minHomographyPoints)
                break;
            H = CirclesGridFinder::rectifyGrid(boxFinder.getDetectedGridSize(), centers, points, points);
        }
    if (!H.empty())  // undone rectification
    {
        Mat orgPointsMat;
        transform(centers, orgPointsMat, H.inv());
        convertPointsFromHomogeneous(orgPointsMat, centers);
    }
    Mat(centers).copyTo(_centers);
    return false;
    return isValid;
}

bool findCirclesGrid(InputArray _image, Size patternSize,
@@ -1614,7 +1614,7 @@ size_t CirclesGridFinder::getFirstCorner(std::vector<Point> &largeCornerIndices,
    int cornerIdx = 0;
    bool waitOutsider = true;

    for(;;)
    for (size_t i = 0; i < cornersCount * 2; ++i)
    {
        if (waitOutsider)
        {
@@ -1624,11 +1624,11 @@ size_t CirclesGridFinder::getFirstCorner(std::vector<Point> &largeCornerIndices,
        else
        {
            if (isInsider[(cornerIdx + 1) % cornersCount])
                break;
                return cornerIdx;
        }

        cornerIdx = (cornerIdx + 1) % cornersCount;
    }

    return cornerIdx;
    CV_Error(Error::StsNoConv, "isInsider array has the same values");
}
@@ -656,5 +656,59 @@ TEST(Calib3d_CirclesPatternDetectorWithClustering, accuracy)
    ASSERT_LE(error, precise_success_error_level);
}

TEST(Calib3d_AsymmetricCirclesPatternDetector, regression_18713)
{
    float pts_[][2] = {
        { 166.5, 107 }, { 146, 236 }, { 147, 92 }, { 184, 162 }, { 150, 185.5 },
        { 215, 105 }, { 270.5, 186 }, { 159, 142 }, { 6, 205.5 }, { 32, 148.5 },
        { 126, 163.5 }, { 181, 208.5 }, { 240.5, 62 }, { 84.5, 76.5 }, { 190, 120.5 },
        { 10, 189 }, { 266, 104 }, { 307.5, 207.5 }, { 97, 184 }, { 116.5, 210 },
        { 114, 139 }, { 84.5, 233 }, { 269.5, 139 }, { 136, 126.5 }, { 120, 107.5 },
        { 129.5, 65.5 }, { 212.5, 140.5 }, { 204.5, 60.5 }, { 207.5, 241 }, { 61.5, 94.5 },
        { 186.5, 61.5 }, { 220, 63 }, { 239, 120.5 }, { 212, 186 }, { 284, 87.5 },
        { 62, 114.5 }, { 283, 61.5 }, { 238.5, 88.5 }, { 243, 159 }, { 245, 208 },
        { 298.5, 158.5 }, { 57, 129 }, { 156.5, 63.5 }, { 192, 90.5 }, { 281, 235.5 },
        { 172, 62.5 }, { 291.5, 119.5 }, { 90, 127 }, { 68.5, 166.5 }, { 108.5, 83.5 },
        { 22, 176 }
    };
    Mat candidates(51, 1, CV_32FC2, (void*)pts_);
    Size patternSize(4, 9);

    std::vector< Point2f > result;
    bool res = false;

    // issue reports about hangs
    EXPECT_NO_THROW(res = findCirclesGrid(candidates, patternSize, result, CALIB_CB_ASYMMETRIC_GRID, Ptr<FeatureDetector>()/*blobDetector=NULL*/));
    EXPECT_FALSE(res);

    if (cvtest::debugLevel > 0)
    {
        std::cout << Mat(candidates) << std::endl;
        std::cout << Mat(result) << std::endl;
        Mat img(Size(400, 300), CV_8UC3, Scalar::all(0));

        std::vector< Point2f > centers;
        candidates.copyTo(centers);

        for (size_t i = 0; i < centers.size(); i++)
        {
            const Point2f& pt = centers[i];
            //printf("{ %g, %g }, \n", pt.x, pt.y);
            circle(img, pt, 5, Scalar(0, 255, 0));
        }
        for (size_t i = 0; i < result.size(); i++)
        {
            const Point2f& pt = result[i];
            circle(img, pt, 10, Scalar(0, 0, 255));
        }
        imwrite("test_18713.png", img);
        if (cvtest::debugLevel >= 10)
        {
            imshow("result", img);
            waitKey();
        }
    }
}

}} // namespace
/* End of file. */
@@ -71,6 +71,14 @@ using std::min;
using namespace cv::dnn::ocl4dnn;
#endif

#ifdef HAVE_HALIDE
#if 0  // size_t is not well supported in Halide operations
typedef size_t HALIDE_DIFF_T;
#else
typedef int HALIDE_DIFF_T;
#endif
#endif

#ifdef HAVE_CUDA
#include "../cuda4dnn/primitives/pooling.hpp"
#include "../cuda4dnn/primitives/roi_pooling.hpp"
@@ -78,6 +86,7 @@ using namespace cv::dnn::ocl4dnn;
using namespace cv::dnn::cuda4dnn;
#endif


namespace cv
{
namespace dnn
@@ -1097,12 +1106,12 @@ public:
        Halide::Buffer<float> inputBuffer = halideBuffer(inputs[0]);
        const int inWidth = inputBuffer.width();
        const int inHeight = inputBuffer.height();
        const size_t kernelHeight = kernel_size[0];
        const size_t kernelWidth = kernel_size[1];
        const size_t strideHeight = strides[0];
        const size_t strideWidth = strides[1];
        const size_t paddingTop = pads_begin[0];
        const size_t paddingLeft = pads_begin[1];
        const HALIDE_DIFF_T kernelHeight = (HALIDE_DIFF_T)kernel_size[0];
        const HALIDE_DIFF_T kernelWidth = (HALIDE_DIFF_T)kernel_size[1];
        const HALIDE_DIFF_T strideHeight = (HALIDE_DIFF_T)strides[0];
        const HALIDE_DIFF_T strideWidth = (HALIDE_DIFF_T)strides[1];
        const HALIDE_DIFF_T paddingTop = (HALIDE_DIFF_T)pads_begin[0];
        const HALIDE_DIFF_T paddingLeft = (HALIDE_DIFF_T)pads_begin[1];

        Halide::Var x("x"), y("y"), c("c"), n("n");
        Halide::Func top = (name.empty() ? Halide::Func() : Halide::Func(name));
@@ -1148,10 +1157,10 @@ public:
        Halide::Buffer<float> inputBuffer = halideBuffer(inputs[0]);

        const int inW = inputBuffer.width(), inH = inputBuffer.height();
        const size_t kernelHeight = kernel_size[0];
        const size_t kernelWidth = kernel_size[1];
        const size_t strideHeight = strides[0];
        const size_t strideWidth = strides[1];
        const HALIDE_DIFF_T kernelHeight = (HALIDE_DIFF_T)kernel_size[0];
        const HALIDE_DIFF_T kernelWidth = (HALIDE_DIFF_T)kernel_size[1];
        const HALIDE_DIFF_T strideHeight = (HALIDE_DIFF_T)strides[0];
        const HALIDE_DIFF_T strideWidth = (HALIDE_DIFF_T)strides[1];
        if ((inW - kernelWidth) % strideWidth || (inH - kernelHeight) % strideHeight)
        {
            CV_Error(cv::Error::StsNotImplemented,
@@ -124,7 +124,7 @@ public:

        Mat& inp = inputs[0];
        Mat& out = outputs[0];
        if (interpolation == "nearest" || interpolation == "opencv_linear" || (interpolation == "bilinear" && halfPixelCenters))
        if ((interpolation == "nearest" && !alignCorners && !halfPixelCenters) || interpolation == "opencv_linear" || (interpolation == "bilinear" && halfPixelCenters))
        {
            InterpolationFlags mode = interpolation == "nearest" ? INTER_NEAREST : INTER_LINEAR;
            for (size_t n = 0; n < inputs[0].size[0]; ++n)
@@ -136,6 +136,54 @@ public:
                }
            }
        }
        else if (interpolation == "nearest")
        {
            const int inpHeight = inp.size[2];
            const int inpWidth = inp.size[3];
            const int inpSpatialSize = inpHeight * inpWidth;
            const int outSpatialSize = outHeight * outWidth;
            const int numPlanes = inp.size[0] * inp.size[1];
            CV_Assert_N(inp.isContinuous(), out.isContinuous());

            Mat inpPlanes = inp.reshape(1, numPlanes * inpHeight);
            Mat outPlanes = out.reshape(1, numPlanes * outHeight);

            float heightOffset = 0.0f;
            float widthOffset = 0.0f;

            if (halfPixelCenters)
            {
                heightOffset = 0.5f * scaleHeight;
                widthOffset = 0.5f * scaleWidth;
            }

            for (int y = 0; y < outHeight; ++y)
            {
                float input_y = y * scaleHeight + heightOffset;
                int y0 = halfPixelCenters ? std::floor(input_y) : lroundf(input_y);
                y0 = std::min(y0, inpHeight - 1);

                const float* inpData_row = inpPlanes.ptr<float>(y0);

                for (int x = 0; x < outWidth; ++x)
                {
                    float input_x = x * scaleWidth + widthOffset;
                    int x0 = halfPixelCenters ? std::floor(input_x) : lroundf(input_x);
                    x0 = std::min(x0, inpWidth - 1);

                    float* outData = outPlanes.ptr<float>(y, x);
                    const float* inpData_row_c = inpData_row;

                    for (int c = 0; c < numPlanes; ++c)
                    {
                        *outData = inpData_row_c[x0];

                        inpData_row_c += inpSpatialSize;
                        outData += outSpatialSize;
                    }
                }
            }
        }
        else if (interpolation == "bilinear")
        {
            const int inpHeight = inp.size[2];
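The index arithmetic in the nearest branch above is compact; a minimal NumPy sketch of the same mapping for a single plane may help (assuming scale = input size / output size, as the layer uses, and ignoring the plane loop):

```python
import numpy as np

def resize_nearest_plane(inp, out_h, out_w, half_pixel_centers=False):
    # Mirrors the scalar loops above for one (H, W) plane.
    in_h, in_w = inp.shape
    scale_h, scale_w = in_h / out_h, in_w / out_w  # assumption: scale = input/output
    off_h = 0.5 * scale_h if half_pixel_centers else 0.0
    off_w = 0.5 * scale_w if half_pixel_centers else 0.0

    def src_idx(i, scale, off, limit):
        f = i * scale + off
        # floor() for half-pixel centers, lroundf()-style rounding otherwise
        j = int(np.floor(f)) if half_pixel_centers else int(np.floor(f + 0.5))
        return min(j, limit - 1)

    out = np.empty((out_h, out_w), inp.dtype)
    for y in range(out_h):
        y0 = src_idx(y, scale_h, off_h, in_h)
        for x in range(out_w):
            out[y, x] = inp[y0, src_idx(x, scale_w, off_w, in_w)]
    return out
```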
@@ -101,6 +101,9 @@ public:
TEST_P(DNNTestNetwork, AlexNet)
{
    applyTestTag(CV_TEST_TAG_MEMORY_1GB);
    if (backend == DNN_BACKEND_HALIDE) // Realization contains wrong number of Images (1) for realizing pipeline with 2 outputs
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);

    processNet("dnn/bvlc_alexnet.caffemodel", "dnn/bvlc_alexnet.prototxt",
               Size(227, 227), "prob",
               target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_alexnet.yml" :
@@ -115,6 +118,9 @@ TEST_P(DNNTestNetwork, ResNet_50)
                (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB),
                CV_TEST_TAG_DEBUG_LONG
    );
    if (backend == DNN_BACKEND_HALIDE) // Realization contains wrong number of Images (1) for realizing pipeline with 2 outputs
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);

    processNet("dnn/ResNet-50-model.caffemodel", "dnn/ResNet-50-deploy.prototxt",
               Size(224, 224), "prob",
               target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_resnet_50.yml" :
@@ -125,6 +131,9 @@ TEST_P(DNNTestNetwork, ResNet_50)

TEST_P(DNNTestNetwork, SqueezeNet_v1_1)
{
    if (backend == DNN_BACKEND_HALIDE) // Realization contains wrong number of Images (1) for realizing pipeline with 2 outputs
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);

    processNet("dnn/squeezenet_v1.1.caffemodel", "dnn/squeezenet_v1.1.prototxt",
               Size(227, 227), "prob",
               target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_squeezenet_v1_1.yml" :
@@ -136,6 +145,9 @@ TEST_P(DNNTestNetwork, SqueezeNet_v1_1)
TEST_P(DNNTestNetwork, GoogLeNet)
{
    applyTestTag(target == DNN_TARGET_CPU ? "" : CV_TEST_TAG_MEMORY_512MB);
    if (backend == DNN_BACKEND_HALIDE) // Realization contains wrong number of Images (1) for realizing pipeline with 2 outputs
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);

    processNet("dnn/bvlc_googlenet.caffemodel", "dnn/bvlc_googlenet.prototxt",
               Size(224, 224), "prob");
    expectNoFallbacksFromIE(net);
@@ -145,6 +157,9 @@ TEST_P(DNNTestNetwork, GoogLeNet)
TEST_P(DNNTestNetwork, Inception_5h)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);
    if (backend == DNN_BACKEND_HALIDE) // Realization contains wrong number of Images (1) for realizing pipeline with 2 outputs
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);

    double l1 = default_l1, lInf = default_lInf;
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && (target == DNN_TARGET_CPU || target == DNN_TARGET_OPENCL))
    {
@@ -162,6 +177,9 @@ TEST_P(DNNTestNetwork, Inception_5h)
TEST_P(DNNTestNetwork, ENet)
{
    applyTestTag(target == DNN_TARGET_CPU ? "" : CV_TEST_TAG_MEMORY_512MB);
    if (backend == DNN_BACKEND_HALIDE) // Realization contains wrong number of Images (1) for realizing pipeline with 2 outputs
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);

    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
@@ -1001,6 +1001,19 @@ TEST_P(Test_TensorFlow_layers, resize_nearest_neighbor)
    runTensorFlowNet("keras_upsampling2d");
}

TEST_P(Test_TensorFlow_layers, resize_nearest_neighbor_align_corners)
{
    runTensorFlowNet("resize_nearest_neighbor", false, 0.0, 0.0, false, "_align_corners");
}

TEST_P(Test_TensorFlow_layers, resize_nearest_neighbor_half_pixel)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);

    runTensorFlowNet("resize_nearest_neighbor", false, 0.0, 0.0, false, "_half_pixel");
}

TEST_P(Test_TensorFlow_layers, fused_resize_conv)
{
    runTensorFlowNet("fused_resize_conv");
@@ -1025,15 +1025,20 @@ void ORB_Impl::detectAndCompute( InputArray _image, InputArray _mask,
    Mat imagePyramid, maskPyramid;
    UMat uimagePyramid, ulayerInfo;

    int level_dy = image.rows + border*2;
    Point level_ofs(0,0);
    Size bufSize((cvRound(image.cols/getScale(0, firstLevel, scaleFactor)) + border*2 + 15) & -16, 0);
    float level0_inv_scale = 1.0f / getScale(0, firstLevel, scaleFactor);
    size_t level0_width = (size_t)cvRound(image.cols * level0_inv_scale);
    size_t level0_height = (size_t)cvRound(image.rows * level0_inv_scale);
    Size bufSize((int)alignSize(level0_width + border*2, 16), 0); // TODO change alignment to 64

    int level_dy = (int)level0_height + border*2;
    Point level_ofs(0, 0);

    for( level = 0; level < nLevels; level++ )
    {
        float scale = getScale(level, firstLevel, scaleFactor);
        layerScale[level] = scale;
        Size sz(cvRound(image.cols/scale), cvRound(image.rows/scale));
        float inv_scale = 1.0f / scale;
        Size sz(cvRound(image.cols * inv_scale), cvRound(image.rows * inv_scale));
        Size wholeSize(sz.width + border*2, sz.height + border*2);
        if( level_ofs.x + wholeSize.width > bufSize.width )
        {
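A rough Python model of the pyramid geometry above, for intuition only — the scaleFactor, level count, border, and image size here are illustrative assumptions, not ORB defaults:

```python
# Rough model of the buffer-width computation in the hunk above.
scale_factor, first_level, n_levels, border = 1.2, 0, 8, 32  # illustrative values
cols, rows = 640, 480

def get_scale(level):
    return scale_factor ** (level - first_level)

def align_size(sz, n):
    # same rounding as cv::alignSize for a power-of-two n
    return (sz + n - 1) & -n

level0_w = round(cols / get_scale(0))
buf_width = align_size(level0_w + border * 2, 16)  # TODO in the patch: align to 64
level_sizes = [(round(cols / get_scale(l)), round(rows / get_scale(l)))
               for l in range(n_levels)]
```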
@@ -90,7 +90,7 @@ TEST(Features2D_ORB, _1996)
    ASSERT_EQ(0, roiViolations);
}

TEST(Features2D_ORB, crash)
TEST(Features2D_ORB, crash_5031)
{
    cv::Mat image = cv::Mat::zeros(cv::Size(1920, 1080), CV_8UC3);

@@ -123,4 +123,23 @@ TEST(Features2D_ORB, crash)
    ASSERT_NO_THROW(orb->compute(image, keypoints, descriptors));
}


TEST(Features2D_ORB, regression_16197)
{
    Mat img(Size(72, 72), CV_8UC1, Scalar::all(0));
    Ptr<ORB> orbPtr = ORB::create();
    orbPtr->setNLevels(5);
    orbPtr->setFirstLevel(3);
    orbPtr->setScaleFactor(1.8);
    orbPtr->setPatchSize(8);
    orbPtr->setEdgeThreshold(8);

    std::vector<KeyPoint> kps;
    Mat fv;

    // exception in debug mode, crash in release
    ASSERT_NO_THROW(orbPtr->detectAndCompute(img, noArray(), kps, fv));
}


}} // namespace
@@ -1344,8 +1344,8 @@ public class ImgprocTest extends OpenCVTestCase {

        RotatedRect rrect = Imgproc.minAreaRect(points);

        assertEquals(new Size(2, 5), rrect.size);
        assertEquals(-90., rrect.angle);
        assertEquals(new Size(5, 2), rrect.size);
        assertEquals(0., rrect.angle);
        assertEquals(new Point(3.5, 2), rrect.center);
    }
@@ -352,7 +352,7 @@ cv::RotatedRect cv::minAreaRect( InputArray _points )
    Point2f out[3];
    RotatedRect box;

    convexHull(_points, hull, true, true);
    convexHull(_points, hull, false, true);

    if( hull.depth() != CV_32F )
    {
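The flipped hull orientation changes the angle convention that minAreaRect reports, which is what the Java test update above reflects. A hedged Python check — the points here are hypothetical, chosen only to match the test's expected center and size:

```python
import numpy as np
import cv2 as cv

# Hypothetical axis-aligned 5x2 box; the Java test above expects
# size (5, 2) with angle 0 where the old code reported (2, 5) with angle -90.
pts = np.array([[1, 1], [6, 1], [6, 3], [1, 3]], np.float32)
(cx, cy), (w, h), angle = cv.minAreaRect(pts)
print((cx, cy), (w, h), angle)  # expected roughly: (3.5, 2.0) (5.0, 2.0) 0.0
```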
@@ -1197,6 +1197,78 @@ void hlineSmoothONa_yzy_a<uint8_t, ufixedpoint16>(const uint8_t* src, int cn, co
        }
    }
}
template <>
void hlineSmoothONa_yzy_a<uint16_t, ufixedpoint32>(const uint16_t* src, int cn, const ufixedpoint32* m, int n, ufixedpoint32* dst, int len, int borderType)
{
    int pre_shift = n / 2;
    int post_shift = n - pre_shift;
    int i = 0;
    for (; i < min(pre_shift, len); i++, dst += cn) // Points that fall left from border
    {
        for (int k = 0; k < cn; k++)
            dst[k] = m[pre_shift - i] * src[k];
        if (borderType != BORDER_CONSTANT)// If BORDER_CONSTANT out of border values are equal to zero and could be skipped
            for (int j = i - pre_shift, mid = 0; j < 0; j++, mid++)
            {
                int src_idx = borderInterpolate(j, len, borderType);
                for (int k = 0; k < cn; k++)
                    dst[k] = dst[k] + m[mid] * src[src_idx*cn + k];
            }
        int j, mid;
        for (j = 1, mid = pre_shift - i + 1; j < min(i + post_shift, len); j++, mid++)
            for (int k = 0; k < cn; k++)
                dst[k] = dst[k] + m[mid] * src[j*cn + k];
        if (borderType != BORDER_CONSTANT)
            for (; j < i + post_shift; j++, mid++)
            {
                int src_idx = borderInterpolate(j, len, borderType);
                for (int k = 0; k < cn; k++)
                    dst[k] = dst[k] + m[mid] * src[src_idx*cn + k];
            }
    }
    i *= cn;
    int lencn = (len - post_shift + 1)*cn;
#if CV_SIMD
    const int VECSZ = v_uint32::nlanes;
    for (; i <= lencn - VECSZ * 2; i += VECSZ * 2, src += VECSZ * 2, dst += VECSZ * 2)
    {
        v_uint32 v_res0, v_res1;
        v_mul_expand(vx_load(src + pre_shift * cn), vx_setall_u16((uint16_t) *((uint32_t*)(m + pre_shift))), v_res0, v_res1);
        for (int j = 0; j < pre_shift; j ++)
        {
            v_uint32 v_add0, v_add1;
            v_mul_expand(vx_load(src + j * cn) + vx_load(src + (n - 1 - j)*cn), vx_setall_u16((uint16_t) *((uint32_t*)(m + j))), v_add0, v_add1);
            v_res0 += v_add0;
            v_res1 += v_add1;
        }
        v_store((uint32_t*)dst, v_res0);
        v_store((uint32_t*)dst + VECSZ, v_res1);
    }
#endif
    for (; i < lencn; i++, src++, dst++)
    {
        *dst = m[pre_shift] * src[pre_shift*cn];
        for (int j = 0; j < pre_shift; j++)
            *dst = *dst + m[j] * src[j*cn] + m[j] * src[(n - 1 - j)*cn];
    }
    i /= cn;
    for (i -= pre_shift; i < len - pre_shift; i++, src += cn, dst += cn) // Points that fall right from border
    {
        for (int k = 0; k < cn; k++)
            dst[k] = m[0] * src[k];
        int j = 1;
        for (; j < len - i; j++)
            for (int k = 0; k < cn; k++)
                dst[k] = dst[k] + m[j] * src[j*cn + k];
        if (borderType != BORDER_CONSTANT)// If BORDER_CONSTANT out of border values are equal to zero and could be skipped
            for (; j < n; j++)
            {
                int src_idx = borderInterpolate(i + j, len, borderType) - i;
                for (int k = 0; k < cn; k++)
                    dst[k] = dst[k] + m[j] * src[src_idx*cn + k];
            }
    }
}
template <typename ET, typename FT>
void vlineSmooth1N(const FT* const * src, const FT* m, int, ET* dst, int len)
{
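The `_yzy_` variants exploit kernel symmetry (m[j] == m[n-1-j]) to pair taps, as the scalar tail loop above shows. A minimal NumPy sketch of that interior computation for cn == 1, border handling omitted:

```python
import numpy as np

def hline_smooth_yzy(src, m):
    # Symmetric odd kernel m; interior points only, mirroring the scalar loop:
    # dst = m[mid] * src[mid] + sum_j m[j] * (left tap + right tap)
    n = len(m)
    pre = n // 2
    dst = np.empty(len(src) - (n - 1), dtype=np.float64)
    for i in range(len(dst)):
        acc = m[pre] * src[i + pre]
        for j in range(pre):
            acc += m[j] * (src[i + j] + src[i + n - 1 - j])  # paired symmetric taps
        dst[i] = acc
    return dst
```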
@@ -1788,6 +1860,62 @@ void vlineSmoothONa_yzy_a<uint8_t, ufixedpoint16>(const ufixedpoint16* const * s
        dst[i] = val;
    }
}
template <>
void vlineSmoothONa_yzy_a<uint16_t, ufixedpoint32>(const ufixedpoint32* const * src, const ufixedpoint32* m, int n, uint16_t* dst, int len)
{
    int i = 0;
#if CV_SIMD
    int pre_shift = n / 2;
    const int VECSZ = v_uint32::nlanes;
    for (; i <= len - 2*VECSZ; i += 2*VECSZ)
    {
        v_uint32 v_src00, v_src10, v_src01, v_src11;
        v_uint64 v_res0, v_res1, v_res2, v_res3;
        v_uint64 v_tmp0, v_tmp1, v_tmp2, v_tmp3, v_tmp4, v_tmp5, v_tmp6, v_tmp7;

        v_uint32 v_mul = vx_setall_u32(*((uint32_t*)(m + pre_shift)));
        const uint32_t* srcp = (const uint32_t*)src[pre_shift] + i;
        v_src00 = vx_load(srcp);
        v_src10 = vx_load(srcp + VECSZ);
        v_mul_expand(v_src00, v_mul, v_res0, v_res1);
        v_mul_expand(v_src10, v_mul, v_res2, v_res3);

        int j = 0;
        for (; j < pre_shift; j++)
        {
            v_mul = vx_setall_u32(*((uint32_t*)(m + j)));

            const uint32_t* srcj0 = (const uint32_t*)src[j] + i;
            const uint32_t* srcj1 = (const uint32_t*)src[n - 1 - j] + i;
            v_src00 = vx_load(srcj0);
            v_src01 = vx_load(srcj1);
            v_mul_expand(v_src00, v_mul, v_tmp0, v_tmp1);
            v_mul_expand(v_src01, v_mul, v_tmp2, v_tmp3);
            v_res0 += v_tmp0 + v_tmp2;
            v_res1 += v_tmp1 + v_tmp3;

            v_src10 = vx_load(srcj0 + VECSZ);
            v_src11 = vx_load(srcj1 + VECSZ);
            v_mul_expand(v_src10, v_mul, v_tmp4, v_tmp5);
            v_mul_expand(v_src11, v_mul, v_tmp6, v_tmp7);
            v_res2 += v_tmp4 + v_tmp6;
            v_res3 += v_tmp5 + v_tmp7;
        }

        v_store(dst + i, v_pack(v_rshr_pack<32>(v_res0, v_res1),
                                v_rshr_pack<32>(v_res2, v_res3)));
    }
#endif
    for (; i < len; i++)
    {
        ufixedpoint64 val = m[0] * src[0][i];
        for (int j = 1; j < n; j++)
        {
            val = val + m[j] * src[j][i];
        }
        dst[i] = (uint16_t)val;
    }
}
template <typename ET, typename FT>
class fixedSmoothInvoker : public ParallelLoopBody
{
@@ -2306,5 +2306,83 @@ TEST(Imgproc_ConvexHull, overflow)
    ASSERT_EQ(hull, hullf);
}

static
bool checkMinAreaRect(const RotatedRect& rr, const Mat& c, double eps = 0.5f)
{
    int N = c.rows;

    Mat rr_pts;
    boxPoints(rr, rr_pts);

    double maxError = 0.0;
    int nfailed = 0;
    for (int i = 0; i < N; i++)
    {
        double d = pointPolygonTest(rr_pts, c.at<Point2f>(i), true);
        maxError = std::max(-d, maxError);
        if (d < -eps)
            nfailed++;
    }

    if (nfailed)
        std::cout << "nfailed=" << nfailed << " (total=" << N << ") maxError=" << maxError << std::endl;
    return nfailed == 0;
}

TEST(Imgproc_minAreaRect, reproducer_18157)
{
    const int N = 168;
    float pts_[N][2] = {
        { 1903, 266 }, { 1897, 267 }, { 1893, 268 }, { 1890, 269 },
        { 1878, 275 }, { 1875, 277 }, { 1872, 279 }, { 1868, 282 },
        { 1862, 287 }, { 1750, 400 }, { 1748, 402 }, { 1742, 407 },
        { 1742, 408 }, { 1740, 410 }, { 1738, 412 }, { 1593, 558 },
        { 1590, 560 }, { 1588, 562 }, { 1586, 564 }, { 1580, 570 },
        { 1443, 709 }, { 1437, 714 }, { 1435, 716 }, { 1304, 848 },
        { 1302, 850 }, { 1292, 860 }, { 1175, 979 }, { 1172, 981 },
        { 1049, 1105 }, { 936, 1220 }, { 933, 1222 }, { 931, 1224 },
        { 830, 1326 }, { 774, 1383 }, { 769, 1389 }, { 766, 1393 },
        { 764, 1396 }, { 762, 1399 }, { 760, 1402 }, { 757, 1408 },
        { 757, 1410 }, { 755, 1413 }, { 754, 1416 }, { 753, 1420 },
        { 752, 1424 }, { 752, 1442 }, { 753, 1447 }, { 754, 1451 },
        { 755, 1454 }, { 757, 1457 }, { 757, 1459 }, { 761, 1467 },
        { 763, 1470 }, { 765, 1473 }, { 767, 1476 }, { 771, 1481 },
        { 779, 1490 }, { 798, 1510 }, { 843, 1556 }, { 847, 1560 },
        { 851, 1564 }, { 863, 1575 }, { 907, 1620 }, { 909, 1622 },
        { 913, 1626 }, { 1154, 1866 }, { 1156, 1868 }, { 1158, 1870 },
        { 1207, 1918 }, { 1238, 1948 }, { 1252, 1961 }, { 1260, 1968 },
        { 1264, 1971 }, { 1268, 1974 }, { 1271, 1975 }, { 1273, 1977 },
        { 1283, 1982 }, { 1286, 1983 }, { 1289, 1984 }, { 1294, 1985 },
        { 1300, 1986 }, { 1310, 1986 }, { 1316, 1985 }, { 1320, 1984 },
        { 1323, 1983 }, { 1326, 1982 }, { 1338, 1976 }, { 1341, 1974 },
        { 1344, 1972 }, { 1349, 1968 }, { 1358, 1960 }, { 1406, 1911 },
        { 1421, 1897 }, { 1624, 1693 }, { 1788, 1528 }, { 1790, 1526 },
        { 1792, 1524 }, { 1794, 1522 }, { 1796, 1520 }, { 1798, 1518 },
        { 1800, 1516 }, { 1919, 1396 }, { 1921, 1394 }, { 2038, 1275 },
        { 2047, 1267 }, { 2048, 1265 }, { 2145, 1168 }, { 2148, 1165 },
        { 2260, 1052 }, { 2359, 952 }, { 2434, 876 }, { 2446, 863 },
        { 2450, 858 }, { 2453, 854 }, { 2455, 851 }, { 2457, 846 },
        { 2459, 844 }, { 2460, 842 }, { 2460, 840 }, { 2462, 837 },
        { 2463, 834 }, { 2464, 830 }, { 2465, 825 }, { 2465, 809 },
        { 2464, 804 }, { 2463, 800 }, { 2462, 797 }, { 2461, 794 },
        { 2456, 784 }, { 2454, 781 }, { 2452, 778 }, { 2450, 775 },
        { 2446, 770 }, { 2437, 760 }, { 2412, 734 }, { 2410, 732 },
        { 2408, 730 }, { 2382, 704 }, { 2380, 702 }, { 2378, 700 },
        { 2376, 698 }, { 2372, 694 }, { 2370, 692 }, { 2368, 690 },
        { 2366, 688 }, { 2362, 684 }, { 2360, 682 }, { 2252, 576 },
        { 2250, 573 }, { 2168, 492 }, { 2166, 490 }, { 2085, 410 },
        { 2026, 352 }, { 1988, 315 }, { 1968, 296 }, { 1958, 287 },
        { 1953, 283 }, { 1949, 280 }, { 1946, 278 }, { 1943, 276 },
        { 1940, 274 }, { 1936, 272 }, { 1934, 272 }, { 1931, 270 },
        { 1928, 269 }, { 1925, 268 }, { 1921, 267 }, { 1915, 266 }
    };

    Mat contour(N, 1, CV_32FC2, (void*)pts_);

    RotatedRect rr = cv::minAreaRect(contour);

    EXPECT_TRUE(checkMinAreaRect(rr, contour)) << rr.center << " " << rr.size << " " << rr.angle;
}

}} // namespace
/* End of file. */
@@ -76,7 +76,7 @@ class Hackathon244Tests(NewOpenCVTests):
        mc, mr = cv.minEnclosingCircle(a)

        be0 = ((150.2511749267578, 150.77322387695312), (158.024658203125, 197.57696533203125), 37.57804489135742)
        br0 = ((161.2974090576172, 154.41793823242188), (199.2301483154297, 207.7177734375), -9.164555549621582)
        br0 = ((161.2974090576172, 154.41793823242188), (207.7177734375, 199.2301483154297), 80.83544921875)
        mc0, mr0 = (160.41790771484375, 144.55152893066406), 136.713500977

        self.check_close_boxes(be, be0, 5, 15)
@@ -870,6 +870,11 @@ bool GStreamerCapture::open(const String &filename_)
        gst_app_sink_set_max_buffers(GST_APP_SINK(sink.get()), 1);
    }

    if (!manualpipeline)
    {
        gst_base_sink_set_sync(GST_BASE_SINK(sink.get()), FALSE);
    }

    //do not emit signals: all calls will be synchronous and blocking
    gst_app_sink_set_emit_signals (GST_APP_SINK(sink.get()), FALSE);
samples/dnn/.gitignore (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
*.caffemodel
*.pb
*.weights
@@ -19,6 +19,36 @@ Check `-h` option to know which values are used by default:
python object_detection.py opencv_fd -h
```

### Sample models

You can download sample models using ```download_models.py```. For example, the following command will download network weights for the OpenCV Face Detector model and store them in the FaceDetector folder:

```bash
python download_models.py --save_dir FaceDetector opencv_fd
```

You can use default configuration files adopted for OpenCV from [here](https://github.com/opencv/opencv_extra/tree/master/testdata/dnn).

You can also use the script to download necessary files from your code. Assume you have the following code inside ```your_script.py```:

```python
from download_models import downloadFile

filepath1 = downloadFile("https://drive.google.com/uc?export=download&id=0B3gersZ2cHIxRm5PMWRoTkdHdHc", None, filename="MobileNetSSD_deploy.caffemodel", save_dir="save_dir_1")
filepath2 = downloadFile("https://drive.google.com/uc?export=download&id=0B3gersZ2cHIxRm5PMWRoTkdHdHc", "994d30a8afaa9e754d17d2373b2d62a7dfbaaf7a", filename="MobileNetSSD_deploy.caffemodel")
print(filepath1)
print(filepath2)
# Your code
```

By running the following commands, you will get the **MobileNetSSD_deploy.caffemodel** file:
```bash
export OPENCV_DOWNLOAD_DATA_PATH=download_folder
python your_script.py
```

**Note** that you can provide a directory using the **save_dir** parameter or via the **OPENCV_SAVE_DIR** environment variable.

#### Face detection
[An original model](https://github.com/opencv/opencv/tree/master/samples/dnn/face_detector)
with single precision floating point weights has been quantized using [TensorFlow framework](https://www.tensorflow.org/).
@@ -48,7 +78,7 @@ AR @[ IoU=0.50:0.95 | area= large | maxDets=100 ] | 0.528 | 0.528 |
```

## References
* [Models downloading script](https://github.com/opencv/opencv_extra/blob/master/testdata/dnn/download_models.py)
* [Models downloading script](https://github.com/opencv/opencv/samples/dnn/download_models.py)
* [Configuration files adopted for OpenCV](https://github.com/opencv/opencv_extra/tree/master/testdata/dnn)
* [How to import models from TensorFlow Object Detection API](https://github.com/opencv/opencv/wiki/TensorFlow-Object-Detection-API)
* [Names of classes from different datasets](https://github.com/opencv/opencv/tree/master/samples/data/dnn)
samples/dnn/download_models.py (new file, 364 lines)
@@ -0,0 +1,364 @@
'''
Helper module to download extra data from Internet
'''
from __future__ import print_function
import os
import cv2
import sys
import yaml
import argparse
import tarfile
import platform
import tempfile
import hashlib
import requests
import shutil
from pathlib import Path
from datetime import datetime
if sys.version_info[0] < 3:
    from urllib2 import urlopen
else:
    from urllib.request import urlopen
import xml.etree.ElementTree as ET

__all__ = ["downloadFile"]

class HashMismatchException(Exception):
    def __init__(self, expected, actual):
        Exception.__init__(self)
        self.expected = expected
        self.actual = actual
    def __str__(self):
        return 'Hash mismatch: expected {} vs actual of {}'.format(self.expected, self.actual)

def getHashsumFromFile(filepath):
    sha = hashlib.sha1()
    if os.path.exists(filepath):
        print(' there is already a file with the same name')
        with open(filepath, 'rb') as f:
            while True:
                buf = f.read(10*1024*1024)
                if not buf:
                    break
                sha.update(buf)
    hashsum = sha.hexdigest()
    return hashsum

def checkHashsum(expected_sha, filepath, silent=True):
    print(' expected SHA1: {}'.format(expected_sha))
    actual_sha = getHashsumFromFile(filepath)
    print(' actual SHA1:{}'.format(actual_sha))
    hashes_matched = expected_sha == actual_sha
    if not hashes_matched and not silent:
        raise HashMismatchException(expected_sha, actual_sha)
    return hashes_matched

def isArchive(filepath):
    return tarfile.is_tarfile(filepath)
class DownloadInstance:
    def __init__(self, **kwargs):
        self.name = kwargs.pop('name')
        self.filename = kwargs.pop('filename')
        self.loader = kwargs.pop('loader', None)
        self.save_dir = kwargs.pop('save_dir')
        self.sha = kwargs.pop('sha', None)

    def __str__(self):
        return 'DownloadInstance <{}>'.format(self.name)

    def get(self):
        print(" Working on " + self.name)
        print(" Getting file " + self.filename)
        if self.sha is None:
            print(' No expected hashsum provided, loading file')
        else:
            filepath = os.path.join(self.save_dir, self.sha, self.filename)
            if checkHashsum(self.sha, filepath):
                print(' hash match - file already exists, skipping')
                return filepath
            else:
                print(' hash didn\'t match, loading file')

        if not os.path.exists(self.save_dir):
            print(' creating directory: ' + self.save_dir)
            os.makedirs(self.save_dir)


        print(' hash check failed - loading')
        assert self.loader
        try:
            self.loader.load(self.filename, self.sha, self.save_dir)
            print(' done')
            print(' file {}'.format(self.filename))
            if self.sha is None:
                download_path = os.path.join(self.save_dir, self.filename)
                self.sha = getHashsumFromFile(download_path)
                new_dir = os.path.join(self.save_dir, self.sha)

                if not os.path.exists(new_dir):
                    os.makedirs(new_dir)
                filepath = os.path.join(new_dir, self.filename)
                if not (os.path.exists(filepath)):
                    shutil.move(download_path, new_dir)
                print(' No expected hashsum provided, actual SHA is {}'.format(self.sha))
            else:
                checkHashsum(self.sha, filepath, silent=False)
        except Exception as e:
            print(" There was some problem with loading file {} for {}".format(self.filename, self.name))
            print(" Exception: {}".format(e))
            return

        print(" Finished " + self.name)
        return filepath
class Loader(object):
    MB = 1024*1024
    BUFSIZE = 10*MB
    def __init__(self, download_name, download_sha, archive_member = None):
        self.download_name = download_name
        self.download_sha = download_sha
        self.archive_member = archive_member

    def load(self, requested_file, sha, save_dir):
        if self.download_sha is None:
            download_dir = save_dir
        else:
            # create a new folder in save_dir to avoid possible name conflicts
            download_dir = os.path.join(save_dir, self.download_sha)
        if not os.path.exists(download_dir):
            os.makedirs(download_dir)
        download_path = os.path.join(download_dir, self.download_name)
        print(" Preparing to download file " + self.download_name)
        if checkHashsum(self.download_sha, download_path):
            print(' hash match - file already exists, no need to download')
        else:
            filesize = self.download(download_path)
            print(' Downloaded {} with size {} Mb'.format(self.download_name, filesize/self.MB))
            if self.download_sha is not None:
                checkHashsum(self.download_sha, download_path, silent=False)
        if self.download_name == requested_file:
            return
        else:
            if isArchive(download_path):
                if sha is not None:
                    extract_dir = os.path.join(save_dir, sha)
                else:
                    extract_dir = save_dir
                if not os.path.exists(extract_dir):
                    os.makedirs(extract_dir)
                self.extract(requested_file, download_path, extract_dir)
            else:
                raise Exception("Downloaded file has different name")

    def download(self, filepath):
        print("Warning: download is not implemented, this is a base class")
        return 0

    def extract(self, requested_file, archive_path, save_dir):
        filepath = os.path.join(save_dir, requested_file)
        try:
            with tarfile.open(archive_path) as f:
                if self.archive_member is None:
                    pathDict = dict((os.path.split(elem)[1], os.path.split(elem)[0]) for elem in f.getnames())
                    self.archive_member = pathDict[requested_file]
                assert self.archive_member in f.getnames()
                self.save(filepath, f.extractfile(self.archive_member))
        except Exception as e:
            print(' catch {}'.format(e))

    def save(self, filepath, r):
        with open(filepath, 'wb') as f:
            print(' progress ', end="")
            sys.stdout.flush()
            while True:
                buf = r.read(self.BUFSIZE)
                if not buf:
                    break
                f.write(buf)
                print('>', end="")
                sys.stdout.flush()
class URLLoader(Loader):
    def __init__(self, download_name, download_sha, url, archive_member = None):
        super(URLLoader, self).__init__(download_name, download_sha, archive_member)
        self.download_name = download_name
        self.download_sha = download_sha
        self.url = url

    def download(self, filepath):
        r = urlopen(self.url, timeout=60)
        self.printRequest(r)
        self.save(filepath, r)
        return os.path.getsize(filepath)

    def printRequest(self, r):
        def getMB(r):
            d = dict(r.info())
            for c in ['content-length', 'Content-Length']:
                if c in d:
                    return int(d[c]) / self.MB
            return '<unknown>'
        print(' {} {} [{} Mb]'.format(r.getcode(), r.msg, getMB(r)))

class GDriveLoader(Loader):
    BUFSIZE = 1024 * 1024
    PROGRESS_SIZE = 10 * 1024 * 1024
    def __init__(self, download_name, download_sha, gid, archive_member = None):
        super(GDriveLoader, self).__init__(download_name, download_sha, archive_member)
        self.download_name = download_name
        self.download_sha = download_sha
        self.gid = gid

    def download(self, filepath):
        session = requests.Session()  # re-use cookies

        URL = "https://docs.google.com/uc?export=download"
        response = session.get(URL, params = { 'id' : self.gid }, stream = True)

        def get_confirm_token(response):  # in case of large files
            for key, value in response.cookies.items():
                if key.startswith('download_warning'):
                    return value
            return None
        token = get_confirm_token(response)

        if token:
            params = { 'id' : self.gid, 'confirm' : token }
            response = session.get(URL, params = params, stream = True)

        sz = 0
        progress_sz = self.PROGRESS_SIZE
        with open(filepath, "wb") as f:
            for chunk in response.iter_content(self.BUFSIZE):
                if not chunk:
                    continue  # keep-alive

                f.write(chunk)
                sz += len(chunk)
                if sz >= progress_sz:
                    progress_sz += self.PROGRESS_SIZE
                    print('>', end='')
                    sys.stdout.flush()
        print('')
        return sz
def produceDownloadInstance(instance_name, filename, sha, url, save_dir, download_name=None, download_sha=None, archive_member=None):
    spec_param = url
    loader = URLLoader
    if download_name is None:
        download_name = filename
    if download_sha is None:
        download_sha = sha
    if "drive.google.com" in url:
        token = ""
        token_part = url.rsplit('/', 1)[-1]
        if "&id=" not in token_part:
            token_part = url.rsplit('/', 1)[-2]
        for param in token_part.split("&"):
            if param.startswith("id="):
                token = param[3:]
        if token:
            loader = GDriveLoader
            spec_param = token
        else:
            print("Warning: possibly wrong Google Drive link")
    return DownloadInstance(
        name=instance_name,
        filename=filename,
        sha=sha,
        save_dir=save_dir,
        loader=loader(download_name, download_sha, spec_param, archive_member)
    )
def getSaveDir():
    env_path = os.environ.get("OPENCV_DOWNLOAD_DATA_PATH", None)
    if env_path:
        save_dir = env_path
    else:
        # TODO reuse binding function cv2.utils.fs.getCacheDirectory when issue #19011 is fixed
        if platform.system() == "Darwin":
            #On Apple devices
            temp_env = os.environ.get("TMPDIR", None)
            if temp_env is None or not os.path.isdir(temp_env):
                temp_dir = Path("/tmp")
                print("Using world accessible cache directory. This may not be secure: ", temp_dir)
            else:
                temp_dir = temp_env
        elif platform.system() == "Windows":
            temp_dir = tempfile.gettempdir()
        else:
            xdg_cache_env = os.environ.get("XDG_CACHE_HOME", None)
            if (xdg_cache_env and xdg_cache_env[0] and os.path.isdir(xdg_cache_env)):
                temp_dir = xdg_cache_env
            else:
                home_env = os.environ.get("HOME", None)
                if (home_env and home_env[0] and os.path.isdir(home_env)):
                    home_path = os.path.join(home_env, ".cache/")
                    if os.path.isdir(home_path):
                        temp_dir = home_path
                else:
                    temp_dir = tempfile.gettempdir()
                    print("Using world accessible cache directory. This may not be secure: ", temp_dir)

        save_dir = os.path.join(temp_dir, "downloads")
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    return save_dir
def downloadFile(url, sha=None, save_dir=None, filename=None):
    if save_dir is None:
        save_dir = getSaveDir()
    if filename is None:
        filename = "download_" + datetime.now().__str__()
    name = filename
    return produceDownloadInstance(name, filename, sha, url, save_dir).get()

def parseMetalinkFile(metalink_filepath, save_dir):
    NS = {'ml': 'urn:ietf:params:xml:ns:metalink'}
    models = []
    for file_elem in ET.parse(metalink_filepath).getroot().findall('ml:file', NS):
        url = file_elem.find('ml:url', NS).text
        fname = file_elem.attrib['name']
        name = file_elem.find('ml:identity', NS).text
        hash_sum = file_elem.find('ml:hash', NS).text
        models.append(produceDownloadInstance(name, fname, hash_sum, url, save_dir))
    return models

def parseYAMLFile(yaml_filepath, save_dir):
    models = []
    with open(yaml_filepath, 'r') as stream:
        data_loaded = yaml.safe_load(stream)
        for name, params in data_loaded.items():
            load_info = params.get("load_info", None)
            if load_info:
                fname = os.path.basename(params.get("model"))
                hash_sum = load_info.get("sha1")
                url = load_info.get("url")
                download_sha = load_info.get("download_sha")
                download_name = load_info.get("download_name")
                archive_member = load_info.get("member")
                models.append(produceDownloadInstance(name, fname, hash_sum, url, save_dir,
                    download_name=download_name, download_sha=download_sha, archive_member=archive_member))

    return models

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='This is a utility script for downloading DNN models for samples.')

    parser.add_argument('--save_dir', action="store", default=os.getcwd(),
                        help='Path to the directory to store downloaded files')
    parser.add_argument('model_name', type=str, default="", nargs='?', action="store",
                        help='name of the model to download')
    args = parser.parse_args()
    models = []
    save_dir = args.save_dir
    selected_model_name = args.model_name
    models.extend(parseMetalinkFile('face_detector/weights.meta4', save_dir))
    models.extend(parseYAMLFile('models.yml', save_dir))
    for m in models:
        print(m)
        if selected_model_name and not m.name.startswith(selected_model_name):
            continue
        print('Model: ' + selected_model_name)
        m.get()
@ -1,74 +0,0 @@
|
||||
#!/usr/bin/env python

from __future__ import print_function
import hashlib
import time
import sys
import xml.etree.ElementTree as ET
if sys.version_info[0] < 3:
    from urllib2 import urlopen
else:
    from urllib.request import urlopen

class HashMismatchException(Exception):
    def __init__(self, expected, actual):
        Exception.__init__(self)
        self.expected = expected
        self.actual = actual
    def __str__(self):
        return 'Hash mismatch: {} vs {}'.format(self.expected, self.actual)

class MetalinkDownloader(object):
    BUFSIZE = 10*1024*1024
    NS = {'ml': 'urn:ietf:params:xml:ns:metalink'}
    tick = 0

    def download(self, metalink_file):
        status = True
        for file_elem in ET.parse(metalink_file).getroot().findall('ml:file', self.NS):
            url = file_elem.find('ml:url', self.NS).text
            fname = file_elem.attrib['name']
            hash_sum = file_elem.find('ml:hash', self.NS).text
            print('*** {}'.format(fname))
            try:
                self.verify(hash_sum, fname)
            except Exception as ex:
                print(' {}'.format(ex))
                try:
                    print(' {}'.format(url))
                    with open(fname, 'wb') as file_stream:
                        self.buffered_read(urlopen(url), file_stream.write)
                    self.verify(hash_sum, fname)
                except Exception as ex:
                    print(' {}'.format(ex))
                    print(' FAILURE')
                    status = False
                    continue
            print(' SUCCESS')
        return status

    def print_progress(self, msg, timeout = 0):
        if time.time() - self.tick > timeout:
            print(msg, end='')
            sys.stdout.flush()
            self.tick = time.time()

    def buffered_read(self, in_stream, processing):
        self.print_progress(' >')
        while True:
            buf = in_stream.read(self.BUFSIZE)
            if not buf:
                break
            processing(buf)
            self.print_progress('>', 5)
        print(' done')

    def verify(self, hash_sum, fname):
        sha = hashlib.sha1()
        with open(fname, 'rb') as file_stream:
            self.buffered_read(file_stream, sha.update)
        if hash_sum != sha.hexdigest():
            raise HashMismatchException(hash_sum, sha.hexdigest())

if __name__ == '__main__':
    sys.exit(0 if MetalinkDownloader().download('weights.meta4') else 1)
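
The removed downloader still illustrates the one pattern both scripts rely on: hashing in BUFSIZE chunks so weight files of hundreds of megabytes never have to be held in memory at once. The same logic in a more idiomatic form (an equivalent sketch, not code from either script):

import hashlib

def sha1_of(fname, bufsize=10 * 1024 * 1024):
    sha = hashlib.sha1()
    with open(fname, 'rb') as f:
        # iter() keeps calling f.read(bufsize) until it returns b'' at EOF
        for chunk in iter(lambda: f.read(bufsize), b''):
            sha.update(chunk)
    return sha.hexdigest()
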
@ -1,12 +1,12 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <metalink xmlns="urn:ietf:params:xml:ns:metalink">
   <file name="res10_300x300_ssd_iter_140000_fp16.caffemodel">
-    <identity>OpenCV face detector FP16 weights</identity>
+    <identity>opencv_face_detector_fp16</identity>
     <hash type="sha-1">31fc22bfdd907567a04bb45b7cfad29966caddc1</hash>
     <url>https://raw.githubusercontent.com/opencv/opencv_3rdparty/dnn_samples_face_detector_20180205_fp16/res10_300x300_ssd_iter_140000_fp16.caffemodel</url>
   </file>
   <file name="opencv_face_detector_uint8.pb">
-    <identity>OpenCV face detector UINT8 weights</identity>
+    <identity>opencv_face_detector_uint8</identity>
     <hash type="sha-1">4f2fdf6f231d759d7bbdb94353c5a68690f3d2ae</hash>
     <url>https://raw.githubusercontent.com/opencv/opencv_3rdparty/dnn_samples_face_detector_20180220_uint8/opencv_face_detector_uint8.pb</url>
   </file>

@ -1,11 +1,14 @@
-%YAML:1.0
-
+%YAML 1.0
+---
 ################################################################################
 # Object detection models.
 ################################################################################

 # OpenCV's face detection network
 opencv_fd:
+  load_info:
+    url: "https://github.com/opencv/opencv_3rdparty/raw/dnn_samples_face_detector_20170830/res10_300x300_ssd_iter_140000.caffemodel"
+    sha1: "15aa726b4d46d9f023526d85537db81cbc8dd566"
   model: "opencv_face_detector.caffemodel"
   config: "opencv_face_detector.prototxt"
   mean: [104, 177, 123]
@ -19,6 +22,9 @@ opencv_fd:
 # YOLO object detection family from Darknet (https://pjreddie.com/darknet/yolo/)
 # Might be used for all YOLOv2, TinyYolov2, YOLOv3, YOLOv4 and TinyYolov4
 yolo:
+  load_info:
+    url: "https://pjreddie.com/media/files/yolov3.weights"
+    sha1: "520878f12e97cf820529daea502acca380f1cb8e"
   model: "yolov3.weights"
   config: "yolov3.cfg"
   mean: [0, 0, 0]
@ -30,6 +36,9 @@ yolo:
   sample: "object_detection"

 tiny-yolo-voc:
+  load_info:
+    url: "https://pjreddie.com/media/files/yolov2-tiny-voc.weights"
+    sha1: "24b4bd049fc4fa5f5e95f684a8967e65c625dff9"
   model: "tiny-yolo-voc.weights"
   config: "tiny-yolo-voc.cfg"
   mean: [0, 0, 0]
@ -42,6 +51,9 @@ tiny-yolo-voc:

 # Caffe implementation of SSD model from https://github.com/chuanqi305/MobileNet-SSD
 ssd_caffe:
+  load_info:
+    url: "https://drive.google.com/uc?export=download&id=0B3gersZ2cHIxRm5PMWRoTkdHdHc"
+    sha1: "994d30a8afaa9e754d17d2373b2d62a7dfbaaf7a"
   model: "MobileNetSSD_deploy.caffemodel"
   config: "MobileNetSSD_deploy.prototxt"
   mean: [127.5, 127.5, 127.5]
@ -54,6 +66,12 @@ ssd_caffe:

 # TensorFlow implementation of SSD model from https://github.com/tensorflow/models/tree/master/research/object_detection
 ssd_tf:
+  load_info:
+    url: "http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_coco_2017_11_17.tar.gz"
+    sha1: "9e4bcdd98f4c6572747679e4ce570de4f03a70e2"
+    download_sha: "6157ddb6da55db2da89dd561eceb7f944928e317"
+    download_name: "ssd_mobilenet_v1_coco_2017_11_17.tar.gz"
+    member: "ssd_mobilenet_v1_coco_2017_11_17/frozen_inference_graph.pb"
   model: "ssd_mobilenet_v1_coco_2017_11_17.pb"
   config: "ssd_mobilenet_v1_coco_2017_11_17.pbtxt"
   mean: [0, 0, 0]
@ -66,6 +84,12 @@ ssd_tf:

 # TensorFlow implementation of Faster-RCNN model from https://github.com/tensorflow/models/tree/master/research/object_detection
 faster_rcnn_tf:
+  load_info:
+    url: "http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_v2_coco_2018_01_28.tar.gz"
+    sha1: "f2e4bf386b9bb3e25ddfcbbd382c20f417e444f3"
+    download_sha: "c710f25e5c6a3ce85fe793d5bf266d581ab1c230"
+    download_name: "faster_rcnn_inception_v2_coco_2018_01_28.tar.gz"
+    member: "faster_rcnn_inception_v2_coco_2018_01_28/frozen_inference_graph.pb"
   model: "faster_rcnn_inception_v2_coco_2018_01_28.pb"
   config: "faster_rcnn_inception_v2_coco_2018_01_28.pbtxt"
   mean: [0, 0, 0]
@ -81,6 +105,9 @@ faster_rcnn_tf:

 # SqueezeNet v1.1 from https://github.com/DeepScale/SqueezeNet
 squeezenet:
+  load_info:
+    url: "https://raw.githubusercontent.com/DeepScale/SqueezeNet/b5c3f1a23713c8b3fd7b801d229f6b04c64374a5/SqueezeNet_v1.1/squeezenet_v1.1.caffemodel"
+    sha1: "3397f026368a45ae236403ccc81cfcbe8ebe1bd0"
   model: "squeezenet_v1.1.caffemodel"
   config: "squeezenet_v1.1.prototxt"
   mean: [0, 0, 0]
@ -93,6 +120,9 @@ squeezenet:

 # Googlenet from https://github.com/BVLC/caffe/tree/master/models/bvlc_googlenet
 googlenet:
+  load_info:
+    url: "http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel"
+    sha1: "405fc5acd08a3bb12de8ee5e23a96bec22f08204"
   model: "bvlc_googlenet.caffemodel"
   config: "bvlc_googlenet.prototxt"
   mean: [104, 117, 123]
@ -110,6 +140,9 @@ googlenet:
 # ENet road scene segmentation network from https://github.com/e-lab/ENet-training
 # Works fine for different input sizes.
 enet:
+  load_info:
+    url: "https://www.dropbox.com/s/tdde0mawbi5dugq/Enet-model-best.net?dl=1"
+    sha1: "b4123a73bf464b9ebe9cfc4ab9c2d5c72b161315"
   model: "Enet-model-best.net"
   mean: [0, 0, 0]
   scale: 0.00392
@ -120,6 +153,9 @@ enet:
   sample: "segmentation"

 fcn8s:
+  load_info:
+    url: "http://dl.caffe.berkeleyvision.org/fcn8s-heavy-pascal.caffemodel"
+    sha1: "c449ea74dd7d83751d1357d6a8c323fcf4038962"
   model: "fcn8s-heavy-pascal.caffemodel"
   config: "fcn8s-heavy-pascal.prototxt"
   mean: [0, 0, 0]
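
The ssd_tf and faster_rcnn_tf entries add three archive-specific fields on top of url/sha1: download_name (the tar.gz that is actually fetched), download_sha (checksum of that archive, as opposed to sha1, which covers the extracted graph), and member (the path of the single file to keep from the archive). A hedged sketch of how a downloader can consume these fields; this illustrates their semantics and is not the sample's actual implementation:

import hashlib
import tarfile

def fetch_member(archive_path, download_sha, member, out_path):
    # 1. verify the archive itself against 'download_sha'
    sha = hashlib.sha1()
    with open(archive_path, 'rb') as f:
        for chunk in iter(lambda: f.read(10 * 1024 * 1024), b''):
            sha.update(chunk)
    if sha.hexdigest() != download_sha:
        raise Exception('archive hash mismatch for ' + archive_path)
    # 2. extract only 'member' and store it under the configured model name
    with tarfile.open(archive_path) as tar:
        src = tar.extractfile(member)
        with open(out_path, 'wb') as dst:
            dst.write(src.read())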