Merge pull request #18624 from qchateau:similarity-mask
* support similarity masks
* add test for similarity threshold
* short license in test
* use UMat in buildSimilarityMask
* fix win32 warnings
* fix test indentation
* fix umat/mat sync
* no in-place argument for erode/dilate
parent 3a99fb9d34
commit ea1e3fb90d
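In short, the PR adds a setSimilarityThreshold() option to the GainCompensator, ChannelsCompensator and BlocksCompensator families: overlap pixels whose color difference between the two images exceeds the threshold are excluded from gain estimation. Below is a minimal usage sketch modeled on the new test; the image paths, corner offsets, the fully-valid masks and the 0.1 threshold are illustrative placeholders, not values mandated by the change.

    #include "opencv2/imgcodecs.hpp"
    #include "opencv2/stitching/detail/exposure_compensate.hpp"

    using namespace cv;

    int main()
    {
        // Two already-warped, overlapping images and their top-left corners
        // in panorama coordinates (paths and offsets are placeholders).
        UMat img1, img2;
        imread("left.jpg").copyTo(img1);
        imread("right.jpg").copyTo(img2);

        UMat mask1(img1.size(), CV_8U, Scalar::all(255));
        UMat mask2(img2.size(), CV_8U, Scalar::all(255));
        uchar valid = 255;

        detail::BlocksChannelsCompensator compensator;
        // New in this PR: overlap pixels whose normalized color difference
        // exceeds the threshold are ignored when estimating gains.
        compensator.setSimilarityThreshold(0.1);

        compensator.feed(
            {Point(0, 0), Point(100, 0)},
            {img1, img2},
            {{mask1, valid}, {mask2, valid}});

        compensator.apply(0, Point(0, 0), img1, mask1);
        compensator.apply(1, Point(100, 0), img2, mask2);
        return 0;
    }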
@@ -115,7 +115,7 @@ public:
     CV_WRAP GainCompensator()
             : GainCompensator(1) {}
     CV_WRAP GainCompensator(int nr_feeds)
-            : nr_feeds_(nr_feeds) {}
+            : nr_feeds_(nr_feeds), similarity_threshold_(1) {}
     void feed(const std::vector<Point> &corners, const std::vector<UMat> &images,
               const std::vector<std::pair<UMat,uchar> > &masks) CV_OVERRIDE;
     void singleFeed(const std::vector<Point> &corners, const std::vector<UMat> &images,
@@ -125,11 +125,18 @@ public:
     CV_WRAP void setMatGains(std::vector<Mat>& umv) CV_OVERRIDE ;
     CV_WRAP void setNrFeeds(int nr_feeds) { nr_feeds_ = nr_feeds; }
     CV_WRAP int getNrFeeds() { return nr_feeds_; }
+    CV_WRAP void setSimilarityThreshold(double similarity_threshold) { similarity_threshold_ = similarity_threshold; }
+    CV_WRAP double getSimilarityThreshold() const { return similarity_threshold_; }
+    void prepareSimilarityMask(const std::vector<Point> &corners, const std::vector<UMat> &images);
     std::vector<double> gains() const;

 private:
+    UMat buildSimilarityMask(InputArray src_array1, InputArray src_array2);
+
     Mat_<double> gains_;
     int nr_feeds_;
+    double similarity_threshold_;
+    std::vector<UMat> similarities_;
 };

 /** @brief Exposure compensator which tries to remove exposure related artifacts by adjusting image
@@ -138,7 +145,8 @@ intensities on each channel independently.
 class CV_EXPORTS_W ChannelsCompensator : public ExposureCompensator
 {
 public:
-    CV_WRAP ChannelsCompensator(int nr_feeds=1) : nr_feeds_(nr_feeds) {}
+    CV_WRAP ChannelsCompensator(int nr_feeds=1)
+        : nr_feeds_(nr_feeds), similarity_threshold_(1) {}
     void feed(const std::vector<Point> &corners, const std::vector<UMat> &images,
               const std::vector<std::pair<UMat,uchar> > &masks) CV_OVERRIDE;
     CV_WRAP void apply(int index, Point corner, InputOutputArray image, InputArray mask) CV_OVERRIDE;
@@ -146,11 +154,14 @@ public:
     CV_WRAP void setMatGains(std::vector<Mat>& umv) CV_OVERRIDE;
     CV_WRAP void setNrFeeds(int nr_feeds) { nr_feeds_ = nr_feeds; }
     CV_WRAP int getNrFeeds() { return nr_feeds_; }
+    CV_WRAP void setSimilarityThreshold(double similarity_threshold) { similarity_threshold_ = similarity_threshold; }
+    CV_WRAP double getSimilarityThreshold() const { return similarity_threshold_; }
     std::vector<Scalar> gains() const { return gains_; }

 private:
     std::vector<Scalar> gains_;
     int nr_feeds_;
+    double similarity_threshold_;
 };

 /** @brief Exposure compensator which tries to remove exposure related artifacts by adjusting image blocks.
@@ -159,12 +170,15 @@ class CV_EXPORTS_W BlocksCompensator : public ExposureCompensator
 {
 public:
     BlocksCompensator(int bl_width=32, int bl_height=32, int nr_feeds=1)
-            : bl_width_(bl_width), bl_height_(bl_height), nr_feeds_(nr_feeds), nr_gain_filtering_iterations_(2) {}
+            : bl_width_(bl_width), bl_height_(bl_height), nr_feeds_(nr_feeds), nr_gain_filtering_iterations_(2),
+              similarity_threshold_(1) {}
     CV_WRAP void apply(int index, Point corner, InputOutputArray image, InputArray mask) CV_OVERRIDE;
     CV_WRAP void getMatGains(CV_OUT std::vector<Mat>& umv) CV_OVERRIDE;
     CV_WRAP void setMatGains(std::vector<Mat>& umv) CV_OVERRIDE;
     CV_WRAP void setNrFeeds(int nr_feeds) { nr_feeds_ = nr_feeds; }
     CV_WRAP int getNrFeeds() { return nr_feeds_; }
+    CV_WRAP void setSimilarityThreshold(double similarity_threshold) { similarity_threshold_ = similarity_threshold; }
+    CV_WRAP double getSimilarityThreshold() const { return similarity_threshold_; }
     CV_WRAP void setBlockSize(int width, int height) { bl_width_ = width; bl_height_ = height; }
     CV_WRAP void setBlockSize(Size size) { setBlockSize(size.width, size.height); }
     CV_WRAP Size getBlockSize() const { return Size(bl_width_, bl_height_); }
@@ -184,6 +198,7 @@ private:
     std::vector<UMat> gain_maps_;
     int nr_feeds_;
     int nr_gain_filtering_iterations_;
+    double similarity_threshold_;
 };

 /** @brief Exposure compensator which tries to remove exposure related artifacts by adjusting image block
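Conceptually, the new similarity mask declared in the header changes above marks an overlap pixel as usable when the color difference between the two images, with each channel normalized to [0, 1], stays within the threshold; the resulting binary mask is then cleaned with a 3x3 erode followed by a dilate. A threshold of 1 (the constructors' default) disables the mask entirely. The following is a condensed restatement of the per-pixel rule for illustration only, not part of the diff; the actual implementation appears in the .cpp hunks below.

    #include "opencv2/core.hpp"

    using namespace cv;

    // Per-pixel test for 8-bit, 3-channel images: the pixel is kept (255) when
    // the L2 norm of the normalized color difference does not exceed the threshold.
    static uchar similarAt(const Mat& a, const Mat& b, int y, int x, double threshold)
    {
        Vec3f diff = Vec3f(a.at<Vec3b>(y, x)) - Vec3f(b.at<Vec3b>(y, x));
        return norm(diff * (1.f / 255.f)) <= threshold ? 255 : 0;
    }

With the 0.1 threshold used in the new test, a saturated artifact (a pure-red patch replacing the original pixels) yields a normalized difference close to 1 and is excluded from gain estimation, while small compression noise of a few gray levels stays well below the threshold and is kept.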
@@ -90,6 +90,7 @@ void GainCompensator::feed(const std::vector<Point> &corners, const std::vector<

     const int num_images = static_cast<int>(images.size());
     Mat accumulated_gains;
+    prepareSimilarityMask(corners, images);

     for (int n = 0; n < nr_feeds_; ++n)
     {
@@ -133,6 +134,8 @@ void GainCompensator::singleFeed(const std::vector<Point> &corners, const std::v
     Mat subimg1, subimg2;
     Mat_<uchar> submask1, submask2, intersect;

+    std::vector<UMat>::iterator similarity_it = similarities_.begin();
+
     for (int i = 0; i < num_images; ++i)
     {
         for (int j = i; j < num_images; ++j)
@@ -147,6 +150,13 @@ void GainCompensator::singleFeed(const std::vector<Point> &corners, const std::v
             submask2 = masks[j].first(Rect(roi.tl() - corners[j], roi.br() - corners[j])).getMat(ACCESS_READ);
             intersect = (submask1 == masks[i].second) & (submask2 == masks[j].second);

+            if (!similarities_.empty())
+            {
+                CV_Assert(similarity_it != similarities_.end());
+                UMat similarity = *similarity_it++;
+                bitwise_and(intersect, similarity, intersect);
+            }
+
             int intersect_count = countNonZero(intersect);
             N(i, j) = N(j, i) = std::max(1, intersect_count);

@@ -298,6 +308,88 @@ void GainCompensator::setMatGains(std::vector<Mat>& umv)
     }
 }

+void GainCompensator::prepareSimilarityMask(
+    const std::vector<Point> &corners, const std::vector<UMat> &images)
+{
+    if (similarity_threshold_ >= 1)
+    {
+        LOGLN("  skipping similarity mask: disabled");
+        return;
+    }
+    if (!similarities_.empty())
+    {
+        LOGLN("  skipping similarity mask: already set");
+        return;
+    }
+
+    LOGLN("  calculating similarity mask");
+    const int num_images = static_cast<int>(images.size());
+    for (int i = 0; i < num_images; ++i)
+    {
+        for (int j = i; j < num_images; ++j)
+        {
+            Rect roi;
+            if (overlapRoi(corners[i], corners[j], images[i].size(), images[j].size(), roi))
+            {
+                UMat subimg1 = images[i](Rect(roi.tl() - corners[i], roi.br() - corners[i]));
+                UMat subimg2 = images[j](Rect(roi.tl() - corners[j], roi.br() - corners[j]));
+                UMat similarity = buildSimilarityMask(subimg1, subimg2);
+                similarities_.push_back(similarity);
+            }
+        }
+    }
+}
+
+UMat GainCompensator::buildSimilarityMask(InputArray src_array1, InputArray src_array2)
+{
+    CV_Assert(src_array1.rows() == src_array2.rows() && src_array1.cols() == src_array2.cols());
+    CV_Assert(src_array1.type() == src_array2.type());
+    CV_Assert(src_array1.type() == CV_8UC3 || src_array1.type() == CV_8UC1);
+
+    Mat src1 = src_array1.getMat();
+    Mat src2 = src_array2.getMat();
+
+    UMat umat_similarity(src1.rows, src1.cols, CV_8UC1);
+    Mat similarity = umat_similarity.getMat(ACCESS_WRITE);
+
+    if (src1.channels() == 3)
+    {
+        for (int y = 0; y < similarity.rows; ++y)
+        {
+            for (int x = 0; x < similarity.cols; ++x)
+            {
+                Vec<float, 3> vec_diff =
+                    Vec<float, 3>(*src1.ptr<Vec<uchar, 3>>(y, x))
+                    - Vec<float, 3>(*src2.ptr<Vec<uchar, 3>>(y, x));
+                double diff = norm(vec_diff * (1.f / 255.f));
+
+                *similarity.ptr<uchar>(y, x) = diff <= similarity_threshold_ ? 255 : 0;
+            }
+        }
+    }
+    else // if (src1.channels() == 1)
+    {
+        for (int y = 0; y < similarity.rows; ++y)
+        {
+            for (int x = 0; x < similarity.cols; ++x)
+            {
+                float diff = std::abs(static_cast<int>(*src1.ptr<uchar>(y, x))
+                    - static_cast<int>(*src2.ptr<uchar>(y, x))) / 255.f;
+
+                *similarity.ptr<uchar>(y, x) = diff <= similarity_threshold_ ? 255 : 0;
+            }
+        }
+    }
+    similarity.release();
+
+    Mat kernel = getStructuringElement(MORPH_RECT, Size(3,3));
+    UMat umat_erode;
+    erode(umat_similarity, umat_erode, kernel);
+    dilate(umat_erode, umat_similarity, kernel);
+
+    return umat_similarity;
+}
+
 void ChannelsCompensator::feed(const std::vector<Point> &corners, const std::vector<UMat> &images,
                                const std::vector<std::pair<UMat,uchar> > &masks)
 {
@@ -317,11 +409,15 @@ void ChannelsCompensator::feed(const std::vector<Point> &corners, const std::vec
     // For each channel, feed the channel of each image in a GainCompensator
     gains_.clear();
     gains_.resize(images.size());
+
+    GainCompensator compensator(getNrFeeds());
+    compensator.setSimilarityThreshold(getSimilarityThreshold());
+    compensator.prepareSimilarityMask(corners, images);
+
     for (int c = 0; c < 3; ++c)
     {
         const std::vector<UMat>& channels = images_channels[c];

-        GainCompensator compensator(getNrFeeds());
         compensator.feed(corners, channels, masks);

         std::vector<double> gains = compensator.gains();
@@ -400,6 +496,7 @@ void BlocksCompensator::feed(const std::vector<Point> &corners, const std::vecto
     {
         Compensator compensator;
         compensator.setNrFeeds(getNrFeeds());
+        compensator.setSimilarityThreshold(getSimilarityThreshold());
        compensator.feed(block_corners, block_images, block_masks);

         gain_maps_.clear();
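Outside the detail API, the option can also be reached from the high-level stitching pipeline by installing a configured compensator on the Stitcher. This is not part of the PR, only a sketch of how the new setter composes with the existing Stitcher API; the 0.1 value is again a placeholder.

    #include "opencv2/stitching.hpp"

    using namespace cv;

    Ptr<Stitcher> makeStitcherWithSimilarityMask()
    {
        auto compensator = makePtr<detail::BlocksChannelsCompensator>();
        compensator->setSimilarityThreshold(0.1);  // enable the new similarity mask

        Ptr<Stitcher> stitcher = Stitcher::create(Stitcher::PANORAMA);
        stitcher->setExposureCompensator(compensator);
        return stitcher;
    }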
modules/stitching/test/test_exposure_compensate.cpp (new file, 70 lines)
@@ -0,0 +1,70 @@
+// This file is part of OpenCV project.
+// It is subject to the license terms in the LICENSE file found in the top-level directory
+// of this distribution and at http://opencv.org/license.html.
+
+#include "test_precomp.hpp"
+
+namespace opencv_test {
+namespace {
+
+double minPSNR(UMat src1, UMat src2)
+{
+    std::vector<UMat> src1_channels, src2_channels;
+    split(src1, src1_channels);
+    split(src2, src2_channels);
+
+    double psnr = cvtest::PSNR(src1_channels[0], src2_channels[0]);
+    psnr = std::min(psnr, cvtest::PSNR(src1_channels[1], src2_channels[1]));
+    return std::min(psnr, cvtest::PSNR(src1_channels[2], src2_channels[2]));
+}
+
+TEST(ExposureCompensate, SimilarityThreshold)
+{
+    UMat source;
+    imread(cvtest::TS::ptr()->get_data_path() + "stitching/s1.jpg").copyTo(source);
+
+    UMat image1 = source.clone();
+    UMat image2 = source.clone();
+
+    // Add a big artifact
+    image2(Rect(150, 150, 100, 100)).setTo(Scalar(0, 0, 255));
+
+    UMat mask(image1.size(), CV_8U);
+    mask.setTo(255);
+
+    detail::BlocksChannelsCompensator compensator;
+    compensator.setNrGainsFilteringIterations(0); // makes the effect clearer
+
+    // Feed the compensator: images 1 and 2 are perfectly
+    // identical, except for the red artifact in image 2.
+    // Apart from that artifact, there is no exposure to compensate.
+    compensator.setSimilarityThreshold(1);
+    uchar xff = 255;
+    compensator.feed(
+        {{}, {}},
+        {image1, image2},
+        {{mask, xff}, {mask, xff}}
+    );
+    // Verify that the artifact in image 2 did create
+    // an artifact in image 1 during the exposure compensation
+    UMat image1_result = image1.clone();
+    compensator.apply(0, {}, image1_result, mask);
+    double psnr_no_similarity_mask = minPSNR(image1, image1_result);
+    EXPECT_LT(psnr_no_similarity_mask, 45);
+
+    // Add a similarity threshold and verify that
+    // the artifact in image 1 is gone
+    compensator.setSimilarityThreshold(0.1);
+    compensator.feed(
+        {{}, {}},
+        {image1, image2},
+        {{mask, xff}, {mask, xff}}
+    );
+    image1_result = image1.clone();
+    compensator.apply(0, {}, image1_result, mask);
+    double psnr_similarity_mask = minPSNR(image1, image1_result);
+    EXPECT_GT(psnr_similarity_mask, 300);
+}
+
+} // namespace
+} // namespace opencv_test
@@ -8,6 +8,7 @@
 #include "opencv2/stitching.hpp"
 #include "opencv2/stitching/detail/matchers.hpp"
 #include "opencv2/stitching/detail/blenders.hpp"
+#include "opencv2/stitching/detail/exposure_compensate.hpp"

 #ifdef HAVE_OPENCV_XFEATURES2D
 #include "opencv2/xfeatures2d/nonfree.hpp"