Merge pull request #14682 from l-bat:axpy_layer
* Add Axpy layer * Fix test * Fix Caffe importer
This commit is contained in:
parent
f1fb002682
commit
3efd2df87f
@ -388,6 +388,27 @@ public:
|
||||
layerParams.blobs[1].setTo(1); // std
|
||||
}
|
||||
}
|
||||
else if (type == "Axpy")
|
||||
{
|
||||
CV_Assert_N(layer.bottom_size() == 3, layer.top_size() == 1);
|
||||
|
||||
std::string scaleName = name + "/scale";
|
||||
int repetitions = layerCounter[scaleName]++;
|
||||
if (repetitions) {
|
||||
scaleName += String("_") + toString(repetitions);
|
||||
}
|
||||
|
||||
LayerParams scaleParams;
|
||||
scaleParams.set("axis", 1);
|
||||
scaleParams.set("has_bias", false);
|
||||
int scaleId = dstNet.addLayer(scaleName, "Scale", scaleParams);
|
||||
addInput(layer.bottom(2), scaleId, 0, dstNet);
|
||||
addInput(layer.bottom(0), scaleId, 1, dstNet);
|
||||
addOutput(layer, scaleId, 0);
|
||||
net.mutable_layer(li)->set_bottom(0, layer.top(0));
|
||||
net.mutable_layer(li)->mutable_bottom()->RemoveLast();
|
||||
type = "Eltwise";
|
||||
}
|
||||
else if ("ConvolutionDepthwise" == type)
|
||||
{
|
||||
type = "Convolution";
|
||||
|
||||
@ -143,7 +143,7 @@ public:
|
||||
CV_Check(dst.dims, 1 < dst.dims && dst.dims <= 4, ""); CV_CheckTypeEQ(dst.type(), CV_32FC1, ""); CV_Assert(dst.isContinuous());
|
||||
CV_Assert(coeffs.empty() || coeffs.size() == (size_t)nsrcs);
|
||||
|
||||
for( int i = 0; i > nsrcs; i++ )
|
||||
for( int i = 0; i < nsrcs; i++ )
|
||||
{
|
||||
CV_Assert(srcs[i].size == dst.size &&
|
||||
srcs[i].type() == dst.type() &&
|
||||
|
||||
@ -109,6 +109,49 @@ TEST(Test_Caffe, read_googlenet)
|
||||
ASSERT_FALSE(net.empty());
|
||||
}
|
||||
|
||||
TEST_P(Test_Caffe_nets, Axpy)
{
    // The Axpy layer (out = scale * x + shift) is imported as Scale + Eltwise;
    // it is not supported by the Inference Engine backend, so skip it there.
    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
        throw SkipTestException("");

    String proto = _tf("axpy.prototxt");
    Net net = readNetFromCaffe(proto);

    checkBackend();
    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    // Data/shift blobs are NCHW {1,2,3,4}; the scale blob is per-channel {1,2,1,1}.
    int blobShape[] = {1, 2, 3, 4};
    int scaleShape[] = {1, 2, 1, 1};
    Mat scaleBlob(4, &scaleShape[0], CV_32F);
    Mat shiftBlob(4, &blobShape[0], CV_32F);
    Mat input(4, &blobShape[0], CV_32F);
    randu(scaleBlob, -1.0f, 1.0f);
    randu(shiftBlob, -1.0f, 1.0f);
    randu(input, -1.0f, 1.0f);

    net.setInput(scaleBlob, "scale");
    net.setInput(shiftBlob, "shift");
    net.setInput(input, "data");

    Mat out = net.forward();

    // Reference result: broadcast the per-channel scale across H and W,
    // multiply element-wise with the input, then add the shift blob.
    Mat ref(4, &blobShape[0], input.type());
    for (int c = 0; c < input.size[1]; c++)
    {
        for (int y = 0; y < input.size[2]; y++)
        {
            for (int x = 0; x < input.size[3]; x++)
            {
                int pos[] = {0, c, y, x};
                int scalePos[] = {0, c, 0, 0};
                ref.at<float>(pos) = input.at<float>(pos) * scaleBlob.at<float>(scalePos)
                                     + shiftBlob.at<float>(pos);
            }
        }
    }
    // FP16 targets accumulate more rounding error, so loosen the tolerances.
    float l1 = (target == DNN_TARGET_OPENCL_FP16) ? 2e-4 : 1e-5;
    float lInf = (target == DNN_TARGET_OPENCL_FP16) ? 1e-3 : 1e-4;
    normAssert(ref, out, "", l1, lInf);
}
|
||||
|
||||
typedef testing::TestWithParam<tuple<bool, Target> > Reproducibility_AlexNet;
|
||||
TEST_P(Reproducibility_AlexNet, Accuracy)
|
||||
{
|
||||
|
||||
Loading…
Reference in New Issue
Block a user