Add docs reduce mean
parent e9e3af0aaa
commit 4625337179
@@ -152,9 +152,9 @@ void getPoolingKernelParams(const LayerParams &params, std::vector<size_t>& kern
     globalPooling = std::vector<bool>(3, is_global);
     if (params.has("global_pooling_d"))
         globalPooling[0] = params.get<bool>("global_pooling_d");
-    else if (params.has("global_pooling_h"))
+    if (params.has("global_pooling_h"))
         globalPooling[1] = params.get<bool>("global_pooling_h");
-    else if (params.has("global_pooling_w"))
+    if (params.has("global_pooling_w"))
         globalPooling[2] = params.get<bool>("global_pooling_w");
 
     if (is_global)
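For illustration only (not part of this diff): the hunk above reads "global_pooling_d"/"global_pooling_h"/"global_pooling_w" as per-axis flags next to the combined "global_pooling" option. A minimal standalone sketch of exercising an H-only global pooling through the public dnn API, assuming an OpenCV build with the dnn module; the layer name, shape and values are made up for illustration.

#include <opencv2/dnn.hpp>

int main()
{
    cv::dnn::LayerParams lp;
    lp.set("pool", "ave");
    lp.set("global_pooling_h", true);   // pool over the full H extent only
    lp.set("kernel_w", 1);              // keep an explicit 1-wide kernel along W

    cv::dnn::Net net;
    net.addLayerToPrev("avgH", "Pooling", lp);

    int sz[] = {1, 2, 4, 3};            // NxCxHxW
    cv::Mat input(4, sz, CV_32F);
    cv::randu(input, 0.0, 1.0);

    net.setInput(input);
    cv::Mat out = net.forward();        // expected shape: 1x2x1x3
    return 0;
}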
@@ -124,10 +124,8 @@ public:
             }
             else
                 CV_Error(Error::StsBadArg, "Cannot determine pooling type");
 
         setParamsFrom(params);
         ceilMode = params.get<bool>("ceil_mode", true);
 
         spatialScale = params.get<float>("spatial_scale", 1);
         avePoolPaddedArea = params.get<bool>("ave_pool_padded_area", true);
     }
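For illustration only (not part of this diff): the constructor above also reads "ceil_mode" and "ave_pool_padded_area". A standalone sketch, under the same assumptions as the previous example, of an average pooling configured with those parameters; with ave_pool_padded_area set to false the zero padding is expected not to count toward the averaging area.

#include <opencv2/dnn.hpp>

int main()
{
    cv::dnn::LayerParams lp;
    lp.set("pool", "ave");
    lp.set("kernel_h", 3);
    lp.set("kernel_w", 3);
    lp.set("stride_h", 2);
    lp.set("stride_w", 2);
    lp.set("pad_h", 1);
    lp.set("pad_w", 1);
    lp.set("ceil_mode", false);             // round the output size down
    lp.set("ave_pool_padded_area", false);  // don't count the zero padding in the average

    cv::dnn::Net net;
    net.addLayerToPrev("avePool", "Pooling", lp);

    int sz[] = {1, 1, 5, 5};                // NxCxHxW
    cv::Mat input(4, sz, CV_32F);
    cv::randu(input, 0.0, 1.0);

    net.setInput(input);
    cv::Mat out = net.forward();            // expected shape: 1x1x3x3
    return 0;
}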
@@ -116,7 +116,9 @@ public:
         return backendId == DNN_BACKEND_OPENCV ||
                (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && sliceRanges.size() == 1) ||
                (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 &&
 #ifdef HAVE_INF_ENGINE
                 INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1) &&
 #endif
                 sliceRanges.size() == 1 && sliceRanges[0].size() == 4);
     }
@@ -1932,9 +1932,29 @@ void TFImporter::populateNet(Net dstNet)
         }
         else if (type == "Mean")
         {
+            // Computes the mean of elements across dimensions of a tensor.
+            // If keepdims is false (default) reduces input_tensor along the dimensions given in axis,
+            // else the reduced dimensions are retained with length 1.
+            // if indices = [1, 2] in NHWC layout we use global pooling: NxCxHxW --Pooling--> NxCx1x1
+            // if keepdims is false we use Flatten after Pooling: out_shape = NxC
+            // if indices = [0] we use a global pooling by indices.
+            // To return correct shape, we use Reshape after Pooling. To determine input shape use Slice for input,
+            // if keepdims is false we use Flatten after Slice.
+            // Example: input_shape = NxCxHxW
+            // determine out shape: NxCxHxW --Slice--> 1xCxHxW
+            //                      out_shape = 1xCxHxW if keepDims else (1xCxHxW --Flatten--> CxHxW)
+            // global pool: NxCxHxW --Flatten--> Nx(C*H*W) --Reshape--> 1x1xNx(C*H*W) --Pooling--> 1x1x1x(C*H*W) --Reshape--> out_shape
+
             Mat indices = getTensorContent(getConstBlob(layer, value_id, 1));
             CV_Assert(indices.type() == CV_32SC1);
 
+            // There are two attributes, "keepdims" and a deprecated "keep_dims".
+            bool keepDims = false;
+            if (hasLayerAttr(layer, "keepdims"))
+                keepDims = getLayerAttr(layer, "keepdims").b();
+            else if (hasLayerAttr(layer, "keep_dims"))
+                keepDims = getLayerAttr(layer, "keep_dims").b();
+
             if (indices.total() == 1 && indices.at<int>(0) == 0)
             {
                 LayerParams flattenLp;
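For illustration only (not part of this diff): the comment block above documents the indices = [1, 2] path, i.e. global average pooling followed by a Flatten when keepdims is false. A standalone sketch of that layer chain built directly with the public dnn API; the layer names and shapes are illustrative.

#include <opencv2/dnn.hpp>

int main()
{
    cv::dnn::Net net;

    cv::dnn::LayerParams poolLp;
    poolLp.set("pool", "ave");
    poolLp.set("global_pooling", true);                        // NxCxHxW --Pooling--> NxCx1x1
    net.addLayerToPrev("mean", "Pooling", poolLp);

    cv::dnn::LayerParams flattenLp;                            // only needed when keepdims is false
    net.addLayerToPrev("mean/flatten", "Flatten", flattenLp);  // NxCx1x1 --Flatten--> NxC

    int sz[] = {2, 3, 4, 5};                                   // NxCxHxW
    cv::Mat input(4, sz, CV_32F);
    cv::randu(input, 0.0, 1.0);

    net.setInput(input);
    cv::Mat out = net.forward();                               // expected shape: 2x3
    return 0;
}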
@@ -1968,49 +1988,44 @@ void TFImporter::populateNet(Net dstNet)
                 connect(layer_id, dstNet, Pin(reshapeName), avgId, 0);
 
                 LayerParams sliceLp;
-                std::string sliceName = name + "/slice";
-                CV_Assert(layer_id.find(sliceName) == layer_id.end());
+                std::string layerShapeName = name + "/slice";
+                CV_Assert(layer_id.find(layerShapeName) == layer_id.end());
                 sliceLp.set("axis", indices.at<int>(0));
                 int begin[] = {0};
                 int size[] = {1};
                 sliceLp.set("begin", DictValue::arrayInt(&begin[0], 1));
                 sliceLp.set("size", DictValue::arrayInt(&size[0], 1));
-                int sliceId = dstNet.addLayer(sliceName, "Slice", sliceLp);
-                layer_id[sliceName] = sliceId;
+                int sliceId = dstNet.addLayer(layerShapeName, "Slice", sliceLp);
+                layer_id[layerShapeName] = sliceId;
                 connect(layer_id, dstNet, Pin(layer.input(0)), sliceId, 0);
 
-                LayerParams squeezeLp;
-                std::string squeezeName = name + "/squeeze";
-                CV_Assert(layer_id.find(squeezeName) == layer_id.end());
-                squeezeLp.set("axis", indices.at<int>(0));
-                squeezeLp.set("end_axis", indices.at<int>(0) + 1);
-                int squeezeId = dstNet.addLayer(squeezeName, "Flatten", squeezeLp);
-                layer_id[squeezeName] = squeezeId;
-                connect(layer_id, dstNet, Pin(sliceName), squeezeId, 0);
+                if (!keepDims)
+                {
+                    LayerParams squeezeLp;
+                    std::string squeezeName = name + "/squeeze";
+                    CV_Assert(layer_id.find(squeezeName) == layer_id.end());
+                    squeezeLp.set("axis", indices.at<int>(0));
+                    squeezeLp.set("end_axis", indices.at<int>(0) + 1);
+                    int squeezeId = dstNet.addLayer(squeezeName, "Flatten", squeezeLp);
+                    layer_id[squeezeName] = squeezeId;
+                    connect(layer_id, dstNet, Pin(layerShapeName), squeezeId, 0);
+                    layerShapeName = squeezeName;
+                }
 
                 int id = dstNet.addLayer(name, "Reshape", layerParams);
                 layer_id[name] = id;
                 connect(layer_id, dstNet, Pin(avgName), id, 0);
-                connect(layer_id, dstNet, Pin(squeezeName), id, 1);
+                connect(layer_id, dstNet, Pin(layerShapeName), id, 1);
             } else {
                 if (indices.total() != 2 || indices.at<int>(0) != 1 || indices.at<int>(1) != 2)
                     CV_Error(Error::StsNotImplemented, "Unsupported mode of reduce_mean operation.");
 
                 layerParams.set("pool", "ave");
                 layerParams.set("global_pooling", true);
 
                 int id = dstNet.addLayer(name, "Pooling", layerParams);
                 layer_id[name] = id;
 
                 connect(layer_id, dstNet, parsePin(layer.input(0)), id, 0);
 
-                // There are two attributes, "keepdims" and a deprecated "keep_dims".
-                bool keepDims = false;
-                if (hasLayerAttr(layer, "keepdims"))
-                    keepDims = getLayerAttr(layer, "keepdims").b();
-                else if (hasLayerAttr(layer, "keep_dims"))
-                    keepDims = getLayerAttr(layer, "keep_dims").b();
-
                 if (!keepDims)
                 {
                     LayerParams flattenLp;
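For illustration only (not part of this diff): in the hunk above the "/squeeze" Flatten is only added when keepdims is false; it collapses the sliced-out dimension (axis .. end_axis) so the Reshape target has the expected rank. A standalone sketch of that Flatten configuration for indices = [0]; the layer name and input shape are illustrative.

#include <opencv2/dnn.hpp>

int main()
{
    cv::dnn::LayerParams squeezeLp;
    squeezeLp.set("axis", 0);       // indices.at<int>(0)
    squeezeLp.set("end_axis", 1);   // indices.at<int>(0) + 1

    cv::dnn::Net net;
    net.addLayerToPrev("squeeze", "Flatten", squeezeLp);

    int sz[] = {1, 3, 4, 5};        // 1xCxHxW, i.e. the shape produced by the "/slice" layer
    cv::Mat input(4, sz, CV_32F);
    cv::randu(input, 0.0, 1.0);

    net.setInput(input);
    cv::Mat out = net.forward();    // expected shape: 3x4x5 (CxHxW), matching the comment block
    return 0;
}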