Fix ISliceLayer
@@ -10,7 +10,7 @@
 
 nvinfer1::ITensor*
 reorgLayer(int layerIdx, std::map<std::string, std::string>& block, nvinfer1::ITensor* input,
-    nvinfer1::INetworkDefinition* network)
+    nvinfer1::INetworkDefinition* network, uint batchSize)
 {
     nvinfer1::ITensor* output;
@@ -24,39 +24,34 @@ reorgLayer(int layerIdx, std::map<std::string, std::string>& block, nvinfer1::IT
     nvinfer1::Dims inputDims = input->getDimensions();
 
     if (block.at("type") == "reorg3d") {
-        nvinfer1::ISliceLayer* slice1 = network->addSlice(*input, nvinfer1::Dims{4, {0, 0, 0, 0}},
-            nvinfer1::Dims{4, {inputDims.d[0], inputDims.d[1], inputDims.d[2] / stride, inputDims.d[3] / stride}},
-            nvinfer1::Dims{4, {1, 1, stride, stride}});
-        assert(slice1 != nullptr);
-        std::string slice1LayerName = "slice1_" + std::to_string(layerIdx);
-        slice1->setName(slice1LayerName.c_str());
+        std::string name1 = "slice1";
+        std::string name2 = "slice2";
+        std::string name3 = "slice3";
+        std::string name4 = "slice4";
+        nvinfer1::Dims start1 = {4, {0, 0, 0, 0}};
+        nvinfer1::Dims start2 = {4, {0, 0, 0, 1}};
+        nvinfer1::Dims start3 = {4, {0, 0, 1, 0}};
+        nvinfer1::Dims start4 = {4, {0, 0, 1, 1}};
+        nvinfer1::Dims sizeAll = {4, {inputDims.d[0], inputDims.d[1], inputDims.d[2] / stride, inputDims.d[3] / stride}};
+        nvinfer1::Dims strideAll = {4, {1, 1, stride, stride}};
 
-        nvinfer1::ISliceLayer* slice2 = network->addSlice(*input, nvinfer1::Dims{4, {0, 0, 0, 1}},
-            nvinfer1::Dims{4, {inputDims.d[0], inputDims.d[1], inputDims.d[2] / stride, inputDims.d[3] / stride}},
-            nvinfer1::Dims{4, {1, 1, stride, stride}});
-        assert(slice2 != nullptr);
-        std::string slice2LayerName = "slice2_" + std::to_string(layerIdx);
-        slice2->setName(slice2LayerName.c_str());
+        nvinfer1::ITensor* slice1 = sliceLayer(layerIdx, name1, input, start1, sizeAll, strideAll, network, batchSize);
+        assert(output != nullptr);
 
-        nvinfer1::ISliceLayer* slice3 = network->addSlice(*input, nvinfer1::Dims{4, {0, 0, 1, 0}},
-            nvinfer1::Dims{4, {inputDims.d[0], inputDims.d[1], inputDims.d[2] / stride, inputDims.d[3] / stride}},
-            nvinfer1::Dims{4, {1, 1, stride, stride}});
-        assert(slice3 != nullptr);
-        std::string slice3LayerName = "slice3_" + std::to_string(layerIdx);
-        slice3->setName(slice3LayerName.c_str());
+        nvinfer1::ITensor* slice2 = sliceLayer(layerIdx, name2, input, start2, sizeAll, strideAll, network, batchSize);
+        assert(output != nullptr);
 
-        nvinfer1::ISliceLayer* slice4 = network->addSlice(*input, nvinfer1::Dims{4, {0, 0, 1, 1}},
-            nvinfer1::Dims{4, {inputDims.d[0], inputDims.d[1], inputDims.d[2] / stride, inputDims.d[3] / stride}},
-            nvinfer1::Dims{4, {1, 1, stride, stride}});
-        assert(slice4 != nullptr);
-        std::string slice4LayerName = "slice4_" + std::to_string(layerIdx);
-        slice4->setName(slice4LayerName.c_str());
+        nvinfer1::ITensor* slice3 = sliceLayer(layerIdx, name3, input, start3, sizeAll, strideAll, network, batchSize);
+        assert(output != nullptr);
 
+        nvinfer1::ITensor* slice4 = sliceLayer(layerIdx, name4, input, start4, sizeAll, strideAll, network, batchSize);
+        assert(output != nullptr);
 
         std::vector<nvinfer1::ITensor*> concatInputs;
-        concatInputs.push_back(slice1->getOutput(0));
-        concatInputs.push_back(slice2->getOutput(0));
-        concatInputs.push_back(slice3->getOutput(0));
-        concatInputs.push_back(slice4->getOutput(0));
+        concatInputs.push_back(slice1);
+        concatInputs.push_back(slice2);
+        concatInputs.push_back(slice3);
+        concatInputs.push_back(slice4);
 
         nvinfer1::IConcatenationLayer* concat = network->addConcatenation(concatInputs.data(), concatInputs.size());
         assert(concat != nullptr);
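The four reorg3d slices above differ only in their 2x2 spatial offset. A minimal sketch of the same construction written as a loop over the offsets, assuming the sliceLayer() helper added by this commit and the sizeAll/strideAll dims defined above (not part of the commit itself):

// Sketch only, not part of the commit: generate the four reorg3d slices in a loop.
std::vector<nvinfer1::ITensor*> concatInputs;
for (int offset = 0; offset < 4; ++offset) {
    std::string name = "slice" + std::to_string(offset + 1);
    // spatial offsets (0,0), (0,1), (1,0), (1,1) within each stride x stride cell
    nvinfer1::Dims start = {4, {0, 0, offset / 2, offset % 2}};
    nvinfer1::ITensor* sliced = sliceLayer(layerIdx, name, input, start, sizeAll, strideAll, network, batchSize);
    assert(sliced != nullptr);
    concatInputs.push_back(sliced);
}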
@@ -11,7 +11,9 @@
 #include "NvInfer.h"
 
+#include "slice_layer.h"
+
 nvinfer1::ITensor* reorgLayer(int layerIdx, std::map<std::string, std::string>& block, nvinfer1::ITensor* input,
-    nvinfer1::INetworkDefinition* network);
+    nvinfer1::INetworkDefinition* network, uint batchSize);
 
 #endif
@@ -7,7 +7,7 @@
 
 nvinfer1::ITensor*
 routeLayer(int layerIdx, std::string& layers, std::map<std::string, std::string>& block,
-    std::vector<nvinfer1::ITensor*> tensorOutputs, nvinfer1::INetworkDefinition* network)
+    std::vector<nvinfer1::ITensor*> tensorOutputs, nvinfer1::INetworkDefinition* network, uint batchSize)
 {
     nvinfer1::ITensor* output;
@@ -69,13 +69,14 @@ routeLayer(int layerIdx, std::string& layers, std::map<std::string, std::string>
         int group_id = stoi(block.at("group_id"));
         int startSlice = (prevTensorDims.d[1] / groups) * group_id;
         int channelSlice = (prevTensorDims.d[1] / groups);
-        nvinfer1::ISliceLayer* slice = network->addSlice(*output, nvinfer1::Dims{4, {0, startSlice, 0, 0}},
-            nvinfer1::Dims{4, {prevTensorDims.d[0], channelSlice, prevTensorDims.d[2], prevTensorDims.d[3]}},
-            nvinfer1::Dims{4, {1, 1, 1, 1}});
-        assert(slice != nullptr);
-        std::string sliceLayerName = "slice_" + std::to_string(layerIdx);
-        slice->setName(sliceLayerName.c_str());
-        output = slice->getOutput(0);
+        std::string name = "slice";
+        nvinfer1::Dims start = {4, {0, startSlice, 0, 0}};
+        nvinfer1::Dims size = {4, {prevTensorDims.d[0], channelSlice, prevTensorDims.d[2], prevTensorDims.d[3]}};
+        nvinfer1::Dims stride = {4, {1, 1, 1, 1}};
+
+        output = sliceLayer(layerIdx, name, output, start, size, stride, network, batchSize);
+        assert(output != nullptr);
     }
 
     return output;
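To make the group arithmetic concrete (the values below are hypothetical, not from the commit): with groups=2 and group_id=1 on a previous output of shape {-1, 64, 52, 52}, the route keeps the upper half of the channels:

// Hypothetical values, for illustration only.
int groups = 2, group_id = 1;
nvinfer1::Dims prevTensorDims = {4, {-1, 64, 52, 52}};
int startSlice = (prevTensorDims.d[1] / groups) * group_id;  // 32
int channelSlice = (prevTensorDims.d[1] / groups);           // 32
// -> start {0, 32, 0, 0}, size {-1, 32, 52, 52}, stride {1, 1, 1, 1};
// the -1 batch extent is later replaced inside sliceLayer() (see slice_layer.cpp below).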
@@ -8,7 +8,9 @@
 #include "../utils.h"
 
+#include "slice_layer.h"
+
 nvinfer1::ITensor* routeLayer(int layerIdx, std::string& layers, std::map<std::string, std::string>& block,
-    std::vector<nvinfer1::ITensor*> tensorOutputs, nvinfer1::INetworkDefinition* network);
+    std::vector<nvinfer1::ITensor*> tensorOutputs, nvinfer1::INetworkDefinition* network, uint batchSize);
 
 #endif
@@ -10,19 +10,20 @@
 nvinfer1::ITensor*
 shortcutLayer(int layerIdx, std::string activation, std::string inputVol, std::string shortcutVol,
     std::map<std::string, std::string>& block, nvinfer1::ITensor* input, nvinfer1::ITensor* shortcutInput,
-    nvinfer1::INetworkDefinition* network)
+    nvinfer1::INetworkDefinition* network, uint batchSize)
 {
     nvinfer1::ITensor* output;
 
     assert(block.at("type") == "shortcut");
 
     if (inputVol != shortcutVol) {
-        nvinfer1::ISliceLayer* slice = network->addSlice(*shortcutInput, nvinfer1::Dims{4, {0, 0, 0, 0}}, input->getDimensions(),
-            nvinfer1::Dims{4, {1, 1, 1, 1}});
-        assert(slice != nullptr);
-        std::string sliceLayerName = "slice_" + std::to_string(layerIdx);
-        slice->setName(sliceLayerName.c_str());
-        output = slice->getOutput(0);
+        std::string name = "slice";
+        nvinfer1::Dims start = {4, {0, 0, 0, 0}};
+        nvinfer1::Dims size = input->getDimensions();
+        nvinfer1::Dims stride = nvinfer1::Dims{4, {1, 1, 1, 1}};
+
+        output = sliceLayer(layerIdx, name, shortcutInput, start, size, stride, network, batchSize);
+        assert(output != nullptr);
     }
     else
         output = shortcutInput;
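When the two volumes differ, the shortcut tensor is sliced down to the dimensions of input before the residual combination. The combination itself is outside this hunk; a minimal sketch of what typically follows, assuming an elementwise sum (names are hypothetical):

// Sketch, not shown in this hunk: combine the (possibly sliced) shortcut with the input.
nvinfer1::IElementWiseLayer* ew = network->addElementWise(*input, *output, nvinfer1::ElementWiseOperation::kSUM);
assert(ew != nullptr);
std::string ewLayerName = "shortcut_" + std::to_string(layerIdx);  // hypothetical layer name
ew->setName(ewLayerName.c_str());
output = ew->getOutput(0);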
@@ -10,10 +10,11 @@
 #include "NvInfer.h"
 
+#include "slice_layer.h"
 #include "activation_layer.h"
 
 nvinfer1::ITensor* shortcutLayer(int layerIdx, std::string activation, std::string inputVol, std::string shortcutVol,
     std::map<std::string, std::string>& block, nvinfer1::ITensor* input, nvinfer1::ITensor* shortcut,
-    nvinfer1::INetworkDefinition* network);
+    nvinfer1::INetworkDefinition* network, uint batchSize);
 
 #endif
nvdsinfer_custom_impl_Yolo/layers/slice_layer.cpp (new file, 69 lines)
@@ -0,0 +1,69 @@
+/*
+ * Created by Marcos Luciano
+ * https://www.github.com/marcoslucianops
+ */
+
+#include "slice_layer.h"
+
+#include <cassert>
+
+nvinfer1::ITensor*
+sliceLayer(int layerIdx, std::string& name, nvinfer1::ITensor* input, nvinfer1::Dims start, nvinfer1::Dims size,
+    nvinfer1::Dims stride, nvinfer1::INetworkDefinition* network, uint batchSize)
+{
+    nvinfer1::ITensor* output;
+
+    int tensorBatch = input->getDimensions().d[0];
+
+    nvinfer1::ISliceLayer* slice = network->addSlice(*input, start, size, stride);
+
+    if (tensorBatch == -1) {
+        int nbDims = size.nbDims;
+
+        nvinfer1::Weights constant1Wt {nvinfer1::DataType::kINT32, nullptr, nbDims};
+
+        int* val1 = new int[nbDims];
+        val1[0] = 1;
+        for (int i = 1; i < nbDims; ++i) {
+            val1[i] = size.d[i];
+        }
+        constant1Wt.values = val1;
+
+        nvinfer1::IConstantLayer* constant1 = network->addConstant(nvinfer1::Dims{1, {nbDims}}, constant1Wt);
+        assert(constant1 != nullptr);
+        std::string constant1LayerName = "constant1_" + name + "_" + std::to_string(layerIdx);
+        constant1->setName(constant1LayerName.c_str());
+        nvinfer1::ITensor* constant1Tensor = constant1->getOutput(0);
+
+        nvinfer1::Weights constant2Wt {nvinfer1::DataType::kINT32, nullptr, nbDims};
+
+        int* val2 = new int[nbDims];
+        val2[0] = batchSize;
+        for (int i = 1; i < nbDims; ++i) {
+            val2[i] = 1;
+        }
+        constant2Wt.values = val2;
+
+        nvinfer1::IConstantLayer* constant2 = network->addConstant(nvinfer1::Dims{1, {nbDims}}, constant2Wt);
+        assert(constant2 != nullptr);
+        std::string constant2LayerName = "constant2_" + name + "_" + std::to_string(layerIdx);
+        constant2->setName(constant2LayerName.c_str());
+        nvinfer1::ITensor* constant2Tensor = constant2->getOutput(0);
+
+        nvinfer1::IElementWiseLayer* newSize = network->addElementWise(*constant1Tensor, *constant2Tensor,
+            nvinfer1::ElementWiseOperation::kPROD);
+        assert(newSize != nullptr);
+        std::string newSizeLayerName = "new_size_" + name + "_" + std::to_string(layerIdx);
+        newSize->setName(newSizeLayerName.c_str());
+        nvinfer1::ITensor* newSizeTensor = newSize->getOutput(0);
+
+        slice->setInput(2, *newSizeTensor);
+    }
+
+    assert(slice != nullptr);
+    std::string sliceLayerName = name + "_" + std::to_string(layerIdx);
+    slice->setName(sliceLayerName.c_str());
+    output = slice->getOutput(0);
+
+    return output;
+}
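The dynamic-batch branch exists because, when the input's batch extent is -1, the static size Dims passed to addSlice() still carries that -1. The commit therefore overrides the layer's size input (input index 2) with a shape tensor built as the elementwise product of two constant vectors. With hypothetical values, assuming batchSize = 8 and size = {-1, 64, 26, 26}:

// Hypothetical values illustrating the size tensor fed to slice->setInput(2, ...).
int nbDims = 4;
int batchSize = 8;
int val1[] = {1, 64, 26, 26};       // constant1: 1 in the batch slot, the static extents elsewhere
int val2[] = {batchSize, 1, 1, 1};  // constant2: batchSize in the batch slot, 1 elsewhere
int newSize[4];
for (int i = 0; i < nbDims; ++i)
    newSize[i] = val1[i] * val2[i]; // kPROD -> {8, 64, 26, 26}, the size with the batch extent filled in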
nvdsinfer_custom_impl_Yolo/layers/slice_layer.h (new file, 16 lines)
@@ -0,0 +1,16 @@
+/*
+ * Created by Marcos Luciano
+ * https://www.github.com/marcoslucianops
+ */
+
+#ifndef __SLICE_LAYER_H__
+#define __SLICE_LAYER_H__
+
+#include <string>
+
+#include "NvInfer.h"
+
+nvinfer1::ITensor* sliceLayer(int layerIdx, std::string& name, nvinfer1::ITensor* input, nvinfer1::Dims start,
+    nvinfer1::Dims size, nvinfer1::Dims stride, nvinfer1::INetworkDefinition* network, uint batchSize);
+
+#endif
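A usage sketch of the new helper (the dims are hypothetical, and layerIdx, input, network and batchSize are assumed to be in scope as in the callers patched by this commit): slice out the first 32 channels of a {-1, 64, 52, 52} tensor, leaving the spatial extents untouched.

// Usage sketch with hypothetical dims.
std::string name = "slice";
nvinfer1::Dims start = {4, {0, 0, 0, 0}};
nvinfer1::Dims size = {4, {-1, 32, 52, 52}};
nvinfer1::Dims stride = {4, {1, 1, 1, 1}};
nvinfer1::ITensor* out = sliceLayer(layerIdx, name, input, start, size, stride, network, batchSize);
assert(out != nullptr);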
@@ -361,7 +361,7 @@ Yolo::buildYoloNetwork(std::vector<float>& weights, nvinfer1::INetworkDefinition
             std::string inputVol = dimsToString(previous->getDimensions());
             std::string shortcutVol = dimsToString(tensorOutputs[i + from - 1]->getDimensions());
             previous = shortcutLayer(i, activation, inputVol, shortcutVol, m_ConfigBlocks.at(i), previous,
-                tensorOutputs[i + from - 1], &network);
+                tensorOutputs[i + from - 1], &network, m_BatchSize);
             assert(previous != nullptr);
             std::string outputVol = dimsToString(previous->getDimensions());
             tensorOutputs.push_back(previous);
@@ -394,7 +394,7 @@ Yolo::buildYoloNetwork(std::vector<float>& weights, nvinfer1::INetworkDefinition
         }
         else if (m_ConfigBlocks.at(i).at("type") == "route") {
            std::string layers;
-            previous = routeLayer(i, layers, m_ConfigBlocks.at(i), tensorOutputs, &network);
+            previous = routeLayer(i, layers, m_ConfigBlocks.at(i), tensorOutputs, &network, m_BatchSize);
             assert(previous != nullptr);
             std::string outputVol = dimsToString(previous->getDimensions());
             tensorOutputs.push_back(previous);
@@ -422,7 +422,7 @@ Yolo::buildYoloNetwork(std::vector<float>& weights, nvinfer1::INetworkDefinition
         }
         else if (m_ConfigBlocks.at(i).at("type") == "reorg" || m_ConfigBlocks.at(i).at("type") == "reorg3d") {
             std::string inputVol = dimsToString(previous->getDimensions());
-            previous = reorgLayer(i, m_ConfigBlocks.at(i), previous, &network);
+            previous = reorgLayer(i, m_ConfigBlocks.at(i), previous, &network, m_BatchSize);
             assert(previous != nullptr);
             std::string outputVol = dimsToString(previous->getDimensions());
             tensorOutputs.push_back(previous);
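sliceLayer() only consults the batch size it is handed here when the network input has a dynamic (-1) batch extent. In that explicit-batch setup the builder also needs an optimization profile; a sketch of how m_BatchSize could bound it (the "data" input name, the builder/config variables and the 3x608x608 shape are assumptions, not part of this diff):

// Sketch only, under the assumptions stated above.
nvinfer1::IOptimizationProfile* profile = builder->createOptimizationProfile();
profile->setDimensions("data", nvinfer1::OptProfileSelector::kMIN, nvinfer1::Dims4(1, 3, 608, 608));
profile->setDimensions("data", nvinfer1::OptProfileSelector::kOPT, nvinfer1::Dims4(m_BatchSize, 3, 608, 608));
profile->setDimensions("data", nvinfer1::OptProfileSelector::kMAX, nvinfer1::Dims4(m_BatchSize, 3, 608, 608));
config->addOptimizationProfile(profile);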