NVIDIA DeepStream SDK API Reference

6.4 Release
infer_postprocess.h
Go to the documentation of this file.
1 /*
2  * SPDX-FileCopyrightText: Copyright (c) 2019-2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
3  * SPDX-License-Identifier: LicenseRef-NvidiaProprietary
4  *
5  * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
6  * property and proprietary rights in and to this material, related
7  * documentation and any modifications thereto. Any use, reproduction,
8  * disclosure or distribution of this material and related documentation
9  * without an express license agreement from NVIDIA CORPORATION or
10  * its affiliates is strictly prohibited.
11  */
12 
22 #ifndef __NVDSINFERSERVER_POST_PROCESS_H__
23 #define __NVDSINFERSERVER_POST_PROCESS_H__
24 
25 #include <stdarg.h>
26 #include <condition_variable>
27 #include <functional>
28 #include <list>
29 #include <memory>
30 #include <mutex>
31 #include <queue>
32 
33 #include <cuda_runtime_api.h>
34 
35 #include "infer_batch_buffer.h"
36 #include "infer_common.h"
37 #include "infer_cuda_utils.h"
38 #include "infer_datatypes.h"
39 #include "infer_ibackend.h"
40 #include "infer_iprocess.h"
41 #include "infer_post_datatypes.h"
42 #include "nvdsinfer_custom_impl.h"
43 #include "nvdsinferserver_config.pb.h"
44 
45 namespace ic = nvdsinferserver::config;
46 
47 struct NvDsInferDBScan;
48 
49 namespace nvdsinferserver {
50 
55 public:
56  using TensorAllocator = std::function<SharedSysMem(const std::string& name, size_t bytes)>;
57  using EventAllocator = std::function<SharedCuEvent()>;
58 
59 protected:
61  : BasePostprocessor(type, id){}
62 
63 public:
64  virtual ~Postprocessor() = default;
65  void setDllHandle(const SharedDllHandle& dlHandle) {
66  m_CustomLibHandle = dlHandle;
67  }
68  void setLabelPath(const std::string& path) { m_LabelPath = path; }
70  m_NetworkInfo = info;
71  }
72  void setOutputLayerCount(uint32_t num) { m_OutputLayerCount = num; }
73  void setInputCopy(bool enable) { m_CopyInputToHostBuffers = enable; }
74  const std::vector<std::vector<std::string>>& getLabels() const {
75  return m_Labels;
76  }
78  {
79  m_CpuAllocator = cpuAlloc;
80  m_EventAllocator = event;
81  }
82  bool needInputCopy() const { return m_CopyInputToHostBuffers; }
83 
85  const std::vector<int>& devIds) override;
86 
87 protected:
89  SharedBatchArray& outbuf, SharedCuStream& mainStream) override;
90 
92  SharedBatchArray& outbuf, SharedCuStream& mainStream) override;
93 
96 
97 private:
98  virtual NvDsInferStatus batchParse(
99  std::vector<NvDsInferLayerInfo>& outputLayers,
100  const std::vector<SharedBatchBuf> outputBufs, uint32_t batchSize,
101  SharedBatchArray& results) = 0;
102 
103 protected:
104  NvDsInferStatus parseLabelsFile(const std::string& path);
105 
106 private:
107  DISABLE_CLASS_COPY(Postprocessor);
108 
109 protected:
115  uint32_t m_OutputLayerCount = 0;
116  std::string m_LabelPath;
117 
119  std::vector<std::vector<std::string>> m_Labels;
120 
123 };
124 
129 public:
130  DetectPostprocessor(int uid, const ic::DetectionParams &params);
131  ~DetectPostprocessor() override = default;
132 
133  NvDsInferStatus allocateResource(const std::vector<int>& devIds) override;
134 
135 private:
136  NvDsInferStatus batchParse(
137  std::vector<NvDsInferLayerInfo>& outputLayers,
138  const std::vector<SharedBatchBuf> outputBufs, uint32_t batchSize,
139  SharedBatchArray& results) override;
140 
141  bool parseBoundingBox(
142  std::vector<NvDsInferLayerInfo> const& outputLayersInfo,
143  NvDsInferNetworkInfo const& networkInfo,
144  NvDsInferParseDetectionParams const& detectionParams,
145  std::vector<NvDsInferObjectDetectionInfo>& objectList);
146 
147  void clusterAndFillDetectionOutputCV(
148  const std::vector<NvDsInferObjectDetectionInfo>& objectList,
149  std::vector<NvDsInferObject> &outputs);
150  void clusterAndFillDetectionOutputDBSCAN(
151  const std::vector<NvDsInferObjectDetectionInfo>& objectList,
152  std::vector<NvDsInferObject>& outputs);
153  void copyWithoutCluster(
154  const std::vector<NvDsInferObjectDetectionInfo>& objectList,
155  std::vector<NvDsInferObject>& outputs);
156  NvDsInferStatus fillDetectionOutput(
157  const std::vector<NvDsInferLayerInfo>& outputLayers,
158  std::vector<NvDsInferObject> &output);
159  void filterDetectionOutput(
160  NvDsInferParseDetectionParams const &detectionParams,
161  std::vector<NvDsInferObjectDetectionInfo> &objectList);
162  void clusterAndFillDetectionOutputNMS(
163  const std::vector<NvDsInferObjectDetectionInfo>& objectList,
164  uint32_t topk, std::vector<NvDsInferObject>& outputs);
165  std::vector<int> nonMaximumSuppression(
166  const std::vector<std::pair<float, int>>& scoreIndex,
167  const std::vector<NvDsInferParseObjectInfo>& bbox,
168  const float nmsThreshold);
169 
170 private:
171  struct NvDsInferDetectionParams
172  {
174  float threshold;
177  float eps;
180  int minBoxes;
183  int groupThreshold;
187  float minScore;
189  float nmsIOUThreshold;
190  };
191 
192 private:
193  std::shared_ptr<NvDsInferDBScan> m_DBScanHandle;
195  uint32_t m_NumDetectedClasses = 0;
196 
198  std::vector<NvDsInferDetectionParams> m_PerClassDetectionParams;
199  NvDsInferParseDetectionParams m_DetectionParams = {0, {}, {}};
200 
201  NvDsInferParseCustomFunc m_CustomBBoxParseFunc = nullptr;
202  ic::DetectionParams m_DetectConfig;
203 };
204 
209 public:
210  ClassifyPostprocessor(int uid, const ic::ClassificationParams &params);
211  NvDsInferStatus allocateResource(const std::vector<int>& devIds) override;
212 
213 private:
214  NvDsInferStatus batchParse(
215  std::vector<NvDsInferLayerInfo>& outputLayers,
216  const std::vector<SharedBatchBuf> outputBufs, uint32_t batchSize,
217  SharedBatchArray& results) override;
218 
219  NvDsInferStatus fillClassificationOutput(
220  const std::vector<NvDsInferLayerInfo>& outputLayers,
221  InferClassificationOutput& output);
222 
223  bool parseAttributesFromSoftmaxLayers(
224  std::vector<NvDsInferLayerInfo> const& outputLayersInfo,
225  NvDsInferNetworkInfo const& networkInfo, float classifierThreshold,
226  std::vector<NvDsInferAttribute>& attrList, std::string& attrString);
227 
228 private:
229  float m_ClassifierThreshold = 0;
230  NvDsInferClassiferParseCustomFunc m_CustomClassifierParseFunc = nullptr;
231 
232  ic::ClassificationParams m_Config;
233 };
234 
239 public:
240  SegmentPostprocessor(int uid, const ic::SegmentationParams &params);
241  NvDsInferStatus allocateResource(const std::vector<int>& devIds) override;
242 
243 private:
244  NvDsInferStatus batchParse(
245  std::vector<NvDsInferLayerInfo>& outputLayers,
246  const std::vector<SharedBatchBuf> outputBufs, uint32_t batchSize,
247  SharedBatchArray& results) override;
248 
249  NvDsInferStatus fillSegmentationOutput(
250  const std::vector<NvDsInferLayerInfo>& outputLayers,
252 
253  bool parseSemanticSegmentationOutput(
254  std::vector<NvDsInferLayerInfo> const& outputLayersInfo,
255  NvDsInferNetworkInfo const& networkInfo,
256  float segmentationThreshold, unsigned int numClasses,
257  int* classificationMap, float*& classProbabilityMap);
258 
259 private:
260  float m_SegmentationThreshold = 0.0f;
262  m_CustomSemSegmentationParseFunc = nullptr;
263  unsigned int m_NumSegmentationClasses = 0;
264  ic::SegmentationParams m_Config;
265 };
266 
271 public:
272  OtherPostprocessor(int uid, const ic::OtherNetworkParams &params);
273 
274 private:
275  NvDsInferStatus batchParse(
276  std::vector<NvDsInferLayerInfo>& outputLayers,
277  const std::vector<SharedBatchBuf> outputBufs, uint32_t batchSize,
278  SharedBatchArray& results) override
279  {
280  return NVDSINFER_SUCCESS;
281  }
282 
283 private:
284  ic::OtherNetworkParams m_Config;
285 };
286 
291 public:
292  TrtIsClassifier(int uid, const ic::TritonClassifyParams& params);
293 
294 private:
295  NvDsInferStatus postHostImpl(
296  SharedBatchArray& inBuf, SharedBatchArray& outbuf,
297  SharedCuStream& mainStream) override;
298  NvDsInferStatus batchParse(
299  std::vector<NvDsInferLayerInfo>& outputLayers,
300  const std::vector<SharedBatchBuf> outputBufs, uint32_t batchSize,
301  SharedBatchArray& results) override
302  {
303  /* should never reach */
304  InferError(
305  "TrtIsClassifer(uid:%d) should not reach here, check error",
306  uniqueId());
308  }
309 
310 private:
311  ic::TritonClassifyParams m_Config;
312 };
313 
314 } // namespace nvdsinferserver
315 
316 #endif
nvdsinferserver
Copyright (c) 2021, NVIDIA CORPORATION.
Definition: infer_custom_process.h:28
nvdsinferserver::Postprocessor::setLabelPath
void setLabelPath(const std::string &path)
Definition: infer_postprocess.h:68
nvdsinferserver::SharedDllHandle
std::shared_ptr< DlLibHandle > SharedDllHandle
Definition: infer_common.h:111
nvdsinferserver::SegmentPostprocessor::SegmentPostprocessor
SegmentPostprocessor(int uid, const ic::SegmentationParams &params)
nvdsinferserver::Postprocessor::allocateResource
virtual NvDsInferStatus allocateResource(const std::vector< int > &devIds) override
nvdsinferserver::Postprocessor::Postprocessor
Postprocessor(InferPostprocessType type, int id)
Definition: infer_postprocess.h:60
nvdsinferserver::DetectPostprocessor::DetectPostprocessor
DetectPostprocessor(int uid, const ic::DetectionParams &params)
nvdsinferserver::InferPostprocessType
InferPostprocessType
Inference post processing types.
Definition: infer_datatypes.h:103
nvdsinferserver::BasePostprocessor
Base post-processor class.
Definition: infer_iprocess.h:162
nvdsinferserver::SharedSysMem
std::shared_ptr< SysMem > SharedSysMem
Definition: infer_common.h:88
infer_datatypes.h
Header file for the data types used in the inference processing.
nvdsinferserver::OtherPostprocessor
Post processor class for tensor output for custom post processing.
Definition: infer_postprocess.h:270
nvdsinferserver::Postprocessor::needInputCopy
bool needInputCopy() const
Definition: infer_postprocess.h:82
nvdsinferserver::TrtIsClassifier::TrtIsClassifier
TrtIsClassifier(int uid, const ic::TritonClassifyParams &params)
NvDsInferSegmentationOutput
Holds the information parsed from segmentation network output for one frame.
Definition: infer_post_datatypes.h:80
NVDSINFER_SUCCESS
@ NVDSINFER_SUCCESS
NvDsInferContext operation succeeded.
Definition: nvdsinfer.h:220
infer_batch_buffer.h
Header file of batch buffer related class declarations.
nvdsinferserver::Postprocessor::setDllHandle
void setDllHandle(const SharedDllHandle &dlHandle)
Definition: infer_postprocess.h:65
infer_ibackend.h
Inference processing backend interface header file.
nvdsinferserver::Postprocessor::m_CpuAllocator
TensorAllocator m_CpuAllocator
Definition: infer_postprocess.h:121
nvdsinferserver::Postprocessor::setInputCopy
void setInputCopy(bool enable)
Definition: infer_postprocess.h:73
nvdsinferserver::Postprocessor::postHostImpl
NvDsInferStatus postHostImpl(SharedBatchArray &inBuf, SharedBatchArray &outbuf, SharedCuStream &mainStream) override
nvdsinferserver::Postprocessor::m_OutputLayerCount
uint32_t m_OutputLayerCount
Definition: infer_postprocess.h:115
NvDsInferParseDetectionParams
Holds the detection parameters required for parsing objects.
Definition: nvdsinfer_custom_impl.h:191
nvdsinferserver::Postprocessor
A generic post processor class.
Definition: infer_postprocess.h:54
infer_common.h
Header file of the common declarations for the nvinferserver library.
nvdsinferserver::DetectPostprocessor
Post processor class for detection output.
Definition: infer_postprocess.h:128
nvdsinferserver::Postprocessor::m_NetworkInfo
NvDsInferNetworkInfo m_NetworkInfo
Network input information.
Definition: infer_postprocess.h:114
NVDSINFER_UNKNOWN_ERROR
@ NVDSINFER_UNKNOWN_ERROR
Unknown error was encountered.
Definition: nvdsinfer.h:243
nvdsinferserver::Postprocessor::setOutputLayerCount
void setOutputLayerCount(uint32_t num)
Definition: infer_postprocess.h:72
nvdsinferserver::Postprocessor::~Postprocessor
virtual ~Postprocessor()=default
nvdsinferserver::ClassifyPostprocessor
Post processor class for classification output.
Definition: infer_postprocess.h:208
nvdsinferserver::Postprocessor::getLabels
const std::vector< std::vector< std::string > > & getLabels() const
Definition: infer_postprocess.h:74
nvdsinferserver::DetectPostprocessor::allocateResource
NvDsInferStatus allocateResource(const std::vector< int > &devIds) override
nvdsinferserver::Postprocessor::EventAllocator
std::function< SharedCuEvent()> EventAllocator
Definition: infer_postprocess.h:57
nvdsinferserver::SegmentPostprocessor
Post processor class for segmentation output.
Definition: infer_postprocess.h:238
NvDsInferNetworkInfo
Holds information about the model network.
Definition: nvdsinfer.h:110
nvdsinfer_custom_impl.h
infer_cuda_utils.h
Header file declaring utility classes for CUDA memory management, CUDA streams and events.
nvdsinferserver::SegmentPostprocessor::allocateResource
NvDsInferStatus allocateResource(const std::vector< int > &devIds) override
nvdsinferserver::Postprocessor::m_EventAllocator
EventAllocator m_EventAllocator
Definition: infer_postprocess.h:122
NvDsInferSemSegmentationParseCustomFunc
bool(* NvDsInferSemSegmentationParseCustomFunc)(std::vector< NvDsInferLayerInfo > const &outputLayersInfo, NvDsInferNetworkInfo const &networkInfo, float segmentationThreshold, unsigned int numClasses, int *classificationMap, float *&classProbabilityMap)
Type definition for the custom semantic segmentation output parsing function.
Definition: nvdsinfer_custom_impl.h:319
nvdsinferserver::Postprocessor::parseLabelsFile
NvDsInferStatus parseLabelsFile(const std::string &path)
nvdsinferserver::SharedCuStream
std::shared_ptr< CudaStream > SharedCuStream
CUDA-based pointers.
Definition: infer_common.h:84
NvDsInferClassiferParseCustomFunc
bool(* NvDsInferClassiferParseCustomFunc)(std::vector< NvDsInferLayerInfo > const &outputLayersInfo, NvDsInferNetworkInfo const &networkInfo, float classifierThreshold, std::vector< NvDsInferAttribute > &attrList, std::string &descString)
Type definition for the custom classifier output parsing function.
Definition: nvdsinfer_custom_impl.h:282
nvdsinferserver::Postprocessor::m_Labels
std::vector< std::vector< std::string > > m_Labels
Holds the string labels for classes.
Definition: infer_postprocess.h:119
nvdsinferserver::Postprocessor::m_CopyInputToHostBuffers
bool m_CopyInputToHostBuffers
Definition: infer_postprocess.h:112
nvdsinferserver::OtherPostprocessor::OtherPostprocessor
OtherPostprocessor(int uid, const ic::OtherNetworkParams &params)
infer_post_datatypes.h
NvDsInferParseCustomFunc
bool(* NvDsInferParseCustomFunc)(std::vector< NvDsInferLayerInfo > const &outputLayersInfo, NvDsInferNetworkInfo const &networkInfo, NvDsInferParseDetectionParams const &detectionParams, std::vector< NvDsInferObjectDetectionInfo > &objectList)
Type definition for the custom bounding box parsing function.
Definition: nvdsinfer_custom_impl.h:222
nvdsinferserver::BasePostprocessor::uniqueId
int uniqueId() const
Definition: infer_iprocess.h:179
nvdsinferserver::TrtIsClassifier
Post processor class for Triton Classification option.
Definition: infer_postprocess.h:290
nvdsinferserver::ClassifyPostprocessor::ClassifyPostprocessor
ClassifyPostprocessor(int uid, const ic::ClassificationParams &params)
nvdsinferserver::Postprocessor::m_LabelPath
std::string m_LabelPath
Definition: infer_postprocess.h:116
nvdsinferserver::Postprocessor::TensorAllocator
std::function< SharedSysMem(const std::string &name, size_t bytes)> TensorAllocator
Definition: infer_postprocess.h:56
nvdsinferserver::Postprocessor::postCudaImpl
NvDsInferStatus postCudaImpl(SharedBatchArray &inBuf, SharedBatchArray &outbuf, SharedCuStream &mainStream) override
nvdsinferserver::Postprocessor::m_CustomLibHandle
SharedDllHandle m_CustomLibHandle
Custom library implementation.
Definition: infer_postprocess.h:111
nvdsinferserver::Postprocessor::setNetworkInfo
void setNetworkInfo(const NvDsInferNetworkInfo &info)
Definition: infer_postprocess.h:69
infer_iprocess.h
Preprocessing and postprocessing interface header file.
nvdsinferserver::SharedCuEvent
std::shared_ptr< CudaEvent > SharedCuEvent
Definition: infer_common.h:86
nvdsinferserver::Postprocessor::requestHostOutBufs
SharedBatchArray requestHostOutBufs(const SharedBatchArray &inBuf) override
InferError
#define InferError(fmt,...)
Definition: infer_defines.h:51
nvdsinferserver::Postprocessor::requestCudaOutBufs
SharedBatchArray requestCudaOutBufs(const SharedBatchArray &inBuf) override
nvdsinferserver::Postprocessor::setAllocator
void setAllocator(TensorAllocator cpuAlloc, EventAllocator event)
Definition: infer_postprocess.h:77
nvdsinferserver::DetectPostprocessor::~DetectPostprocessor
~DetectPostprocessor() override=default
nvdsinferserver::SharedBatchArray
std::shared_ptr< BaseBatchArray > SharedBatchArray
Definition: infer_common.h:75
NvDsInferStatus
NvDsInferStatus
Enum for the status codes returned by NvDsInferContext.
Definition: nvdsinfer.h:218
nvdsinferserver::ClassifyPostprocessor::allocateResource
NvDsInferStatus allocateResource(const std::vector< int > &devIds) override