NVIDIA DeepStream SDK API Reference

6.4 Release
infer_simple_context.h
Go to the documentation of this file.
1 
12 #ifndef __NVDSINFERSERVER_SIMPLE_CONTEXT_H__
13 #define __NVDSINFERSERVER_SIMPLE_CONTEXT_H__
14 
15 #include "infer_base_backend.h"
16 #include "infer_base_context.h"
17 #include "infer_cuda_utils.h"
18 #include "infer_datatypes.h"
19 #include "infer_utils.h"
20 
21 namespace nvdsinferserver {
22 
23 
25 public:
27  ~InferSimpleContext() override;
28 
29 protected:
31  const ic::BackendParams& params, int maxBatchSize,
34  const ic::InferenceConfig& config, BaseBackend& backend) override;
35  NvDsInferStatus deinit() override;
36 
37 private:
 38  // should not be called
39  NvDsInferStatus createPreprocessor(
40  const ic::PreProcessParams& params,
41  std::vector<UniqPreprocessor>& processors) override;
42  NvDsInferStatus createPostprocessor(
43  const ic::PostProcessParams& params,
44  UniqPostprocessor& processor) override;
45  NvDsInferStatus allocateResource(
46  const ic::InferenceConfig& config) override;
47 
48  void getNetworkInputInfo(NvDsInferNetworkInfo& networkInfo) override
49  {
50  networkInfo = m_NetworkImageInfo;
51  }
52  void notifyError(NvDsInferStatus status) override {}
53  SharedCuStream& mainStream() override { return m_Stream; }
54 
55 private:
56  // Optional, not needed
57  NvDsInferNetworkInfo m_NetworkImageInfo{0, 0, 0};
58 
59  SharedCuStream m_Stream{nullptr};
60 };
61 
62 } // namespace nvdsinferserver
63 
64 #endif //__NVDSINFERSERVER_SIMPLE_CONTEXT_H__
nvdsinferserver
Copyright (c) 2021, NVIDIA CORPORATION.
Definition: infer_custom_process.h:28
nvdsinferserver::InferSimpleContext::InferSimpleContext
InferSimpleContext()
nvdsinferserver::InferBaseContext::maxBatchSize
int maxBatchSize() const
Definition: infer_base_context.h:99
nvdsinferserver::InferSimpleContext::fixateInferenceInfo
NvDsInferStatus fixateInferenceInfo(const ic::InferenceConfig &config, BaseBackend &backend) override
infer_datatypes.h
Header file for the data types used in the inference processing.
nvdsinferserver::InferSimpleContext::~InferSimpleContext
~InferSimpleContext() override
nvdsinferserver::InferSimpleContext::deinit
NvDsInferStatus deinit() override
nvdsinferserver::InferSimpleContext::createNNBackend
NvDsInferStatus createNNBackend(const ic::BackendParams &params, int maxBatchSize, UniqBackend &backend)
infer_utils.h
Header file containing utility functions and classes used by the nvinferserver low level library.
nvdsinferserver::UniqBackend
std::unique_ptr< BaseBackend > UniqBackend
Definition: infer_base_backend.h:212
NvDsInferNetworkInfo
Holds information about the model network.
Definition: nvdsinfer.h:110
infer_cuda_utils.h
Header file declaring utility classes for CUDA memory management, CUDA streams and events.
nvdsinferserver::SharedCuStream
std::shared_ptr< CudaStream > SharedCuStream
CUDA-based pointers.
Definition: infer_common.h:84
nvdsinferserver::InferBaseContext::backend
BaseBackend * backend()
Definition: infer_base_context.h:101
nvdsinferserver::InferBaseContext
The base class for handling the inference context.
Definition: infer_base_context.h:52
nvdsinferserver::InferBaseContext::config
const ic::InferenceConfig & config() const
Definition: infer_base_context.h:98
nvdsinferserver::InferSimpleContext
Definition: infer_simple_context.h:24
nvdsinferserver::UniqPostprocessor
std::unique_ptr< BasePostprocessor > UniqPostprocessor
Processor interfaces.
Definition: infer_common.h:98
nvdsinferserver::BaseBackend
Base class of inference backend processing.
Definition: infer_base_backend.h:40
infer_base_backend.h
Header file for inference processing backend base class.
infer_base_context.h
Header file of the base class for inference context.
NvDsInferStatus
NvDsInferStatus
Enum for the status codes returned by NvDsInferContext.
Definition: nvdsinfer.h:218