|
NVIDIA DeepStream SDK API Reference
|
6.4 Release
|
Go to the documentation of this file.
12 #ifndef __NVDSINFER_SERVER_PROTO_UTILS_H__
13 #define __NVDSINFER_SERVER_PROTO_UTILS_H__
20 #include <condition_variable>
29 #pragma GCC diagnostic push
31 #pragma GCC diagnostic ignored "-Wrestrict"
33 #include "nvdsinferserver_config.pb.h"
34 #include "nvdsinferserver_plugin.pb.h"
35 #pragma GCC diagnostic pop
37 namespace ic = nvdsinferserver::config;
52 const ic::TritonModelRepo& repoA,
const ic::TritonModelRepo& repoB);
57 return params.has_triton() || params.has_trt_is();
59 inline const ic::TritonParams&
62 if (params.has_triton()) {
63 return params.triton();
65 assert(params.has_trt_is());
66 return params.trt_is();
69 inline ic::TritonParams*
72 if (params.has_triton()) {
73 return params.mutable_triton();
75 assert(params.has_trt_is());
76 return params.mutable_trt_is();
Copyright (c) 2021, NVIDIA CORPORATION.
InferDataType
Datatype of the tensor buffer.
InferMediaFormat
Image formats.
bool validateProtoConfig(ic::InferenceConfig &c, const std::string &path)
Header file for the data types used in the inference processing.
InferTensorOrder
The type of tensor order.
InferDataType dataTypeFromDsProto(ic::TensorDataType dt)
InferMediaFormat mediaFormatFromDsProto(ic::MediaFormat f)
const ic::TritonParams & getTritonParam(const ic::BackendParams &params)
NvBufSurfTransform_Compute
Specifies compute devices used by NvBufSurfTransform.
NvBufSurfTransform_Compute computeHWFromDsProto(ic::FrameScalingHW h)
InferTensorOrder tensorOrderFromDsProto(ic::TensorOrder o)
NvBufSurfTransform_Inter scalingFilterFromDsProto(uint32_t filter)
InferMemType
The memory types of inference buffers.
bool compareModelRepo(const ic::TritonModelRepo &repoA, const ic::TritonModelRepo &repoB)
NvBufSurfTransform_Inter
Specifies video interpolation methods.
ic::TritonParams * mutableTriton(ic::BackendParams &params)
InferMemType memTypeFromDsProto(ic::MemoryType t)
bool hasTriton(const ic::BackendParams &params)