NVIDIA DeepStream SDK API Reference

7.0 Release
infer_proto_utils.h File Reference


Namespaces

 nvdsinferserver
 Namespace for the DeepStream inference server (Gst-nvinferserver) implementation. This header declares helpers that translate protobuf configuration types into the runtime types used by nvdsinferserver.
 

Functions

InferDataType nvdsinferserver::dataTypeFromDsProto (ic::TensorDataType dt)
 
InferTensorOrder nvdsinferserver::tensorOrderFromDsProto (ic::TensorOrder o)
 
InferMediaFormat nvdsinferserver::mediaFormatFromDsProto (ic::MediaFormat f)
 
InferMemType nvdsinferserver::memTypeFromDsProto (ic::MemoryType t)
 
NvBufSurfTransform_Compute nvdsinferserver::computeHWFromDsProto (ic::FrameScalingHW h)
 
NvBufSurfTransform_Inter nvdsinferserver::scalingFilterFromDsProto (uint32_t filter)
 
bool nvdsinferserver::validateProtoConfig (ic::InferenceConfig &c, const std::string &path)
 
bool nvdsinferserver::compareModelRepo (const ic::TritonModelRepo &repoA, const ic::TritonModelRepo &repoB)
 
bool nvdsinferserver::hasTriton (const ic::BackendParams &params)
 
const ic::TritonParams & nvdsinferserver::getTritonParam (const ic::BackendParams &params)
 
ic::TritonParams * nvdsinferserver::mutableTriton (ic::BackendParams &params)
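
The helpers above convert values from the parsed protobuf configuration (the ic:: types) into nvdsinferserver runtime enums, and query the Triton-specific backend parameters. The following is a minimal usage sketch, assuming ic aliases the generated config proto namespace used in these signatures and that the configuration file is written in protobuf text format; the field accessors (preprocess(), backend(), network_format(), tensor_order(), model_name()) mirror the Gst-nvinferserver config schema and are illustrative assumptions, not declarations from this header.

// Minimal sketch: parse a text-format config, validate it, and translate
// selected proto fields into nvdsinferserver runtime types.
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>

#include <google/protobuf/text_format.h>

#include "infer_proto_utils.h"

using namespace nvdsinferserver;

int main(int argc, char** argv)
{
    if (argc < 2) {
        std::cerr << "usage: " << argv[0] << " <config.txt>" << std::endl;
        return 1;
    }
    const std::string path = argv[1];

    // Read the text-format config file into memory.
    std::ifstream in(path);
    std::stringstream buf;
    buf << in.rdbuf();

    // Parse it into an InferenceConfig message.
    ic::InferenceConfig config;
    if (!google::protobuf::TextFormat::ParseFromString(buf.str(), &config)) {
        std::cerr << "failed to parse " << path << std::endl;
        return 1;
    }

    // Validate the parsed config; validateProtoConfig() takes a mutable
    // reference, so it may also fill in defaults.
    if (!validateProtoConfig(config, path)) {
        std::cerr << "invalid inference config: " << path << std::endl;
        return 1;
    }

    // Translate proto enums into runtime types (field names assumed from
    // the Gst-nvinferserver config schema).
    InferMediaFormat fmt =
        mediaFormatFromDsProto(config.preprocess().network_format());
    InferTensorOrder order =
        tensorOrderFromDsProto(config.preprocess().tensor_order());
    (void)fmt;
    (void)order;

    // Inspect Triton-specific backend parameters, if present.
    if (hasTriton(config.backend())) {
        const ic::TritonParams& triton = getTritonParam(config.backend());
        std::cout << "Triton model: " << triton.model_name() << std::endl;
    }
    return 0;
}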