|
|
NVIDIA DeepStream SDK API Reference
|
8.0 Release
|
Go to the documentation of this file.
19 #ifndef __NVDSINFER_TRTIS_UTILS_H__
20 #define __NVDSINFER_TRTIS_UTILS_H__
29 #include "infer_datatypes.h"
32 #include "model_config.pb.h"
33 #include "tritonserver.h"
40 #define CHECK_TRTIS_ERR_W_ACTION(trtisExpr, action, fmt, ...) \
42 UniqTritonT<TRITONSERVER_Error> errPtr( \
43 (trtisExpr), TRITONSERVER_ErrorDelete); \
46 "Triton: " fmt ", triton_err_str:%s, err_msg:%s", \
47 ##__VA_ARGS__, TRITONSERVER_ErrorCodeString(errPtr.get()), \
48 TRITONSERVER_ErrorMessage(errPtr.get())); \
57 #define RETURN_TRTIS_ERROR(trtisExpr, fmt, ...) \
58 CHECK_TRTIS_ERR_W_ACTION(trtisExpr, return NVDSINFER_TRITON_ERROR, fmt, ##__VA_ARGS__)
64 #define CONTINUE_TRTIS_ERROR(trtisExpr, fmt, ...) \
65 CHECK_TRTIS_ERR_W_ACTION(trtisExpr, , fmt, ##__VA_ARGS__)
67 namespace ni = inference;
129 template <
typename VecDims>
136 for (
const auto &v : shape) {
This is a header file for pre-processing CUDA kernels with normalization and mean subtraction required by nvdsinfer.
InferMemType MemTypeFromTriton(TRITONSERVER_MemoryType type)
Maps the TRITONSERVER_MemoryType to the InferMemType.
InferDataType
Datatype of the tensor buffer.
enum TRITONSERVER_memorytype_enum TRITONSERVER_MemoryType
TRITONSERVER_MemoryType.
InferDataType DataTypeFromTritonPb(ni::DataType type)
Maps the data type from Triton model configuration proto definition to InferDataType.
InferTensorOrder
The type of tensor order.
TRITONSERVER_DataType DataTypeToTriton(InferDataType type)
Maps the InferDataType to TRITONSERVER_DataType.
void normalizeDims(InferDims &dims)
InferDataType DataTypeFromTriton(TRITONSERVER_DataType type)
Maps the TRITONSERVER_DataType to the InferDataType.
Header file of the common declarations for the nvinferserver library.
InferDims DimsFromTriton(const VecDims &shape)
Converts the input shape vector from Triton to InferDims type.
bool hasWildcard(const nvinfer1::Dims &dims)
#define NVDSINFER_MAX_DIMS
enum TRITONSERVER_datatype_enum TRITONSERVER_DataType
TRITONSERVER_DataType.
InferMemType
The memory types of inference buffers.
Holds the information about the dimensions of a neural network layer.
Header file containing utility functions and classes used by the nvinferserver low level library.
InferTensorOrder TensorOrderFromTritonPb(ni::ModelInput::Format order)
Maps the tensor order from Triton model configuration proto definition to the InferTensorOrder type.
InferTensorOrder TensorOrderFromTritonMeta(const std::string &format)
Maps the tensor order from Triton metadata string to the InferTensorOrder type.
TRITONSERVER_MemoryType MemTypeToTriton(InferMemType type)
Maps the InferMemType to the TRITONSERVER_MemoryType.
const char * TritonControlModeToStr(int32_t mode)
Returns a string describing the TRITONSERVER_ModelControlMode: none, explicit or poll.
static std::string format(const char *fmt,...)