DriveWorks SDK Reference

| 0.6.67 Release

DNN.h
Go to the documentation of this file.
1 // This code contains NVIDIA Confidential Information and is disclosed
3 // under the Mutual Non-Disclosure Agreement.
4 //
5 // Notice
6 // ALL NVIDIA DESIGN SPECIFICATIONS AND CODE ("MATERIALS") ARE PROVIDED "AS IS". NVIDIA MAKES
7 // NO REPRESENTATIONS, WARRANTIES, EXPRESSED, IMPLIED, STATUTORY, OR OTHERWISE WITH RESPECT TO
8 // THE MATERIALS, AND EXPRESSLY DISCLAIMS ANY IMPLIED WARRANTIES OF NONINFRINGEMENT,
9 // MERCHANTABILITY, OR FITNESS FOR A PARTICULAR PURPOSE.
10 //
11 // NVIDIA Corporation assumes no responsibility for the consequences of use of such
12 // information or for any infringement of patents or other rights of third parties that may
13 // result from its use. No license is granted by implication or otherwise under any patent
14 // or patent rights of NVIDIA Corporation. No third party distribution is allowed unless
15 // expressly authorized by NVIDIA. Details are subject to change without notice.
16 // This code supersedes and replaces all information previously supplied.
17 // NVIDIA Corporation products are not authorized for use as critical
18 // components in life support devices or systems without express written approval of
19 // NVIDIA Corporation.
20 //
21 // Copyright (c) 2016 NVIDIA Corporation. All rights reserved.
22 //
23 // NVIDIA Corporation and its licensors retain all intellectual property and proprietary
24 // rights in and to this software and related documentation and any modifications thereto.
25 // Any use, reproduction, disclosure or distribution of this software and related
26 // documentation without an express license agreement from NVIDIA Corporation is
27 // strictly prohibited.
28 //
30 
45 #ifndef DW_DNN_H__
46 #define DW_DNN_H__
47 
49 #include <dw/core/Config.h>
50 #include <dw/core/Status.h>
51 #include <dw/core/Context.h>
52 #include <dw/isp/Tonemap.h>
53 #include <host_defines.h>
54 #include <cuda_fp16.h>
55 #include <driver_types.h>
56 
57 #ifdef __cplusplus
58 extern "C" {
59 #endif
60 
63 typedef struct dwDNNObject *dwDNNHandle_t;
64 typedef struct dwDNNObject const *dwConstDNNHandle_t;
65 
68 typedef struct {
72 
109 dwStatus dwDNN_initializeTensorRTFromFile(dwDNNHandle_t *network, dwContextHandle_t context,
110  const char *modelFilename);
111 
132 dwStatus dwDNN_initializeTensorRTFromMemory(dwDNNHandle_t *network, dwContextHandle_t context,
133  const char *modelContent,
134  uint32_t modelContentSize);
135 
145 dwStatus dwDNN_reset(dwDNNHandle_t network);
146 
156 dwStatus dwDNN_release(dwDNNHandle_t *network);
157 
178 dwStatus dwDNN_inferSIO(float32_t *d_output, float32_t *d_input, dwDNNHandle_t network);
179 
180 
199 dwStatus dwDNN_infer(float32_t **d_output, float32_t **d_input, dwDNNHandle_t network);
200 
201 
214 dwStatus dwDNN_setCUDAStream(cudaStream_t stream, dwDNNHandle_t network);
215 
226 dwStatus dwDNN_getCUDAStream(cudaStream_t *stream, dwDNNHandle_t network);
227 
228 
241 dwStatus dwDNN_getInputSize(dwBlobSize *blobSize, uint16_t blobIndex, dwDNNHandle_t network);
242 
255 dwStatus dwDNN_getOutputSize(dwBlobSize *blobSize, uint16_t blobIndex, dwDNNHandle_t network);
256 
267 dwStatus dwDNN_getInputBlobCount(uint16_t *count, dwDNNHandle_t network);
268 
278 dwStatus dwDNN_getOutputBlobCount(uint16_t *count, dwDNNHandle_t network);
279 
292 dwStatus dwDNN_getInputIndex(uint16_t *blobIndex, const char *blobName, dwDNNHandle_t network);
293 
306 dwStatus dwDNN_getOutputIndex(uint16_t *blobIndex, const char *blobName, dwDNNHandle_t network);
307 
319 dwStatus dwDNN_getMetaData(dwDNNMetaData *metaData, dwDNNHandle_t network);
320 
321 
322 #ifdef __cplusplus
323 }
324 #endif
325 
326 #endif // DW_DNN_H__
float float32_t
Specifies POD types.
Definition: Types.h:77
DW_API_PUBLIC dwStatus dwDNN_getInputBlobCount(uint16_t *count, dwDNNHandle_t network)
Gets the input blob count.
DW_API_PUBLIC dwStatus dwDNN_getInputSize(dwBlobSize *blobSize, uint16_t blobIndex, dwDNNHandle_t network)
Gets the input blob size at blobIndex.
DW_API_PUBLIC dwStatus dwDNN_getCUDAStream(cudaStream_t *stream, dwDNNHandle_t network)
Gets the CUDA stream used by the given network for infer operations.
DW_API_PUBLIC dwStatus dwDNN_getOutputSize(dwBlobSize *blobSize, uint16_t blobIndex, dwDNNHandle_t network)
Gets the output blob size at blobIndex.
DW_API_PUBLIC dwStatus dwDNN_getOutputBlobCount(uint16_t *count, dwDNNHandle_t network)
Gets the output blob count.
DW_API_PUBLIC dwStatus dwDNN_release(dwDNNHandle_t *network)
Releases a given network.
NVIDIA DriveWorks API: Core Methods
DW_API_PUBLIC dwStatus dwDNN_infer(float32_t **d_output, float32_t **d_input, dwDNNHandle_t network)
Runs a forward pass from all input blobs to all output blobs.
Holds blob dimensions.
Definition: DNNTypes.h:57
NVIDIA DriveWorks API: Tonemap Types
DW_API_PUBLIC dwStatus dwDNN_reset(dwDNNHandle_t network)
Resets a given network.
DW_API_PUBLIC dwStatus dwDNN_setCUDAStream(cudaStream_t stream, dwDNNHandle_t network)
Sets the CUDA stream for infer operations.
dwStatus
Status definition.
Definition: Status.h:167
dwTonemapType
Definition: Tonemap.h:45
NVIDIA DriveWorks API: Data Conditioner Methods
DW_API_PUBLIC dwStatus dwDNN_getOutputIndex(uint16_t *blobIndex, const char *blobName, dwDNNHandle_t network)
Gets the index of an output blob with a given blob name.
struct dwDNNObject const * dwConstDNNHandle_t
Definition: DNN.h:64
struct dwDNNObject * dwDNNHandle_t
Handle representing the Deep Neural Network interface.
Definition: DNN.h:63
struct dwContextObject * dwContextHandle_t
Context handle.
Definition: Context.h:78
dwDataConditionerParams dataConditionerParams
DataConditioner parameters for running this network.
Definition: DNN.h:69
Specifies TensorRT model header.
Definition: DNN.h:68
DW_API_PUBLIC dwStatus dwDNN_getMetaData(dwDNNMetaData *metaData, dwDNNHandle_t network)
Returns the metadata for the associated network model.
dwTonemapType tonemapType
Definition: DNN.h:70
DW_API_PUBLIC dwStatus dwDNN_initializeTensorRTFromMemory(dwDNNHandle_t *network, dwContextHandle_t context, const char *modelContent, uint32_t modelContentSize)
Creates and initializes a TensorRT Network from memory.
#define DW_API_PUBLIC
Definition: Exports.h:76
DW_API_PUBLIC dwStatus dwDNN_inferSIO(float32_t *d_output, float32_t *d_input, dwDNNHandle_t network)
Runs a forward pass from the first input blob to the first output blob (a shortcut for a single-input, single-output network).
DW_API_PUBLIC dwStatus dwDNN_initializeTensorRTFromFile(dwDNNHandle_t *network, dwContextHandle_t context, const char *modelFilename)
Creates and initializes a TensorRT Network from file.
NVIDIA DriveWorks API: Core Status Methods
DW_API_PUBLIC dwStatus dwDNN_getInputIndex(uint16_t *blobIndex, const char *blobName, dwDNNHandle_t network)
Gets the index of an input blob with a given blob name.