NVIDIA DeepStream SDK API Reference

7.0 Release
infer_utils.h
Go to the documentation of this file.
1 /*
2  * SPDX-FileCopyrightText: Copyright (c) 2019-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
3  * SPDX-License-Identifier: LicenseRef-NvidiaProprietary
4  *
5  * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
6  * property and proprietary rights in and to this material, related
7  * documentation and any modifications thereto. Any use, reproduction,
8  * disclosure or distribution of this material and related documentation
9  * without an express license agreement from NVIDIA CORPORATION or
10  * its affiliates is strictly prohibited.
11  */
12 
20 #ifndef __NVDSINFER_SERVER_INFER_UTILS_H__
21 #define __NVDSINFER_SERVER_INFER_UTILS_H__
22 
23 #include <infer_batch_buffer.h>
24 #include <infer_common.h>
25 #include <infer_datatypes.h>
26 
27 #include <algorithm>
28 #include <iomanip>
29 #include <iostream>
30 #include <limits>
31 #include <numeric>
32 
34 
39 void dsInferLogPrint__(NvDsInferLogLevel level, const char* fmt, ...);
40 
56 void dsInferLogVPrint__(NvDsInferLogLevel level, const char* fmt, va_list args);
57 
58 
/**
 * @brief Returns a printable C string: the input itself, or "" when the
 * input pointer is null.
 */
inline const char* safeStr(const char* str) {
    return str ? str : "";
}
67 
/**
 * @brief Returns the C string of a std::string; never null.
 */
inline const char* safeStr(const std::string& str) {
    return str.c_str();
}
/**
 * @brief Checks that the C string is null or has zero length.
 */
inline bool string_empty(const char* str) {
    if (str == nullptr) {
        return true;
    }
    return *str == '\0';
}
80 
/**
 * @brief Checks that the file exists, using access(F_OK).
 *
 * @param path Non-null filesystem path.
 */
inline bool file_accessible(const char* path) {
    assert(path);
    return access(path, F_OK) == 0;
}
90 
/**
 * @brief Checks that the file exists; an empty path is never accessible.
 */
inline bool file_accessible(const std::string& path) {
    if (path.empty()) {
        return false;
    }
    return file_accessible(path.c_str());
}
/**
 * @brief Checks if the input batch size is zero (non-batched tensor).
 *
 * NOTE(review): the function signature line was dropped in this copy of
 * the file; restored as `isNonBatch(T b)` per the generated documentation
 * index.
 */
template <typename T>
inline bool
isNonBatch(T b)
{
    return b == 0;
}
105 
111 std::string dims2Str(const InferDims& d);
112 std::string batchDims2Str(const InferBatchDims& d);
113 std::string dataType2Str(const InferDataType type);
114 std::string dataType2GrpcStr(const InferDataType type);
115 InferDataType grpcStr2DataType(const std::string &type);
117  const InferDims& d, InferTensorOrder order);
118 std::string tensorOrder2Str(InferTensorOrder order);
125 bool fEqual(float a, float b);
126 
/**
 * @brief Helper class for dynamic loading of custom library.
 */
class DlLibHandle {
public:
    /**
     * @brief Open the library at the given path with dlopen().
     *
     * @param path Library file path.
     * @param mode dlopen() flags; defaults to RTLD_LAZY (lazy symbol
     *             resolution).
     */
    DlLibHandle(const std::string& path, int mode = RTLD_LAZY);

    /**
     * @brief Destructor. Close the dynamically loaded library.
     */
    ~DlLibHandle();

    /**
     * @brief Check that the library handle is valid (dlopen() succeeded).
     */
    bool isValid() const { return m_LibHandle; }

    /**
     * @brief Get the filename of the library.
     */
    const std::string& getPath() const { return m_LibPath; }

    /**
     * @brief Get the function pointer from the library for given function
     * name.
     *
     * @tparam FuncPtr Function pointer type the symbol is cast to.
     * @param  func    Non-empty symbol name to look up.
     * @return Symbol address cast to FuncPtr; nullptr when the library
     *         handle is invalid or dlsym() does not find the symbol.
     */
    template <typename FuncPtr>
    FuncPtr symbol(const char* func) {
        assert(!string_empty(func));
        if (!m_LibHandle)
            return nullptr;
        InferDebug("lib: %s dlsym :%s", safeStr(m_LibPath), safeStr(func));
        return (FuncPtr)dlsym(m_LibHandle, func);
    }

    /**
     * @brief Overload of symbol() taking the function name as std::string.
     */
    template <typename FuncPtr>
    FuncPtr symbol(const std::string& func) {
        return symbol<FuncPtr>(func.c_str());
    }

private:
    /**
     * @brief Handle for the library returned by dlopen().
     */
    void* m_LibHandle{nullptr};
    /**
     * @brief Filename of the dynamically loaded library.
     */
    const std::string m_LibPath;
};
184 
/**
 * @brief Exception thrown to interrupt a consumer blocked on a
 * GuardQueue; carries the wakeup reason as its message.
 */
class WakeupException : public std::exception {
public:
    WakeupException(const std::string& s) : m_Message(s) {}
    const char* what() const noexcept override { return m_Message.c_str(); }

private:
    std::string m_Message;
};
195 
202 template <typename Container>
203 class GuardQueue {
204 public:
205  typedef typename Container::value_type T;
209  void push(T data) {
210  std::unique_lock<std::mutex> lock(m_Mutex);
211  m_Queue.emplace_back(std::move(data));
212  m_Cond.notify_one();
213  }
220  T pop() {
221  std::unique_lock<std::mutex> lock(m_Mutex);
222  m_Cond.wait(
223  lock, [this]() { return m_WakeupOnce || !m_Queue.empty(); });
224  if (m_WakeupOnce) {
225  m_WakeupOnce = false;
226  InferDebug("GuardQueue pop end on wakeup signal");
227  throw WakeupException("GuardQueue stopped");
228  }
229  assert(!m_Queue.empty());
230  T ret = std::move(*m_Queue.begin());
231  m_Queue.erase(m_Queue.begin());
232  return ret;
233  }
237  void wakeupOnce() {
238  InferDebug("GuardQueue trigger wakeup once");
239  std::unique_lock<std::mutex> lock(m_Mutex);
240  m_WakeupOnce = true;
241  m_Cond.notify_all();
242  }
246  void clear() {
247  InferDebug("GuardQueue clear");
248  std::unique_lock<std::mutex> lock(m_Mutex);
249  m_Queue.clear();
250  m_WakeupOnce = false;
251  }
255  int size() {
256  std::unique_lock<std::mutex> lock(m_Mutex);
257  return m_Queue.size();
258  }
259 
260 private:
264  std::mutex m_Mutex;
268  std::condition_variable m_Cond;
272  Container m_Queue;
276  bool m_WakeupOnce = false;
277 };
278 
/**
 * @brief Template class for running the specified function on the queue
 * items in a separate thread.
 */
template <typename Container>
class QueueThread {
public:
    using Item = typename Container::value_type;
    using RunFunc = std::function<bool(Item)>;

    /**
     * @brief Create a new thread that runs the specified function over
     * the queued items in a loop.
     *
     * The promise/future pair makes the constructor wait until the new
     * thread has actually started running before returning.
     *
     * @param runFunc Function invoked per item; returning false stops the
     *                loop.
     * @param name    Thread name, also applied as the system thread name.
     */
    QueueThread(RunFunc runFunc, const std::string& name) : m_Run(runFunc) {
        std::promise<void> p;
        std::future<void> f = p.get_future();
        InferDebug("QueueThread starting new thread");
        m_Thread = std::thread([&p, this]() {
            p.set_value();
            this->threadLoop();
        });
        setThreadName(name);
        f.wait();
    }

    /**
     * @brief Set the internal (m_Name) name of the thread and the system
     * name using pthread_setname_np().
     *
     * @param name Non-empty thread name; the system name is truncated to
     *             15 characters plus the null terminator.
     */
    void setThreadName(const std::string &name) {
        assert(!name.empty());
        m_Name = name;
        if (m_Thread.joinable()) {
            // Linux limits thread names to 16 bytes including '\0'.
            const int kMakLen = 16;
            char cName[kMakLen];
            strncpy(cName, name.c_str(), kMakLen);
            // strncpy() does not null-terminate when the name is too long;
            // force termination at the last byte.
            cName[kMakLen - 1] = 0;
            if (pthread_setname_np(m_Thread.native_handle(), cName) != 0) {
                InferError("set thread name: %s failed", safeStr(name));
                return;
            }
            InferDebug("QueueThread set new thread name:%s", cName);
        }
    }

    /**
     * @brief Destructor. Stops and joins the worker thread.
     */
    ~QueueThread() { join(); }

    /**
     * @brief Wake the worker out of a blocking pop, join it, then drop
     * any items still pending in the queue.
     */
    void join() {
        InferDebug("QueueThread: %s join", safeStr(m_Name));
        if (m_Thread.joinable()) {
            m_Queue.wakeupOnce();
            m_Thread.join();
        }
        m_Queue.clear();
    }

    /**
     * @brief Add an item to the queue for processing.
     * @return true always.
     */
    bool queueItem(Item item) {
        m_Queue.push(std::move(item));
        return true;
    }

private:
    /**
     * @brief Worker loop: pop and process items until the run function
     * returns false or the queue signals wakeup (WakeupException).
     * Any other exception is logged and the loop continues.
     */
    void threadLoop() {
        while (true) {
            try {
                Item item = m_Queue.pop();
                if (!m_Run(std::move(item))) {
                    InferDebug("QueueThread:%s return and stop", safeStr(m_Name));
                    return;
                }
            }
            catch (const WakeupException& e) {
                InferDebug("QueueThread:%s stopped", safeStr(m_Name));
                return;
            }
            catch (...) { // unexpected
                InferError(
                    "QueueThread:%s internal unexpected error, may cause stop",
                    safeStr(m_Name));
                // Usually can move on to next, but need developer to check
                continue;
            }
        }
    }

private:
    /** @brief Worker thread handle. */
    std::thread m_Thread;
    /** @brief Thread name used in logs and as the system thread name. */
    std::string m_Name;
    /** @brief Function executed for each queued item. */
    RunFunc m_Run;
    /** @brief Thread-safe queue of pending items. */
    GuardQueue<Container> m_Queue;
};
399 
404 template <class UniPtr>
405 class BufferPool : public std::enable_shared_from_this<BufferPool<UniPtr> > {
406 public:
407  using ItemType = typename UniPtr::element_type;
408  using RecylePtr = std::unique_ptr<ItemType, std::function<void(ItemType*)>>;
412  BufferPool(const std::string &name)
413  : m_Name(name) {}
417  virtual ~BufferPool()
418  {
419  InferDebug(
420  "BufferPool: %s deleted with free buffer size:%d", safeStr(m_Name),
421  m_FreeBuffers.size());
422  }
428  bool setBuffer(UniPtr buf) {
429  assert(buf);
430  buf->reuse();
431  m_FreeBuffers.push(std::move(buf));
432  InferDebug("BufferPool: %s set buf to free, available size:%d",
433  safeStr(m_Name), m_FreeBuffers.size());
434  return true;
435  }
439  int size() { return m_FreeBuffers.size(); }
440 
451  try {
452  UniPtr p = m_FreeBuffers.pop();
453  auto deleter = p.get_deleter();
454  std::weak_ptr<BufferPool<UniPtr>> poolPtr =
455  this->shared_from_this();
456  RecylePtr recBuf(
457  p.release(), [poolPtr, d = deleter](ItemType* buf) {
458  assert(buf);
459  UniPtr data(buf, d);
460  auto pool = poolPtr.lock();
461  if (pool) {
462  InferDebug("BufferPool: %s release a buffer", safeStr(pool->m_Name));
463  pool->setBuffer(std::move(data));
464  } else {
465  InferError("BufferPool is deleted, check internal error.");
466  assert(false);
467  }
468  });
469  InferDebug("BufferPool: %s acquired buffer, available free buffer left:%d",
470  safeStr(m_Name), m_FreeBuffers.size());
471  return recBuf;
472  } catch (...) {
473  InferDebug(
474  "BufferPool: %s acquired buffer failed, queue maybe waked up.",
475  safeStr(m_Name));
476  assert(false);
477  return nullptr;
478  }
479  }
480 
481 private:
485  GuardQueue<std::deque<UniPtr>> m_FreeBuffers;
489  const std::string m_Name;
490 };
491 
/** @brief Shared-ownership handle to a BufferPool of the given buffer type. */
template <class UniPtr>
using SharedBufPool = std::shared_ptr<BufferPool<UniPtr>>;
494 
500 template<typename Key, typename UniqBuffer>
502 public:
505 public:
509  MapBufferPool(const std::string &name): m_Name(name) {}
513  virtual ~MapBufferPool()
514  {
515  InferDebug(
516  "MapBufferPool: %s deleted with buffer pool size:%d",
517  safeStr(m_Name), (int)m_MapPool.size());
518  }
519 
522  MapBufferPool(const MapBufferPool& other) = delete;
523  MapBufferPool& operator=(const MapBufferPool& other) = delete;
536  bool setBuffer(const Key &key, UniqBuffer buf) {
537  std::unique_lock<std::shared_timed_mutex> uniqLock(m_MapPoolMutex);
538  assert(buf);
539  SharedPool &pool = m_MapPool[key];
540  if(!pool) {
541  uint32_t id = m_MapPool.size() - 1;
542  std::string poolName = m_Name + std::to_string(id);
543  pool = std::make_shared<BufferPool<UniqBuffer>>(poolName);
544  assert(pool);
545  InferDebug("MapBufferPool: %s create new pool id:%d",
546  safeStr(m_Name), id);
547  }
548  if(!pool) {
549  return false;
550  }
551  return pool->setBuffer(std::move(buf));
552  }
558  uint32_t getPoolSize(const Key &key) {
559  SharedPool pool = findPool(key);
560  if (!pool)
561  return 0;
562  return pool->size();
563  }
564 
571  RecylePtr acquireBuffer(const Key &key) {
572  SharedPool pool = findPool(key);
573  assert(pool);
574  if (!pool) {
575  InferWarning(
576  "MapBufferPool: %s acquire buffer failed, no key found",
577  safeStr(m_Name));
578  return nullptr;
579  }
580  InferDebug("MapBufferPool: %s acquire buffer", safeStr(m_Name));
581  return pool->acquireBuffer();
582  }
586  void clear() {
587  InferDebug("MapBufferPool: %s clear all buffers", safeStr(m_Name));
588  std::unique_lock<std::shared_timed_mutex> uniqLock(m_MapPoolMutex);
589  m_MapPool.clear();
590  }
591 
592 private:
596  SharedPool findPool(const Key& key) {
597  std::shared_lock<std::shared_timed_mutex> sharedLock(m_MapPoolMutex);
598  auto iter = m_MapPool.find(key);
599  if (iter != m_MapPool.end()) {
600  assert(iter->second);
601  return iter->second;
602  }
603  return nullptr;
604  }
605 
606 private:
610  std::map<Key, SharedPool> m_MapPool;
614  std::shared_timed_mutex m_MapPoolMutex;
618  const std::string m_Name;
619 };
620 
624 inline uint32_t getElementSize(InferDataType t) {
625  switch (t) {
626  case InferDataType::kInt32:
627  case InferDataType::kUint32:
628  case InferDataType::kFp32:
629  return 4;
630  case InferDataType::kFp16:
631  case InferDataType::kInt16:
632  case InferDataType::kUint16:
633  return 2;
634  case InferDataType::kInt8:
635  case InferDataType::kUint8:
636  case InferDataType::kBool:
637  return 1;
638  case InferDataType::kString:
639  return 0;
640  case InferDataType::kFp64:
641  case InferDataType::kInt64:
642  case InferDataType::kUint64:
643  return 8;
644  default:
645  InferError("Failed to get element size on Unknown datatype:%d",
646  static_cast<int>(t));
647  return 0;
648  }
649 }
650 
655 inline bool
656 hasWildcard(const InferDims& dims)
657 {
658  return std::any_of(
659  dims.d, dims.d + dims.numDims,
660  [](int d) { return d <= INFER_WILDCARD_DIM_VALUE; });
661 }
662 
669 inline size_t
670 dimsSize(const InferDims& dims)
671 {
672  if (hasWildcard(dims) || !dims.numDims) {
673  return 0;
674  } else {
675  return std::accumulate(
676  dims.d, dims.d + dims.numDims, 1,
677  [](int s, int i) { return s * i; });
678  }
679 }
680 
/**
 * @brief Recalculates the total number of elements for the dimensions.
 *
 * Updates dims.numElements from dimsSize(); it becomes 0 when the dims
 * contain wildcards (dynamic sizes) or have no dimensions.
 */
inline void
normalizeDims(InferDims& dims)
{
    dims.numElements = dimsSize(dims);
}
690 
695 bool operator<=(const InferDims& a, const InferDims& b);
696 bool operator>(const InferDims& a, const InferDims& b);
697 bool operator==(const InferDims& a, const InferDims& b);
698 bool operator!=(const InferDims& a, const InferDims& b);
701 struct LayerDescription;
702 
707 NvDsInferLayerInfo toCapi(const LayerDescription& desc, void* bufPtr);
708 
713 NvDsInferDims toCapi(const InferDims &dims);
714 
720  const InferBufferDescription& desc, void* buf = nullptr);
721 
727 
742 bool intersectDims(
743  const InferDims& a, const InferDims& b, InferDims& c);
744 
750 bool isPrivateTensor(const std::string &tensorName);
751 
/**
 * Helper functions for parsing the configuration file and handling
 * filesystem paths.
 */
std::string joinPath(const std::string& a, const std::string& b);
std::string dirName(const std::string& path);
bool isAbsolutePath(const std::string& path);
bool realPath(const std::string &inPath, std::string &absPath);

/**
 * @brief Check if the memory type uses CPU memory.
 */
bool isCpuMem(InferMemType type);

/**
 * @brief Returns a string object corresponding to the InferMemType name.
 */
std::string memType2Str(InferMemType type);

/**
 * @brief Extend the dimensions to include batch size.
 */
InferDims fullDims(int batchSize, const InferDims& in);

/**
 * @brief Separates batch size from given dimensions.
 */
bool debatchFullDims(
    const InferDims& full, InferDims& debatched, uint32_t& batch);

/**
 * @brief Check that the two dimensions are equal ignoring single element
 * values.
 */
bool squeezeMatch(const InferDims& a, const InferDims& b);
800 
814  const SharedBatchBuf& in, uint32_t batch, const InferDims& dims,
815  bool reCalcBytes = false);
816 
827  bool reCalcBytes = false);
828 
844  const SharedBatchBuf& in, const SharedBatchBuf& out,
845  const SharedCuStream& stream);
846 
847 } // namespace nvdsinferserver
848 
849 extern "C" {
850 
855 
870  const std::string& configStr, const std::string& path, std::string& updated);
871 }
872 
873 #endif
nvdsinferserver
This is a header file for pre-processing cuda kernels with normalization and mean subtraction require...
Definition: infer_custom_process.h:24
nvdsinferserver::InferDataType
InferDataType
Datatype of the tensor buffer.
Definition: infer_datatypes.h:83
INFER_EXPORT_API::isNonBatch
bool isNonBatch(T b)
Checks if the input batch size is zero.
Definition: infer_utils.h:101
INFER_EXPORT_API::GuardQueue::pop
T pop()
Pop an item from the queue.
Definition: infer_utils.h:220
INFER_EXPORT_API::intersectDims
bool intersectDims(const InferDims &a, const InferDims &b, InferDims &c)
Get the intersection of the two input dimensions.
INFER_EXPORT_API::operator<=
bool operator<=(const InferDims &a, const InferDims &b)
Comparison operators for the InferDims type.
INFER_EXPORT_API::grpcStr2DataType
InferDataType grpcStr2DataType(const std::string &type)
INFER_EXPORT_API::debatchFullDims
bool debatchFullDims(const InferDims &full, InferDims &debatched, uint32_t &batch)
Separates batch size from given dimensions.
INFER_EXPORT_API::operator>
bool operator>(const InferDims &a, const InferDims &b)
nvdsinferserver::SharedBatchBuf
std::shared_ptr< BaseBatchBuffer > SharedBatchBuf
Common buffer interfaces (internal).
Definition: infer_common.h:71
INFER_EXPORT_API::GuardQueue
Template class for creating a thread safe queue for the given container class.
Definition: infer_utils.h:203
validateInferConfigStr
INFER_EXPORT_API bool validateInferConfigStr(const std::string &configStr, const std::string &path, std::string &updated)
Validates the provided nvinferserver configuration string.
INFER_EXPORT_API::dims2Str
std::string dims2Str(const InferDims &d)
Helper functions to convert the various data types to string values for debug, log information.
INFER_EXPORT_API::realPath
bool realPath(const std::string &inPath, std::string &absPath)
INFER_EXPORT_API::GuardQueue::T
Container::value_type T
Definition: infer_utils.h:205
INFER_EXPORT_API::dims2ImageInfo
NvDsInferNetworkInfo dims2ImageInfo(const InferDims &d, InferTensorOrder order)
INFER_EXPORT_API::safeStr
const char * safeStr(const std::string &str)
Definition: infer_utils.h:68
INFER_EXPORT_API::MapBufferPool::MapBufferPool
MapBufferPool(const std::string &name)
Construct the buffer pool map with a name.
Definition: infer_utils.h:509
INFER_EXPORT_API::toCapi
NvDsInferDims toCapi(const InferDims &dims)
Convert the InferDims to NvDsInferDims of the library interface.
infer_datatypes.h
Header file for the data types used in the inference processing.
INFER_EXPORT_API::QueueThread::Item
typename Container::value_type Item
Definition: infer_utils.h:289
INFER_EXPORT_API::getElementSize
uint32_t getElementSize(InferDataType t)
Get the size of the element from the data type.
Definition: infer_utils.h:624
INFER_EXPORT_API::dataType2GrpcStr
std::string dataType2GrpcStr(const InferDataType type)
nvdsinferserver::InferTensorOrder
InferTensorOrder
The type of tensor order.
Definition: infer_datatypes.h:41
INFER_EXPORT_API::BufferPool::setBuffer
bool setBuffer(UniPtr buf)
Add a buffer to the pool.
Definition: infer_utils.h:428
NvDsInferDims
Holds the dimensions of a layer.
Definition: nvdsinfer.h:46
INFER_EXPORT_API::joinPath
std::string joinPath(const std::string &a, const std::string &b)
Helper functions for parsing the configuration file.
INFER_EXPORT_API::toCapiLayerInfo
NvDsInferLayerInfo toCapiLayerInfo(const InferBufferDescription &desc, void *buf=nullptr)
Generate NvDsInferLayerInfo of the interface from the buffer description and buffer pointer.
INFER_EXPORT_API::DlLibHandle::symbol
FuncPtr symbol(const char *func)
Definition: infer_utils.h:160
INFER_EXPORT_API::WakeupException::what
const char * what() const noexcept override
Definition: infer_utils.h:193
infer_batch_buffer.h
Header file of batch buffer related class declarations.
INFER_EXPORT_API::BufferPool::~BufferPool
virtual ~BufferPool()
Destructor.
Definition: infer_utils.h:417
INFER_EXPORT_API::dirName
std::string dirName(const std::string &path)
INFER_EXPORT_API::DlLibHandle
Helper class for dynamic loading of custom library.
Definition: infer_utils.h:130
INFER_EXPORT_API::SharedBufPool
std::shared_ptr< BufferPool< UniPtr > > SharedBufPool
Definition: infer_utils.h:493
INFER_EXPORT_API::GuardQueue::wakeupOnce
void wakeupOnce()
Send the wakeup trigger to the queue thread.
Definition: infer_utils.h:237
INFER_EXPORT_API::dimsSize
size_t dimsSize(const InferDims &dims)
Calculate the total number of elements for the given dimensions.
Definition: infer_utils.h:670
INFER_EXPORT_API::DlLibHandle::isValid
bool isValid() const
Definition: infer_utils.h:147
INFER_EXPORT_API::MapBufferPool
Template class for a map of buffer pools.
Definition: infer_utils.h:501
INFER_EXPORT_API::BufferPool::RecylePtr
std::unique_ptr< ItemType, std::function< void(ItemType *)> > RecylePtr
Definition: infer_utils.h:408
INFER_EXPORT_API::GuardQueue::push
void push(T data)
Push an item to the queue.
Definition: infer_utils.h:209
INFER_EXPORT_API::DlLibHandle::symbol
FuncPtr symbol(const std::string &func)
Definition: infer_utils.h:169
NvDsInferLogLevel
NvDsInferLogLevel
Enum for the log levels of NvDsInferContext.
Definition: nvdsinfer.h:249
INFER_EXPORT_API::squeezeMatch
bool squeezeMatch(const InferDims &a, const InferDims &b)
Check that the two dimensions are equal ignoring single element values.
NvDsInferDataType
NvDsInferDataType
Specifies the data type of a layer.
Definition: nvdsinfer.h:72
INFER_EXPORT_API::dsInferLogVPrint__
void dsInferLogVPrint__(NvDsInferLogLevel level, const char *fmt, va_list args)
Helper function to print the nvinferserver logs.
INFER_EXPORT_API::MapBufferPool::clear
void clear()
Remove all pools from the map.
Definition: infer_utils.h:586
INFER_EXPORT_API::MapBufferPool::getPoolSize
uint32_t getPoolSize(const Key &key)
Get the size of a pool from the map.
Definition: infer_utils.h:558
infer_common.h
Header file of the common declarations for the nvinferserver library.
INFER_EXPORT_API::BufferPool
Template class for buffer pool of the specified buffer type.
Definition: infer_utils.h:405
INFER_EXPORT_API::QueueThread::QueueThread
QueueThread(RunFunc runFunc, const std::string &name)
Create a new thread that runs the specified function over the queued items in a loop.
Definition: infer_utils.h:299
dsInferLogPrint__
void dsInferLogPrint__(NvDsInferLogLevel level, const char *fmt,...)
Print the nvinferserver log messages as per the configured log level.
Definition: infer_options.h:27
INFER_EXPORT_API::batchDims2Str
std::string batchDims2Str(const InferBatchDims &d)
INFER_EXPORT_API::QueueThread::queueItem
bool queueItem(Item item)
Add an item to the queue for processing.
Definition: infer_utils.h:345
INFER_EXPORT_API::QueueThread
Template class for running the specified function on the queue items in a separate thread.
Definition: infer_utils.h:287
NvDsInferNetworkInfo
Holds information about the model network.
Definition: nvdsinfer.h:110
InferDebug
#define InferDebug(fmt,...)
Definition: infer_defines.h:64
INFER_EXPORT_API::file_accessible
bool file_accessible(const std::string &path)
Definition: infer_utils.h:91
NvDsInferLayerInfo
Holds information about one layer in the model.
Definition: nvdsinfer.h:87
INFER_EXPORT_API::GuardQueue::size
int size()
Current size of the queue.
Definition: infer_utils.h:255
INFER_EXPORT_API::memType2Str
std::string memType2Str(InferMemType type)
Returns a string object corresponding to the InferMemType name.
INFER_EXPORT_API::QueueThread::~QueueThread
~QueueThread()
Destructor.
Definition: infer_utils.h:333
INFER_EXPORT_API::toCapiDataType
NvDsInferDataType toCapiDataType(InferDataType dt)
Convert the InferDataType to NvDsInferDataType of the library interface.
INFER_EXPORT_API::BufferPool::BufferPool
BufferPool(const std::string &name)
Constructor.
Definition: infer_utils.h:412
INFER_EXPORT_API::isAbsolutePath
bool isAbsolutePath(const std::string &path)
INFER_EXPORT_API::WakeupException
Wrapper class for handling exception.
Definition: infer_utils.h:188
INFER_EXPORT_API::QueueThread::join
void join()
Definition: infer_utils.h:334
INFER_EXPORT_API::MapBufferPool::RecylePtr
typename BufferPool< UniqBuffer >::RecylePtr RecylePtr
Definition: infer_utils.h:504
InferWarning
#define InferWarning(fmt,...)
Definition: infer_defines.h:52
INFER_EXPORT_API::isPrivateTensor
bool isPrivateTensor(const std::string &tensorName)
Check if the given tensor is marked as private (contains INFER_SERVER_PRIVATE_BUF in the name).
INFER_EXPORT_API::dataType2Str
std::string dataType2Str(const InferDataType type)
INFER_EXPORT_API::tensorOrder2Str
std::string tensorOrder2Str(InferTensorOrder order)
INFER_EXPORT_API::DlLibHandle::getPath
const std::string & getPath() const
Definition: infer_utils.h:152
INFER_EXPORT_API::QueueThread::RunFunc
std::function< bool(Item)> RunFunc
Definition: infer_utils.h:290
nvdsinferserver::SharedCuStream
std::shared_ptr< CudaStream > SharedCuStream
Cuda based pointers.
Definition: infer_common.h:84
INFER_EXPORT_API::normalizeDims
void normalizeDims(InferDims &dims)
Recalculates the total number of elements for the dimensions.
Definition: infer_utils.h:686
INFER_EXPORT_API::MapBufferPool::acquireBuffer
RecylePtr acquireBuffer(const Key &key)
Acquire a buffer from the selected pool.
Definition: infer_utils.h:571
INFER_EXPORT_API::isCpuMem
bool isCpuMem(InferMemType type)
Check if the memory type uses CPU memory (kCpu or kCpuCuda).
InferError
#define InferError(fmt,...)
Definition: infer_defines.h:46
INFER_EXPORT_API::MapBufferPool::~MapBufferPool
virtual ~MapBufferPool()
Destructor.
Definition: infer_utils.h:513
INFER_EXPORT_API
Definition: infer_utils.h:33
INFER_EXPORT_API::MapBufferPool::setBuffer
bool setBuffer(const Key &key, UniqBuffer buf)
Add a buffer to the pool map.
Definition: infer_utils.h:536
INFER_EXPORT_API::MapBufferPool::SharedPool
SharedBufPool< UniqBuffer > SharedPool
Definition: infer_utils.h:503
nvdsinferserver::InferMemType
InferMemType
The memory types of inference buffers.
Definition: infer_datatypes.h:56
INFER_EXPORT_API::hasWildcard
bool hasWildcard(const InferDims &dims)
Check if any of the InferDims dimensions are of dynamic size (-1 or negative values).
Definition: infer_utils.h:656
NvDsInferStatus2Str
const INFER_EXPORT_API char * NvDsInferStatus2Str(NvDsInferStatus status)
Returns the NvDsInferStatus enum name as a string.
INFER_EXPORT_API::operator==
bool operator==(const InferDims &a, const InferDims &b)
INFER_EXPORT_API::BufferPool::size
int size()
Get the number of free buffers.
Definition: infer_utils.h:439
INFER_EXPORT_API::BufferPool::acquireBuffer
RecylePtr acquireBuffer()
Acquire a buffer from the pool.
Definition: infer_utils.h:450
INFER_EXPORT_API::fEqual
bool fEqual(float a, float b)
Check if the two floating point values are equal, the difference is less than or equal to the epsilon...
INFER_EXPORT_API::WakeupException::WakeupException
WakeupException(const std::string &s)
Definition: infer_utils.h:192
INFER_EXPORT_API::QueueThread::setThreadName
void setThreadName(const std::string &name)
Set the internal (m_Name) name of the thread and system name using pthread_setname_np().
Definition: infer_utils.h:314
INFER_EXPORT_API::operator!=
bool operator!=(const InferDims &a, const InferDims &b)
INFER_EXPORT_API::reshapeToFullDimsBuf
SharedBatchBuf reshapeToFullDimsBuf(const SharedBatchBuf &buf, bool reCalcBytes=false)
Reshape the buffer dimensions with batch size added as new dimension.
INFER_EXPORT_API::GuardQueue::clear
void clear()
Clear the queue.
Definition: infer_utils.h:246
INFER_EXPORT_API::ReshapeBuf
SharedBatchBuf ReshapeBuf(const SharedBatchBuf &in, uint32_t batch, const InferDims &dims, bool reCalcBytes=false)
Update the buffer dimensions as per provided new dimensions.
INFER_EXPORT_API::fullDims
InferDims fullDims(int batchSize, const InferDims &in)
Extend the dimensions to include batch size.
string_empty
bool string_empty(const char *str)
Definition: post_processor_struct.h:64
INFER_EXPORT_API::tensorBufferCopy
NvDsInferStatus tensorBufferCopy(const SharedBatchBuf &in, const SharedBatchBuf &out, const SharedCuStream &stream)
Copy one tensor buffer to another.
INFER_EXPORT_API::BufferPool::ItemType
typename UniPtr::element_type ItemType
Definition: infer_utils.h:407
NvDsInferStatus
NvDsInferStatus
Enum for the status codes returned by NvDsInferContext.
Definition: nvdsinfer.h:218