Program Listing for File infer_manager.hpp

Return to documentation for file (modules/holoinfer/src/manager/infer_manager.hpp)

/*
 * SPDX-FileCopyrightText: Copyright (c) 2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef _HOLOSCAN_INFER_MANAGER_H
#define _HOLOSCAN_INFER_MANAGER_H

#include <functional>
#include <future>
#include <iostream>
#include <map>
#include <memory>
#include <string>

#include <holoinfer.hpp>
#include <holoinfer_buffer.hpp>
#include <infer/infer.hpp>
#include <infer/onnx/core.hpp>
#include <infer/trt/core.hpp>
#include <params/infer_param.hpp>

namespace holoscan {
namespace inference {

class ManagerInfer {
 public:
  ManagerInfer();

  InferStatus set_inference_params(std::shared_ptr<MultiAISpecs>& multiai_specs);

  InferStatus execute_inference(DataMap& preprocess_data_map, DataMap& output_data_map);

  InferStatus run_core_inference(const std::string& model_name,
                                 DataMap& permodel_preprocess_data,
                                 DataMap& permodel_output_data);

  void cleanup();

  void print_dimensions();

  DimType get_input_dimensions() const;

  DimType get_output_dimensions() const;

 private:
  bool parallel_processing_ = false;
  bool cuda_buffer_in_ = false;
  bool cuda_buffer_out_ = false;

  std::map<std::string, std::unique_ptr<Params>> infer_param_;
  std::map<std::string, std::unique_ptr<InferBase>> holo_infer_context_;
  std::map<std::string, std::string> inference_map_;

  DimType models_input_dims_;
  DimType models_output_dims_;

  std::map<std::string, bool> supported_backend_{
      {"onnxrt", true}, {"trt", true}, {"pytorch", false}};
};

std::unique_ptr<ManagerInfer> manager;

}  // namespace inference
}  // namespace holoscan

#endif
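
The listing above only declares the ManagerInfer interface. The fragment below is a minimal usage sketch, not part of the SDK listing: it assumes the MultiAISpecs object and the pre-processed and output DataMaps are constructed elsewhere by the application, and the include path and the helper name run_once are illustrative only. The comments describe the apparent intent of each call as implied by the declarations.

#include <memory>

#include "infer_manager.hpp"  // include path depends on your build setup

namespace hi = holoscan::inference;

// Hypothetical helper: the caller is assumed to have built the MultiAISpecs
// and filled the pre-processed DataMap beforehand.
hi::InferStatus run_once(std::shared_ptr<hi::MultiAISpecs>& specs,
                         hi::DataMap& preprocessed, hi::DataMap& results) {
  hi::ManagerInfer manager;

  // Create per-model inference contexts from the supplied specifications.
  hi::InferStatus status = manager.set_inference_params(specs);

  // Print the input/output tensor dimensions of the loaded models.
  manager.print_dimensions();

  // Run inference for all configured models; outputs are written into `results`.
  status = manager.execute_inference(preprocessed, results);

  // Release backend resources.
  manager.cleanup();
  return status;
}

In the Holoscan SDK this manager is typically driven by the multi-AI inference operator rather than called directly; the sketch only illustrates the call order suggested by the declarations.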
