Program Listing for File multi_inference_fil.hpp

Return to documentation for file (morpheus/_lib/include/morpheus/messages/multi_inference_fil.hpp)


#pragma once

#include "morpheus/messages/memory/inference_memory.hpp"  // for InferenceMemory
#include "morpheus/messages/meta.hpp"                      // for MessageMeta
#include "morpheus/messages/multi_inference.hpp"
#include "morpheus/objects/tensor_object.hpp"

#include <cudf/types.hpp>

#include <cstddef>  // for size_t
#include <memory>

namespace morpheus {
/****** Component public implementations *******************/
/****** MultiInferenceFILMessage****************************************/

#pragma GCC visibility push(default)
class MultiInferenceFILMessage : public MultiInferenceMessage
{
  public:
    MultiInferenceFILMessage(std::shared_ptr<morpheus::MessageMeta> meta,
                             size_t mess_offset,
                             size_t mess_count,
                             std::shared_ptr<morpheus::InferenceMemory> memory,
                             size_t offset,
                             size_t count);

    const TensorObject get_input__0() const;

    void set_input__0(const TensorObject& input__0);

    const TensorObject get_seq_ids() const;

    void set_seq_ids(const TensorObject& seq_ids);
};

/****** MultiInferenceFILMessageInterfaceProxy *************************/
struct MultiInferenceFILMessageInterfaceProxy
{
    static std::shared_ptr<MultiInferenceFILMessage> init(std::shared_ptr<MessageMeta> meta,
                                                          cudf::size_type mess_offset,
                                                          cudf::size_type mess_count,
                                                          std::shared_ptr<InferenceMemory> memory,
                                                          cudf::size_type offset,
                                                          cudf::size_type count);

    static std::shared_ptr<morpheus::InferenceMemory> memory(MultiInferenceFILMessage& self);

    static std::size_t offset(MultiInferenceFILMessage& self);

    static std::size_t count(MultiInferenceFILMessage& self);
};
#pragma GCC visibility pop  // end of group
}  // namespace morpheus
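
The class declared above follows a pattern that recurs throughout these message headers: a message object holds a shared pointer to batch-wide inference memory plus an offset/count window into it, and exposes individual tensors (here input__0 and seq_ids) through named get/set accessors. The sketch below illustrates that pattern with a minimal, self-contained analogue in standard C++ only; all names (ToyMemory, ToyTensor, ToyMultiMessage) are hypothetical and are not the Morpheus API.

// Hypothetical sketch of the "windowed view over shared inference memory"
// pattern; names are illustrative only and NOT part of Morpheus.
#include <cstddef>
#include <iostream>
#include <map>
#include <memory>
#include <string>
#include <vector>

// Stand-in for a tensor: a named flat float buffer.
using ToyTensor = std::vector<float>;

// Stand-in for InferenceMemory: owns the named input tensors for a whole batch.
struct ToyMemory
{
    std::map<std::string, ToyTensor> tensors;
};

// Stand-in for MultiInferenceFILMessage: an offset/count window over the
// shared memory, with named accessors analogous to get_input__0 / set_seq_ids.
class ToyMultiMessage
{
  public:
    ToyMultiMessage(std::shared_ptr<ToyMemory> memory, std::size_t offset, std::size_t count) :
      m_memory(std::move(memory)),
      m_offset(offset),
      m_count(count)
    {}

    const ToyTensor& get_input__0() const
    {
        return m_memory->tensors.at("input__0");
    }

    void set_seq_ids(const ToyTensor& seq_ids)
    {
        m_memory->tensors["seq_ids"] = seq_ids;
    }

    std::size_t offset() const { return m_offset; }
    std::size_t count() const { return m_count; }

  private:
    std::shared_ptr<ToyMemory> m_memory;  // shared with other message views
    std::size_t m_offset;                 // first row this view covers
    std::size_t m_count;                  // number of rows in this view
};

int main()
{
    auto memory = std::make_shared<ToyMemory>();
    memory->tensors["input__0"] = {0.1F, 0.2F, 0.3F, 0.4F};

    // Two views share the same memory but cover different row ranges,
    // mirroring how mess_offset/mess_count and offset/count are used above.
    ToyMultiMessage first(memory, 0, 2);
    ToyMultiMessage second(memory, 2, 2);

    second.set_seq_ids({2.0F, 3.0F});
    std::cout << "rows: " << first.count() + second.count()
              << ", input__0 size: " << first.get_input__0().size() << '\n';
    return 0;
}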
