// morpheus/_lib/src/messages/multi_inference_nlp.cpp
#include "morpheus/messages/multi_inference_nlp.hpp"
#include "morpheus/messages/memory/inference_memory.hpp"
#include "morpheus/messages/meta.hpp"
#include "morpheus/messages/multi_inference.hpp"
#include "morpheus/utilities/cupy_util.hpp"
#include <cudf/types.hpp>
#include <pybind11/pytypes.h>
#include <memory>
#include <utility>
namespace morpheus {
/****** Component public implementations *******************/
/****** MultiInferenceNLPMessage****************************************/
/**
 * @brief Construct a new NLP inference message over a slice of rows/tensors.
 *
 * @param meta Holds the source DataFrame backing this message.
 * @param mess_offset Offset of the first row of `meta` covered by this message.
 * @param mess_count Number of rows of `meta` covered by this message.
 * @param memory Holds the inference input tensors ("input_ids", "input_mask", "seq_ids").
 * @param offset Offset of the first tensor row covered by this message.
 * @param count Number of tensor rows covered by this message.
 */
MultiInferenceNLPMessage::MultiInferenceNLPMessage(std::shared_ptr<morpheus::MessageMeta> meta,
                                                   size_t mess_offset,
                                                   size_t mess_count,
                                                   std::shared_ptr<morpheus::InferenceMemory> memory,
                                                   size_t offset,
                                                   size_t count) :
  // Move the shared_ptr params into the base ctor to avoid atomic refcount
  // copies (matches the std::move usage in MultiInferenceNLPMessageInterfaceProxy::init).
  MultiInferenceMessage(std::move(meta), mess_offset, mess_count, std::move(memory), offset, count)
{}
/**
 * @brief Returns the "input_ids" tensor for this message's slice.
 */
const TensorObject MultiInferenceNLPMessage::get_input_ids() const
{
    // Delegate to the generic named-tensor accessor on the base class.
    return get_input("input_ids");
}
void MultiInferenceNLPMessage::set_input_ids(const TensorObject &input_ids)
{
this->set_input("input_ids", input_ids);
}
/**
 * @brief Returns the "input_mask" tensor for this message's slice.
 */
const TensorObject MultiInferenceNLPMessage::get_input_mask() const
{
    // Delegate to the generic named-tensor accessor on the base class.
    return get_input("input_mask");
}
void MultiInferenceNLPMessage::set_input_mask(const TensorObject &input_mask)
{
this->set_input("input_mask", input_mask);
}
/**
 * @brief Returns the "seq_ids" tensor for this message's slice.
 */
const TensorObject MultiInferenceNLPMessage::get_seq_ids() const
{
    // Delegate to the generic named-tensor accessor on the base class.
    return get_input("seq_ids");
}
void MultiInferenceNLPMessage::set_seq_ids(const TensorObject &seq_ids)
{
this->set_input("seq_ids", seq_ids);
}
/****** MultiInferenceNLPMessageInterfaceProxy *************************/
/**
 * @brief Python-facing factory: builds a MultiInferenceNLPMessage from its parts.
 *
 * Ownership of @p meta and @p memory is transferred into the new message.
 */
std::shared_ptr<MultiInferenceNLPMessage> MultiInferenceNLPMessageInterfaceProxy::init(
    std::shared_ptr<MessageMeta> meta,
    cudf::size_type mess_offset,
    cudf::size_type mess_count,
    std::shared_ptr<InferenceMemory> memory,
    cudf::size_type offset,
    cudf::size_type count)
{
    auto message = std::make_shared<MultiInferenceNLPMessage>(
        std::move(meta), mess_offset, mess_count, std::move(memory), offset, count);

    return message;
}
/**
 * @brief Python-facing accessor for the message's tensor memory, typed as InferenceMemory.
 *
 * The debug-only DCHECK verifies the downcast is valid; the release-path
 * static_pointer_cast then performs it without the RTTI cost of a dynamic cast.
 */
std::shared_ptr<morpheus::InferenceMemory> MultiInferenceNLPMessageInterfaceProxy::memory(
    MultiInferenceNLPMessage &self)
{
    DCHECK(std::dynamic_pointer_cast<morpheus::InferenceMemory>(self.memory) != nullptr);
    return std::static_pointer_cast<morpheus::InferenceMemory>(self.memory);
}
/**
 * @brief Python-facing accessor for the message's tensor-row offset.
 */
std::size_t MultiInferenceNLPMessageInterfaceProxy::offset(MultiInferenceNLPMessage &self)
{
    const auto tensor_offset = self.offset;

    return tensor_offset;
}
/**
 * @brief Python-facing accessor for the message's tensor-row count.
 */
std::size_t MultiInferenceNLPMessageInterfaceProxy::count(MultiInferenceNLPMessage &self)
{
    const auto tensor_count = self.count;

    return tensor_count;
}
/**
 * @brief Python-facing accessor: returns the "input_ids" tensor as a CuPy array.
 */
pybind11::object MultiInferenceNLPMessageInterfaceProxy::input_ids(MultiInferenceNLPMessage &self)
{
    // Fetch the tensor and hand it to Python as a CuPy object in one step.
    return CupyUtil::tensor_to_cupy(self.get_input_ids());
}
/**
 * @brief Python-facing accessor: returns the "input_mask" tensor as a CuPy array.
 */
pybind11::object MultiInferenceNLPMessageInterfaceProxy::input_mask(MultiInferenceNLPMessage &self)
{
    // Fetch the tensor and hand it to Python as a CuPy object in one step.
    return CupyUtil::tensor_to_cupy(self.get_input_mask());
}
/**
 * @brief Python-facing accessor: returns the "seq_ids" tensor as a CuPy array.
 */
pybind11::object MultiInferenceNLPMessageInterfaceProxy::seq_ids(MultiInferenceNLPMessage &self)
{
    // Fetch the tensor and hand it to Python as a CuPy object in one step.
    return CupyUtil::tensor_to_cupy(self.get_seq_ids());
}
} // namespace morpheus