nat.llm.nim_llm#

Classes#

NIMModelConfig

An NVIDIA Inference Microservice (NIM) LLM provider to be used with an LLM client.

Functions#

nim_model(llm_config, _builder)

Module Contents#

class NIMModelConfig#

Bases: nat.data_models.llm.LLMBaseConfig, nat.data_models.retry_mixin.RetryMixin, nat.data_models.optimizable.OptimizableMixin, nat.data_models.thinking_mixin.ThinkingMixin, nat.data_models.ssl_verification_mixin.SSLVerificationMixin

An NVIDIA Inference Microservice (NIM) LLM provider to be used with an LLM client.

model_config#
api_key: nat.data_models.common.OptionalSecretStr = None#
base_url: str | None = None#
model_name: str#
max_tokens: pydantic.PositiveInt#
temperature: float | None#
top_p: float | None#
async nim_model(
llm_config: NIMModelConfig,
_builder: nat.builder.builder.Builder,
)#