nemo_microservices.types.shared_params.task_prompt#

Module Contents#

Classes#

Data#

API#

nemo_microservices.types.shared_params.task_prompt.Message: typing_extensions.TypeAlias#

None

class nemo_microservices.types.shared_params.task_prompt.TaskPrompt#

Bases: typing_extensions.TypedDict

content: str#

None

The content of the prompt, if it’s a string.

max_length: int#

None

The maximum length of the prompt in number of characters.

max_tokens: int#

None

The maximum number of tokens that can be generated in the chat completion.

messages: nemo_microservices._types.SequenceNotStr[nemo_microservices.types.shared_params.task_prompt.Message]#

None

The list of messages included in the prompt. Used for chat models.

mode: str#

None

Corresponds to the prompting_mode for which this prompt is fetched.

Default is ‘standard’.

models: nemo_microservices._types.SequenceNotStr[str]#

None

If specified, the prompt will be used only for the given LLM engines/models.

The format is a list of strings, each of the form `<engine>` or `<engine>/<model>`.

output_parser: str#

None

The name of the output parser to use for this prompt.

stop: nemo_microservices._types.SequenceNotStr[str]#

None

If specified, will configure stop tokens for models that support this.

task: typing_extensions.Required[str]#

None

The id of the task associated with this prompt.