services.openai_client#

Module Contents#

Classes#

API#

class services.openai_client.AsyncOpenAIClient(async_openai_client: openai.AsyncOpenAI)#

Bases: services.model_client.AsyncLLMClient

Initialization

async query_model(
*,
messages: collections.abc.Iterable,
model: str,
conversation_formatter: nemo_curator.services.conversation_formatter.ConversationFormatter | None = None,
max_tokens: int | None | openai._types.NotGiven = NOT_GIVEN,
n: int | None | openai._types.NotGiven = NOT_GIVEN,
seed: int | None | openai._types.NotGiven = NOT_GIVEN,
stop: str | None | list[str] | openai._types.NotGiven = NOT_GIVEN,
stream: bool | None | openai._types.NotGiven = False,
temperature: float | None | openai._types.NotGiven = NOT_GIVEN,
top_k: int | None = None,
top_p: float | None | openai._types.NotGiven = NOT_GIVEN,
) -> list[str]#
async query_reward_model(
*,
messages: collections.abc.Iterable,
model: str,
) -> dict#
class services.openai_client.OpenAIClient(openai_client: openai.OpenAI)#

Bases: services.model_client.LLMClient

Initialization

query_model(
*,
messages: collections.abc.Iterable,
model: str,
conversation_formatter: nemo_curator.services.conversation_formatter.ConversationFormatter | None = None,
max_tokens: int | None | openai._types.NotGiven = NOT_GIVEN,
n: int | None | openai._types.NotGiven = NOT_GIVEN,
seed: int | None | openai._types.NotGiven = NOT_GIVEN,
stop: str | None | list[str] | openai._types.NotGiven = NOT_GIVEN,
stream: bool | None | openai._types.NotGiven = False,
temperature: float | None | openai._types.NotGiven = NOT_GIVEN,
top_k: int | None = None,
top_p: float | None | openai._types.NotGiven = NOT_GIVEN,
) -> list[str]#
query_reward_model(
*,
messages: collections.abc.Iterable,
model: str,
) -> dict#