metrics#

Module Contents#

Classes#

API#

class metrics.AsyncMetricsResource(client: nemo_platform._client.AsyncNeMoPlatform)#

Bases: nemo_platform._resource.AsyncAPIResource

Initialization

async create(
name: str,
*,
workspace: str | None = None,
model: nemo_platform.types.metric_create_params.LLMJudgeMetricModel | nemo_platform._types.Omit = omit,
scores: typing.Iterable[nemo_platform.types.metric_create_params.LLMJudgeMetricScore] | typing.Iterable[nemo_platform.types.remote_score_param.RemoteScoreParam] | nemo_platform._types.Omit = omit,
description: str | nemo_platform._types.Omit = omit,
ignore_inference_failure: bool | nemo_platform._types.Omit = omit,
inference: nemo_platform.types.inference_params_param.InferenceParamsParam | nemo_platform._types.Omit = omit,
labels: typing.Dict[str,
str] | nemo_platform._types.Omit = omit,
prompt_template: typing.Union[str,
typing.Dict[str,
object]] | nemo_platform._types.Omit = omit,
reasoning: nemo_platform.types.reasoning_params_param.ReasoningParamsParam | nemo_platform._types.Omit = omit,
structured_output: typing.Dict[str,
object] | nemo_platform._types.Omit = omit,
supported_job_types: typing.List[typing_extensions.Literal["online",
"offline"]] | nemo_platform._types.Omit = omit,
system_prompt: str | nemo_platform._types.Omit = omit,
type: typing_extensions.Literal["llm-judge"] | typing_extensions.Literal["topic_adherence"] | typing_extensions.Literal["agent_goal_accuracy"] | typing_extensions.Literal["answer_accuracy"] | typing_extensions.Literal["context_relevance"] | typing_extensions.Literal["response_groundedness"] | typing_extensions.Literal["context_recall"] | typing_extensions.Literal["context_precision"] | typing_extensions.Literal["context_entity_recall"] | typing_extensions.Literal["response_relevancy"] | typing_extensions.Literal["faithfulness"] | typing_extensions.Literal["noise_sensitivity"] | typing_extensions.Literal["tool_call_accuracy"] | typing_extensions.Literal["bleu"] | typing_extensions.Literal["exact-match"] | typing_extensions.Literal["f1"] | typing_extensions.Literal["number-check"] | typing_extensions.Literal["remote"] | typing_extensions.Literal["nemo-agent-toolkit-remote"] | typing_extensions.Literal["rouge"] | typing_extensions.Literal["string-check"] | typing_extensions.Literal["tool-calling"] | nemo_platform._types.Omit = omit,
judge_model: nemo_platform.types.metric_create_params.TopicAdherenceMetricJudgeModel | nemo_platform.types.metric_create_params.AgentGoalAccuracyMetricJudgeModel | nemo_platform.types.metric_create_params.AnswerAccuracyMetricJudgeModel | nemo_platform.types.metric_create_params.ContextRelevanceMetricJudgeModel | nemo_platform.types.metric_create_params.ResponseGroundednessMetricJudgeModel | nemo_platform.types.metric_create_params.ContextRecallMetricJudgeModel | nemo_platform.types.metric_create_params.ContextPrecisionMetricJudgeModel | nemo_platform.types.metric_create_params.ContextEntityRecallMetricJudgeModel | nemo_platform.types.metric_create_params.ResponseRelevancyMetricJudgeModel | nemo_platform.types.metric_create_params.FaithfulnessMetricJudgeModel | nemo_platform.types.metric_create_params.NoiseSensitivityMetricJudgeModel | nemo_platform._types.Omit = omit,
input_template: typing.Dict[str,
object] | nemo_platform._types.Omit = omit,
metric_mode: typing_extensions.Literal["f1",
"precision",
"recall"] | nemo_platform._types.Omit = omit,
use_reference: bool | nemo_platform._types.Omit = omit,
embeddings_model: nemo_platform.types.metric_create_params.ResponseRelevancyMetricEmbeddingsModel | nemo_platform._types.Omit = omit,
strictness: int | nemo_platform._types.Omit = omit,
references: nemo_platform._types.SequenceNotStr[str] | nemo_platform._types.Omit = omit,
candidate: str | nemo_platform._types.Omit = omit,
reference: str | nemo_platform._types.Omit = omit,
left_template: str | nemo_platform._types.Omit = omit,
operation: typing_extensions.Literal["equals",
"==",
"!=",
"<>",
"not equals",
">=",
"gte",
"greater than or equal",
">",
"gt",
"greater than",
"<=",
"lte",
"less than or equal",
"<",
"lt",
"less than",
"absolute difference"] | typing_extensions.Literal["equals",
"==",
"!=",
"<>",
"not equals",
"contains",
"not contains",
"startswith",
"endswith"] | nemo_platform._types.Omit = omit,
right_template: str | nemo_platform._types.Omit = omit,
epsilon: float | nemo_platform._types.Omit = omit,
body: typing.Dict[str,
object] | nemo_platform._types.Omit = omit,
url: str | nemo_platform._types.Omit = omit,
api_key_secret: nemo_platform.types.files.secret_ref.SecretRef | nemo_platform._types.Omit = omit,
max_retries: int | nemo_platform._types.Omit = omit,
timeout_seconds: float | nemo_platform._types.Omit = omit,
evaluator_name: str | nemo_platform._types.Omit = omit,
extra_headers: nemo_platform._types.Headers | None = None,
extra_query: nemo_platform._types.Query | None = None,
extra_body: nemo_platform._types.Body | None = None,
timeout: float | httpx.Timeout | None | nemo_platform._types.NotGiven = not_given,
) -> nemo_platform.types.metric_create_response.MetricCreateResponse#
async delete(
name: str,
*,
workspace: str | None = None,
extra_headers: nemo_platform._types.Headers | None = None,
extra_query: nemo_platform._types.Query | None = None,
extra_body: nemo_platform._types.Body | None = None,
timeout: float | httpx.Timeout | None | nemo_platform._types.NotGiven = not_given,
) -> nemo_platform.types.shared.delete_response.DeleteResponse#
async evaluate(
*,
workspace: str | None = None,
dataset: nemo_platform.types.evaluate_dataset_rows_param.EvaluateDatasetRowsParam,
metric: nemo_platform.types.metric_evaluate_params.Metric,
aggregate_fields: List[typing_extensions.Literal["nan_count", "sum", "mean", "min", "max", "std_dev", "variance", "score_type", "percentiles", "histogram", "rubric_distribution", "mode_category"]] | nemo_platform._types.Omit = omit,
extra_headers: nemo_platform._types.Headers | None = None,
extra_query: nemo_platform._types.Query | None = None,
extra_body: nemo_platform._types.Body | None = None,
timeout: float | httpx.Timeout | None | nemo_platform._types.NotGiven = not_given,
) -> nemo_platform.types.metric_evaluation_response.MetricEvaluationResponse#
list(
*,
workspace: str | None = None,
filter: nemo_platform.types.metrics_list_filter_param.MetricsListFilterParam | nemo_platform._types.Omit = omit,
page: int | nemo_platform._types.Omit = omit,
page_size: int | nemo_platform._types.Omit = omit,
search: nemo_platform.types.metrics_list_search_param.MetricsListSearchParam | nemo_platform._types.Omit = omit,
sort: typing_extensions.Literal["-created_at", "created_at", "-updated_at", "updated_at", "-name", "name"] | nemo_platform._types.Omit = omit,
extra_headers: nemo_platform._types.Headers | None = None,
extra_query: nemo_platform._types.Query | None = None,
extra_body: nemo_platform._types.Body | None = None,
timeout: float | httpx.Timeout | None | nemo_platform._types.NotGiven = not_given,
) -> nemo_platform._base_client.AsyncPaginator[nemo_platform.types.metrics_list_response.Data, nemo_platform.pagination.AsyncDefaultPagination[nemo_platform.types.metrics_list_response.Data]]#
async retrieve(
name: str,
*,
workspace: str | None = None,
extra_headers: nemo_platform._types.Headers | None = None,
extra_query: nemo_platform._types.Query | None = None,
extra_body: nemo_platform._types.Body | None = None,
timeout: float | httpx.Timeout | None | nemo_platform._types.NotGiven = not_given,
) -> nemo_platform.types.metric_retrieve_response.MetricRetrieveResponse#
property with_raw_response: metrics.AsyncMetricsResourceWithRawResponse#
property with_streaming_response: metrics.AsyncMetricsResourceWithStreamingResponse#
class metrics.AsyncMetricsResourceWithRawResponse(
metrics: metrics.AsyncMetricsResource,
)#

Initialization

class metrics.AsyncMetricsResourceWithStreamingResponse(
metrics: metrics.AsyncMetricsResource,
)#

Initialization

class metrics.MetricsResource(client: nemo_platform._client.NeMoPlatform)#

Bases: nemo_platform._resource.SyncAPIResource

Initialization

create(
name: str,
*,
workspace: str | None = None,
model: nemo_platform.types.metric_create_params.LLMJudgeMetricModel | nemo_platform._types.Omit = omit,
scores: typing.Iterable[nemo_platform.types.metric_create_params.LLMJudgeMetricScore] | typing.Iterable[nemo_platform.types.remote_score_param.RemoteScoreParam] | nemo_platform._types.Omit = omit,
description: str | nemo_platform._types.Omit = omit,
ignore_inference_failure: bool | nemo_platform._types.Omit = omit,
inference: nemo_platform.types.inference_params_param.InferenceParamsParam | nemo_platform._types.Omit = omit,
labels: typing.Dict[str,
str] | nemo_platform._types.Omit = omit,
prompt_template: typing.Union[str,
typing.Dict[str,
object]] | nemo_platform._types.Omit = omit,
reasoning: nemo_platform.types.reasoning_params_param.ReasoningParamsParam | nemo_platform._types.Omit = omit,
structured_output: typing.Dict[str,
object] | nemo_platform._types.Omit = omit,
supported_job_types: typing.List[typing_extensions.Literal["online",
"offline"]] | nemo_platform._types.Omit = omit,
system_prompt: str | nemo_platform._types.Omit = omit,
type: typing_extensions.Literal["llm-judge"] | typing_extensions.Literal["topic_adherence"] | typing_extensions.Literal["agent_goal_accuracy"] | typing_extensions.Literal["answer_accuracy"] | typing_extensions.Literal["context_relevance"] | typing_extensions.Literal["response_groundedness"] | typing_extensions.Literal["context_recall"] | typing_extensions.Literal["context_precision"] | typing_extensions.Literal["context_entity_recall"] | typing_extensions.Literal["response_relevancy"] | typing_extensions.Literal["faithfulness"] | typing_extensions.Literal["noise_sensitivity"] | typing_extensions.Literal["tool_call_accuracy"] | typing_extensions.Literal["bleu"] | typing_extensions.Literal["exact-match"] | typing_extensions.Literal["f1"] | typing_extensions.Literal["number-check"] | typing_extensions.Literal["remote"] | typing_extensions.Literal["nemo-agent-toolkit-remote"] | typing_extensions.Literal["rouge"] | typing_extensions.Literal["string-check"] | typing_extensions.Literal["tool-calling"] | nemo_platform._types.Omit = omit,
judge_model: nemo_platform.types.metric_create_params.TopicAdherenceMetricJudgeModel | nemo_platform.types.metric_create_params.AgentGoalAccuracyMetricJudgeModel | nemo_platform.types.metric_create_params.AnswerAccuracyMetricJudgeModel | nemo_platform.types.metric_create_params.ContextRelevanceMetricJudgeModel | nemo_platform.types.metric_create_params.ResponseGroundednessMetricJudgeModel | nemo_platform.types.metric_create_params.ContextRecallMetricJudgeModel | nemo_platform.types.metric_create_params.ContextPrecisionMetricJudgeModel | nemo_platform.types.metric_create_params.ContextEntityRecallMetricJudgeModel | nemo_platform.types.metric_create_params.ResponseRelevancyMetricJudgeModel | nemo_platform.types.metric_create_params.FaithfulnessMetricJudgeModel | nemo_platform.types.metric_create_params.NoiseSensitivityMetricJudgeModel | nemo_platform._types.Omit = omit,
input_template: typing.Dict[str,
object] | nemo_platform._types.Omit = omit,
metric_mode: typing_extensions.Literal["f1",
"precision",
"recall"] | nemo_platform._types.Omit = omit,
use_reference: bool | nemo_platform._types.Omit = omit,
embeddings_model: nemo_platform.types.metric_create_params.ResponseRelevancyMetricEmbeddingsModel | nemo_platform._types.Omit = omit,
strictness: int | nemo_platform._types.Omit = omit,
references: nemo_platform._types.SequenceNotStr[str] | nemo_platform._types.Omit = omit,
candidate: str | nemo_platform._types.Omit = omit,
reference: str | nemo_platform._types.Omit = omit,
left_template: str | nemo_platform._types.Omit = omit,
operation: typing_extensions.Literal["equals",
"==",
"!=",
"<>",
"not equals",
">=",
"gte",
"greater than or equal",
">",
"gt",
"greater than",
"<=",
"lte",
"less than or equal",
"<",
"lt",
"less than",
"absolute difference"] | typing_extensions.Literal["equals",
"==",
"!=",
"<>",
"not equals",
"contains",
"not contains",
"startswith",
"endswith"] | nemo_platform._types.Omit = omit,
right_template: str | nemo_platform._types.Omit = omit,
epsilon: float | nemo_platform._types.Omit = omit,
body: typing.Dict[str,
object] | nemo_platform._types.Omit = omit,
url: str | nemo_platform._types.Omit = omit,
api_key_secret: nemo_platform.types.files.secret_ref.SecretRef | nemo_platform._types.Omit = omit,
max_retries: int | nemo_platform._types.Omit = omit,
timeout_seconds: float | nemo_platform._types.Omit = omit,
evaluator_name: str | nemo_platform._types.Omit = omit,
extra_headers: nemo_platform._types.Headers | None = None,
extra_query: nemo_platform._types.Query | None = None,
extra_body: nemo_platform._types.Body | None = None,
timeout: float | httpx.Timeout | None | nemo_platform._types.NotGiven = not_given,
) -> nemo_platform.types.metric_create_response.MetricCreateResponse#
delete(
name: str,
*,
workspace: str | None = None,
extra_headers: nemo_platform._types.Headers | None = None,
extra_query: nemo_platform._types.Query | None = None,
extra_body: nemo_platform._types.Body | None = None,
timeout: float | httpx.Timeout | None | nemo_platform._types.NotGiven = not_given,
) -> nemo_platform.types.shared.delete_response.DeleteResponse#
evaluate(
*,
workspace: str | None = None,
dataset: nemo_platform.types.evaluate_dataset_rows_param.EvaluateDatasetRowsParam,
metric: nemo_platform.types.metric_evaluate_params.Metric,
aggregate_fields: List[typing_extensions.Literal["nan_count", "sum", "mean", "min", "max", "std_dev", "variance", "score_type", "percentiles", "histogram", "rubric_distribution", "mode_category"]] | nemo_platform._types.Omit = omit,
extra_headers: nemo_platform._types.Headers | None = None,
extra_query: nemo_platform._types.Query | None = None,
extra_body: nemo_platform._types.Body | None = None,
timeout: float | httpx.Timeout | None | nemo_platform._types.NotGiven = not_given,
) -> nemo_platform.types.metric_evaluation_response.MetricEvaluationResponse#
list(
*,
workspace: str | None = None,
filter: nemo_platform.types.metrics_list_filter_param.MetricsListFilterParam | nemo_platform._types.Omit = omit,
page: int | nemo_platform._types.Omit = omit,
page_size: int | nemo_platform._types.Omit = omit,
search: nemo_platform.types.metrics_list_search_param.MetricsListSearchParam | nemo_platform._types.Omit = omit,
sort: typing_extensions.Literal["-created_at", "created_at", "-updated_at", "updated_at", "-name", "name"] | nemo_platform._types.Omit = omit,
extra_headers: nemo_platform._types.Headers | None = None,
extra_query: nemo_platform._types.Query | None = None,
extra_body: nemo_platform._types.Body | None = None,
timeout: float | httpx.Timeout | None | nemo_platform._types.NotGiven = not_given,
) -> nemo_platform.pagination.SyncDefaultPagination[nemo_platform.types.metrics_list_response.Data]#
retrieve(
name: str,
*,
workspace: str | None = None,
extra_headers: nemo_platform._types.Headers | None = None,
extra_query: nemo_platform._types.Query | None = None,
extra_body: nemo_platform._types.Body | None = None,
timeout: float | httpx.Timeout | None | nemo_platform._types.NotGiven = not_given,
) -> nemo_platform.types.metric_retrieve_response.MetricRetrieveResponse#
property with_raw_response: metrics.MetricsResourceWithRawResponse#
property with_streaming_response: metrics.MetricsResourceWithStreamingResponse#
class metrics.MetricsResourceWithRawResponse(metrics: metrics.MetricsResource)#

Initialization

class metrics.MetricsResourceWithStreamingResponse(metrics: metrics.MetricsResource)#

Initialization