Coverage for credoai/modules/constants_threshold_metrics.py: 100%
3 statements
« prev ^ index » next — coverage.py v6.5.0, created at 2022-12-08 07:32 +0000
"""Constants for threshold metrics.

Define relationships between metric names (strings) and
threshold metric functions,
as well as alternate names for each metric name.
"""
# Threshold-varying metric implementations (each returns a labeled DataFrame;
# see the module-level note below the imports).
from credoai.modules.metrics_credoai import (
    credo_det_curve,
    credo_pr_curve,
    credo_roc_curve,
)
"""
Current outputting functionality in evaluators (e.g. Performance) relies on
the assumption that threshold metric functions return DataFrames, with columns labeled.

Other return types are possible, in principle. These may require further wrangling on the
evaluator side before converting to Evidence to ensure that the underlying data structure
can easily be read by the Credo AI Platform.
"""
# MODEL METRICS
# Maps canonical metric names to the threshold-varying metric functions
# (each presumably consumes probability scores — TODO confirm against
# metrics_credoai implementations).
THRESHOLD_PROBABILITY_FUNCTIONS = {
    "roc_curve": credo_roc_curve,
    "precision_recall_curve": credo_pr_curve,
    "det_curve": credo_det_curve,
}
# Alternate (equivalent) names for each threshold-varying metric name.
# Included for consistency relative to Metric and constants_metrics.py.
# NOTE(review): a previous comment claimed this mapping was empty, but it
# defines two aliases — kept the entries, corrected the comment.
THRESHOLD_METRIC_EQUIVALENTS = {
    "precision_recall_curve": ["pr_curve"],
    "det_curve": ["detection_error_tradeoff"],
}