Coverage for credoai/modules/constants_threshold_metrics.py: 100% (3 statements)
coverage.py v7.1.0, created at 2023-02-13 21:56 +0000
1"""Constants for threshold metrics
3Define relationships between metric names (strings) and
4threshold metric functions,
5as well as alternate names for each metric name
6"""
from credoai.modules.metrics_credoai import (
    credo_det_curve,
    credo_gain_chart,
    credo_pr_curve,
    credo_roc_curve,
)
15"""
16Current outputting functionality in evaluators (e.g. Performance) relies on
17the assumption that threshold metric functions return DataFrames, with columns labeled.
19Other return types are possible, in principle. These may require further wrangling on the
20evaluator side before converting to Evidence to ensure that the underlying data structure
21can easily be read by the Credo AI Platform.
22"""
# MODEL METRICS
THRESHOLD_PROBABILITY_FUNCTIONS = {
    "det_curve": credo_det_curve,
    "gain_chart": credo_gain_chart,
    "precision_recall_curve": credo_pr_curve,
    "roc_curve": credo_roc_curve,
}
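# A minimal usage sketch, in comment form (assumption: each function follows
# the sklearn-style curve signature ``fn(y_true, y_prob)``; the exact argument
# names and output columns are defined by the credoai implementations and are
# not asserted here):
#
#     roc_fn = THRESHOLD_PROBABILITY_FUNCTIONS["roc_curve"]
#     df = roc_fn([0, 1, 1, 0], [0.1, 0.9, 0.6, 0.4])
#     # Per the note above, ``df`` should be a DataFrame with labeled columns.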
# Alternate (equivalent) names for threshold-varying metric functions,
# included for consistency with Metric and constants_metrics.py
THRESHOLD_METRIC_EQUIVALENTS = {
    "precision_recall_curve": ["pr_curve"],
    "det_curve": ["detection_error_tradeoff"],
}
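# A hypothetical helper (not part of the credoai API) illustrating how the
# equivalents mapping above can be used to resolve an alternate name to its
# canonical metric name:


def resolve_threshold_metric_name(name: str) -> str:
    """Map an alternate threshold-metric name to its canonical name.

    Illustrative sketch only: returns ``name`` unchanged when it is already
    canonical or has no registered equivalent.
    """
    for canonical, alternates in THRESHOLD_METRIC_EQUIVALENTS.items():
        if name == canonical or name in alternates:
            return canonical
    return name


# Example: resolve_threshold_metric_name("pr_curve") -> "precision_recall_curve"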