RIME Metrics
Module containing metric definitions for RIME Tabular.
- class rime.tabular.metric.MetricName(value)
Parseable tabular metric name. Usage examples follow the member list below.
- AUC = 'area_under_curve'
Binary area under the ROC curve (AUC)
- MULTICLASS_AUC = 'multiclass_area_under_curve'
Multi-class area under the ROC curve (AUC)
- ACCURACY = 'accuracy'
Accuracy
- MULTICLASS_ACCURACY = 'multiclass_accuracy'
Multi-class accuracy
- F1 = 'f1'
Binary F1 score
- MACRO_F1 = 'macro_f1'
Macro F1 score
- POS_PRED_RATE = 'positive_prediction_rate'
Positive prediction rate
- AVG_PRED = 'average_prediction'
Binary average prediction
- AVG_CONFIDENCE = 'average_confidence'
Average confidence
- ATC = 'average_thresholded_confidence'
Average Thresholded Confidence (ATC)
- PRECISION = 'precision'
Binary precision
- MACRO_PRECISION = 'macro_precision'
Macro precision
- FPR = 'false_positive_rate'
Binary false positive rate
- FNR = 'false_negative_rate'
Binary false negative rate
- RECALL = 'recall'
Binary recall
- MACRO_RECALL = 'macro_recall'
Macro recall
- PRED_VARIANCE_POS = 'positive_prediction_variance'
Prediction variance over rows with positive labels
- PRED_VARIANCE_NEG = 'negative_prediction_variance'
Prediction variance over rows with negative labels
- PRED_VARIANCE_ALL = 'prediction_variance'
Prediction variance
- MSE = 'mean_squared_error'
Mean squared error (MSE)
- RMSE = 'root_mean_squared_error'
Root mean squared error (RMSE)
- MAE = 'mean_absolute_error'
Mean absolute error (MAE)
- MAPE = 'mean_absolute_percentage_error'
Mean absolute percentage error (MAPE)
- RANK_CORRELATION = 'rank_correlation'
Rank correlation
- MRR = 'mean_reciprocal_rank'
Mean reciprocal rank (MRR)
- AVG_RANK = 'average_rank'
Average rank
- AVG_ROW_WISE_PRED_DIFF = 'avg_row_wise_pred_diff'
Average row-wise prediction difference
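Because the class is documented as MetricName(value) with string-valued members, it behaves like a standard Python Enum: passing a metric's string value to the constructor parses it into the corresponding member. The sketch below assumes only that standard Enum behavior; it uses the module path and member values exactly as documented above.

```python
from rime.tabular.metric import MetricName

# Value-based lookup: the documented string value parses to its member.
metric = MetricName("accuracy")
assert metric is MetricName.ACCURACY
print(metric.value)  # 'accuracy'

# Name-based lookup also works on a standard Enum.
assert MetricName["RMSE"].value == "root_mean_squared_error"

# An unrecognized value raises ValueError, the usual Enum behavior.
try:
    MetricName("not_a_metric")
except ValueError:
    print("unrecognized metric name")
```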
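Several of the prediction-statistic metrics above (positive prediction rate, average prediction, and the prediction-variance family) are less standard than accuracy or MSE. The NumPy sketch below shows one common reading of those names; the formulas and the 0.5 decision threshold are illustrative assumptions, not RIME's verified implementation.

```python
import numpy as np

# Illustrative definitions only -- common readings of the metric names
# above, not RIME's verified implementation.
y_true = np.array([1, 0, 1, 1, 0])             # binary ground-truth labels
y_score = np.array([0.9, 0.4, 0.7, 0.2, 0.1])  # predicted P(y = 1)
threshold = 0.5                                 # assumed decision threshold

y_pred = (y_score >= threshold).astype(int)

# POS_PRED_RATE: fraction of rows predicted positive.
pos_pred_rate = y_pred.mean()

# AVG_PRED: mean predicted probability of the positive class.
avg_pred = y_score.mean()

# PRED_VARIANCE_POS / PRED_VARIANCE_NEG: variance of predicted
# probabilities restricted to rows with positive / negative true labels.
pred_var_pos = y_score[y_true == 1].var()
pred_var_neg = y_score[y_true == 0].var()

# PRED_VARIANCE_ALL: variance of predicted probabilities over all rows.
pred_var_all = y_score.var()

print(pos_pred_rate, avg_pred, pred_var_pos, pred_var_neg, pred_var_all)
```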