"""
Collection of functions to compute various performance measures from a 2x2 confusion matrix.
As an introduction to evaluating classifiers, we recommend reading this `paper`_ about ROC analysis.
TP = true positive
FP = false positive
TN = true negative
FN = false negative
.. _paper: http://dx.doi.org/10.1016/j.patrec.2005.10.010.
"""
import numpy as np
from math import sqrt
def mcc(TP, FN, FP, TN):
    """Matthews Correlation Coefficient.

    MCC = (TP*TN - FP*FN) / sqrt((TP+FP)(TP+FN)(TN+FP)(TN+FN))

    :param TP: number of true positives
    :param FN: number of false negatives
    :param FP: number of false positives
    :param TN: number of true negatives
    :return: MCC in [-1, 1]; with np.divide, a zero denominator yields
        nan/inf (with a runtime warning) rather than raising.
    """
    # Bug fix: the numerator is TP*TN - FP*FN (the covariance form of MCC),
    # not TP*TN + FP*FN.
    return np.divide(
        TP * TN - FP * FN, sqrt((TP + FP) * (TP + FN) * (TN + FP) * (TN + FN))
    )
def sens(TP, FN, FP, TN):
    """Sensitivity (true positive rate): TP / (TP + FN)."""
    actual_positives = TP + FN
    return np.divide(TP, actual_positives)
def spec(TP, FN, FP, TN):
    """Specificity (true negative rate): TN / (TN + FP)."""
    actual_negatives = TN + FP
    return np.divide(TN, actual_negatives)
def prec_pos(TP, FN, FP, TN):
    """Positive precision: TP / (TP + FP)."""
    predicted_positives = TP + FP
    return np.divide(TP, predicted_positives)
def recall_pos(TP, FN, FP, TN):
    """Positive recall: TP / (TP + FN) (identical to sensitivity)."""
    actual_positives = TP + FN
    return np.divide(TP, actual_positives)
def prec_neg(TP, FN, FP, TN):
    """Negative precision: TN / (TN + FN)."""
    predicted_negatives = TN + FN
    return np.divide(TN, predicted_negatives)
def recall_neg(TP, FN, FP, TN):
    """Negative recall: TN / (TN + FP) (identical to specificity)."""
    actual_negatives = TN + FP
    return np.divide(TN, actual_negatives)
def f1_pos(TP, FN, FP, TN):
    """f1-measure on positive instances.

    Harmonic mean of positive precision and positive recall:
    2 * P * R / (P + R).

    :param TP: number of true positives
    :param FN: number of false negatives
    :param FP: number of false positives
    :param TN: number of true negatives (unused, kept for a uniform signature)
    :return: F1 score for the positive class; nan (via np.divide) when
        precision + recall is zero.
    """
    # Compute precision and recall once each instead of calling the
    # helper functions twice apiece (the original recomputed each twice).
    precision = np.divide(TP, (TP + FP))
    recall = np.divide(TP, (TP + FN))
    return np.divide(2 * precision * recall, (precision + recall))
def f1_neg(TP, FN, FP, TN):
    """f1-measure on negative instances.

    Harmonic mean of negative precision and negative recall:
    2 * P * R / (P + R).

    :param TP: number of true positives (unused, kept for a uniform signature)
    :param FN: number of false negatives
    :param FP: number of false positives
    :param TN: number of true negatives
    :return: F1 score for the negative class; nan (via np.divide) when
        precision + recall is zero.
    """
    # Compute precision and recall once each instead of calling the
    # helper functions twice apiece (the original recomputed each twice).
    precision = np.divide(TN, (TN + FN))
    recall = np.divide(TN, (TN + FP))
    return np.divide(2 * precision * recall, (precision + recall))