From b91dabaaa9d968466bf25f4ed9a3d41a4677b709 Mon Sep 17 00:00:00 2001
From: Tássia Camões Araújo
Date: Wed, 14 Sep 2011 03:56:08 +0000
Subject: [PATCH] Added MCC metric.

---
 src/evaluation.py | 23 +++++++++++++++++++++++
 1 file changed, 23 insertions(+), 0 deletions(-)

diff --git a/src/evaluation.py b/src/evaluation.py
index af76d14..825082a 100644
--- a/src/evaluation.py
+++ b/src/evaluation.py
@@ -140,6 +140,29 @@ class FPR(Metric):
         return (float(len(evaluation.false_positive))/
                 evaluation.real_negative_len)
 
+class MCC(Metric):
+    """
+    Matthews correlation coefficient.
+    """
+    def __init__(self):
+        """
+        Set metric description.
+        """
+        self.desc = " MCC "
+
+    def run(self,evaluation):
+        """
+        Compute metric.
+        """
+        VP = len(evaluation.true_positive)
+        FP = len(evaluation.false_positive)
+        FN = len(evaluation.false_negative)
+        VN = evaluation.true_negative_len
+        if (VP+FP)==0 or (VP+FN)==0 or (VN+FP)==0 or (VN+FN)==0:
+            return 0
+        MCC = (((VP*VN)-(FP*FN))/math.sqrt((VP+FP)*(VP+FN)*(VN+FP)*(VN+FN)))
+        return MCC
+
 class F_score(Metric):
     """
     Classification accuracy metric which correlates precision and recall into an
--
libgit2 0.21.2
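
Note (not part of the patch): the run() method added above computes the standard Matthews
correlation coefficient, MCC = (VP*VN - FP*FN) / sqrt((VP+FP)(VP+FN)(VN+FP)(VN+FN)),
returning 0 whenever any factor of the denominator is zero. A minimal standalone sketch of
the same computation, with made-up confusion counts for illustration:

import math

def mcc(vp, fp, fn, vn):
    # Mirrors MCC.run(): correlation between predicted and real classes,
    # computed from confusion-matrix counts (vp/fp/fn/vn = TP/FP/FN/TN).
    denom = (vp + fp) * (vp + fn) * (vn + fp) * (vn + fn)
    if denom == 0:
        return 0
    return ((vp * vn) - (fp * fn)) / math.sqrt(denom)

print(mcc(40, 10, 5, 45))  # roughly 0.70 for these hypothetical counts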