Commit 759112d0 authored by Pierre LOTTE

Add F1 score computation from precision-recall curve

parent a806e498
@@ -7,12 +7,15 @@ import subprocess
 import matplotlib.pyplot as plt
 import numpy as np
 import pandas as pd
-from sklearn.metrics import roc_auc_score, roc_curve, f1_score
+from warnings import simplefilter
+from sklearn.metrics import roc_auc_score, precision_recall_curve
 from sklearn.preprocessing import Normalizer

 from .base import BaseResults

+simplefilter("ignore", category=RuntimeWarning)
+

 class ROCResults(BaseResults):
     """
     This class will compute the ROC metric.
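
A note on the new `simplefilter` call: the F1 computation introduced in the second hunk divides by `prec + rec`, which is zero at any threshold where precision and recall both vanish, and NumPy then yields NaN and emits a RuntimeWarning. The snippet below is a standalone sketch of that behaviour and of the module-wide suppression the commit opts for (the arrays are illustrative, not repository data):

import numpy as np
from warnings import simplefilter

# 0/0 yields NaN and, by default, a RuntimeWarning from NumPy.
prec = np.array([0.0, 0.5, 1.0])
rec = np.array([0.0, 0.5, 0.0])
fscore = (2 * prec * rec) / (prec + rec)  # RuntimeWarning: invalid value encountered
print(fscore)  # [nan 0.5 0. ]

# The commit silences such warnings for the whole module ...
simplefilter("ignore", category=RuntimeWarning)
fscore = (2 * prec * rec) / (prec + rec)  # silent now
# ... and maps the NaN entries back to 0 before taking the maximum.
print(np.nan_to_num(fscore))  # [0.  0.5 0. ]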
@@ -107,19 +110,11 @@
         # Once the correct anomaly scores have been computed, we can compute the metrics
         roc = roc_auc_score(labels, result)
-        fpr, tpr, thresh = roc_curve(labels, result)
-        closest_dist = np.inf
-        closest_thresh = np.inf
-        best_couple = (0,0)
-        for f, t, th in zip(fpr, tpr, thresh):
-            dist = np.sqrt((f-0)**2+(t-1)**2)
-            if dist < closest_dist:
-                closest_dist = dist
-                closest_thresh = th
-                best_couple = (f, t)
-        binary_labels = (result > closest_thresh).astype(int)
-        f1 = f1_score(labels, binary_labels)
-        return roc, f1
+        prec, rec, _ = precision_recall_curve(labels, result)
+        fscore = (2 * prec * rec) / (prec + rec)
+        fscore = np.nan_to_num(fscore)
+        idx = np.argmax(fscore)
+        return roc, fscore[idx]
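
The removed logic binarised the scores at the ROC-curve point closest to (0, 1) and scored that single split; the new logic instead evaluates F1 (the harmonic mean of precision and recall) at every threshold returned by `precision_recall_curve` and reports the maximum. A minimal self-contained sketch of the new computation follows; the labels and scores are illustrative assumptions, not taken from the repository, and `np.errstate` is used here as a locally scoped alternative to the commit's module-wide `simplefilter`:

import numpy as np
from sklearn.metrics import precision_recall_curve, roc_auc_score

# Illustrative inputs: binary anomaly labels and continuous anomaly scores.
labels = np.array([0, 0, 0, 1, 0, 1, 1, 0, 1, 0])
result = np.array([0.10, 0.40, 0.35, 0.80, 0.20, 0.70, 0.90, 0.30, 0.65, 0.15])

roc = roc_auc_score(labels, result)

# One (precision, recall) pair per candidate threshold.
prec, rec, _ = precision_recall_curve(labels, result)

# F1 at each threshold; suppress the 0/0 warning only inside this block.
with np.errstate(invalid="ignore"):
    fscore = (2 * prec * rec) / (prec + rec)
fscore = np.nan_to_num(fscore)  # NaN (0/0) entries become 0

# Report the best F1 achievable over all thresholds.
idx = np.argmax(fscore)
print(f"ROC AUC = {roc:.3f}, best F1 = {fscore[idx]:.3f}")

Unlike the removed code, this never commits to one operating threshold; it simply reports the best F1 reachable anywhere on the precision-recall curve.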