-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path utils.py
More file actions
51 lines (36 loc) · 1.39 KB
/
utils.py
File metadata and controls
51 lines (36 loc) · 1.39 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import division
'functions'
__author__ = 'Ma Cong'
import torch
import numpy as np
def precision(y_true, y_pred):
    """Precision metric: true positives / predicted positives.

    Inputs are treated as (soft) binary indicator arrays: values are
    clipped to [0, 1] and rounded before counting. A small epsilon in
    the denominator guards against division by zero when nothing is
    predicted positive.
    """
    true_positives = np.sum(np.round(np.clip(y_true * y_pred, 0, 1)))
    predicted_positives = np.sum(np.round(np.clip(y_pred, 0, 1)))
    return true_positives / (predicted_positives + 1e-6)
def recall(y_true, y_pred):
    """Recall metric: true positives / actual positives.

    Inputs are treated as (soft) binary indicator arrays: values are
    clipped to [0, 1] and rounded before counting. A small epsilon in
    the denominator guards against division by zero when the ground
    truth contains no positives.
    """
    true_positives = np.sum(np.round(np.clip(y_true * y_pred, 0, 1)))
    possible_positives = np.sum(np.round(np.clip(y_true, 0, 1)))
    return true_positives / (possible_positives + 1e-6)
def fbeta_score(y_true, y_pred, beta=1):
    """F-beta score: the weighted harmonic mean of precision and recall.

    beta weights recall relative to precision (beta=1 gives F1).
    Raises ValueError for negative beta. When the ground truth has no
    positive samples the score is fixed at 0, matching sklearn's
    convention.
    """
    if beta < 0:
        raise ValueError('The lowest choosable beta is zero (only precision).')
    # No positives in the ground truth -> F score is 0 (like sklearn).
    if np.sum(np.round(np.clip(y_true, 0, 1))) == 0:
        return 0
    p = precision(y_true, y_pred)
    r = recall(y_true, y_pred)
    bb = beta ** 2
    # Epsilon-guarded denominator avoids 0/0 when both p and r are 0.
    return (1 + bb) * p * r / (bb * p + r + 1e-6)
def fmeasure(p, r):
    """F1 score: the harmonic mean of precision ``p`` and recall ``r``.

    The epsilon in the denominator keeps the result finite when both
    inputs are zero.
    """
    numerator = 2.0 * p * r
    denominator = p + r + 1e-6
    return numerator / denominator
def sigmoid(x):
    """Numerically stable logistic sigmoid, 1 / (1 + exp(-x)).

    Implemented as 0.5 * (1 + tanh(x / 2)), which is mathematically
    identical to the naive form but never overflows: np.exp(-x)
    overflows float64 (and emits a RuntimeWarning) once x drops below
    roughly -709, whereas tanh saturates cleanly at +/-1.

    Works elementwise on scalars or numpy arrays.
    """
    return 0.5 * (1.0 + np.tanh(0.5 * x))