machinelearning_confusionmatrix.py
# -*- coding: utf-8 -*-
"""MachineLearning_ConfusionMatrix.ipynb

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/1z0lZldXvY4rw2b9WEzZqvIaN7FHRAVIq
"""

import matplotlib.pyplot as plt
import numpy
from sklearn import metrics

# Generate synthetic binary labels: 1000 Bernoulli(p=0.9) draws each for actual and predicted
actual = numpy.random.binomial(1, 0.9, size=1000)
predicted = numpy.random.binomial(1, 0.9, size=1000)

# Compute the confusion matrix and plot it
confusion_matrix = metrics.confusion_matrix(actual, predicted)
cm_display = metrics.ConfusionMatrixDisplay(confusion_matrix=confusion_matrix, display_labels=[False, True])
cm_display.plot()
plt.show()

'''
Confusion matrix layout (rows = actual, columns = predicted):
True Negative   (Top-Left Quadrant)
False Positive  (Top-Right Quadrant)
False Negative  (Bottom-Left Quadrant)
True Positive   (Bottom-Right Quadrant)
'''
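
# Minimal layout check (a sketch, not in the original notebook): for the binary
# labels above, ravel() flattens the 2x2 matrix row by row, so the counts come
# out in the order TN, FP, FN, TP.
tn, fp, fn, tp = confusion_matrix.ravel()
print({"TN": tn, "FP": fp, "FN": fn, "TP": tp})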

# Accuracy: (True Positive + True Negative) / Total Predictions
Accuracy = metrics.accuracy_score(actual, predicted)

# Precision: True Positive / (True Positive + False Positive)
Precision = metrics.precision_score(actual, predicted)

# Sensitivity (recall): True Positive / (True Positive + False Negative)
Sensitivity_recall = metrics.recall_score(actual, predicted)

# Specificity: True Negative / (True Negative + False Positive)
# (recall of the negative class, hence pos_label=0)
Specificity = metrics.recall_score(actual, predicted, pos_label=0)

# F-score: 2 * ((Precision * Sensitivity) / (Precision + Sensitivity))
F1_score = metrics.f1_score(actual, predicted)

# Print all of the metrics together
print({
    "Accuracy": Accuracy,
    "Precision": Precision,
    "Sensitivity_recall": Sensitivity_recall,
    "Specificity": Specificity,
    "F1_score": F1_score,
})