2 Ensemble Anomaly Detector Framework

from sklearn.metrics import precision_score
from sklearn.metrics import recall_score
from sklearn.metrics import f1_score

class Evaluator(object):  # placeholder name; the original class name is not shown in the excerpt

    def __init__(self):
        # Confusion matrix counts are unset until set_confusion_matrix() is called.
        self.__tp = None
        self.__fp = None
        self.__tn = None
        self.__fn = None

    def set_confusion_matrix(self, tp, fp, tn, fn):
        """Sets the confusion matrix values."""
        self.__tp = tp
        self.__fp = fp
        self.__tn = tn
        self.__fn = fn

    def get_measures(self):  # method name not shown in the excerpt
        """
        Before measures are computed, make sure the confusion matrix
        values have been set. Measures are printed and returned.
        """
        if self.__tp is None or self.__fp is None or \
                self.__tn is None or self.__fn is None:
            print 'Error: Confusion matrix values not set.'
            return None

        acc = self.get_accuracy()
        rec = self.get_recall()
        prec = self.get_precision()
        f1 = self.get_f1measure()

        print '\taccuracy', acc
        print '\trecall', rec
        print '\tprecision', prec
        print '\tf1measure', f1

        return acc, rec, prec, f1
    def get_accuracy(self):
        """
        Fraction of samples classified correctly:
            acc = (tp + tn) / (tp + fp + tn + fn)
        """
        res = float(self.__tp + self.__tn) / \
            (self.__tp + self.__fp + self.__tn + self.__fn)
        return res

    def get_recall(self):
        """
        Fraction of actual attacks that were detected:
            rec = tp / (tp + fn)
        """
        # The 1e-6 terms guard against division by zero.
        res = float(self.__tp + 1e-6) / (self.__tp + self.__fn + 1e-6)
        return res

    def get_precision(self):
        """
        Fraction of flagged samples that are actual attacks:
            prec = tp / (tp + fp)
        """
        # The 1e-6 terms guard against division by zero.
        res = float(self.__tp + 1e-6) / (self.__tp + self.__fp + 1e-6)
        return res
    def get_f1measure(self):
        """
        Balanced F1 score that is the harmonic mean of both recall
        and precision:
            f1 = (2 * rec * prec) / (rec + prec)
        """
        rec = self.get_recall()
        prec = self.get_precision()
        res = (2 * rec * prec) / (rec + prec)
        return res

    def output_matrix(self, true_y, pred_y):
        """
        Meant to print the output confusion matrix. Specifically, the
        fractions of true/false positives and negatives together with
        the derived measures. The above gets printed for both the
        training data and the testing data.
        """
        count_train = len(true_y)
        tpos = fpos = tneg = fneg = 0
        # Label convention: -1 = attack (positive class), 1 = benign.
        for i in xrange(count_train):
            if (pred_y[i] == -1) and (true_y[i, 0] == -1):
                tpos += 1
            elif (pred_y[i] == -1) and (true_y[i, 0] == 1):
                fpos += 1
            elif (pred_y[i] == 1) and (true_y[i, 0] == 1):
                tneg += 1
            elif (pred_y[i] == 1) and (true_y[i, 0] == -1):
                fneg += 1

        # Store the raw counts so the get_* measures below have values to work from.
        self.set_confusion_matrix(tpos, fpos, tneg, fneg)

        # Fraction of samples predicted benign vs. predicted attack.
        benign = float(fneg + tneg) / count_train
        attack = float(tpos + fpos) / count_train

        # Normalise the confusion matrix counts to fractions.
        tpos = float(tpos) / count_train
        fpos = float(fpos) / count_train
        tneg = float(tneg) / count_train
        fneg = float(fneg) / count_train

        print 'Benign/Attack:\t', "{:.4f}".format(benign), '\t', "{:.4f}".format(attack)
        print 'Output Results - '
        print '\ttpos: \t', "{:.4f}".format(tpos)
        print '\tfpos: \t', "{:.4f}".format(fpos)
        print '\ttneg: \t', "{:.4f}".format(tneg)
        print '\tfneg: \t', "{:.4f}".format(fneg)
        print '\taccuracy: \t', "{:.4f}".format(self.get_accuracy())
        print '\trecall: \t', "{:.4f}".format(self.get_recall())
        print '\tprecision: \t', "{:.4f}".format(self.get_precision())
        print '\tf1measure: \t', "{:.4f}".format(self.get_f1measure())
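
A minimal usage sketch of the helper above, assuming the placeholder class name Evaluator and the -1 = attack / 1 = benign label convention used in output_matrix; the data and variable names here are illustrative and not part of the framework:

import numpy as np

# Hypothetical predictions and ground truth following the listing's
# conventions: pred_y is a 1-D array, true_y has one label per row.
pred_y = np.array([-1, -1, 1, 1, -1])
true_y = np.array([[-1], [1], [1], [-1], [-1]])

ev = Evaluator()
ev.output_matrix(true_y, pred_y)          # prints the confusion matrix fractions
acc, rec, prec, f1 = ev.get_measures()    # prints and returns the four measures

# The sklearn functions imported in the listing can cross-check the
# hand-computed values (treating -1, the attack label, as positive):
print precision_score(true_y[:, 0], pred_y, pos_label=-1)
print recall_score(true_y[:, 0], pred_y, pos_label=-1)
print f1_score(true_y[:, 0], pred_y, pos_label=-1)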