from sklearn.metrics import confusion_matrix
import itertools
import numpy as np
import matplotlib.pyplot as plt

def plot_confusion_matrix(cm, target_names, title='Confusion matrix',
                          cmap=None, normalize=False):
    # Overall accuracy is the trace (correct predictions) over the total count.
    accuracy = np.trace(cm) / float(np.sum(cm))
    misclass = 1 - accuracy

    if cmap is None:
        cmap = plt.get_cmap('Blues')

    plt.figure(figsize=(6, 4))
    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title)
    plt.colorbar()

    if target_names is not None:
        tick_marks = np.arange(len(target_names))
        plt.xticks(tick_marks, target_names, rotation=45)
        plt.yticks(tick_marks, target_names)

    if normalize:
        # Divide each row by its class total so cells show per-class recall.
        # The [:, np.newaxis] is needed so the row sums broadcast row-wise.
        cm = cm.astype('float32') / cm.sum(axis=1)[:, np.newaxis]
        cm = np.round(cm, 2)

    # Contrast threshold: draw text in white on dark cells, black on light ones.
    thresh = cm.max() / 1.5 if normalize else cm.max() / 2
    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        if normalize:
            plt.text(j, i, "{:0.2f}".format(cm[i, j]),
                     horizontalalignment="center",
                     color="white" if cm[i, j] > thresh else "black")
        else:
            plt.text(j, i, "{:,}".format(cm[i, j]),
                     horizontalalignment="center",
                     color="white" if cm[i, j] > thresh else "black")

    plt.tight_layout()
    plt.ylabel('True label')
    plt.xlabel("Predicted label\naccuracy={:0.4f}\nmisclass={:0.4f}".format(accuracy, misclass))
    plt.show()
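Before wiring the function up to the model, it can be sanity-checked on a small hand-made matrix. A minimal sketch, where cm_demo and the class names 'a', 'b', 'c' are made-up values purely to exercise the plotting code:

# hypothetical 3x3 counts, not real model output
cm_demo = np.array([[50, 3, 2],
                    [5, 40, 5],
                    [2, 8, 45]])
plot_confusion_matrix(cm_demo, target_names=['a', 'b', 'c'], normalize=True)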
labels = ['0-30', '31-60', '60-100']
# Predict class probabilities over the whole test set, then take the argmax
# to get a predicted class index per sample. Note that `steps` must be passed
# by keyword: the second positional argument of model.predict is batch_size.
Y_pred = model.predict(test_generator, steps=test_num // batch_size + 1)
Y_pred_classes = np.argmax(Y_pred, axis=1)

# test_generator.classes lists the true labels in file order, so this only
# lines up with the predictions if the generator was built with shuffle=False.
confusion_mtx = confusion_matrix(y_true=test_generator.classes, y_pred=Y_pred_classes)
plot_confusion_matrix(confusion_mtx, normalize=True, target_names=labels)
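If your scikit-learn version is 1.0 or newer, a similar plot (without the accuracy footer) comes built in via ConfusionMatrixDisplay. A minimal sketch, assuming the same test_generator.classes, Y_pred_classes, and labels from above:

from sklearn.metrics import ConfusionMatrixDisplay

# normalize='true' divides each row by its class total, like normalize=True above
ConfusionMatrixDisplay.from_predictions(
    test_generator.classes, Y_pred_classes,
    display_labels=labels, normalize='true', cmap='Blues')
plt.show()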