# Evaluate binary-classification performance on the held-out test set:
# print the confusion matrix and the per-class precision/recall/F1 report.
from sklearn.metrics import accuracy_score, classification_report, confusion_matrix

# Threshold the continuous predictions once (0.5 cutoff via round) instead of
# recomputing the same rounding for each metric call.
# NOTE(review): assumes y_pred_test holds scores/probabilities in [0, 1] and
# y_test holds the binary ground-truth labels — both defined earlier in the
# file, outside this view; confirm.
y_pred_labels = y_pred_test.round()
print(confusion_matrix(y_test, y_pred_labels))
print(classification_report(y_test, y_pred_labels))
# Output:
# [[99450   250]
#  [ 4165 11192]]
#               precision    recall  f1-score   support
#            0       0.96      1.00      0.98     99700
#            1       0.98      0.73      0.84     15357
#     accuracy                           0.96    115057
#    macro avg       0.97      0.86      0.91    115057
# weighted avg       0.96      0.96      0.96    115057