Module Learn.Metrics.ConfusionMatrix
- Package: purescript-learn
- Repository: carldata/purescript-learn
Compute a confusion matrix to evaluate the accuracy of any classifier.
toString
toString :: ConfusionMatrix -> String
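A minimal usage sketch, assuming only the documented signature; how the ConfusionMatrix value is obtained is left to the caller, and the module name PrintExample is illustrative.

module PrintExample where

import Prelude

import Effect (Effect)
import Effect.Console (log)
import Learn.Metrics.ConfusionMatrix (ConfusionMatrix, toString)

-- Print the textual form of an already-built confusion matrix.
printMatrix :: ConfusionMatrix -> Effect Unit
printMatrix cm = log (toString cm)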
precision
precision :: ConfusionMatrix -> Number
precision = TP / (TP + FP)
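For example, with TP = 8 and FP = 2, precision = 8 / (8 + 2) = 0.8.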
recall
recall :: ConfusionMatrix -> Number
recall = TP / (TP + FN)
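Continuing the same example, with TP = 8 and FN = 4, recall = 8 / (8 + 4) ≈ 0.667.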
accuracy
accuracy :: ConfusionMatrix -> Number
accuracy = (TP + TN) / (TP + TN + FP + FN)
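With TP = 8, TN = 6, FP = 2 and FN = 4, accuracy = (8 + 6) / 20 = 0.7.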
fscore
fscore :: ConfusionMatrix -> Number
F1 score = 2 * (precision * recall) / (precision + recall)
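With precision = 0.8 and recall ≈ 0.667 from the example above, fscore = 2 * (0.8 * 0.667) / (0.8 + 0.667) ≈ 0.727.

The sketch below ties the four metrics together. It assumes only the names documented on this page; constructing the ConfusionMatrix itself is not covered here, so it is taken as an argument, and the module name ReportExample is illustrative.

module ReportExample where

import Prelude

import Learn.Metrics.ConfusionMatrix (ConfusionMatrix, accuracy, fscore, precision, recall)

-- Summarise all four derived metrics as a single String.
report :: ConfusionMatrix -> String
report cm =
  "precision: " <> show (precision cm)
    <> ", recall: " <> show (recall cm)
    <> ", accuracy: " <> show (accuracy cm)
    <> ", F1: " <> show (fscore cm)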