Module Learn.Metrics.ConfusionMatrix

Package: purescript-learn
Repository: carldata/purescript-learn
Compute a confusion matrix (counts of true positives TP, false positives FP, true negatives TN, and false negatives FN) to evaluate the accuracy of a classifier.
#toString Source

toString :: ConfusionMatrix -> String

#precision Source

precision :: ConfusionMatrix -> Number

precision = TP / (TP + FP)
#recall Source

recall :: ConfusionMatrix -> Number

recall = TP / (TP + FN)
#accuracy Source

accuracy :: ConfusionMatrix -> Number

accuracy = (TP + TN) / (TP + TN + FP + FN)
#fscore Source

fscore :: ConfusionMatrix -> Number

F1 score = 2 * (precision * recall) / (precision + recall)
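The formulas above can be illustrated with a small, self-contained sketch. It does not use the library's ConfusionMatrix type directly: the Counts record, its field names (tp, fp, tn, fn), and the primed function names below are assumptions made for the example, and the library's actual representation and constructors may differ.

module Example where

import Prelude

-- Hypothetical record of raw counts; the library's actual
-- ConfusionMatrix representation may differ.
type Counts = { tp :: Number, fp :: Number, tn :: Number, fn :: Number }

-- precision = TP / (TP + FP)
precision' :: Counts -> Number
precision' c = c.tp / (c.tp + c.fp)

-- recall = TP / (TP + FN)
recall' :: Counts -> Number
recall' c = c.tp / (c.tp + c.fn)

-- accuracy = (TP + TN) / (TP + TN + FP + FN)
accuracy' :: Counts -> Number
accuracy' c = (c.tp + c.tn) / (c.tp + c.tn + c.fp + c.fn)

-- F1 = 2 * (precision * recall) / (precision + recall)
fscore' :: Counts -> Number
fscore' c =
  let p = precision' c
      r = recall' c
  in 2.0 * (p * r) / (p + r)

-- Worked example: 8 TP, 2 FP, 5 TN, 1 FN gives
-- precision = 8/10 = 0.8, recall = 8/9 ≈ 0.889, accuracy = 13/16 = 0.8125.
example :: Counts
example = { tp: 8.0, fp: 2.0, tn: 5.0, fn: 1.0 }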