HLearn-classification-1.0.1.1

Safe Haskell: None

HLearn.Evaluation.CrossValidation

Synopsis

Documentation

crossvalidation :: (HomTrainer model, Monoid ret, Monoid (container (Datapoint model)), Partitionable container, PartitionableConstraint container (Datapoint model), Foldable container, Functor container) => container (Datapoint model) -> (model -> container (Datapoint model) -> ret) -> Int -> ret

This is the preferred cross-validation routine for use with the HomTrainer type class. It is asymptotically faster than standard k-fold cross-validation (implemented here as lame_crossvalidation), yet is guaranteed to produce exactly the same answer.
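For orientation, here is a hedged sketch of how crossvalidation might be invoked. loadedData, MyClassifier, and the choice of Normal Double as the monoidal result are assumptions; errorRate is documented below, and train1dp is assumed to be the single-datapoint trainer from the HomTrainer class.

-- Hedged usage sketch, not taken from the package.  The evaluation
-- function must return a Monoid, so each fold's error rate is wrapped
-- into a 'Normal Double' summary via 'train1dp'.
cvResult :: Normal Double
cvResult = crossvalidation
    loadedData                                               -- the full dataset (hypothetical)
    (\model testFold -> train1dp (errorRate model testFold)) -- loss on each held-out fold
    10                                                       -- number of folds (k)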

type LossFunction model = model -> [Datapoint model] -> Double

leaveOneOut :: [dp] -> [[dp]]

folds :: Int -> [dp] -> [[dp]]
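The exact grouping these two functions produce is not spelled out here. One plausible reading, offered only as an illustration, is that leaveOneOut yields one singleton test fold per datapoint while folds k splits the list into k roughly equal parts:

-- Hedged illustration of one plausible behaviour; the actual ordering
-- and sizing of the folds may differ in the package.
-- >>> leaveOneOut "abcd"
-- ["a","b","c","d"]
-- >>> folds 2 [1,2,3,4,5,6]
-- [[1,2,3],[4,5,6]]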

errorRate :: (Classifier model, Labeled (Datapoint model), Eq (Label (Datapoint model))) => LossFunction model

crossValidate :: (HomTrainer model, Eq (Datapoint model)) => [[Datapoint model]] -> LossFunction model -> Normal Double
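Putting the pieces together, a typical k-fold run might look like the sketch below; testData is a placeholder for a list of labeled datapoints, and the Normal Double result is read here, as an assumption, as a summary (mean and variance) of the per-fold losses.

-- Hedged sketch: 10-fold cross-validation using the fold generator and
-- loss function documented on this page.  'testData' is a placeholder.
report :: Normal Double
report = crossValidate (folds 10 testData) errorRate

-- Leave-one-out cross-validation would swap the fold generator:
-- crossValidate (leaveOneOut testData) errorRate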

crossValidate_group :: (HomTrainer model, Group model) => [[Datapoint model]] -> LossFunction model -> Normal Double
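The Group constraint suggests a strategy in which the model over all folds is trained once and each test fold's contribution is then removed by a group inverse, rather than re-combining the remaining folds. The sketch below is an assumption about that idea, not the package's actual code; inverse stands in for whatever inverse operation HLearn's Group class exposes, and the result is left as a plain list of per-fold losses.

-- Hedged sketch of the group-based strategy (an assumption, not the
-- package source).  'train' is the HomTrainer batch trainer.
crossValidateGroupSketch
    :: (HomTrainer model, Group model)
    => [[Datapoint model]] -> LossFunction model -> [Double]
crossValidateGroupSketch xs loss =
    [ loss (fullModel `mappend` inverse foldModel) testFold
    | (testFold, foldModel) <- zip xs foldModels ]
  where
    foldModels = map train xs                      -- one sub-model per fold
    fullModel  = foldr mappend mempty foldModels   -- model over all the data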

listAllBut2 :: Monoid a => [a] -> [a]

listAllBut :: Monoid a => [a] -> [a]

genTestList :: Monoid a => [a] -> [(a, a)]
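These helpers appear to build, for each fold, the combination of every other fold, which is what turns a list of per-fold models into (test fold, training model) pairs. A plausible implementation, offered as an assumption rather than the package source, uses prefix and suffix scans so only a linear number of mappends is needed:

import Data.Monoid (Monoid (..))

-- Hedged sketch (an assumption, not the package source).
-- listAllBut xs !! i is the mconcat of every element of xs except xs !! i;
-- genTestList pairs each element with the combination of all the others.
listAllBut :: Monoid a => [a] -> [a]
listAllBut xs = zipWith mappend
    (init (scanl mappend mempty xs))   -- combination of everything before position i
    (tail (scanr mappend mempty xs))   -- combination of everything after position i

genTestList :: Monoid a => [a] -> [(a, a)]
genTestList xs = zip xs (listAllBut xs)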