-- Hoogle documentation, generated by Haddock
-- See Hoogle, http://www.haskell.org/hoogle/

-- | The most frequently used machine learning tools
--
-- Please see the README on Github at
-- https://github.com/masterdezign/Learning#readme
@package Learning
@version 0.0.3

module Learning

-- | Teacher matrix
--
-- 0 0 0 0 0
-- 0 0 0 0 0
-- 1 1 1 1 1  <- Desired class index is 2
-- 0 0 0 0 0  <- Number of classes is 4
--     ^
--     5 repetitions
type Teacher = Matrix Double

-- | Create a binary Teacher matrix with a row of ones corresponding
-- to the desired class index
teacher :: Int -> Int -> Int -> Teacher

-- | Classifier function that maps a measurement matrix (columns are
-- measurements, rows are the corresponding features) into a
-- categorical output.
newtype Classifier a
Classifier :: (Matrix Double -> a) -> Classifier a
[classify] :: Classifier a -> Matrix Double -> a

-- | Regressor function that maps some feature matrix into a continuous
-- multidimensional output. The feature matrix is expected to have
-- columns corresponding to measurements (data points) and rows
-- corresponding to features.
newtype Regressor
Regressor :: (Matrix Double -> Matrix Double) -> Regressor
[predict] :: Regressor -> Matrix Double -> Matrix Double

-- | Linear readout (matrix)
type Readout = Matrix Double

-- | Perform supervised learning (ridge regression) and create a linear
-- Classifier function. The regression is run with regularization
-- parameter μ = 1e-4.
learnClassifier :: (Storable a, Eq a) => Vector a -> Matrix Double -> Matrix Double -> Either String (Classifier a)

-- | Perform supervised learning (ridge regression) and create a linear
-- Regressor function.
learnRegressor :: Matrix Double -> Matrix Double -> Either String Regressor

-- | Create a linear Readout using ridge regression. Similar to
-- learnRegressor, but instead of a Regressor function an (already
-- transposed) Readout matrix may be returned.
learn' :: Matrix Double -> Matrix Double -> Maybe Readout

-- | Evaluate the network state (nonlinear response) according to some
-- Readout matrix. Used by classification strategies such as
-- winnerTakesAll.
scores :: Readout -> Matrix Double -> Vector Double

-- | Winner-takes-all classification method
winnerTakesAll :: (Storable a, Eq a) => Readout -> Vector a -> Classifier a
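
-- The block below is an editorial usage sketch, not generated by
-- Haddock. It shows one way the pieces above could fit together:
-- build a feature matrix and a Teacher-style target matrix, train
-- with learnClassifier, and classify a new sample. The toy numbers,
-- the labels 'A' and 'B', and the assumed argument order (labels,
-- feature matrix, teacher matrix) are illustrative only.
--
-- > import qualified Data.Vector.Storable as V
-- > import Numeric.LinearAlgebra (Matrix, fromLists)
-- > import Learning
-- >
-- > classifierDemo :: IO ()
-- > classifierDemo = do
-- >   -- Feature matrix: 2 features (rows) x 4 measurements (columns)
-- >   let xs = fromLists [ [0.1, 0.2, 0.9, 0.8]
-- >                      , [0.0, 0.1, 1.0, 0.9] ] :: Matrix Double
-- >   -- Class labels, in the same order as the teacher matrix rows
-- >   let klasses = V.fromList "AB"
-- >   -- Teacher matrix: the row of ones marks each column's class
-- >   let target = fromLists [ [1, 1, 0, 0]
-- >                          , [0, 0, 1, 1] ] :: Matrix Double
-- >   case learnClassifier klasses xs target of
-- >     Left err -> putStrLn err
-- >     Right c  -> do
-- >       -- A new measurement is given as a single-column matrix
-- >       let sample = fromLists [[0.85], [0.95]] :: Matrix Double
-- >       -- For this toy data, winner-takes-all should pick 'B'
-- >       print (classify c sample)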

-- | Accuracy of classification, 100% - errorRate
--
-- >>> accuracy [1,2,3,4] [1,2,3,7]
-- 75.0
accuracy :: (Eq lab, Fractional acc) => [lab] -> [lab] -> acc

-- | Error rate in %, an error measure for classification tasks
--
-- >>> errorRate [1,2,3,4] [1,2,3,7]
-- 25.0
errorRate :: (Eq a, Fractional err) => [a] -> [a] -> err

-- | Pairs of misclassified and correct values
--
-- >>> errors $ zip ['x','y','z'] ['x','b','a']
-- [('y','b'),('z','a')]
--
errors :: Eq lab => [(lab, lab)] -> [(lab, lab)]
-- | Confusion matrix normalized by row: ASCII representation.
--
-- Note: it is assumed that target (true) labels list contains all
-- possible labels.
--
--         | Predicted
--      ---+------------
--         | _ _ _ _ _
--   True  | _ _ _ _ _
--         | _ _ _ _ _
--   label | _ _ _ _ _
--         | _ _ _ _ _
--
-- >>> putStr $ showConfusion [1, 2, 3, 1] [1, 2, 3, 2]
--        1     2     3
-- 1   50.0  50.0   0.0
-- 2    0.0 100.0   0.0
-- 3    0.0   0.0 100.0
showConfusion :: (Ord lab, Eq lab, Show lab) => [lab] -> [lab] -> String

-- | Normalized confusion matrix for an arbitrary number of classes
confusion :: (Ord lab, Eq lab) => Normalize -> [lab] -> [lab] -> Map (lab, lab) Double

-- | Normalization strategies for confusion matrix
data Normalize
ByRow :: Normalize
ByColumn :: Normalize

-- | Confusion matrix for an arbitrary number of classes (not normalized)
confusion' :: (Ord lab, Eq lab) => [lab] -> [lab] -> Map (lab, lab) Int

-- | Normalized root mean square error (NRMSE), one of the most common
-- error measures for regression tasks
nrmse :: (Storable a, Floating a) => Vector a -> Vector a -> a

instance GHC.Classes.Eq Learning.Normalize
instance GHC.Show.Show Learning.Normalize
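
-- The block below is an editorial sketch, not generated by Haddock.
-- It illustrates how learnRegressor, predict, and nrmse might be
-- combined on a toy dataset. The data are made up, and the assumed
-- argument orders (features then targets for learnRegressor; targets
-- then predictions for nrmse) are illustrative only.
--
-- > import Numeric.LinearAlgebra (Matrix, fromLists, flatten)
-- > import Learning
-- >
-- > regressorDemo :: IO ()
-- > regressorDemo = do
-- >   -- Feature matrix: 2 features (rows) x 5 measurements (columns);
-- >   -- the second feature acts as a constant bias term
-- >   let xs = fromLists [ [1, 2, 3, 4, 5]
-- >                      , [1, 1, 1, 1, 1] ] :: Matrix Double
-- >   -- Desired outputs: one row, one column per measurement
-- >   let ys = fromLists [ [2, 4, 6, 8, 10] ] :: Matrix Double
-- >   case learnRegressor xs ys of
-- >     Left err -> putStrLn err
-- >     Right r  -> do
-- >       let ys' = predict r xs
-- >       -- Training-set NRMSE; near zero for this linear toy data
-- >       print (nrmse (flatten ys) (flatten ys'))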