{-# LANGUAGE MagicHash, FlexibleInstances, UndecidableInstances, BangPatterns, ParallelListComp, ViewPatterns #-}

{-| This module is designed to implement high-speed versions of the function

@
rangeMinBy :: (Ix i, IArray a e) => (e -> e -> Ordering) -> a i e -> ((i, i) -> (i, e))
rangeMinBy cmp arr = \\ r -> minimumBy (\\ (i1, e1) (i2, e2) -> cmp e1 e2) [(i, arr ! i) | i \<- range r]
@

for indices that are, in a sense, scalars -- that is, the array is one-dimensional.
This restriction is encapsulated in the type restriction 'Enum'.

To be precise, for sufficiently large n, the implementation this module supplies (lazily)
performs @O(n log* n)@ preprocessing and answers queries in @O(log* n)@ time, where
@log* n@ is defined as

@
log* n | n \<= 2     = 0
       | otherwise  = 1 + log* (log2 n)
@

While there is an asymptotically O(n) algorithm, the overhead associated with certain
conversions in that algorithm makes it considerably slower than this implementation for
all reasonable n.  Note that log* n grows extraordinarily slowly with n:
log* n \< 5 for n \< 10^19729.

Using "Test.QuickCheck", generating and processing 100 arrays of 20,000 'Int's, and
performing 1,000 uniformly distributed range-min queries on each, with @+RTS -H512m@,
consistently takes approximately 3.5 CPU seconds on a Dell Latitude D630 with dual
2.5GHz Intel Core Duo processors.  (Parallelism is not currently used by this module,
but may be included in a later distribution.)

This module also includes implementations of a lowest-common-ancestor algorithm on trees
with identical asymptotics.

Warning: the internal libraries of this module include several nonstandard list fusions.
For details, see .
-}

-- An archaic version of this module took 1m 42-46 s for the same process.

module Data.RangeMin (RangeMin, Comparator,
    lowestCommonAncestor, lowestCommonAncestorBy, lowestCommonAncestorBy',
    rangeMinBy, rangeMin, rangeMinByM, rangeMinM, rangeMinBy', rangeMin') where

import Control.Monad(liftM, liftM2)
import Data.Maybe(fromJust, catMaybes, fromMaybe)
import Data.Tree (Tree(Node))
import Data.List(tails)
import Data.Array.IArray(listArray, array, elems, bounds, Ix, Array)
import GHC.Arr(unsafeIndex, unsafeRangeSize)
import Data.Array.MArray(freeze)
import Data.Foldable (Foldable, toList)
import Data.Array.Base(MArray(getNumElements), IArray(numElements, unsafeAt, unsafeArray))
import Data.RangeMin.Internal.HandyArray
import Data.RangeMin.Internal.HandyOrdering (minimumBy, orderPair, minBy, comparing)
import Data.RangeMin.Internal.Combinators (onPair, on, (.:))
import Data.RangeMin.Internal.HandyNumeric(intLog, pow2, divCeil, modCeil)
import Data.RangeMin.Internal.HandyList()
import GHC.Exts
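-- A minimal usage sketch (illustrative only; 'exampleListRM' is hypothetical and is not
-- part of the module's API): build a range-min function over a ten-element list with
-- rangeMin' and query the subrange of indices 2 through 6.  The query below evaluates
-- to (3, 1): the minimum value 1 sits at index 3.
exampleListRM :: (Int, Int)
exampleListRM = rm (2, 6)
    where rm = rangeMin' (0, 9) [5, 3, 8, 1, 9, 2, 7, 4, 6, 0 :: Int]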
-- | The type of a range-min function.  Takes as an argument a range of indices and returns
-- the position and value of the smallest element in that range.
type RangeMin i e = (i, i) -> (i, e)    -- woo higher-order functions
type RangeMinVal i e = (i, i) -> e      -- for internal use
type Comparator e = e -> e -> Ordering

{-# INLINE offset #-}
offset :: Enum i => i -> Int -> i
offset (fromEnum -> I# x#) (I# i#) = toEnum (I# (x# +# i#))

{-# INLINE rangeSize #-}
rangeSize :: Enum i => (i, i) -> Int
rangeSize (fromEnum -> I# i#, fromEnum -> I# j#) = I# (j# -# i# +# 1#)

{-# INLINE index #-}
index :: Enum i => (i, i) -> i -> Int
index (fromEnum -> I# i#, fromEnum -> I# j#) (fromEnum -> I# x#) = I# (x# -# i#)

--------------------------------------------------------------------

data IntAccum1 e = IA1 Int# {-# UNPACK #-} !e
data IntAccum e = IA Int# e             -- hopefully unboxed by GHC
data BlockAccum e f = BA Int# [e] f     -- hopefully unboxed by GHC

-- folds/builds
{-# INLINE assocs' #-}
assocs' :: Enum i => (i, i) -> [e] -> [(i, e)]
assocs' (_, fromEnum -> I# iMax#) l = build (\ c nil ->
    case foldr (assocs'' c) (IA iMax# nil) l of IA _ ls -> ls)
  where assocs'' c x (IA i# ls) = IA (i# -# 1#) ((toEnum (I# i#), x) `c` ls)
-- should fold/build, yay!

{-# INLINE blockify #-}
blockify :: Int -> Int -> [e] -> [(Int, [e])]
blockify (I# n#) (I# bS#) l = build (\ c nil ->
    case foldr (blocker c) (BA n# [] nil) l of BA _ _ bs -> bs)
  where
    breakpoint# = n# -# bS#
    blocker c x (BA i# b bs) =
        let j# = i# -# 1# in
            if j# `remInt#` bS# ==# 0#
                then BA j# [] ((I# (if j# ># breakpoint# then n# -# j# else bS#), x:b) `c` bs)
                else BA j# (x:b) bs
-- should fold/build, yay!

-- {-# INLINE blockify' #-}
{-blockify' :: Enum i => (i, i) -> Int -> [e] -> [(Int, [(i, e)])]
blockify' (fromEnum -> I# iMin#, fromEnum -> I# iMax#) (I# bS#) l = build (\ c nil ->
    case foldr (blocker c) (BA iMax# [] nil) l of BA _ _ bs -> bs)
  where
    iMinPos# = iMin# `remInt#` bS#
    breakpoint# = iMax# -# bS#
    blocker c x (BA i# !b !bs) =
        let j# = i# -# 1#; assocs = (toEnum (I# i#), x):b in
            if i# `remInt#` bS# ==# iMinPos#
                then BA j# [] ((I# (if i# ># breakpoint# then iMax# -# j# else bS#), assocs) `c` bs)
                else BA j# assocs bs-}

{--------------------------------------------------------------------
The rangeMinBy function produces an optimized version of the following method:

    rangeMinBy cmp arr = \ r -> minimumBy (cmp `on` snd) [(i, arr ! i) | i <- range r]

This module contains three implementations of rangeMinBy, which have the following
asymptotics:

    rangeMin3: O(n^2)
    rangeMin2: O(n log n)
    rangeMin1: O(n log* n)

where   log* n | log n < 1  = 0
               | otherwise  = 1 + log* (log n)

This is essentially linear for all imaginable inputs, since log* n <= 5 for n < 10^19729.
(The asymptotics of rangeMin1, by the way, are achieved by bootstrapping to itself until
n is small enough to confidently fall back on rangeMin2 or rangeMin3.)

While there is a strictly linear-time range-min algorithm, it requires converting the
input to a tree, traversing this tree in a special fashion, and calling a specialized
blocked range-min (see below for explanation) on the list of the depth reached at each
step of the traversal.  The time and memory overhead associated with this traversal is
such that it is considerably faster for any conceivable n to use this implementation.
--------------------------------------------------------------------}
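-- A direct Haskell transcription of the iterated logarithm quoted above (illustrative
-- only; 'logStar' is hypothetical and is not used by the module).  It assumes that
-- intLog, from the internal libraries, computes the floor of the base-2 logarithm,
-- which is how it is used elsewhere in this module.
logStar :: Int -> Int
logStar n
    | n <= 2    = 0
    | otherwise = 1 + logStar (intLog n)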
{---------------------
O(n^2) range-min data structure with O(1) query time, with low constant factors on both
construction and query.  Empirical tests indicate that this is the most efficient data
structure for arrays of size less than 30.  Simply finds the minimum element in every
subrange of the array.
----------------------}

rangeMin3Common :: Comparator e -> Int -> [e] -> RangeMinVal Int e
rangeMin3Common cmp !n elems =
    let folderer c t (IA1 m# !l) =
            IA1 (m# +# 1#) (listLookup' (I# m#) (scanl1 (minBy cmp) t) `c` l)
        !rMs = {-# SCC "rangeMin3_table_build" #-} listLookup' (n+1)
            (build (\ c nil -> case foldr (folderer c) (IA1 0# nil) (tails elems) of
                IA1 _ ans -> ans))
    in \ (i, j) -> {-# SCC "rangeMin3_query" #-} rMs i (j-i)
-- This seems to be the maximum possible fold/build construction.  tails elems consumes and
-- produces, though the individual tails themselves must be computed explicitly (since we do
-- have n^2 array elements, after all); folderer consumes the product of tails, listLookup'
-- consumes scanl1, and another listLookup' consumes the entire array production.  The only
-- intermediate lists ever produced are the actual elements of tails elems; everything else
-- is processed directly or put into an array.

rangeMin3Internal :: Comparator e -> Int -> [e] -> RangeMinVal Int e
rangeMin3Internal = rangeMin3Common

{--------------------------------------------------------------------------------------------
O(n log n) construction, O(1) query time range-min algorithm, lazy in computation but does
some allocation immediately.  Relatively low constant factor; best for arrays of size at
most 100.

The algorithm works by finding the minimum element of each range whose width is a power of
two.  (Recursion allows each level to be found in constant time from the previous level.)
When a query of width n is received, let m be the largest power of two at most n; the two
(possibly overlapping) ranges of width m that cover the query range are looked up and
compared.
--------------------------------------------------------------------------------------------}

rangeMin2Common :: Comparator e -> Int -> (Int -> e) -> RangeMinVal Int e
rangeMin2Common cmp n@(I# n#) arr = rMs `seq` \ r@(i, j) -> {-# SCC "rangeMin2_queries" #-}
        let !logWidth = intLog (unsafeRangeSize r)
            left  = i
            right = j - pow2 logWidth + 1
        in rM logWidth left right
    where
        lastIx# = n# -# 1#
        nextRow p# row = unsafeMemoize# (\ i# -> minBy cmp (row i#) (row (i# +# p#))) (I# (lastIx# -# p#))
        rower p# rowM c nil
            | p# ># lastIx# = nil
            | otherwise     = rowM `c` rower (p# +# p#) (nextRow p# rowM) c nil
        rMs = {-# SCC "rangeMin2_rows" #-} listLookup' (intLog n + 1) (build (rower 1# (\ i# -> arr (I# i#))))
        rM !i = minBy cmp `on` (\ (I# j#) -> rMs i j#)
-- Note that no lists are being computed explicitly at all.  Woo!

rangeMin2Internal :: Comparator e -> Int -> [e] -> RangeMinVal Int e
rangeMin2Internal cmp = liftM2 (.) (rangeMin2Common cmp) listLookup'

rangeMin2By' :: (Int -> e) -> Comparator e -> Int -> [e] -> RangeMinVal Int e
rangeMin2By' arr cmp n _ = rangeMin2Common cmp n arr
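-- A compact list-based sketch of the sparse-table idea described above (illustrative
-- only; 'sparseTableSketch' is hypothetical and much slower than rangeMin2Common, which
-- memoizes each row).  Row k holds the minimum of every range of width 2^k; a query of
-- width w is answered from the two width-2^k ranges that cover it, where 2^k <= w.
sparseTableSketch :: Comparator e -> [e] -> (Int, Int) -> e
sparseTableSketch cmp xs (i, j) = minBy cmp (row !! i) (row !! (j - pow2 k + 1))
    where
        k      = intLog (j - i + 1)
        row    = rows !! k
        rows   = grow 1 xs
        grow p r = r : grow (2 * p) (zipWith (minBy cmp) r (drop p r))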
{-----------------------------------------------------------------
Suppose we have a range-min algorithm that takes time f(n) for some superlinear f.
Then if the array is broken into blocks of size b, we can solve range-min queries as
follows:

Break the query into a fractional piece of a block on each end, with an integral number
of blocks in the middle.  Given range-min functions for internal queries to each
individual block, and for any integral range of blocks, we can then answer the query by
making three separate subqueries: two internal block queries and one multi-block query.
Note that the range-min of an integral number of blocks is precisely a range-min of the
array containing the minimum element of each block.  Furthermore, this algorithm
requires only n/b f(b) + f(n/b) time.

For b = log n and the f(n) = n log n construction, this is

    n/(log n) * (log n * log log n) + (n/log n) * log (n/log n)  =  O(n log log n)

In point of fact, replacing f(n) by this new n log log n bound gives an n log log log n
construction, and so on, yielding a bootstrapped O(n log* n) construction.

       i  bi                bj   j
    [--<--|-----|-----|-----|--->-|-]
-----------------------------------------------------------------}

--Generic code for a RangeMin on a list divided into blocks.
--Does all the generic lookup work; the worker methods do all of the list manipulation, folding, etc.
{-# INLINE blockedRangeMin #-}
blockedRangeMin :: Comparator e -> Int -> (Int -> RangeMinVal Int e) -> RangeMinVal Int e -> RangeMinVal (Int, Int) e
blockedRangeMin cmp bS !blockRM !multiBlockRM = \ ((!bi, !xi), (!bj, !xj)) ->
    if bi == bj then {-# SCC "single_block_query" #-} blockRM bi (xi, xj)
        else let sidesMin = min' (blockRM bi (xi, lastIx)) (blockRM bj (0, xj))
             in (if bi == pred bj
                    then {-# SCC "two_block_query" #-} sidesMin
                    else {-# SCC "multi_block_query" #-} min' sidesMin (multiBlockRM (succ bi, pred bj)))
    where min' = minBy cmp; !lastIx = bS - 1

{--------------------------------------------------------------------
Implementation of an O(n log* n) range-min algorithm; internalRangeMin enables this
function to bootstrap to itself.  The bulk of the bootstrapping is not actually done in
the dominant O(n log* n) component, but in (internalRangeMinBy cmp nBlocks mins), which
bootstraps with n' = n / log n.  However, with the changeover to the explicit
nonrecursive algorithm at n=100, bootstrapping will occur at most twice for n < 1.16e5
and at most thrice for n < 1.97e6.
-------------------------------------------------------------------}

rangeMin1Common :: Comparator e -> Int -> Int -> [(Int, [e])] -> RangeMinVal Int e
rangeMin1Common cmp n blockSize blocks =
    blockedRangeMin cmp blockSize blockRM multiBlockRM . onPair (`divMod` blockSize)
    where
        !nBlocks = n `divCeil` blockSize
        blockRM = {-# SCC "block_internal_rangemins" #-}
            listLookup' nBlocks (map (uncurry (internalRangeMinBy cmp)) blocks)
        multiBlockRM = {-# SCC "multi_block_rangemins" #-}
            internalRangeMinBy cmp nBlocks (map (minimumBy cmp . snd) blocks)
-- The lists produced explicitly are the minima (maybe), and the blocks.  The blockify calls consume,
-- but do not produce.  Ideally, we would process the blocks in a single fold, but currently that
-- forces an unzip, constructing explicitly both the block-min list and the minima list, and
-- it stands to reason that the blocks themselves would be less of a hassle than their rangemins.

rangeMin1Internal :: Comparator e -> Int -> [e] -> RangeMinVal Int e
rangeMin1Internal cmp n = rangeMin1Common cmp n blockSize . blockify n blockSize
    where blockSize = 1 + intLog n
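-- A worked sketch of the query decomposition performed by rangeMin1Common above
-- (illustrative only; 'blockDecompose' is hypothetical and simply mirrors the
-- onPair (`divMod` blockSize) step).  A query is split into (block, offset) pairs;
-- blockedRangeMin then combines two in-block queries with one multi-block query.
blockDecompose :: Int -> (Int, Int) -> ((Int, Int), (Int, Int))
blockDecompose blockSize = onPair (`divMod` blockSize)
-- For example, blockDecompose 4 (3, 13) == ((0, 3), (3, 1)): the query covers the tail
-- of block 0, all of blocks 1 and 2, and the head of block 3.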
---------------------------------------------------------------------------------

rangeMin3Size = 150
rangeMin2Size = 660

rangeMinFunc :: Int -> a -> a -> a -> a
rangeMinFunc !n f3 f2 f1
    | n < rangeMin3Size = f3
    | n < rangeMin2Size = f2
    | otherwise         = f1

-- For the frequent case where internal recursive range-min operations are performed
-- and both the list length and the list itself are precomputed, this cuts directly
-- to the optimized function versions that take the range and the association-list
-- as arguments.  Note the use of RangeMinVal rather than RangeMin, an optimization
-- that avoids unnecessary indirection when bootstrapping.
internalRangeMinBy :: Comparator e -> Int -> [e] -> RangeMinVal Int e
internalRangeMinBy cmp !n = {-# SCC "internal_RM" #-}
    rangeMinFunc n rangeMin3Internal rangeMin2Internal rangeMin1Internal cmp n

internalRangeMin :: Ord e => Int -> [e] -> RangeMinVal Int e
internalRangeMin = internalRangeMinBy compare

{-# SPECIALIZE rangeMinBy' :: Comparator e -> (Int, Int) -> [e] -> RangeMin Int e #-}
-- {-# SPECIALIZE rangeMinBy' :: (Ix i, Num i, Enum i) => Comparator e -> (i, i) -> [e] -> RangeMin i e #-}
-- | Given a comparator, an index range and a list of elements in index order, returns a function that
-- takes two indices and returns the index and value of the minimum element between those two indices,
-- inclusive.
rangeMinBy' :: (Enum i) =>
    Comparator e        -- ^ A comparator function on elements.
    -> (i, i)           -- ^ Index range.
    -> [e]              -- ^ List of elements in index order.
    -> RangeMin i e     -- ^ Given a subrange, returns the index and value at the minimum in that range.
rangeMinBy' cmp bounds@(rangeSize -> n) elems = {-# SCC "initial_range_min" #-}
    internalRangeMinBy (cmp `on` snd) n (assocs' bounds elems) . onPair (index bounds)
    --rangeMinFunc n rangeMin3By rangeMin2By rangeMin1By cmp bounds

-- | A version of 'rangeMinBy'' on elements with their default ordering.
rangeMin' :: (Enum i, Ord e) =>
    (i, i)              -- ^ Index range.
    -> [e]              -- ^ List of elements in index order.
    -> RangeMin i e     -- ^ Given a subrange, returns the index and value at the minimum in that range.
rangeMin' = rangeMinBy' compare

-- | Given a comparator and an array, returns a function that takes a subrange of the array and
-- returns the index and value at the minimum in that subrange.  /NOT/ necessarily faster than
-- 'rangeMinBy'', primarily because most of the algorithms used are based upon careful list folding.
rangeMinBy :: (IArray a e, Enum i, Ix i) =>
    Comparator e        -- ^ A comparator function on elements.
    -> a i e            -- ^ An array of elements.
    -> RangeMin i e     -- ^ Given a subrange, returns the index and value at the minimum in that range.
rangeMinBy cmp arr@(numElements -> n) = let arrBounds@(iMin, _) = bounds arr in
    rangeMinFunc n rangeMin3Internal
        (rangeMin2By' (\ i -> (iMin `offset` i, arr `unsafeAt` i)))
        rangeMin1Internal
        (cmp `on` snd) n (assocs' arrBounds (elems arr)) . onPair (index arrBounds)
    --liftM2 (rangeMinFunc n rangeMin3By (rangeMin2By' arr) rangeMin1By cmp) bounds elems arr

-- | A version of 'rangeMinBy' on elements with their default ordering.
rangeMin :: (IArray a e, Enum i, Ix i, Ord e) => a i e -> RangeMin i e
rangeMin = rangeMinBy compare

-- | A version of 'rangeMinBy' for mutable arrays; the array is frozen before the range-min
-- structure is built.
rangeMinByM :: (Monad m, MArray a e m, Enum i, Ix i) => Comparator e -> a i e -> m (RangeMin i e)
rangeMinByM cmp = liftM (rangeMinBy cmp . asPureArray) . freeze

-- | A version of 'rangeMinByM' on elements with their default ordering.
rangeMinM :: (Monad m, MArray a e m, Enum i, Ix i, Ord e) => a i e -> m (RangeMin i e)
rangeMinM = rangeMinByM compare
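-- A small usage sketch of the array-based entry point (illustrative only;
-- 'exampleArrayRM' is hypothetical and is not part of the module).  The query below
-- evaluates to (1, 'a'): the smallest element among indices 1 through 4 of "banana".
exampleArrayRM :: (Int, Char)
exampleArrayRM = rm (1, 4)
    where
        barr = listArray (0, 5) "banana" :: Array Int Char
        rm   = rangeMin barr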
-----------------------------------------------------------

-- Traverses the elements of the tree in an order such that the lowest common ancestor of two nodes
-- is always the minimum-depth element between them.  Returns a list of depth and node-label pairs,
-- as well as the list's length.
lcaTraversal :: Tree e -> (Int, [(Int, e)])
lcaTraversal t = case lcaTraversal' 0 t (IA 0# []) of IA n# l -> (I# n#, l)
    where
        lcaTraversal' !d (Node a ts) trav =
            let cat = lcaTraversal' (d+1)
                consHere (IA m# tr) = IA (m# +# 1#) ((d, a):tr)
            in case ts of
                []            -> consHere trav
                [t]           -> consHere (t `cat` trav)
                (t:ts@(_t:_)) -> t `cat` foldr (consHere .: cat) trav ts

-- | Given an indexing function on node labels, returns a function that takes two node
-- labels and returns the label of their lowest common ancestor.
lowestCommonAncestorBy :: Ix i =>
    (a -> Maybe i)      -- ^ An index on tree labels, so they can be indexed in an array.
                        -- Need not account for every node label, hence the 'Maybe'.
    -> (i, i)           -- ^ Bounds on indices of tree labels.
    -> Tree a           -- ^ The tree to provide lowest-common-ancestor service for.
    -> ((a, a) -> a)    -- ^ Given two node labels, returns the label of their lowest common ancestor.
lowestCommonAncestorBy ix r tree = lowestCommonAncestorBy' ix r tree . onPair (fromJust . ix)

-- | Given an indexing function on node labels, returns a function that takes two indices and
-- returns the label of the associated nodes' lowest common ancestor.
lowestCommonAncestorBy' :: Ix i =>
    (a -> Maybe i)      -- ^ An index on tree labels, so they can be indexed in an array.
                        -- Need not account for every node label, hence the 'Maybe'.
    -> (i, i)           -- ^ Bounds on indices of tree labels.
    -> Tree a           -- ^ The tree to provide lowest-common-ancestor service for.
    -> ((i, i) -> a)    -- ^ Given two indices, returns the label of the associated nodes'
                        -- lowest common ancestor.  The behavior of this function is not
                        -- specified if these indices do not correspond to any nodes of the tree.
lowestCommonAncestorBy' ix r (lcaTraversal -> (n, trav)) = snd . rM . orderPair . onPair indexes
    where
        pairList = [(x, i) | (ix . snd -> Just x) <- trav | i <- [0..]]
        rM = internalRangeMinBy (comparing fst) n trav
        indexes = pureLookup $ array r pairList

-- | Given a tree of indexed nodes, returns a function that takes two nodes and returns their
-- lowest common ancestor.
lowestCommonAncestor :: Ix i =>
    (i, i)              -- ^ Bounds on node labels.
    -> Tree i           -- ^ The tree to provide lowest-common-ancestor service for.
    -> ((i, i) -> i)    -- ^ A function that takes two node labels and returns the label of their
                        -- lowest common ancestor.
lowestCommonAncestor = lowestCommonAncestorBy Just
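-- A small usage sketch of lowestCommonAncestor (illustrative only; 'exampleLCA' is
-- hypothetical and is not part of the module).  In the tree 1 -> [2 -> [4, 5], 3],
-- the lowest common ancestor of 4 and 5 is 2, and that of 4 and 3 is 1,
-- so exampleLCA evaluates to (2, 1).
exampleLCA :: (Int, Int)
exampleLCA = (lca (4, 5), lca (4, 3))
    where
        tree = Node 1 [Node 2 [Node 4 [], Node 5 []], Node 3 []]
        lca  = lowestCommonAncestor (1, 5) tree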