Data.StorableVector.Lazy
 Contents Introducing and eliminating Vectors Basic interface Transformations Reducing Vectors inspecting a vector sub-vectors other functions Helper functions for StorableVector IO
Description

Chunky signal stream built on StorableVector.

Hints for fusion: Higher-order functions should always be inlined in the end, in order to turn them into machine loops instead of calling a function in an inner loop.

Synopsis
newtype Vector a = SV {
 chunks :: [Vector a]
}
newtype ChunkSize = ChunkSize Int
chunkSize :: Int -> ChunkSize
defaultChunkSize :: ChunkSize
empty :: Storable a => Vector a
singleton :: Storable a => a -> Vector a
fromChunks :: Storable a => [Vector a] -> Vector a
pack :: Storable a => ChunkSize -> [a] -> Vector a
unpack :: Storable a => Vector a -> [a]
packWith :: Storable b => ChunkSize -> (a -> b) -> [a] -> Vector b
unpackWith :: Storable a => (a -> b) -> Vector a -> [b]
unfoldr :: Storable b => ChunkSize -> (a -> Maybe (b, a)) -> a -> Vector b
iterate :: Storable a => ChunkSize -> (a -> a) -> a -> Vector a
repeat :: Storable a => ChunkSize -> a -> Vector a
cycle :: Storable a => Vector a -> Vector a
replicate :: Storable a => ChunkSize -> Int -> a -> Vector a
null :: Storable a => Vector a -> Bool
length :: Vector a -> Int
cons :: Storable a => a -> Vector a -> Vector a
append :: Storable a => Vector a -> Vector a -> Vector a
extendL :: Storable a => ChunkSize -> Vector a -> Vector a -> Vector a
concat :: Storable a => [Vector a] -> Vector a
map :: (Storable x, Storable y) => (x -> y) -> Vector x -> Vector y
reverse :: Storable a => Vector a -> Vector a
foldl :: Storable b => (a -> b -> a) -> a -> Vector b -> a
foldl' :: Storable b => (a -> b -> a) -> a -> Vector b -> a
any :: Storable a => (a -> Bool) -> Vector a -> Bool
all :: Storable a => (a -> Bool) -> Vector a -> Bool
maximum :: (Storable a, Ord a) => Vector a -> a
minimum :: (Storable a, Ord a) => Vector a -> a
viewL :: Storable a => Vector a -> Maybe (a, Vector a)
viewR :: Storable a => Vector a -> Maybe (Vector a, a)
switchL :: Storable a => b -> (a -> Vector a -> b) -> Vector a -> b
switchR :: Storable a => b -> (Vector a -> a -> b) -> Vector a -> b
scanl :: (Storable a, Storable b) => (a -> b -> a) -> a -> Vector b -> Vector a
mapAccumL :: (Storable a, Storable b) => (acc -> a -> (acc, b)) -> acc -> Vector a -> (acc, Vector b)
mapAccumR :: (Storable a, Storable b) => (acc -> a -> (acc, b)) -> acc -> Vector a -> (acc, Vector b)
crochetLChunk :: (Storable x, Storable y) => (x -> acc -> Maybe (y, acc)) -> acc -> Vector x -> (Vector y, Maybe acc)
crochetL :: (Storable x, Storable y) => (x -> acc -> Maybe (y, acc)) -> acc -> Vector x -> Vector y
take :: Storable a => Int -> Vector a -> Vector a
drop :: Storable a => Int -> Vector a -> Vector a
splitAt :: Storable a => Int -> Vector a -> (Vector a, Vector a)
dropMarginRem :: Storable a => Int -> Int -> Vector a -> (Int, Vector a)
dropMargin :: Storable a => Int -> Int -> Vector a -> Vector a
dropWhile :: Storable a => (a -> Bool) -> Vector a -> Vector a
takeWhile :: Storable a => (a -> Bool) -> Vector a -> Vector a
span :: Storable a => (a -> Bool) -> Vector a -> (Vector a, Vector a)
filter :: Storable a => (a -> Bool) -> Vector a -> Vector a
zipWith :: (Storable a, Storable b, Storable c) => (a -> b -> c) -> Vector a -> Vector b -> Vector c
zipWith3 :: (Storable a, Storable b, Storable c, Storable d) => (a -> b -> c -> d) -> Vector a -> Vector b -> Vector c -> Vector d
zipWith4 :: (Storable a, Storable b, Storable c, Storable d, Storable e) => (a -> b -> c -> d -> e) -> Vector a -> Vector b -> Vector c -> Vector d -> Vector e
zipWithSize :: (Storable a, Storable b, Storable c) => ChunkSize -> (a -> b -> c) -> Vector a -> Vector b -> Vector c
zipWithSize3 :: (Storable a, Storable b, Storable c, Storable d) => ChunkSize -> (a -> b -> c -> d) -> Vector a -> Vector b -> Vector c -> Vector d
zipWithSize4 :: (Storable a, Storable b, Storable c, Storable d, Storable e) => ChunkSize -> (a -> b -> c -> d -> e) -> Vector a -> Vector b -> Vector c -> Vector d -> Vector e
pad :: Storable a => ChunkSize -> a -> Int -> Vector a -> Vector a
padAlt :: Storable a => ChunkSize -> a -> Int -> Vector a -> Vector a
cancelNullVector :: (Vector a, b) -> Maybe (Vector a, b)
fromChunk :: Storable a => Vector a -> Vector a
hGetContentsAsync :: Storable a => ChunkSize -> Handle -> IO (IOError, Vector a)
hPut :: Storable a => Handle -> Vector a -> IO ()
readFileAsync :: Storable a => ChunkSize -> FilePath -> IO (IOError, Vector a)
writeFile :: Storable a => FilePath -> Vector a -> IO ()
appendFile :: Storable a => FilePath -> Vector a -> IO ()
Documentation
 newtype Vector a Source
Constructors
SV
 chunks :: [Vector a]
 newtype ChunkSize Source
Constructors
 ChunkSize Int
 chunkSize :: Int -> ChunkSize Source
 defaultChunkSize :: ChunkSize Source
Introducing and eliminating Vectors
 empty :: Storable a => Vector a Source
 singleton :: Storable a => a -> Vector a Source
 fromChunks :: Storable a => [Vector a] -> Vector a Source
 pack :: Storable a => ChunkSize -> [a] -> Vector a Source
 unpack :: Storable a => Vector a -> [a] Source
 packWith :: Storable b => ChunkSize -> (a -> b) -> [a] -> Vector b Source
 unpackWith :: Storable a => (a -> b) -> Vector a -> [b] Source
 unfoldr :: Storable b => ChunkSize -> (a -> Maybe (b, a)) -> a -> Vector b Source
 iterate :: Storable a => ChunkSize -> (a -> a) -> a -> Vector a Source
 repeat :: Storable a => ChunkSize -> a -> Vector a Source
 cycle :: Storable a => Vector a -> Vector a Source
 replicate :: Storable a => ChunkSize -> Int -> a -> Vector a Source
Basic interface
 null :: Storable a => Vector a -> Bool Source
 length :: Vector a -> Int Source
 cons :: Storable a => a -> Vector a -> Vector a Source
 append :: Storable a => Vector a -> Vector a -> Vector a Source
 extendL :: Storable a => ChunkSize -> Vector a -> Vector a -> Vector a Source
extendL size x y prepends the chunk x and merges it with the first chunk of y if the total size is at most size. This way you can prepend small chunks while asserting a reasonable average size for chunks.
 concat :: Storable a => [Vector a] -> Vector a Source
Transformations
 map :: (Storable x, Storable y) => (x -> y) -> Vector x -> Vector y Source
 reverse :: Storable a => Vector a -> Vector a Source
Reducing Vectors
 foldl :: Storable b => (a -> b -> a) -> a -> Vector b -> a Source
 foldl' :: Storable b => (a -> b -> a) -> a -> Vector b -> a Source
 any :: Storable a => (a -> Bool) -> Vector a -> Bool Source
 all :: Storable a => (a -> Bool) -> Vector a -> Bool Source
 maximum :: (Storable a, Ord a) => Vector a -> a Source
 minimum :: (Storable a, Ord a) => Vector a -> a Source
inspecting a vector
 viewL :: Storable a => Vector a -> Maybe (a, Vector a) Source
 viewR :: Storable a => Vector a -> Maybe (Vector a, a) Source
 switchL :: Storable a => b -> (a -> Vector a -> b) -> Vector a -> b Source
 switchR :: Storable a => b -> (Vector a -> a -> b) -> Vector a -> b Source
 scanl :: (Storable a, Storable b) => (a -> b -> a) -> a -> Vector b -> Vector a Source
 mapAccumL :: (Storable a, Storable b) => (acc -> a -> (acc, b)) -> acc -> Vector a -> (acc, Vector b) Source
 mapAccumR :: (Storable a, Storable b) => (acc -> a -> (acc, b)) -> acc -> Vector a -> (acc, Vector b) Source
 crochetLChunk :: (Storable x, Storable y) => (x -> acc -> Maybe (y, acc)) -> acc -> Vector x -> (Vector y, Maybe acc) Source
 crochetL :: (Storable x, Storable y) => (x -> acc -> Maybe (y, acc)) -> acc -> Vector x -> Vector y Source
sub-vectors
 take :: Storable a => Int -> Vector a -> Vector a Source
 drop :: Storable a => Int -> Vector a -> Vector a Source
 splitAt :: Storable a => Int -> Vector a -> (Vector a, Vector a) Source
 dropMarginRem :: Storable a => Int -> Int -> Vector a -> (Int, Vector a) Source
dropMarginRem n m xs drops at most the first m elements of xs and ensures that xs still contains n elements. Additionally returns the number of elements that could not be dropped due to the margin constraint. That is dropMarginRem n m xs == (k,ys) implies length xs - m == length ys - k. Requires length xs >= n.
 dropMargin :: Storable a => Int -> Int -> Vector a -> Vector a Source
 dropWhile :: Storable a => (a -> Bool) -> Vector a -> Vector a Source
 takeWhile :: Storable a => (a -> Bool) -> Vector a -> Vector a Source
 span :: Storable a => (a -> Bool) -> Vector a -> (Vector a, Vector a) Source
other functions
 filter :: Storable a => (a -> Bool) -> Vector a -> Vector a Source
 zipWith :: (Storable a, Storable b, Storable c) => (a -> b -> c) -> Vector a -> Vector b -> Vector c Source
 zipWith3 :: (Storable a, Storable b, Storable c, Storable d) => (a -> b -> c -> d) -> Vector a -> Vector b -> Vector c -> Vector d Source
 zipWith4 :: (Storable a, Storable b, Storable c, Storable d, Storable e) => (a -> b -> c -> d -> e) -> Vector a -> Vector b -> Vector c -> Vector d -> Vector e Source
 zipWithSize :: (Storable a, Storable b, Storable c) => ChunkSize -> (a -> b -> c) -> Vector a -> Vector b -> Vector c Source
 zipWithSize3 :: (Storable a, Storable b, Storable c, Storable d) => ChunkSize -> (a -> b -> c -> d) -> Vector a -> Vector b -> Vector c -> Vector d Source
 zipWithSize4 :: (Storable a, Storable b, Storable c, Storable d, Storable e) => ChunkSize -> (a -> b -> c -> d -> e) -> Vector a -> Vector b -> Vector c -> Vector d -> Vector e Source
 pad :: Storable a => ChunkSize -> a -> Int -> Vector a -> Vector a Source
Ensure a minimal length of the list by appending pad values.
 padAlt :: Storable a => ChunkSize -> a -> Int -> Vector a -> Vector a Source
Helper functions for StorableVector
 cancelNullVector :: (Vector a, b) -> Maybe (Vector a, b) Source
 fromChunk :: Storable a => Vector a -> Vector a Source
IO
 hGetContentsAsync :: Storable a => ChunkSize -> Handle -> IO (IOError, Vector a) Source

Read the rest of a file lazily and provide the reason of termination as IOError. If the IOError is EOF (check with System.IO.Error.isEOFError err), then the file was read successfully. Only access the final IOError after you have consumed the file contents, since finding out the terminating reason forces reading the entire file. Also make sure you read the file completely, because it is only closed when the file end is reached (or an exception is encountered).

TODO: In ByteString.Lazy the chunk size is reduced if data is not immediately available. Maybe we should adopt that behaviour, but when working with realtime streams it may mean that the chunks become very small.

 hPut :: Storable a => Handle -> Vector a -> IO () Source
 readFileAsync :: Storable a => ChunkSize -> FilePath -> IO (IOError, Vector a) Source
The file can only be closed after all values are consumed. That is, you must always assert that you consume all elements of the stream, and that no values are missed due to lazy evaluation. This requirement makes this function useless in many applications.
 writeFile :: Storable a => FilePath -> Vector a -> IO () Source
 appendFile :: Storable a => FilePath -> Vector a -> IO () Source