Chunky signal stream built on StorableVector.

Hints for fusion:

- Higher-order functions should always be inlined in the end, in order to turn them into machine loops instead of calling a function in an inner loop.
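
A minimal sketch of what this means on the user side (the module alias and the helper `scale` are illustrative assumptions, not part of the library): marking a small higher-order wrapper as INLINE lets GHC specialise it at the call site, so the inner loop applies the multiplication directly instead of calling through an unknown function.

```haskell
import qualified Data.StorableVector.Lazy as SVL  -- module name is an assumption

-- Hypothetical user-side wrapper, not part of the library.
scale :: Double -> SVL.Vector Double -> SVL.Vector Double
scale k = SVL.map (k *)
{-# INLINE scale #-}
```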

- newtype Vector a = SV {}
- newtype ChunkSize = ChunkSize Int
- chunkSize :: Int -> ChunkSize
- defaultChunkSize :: ChunkSize
- empty :: Storable a => Vector a
- singleton :: Storable a => a -> Vector a
- fromChunks :: Storable a => [Vector a] -> Vector a
- pack :: Storable a => ChunkSize -> [a] -> Vector a
- unpack :: Storable a => Vector a -> [a]
- packWith :: Storable b => ChunkSize -> (a -> b) -> [a] -> Vector b
- unpackWith :: Storable a => (a -> b) -> Vector a -> [b]
- unfoldr :: Storable b => ChunkSize -> (a -> Maybe (b, a)) -> a -> Vector b
- sample :: Storable a => ChunkSize -> (Int -> a) -> Vector a
- sampleN :: Storable a => ChunkSize -> Int -> (Int -> a) -> Vector a
- iterate :: Storable a => ChunkSize -> (a -> a) -> a -> Vector a
- repeat :: Storable a => ChunkSize -> a -> Vector a
- cycle :: Storable a => Vector a -> Vector a
- replicate :: Storable a => ChunkSize -> Int -> a -> Vector a
- null :: Storable a => Vector a -> Bool
- length :: Vector a -> Int
- cons :: Storable a => a -> Vector a -> Vector a
- append :: Storable a => Vector a -> Vector a -> Vector a
- extendL :: Storable a => ChunkSize -> Vector a -> Vector a -> Vector a
- concat :: Storable a => [Vector a] -> Vector a
- map :: (Storable x, Storable y) => (x -> y) -> Vector x -> Vector y
- reverse :: Storable a => Vector a -> Vector a
- foldl :: Storable b => (a -> b -> a) -> a -> Vector b -> a
- foldl' :: Storable b => (a -> b -> a) -> a -> Vector b -> a
- foldr :: Storable b => (b -> a -> a) -> a -> Vector b -> a
- any :: Storable a => (a -> Bool) -> Vector a -> Bool
- all :: Storable a => (a -> Bool) -> Vector a -> Bool
- maximum :: (Storable a, Ord a) => Vector a -> a
- minimum :: (Storable a, Ord a) => Vector a -> a
- viewL :: Storable a => Vector a -> Maybe (a, Vector a)
- viewR :: Storable a => Vector a -> Maybe (Vector a, a)
- switchL :: Storable a => b -> (a -> Vector a -> b) -> Vector a -> b
- switchR :: Storable a => b -> (Vector a -> a -> b) -> Vector a -> b
- scanl :: (Storable a, Storable b) => (a -> b -> a) -> a -> Vector b -> Vector a
- mapAccumL :: (Storable a, Storable b) => (acc -> a -> (acc, b)) -> acc -> Vector a -> (acc, Vector b)
- mapAccumR :: (Storable a, Storable b) => (acc -> a -> (acc, b)) -> acc -> Vector a -> (acc, Vector b)
- crochetLChunk :: (Storable x, Storable y) => (x -> acc -> Maybe (y, acc)) -> acc -> Vector x -> (Vector y, Maybe acc)
- crochetL :: (Storable x, Storable y) => (x -> acc -> Maybe (y, acc)) -> acc -> Vector x -> Vector y
- take :: Storable a => Int -> Vector a -> Vector a
- drop :: Storable a => Int -> Vector a -> Vector a
- splitAt :: Storable a => Int -> Vector a -> (Vector a, Vector a)
- dropMarginRem :: Storable a => Int -> Int -> Vector a -> (Int, Vector a)
- dropMargin :: Storable a => Int -> Int -> Vector a -> Vector a
- dropWhile :: Storable a => (a -> Bool) -> Vector a -> Vector a
- takeWhile :: Storable a => (a -> Bool) -> Vector a -> Vector a
- span :: Storable a => (a -> Bool) -> Vector a -> (Vector a, Vector a)
- filter :: Storable a => (a -> Bool) -> Vector a -> Vector a
- zipWith :: (Storable a, Storable b, Storable c) => (a -> b -> c) -> Vector a -> Vector b -> Vector c
- zipWith3 :: (Storable a, Storable b, Storable c, Storable d) => (a -> b -> c -> d) -> Vector a -> Vector b -> Vector c -> Vector d
- zipWith4 :: (Storable a, Storable b, Storable c, Storable d, Storable e) => (a -> b -> c -> d -> e) -> Vector a -> Vector b -> Vector c -> Vector d -> Vector e
- zipWithSize :: (Storable a, Storable b, Storable c) => ChunkSize -> (a -> b -> c) -> Vector a -> Vector b -> Vector c
- zipWithSize3 :: (Storable a, Storable b, Storable c, Storable d) => ChunkSize -> (a -> b -> c -> d) -> Vector a -> Vector b -> Vector c -> Vector d
- zipWithSize4 :: (Storable a, Storable b, Storable c, Storable d, Storable e) => ChunkSize -> (a -> b -> c -> d -> e) -> Vector a -> Vector b -> Vector c -> Vector d -> Vector e
- pad :: Storable a => ChunkSize -> a -> Int -> Vector a -> Vector a
- padAlt :: Storable a => ChunkSize -> a -> Int -> Vector a -> Vector a
- cancelNullVector :: (Vector a, b) -> Maybe (Vector a, b)
- fromChunk :: Storable a => Vector a -> Vector a
- hGetContentsAsync :: Storable a => ChunkSize -> Handle -> IO (IOError, Vector a)
- hPut :: Storable a => Handle -> Vector a -> IO ()
- readFileAsync :: Storable a => ChunkSize -> FilePath -> IO (IOError, Vector a)
- writeFile :: Storable a => FilePath -> Vector a -> IO ()
- appendFile :: Storable a => FilePath -> Vector a -> IO ()

# Documentation

# Introducing and eliminating `Vector`s

fromChunks :: Storable a => [Vector a] -> Vector a

unpackWith :: Storable a => (a -> b) -> Vector a -> [b]

# Basic interface

extendL :: Storable a => ChunkSize -> Vector a -> Vector a -> Vector a

`extendL size x y` prepends the chunk `x` and merges it with the first chunk of `y` if the total size is at most `size`. This way you can prepend small chunks while asserting a reasonable average size for chunks.
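
A usage sketch (the helper name and the choice of `defaultChunkSize` are assumptions for illustration): prepending a single sample without fragmenting the stream into many one-element chunks.

```haskell
import qualified Data.StorableVector.Lazy as SVL  -- module name is an assumption

-- Prepend one sample; if the first chunk of the tail is small enough,
-- extendL merges the singleton into it instead of keeping a tiny chunk.
prependSample :: Double -> SVL.Vector Double -> SVL.Vector Double
prependSample x xs =
   SVL.extendL SVL.defaultChunkSize (SVL.singleton x) xs
```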

# Transformations

# Reducing `Vector`s

# inspecting a vector

mapAccumL :: (Storable a, Storable b) => (acc -> a -> (acc, b)) -> acc -> Vector a -> (acc, Vector b)

mapAccumR :: (Storable a, Storable b) => (acc -> a -> (acc, b)) -> acc -> Vector a -> (acc, Vector b)

crochetLChunk :: (Storable x, Storable y) => (x -> acc -> Maybe (y, acc)) -> acc -> Vector x -> (Vector y, Maybe acc)

crochetL :: (Storable x, Storable y) => (x -> acc -> Maybe (y, acc)) -> acc -> Vector x -> Vector y

# sub-vectors

dropMarginRem :: Storable a => Int -> Int -> Vector a -> (Int, Vector a)

`dropMarginRem n m xs` drops at most the first `m` elements of `xs` and ensures that `xs` still contains `n` elements. Additionally returns the number of elements that could not be dropped due to the margin constraint. That is, `dropMarginRem n m xs == (k,ys)` implies `length xs - m == length ys - k`. Requires `length xs >= n`.
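
A worked example (the concrete numbers are illustrative, not from the documentation): with five elements, a margin of `n = 3` allows only two of the requested four drops, so two requested drops remain.

```haskell
import qualified Data.StorableVector.Lazy as SVL  -- module name is an assumption

example :: (Int, SVL.Vector Int)
example = SVL.dropMarginRem 3 4 (SVL.pack SVL.defaultChunkSize [0 .. 4 :: Int])
-- Expected: (2, <vector [2,3,4]>), since 5 - 4 == 3 - 2 satisfies the law above.
```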

# other functions

zipWith :: (Storable a, Storable b, Storable c) => (a -> b -> c) -> Vector a -> Vector b -> Vector c

zipWith3 :: (Storable a, Storable b, Storable c, Storable d) => (a -> b -> c -> d) -> Vector a -> Vector b -> Vector c -> Vector d

zipWith4 :: (Storable a, Storable b, Storable c, Storable d, Storable e) => (a -> b -> c -> d -> e) -> Vector a -> Vector b -> Vector c -> Vector d -> Vector e

zipWithSize :: (Storable a, Storable b, Storable c) => ChunkSize -> (a -> b -> c) -> Vector a -> Vector b -> Vector c

zipWithSize3 :: (Storable a, Storable b, Storable c, Storable d) => ChunkSize -> (a -> b -> c -> d) -> Vector a -> Vector b -> Vector c -> Vector d

zipWithSize4 :: (Storable a, Storable b, Storable c, Storable d, Storable e) => ChunkSize -> (a -> b -> c -> d -> e) -> Vector a -> Vector b -> Vector c -> Vector d -> Vector e

pad :: Storable a => ChunkSize -> a -> Int -> Vector a -> Vector a

Ensure a minimal length of the vector by appending pad values.
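
A small sketch (the helper name and the use of zero as pad value are assumptions): padding an audio block so that it holds at least a required number of samples.

```haskell
import qualified Data.StorableVector.Lazy as SVL  -- module name is an assumption

-- Append zeros until the vector holds at least 'blockSize' samples;
-- vectors that are already long enough need no padding.
padBlock :: Int -> SVL.Vector Double -> SVL.Vector Double
padBlock blockSize = SVL.pad SVL.defaultChunkSize 0 blockSize
```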

# Helper functions for StorableVector

cancelNullVector :: (Vector a, b) -> Maybe (Vector a, b)

# IO

hGetContentsAsync :: Storable a => ChunkSize -> Handle -> IO (IOError, Vector a)

Read the rest of a file lazily and provide the reason for termination as an IOError. If the IOError is EOF (check with `System.IO.Error.isEOFError err`), then the file was read successfully. Only access the final IOError after you have consumed the file contents, since finding out the terminating reason forces the entire file to be read. Also make sure you read the file completely, because it is only closed when the end of the file is reached (or an exception is encountered).

TODO: In ByteString.Lazy the chunk size is reduced if data is not immediately available. Maybe we should adapt that behaviour, but when working with realtime streams that may mean that the chunks become very small.
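
A usage sketch following the rules above (the module alias, the `Word8` element type, and the helper name are assumptions): the contents are forced before the error is inspected, and the whole file is read so that the handle can be closed.

```haskell
import qualified Data.StorableVector.Lazy as SVL  -- module name is an assumption
import qualified System.IO.Error as Err
import System.IO (IOMode (ReadMode), withBinaryFile)
import Data.Word (Word8)

readAllBytes :: FilePath -> IO (SVL.Vector Word8)
readAllBytes path =
   withBinaryFile path ReadMode $ \h -> do
      (err, xs) <- SVL.hGetContentsAsync SVL.defaultChunkSize h
      -- Force the whole stream first, only then look at the terminating error.
      if SVL.length xs `seq` Err.isEOFError err
         then return xs
         else ioError err
```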

readFileAsync :: Storable a => ChunkSize -> FilePath -> IO (IOError, Vector a)

The file can only be closed after all values are consumed. That is, you must always assert that you consume all elements of the stream, and that no values are missed due to lazy evaluation. This requirement makes this function useless in many applications.
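
A sketch of a use that satisfies this requirement (the helper name is an assumption): a strict fold consumes every element, so nothing is left unevaluated and the file can be closed.

```haskell
import qualified Data.StorableVector.Lazy as SVL  -- module name is an assumption
import Data.Word (Word8)

-- Sum all bytes of a file; the strict fold consumes the entire stream.
sumFileBytes :: FilePath -> IO Word8
sumFileBytes path = do
   (_err, xs) <- SVL.readFileAsync SVL.defaultChunkSize path
   return $! SVL.foldl' (+) 0 xs
```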