Copyright | (c) Lev Dvorkin 2022 |
---|---|
License | MIT |
Maintainer | lev_135@mail.ru |
Stability | Experimental |
Safe Haskell | None |
Language | Haskell2010 |
This module contains an implementation of a uniqueness-checking algorithm based on the Sardinas-Patterson algorithm.
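For background, the classical Sardinas-Patterson test decides whether a set of code words is uniquely decodable by repeatedly computing "dangling suffixes". The sketch below is a generic illustration over plain `String` code words, not this module's `Token`/`Repeatable` machinery; all names in it are invented for the example.

```haskell
import           Data.List  (stripPrefix)
import           Data.Maybe (catMaybes)
import qualified Data.Set   as Set

-- All dangling suffixes between two sets of words: whenever a word of one
-- set is a proper prefix of a word of the other, the leftover part dangles.
danglingSuffixes :: Set.Set String -> Set.Set String -> Set.Set String
danglingSuffixes ws vs = Set.fromList
  [ s
  | w <- Set.toList ws
  , v <- Set.toList vs
  , s <- catMaybes [stripPrefix w v, stripPrefix v w]
  , not (null s)
  ]

-- Sardinas-Patterson test over plain strings (assumes the code words are
-- distinct and nonempty): the code is uniquely decodable iff no iterated
-- set of dangling suffixes ever contains a code word.
uniquelyDecodable :: [String] -> Bool
uniquelyDecodable codeWords = go Set.empty (danglingSuffixes code code)
  where
    code = Set.fromList codeWords
    go seen suffs
      | Set.null suffs = True
      | not (Set.null (suffs `Set.intersection` code)) = False
      | otherwise =
          let seen' = seen `Set.union` suffs
          in go seen' (danglingSuffixes code suffs `Set.difference` seen')
```

For instance, `uniquelyDecodable ["a", "b", "ab"]` is `False` (the string `"ab"` can be read as `["ab"]` or `["a", "b"]`), while `uniquelyDecodable ["0", "10", "110"]` is `True`.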
Synopsis
- data Rem c
  = Rem1 [Repeatable c]
  | Rem2 [Repeatable c]
- data MergeRes c = MergeRes
  { merged :: [Repeatable c]
  , mergeRem :: Rem c }
- mergeReps :: Ord c => [Repeatable c] -> [Repeatable c] -> Alt (MergeRes c)
- mergedList :: MergeRes c -> [Repeatable c]
- remList :: MergeRes c -> [Repeatable c]
- rem1 :: MergeRes c -> [Repeatable c]
- rem2 :: MergeRes c -> [Repeatable c]
- data Suff c = Suff
  { srbeh :: [Repeatable c]
  , scur :: [Repeatable c]
  , sahead :: [Repeatable c] }
- data Div c = Div {}
- initDiv :: RToken c -> Div c
- stepDiv :: Ord c => Int -> Div c -> RToken c -> Alt (Div c)
- data ConflictTokens k c = ConflictTokens
  { tokList1, tokList2 :: [(k, [BlackWhiteSet c])] }
- checkUniqueTokenizing :: forall k c. Ord c => [Token k c] -> Either (ConflictTokens k c) ()
Documentation
data Rem c Source #

Rem1 [Repeatable c] | Remainder of the first list. May be empty if there is no remainder. |
Rem2 [Repeatable c] | Remainder of the second list. Always nonempty. |
mergeReps :: Ord c => [Repeatable c] -> [Repeatable c] -> Alt (MergeRes c) Source #
mergedList :: MergeRes c -> [Repeatable c] Source #
remList :: MergeRes c -> [Repeatable c] Source #
rem1 :: MergeRes c -> [Repeatable c] Source #
rem2 :: MergeRes c -> [Repeatable c] Source #
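The `Rem`/`MergeRes` interface above can be pictured with a plain-list analogue: merging succeeds when the two lists agree on their common prefix, and the result records which side has symbols left over. The sketch below uses plain `Eq` symbols and `Maybe` instead of the library's `Repeatable` matching and `Alt` result (a `Repeatable` may match in several ways, hence the real `Alt`); all primed names are invented for the illustration.

```haskell
-- Plain-list analogue of 'Rem' and 'MergeRes' (names invented for the sketch).
data Rem' c
  = Rem1' [c]  -- leftover of the first list; may be empty
  | Rem2' [c]  -- leftover of the second list; nonempty by construction
  deriving Show

data MergeRes' c = MergeRes'
  { merged'   :: [c]     -- the successfully matched common prefix
  , mergeRem' :: Rem' c  -- whatever one of the two lists has left over
  } deriving Show

-- Merge two lists symbol by symbol; 'Nothing' on the first mismatch.
mergeLists :: Eq c => [c] -> [c] -> Maybe (MergeRes' c)
mergeLists xs []         = Just (MergeRes' [] (Rem1' xs))
mergeLists [] ys         = Just (MergeRes' [] (Rem2' ys))
mergeLists (x:xs) (y:ys)
  | x == y    = do
      MergeRes' m r <- mergeLists xs ys
      pure (MergeRes' (x : m) r)
  | otherwise = Nothing
```

For example, `mergeLists "abc" "ab"` evaluates to `Just (MergeRes' {merged' = "ab", mergeRem' = Rem1' "c"})`. The empty-vs-empty case is absorbed by the first equation, which is why `Rem2'` (like the documented `Rem2`) is always nonempty.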
data Suff c Source #

Dangling suffix

Suff
  srbeh :: [Repeatable c]
  scur :: [Repeatable c]
  sahead :: [Repeatable c]
data Div c Source #

Result of division.

It looks like:

               rtoks      | lastTok
    --------|---------|-----------------------|~~~~~
      rprefToks
    -----|-----|---------|
                          suff (remaining part):
                          behind |      current       | ahead
                          -------|====================|~~~~~
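Roughly speaking, each division step extends the shorter of the two competing tokenizations by one token and recomputes the dangling suffix. The plain-string sketch below (invented names, no `Repeatable`/`BlackWhiteSet` matching) shows the idea behind a step such as `stepDiv`:

```haskell
import Data.List (stripPrefix)

-- One division step on plain strings (illustrative only): 'suff' is what the
-- longer of the two competing tokenizations currently sticks out by, 'tok' is
-- the next token appended to the shorter one.
stepSuffix :: String -> String -> Maybe (Either String String)
stepSuffix suff tok = case stripPrefix tok suff of
  Just rest -> Just (Left rest)                -- token fits inside the suffix
  Nothing   -> Right <$> stripPrefix suff tok  -- token overshoots the suffix
```

Here `stepSuffix "abc" "ab"` gives `Just (Left "c")` (the token fits inside the suffix), `stepSuffix "ab" "abc"` gives `Just (Right "c")` (the other side now dangles), and a mismatch gives `Nothing`. An empty leftover means the two tokenizations have met on the same string, i.e. a conflict has been found.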
data ConflictTokens k c Source #
Two ways of tokenizing a string, demonstrating non-uniqueness
ConflictTokens
  tokList1, tokList2 :: [(k, [BlackWhiteSet c])]
Instances
(Eq k, Eq c) => Eq (ConflictTokens k c) Source #
  Defined in Text.Tokenizer.Uniqueness
(Ord k, Ord c) => Ord (ConflictTokens k c) Source #
  Defined in Text.Tokenizer.Uniqueness
(Show k, Show c) => Show (ConflictTokens k c) Source #
  Defined in Text.Tokenizer.Uniqueness
checkUniqueTokenizing :: forall k c. Ord c => [Token k c] -> Either (ConflictTokens k c) () Source #
Check that no list of symbols can be decomposed into tokens from the given list in two different ways.
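A hypothetical usage sketch: how `Token k c` values are actually built lives elsewhere in the package, so the `makeToken` helper below is invented and left undefined; only the shape of the call and of the result follows the signature documented above.

```haskell
import Text.Tokenizer.Uniqueness (ConflictTokens, checkUniqueTokenizing)

-- ASSUMPTION: some smart constructor for 'Token k c' exists elsewhere in the
-- package; 'makeToken' is a made-up placeholder for this illustration only.
makeToken :: String -> String -> Token String Char
makeToken = undefined

result :: Either (ConflictTokens String Char) ()
result = checkUniqueTokenizing
  [ makeToken "a"  "a"    -- token named "a"  matching the string "a"
  , makeToken "b"  "b"    -- token named "b"  matching the string "b"
  , makeToken "ab" "ab"   -- token named "ab" matching the string "ab"
  ]
  -- here "ab" could be read either as ["ab"] or as ["a", "b"], so the check
  -- should return 'Left' carrying the two conflicting token lists

report :: String
report = either (("conflict: " ++) . show) (const "tokenization is unique") result
```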