| Safe Haskell | None | 
|---|---|
| Language | Haskell2010 | 
Database.Bloodhound.Internal.Analysis
Synopsis
- data Analysis = Analysis {}
- newtype Tokenizer = Tokenizer Text
- data AnalyzerDefinition = AnalyzerDefinition {}
- data CharFilterDefinition
- newtype TokenizerDefinition = TokenizerDefinitionNgram Ngram
- data Ngram = Ngram {
  - ngramMinGram :: Int
  - ngramMaxGram :: Int
  - ngramTokenChars :: [TokenChar]
- data TokenChar
- data TokenFilterDefinition
  - = TokenFilterDefinitionLowercase (Maybe Language)
  - | TokenFilterDefinitionUppercase (Maybe Language)
  - | TokenFilterDefinitionApostrophe
  - | TokenFilterDefinitionReverse
  - | TokenFilterDefinitionSnowball Language
  - | TokenFilterDefinitionShingle Shingle
  - | TokenFilterDefinitionStemmer Language
  - | TokenFilterDefinitionStop (Either Language [StopWord])
- data Language
  - = Arabic
  - | Armenian
  - | Basque
  - | Bengali
  - | Brazilian
  - | Bulgarian
  - | Catalan
  - | Cjk
  - | Czech
  - | Danish
  - | Dutch
  - | English
  - | Finnish
  - | French
  - | Galician
  - | German
  - | German2
  - | Greek
  - | Hindi
  - | Hungarian
  - | Indonesian
  - | Irish
  - | Italian
  - | Kp
  - | Latvian
  - | Lithuanian
  - | Lovins
  - | Norwegian
  - | Persian
  - | Porter
  - | Portuguese
  - | Romanian
  - | Russian
  - | Sorani
  - | Spanish
  - | Swedish
  - | Thai
  - | Turkish
- languageToText :: Language -> Text
- languageFromText :: Text -> Maybe Language
- data Shingle = Shingle {}
Documentation
data Analysis Source #
Constructors
| Analysis | |
data AnalyzerDefinition Source #
Constructors
| AnalyzerDefinition | |
Instances
| Eq AnalyzerDefinition Source # | Defined in Database.Bloodhound.Internal.Analysis |
| Show AnalyzerDefinition Source # | Defined in Database.Bloodhound.Internal.Analysis |
| ToJSON AnalyzerDefinition Source # | Defined in Database.Bloodhound.Internal.Analysis |
| FromJSON AnalyzerDefinition Source # | Defined in Database.Bloodhound.Internal.Analysis |
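An Analysis bundles named analyzers, tokenizers, and filters. As a minimal sketch of how the pieces fit together: the record fields of Analysis and AnalyzerDefinition are collapsed in this listing, so the field names below (analysisAnalyzer, analysisTokenizer, analysisTokenFilter, analysisCharFilter, analyzerDefinitionTokenizer, analyzerDefinitionFilter, analyzerDefinitionCharFilter) are assumptions taken from the Bloodhound source and should be checked against your installed version; everything else uses only constructors shown on this page.

```haskell
{-# LANGUAGE OverloadedStrings #-}

import qualified Data.Map.Strict as M
import Database.Bloodhound.Internal.Analysis

-- A custom analyzer named "my_analyzer" that uses the n-gram tokenizer
-- registered under "my_ngram". Field names are assumptions (see above).
myAnalysis :: Analysis
myAnalysis = Analysis
  { analysisAnalyzer = M.singleton "my_analyzer" analyzerDef
  , analysisTokenizer =
      M.singleton "my_ngram"
        (TokenizerDefinitionNgram (Ngram 3 4 [TokenLetter, TokenDigit]))
  , analysisTokenFilter = M.empty
  , analysisCharFilter = M.empty
  }
  where
    analyzerDef = AnalyzerDefinition
      { analyzerDefinitionTokenizer = Just (Tokenizer "my_ngram")
      , analyzerDefinitionFilter = []
      , analyzerDefinitionCharFilter = []
      }
```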
data CharFilterDefinition Source #
Character filters are used to preprocess the stream of characters before it is passed to the tokenizer.
Constructors
| CharFilterDefinitionMapping (Map Text Text) | |
| CharFilterDefinitionPatternReplace | |
Instances
| Eq CharFilterDefinition Source # | Defined in Database.Bloodhound.Internal.Analysis |
| Show CharFilterDefinition Source # | Defined in Database.Bloodhound.Internal.Analysis |
| ToJSON CharFilterDefinition Source # | Defined in Database.Bloodhound.Internal.Analysis |
| FromJSON CharFilterDefinition Source # | Defined in Database.Bloodhound.Internal.Analysis |
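As a small sketch (the binding name ampersandFilter is arbitrary), a mapping char filter that rewrites "&" to "and" before the text reaches the tokenizer, serialized through the ToJSON instance above:

```haskell
{-# LANGUAGE OverloadedStrings #-}

import Data.Aeson (encode)
import qualified Data.Map.Strict as M
import Database.Bloodhound.Internal.Analysis

-- Rewrite "&" to "and" in the character stream before tokenization.
ampersandFilter :: CharFilterDefinition
ampersandFilter = CharFilterDefinitionMapping (M.fromList [("&", "and")])

main :: IO ()
main = print (encode ampersandFilter)
```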
newtype TokenizerDefinition Source #
Constructors
| TokenizerDefinitionNgram Ngram | 
Instances
| Eq TokenizerDefinition Source # | Defined in Database.Bloodhound.Internal.Analysis |
| Show TokenizerDefinition Source # | Defined in Database.Bloodhound.Internal.Analysis |
| ToJSON TokenizerDefinition Source # | Defined in Database.Bloodhound.Internal.Analysis |
| FromJSON TokenizerDefinition Source # | Defined in Database.Bloodhound.Internal.Analysis |
data Ngram Source #
Constructors
| Ngram | |
Fields
| ngramMinGram :: Int | |
| ngramMaxGram :: Int | |
| ngramTokenChars :: [TokenChar] | |
data TokenChar Source #
Constructors
| TokenLetter | |
| TokenDigit | |
| TokenWhitespace | |
| TokenPunctuation | |
| TokenSymbol | 
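Putting the two types above together, a sketch of an n-gram tokenizer definition (the binding name is arbitrary); the ToJSON instance on TokenizerDefinition produces the corresponding tokenizer settings:

```haskell
import Data.Aeson (encode)
import Database.Bloodhound.Internal.Analysis

-- An n-gram tokenizer emitting 3- and 4-character grams built from
-- letters and digits only.
ngramTokenizer :: TokenizerDefinition
ngramTokenizer =
  TokenizerDefinitionNgram
    Ngram
      { ngramMinGram = 3
      , ngramMaxGram = 4
      , ngramTokenChars = [TokenLetter, TokenDigit]
      }

main :: IO ()
main = print (encode ngramTokenizer)
```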
data TokenFilterDefinition Source #
Token filters are used to create custom analyzers.
Constructors
| TokenFilterDefinitionLowercase (Maybe Language) | |
| TokenFilterDefinitionUppercase (Maybe Language) | |
| TokenFilterDefinitionApostrophe | |
| TokenFilterDefinitionReverse | |
| TokenFilterDefinitionSnowball Language | |
| TokenFilterDefinitionShingle Shingle | |
| TokenFilterDefinitionStemmer Language | |
| TokenFilterDefinitionStop (Either Language [StopWord]) | |
Instances
| Eq TokenFilterDefinition Source # | Defined in Database.Bloodhound.Internal.Analysis |
| Show TokenFilterDefinition Source # | Defined in Database.Bloodhound.Internal.Analysis |
| ToJSON TokenFilterDefinition Source # | Defined in Database.Bloodhound.Internal.Analysis |
| FromJSON TokenFilterDefinition Source # | Defined in Database.Bloodhound.Internal.Analysis |
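A few filter definitions built from the constructors above (binding names are arbitrary); in a custom analyzer they would typically be registered by name and then referenced from an analyzer definition:

```haskell
import Database.Bloodhound.Internal.Analysis

-- Lowercase tokens without any language-specific handling.
lowercaseFilter :: TokenFilterDefinition
lowercaseFilter = TokenFilterDefinitionLowercase Nothing

-- Stem tokens with the English Snowball stemmer.
snowballFilter :: TokenFilterDefinition
snowballFilter = TokenFilterDefinitionSnowball English

-- Drop stop words using the built-in English list rather than an
-- explicit [StopWord] list.
stopFilter :: TokenFilterDefinition
stopFilter = TokenFilterDefinitionStop (Left English)
```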
data Language Source #
The set of languages that can be passed to various analyzers, filters, etc. in Elasticsearch. Most data types in this module that have a Language field are actually only able to handle a subset of these languages. Consult the official Elasticsearch documentation to see what is actually supported.
Constructors
| Arabic | |
| Armenian | |
| Basque | |
| Bengali | |
| Brazilian | |
| Bulgarian | |
| Catalan | |
| Cjk | |
| Czech | |
| Danish | |
| Dutch | |
| English | |
| Finnish | |
| French | |
| Galician | |
| German | |
| German2 | |
| Greek | |
| Hindi | |
| Hungarian | |
| Indonesian | |
| Irish | |
| Italian | |
| Kp | |
| Latvian | |
| Lithuanian | |
| Lovins | |
| Norwegian | |
| Persian | |
| Porter | |
| Portuguese | |
| Romanian | |
| Russian | |
| Sorani | |
| Spanish | |
| Swedish | |
| Thai | |
| Turkish | |
languageToText :: Language -> Text Source #
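languageToText gives the textual name Elasticsearch expects in its settings, and languageFromText (listed in the synopsis) parses such a name back. A quick sketch of the round-trip one would expect these two helpers to satisfy:

```haskell
import Database.Bloodhound.Internal.Analysis

-- Convert a Language to its textual form and back; with languageFromText
-- as the inverse of languageToText, this should yield Just lang for
-- every constructor.
roundTrip :: Language -> Maybe Language
roundTrip = languageFromText . languageToText
```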