{-# LINE 1 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# LINE 2 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}

{-# LINE 3 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}

{-# LINE 4 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
module Bindings.Groonga.Raw.Tokenizer where
import Foreign.Ptr
import Foreign.Ptr (Ptr,FunPtr,plusPtr)
import Foreign.Ptr (wordPtrToPtr,castPtrToFunPtr)
import Foreign.Storable
import Foreign.C.Types
import Foreign.C.String (CString,CStringLen,CWString,CWStringLen)
import Foreign.Marshal.Alloc (alloca)
import Foreign.Marshal.Array (peekArray,pokeArray)
import Data.Int
import Data.Word

{-# LINE 7 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}

import Bindings.Groonga.Raw
import Bindings.Groonga.Raw.Plugin
-- | Raw binding to C @grn_tokenizer_charlen@.
-- NOTE(review): presumably returns the byte length of the first character
-- of the buffer under the given encoding — confirm against groonga's
-- tokenizer header; only the signature is visible here.
foreign import ccall "grn_tokenizer_charlen" c'grn_tokenizer_charlen
  :: Ptr C'_grn_ctx -> CString -> CUInt -> C'grn_encoding -> IO CInt
-- | Address of @grn_tokenizer_charlen@ (the @&@ import form), for use
-- wherever a 'FunPtr' to the C function is required.
foreign import ccall "&grn_tokenizer_charlen" p'grn_tokenizer_charlen
  :: FunPtr (Ptr C'_grn_ctx -> CString -> CUInt -> C'grn_encoding -> IO CInt)

{-# LINE 11 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
-- | Raw binding to C @grn_tokenizer_isspace@ (same argument shape as
-- 'c'grn_tokenizer_charlen'; returns a C truthiness value as 'CInt').
foreign import ccall "grn_tokenizer_isspace" c'grn_tokenizer_isspace
  :: Ptr C'_grn_ctx -> CString -> CUInt -> C'grn_encoding -> IO CInt
-- | Address of @grn_tokenizer_isspace@ as a 'FunPtr'.
foreign import ccall "&grn_tokenizer_isspace" p'grn_tokenizer_isspace
  :: FunPtr (Ptr C'_grn_ctx -> CString -> CUInt -> C'grn_encoding -> IO CInt)

{-# LINE 12 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
-- | Raw binding to C @grn_tokenizer_is_tokenized_delimiter@.
-- Returns 'CUChar' (C @grn_bool@-style result; 0 = false).
foreign import ccall "grn_tokenizer_is_tokenized_delimiter" c'grn_tokenizer_is_tokenized_delimiter
  :: Ptr C'_grn_ctx -> CString -> CUInt -> C'grn_encoding -> IO CUChar
-- | Address of @grn_tokenizer_is_tokenized_delimiter@ as a 'FunPtr'.
foreign import ccall "&grn_tokenizer_is_tokenized_delimiter" p'grn_tokenizer_is_tokenized_delimiter
  :: FunPtr (Ptr C'_grn_ctx -> CString -> CUInt -> C'grn_encoding -> IO CUChar)

{-# LINE 13 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
-- | Raw binding to C @grn_tokenizer_have_tokenized_delimiter@.
-- Returns 'CUChar' (C @grn_bool@-style result; 0 = false).
foreign import ccall "grn_tokenizer_have_tokenized_delimiter" c'grn_tokenizer_have_tokenized_delimiter
  :: Ptr C'_grn_ctx -> CString -> CUInt -> C'grn_encoding -> IO CUChar
-- | Address of @grn_tokenizer_have_tokenized_delimiter@ as a 'FunPtr'.
foreign import ccall "&grn_tokenizer_have_tokenized_delimiter" p'grn_tokenizer_have_tokenized_delimiter
  :: FunPtr (Ptr C'_grn_ctx -> CString -> CUInt -> C'grn_encoding -> IO CUChar)

{-# LINE 14 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
{- typedef struct _grn_tokenizer_query grn_tokenizer_query; -}
-- | Haskell counterpart of the C @typedef@ quoted above: the public
-- @grn_tokenizer_query@ name is an alias for the underlying struct type.
type C'grn_tokenizer_query = C'_grn_tokenizer_query

{-# LINE 16 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
{- struct _grn_tokenizer_query {
    grn_obj * normalized_query;
    char * query_buf;
    const char * ptr;
    unsigned int length;
    grn_encoding encoding;
    unsigned int flags;
    grn_bool have_tokenized_delimiter;
}; -}

{-# LINE 26 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}

{-# LINE 27 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}

{-# LINE 28 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}

{-# LINE 29 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}

{-# LINE 30 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}

{-# LINE 31 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}

{-# LINE 32 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}

{-# LINE 33 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}

{-# LINE 34 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
-- | Haskell mirror of the C struct @_grn_tokenizer_query@.
--
-- The byte offsets used by the field accessors and the 'Storable'
-- instance (0, 8, 16, 24, 28, 32, 36, 40; total size 48, alignment 8)
-- are fixed by the C ABI this file was generated against — do not edit
-- them by hand; regenerate from the header instead.
--
-- NOTE(review): the quoted C struct comment earlier in this file lists
-- seven fields, but this record carries an eighth (@token_mode@ at
-- offset 40) — presumably generated from a newer header; confirm.
data C'_grn_tokenizer_query = C'_grn_tokenizer_query{
  c'_grn_tokenizer_query'normalized_query :: Ptr C'_grn_obj,
  c'_grn_tokenizer_query'query_buf :: CString,
  c'_grn_tokenizer_query'ptr :: CString,
  c'_grn_tokenizer_query'length :: CUInt,
  c'_grn_tokenizer_query'encoding :: C'grn_encoding,
  c'_grn_tokenizer_query'flags :: CUInt,
  c'_grn_tokenizer_query'have_tokenized_delimiter :: CUChar,
  c'_grn_tokenizer_query'token_mode :: CUInt
} deriving (Eq,Show)

-- Typed pointers to individual struct fields, computed from the base
-- pointer of the struct.
p'_grn_tokenizer_query'normalized_query :: Ptr C'_grn_tokenizer_query -> Ptr (Ptr C'_grn_obj)
p'_grn_tokenizer_query'normalized_query base = base `plusPtr` 0
p'_grn_tokenizer_query'query_buf :: Ptr C'_grn_tokenizer_query -> Ptr CString
p'_grn_tokenizer_query'query_buf base = base `plusPtr` 8
p'_grn_tokenizer_query'ptr :: Ptr C'_grn_tokenizer_query -> Ptr CString
p'_grn_tokenizer_query'ptr base = base `plusPtr` 16
p'_grn_tokenizer_query'length :: Ptr C'_grn_tokenizer_query -> Ptr CUInt
p'_grn_tokenizer_query'length base = base `plusPtr` 24
p'_grn_tokenizer_query'encoding :: Ptr C'_grn_tokenizer_query -> Ptr C'grn_encoding
p'_grn_tokenizer_query'encoding base = base `plusPtr` 28
p'_grn_tokenizer_query'flags :: Ptr C'_grn_tokenizer_query -> Ptr CUInt
p'_grn_tokenizer_query'flags base = base `plusPtr` 32
p'_grn_tokenizer_query'have_tokenized_delimiter :: Ptr C'_grn_tokenizer_query -> Ptr CUChar
p'_grn_tokenizer_query'have_tokenized_delimiter base = base `plusPtr` 36
p'_grn_tokenizer_query'token_mode :: Ptr C'_grn_tokenizer_query -> Ptr CUInt
p'_grn_tokenizer_query'token_mode base = base `plusPtr` 40

-- Marshalling: read/write every field at its fixed offset.
instance Storable C'_grn_tokenizer_query where
  sizeOf _ = 48
  alignment _ = 8
  peek base =
    C'_grn_tokenizer_query
      <$> peekByteOff base 0
      <*> peekByteOff base 8
      <*> peekByteOff base 16
      <*> peekByteOff base 24
      <*> peekByteOff base 28
      <*> peekByteOff base 32
      <*> peekByteOff base 36
      <*> peekByteOff base 40
  poke base (C'_grn_tokenizer_query f0 f1 f2 f3 f4 f5 f6 f7) =
       pokeByteOff base 0  f0
    >> pokeByteOff base 8  f1
    >> pokeByteOff base 16 f2
    >> pokeByteOff base 24 f3
    >> pokeByteOff base 28 f4
    >> pokeByteOff base 32 f5
    >> pokeByteOff base 36 f6
    >> pokeByteOff base 40 f7

{-# LINE 35 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
-- | Raw binding to C @grn_tokenizer_query_open@; yields a pointer to a
-- newly opened tokenizer query (ownership/failure semantics not visible
-- here — NOTE(review): confirm NULL-on-error behavior in the C docs).
foreign import ccall "grn_tokenizer_query_open" c'grn_tokenizer_query_open
  :: Ptr C'_grn_ctx -> CInt -> Ptr (Ptr C'_grn_obj) -> CUInt -> IO (Ptr C'_grn_tokenizer_query)
-- | Address of @grn_tokenizer_query_open@ as a 'FunPtr'.
foreign import ccall "&grn_tokenizer_query_open" p'grn_tokenizer_query_open
  :: FunPtr (Ptr C'_grn_ctx -> CInt -> Ptr (Ptr C'_grn_obj) -> CUInt -> IO (Ptr C'_grn_tokenizer_query))

{-# LINE 36 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
-- | Raw binding to C @grn_tokenizer_query_create@ (like
-- 'c'grn_tokenizer_query_open' but without the trailing flags argument).
foreign import ccall "grn_tokenizer_query_create" c'grn_tokenizer_query_create
  :: Ptr C'_grn_ctx -> CInt -> Ptr (Ptr C'_grn_obj) -> IO (Ptr C'_grn_tokenizer_query)
-- | Address of @grn_tokenizer_query_create@ as a 'FunPtr'.
foreign import ccall "&grn_tokenizer_query_create" p'grn_tokenizer_query_create
  :: FunPtr (Ptr C'_grn_ctx -> CInt -> Ptr (Ptr C'_grn_obj) -> IO (Ptr C'_grn_tokenizer_query))

{-# LINE 37 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
-- | Raw binding to C @grn_tokenizer_query_close@; releases a query
-- obtained from the open/create calls above.
foreign import ccall "grn_tokenizer_query_close" c'grn_tokenizer_query_close
  :: Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_query -> IO ()
-- | Address of @grn_tokenizer_query_close@ as a 'FunPtr'.
foreign import ccall "&grn_tokenizer_query_close" p'grn_tokenizer_query_close
  :: FunPtr (Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_query -> IO ())

{-# LINE 38 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
-- | Raw binding to C @grn_tokenizer_query_destroy@ (same shape as
-- 'c'grn_tokenizer_query_close').
foreign import ccall "grn_tokenizer_query_destroy" c'grn_tokenizer_query_destroy
  :: Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_query -> IO ()
-- | Address of @grn_tokenizer_query_destroy@ as a 'FunPtr'.
foreign import ccall "&grn_tokenizer_query_destroy" p'grn_tokenizer_query_destroy
  :: FunPtr (Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_query -> IO ())

{-# LINE 39 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
{- typedef struct _grn_tokenizer_token grn_tokenizer_token; -}
-- | Haskell counterpart of the C @typedef@ quoted above: the public
-- @grn_tokenizer_token@ name is an alias for the underlying struct type.
type C'grn_tokenizer_token = C'_grn_tokenizer_token

{-# LINE 41 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
{- struct _grn_tokenizer_token {
    grn_obj str; grn_obj status;
}; -}

{-# LINE 45 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}

{-# LINE 46 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}

{-# LINE 47 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
-- | Haskell mirror of the C struct @_grn_tokenizer_token@: two embedded
-- @grn_obj@ values, @str@ and @status@.
--
-- The offsets (0 and 32) and total size (64, alignment 8) come from the
-- C ABI this file was generated against; each embedded 'C'_grn_obj'
-- occupies 32 bytes here. Do not adjust by hand — regenerate instead.
data C'_grn_tokenizer_token = C'_grn_tokenizer_token{
  c'_grn_tokenizer_token'str :: C'_grn_obj,
  c'_grn_tokenizer_token'status :: C'_grn_obj
} deriving (Eq,Show)

-- Typed pointers to the embedded fields.
p'_grn_tokenizer_token'str :: Ptr C'_grn_tokenizer_token -> Ptr C'_grn_obj
p'_grn_tokenizer_token'str base = base `plusPtr` 0
p'_grn_tokenizer_token'status :: Ptr C'_grn_tokenizer_token -> Ptr C'_grn_obj
p'_grn_tokenizer_token'status base = base `plusPtr` 32

-- Marshalling: both embedded objects are read/written in place.
instance Storable C'_grn_tokenizer_token where
  sizeOf _ = 64
  alignment _ = 8
  peek base =
    C'_grn_tokenizer_token
      <$> peekByteOff base 0
      <*> peekByteOff base 32
  poke base (C'_grn_tokenizer_token s0 s1) =
       pokeByteOff base 0  s0
    >> pokeByteOff base 32 s1

{-# LINE 48 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
-- | Raw binding to C @grn_tokenizer_token_init@; initializes a
-- caller-allocated 'C'_grn_tokenizer_token'.
foreign import ccall "grn_tokenizer_token_init" c'grn_tokenizer_token_init
  :: Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_token -> IO ()
-- | Address of @grn_tokenizer_token_init@ as a 'FunPtr'.
foreign import ccall "&grn_tokenizer_token_init" p'grn_tokenizer_token_init
  :: FunPtr (Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_token -> IO ())

{-# LINE 49 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
-- | Raw binding to C @grn_tokenizer_token_fin@; the finalizer paired
-- with 'c'grn_tokenizer_token_init'.
foreign import ccall "grn_tokenizer_token_fin" c'grn_tokenizer_token_fin
  :: Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_token -> IO ()
-- | Address of @grn_tokenizer_token_fin@ as a 'FunPtr'.
foreign import ccall "&grn_tokenizer_token_fin" p'grn_tokenizer_token_fin
  :: FunPtr (Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_token -> IO ())

{-# LINE 50 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
{- typedef unsigned int grn_tokenizer_status; -}
-- | Haskell counterpart of the C @typedef@ above: tokenizer status
-- values are plain unsigned ints on the C side.
type C'grn_tokenizer_status = CUInt

{-# LINE 52 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
-- | Raw binding to C @grn_tokenizer_token_push@. The final 'CUInt'
-- argument matches 'C'grn_tokenizer_status' — NOTE(review): presumably
-- the token's status flags; confirm argument meanings in the C header.
foreign import ccall "grn_tokenizer_token_push" c'grn_tokenizer_token_push
  :: Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_token -> CString -> CUInt -> CUInt -> IO ()
-- | Address of @grn_tokenizer_token_push@ as a 'FunPtr'.
foreign import ccall "&grn_tokenizer_token_push" p'grn_tokenizer_token_push
  :: FunPtr (Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_token -> CString -> CUInt -> CUInt -> IO ())

{-# LINE 53 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
-- | Raw binding to C @grn_tokenizer_tokenized_delimiter_next@; returns
-- a 'CString' cursor into the scanned buffer.
foreign import ccall "grn_tokenizer_tokenized_delimiter_next" c'grn_tokenizer_tokenized_delimiter_next
  :: Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_token -> CString -> CUInt -> C'grn_encoding -> IO CString
-- | Address of @grn_tokenizer_tokenized_delimiter_next@ as a 'FunPtr'.
foreign import ccall "&grn_tokenizer_tokenized_delimiter_next" p'grn_tokenizer_tokenized_delimiter_next
  :: FunPtr (Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_token -> CString -> CUInt -> C'grn_encoding -> IO CString)

{-# LINE 54 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}
-- | Raw binding to C @grn_tokenizer_register@: registers a tokenizer by
-- name (string + length) with three proc-function pointers
-- (NOTE(review): presumably init/next/fin — confirm), returning a
-- 'C'grn_rc' result code.
foreign import ccall "grn_tokenizer_register" c'grn_tokenizer_register
  :: Ptr C'_grn_ctx -> CString -> CUInt -> Ptr C'grn_proc_func -> Ptr C'grn_proc_func -> Ptr C'grn_proc_func -> IO C'grn_rc
-- | Address of @grn_tokenizer_register@ as a 'FunPtr'.
foreign import ccall "&grn_tokenizer_register" p'grn_tokenizer_register
  :: FunPtr (Ptr C'_grn_ctx -> CString -> CUInt -> Ptr C'grn_proc_func -> Ptr C'grn_proc_func -> Ptr C'grn_proc_func -> IO C'grn_rc)

{-# LINE 55 "Bindings/Groonga/Raw/Tokenizer.hsc" #-}