-- | Low-level FFI bindings for the Groonga tokenizer plugin API
-- (the @grn_tokenizer_*@ entry points).
module Bindings.Groonga.Raw.Tokenizer where
import Foreign.Ptr
import Foreign.Storable
import Foreign.C.Types
import Foreign.C.String (CString,CStringLen,CWString,CWStringLen)
import Foreign.Marshal.Alloc (alloca)
import Foreign.Marshal.Array (peekArray,pokeArray)
import Data.Int
import Data.Word
import Bindings.Groonga.Raw
import Bindings.Groonga.Raw.Plugin
foreign import ccall "grn_tokenizer_charlen" c'grn_tokenizer_charlen
:: Ptr C'_grn_ctx -> CString -> CUInt -> C'grn_encoding -> IO CInt
foreign import ccall "&grn_tokenizer_charlen" p'grn_tokenizer_charlen
:: FunPtr (Ptr C'_grn_ctx -> CString -> CUInt -> C'grn_encoding -> IO CInt)
foreign import ccall "grn_tokenizer_isspace" c'grn_tokenizer_isspace
:: Ptr C'_grn_ctx -> CString -> CUInt -> C'grn_encoding -> IO CInt
foreign import ccall "&grn_tokenizer_isspace" p'grn_tokenizer_isspace
:: FunPtr (Ptr C'_grn_ctx -> CString -> CUInt -> C'grn_encoding -> IO CInt)
foreign import ccall "grn_tokenizer_is_tokenized_delimiter" c'grn_tokenizer_is_tokenized_delimiter
:: Ptr C'_grn_ctx -> CString -> CUInt -> C'grn_encoding -> IO CUChar
foreign import ccall "&grn_tokenizer_is_tokenized_delimiter" p'grn_tokenizer_is_tokenized_delimiter
:: FunPtr (Ptr C'_grn_ctx -> CString -> CUInt -> C'grn_encoding -> IO CUChar)
foreign import ccall "grn_tokenizer_have_tokenized_delimiter" c'grn_tokenizer_have_tokenized_delimiter
:: Ptr C'_grn_ctx -> CString -> CUInt -> C'grn_encoding -> IO CUChar
foreign import ccall "&grn_tokenizer_have_tokenized_delimiter" p'grn_tokenizer_have_tokenized_delimiter
:: FunPtr (Ptr C'_grn_ctx -> CString -> CUInt -> C'grn_encoding -> IO CUChar)
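-- A minimal usage sketch (not part of the generated bindings): count the
-- characters in a raw buffer by repeatedly asking grn_tokenizer_charlen for
-- the byte length of the next character, assuming a return value <= 0 means
-- end of data or an invalid byte sequence. countTokenizerChars is a
-- hypothetical helper name.
countTokenizerChars :: Ptr C'_grn_ctx -> CStringLen -> C'grn_encoding -> IO Int
countTokenizerChars ctx (str, len) enc = go str len 0
  where
    go p rest n
      | rest <= 0 = return n
      | otherwise = do
          clen <- c'grn_tokenizer_charlen ctx p (fromIntegral rest) enc
          if clen <= 0
            then return n
            else go (p `plusPtr` fromIntegral clen)
                    (rest - fromIntegral clen)
                    (n + 1)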
type C'grn_tokenizer_query = C'_grn_tokenizer_query
data C'_grn_tokenizer_query = C'_grn_tokenizer_query{
c'_grn_tokenizer_query'normalized_query :: Ptr C'_grn_obj,
c'_grn_tokenizer_query'query_buf :: CString,
c'_grn_tokenizer_query'ptr :: CString,
c'_grn_tokenizer_query'length :: CUInt,
c'_grn_tokenizer_query'encoding :: C'grn_encoding,
c'_grn_tokenizer_query'flags :: CUInt,
c'_grn_tokenizer_query'have_tokenized_delimiter :: CUChar
} deriving (Eq,Show)
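-- The field accessors and Storable instance below hard-code byte offsets
-- (0, 8, 16, 24, 28, 32, 36) and an overall size of 40 with alignment 8.
-- These values correspond to the usual 64-bit layout of
-- struct _grn_tokenizer_query; on other ABIs the generated offsets differ.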
p'_grn_tokenizer_query'normalized_query p = plusPtr p 0
p'_grn_tokenizer_query'normalized_query :: Ptr (C'_grn_tokenizer_query) -> Ptr (Ptr C'_grn_obj)
p'_grn_tokenizer_query'query_buf p = plusPtr p 8
p'_grn_tokenizer_query'query_buf :: Ptr (C'_grn_tokenizer_query) -> Ptr (CString)
p'_grn_tokenizer_query'ptr p = plusPtr p 16
p'_grn_tokenizer_query'ptr :: Ptr (C'_grn_tokenizer_query) -> Ptr (CString)
p'_grn_tokenizer_query'length p = plusPtr p 24
p'_grn_tokenizer_query'length :: Ptr (C'_grn_tokenizer_query) -> Ptr (CUInt)
p'_grn_tokenizer_query'encoding p = plusPtr p 28
p'_grn_tokenizer_query'encoding :: Ptr (C'_grn_tokenizer_query) -> Ptr (C'grn_encoding)
p'_grn_tokenizer_query'flags p = plusPtr p 32
p'_grn_tokenizer_query'flags :: Ptr (C'_grn_tokenizer_query) -> Ptr (CUInt)
p'_grn_tokenizer_query'have_tokenized_delimiter p = plusPtr p 36
p'_grn_tokenizer_query'have_tokenized_delimiter :: Ptr (C'_grn_tokenizer_query) -> Ptr (CUChar)
instance Storable C'_grn_tokenizer_query where
sizeOf _ = 40
alignment _ = 8
peek p = do
v0 <- peekByteOff p 0
v1 <- peekByteOff p 8
v2 <- peekByteOff p 16
v3 <- peekByteOff p 24
v4 <- peekByteOff p 28
v5 <- peekByteOff p 32
v6 <- peekByteOff p 36
return $ C'_grn_tokenizer_query v0 v1 v2 v3 v4 v5 v6
poke p (C'_grn_tokenizer_query v0 v1 v2 v3 v4 v5 v6) = do
pokeByteOff p 0 v0
pokeByteOff p 8 v1
pokeByteOff p 16 v2
pokeByteOff p 24 v3
pokeByteOff p 28 v4
pokeByteOff p 32 v5
pokeByteOff p 36 v6
return ()
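-- Hedged convenience sketch (not generated): read the current pointer and
-- remaining byte length out of a tokenizer query via the accessors above.
-- peekTokenizerQueryString is a hypothetical helper name.
peekTokenizerQueryString :: Ptr C'_grn_tokenizer_query -> IO CStringLen
peekTokenizerQueryString q = do
  s   <- peek (p'_grn_tokenizer_query'ptr q)
  len <- peek (p'_grn_tokenizer_query'length q)
  return (s, fromIntegral len)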
foreign import ccall "grn_tokenizer_query_open" c'grn_tokenizer_query_open
:: Ptr C'_grn_ctx -> CInt -> Ptr (Ptr C'_grn_obj) -> CUInt -> IO (Ptr C'_grn_tokenizer_query)
foreign import ccall "&grn_tokenizer_query_open" p'grn_tokenizer_query_open
:: FunPtr (Ptr C'_grn_ctx -> CInt -> Ptr (Ptr C'_grn_obj) -> CUInt -> IO (Ptr C'_grn_tokenizer_query))
foreign import ccall "grn_tokenizer_query_create" c'grn_tokenizer_query_create
:: Ptr C'_grn_ctx -> CInt -> Ptr (Ptr C'_grn_obj) -> IO (Ptr C'_grn_tokenizer_query)
foreign import ccall "&grn_tokenizer_query_create" p'grn_tokenizer_query_create
:: FunPtr (Ptr C'_grn_ctx -> CInt -> Ptr (Ptr C'_grn_obj) -> IO (Ptr C'_grn_tokenizer_query))
foreign import ccall "grn_tokenizer_query_close" c'grn_tokenizer_query_close
:: Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_query -> IO ()
foreign import ccall "&grn_tokenizer_query_close" p'grn_tokenizer_query_close
:: FunPtr (Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_query -> IO ())
foreign import ccall "grn_tokenizer_query_destroy" c'grn_tokenizer_query_destroy
:: Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_query -> IO ()
foreign import ccall "&grn_tokenizer_query_destroy" p'grn_tokenizer_query_destroy
:: FunPtr (Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_query -> IO ())
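-- Hedged sketch (not generated): pair grn_tokenizer_query_open with
-- grn_tokenizer_query_close around an action. Assumes query_open returns a
-- null pointer on failure; the wrapper is not exception-safe (no bracket),
-- and withTokenizerQuery is a hypothetical helper name.
withTokenizerQuery
  :: Ptr C'_grn_ctx -> CInt -> Ptr (Ptr C'_grn_obj) -> CUInt
  -> (Ptr C'_grn_tokenizer_query -> IO a) -> IO (Maybe a)
withTokenizerQuery ctx nargs args flags act = do
  q <- c'grn_tokenizer_query_open ctx nargs args flags
  if q == nullPtr
    then return Nothing
    else do
      r <- act q
      c'grn_tokenizer_query_close ctx q
      return (Just r)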
type C'grn_tokenizer_token = C'_grn_tokenizer_token
data C'_grn_tokenizer_token = C'_grn_tokenizer_token{
c'_grn_tokenizer_token'str :: C'_grn_obj,
c'_grn_tokenizer_token'status :: C'_grn_obj
} deriving (Eq,Show)
p'_grn_tokenizer_token'str p = plusPtr p 0
p'_grn_tokenizer_token'str :: Ptr (C'_grn_tokenizer_token) -> Ptr (C'_grn_obj)
p'_grn_tokenizer_token'status p = plusPtr p 32
p'_grn_tokenizer_token'status :: Ptr (C'_grn_tokenizer_token) -> Ptr (C'_grn_obj)
instance Storable C'_grn_tokenizer_token where
sizeOf _ = 64
alignment _ = 8
peek p = do
v0 <- peekByteOff p 0
v1 <- peekByteOff p 32
return $ C'_grn_tokenizer_token v0 v1
poke p (C'_grn_tokenizer_token v0 v1) = do
pokeByteOff p 0 v0
pokeByteOff p 32 v1
return ()
foreign import ccall "grn_tokenizer_token_init" c'grn_tokenizer_token_init
:: Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_token -> IO ()
foreign import ccall "&grn_tokenizer_token_init" p'grn_tokenizer_token_init
:: FunPtr (Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_token -> IO ())
foreign import ccall "grn_tokenizer_token_fin" c'grn_tokenizer_token_fin
:: Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_token -> IO ()
foreign import ccall "&grn_tokenizer_token_fin" p'grn_tokenizer_token_fin
:: FunPtr (Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_token -> IO ())
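-- Hedged sketch (not generated): allocate a grn_tokenizer_token on the C
-- side via alloca, initialise it, run an action, then finalise it. Not
-- exception-safe; withTokenizerToken is a hypothetical helper name.
withTokenizerToken :: Ptr C'_grn_ctx -> (Ptr C'_grn_tokenizer_token -> IO a) -> IO a
withTokenizerToken ctx act =
  alloca $ \tok -> do
    c'grn_tokenizer_token_init ctx tok
    r <- act tok
    c'grn_tokenizer_token_fin ctx tok
    return r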
type C'grn_tokenizer_status = CUInt
foreign import ccall "grn_tokenizer_token_push" c'grn_tokenizer_token_push
:: Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_token -> CString -> CUInt -> CUInt -> IO ()
foreign import ccall "&grn_tokenizer_token_push" p'grn_tokenizer_token_push
:: FunPtr (Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_token -> CString -> CUInt -> CUInt -> IO ())
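-- Hedged sketch (not generated): push one token given as a CStringLen,
-- passing the final CUInt argument as the tokenizer status flags
-- (C'grn_tokenizer_status). pushToken is a hypothetical helper name.
pushToken :: Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_token
          -> CStringLen -> C'grn_tokenizer_status -> IO ()
pushToken ctx tok (str, len) status =
  c'grn_tokenizer_token_push ctx tok str (fromIntegral len) status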
foreign import ccall "grn_tokenizer_tokenized_delimiter_next" c'grn_tokenizer_tokenized_delimiter_next
:: Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_token -> CString -> CUInt -> C'grn_encoding -> IO CString
foreign import ccall "&grn_tokenizer_tokenized_delimiter_next" p'grn_tokenizer_tokenized_delimiter_next
:: FunPtr (Ptr C'_grn_ctx -> Ptr C'_grn_tokenizer_token -> CString -> CUInt -> C'grn_encoding -> IO CString)
foreign import ccall "grn_tokenizer_register" c'grn_tokenizer_register
:: Ptr C'_grn_ctx -> CString -> CUInt -> Ptr C'grn_proc_func -> Ptr C'grn_proc_func -> Ptr C'grn_proc_func -> IO C'grn_rc
foreign import ccall "&grn_tokenizer_register" p'grn_tokenizer_register
:: FunPtr (Ptr C'_grn_ctx -> CString -> CUInt -> Ptr C'grn_proc_func -> Ptr C'grn_proc_func -> Ptr C'grn_proc_func -> IO C'grn_rc)