# spacy/lang.pxd — Cython declarations for the Language and Lexicon types.
from libc.stdint cimport uint32_t
from libc.stdint cimport uint64_t
from spacy.word cimport Lexeme
from spacy.tokens cimport Tokens
from spacy.lexeme cimport LexemeC
from libcpp.utility cimport pair
from libcpp.vector cimport vector
from libc.stdint cimport uint64_t, int64_t
# External declaration of Google sparsehash's dense_hash_map so Cython code
# can use it directly.  Only the subset of the C++ API used in this project
# is declared; signatures mirror the sparsehash header.
cdef extern from "sparsehash/dense_hash_map" namespace "google":
    cdef cppclass dense_hash_map[K, D]:
        K& key_type
        D& data_type
        pair[K, D]& value_type
        uint64_t size_type
        cppclass iterator:
            pair[K, D]& operator*() nogil
            iterator operator++() nogil
            iterator operator--() nogil
            bint operator==(iterator) nogil
            bint operator!=(iterator) nogil
        iterator begin()
        iterator end()
        uint64_t size()
        uint64_t max_size()
        bint empty()
        uint64_t bucket_count()
        uint64_t bucket_size(uint64_t i)
        uint64_t bucket(K& key)
        double max_load_factor()
        # FIX: was misspelled "max_load_vactor" — no such symbol exists in
        # sparsehash; the setter is an overload of max_load_factor.
        void max_load_factor(double new_grow)
        double min_load_factor()
        double min_load_factor(double new_grow)
        void set_resizing_parameters(double shrink, double grow)
        void resize(uint64_t n)
        void rehash(uint64_t n)
        dense_hash_map()
        dense_hash_map(uint64_t n)
        void swap(dense_hash_map&)
        pair[iterator, bint] insert(pair[K, D]) nogil
        void set_empty_key(K&)
        void set_deleted_key(K& key)
        void clear_deleted_key()
        void erase(iterator pos)
        uint64_t erase(K& k)
        void erase(iterator first, iterator last)
        void clear()
        void clear_no_resize()
        pair[iterator, iterator] equal_range(K& k)
        D& operator[](K&) nogil
# Singly-linked list node over lexeme pointers; links the lexemes of a
# tokenized sequence without copying them.
cdef struct LexList:
    LexemeC* lex   # payload: pointer to the lexeme's C data
    LexList* tail  # next node in the chain, or NULL at the end
cdef class Lexicon:
    # Vocabulary store: maps strings to their Lexeme entries.
    cpdef readonly size_t size  # number of lexemes currently held

    # Python-level lookup by unicode string.
    cpdef Lexeme lookup(self, unicode string)
    # C-level lookup by raw character buffer + length.
    cdef size_t get(self, Py_UNICODE* characters, size_t length)

    # Hash table keyed by a 64-bit hash of the string.
    # NOTE(review): the size_t value presumably holds a LexemeC* address,
    # matching get()'s return type — confirm in the implementation.
    cdef dense_hash_map[uint64_t, size_t] _dict

    cdef list _string_features
    cdef list _flag_features
cdef class Language:
    # Language-specific tokenizer front-end with a cache of tokenized chunks.
    cdef unicode name  # language name, e.g. "en"
    # Cache keyed by a 64-bit hash of an input chunk.
    # NOTE(review): the size_t value presumably holds a pointer to a
    # precomputed result — confirm in the implementation.
    cdef dense_hash_map[uint64_t, size_t] cache
    cdef size_t cache_size  # number of entries currently cached
    cpdef readonly Lexicon lexicon       # vocabulary shared by this language
    cpdef readonly object tokens_class   # class used to build Tokens results

    # Python-level API.
    cpdef Tokens tokenize(self, unicode text)
    cpdef Lexeme lookup(self, unicode text)

    # C-level workers over raw character buffers; -1 signals an exception.
    cdef int _tokenize(self, Tokens tokens, Py_UNICODE* characters, size_t length) except -1
    cdef int _split_one(self, Py_UNICODE* characters, size_t length)