Exploration of layers for tokenization and parsing.

kenkeiras 2018-04-23 22:48:10 +02:00
parent c18c9b8cb1
commit a444766c7c
10 changed files with 173 additions and 108 deletions

@@ -26,4 +26,4 @@ BASIC_TOKENIZATION_EXAMPLES = (
 def train_basic_tokenization(knowledge_base):
     with session().log('Training basic tokenization'):
         for example in BASIC_TOKENIZATION_EXAMPLES:
-            knowledge_base.train_tokenizer(example)
+            knowledge_base.layers.tokenization.train(example)
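
To illustrate the direction of this refactor, here is a minimal sketch of how a knowledge base might group its trainable stages under a `layers` namespace so that the new call `knowledge_base.layers.tokenization.train(example)` resolves. The `TokenizationLayer` and `KnowledgeBase` classes below are assumptions made for illustration, not the project's actual implementation.

# Hypothetical sketch (not the project's real classes): a knowledge base
# that exposes per-stage layers under a `layers` namespace, so that
# knowledge_base.layers.tokenization.train(example) works.

from types import SimpleNamespace


class TokenizationLayer:
    """Toy tokenization layer that collects training examples."""

    def __init__(self):
        self.examples = []

    def train(self, example):
        # Record the example; a real layer would update its model here.
        self.examples.append(example)

    def tokenize(self, text):
        # Naive whitespace tokenization as a placeholder.
        return text.split()


class KnowledgeBase:
    def __init__(self):
        # Group the per-stage layers under a single namespace.
        self.layers = SimpleNamespace(tokenization=TokenizationLayer())


if __name__ == '__main__':
    kb = KnowledgeBase()
    kb.layers.tokenization.train(('some text', ['some', 'text']))
    print(kb.layers.tokenization.tokenize('some text'))

Routing training through a per-layer object like this keeps each processing stage (tokenization, parsing, and so on) behind its own interface instead of adding one method per stage on the knowledge base itself.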