Learn from inferred tokenizations.

This commit is contained in:
kenkeiras 2018-04-15 20:45:24 +02:00
parent 6fb1e1e649
commit d63781a0d2
3 changed files with 16 additions and 6 deletions


@@ -7,6 +7,11 @@ BASIC_TOKENIZATION_EXAMPLES = (
         "text": 'cat',
         "tokens": ['cat'],
     }),
+    ({
+        "text": 'cats',
+        "tokens": ['cats'],
+        "meaning": { 'cats': ('add-modifier', 'cat', 'plural') },
+    }),
     ({
         "text": 'text separated by spaces',
         "tokens": ['text', 'separated', 'by', 'spaces'],