Increase logging, add failing tokenization tests.

This commit is contained in:
kenkeiras 2018-04-15 17:08:01 +02:00
parent 40b63128af
commit d601ae3f83
3 changed files with 24 additions and 6 deletions

View file

@@ -26,7 +26,10 @@ EXAMPLES = [
"text": 'text separated by spaces',
"tokens": ['text', 'separated', 'by', 'spaces'],
}),
('example', {
"text": 'is earth a planet?',
"tokens": ['is', 'earth', 'a', 'planet', '?'],
}),
('test', {
"text": 'plane',
"tokens": ['plane'],
@@ -39,6 +42,10 @@ EXAMPLES = [
('test', {
"text": 'some other text',
"tokens": ['some', 'other', 'text'],
}),
('test', {
"text": 'is the sun a star?',
"tokens": ['is', 'the', 'sun', 'a', 'star', '?'],
})
]