Add learning phase to shallow (1 level) nested structures.

kenkeiras 2017-05-15 16:51:39 +02:00
parent 099af2a815
commit 5f6b067e17
4 changed files with 111 additions and 27 deletions

@@ -3,10 +3,16 @@
import knowledge_evaluation
import re
import copy
from functools import reduce
def make_template(knowledge_base, text, parsed):
tokens = re.findall(r'(\w+|[^\s])', text)
# TODO: more flexible tokenization
def to_tokens(text):
return re.findall(r'(\w+|[^\s])', text)
def make_template(knowledge_base, tokens, parsed):
matcher = list(tokens)
template = list(parsed)
for i in range(len(matcher)):
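
The first hunk pulls tokenization out of make_template into a standalone to_tokens helper, so the token list can be built once in integrate_language and threaded through the other functions instead of being re-derived from the raw text. A minimal standalone sketch of the new helper, with a made-up input sentence:

import re

# TODO (from the commit): more flexible tokenization
def to_tokens(text):
    # Runs of word characters form one token; any other non-space
    # character becomes a token of its own.
    return re.findall(r'(\w+|[^\s])', text)

print(to_tokens("is the sky blue?"))
# ['is', 'the', 'sky', 'blue', '?']
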
@@ -28,50 +34,85 @@ def is_bottom_level(tree):
def get_lower_levels(parsed):
lower = []
def aux(subtree, top_level):
def aux(subtree, path):
nonlocal lower
deeper = top_level
for element in subtree:
deeper = len(path) == 0
for i, element in enumerate(subtree):
if isinstance(element, list) or isinstance(element, tuple):
aux(element, top_level=False)
aux(element, path + (i,))
deeper = True
if not deeper:
lower.append(subtree)
lower.append((path, subtree))
aux(parsed, top_level=True)
aux(parsed, path=())
return lower
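
get_lower_levels now records where each bottom-level subtree sits, returning (path, subtree) pairs instead of bare subtrees, so the caller can later substitute the resolved value back in with replace_position. A self-contained sketch of the updated helper as committed, exercised on a made-up parse tuple:

def get_lower_levels(parsed):
    lower = []

    def aux(subtree, path):
        nonlocal lower
        deeper = len(path) == 0
        for i, element in enumerate(subtree):
            if isinstance(element, list) or isinstance(element, tuple):
                aux(element, path + (i,))
                deeper = True

        if not deeper:
            lower.append((path, subtree))

    aux(parsed, path=())
    return lower

parsed = ('exists-property-with-value', ('color',), ('blue',))  # made-up shape
print(get_lower_levels(parsed))
# [((1,), ('color',)), ((2,), ('blue',))]
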
# TODO: probably optimize this, it creates lots of unnecessary tuples
def replace_position(tree, position, new_element):
def aux(current_tree, remaining_route):
if len(remaining_route) == 0:
return new_element
else:
step = remaining_route[0]
return (
tree[:step]
+ (aux(tree[step], remaining_route[1:]),)
+ tree[step + 2:]
)
return aux(tree, position)
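
replace_position rebuilds the parse tuple with the subtree at position swapped for new_element. As committed, aux indexes the outer tree instead of its current_tree argument and slices from step + 2, which reads as if it could drop the element right after step on deeper paths; since the commit only targets single-level nesting, that may not matter yet. A depth-general variant, under my reading of the intent and not part of the commit, might look like:

def replace_position(tree, position, new_element):
    # Return a copy of `tree` with the subtree at `position`
    # (a tuple of child indices) replaced by `new_element`.
    def aux(current_tree, remaining_route):
        if len(remaining_route) == 0:
            return new_element
        step = remaining_route[0]
        return (
            tuple(current_tree[:step])
            + (aux(current_tree[step], remaining_route[1:]),)
            + tuple(current_tree[step + 1:])
        )
    return aux(tree, position)

print(replace_position(('a', ('b', 'c'), 'd'), (1, 0), 'X'))
# ('a', ('X', 'c'), 'd')
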
def integrate_language(knowledge_base, example):
text = example["text"].lower()
parsed = example["parsed"]
print("P:", parsed)
resolved_parsed = copy.deepcopy(parsed)
tokens = to_tokens(text)
while True:
lower_levels = get_lower_levels(parsed)
print("P:", resolved_parsed)
lower_levels = get_lower_levels(resolved_parsed)
print("Lower:", lower_levels)
if len(lower_levels) == 0:
break
for atom in lower_levels:
for position, atom in lower_levels:
print("\x1b[1mSelecting\x1b[0m:", atom)
similar = get_similar_tree(knowledge_base, atom)
print("___>", similar)
remix, (start_bounds, end_bounds) = build_remix_matrix(knowledge_base, text, atom, similar)
tokens, matcher, result = make_template(knowledge_base, text, atom)
remix, (start_bounds, end_bounds) = build_remix_matrix(knowledge_base, tokens, atom, similar)
_, matcher, result = make_template(knowledge_base, tokens, atom)
print("Tx:", tokens)
print("Mx:", matcher)
print("Rx:", result)
print("Remix:", remix)
after_remix = apply_remix(tokens[len(start_bounds):-len(end_bounds)], remix)
assert(len(after_remix) + len(start_bounds) + len(end_bounds) == len(tokens))
print(" \\->", after_remix)
print( " +->", after_remix)
subquery_type = knowledge_evaluation.get_subquery_type(knowledge_base.knowledge, atom)
print(r" \-> <{}>".format(subquery_type))
# Clean remaining tokens
new_tokens = list(tokens)
offset = len(start_bounds)
for _ in range(len(remix)):
new_tokens.pop(offset)
# TODO: Get a specific types for... types
new_tokens.insert(offset, "<type: {}>".format(subquery_type))
tokens = new_tokens
resolved_parsed = replace_position(resolved_parsed, position, subquery_type)
print("#########")
break
tokens, matcher, result = make_template(knowledge_base, text, parsed)
tokens, matcher, result = make_template(knowledge_base, tokens, parsed)
print("T:", tokens)
print("M:", matcher)
print("R:", result)
@@ -86,10 +127,11 @@ def apply_remix(tokens, remix):
return rebuilt
def build_remix_matrix(knowledge_base, text, atom, similar):
def build_remix_matrix(knowledge_base, tokens, atom, similar):
# print("+" * 20)
tokens, matcher, result = make_template(knowledge_base, text, atom)
tokens = list(tokens)
tokens, matcher, result = make_template(knowledge_base, tokens, atom)
similar_matcher, similar_result, similar_result_resolved, _ = similar
# print("NEW:")