Move MAX_RECURSIONS to parameter file.

kenkeiras 2017-05-21 14:12:25 +02:00
parent e3ab9583fe
commit ae8d717344
3 changed files with 13 additions and 14 deletions


@@ -1,11 +1,10 @@
 import sys
+import parameters
 
-def show_depth(depth: int):
-    multiplier = 3
-    max_depth = 5
-    offset = int((max_depth - depth) / (2 / multiplier))
-    depth = depth * multiplier
+def show_depth(depth: int, zoom: int=2):
+    offset = int((parameters.MAX_RECURSIONS - depth) / (2 / zoom))
+    depth = depth * zoom
     offset -= int(depth % 2)
     sys.stdout.write("\r|\x1b[K" + (u'' * int(depth / 2)) + (u'' * int(depth % 2)) + ' ' * offset + "|\x1b[7m \x1b[0m\b")
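For context, the updated meter can be driven on its own. A minimal sketch, assuming the file above is the depth_meter module referenced by the matcher further down and that it sits next to the new parameters.py; the loop and sleep values are illustrative only:

import sys
import time

import depth_meter
import parameters

# Walk the meter from the shallowest to the deepest level; zoom keeps its
# default of 2, matching how the matcher calls it below.
for depth in range(parameters.MAX_RECURSIONS + 1):
    depth_meter.show_depth(depth)
    sys.stdout.flush()  # show_depth writes with \r but does not flush itself
    time.sleep(0.2)
print()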

naive-nlu/parameters.py (new file, 1 addition)

@@ -0,0 +1 @@
+MAX_RECURSIONS = 5
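The point of the new module is that every consumer reads the attribute through the module object at use time, so one assignment is seen everywhere. A minimal sketch of that pattern (the override value 8 is hypothetical, not part of the commit):

import parameters

parameters.MAX_RECURSIONS = 8          # hypothetical runtime override
indent = " " * (parameters.MAX_RECURSIONS - 3)
print(len(indent))                     # 5: readers see the override immediately

Binding the name directly with "from parameters import MAX_RECURSIONS" would copy the value at import time and miss such overrides, which is presumably why the diff below keeps the parameters.MAX_RECURSIONS spelling.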


@@ -10,8 +10,7 @@ import copy
 from functools import reduce
 from typing import List
 from modifiable_property import ModifiableProperty
-
-MAX_RECURSIONS = 5
+import parameters
 
 # TODO: more flexible tokenization
 def to_tokens(text):
@@ -326,7 +325,7 @@ def fitting_return_type(knowledge,
                         input_stream,
                         tail_of_ouput_stream,
                         remaining_recursions: int):
-    indent = " " + " " * (MAX_RECURSIONS - remaining_recursions)
+    indent = " " + " " * (parameters.MAX_RECURSIONS - remaining_recursions)
     for sample, ast in knowledge.trained:
         try:
@@ -396,11 +395,11 @@ def match_token(knowledge,
     if remaining_recursions < 1:
         yield None
-    # logging.debug("#" * (MAX_RECURSIONS - remaining_recursions))
+    # logging.debug("#" * (parameters.MAX_RECURSIONS - remaining_recursions))
     # logging.debug("Input:", input)
     # logging.debug("Output:", trained)
-    depth_meter.show_depth(MAX_RECURSIONS - remaining_recursions)
-    indent = " " + " " * (MAX_RECURSIONS - remaining_recursions)
+    depth_meter.show_depth(parameters.MAX_RECURSIONS - remaining_recursions)
+    indent = " " + " " * (parameters.MAX_RECURSIONS - remaining_recursions)
     first_input = input[0]
     expected_first = trained[0]
     logging.debug(indent + "Ex?", expected_first)
@@ -426,7 +425,7 @@
 def get_fit_onwards(knowledge, ast, remaining_input, remaining_output, remaining_recursions):
-    indent = "." + " " * (MAX_RECURSIONS - remaining_recursions)
+    indent = "." + " " * (parameters.MAX_RECURSIONS - remaining_recursions)
     try:
         # TODO: merge with get_return type, as uses the same mechanism
         if len(remaining_output) > 0:
@@ -465,9 +464,9 @@ def get_fit_onwards(knowledge, ast, remaining_input, remaining_output, remaining_recursions):
     return None
 
 
-def get_fit(knowledge, row, remaining_recursions=MAX_RECURSIONS):
+def get_fit(knowledge, row, remaining_recursions=parameters.MAX_RECURSIONS):
     tokens = to_tokens(row)
-    indent = " " * (MAX_RECURSIONS - remaining_recursions)
+    indent = " " * (parameters.MAX_RECURSIONS - remaining_recursions)
     for sample, ast in knowledge.trained:
         logging.debug("-----")
         logging.debug("TOK:", tokens)
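One Python subtlety around the new get_fit signature: a default such as remaining_recursions=parameters.MAX_RECURSIONS is evaluated once, when the def statement runs, so changing parameters.MAX_RECURSIONS afterwards does not alter the already-bound default. A small sketch with a hypothetical stand-in function:

import parameters

def get_fit_demo(remaining_recursions=parameters.MAX_RECURSIONS):
    # Stand-in for get_fit's signature above; just returns its argument.
    return remaining_recursions

parameters.MAX_RECURSIONS = 7                    # hypothetical later override
print(get_fit_demo())                            # 5: default captured at definition time
print(get_fit_demo(parameters.MAX_RECURSIONS))   # 7: explicit argument sees the override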