Compare commits

...

5 Commits

Author SHA1 Message Date
bfe60271eb Merge pull request 'Fix text parsing issues' (#9) from fix/require-whitespace-for-list-item-tag-separator into develop
All checks were successful
Testing / pytest (push) Successful in 33s
Testing / mypy (push) Successful in 32s
Testing / style-formatting (push) Successful in 26s
Testing / style-sorted-imports (push) Successful in 32s
Testing / stability-extra-test (push) Successful in 33s
Reviewed-on: #9
2024-09-01 12:10:25 +00:00
Sergio Martínez Portela
4af4cda44b Fix formatting.
All checks were successful
Testing / pytest (push) Successful in 31s
Testing / mypy (push) Successful in 41s
Testing / style-formatting (push) Successful in 43s
Testing / style-sorted-imports (push) Successful in 28s
Testing / stability-extra-test (push) Successful in 41s
2024-08-22 00:26:11 +02:00
Sergio Martínez Portela
5552b3324b Handle ] which does not close link descriptions or references.
Some checks failed
Testing / stability-extra-test (push) Waiting to run
Testing / pytest (push) Successful in 38s
Testing / style-sorted-imports (push) Waiting to run
Testing / mypy (push) Successful in 46s
Testing / style-formatting (push) Has been cancelled
2024-08-22 00:21:02 +02:00
Sergio Martínez Portela
f31c64c242 Properly track which tokens are used for closing formats. 2024-08-22 00:20:54 +02:00
Sergio Martínez Portela
490b36887a Require space before list item tag separator. 2024-08-22 00:20:15 +02:00


@@ -103,7 +103,7 @@ PLANNING_RE = re.compile(
     r")+\s*"
 )
 LIST_ITEM_RE = re.compile(
-    r"(?P<indentation>\s*)((?P<bullet>[*\-+])|((?P<counter>\d|[a-zA-Z])(?P<counter_sep>[.)]))) ((?P<checkbox_indentation>\s*)\[(?P<checkbox_value>[ Xx])\])?((?P<tag_indentation>\s*)(?P<tag>.*?)::)?(?P<content>.*)"
+    r"(?P<indentation>\s*)((?P<bullet>[*\-+])|((?P<counter>\d|[a-zA-Z])(?P<counter_sep>[.)]))) ((?P<checkbox_indentation>\s*)\[(?P<checkbox_value>[ Xx])\])?((?P<tag_indentation>\s*)((?P<tag>.*?)\s::))?(?P<content>.*)"
 )
 IMPLICIT_LINK_RE = re.compile(r"(https?:[^<> ]*[a-zA-Z0-9])")
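
Illustration (not part of the diff; the sample lines are invented): a small, self-contained check of the new LIST_ITEM_RE, copied verbatim from the + line above. A tag is now captured only when whitespace precedes the :: separator, so list text that merely contains :: is no longer misread as a tag.

import re

# Updated pattern, copied from the added line in the hunk above.
LIST_ITEM_RE = re.compile(
    r"(?P<indentation>\s*)((?P<bullet>[*\-+])|((?P<counter>\d|[a-zA-Z])(?P<counter_sep>[.)]))) ((?P<checkbox_indentation>\s*)\[(?P<checkbox_value>[ Xx])\])?((?P<tag_indentation>\s*)((?P<tag>.*?)\s::))?(?P<content>.*)"
)

for line in ("- priority :: high", "- compare std::vector and std::list"):
    m = LIST_ITEM_RE.match(line)
    print(repr(line), "-> tag:", m.group("tag"), "| content:", m.group("content").strip())

# '- priority :: high' -> tag: priority | content: high
# '- compare std::vector and std::list' -> tag: None | content: compare std::vector and std::list

With the previous pattern the second line would have been split into tag "compare std" and content "vector and std::list", which is exactly what requiring a space before :: prevents.
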
@@ -1911,7 +1911,12 @@ def tokenize_contents(contents: str) -> List[TokenItems]:
             continue
         # Possible link close or open of description
-        if char == "]" and len(contents) > i + 1 and in_link:
+        if (
+            char == "]"
+            and len(contents) > i + 1
+            and in_link
+            and contents[i + 1] in "]["
+        ):
             if contents[i + 1] == "]":
                 cut_string()
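
Illustration (a stripped-down, assumption-laden sketch, not the project's tokenizer): the extra condition added above means that, inside a link, a ] only acts as markup when the next character is ] (closing the link) or [ (opening the description); a lone ] in the middle of a description stays ordinary text.

def bracket_markup_positions(contents: str) -> list:
    """Return indices of ']' characters that the guard would treat as markup."""
    in_link = False
    hits = []
    for i, char in enumerate(contents):
        # Simplified link opening on "[["; the real tokenizer tracks more state.
        if char == "[" and contents[i + 1 : i + 2] == "[" and not in_link:
            in_link = True
        if (
            char == "]"
            and len(contents) > i + 1
            and in_link
            and contents[i + 1] in "]["
        ):
            hits.append(i)
            if contents[i + 1] == "]":
                in_link = False
    return hits

# The lone ']' after "with" is ignored; only the "][" and "]]" positions count.
print(bracket_markup_positions("[[id:abc][desc with ] inside]]"))  # [8, 28]
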
@@ -1962,6 +1967,7 @@ def tokenize_contents(contents: str) -> List[TokenItems]:
                 cut_string()
                 tokens.append((TOKEN_TYPE_CLOSE_MARKER, char))
                 has_changed = True
+                closes.remove(i)
         if not has_changed:
             text.append(char)
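
Illustration (a toy, hypothetical pairing routine, not the real tokenizer): the added closes.remove(i) drops a position from the list of candidate closers once it has emitted a close marker, so a spent index cannot be picked up again as the closer of a later format.

def pair_emphasis(text: str, marker: str = "*") -> list:
    # Candidate positions at which `marker` could close a format.
    closes = [i for i, c in enumerate(text) if c == marker]
    pairs = []
    open_pos = None
    for i, c in enumerate(text):
        if c != marker:
            continue
        if open_pos is None:
            open_pos = i
        elif i in closes:
            pairs.append((open_pos, i))
            closes.remove(i)  # consume the closer, mirroring the diff above
            open_pos = None
    return pairs

print(pair_emphasis("*bold* and *more*"))  # [(0, 5), (11, 16)]
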