forked from kenkeiras/org-rw
Compare commits: d4b0d0301f ... 6710775882
10 Commits
6710775882
8280949f23
691ce30a68
48de06abc7
d4b40e404d
5432c23202
8fe3c27595
1dc6eb0b43
5019b44dd5
78bc57e55d
org_rw/org_rw.py
@@ -113,7 +113,7 @@ BEGIN_BLOCK_RE = re.compile(r"^\s*#\+BEGIN_(?P<subtype>[^ ]+)(?P<arguments>.*)$"
 END_BLOCK_RE = re.compile(r"^\s*#\+END_(?P<subtype>[^ ]+)\s*$", re.I)
 RESULTS_DRAWER_RE = re.compile(r"^\s*:results:\s*$", re.I)
 CodeSnippet = collections.namedtuple(
-    "CodeSnippet", ("name", "content", "result", "arguments")
+    "CodeSnippet", ("name", "content", "result", "language", "arguments")
 )

 # Groupings
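This change adds a language field to the CodeSnippet tuple, alongside name, content, result and arguments. A minimal sketch of how the new field is meant to surface to callers (the org text and variable names are illustrative, assuming loads() is importable from the org_rw package as in the tests further down):

import org_rw

doc = org_rw.loads(
    "* Example\n"
    "#+NAME: greet\n"
    "#+BEGIN_SRC shell :results verbatim\n"
    "echo hi\n"
    "#+END_SRC"
)
snippet = next(doc.get_code_snippets())
# The language is now split out of the block's header arguments.
print(snippet.name, snippet.language, snippet.arguments)
# e.g. -> greet shell :results verbatim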
@@ -337,7 +337,7 @@ class Headline:
         self.priority = priority
         self.title_start = title_start
         self.title = parse_content_block([RawLine(linenum=start_line, line=title)])
-        self.state = state
+        self._state = state
         self.tags_start = tags_start
         self.shallow_tags = tags
         self.contents = contents
@@ -415,7 +415,6 @@ class Headline:
                 isinstance(line, DelimiterLine)
                 and line.delimiter_type == DelimiterLineType.END_BLOCK
             ):
-
                 start = current_node.header.linenum
                 end = line.linenum

@@ -726,6 +725,42 @@ class Headline:
     def id(self, value):
         self.set_property("ID", value)

+    @property
+    def state(self) -> HeadlineState:
+        return self._state
+
+    @state.setter
+    def state(self, new_state: Union[None, str, HeadlineState]) -> None:
+        """
+        Update the state of a Headline. If the state is a known one, its TODO/DONE properties are updated too.
+
+        Args:
+            new_state (str|HeadlineState): New state, either its literal value or its structure.
+        """
+        if new_state is None:
+            self.is_todo = False
+            self.is_done = False
+            # TODO: Check & log if appropriate?
+            self._state = None
+            return
+
+        if isinstance(new_state, str):
+            new_state = HeadlineState(name=new_state)
+
+        state_name = new_state["name"]
+        if state_name in [kw["name"] for kw in self.doc.todo_keywords]:
+            self.is_todo = True
+            self.is_done = False
+            # TODO: Check & log if appropriate?
+        elif state_name in [kw["name"] for kw in self.doc.done_keywords]:
+            self.is_todo = False
+            self.is_done = True
+            # TODO: Check & log if appropriate?
+        else:
+            # TODO: Should we raise a warning, raise an exception, or update is_todo/is_done?
+            pass
+        self._state = new_state
+
     @property
     def clock(self):
         times = []
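The new state property/setter pair accepts None, a plain string, or a HeadlineState mapping, and keeps is_todo/is_done in sync with the document's TODO/DONE keywords. A short usage sketch, mirroring the tests added at the end of this diff (assuming loads and dumps are importable from org_rw as in the test module):

from org_rw import dumps, loads

doc = loads("* TODO Write the docs")
headline = doc.headlines[0]

headline.state = "DONE"  # plain-string form; org_rw.HeadlineState(name="DONE") also works
assert headline.is_done and not headline.is_todo

headline.state = None  # clears the keyword on serialization
assert dumps(doc) == "* Write the docs"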
@@ -754,15 +789,15 @@ class Headline:
     @property
     def tags(self) -> list[str]:
         parent_tags = self.parent.tags
-        if self.doc.environment.get('org-use-tag-inheritance'):
+        if self.doc.environment.get("org-use-tag-inheritance"):
             accepted_tags = []
-            for tag in self.doc.environment.get('org-use-tag-inheritance'):
+            for tag in self.doc.environment.get("org-use-tag-inheritance"):
                 if tag in parent_tags:
                     accepted_tags.append(tag)
             parent_tags = accepted_tags

-        elif self.doc.environment.get('org-tags-exclude-from-inheritance'):
-            for tag in self.doc.environment.get('org-tags-exclude-from-inheritance'):
+        elif self.doc.environment.get("org-tags-exclude-from-inheritance"):
+            for tag in self.doc.environment.get("org-tags-exclude-from-inheritance"):
                 if tag in parent_tags:
                     parent_tags.remove(tag)
         return list(self.shallow_tags) + parent_tags
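As the property above shows, tag inheritance is driven by the document environment: org-use-tag-inheritance whitelists which inherited tags are kept, while org-tags-exclude-from-inheritance drops specific ones. A small sketch using the two-argument loads(text, environment) form seen in the tests below (the org snippet itself is illustrative):

from org_rw import loads

doc = loads(
    "* Parent :parenttag:secret:\n"
    "** Child :childtag:",
    {"org-tags-exclude-from-inheritance": ("secret",)},
)
child = doc.getTopHeadlines()[0].children[0]
assert sorted(child.tags) == ["childtag", "parenttag"]  # "secret" is not inherited
assert sorted(child.shallow_tags) == ["childtag"]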
@@ -779,7 +814,6 @@ class Headline:

     def set_property(self, name: str, value: str):
         for prop in self.properties:
-
             # A matching property is found, update it
             if prop.key == name:
                 prop.value = value
@@ -882,6 +916,12 @@ class Headline:
         sections = []
         arguments = None

+        names_by_line = {}
+        for kw in self.keywords:
+            if kw.key == "NAME":
+                names_by_line[kw.linenum] = kw.value
+
+        name = None
         for delimiter in self.delimiters:
             if (
                 delimiter.delimiter_type == DelimiterLineType.BEGIN_BLOCK
@@ -890,6 +930,12 @@ class Headline:
                 line_start = delimiter.linenum
                 inside_code = True
                 arguments = delimiter.arguments
+
+                name_line = line_start - 1
+                if name_line in names_by_line:
+                    name = names_by_line[name_line]
+                else:
+                    name = None
             elif (
                 delimiter.delimiter_type == DelimiterLineType.END_BLOCK
                 and delimiter.type_data.subtype.lower() == "src"
@@ -904,14 +950,26 @@ class Headline:
                 # the content parsing must be re-thinked
                 contents = contents[:-1]

+                language = None
+                if arguments is not None:
+                    arguments = arguments.strip()
+                    if " " in arguments:
+                        language = arguments[: arguments.index(" ")]
+                        arguments = arguments[arguments.index(" ") + 1 :]
+                    else:
+                        language = arguments
+                        arguments = None
                 sections.append(
                     {
                         "line_first": start + 1,
                         "line_last": end - 1,
                         "content": contents,
                         "arguments": arguments,
+                        "language": language,
+                        "name": name,
                     }
                 )
+                name = None
                 arguments = None
                 line_start = None

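The header of a #+BEGIN_SRC line is now split so that its first word becomes the snippet's language and the remainder stays in arguments. A rough, self-contained illustration of that split (split_src_header is a hypothetical helper written only for this note, not part of the module):

def split_src_header(arguments):
    # " shell :results verbatim" -> ("shell", ":results verbatim")
    if arguments is None:
        return None, None
    arguments = arguments.strip()
    if " " in arguments:
        return arguments[: arguments.index(" ")], arguments[arguments.index(" ") + 1 :]
    return arguments, None

assert split_src_header(" shell :results verbatim") == ("shell", ":results verbatim")
assert split_src_header("python") == ("python", None)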
@@ -940,7 +998,6 @@ class Headline:
                     and result_first[0] == "structural"
                     and result_first[1].strip().upper() == ":RESULTS:"
                 ):
-
                     (end_line, _) = self.get_structural_end_after(
                         kword.linenum + 1
                     )
@@ -960,13 +1017,18 @@ class Headline:

         results = []
         for section in sections:
-            name = None
             content = section["content"]
             code_result = section.get("result", None)
             arguments = section.get("arguments", None)
+            language = section.get("language", None)
+            name = section.get("name", None)
             results.append(
                 CodeSnippet(
-                    name=name, content=content, result=code_result, arguments=arguments
+                    content=content,
+                    result=code_result,
+                    arguments=arguments,
+                    language=language,
+                    name=name,
                 )
             )

@@ -1730,7 +1792,7 @@ def token_list_to_plaintext(tok_list) -> str:
         else:
             assert isinstance(chunk, MarkerToken)

-    return "".join(contents)
+    return "".join(contents).strip()


 def token_list_to_raw(tok_list):
@@ -1952,7 +2014,6 @@ def tokenize_contents(contents: str) -> List[TokenItems]:
             and is_pre(last_char)
             and ((i + 1 < len(contents)) and is_border(contents[i + 1]))
         ):
-
             is_valid_mark = False
             # Check that is closed later
             text_in_line = True
@@ -2294,7 +2355,7 @@ class OrgDoc:
     def tags(self) -> list[str]:
         for kw in self.keywords:
             if kw.key == "FILETAGS":
-                return kw.value.strip(':').split(':')
+                return kw.value.strip(":").split(":")
         return []

     @property
@@ -2338,26 +2399,32 @@ class OrgDoc:
             yield hl

     def get_code_snippets(self):
-        for headline in self.headlines:
+        for headline in self.getAllHeadlines():
             yield from headline.get_code_snippets()

     # Writing
     def dump_headline(self, headline, recursive=True):

         tags = ""
         if len(headline.shallow_tags) > 0:
             tags = ":" + ":".join(headline.shallow_tags) + ":"

         state = ""
-        if headline.state:
-            state = headline.state["name"] + " "
+        if headline._state:
+            state = headline._state["name"] + " "

         raw_title = token_list_to_raw(headline.title.contents)
         tags_padding = ""
         if not (raw_title.endswith(" ") or raw_title.endswith("\t")) and tags:
             tags_padding = " "

-        yield "*" * headline.depth + headline.spacing + state + raw_title + tags_padding + tags
+        yield (
+            "*" * headline.depth
+            + headline.spacing
+            + state
+            + raw_title
+            + tags_padding
+            + tags
+        )

         planning = headline.get_planning_line()
         if planning is not None:
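Because get_code_snippets() now iterates getAllHeadlines() rather than only the top-level headlines, source blocks nested under sub-headlines are picked up too. A minimal sketch of the effect (the document below is illustrative, not taken from the test suite):

from org_rw import loads

doc = loads(
    "* Top\n"
    "** Nested\n"
    "#+BEGIN_SRC python\n"
    "print('hello')\n"
    "#+END_SRC"
)
# The block lives under a second-level headline, which the old loop never reached.
languages = [snippet.language for snippet in doc.get_code_snippets()]
assert languages == ["python"]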
@@ -9,6 +9,7 @@
 :CREATED: [2020-01-01 Wed 01:01]
 :END:

+#+NAME: first-code-name
 #+BEGIN_SRC shell :results verbatim
 echo "This is a test"
 echo "with two lines"
@@ -480,20 +480,22 @@ class TestSerde(unittest.TestCase):

         snippets = list(doc.get_code_snippets())
         self.assertEqual(len(snippets), 3)
+        self.assertEqual(snippets[0].name, "first-code-name")
+        self.assertEqual(snippets[0].language, "shell")
         self.assertEqual(
             snippets[0].content,
             'echo "This is a test"\n'
             + 'echo "with two lines"\n'
             + "exit 0 # Exit successfully",
         )
-        self.assertEqual(
-            snippets[0].arguments.split(), ["shell", ":results", "verbatim"]
-        )
+        self.assertEqual(snippets[0].arguments.split(), [":results", "verbatim"])
         self.assertEqual(
             snippets[0].result,
             "This is a test\n" + "with two lines",
         )

+        self.assertEqual(snippets[1].name, None)
+        self.assertEqual(snippets[1].language, "shell")
         self.assertEqual(
             snippets[1].content,
             'echo "This is another test"\n'
@@ -504,6 +506,8 @@ class TestSerde(unittest.TestCase):
             snippets[1].result, "This is another test\n" + "with two lines too"
         )

+        self.assertEqual(snippets[2].name, None)
+        self.assertEqual(snippets[2].language, "c")
         self.assertEqual(
             snippets[2].content,
             "/* This code has to be escaped to\n"
@@ -877,73 +881,149 @@ class TestSerde(unittest.TestCase):
             orig = f.read()
         doc = loads(orig)

-        self.assertEqual(doc.tags, ['filetag'])
+        self.assertEqual(doc.tags, ["filetag"])

         h1_1, h1_2 = doc.getTopHeadlines()
-        self.assertEqual(sorted(h1_1.tags), ['filetag', 'h1tag'])
-        self.assertEqual(sorted(h1_2.tags), ['filetag', 'otherh1tag'])
+        self.assertEqual(sorted(h1_1.tags), ["filetag", "h1tag"])
+        self.assertEqual(sorted(h1_2.tags), ["filetag", "otherh1tag"])

         h1_1_h2 = h1_1.children[0]
-        self.assertEqual(sorted(h1_1_h2.tags), ['filetag', 'h1tag', 'h2tag'])
+        self.assertEqual(sorted(h1_1_h2.tags), ["filetag", "h1tag", "h2tag"])

         h1_2_h2 = h1_2.children[0]
-        self.assertEqual(sorted(h1_2_h2.tags), ['filetag', 'otherh1tag', 'otherh2tag'])
+        self.assertEqual(sorted(h1_2_h2.tags), ["filetag", "otherh1tag", "otherh2tag"])

     def test_shallow_tag_property_read_13(self):
         with open(os.path.join(DIR, "13-tags.org")) as f:
             orig = f.read()
         doc = loads(orig)

-        self.assertEqual(doc.shallow_tags, ['filetag'])
+        self.assertEqual(doc.shallow_tags, ["filetag"])

         h1_1, h1_2 = doc.getTopHeadlines()
-        self.assertEqual(sorted(h1_1.shallow_tags), ['h1tag'])
-        self.assertEqual(sorted(h1_2.shallow_tags), ['otherh1tag'])
+        self.assertEqual(sorted(h1_1.shallow_tags), ["h1tag"])
+        self.assertEqual(sorted(h1_2.shallow_tags), ["otherh1tag"])

         h1_1_h2 = h1_1.children[0]
-        self.assertEqual(sorted(h1_1_h2.shallow_tags), ['h2tag'])
+        self.assertEqual(sorted(h1_1_h2.shallow_tags), ["h2tag"])

         h1_2_h2 = h1_2.children[0]
-        self.assertEqual(sorted(h1_2_h2.shallow_tags), ['otherh2tag'])
+        self.assertEqual(sorted(h1_2_h2.shallow_tags), ["otherh2tag"])

     def test_exclude_tags_from_inheritance_property_read_13(self):
         with open(os.path.join(DIR, "13-tags.org")) as f:
             orig = f.read()
-        doc = loads(orig, {
-            'org-tags-exclude-from-inheritance': ('h1tag', 'otherh2tag'),
-        })
+        doc = loads(
+            orig,
+            {
+                "org-tags-exclude-from-inheritance": ("h1tag", "otherh2tag"),
+            },
+        )

-        self.assertEqual(doc.tags, ['filetag'])
+        self.assertEqual(doc.tags, ["filetag"])

         h1_1, h1_2 = doc.getTopHeadlines()
-        self.assertEqual(sorted(h1_1.tags), ['filetag', 'h1tag'])
-        self.assertEqual(sorted(h1_2.tags), ['filetag', 'otherh1tag'])
+        self.assertEqual(sorted(h1_1.tags), ["filetag", "h1tag"])
+        self.assertEqual(sorted(h1_2.tags), ["filetag", "otherh1tag"])

         h1_1_h2 = h1_1.children[0]
-        self.assertEqual(sorted(h1_1_h2.tags), ['filetag', 'h2tag'])
+        self.assertEqual(sorted(h1_1_h2.tags), ["filetag", "h2tag"])

         h1_2_h2 = h1_2.children[0]
-        self.assertEqual(sorted(h1_2_h2.tags), ['filetag', 'otherh1tag', 'otherh2tag'])
+        self.assertEqual(sorted(h1_2_h2.tags), ["filetag", "otherh1tag", "otherh2tag"])

     def test_select_tags_to_inheritance_property_read_13(self):
         with open(os.path.join(DIR, "13-tags.org")) as f:
             orig = f.read()
-        doc = loads(orig, {
-            'org-tags-exclude-from-inheritance': ('h1tag', 'otherh2tag'),
-            'org-use-tag-inheritance': ('h1tag',),
-        })
+        doc = loads(
+            orig,
+            {
+                "org-tags-exclude-from-inheritance": ("h1tag", "otherh2tag"),
+                "org-use-tag-inheritance": ("h1tag",),
+            },
+        )

-        self.assertEqual(doc.tags, ['filetag'])
+        self.assertEqual(doc.tags, ["filetag"])

         h1_1, h1_2 = doc.getTopHeadlines()
-        self.assertEqual(sorted(h1_1.tags), ['h1tag'])
-        self.assertEqual(sorted(h1_2.tags), ['otherh1tag'])
+        self.assertEqual(sorted(h1_1.tags), ["h1tag"])
+        self.assertEqual(sorted(h1_2.tags), ["otherh1tag"])

         h1_1_h2 = h1_1.children[0]
-        self.assertEqual(sorted(h1_1_h2.tags), ['h1tag', 'h2tag'])
+        self.assertEqual(sorted(h1_1_h2.tags), ["h1tag", "h2tag"])

         h1_2_h2 = h1_2.children[0]
-        self.assertEqual(sorted(h1_2_h2.tags), ['otherh2tag'])
+        self.assertEqual(sorted(h1_2_h2.tags), ["otherh2tag"])

+    def test_update_headline_from_none_to_todo(self):
+        orig = "* First entry"
+        doc = loads(orig)
+        self.assertEqual(doc.headlines[0].is_todo, False)
+        self.assertEqual(doc.headlines[0].is_done, False)
+        self.assertEqual(doc.headlines[0].state, None)
+
+        doc.headlines[0].state = "TODO"
+        self.assertEqual(doc.headlines[0].is_todo, True)
+        self.assertEqual(doc.headlines[0].is_done, False)
+        self.assertEqual(doc.headlines[0].state["name"], "TODO")
+
+        self.assertEqual(dumps(doc), "* TODO First entry")
+
+    def test_update_headline_from_none_to_done(self):
+        orig = "* First entry"
+        doc = loads(orig)
+        self.assertEqual(doc.headlines[0].is_todo, False)
+        self.assertEqual(doc.headlines[0].is_done, False)
+        self.assertEqual(doc.headlines[0].state, None)
+
+        doc.headlines[0].state = org_rw.HeadlineState(name="DONE")
+        self.assertEqual(doc.headlines[0].is_todo, False)
+        self.assertEqual(doc.headlines[0].is_done, True)
+        self.assertEqual(doc.headlines[0].state["name"], "DONE")
+
+        self.assertEqual(dumps(doc), "* DONE First entry")
+
+    def test_update_headline_from_todo_to_none(self):
+        orig = "* TODO First entry"
+        doc = loads(orig)
+        self.assertEqual(doc.headlines[0].is_todo, True)
+        self.assertEqual(doc.headlines[0].is_done, False)
+        self.assertEqual(doc.headlines[0].state["name"], "TODO")
+
+        doc.headlines[0].state = None
+        self.assertEqual(doc.headlines[0].is_todo, False)
+        self.assertEqual(doc.headlines[0].is_done, False)
+        self.assertEqual(doc.headlines[0].state, None)
+
+        self.assertEqual(dumps(doc), "* First entry")
+
+    def test_update_headline_from_todo_to_done(self):
+        orig = "* TODO First entry"
+        doc = loads(orig)
+        self.assertEqual(doc.headlines[0].is_todo, True)
+        self.assertEqual(doc.headlines[0].is_done, False)
+        self.assertEqual(doc.headlines[0].state["name"], "TODO")
+
+        doc.headlines[0].state = "DONE"
+        self.assertEqual(doc.headlines[0].is_todo, False)
+        self.assertEqual(doc.headlines[0].is_done, True)
+        self.assertEqual(doc.headlines[0].state["name"], "DONE")
+        self.assertEqual(dumps(doc), "* DONE First entry")
+
+    def test_update_headline_from_done_to_todo(self):
+        orig = "* DONE First entry"
+        doc = loads(orig)
+        self.assertEqual(doc.headlines[0].is_todo, False)
+        self.assertEqual(doc.headlines[0].is_done, True)
+        self.assertEqual(doc.headlines[0].state["name"], "DONE")
+
+        doc.headlines[0].state = org_rw.HeadlineState(name="TODO")
+        self.assertEqual(doc.headlines[0].is_todo, True)
+        self.assertEqual(doc.headlines[0].is_done, False)
+        self.assertEqual(doc.headlines[0].state["name"], "TODO")
+
+        self.assertEqual(dumps(doc), "* TODO First entry")
+

 def print_tree(tree, indentation=0, headline=None):
     for element in tree: