Merge branch 'dev/centered-graph-drawing' into develop

commit f81673d76a

scripts/gen_centered_graph.py (new file, 154 lines)
@@ -0,0 +1,154 @@
import subprocess
import ops_cache
import copy
import tempfile
import os


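# Renders an SVG graph centered on a single headline: the headline's own
# containment tree is drawn as nested Graphviz clusters, and nodes that link to
# it (or are linked from it) are attached around it. Results are memoized
# through ops_cache, so unchanged graphs are not re-rendered.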
@ops_cache.cache
def gen(headline_id, graph, doc_to_headline_remapping):
    reference_node = headline_id

    linked_from_internal = set()
    g = copy.deepcopy(graph)

    if 'id:' + reference_node in doc_to_headline_remapping:
        reference_node = doc_to_headline_remapping['id:' + reference_node].split(':', 1)[1]

    centered_graph = { reference_node: g[reference_node] }
    for l in g[reference_node]['links']:
        lt = l['target']
        if lt.startswith("id:"):
            lt = lt[3:]
        linked_from_internal.add(lt)
    del g[reference_node]
    new_nodes = True

    in_emacs_tree = {
        reference_node: set(),
    }

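    # Grow the centered graph: keep absorbing nodes that link to a node already
    # in it through an 'in' relation, recording the containment edges in
    # `in_emacs_tree` until no new nodes are pulled in.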
    while new_nodes:
        new_nodes = False
        removed = set()
        for k, v in g.items():
            if 'id:' + k in doc_to_headline_remapping:
                k = doc_to_headline_remapping['id:' + k].split(':', 1)[1]

            for link in v["links"]:
                if link["target"].startswith("id:"):
                    link["target"] = link["target"][3:]
                if link['target'] in centered_graph and link.get('relation') == 'in':
                    centered_graph[k] = v

                    for l in v["links"]:
                        if l.get('relation') == 'in':
                            t = l['target']
                            if t.startswith("id:"):
                                t = t[3:]

                            if '[' in t:
                                # Special case, to be handled on org_rw
                                continue

                            if t not in in_emacs_tree:
                                in_emacs_tree[t] = set()
                            in_emacs_tree[t].add(k)

                    v['links'] = [
                        l for l in v["links"]
                        if l.get('relation') != 'in'
                    ]
                    for l in v['links']:
                        lt = l['target']
                        if lt.startswith("id:"):
                            lt = lt[3:]
                        linked_from_internal.add(lt)

                    removed.add(k)
                    new_nodes = True
                    break
        for k in removed:
            del g[k]

    in_emacs = set(centered_graph.keys())

    # One more round for the rest, not requiring "in"
    for k, v in g.items():
        if 'id:' + k in doc_to_headline_remapping:
            k = doc_to_headline_remapping['id:' + k].split(':', 1)[1]

        backlinked = False
        for link in v["links"]:
            if link["target"].startswith("id:"):
                link["target"] = link["target"][3:]
            if link['target'] in in_emacs:
                centered_graph[k] = v
                backlinked = True
                removed.add(k)
        if not backlinked and (k in linked_from_internal):
            centered_graph[k] = v
            removed.add(k)

    g = centered_graph

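    # Emit the reduced graph as a Graphviz `.dot` file, lay it out with `fdp`,
    # and return the resulting SVG markup.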
    with tempfile.NamedTemporaryFile(suffix='.dot', mode='wt') as f:
        f.write('strict digraph {\n')
        f.write('maxiter=1000\n')
        f.write('splines=curved\n')
        # f.write('splines=spline\n') # Not supported with edges to cluster
        f.write('node[shape=rect]\n')

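        # Headlines that have children in `in_emacs_tree` are drawn as nested
        # clusters (their children sit inside them); leaves become plain rect
        # nodes linking to their generated pages.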
        def draw_subgraph(node_id):
            f.write("subgraph cluster_{} {{\n".format(node_id.replace("-", "_")))
            f.write(' URL="./{}.node.html"\n'.format(node_id))

            f.write(" label=\"{}\"\n".format(g[node_id]['title'].replace("\"", "'")))
            f.write("\n")

            # print("T: {}".format(in_emacs_tree), file=sys.stderr)
            for k in in_emacs_tree[node_id]:
                v = g[k]

                if k in in_emacs_tree:
                    draw_subgraph(k)
                else:
                    print(" _" + k.replace("-", "_") + "[label=\"" + v["title"].replace("\"", "'") + "\", URL=\"" + k + ".node.html\"];", file=f)

            f.write("\n}\n")

        draw_subgraph(reference_node)

        for k, v in g.items():
            if k not in in_emacs:
                print("_" + k.replace("-", "_") + "[label=\"" + v["title"].replace("\"", "'") + "\", URL=\"" + k + ".node.html\"];", file=f)

        for k, v in g.items():
            link_src = '_' + k.replace("-", "_")
            if k in in_emacs_tree:
                link_src = 'cluster_{}'.format(k.replace("-", "_"))

            for link in v["links"]:
                if link["target"].startswith("id:"):
                    link["target"] = link["target"][3:]

                if '[' in link['target']:
                    # Special case, to be handled on org_rw
                    continue
                if link['target'] not in g:
                    # Irrelevant
                    continue
                if link['target'] in in_emacs_tree:
                    t = 'cluster_{}'.format(link['target'].replace("-", "_"))
                else:
                    t = "_" + link["target"].replace("-", "_")
                print(link_src + "->" + t, file=f)

        f.write('}\n')
        f.flush()

        with tempfile.NamedTemporaryFile(suffix='.svg') as fsvg:
            subprocess.call(['fdp', f.name, '-Tsvg', '-o', fsvg.name])
            fsvg.seek(0)
            return fsvg.read().decode()
@@ -25,6 +25,7 @@ from org_rw import token_list_to_raw
import pygments
import pygments.lexers
import pygments.formatters
import gen_centered_graph

# Set custom states
for state in ("NEXT", "MEETING", "Q", "PAUSED", "SOMETIME", "TRACK", "WAITING"):
@@ -288,34 +289,6 @@ def regen_all(src_top, dest_top, *, docs=None, db=None):
                backlink_graph[main_headline_id] = set()
            backlink_graph[main_headline_id].add(backlink)

    # Render docs after we've built the graph
    # Render main headlines
    full_graph_info = { "nodes": graph, "backlinks": backlink_graph, "main_headlines": main_headlines_by_path }
    for _docpath, main_headline in main_headlines_by_path.items():
        if main_headline.doc.id:
            endpath = os.path.join(dest_top, main_headline.doc.id + ".node.html")
            with open(endpath, "wt") as f:
                f.write(render_as_document(main_headline, main_headline.doc, headlineLevel=0, graph=full_graph_info,
                                           title=org_rw.token_list_to_plaintext(main_headline.title.contents)))


    # Render all headlines
    for headline in all_headlines:
        endpath = os.path.join(dest_top, headline.id + ".node.html")

        # Render HTML
        with open(endpath, "wt") as f:
            f.write(render_as_document(headline, headline.doc, headlineLevel=0, graph=full_graph_info,
                                       title=org_rw.token_list_to_plaintext(headline.title.contents)))
        files_generated += 1

        if headline.id == INDEX_ID:
            index_endpath = os.path.join(dest_top, "index.html")
            with open(index_endpath, "wt") as f:
                f.write(render_as_document(headline, headline.doc, headlineLevel=0, graph=full_graph_info,
                                           title=org_rw.token_list_to_plaintext(headline.title.contents)))
            files_generated += 1

    # Output graph files
    graphpath = os.path.join(dest_top, "graph.json")
    graph_explorer_path = os.path.join(dest_top, "graph.html")
@@ -328,6 +301,37 @@ def regen_all(src_top, dest_top, *, docs=None, db=None):
        f.write(source.replace('<!-- REPLACE_THIS_WITH_GRAPH -->',
                               json.dumps(graph)))
    logging.info("Generated {} files".format(files_generated))

    # Render docs after we've built the graph
    # Render main headlines
    full_graph_info = { "nodes": graph, "backlinks": backlink_graph, "main_headlines": main_headlines_by_path }
    for _docpath, main_headline in main_headlines_by_path.items():
        if main_headline.doc.id:
            endpath = os.path.join(dest_top, main_headline.doc.id + ".node.html")
            with open(endpath, "wt") as f:
                f.write(render_as_document(main_headline, main_headline.doc, headlineLevel=0, graph=full_graph_info,
                                           doc_to_headline_remapping=doc_to_headline_remapping,
                                           title=org_rw.token_list_to_plaintext(main_headline.title.contents)))

    # Render all headlines
    for headline in all_headlines:
        endpath = os.path.join(dest_top, headline.id + ".node.html")

        # Render HTML
        with open(endpath, "wt") as f:
            f.write(render_as_document(headline, headline.doc, headlineLevel=0, graph=full_graph_info,
                                       doc_to_headline_remapping=doc_to_headline_remapping,
                                       title=org_rw.token_list_to_plaintext(headline.title.contents)))
        files_generated += 1

        if headline.id == INDEX_ID:
            index_endpath = os.path.join(dest_top, "index.html")
            with open(index_endpath, "wt") as f:
                f.write(render_as_document(headline, headline.doc, headlineLevel=0, graph=full_graph_info,
                                           doc_to_headline_remapping=doc_to_headline_remapping,
                                           title=org_rw.token_list_to_plaintext(headline.title.contents)))
            files_generated += 1

    cur.close()
    db.commit()

@@ -336,6 +340,7 @@ def regen_all(src_top, dest_top, *, docs=None, db=None):
    os.makedirs(attachments_dir, exist_ok=True)
    for base in base_dirs:
        data_dir = os.path.join(src_top, base, 'data')
        logging.info("Copying attachments from: {}".format(data_dir))
        if not os.path.exists(data_dir):
            continue
        for subdir in os.listdir(data_dir):
@@ -651,7 +656,7 @@ def render_inline(tree, f, headline, graph):
    return ''.join(acc)


def render_as_document(headline, doc, headlineLevel, graph, title):
def render_as_document(headline, doc, headlineLevel, graph, title, doc_to_headline_remapping):
    if isinstance(headline.parent, org_rw.Headline):
        topLevelHeadline = headline.parent
        while isinstance(topLevelHeadline.parent, org_rw.Headline):
@@ -674,7 +679,9 @@ def render_as_document(headline, doc, headlineLevel, graph, title):
</html>
"""
    else:
        return as_document(render(headline, doc, graph=graph, headlineLevel=headlineLevel), title, render_toc(doc))
        return as_document(render(headline, doc, graph=graph, headlineLevel=headlineLevel,
                                  doc_to_headline_remapping=doc_to_headline_remapping),
                           title, render_toc(doc))

def render_toc(doc):
    acc = ['<ul class="toc">']
@@ -702,18 +709,15 @@ def render_toc_headline(headline, acc):


def render_connections(headline_id, content, graph):
    if headline_id not in graph['backlinks']:
        return
def render_connections(headline_id, content, graph, doc_to_headline_remapping):
    # if headline_id != 'aa29be89-70e7-4465-91ed-361cf0ce62f2':
    #     return

    content.append("<ul><li class='connections'><span class='tag backlink-explanation'>Linked from</span></li><ul>")
    for backlink in sorted(graph['backlinks'][headline_id], key=lambda x: graph['nodes'][x]['title']):
        link = graph["nodes"][backlink]
        title = link["title"]
        content.append(f"<li><a class='internal backlink' href='./{backlink}.node.html'>{html.escape(title)}</a></li>")
    content.append("</ul></ul></div>")
    logging.info("Generating centered graph for {}".format(headline_id))
    svg = gen_centered_graph.gen(headline_id, graph['nodes'], doc_to_headline_remapping)
    content.append("<div class='connections'>{}</div>".format(svg))


def render(headline, doc, graph, headlineLevel):
def render(headline, doc, graph, headlineLevel, doc_to_headline_remapping):
    try:
        dom = headline.as_dom()
    except:
@@ -722,12 +726,14 @@ def render(headline, doc, graph, headlineLevel):
        print_tree(dom, indentation=2, headline=headline)

    content = []
    if headline.id and headlineLevel == 0:
        render_connections(headline.id, content, graph, doc_to_headline_remapping=doc_to_headline_remapping)

    render_tree(dom, content, headline, graph)
    if headline.id:
        render_connections(headline.id, content, graph)

    for child in headline.children:
        content.append(render(child, doc, headlineLevel=headlineLevel+1, graph=graph))
        content.append(render(child, doc, headlineLevel=headlineLevel+1, graph=graph,
                              doc_to_headline_remapping=doc_to_headline_remapping))

    if headline.state is None:
        state = ""
scripts/ops_cache.py (new file, 75 lines)

@@ -0,0 +1,75 @@
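# A small on-disk memoization helper: results are stored in a sqlite database
# under the XDG cache directory, keyed on the serialized call arguments plus
# the decorated function's bytecode.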
import sqlite3
import json
import logging
from typing import Optional
import xdg
import os
import datetime

CACHE_DB: Optional[sqlite3.Connection] = None
CACHE_PATH = os.path.join(xdg.xdg_cache_home(), 'codigoparallevar', 'ops.sqlite3')

def init_db():
    global CACHE_DB

    os.makedirs(os.path.dirname(CACHE_PATH), exist_ok=True)
    CACHE_DB = sqlite3.connect(CACHE_PATH)

    cur = CACHE_DB.cursor()
    cur.execute('''CREATE TABLE IF NOT EXISTS ops(
        in_val TEXT PRIMARY KEY,
        code TEXT,
        out_val TEXT,
        added_at DateTime
    );
    ''')
    CACHE_DB.commit()
    cur.close()

def query_cache(in_val, code):
    if CACHE_DB is None:
        init_db()
    assert CACHE_DB is not None
    cur = CACHE_DB.cursor()
    cur.execute('''SELECT out_val FROM ops WHERE in_val = ? AND code = ?''', (in_val, code))

    # Should return only one result, right? 🤷
    results = cur.fetchall()
    assert len(results) < 2
    if len(results) == 0:
        return None
    else:
        return results[0][0]

def save_cache(in_val, code, out_val):
    if CACHE_DB is None:
        init_db()
    assert CACHE_DB is not None
    cur = CACHE_DB.cursor()
    cur.execute('''
        INSERT INTO ops(in_val, code, out_val, added_at)
        VALUES (?, ?, ?, ?);''',
        (in_val, code, out_val, datetime.datetime.now()))
    CACHE_DB.commit()
    cur.close()

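# Decorator: memoize `fun` through the sqlite store above. The cache key
# includes the function's (latin-1 decoded) bytecode, so changing the
# implementation invalidates previously cached results. Only suitable for
# functions whose arguments are JSON-serializable and whose result is text.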
def cache(fun):
    fun_code = fun.__code__.co_code.decode('latin-1')
    def wrapped(*kargs, **kwargs):
        in_val = json.dumps({
            'kargs': kargs,
            'kwargs': kwargs,
            'fun_code': fun_code,
        })

        cache_result = query_cache(in_val, fun_code)
        found_in_cache = cache_result is not None
        if not found_in_cache:
            out_val = fun(*kargs, **kwargs)
            save_cache(in_val, fun_code, out_val)
        else:
            out_val = cache_result

        logging.info("{} bytes in, {} bytes out (in_cache: {})".format(len(in_val), len(out_val), found_in_cache))
        return out_val
    return wrapped
@@ -480,6 +480,17 @@ tr.__table-separator {
    border-bottom: 0.5ex solid black;
}

.connections svg {
    max-width: 100%;
    height: auto;
}

.connections svg #graph0 > polygon {
    /* Main box */
    fill: transparent;
    stroke: none;
}

/* Side-to-side */
@media (min-width: 120ex) {
    body:not(.no-toc) {
@@ -617,4 +628,15 @@ tr.__table-separator {
    tr.__table-separator {
        border-bottom: 0.5ex solid #eee;
    }

    .connections svg polygon {
        stroke: white;
        fill: #222;
    }
    .connections svg text {
        fill: white;
    }
    .connections svg path {
        stroke: white;
    }
}