Diffstat (limited to 'sphinx/domains/std.py')
 sphinx/domains/std.py | 52 +++++++++++++++++++++++-------------------------------
 1 file changed, 21 insertions(+), 31 deletions(-)
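
Note on the change below: make_termnodes_from_paragraph_node() and make_term_from_paragraph_node() are replaced by register_term_to_glossary(), which mutates the term node in place (ids, names, index entry), and make_termset_from_termnodes(), which wraps all terms of a glossary entry in a single addnodes.termset node. A minimal, hypothetical sketch of that in-place registration style, using only plain docutils so it runs outside a Sphinx build environment and skipping the env.domaindata['std']['objects'] bookkeeping done by the real helper:

from docutils import nodes

def register_term_in_place(term, seen_ids):
    # illustrative stand-in for register_term_to_glossary(); not Sphinx code
    termtext = term.astext()
    new_id = nodes.make_id('term-' + termtext)
    if new_id in seen_ids:                      # avoid duplicate glossary ids
        new_id = 'term-' + str(len(seen_ids))
    seen_ids.add(new_id)
    term['ids'].append(new_id)                  # mutate the node in place ...
    term['names'].append(new_id)                # ... instead of returning new nodes
    return new_id

term = nodes.term('', 'Builder')
print(register_term_in_place(term, set()))     # prints: term-builder
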
diff --git a/sphinx/domains/std.py b/sphinx/domains/std.py
index 5adf12c07..193f7bd69 100644
--- a/sphinx/domains/std.py
+++ b/sphinx/domains/std.py
@@ -214,7 +214,7 @@ class OptionXRefRole(XRefRole):
         return title, target
 
 
-def make_termnodes_from_paragraph_node(env, node, new_id=None):
+def register_term_to_glossary(env, node, new_id=None):
     gloss_entries = env.temp_data.setdefault('gloss_entries', set())
     objects = env.domaindata['std']['objects']
 
@@ -229,25 +229,18 @@ def make_termnodes_from_paragraph_node(env, node, new_id=None):
     # add an index entry too
     indexnode = addnodes.index()
     indexnode['entries'] = [('single', termtext, new_id, 'main')]
-    new_termnodes = []
-    new_termnodes.append(indexnode)
-    new_termnodes.extend(node.children)
-    new_termnodes.append(addnodes.termsep())
-    for termnode in new_termnodes:
-        termnode.source, termnode.line = node.source, node.line
+    indexnode.source, indexnode.line = node.source, node.line
+    node.append(indexnode)
+    node['ids'].append(new_id)
+    node['names'].append(new_id)
 
-    return new_id, termtext, new_termnodes
-
 
-def make_term_from_paragraph_node(termnodes, ids):
-    # make a single "term" node with all the terms, separated by termsep
-    # nodes (remove the dangling trailing separator)
-    term = nodes.term('', '', *termnodes[:-1])
-    term.source, term.line = termnodes[0].source, termnodes[0].line
-    term.rawsource = term.astext()
-    term['ids'].extend(ids)
-    term['names'].extend(ids)
-    return term
+def make_termset_from_termnodes(termnodes):
+    # make a single "termset" node with all the terms
+    termset = addnodes.termset('', *termnodes)
+    termset.source, termset.line = termnodes[0].source, termnodes[0].line
+    termset.rawsource = termset.astext()
+    return termset
 
 
 class Glossary(Directive):
@@ -330,7 +323,6 @@ class Glossary(Directive):
             termtexts = []
             termnodes = []
             system_messages = []
-            ids = []
             for line, source, lineno in terms:
                 # parse the term with inline markup
                 res = self.state.inline_text(line, lineno)
@@ -338,17 +330,15 @@
 
                 # get a text-only representation of the term and register it
                 # as a cross-reference target
-                tmp = nodes.paragraph('', '', *res[0])
-                tmp.source = source
-                tmp.line = lineno
-                new_id, termtext, new_termnodes = \
-                    make_termnodes_from_paragraph_node(env, tmp)
-                ids.append(new_id)
-                termtexts.append(termtext)
-                termnodes.extend(new_termnodes)
-
-            term = make_term_from_paragraph_node(termnodes, ids)
-            term += system_messages
+                term = nodes.term('', '', *res[0])
+                term.source = source
+                term.line = lineno
+                register_term_to_glossary(env, term)
+                termtexts.append(term.astext())
+                termnodes.append(term)
+
+            termset = make_termset_from_termnodes(termnodes)
+            termset += system_messages
 
             defnode = nodes.definition()
             if definition:
@@ -356,7 +346,7 @@
                                         defnode)
 
             items.append((termtexts,
-                          nodes.definition_list_item('', term, defnode)))
+                          nodes.definition_list_item('', termset, defnode)))
 
         if 'sorted' in self.options:
             items.sort(key=lambda x: