author     Takeshi KOMIYA <i.tkomiya@gmail.com>  2019-06-09 01:27:34 +0900
committer  Takeshi KOMIYA <i.tkomiya@gmail.com>  2019-06-09 01:27:34 +0900
commit     9f470cec6c943a9437dac2c09ea796e78e08156d (patch)
tree       31e26a9be046f8b2e7499a93db1a5ee8dd6d4c3a
parent     5ce5c2c3156c53c1f1b758c38150e48080138b15 (diff)
parent     5cabe8be35ee44ca9a9ddaf5d93e2dd9f62abdc9 (diff)
download   sphinx-git-9f470cec6c943a9437dac2c09ea796e78e08156d.tar.gz

Merge branch '2.0'
-rw-r--r--  CHANGES | 10
-rw-r--r--  sphinx/builders/linkcheck.py | 3
-rw-r--r--  sphinx/domains/python.py | 5
-rw-r--r--  sphinx/environment/collectors/asset.py | 2
-rw-r--r--  sphinx/ext/autosummary/generate.py | 2
-rw-r--r--  sphinx/templates/apidoc/package.rst | 4
-rw-r--r--  sphinx/texinputs/sphinx.sty | 4
-rw-r--r--  sphinx/themes/classic/static/classic.css_t | 5
-rw-r--r--  sphinx/util/__init__.py | 173
-rw-r--r--  sphinx/util/compat.py | 15
-rw-r--r--  sphinx/util/console.py | 37
-rw-r--r--  sphinx/util/docfields.py | 99
-rw-r--r--  sphinx/util/docstrings.py | 11
-rw-r--r--  sphinx/util/docutils.py | 149
-rw-r--r--  sphinx/util/fileutil.py | 14
-rw-r--r--  sphinx/util/i18n.py | 70
-rw-r--r--  sphinx/util/images.py | 24
-rw-r--r--  sphinx/util/inspect.py | 80
-rw-r--r--  sphinx/util/inventory.py | 47
-rw-r--r--  sphinx/util/jsdump.py | 26
-rw-r--r--  sphinx/util/jsonimpl.py | 20
-rw-r--r--  sphinx/util/logging.py | 116
-rw-r--r--  sphinx/util/matching.py | 26
-rw-r--r--  sphinx/util/math.py | 15
-rw-r--r--  sphinx/util/nodes.py | 98
-rw-r--r--  sphinx/util/osutil.py | 72
-rw-r--r--  sphinx/util/parallel.py | 32
-rw-r--r--  sphinx/util/png.py | 6
-rw-r--r--  sphinx/util/pycompat.py | 11
-rw-r--r--  sphinx/util/requests.py | 22
-rw-r--r--  sphinx/util/rst.py | 30
-rw-r--r--  sphinx/util/smartypants.py | 13
-rw-r--r--  sphinx/util/stemmer/__init__.py | 15
-rw-r--r--  sphinx/util/stemmer/porter.py | 48
-rw-r--r--  sphinx/util/tags.py | 34
-rw-r--r--  sphinx/util/template.py | 34
-rw-r--r--  sphinx/util/texescape.py | 14
-rw-r--r--  sphinx/util/typing.py | 3
-rw-r--r--  tests/roots/test-ext-autodoc/target/__init__.py | 52
-rw-r--r--  tests/roots/test-ext-autodoc/target/autoclass_content.py | 35
-rw-r--r--  tests/roots/test-ext-autodoc/target/descriptor.py | 31
-rw-r--r--  tests/roots/test-ext-autodoc/target/docstring_signature.py | 19
-rw-r--r--  tests/roots/test-ext-autodoc/target/inheritance.py | 19
-rw-r--r--  tests/roots/test-ext-autodoc/target/process_docstring.py | 8
-rw-r--r--  tests/roots/test-roles-download/another/dummy.dat | 1
-rw-r--r--  tests/roots/test-roles-download/index.rst | 1
-rw-r--r--  tests/test_autodoc.py | 637
-rw-r--r--  tests/test_build_html.py | 9
-rw-r--r--  tests/test_ext_apidoc.py | 17
-rw-r--r--  tests/test_ext_autodoc_configs.py | 619
-rw-r--r--  tests/test_ext_autodoc_events.py | 81
-rw-r--r--  tests/test_ext_autosummary.py | 18

52 files changed, 1407 insertions(+), 1529 deletions(-)
diff --git a/CHANGES b/CHANGES
index c980568e5..4591926be 100644
--- a/CHANGES
+++ b/CHANGES
@@ -39,6 +39,11 @@ Features added
Bugs fixed
----------
+* py domain: duplicated warning does not point the location of source code
+* #1125: html theme: scrollbar is hard to see on classic theme and macOS
+* #5502: linkcheck: Consider HTTP 503 response as not an error
+* #6439: Make generated download links reproducible
+
Testing
--------
@@ -60,6 +65,11 @@ Features added
Bugs fixed
----------
+* #6442: LaTeX: admonitions of :rst:dir:`note` type can get separated from
+ immediately preceding section title by pagebreak
+* #6448: autodoc: crashed when autodocumenting classes with ``__slots__ = None``
+* #6452: autosummary: crashed when generating document of properties
+
Testing
--------
diff --git a/sphinx/builders/linkcheck.py b/sphinx/builders/linkcheck.py
index 164f1e6b7..8b7c7ba9a 100644
--- a/sphinx/builders/linkcheck.py
+++ b/sphinx/builders/linkcheck.py
@@ -159,6 +159,9 @@ class CheckExternalLinksBuilder(Builder):
if err.response.status_code == 401:
# We'll take "Unauthorized" as working.
return 'working', ' - unauthorized', 0
+ elif err.response.status_code == 503:
+ # We'll take "Service Unavailable" as ignored.
+ return 'ignored', str(err), 0
else:
return 'broken', str(err), 0
except Exception as err:
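
(For reference: a minimal standalone sketch, not the Sphinx implementation, of how the new branch classifies HTTP error codes; the helper name below is invented.)

def classify_http_error(status_code: int) -> str:
    if status_code == 401:
        # "Unauthorized" counts as working: the target exists, it just needs credentials.
        return 'working'
    elif status_code == 503:
        # "Service Unavailable" is treated as transient, so it is ignored rather than broken (#5502).
        return 'ignored'
    else:
        return 'broken'

assert classify_http_error(503) == 'ignored'
assert classify_http_error(404) == 'broken'
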
diff --git a/sphinx/domains/python.py b/sphinx/domains/python.py
index f5bb3000c..e88f6b1db 100644
--- a/sphinx/domains/python.py
+++ b/sphinx/domains/python.py
@@ -328,7 +328,8 @@ class PyObject(ObjectDescription):
self.state.document.note_explicit_target(signode)
domain = cast(PythonDomain, self.env.get_domain('py'))
- domain.note_object(fullname, self.objtype)
+ domain.note_object(fullname, self.objtype,
+ location=(self.env.docname, self.lineno))
indextext = self.get_index_text(modname, name_cls)
if indextext:
@@ -743,7 +744,7 @@ class PyModule(SphinxDirective):
self.options.get('synopsis', ''),
self.options.get('platform', ''),
'deprecated' in self.options)
- domain.note_object(modname, 'module')
+ domain.note_object(modname, 'module', location=(self.env.docname, self.lineno))
targetnode = nodes.target('', '', ids=['module-' + modname],
ismod=True)
diff --git a/sphinx/environment/collectors/asset.py b/sphinx/environment/collectors/asset.py
index 572dea7a4..fed8280c1 100644
--- a/sphinx/environment/collectors/asset.py
+++ b/sphinx/environment/collectors/asset.py
@@ -137,7 +137,7 @@ class DownloadFileCollector(EnvironmentCollector):
logger.warning(__('download file not readable: %s') % filename,
location=node, type='download', subtype='not_readable')
continue
- node['filename'] = app.env.dlfiles.add_file(app.env.docname, filename)
+ node['filename'] = app.env.dlfiles.add_file(app.env.docname, rel_filename)
def setup(app):
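
(A short sketch of why #6439 registers the source-relative path: the download destination is derived from an md5 of the registered name, see DownloadFiles.add_file further down, so hashing an absolute path would make the output depend on the build machine. The example paths are invented.)

import hashlib
from os import path

def download_dest(registered_name: str) -> str:
    # mirrors DownloadFiles.add_file(): digest of the registered name + basename
    digest = hashlib.md5(registered_name.encode()).hexdigest()
    return '%s/%s' % (digest, path.basename(registered_name))

print(download_dest('/home/alice/docs/another/dummy.dat'))  # varies with the checkout location
print(download_dest('another/dummy.dat'))                   # stable across machines
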
diff --git a/sphinx/ext/autosummary/generate.py b/sphinx/ext/autosummary/generate.py
index ba1ec219a..1bfbb0da4 100644
--- a/sphinx/ext/autosummary/generate.py
+++ b/sphinx/ext/autosummary/generate.py
@@ -221,7 +221,7 @@ def generate_autosummary_docs(sources, output_dir=None, suffix='.rst',
get_members(obj, {'attribute', 'property'})
parts = name.split('.')
- if doc.objtype in ('method', 'attribute'):
+ if doc.objtype in ('method', 'attribute', 'property'):
mod_name = '.'.join(parts[:-2])
cls_name = parts[-2]
obj_name = '.'.join(parts[-2:])
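
(An illustration of the name splitting above when autosummary documents a property, the case fixed by #6452; the dotted name is invented.)

name = 'mypackage.MyClass.my_property'
parts = name.split('.')
mod_name = '.'.join(parts[:-2])   # 'mypackage'
cls_name = parts[-2]              # 'MyClass'
obj_name = '.'.join(parts[-2:])   # 'MyClass.my_property'
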
diff --git a/sphinx/templates/apidoc/package.rst b/sphinx/templates/apidoc/package.rst
index 0026af34c..ed9f669ea 100644
--- a/sphinx/templates/apidoc/package.rst
+++ b/sphinx/templates/apidoc/package.rst
@@ -40,8 +40,8 @@ Submodules
{{- [submodule, "module"] | join(" ") | e | heading(2) }}
{% endif %}
{{ automodule(submodule, automodule_options) }}
-{%- endfor %}
-{% endif %}
+{% endfor %}
+{%- endif %}
{% endif %}
{%- if not modulefirst and not is_namespace %}
diff --git a/sphinx/texinputs/sphinx.sty b/sphinx/texinputs/sphinx.sty
index 6ace6a9ce..184b0d820 100644
--- a/sphinx/texinputs/sphinx.sty
+++ b/sphinx/texinputs/sphinx.sty
@@ -6,7 +6,7 @@
%
\NeedsTeXFormat{LaTeX2e}[1995/12/01]
-\ProvidesPackage{sphinx}[2019/01/12 v1.8.4 LaTeX package (Sphinx markup)]
+\ProvidesPackage{sphinx}[2019/06/04 v2.1.1 LaTeX package (Sphinx markup)]
% provides \ltx@ifundefined
% (many packages load ltxcmds: graphicx does for pdftex and lualatex but
@@ -1444,7 +1444,7 @@
% Some are quite plain
% the spx@notice@bordercolor etc are set in the sphinxadmonition environment
\newenvironment{sphinxlightbox}{%
- \par\allowbreak
+ \par
\noindent{\color{spx@notice@bordercolor}%
\rule{\linewidth}{\spx@notice@border}}\par\nobreak
{\parskip\z@skip\noindent}%
diff --git a/sphinx/themes/classic/static/classic.css_t b/sphinx/themes/classic/static/classic.css_t
index 1ee0d2298..a062d6d14 100644
--- a/sphinx/themes/classic/static/classic.css_t
+++ b/sphinx/themes/classic/static/classic.css_t
@@ -13,6 +13,11 @@
/* -- page layout ----------------------------------------------------------- */
+html {
+ /* CSS hack for macOS's scrollbar (see #1125) */
+ background-color: #FFFFFF;
+}
+
body {
font-family: {{ theme_bodyfont }};
font-size: 100%;
diff --git a/sphinx/util/__init__.py b/sphinx/util/__init__.py
index cc2cd9aa1..f31c34e18 100644
--- a/sphinx/util/__init__.py
+++ b/sphinx/util/__init__.py
@@ -24,6 +24,9 @@ from datetime import datetime
from hashlib import md5
from os import path
from time import mktime, strptime
+from typing import (
+ Any, Callable, Dict, IO, Iterable, Iterator, List, Pattern, Set, Tuple, Type
+)
from urllib.parse import urlsplit, urlunsplit, quote_plus, parse_qsl, urlencode
from sphinx.deprecation import RemovedInSphinx40Warning
@@ -31,6 +34,7 @@ from sphinx.errors import PycodeError, SphinxParallelError, ExtensionError
from sphinx.locale import __
from sphinx.util import logging
from sphinx.util.console import strip_colors, colorize, bold, term_width_line # type: ignore
+from sphinx.util.typing import PathMatcher
from sphinx.util import smartypants # noqa
# import other utilities; partly for backwards compatibility, so don't
@@ -43,10 +47,11 @@ from sphinx.util.nodes import ( # noqa
caption_ref_re)
from sphinx.util.matching import patfilter # noqa
+
if False:
# For type annotation
- from typing import Any, Callable, Dict, IO, Iterable, Iterator, List, Pattern, Set, Tuple, Type, Union # NOQA
-
+ from sphinx.application import Sphinx
+ from sphinx.builders import Builder
logger = logging.getLogger(__name__)
@@ -57,21 +62,19 @@ url_re = re.compile(r'(?P<schema>.+)://.*') # type: Pattern
# High-level utility functions.
-def docname_join(basedocname, docname):
- # type: (str, str) -> str
+def docname_join(basedocname: str, docname: str) -> str:
return posixpath.normpath(
posixpath.join('/' + basedocname, '..', docname))[1:]
-def path_stabilize(filepath):
- # type: (str) -> str
+def path_stabilize(filepath: str) -> str:
"normalize path separater and unicode string"
newpath = filepath.replace(os.path.sep, SEP)
return unicodedata.normalize('NFC', newpath)
-def get_matching_files(dirname, exclude_matchers=()):
- # type: (str, Tuple[Callable[[str], bool], ...]) -> Iterable[str]
+def get_matching_files(dirname: str,
+ exclude_matchers: Tuple[PathMatcher, ...] = ()) -> Iterable[str]: # NOQA
"""Get all file names in a directory, recursively.
Exclude files and dirs matching some matcher in *exclude_matchers*.
@@ -97,8 +100,8 @@ def get_matching_files(dirname, exclude_matchers=()):
yield filename
-def get_matching_docs(dirname, suffixes, exclude_matchers=()):
- # type: (str, List[str], Tuple[Callable[[str], bool], ...]) -> Iterable[str] # NOQA
+def get_matching_docs(dirname: str, suffixes: List[str],
+ exclude_matchers: Tuple[PathMatcher, ...] = ()) -> Iterable[str]:
"""Get all file names (without suffixes) matching a suffix in a directory,
recursively.
@@ -120,12 +123,10 @@ class FilenameUniqDict(dict):
interpreted as filenames, and keeps track of a set of docnames they
appear in. Used for images and downloadable files in the environment.
"""
- def __init__(self):
- # type: () -> None
+ def __init__(self) -> None:
self._existing = set() # type: Set[str]
- def add_file(self, docname, newfile):
- # type: (str, str) -> str
+ def add_file(self, docname: str, newfile: str) -> str:
if newfile in self:
self[newfile][0].add(docname)
return self[newfile][1]
@@ -139,26 +140,22 @@ class FilenameUniqDict(dict):
self._existing.add(uniquename)
return uniquename
- def purge_doc(self, docname):
- # type: (str) -> None
+ def purge_doc(self, docname: str) -> None:
for filename, (docs, unique) in list(self.items()):
docs.discard(docname)
if not docs:
del self[filename]
self._existing.discard(unique)
- def merge_other(self, docnames, other):
- # type: (Set[str], Dict[str, Tuple[Set[str], Any]]) -> None
+ def merge_other(self, docnames: Set[str], other: Dict[str, Tuple[Set[str], Any]]) -> None:
for filename, (docs, unique) in other.items():
for doc in docs & set(docnames):
self.add_file(doc, filename)
- def __getstate__(self):
- # type: () -> Set[str]
+ def __getstate__(self) -> Set[str]:
return self._existing
- def __setstate__(self, state):
- # type: (Set[str]) -> None
+ def __setstate__(self, state: Set[str]) -> None:
self._existing = state
@@ -169,8 +166,7 @@ class DownloadFiles(dict):
Hence don't hack this directly.
"""
- def add_file(self, docname, filename):
- # type: (str, str) -> None
+ def add_file(self, docname: str, filename: str) -> None:
if filename not in self:
digest = md5(filename.encode()).hexdigest()
dest = '%s/%s' % (digest, os.path.basename(filename))
@@ -179,15 +175,13 @@ class DownloadFiles(dict):
self[filename][0].add(docname)
return self[filename][1]
- def purge_doc(self, docname):
- # type: (str) -> None
+ def purge_doc(self, docname: str) -> None:
for filename, (docs, dest) in list(self.items()):
docs.discard(docname)
if not docs:
del self[filename]
- def merge_other(self, docnames, other):
- # type: (Set[str], Dict[str, Tuple[Set[str], Any]]) -> None
+ def merge_other(self, docnames: Set[str], other: Dict[str, Tuple[Set[str], Any]]) -> None:
for filename, (docs, dest) in other.items():
for docname in docs & set(docnames):
self.add_file(docname, filename)
@@ -204,8 +198,7 @@ _DEBUG_HEADER = '''\
'''
-def save_traceback(app):
- # type: (Any) -> str
+def save_traceback(app: "Sphinx") -> str:
"""Save the current exception's traceback in a temporary file."""
import sphinx
import jinja2
@@ -240,8 +233,7 @@ def save_traceback(app):
return path
-def get_module_source(modname):
- # type: (str) -> Tuple[str, str]
+def get_module_source(modname: str) -> Tuple[str, str]:
"""Try to find the source code for a module.
Can return ('file', 'filename') in which case the source is in the given
@@ -288,8 +280,7 @@ def get_module_source(modname):
return 'file', filename
-def get_full_modname(modname, attribute):
- # type: (str, str) -> str
+def get_full_modname(modname: str, attribute: str) -> str:
if modname is None:
# Prevents a TypeError: if the last getattr() call will return None
# then it's better to return it directly
@@ -311,19 +302,16 @@ def get_full_modname(modname, attribute):
_coding_re = re.compile(r'coding[:=]\s*([-\w.]+)')
-def detect_encoding(readline):
- # type: (Callable[[], bytes]) -> str
+def detect_encoding(readline: Callable[[], bytes]) -> str:
"""Like tokenize.detect_encoding() from Py3k, but a bit simplified."""
- def read_or_stop():
- # type: () -> bytes
+ def read_or_stop() -> bytes:
try:
return readline()
except StopIteration:
return None
- def get_normal_name(orig_enc):
- # type: (str) -> str
+ def get_normal_name(orig_enc: str) -> str:
"""Imitates get_normal_name in tokenizer.c."""
# Only care about the first 12 characters.
enc = orig_enc[:12].lower().replace('_', '-')
@@ -334,8 +322,7 @@ def detect_encoding(readline):
return 'iso-8859-1'
return orig_enc
- def find_cookie(line):
- # type: (bytes) -> str
+ def find_cookie(line: bytes) -> str:
try:
line_string = line.decode('ascii')
except UnicodeDecodeError:
@@ -368,12 +355,10 @@ def detect_encoding(readline):
class UnicodeDecodeErrorHandler:
"""Custom error handler for open() that warns and replaces."""
- def __init__(self, docname):
- # type: (str) -> None
+ def __init__(self, docname: str) -> None:
self.docname = docname
- def __call__(self, error):
- # type: (UnicodeDecodeError) -> Tuple[Union[str, str], int]
+ def __call__(self, error: UnicodeDecodeError) -> Tuple[str, int]:
linestart = error.object.rfind(b'\n', 0, error.start)
lineend = error.object.find(b'\n', error.start)
if lineend == -1:
@@ -393,26 +378,22 @@ class Tee:
"""
File-like object writing to two streams.
"""
- def __init__(self, stream1, stream2):
- # type: (IO, IO) -> None
+ def __init__(self, stream1: IO, stream2: IO) -> None:
self.stream1 = stream1
self.stream2 = stream2
- def write(self, text):
- # type: (str) -> None
+ def write(self, text: str) -> None:
self.stream1.write(text)
self.stream2.write(text)
- def flush(self):
- # type: () -> None
+ def flush(self) -> None:
if hasattr(self.stream1, 'flush'):
self.stream1.flush()
if hasattr(self.stream2, 'flush'):
self.stream2.flush()
-def parselinenos(spec, total):
- # type: (str, int) -> List[int]
+def parselinenos(spec: str, total: int) -> List[int]:
"""Parse a line number spec (such as "1,2,4-6") and return a list of
wanted line numbers.
"""
@@ -439,8 +420,7 @@ def parselinenos(spec, total):
return items
-def force_decode(string, encoding):
- # type: (str, str) -> str
+def force_decode(string: str, encoding: str) -> str:
"""Forcibly get a unicode string out of a bytestring."""
warnings.warn('force_decode() is deprecated.',
RemovedInSphinx40Warning, stacklevel=2)
@@ -458,26 +438,22 @@ def force_decode(string, encoding):
class attrdict(dict):
- def __init__(self, *args, **kwargs):
+ def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
warnings.warn('The attrdict class is deprecated.',
RemovedInSphinx40Warning, stacklevel=2)
- def __getattr__(self, key):
- # type: (str) -> str
+ def __getattr__(self, key: str) -> str:
return self[key]
- def __setattr__(self, key, val):
- # type: (str, str) -> None
+ def __setattr__(self, key: str, val: str) -> None:
self[key] = val
- def __delattr__(self, key):
- # type: (str) -> None
+ def __delattr__(self, key: str) -> None:
del self[key]
-def rpartition(s, t):
- # type: (str, str) -> Tuple[str, str]
+def rpartition(s: str, t: str) -> Tuple[str, str]:
"""Similar to str.rpartition from 2.5, but doesn't return the separator."""
i = s.rfind(t)
if i != -1:
@@ -485,8 +461,7 @@ def rpartition(s, t):
return '', s
-def split_into(n, type, value):
- # type: (int, str, str) -> List[str]
+def split_into(n: int, type: str, value: str) -> List[str]:
"""Split an index entry into a given number of parts at semicolons."""
parts = [x.strip() for x in value.split(';', n - 1)]
if sum(1 for part in parts if part) < n:
@@ -494,8 +469,7 @@ def split_into(n, type, value):
return parts
-def split_index_msg(type, value):
- # type: (str, str) -> List[str]
+def split_index_msg(type: str, value: str) -> List[str]:
# new entry types must be listed in directives/other.py!
if type == 'single':
try:
@@ -516,8 +490,7 @@ def split_index_msg(type, value):
return result
-def format_exception_cut_frames(x=1):
- # type: (int) -> str
+def format_exception_cut_frames(x: int = 1) -> str:
"""Format an exception with traceback, but only the last x frames."""
typ, val, tb = sys.exc_info()
# res = ['Traceback (most recent call last):\n']
@@ -533,19 +506,16 @@ class PeekableIterator:
An iterator which wraps any iterable and makes it possible to peek to see
what's the next item.
"""
- def __init__(self, iterable):
- # type: (Iterable) -> None
+ def __init__(self, iterable: Iterable) -> None:
self.remaining = deque() # type: deque
self._iterator = iter(iterable)
warnings.warn('PeekableIterator is deprecated.',
RemovedInSphinx40Warning, stacklevel=2)
- def __iter__(self):
- # type: () -> PeekableIterator
+ def __iter__(self) -> "PeekableIterator":
return self
- def __next__(self):
- # type: () -> Any
+ def __next__(self) -> Any:
"""Return the next item from the iterator."""
if self.remaining:
return self.remaining.popleft()
@@ -553,23 +523,20 @@ class PeekableIterator:
next = __next__ # Python 2 compatibility
- def push(self, item):
- # type: (Any) -> None
+ def push(self, item: Any) -> None:
"""Push the `item` on the internal stack, it will be returned on the
next :meth:`next` call.
"""
self.remaining.append(item)
- def peek(self):
- # type: () -> Any
+ def peek(self) -> Any:
"""Return the next item without changing the state of the iterator."""
item = next(self)
self.push(item)
return item
-def import_object(objname, source=None):
- # type: (str, str) -> Any
+def import_object(objname: str, source: str = None) -> Any:
"""Import python object by qualname."""
try:
objpath = objname.split('.')
@@ -592,8 +559,7 @@ def import_object(objname, source=None):
raise ExtensionError('Could not import %s' % objname, exc)
-def encode_uri(uri):
- # type: (str) -> str
+def encode_uri(uri: str) -> str:
split = list(urlsplit(uri))
split[1] = split[1].encode('idna').decode('ascii')
split[2] = quote_plus(split[2].encode(), '/')
@@ -602,8 +568,7 @@ def encode_uri(uri):
return urlunsplit(split)
-def display_chunk(chunk):
- # type: (Any) -> str
+def display_chunk(chunk: Any) -> str:
if isinstance(chunk, (list, tuple)):
if len(chunk) == 1:
return str(chunk[0])
@@ -611,8 +576,8 @@ def display_chunk(chunk):
return str(chunk)
-def old_status_iterator(iterable, summary, color="darkgreen", stringify_func=display_chunk):
- # type: (Iterable, str, str, Callable[[Any], str]) -> Iterator
+def old_status_iterator(iterable: Iterable, summary: str, color: str = "darkgreen",
+ stringify_func: Callable[[Any], str] = display_chunk) -> Iterator:
l = 0
for item in iterable:
if l == 0:
@@ -626,9 +591,9 @@ def old_status_iterator(iterable, summary, color="darkgreen", stringify_func=dis
# new version with progress info
-def status_iterator(iterable, summary, color="darkgreen", length=0, verbosity=0,
- stringify_func=display_chunk):
- # type: (Iterable, str, str, int, int, Callable[[Any], str]) -> Iterable
+def status_iterator(iterable: Iterable, summary: str, color: str = "darkgreen",
+ length: int = 0, verbosity: int = 0,
+ stringify_func: Callable[[Any], str] = display_chunk) -> Iterable:
if length == 0:
yield from old_status_iterator(iterable, summary, color, stringify_func)
return
@@ -652,16 +617,13 @@ class SkipProgressMessage(Exception):
class progress_message:
- def __init__(self, message):
- # type: (str) -> None
+ def __init__(self, message: str) -> None:
self.message = message
- def __enter__(self):
- # type: () -> None
+ def __enter__(self) -> None:
logger.info(bold(self.message + '... '), nonl=True)
- def __exit__(self, exc_type, exc_value, traceback):
- # type: (Type[Exception], Exception, Any) -> bool
+ def __exit__(self, exc_type: Type[Exception], exc_value: Exception, traceback: Any) -> bool: # NOQA
if isinstance(exc_value, SkipProgressMessage):
logger.info(__('skipped'))
if exc_value.args:
@@ -674,8 +636,7 @@ class progress_message:
return False
- def __call__(self, f):
- # type: (Callable) -> Callable
+ def __call__(self, f: Callable) -> Callable:
@functools.wraps(f)
def wrapper(*args, **kwargs):
with self:
@@ -684,8 +645,7 @@ class progress_message:
return wrapper
-def epoch_to_rfc1123(epoch):
- # type: (float) -> str
+def epoch_to_rfc1123(epoch: float) -> str:
"""Convert datetime format epoch to RFC1123."""
from babel.dates import format_datetime
@@ -694,13 +654,11 @@ def epoch_to_rfc1123(epoch):
return format_datetime(dt, fmt, locale='en') + ' GMT'
-def rfc1123_to_epoch(rfc1123):
- # type: (str) -> float
+def rfc1123_to_epoch(rfc1123: str) -> float:
return mktime(strptime(rfc1123, '%a, %d %b %Y %H:%M:%S %Z'))
-def xmlname_checker():
- # type: () -> Pattern
+def xmlname_checker() -> Pattern:
# https://www.w3.org/TR/REC-xml/#NT-Name
name_start_chars = [
':', ['A', 'Z'], '_', ['a', 'z'], ['\u00C0', '\u00D6'],
@@ -714,8 +672,7 @@ def xmlname_checker():
['\u203F', '\u2040']
]
- def convert(entries, splitter='|'):
- # type: (Any, str) -> str
+ def convert(entries: Any, splitter: str = '|') -> str:
results = []
for entry in entries:
if isinstance(entry, list):
diff --git a/sphinx/util/compat.py b/sphinx/util/compat.py
index 805c17e5d..340dd77fa 100644
--- a/sphinx/util/compat.py
+++ b/sphinx/util/compat.py
@@ -10,22 +10,21 @@
import sys
import warnings
+from typing import Any, Dict
from docutils.utils import get_source_line
from sphinx import addnodes
+from sphinx.config import Config
from sphinx.deprecation import RemovedInSphinx40Warning
from sphinx.transforms import SphinxTransform
if False:
# For type annotation
- from typing import Any, Dict # NOQA
- from sphinx.application import Sphinx # NOQA
- from sphinx.config import Config # NOQA
+ from sphinx.application import Sphinx
-def register_application_for_autosummary(app):
- # type: (Sphinx) -> None
+def register_application_for_autosummary(app: "Sphinx") -> None:
"""Register application object to autosummary module.
Since Sphinx-1.7, documenters and attrgetters are registered into
@@ -42,8 +41,7 @@ class IndexEntriesMigrator(SphinxTransform):
"""Migrating indexentries from old style (4columns) to new style (5columns)."""
default_priority = 700
- def apply(self, **kwargs):
- # type: (Any) -> None
+ def apply(self, **kwargs) -> None:
for node in self.document.traverse(addnodes.index):
for i, entries in enumerate(node['entries']):
if len(entries) == 4:
@@ -53,8 +51,7 @@ class IndexEntriesMigrator(SphinxTransform):
node['entries'][i] = entries + (None,)
-def setup(app):
- # type: (Sphinx) -> Dict[str, Any]
+def setup(app: "Sphinx") -> Dict[str, Any]:
app.add_transform(IndexEntriesMigrator)
app.connect('builder-inited', register_application_for_autosummary)
diff --git a/sphinx/util/console.py b/sphinx/util/console.py
index c207d32ac..d73d0563e 100644
--- a/sphinx/util/console.py
+++ b/sphinx/util/console.py
@@ -11,6 +11,7 @@
import os
import re
import sys
+from typing import Dict
try:
# check if colorama is installed to support color on Windows
@@ -18,23 +19,17 @@ try:
except ImportError:
colorama = None
-if False:
- # For type annotation
- from typing import Dict # NOQA
-
_ansi_re = re.compile('\x1b\\[(\\d\\d;){0,2}\\d\\dm')
codes = {} # type: Dict[str, str]
-def terminal_safe(s):
- # type: (str) -> str
+def terminal_safe(s: str) -> str:
"""safely encode a string for printing to the terminal."""
return s.encode('ascii', 'backslashreplace').decode('ascii')
-def get_terminal_width():
- # type: () -> int
+def get_terminal_width() -> int:
"""Borrowed from the py lib."""
try:
import termios
@@ -53,8 +48,7 @@ def get_terminal_width():
_tw = get_terminal_width()
-def term_width_line(text):
- # type: (str) -> str
+def term_width_line(text: str) -> str:
if not codes:
# if no coloring, don't output fancy backspaces
return text + '\n'
@@ -63,8 +57,7 @@ def term_width_line(text):
return text.ljust(_tw + len(text) - len(_ansi_re.sub('', text))) + '\r'
-def color_terminal():
- # type: () -> bool
+def color_terminal() -> bool:
if sys.platform == 'win32' and colorama is not None:
colorama.init()
return True
@@ -80,21 +73,18 @@ def color_terminal():
return False
-def nocolor():
- # type: () -> None
+def nocolor() -> None:
if sys.platform == 'win32' and colorama is not None:
colorama.deinit()
codes.clear()
-def coloron():
- # type: () -> None
+def coloron() -> None:
codes.update(_orig_codes)
-def colorize(name, text, input_mode=False):
- # type: (str, str, bool) -> str
- def escseq(name):
+def colorize(name: str, text: str, input_mode: bool = False) -> str:
+ def escseq(name: str) -> str:
# Wrap escape sequence with ``\1`` and ``\2`` to let readline know
# it is non-printable characters
# ref: https://tiswww.case.edu/php/chet/readline/readline.html
@@ -109,15 +99,12 @@ def colorize(name, text, input_mode=False):
return escseq(name) + text + escseq('reset')
-def strip_colors(s):
- # type: (str) -> str
+def strip_colors(s: str) -> str:
return re.compile('\x1b.*?m').sub('', s)
-def create_color_func(name):
- # type: (str) -> None
- def inner(text):
- # type: (str) -> str
+def create_color_func(name: str) -> None:
+ def inner(text: str) -> str:
return colorize(name, text)
globals()[name] = inner
diff --git a/sphinx/util/docfields.py b/sphinx/util/docfields.py
index 9b19d229d..f3729c0c9 100644
--- a/sphinx/util/docfields.py
+++ b/sphinx/util/docfields.py
@@ -10,23 +10,23 @@
"""
import warnings
-from typing import List, Tuple, cast
+from typing import Any, Dict, List, Tuple, Type, Union
+from typing import cast
from docutils import nodes
+from docutils.nodes import Node
from sphinx import addnodes
from sphinx.deprecation import RemovedInSphinx40Warning
+from sphinx.util.typing import TextlikeNode
if False:
# For type annotation
- from typing import Any, Dict, Type, Union # NOQA
- from sphinx.directive import ObjectDescription # NOQA
- from sphinx.environment import BuildEnvironment # NOQA
- from sphinx.util.typing import TextlikeNode # NOQA
+ from sphinx.environment import BuildEnvironment
+ from sphinx.directive import ObjectDescription
-def _is_single_paragraph(node):
- # type: (nodes.field_body) -> bool
+def _is_single_paragraph(node: nodes.field_body) -> bool:
"""True if the node only contains one paragraph (and system messages)."""
if len(node) == 0:
return False
@@ -55,9 +55,8 @@ class Field:
is_grouped = False
is_typed = False
- def __init__(self, name, names=(), label=None, has_arg=True, rolename=None,
- bodyrolename=None):
- # type: (str, Tuple[str, ...], str, bool, str, str) -> None
+ def __init__(self, name: str, names: Tuple[str, ...] = (), label: str = None,
+ has_arg: bool = True, rolename: str = None, bodyrolename: str = None) -> None:
self.name = name
self.names = names
self.label = label
@@ -65,15 +64,9 @@ class Field:
self.rolename = rolename
self.bodyrolename = bodyrolename
- def make_xref(self,
- rolename, # type: str
- domain, # type: str
- target, # type: str
- innernode=addnodes.literal_emphasis, # type: Type[TextlikeNode]
- contnode=None, # type: nodes.Node
- env=None, # type: BuildEnvironment
- ):
- # type: (...) -> nodes.Node
+ def make_xref(self, rolename: str, domain: str, target: str,
+ innernode: Type[TextlikeNode] = addnodes.literal_emphasis,
+ contnode: Node = None, env: "BuildEnvironment" = None) -> Node:
if not rolename:
return contnode or innernode(target, target)
refnode = addnodes.pending_xref('', refdomain=domain, refexplicit=False,
@@ -83,28 +76,16 @@ class Field:
env.get_domain(domain).process_field_xref(refnode)
return refnode
- def make_xrefs(self,
- rolename, # type: str
- domain, # type: str
- target, # type: str
- innernode=addnodes.literal_emphasis, # type: Type[TextlikeNode]
- contnode=None, # type: nodes.Node
- env=None, # type: BuildEnvironment
- ):
- # type: (...) -> List[nodes.Node]
+ def make_xrefs(self, rolename: str, domain: str, target: str,
+ innernode: Type[TextlikeNode] = addnodes.literal_emphasis,
+ contnode: Node = None, env: "BuildEnvironment" = None) -> List[Node]:
return [self.make_xref(rolename, domain, target, innernode, contnode, env)]
- def make_entry(self, fieldarg, content):
- # type: (str, List[nodes.Node]) -> Tuple[str, List[nodes.Node]]
+ def make_entry(self, fieldarg: str, content: List[Node]) -> Tuple[str, List[Node]]:
return (fieldarg, content)
- def make_field(self,
- types, # type: Dict[str, List[nodes.Node]]
- domain, # type: str
- item, # type: Tuple
- env=None, # type: BuildEnvironment
- ):
- # type: (...) -> nodes.field
+ def make_field(self, types: Dict[str, List[Node]], domain: str,
+ item: Tuple, env: "BuildEnvironment" = None) -> nodes.field:
fieldarg, content = item
fieldname = nodes.field_name('', self.label)
if fieldarg:
@@ -138,19 +119,13 @@ class GroupedField(Field):
is_grouped = True
list_type = nodes.bullet_list
- def __init__(self, name, names=(), label=None, rolename=None,
- can_collapse=False):
- # type: (str, Tuple[str, ...], str, str, bool) -> None
+ def __init__(self, name: str, names: Tuple[str, ...] = (), label: str = None,
+ rolename: str = None, can_collapse: bool = False) -> None:
super().__init__(name, names, label, True, rolename)
self.can_collapse = can_collapse
- def make_field(self,
- types, # type: Dict[str, List[nodes.Node]]
- domain, # type: str
- items, # type: Tuple
- env=None, # type: BuildEnvironment
- ):
- # type: (...) -> nodes.field
+ def make_field(self, types: Dict[str, List[Node]], domain: str,
+ items: Tuple, env: "BuildEnvironment" = None) -> nodes.field:
fieldname = nodes.field_name('', self.label)
listnode = self.list_type()
for fieldarg, content in items:
@@ -191,22 +166,16 @@ class TypedField(GroupedField):
"""
is_typed = True
- def __init__(self, name, names=(), typenames=(), label=None,
- rolename=None, typerolename=None, can_collapse=False):
- # type: (str, Tuple[str, ...], Tuple[str, ...], str, str, str, bool) -> None
+ def __init__(self, name: str, names: Tuple[str, ...] = (), typenames: Tuple[str, ...] = (),
+ label: str = None, rolename: str = None, typerolename: str = None,
+ can_collapse: bool = False) -> None:
super().__init__(name, names, label, rolename, can_collapse)
self.typenames = typenames
self.typerolename = typerolename
- def make_field(self,
- types, # type: Dict[str, List[nodes.Node]]
- domain, # type: str
- items, # type: Tuple
- env=None, # type: BuildEnvironment
- ):
- # type: (...) -> nodes.field
- def handle_item(fieldarg, content):
- # type: (str, str) -> nodes.paragraph
+ def make_field(self, types: Dict[str, List[Node]], domain: str,
+ items: Tuple, env: "BuildEnvironment" = None) -> nodes.field:
+ def handle_item(fieldarg: str, content: str) -> nodes.paragraph:
par = nodes.paragraph()
par.extend(self.make_xrefs(self.rolename, domain, fieldarg,
addnodes.literal_strong, env=env))
@@ -246,13 +215,11 @@ class DocFieldTransformer:
"""
typemap = None # type: Dict[str, Tuple[Field, bool]]
- def __init__(self, directive):
- # type: (ObjectDescription) -> None
+ def __init__(self, directive: "ObjectDescription") -> None:
self.directive = directive
self.typemap = directive.get_field_type_map()
- def preprocess_fieldtypes(self, types):
- # type: (List[Field]) -> Dict[str, Tuple[Field, bool]]
+ def preprocess_fieldtypes(self, types: List[Field]) -> Dict[str, Tuple[Field, bool]]:
warnings.warn('DocFieldTransformer.preprocess_fieldtypes() is deprecated.',
RemovedInSphinx40Warning)
typemap = {}
@@ -265,16 +232,14 @@ class DocFieldTransformer:
typemap[name] = typed_field, True
return typemap
- def transform_all(self, node):
- # type: (addnodes.desc_content) -> None
+ def transform_all(self, node: addnodes.desc_content) -> None:
"""Transform all field list children of a node."""
# don't traverse, only handle field lists that are immediate children
for child in node:
if isinstance(child, nodes.field_list):
self.transform(child)
- def transform(self, node):
- # type: (nodes.field_list) -> None
+ def transform(self, node: nodes.field_list) -> None:
"""Transform a single field list *node*."""
typemap = self.typemap
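
(For context, a hedged sketch of how a domain directive typically uses the TypedField constructor whose signature is annotated above; the specific field configuration is illustrative, not taken from this commit.)

from sphinx.util.docfields import TypedField

param_field = TypedField(
    'parameter', label='Parameters',
    names=('param', 'parameter', 'arg', 'argument'),
    typerolename='class', typenames=('paramtype', 'type'),
    can_collapse=True)
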
diff --git a/sphinx/util/docstrings.py b/sphinx/util/docstrings.py
index 31943b2cb..c2fe17004 100644
--- a/sphinx/util/docstrings.py
+++ b/sphinx/util/docstrings.py
@@ -9,14 +9,10 @@
"""
import sys
+from typing import List
-if False:
- # For type annotation
- from typing import List # NOQA
-
-def prepare_docstring(s, ignore=1, tabsize=8):
- # type: (str, int, int) -> List[str]
+def prepare_docstring(s: str, ignore: int = 1, tabsize: int = 8) -> List[str]:
"""Convert a docstring into lines of parseable reST. Remove common leading
indentation, where the indentation of a given number of lines (usually just
one) is ignored.
@@ -49,8 +45,7 @@ def prepare_docstring(s, ignore=1, tabsize=8):
return lines
-def prepare_commentdoc(s):
- # type: (str) -> List[str]
+def prepare_commentdoc(s: str) -> List[str]:
"""Extract documentation comment lines (starting with #:) and return them
as a list of lines. Returns an empty list if there is no documentation.
"""
diff --git a/sphinx/util/docutils.py b/sphinx/util/docutils.py
index d38c780f4..9f01a9f9f 100644
--- a/sphinx/util/docutils.py
+++ b/sphinx/util/docutils.py
@@ -14,31 +14,31 @@ from contextlib import contextmanager
from copy import copy
from distutils.version import LooseVersion
from os import path
-from typing import IO, cast
+from types import ModuleType
+from typing import Any, Callable, Dict, Generator, IO, List, Set, Tuple, Type
+from typing import cast
import docutils
from docutils import nodes
from docutils.io import FileOutput
+from docutils.nodes import Element, Node, system_message
from docutils.parsers.rst import Directive, directives, roles
-from docutils.statemachine import StateMachine
+from docutils.parsers.rst.states import Inliner
+from docutils.statemachine import StateMachine, State, StringList
from docutils.utils import Reporter, unescape
from sphinx.errors import SphinxError
from sphinx.util import logging
+from sphinx.util.typing import RoleFunction
logger = logging.getLogger(__name__)
report_re = re.compile('^(.+?:(?:\\d+)?): \\((DEBUG|INFO|WARNING|ERROR|SEVERE)/(\\d+)?\\) ')
if False:
# For type annotation
- from types import ModuleType # NOQA
- from typing import Any, Callable, Dict, Generator, List, Set, Tuple, Type # NOQA
- from docutils.parsers.rst.states import Inliner # NOQA
- from docutils.statemachine import State, StringList # NOQA
- from sphinx.builders import Builder # NOQA
- from sphinx.config import Config # NOQA
- from sphinx.environment import BuildEnvironment # NOQA
- from sphinx.util.typing import RoleFunction # NOQA
+ from sphinx.builders import Builder
+ from sphinx.config import Config
+ from sphinx.environment import BuildEnvironment
__version_info__ = tuple(LooseVersion(docutils.__version__).version)
@@ -46,8 +46,7 @@ additional_nodes = set() # type: Set[Type[nodes.Element]]
@contextmanager
-def docutils_namespace():
- # type: () -> Generator[None, None, None]
+def docutils_namespace() -> Generator[None, None, None]:
"""Create namespace for reST parsers."""
try:
_directives = copy(directives._directives) # type: ignore
@@ -63,14 +62,12 @@ def docutils_namespace():
additional_nodes.discard(node)
-def is_directive_registered(name):
- # type: (str) -> bool
+def is_directive_registered(name: str) -> bool:
"""Check the *name* directive is already registered."""
return name in directives._directives # type: ignore
-def register_directive(name, directive):
- # type: (str, Type[Directive]) -> None
+def register_directive(name: str, directive: Type[Directive]) -> None:
"""Register a directive to docutils.
This modifies global state of docutils. So it is better to use this
@@ -79,14 +76,12 @@ def register_directive(name, directive):
directives.register_directive(name, directive)
-def is_role_registered(name):
- # type: (str) -> bool
+def is_role_registered(name: str) -> bool:
"""Check the *name* role is already registered."""
return name in roles._roles # type: ignore
-def register_role(name, role):
- # type: (str, RoleFunction) -> None
+def register_role(name: str, role: RoleFunction) -> None:
"""Register a role to docutils.
This modifies global state of docutils. So it is better to use this
@@ -95,20 +90,17 @@ def register_role(name, role):
roles.register_local_role(name, role)
-def unregister_role(name):
- # type: (str) -> None
+def unregister_role(name: str) -> None:
"""Unregister a role from docutils."""
roles._roles.pop(name, None) # type: ignore
-def is_node_registered(node):
- # type: (Type[nodes.Element]) -> bool
+def is_node_registered(node: Type[Element]) -> bool:
"""Check the *node* is already registered."""
return hasattr(nodes.GenericNodeVisitor, 'visit_' + node.__name__)
-def register_node(node):
- # type: (Type[nodes.Element]) -> None
+def register_node(node: Type[Element]) -> None:
"""Register a node to docutils.
This modifies global state of some visitors. So it is better to use this
@@ -119,8 +111,7 @@ def register_node(node):
additional_nodes.add(node)
-def unregister_node(node):
- # type: (Type[nodes.Element]) -> None
+def unregister_node(node: Type[Element]) -> None:
"""Unregister a node from docutils.
This is inverse of ``nodes._add_nodes_class_names()``.
@@ -133,8 +124,7 @@ def unregister_node(node):
@contextmanager
-def patched_get_language():
- # type: () -> Generator[None, None, None]
+def patched_get_language() -> Generator[None, None, None]:
"""Patch docutils.languages.get_language() temporarily.
This ignores the second argument ``reporter`` to suppress warnings.
@@ -142,8 +132,7 @@ def patched_get_language():
"""
from docutils.languages import get_language
- def patched_get_language(language_code, reporter=None):
- # type: (str, Reporter) -> Any
+ def patched_get_language(language_code: str, reporter: Reporter = None) -> Any:
return get_language(language_code)
try:
@@ -155,8 +144,7 @@ def patched_get_language():
@contextmanager
-def using_user_docutils_conf(confdir):
- # type: (str) -> Generator[None, None, None]
+def using_user_docutils_conf(confdir: str) -> Generator[None, None, None]:
"""Let docutils know the location of ``docutils.conf`` for Sphinx."""
try:
docutilsconfig = os.environ.get('DOCUTILSCONFIG', None)
@@ -172,8 +160,7 @@ def using_user_docutils_conf(confdir):
@contextmanager
-def patch_docutils(confdir=None):
- # type: (str) -> Generator[None, None, None]
+def patch_docutils(confdir: str = None) -> Generator[None, None, None]:
"""Patch to docutils temporarily."""
with patched_get_language(), using_user_docutils_conf(confdir):
yield
@@ -187,35 +174,30 @@ class sphinx_domains:
"""Monkey-patch directive and role dispatch, so that domain-specific
markup takes precedence.
"""
- def __init__(self, env):
- # type: (BuildEnvironment) -> None
+ def __init__(self, env: "BuildEnvironment") -> None:
self.env = env
self.directive_func = None # type: Callable
self.roles_func = None # type: Callable
- def __enter__(self):
- # type: () -> None
+ def __enter__(self) -> None:
self.enable()
- def __exit__(self, type, value, traceback):
- # type: (str, str, str) -> None
+ def __exit__(self, exc_type: Type[Exception], exc_value: Exception, traceback: Any) -> bool: # NOQA
self.disable()
+ return True
- def enable(self):
- # type: () -> None
+ def enable(self) -> None:
self.directive_func = directives.directive
self.role_func = roles.role
directives.directive = self.lookup_directive # type: ignore
roles.role = self.lookup_role # type: ignore
- def disable(self):
- # type: () -> None
+ def disable(self) -> None:
directives.directive = self.directive_func
roles.role = self.role_func
- def lookup_domain_element(self, type, name):
- # type: (str, str) -> Any
+ def lookup_domain_element(self, type: str, name: str) -> Any:
"""Lookup a markup element (directive or role), given its name which can
be a full name (with domain).
"""
@@ -243,15 +225,13 @@ class sphinx_domains:
raise ElementLookupError
- def lookup_directive(self, name, lang_module, document):
- # type: (str, ModuleType, nodes.document) -> Tuple[Type[Directive], List[nodes.system_message]] # NOQA
+ def lookup_directive(self, name: str, lang_module: ModuleType, document: nodes.document) -> Tuple[Type[Directive], List[system_message]]: # NOQA
try:
return self.lookup_domain_element('directive', name)
except ElementLookupError:
return self.directive_func(name, lang_module, document)
- def lookup_role(self, name, lang_module, lineno, reporter):
- # type: (str, ModuleType, int, Reporter) -> Tuple[RoleFunction, List[nodes.system_message]] # NOQA
+ def lookup_role(self, name: str, lang_module: ModuleType, lineno: int, reporter: Reporter) -> Tuple[RoleFunction, List[system_message]]: # NOQA
try:
return self.lookup_domain_element('role', name)
except ElementLookupError:
@@ -259,8 +239,7 @@ class sphinx_domains:
class WarningStream:
- def write(self, text):
- # type: (str) -> None
+ def write(self, text: str) -> None:
matched = report_re.search(text)
if not matched:
logger.warning(text.rstrip("\r\n"))
@@ -272,16 +251,14 @@ class WarningStream:
class LoggingReporter(Reporter):
@classmethod
- def from_reporter(cls, reporter):
- # type: (Reporter) -> LoggingReporter
+ def from_reporter(cls, reporter: Reporter) -> "LoggingReporter":
"""Create an instance of LoggingReporter from other reporter object."""
return cls(reporter.source, reporter.report_level, reporter.halt_level,
reporter.debug_flag, reporter.error_handler)
- def __init__(self, source, report_level=Reporter.WARNING_LEVEL,
- halt_level=Reporter.SEVERE_LEVEL, debug=False,
- error_handler='backslashreplace'):
- # type: (str, int, int, bool, str) -> None
+ def __init__(self, source: str, report_level: int = Reporter.WARNING_LEVEL,
+ halt_level: int = Reporter.SEVERE_LEVEL, debug: bool = False,
+ error_handler: str = 'backslashreplace') -> None:
stream = cast(IO, WarningStream())
super().__init__(source, report_level, halt_level,
stream, debug, error_handler=error_handler)
@@ -290,19 +267,16 @@ class LoggingReporter(Reporter):
class NullReporter(Reporter):
"""A dummy reporter; write nothing."""
- def __init__(self):
- # type: () -> None
+ def __init__(self) -> None:
super().__init__('', 999, 4)
-def is_html5_writer_available():
- # type: () -> bool
+def is_html5_writer_available() -> bool:
return __version_info__ > (0, 13, 0)
@contextmanager
-def switch_source_input(state, content):
- # type: (State, StringList) -> Generator[None, None, None]
+def switch_source_input(state: State, content: StringList) -> Generator[None, None, None]:
"""Switch current source input of state temporarily."""
try:
# remember the original ``get_source_and_line()`` method
@@ -322,13 +296,11 @@ def switch_source_input(state, content):
class SphinxFileOutput(FileOutput):
"""Better FileOutput class for Sphinx."""
- def __init__(self, **kwargs):
- # type: (Any) -> None
+ def __init__(self, **kwargs) -> None:
self.overwrite_if_changed = kwargs.pop('overwrite_if_changed', False)
super().__init__(**kwargs)
- def write(self, data):
- # type: (str) -> str
+ def write(self, data: str) -> str:
if (self.destination_path and self.autoclose and 'b' not in self.mode and
self.overwrite_if_changed and os.path.exists(self.destination_path)):
with open(self.destination_path, encoding=self.encoding) as f:
@@ -349,19 +321,16 @@ class SphinxDirective(Directive):
"""
@property
- def env(self):
- # type: () -> BuildEnvironment
+ def env(self) -> "BuildEnvironment":
"""Reference to the :class:`.BuildEnvironment` object."""
return self.state.document.settings.env
@property
- def config(self):
- # type: () -> Config
+ def config(self) -> "Config":
"""Reference to the :class:`.Config` object."""
return self.env.config
- def set_source_info(self, node):
- # type: (nodes.Node) -> None
+ def set_source_info(self, node: Node) -> None:
"""Set source and line number to the node."""
node.source, node.line = self.state_machine.get_source_and_line(self.lineno)
@@ -384,8 +353,9 @@ class SphinxRole:
content = None #: A list of strings, the directive content for customization
#: (from the "role" directive).
- def __call__(self, name, rawtext, text, lineno, inliner, options={}, content=[]):
- # type: (str, str, str, int, Inliner, Dict, List[str]) -> Tuple[List[nodes.Node], List[nodes.system_message]] # NOQA
+ def __call__(self, name: str, rawtext: str, text: str, lineno: int,
+ inliner: Inliner, options: Dict = {}, content: List[str] = []
+ ) -> Tuple[List[Node], List[system_message]]:
self.rawtext = rawtext
self.text = unescape(text)
self.lineno = lineno
@@ -405,24 +375,20 @@ class SphinxRole:
return self.run()
- def run(self):
- # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]
+ def run(self) -> Tuple[List[Node], List[system_message]]:
raise NotImplementedError
@property
- def env(self):
- # type: () -> BuildEnvironment
+ def env(self) -> "BuildEnvironment":
"""Reference to the :class:`.BuildEnvironment` object."""
return self.inliner.document.settings.env
@property
- def config(self):
- # type: () -> Config
+ def config(self) -> "Config":
"""Reference to the :class:`.Config` object."""
return self.env.config
- def set_source_info(self, node, lineno=None):
- # type: (nodes.Node, int) -> None
+ def set_source_info(self, node: Node, lineno: int = None) -> None:
if lineno is None:
lineno = self.lineno
@@ -444,8 +410,9 @@ class ReferenceRole(SphinxRole):
# \x00 means the "<" was backslash-escaped
explicit_title_re = re.compile(r'^(.+?)\s*(?<!\x00)<(.*?)>$', re.DOTALL)
- def __call__(self, name, rawtext, text, lineno, inliner, options={}, content=[]):
- # type: (str, str, str, int, Inliner, Dict, List[str]) -> Tuple[List[nodes.Node], List[nodes.system_message]] # NOQA
+ def __call__(self, name: str, rawtext: str, text: str, lineno: int,
+ inliner: Inliner, options: Dict = {}, content: List[str] = []
+ ) -> Tuple[List[Node], List[system_message]]:
matched = self.explicit_title_re.match(text)
if matched:
self.has_explicit_title = True
@@ -468,8 +435,7 @@ class SphinxTranslator(nodes.NodeVisitor):
This class is strongly coupled with Sphinx.
"""
- def __init__(self, document, builder):
- # type: (nodes.document, Builder) -> None
+ def __init__(self, document: nodes.document, builder: "Builder") -> None:
super().__init__(document)
self.builder = builder
self.config = builder.config
@@ -481,8 +447,7 @@ class SphinxTranslator(nodes.NodeVisitor):
__document_cache__ = None # type: nodes.document
-def new_document(source_path, settings=None):
- # type: (str, Any) -> nodes.document
+def new_document(source_path: str, settings: Any = None) -> nodes.document:
"""Return a new empty document object. This is an alternative of docutils'.
This is a simple wrapper for ``docutils.utils.new_document()``. It
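
(A minimal sketch of the SphinxRole API annotated above: a subclass implements run() and returns a (nodes, system_messages) pair. The role name and node choice are arbitrary examples, not part of this commit.)

from docutils import nodes
from sphinx.util.docutils import SphinxRole

class HelloRole(SphinxRole):
    def run(self):
        # self.rawtext and self.text are populated by SphinxRole.__call__()
        node = nodes.emphasis(self.rawtext, 'Hello %s!' % self.text)
        return [node], []

def setup(app):
    app.add_role('hello', HelloRole())
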
diff --git a/sphinx/util/fileutil.py b/sphinx/util/fileutil.py
index 68dd7151f..4a734cc21 100644
--- a/sphinx/util/fileutil.py
+++ b/sphinx/util/fileutil.py
@@ -10,20 +10,20 @@
import os
import posixpath
+from typing import Dict
from docutils.utils import relative_path
from sphinx.util.osutil import copyfile, ensuredir
+from sphinx.util.typing import PathMatcher
if False:
# For type annotation
- from typing import Callable, Dict, Union # NOQA
- from sphinx.util.matching import Matcher # NOQA
- from sphinx.util.template import BaseRenderer # NOQA
+ from sphinx.util.template import BaseRenderer
-def copy_asset_file(source, destination, context=None, renderer=None):
- # type: (str, str, Dict, BaseRenderer) -> None
+def copy_asset_file(source: str, destination: str,
+ context: Dict = None, renderer: "BaseRenderer" = None) -> None:
"""Copy an asset file to destination.
On copying, it expands the template variables if context argument is given and
@@ -55,8 +55,8 @@ def copy_asset_file(source, destination, context=None, renderer=None):
copyfile(source, destination)
-def copy_asset(source, destination, excluded=lambda path: False, context=None, renderer=None):
- # type: (str, str, Union[Callable[[str], bool], Matcher], Dict, BaseRenderer) -> None
+def copy_asset(source: str, destination: str, excluded: PathMatcher = lambda path: False,
+ context: Dict = None, renderer: "BaseRenderer" = None) -> None:
"""Copy asset files to destination recursively.
On copying, it expands the template variables if context argument is given and
diff --git a/sphinx/util/i18n.py b/sphinx/util/i18n.py
index 0caee3af6..9e2373637 100644
--- a/sphinx/util/i18n.py
+++ b/sphinx/util/i18n.py
@@ -14,6 +14,7 @@ import warnings
from collections import namedtuple
from datetime import datetime
from os import path
+from typing import Callable, Generator, List, Set, Tuple
import babel.dates
from babel.messages.mofile import write_mo
@@ -26,13 +27,12 @@ from sphinx.util import logging
from sphinx.util.matching import Matcher
from sphinx.util.osutil import SEP, canon_path, relpath
-
-logger = logging.getLogger(__name__)
-
if False:
# For type annotation
- from typing import Callable, Generator, List, Set, Tuple # NOQA
- from sphinx.environment import BuildEnvironment # NOQA
+ from sphinx.environment import BuildEnvironment
+
+
+logger = logging.getLogger(__name__)
LocaleFileInfoBase = namedtuple('CatalogInfo', 'base_dir,domain,charset')
@@ -40,33 +40,27 @@ LocaleFileInfoBase = namedtuple('CatalogInfo', 'base_dir,domain,charset')
class CatalogInfo(LocaleFileInfoBase):
@property
- def po_file(self):
- # type: () -> str
+ def po_file(self) -> str:
return self.domain + '.po'
@property
- def mo_file(self):
- # type: () -> str
+ def mo_file(self) -> str:
return self.domain + '.mo'
@property
- def po_path(self):
- # type: () -> str
+ def po_path(self) -> str:
return path.join(self.base_dir, self.po_file)
@property
- def mo_path(self):
- # type: () -> str
+ def mo_path(self) -> str:
return path.join(self.base_dir, self.mo_file)
- def is_outdated(self):
- # type: () -> bool
+ def is_outdated(self) -> bool:
return (
not path.exists(self.mo_path) or
path.getmtime(self.mo_path) < path.getmtime(self.po_path))
- def write_mo(self, locale):
- # type: (str) -> None
+ def write_mo(self, locale: str) -> None:
with open(self.po_path, encoding=self.charset) as file_po:
try:
po = read_po(file_po, locale)
@@ -84,16 +78,15 @@ class CatalogInfo(LocaleFileInfoBase):
class CatalogRepository:
"""A repository for message catalogs."""
- def __init__(self, basedir, locale_dirs, language, encoding):
- # type: (str, List[str], str, str) -> None
+ def __init__(self, basedir: str, locale_dirs: List[str],
+ language: str, encoding: str) -> None:
self.basedir = basedir
self._locale_dirs = locale_dirs
self.language = language
self.encoding = encoding
@property
- def locale_dirs(self):
- # type: () -> Generator[str, None, None]
+ def locale_dirs(self) -> Generator[str, None, None]:
if not self.language:
return
@@ -103,8 +96,7 @@ class CatalogRepository:
yield locale_dir
@property
- def pofiles(self):
- # type: () -> Generator[Tuple[str, str], None, None]
+ def pofiles(self) -> Generator[Tuple[str, str], None, None]:
for locale_dir in self.locale_dirs:
basedir = path.join(locale_dir, self.language, 'LC_MESSAGES')
for root, dirnames, filenames in os.walk(basedir):
@@ -119,15 +111,13 @@ class CatalogRepository:
yield basedir, relpath(fullpath, basedir)
@property
- def catalogs(self):
- # type: () -> Generator[CatalogInfo, None, None]
+ def catalogs(self) -> Generator[CatalogInfo, None, None]:
for basedir, filename in self.pofiles:
domain = canon_path(path.splitext(filename)[0])
yield CatalogInfo(basedir, domain, self.encoding)
-def find_catalog(docname, compaction):
- # type: (str, bool) -> str
+def find_catalog(docname: str, compaction: bool) -> str:
warnings.warn('find_catalog() is deprecated.',
RemovedInSphinx40Warning, stacklevel=2)
if compaction:
@@ -138,8 +128,7 @@ def find_catalog(docname, compaction):
return ret
-def docname_to_domain(docname, compation):
- # type: (str, bool) -> str
+def docname_to_domain(docname: str, compation: bool) -> str:
"""Convert docname to domain for catalogs."""
if compation:
return docname.split(SEP, 1)[0]
@@ -147,8 +136,8 @@ def docname_to_domain(docname, compation):
return docname
-def find_catalog_files(docname, srcdir, locale_dirs, lang, compaction):
- # type: (str, str, List[str], str, bool) -> List[str]
+def find_catalog_files(docname: str, srcdir: str, locale_dirs: List[str],
+ lang: str, compaction: bool) -> List[str]:
warnings.warn('find_catalog_files() is deprecated.',
RemovedInSphinx40Warning, stacklevel=2)
if not(lang and locale_dirs):
@@ -161,9 +150,9 @@ def find_catalog_files(docname, srcdir, locale_dirs, lang, compaction):
return files
-def find_catalog_source_files(locale_dirs, locale, domains=None, charset='utf-8',
- force_all=False, excluded=Matcher([])):
- # type: (List[str], str, List[str], str, bool, Matcher) -> Set[CatalogInfo]
+def find_catalog_source_files(locale_dirs: List[str], locale: str, domains: List[str] = None,
+ charset: str = 'utf-8', force_all: bool = False,
+ excluded: Matcher = Matcher([])) -> Set[CatalogInfo]:
"""
:param list locale_dirs:
list of path as `['locale_dir1', 'locale_dir2', ...]` to find
@@ -252,8 +241,8 @@ date_format_mappings = {
date_format_re = re.compile('(%s)' % '|'.join(date_format_mappings))
-def babel_format_date(date, format, locale, formatter=babel.dates.format_date):
- # type: (datetime, str, str, Callable) -> str
+def babel_format_date(date: datetime, format: str, locale: str,
+ formatter: Callable = babel.dates.format_date) -> str:
if locale is None:
locale = 'en'
@@ -273,8 +262,7 @@ def babel_format_date(date, format, locale, formatter=babel.dates.format_date):
return format
-def format_date(format, date=None, language=None):
- # type: (str, datetime, str) -> str
+def format_date(format: str, date: datetime = None, language: str = None) -> str:
if date is None:
# If time is not specified, try to use $SOURCE_DATE_EPOCH variable
# See https://wiki.debian.org/ReproducibleBuilds/TimestampsProposal
@@ -308,8 +296,7 @@ def format_date(format, date=None, language=None):
return "".join(result)
-def get_image_filename_for_language(filename, env):
- # type: (str, BuildEnvironment) -> str
+def get_image_filename_for_language(filename: str, env: "BuildEnvironment") -> str:
if not env.config.language:
return filename
@@ -328,8 +315,7 @@ def get_image_filename_for_language(filename, env):
raise SphinxError('Invalid figure_language_filename: %r' % exc)
-def search_image_for_language(filename, env):
- # type: (str, BuildEnvironment) -> str
+def search_image_for_language(filename: str, env: "BuildEnvironment") -> str:
if not env.config.language:
return filename
diff --git a/sphinx/util/images.py b/sphinx/util/images.py
index 76bffd88c..8471e56df 100644
--- a/sphinx/util/images.py
+++ b/sphinx/util/images.py
@@ -12,7 +12,7 @@ import base64
import imghdr
from collections import OrderedDict
from os import path
-from typing import NamedTuple
+from typing import IO, NamedTuple, Tuple
import imagesize
@@ -21,10 +21,6 @@ try:
except ImportError:
Image = None
-if False:
- # For type annotation
- from typing import IO, Tuple # NOQA
-
mime_suffixes = OrderedDict([
('.gif', 'image/gif'),
('.jpg', 'image/jpeg'),
@@ -39,8 +35,7 @@ DataURI = NamedTuple('DataURI', [('mimetype', str),
('data', bytes)])
-def get_image_size(filename):
- # type: (str) -> Tuple[int, int]
+def get_image_size(filename: str) -> Tuple[int, int]:
try:
size = imagesize.get(filename)
if size[0] == -1:
@@ -59,8 +54,7 @@ def get_image_size(filename):
return None
-def guess_mimetype_for_stream(stream, default=None):
- # type: (IO, str) -> str
+def guess_mimetype_for_stream(stream: IO, default: str = None) -> str:
imgtype = imghdr.what(stream) # type: ignore
if imgtype:
return 'image/' + imgtype
@@ -68,8 +62,7 @@ def guess_mimetype_for_stream(stream, default=None):
return default
-def guess_mimetype(filename, default=None):
- # type: (str, str) -> str
+def guess_mimetype(filename: str = '', default: str = None) -> str:
_, ext = path.splitext(filename.lower())
if ext in mime_suffixes:
return mime_suffixes[ext]
@@ -80,8 +73,7 @@ def guess_mimetype(filename, default=None):
return default
-def get_image_extension(mimetype):
- # type: (str) -> str
+def get_image_extension(mimetype: str) -> str:
for ext, _mimetype in mime_suffixes.items():
if mimetype == _mimetype:
return ext
@@ -89,8 +81,7 @@ def get_image_extension(mimetype):
return None
-def parse_data_uri(uri):
- # type: (str) -> DataURI
+def parse_data_uri(uri: str) -> DataURI:
if not uri.startswith('data:'):
return None
@@ -111,8 +102,7 @@ def parse_data_uri(uri):
return DataURI(mimetype, charset, image_data)
-def test_svg(h, f):
- # type: (bytes, IO) -> str
+def test_svg(h: bytes, f: IO) -> str:
"""An additional imghdr library helper; test the header is SVG's or not."""
try:
if '<svg' in h.decode().lower():
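A short sketch of the annotated helpers above; the file name and the tiny base64 payload are made up for illustration::

    from sphinx.util.images import guess_mimetype, parse_data_uri

    guess_mimetype('banner.gif')          # 'image/gif', looked up in mime_suffixes
    uri = parse_data_uri('data:image/gif;base64,R0lGODlhAQABAAAAACw=')
    uri.mimetype                          # 'image/gif'; uri.data holds the decoded bytes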
diff --git a/sphinx/util/inspect.py b/sphinx/util/inspect.py
index 9dc422778..fc6dafe9f 100644
--- a/sphinx/util/inspect.py
+++ b/sphinx/util/inspect.py
@@ -19,14 +19,11 @@ from inspect import ( # NOQA
isclass, ismethod, ismethoddescriptor, isroutine
)
from io import StringIO
+from typing import Any, Callable, Mapping, List, Tuple
from sphinx.util import logging
from sphinx.util.typing import NoneType
-if False:
- # For type annotation
- from typing import Any, Callable, Mapping, List, Tuple, Type # NOQA
-
if sys.version_info > (3, 7):
from types import (
ClassMethodDescriptorType,
@@ -112,26 +109,22 @@ def getargspec(func):
kwonlyargs, kwdefaults, annotations)
-def isenumclass(x):
- # type: (Type) -> bool
+def isenumclass(x: Any) -> bool:
"""Check if the object is subclass of enum."""
return inspect.isclass(x) and issubclass(x, enum.Enum)
-def isenumattribute(x):
- # type: (Any) -> bool
+def isenumattribute(x: Any) -> bool:
"""Check if the object is attribute of enum."""
return isinstance(x, enum.Enum)
-def ispartial(obj):
- # type: (Any) -> bool
+def ispartial(obj: Any) -> bool:
"""Check if the object is partial."""
return isinstance(obj, (partial, partialmethod))
-def isclassmethod(obj):
- # type: (Any) -> bool
+def isclassmethod(obj: Any) -> bool:
"""Check if the object is classmethod."""
if isinstance(obj, classmethod):
return True
@@ -141,8 +134,7 @@ def isclassmethod(obj):
return False
-def isstaticmethod(obj, cls=None, name=None):
- # type: (Any, Any, str) -> bool
+def isstaticmethod(obj: Any, cls: Any = None, name: str = None) -> bool:
"""Check if the object is staticmethod."""
if isinstance(obj, staticmethod):
return True
@@ -161,8 +153,7 @@ def isstaticmethod(obj, cls=None, name=None):
return False
-def isdescriptor(x):
- # type: (Any) -> bool
+def isdescriptor(x: Any) -> bool:
"""Check if the object is some kind of descriptor."""
for item in '__get__', '__set__', '__delete__':
if hasattr(safe_getattr(x, item, None), '__call__'):
@@ -170,14 +161,12 @@ def isdescriptor(x):
return False
-def isabstractmethod(obj):
- # type: (Any) -> bool
+def isabstractmethod(obj: Any) -> bool:
"""Check if the object is an abstractmethod."""
return safe_getattr(obj, '__isabstractmethod__', False) is True
-def isattributedescriptor(obj):
- # type: (Any) -> bool
+def isattributedescriptor(obj: Any) -> bool:
"""Check if the object is an attribute like descriptor."""
if inspect.isdatadescriptor(obj):  # check the passed object, not the builtin object type
# data descriptor is kind of attribute
@@ -204,20 +193,17 @@ def isattributedescriptor(obj):
return False
-def isfunction(obj):
- # type: (Any) -> bool
+def isfunction(obj: Any) -> bool:
"""Check if the object is function."""
return inspect.isfunction(obj) or ispartial(obj) and inspect.isfunction(obj.func)
-def isbuiltin(obj):
- # type: (Any) -> bool
+def isbuiltin(obj: Any) -> bool:
"""Check if the object is builtin."""
return inspect.isbuiltin(obj) or ispartial(obj) and inspect.isbuiltin(obj.func)
-def iscoroutinefunction(obj):
- # type: (Any) -> bool
+def iscoroutinefunction(obj: Any) -> bool:
"""Check if the object is coroutine-function."""
if inspect.iscoroutinefunction(obj):
return True
@@ -228,14 +214,12 @@ def iscoroutinefunction(obj):
return False
-def isproperty(obj):
- # type: (Any) -> bool
+def isproperty(obj: Any) -> bool:
"""Check if the object is property."""
return isinstance(obj, property)
-def safe_getattr(obj, name, *defargs):
- # type: (Any, str, Any) -> Any
+def safe_getattr(obj: Any, name: str, *defargs) -> Any:
"""A getattr() that turns all exceptions into AttributeErrors."""
try:
return getattr(obj, name, *defargs)
@@ -257,8 +241,8 @@ def safe_getattr(obj, name, *defargs):
raise AttributeError(name)
-def safe_getmembers(object, predicate=None, attr_getter=safe_getattr):
- # type: (Any, Callable[[str], bool], Callable) -> List[Tuple[str, Any]]
+def safe_getmembers(object: Any, predicate: Callable[[str], bool] = None,
+ attr_getter: Callable = safe_getattr) -> List[Tuple[str, Any]]:
"""A version of inspect.getmembers() that uses safe_getattr()."""
results = [] # type: List[Tuple[str, Any]]
for key in dir(object):
@@ -272,8 +256,7 @@ def safe_getmembers(object, predicate=None, attr_getter=safe_getattr):
return results
-def object_description(object):
- # type: (Any) -> str
+def object_description(object: Any) -> str:
"""A repr() implementation that returns text safe to use in reST context."""
if isinstance(object, dict):
try:
@@ -310,8 +293,7 @@ def object_description(object):
return s.replace('\n', ' ')
-def is_builtin_class_method(obj, attr_name):
- # type: (Any, str) -> bool
+def is_builtin_class_method(obj: Any, attr_name: str) -> bool:
"""If attr_name is implemented at builtin class, return True.
>>> is_builtin_class_method(int, '__init__')
@@ -333,8 +315,8 @@ class Signature:
its return annotation.
"""
- def __init__(self, subject, bound_method=False, has_retval=True):
- # type: (Callable, bool, bool) -> None
+ def __init__(self, subject: Callable, bound_method: bool = False,
+ has_retval: bool = True) -> None:
# check subject is not a built-in class (ex. int, str)
if (isinstance(subject, type) and
is_builtin_class_method(subject, "__new__") and
@@ -379,16 +361,14 @@ class Signature:
self.skip_first_argument = False
@property
- def parameters(self):
- # type: () -> Mapping
+ def parameters(self) -> Mapping:
if self.partialmethod_with_noargs:
return {}
else:
return self.signature.parameters
@property
- def return_annotation(self):
- # type: () -> Any
+ def return_annotation(self) -> Any:
if self.signature:
if self.has_retval:
return self.signature.return_annotation
@@ -397,8 +377,7 @@ class Signature:
else:
return None
- def format_args(self, show_annotation=True):
- # type: (bool) -> str
+ def format_args(self, show_annotation: bool = True) -> str:
args = []
last_kind = None
for i, param in enumerate(self.parameters.values()):
@@ -453,8 +432,7 @@ class Signature:
return '(%s) -> %s' % (', '.join(args), annotation)
- def format_annotation(self, annotation):
- # type: (Any) -> str
+ def format_annotation(self, annotation: Any) -> str:
"""Return formatted representation of a type annotation.
Show qualified names for types and additional details for types from
@@ -480,8 +458,7 @@ class Signature:
else:
return self.format_annotation_old(annotation)
- def format_annotation_new(self, annotation):
- # type: (Any) -> str
+ def format_annotation_new(self, annotation: Any) -> str:
"""format_annotation() for py37+"""
module = getattr(annotation, '__module__', None)
if module == 'typing':
@@ -517,8 +494,7 @@ class Signature:
return qualname
- def format_annotation_old(self, annotation):
- # type: (Any) -> str
+ def format_annotation_old(self, annotation: Any) -> str:
"""format_annotation() for py36 or below"""
module = getattr(annotation, '__module__', None)
if module == 'typing':
@@ -619,8 +595,8 @@ class Signature:
return qualname
-def getdoc(obj, attrgetter=safe_getattr, allow_inherited=False):
- # type: (Any, Callable, bool) -> str
+def getdoc(obj: Any, attrgetter: Callable = safe_getattr,
+ allow_inherited: bool = False) -> str:
"""Get the docstring for the object.
This additionally tries to obtain the docstring for some kinds of objects:
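A minimal sketch of how the annotated ``Signature`` helper above can be used; the sample function and the rendered string are illustrative, not taken from this change::

    from sphinx.util.inspect import Signature

    def greet(name: str, excited: bool = False) -> str:
        return name + ('!' if excited else '')

    # renders the argument list much like autodoc does
    Signature(greet).format_args()   # e.g. "(name: str, excited: bool = False) -> str"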
diff --git a/sphinx/util/inventory.py b/sphinx/util/inventory.py
index 0fdc3e833..43a868e95 100644
--- a/sphinx/util/inventory.py
+++ b/sphinx/util/inventory.py
@@ -10,20 +10,20 @@
import os
import re
import zlib
+from typing import Callable, IO, Iterator
from sphinx.util import logging
-
-if False:
- # For type annotation
- from typing import Callable, IO, Iterator # NOQA
- from sphinx.builders import Builder # NOQA
- from sphinx.environment import BuildEnvironment # NOQA
- from sphinx.util.typing import Inventory # NOQA
+from sphinx.util.typing import Inventory
BUFSIZE = 16 * 1024
logger = logging.getLogger(__name__)
+if False:
+ # For type annotation
+ from sphinx.builders import Builder
+ from sphinx.environment import BuildEnvironment
+
class InventoryFileReader:
"""A file reader for inventory file.
@@ -31,21 +31,18 @@ class InventoryFileReader:
This reader supports mixture of texts and compressed texts.
"""
- def __init__(self, stream):
- # type: (IO) -> None
+ def __init__(self, stream: IO) -> None:
self.stream = stream
self.buffer = b''
self.eof = False
- def read_buffer(self):
- # type: () -> None
+ def read_buffer(self) -> None:
chunk = self.stream.read(BUFSIZE)
if chunk == b'':
self.eof = True
self.buffer += chunk
- def readline(self):
- # type: () -> str
+ def readline(self) -> str:
pos = self.buffer.find(b'\n')
if pos != -1:
line = self.buffer[:pos].decode()
@@ -59,15 +56,13 @@ class InventoryFileReader:
return line
- def readlines(self):
- # type: () -> Iterator[str]
+ def readlines(self) -> Iterator[str]:
while not self.eof:
line = self.readline()
if line:
yield line
- def read_compressed_chunks(self):
- # type: () -> Iterator[bytes]
+ def read_compressed_chunks(self) -> Iterator[bytes]:
decompressor = zlib.decompressobj()
while not self.eof:
self.read_buffer()
@@ -75,8 +70,7 @@ class InventoryFileReader:
self.buffer = b''
yield decompressor.flush()
- def read_compressed_lines(self):
- # type: () -> Iterator[str]
+ def read_compressed_lines(self) -> Iterator[str]:
buf = b''
for chunk in self.read_compressed_chunks():
buf += chunk
@@ -89,8 +83,7 @@ class InventoryFileReader:
class InventoryFile:
@classmethod
- def load(cls, stream, uri, joinfunc):
- # type: (IO, str, Callable) -> Inventory
+ def load(cls, stream: IO, uri: str, joinfunc: Callable) -> Inventory:
reader = InventoryFileReader(stream)
line = reader.readline().rstrip()
if line == '# Sphinx inventory version 1':
@@ -101,8 +94,7 @@ class InventoryFile:
raise ValueError('invalid inventory header: %s' % line)
@classmethod
- def load_v1(cls, stream, uri, join):
- # type: (InventoryFileReader, str, Callable) -> Inventory
+ def load_v1(cls, stream: InventoryFileReader, uri: str, join: Callable) -> Inventory:
invdata = {} # type: Inventory
projname = stream.readline().rstrip()[11:]
version = stream.readline().rstrip()[11:]
@@ -120,8 +112,7 @@ class InventoryFile:
return invdata
@classmethod
- def load_v2(cls, stream, uri, join):
- # type: (InventoryFileReader, str, Callable) -> Inventory
+ def load_v2(cls, stream: InventoryFileReader, uri: str, join: Callable) -> Inventory:
invdata = {} # type: Inventory
projname = stream.readline().rstrip()[11:]
version = stream.readline().rstrip()[11:]
@@ -150,10 +141,8 @@ class InventoryFile:
return invdata
@classmethod
- def dump(cls, filename, env, builder):
- # type: (str, BuildEnvironment, Builder) -> None
- def escape(string):
- # type: (str) -> str
+ def dump(cls, filename: str, env: "BuildEnvironment", builder: "Builder") -> None:
+ def escape(string: str) -> str:
return re.sub("\\s+", " ", string)
with open(os.path.join(filename), 'wb') as f:
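A minimal sketch of loading an inventory with the annotated ``InventoryFile.load()`` above; the file name and base URL are placeholders::

    from posixpath import join
    from sphinx.util.inventory import InventoryFile

    with open('objects.inv', 'rb') as f:          # hypothetical inventory file
        inventory = InventoryFile.load(f, 'https://docs.example.org/', join)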
diff --git a/sphinx/util/jsdump.py b/sphinx/util/jsdump.py
index 0bab2f014..bfdba170b 100644
--- a/sphinx/util/jsdump.py
+++ b/sphinx/util/jsdump.py
@@ -10,10 +10,7 @@
"""
import re
-
-if False:
- # For type annotation
- from typing import Any, Dict, IO, List, Match, Union # NOQA
+from typing import Any, Dict, IO, List, Match, Union
_str_re = re.compile(r'"(\\\\|\\"|[^"])*"')
_int_re = re.compile(r'\d+')
@@ -35,10 +32,8 @@ ESCAPE_DICT = {
ESCAPED = re.compile(r'\\u.{4}|\\.')
-def encode_string(s):
- # type: (str) -> str
- def replace(match):
- # type: (Match) -> str
+def encode_string(s: str) -> str:
+ def replace(match: Match) -> str:
s = match.group(0)
try:
return ESCAPE_DICT[s]
@@ -55,8 +50,7 @@ def encode_string(s):
return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
-def decode_string(s):
- # type: (str) -> str
+def decode_string(s: str) -> str:
return ESCAPED.sub(lambda m: eval('"' + m.group() + '"'), s)
@@ -78,8 +72,7 @@ do import static with
double in super""".split())
-def dumps(obj, key=False):
- # type: (Any, bool) -> str
+def dumps(obj: Any, key: bool = False) -> str:
if key:
if not isinstance(obj, str):
obj = str(obj)
@@ -107,13 +100,11 @@ def dumps(obj, key=False):
raise TypeError(type(obj))
-def dump(obj, f):
- # type: (Any, IO) -> None
+def dump(obj: Any, f: IO) -> None:
f.write(dumps(obj))
-def loads(x):
- # type: (str) -> Any
+def loads(x: str) -> Any:
"""Loader that can read the JS subset the indexer produces."""
nothing = object()
i = 0
@@ -205,6 +196,5 @@ def loads(x):
return obj
-def load(f):
- # type: (IO) -> Any
+def load(f: IO) -> Any:
return loads(f.read())
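A small round-trip sketch of the annotated ``dumps()``/``loads()`` pair above, the JS subset used for the search index; the sample data is illustrative::

    from sphinx.util import jsdump

    data = {'docnames': ['index', 'usage'], 'version': '2.1'}
    text = jsdump.dumps(data)        # compact JS-subset text
    jsdump.loads(text) == data       # True: round-trips back to the original structure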
diff --git a/sphinx/util/jsonimpl.py b/sphinx/util/jsonimpl.py
index 4fb8e1f5d..c5336a195 100644
--- a/sphinx/util/jsonimpl.py
+++ b/sphinx/util/jsonimpl.py
@@ -11,13 +11,10 @@
import json
import warnings
from collections import UserString
+from typing import Any, IO
from sphinx.deprecation import RemovedInSphinx40Warning
-if False:
- # For type annotation
- from typing import Any, IO # NOQA
-
warnings.warn('sphinx.util.jsonimpl is deprecated',
RemovedInSphinx40Warning, stacklevel=2)
@@ -25,30 +22,25 @@ warnings.warn('sphinx.util.jsonimpl is deprecated',
class SphinxJSONEncoder(json.JSONEncoder):
"""JSONEncoder subclass that forces translation proxies."""
- def default(self, obj):
- # type: (Any) -> str
+ def default(self, obj: Any) -> str:
if isinstance(obj, UserString):
return str(obj)
return super().default(obj)
-def dump(obj, fp, *args, **kwds):
- # type: (Any, IO, Any, Any) -> None
+def dump(obj: Any, fp: IO, *args, **kwds) -> None:
kwds['cls'] = SphinxJSONEncoder
json.dump(obj, fp, *args, **kwds)
-def dumps(obj, *args, **kwds):
- # type: (Any, Any, Any) -> str
+def dumps(obj: Any, *args, **kwds) -> str:
kwds['cls'] = SphinxJSONEncoder
return json.dumps(obj, *args, **kwds)
-def load(*args, **kwds):
- # type: (Any, Any) -> Any
+def load(*args, **kwds) -> Any:
return json.load(*args, **kwds)
-def loads(*args, **kwds):
- # type: (Any, Any) -> Any
+def loads(*args, **kwds) -> Any:
return json.loads(*args, **kwds)
diff --git a/sphinx/util/logging.py b/sphinx/util/logging.py
index afa4ebd23..d6667dacd 100644
--- a/sphinx/util/logging.py
+++ b/sphinx/util/logging.py
@@ -12,8 +12,10 @@ import logging
import logging.handlers
from collections import defaultdict
from contextlib import contextmanager
+from typing import Any, Dict, Generator, IO, List, Tuple, Type, Union
from docutils import nodes
+from docutils.nodes import Node
from docutils.utils import get_source_line
from sphinx.errors import SphinxWarning
@@ -21,8 +23,7 @@ from sphinx.util.console import colorize
if False:
# For type annotation
- from typing import Any, Dict, Generator, IO, List, Tuple, Type, Union # NOQA
- from sphinx.application import Sphinx # NOQA
+ from sphinx.application import Sphinx
NAMESPACE = 'sphinx'
@@ -54,8 +55,7 @@ COLOR_MAP = defaultdict(lambda: 'blue',
})
-def getLogger(name):
- # type: (str) -> SphinxLoggerAdapter
+def getLogger(name: str) -> "SphinxLoggerAdapter":
"""Get logger wrapped by :class:`sphinx.util.logging.SphinxLoggerAdapter`.
Sphinx logger always uses ``sphinx.*`` namespace to be independent from
@@ -77,8 +77,7 @@ def getLogger(name):
return SphinxLoggerAdapter(logger, {})
-def convert_serializable(records):
- # type: (List[logging.LogRecord]) -> None
+def convert_serializable(records: List[logging.LogRecord]) -> None:
"""Convert LogRecord serializable."""
for r in records:
# extract arguments to a message and clear them
@@ -95,8 +94,7 @@ class SphinxLogRecord(logging.LogRecord):
prefix = ''
location = None # type: Any
- def getMessage(self):
- # type: () -> str
+ def getMessage(self) -> str:
message = super().getMessage()
location = getattr(self, 'location', None)
if location:
@@ -120,20 +118,17 @@ class SphinxWarningLogRecord(SphinxLogRecord):
class SphinxLoggerAdapter(logging.LoggerAdapter):
"""LoggerAdapter allowing ``type`` and ``subtype`` keywords."""
- def log(self, level, msg, *args, **kwargs):
- # type: (Union[int, str], str, Any, Any) -> None
+ def log(self, level: Union[int, str], msg: str, *args, **kwargs) -> None:
if isinstance(level, int):
super().log(level, msg, *args, **kwargs)
else:
levelno = LEVEL_NAMES[level]
super().log(levelno, msg, *args, **kwargs)
- def verbose(self, msg, *args, **kwargs):
- # type: (str, Any, Any) -> None
+ def verbose(self, msg: str, *args, **kwargs) -> None:
self.log(VERBOSE, msg, *args, **kwargs)
- def process(self, msg, kwargs): # type: ignore
- # type: (str, Dict) -> Tuple[str, Dict]
+ def process(self, msg: str, kwargs: Dict) -> Tuple[str, Dict]: # type: ignore
extra = kwargs.setdefault('extra', {})
if 'type' in kwargs:
extra['type'] = kwargs.pop('type')
@@ -148,8 +143,7 @@ class SphinxLoggerAdapter(logging.LoggerAdapter):
return msg, kwargs
- def handle(self, record):
- # type: (logging.LogRecord) -> None
+ def handle(self, record: logging.LogRecord) -> None:
self.logger.handle(record)
@@ -161,8 +155,7 @@ class WarningStreamHandler(logging.StreamHandler):
class NewLineStreamHandler(logging.StreamHandler):
"""StreamHandler which switches line terminator by record.nonl flag."""
- def emit(self, record):
- # type: (logging.LogRecord) -> None
+ def emit(self, record: logging.LogRecord) -> None:
try:
self.acquire()
if getattr(record, 'nonl', False):
@@ -177,16 +170,13 @@ class NewLineStreamHandler(logging.StreamHandler):
class MemoryHandler(logging.handlers.BufferingHandler):
"""Handler buffering all logs."""
- def __init__(self):
- # type: () -> None
+ def __init__(self) -> None:
super().__init__(-1)
- def shouldFlush(self, record):
- # type: (logging.LogRecord) -> bool
+ def shouldFlush(self, record: logging.LogRecord) -> bool:
return False # never flush
- def flushTo(self, logger):
- # type: (logging.Logger) -> None
+ def flushTo(self, logger: logging.Logger) -> None:
self.acquire()
try:
for record in self.buffer:
@@ -195,15 +185,13 @@ class MemoryHandler(logging.handlers.BufferingHandler):
finally:
self.release()
- def clear(self):
- # type: () -> List[logging.LogRecord]
+ def clear(self) -> List[logging.LogRecord]:
buffer, self.buffer = self.buffer, []
return buffer
@contextmanager
-def pending_warnings():
- # type: () -> Generator
+def pending_warnings() -> Generator[logging.Handler, None, None]:
"""Contextmanager to pend logging warnings temporary.
Similar to :func:`pending_logging`.
@@ -231,8 +219,7 @@ def pending_warnings():
@contextmanager
-def pending_logging():
- # type: () -> Generator
+def pending_logging() -> Generator[MemoryHandler, None, None]:
"""Contextmanager to pend logging all logs temporary.
For example::
@@ -264,8 +251,7 @@ def pending_logging():
@contextmanager
-def skip_warningiserror(skip=True):
- # type: (bool) -> Generator
+def skip_warningiserror(skip: bool = True) -> Generator[None, None, None]:
"""contextmanager to skip WarningIsErrorFilter for a while."""
logger = logging.getLogger(NAMESPACE)
@@ -285,8 +271,7 @@ def skip_warningiserror(skip=True):
@contextmanager
-def prefixed_warnings(prefix):
- # type: (str) -> Generator
+def prefixed_warnings(prefix: str) -> Generator[None, None, None]:
"""Prepend prefix to all records for a while.
For example::
@@ -332,13 +317,11 @@ def prefixed_warnings(prefix):
class LogCollector:
- def __init__(self):
- # type: () -> None
+ def __init__(self) -> None:
self.logs = [] # type: List[logging.LogRecord]
@contextmanager
- def collect(self):
- # type: () -> Generator
+ def collect(self) -> Generator[None, None, None]:
with pending_logging() as memhandler:
yield
@@ -348,16 +331,14 @@ class LogCollector:
class InfoFilter(logging.Filter):
"""Filter error and warning messages."""
- def filter(self, record):
- # type: (logging.LogRecord) -> bool
+ def filter(self, record: logging.LogRecord) -> bool:
if record.levelno < logging.WARNING:
return True
else:
return False
-def is_suppressed_warning(type, subtype, suppress_warnings):
- # type: (str, str, List[str]) -> bool
+def is_suppressed_warning(type: str, subtype: str, suppress_warnings: List[str]) -> bool:
"""Check the warning is suppressed or not."""
if type is None:
return False
@@ -379,13 +360,11 @@ def is_suppressed_warning(type, subtype, suppress_warnings):
class WarningSuppressor(logging.Filter):
"""Filter logs by `suppress_warnings`."""
- def __init__(self, app):
- # type: (Sphinx) -> None
+ def __init__(self, app: "Sphinx") -> None:
self.app = app
super().__init__()
- def filter(self, record):
- # type: (logging.LogRecord) -> bool
+ def filter(self, record: logging.LogRecord) -> bool:
type = getattr(record, 'type', None)
subtype = getattr(record, 'subtype', None)
@@ -405,13 +384,11 @@ class WarningSuppressor(logging.Filter):
class WarningIsErrorFilter(logging.Filter):
"""Raise exception if warning emitted."""
- def __init__(self, app):
- # type: (Sphinx) -> None
+ def __init__(self, app: "Sphinx") -> None:
self.app = app
super().__init__()
- def filter(self, record):
- # type: (logging.LogRecord) -> bool
+ def filter(self, record: logging.LogRecord) -> bool:
if getattr(record, 'skip_warningsiserror', False):
# disabled by DisableWarningIsErrorFilter
return True
@@ -433,8 +410,7 @@ class WarningIsErrorFilter(logging.Filter):
class DisableWarningIsErrorFilter(logging.Filter):
"""Disable WarningIsErrorFilter if this filter installed."""
- def filter(self, record):
- # type: (logging.LogRecord) -> bool
+ def filter(self, record: logging.LogRecord) -> bool:
record.skip_warningsiserror = True # type: ignore
return True
@@ -442,13 +418,11 @@ class DisableWarningIsErrorFilter(logging.Filter):
class MessagePrefixFilter(logging.Filter):
"""Prepend prefix to all records."""
- def __init__(self, prefix):
- # type: (str) -> None
+ def __init__(self, prefix: str) -> None:
self.prefix = prefix
super().__init__()
- def filter(self, record):
- # type: (logging.LogRecord) -> bool
+ def filter(self, record: logging.LogRecord) -> bool:
if self.prefix:
record.msg = self.prefix + ' ' + record.msg
return True
@@ -462,13 +436,11 @@ class SphinxLogRecordTranslator(logging.Filter):
"""
LogRecordClass = None # type: Type[logging.LogRecord]
- def __init__(self, app):
- # type: (Sphinx) -> None
+ def __init__(self, app: "Sphinx") -> None:
self.app = app
super().__init__()
- def filter(self, record): # type: ignore
- # type: (SphinxWarningLogRecord) -> bool
+ def filter(self, record: SphinxWarningLogRecord) -> bool: # type: ignore
if isinstance(record, logging.LogRecord):
# force subclassing to handle location
record.__class__ = self.LogRecordClass # type: ignore
@@ -500,8 +472,7 @@ class WarningLogRecordTranslator(SphinxLogRecordTranslator):
LogRecordClass = SphinxWarningLogRecord
-def get_node_location(node):
- # type: (nodes.Node) -> str
+def get_node_location(node: Node) -> str:
(source, line) = get_source_line(node)
if source and line:
return "%s:%s" % (source, line)
@@ -514,8 +485,7 @@ def get_node_location(node):
class ColorizeFormatter(logging.Formatter):
- def format(self, record):
- # type: (logging.LogRecord) -> str
+ def format(self, record: logging.LogRecord) -> str:
message = super().format(record)
color = getattr(record, 'color', None)
if color is None:
@@ -529,13 +499,11 @@ class ColorizeFormatter(logging.Formatter):
class SafeEncodingWriter:
"""Stream writer which ignores UnicodeEncodeError silently"""
- def __init__(self, stream):
- # type: (IO) -> None
+ def __init__(self, stream: IO) -> None:
self.stream = stream
self.encoding = getattr(stream, 'encoding', 'ascii') or 'ascii'
- def write(self, data):
- # type: (str) -> None
+ def write(self, data: str) -> None:
try:
self.stream.write(data)
except UnicodeEncodeError:
@@ -543,25 +511,21 @@ class SafeEncodingWriter:
# non-encodable characters, then decode them.
self.stream.write(data.encode(self.encoding, 'replace').decode(self.encoding))
- def flush(self):
- # type: () -> None
+ def flush(self) -> None:
if hasattr(self.stream, 'flush'):
self.stream.flush()
class LastMessagesWriter:
"""Stream writer which memories last 10 messages to save trackback"""
- def __init__(self, app, stream):
- # type: (Sphinx, IO) -> None
+ def __init__(self, app: "Sphinx", stream: IO) -> None:
self.app = app
- def write(self, data):
- # type: (str) -> None
+ def write(self, data: str) -> None:
self.app.messagelog.append(data)
-def setup(app, status, warning):
- # type: (Sphinx, IO, IO) -> None
+def setup(app: "Sphinx", status: IO, warning: IO) -> None:
"""Setup root logger for Sphinx"""
logger = logging.getLogger(NAMESPACE)
logger.setLevel(logging.DEBUG)
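A minimal sketch of the adapter keywords annotated above; the extension name used for ``type``/``subtype`` is made up::

    from sphinx.util import logging

    logger = logging.getLogger(__name__)
    logger.info('processing documents...')
    # type/subtype are stored on the record and can be matched by suppress_warnings
    logger.warning('unknown option', type='myext', subtype='option')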
diff --git a/sphinx/util/matching.py b/sphinx/util/matching.py
index b1725cb46..97eeff40f 100644
--- a/sphinx/util/matching.py
+++ b/sphinx/util/matching.py
@@ -9,16 +9,12 @@
"""
import re
+from typing import Callable, Dict, List, Match, Pattern
from sphinx.util.osutil import canon_path
-if False:
- # For type annotation
- from typing import Callable, Dict, List, Match, Pattern # NOQA
-
-def _translate_pattern(pat):
- # type: (str) -> str
+def _translate_pattern(pat: str) -> str:
"""Translate a shell-style glob pattern to a regular expression.
Adapted from the fnmatch module, but enhanced so that single stars don't
@@ -64,8 +60,7 @@ def _translate_pattern(pat):
return res + '$'
-def compile_matchers(patterns):
- # type: (List[str]) -> List[Callable[[str], Match[str]]]
+def compile_matchers(patterns: List[str]) -> List[Callable[[str], Match[str]]]:
return [re.compile(_translate_pattern(pat)).match for pat in patterns]
@@ -76,17 +71,14 @@ class Matcher:
For example, "**/index.rst" matches with "index.rst"
"""
- def __init__(self, patterns):
- # type: (List[str]) -> None
+ def __init__(self, patterns: List[str]) -> None:
expanded = [pat[3:] for pat in patterns if pat.startswith('**/')]
self.patterns = compile_matchers(patterns + expanded)
- def __call__(self, string):
- # type: (str) -> bool
+ def __call__(self, string: str) -> bool:
return self.match(string)
- def match(self, string):
- # type: (str) -> bool
+ def match(self, string: str) -> bool:
string = canon_path(string)
return any(pat(string) for pat in self.patterns)
@@ -97,16 +89,14 @@ DOTFILES = Matcher(['**/.*'])
_pat_cache = {} # type: Dict[str, Pattern]
-def patmatch(name, pat):
- # type: (str, str) -> Match[str]
+def patmatch(name: str, pat: str) -> Match[str]:
"""Return if name matches pat. Adapted from fnmatch module."""
if pat not in _pat_cache:
_pat_cache[pat] = re.compile(_translate_pattern(pat))
return _pat_cache[pat].match(name)
-def patfilter(names, pat):
- # type: (List[str], str) -> List[str]
+def patfilter(names: List[str], pat: str) -> List[str]:
"""Return the subset of the list NAMES that match PAT.
Adapted from fnmatch module.
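A brief sketch of the annotated matching helpers above; the first call mirrors the example already given in the ``Matcher`` docstring::

    from sphinx.util.matching import Matcher, patfilter

    matcher = Matcher(['**/index.rst'])
    matcher('index.rst')                                      # True
    patfilter(['index.rst', 'api.rst', 'Makefile'], '*.rst')  # ['index.rst', 'api.rst']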
diff --git a/sphinx/util/math.py b/sphinx/util/math.py
index d296a3fe0..2af4e4db6 100644
--- a/sphinx/util/math.py
+++ b/sphinx/util/math.py
@@ -8,15 +8,12 @@
:license: BSD, see LICENSE for details.
"""
+from docutils import nodes
-if False:
- # For type annotation
- from docutils import nodes # NOQA
- from sphinx.builders.html import HTMLTranslator # NOQA
+from sphinx.builders.html import HTMLTranslator
-def get_node_equation_number(writer, node):
- # type: (HTMLTranslator, nodes.math_block) -> str
+def get_node_equation_number(writer: HTMLTranslator, node: nodes.math_block) -> str:
if writer.builder.config.math_numfig and writer.builder.config.numfig:
figtype = 'displaymath'
if writer.builder.name == 'singlehtml':
@@ -31,10 +28,8 @@ def get_node_equation_number(writer, node):
return node['number']
-def wrap_displaymath(text, label, numbering):
- # type: (str, str, bool) -> str
- def is_equation(part):
- # type: (str) -> str
+def wrap_displaymath(text: str, label: str, numbering: bool) -> str:
+ def is_equation(part: str) -> str:
return part.strip()
if label is None:
diff --git a/sphinx/util/nodes.py b/sphinx/util/nodes.py
index 58c6a6698..e0aaaa14e 100644
--- a/sphinx/util/nodes.py
+++ b/sphinx/util/nodes.py
@@ -10,9 +10,14 @@
import re
import warnings
-from typing import Any, cast
+from typing import Any, Callable, Iterable, List, Set, Tuple, Type
+from typing import cast
from docutils import nodes
+from docutils.nodes import Element, Node
+from docutils.parsers.rst import Directive
+from docutils.parsers.rst.states import Inliner
+from docutils.statemachine import StringList
from sphinx import addnodes
from sphinx.deprecation import RemovedInSphinx40Warning
@@ -21,11 +26,8 @@ from sphinx.util import logging
if False:
# For type annotation
- from typing import Callable, Iterable, List, Optional, Set, Tuple, Type # NOQA
- from docutils.parsers.rst.states import Inliner # NOQA
- from docutils.statemachine import StringList # NOQA
- from sphinx.builders import Builder # NOQA
- from sphinx.utils.tags import Tags # NOQA
+ from sphinx.builders import Builder
+ from sphinx.utils.tags import Tags
logger = logging.getLogger(__name__)
@@ -57,13 +59,11 @@ class NodeMatcher:
# => [<reference ...>, <reference ...>, ...]
"""
- def __init__(self, *classes, **attrs):
- # type: (Type[nodes.Node], Any) -> None
+ def __init__(self, *classes: Type[Node], **attrs) -> None:
self.classes = classes
self.attrs = attrs
- def match(self, node):
- # type: (nodes.Node) -> bool
+ def match(self, node: Node) -> bool:
try:
if self.classes and not isinstance(node, self.classes):
return False
@@ -85,13 +85,11 @@ class NodeMatcher:
# for non-Element nodes
return False
- def __call__(self, node):
- # type: (nodes.Node) -> bool
+ def __call__(self, node: Node) -> bool:
return self.match(node)
-def get_full_module_name(node):
- # type: (nodes.Node) -> str
+def get_full_module_name(node: Node) -> str:
"""
return full module dotted path like: 'docutils.nodes.paragraph'
@@ -101,8 +99,7 @@ def get_full_module_name(node):
return '{}.{}'.format(node.__module__, node.__class__.__name__)
-def repr_domxml(node, length=80):
- # type: (nodes.Node, Optional[int]) -> str
+def repr_domxml(node: Node, length: int = 80) -> str:
"""
return DOM XML representation of the specified node like:
'<paragraph translatable="False"><inline classes="versionmodified">New in version...'
@@ -122,8 +119,7 @@ def repr_domxml(node, length=80):
return text
-def apply_source_workaround(node):
- # type: (nodes.Element) -> None
+def apply_source_workaround(node: Element) -> None:
# workaround: nodes.term have wrong rawsource if classifier is specified.
# The behavior of docutils-0.11, 0.12 is:
# * when ``term text : classifier1 : classifier2`` is specified,
@@ -186,8 +182,7 @@ IGNORED_NODES = (
)
-def is_pending_meta(node):
- # type: (nodes.Node) -> bool
+def is_pending_meta(node: Node) -> bool:
if (isinstance(node, nodes.pending) and
isinstance(node.details.get('nodes', [None])[0], addnodes.meta)):
return True
@@ -195,8 +190,7 @@ def is_pending_meta(node):
return False
-def is_translatable(node):
- # type: (nodes.Node) -> bool
+def is_translatable(node: Node) -> bool:
if isinstance(node, addnodes.translatable):
return True
@@ -251,8 +245,7 @@ META_TYPE_NODES = (
)
-def extract_messages(doctree):
- # type: (nodes.Element) -> Iterable[Tuple[nodes.Element, str]]
+def extract_messages(doctree: Element) -> Iterable[Tuple[Element, str]]:
"""Extract translatable messages from a document tree."""
for node in doctree.traverse(is_translatable): # type: nodes.Element
if isinstance(node, addnodes.translatable):
@@ -279,39 +272,34 @@ def extract_messages(doctree):
yield node, msg
-def find_source_node(node):
- # type: (nodes.Element) -> str
+def find_source_node(node: Element) -> str:
warnings.warn('find_source_node() is deprecated.',
RemovedInSphinx40Warning)
return get_node_source(node)
-def get_node_source(node):
- # type: (nodes.Element) -> str
+def get_node_source(node: Element) -> str:
for pnode in traverse_parent(node):
if pnode.source:
return pnode.source
return None
-def get_node_line(node):
- # type: (nodes.Element) -> int
+def get_node_line(node: Element) -> int:
for pnode in traverse_parent(node):
if pnode.line:
return pnode.line
return None
-def traverse_parent(node, cls=None):
- # type: (nodes.Element, Any) -> Iterable[nodes.Element]
+def traverse_parent(node: Element, cls: Any = None) -> Iterable[Element]:
while node:
if cls is None or isinstance(node, cls):
yield node
node = node.parent
-def get_prev_node(node):
- # type: (nodes.Node) -> nodes.Node
+def get_prev_node(node: Node) -> Node:
pos = node.parent.index(node)
if pos > 0:
return node.parent[pos - 1]
@@ -319,8 +307,7 @@ def get_prev_node(node):
return None
-def traverse_translatable_index(doctree):
- # type: (nodes.Element) -> Iterable[Tuple[nodes.Element, List[str]]]
+def traverse_translatable_index(doctree: Element) -> Iterable[Tuple[Element, List[str]]]:
"""Traverse translatable index node from a document tree."""
for node in doctree.traverse(NodeMatcher(addnodes.index, inline=False)): # type: addnodes.index # NOQA
if 'raw_entries' in node:
@@ -330,8 +317,7 @@ def traverse_translatable_index(doctree):
yield node, entries
-def nested_parse_with_titles(state, content, node):
- # type: (Any, StringList, nodes.Node) -> str
+def nested_parse_with_titles(state: Any, content: StringList, node: Node) -> str:
"""Version of state.nested_parse() that allows titles and does not require
titles to have the same decoration as the calling document.
@@ -350,8 +336,7 @@ def nested_parse_with_titles(state, content, node):
state.memo.section_level = surrounding_section_level
-def clean_astext(node):
- # type: (nodes.Element) -> str
+def clean_astext(node: Element) -> str:
"""Like node.astext(), but ignore images."""
node = node.deepcopy()
for img in node.traverse(nodes.image):
@@ -361,8 +346,7 @@ def clean_astext(node):
return node.astext()
-def split_explicit_title(text):
- # type: (str) -> Tuple[bool, str, str]
+def split_explicit_title(text: str) -> Tuple[bool, str, str]:
"""Split role content into title and target, if given."""
match = explicit_title_re.match(text)
if match:
@@ -375,8 +359,7 @@ indextypes = [
]
-def process_index_entry(entry, targetid):
- # type: (str, str) -> List[Tuple[str, str, str, str, str]]
+def process_index_entry(entry: str, targetid: str) -> List[Tuple[str, str, str, str, str]]:
from sphinx.domains.python import pairindextypes
indexentries = [] # type: List[Tuple[str, str, str, str, str]]
@@ -414,8 +397,9 @@ def process_index_entry(entry, targetid):
return indexentries
-def inline_all_toctrees(builder, docnameset, docname, tree, colorfunc, traversed):
- # type: (Builder, Set[str], str, nodes.document, Callable, List[str]) -> nodes.document
+def inline_all_toctrees(builder: "Builder", docnameset: Set[str], docname: str,
+ tree: nodes.document, colorfunc: Callable, traversed: List[str]
+ ) -> nodes.document:
"""Inline all toctrees in the *tree*.
Record all docnames in *docnameset*, and output docnames with *colorfunc*.
@@ -447,8 +431,8 @@ def inline_all_toctrees(builder, docnameset, docname, tree, colorfunc, traversed
return tree
-def make_refnode(builder, fromdocname, todocname, targetid, child, title=None):
- # type: (Builder, str, str, str, nodes.Node, str) -> nodes.reference
+def make_refnode(builder: "Builder", fromdocname: str, todocname: str, targetid: str,
+ child: Node, title: str = None) -> nodes.reference:
"""Shortcut to create a reference node."""
node = nodes.reference('', '', internal=True)
if fromdocname == todocname and targetid:
@@ -465,19 +449,16 @@ def make_refnode(builder, fromdocname, todocname, targetid, child, title=None):
return node
-def set_source_info(directive, node):
- # type: (Any, nodes.Node) -> None
+def set_source_info(directive: Directive, node: Node) -> None:
node.source, node.line = \
directive.state_machine.get_source_and_line(directive.lineno)
-def set_role_source_info(inliner, lineno, node):
- # type: (Inliner, int, nodes.Node) -> None
+def set_role_source_info(inliner: Inliner, lineno: int, node: Node) -> None:
node.source, node.line = inliner.reporter.get_source_and_line(lineno) # type: ignore
-def copy_source_info(src, dst):
- # type: (nodes.Element, nodes.Element) -> None
+def copy_source_info(src: Element, dst: Element) -> None:
dst.source = get_node_source(src)
dst.line = get_node_line(src)
@@ -493,8 +474,7 @@ NON_SMARTQUOTABLE_PARENT_NODES = (
)
-def is_smartquotable(node):
- # type: (nodes.Node) -> bool
+def is_smartquotable(node: Node) -> bool:
"""Check the node is smart-quotable or not."""
if isinstance(node.parent, NON_SMARTQUOTABLE_PARENT_NODES):
return False
@@ -506,8 +486,7 @@ def is_smartquotable(node):
return True
-def process_only_nodes(document, tags):
- # type: (nodes.Node, Tags) -> None
+def process_only_nodes(document: Node, tags: "Tags") -> None:
"""Filter ``only`` nodes which does not match *tags*."""
for node in document.traverse(addnodes.only):
try:
@@ -530,8 +509,7 @@ def process_only_nodes(document, tags):
# monkey-patch Element.copy to copy the rawsource and line
# for docutils-0.14 or older versions.
-def _new_copy(self):
- # type: (nodes.Element) -> nodes.Element
+def _new_copy(self: Element) -> Element:
newnode = self.__class__(self.rawsource, **self.attributes)
if isinstance(self, nodes.Element):
newnode.source = self.source
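A tiny sketch of the annotated ``split_explicit_title()`` above, as used by roles; the sample strings are illustrative::

    from sphinx.util.nodes import split_explicit_title

    split_explicit_title('Title <target>')   # (True, 'Title', 'target')
    split_explicit_title('target')           # (False, 'target', 'target')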
diff --git a/sphinx/util/osutil.py b/sphinx/util/osutil.py
index fe98783e7..08c7349dd 100644
--- a/sphinx/util/osutil.py
+++ b/sphinx/util/osutil.py
@@ -18,14 +18,11 @@ import sys
import warnings
from io import StringIO
from os import path
+from typing import Any, Generator, Iterator, List, Tuple, Type
from sphinx.deprecation import RemovedInSphinx40Warning
from sphinx.testing.path import path as Path
-if False:
- # For type annotation
- from typing import Any, Iterator, List, Tuple # NOQA
-
# Errnos that we need.
EEXIST = getattr(errno, 'EEXIST', 0) # RemovedInSphinx40Warning
ENOENT = getattr(errno, 'ENOENT', 0) # RemovedInSphinx40Warning
@@ -40,19 +37,16 @@ EINVAL = getattr(errno, 'EINVAL', 0) # RemovedInSphinx40Warning
SEP = "/"
-def os_path(canonicalpath):
- # type: (str) -> str
+def os_path(canonicalpath: str) -> str:
return canonicalpath.replace(SEP, path.sep)
-def canon_path(nativepath):
- # type: (str) -> str
+def canon_path(nativepath: str) -> str:
"""Return path in OS-independent form"""
return nativepath.replace(path.sep, SEP)
-def relative_uri(base, to):
- # type: (str, str) -> str
+def relative_uri(base: str, to: str) -> str:
"""Return a relative URL from ``base`` to ``to``."""
if to.startswith(SEP):
return to
@@ -75,22 +69,19 @@ def relative_uri(base, to):
return ('..' + SEP) * (len(b2) - 1) + SEP.join(t2)
-def ensuredir(path):
- # type: (str) -> None
+def ensuredir(path: str) -> None:
"""Ensure that a path exists."""
os.makedirs(path, exist_ok=True)
-def walk(top, topdown=True, followlinks=False):
- # type: (str, bool, bool) -> Iterator[Tuple[str, List[str], List[str]]]
+def walk(top: str, topdown: bool = True, followlinks: bool = False) -> Iterator[Tuple[str, List[str], List[str]]]: # NOQA
warnings.warn('sphinx.util.osutil.walk() is deprecated for removal. '
'Please use os.walk() instead.',
RemovedInSphinx40Warning)
return os.walk(top, topdown=topdown, followlinks=followlinks)
-def mtimes_of_files(dirnames, suffix):
- # type: (List[str], str) -> Iterator[float]
+def mtimes_of_files(dirnames: List[str], suffix: str) -> Iterator[float]:
for dirname in dirnames:
for root, dirs, files in os.walk(dirname):
for sfile in files:
@@ -101,8 +92,7 @@ def mtimes_of_files(dirnames, suffix):
pass
-def movefile(source, dest):
- # type: (str, str) -> None
+def movefile(source: str, dest: str) -> None:
"""Move a file, removing the destination if it exists."""
if os.path.exists(dest):
try:
@@ -112,16 +102,14 @@ def movefile(source, dest):
os.rename(source, dest)
-def copytimes(source, dest):
- # type: (str, str) -> None
+def copytimes(source: str, dest: str) -> None:
"""Copy a file's modification times."""
st = os.stat(source)
if hasattr(os, 'utime'):
os.utime(dest, (st.st_atime, st.st_mtime))
-def copyfile(source, dest):
- # type: (str, str) -> None
+def copyfile(source: str, dest: str) -> None:
"""Copy a file and its modification times, if possible.
Note: ``copyfile`` skips copying if the file has not been changed"""
@@ -138,18 +126,15 @@ no_fn_re = re.compile(r'[^a-zA-Z0-9_-]')
project_suffix_re = re.compile(' Documentation$')
-def make_filename(string):
- # type: (str) -> str
+def make_filename(string: str) -> str:
return no_fn_re.sub('', string) or 'sphinx'
-def make_filename_from_project(project):
- # type: (str) -> str
+def make_filename_from_project(project: str) -> str:
return make_filename(project_suffix_re.sub('', project)).lower()
-def relpath(path, start=os.curdir):
- # type: (str, str) -> str
+def relpath(path: str, start: str = os.curdir) -> str:
"""Return a relative filepath to *path* either from the current directory or
from an optional *start* directory.
@@ -166,8 +151,7 @@ safe_relpath = relpath # for compatibility
fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
-def abspath(pathdir):
- # type: (str) -> str
+def abspath(pathdir: str) -> str:
if isinstance(pathdir, Path):
return pathdir.abspath()
else:
@@ -182,8 +166,7 @@ def abspath(pathdir):
return pathdir
-def getcwd():
- # type: () -> str
+def getcwd() -> str:
warnings.warn('sphinx.util.osutil.getcwd() is deprecated. '
'Please use os.getcwd() instead.',
RemovedInSphinx40Warning)
@@ -191,8 +174,7 @@ def getcwd():
@contextlib.contextmanager
-def cd(target_dir):
- # type: (str) -> Iterator[None]
+def cd(target_dir: str) -> Generator[None, None, None]:
cwd = os.getcwd()
try:
os.chdir(target_dir)
@@ -213,19 +195,16 @@ class FileAvoidWrite:
Objects can be used as context managers.
"""
- def __init__(self, path):
- # type: (str) -> None
+ def __init__(self, path: str) -> None:
self._path = path
self._io = None # type: StringIO
- def write(self, data):
- # type: (str) -> None
+ def write(self, data: str) -> None:
if not self._io:
self._io = StringIO()
self._io.write(data)
- def close(self):
- # type: () -> None
+ def close(self) -> None:
"""Stop accepting writes and write file, if needed."""
if not self._io:
raise Exception('FileAvoidWrite does not support empty files.')
@@ -244,16 +223,14 @@ class FileAvoidWrite:
with open(self._path, 'w') as f:
f.write(buf)
- def __enter__(self):
- # type: () -> FileAvoidWrite
+ def __enter__(self) -> "FileAvoidWrite":
return self
- def __exit__(self, type, value, traceback):
- # type: (str, str, str) -> None
+ def __exit__(self, exc_type: Type[Exception], exc_value: Exception, traceback: Any) -> bool: # NOQA
self.close()
+ return True
- def __getattr__(self, name):
- # type: (str) -> Any
+ def __getattr__(self, name: str) -> Any:
# Proxy to _io instance.
if not self._io:
raise Exception('Must write to FileAvoidWrite before other '
@@ -262,8 +239,7 @@ class FileAvoidWrite:
return getattr(self._io, name)
-def rmtree(path):
- # type: (str) -> None
+def rmtree(path: str) -> None:
if os.path.isdir(path):
shutil.rmtree(path)
else:
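A short sketch of the annotated osutil helpers above; the path passed to ``FileAvoidWrite`` is a placeholder::

    from sphinx.util.osutil import FileAvoidWrite, relative_uri

    relative_uri('usage/quickstart.html', 'genindex.html')   # '../genindex.html'

    with FileAvoidWrite('/tmp/demo.txt') as f:   # only rewritten on disk if the content changed
        f.write('generated content\n')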
diff --git a/sphinx/util/parallel.py b/sphinx/util/parallel.py
index 013dc3071..2d519a8d3 100644
--- a/sphinx/util/parallel.py
+++ b/sphinx/util/parallel.py
@@ -12,6 +12,7 @@ import os
import time
import traceback
from math import sqrt
+from typing import Any, Callable, Dict, List, Sequence
try:
import multiprocessing
@@ -21,10 +22,6 @@ except ImportError:
from sphinx.errors import SphinxParallelError
from sphinx.util import logging
-if False:
- # For type annotation
- from typing import Any, Callable, Dict, List, Sequence # NOQA
-
logger = logging.getLogger(__name__)
@@ -35,12 +32,10 @@ parallel_available = multiprocessing and (os.name == 'posix')
class SerialTasks:
"""Has the same interface as ParallelTasks, but executes tasks directly."""
- def __init__(self, nproc=1):
- # type: (int) -> None
+ def __init__(self, nproc: int = 1) -> None:
pass
- def add_task(self, task_func, arg=None, result_func=None):
- # type: (Callable, Any, Callable) -> None
+ def add_task(self, task_func: Callable, arg: Any = None, result_func: Callable = None) -> None: # NOQA
if arg is not None:
res = task_func(arg)
else:
@@ -48,16 +43,14 @@ class SerialTasks:
if result_func:
result_func(res)
- def join(self):
- # type: () -> None
+ def join(self) -> None:
pass
class ParallelTasks:
"""Executes *nproc* tasks in parallel after forking."""
- def __init__(self, nproc):
- # type: (int) -> None
+ def __init__(self, nproc: int) -> None:
self.nproc = nproc
# (optional) function performed by each task on the result of main task
self._result_funcs = {} # type: Dict[int, Callable]
@@ -74,8 +67,7 @@ class ParallelTasks:
# task number of each subprocess
self._taskid = 0
- def _process(self, pipe, func, arg):
- # type: (Any, Callable, Any) -> None
+ def _process(self, pipe: Any, func: Callable, arg: Any) -> None:
try:
collector = logging.LogCollector()
with collector.collect():
@@ -91,8 +83,7 @@ class ParallelTasks:
logging.convert_serializable(collector.logs)
pipe.send((failed, collector.logs, ret))
- def add_task(self, task_func, arg=None, result_func=None):
- # type: (Callable, Any, Callable) -> None
+ def add_task(self, task_func: Callable, arg: Any = None, result_func: Callable = None) -> None: # NOQA
tid = self._taskid
self._taskid += 1
self._result_funcs[tid] = result_func or (lambda arg, result: None)
@@ -104,13 +95,11 @@ class ParallelTasks:
self._precvsWaiting[tid] = precv
self._join_one()
- def join(self):
- # type: () -> None
+ def join(self) -> None:
while self._pworking:
self._join_one()
- def _join_one(self):
- # type: () -> None
+ def _join_one(self) -> None:
for tid, pipe in self._precvs.items():
if pipe.poll():
exc, logs, result = pipe.recv()
@@ -132,8 +121,7 @@ class ParallelTasks:
self._pworking += 1
-def make_chunks(arguments, nproc, maxbatch=10):
- # type: (Sequence[str], int, int) -> List[Any]
+def make_chunks(arguments: Sequence[str], nproc: int, maxbatch: int = 10) -> List[Any]:
# determine how many documents to read in one go
nargs = len(arguments)
chunksize = nargs // nproc
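A minimal sketch of the annotated task interface above, using the serial fallback so it runs everywhere; the task function and arguments are illustrative::

    from sphinx.util.parallel import SerialTasks, make_chunks

    tasks = SerialTasks()
    tasks.add_task(len, 'hello', result_func=print)   # executes immediately and prints 5
    tasks.join()

    make_chunks(['a', 'b', 'c', 'd'], nproc=2)        # e.g. [['a', 'b'], ['c', 'd']]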
diff --git a/sphinx/util/png.py b/sphinx/util/png.py
index 2fb97a901..911547db6 100644
--- a/sphinx/util/png.py
+++ b/sphinx/util/png.py
@@ -20,8 +20,7 @@ DEPTH_CHUNK_START = b'tEXtDepth\x00'
IEND_CHUNK = b'\x00\x00\x00\x00IEND\xAE\x42\x60\x82'
-def read_png_depth(filename):
- # type: (str) -> int
+def read_png_depth(filename: str) -> int:
"""Read the special tEXt chunk indicating the depth from a PNG file."""
with open(filename, 'rb') as f:
f.seek(- (LEN_IEND + LEN_DEPTH), 2)
@@ -33,8 +32,7 @@ def read_png_depth(filename):
return struct.unpack('!i', depthchunk[14:18])[0]
-def write_png_depth(filename, depth):
- # type: (str, int) -> None
+def write_png_depth(filename: str, depth: int) -> None:
"""Write the special tEXt chunk indicating the depth to a PNG file.
The chunk is placed immediately before the special IEND chunk.
diff --git a/sphinx/util/pycompat.py b/sphinx/util/pycompat.py
index dca2849c2..06d3bcc2c 100644
--- a/sphinx/util/pycompat.py
+++ b/sphinx/util/pycompat.py
@@ -13,6 +13,7 @@ import io
import sys
import textwrap
import warnings
+from typing import Any, Callable
from sphinx.deprecation import RemovedInSphinx40Warning, deprecated_alias
from sphinx.locale import __
@@ -20,10 +21,6 @@ from sphinx.util import logging
from sphinx.util.console import terminal_safe
from sphinx.util.typing import NoneType
-if False:
- # For type annotation
- from typing import Any, Callable # NOQA
-
logger = logging.getLogger(__name__)
@@ -33,8 +30,7 @@ logger = logging.getLogger(__name__)
# convert_with_2to3():
# support for running 2to3 over config files
-def convert_with_2to3(filepath):
- # type: (str) -> str
+def convert_with_2to3(filepath: str) -> str:
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
from lib2to3.pgen2.parse import ParseError
fixers = get_fixers_from_package('lib2to3.fixes')
@@ -62,8 +58,7 @@ class UnicodeMixin:
return self.__unicode__()
-def execfile_(filepath, _globals, open=open):
- # type: (str, Any, Callable) -> None
+def execfile_(filepath: str, _globals: Any, open: Callable = open) -> None:
from sphinx.util.osutil import fs_encoding
with open(filepath, 'rb') as f:
source = f.read()
diff --git a/sphinx/util/requests.py b/sphinx/util/requests.py
index dee52e281..a279b4eb4 100644
--- a/sphinx/util/requests.py
+++ b/sphinx/util/requests.py
@@ -10,11 +10,14 @@
import warnings
from contextlib import contextmanager
+from typing import Generator, Union
from urllib.parse import urlsplit
import pkg_resources
import requests
+from sphinx.config import Config
+
try:
from requests.packages.urllib3.exceptions import SSLError
except ImportError:
@@ -54,17 +57,12 @@ else:
pkg_resources.VersionConflict):
pass # ignored
-if False:
- # For type annotation
- from typing import Any, Generator, Union # NOQA
- from sphinx.config import Config # NOQA
useragent_header = [('User-Agent',
'Mozilla/5.0 (X11; Linux x86_64; rv:25.0) Gecko/20100101 Firefox/25.0')]
-def is_ssl_error(exc):
- # type: (Exception) -> bool
+def is_ssl_error(exc: Exception) -> bool:
"""Check an exception is SSLError."""
if isinstance(exc, SSLError):
return True
@@ -77,8 +75,7 @@ def is_ssl_error(exc):
@contextmanager
-def ignore_insecure_warning(**kwargs):
- # type: (Any) -> Generator
+def ignore_insecure_warning(**kwargs) -> Generator[None, None, None]:
with warnings.catch_warnings():
if not kwargs.get('verify') and InsecureRequestWarning:
# ignore InsecureRequestWarning if verify=False
@@ -86,8 +83,7 @@ def ignore_insecure_warning(**kwargs):
yield
-def _get_tls_cacert(url, config):
- # type: (str, Config) -> Union[str, bool]
+def _get_tls_cacert(url: str, config: Config) -> Union[str, bool]:
"""Get additional CA cert for a specific URL.
This also returns ``False`` if verification is disabled.
@@ -109,8 +105,7 @@ def _get_tls_cacert(url, config):
return certs.get(hostname, True)
-def get(url, **kwargs):
- # type: (str, Any) -> requests.Response
+def get(url: str, **kwargs) -> requests.Response:
"""Sends a GET request like requests.get().
This sets up User-Agent header and TLS verification automatically."""
@@ -123,8 +118,7 @@ def get(url, **kwargs):
return requests.get(url, **kwargs)
-def head(url, **kwargs):
- # type: (str, Any) -> requests.Response
+def head(url: str, **kwargs) -> requests.Response:
"""Sends a HEAD request like requests.head().
This sets up User-Agent header and TLS verification automatically."""
diff --git a/sphinx/util/rst.py b/sphinx/util/rst.py
index c897b075a..0824e413f 100644
--- a/sphinx/util/rst.py
+++ b/sphinx/util/rst.py
@@ -11,23 +11,20 @@
import re
from collections import defaultdict
from contextlib import contextmanager
+from typing import Dict, Generator
from unicodedata import east_asian_width
from docutils.parsers.rst import roles
from docutils.parsers.rst.languages import en as english
+from docutils.statemachine import StringList
from docutils.utils import Reporter
+from jinja2 import Environment
from jinja2 import environmentfilter
from sphinx.locale import __
from sphinx.util import docutils
from sphinx.util import logging
-if False:
- # For type annotation
- from typing import Callable, Dict, Generator # NOQA
- from docutils.statemachine import StringList # NOQA
- from jinja2 import Environment # NOQA
-
logger = logging.getLogger(__name__)
docinfo_re = re.compile(':\\w+:.*?')
@@ -40,18 +37,15 @@ WIDECHARS = defaultdict(lambda: "WF") # type: Dict[str, str]
WIDECHARS["ja"] = "WFA" # In Japanese, Ambiguous characters also have double width
-def escape(text):
- # type: (str) -> str
+def escape(text: str) -> str:
text = symbols_re.sub(r'\\\1', text)
text = re.sub(r'^\.', r'\.', text) # escape a dot at top
return text
-def textwidth(text, widechars='WF'):
- # type: (str, str) -> int
+def textwidth(text: str, widechars: str = 'WF') -> int:
"""Get width of text."""
- def charwidth(char, widechars):
- # type: (str, str) -> int
+ def charwidth(char: str, widechars: str) -> int:
if east_asian_width(char) in widechars:
return 2
else:
@@ -61,8 +55,7 @@ def textwidth(text, widechars='WF'):
@environmentfilter
-def heading(env, text, level=1):
- # type: (Environment, str, int) -> str
+def heading(env: Environment, text: str, level: int = 1) -> str:
"""Create a heading for *level*."""
assert level <= 3
width = textwidth(text, WIDECHARS[env.language]) # type: ignore
@@ -71,8 +64,7 @@ def heading(env, text, level=1):
@contextmanager
-def default_role(docname, name):
- # type: (str, str) -> Generator
+def default_role(docname: str, name: str) -> Generator[None, None, None]:
if name:
dummy_reporter = Reporter('', 4, 4)
role_fn, _ = roles.role(name, english, 0, dummy_reporter)
@@ -86,8 +78,7 @@ def default_role(docname, name):
docutils.unregister_role('')
-def prepend_prolog(content, prolog):
- # type: (StringList, str) -> None
+def prepend_prolog(content: StringList, prolog: str) -> None:
"""Prepend a string to content body as prolog."""
if prolog:
pos = 0
@@ -109,8 +100,7 @@ def prepend_prolog(content, prolog):
content.insert(pos + lineno + 1, '', '<generated>', 0)
-def append_epilog(content, epilog):
- # type: (StringList, str) -> None
+def append_epilog(content: StringList, epilog: str) -> None:
"""Append a string to content body as epilog."""
if epilog:
content.append('', '<generated>', 0)
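A brief sketch of the annotated reST helpers above; the sample strings are illustrative::

    from sphinx.util.rst import escape, textwidth

    escape('*emphasis*')       # backslash-escapes reST markup characters
    textwidth('sphinx')        # 6
    textwidth('ドキュメント')   # 12: wide characters count as two columns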
diff --git a/sphinx/util/smartypants.py b/sphinx/util/smartypants.py
index 7450e07b8..47f8b59b2 100644
--- a/sphinx/util/smartypants.py
+++ b/sphinx/util/smartypants.py
@@ -26,14 +26,12 @@
"""
import re
+from typing import Generator, Iterable, Tuple
from docutils.utils import smartquotes
from sphinx.util.docutils import __version_info__ as docutils_version
-if False: # For type annotation
- from typing import Generator, Iterable, Tuple # NOQA
-
langquotes = {'af': '“”‘’',
'af-x-altquot': '„”‚’',
@@ -125,8 +123,7 @@ langquotes = {'af': '“”‘’',
}
-def educateQuotes(text, language='en'):
- # type: (str, str) -> str
+def educateQuotes(text: str, language: str = 'en') -> str:
"""
Parameter: - text string (unicode or bytes).
- language (`BCP 47` language tag.)
@@ -240,8 +237,10 @@ def educateQuotes(text, language='en'):
return text
-def educate_tokens(text_tokens, attr=smartquotes.default_smartypants_attr, language='en'):
- # type: (Iterable[Tuple[str, str]], str, str) -> Generator[str, None, None]
+def educate_tokens(text_tokens: Iterable[Tuple[str, str]],
+ attr: str = smartquotes.default_smartypants_attr,
+ language: str = 'en'
+ ) -> Generator[str, None, None]:
"""Return iterator that "educates" the items of `text_tokens`.
This is modified to intercept the ``attr='2'`` as it was used by the
diff --git a/sphinx/util/stemmer/__init__.py b/sphinx/util/stemmer/__init__.py
index 047aac708..bda5d2bc2 100644
--- a/sphinx/util/stemmer/__init__.py
+++ b/sphinx/util/stemmer/__init__.py
@@ -18,18 +18,15 @@ except ImportError:
class BaseStemmer:
- def stem(self, word):
- # type: (str) -> str
+ def stem(self, word: str) -> str:
raise NotImplementedError()
class PyStemmer(BaseStemmer):
- def __init__(self):
- # type: () -> None
+ def __init__(self) -> None:
self.stemmer = _PyStemmer('porter')
- def stem(self, word):
- # type: (str) -> str
+ def stem(self, word: str) -> str:
return self.stemmer.stemWord(word)
@@ -37,13 +34,11 @@ class StandardStemmer(PorterStemmer, BaseStemmer): # type: ignore
"""All those porter stemmer implementations look hideous;
make at least the stem method nicer.
"""
- def stem(self, word): # type: ignore
- # type: (str) -> str
+ def stem(self, word: str) -> str: # type: ignore
return super().stem(word, 0, len(word) - 1)
-def get_stemmer():
- # type: () -> BaseStemmer
+def get_stemmer() -> BaseStemmer:
if PYSTEMMER:
return PyStemmer()
else:
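A minimal sketch of the annotated stemmer factory above; the stemmed word is illustrative::

    from sphinx.util.stemmer import get_stemmer

    stemmer = get_stemmer()     # PyStemmer when the C extension is available, otherwise StandardStemmer
    stemmer.stem('building')    # 'build'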
diff --git a/sphinx/util/stemmer/porter.py b/sphinx/util/stemmer/porter.py
index 51c132c2c..1f979624a 100644
--- a/sphinx/util/stemmer/porter.py
+++ b/sphinx/util/stemmer/porter.py
@@ -30,8 +30,7 @@
class PorterStemmer:
- def __init__(self):
- # type: () -> None
+ def __init__(self) -> None:
"""The main part of the stemming algorithm starts here.
b is a buffer holding a word to be stemmed. The letters are in b[k0],
b[k0+1] ... ending at b[k]. In fact k0 = 0 in this demo program. k is
@@ -47,8 +46,7 @@ class PorterStemmer:
self.k0 = 0
self.j = 0 # j is a general offset into the string
- def cons(self, i):
- # type: (int) -> int
+ def cons(self, i: int) -> int:
"""cons(i) is TRUE <=> b[i] is a consonant."""
if self.b[i] == 'a' or self.b[i] == 'e' or self.b[i] == 'i' \
or self.b[i] == 'o' or self.b[i] == 'u':
@@ -60,8 +58,7 @@ class PorterStemmer:
return (not self.cons(i - 1))
return 1
- def m(self):
- # type: () -> int
+ def m(self) -> int:
"""m() measures the number of consonant sequences between k0 and j.
if c is a consonant sequence and v a vowel sequence, and <..>
indicates arbitrary presence,
@@ -98,16 +95,14 @@ class PorterStemmer:
i = i + 1
i = i + 1
- def vowelinstem(self):
- # type: () -> int
+ def vowelinstem(self) -> int:
"""vowelinstem() is TRUE <=> k0,...j contains a vowel"""
for i in range(self.k0, self.j + 1):
if not self.cons(i):
return 1
return 0
- def doublec(self, j):
- # type: (int) -> int
+ def doublec(self, j: int) -> int:
"""doublec(j) is TRUE <=> j,(j-1) contain a double consonant."""
if j < (self.k0 + 1):
return 0
@@ -115,8 +110,7 @@ class PorterStemmer:
return 0
return self.cons(j)
- def cvc(self, i):
- # type: (int) -> int
+ def cvc(self, i: int) -> int:
"""cvc(i) is TRUE <=> i-2,i-1,i has the form
consonant - vowel - consonant
and also if the second c is not w,x or y. this is used when trying to
@@ -133,8 +127,7 @@ class PorterStemmer:
return 0
return 1
- def ends(self, s):
- # type: (str) -> int
+ def ends(self, s: str) -> int:
"""ends(s) is TRUE <=> k0,...k ends with the string s."""
length = len(s)
if s[length - 1] != self.b[self.k]: # tiny speed-up
@@ -146,22 +139,19 @@ class PorterStemmer:
self.j = self.k - length
return 1
- def setto(self, s):
- # type: (str) -> None
+ def setto(self, s: str) -> None:
"""setto(s) sets (j+1),...k to the characters in the string s,
readjusting k."""
length = len(s)
self.b = self.b[:self.j + 1] + s + self.b[self.j + length + 1:]
self.k = self.j + length
- def r(self, s):
- # type: (str) -> None
+ def r(self, s: str) -> None:
"""r(s) is used further down."""
if self.m() > 0:
self.setto(s)
- def step1ab(self):
- # type: () -> None
+ def step1ab(self) -> None:
"""step1ab() gets rid of plurals and -ed or -ing. e.g.
caresses -> caress
@@ -208,15 +198,13 @@ class PorterStemmer:
elif (self.m() == 1 and self.cvc(self.k)):
self.setto("e")
- def step1c(self):
- # type: () -> None
+ def step1c(self) -> None:
"""step1c() turns terminal y to i when there is another vowel in
the stem."""
if (self.ends("y") and self.vowelinstem()):
self.b = self.b[:self.k] + 'i' + self.b[self.k + 1:]
- def step2(self):
- # type: () -> None
+ def step2(self) -> None:
"""step2() maps double suffices to single ones.
so -ization ( = -ize plus -ation) maps to -ize etc. note that the
string before the suffix must give m() > 0.
@@ -275,8 +263,7 @@ class PorterStemmer:
self.r("log")
# To match the published algorithm, delete this phrase
- def step3(self):
- # type: () -> None
+ def step3(self) -> None:
"""step3() dels with -ic-, -full, -ness etc. similar strategy
to step2."""
if self.b[self.k] == 'e':
@@ -298,8 +285,7 @@ class PorterStemmer:
if self.ends("ness"):
self.r("")
- def step4(self):
- # type: () -> None
+ def step4(self) -> None:
"""step4() takes off -ant, -ence etc., in context <c>vcvc<v>."""
if self.b[self.k - 1] == 'a':
if self.ends("al"):
@@ -382,8 +368,7 @@ class PorterStemmer:
if self.m() > 1:
self.k = self.j
- def step5(self):
- # type: () -> None
+ def step5(self) -> None:
"""step5() removes a final -e if m() > 1, and changes -ll to -l if
m() > 1.
"""
@@ -395,8 +380,7 @@ class PorterStemmer:
if self.b[self.k] == 'l' and self.doublec(self.k) and self.m() > 1:
self.k = self.k - 1
- def stem(self, p, i, j):
- # type: (str, int, int) -> str
+ def stem(self, p: str, i: int, j: int) -> str:
"""In stem(p,i,j), p is a char pointer, and the string to be stemmed
is from p[i] to p[j] inclusive. Typically i is zero and j is the
offset to the last character of a string, (p[j+1] == '\0'). The
diff --git a/sphinx/util/tags.py b/sphinx/util/tags.py
index 193e7ef1e..7e7ba4661 100644
--- a/sphinx/util/tags.py
+++ b/sphinx/util/tags.py
@@ -6,26 +6,23 @@
:license: BSD, see LICENSE for details.
"""
-# (ab)use the Jinja parser for parsing our boolean expressions
+from typing import Iterator, List
+
from jinja2 import nodes
from jinja2.environment import Environment
+from jinja2.nodes import Node
from jinja2.parser import Parser
env = Environment()
-if False:
- # For type annotation
- from typing import Iterator, List # NOQA
-
class BooleanParser(Parser):
"""
Only allow condition exprs and/or/not operations.
"""
- def parse_compare(self):
- # type: () -> nodes.Node
- node = None # type: nodes.Node
+ def parse_compare(self) -> Node:
+ node = None # type: Node
token = self.stream.current
if token.type == 'name':
if token.value in ('true', 'false', 'True', 'False'):
@@ -46,38 +43,31 @@ class BooleanParser(Parser):
class Tags:
- def __init__(self, tags=None):
- # type: (List[str]) -> None
+ def __init__(self, tags: List[str] = None) -> None:
self.tags = dict.fromkeys(tags or [], True)
- def has(self, tag):
- # type: (str) -> bool
+ def has(self, tag: str) -> bool:
return tag in self.tags
__contains__ = has
- def __iter__(self):
- # type: () -> Iterator[str]
+ def __iter__(self) -> Iterator[str]:
return iter(self.tags)
- def add(self, tag):
- # type: (str) -> None
+ def add(self, tag: str) -> None:
self.tags[tag] = True
- def remove(self, tag):
- # type: (str) -> None
+ def remove(self, tag: str) -> None:
self.tags.pop(tag, None)
- def eval_condition(self, condition):
- # type: (str) -> bool
+ def eval_condition(self, condition: str) -> bool:
# exceptions are handled by the caller
parser = BooleanParser(env, condition, state='variable')
expr = parser.parse_expression()
if not parser.stream.eos:
raise ValueError('chunk after expression')
- def eval_node(node):
- # type: (nodes.Node) -> bool
+ def eval_node(node: Node) -> bool:
if isinstance(node, nodes.CondExpr):
if eval_node(node.test): # type: ignore
return eval_node(node.expr1) # type: ignore
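A small sketch of the Tags API shown above (import path assumed to be sphinx.util.tags):

    from sphinx.util.tags import Tags  # assumed import path

    tags = Tags(['html', 'draft'])
    'draft' in tags                            # True; __contains__ is an alias for has()
    tags.remove('draft')
    tags.eval_condition('html and not draft')  # True once 'draft' has been removed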
diff --git a/sphinx/util/template.py b/sphinx/util/template.py
index b521c5c79..636767d41 100644
--- a/sphinx/util/template.py
+++ b/sphinx/util/template.py
@@ -9,7 +9,9 @@
"""
import os
+from typing import Dict
+from jinja2.loaders import BaseLoader
from jinja2.sandbox import SandboxedEnvironment
from sphinx import package_dir
@@ -17,58 +19,45 @@ from sphinx.jinja2glue import SphinxFileSystemLoader
from sphinx.locale import get_translator
from sphinx.util import rst, texescape
-if False:
- # For type annotation
- from typing import Dict # NOQA
- from jinja2.loaders import BaseLoader # NOQA
-
class BaseRenderer:
- def __init__(self, loader=None):
- # type: (BaseLoader) -> None
+ def __init__(self, loader: BaseLoader = None) -> None:
self.env = SandboxedEnvironment(loader=loader, extensions=['jinja2.ext.i18n'])
self.env.filters['repr'] = repr
self.env.install_gettext_translations(get_translator()) # type: ignore
- def render(self, template_name, context):
- # type: (str, Dict) -> str
+ def render(self, template_name: str, context: Dict) -> str:
return self.env.get_template(template_name).render(context)
- def render_string(self, source, context):
- # type: (str, Dict) -> str
+ def render_string(self, source: str, context: Dict) -> str:
return self.env.from_string(source).render(context)
class FileRenderer(BaseRenderer):
- def __init__(self, search_path):
- # type: (str) -> None
+ def __init__(self, search_path: str) -> None:
loader = SphinxFileSystemLoader(search_path)
super().__init__(loader)
@classmethod
- def render_from_file(cls, filename, context):
- # type: (str, Dict) -> str
+ def render_from_file(cls, filename: str, context: Dict) -> str:
dirname = os.path.dirname(filename)
basename = os.path.basename(filename)
return cls(dirname).render(basename, context)
class SphinxRenderer(FileRenderer):
- def __init__(self, template_path=None):
- # type: (str) -> None
+ def __init__(self, template_path: str = None) -> None:
if template_path is None:
template_path = os.path.join(package_dir, 'templates')
super().__init__(template_path)
@classmethod
- def render_from_file(cls, filename, context):
- # type: (str, Dict) -> str
+ def render_from_file(cls, filename: str, context: Dict) -> str:
return FileRenderer.render_from_file(filename, context)
class LaTeXRenderer(SphinxRenderer):
- def __init__(self, template_path=None):
- # type: (str) -> None
+ def __init__(self, template_path: str = None) -> None:
if template_path is None:
template_path = os.path.join(package_dir, 'templates', 'latex')
super().__init__(template_path)
@@ -87,8 +76,7 @@ class LaTeXRenderer(SphinxRenderer):
class ReSTRenderer(SphinxRenderer):
- def __init__(self, template_path=None, language=None):
- # type: (str, str) -> None
+ def __init__(self, template_path: str = None, language: str = None) -> None:
super().__init__(template_path)
# add language to environment
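A rough usage sketch of the renderers above (import path assumed to be sphinx.util.template):

    from sphinx.util.template import SphinxRenderer  # assumed import path

    renderer = SphinxRenderer()  # no path given, so it falls back to the bundled sphinx/templates
    renderer.render_string('Project: {{ project }}', {'project': 'Sphinx'})
    # -> 'Project: Sphinx'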
diff --git a/sphinx/util/texescape.py b/sphinx/util/texescape.py
index cc8c0fe1e..408ec1253 100644
--- a/sphinx/util/texescape.py
+++ b/sphinx/util/texescape.py
@@ -9,10 +9,7 @@
"""
import re
-
-if False:
- # For type annotation
- from typing import Dict # NOQA
+from typing import Dict
tex_replacements = [
# map TeX special chars
@@ -78,20 +75,17 @@ tex_replace_map = {}
tex_hl_escape_map_new = {}
-def escape(s):
- # type: (str) -> str
+def escape(s: str) -> str:
"""Escape text for LaTeX output."""
return s.translate(tex_escape_map)
-def escape_abbr(text):
- # type: (str) -> str
+def escape_abbr(text: str) -> str:
"""Adjust spacing after abbreviations. Works with @ letter or other."""
return re.sub(r'\.(?=\s|$)', r'.\@{}', text)
-def init():
- # type: () -> None
+def init() -> None:
for a, b in tex_replacements:
tex_escape_map[ord(a)] = b
tex_replace_map[ord(a)] = '_'
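Typical use, sketched; init() has to populate the maps before escape() substitutes anything:

    from sphinx.util import texescape  # assumed import path

    texescape.init()               # fills tex_escape_map from tex_replacements
    texescape.escape('100% done')  # e.g. '100\\% done', per the tex_replacements table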
diff --git a/sphinx/util/typing.py b/sphinx/util/typing.py
index 78e8fe61f..77724d38b 100644
--- a/sphinx/util/typing.py
+++ b/sphinx/util/typing.py
@@ -23,6 +23,9 @@ TextlikeNode = Union[nodes.Text, nodes.TextElement]
# type of None
NoneType = type(None)
+# path matcher
+PathMatcher = Callable[[str], bool]
+
# common role functions
RoleFunction = Callable[[str, str, str, int, Inliner, Dict, List[str]],
Tuple[List[nodes.Node], List[nodes.system_message]]]
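The new alias only names a common callback shape; a tiny illustration:

    from sphinx.util.typing import PathMatcher  # assumed import path for the new alias

    def is_rst(path: str) -> bool:
        return path.endswith('.rst')

    matcher: PathMatcher = is_rst  # any str -> bool callable fits the alias
    matcher('index.rst')           # True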
diff --git a/tests/roots/test-ext-autodoc/target/__init__.py b/tests/roots/test-ext-autodoc/target/__init__.py
index c60bab20b..f6970a36c 100644
--- a/tests/roots/test-ext-autodoc/target/__init__.py
+++ b/tests/roots/test-ext-autodoc/target/__init__.py
@@ -22,31 +22,6 @@ class CustomEx(Exception):
"""Exception method."""
-class CustomDataDescriptor(object):
- """Descriptor class docstring."""
-
- def __init__(self, doc):
- self.__doc__ = doc
-
- def __get__(self, obj, type=None):
- if obj is None:
- return self
- return 42
-
- def meth(self):
- """Function."""
- return "The Answer"
-
-
-class CustomDataDescriptorMeta(type):
- """Descriptor metaclass docstring."""
-
-
-class CustomDataDescriptor2(CustomDataDescriptor):
- """Descriptor class with custom metaclass docstring."""
- __metaclass__ = CustomDataDescriptorMeta
-
-
def _funky_classmethod(name, b, c, d, docstring=None):
"""Generates a classmethod for a class from a template by filling out
some arguments."""
@@ -59,30 +34,9 @@ def _funky_classmethod(name, b, c, d, docstring=None):
return classmethod(function)
-class Base(object):
- def inheritedmeth(self):
- """Inherited function."""
-
- @classmethod
- def inheritedclassmeth(cls):
- """Inherited class method."""
-
- @staticmethod
- def inheritedstaticmeth(cls):
- """Inherited static method."""
-
-
-class Derived(Base):
- def inheritedmeth(self):
- # no docstring here
- pass
-
-
-class Class(Base):
+class Class(object):
"""Class to document."""
- descr = CustomDataDescriptor("Descriptor instance docstring.")
-
def meth(self):
"""Function."""
@@ -101,10 +55,6 @@ class Class(Base):
#: should be documented -- süß
attr = 'bar'
- @property
- def prop(self):
- """Property."""
-
docattr = 'baz'
"""should likewise be documented -- süß"""
diff --git a/tests/roots/test-ext-autodoc/target/autoclass_content.py b/tests/roots/test-ext-autodoc/target/autoclass_content.py
new file mode 100644
index 000000000..8924ab09b
--- /dev/null
+++ b/tests/roots/test-ext-autodoc/target/autoclass_content.py
@@ -0,0 +1,35 @@
+class A:
+ """A class having no __init__, no __new__"""
+
+
+class B:
+ """A class having __init__(no docstring), no __new__"""
+ def __init__(self):
+ pass
+
+
+class C:
+ """A class having __init__, no __new__"""
+ def __init__(self):
+ """__init__ docstring"""
+
+
+class D:
+ """A class having no __init__, __new__(no docstring)"""
+ def __new__(cls):
+ pass
+
+
+class E:
+ """A class having no __init__, __new__"""
+ def __new__(cls):
+ """__new__ docstring"""
+
+
+class F:
+ """A class having both __init__ and __new__"""
+ def __init__(self):
+ """__init__ docstring"""
+
+ def __new__(cls):
+ """__new__ docstring"""
diff --git a/tests/roots/test-ext-autodoc/target/descriptor.py b/tests/roots/test-ext-autodoc/target/descriptor.py
new file mode 100644
index 000000000..63d179b65
--- /dev/null
+++ b/tests/roots/test-ext-autodoc/target/descriptor.py
@@ -0,0 +1,31 @@
+class CustomDataDescriptor(object):
+ """Descriptor class docstring."""
+
+ def __init__(self, doc):
+ self.__doc__ = doc
+
+ def __get__(self, obj, type=None):
+ if obj is None:
+ return self
+ return 42
+
+ def meth(self):
+ """Function."""
+ return "The Answer"
+
+
+class CustomDataDescriptorMeta(type):
+ """Descriptor metaclass docstring."""
+
+
+class CustomDataDescriptor2(CustomDataDescriptor):
+ """Descriptor class with custom metaclass docstring."""
+ __metaclass__ = CustomDataDescriptorMeta
+
+
+class Class:
+ descr = CustomDataDescriptor("Descriptor instance docstring.")
+
+ @property
+ def prop(self):
+ """Property."""
diff --git a/tests/roots/test-ext-autodoc/target/docstring_signature.py b/tests/roots/test-ext-autodoc/target/docstring_signature.py
new file mode 100644
index 000000000..2e5499770
--- /dev/null
+++ b/tests/roots/test-ext-autodoc/target/docstring_signature.py
@@ -0,0 +1,19 @@
+class A:
+ """A(foo, bar)"""
+
+
+class B:
+ """B(foo, bar)"""
+ def __init__(self):
+ """B(foo, bar, baz)"""
+
+
+class C:
+ """C(foo, bar)"""
+ def __new__(cls):
+ """C(foo, bar, baz)"""
+
+
+class D:
+ def __init__(self):
+ """D(foo, bar, baz)"""
diff --git a/tests/roots/test-ext-autodoc/target/inheritance.py b/tests/roots/test-ext-autodoc/target/inheritance.py
new file mode 100644
index 000000000..3a5fc0711
--- /dev/null
+++ b/tests/roots/test-ext-autodoc/target/inheritance.py
@@ -0,0 +1,19 @@
+class Base(object):
+ def inheritedmeth(self):
+ """Inherited function."""
+
+ @classmethod
+ def inheritedclassmeth(cls):
+ """Inherited class method."""
+
+ @staticmethod
+ def inheritedstaticmeth(cls):
+ """Inherited static method."""
+
+
+class Derived(Base):
+ def inheritedmeth(self):
+ # no docstring here
+ pass
+
+
diff --git a/tests/roots/test-ext-autodoc/target/process_docstring.py b/tests/roots/test-ext-autodoc/target/process_docstring.py
new file mode 100644
index 000000000..6005943b6
--- /dev/null
+++ b/tests/roots/test-ext-autodoc/target/process_docstring.py
@@ -0,0 +1,8 @@
+def func():
+ """
+ first line
+ ---
+ second line
+ ---
+ third line
+ """
diff --git a/tests/roots/test-roles-download/another/dummy.dat b/tests/roots/test-roles-download/another/dummy.dat
new file mode 100644
index 000000000..f6d9fed11
--- /dev/null
+++ b/tests/roots/test-roles-download/another/dummy.dat
@@ -0,0 +1 @@
+this one will have some content
diff --git a/tests/roots/test-roles-download/index.rst b/tests/roots/test-roles-download/index.rst
index 41cda621a..cdb075ef3 100644
--- a/tests/roots/test-roles-download/index.rst
+++ b/tests/roots/test-roles-download/index.rst
@@ -2,5 +2,6 @@ test-roles-download
===================
* :download:`dummy.dat`
+* :download:`another/dummy.dat`
* :download:`not_found.dat`
* :download:`Sphinx logo <http://www.sphinx-doc.org/en/master/_static/sphinxheader.png>`
diff --git a/tests/test_autodoc.py b/tests/test_autodoc.py
index 4d893ba42..98f8f1a99 100644
--- a/tests/test_autodoc.py
+++ b/tests/test_autodoc.py
@@ -9,7 +9,6 @@
:license: BSD, see LICENSE for details.
"""
-import platform
import sys
from unittest.mock import Mock
from warnings import catch_warnings
@@ -17,7 +16,7 @@ from warnings import catch_warnings
import pytest
from docutils.statemachine import ViewList
-from sphinx.ext.autodoc import ModuleLevelDocumenter, cut_lines, between, ALL, Options
+from sphinx.ext.autodoc import ModuleLevelDocumenter, ALL, Options
from sphinx.ext.autodoc.directive import DocumenterBridge, process_documenter_options
from sphinx.testing.util import SphinxTestApp, Struct # NOQA
from sphinx.util import logging
@@ -25,8 +24,6 @@ from sphinx.util.docutils import LoggingReporter
app = None
-IS_PYPY = platform.python_implementation() == 'PyPy'
-
def do_autodoc(app, objtype, name, options=None):
if options is None:
@@ -55,7 +52,6 @@ def setup_module(rootdir, sphinx_test_tempdir):
app = SphinxTestApp(srcdir=srcdir)
app.builder.env.app = app
app.builder.env.temp_data['docname'] = 'dummy'
- app.connect('autodoc-process-docstring', process_docstring)
app.connect('autodoc-process-signature', process_signature)
app.connect('autodoc-skip-member', skip_member)
yield
@@ -70,7 +66,7 @@ directive = options = None
@pytest.fixture
def setup_test():
global options, directive
- global processed_docstrings, processed_signatures
+ global processed_signatures
options = Options(
inherited_members = False,
@@ -99,7 +95,6 @@ def setup_test():
)
directive.state.document.settings.tab_width = 8
- processed_docstrings = []
processed_signatures = []
app._status.truncate(0)
@@ -110,16 +105,9 @@ def setup_test():
app.registry.autodoc_attrgettrs.clear()
-processed_docstrings = []
processed_signatures = []
-def process_docstring(app, what, name, obj, options, lines):
- processed_docstrings.append((what, name))
- if name == 'bar':
- lines.extend(['42', ''])
-
-
def process_signature(app, what, name, obj, options, args, retann):
processed_signatures.append((what, name))
if name == 'bar':
@@ -337,127 +325,6 @@ def test_get_doc():
"""Döcstring"""
assert getdocl('function', f) == ['Döcstring']
- # class docstring: depends on config value which one is taken
- class C:
- """Class docstring"""
- def __init__(self):
- """Init docstring"""
-
- def __new__(cls):
- """New docstring"""
- directive.env.config.autoclass_content = 'class'
- assert getdocl('class', C) == ['Class docstring']
- directive.env.config.autoclass_content = 'init'
- assert getdocl('class', C) == ['Init docstring']
- directive.env.config.autoclass_content = 'both'
- assert getdocl('class', C) == ['Class docstring', '', 'Init docstring']
-
- class D:
- """Class docstring"""
- def __init__(self):
- """Init docstring
-
- Other
- lines
- """
-
- # Indentation is normalized for 'both'
- assert getdocl('class', D) == ['Class docstring', '', 'Init docstring',
- '', 'Other', ' lines']
-
- # __init__ have signature at first line of docstring
- class E:
- """Class docstring"""
- def __init__(self, *args, **kw):
- """
- __init__(a1, a2, kw1=True, kw2=False)
-
- Init docstring
- """
-
- # signature line in the docstring will be kept when
- # autodoc_docstring_signature == False
- directive.env.config.autodoc_docstring_signature = False
- directive.env.config.autoclass_content = 'class'
- assert getdocl('class', E) == ['Class docstring']
- directive.env.config.autoclass_content = 'init'
- assert getdocl('class', E) == ['__init__(a1, a2, kw1=True, kw2=False)',
- '', 'Init docstring']
- directive.env.config.autoclass_content = 'both'
- assert getdocl('class', E) == ['Class docstring', '',
- '__init__(a1, a2, kw1=True, kw2=False)',
- '', 'Init docstring']
-
- # signature line in the docstring will be removed when
- # autodoc_docstring_signature == True
- directive.env.config.autodoc_docstring_signature = True # default
- directive.env.config.autoclass_content = 'class'
- assert getdocl('class', E) == ['Class docstring']
- directive.env.config.autoclass_content = 'init'
- assert getdocl('class', E) == ['Init docstring']
- directive.env.config.autoclass_content = 'both'
- assert getdocl('class', E) == ['Class docstring', '', 'Init docstring']
-
- # class does not have __init__ method
- class F:
- """Class docstring"""
-
- # docstring in the __init__ method of base class will be discard
- for f in (False, True):
- directive.env.config.autodoc_docstring_signature = f
- directive.env.config.autoclass_content = 'class'
- assert getdocl('class', F) == ['Class docstring']
- directive.env.config.autoclass_content = 'init'
- assert getdocl('class', F) == ['Class docstring']
- directive.env.config.autoclass_content = 'both'
- assert getdocl('class', F) == ['Class docstring']
-
- # class has __init__ method with no docstring
- class G:
- """Class docstring"""
- def __init__(self):
- pass
-
- # docstring in the __init__ method of base class will not be used
- for f in (False, True):
- directive.env.config.autodoc_docstring_signature = f
- directive.env.config.autoclass_content = 'class'
- assert getdocl('class', G) == ['Class docstring']
- directive.env.config.autoclass_content = 'init'
- assert getdocl('class', G) == ['Class docstring']
- directive.env.config.autoclass_content = 'both'
- assert getdocl('class', G) == ['Class docstring']
-
- # class has __new__ method with docstring
- # class docstring: depends on config value which one is taken
- class H:
- """Class docstring"""
- def __init__(self):
- pass
-
- def __new__(cls):
- """New docstring"""
- directive.env.config.autoclass_content = 'class'
- assert getdocl('class', H) == ['Class docstring']
- directive.env.config.autoclass_content = 'init'
- assert getdocl('class', H) == ['New docstring']
- directive.env.config.autoclass_content = 'both'
- assert getdocl('class', H) == ['Class docstring', '', 'New docstring']
-
- # class has __init__ method without docstring and
- # __new__ method with docstring
- # class docstring: depends on config value which one is taken
- class I: # NOQA
- """Class docstring"""
- def __new__(cls):
- """New docstring"""
- directive.env.config.autoclass_content = 'class'
- assert getdocl('class', I) == ['Class docstring']
- directive.env.config.autoclass_content = 'init'
- assert getdocl('class', I) == ['New docstring']
- directive.env.config.autoclass_content = 'both'
- assert getdocl('class', I) == ['Class docstring', '', 'New docstring']
-
# verify that method docstrings get extracted in both normal case
# and in case of bound method posing as a function
class J: # NOQA
@@ -466,70 +333,6 @@ def test_get_doc():
assert getdocl('method', J.foo) == ['Method docstring']
assert getdocl('function', J().foo) == ['Method docstring']
- from target import Base, Derived
-
- # NOTE: inspect.getdoc seems not to work with locally defined classes
- directive.env.config.autodoc_inherit_docstrings = False
- assert getdocl('method', Base.inheritedmeth) == ['Inherited function.']
- assert getdocl('method', Derived.inheritedmeth) == []
- directive.env.config.autodoc_inherit_docstrings = True
- assert getdocl('method', Derived.inheritedmeth) == ['Inherited function.']
-
-
-@pytest.mark.usefixtures('setup_test')
-def test_docstring_processing():
- def process(objtype, name, obj):
- inst = app.registry.documenters[objtype](directive, name)
- inst.object = obj
- inst.fullname = name
- return list(inst.process_doc(inst.get_doc()))
-
- class E:
- def __init__(self):
- """Init docstring"""
-
- # docstring processing by event handler
- assert process('class', 'bar', E) == ['Init docstring', '', '42', '']
-
- lid = app.connect('autodoc-process-docstring',
- cut_lines(1, 1, ['function']))
-
- def f():
- """
- first line
- second line
- third line
- """
- assert process('function', 'f', f) == ['second line', '']
- app.disconnect(lid)
-
- lid = app.connect('autodoc-process-docstring', between('---', ['function']))
-
- def g():
- """
- first line
- ---
- second line
- ---
- third line
- """
- assert process('function', 'g', g) == ['second line', '']
- app.disconnect(lid)
-
- lid = app.connect('autodoc-process-docstring',
- between('---', ['function'], exclude=True))
-
- def h():
- """
- first line
- ---
- second line
- ---
- third line
- """
- assert process('function', 'h', h) == ['first line', 'third line', '']
- app.disconnect(lid)
-
@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_new_documenter(app):
@@ -565,6 +368,7 @@ def test_new_documenter(app):
@pytest.mark.usefixtures('setup_test')
def test_attrgetter_using():
from target import Class
+ from target.inheritance import Derived
def assert_getter_works(objtype, name, obj, attrs=[], **kw):
getattr_spy = []
@@ -594,7 +398,7 @@ def test_attrgetter_using():
assert_getter_works('class', 'target.Class', Class, ['meth'])
options.inherited_members = True
- assert_getter_works('class', 'target.Class', Class, ['meth', 'inheritedmeth'])
+ assert_getter_works('class', 'target.inheritance.Derived', Derived, ['inheritedmeth'])
@pytest.mark.sphinx('html', testroot='ext-autodoc')
@@ -697,14 +501,14 @@ def test_autodoc_attributes(app):
@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_autodoc_members(app):
# default (no-members)
- actual = do_autodoc(app, 'class', 'target.Base')
+ actual = do_autodoc(app, 'class', 'target.inheritance.Base')
assert list(filter(lambda l: '::' in l, actual)) == [
'.. py:class:: Base',
]
# default ALL-members
options = {"members": None}
- actual = do_autodoc(app, 'class', 'target.Base', options)
+ actual = do_autodoc(app, 'class', 'target.inheritance.Base', options)
assert list(filter(lambda l: '::' in l, actual)) == [
'.. py:class:: Base',
' .. py:method:: Base.inheritedclassmeth()',
@@ -714,7 +518,7 @@ def test_autodoc_members(app):
# default specific-members
options = {"members": "inheritedmeth,inheritedstaticmeth"}
- actual = do_autodoc(app, 'class', 'target.Base', options)
+ actual = do_autodoc(app, 'class', 'target.inheritance.Base', options)
assert list(filter(lambda l: '::' in l, actual)) == [
'.. py:class:: Base',
' .. py:method:: Base.inheritedmeth()',
@@ -726,7 +530,7 @@ def test_autodoc_members(app):
def test_autodoc_exclude_members(app):
options = {"members": None,
"exclude-members": "inheritedmeth,inheritedstaticmeth"}
- actual = do_autodoc(app, 'class', 'target.Base', options)
+ actual = do_autodoc(app, 'class', 'target.inheritance.Base', options)
assert list(filter(lambda l: '::' in l, actual)) == [
'.. py:class:: Base',
' .. py:method:: Base.inheritedclassmeth()'
@@ -735,7 +539,7 @@ def test_autodoc_exclude_members(app):
# members vs exclude-members
options = {"members": "inheritedmeth",
"exclude-members": "inheritedmeth"}
- actual = do_autodoc(app, 'class', 'target.Base', options)
+ actual = do_autodoc(app, 'class', 'target.inheritance.Base', options)
assert list(filter(lambda l: '::' in l, actual)) == [
'.. py:class:: Base',
]
@@ -749,7 +553,6 @@ def test_autodoc_undoc_members(app):
assert list(filter(lambda l: '::' in l, actual)) == [
'.. py:class:: Class(arg)',
' .. py:attribute:: Class.attr',
- ' .. py:attribute:: Class.descr',
' .. py:attribute:: Class.docattr',
' .. py:method:: Class.excludemeth()',
' .. py:attribute:: Class.inst_attr_comment',
@@ -758,7 +561,6 @@ def test_autodoc_undoc_members(app):
' .. py:attribute:: Class.mdocattr',
' .. py:method:: Class.meth()',
' .. py:method:: Class.moore(a, e, f) -> happiness',
- ' .. py:method:: Class.prop',
' .. py:method:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)',
' .. py:attribute:: Class.skipattr',
' .. py:method:: Class.skipmeth()',
@@ -771,16 +573,11 @@ def test_autodoc_undoc_members(app):
def test_autodoc_inherited_members(app):
options = {"members": None,
"inherited-members": None}
- actual = do_autodoc(app, 'class', 'target.Class', options)
+ actual = do_autodoc(app, 'class', 'target.inheritance.Derived', options)
assert list(filter(lambda l: 'method::' in l, actual)) == [
- ' .. py:method:: Class.excludemeth()',
- ' .. py:method:: Class.inheritedclassmeth()',
- ' .. py:method:: Class.inheritedmeth()',
- ' .. py:method:: Class.inheritedstaticmeth(cls)',
- ' .. py:method:: Class.meth()',
- ' .. py:method:: Class.moore(a, e, f) -> happiness',
- ' .. py:method:: Class.prop',
- ' .. py:method:: Class.skipmeth()'
+ ' .. py:method:: Derived.inheritedclassmeth()',
+ ' .. py:method:: Derived.inheritedmeth()',
+ ' .. py:method:: Derived.inheritedstaticmeth(cls)',
]
@@ -790,7 +587,7 @@ def test_autodoc_imported_members(app):
"imported-members": None,
"ignore-module-all": None}
actual = do_autodoc(app, 'module', 'target', options)
- assert '.. py:function:: save_traceback(app)' in actual
+ assert '.. py:function:: save_traceback(app: Sphinx) -> str' in actual
@pytest.mark.sphinx('html', testroot='ext-autodoc')
@@ -825,12 +622,13 @@ def test_autodoc_special_members(app):
actual = do_autodoc(app, 'class', 'target.Class', options)
assert list(filter(lambda l: '::' in l, actual)) == [
'.. py:class:: Class(arg)',
+ ' .. py:attribute:: Class.__dict__',
' .. py:method:: Class.__init__(arg)',
' .. py:attribute:: Class.__module__',
' .. py:method:: Class.__special1__()',
' .. py:method:: Class.__special2__()',
+ ' .. py:attribute:: Class.__weakref__',
' .. py:attribute:: Class.attr',
- ' .. py:attribute:: Class.descr',
' .. py:attribute:: Class.docattr',
' .. py:method:: Class.excludemeth()',
' .. py:attribute:: Class.inst_attr_comment',
@@ -839,7 +637,6 @@ def test_autodoc_special_members(app):
' .. py:attribute:: Class.mdocattr',
' .. py:method:: Class.meth()',
' .. py:method:: Class.moore(a, e, f) -> happiness',
- ' .. py:method:: Class.prop',
' .. py:method:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)',
' .. py:attribute:: Class.skipattr',
' .. py:method:: Class.skipmeth()',
@@ -863,9 +660,6 @@ def test_autodoc_ignore_module_all(app):
actual = do_autodoc(app, 'module', 'target', options)
assert list(filter(lambda l: 'class::' in l, actual)) == [
'.. py:class:: Class(arg)',
- '.. py:class:: CustomDataDescriptor(doc)',
- '.. py:class:: CustomDataDescriptor2(doc)',
- '.. py:class:: CustomDataDescriptorMeta',
'.. py:class:: CustomDict',
'.. py:class:: InstAttCls()',
'.. py:class:: Outer',
@@ -887,12 +681,12 @@ def test_autodoc_noindex(app):
# TODO: :noindex: should be propagated to children of target item.
- actual = do_autodoc(app, 'class', 'target.Base', options)
+ actual = do_autodoc(app, 'class', 'target.inheritance.Base', options)
assert list(actual) == [
'',
'.. py:class:: Base',
' :noindex:',
- ' :module: target',
+ ' :module: target.inheritance',
''
]
@@ -960,11 +754,11 @@ def test_autodoc_inner_class(app):
@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_autodoc_classmethod(app):
- actual = do_autodoc(app, 'method', 'target.Base.inheritedclassmeth')
+ actual = do_autodoc(app, 'method', 'target.inheritance.Base.inheritedclassmeth')
assert list(actual) == [
'',
'.. py:method:: Base.inheritedclassmeth()',
- ' :module: target',
+ ' :module: target.inheritance',
' :classmethod:',
'',
' Inherited class method.',
@@ -974,11 +768,11 @@ def test_autodoc_classmethod(app):
@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_autodoc_staticmethod(app):
- actual = do_autodoc(app, 'method', 'target.Base.inheritedstaticmeth')
+ actual = do_autodoc(app, 'method', 'target.inheritance.Base.inheritedstaticmeth')
assert list(actual) == [
'',
'.. py:method:: Base.inheritedstaticmeth(cls)',
- ' :module: target',
+ ' :module: target.inheritance',
' :staticmethod:',
'',
' Inherited static method.',
@@ -988,14 +782,27 @@ def test_autodoc_staticmethod(app):
@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_autodoc_descriptor(app):
- actual = do_autodoc(app, 'attribute', 'target.Class.descr')
+ options = {"members": None,
+ "undoc-members": True}
+ actual = do_autodoc(app, 'class', 'target.descriptor.Class', options)
assert list(actual) == [
'',
- '.. py:attribute:: Class.descr',
- ' :module: target',
+ '.. py:class:: Class',
+ ' :module: target.descriptor',
'',
- ' Descriptor instance docstring.',
- ' '
+ ' ',
+ ' .. py:attribute:: Class.descr',
+ ' :module: target.descriptor',
+ ' ',
+ ' Descriptor instance docstring.',
+ ' ',
+ ' ',
+ ' .. py:method:: Class.prop',
+ ' :module: target.descriptor',
+ ' :property:',
+ ' ',
+ ' Property.',
+ ' '
]
@@ -1024,14 +831,12 @@ def test_autodoc_member_order(app):
actual = do_autodoc(app, 'class', 'target.Class', options)
assert list(filter(lambda l: '::' in l, actual)) == [
'.. py:class:: Class(arg)',
- ' .. py:attribute:: Class.descr',
' .. py:method:: Class.meth()',
' .. py:method:: Class.undocmeth()',
' .. py:method:: Class.skipmeth()',
' .. py:method:: Class.excludemeth()',
' .. py:attribute:: Class.skipattr',
' .. py:attribute:: Class.attr',
- ' .. py:method:: Class.prop',
' .. py:attribute:: Class.docattr',
' .. py:attribute:: Class.udocattr',
' .. py:attribute:: Class.mdocattr',
@@ -1059,13 +864,11 @@ def test_autodoc_member_order(app):
' .. py:method:: Class.undocmeth()',
' .. py:attribute:: Class._private_inst_attr',
' .. py:attribute:: Class.attr',
- ' .. py:attribute:: Class.descr',
' .. py:attribute:: Class.docattr',
' .. py:attribute:: Class.inst_attr_comment',
' .. py:attribute:: Class.inst_attr_inline',
' .. py:attribute:: Class.inst_attr_string',
' .. py:attribute:: Class.mdocattr',
- ' .. py:method:: Class.prop',
' .. py:attribute:: Class.skipattr',
' .. py:attribute:: Class.udocattr'
]
@@ -1079,7 +882,6 @@ def test_autodoc_member_order(app):
'.. py:class:: Class(arg)',
' .. py:attribute:: Class._private_inst_attr',
' .. py:attribute:: Class.attr',
- ' .. py:attribute:: Class.descr',
' .. py:attribute:: Class.docattr',
' .. py:method:: Class.excludemeth()',
' .. py:attribute:: Class.inst_attr_comment',
@@ -1088,7 +890,6 @@ def test_autodoc_member_order(app):
' .. py:attribute:: Class.mdocattr',
' .. py:method:: Class.meth()',
' .. py:method:: Class.moore(a, e, f) -> happiness',
- ' .. py:method:: Class.prop',
' .. py:method:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)',
' .. py:attribute:: Class.skipattr',
' .. py:method:: Class.skipmeth()',
@@ -1129,95 +930,6 @@ def test_autodoc_class_scope(app):
@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_docstring_signature(app):
- options = {"members": None}
- actual = do_autodoc(app, 'class', 'target.DocstringSig', options)
- assert list(actual) == [
- '',
- '.. py:class:: DocstringSig',
- ' :module: target',
- '',
- ' ',
- ' .. py:method:: DocstringSig.meth(FOO, BAR=1) -> BAZ',
- ' :module: target',
- ' ',
- ' First line of docstring',
- ' ',
- ' rest of docstring',
- ' ',
- ' ',
- ' .. py:method:: DocstringSig.meth2()',
- ' :module: target',
- ' ',
- ' First line, no signature',
- ' Second line followed by indentation::',
- ' ',
- ' indented line',
- ' ',
- ' ',
- ' .. py:method:: DocstringSig.prop1',
- ' :module: target',
- ' :property:',
- ' ',
- ' First line of docstring',
- ' ',
- ' ',
- ' .. py:method:: DocstringSig.prop2',
- ' :module: target',
- ' :property:',
- ' ',
- ' First line of docstring',
- ' Second line of docstring',
- ' '
- ]
-
- # disable autodoc_docstring_signature
- app.config.autodoc_docstring_signature = False
- actual = do_autodoc(app, 'class', 'target.DocstringSig', options)
- assert list(actual) == [
- '',
- '.. py:class:: DocstringSig',
- ' :module: target',
- '',
- ' ',
- ' .. py:method:: DocstringSig.meth()',
- ' :module: target',
- ' ',
- ' meth(FOO, BAR=1) -> BAZ',
- ' First line of docstring',
- ' ',
- ' rest of docstring',
- ' ',
- ' ',
- ' ',
- ' .. py:method:: DocstringSig.meth2()',
- ' :module: target',
- ' ',
- ' First line, no signature',
- ' Second line followed by indentation::',
- ' ',
- ' indented line',
- ' ',
- ' ',
- ' .. py:method:: DocstringSig.prop1',
- ' :module: target',
- ' :property:',
- ' ',
- ' DocstringSig.prop1(self)',
- ' First line of docstring',
- ' ',
- ' ',
- ' .. py:method:: DocstringSig.prop2',
- ' :module: target',
- ' :property:',
- ' ',
- ' First line of docstring',
- ' Second line of docstring',
- ' '
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_class_attributes(app):
options = {"members": None,
"undoc-members": True}
@@ -1419,26 +1131,26 @@ def test_enum_class(app):
@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_descriptor_class(app):
options = {"members": 'CustomDataDescriptor,CustomDataDescriptor2'}
- actual = do_autodoc(app, 'module', 'target', options)
+ actual = do_autodoc(app, 'module', 'target.descriptor', options)
assert list(actual) == [
'',
- '.. py:module:: target',
+ '.. py:module:: target.descriptor',
'',
'',
'.. py:class:: CustomDataDescriptor(doc)',
- ' :module: target',
+ ' :module: target.descriptor',
'',
' Descriptor class docstring.',
' ',
' ',
' .. py:method:: CustomDataDescriptor.meth()',
- ' :module: target',
+ ' :module: target.descriptor',
' ',
' Function.',
' ',
'',
'.. py:class:: CustomDataDescriptor2(doc)',
- ' :module: target',
+ ' :module: target.descriptor',
'',
' Descriptor class with custom metaclass docstring.',
' '
@@ -1471,57 +1183,6 @@ def test_autofunction_for_method(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_mocked_module_imports(app, warning):
- # no autodoc_mock_imports
- options = {"members": 'TestAutodoc,decoratedFunction,func'}
- actual = do_autodoc(app, 'module', 'target.need_mocks', options)
- assert list(actual) == []
- assert "autodoc: failed to import module 'need_mocks'" in warning.getvalue()
-
- # with autodoc_mock_imports
- app.config.autodoc_mock_imports = [
- 'missing_module',
- 'missing_package1',
- 'missing_package2',
- 'missing_package3',
- 'sphinx.missing_module4',
- ]
-
- warning.truncate(0)
- actual = do_autodoc(app, 'module', 'target.need_mocks', options)
- assert list(actual) == [
- '',
- '.. py:module:: target.need_mocks',
- '',
- '',
- '.. py:class:: TestAutodoc',
- ' :module: target.need_mocks',
- '',
- ' TestAutodoc docstring.',
- ' ',
- ' ',
- ' .. py:method:: TestAutodoc.decoratedMethod()',
- ' :module: target.need_mocks',
- ' ',
- ' TestAutodoc::decoratedMethod docstring',
- ' ',
- '',
- '.. py:function:: decoratedFunction()',
- ' :module: target.need_mocks',
- '',
- ' decoratedFunction docstring',
- ' ',
- '',
- '.. py:function:: func(arg: missing_module.Class)',
- ' :module: target.need_mocks',
- '',
- ' a function takes mocked object as an argument',
- ' '
- ]
- assert warning.getvalue() == ''
-
-
@pytest.mark.usefixtures('setup_test')
def test_abstractmethods():
options = {"members": None,
@@ -1710,214 +1371,6 @@ def test_module_variables():
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_typehints_signature(app):
- app.config.autodoc_typehints = "signature"
-
- options = {"members": None,
- "undoc-members": True}
- actual = do_autodoc(app, 'module', 'target.typehints', options)
- assert list(actual) == [
- '',
- '.. py:module:: target.typehints',
- '',
- '',
- '.. py:class:: Math(s: str, o: object = None)',
- ' :module: target.typehints',
- '',
- ' ',
- ' .. py:method:: Math.incr(a: int, b: int = 1) -> int',
- ' :module: target.typehints',
- ' ',
- '',
- '.. py:function:: incr(a: int, b: int = 1) -> int',
- ' :module: target.typehints',
- ''
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_typehints_none(app):
- app.config.autodoc_typehints = "none"
-
- options = {"members": None,
- "undoc-members": True}
- actual = do_autodoc(app, 'module', 'target.typehints', options)
- assert list(actual) == [
- '',
- '.. py:module:: target.typehints',
- '',
- '',
- '.. py:class:: Math(s, o = None)',
- ' :module: target.typehints',
- '',
- ' ',
- ' .. py:method:: Math.incr(a, b = 1) -> int',
- ' :module: target.typehints',
- ' ',
- '',
- '.. py:function:: incr(a, b = 1) -> int',
- ' :module: target.typehints',
- ''
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_default_options(app):
- # no settings
- actual = do_autodoc(app, 'class', 'target.enum.EnumCls')
- assert ' .. py:attribute:: EnumCls.val1' not in actual
- assert ' .. py:attribute:: EnumCls.val4' not in actual
- actual = do_autodoc(app, 'class', 'target.CustomIter')
- assert ' .. py:method:: target.CustomIter' not in actual
- actual = do_autodoc(app, 'module', 'target')
- assert '.. py:function:: save_traceback(app)' not in actual
-
- # with :members:
- app.config.autodoc_default_options = {'members': None}
- actual = do_autodoc(app, 'class', 'target.enum.EnumCls')
- assert ' .. py:attribute:: EnumCls.val1' in actual
- assert ' .. py:attribute:: EnumCls.val4' not in actual
-
- # with :members: = True
- app.config.autodoc_default_options = {'members': True}
- actual = do_autodoc(app, 'class', 'target.enum.EnumCls')
- assert ' .. py:attribute:: EnumCls.val1' in actual
- assert ' .. py:attribute:: EnumCls.val4' not in actual
-
- # with :members: and :undoc-members:
- app.config.autodoc_default_options = {
- 'members': None,
- 'undoc-members': None,
- }
- actual = do_autodoc(app, 'class', 'target.enum.EnumCls')
- assert ' .. py:attribute:: EnumCls.val1' in actual
- assert ' .. py:attribute:: EnumCls.val4' in actual
-
- # with :special-members:
- # Note that :members: must be *on* for :special-members: to work.
- app.config.autodoc_default_options = {
- 'members': None,
- 'special-members': None
- }
- actual = do_autodoc(app, 'class', 'target.CustomIter')
- assert ' .. py:method:: CustomIter.__init__()' in actual
- assert ' Create a new `CustomIter`.' in actual
- assert ' .. py:method:: CustomIter.__iter__()' in actual
- assert ' Iterate squares of each value.' in actual
- if not IS_PYPY:
- assert ' .. py:attribute:: CustomIter.__weakref__' in actual
- assert ' list of weak references to the object (if defined)' in actual
-
- # :exclude-members: None - has no effect. Unlike :members:,
- # :special-members:, etc. where None == "include all", here None means
- # "no/false/off".
- app.config.autodoc_default_options = {
- 'members': None,
- 'exclude-members': None,
- }
- actual = do_autodoc(app, 'class', 'target.enum.EnumCls')
- assert ' .. py:attribute:: EnumCls.val1' in actual
- assert ' .. py:attribute:: EnumCls.val4' not in actual
- app.config.autodoc_default_options = {
- 'members': None,
- 'special-members': None,
- 'exclude-members': None,
- }
- actual = do_autodoc(app, 'class', 'target.CustomIter')
- assert ' .. py:method:: CustomIter.__init__()' in actual
- assert ' Create a new `CustomIter`.' in actual
- assert ' .. py:method:: CustomIter.__iter__()' in actual
- assert ' Iterate squares of each value.' in actual
- if not IS_PYPY:
- assert ' .. py:attribute:: CustomIter.__weakref__' in actual
- assert ' list of weak references to the object (if defined)' in actual
- assert ' .. py:method:: CustomIter.snafucate()' in actual
- assert ' Makes this snafucated.' in actual
-
- # with :imported-members:
- app.config.autodoc_default_options = {
- 'members': None,
- 'imported-members': None,
- 'ignore-module-all': None,
- }
- actual = do_autodoc(app, 'module', 'target')
- assert '.. py:function:: save_traceback(app)' in actual
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_default_options_with_values(app):
- # with :members:
- app.config.autodoc_default_options = {'members': 'val1,val2'}
- actual = do_autodoc(app, 'class', 'target.enum.EnumCls')
- assert ' .. py:attribute:: EnumCls.val1' in actual
- assert ' .. py:attribute:: EnumCls.val2' in actual
- assert ' .. py:attribute:: EnumCls.val3' not in actual
- assert ' .. py:attribute:: EnumCls.val4' not in actual
-
- # with :member-order:
- app.config.autodoc_default_options = {
- 'members': None,
- 'member-order': 'bysource',
- }
- actual = do_autodoc(app, 'class', 'target.Class')
- assert list(filter(lambda l: '::' in l, actual)) == [
- '.. py:class:: Class(arg)',
- ' .. py:attribute:: Class.descr',
- ' .. py:method:: Class.meth()',
- ' .. py:method:: Class.skipmeth()',
- ' .. py:method:: Class.excludemeth()',
- ' .. py:attribute:: Class.attr',
- ' .. py:method:: Class.prop',
- ' .. py:attribute:: Class.docattr',
- ' .. py:attribute:: Class.udocattr',
- ' .. py:attribute:: Class.mdocattr',
- ' .. py:method:: Class.moore(a, e, f) -> happiness',
- ' .. py:attribute:: Class.inst_attr_inline',
- ' .. py:attribute:: Class.inst_attr_comment',
- ' .. py:attribute:: Class.inst_attr_string',
- ]
-
- # with :special-members:
- app.config.autodoc_default_options = {
- 'special-members': '__init__,__iter__',
- }
- actual = do_autodoc(app, 'class', 'target.CustomIter')
- assert ' .. py:method:: CustomIter.__init__()' in actual
- assert ' Create a new `CustomIter`.' in actual
- assert ' .. py:method:: CustomIter.__iter__()' in actual
- assert ' Iterate squares of each value.' in actual
- if not IS_PYPY:
- assert ' .. py:attribute:: CustomIter.__weakref__' not in actual
- assert ' list of weak references to the object (if defined)' not in actual
-
- # with :exclude-members:
- app.config.autodoc_default_options = {
- 'members': None,
- 'exclude-members': 'val1'
- }
- actual = do_autodoc(app, 'class', 'target.enum.EnumCls')
- assert ' .. py:attribute:: EnumCls.val1' not in actual
- assert ' .. py:attribute:: EnumCls.val2' in actual
- assert ' .. py:attribute:: EnumCls.val3' in actual
- assert ' .. py:attribute:: EnumCls.val4' not in actual
- app.config.autodoc_default_options = {
- 'members': None,
- 'special-members': None,
- 'exclude-members': '__weakref__,snafucate',
- }
- actual = do_autodoc(app, 'class', 'target.CustomIter')
- assert ' .. py:method:: CustomIter.__init__()' in actual
- assert ' Create a new `CustomIter`.' in actual
- assert ' .. py:method:: CustomIter.__iter__()' in actual
- assert ' Iterate squares of each value.' in actual
- if not IS_PYPY:
- assert ' .. py:attribute:: CustomIter.__weakref__' not in actual
- assert ' list of weak references to the object (if defined)' not in actual
- assert ' .. py:method:: CustomIter.snafucate()' not in actual
- assert ' Makes this snafucated.' not in actual
-
-
@pytest.mark.sphinx('html', testroot='pycode-egg')
def test_autodoc_for_egged_code(app):
options = {"members": None,
diff --git a/tests/test_build_html.py b/tests/test_build_html.py
index 2df1f8412..677ca9de0 100644
--- a/tests/test_build_html.py
+++ b/tests/test_build_html.py
@@ -451,8 +451,10 @@ def test_html_download(app):
@pytest.mark.sphinx('html', testroot='roles-download')
def test_html_download_role(app, status, warning):
app.build()
- digest = md5((app.srcdir / 'dummy.dat').encode()).hexdigest()
+ digest = md5(b'dummy.dat').hexdigest()
assert (app.outdir / '_downloads' / digest / 'dummy.dat').exists()
+ digest_another = md5(b'another/dummy.dat').hexdigest()
+ assert (app.outdir / '_downloads' / digest_another / 'dummy.dat').exists()
content = (app.outdir / 'index.html').text()
assert (('<li><p><a class="reference download internal" download="" '
@@ -460,6 +462,11 @@ def test_html_download_role(app, status, warning):
'<code class="xref download docutils literal notranslate">'
'<span class="pre">dummy.dat</span></code></a></p></li>' % digest)
in content)
+ assert (('<li><p><a class="reference download internal" download="" '
+ 'href="_downloads/%s/dummy.dat">'
+ '<code class="xref download docutils literal notranslate">'
+ '<span class="pre">another/dummy.dat</span></code></a></p></li>' %
+ digest_another) in content)
assert ('<li><p><code class="xref download docutils literal notranslate">'
'<span class="pre">not_found.dat</span></code></p></li>' in content)
assert ('<li><p><a class="reference download external" download="" '
diff --git a/tests/test_ext_apidoc.py b/tests/test_ext_apidoc.py
index c6cf43c7e..34c6ec824 100644
--- a/tests/test_ext_apidoc.py
+++ b/tests/test_ext_apidoc.py
@@ -467,7 +467,8 @@ def test_package_file(tempdir):
outdir = path(tempdir)
(outdir / 'testpkg').makedirs()
(outdir / 'testpkg' / '__init__.py').write_text('')
- (outdir / 'testpkg' / 'example.py').write_text('')
+ (outdir / 'testpkg' / 'hello.py').write_text('')
+ (outdir / 'testpkg' / 'world.py').write_text('')
(outdir / 'testpkg' / 'subpkg').makedirs()
(outdir / 'testpkg' / 'subpkg' / '__init__.py').write_text('')
apidoc_main(['-o', tempdir, tempdir / 'testpkg'])
@@ -488,10 +489,18 @@ def test_package_file(tempdir):
"Submodules\n"
"----------\n"
"\n"
- "testpkg.example module\n"
- "----------------------\n"
+ "testpkg.hello module\n"
+ "--------------------\n"
"\n"
- ".. automodule:: testpkg.example\n"
+ ".. automodule:: testpkg.hello\n"
+ " :members:\n"
+ " :undoc-members:\n"
+ " :show-inheritance:\n"
+ "\n"
+ "testpkg.world module\n"
+ "--------------------\n"
+ "\n"
+ ".. automodule:: testpkg.world\n"
" :members:\n"
" :undoc-members:\n"
" :show-inheritance:\n"
diff --git a/tests/test_ext_autodoc_configs.py b/tests/test_ext_autodoc_configs.py
new file mode 100644
index 000000000..1ae7a2a23
--- /dev/null
+++ b/tests/test_ext_autodoc_configs.py
@@ -0,0 +1,619 @@
+"""
+ test_ext_autodoc_configs
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Test the autodoc extension. These tests mainly cover config variables
+
+ :copyright: Copyright 2007-2019 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import platform
+
+import pytest
+
+from test_autodoc import do_autodoc
+
+IS_PYPY = platform.python_implementation() == 'PyPy'
+
+
+@pytest.mark.sphinx('html', testroot='ext-autodoc')
+def test_autoclass_content_class(app):
+ app.config.autoclass_content = 'class'
+ options = {"members": None}
+ actual = do_autodoc(app, 'module', 'target.autoclass_content', options)
+ assert list(actual) == [
+ '',
+ '.. py:module:: target.autoclass_content',
+ '',
+ '',
+ '.. py:class:: A',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having no __init__, no __new__',
+ ' ',
+ '',
+ '.. py:class:: B()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having __init__(no docstring), no __new__',
+ ' ',
+ '',
+ '.. py:class:: C()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having __init__, no __new__',
+ ' ',
+ '',
+ '.. py:class:: D',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having no __init__, __new__(no docstring)',
+ ' ',
+ '',
+ '.. py:class:: E',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having no __init__, __new__',
+ ' ',
+ '',
+ '.. py:class:: F()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having both __init__ and __new__',
+ ' '
+ ]
+
+
+@pytest.mark.sphinx('html', testroot='ext-autodoc')
+def test_autoclass_content_init(app):
+ app.config.autoclass_content = 'init'
+ options = {"members": None}
+ actual = do_autodoc(app, 'module', 'target.autoclass_content', options)
+ assert list(actual) == [
+ '',
+ '.. py:module:: target.autoclass_content',
+ '',
+ '',
+ '.. py:class:: A',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having no __init__, no __new__',
+ ' ',
+ '',
+ '.. py:class:: B()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having __init__(no docstring), no __new__',
+ ' ',
+ '',
+ '.. py:class:: C()',
+ ' :module: target.autoclass_content',
+ '',
+ ' __init__ docstring',
+ ' ',
+ '',
+ '.. py:class:: D',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having no __init__, __new__(no docstring)',
+ ' ',
+ '',
+ '.. py:class:: E',
+ ' :module: target.autoclass_content',
+ '',
+ ' __new__ docstring',
+ ' ',
+ '',
+ '.. py:class:: F()',
+ ' :module: target.autoclass_content',
+ '',
+ ' __init__ docstring',
+ ' '
+ ]
+
+
+@pytest.mark.sphinx('html', testroot='ext-autodoc')
+def test_autoclass_content_both(app):
+ app.config.autoclass_content = 'both'
+ options = {"members": None}
+ actual = do_autodoc(app, 'module', 'target.autoclass_content', options)
+ assert list(actual) == [
+ '',
+ '.. py:module:: target.autoclass_content',
+ '',
+ '',
+ '.. py:class:: A',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having no __init__, no __new__',
+ ' ',
+ '',
+ '.. py:class:: B()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having __init__(no docstring), no __new__',
+ ' ',
+ '',
+ '.. py:class:: C()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having __init__, no __new__',
+ ' ',
+ ' __init__ docstring',
+ ' ',
+ '',
+ '.. py:class:: D',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having no __init__, __new__(no docstring)',
+ ' ',
+ '',
+ '.. py:class:: E',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having no __init__, __new__',
+ ' ',
+ ' __new__ docstring',
+ ' ',
+ '',
+ '.. py:class:: F()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having both __init__ and __new__',
+ ' ',
+ ' __init__ docstring',
+ ' '
+ ]
+
+
+@pytest.mark.sphinx('html', testroot='ext-autodoc')
+def test_autodoc_inherit_docstrings(app):
+ assert app.config.autodoc_inherit_docstrings is True # default
+ actual = do_autodoc(app, 'method', 'target.inheritance.Derived.inheritedmeth')
+ assert list(actual) == [
+ '',
+ '.. py:method:: Derived.inheritedmeth()',
+ ' :module: target.inheritance',
+ '',
+ ' Inherited function.',
+ ' '
+ ]
+
+ # disable autodoc_inherit_docstrings
+ app.config.autodoc_inherit_docstrings = False
+ actual = do_autodoc(app, 'method', 'target.inheritance.Derived.inheritedmeth')
+ assert list(actual) == [
+ '',
+ '.. py:method:: Derived.inheritedmeth()',
+ ' :module: target.inheritance',
+ ''
+ ]
+
+
+@pytest.mark.sphinx('html', testroot='ext-autodoc')
+def test_autodoc_docstring_signature(app):
+ options = {"members": None}
+ actual = do_autodoc(app, 'class', 'target.DocstringSig', options)
+ assert list(actual) == [
+ '',
+ '.. py:class:: DocstringSig',
+ ' :module: target',
+ '',
+ ' ',
+ ' .. py:method:: DocstringSig.meth(FOO, BAR=1) -> BAZ',
+ ' :module: target',
+ ' ',
+ ' First line of docstring',
+ ' ',
+ ' rest of docstring',
+ ' ',
+ ' ',
+ ' .. py:method:: DocstringSig.meth2()',
+ ' :module: target',
+ ' ',
+ ' First line, no signature',
+ ' Second line followed by indentation::',
+ ' ',
+ ' indented line',
+ ' ',
+ ' ',
+ ' .. py:method:: DocstringSig.prop1',
+ ' :module: target',
+ ' :property:',
+ ' ',
+ ' First line of docstring',
+ ' ',
+ ' ',
+ ' .. py:method:: DocstringSig.prop2',
+ ' :module: target',
+ ' :property:',
+ ' ',
+ ' First line of docstring',
+ ' Second line of docstring',
+ ' '
+ ]
+
+ # disable autodoc_docstring_signature
+ app.config.autodoc_docstring_signature = False
+ actual = do_autodoc(app, 'class', 'target.DocstringSig', options)
+ assert list(actual) == [
+ '',
+ '.. py:class:: DocstringSig',
+ ' :module: target',
+ '',
+ ' ',
+ ' .. py:method:: DocstringSig.meth()',
+ ' :module: target',
+ ' ',
+ ' meth(FOO, BAR=1) -> BAZ',
+ ' First line of docstring',
+ ' ',
+ ' rest of docstring',
+ ' ',
+ ' ',
+ ' ',
+ ' .. py:method:: DocstringSig.meth2()',
+ ' :module: target',
+ ' ',
+ ' First line, no signature',
+ ' Second line followed by indentation::',
+ ' ',
+ ' indented line',
+ ' ',
+ ' ',
+ ' .. py:method:: DocstringSig.prop1',
+ ' :module: target',
+ ' :property:',
+ ' ',
+ ' DocstringSig.prop1(self)',
+ ' First line of docstring',
+ ' ',
+ ' ',
+ ' .. py:method:: DocstringSig.prop2',
+ ' :module: target',
+ ' :property:',
+ ' ',
+ ' First line of docstring',
+ ' Second line of docstring',
+ ' '
+ ]
+
+
+@pytest.mark.sphinx('html', testroot='ext-autodoc')
+def test_autoclass_content_and_docstring_signature_class(app):
+ app.config.autoclass_content = 'class'
+ options = {"members": None,
+ "undoc-members": None}
+ actual = do_autodoc(app, 'module', 'target.docstring_signature', options)
+ assert list(actual) == [
+ '',
+ '.. py:module:: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: A(foo, bar)',
+ ' :module: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: B(foo, bar)',
+ ' :module: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: C(foo, bar)',
+ ' :module: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: D()',
+ ' :module: target.docstring_signature',
+ ''
+ ]
+
+
+@pytest.mark.sphinx('html', testroot='ext-autodoc')
+def test_autoclass_content_and_docstring_signature_init(app):
+ app.config.autoclass_content = 'init'
+ options = {"members": None,
+ "undoc-members": None}
+ actual = do_autodoc(app, 'module', 'target.docstring_signature', options)
+ assert list(actual) == [
+ '',
+ '.. py:module:: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: A(foo, bar)',
+ ' :module: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: B(foo, bar, baz)',
+ ' :module: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: C(foo, bar, baz)',
+ ' :module: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: D(foo, bar, baz)',
+ ' :module: target.docstring_signature',
+ ''
+ ]
+
+
+@pytest.mark.sphinx('html', testroot='ext-autodoc')
+def test_autoclass_content_and_docstring_signature_both(app):
+ app.config.autoclass_content = 'both'
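+    # 'both' concatenates the class and __init__ docstrings; the class
+    # docstring signature wins, and the leftover __init__ signature line
+    # (e.g. "B(foo, bar, baz)") remains visible as docstring content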
+ options = {"members": None,
+ "undoc-members": None}
+ actual = do_autodoc(app, 'module', 'target.docstring_signature', options)
+ assert list(actual) == [
+ '',
+ '.. py:module:: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: A(foo, bar)',
+ ' :module: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: B(foo, bar)',
+ ' :module: target.docstring_signature',
+ '',
+ ' B(foo, bar, baz)',
+ ' ',
+ '',
+ '.. py:class:: C(foo, bar)',
+ ' :module: target.docstring_signature',
+ '',
+ ' C(foo, bar, baz)',
+ ' ',
+ '',
+ '.. py:class:: D(foo, bar, baz)',
+ ' :module: target.docstring_signature',
+ '',
+ ]
+
+
+@pytest.mark.sphinx('html', testroot='ext-autodoc')
+def test_mocked_module_imports(app, warning):
+ # no autodoc_mock_imports
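+    # without mocking, target.need_mocks cannot be imported: nothing is
+    # generated and a warning is emitted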
+ options = {"members": 'TestAutodoc,decoratedFunction,func'}
+ actual = do_autodoc(app, 'module', 'target.need_mocks', options)
+ assert list(actual) == []
+ assert "autodoc: failed to import module 'need_mocks'" in warning.getvalue()
+
+ # with autodoc_mock_imports
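+    # the listed modules are replaced with mock objects, so the import
+    # succeeds and the module is documented without warnings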
+ app.config.autodoc_mock_imports = [
+ 'missing_module',
+ 'missing_package1',
+ 'missing_package2',
+ 'missing_package3',
+ 'sphinx.missing_module4',
+ ]
+
+ warning.truncate(0)
+ actual = do_autodoc(app, 'module', 'target.need_mocks', options)
+ assert list(actual) == [
+ '',
+ '.. py:module:: target.need_mocks',
+ '',
+ '',
+ '.. py:class:: TestAutodoc',
+ ' :module: target.need_mocks',
+ '',
+ ' TestAutodoc docstring.',
+ ' ',
+ ' ',
+ ' .. py:method:: TestAutodoc.decoratedMethod()',
+ ' :module: target.need_mocks',
+ ' ',
+ ' TestAutodoc::decoratedMethod docstring',
+ ' ',
+ '',
+ '.. py:function:: decoratedFunction()',
+ ' :module: target.need_mocks',
+ '',
+ ' decoratedFunction docstring',
+ ' ',
+ '',
+ '.. py:function:: func(arg: missing_module.Class)',
+ ' :module: target.need_mocks',
+ '',
+ ' a function takes mocked object as an argument',
+ ' '
+ ]
+ assert warning.getvalue() == ''
+
+
+@pytest.mark.sphinx('html', testroot='ext-autodoc')
+def test_autodoc_typehints_signature(app):
+ app.config.autodoc_typehints = "signature"
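+    # "signature" renders the type annotations as part of the generated signatures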
+
+ options = {"members": None,
+ "undoc-members": True}
+ actual = do_autodoc(app, 'module', 'target.typehints', options)
+ assert list(actual) == [
+ '',
+ '.. py:module:: target.typehints',
+ '',
+ '',
+ '.. py:class:: Math(s: str, o: object = None)',
+ ' :module: target.typehints',
+ '',
+ ' ',
+ ' .. py:method:: Math.incr(a: int, b: int = 1) -> int',
+ ' :module: target.typehints',
+ ' ',
+ '',
+ '.. py:function:: incr(a: int, b: int = 1) -> int',
+ ' :module: target.typehints',
+ ''
+ ]
+
+
+@pytest.mark.sphinx('html', testroot='ext-autodoc')
+def test_autodoc_typehints_none(app):
+ app.config.autodoc_typehints = "none"
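+    # "none" drops the parameter annotations (the return annotation is still
+    # rendered in this output)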
+
+ options = {"members": None,
+ "undoc-members": True}
+ actual = do_autodoc(app, 'module', 'target.typehints', options)
+ assert list(actual) == [
+ '',
+ '.. py:module:: target.typehints',
+ '',
+ '',
+ '.. py:class:: Math(s, o = None)',
+ ' :module: target.typehints',
+ '',
+ ' ',
+ ' .. py:method:: Math.incr(a, b = 1) -> int',
+ ' :module: target.typehints',
+ ' ',
+ '',
+ '.. py:function:: incr(a, b = 1) -> int',
+ ' :module: target.typehints',
+ ''
+ ]
+
+
+@pytest.mark.sphinx('html', testroot='ext-autodoc')
+def test_autodoc_default_options(app):
+ # no settings
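+    # with no default options configured, members are not documented at all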
+ actual = do_autodoc(app, 'class', 'target.enum.EnumCls')
+ assert ' .. py:attribute:: EnumCls.val1' not in actual
+ assert ' .. py:attribute:: EnumCls.val4' not in actual
+ actual = do_autodoc(app, 'class', 'target.CustomIter')
+ assert ' .. py:method:: target.CustomIter' not in actual
+ actual = do_autodoc(app, 'module', 'target')
+ assert '.. py:function:: save_traceback(app)' not in actual
+
+ # with :members:
+ app.config.autodoc_default_options = {'members': None}
+ actual = do_autodoc(app, 'class', 'target.enum.EnumCls')
+ assert ' .. py:attribute:: EnumCls.val1' in actual
+ assert ' .. py:attribute:: EnumCls.val4' not in actual
+
+ # with :members: = True
+ app.config.autodoc_default_options = {'members': True}
+ actual = do_autodoc(app, 'class', 'target.enum.EnumCls')
+ assert ' .. py:attribute:: EnumCls.val1' in actual
+ assert ' .. py:attribute:: EnumCls.val4' not in actual
+
+ # with :members: and :undoc-members:
+ app.config.autodoc_default_options = {
+ 'members': None,
+ 'undoc-members': None,
+ }
+ actual = do_autodoc(app, 'class', 'target.enum.EnumCls')
+ assert ' .. py:attribute:: EnumCls.val1' in actual
+ assert ' .. py:attribute:: EnumCls.val4' in actual
+
+ # with :special-members:
+ # Note that :members: must be *on* for :special-members: to work.
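+    # (in a real project this would normally be set in conf.py, e.g.
+    #  autodoc_default_options = {'members': None, 'special-members': None})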
+ app.config.autodoc_default_options = {
+ 'members': None,
+ 'special-members': None
+ }
+ actual = do_autodoc(app, 'class', 'target.CustomIter')
+ assert ' .. py:method:: CustomIter.__init__()' in actual
+ assert ' Create a new `CustomIter`.' in actual
+ assert ' .. py:method:: CustomIter.__iter__()' in actual
+ assert ' Iterate squares of each value.' in actual
+ if not IS_PYPY:
+ assert ' .. py:attribute:: CustomIter.__weakref__' in actual
+ assert ' list of weak references to the object (if defined)' in actual
+
+    # :exclude-members: None has no effect.  Unlike :members:,
+    # :special-members:, etc., where None means "include all", here None
+    # means "no/false/off".
+ app.config.autodoc_default_options = {
+ 'members': None,
+ 'exclude-members': None,
+ }
+ actual = do_autodoc(app, 'class', 'target.enum.EnumCls')
+ assert ' .. py:attribute:: EnumCls.val1' in actual
+ assert ' .. py:attribute:: EnumCls.val4' not in actual
+ app.config.autodoc_default_options = {
+ 'members': None,
+ 'special-members': None,
+ 'exclude-members': None,
+ }
+ actual = do_autodoc(app, 'class', 'target.CustomIter')
+ assert ' .. py:method:: CustomIter.__init__()' in actual
+ assert ' Create a new `CustomIter`.' in actual
+ assert ' .. py:method:: CustomIter.__iter__()' in actual
+ assert ' Iterate squares of each value.' in actual
+ if not IS_PYPY:
+ assert ' .. py:attribute:: CustomIter.__weakref__' in actual
+ assert ' list of weak references to the object (if defined)' in actual
+ assert ' .. py:method:: CustomIter.snafucate()' in actual
+ assert ' Makes this snafucated.' in actual
+
+
+@pytest.mark.sphinx('html', testroot='ext-autodoc')
+def test_autodoc_default_options_with_values(app):
+ # with :members:
+ app.config.autodoc_default_options = {'members': 'val1,val2'}
+ actual = do_autodoc(app, 'class', 'target.enum.EnumCls')
+ assert ' .. py:attribute:: EnumCls.val1' in actual
+ assert ' .. py:attribute:: EnumCls.val2' in actual
+ assert ' .. py:attribute:: EnumCls.val3' not in actual
+ assert ' .. py:attribute:: EnumCls.val4' not in actual
+
+ # with :member-order:
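+    # 'bysource' keeps members in the order they are defined in the module,
+    # which is what the filtered list below checks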
+ app.config.autodoc_default_options = {
+ 'members': None,
+ 'member-order': 'bysource',
+ }
+ actual = do_autodoc(app, 'class', 'target.Class')
+    assert [line for line in actual if '::' in line] == [
+ '.. py:class:: Class(arg)',
+ ' .. py:method:: Class.meth()',
+ ' .. py:method:: Class.skipmeth()',
+ ' .. py:method:: Class.excludemeth()',
+ ' .. py:attribute:: Class.attr',
+ ' .. py:attribute:: Class.docattr',
+ ' .. py:attribute:: Class.udocattr',
+ ' .. py:attribute:: Class.mdocattr',
+ ' .. py:method:: Class.moore(a, e, f) -> happiness',
+ ' .. py:attribute:: Class.inst_attr_inline',
+ ' .. py:attribute:: Class.inst_attr_comment',
+ ' .. py:attribute:: Class.inst_attr_string',
+ ]
+
+ # with :special-members:
+ app.config.autodoc_default_options = {
+ 'special-members': '__init__,__iter__',
+ }
+ actual = do_autodoc(app, 'class', 'target.CustomIter')
+ assert ' .. py:method:: CustomIter.__init__()' in actual
+ assert ' Create a new `CustomIter`.' in actual
+ assert ' .. py:method:: CustomIter.__iter__()' in actual
+ assert ' Iterate squares of each value.' in actual
+ if not IS_PYPY:
+ assert ' .. py:attribute:: CustomIter.__weakref__' not in actual
+ assert ' list of weak references to the object (if defined)' not in actual
+
+ # with :exclude-members:
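+    # excluded names are skipped even though :members: / :special-members:
+    # would otherwise include them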
+ app.config.autodoc_default_options = {
+ 'members': None,
+ 'exclude-members': 'val1'
+ }
+ actual = do_autodoc(app, 'class', 'target.enum.EnumCls')
+ assert ' .. py:attribute:: EnumCls.val1' not in actual
+ assert ' .. py:attribute:: EnumCls.val2' in actual
+ assert ' .. py:attribute:: EnumCls.val3' in actual
+ assert ' .. py:attribute:: EnumCls.val4' not in actual
+ app.config.autodoc_default_options = {
+ 'members': None,
+ 'special-members': None,
+ 'exclude-members': '__weakref__,snafucate',
+ }
+ actual = do_autodoc(app, 'class', 'target.CustomIter')
+ assert ' .. py:method:: CustomIter.__init__()' in actual
+ assert ' Create a new `CustomIter`.' in actual
+ assert ' .. py:method:: CustomIter.__iter__()' in actual
+ assert ' Iterate squares of each value.' in actual
+ if not IS_PYPY:
+ assert ' .. py:attribute:: CustomIter.__weakref__' not in actual
+ assert ' list of weak references to the object (if defined)' not in actual
+ assert ' .. py:method:: CustomIter.snafucate()' not in actual
+ assert ' Makes this snafucated.' not in actual
diff --git a/tests/test_ext_autodoc_events.py b/tests/test_ext_autodoc_events.py
new file mode 100644
index 000000000..647def3d7
--- /dev/null
+++ b/tests/test_ext_autodoc_events.py
@@ -0,0 +1,81 @@
+"""
+ test_ext_autodoc_events
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+    Test the autodoc extension.  This mainly tests autodoc events.
+
+ :copyright: Copyright 2007-2019 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import pytest
+
+from sphinx.ext.autodoc import between, cut_lines
+from test_autodoc import do_autodoc
+
+
+@pytest.mark.sphinx('html', testroot='ext-autodoc')
+def test_process_docstring(app):
+ def on_process_docstring(app, what, name, obj, options, lines):
+ lines.clear()
+ lines.append('my docstring')
+
+ app.connect('autodoc-process-docstring', on_process_docstring)
+
+ actual = do_autodoc(app, 'function', 'target.process_docstring.func')
+ assert list(actual) == [
+ '',
+ '.. py:function:: func()',
+ ' :module: target.process_docstring',
+ '',
+ ' my docstring'
+ ]
+
+
+@pytest.mark.sphinx('html', testroot='ext-autodoc')
+def test_cut_lines(app):
+ app.connect('autodoc-process-docstring',
+ cut_lines(2, 2, ['function']))
+
+ actual = do_autodoc(app, 'function', 'target.process_docstring.func')
+ assert list(actual) == [
+ '',
+ '.. py:function:: func()',
+ ' :module: target.process_docstring',
+ '',
+ ' second line',
+ ' '
+ ]
+
+
+@pytest.mark.sphinx('html', testroot='ext-autodoc')
+def test_between(app):
+ app.connect('autodoc-process-docstring',
+ between('---', ['function']))
+
+ actual = do_autodoc(app, 'function', 'target.process_docstring.func')
+ assert list(actual) == [
+ '',
+ '.. py:function:: func()',
+ ' :module: target.process_docstring',
+ '',
+ ' second line',
+ ' '
+ ]
+
+
+@pytest.mark.sphinx('html', testroot='ext-autodoc')
+def test_between_exclude(app):
+ app.connect('autodoc-process-docstring',
+ between('---', ['function'], exclude=True))
+
+ actual = do_autodoc(app, 'function', 'target.process_docstring.func')
+ assert list(actual) == [
+ '',
+ '.. py:function:: func()',
+ ' :module: target.process_docstring',
+ '',
+ ' first line',
+ ' third line',
+ ' '
+ ]
diff --git a/tests/test_ext_autosummary.py b/tests/test_ext_autosummary.py
index 04af9ed85..ae97d3b57 100644
--- a/tests/test_ext_autosummary.py
+++ b/tests/test_ext_autosummary.py
@@ -10,7 +10,7 @@
import sys
from io import StringIO
-from unittest.mock import Mock
+from unittest.mock import Mock, patch
import pytest
from docutils import nodes
@@ -19,6 +19,7 @@ from sphinx import addnodes
from sphinx.ext.autosummary import (
autosummary_table, autosummary_toc, mangle_signature, import_by_name, extract_summary
)
+from sphinx.ext.autosummary.generate import generate_autosummary_docs
from sphinx.testing.util import assert_node, etree_parse
from sphinx.util.docutils import new_document
@@ -286,3 +287,18 @@ def test_autosummary_imported_members(app, status, warning):
' \n' in module)
finally:
sys.modules.pop('autosummary_dummy_package', None)
+
+
+@pytest.mark.sphinx(testroot='ext-autodoc')
+def test_generate_autosummary_docs_property(app):
+ with patch('sphinx.ext.autosummary.generate.find_autosummary_in_files') as mock:
+ mock.return_value = [('target.methods.Base.prop', 'prop', None)]
+ generate_autosummary_docs([], output_dir=app.srcdir, builder=app.builder, app=app)
+
+ content = (app.srcdir / 'target.methods.Base.prop.rst').text()
+ assert content == ("target.methods.Base.prop\n"
+ "========================\n"
+ "\n"
+ ".. currentmodule:: target.methods\n"
+ "\n"
+ ".. autoproperty:: Base.prop")