From 21e9f181e708c5697cc5fe17f9a009e594e00402 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Sun, 2 Mar 2025 22:56:56 +0000
Subject: [PATCH 001/466] Bump version
---
sphinx/__init__.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/sphinx/__init__.py b/sphinx/__init__.py
index 35da74e775d..b70b6db47a6 100644
--- a/sphinx/__init__.py
+++ b/sphinx/__init__.py
@@ -17,7 +17,7 @@
if TYPE_CHECKING:
from typing import Final
-__version__: Final = '8.2.3'
+__version__: Final = '8.3.0'
__display_version__: Final = __version__ # used for command line version
warnings.filterwarnings(
@@ -35,11 +35,11 @@
#:
#: .. versionadded:: 1.2
#: Before version 1.2, check the string ``sphinx.__version__``.
-version_info: Final = (8, 2, 3, 'final', 0)
+version_info: Final = (8, 3, 0, 'beta', 0)
package_dir: Final = _StrPath(__file__).resolve().parent
-_in_development = False
+_in_development = True
if _in_development:
# Only import subprocess if needed
import subprocess
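For context, `_in_development` gates logic that decorates the display version when running from a git checkout. A minimal sketch of that idea, assuming a hypothetical `_git_display_version` helper (the real sphinx/__init__.py differs in detail):

    import subprocess

    def _git_display_version(base: str) -> str:
        # Hypothetical helper: append the current commit hash to the
        # base version when running from a git checkout; fall back to
        # the plain version if git is unavailable.
        try:
            ret = subprocess.run(
                ('git', 'rev-parse', '--short', 'HEAD'),
                capture_output=True, check=False, encoding='ascii',
            )
        except OSError:
            return base
        if ret.returncode == 0 and ret.stdout:
            return f'{base}+/{ret.stdout.strip()}'
        return base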
From 2fa51bb49351cff4e3c67adafbe51ecfd0fdb43b Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 3 Mar 2025 05:08:01 +0000
Subject: [PATCH 002/466] Bump pyright to 1.1.396 (#13404)
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index c4b1b6d73df..704ef739062 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -102,7 +102,7 @@ lint = [
"types-Pygments==2.19.0.20250219",
"types-requests==2.32.0.20241016", # align with requests
"types-urllib3==1.26.25.14",
- "pyright==1.1.395",
+ "pyright==1.1.396",
"pytest>=8.0",
"pypi-attestations==0.0.21",
"betterproto==2.0.0b6",
From 54ab557828a1a2bbbb5a93e71d0edc594ca36106 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 5 Mar 2025 15:48:29 +0000
Subject: [PATCH 003/466] Bump types-requests to 2.32.0.20250301 (#13405)
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index 704ef739062..37da894c7e1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -100,7 +100,7 @@ lint = [
"types-docutils==0.21.0.20241128",
"types-Pillow==10.2.0.20240822",
"types-Pygments==2.19.0.20250219",
- "types-requests==2.32.0.20241016", # align with requests
+ "types-requests==2.32.0.20250301", # align with requests
"types-urllib3==1.26.25.14",
"pyright==1.1.396",
"pytest>=8.0",
From 8962398b761c3d85a7c74b6f789b3ffb127bde0c Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Thu, 6 Mar 2025 16:03:44 +0000
Subject: [PATCH 004/466] autosummary: Update test for Python 3.14.0a5+
(#13418)
`types.UnionType` and `typing.Union` have been merged.
---
tests/test_extensions/test_ext_autosummary.py | 8 ++++++--
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/tests/test_extensions/test_ext_autosummary.py b/tests/test_extensions/test_ext_autosummary.py
index 35dc7d180ef..c807ddba3d1 100644
--- a/tests/test_extensions/test_ext_autosummary.py
+++ b/tests/test_extensions/test_ext_autosummary.py
@@ -447,8 +447,12 @@ def test_autosummary_generate_content_for_module_imported_members(app):
]
assert context['functions'] == ['bar']
assert context['all_functions'] == ['_quux', 'bar']
- assert context['classes'] == ['Class', 'Foo']
- assert context['all_classes'] == ['Class', 'Foo', '_Baz']
+ if sys.version_info >= (3, 14, 0, 'alpha', 5):
+ assert context['classes'] == ['Class', 'Foo', 'Union']
+ assert context['all_classes'] == ['Class', 'Foo', 'Union', '_Baz']
+ else:
+ assert context['classes'] == ['Class', 'Foo']
+ assert context['all_classes'] == ['Class', 'Foo', '_Baz']
assert context['exceptions'] == ['Exc']
assert context['all_exceptions'] == ['Exc', '_Exc']
assert context['attributes'] == ['CONSTANT1', 'qux', 'quuz', 'non_imported_member']
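The premise of this fix is easy to check in isolation. A minimal sketch, assuming CPython 3.14.0a5 or later, where the two objects were merged:

    import sys
    import types
    import typing

    if sys.version_info >= (3, 14):
        # typing.Union and types.UnionType are now the same object, so
        # 'from typing import Union' binds a class, and autosummary
        # counts it among the module's imported classes.
        assert typing.Union is types.UnionType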
From 6b9eb6f98533017428e8890b94acc2c4ce4a4849 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Fri, 7 Mar 2025 20:25:56 +0000
Subject: [PATCH 005/466] Remove mypy overrides for ``tests.conftest`` (#13417)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
---
pyproject.toml | 1 -
tests/conftest.py | 14 ++++++++++----
2 files changed, 10 insertions(+), 5 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 37da894c7e1..9645f148dd3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -203,7 +203,6 @@ ignore_missing_imports = true
[[tool.mypy.overrides]]
module = [
# tests/
- "tests.conftest",
"tests.test_addnodes",
"tests.test_application",
"tests.test_events",
diff --git a/tests/conftest.py b/tests/conftest.py
index fde7b8f005b..8860e24e65c 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -16,7 +16,10 @@
from sphinx.testing.util import _clean_up_global_state
if TYPE_CHECKING:
- from collections.abc import Iterator
+ from collections.abc import Callable, Iterator
+ from typing import Any
+
+ from sphinx.testing.util import SphinxTestApp
_TESTS_ROOT = Path(__file__).resolve().parent
if 'CI' in os.environ and (_TESTS_ROOT / 'roots-read-only').is_dir():
@@ -83,7 +86,7 @@ def _http_teapot(monkeypatch: pytest.MonkeyPatch) -> Iterator[None]:
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/418
response = SimpleNamespace(status_code=418)
- def _request(*args, **kwargs):
+ def _request(*args: Any, **kwargs: Any) -> SimpleNamespace:
return response
with monkeypatch.context() as m:
@@ -92,10 +95,13 @@ def _request(*args, **kwargs):
@pytest.fixture
-def make_app_with_empty_project(make_app, tmp_path):
+def make_app_with_empty_project(
+ make_app: Callable[..., SphinxTestApp],
+ tmp_path: Path,
+) -> Callable[..., SphinxTestApp]:
(tmp_path / 'conf.py').touch()
- def _make_app(*args, **kw):
+ def _make_app(*args: Any, **kw: Any) -> SphinxTestApp:
kw.setdefault('srcdir', Path(tmp_path))
return make_app(*args, **kw)
From 457754b9d936509d695d75e6b251e86100850eca Mon Sep 17 00:00:00 2001
From: Justine Krejcha
Date: Fri, 7 Mar 2025 19:24:01 -0800
Subject: [PATCH 006/466] Correct "can be install" to "can be installed"
(#13423)
---
doc/usage/installation.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/usage/installation.rst b/doc/usage/installation.rst
index 96d1594d6e6..8b0aca1cab3 100644
--- a/doc/usage/installation.rst
+++ b/doc/usage/installation.rst
@@ -168,7 +168,7 @@ __ https://www.macports.org/ports.php?by=library&substr=py313-sphinx
Windows
~~~~~~~
-Sphinx can be install using `Chocolatey`__.
+Sphinx can be installed using `Chocolatey`__.
__ https://chocolatey.org/
From 292e0266f08d624a91f9cf541c1b665e91295677 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Wed, 19 Mar 2025 19:23:39 +0000
Subject: [PATCH 007/466] Bump Ruff to 0.9.10
---
pyproject.toml | 2 +-
sphinx/directives/other.py | 2 +-
sphinx/domains/c/_parser.py | 2 +-
sphinx/domains/citation.py | 2 +-
sphinx/domains/cpp/__init__.py | 2 +-
sphinx/domains/cpp/_parser.py | 2 +-
sphinx/domains/python/__init__.py | 12 ++---
sphinx/domains/rst.py | 2 +-
sphinx/environment/adapters/indexentries.py | 2 +-
sphinx/environment/collectors/asset.py | 2 +-
sphinx/ext/autodoc/__init__.py | 10 ++---
sphinx/ext/autosummary/__init__.py | 2 +-
sphinx/ext/autosummary/generate.py | 2 +-
sphinx/ext/graphviz.py | 4 +-
sphinx/ext/imgmath.py | 6 +--
sphinx/ext/inheritance_diagram.py | 2 +-
sphinx/ext/napoleon/docstring.py | 2 +-
sphinx/ext/viewcode.py | 6 +--
sphinx/roles.py | 2 +-
sphinx/util/docfields.py | 2 +-
sphinx/util/docutils.py | 2 +-
sphinx/writers/texinfo.py | 2 +-
tests/test_builders/test_build_linkcheck.py | 4 +-
tests/test_config/test_config.py | 2 +-
tests/test_directives/test_directive_code.py | 44 +++++++++----------
tests/test_domains/test_domain_py.py | 2 +-
tests/test_extensions/test_ext_coverage.py | 2 +-
.../test_ext_intersphinx_cache.py | 6 +--
28 files changed, 66 insertions(+), 66 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 9645f148dd3..59312974f8b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -92,7 +92,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.9.9",
+ "ruff==0.9.10",
"mypy==1.15.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
diff --git a/sphinx/directives/other.py b/sphinx/directives/other.py
index d9c2b98fd84..8c66ed383b5 100644
--- a/sphinx/directives/other.py
+++ b/sphinx/directives/other.py
@@ -411,7 +411,7 @@ def _insert_input(include_lines: list[str], source: str) -> None:
if self.arguments[0].startswith('<') and self.arguments[0].endswith('>'):
# docutils "standard" includes, do not do path processing
return super().run()
- rel_filename, filename = self.env.relfn2path(self.arguments[0])
+ _rel_filename, filename = self.env.relfn2path(self.arguments[0])
self.arguments[0] = str(filename)
self.env.note_included(filename)
return super().run()
diff --git a/sphinx/domains/c/_parser.py b/sphinx/domains/c/_parser.py
index 7eb09f6f7b8..bd7ddbe2326 100644
--- a/sphinx/domains/c/_parser.py
+++ b/sphinx/domains/c/_parser.py
@@ -230,7 +230,7 @@ def _parse_paren_expression_list(self) -> ASTParenExprList | None:
#
# expression-list
# -> initializer-list
- exprs, trailing_comma = self._parse_initializer_list(
+ exprs, _trailing_comma = self._parse_initializer_list(
'parenthesized expression-list', '(', ')'
)
if exprs is None:
diff --git a/sphinx/domains/citation.py b/sphinx/domains/citation.py
index 49b74cca269..348888c2d50 100644
--- a/sphinx/domains/citation.py
+++ b/sphinx/domains/citation.py
@@ -106,7 +106,7 @@ def resolve_xref(
node: pending_xref,
contnode: Element,
) -> nodes.reference | None:
- docname, labelid, lineno = self.citations.get(target, ('', '', 0))
+ docname, labelid, _lineno = self.citations.get(target, ('', '', 0))
if not docname:
return None
diff --git a/sphinx/domains/cpp/__init__.py b/sphinx/domains/cpp/__init__.py
index 75d7732a405..fc72e208791 100644
--- a/sphinx/domains/cpp/__init__.py
+++ b/sphinx/domains/cpp/__init__.py
@@ -744,7 +744,7 @@ def apply(self, **kwargs: Any) -> None:
template_decls = ns.templatePrefix.templates
else:
template_decls = []
- symbols, fail_reason = parent_symbol.find_name(
+ symbols, _fail_reason = parent_symbol.find_name(
nestedName=name,
templateDecls=template_decls,
typ='any',
diff --git a/sphinx/domains/cpp/_parser.py b/sphinx/domains/cpp/_parser.py
index d28c474795d..aa941260da9 100644
--- a/sphinx/domains/cpp/_parser.py
+++ b/sphinx/domains/cpp/_parser.py
@@ -365,7 +365,7 @@ def _parse_paren_expression_list(self) -> ASTParenExprList:
#
# expression-list
# -> initializer-list
- exprs, trailing_comma = self._parse_initializer_list(
+ exprs, _trailing_comma = self._parse_initializer_list(
'parenthesized expression-list', '(', ')'
)
if exprs is None:
diff --git a/sphinx/domains/python/__init__.py b/sphinx/domains/python/__init__.py
index 97519ee028e..af923cae70e 100644
--- a/sphinx/domains/python/__init__.py
+++ b/sphinx/domains/python/__init__.py
@@ -108,7 +108,7 @@ def add_target_and_index(
modname = self.options.get('module', self.env.ref_context.get('py:module'))
node_id = signode['ids'][0]
- name, cls = name_cls
+ name, _cls = name_cls
if modname:
text = _('%s() (in module %s)') % (name, modname)
self.indexnode['entries'].append(('single', text, node_id, '', None))
@@ -175,7 +175,7 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str]
return fullname, prefix
def get_index_text(self, modname: str, name_cls: tuple[str, str]) -> str:
- name, cls = name_cls
+ name, _cls = name_cls
if modname:
return _('%s (in module %s)') % (name, modname)
else:
@@ -268,7 +268,7 @@ def get_signature_prefix(self, sig: str) -> Sequence[nodes.Node]:
return prefix
def get_index_text(self, modname: str, name_cls: tuple[str, str]) -> str:
- name, cls = name_cls
+ name, _cls = name_cls
try:
clsname, methname = name.rsplit('.', 1)
if modname and self.config.add_module_names:
@@ -364,7 +364,7 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str]
return fullname, prefix
def get_index_text(self, modname: str, name_cls: tuple[str, str]) -> str:
- name, cls = name_cls
+ name, _cls = name_cls
try:
clsname, attrname = name.rsplit('.', 1)
if modname and self.config.add_module_names:
@@ -424,7 +424,7 @@ def get_signature_prefix(self, sig: str) -> Sequence[nodes.Node]:
return prefix
def get_index_text(self, modname: str, name_cls: tuple[str, str]) -> str:
- name, cls = name_cls
+ name, _cls = name_cls
try:
clsname, attrname = name.rsplit('.', 1)
if modname and self.config.add_module_names:
@@ -464,7 +464,7 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str]
return fullname, prefix
def get_index_text(self, modname: str, name_cls: tuple[str, str]) -> str:
- name, cls = name_cls
+ name, _cls = name_cls
try:
clsname, attrname = name.rsplit('.', 1)
if modname and self.config.add_module_names:
diff --git a/sphinx/domains/rst.py b/sphinx/domains/rst.py
index cd5d8312d4a..55aa3103d8a 100644
--- a/sphinx/domains/rst.py
+++ b/sphinx/domains/rst.py
@@ -83,7 +83,7 @@ def _toc_entry_name(self, sig_node: desc_signature) -> str:
return ''
objtype = sig_node.parent.get('objtype')
- *parents, name = sig_node['_toc_parts']
+ *_parents, name = sig_node['_toc_parts']
if objtype == 'directive:option':
return f':{name}:'
if self.config.toc_object_entries_show_parents in {'domain', 'all'}:
diff --git a/sphinx/environment/adapters/indexentries.py b/sphinx/environment/adapters/indexentries.py
index c19628515b6..e9e6e408b6c 100644
--- a/sphinx/environment/adapters/indexentries.py
+++ b/sphinx/environment/adapters/indexentries.py
@@ -253,7 +253,7 @@ def _key_func_2(entry: tuple[str, _IndexEntryTargets]) -> str:
def _group_by_func(entry: tuple[str, _IndexEntry]) -> str:
"""Group the entries by letter or category key."""
- key, (targets, sub_items, category_key) = entry
+ key, (_targets, _sub_items, category_key) = entry
if category_key is not None:
return category_key
diff --git a/sphinx/environment/collectors/asset.py b/sphinx/environment/collectors/asset.py
index 13105587673..e199fc90124 100644
--- a/sphinx/environment/collectors/asset.py
+++ b/sphinx/environment/collectors/asset.py
@@ -117,7 +117,7 @@ def collect_candidates(
try:
mimetype = guess_mimetype(filename)
if mimetype is None:
- basename, suffix = os.path.splitext(filename)
+ _basename, suffix = os.path.splitext(filename)
mimetype = 'image/x-' + suffix[1:]
if mimetype not in candidates:
globbed.setdefault(mimetype, []).append(new_imgpath.as_posix())
diff --git a/sphinx/ext/autodoc/__init__.py b/sphinx/ext/autodoc/__init__.py
index 8cdb039df3e..560b6905208 100644
--- a/sphinx/ext/autodoc/__init__.py
+++ b/sphinx/ext/autodoc/__init__.py
@@ -463,7 +463,7 @@ def parse_name(self) -> bool:
type='autodoc',
)
return False
- explicit_modname, path, base, tp_list, args, retann = matched.groups()
+ explicit_modname, path, base, _tp_list, args, retann = matched.groups()
# support explicit module and class name separation via ::
if explicit_modname is not None:
@@ -1359,7 +1359,7 @@ def resolve_name(
# ... if still falsy, there's no way to know
if not mod_cls:
return None, []
- modname, sep, cls = mod_cls.rpartition('.')
+ modname, _sep, cls = mod_cls.rpartition('.')
parents = [cls]
# if the module name is still missing, get it like above
if not modname:
@@ -1405,7 +1405,7 @@ def _find_signature(self) -> tuple[str | None, str | None] | None:
match = py_ext_sig_re.match(line)
if not match:
break
- exmod, path, base, tp_list, args, retann = match.groups()
+ _exmod, _path, base, _tp_list, args, retann = match.groups()
# the base name must match ours
if base not in valid_names:
@@ -2295,7 +2295,7 @@ def should_suppress_value_header(self) -> bool:
return True
else:
doc = self.get_doc() or []
- docstring, metadata = separate_metadata(
+ _docstring, metadata = separate_metadata(
'\n'.join(functools.reduce(operator.iadd, doc, []))
)
if 'hide-value' in metadata:
@@ -2947,7 +2947,7 @@ def should_suppress_value_header(self) -> bool:
else:
doc = self.get_doc()
if doc:
- docstring, metadata = separate_metadata(
+ _docstring, metadata = separate_metadata(
'\n'.join(functools.reduce(operator.iadd, doc, []))
)
if 'hide-value' in metadata:
diff --git a/sphinx/ext/autosummary/__init__.py b/sphinx/ext/autosummary/__init__.py
index 97c64a37cd1..733c936d8f0 100644
--- a/sphinx/ext/autosummary/__init__.py
+++ b/sphinx/ext/autosummary/__init__.py
@@ -814,7 +814,7 @@ def import_ivar_by_name(
"""
try:
name, attr = name.rsplit('.', 1)
- real_name, obj, parent, modname = import_by_name(name, prefixes)
+ real_name, obj, _parent, modname = import_by_name(name, prefixes)
# Get ancestors of the object (class.__mro__ includes the class itself as
# the first entry)
diff --git a/sphinx/ext/autosummary/generate.py b/sphinx/ext/autosummary/generate.py
index d865c0de2af..62a106479ea 100644
--- a/sphinx/ext/autosummary/generate.py
+++ b/sphinx/ext/autosummary/generate.py
@@ -719,7 +719,7 @@ def find_autosummary_in_docstring(
See `find_autosummary_in_lines`.
"""
try:
- real_name, obj, parent, modname = import_by_name(name)
+ _real_name, obj, _parent, _modname = import_by_name(name)
lines = pydoc.getdoc(obj).splitlines()
return find_autosummary_in_lines(lines, module=name, filename=filename)
except AttributeError:
diff --git a/sphinx/ext/graphviz.py b/sphinx/ext/graphviz.py
index 9cd4d163e36..b973c1f5870 100644
--- a/sphinx/ext/graphviz.py
+++ b/sphinx/ext/graphviz.py
@@ -431,7 +431,7 @@ def render_dot_latex(
filename: str | None = None,
) -> None:
try:
- fname, outfn = render_dot(self, code, options, 'pdf', prefix, filename)
+ fname, _outfn = render_dot(self, code, options, 'pdf', prefix, filename)
except GraphvizError as exc:
logger.warning(__('dot code %r: %s'), code, exc)
raise nodes.SkipNode from exc
@@ -475,7 +475,7 @@ def render_dot_texinfo(
prefix: str = 'graphviz',
) -> None:
try:
- fname, outfn = render_dot(self, code, options, 'png', prefix)
+ fname, _outfn = render_dot(self, code, options, 'png', prefix)
except GraphvizError as exc:
logger.warning(__('dot code %r: %s'), code, exc)
raise nodes.SkipNode from exc
diff --git a/sphinx/ext/imgmath.py b/sphinx/ext/imgmath.py
index b7bcf4a7a67..5b58db7b084 100644
--- a/sphinx/ext/imgmath.py
+++ b/sphinx/ext/imgmath.py
@@ -200,7 +200,7 @@ def convert_dvi_to_png(dvipath: Path, builder: Builder, out_path: Path) -> int |
command.append('--depth')
command.append(dvipath)
- stdout, stderr = convert_dvi_to_image(command, name)
+ stdout, _stderr = convert_dvi_to_image(command, name)
depth = None
if builder.config.imgmath_use_preview:
@@ -221,7 +221,7 @@ def convert_dvi_to_svg(dvipath: Path, builder: Builder, out_path: Path) -> int |
command.extend(builder.config.imgmath_dvisvgm_args)
command.append(dvipath)
- stdout, stderr = convert_dvi_to_image(command, name)
+ _stdout, stderr = convert_dvi_to_image(command, name)
depth = None
if builder.config.imgmath_use_preview:
@@ -370,7 +370,7 @@ def html_visit_displaymath(self: HTML5Translator, node: nodes.math_block) -> Non
else:
latex = wrap_displaymath(node.astext(), None, False)
try:
- rendered_path, depth = render_math(self, latex)
+ rendered_path, _depth = render_math(self, latex)
except MathExtError as exc:
msg = str(exc)
sm = nodes.system_message(
diff --git a/sphinx/ext/inheritance_diagram.py b/sphinx/ext/inheritance_diagram.py
index ce05626abe5..83a6d4b7b01 100644
--- a/sphinx/ext/inheritance_diagram.py
+++ b/sphinx/ext/inheritance_diagram.py
@@ -436,7 +436,7 @@ def run(self) -> list[Node]:
# references to real URLs later. These nodes will eventually be
# removed from the doctree after we're done with them.
for name in graph.get_all_class_names():
- refnodes, x = class_role( # type: ignore[misc]
+ refnodes, _x = class_role( # type: ignore[misc]
'class', f':class:`{name}`', name, 0, self.state.inliner
)
node.extend(refnodes)
diff --git a/sphinx/ext/napoleon/docstring.py b/sphinx/ext/napoleon/docstring.py
index d1317e9d841..38325df1d94 100644
--- a/sphinx/ext/napoleon/docstring.py
+++ b/sphinx/ext/napoleon/docstring.py
@@ -476,7 +476,7 @@ def _consume_field(
) -> tuple[str, str, list[str]]:
line = self._lines.next()
- before, colon, after = self._partition_field_on_colon(line)
+ before, _colon, after = self._partition_field_on_colon(line)
_name, _type, _desc = before, '', after
if parse_type:
diff --git a/sphinx/ext/viewcode.py b/sphinx/ext/viewcode.py
index 4b1c62ad0d1..39e4cf420b7 100644
--- a/sphinx/ext/viewcode.py
+++ b/sphinx/ext/viewcode.py
@@ -205,7 +205,7 @@ def env_purge_doc(app: Sphinx, env: BuildEnvironment, docname: str) -> None:
if entry is False:
continue
- code, tags, used, refname = entry
+ _code, _tags, used, _refname = entry
for fullname in list(used):
if used[fullname] == docname:
used.pop(fullname)
@@ -250,7 +250,7 @@ def get_module_filename(app: Sphinx, modname: str) -> _StrPath | None:
return None
else:
try:
- filename, source = ModuleAnalyzer.get_module_source(modname)
+ filename, _source = ModuleAnalyzer.get_module_source(modname)
return filename
except Exception:
return None
@@ -323,7 +323,7 @@ def collect_pages(app: Sphinx) -> Iterator[tuple[str, dict[str, Any], str]]:
max_index = len(lines) - 1
link_text = _('[docs]')
for name, docname in used.items():
- type, start, end = tags[name]
+ _type, start, end = tags[name]
backlink = urito(pagename, docname) + '#' + refname + '.' + name
lines[start] = (
f'<div class="viewcode-block" id="{name}">\n'
diff --git a/sphinx/roles.py b/sphinx/roles.py
index 98843de5a95..04469f45488 100644
--- a/sphinx/roles.py
+++ b/sphinx/roles.py
@@ -115,7 +115,7 @@ def create_non_xref_node(self) -> tuple[list[Node], list[system_message]]:
text = utils.unescape(self.text[1:])
if self.fix_parens:
self.has_explicit_title = False # treat as implicit
- text, target = self.update_title_and_target(text, '')
+ text, _target = self.update_title_and_target(text, '')
node = self.innernodeclass(self.rawtext, text, classes=self.classes)
return self.result_nodes(self.inliner.document, self.env, node, is_ref=False)
diff --git a/sphinx/util/docfields.py b/sphinx/util/docfields.py
index 1c24a73bf2e..dcc24753862 100644
--- a/sphinx/util/docfields.py
+++ b/sphinx/util/docfields.py
@@ -118,7 +118,7 @@ def make_xref(
if location is not None:
with contextlib.suppress(ValueError):
lineno = get_node_line(location)
- ns, messages = role(rolename, target, target, lineno, inliner, {}, [])
+ ns, _messages = role(rolename, target, target, lineno, inliner, {}, [])
return nodes.inline(target, '', *ns)
def make_xrefs(
diff --git a/sphinx/util/docutils.py b/sphinx/util/docutils.py
index 70d8e69be6f..b53774aa26f 100644
--- a/sphinx/util/docutils.py
+++ b/sphinx/util/docutils.py
@@ -370,7 +370,7 @@ def write(self, text: str) -> None:
if not matched:
logger.warning(text.rstrip('\r\n'), type='docutils')
else:
- location, type, level = matched.groups()
+ location, type, _level = matched.groups()
message = report_re.sub('', text).rstrip()
logger.log(type, message, location=location, type='docutils')
diff --git a/sphinx/writers/texinfo.py b/sphinx/writers/texinfo.py
index b4f1272d49d..eaa7dbdb6e7 100644
--- a/sphinx/writers/texinfo.py
+++ b/sphinx/writers/texinfo.py
@@ -886,7 +886,7 @@ def depart_collected_footnote(self, node: Element) -> None:
def visit_footnote_reference(self, node: Element) -> None:
num = node.astext().strip()
try:
- footnode, used = self.footnotestack[-1][num]
+ footnode, _used = self.footnotestack[-1][num]
except (KeyError, IndexError) as exc:
raise nodes.SkipNode from exc
# footnotes are repeated for each reference
diff --git a/tests/test_builders/test_build_linkcheck.py b/tests/test_builders/test_build_linkcheck.py
index 32b7ae79ff7..82baa62f3ef 100644
--- a/tests/test_builders/test_build_linkcheck.py
+++ b/tests/test_builders/test_build_linkcheck.py
@@ -713,7 +713,7 @@ def log_date_time_string(self):
def test_follows_redirects_on_HEAD(app, capsys):
with serve_application(app, make_redirect_handler(support_head=True)) as address:
app.build()
- stdout, stderr = capsys.readouterr()
+ _stdout, stderr = capsys.readouterr()
content = (app.outdir / 'output.txt').read_text(encoding='utf8')
assert content == (
'index.rst:1: [redirected with Found] '
@@ -736,7 +736,7 @@ def test_follows_redirects_on_HEAD(app, capsys):
def test_follows_redirects_on_GET(app, capsys):
with serve_application(app, make_redirect_handler(support_head=False)) as address:
app.build()
- stdout, stderr = capsys.readouterr()
+ _stdout, stderr = capsys.readouterr()
content = (app.outdir / 'output.txt').read_text(encoding='utf8')
assert content == (
'index.rst:1: [redirected with Found] '
diff --git a/tests/test_config/test_config.py b/tests/test_config/test_config.py
index d297af6f2ee..5e68b4a9657 100644
--- a/tests/test_config/test_config.py
+++ b/tests/test_config/test_config.py
@@ -68,7 +68,7 @@ def test_config_opt_deprecated(recwarn):
opt = _Opt('default', '', ())
with pytest.warns(RemovedInSphinx90Warning):
- default, rebuild, valid_types = opt
+ _default, _rebuild, _valid_types = opt
with pytest.warns(RemovedInSphinx90Warning):
_ = opt[0]
diff --git a/tests/test_directives/test_directive_code.py b/tests/test_directives/test_directive_code.py
index 625d15c1f31..16a25dda687 100644
--- a/tests/test_directives/test_directive_code.py
+++ b/tests/test_directives/test_directive_code.py
@@ -51,7 +51,7 @@ def test_LiteralIncludeReader_lineno_start(literal_inc_path):
def test_LiteralIncludeReader_pyobject1(literal_inc_path):
options = {'lineno-match': True, 'pyobject': 'Foo'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'class Foo:\n pass\n'
assert reader.lineno_start == 5
@@ -59,7 +59,7 @@ def test_LiteralIncludeReader_pyobject1(literal_inc_path):
def test_LiteralIncludeReader_pyobject2(literal_inc_path):
options = {'pyobject': 'Bar'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'class Bar:\n def baz():\n pass\n'
assert reader.lineno_start == 1 # no lineno-match
@@ -67,21 +67,21 @@ def test_LiteralIncludeReader_pyobject2(literal_inc_path):
def test_LiteralIncludeReader_pyobject3(literal_inc_path):
options = {'pyobject': 'Bar.baz'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == ' def baz():\n pass\n'
def test_LiteralIncludeReader_pyobject_and_lines(literal_inc_path):
options = {'pyobject': 'Bar', 'lines': '2-'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == ' def baz():\n pass\n'
def test_LiteralIncludeReader_lines1(literal_inc_path):
options = {'lines': '1-3'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == (
'# Literally included file using Python highlighting\n'
'\n'
@@ -92,7 +92,7 @@ def test_LiteralIncludeReader_lines1(literal_inc_path):
def test_LiteralIncludeReader_lines2(literal_inc_path):
options = {'lines': '1,3,5'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == (
'# Literally included file using Python highlighting\n'
'foo = "Including Unicode characters: üöä"\n'
@@ -103,7 +103,7 @@ def test_LiteralIncludeReader_lines2(literal_inc_path):
def test_LiteralIncludeReader_lines_and_lineno_match1(literal_inc_path):
options = {'lines': '3-5', 'lineno-match': True}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'foo = "Including Unicode characters: üöä"\n\nclass Foo:\n'
assert reader.lineno_start == 3
@@ -133,7 +133,7 @@ def test_LiteralIncludeReader_lines_and_lineno_match3(literal_inc_path, app):
def test_LiteralIncludeReader_start_at(literal_inc_path):
options = {'lineno-match': True, 'start-at': 'Foo', 'end-at': 'Bar'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'class Foo:\n pass\n\nclass Bar:\n'
assert reader.lineno_start == 5
@@ -141,7 +141,7 @@ def test_LiteralIncludeReader_start_at(literal_inc_path):
def test_LiteralIncludeReader_start_after(literal_inc_path):
options = {'lineno-match': True, 'start-after': 'Foo', 'end-before': 'Bar'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == ' pass\n\n'
assert reader.lineno_start == 6
@@ -154,7 +154,7 @@ def test_LiteralIncludeReader_start_after_and_lines(literal_inc_path):
'end-before': 'comment',
}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == '\nclass Bar:\n def baz():\n pass\n\n'
assert reader.lineno_start == 7
@@ -162,7 +162,7 @@ def test_LiteralIncludeReader_start_after_and_lines(literal_inc_path):
def test_LiteralIncludeReader_start_at_and_lines(literal_inc_path):
options = {'lines': '2, 3, 5', 'start-at': 'foo', 'end-before': '#'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == '\nclass Foo:\n\n'
assert reader.lineno_start == 1
@@ -192,14 +192,14 @@ def test_LiteralIncludeReader_missing_start_and_end(literal_inc_path):
def test_LiteralIncludeReader_end_before(literal_inc_path):
options = {'end-before': 'nclud'} # *nclud* matches first and third lines.
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == '# Literally included file using Python highlighting\n\n'
def test_LiteralIncludeReader_prepend(literal_inc_path):
options = {'lines': '1', 'prepend': 'Hello', 'append': 'Sphinx'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == (
'Hello\n# Literally included file using Python highlighting\nSphinx\n'
)
@@ -209,25 +209,25 @@ def test_LiteralIncludeReader_dedent(literal_inc_path):
# dedent: 2
options = {'lines': '9-11', 'dedent': 2}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == ' def baz():\n pass\n\n'
# dedent: 4
options = {'lines': '9-11', 'dedent': 4}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'def baz():\n pass\n\n'
# dedent: 6
options = {'lines': '9-11', 'dedent': 6}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'f baz():\n pass\n\n'
# dedent: None
options = {'lines': '9-11', 'dedent': None}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'def baz():\n pass\n\n'
@@ -240,7 +240,7 @@ def test_LiteralIncludeReader_dedent_and_append_and_prepend(literal_inc_path):
'append': '# comment',
}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'class Foo:\n def baz():\n pass\n\n# comment\n'
@@ -248,20 +248,20 @@ def test_LiteralIncludeReader_tabwidth(testroot):
# tab-width: 4
options = {'tab-width': 4, 'pyobject': 'Qux'}
reader = LiteralIncludeReader(testroot / 'target.py', options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'class Qux:\n def quux(self):\n pass\n'
# tab-width: 8
options = {'tab-width': 8, 'pyobject': 'Qux'}
reader = LiteralIncludeReader(testroot / 'target.py', options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'class Qux:\n def quux(self):\n pass\n'
def test_LiteralIncludeReader_tabwidth_dedent(testroot):
options = {'tab-width': 4, 'dedent': 4, 'pyobject': 'Qux.quux'}
reader = LiteralIncludeReader(testroot / 'target.py', options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'def quux(self):\n pass\n'
@@ -269,7 +269,7 @@ def test_LiteralIncludeReader_diff(testroot, literal_inc_path):
literal_diff_path = testroot / 'literal-diff.inc'
options = {'diff': literal_diff_path}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == (
f'--- {literal_diff_path}\n'
f'+++ {literal_inc_path}\n'
diff --git a/tests/test_domains/test_domain_py.py b/tests/test_domains/test_domain_py.py
index 151fb4494f7..b3c246297ad 100644
--- a/tests/test_domains/test_domain_py.py
+++ b/tests/test_domains/test_domain_py.py
@@ -43,7 +43,7 @@ def parse(sig):
m = py_sig_re.match(sig)
if m is None:
raise ValueError
- name_prefix, tp_list, name, arglist, retann = m.groups()
+ _name_prefix, _tp_list, _name, arglist, _retann = m.groups()
signode = addnodes.desc_signature(sig, '')
_pseudo_parse_arglist(signode, arglist)
return signode.astext()
diff --git a/tests/test_extensions/test_ext_coverage.py b/tests/test_extensions/test_ext_coverage.py
index 3c40d3dfaea..7422cd3560f 100644
--- a/tests/test_extensions/test_ext_coverage.py
+++ b/tests/test_extensions/test_ext_coverage.py
@@ -38,7 +38,7 @@ def test_build(app: SphinxTestApp) -> None:
assert 'api.h' in c_undoc
assert ' * Py_SphinxTest' in c_undoc
- undoc_py, undoc_c, py_undocumented, py_documented = pickle.loads(
+ undoc_py, undoc_c, _py_undocumented, _py_documented = pickle.loads(
(app.outdir / 'undoc.pickle').read_bytes()
)
assert len(undoc_c) == 1
diff --git a/tests/test_extensions/test_ext_intersphinx_cache.py b/tests/test_extensions/test_ext_intersphinx_cache.py
index b2e52a5b180..3431f136874 100644
--- a/tests/test_extensions/test_ext_intersphinx_cache.py
+++ b/tests/test_extensions/test_ext_intersphinx_cache.py
@@ -242,7 +242,7 @@ def test_load_mappings_cache(tmp_path):
item = dict((project.normalise(entry),))
inventories = InventoryAdapter(app.env)
assert list(inventories.cache) == ['http://localhost:9341/a']
- e_name, e_time, e_inv = inventories.cache['http://localhost:9341/a']
+ e_name, _e_time, e_inv = inventories.cache['http://localhost:9341/a']
assert e_name == 'spam'
assert e_inv == {'py:module': item}
assert inventories.named_inventory == {'spam': {'py:module': item}}
@@ -273,7 +273,7 @@ def test_load_mappings_cache_update(tmp_path):
inventories = InventoryAdapter(app2.env)
# check that the URLs were changed accordingly
assert list(inventories.cache) == ['http://localhost:9341/new']
- e_name, e_time, e_inv = inventories.cache['http://localhost:9341/new']
+ e_name, _e_time, e_inv = inventories.cache['http://localhost:9341/new']
assert e_name == 'spam'
assert e_inv == {'py:module': item}
assert inventories.named_inventory == {'spam': {'py:module': item}}
@@ -310,7 +310,7 @@ def test_load_mappings_cache_revert_update(tmp_path):
inventories = InventoryAdapter(app3.env)
# check that the URLs were changed accordingly
assert list(inventories.cache) == ['http://localhost:9341/old']
- e_name, e_time, e_inv = inventories.cache['http://localhost:9341/old']
+ e_name, _e_time, e_inv = inventories.cache['http://localhost:9341/old']
assert e_name == 'spam'
assert e_inv == {'py:module': item}
assert inventories.named_inventory == {'spam': {'py:module': item}}
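The mechanical change throughout this patch is one pattern: unpacked values that are never read gain a leading underscore, so the stricter unused-variable checks in newer Ruff releases stay quiet. A minimal sketch of the convention (illustrative only, not Sphinx code):

    def parse() -> tuple[str, int, bool]:
        return 'name', 3, True

    # Only 'name' is used; the underscore prefix marks the other two
    # bindings as intentionally unused, which linters then ignore.
    name, _lineno, _flag = parse()
    print(name)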
From 5c4b29ee324115f2963245c55466a658cfafeefe Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Wed, 19 Mar 2025 19:24:50 +0000
Subject: [PATCH 008/466] Bump Ruff to 0.10.0
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index 59312974f8b..fbee41063fe 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -92,7 +92,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.9.10",
+ "ruff==0.10.0",
"mypy==1.15.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
From 97d8d6bd24b0824393abaae2a8420e6b28f69aa9 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Wed, 19 Mar 2025 19:27:40 +0000
Subject: [PATCH 009/466] Bump Ruff to 0.11.0
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index fbee41063fe..5d480cf3f76 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -92,7 +92,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.10.0",
+ "ruff==0.11.0",
"mypy==1.15.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
From e01e42f5fc738815b8499c4ede30c6caf130f0a4 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Wed, 19 Mar 2025 20:11:35 +0000
Subject: [PATCH 010/466] Fix ``INVALID_BUILTIN_CLASSES`` test for Python
3.14.0a6+
---
tests/test_util/test_util_typing.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/tests/test_util/test_util_typing.py b/tests/test_util/test_util_typing.py
index 35ee240f7b8..8a561c378ed 100644
--- a/tests/test_util/test_util_typing.py
+++ b/tests/test_util/test_util_typing.py
@@ -205,7 +205,7 @@ def test_is_invalid_builtin_class() -> None:
zipfile.Path,
zipfile.CompleteDirs,
)
- if sys.version_info[:2] >= (3, 13):
+ if sys.version_info[:2] == (3, 13):
invalid_types += (
# pathlib
Path,
@@ -217,7 +217,7 @@ def test_is_invalid_builtin_class() -> None:
)
invalid_names = {(cls.__module__, cls.__qualname__) for cls in invalid_types}
- if sys.version_info[:2] < (3, 13):
+ if sys.version_info[:2] != (3, 13):
invalid_names |= {
('pathlib._local', 'Path'),
('pathlib._local', 'PosixPath'),
@@ -231,7 +231,7 @@ def test_is_invalid_builtin_class() -> None:
('zipfile._path', 'Path'),
('zipfile._path', 'CompleteDirs'),
}
- assert _INVALID_BUILTIN_CLASSES.keys() == invalid_names
+ assert set(_INVALID_BUILTIN_CLASSES) == invalid_names
def test_restify_type_hints_containers():
From 05838f0e29eb71ec18dfb9c220a21d15e89e4071 Mon Sep 17 00:00:00 2001
From: Victor Wheeler
Date: Wed, 19 Mar 2025 19:05:14 -0600
Subject: [PATCH 011/466] Add an introduction to HTML templating (#13430)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
---
AUTHORS.rst | 1 +
doc/development/html_themes/templating.rst | 26 ++++++++++++++++++++++
2 files changed, 27 insertions(+)
diff --git a/AUTHORS.rst b/AUTHORS.rst
index ff92ab7eab7..eed59b68cbd 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -107,6 +107,7 @@ Contributors
* Thomas Lamb -- linkcheck builder
* Thomas Waldmann -- apidoc module fixes
* Tim Hoffmann -- theme improvements
+* Victor Wheeler -- documentation improvements
* Vince Salvino -- JavaScript search improvements
* Will Maier -- directory HTML builder
* Zac Hatfield-Dodds -- doctest reporting improvements, intersphinx performance
diff --git a/doc/development/html_themes/templating.rst b/doc/development/html_themes/templating.rst
index e7c1d11f453..77b43882f86 100644
--- a/doc/development/html_themes/templating.rst
+++ b/doc/development/html_themes/templating.rst
@@ -6,6 +6,32 @@
Templating
==========
+What Is Templating?
+-------------------
+
+Templating is a method of generating HTML pages by combining static templates
+with variable data.
+The template files contain the static parts of the desired HTML output
+and include special syntax describing how variable content will be inserted.
+For example, this can be used to insert the current date in the footer of each page,
+or to surround the main content of the document with a scaffold of HTML for layout
+and formatting purposes.
+Doing so only requires an understanding of HTML and the templating syntax.
+Knowledge of Python can be helpful, but is not required.
+
+Templating uses an inheritance mechanism which allows child template files
+(e.g. in a theme) to override as much (or as little) of their 'parents' as desired.
+Likewise, content authors can use their own local templates to override as much (or
+as little) of the theme templates as desired.
+
+The result is that the Sphinx core, without needing to be changed, provides basic
+HTML generation, independent of the structure and appearance of the final output,
+while granting a great deal of flexibility to theme and content authors.
+
+
+Sphinx Templating
+-----------------
+
Sphinx uses the `Jinja <https://jinja.palletsprojects.com/>`_ templating engine
for its HTML templates. Jinja is a text-based engine, inspired by Django
templates, so anyone having used Django will already be familiar with it. It
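The inheritance mechanism described in the new section can be shown in a few lines. A minimal sketch driving Jinja from Python, with hypothetical template names rather than Sphinx's real layout:

    from jinja2 import DictLoader, Environment

    templates = {
        # The parent supplies the scaffold and a default block.
        'layout.html': '<body>{% block body %}(empty){% endblock %}</body>',
        # The child overrides only the block it cares about.
        'page.html': '{% extends "layout.html" %}'
                     '{% block body %}Hello, docs!{% endblock %}',
    }
    env = Environment(loader=DictLoader(templates))
    print(env.get_template('page.html').render())
    # -> <body>Hello, docs!</body>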
From 9d5c22e59365f5e354d57a6f8bfc9edf2dcc872a Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Thu, 20 Mar 2025 15:19:35 +0000
Subject: [PATCH 012/466] Improve the error message for themes using the
removed 'style' field (#13443)
---
sphinx/builders/html/__init__.py | 14 ++++++++++++++
1 file changed, 14 insertions(+)
diff --git a/sphinx/builders/html/__init__.py b/sphinx/builders/html/__init__.py
index 5e6acdeaf9d..1ba026a61d0 100644
--- a/sphinx/builders/html/__init__.py
+++ b/sphinx/builders/html/__init__.py
@@ -17,6 +17,7 @@
from urllib.parse import quote
import docutils.readers.doctree
+import jinja2.exceptions
from docutils import nodes
from docutils.core import Publisher
from docutils.frontend import OptionParser
@@ -1221,6 +1222,19 @@ def js_tag(js: _JavaScript | str) -> str:
)
return
except Exception as exc:
+ if (
+ isinstance(exc, jinja2.exceptions.UndefinedError)
+ and exc.message == "'style' is undefined"
+ ):
+ msg = __(
+ "The '%s' theme does not support this version of Sphinx, "
+ "because it uses the 'style' field in HTML templates, "
+ 'which was deprecated in Sphinx 5.1 and removed in Sphinx 7.0. '
+ "The theme must be updated to use the 'styles' field instead. "
+ 'See https://www.sphinx-doc.org/en/master/development/html_themes/templating.html#styles'
+ )
+ raise ThemeError(msg % self.config.html_theme) from None
+
msg = __('An error happened in rendering the page %s.\nReason: %r') % (
pagename,
exc,
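The string comparison on exc.message works because Jinja raises UndefinedError with exactly that text when a template operates on the removed variable. A minimal reproduction, assuming only the jinja2 package:

    import jinja2

    env = jinja2.Environment()
    try:
        # Old themes wrote e.g. pathto('_static/' + style, 1); string
        # concatenation with an undefined name raises, whereas merely
        # printing an undefined name would render as ''.
        env.from_string("{{ '_static/' + style }}").render()
    except jinja2.exceptions.UndefinedError as exc:
        print(exc.message)  # 'style' is undefined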
From e6e7f99ca05efce9de421d6b259548b586275493 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 20 Mar 2025 15:47:49 +0000
Subject: [PATCH 013/466] Bump pyright to 1.1.397 (#13440)
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index 5d480cf3f76..6a6e2027887 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -102,7 +102,7 @@ lint = [
"types-Pygments==2.19.0.20250219",
"types-requests==2.32.0.20250301", # align with requests
"types-urllib3==1.26.25.14",
- "pyright==1.1.396",
+ "pyright==1.1.397",
"pytest>=8.0",
"pypi-attestations==0.0.21",
"betterproto==2.0.0b6",
From 35b8bf0cab1eb4bc830a149e3e893262766ae7a0 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Thu, 20 Mar 2025 15:48:38 +0000
Subject: [PATCH 014/466] Remove mypy overrides for ``tests.test_addnodes``
(#13420)
---
pyproject.toml | 1 -
tests/test_addnodes.py | 4 +++-
2 files changed, 3 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 6a6e2027887..4dc02fa7811 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -203,7 +203,6 @@ ignore_missing_imports = true
[[tool.mypy.overrides]]
module = [
# tests/
- "tests.test_addnodes",
"tests.test_application",
"tests.test_events",
"tests.test_highlighting",
diff --git a/tests/test_addnodes.py b/tests/test_addnodes.py
index b3f77ad2bb9..8cac53b2828 100644
--- a/tests/test_addnodes.py
+++ b/tests/test_addnodes.py
@@ -20,7 +20,9 @@ def sig_elements() -> Iterator[set[type[addnodes.desc_sig_element]]]:
addnodes.SIG_ELEMENTS = original # restore the previous value
-def test_desc_sig_element_nodes(sig_elements):
+def test_desc_sig_element_nodes(
+ sig_elements: set[type[addnodes.desc_sig_element]],
+) -> None:
"""Test the registration of ``desc_sig_element`` subclasses."""
# expected desc_sig_* node classes (must be declared *after* reloading
# the module since otherwise the objects are not the correct ones)
From 62594f0fb2297cd60cc1feb6bdc9659ad0d60d73 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Thu, 20 Mar 2025 15:49:51 +0000
Subject: [PATCH 015/466] Remove mypy overrides for ``tests.test_events``
(#13422)
---
pyproject.toml | 1 -
tests/test_events.py | 10 ++++++++--
2 files changed, 8 insertions(+), 3 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 4dc02fa7811..a698e02bdf5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -204,7 +204,6 @@ ignore_missing_imports = true
module = [
# tests/
"tests.test_application",
- "tests.test_events",
"tests.test_highlighting",
"tests.test_project",
"tests.test_versioning",
diff --git a/tests/test_events.py b/tests/test_events.py
index 56f76511dcb..412116c9f4b 100644
--- a/tests/test_events.py
+++ b/tests/test_events.py
@@ -3,12 +3,18 @@
from __future__ import annotations
from types import SimpleNamespace
+from typing import TYPE_CHECKING
import pytest
from sphinx.errors import ExtensionError
from sphinx.events import EventManager
+if TYPE_CHECKING:
+ from typing import NoReturn
+
+ from sphinx.application import Sphinx
+
def test_event_priority() -> None:
result = []
@@ -27,7 +33,7 @@ def test_event_priority() -> None:
def test_event_allowed_exceptions() -> None:
- def raise_error(app):
+ def raise_error(app: Sphinx) -> NoReturn:
raise RuntimeError
app = SimpleNamespace(pdb=False) # pass a dummy object as an app
@@ -44,7 +50,7 @@ def raise_error(app):
def test_event_pdb() -> None:
- def raise_error(app):
+ def raise_error(app: Sphinx) -> NoReturn:
raise RuntimeError
app = SimpleNamespace(pdb=True) # pass a dummy object as an app
From 21d93b585ac854458c82afcff84e3dd0805e7b68 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 20 Mar 2025 15:53:45 +0000
Subject: [PATCH 016/466] Bump types-requests to 2.32.0.20250306 (#13416)
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index a698e02bdf5..442f29b2f4d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -100,7 +100,7 @@ lint = [
"types-docutils==0.21.0.20241128",
"types-Pillow==10.2.0.20240822",
"types-Pygments==2.19.0.20250219",
- "types-requests==2.32.0.20250301", # align with requests
+ "types-requests==2.32.0.20250306", # align with requests
"types-urllib3==1.26.25.14",
"pyright==1.1.397",
"pytest>=8.0",
From 5bd70fc5d2b84f49c21569967f65eb1ff4225ab1 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Thu, 20 Mar 2025 15:58:13 +0000
Subject: [PATCH 017/466] Bump pypi-attestations to 0.0.22 (#13444)
---
.github/workflows/create-release.yml | 2 +-
pyproject.toml | 2 +-
utils/convert_attestations.py | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml
index 6f3ebf264a8..7d854e764b8 100644
--- a/.github/workflows/create-release.yml
+++ b/.github/workflows/create-release.yml
@@ -44,7 +44,7 @@ jobs:
run: |
uv pip install build "twine>=5.1"
# resolution fails without betterproto
- uv pip install pypi-attestations==0.0.21 betterproto==2.0.0b6
+ uv pip install pypi-attestations==0.0.22 betterproto==2.0.0b6
- name: Build distribution
run: python -m build
diff --git a/pyproject.toml b/pyproject.toml
index 442f29b2f4d..13e3e186ef9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -104,7 +104,7 @@ lint = [
"types-urllib3==1.26.25.14",
"pyright==1.1.397",
"pytest>=8.0",
- "pypi-attestations==0.0.21",
+ "pypi-attestations==0.0.22",
"betterproto==2.0.0b6",
]
test = [
diff --git a/utils/convert_attestations.py b/utils/convert_attestations.py
index 7e227c21358..0d013bf97ce 100644
--- a/utils/convert_attestations.py
+++ b/utils/convert_attestations.py
@@ -7,7 +7,7 @@
# /// script
# requires-python = ">=3.11"
# dependencies = [
-# "pypi-attestations==0.0.21",
+# "pypi-attestations==0.0.22",
# "betterproto==2.0.0b6",
# ]
# ///
From 9191c34ebf67f44245788a068079ed734c4ae8b2 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 20 Mar 2025 15:59:49 +0000
Subject: [PATCH 018/466] Bump types-pygments to 2.19.0.20250305 (#13412)
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index 13e3e186ef9..d63af5241b4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -99,7 +99,7 @@ lint = [
"types-defusedxml==0.7.0.20240218",
"types-docutils==0.21.0.20241128",
"types-Pillow==10.2.0.20240822",
- "types-Pygments==2.19.0.20250219",
+ "types-Pygments==2.19.0.20250305",
"types-requests==2.32.0.20250306", # align with requests
"types-urllib3==1.26.25.14",
"pyright==1.1.397",
From e979b1faaf9b878eba29d9f2e1bef8c57aa97034 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Thu, 20 Mar 2025 18:28:11 +0000
Subject: [PATCH 019/466] Use the PEP 735 ``[dependency-groups]`` table
(#13073)
---
.github/workflows/builddoc.yml | 2 +-
.github/workflows/create-release.yml | 5 +---
.github/workflows/lint.yml | 8 ++---
.github/workflows/main.yml | 14 ++++-----
.github/workflows/transifex.yml | 4 +--
pyproject.toml | 45 ++++++++++++++++++++++++++++
tox.ini | 16 +++++-----
7 files changed, 68 insertions(+), 26 deletions(-)
diff --git a/.github/workflows/builddoc.yml b/.github/workflows/builddoc.yml
index 7f8471deecb..8955cf2988a 100644
--- a/.github/workflows/builddoc.yml
+++ b/.github/workflows/builddoc.yml
@@ -36,7 +36,7 @@ jobs:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install .[docs]
+ run: uv pip install . --group docs
- name: Render the documentation
run: >
sphinx-build
diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml
index 7d854e764b8..5d07fcbd18d 100644
--- a/.github/workflows/create-release.yml
+++ b/.github/workflows/create-release.yml
@@ -41,10 +41,7 @@ jobs:
enable-cache: false
- name: Install build dependencies (pypa/build, twine)
- run: |
- uv pip install build "twine>=5.1"
- # resolution fails without betterproto
- uv pip install pypi-attestations==0.0.22 betterproto==2.0.0b6
+ run: uv pip install --group package
- name: Build distribution
run: python -m build
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index a3b5cf7ae52..7e72b6f6fd0 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -55,7 +55,7 @@ jobs:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install ".[lint,test]"
+ run: uv pip install -r pyproject.toml --group package --group test --group types
- name: Type check with mypy
run: mypy
@@ -76,7 +76,7 @@ jobs:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install ".[lint,test]"
+ run: uv pip install -r pyproject.toml --group package --group test --group types
- name: Type check with pyright
run: pyright
@@ -97,7 +97,7 @@ jobs:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install --upgrade sphinx-lint
+ run: uv pip install --group lint
- name: Lint documentation with sphinx-lint
run: make doclinter
@@ -118,7 +118,7 @@ jobs:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install --upgrade twine build
+ run: uv pip install --group package
- name: Lint with twine
run: |
python -m build .
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 1758254c633..7e7bdb6dab2 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -67,7 +67,7 @@ jobs:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install .[test]
+ run: uv pip install . --group test
- name: Install Docutils ${{ matrix.docutils }}
run: uv pip install --upgrade "docutils~=${{ matrix.docutils }}.0"
- name: Test with pytest
@@ -198,7 +198,7 @@ jobs:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install .[test]
+ run: uv pip install . --group test
- name: Test with pytest
run: python -m pytest -vv --durations 25
env:
@@ -227,7 +227,7 @@ jobs:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install .[test]
+ run: uv pip install . --group test
- name: Test with pytest
run: python -m pytest -vv --durations 25
env:
@@ -262,7 +262,7 @@ jobs:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install .[test]
+ run: uv pip install . --group test
- name: Install Docutils' HEAD
run: uv pip install "docutils @ git+https://repo.or.cz/docutils.git#subdirectory=docutils"
- name: Test with pytest
@@ -296,7 +296,7 @@ jobs:
enable-cache: false
- name: Install dependencies
run: |
- uv pip install .[test] --resolution lowest-direct
+ uv pip install . --group test --resolution lowest-direct
uv pip install alabaster==1.0.0
- name: Test with pytest
run: python -m pytest -n logical --dist=worksteal -vv --durations 25
@@ -326,7 +326,7 @@ jobs:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install .[test]
+ run: uv pip install . --group test
- name: Test with pytest
run: python -m pytest -vv --durations 25
env:
@@ -357,7 +357,7 @@ jobs:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install .[test] pytest-cov
+ run: uv pip install . --group test pytest-cov
- name: Test with pytest
run: python -m pytest -vv --cov . --cov-append --cov-config pyproject.toml
env:
diff --git a/.github/workflows/transifex.yml b/.github/workflows/transifex.yml
index 09437cb7ece..56246266515 100644
--- a/.github/workflows/transifex.yml
+++ b/.github/workflows/transifex.yml
@@ -41,7 +41,7 @@ jobs:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install --upgrade babel jinja2
+ run: uv pip install --group translations
- name: Extract translations from source code
run: python utils/babel_runner.py extract
- name: Push translations to transifex.com
@@ -77,7 +77,7 @@ jobs:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install --upgrade babel jinja2
+ run: uv pip install --group translations
- name: Extract translations from source code
run: python utils/babel_runner.py extract
- name: Pull translations from transifex.com
diff --git a/pyproject.toml b/pyproject.toml
index d63af5241b4..dd50b0aa6ef 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -130,6 +130,48 @@ sphinx-quickstart = "sphinx.cmd.quickstart:main"
sphinx-apidoc = "sphinx.ext.apidoc:main"
sphinx-autogen = "sphinx.ext.autosummary.generate:main"
+[dependency-groups]
+docs = [
+ "sphinxcontrib-websupport",
+]
+lint = [
+ "ruff==0.11.0",
+ "sphinx-lint>=0.9",
+]
+package = [
+ "betterproto==2.0.0b6", # resolution fails without betterproto
+ "build",
+ "pypi-attestations==0.0.22",
+ "twine>=5.1",
+]
+test = [
+ "pytest>=8.0",
+ "pytest-xdist[psutil]>=3.4",
+ "cython>=3.0", # for Cython compilation
+ "defusedxml>=0.7.1", # for secure XML/HTML parsing
+ "setuptools>=70.0", # for Cython compilation
+ "typing_extensions>=4.9", # for typing_extensions.Unpack
+]
+translations = [
+ "babel>=2.13",
+ "Jinja2>=3.1",
+]
+types = [
+ "mypy==1.15.0",
+ "pyright==1.1.397",
+ { include-group = "type-stubs" },
+]
+type-stubs = [
+ # align with versions used elsewhere
+ "types-colorama==0.4.15.20240311",
+ "types-defusedxml==0.7.0.20240218",
+ "types-docutils==0.21.0.20241128",
+ "types-Pillow==10.2.0.20240822",
+ "types-Pygments==2.19.0.20250305",
+ "types-requests==2.32.0.20250306",
+ "types-urllib3==1.26.25.14",
+]
+
[tool.flit.module]
name = "sphinx"
@@ -420,3 +462,6 @@ reportUnusedFunction = "none"
reportUnusedImport = "none"
reportUnusedVariable = "none"
reportWildcardImportFromLibrary = "none"
+
+[tool.uv]
+default-groups = "all"
diff --git a/tox.ini b/tox.ini
index 674013fdc08..23b239c7ffc 100644
--- a/tox.ini
+++ b/tox.ini
@@ -20,7 +20,7 @@ passenv =
READTHEDOCS
description =
py{311,312,313,314}: Run unit tests against {envname}.
-extras =
+dependency_groups =
test
setenv =
PYTHONWARNINGS = error
@@ -31,8 +31,9 @@ commands=
[testenv:lint]
description =
Run linters.
-extras =
+dependency_groups =
lint
+ types
# If you update any of these commands, don't forget to update the equivalent
# GitHub Workflow step
commands =
@@ -43,7 +44,7 @@ commands =
[testenv:docs]
description =
Build documentation.
-extras =
+dependency_groups =
docs
commands =
python -c "import shutil; shutil.rmtree('./build/sphinx', ignore_errors=True) if '{env:CLEAN:}' else None"
@@ -52,7 +53,7 @@ commands =
[testenv:docs-live]
description =
Build documentation.
-extras =
+dependency_groups =
docs
deps =
sphinx-autobuild
@@ -70,7 +71,7 @@ commands =
[testenv:ruff]
description =
Run ruff formatting and linting.
-extras =
+dependency_groups =
lint
commands =
ruff format .
@@ -79,8 +80,7 @@ commands =
[testenv:mypy]
description =
Run mypy type checking.
-extras =
- lint
- test
+dependency_groups =
+ types
commands =
mypy {posargs}
From fc8054cc30042eff2461a26f5566a23e7cd41221 Mon Sep 17 00:00:00 2001
From: Aarni Koskela
Date: Thu, 20 Mar 2025 20:49:07 +0200
Subject: [PATCH 020/466] Fall back to 'en' if `format_date` is called with a
falsy value (#13408)
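
A minimal sketch of the new behaviour (the ``format_date`` signature is
assumed from the test suite, not part of this patch):

    from datetime import date

    from sphinx.util import i18n

    # With a falsy language, the formatter now short-cuts to 'en' instead
    # of handing the empty value to Babel (which would reject it or fall
    # back to the LC_TIME locale).
    print(i18n.format_date('MMM dd, YYYY', date=date(2016, 2, 7), language=''))
    # -> 'Feb 07, 2016'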
---
sphinx/util/i18n.py | 8 ++++++++
tests/test_util/test_util_i18n.py | 7 -------
2 files changed, 8 insertions(+), 7 deletions(-)
diff --git a/sphinx/util/i18n.py b/sphinx/util/i18n.py
index 6cc4b31766e..05542876fc3 100644
--- a/sphinx/util/i18n.py
+++ b/sphinx/util/i18n.py
@@ -228,6 +228,14 @@ def babel_format_date(
if not hasattr(date, 'tzinfo'):
formatter = babel.dates.format_date
+ if not locale:
+ # Babel would not accept a falsy locale
+ # (or would try to fall back to the LC_TIME
+ # locale, which would not be what was requested),
+ # so we can just short-cut to English, as we
+ # would for the "fallback to English" case.
+ locale = 'en'
+
try:
return formatter(date, format, locale=locale)
except (ValueError, babel.core.UnknownLocaleError):
diff --git a/tests/test_util/test_util_i18n.py b/tests/test_util/test_util_i18n.py
index 4326b4382dd..31e774b7932 100644
--- a/tests/test_util/test_util_i18n.py
+++ b/tests/test_util/test_util_i18n.py
@@ -4,12 +4,10 @@
import datetime
import os
-import sys
import time
from pathlib import Path
from typing import TYPE_CHECKING
-import babel
import pytest
from babel.messages.mofile import read_mo
@@ -60,11 +58,6 @@ def test_catalog_write_mo(tmp_path):
assert read_mo(f) is not None
-# https://github.com/python-babel/babel/issues/1183
-@pytest.mark.xfail(
- sys.platform == 'win32' and babel.__version__ == '2.17.0',
- reason='Windows tests fail with Babel 2.17',
-)
def test_format_date():
date = datetime.date(2016, 2, 7)
From 7d4528b17a60f12dd13857913541b9386a4066ed Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 21 Mar 2025 16:18:35 +0000
Subject: [PATCH 021/466] Bump Ruff to 0.11.1 (#13447)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index dd50b0aa6ef..9829fb8bc8c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -92,7 +92,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.0",
+ "ruff==0.11.1",
"mypy==1.15.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
@@ -135,7 +135,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.0",
+ "ruff==0.11.1",
"sphinx-lint>=0.9",
]
package = [
From d066c2be731df5f1ffed5d657c696b57f39a4f39 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Fri, 21 Mar 2025 16:18:52 +0000
Subject: [PATCH 022/466] Remove mypy overrides for ``tests.test_application``
(#13421)
---
pyproject.toml | 1 -
tests/test_application.py | 5 +++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 9829fb8bc8c..6151d3b45c6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -245,7 +245,6 @@ ignore_missing_imports = true
[[tool.mypy.overrides]]
module = [
# tests/
- "tests.test_application",
"tests.test_highlighting",
"tests.test_project",
"tests.test_versioning",
diff --git a/tests/test_application.py b/tests/test_application.py
index b2bd7bbc66c..73c3f3556ca 100644
--- a/tests/test_application.py
+++ b/tests/test_application.py
@@ -20,6 +20,7 @@
if TYPE_CHECKING:
import os
+ from typing import Any
def test_instantiation(
@@ -50,7 +51,7 @@ def test_instantiation(
@pytest.mark.sphinx('html', testroot='root')
def test_events(app: SphinxTestApp) -> None:
- def empty():
+ def empty() -> None:
pass
with pytest.raises(ExtensionError) as excinfo:
@@ -62,7 +63,7 @@ def empty():
app.add_event('my_event')
assert "Event 'my_event' already present" in str(excinfo.value)
- def mock_callback(a_app, *args):
+ def mock_callback(a_app: SphinxTestApp, *args: Any) -> str:
assert a_app is app
assert emit_args == args
return 'ret'
From 10beeeb10a809a52885fd016f47aa6f3b51ad2f8 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 24 Mar 2025 19:24:31 +0000
Subject: [PATCH 023/466] Bump Ruff to 0.11.2 (#13450)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 6151d3b45c6..c9d902d6ed6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -92,7 +92,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.1",
+ "ruff==0.11.2",
"mypy==1.15.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
@@ -135,7 +135,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.1",
+ "ruff==0.11.2",
"sphinx-lint>=0.9",
]
package = [
From 8049b4bd2d98d288750bb1b552496b14b7434980 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Mon, 24 Mar 2025 19:24:58 +0000
Subject: [PATCH 024/466] Allow ignoring type stub files for native modules
(#13446)
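
Any non-empty value of the environment variable makes autodoc import the
native module itself instead of its ``.pyi`` type stub. The check, as a
sketch mirroring the change below:

    import os

    # Unset or empty: keep preferring type stubs; any other value skips them.
    skip_pyi = bool(os.getenv('SPHINX_AUTODOC_IGNORE_NATIVE_MODULE_TYPE_STUBS', ''))

For example, running
``SPHINX_AUTODOC_IGNORE_NATIVE_MODULE_TYPE_STUBS=1 sphinx-build ...``
enables the escape hatch for a single build.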
---
sphinx/ext/autodoc/importer.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/sphinx/ext/autodoc/importer.py b/sphinx/ext/autodoc/importer.py
index ea5b47e41e6..ca9c7ca7778 100644
--- a/sphinx/ext/autodoc/importer.py
+++ b/sphinx/ext/autodoc/importer.py
@@ -164,6 +164,7 @@ def import_module(modname: str, try_reload: bool = False) -> Any:
if modname in sys.modules:
return sys.modules[modname]
+ skip_pyi = bool(os.getenv('SPHINX_AUTODOC_IGNORE_NATIVE_MODULE_TYPE_STUBS', ''))
original_module_names = frozenset(sys.modules)
try:
spec = find_spec(modname)
@@ -171,7 +172,7 @@ def import_module(modname: str, try_reload: bool = False) -> Any:
msg = f'No module named {modname!r}'
raise ModuleNotFoundError(msg, name=modname) # NoQA: TRY301
spec, pyi_path = _find_type_stub_spec(spec, modname)
- if pyi_path is None:
+ if skip_pyi or pyi_path is None:
module = importlib.import_module(modname)
else:
if spec.loader is None:
From c0c041349302a783d160361d5bc0449be70f406a Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Mon, 24 Mar 2025 19:28:03 +0000
Subject: [PATCH 025/466] Remove mypy overrides for
``tests.test_util.test_util_matching`` (#13449)
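
The new annotations use the standard type-only import pattern: with
``from __future__ import annotations``, names imported under
``TYPE_CHECKING`` are visible to type checkers but never imported at
runtime. In brief:

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from pathlib import Path  # resolved by the type checker only

    def test_get_matching_files_all(rootdir: Path) -> None: ...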
---
pyproject.toml | 1 -
tests/test_util/test_util_matching.py | 23 ++++++++++++++---------
2 files changed, 14 insertions(+), 10 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index c9d902d6ed6..d841d10747d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -292,7 +292,6 @@ module = [
"tests.test_util.test_util_docutils",
"tests.test_util.test_util_images",
"tests.test_util.test_util_inventory",
- "tests.test_util.test_util_matching",
# tests/test_writers
"tests.test_writers.test_docutilsconf",
]
diff --git a/tests/test_util/test_util_matching.py b/tests/test_util/test_util_matching.py
index 0c17280aa6e..a2ad1ff5ba6 100644
--- a/tests/test_util/test_util_matching.py
+++ b/tests/test_util/test_util_matching.py
@@ -2,8 +2,13 @@
from __future__ import annotations
+from typing import TYPE_CHECKING
+
from sphinx.util.matching import Matcher, compile_matchers, get_matching_files
+if TYPE_CHECKING:
+ from pathlib import Path
+
def test_compile_matchers() -> None:
# exact matching
@@ -85,7 +90,7 @@ def test_Matcher() -> None:
assert matcher('subdir/world.py')
-def test_get_matching_files_all(rootdir):
+def test_get_matching_files_all(rootdir: Path) -> None:
files = get_matching_files(rootdir / 'test-root')
assert sorted(files) == [
'Makefile',
@@ -133,7 +138,7 @@ def test_get_matching_files_all(rootdir):
]
-def test_get_matching_files_all_exclude_single(rootdir):
+def test_get_matching_files_all_exclude_single(rootdir: Path) -> None:
files = get_matching_files(rootdir / 'test-root', exclude_patterns=['**.html'])
assert sorted(files) == [
'Makefile',
@@ -178,7 +183,7 @@ def test_get_matching_files_all_exclude_single(rootdir):
]
-def test_get_matching_files_all_exclude_multiple(rootdir):
+def test_get_matching_files_all_exclude_multiple(rootdir: Path) -> None:
files = get_matching_files(
rootdir / 'test-root', exclude_patterns=['**.html', '**.inc']
)
@@ -218,7 +223,7 @@ def test_get_matching_files_all_exclude_multiple(rootdir):
]
-def test_get_matching_files_all_exclude_nonexistent(rootdir):
+def test_get_matching_files_all_exclude_nonexistent(rootdir: Path) -> None:
files = get_matching_files(rootdir / 'test-root', exclude_patterns=['halibut/**'])
assert sorted(files) == [
'Makefile',
@@ -266,7 +271,7 @@ def test_get_matching_files_all_exclude_nonexistent(rootdir):
]
-def test_get_matching_files_all_include_single(rootdir):
+def test_get_matching_files_all_include_single(rootdir: Path) -> None:
files = get_matching_files(rootdir / 'test-root', include_patterns=['subdir/**'])
assert sorted(files) == [
'subdir/excluded.txt',
@@ -278,7 +283,7 @@ def test_get_matching_files_all_include_single(rootdir):
]
-def test_get_matching_files_all_include_multiple(rootdir):
+def test_get_matching_files_all_include_multiple(rootdir: Path) -> None:
files = get_matching_files(
rootdir / 'test-root', include_patterns=['special/**', 'subdir/**']
)
@@ -294,12 +299,12 @@ def test_get_matching_files_all_include_multiple(rootdir):
]
-def test_get_matching_files_all_include_nonexistent(rootdir):
+def test_get_matching_files_all_include_nonexistent(rootdir: Path) -> None:
files = get_matching_files(rootdir / 'test-root', include_patterns=['halibut/**'])
assert sorted(files) == []
-def test_get_matching_files_all_include_prefix(rootdir):
+def test_get_matching_files_all_include_prefix(rootdir: Path) -> None:
files = get_matching_files(rootdir / 'test-root', include_patterns=['autodoc*'])
assert sorted(files) == [
'autodoc.txt',
@@ -307,7 +312,7 @@ def test_get_matching_files_all_include_prefix(rootdir):
]
-def test_get_matching_files_all_include_question_mark(rootdir):
+def test_get_matching_files_all_include_question_mark(rootdir: Path) -> None:
files = get_matching_files(rootdir / 'test-root', include_patterns=['img.???'])
assert sorted(files) == [
'img.gif',
From d9b20d055308f12c9c3db2df2272000d1ac0e795 Mon Sep 17 00:00:00 2001
From: Alicia Garcia-Raboso
Date: Mon, 24 Mar 2025 20:32:35 +0100
Subject: [PATCH 026/466] Correctly parse and cross-reference unpacked type
annotations (#13369)
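
For illustration, a signature using the PEP 646 unpack syntax that the
Python domain can now parse and cross-reference (example code, not part
of the patch):

    # Requires Python 3.11+ for the unpack operator in annotations.
    def read_row(*args: *tuple[str, int]) -> None: ...

Each component (``tuple``, ``str``, ``int``) becomes a pending
cross-reference, with ``*`` rendered as an operator node.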
---
CHANGES.rst | 3 +++
sphinx/domains/python/_annotations.py | 4 ++++
sphinx/pycode/ast.py | 3 +++
tests/test_domains/test_domain_py.py | 22 ++++++++++++++++++++++
tests/test_pycode/test_pycode_ast.py | 1 +
5 files changed, 33 insertions(+)
diff --git a/CHANGES.rst b/CHANGES.rst
index c257b3b11b1..b37ce847c49 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -16,5 +16,8 @@ Features added
Bugs fixed
----------
+* #13369: Correctly parse and cross-reference unpacked type annotations.
+ Patch by Alicia Garcia-Raboso.
+
Testing
-------
diff --git a/sphinx/domains/python/_annotations.py b/sphinx/domains/python/_annotations.py
index 823aac01316..29e47fa7151 100644
--- a/sphinx/domains/python/_annotations.py
+++ b/sphinx/domains/python/_annotations.py
@@ -124,6 +124,10 @@ def unparse(node: ast.AST) -> list[Node]:
return [nodes.Text(repr(node.value))]
if isinstance(node, ast.Expr):
return unparse(node.value)
+ if isinstance(node, ast.Starred):
+ result = [addnodes.desc_sig_operator('', '*')]
+ result.extend(unparse(node.value))
+ return result
if isinstance(node, ast.Invert):
return [addnodes.desc_sig_punctuation('', '~')]
if isinstance(node, ast.USub):
diff --git a/sphinx/pycode/ast.py b/sphinx/pycode/ast.py
index b1521595b49..640864e467a 100644
--- a/sphinx/pycode/ast.py
+++ b/sphinx/pycode/ast.py
@@ -202,5 +202,8 @@ def visit_Tuple(self, node: ast.Tuple) -> str:
else:
return '(' + ', '.join(self.visit(e) for e in node.elts) + ')'
+ def visit_Starred(self, node: ast.Starred) -> str:
+ return f'*{self.visit(node.value)}'
+
def generic_visit(self, node: ast.AST) -> NoReturn:
raise NotImplementedError('Unable to parse %s object' % type(node).__name__)
diff --git a/tests/test_domains/test_domain_py.py b/tests/test_domains/test_domain_py.py
index b3c246297ad..26c79ffd8fb 100644
--- a/tests/test_domains/test_domain_py.py
+++ b/tests/test_domains/test_domain_py.py
@@ -508,6 +508,28 @@ def test_parse_annotation(app):
),
)
+ doctree = _parse_annotation('*tuple[str, int]', app.env)
+ assert_node(
+ doctree,
+ (
+ [desc_sig_operator, '*'],
+ [pending_xref, 'tuple'],
+ [desc_sig_punctuation, '['],
+ [pending_xref, 'str'],
+ [desc_sig_punctuation, ','],
+ desc_sig_space,
+ [pending_xref, 'int'],
+ [desc_sig_punctuation, ']'],
+ ),
+ )
+ assert_node(
+ doctree[1],
+ pending_xref,
+ refdomain='py',
+ reftype='class',
+ reftarget='tuple',
+ )
+
@pytest.mark.sphinx('html', testroot='_blank')
def test_parse_annotation_suppress(app):
diff --git a/tests/test_pycode/test_pycode_ast.py b/tests/test_pycode/test_pycode_ast.py
index 6ebc1a91099..409e5806d1b 100644
--- a/tests/test_pycode/test_pycode_ast.py
+++ b/tests/test_pycode/test_pycode_ast.py
@@ -62,6 +62,7 @@
'x[:, np.newaxis, :, :]'), # Index, Subscript, numpy extended syntax
('y[:, 1:3][np.array([0, 2, 4]), :]',
'y[:, 1:3][np.array([0, 2, 4]), :]'), # Index, 2x Subscript, numpy extended syntax
+ ('*tuple[str, int]', '*tuple[str, int]'), # Starred
],
) # fmt: skip
def test_unparse(source, expected):
From 5831b3eeafe5e78281d394359545da378859561d Mon Sep 17 00:00:00 2001
From: Till Hoffmann
Date: Mon, 24 Mar 2025 15:44:16 -0400
Subject: [PATCH 027/466] Add ``doctest_fail_fast`` option to exit after the
first failed test (#13332)
Co-authored-by: Adam Turner <9087854+aa-turner@users.noreply.github.com>
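
A minimal ``conf.py`` sketch enabling the new option:

    extensions = ['sphinx.ext.doctest']

    # Stop the doctest builder at the first failing test instead of
    # running the remaining groups and documents (default: False).
    doctest_fail_fast = True

When enabled, the summary header becomes
``Doctest summary (exiting after first failed test)``.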
---
AUTHORS.rst | 1 +
CHANGES.rst | 4 ++
doc/usage/extensions/doctest.rst | 8 ++++
sphinx/ext/doctest.py | 45 ++++++++++++++-----
.../roots/test-ext-doctest-fail-fast/conf.py | 11 +++++
.../test-ext-doctest-fail-fast/fail-fast.txt | 11 +++++
tests/test_extensions/test_ext_doctest.py | 20 +++++++++
7 files changed, 88 insertions(+), 12 deletions(-)
create mode 100644 tests/roots/test-ext-doctest-fail-fast/conf.py
create mode 100644 tests/roots/test-ext-doctest-fail-fast/fail-fast.txt
diff --git a/AUTHORS.rst b/AUTHORS.rst
index eed59b68cbd..5ff09219c02 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -106,6 +106,7 @@ Contributors
* Taku Shimizu -- epub3 builder
* Thomas Lamb -- linkcheck builder
* Thomas Waldmann -- apidoc module fixes
+* Till Hoffmann -- doctest option to exit after first failed test
* Tim Hoffmann -- theme improvements
* Victor Wheeler -- documentation improvements
* Vince Salvino -- JavaScript search improvements
diff --git a/CHANGES.rst b/CHANGES.rst
index b37ce847c49..82396f2a33e 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -13,6 +13,10 @@ Deprecated
Features added
--------------
+* #13332: Add :confval:`doctest_fail_fast` option to exit after the first failed
+ test.
+ Patch by Till Hoffmann.
+
Bugs fixed
----------
diff --git a/doc/usage/extensions/doctest.rst b/doc/usage/extensions/doctest.rst
index 60c67827967..10e8f67dfe2 100644
--- a/doc/usage/extensions/doctest.rst
+++ b/doc/usage/extensions/doctest.rst
@@ -452,3 +452,11 @@ The doctest extension uses the following configuration values:
Also, removal of ``<BLANKLINE>`` and ``# doctest:`` options only works in
:rst:dir:`doctest` blocks, though you may set :confval:`trim_doctest_flags`
to achieve that in all code blocks with Python console content.
+
+.. confval:: doctest_fail_fast
+ :type: :code-py:`bool`
+ :default: :code-py:`False`
+
+ Exit when the first failure is encountered.
+
+ .. versionadded:: 8.3
diff --git a/sphinx/ext/doctest.py b/sphinx/ext/doctest.py
index 105c50a6923..343534f10ce 100644
--- a/sphinx/ext/doctest.py
+++ b/sphinx/ext/doctest.py
@@ -358,10 +358,17 @@ def finish(self) -> None:
def s(v: int) -> str:
return 's' if v != 1 else ''
+ header = 'Doctest summary'
+ if self.total_failures or self.setup_failures or self.cleanup_failures:
+ self.app.statuscode = 1
+ if self.config.doctest_fail_fast:
+ header = f'{header} (exiting after first failed test)'
+ underline = '=' * len(header)
+
self._out(
f"""
-Doctest summary
-===============
+{header}
+{underline}
{self.total_tries:5} test{s(self.total_tries)}
{self.total_failures:5} failure{s(self.total_failures)} in tests
{self.setup_failures:5} failure{s(self.setup_failures)} in setup code
@@ -370,15 +377,14 @@ def s(v: int) -> str:
)
self.outfile.close()
- if self.total_failures or self.setup_failures or self.cleanup_failures:
- self.app.statuscode = 1
-
def write_documents(self, docnames: Set[str]) -> None:
logger.info(bold('running tests...'))
for docname in sorted(docnames):
# no need to resolve the doctree
doctree = self.env.get_doctree(docname)
- self.test_doc(docname, doctree)
+ success = self.test_doc(docname, doctree)
+ if not success and self.config.doctest_fail_fast:
+ break
def get_filename_for_node(self, node: Node, docname: str) -> str:
"""Try to get the file which actually contains the doctest, not the
@@ -419,7 +425,7 @@ def skipped(self, node: Element) -> bool:
exec(self.config.doctest_global_cleanup, context) # NoQA: S102
return should_skip
- def test_doc(self, docname: str, doctree: Node) -> None:
+ def test_doc(self, docname: str, doctree: Node) -> bool:
groups: dict[str, TestGroup] = {}
add_to_all_groups = []
self.setup_runner = SphinxDocTestRunner(verbose=False, optionflags=self.opt)
@@ -496,13 +502,17 @@ def condition(node: Node) -> bool:
for group in groups.values():
group.add_code(code)
if not groups:
- return
+ return True
show_successes = self.config.doctest_show_successes
if show_successes:
self._out(f'\nDocument: {docname}\n----------{"-" * len(docname)}\n')
+ success = True
for group in groups.values():
- self.test_group(group)
+ if not self.test_group(group):
+ success = False
+ if self.config.doctest_fail_fast:
+ break
# Separately count results from setup code
res_f, res_t = self.setup_runner.summarize(self._out, verbose=False)
self.setup_failures += res_f
@@ -517,13 +527,14 @@ def condition(node: Node) -> bool:
)
self.cleanup_failures += res_f
self.cleanup_tries += res_t
+ return success
def compile(
self, code: str, name: str, type: str, flags: Any, dont_inherit: bool
) -> Any:
return compile(code, name, self.type, flags, dont_inherit)
- def test_group(self, group: TestGroup) -> None:
+ def test_group(self, group: TestGroup) -> bool:
ns: dict[str, Any] = {}
def run_setup_cleanup(
@@ -553,9 +564,10 @@ def run_setup_cleanup(
# run the setup code
if not run_setup_cleanup(self.setup_runner, group.setup, 'setup'):
# if setup failed, don't run the group
- return
+ return False
# run the tests
+ success = True
for code in group.tests:
if len(code) == 1:
# ordinary doctests (code/output interleaved)
@@ -608,11 +620,19 @@ def run_setup_cleanup(
self.type = 'exec' # multiple statements again
# DocTest.__init__ copies the globs namespace, which we don't want
test.globs = ns
+ old_f = self.test_runner.failures
# also don't clear the globs namespace after running the doctest
self.test_runner.run(test, out=self._warn_out, clear_globs=False)
+ if self.test_runner.failures > old_f:
+ success = False
+ if self.config.doctest_fail_fast:
+ break
# run the cleanup
- run_setup_cleanup(self.cleanup_runner, group.cleanup, 'cleanup')
+ if not run_setup_cleanup(self.cleanup_runner, group.cleanup, 'cleanup'):
+ return False
+
+ return success
def setup(app: Sphinx) -> ExtensionMetadata:
@@ -638,6 +658,7 @@ def setup(app: Sphinx) -> ExtensionMetadata:
'',
types=frozenset({int}),
)
+ app.add_config_value('doctest_fail_fast', False, '', types=frozenset({bool}))
return {
'version': sphinx.__display_version__,
'parallel_read_safe': True,
diff --git a/tests/roots/test-ext-doctest-fail-fast/conf.py b/tests/roots/test-ext-doctest-fail-fast/conf.py
new file mode 100644
index 00000000000..227afbb2c95
--- /dev/null
+++ b/tests/roots/test-ext-doctest-fail-fast/conf.py
@@ -0,0 +1,11 @@
+extensions = ['sphinx.ext.doctest']
+
+project = 'test project for doctest'
+root_doc = 'fail-fast'
+source_suffix = {
+ '.txt': 'restructuredtext',
+}
+exclude_patterns = ['_build']
+
+# Set in tests.
+# doctest_fail_fast = ...
diff --git a/tests/roots/test-ext-doctest-fail-fast/fail-fast.txt b/tests/roots/test-ext-doctest-fail-fast/fail-fast.txt
new file mode 100644
index 00000000000..70a05af487b
--- /dev/null
+++ b/tests/roots/test-ext-doctest-fail-fast/fail-fast.txt
@@ -0,0 +1,11 @@
+Testing fast failure in the doctest extension
+=============================================
+
+>>> 1 + 1
+2
+
+>>> 1 + 1
+3
+
+>>> 1 + 1
+3
diff --git a/tests/test_extensions/test_ext_doctest.py b/tests/test_extensions/test_ext_doctest.py
index cb540fda7ec..810f8244ba8 100644
--- a/tests/test_extensions/test_ext_doctest.py
+++ b/tests/test_extensions/test_ext_doctest.py
@@ -147,3 +147,23 @@ def test_reporting_with_autodoc(app, capfd):
assert 'File "dir/bar.py", line ?, in default' in failures
assert 'File "foo.py", line ?, in default' in failures
assert 'File "index.rst", line 4, in default' in failures
+
+
+@pytest.mark.sphinx('doctest', testroot='ext-doctest-fail-fast')
+@pytest.mark.parametrize('fail_fast', [False, True, None])
+def test_fail_fast(app, fail_fast, capsys):
+ if fail_fast is not None:
+ app.config.doctest_fail_fast = fail_fast
+ # Patch builder to get a copy of the output
+ written = []
+ app.builder._out = written.append
+ app.build(force_all=True)
+ assert app.statuscode
+
+ written = ''.join(written)
+ if fail_fast:
+ assert 'Doctest summary (exiting after first failed test)' in written
+ assert '1 failure in tests' in written
+ else:
+ assert 'Doctest summary\n' in written
+ assert '2 failures in tests' in written
From 41e0cf0aeb45f07ee1d7abacf299d4bb94f5f226 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Tue, 25 Mar 2025 21:15:00 +0000
Subject: [PATCH 028/466] Upgrade to flit-core 3.12
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index d841d10747d..364a51dc186 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,5 +1,5 @@
[build-system]
-requires = ["flit_core>=3.11"]
+requires = ["flit_core>=3.12"]
build-backend = "flit_core.buildapi"
# project metadata
From 466ccd076d5174988510053a95ebc48479820449 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Tue, 25 Mar 2025 21:17:04 +0000
Subject: [PATCH 029/466] Revert flit-core 3.11 temporary fix
---
sphinx/__init__.py | 6 +-----
1 file changed, 1 insertion(+), 5 deletions(-)
diff --git a/sphinx/__init__.py b/sphinx/__init__.py
index b70b6db47a6..ab02ddc547b 100644
--- a/sphinx/__init__.py
+++ b/sphinx/__init__.py
@@ -7,11 +7,7 @@
import warnings
-# work around flit error in parsing annotated assignments
-try:
- from sphinx.util._pathlib import _StrPath
-except ImportError:
- from pathlib import Path as _StrPath # type: ignore[assignment]
+from sphinx.util._pathlib import _StrPath
TYPE_CHECKING = False
if TYPE_CHECKING:
From 628442e88248d5cec1998128b3cfc25186681854 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Tue, 25 Mar 2025 21:26:28 +0000
Subject: [PATCH 030/466] Group the version variables together
---
sphinx/__init__.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/sphinx/__init__.py b/sphinx/__init__.py
index ab02ddc547b..6211f1abfb3 100644
--- a/sphinx/__init__.py
+++ b/sphinx/__init__.py
@@ -13,9 +13,6 @@
if TYPE_CHECKING:
from typing import Final
-__version__: Final = '8.3.0'
-__display_version__: Final = __version__ # used for command line version
-
warnings.filterwarnings(
'ignore',
'The frontend.Option class .*',
@@ -23,6 +20,9 @@
module='docutils.frontend',
)
+__version__: Final = '8.3.0'
+__display_version__: Final = __version__ # used for command line version
+
#: Version info for better programmatic use.
#:
#: A tuple of five elements; for Sphinx version 1.2.1 beta 3 this would be
From c40ef2b985fcaccc5fbb23a851e4b5d7f555f68b Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Tue, 25 Mar 2025 21:29:11 +0000
Subject: [PATCH 031/466] Delete non-exported names from ``sphinx/__init__.py``
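
Deleting the helper names after use keeps them out of the package
namespace, so that e.g. ``sphinx.warnings`` no longer resolves to the
stdlib module. The pattern, in brief (illustrative configuration):

    import warnings

    warnings.simplefilter('default')  # configure warnings at import time
    del warnings  # drop the binding so it is not exposed as ``sphinx.warnings``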
---
sphinx/__init__.py | 2 ++
1 file changed, 2 insertions(+)
diff --git a/sphinx/__init__.py b/sphinx/__init__.py
index 6211f1abfb3..6ddfdba271f 100644
--- a/sphinx/__init__.py
+++ b/sphinx/__init__.py
@@ -19,6 +19,7 @@
DeprecationWarning,
module='docutils.frontend',
)
+del warnings
__version__: Final = '8.3.0'
__display_version__: Final = __version__ # used for command line version
@@ -34,6 +35,7 @@
version_info: Final = (8, 3, 0, 'beta', 0)
package_dir: Final = _StrPath(__file__).resolve().parent
+del _StrPath
_in_development = True
if _in_development:
From 3c4b4e31128b5f2398fae39ab0f42af8cec16714 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Thu, 27 Mar 2025 19:03:43 +0000
Subject: [PATCH 032/466] Warn on all redirects if
``linkcheck_allowed_redirects`` is an empty dictionary (#13452)
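
The option now distinguishes "unset" from "empty". A ``conf.py`` sketch
(URLs illustrative):

    # Allow only specific redirects; any other redirect is a warning.
    linkcheck_allowed_redirects = {
        r'https://example\.org/.*': r'https://example\.org/latest/.*',
    }

    # Warn on every redirect:
    # linkcheck_allowed_redirects = {}

Leaving the option unset keeps the previous behaviour, where redirects
are reported without warning.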
---
CHANGES.rst | 3 ++
doc/usage/configuration.rst | 5 ++++
sphinx/builders/linkcheck.py | 32 ++++++++++++++-------
tests/test_builders/test_build_linkcheck.py | 31 ++++++++++++++++++++
4 files changed, 60 insertions(+), 11 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 82396f2a33e..fede8b5177b 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -16,6 +16,9 @@ Features added
* #13332: Add :confval:`doctest_fail_fast` option to exit after the first failed
test.
Patch by Till Hoffmann.
+* #13439: linkcheck: Permit warning on every redirect with
+ ``linkcheck_allowed_redirects = {}``.
+ Patch by Adam Turner.
Bugs fixed
----------
diff --git a/doc/usage/configuration.rst b/doc/usage/configuration.rst
index 75e08d7654b..20912d1dc19 100644
--- a/doc/usage/configuration.rst
+++ b/doc/usage/configuration.rst
@@ -3668,6 +3668,11 @@ and which failures and redirects it ignores.
.. versionadded:: 4.1
+ .. versionchanged:: 8.3
+ Setting :confval:`!linkcheck_allowed_redirects` to the empty dictionary
+ may now be used to warn on all redirects encountered
+ by the *linkcheck* builder.
+
.. confval:: linkcheck_anchors
:type: :code-py:`bool`
:default: :code-py:`True`
diff --git a/sphinx/builders/linkcheck.py b/sphinx/builders/linkcheck.py
index 93ab2e78b00..e1a80a47c0f 100644
--- a/sphinx/builders/linkcheck.py
+++ b/sphinx/builders/linkcheck.py
@@ -25,6 +25,7 @@
from sphinx._cli.util.colour import darkgray, darkgreen, purple, red, turquoise
from sphinx.builders.dummy import DummyBuilder
+from sphinx.errors import ConfigError
from sphinx.locale import __
from sphinx.transforms.post_transforms import SphinxPostTransform
from sphinx.util import logging, requests
@@ -178,7 +179,7 @@ def process_result(self, result: CheckResult) -> None:
text = 'with unknown code'
linkstat['text'] = text
redirection = f'{text} to {result.message}'
- if self.config.linkcheck_allowed_redirects:
+ if self.config.linkcheck_allowed_redirects is not None:
msg = f'redirect {res_uri} - {redirection}'
logger.warning(msg, location=(result.docname, result.lineno))
else:
@@ -386,7 +387,7 @@ def __init__(
)
self.check_anchors: bool = config.linkcheck_anchors
self.allowed_redirects: dict[re.Pattern[str], re.Pattern[str]]
- self.allowed_redirects = config.linkcheck_allowed_redirects
+ self.allowed_redirects = config.linkcheck_allowed_redirects or {}
self.retries: int = config.linkcheck_retries
self.rate_limit_timeout = config.linkcheck_rate_limit_timeout
self._allow_unauthorized = config.linkcheck_allow_unauthorized
@@ -748,20 +749,26 @@ def rewrite_github_anchor(app: Sphinx, uri: str) -> str | None:
def compile_linkcheck_allowed_redirects(app: Sphinx, config: Config) -> None:
- """Compile patterns in linkcheck_allowed_redirects to the regexp objects."""
- linkcheck_allowed_redirects = app.config.linkcheck_allowed_redirects
- for url, pattern in list(linkcheck_allowed_redirects.items()):
+ """Compile patterns to the regexp objects."""
+ if config.linkcheck_allowed_redirects is _sentinel_lar:
+ config.linkcheck_allowed_redirects = None
+ return
+ if not isinstance(config.linkcheck_allowed_redirects, dict):
+ raise ConfigError
+ allowed_redirects = {}
+ for url, pattern in config.linkcheck_allowed_redirects.items():
try:
- linkcheck_allowed_redirects[re.compile(url)] = re.compile(pattern)
+ allowed_redirects[re.compile(url)] = re.compile(pattern)
except re.error as exc:
logger.warning(
__('Failed to compile regex in linkcheck_allowed_redirects: %r %s'),
exc.pattern,
exc.msg,
)
- finally:
- # Remove the original regexp-string
- linkcheck_allowed_redirects.pop(url)
+ config.linkcheck_allowed_redirects = allowed_redirects
+
+
+_sentinel_lar = object()
def setup(app: Sphinx) -> ExtensionMetadata:
@@ -772,7 +779,9 @@ def setup(app: Sphinx) -> ExtensionMetadata:
app.add_config_value(
'linkcheck_exclude_documents', [], '', types=frozenset({list, tuple})
)
- app.add_config_value('linkcheck_allowed_redirects', {}, '', types=frozenset({dict}))
+ app.add_config_value(
+ 'linkcheck_allowed_redirects', _sentinel_lar, '', types=frozenset({dict})
+ )
app.add_config_value('linkcheck_auth', [], '', types=frozenset({list, tuple}))
app.add_config_value('linkcheck_request_headers', {}, '', types=frozenset({dict}))
app.add_config_value('linkcheck_retries', 1, '', types=frozenset({int}))
@@ -799,7 +808,8 @@ def setup(app: Sphinx) -> ExtensionMetadata:
app.add_event('linkcheck-process-uri')
- app.connect('config-inited', compile_linkcheck_allowed_redirects, priority=800)
+ # priority 900 to happen after ``check_confval_types()``
+ app.connect('config-inited', compile_linkcheck_allowed_redirects, priority=900)
# FIXME: Disable URL rewrite handler for github.com temporarily.
# See: https://github.com/sphinx-doc/sphinx/issues/9435
diff --git a/tests/test_builders/test_build_linkcheck.py b/tests/test_builders/test_build_linkcheck.py
index 82baa62f3ef..bdd8dea54c1 100644
--- a/tests/test_builders/test_build_linkcheck.py
+++ b/tests/test_builders/test_build_linkcheck.py
@@ -10,6 +10,7 @@
import wsgiref.handlers
from base64 import b64encode
from http.server import BaseHTTPRequestHandler
+from io import StringIO
from queue import Queue
from typing import TYPE_CHECKING
from unittest import mock
@@ -27,6 +28,7 @@
RateLimit,
compile_linkcheck_allowed_redirects,
)
+from sphinx.errors import ConfigError
from sphinx.testing.util import SphinxTestApp
from sphinx.util import requests
from sphinx.util._pathlib import _StrPath
@@ -37,6 +39,7 @@
if TYPE_CHECKING:
from collections.abc import Callable, Iterable
+ from pathlib import Path
from typing import Any
from urllib3 import HTTPConnectionPool
@@ -752,6 +755,34 @@ def test_follows_redirects_on_GET(app, capsys):
assert app.warning.getvalue() == ''
+def test_linkcheck_allowed_redirects_config(
+ make_app: Callable[..., SphinxTestApp], tmp_path: Path
+) -> None:
+ tmp_path.joinpath('conf.py').touch()
+ tmp_path.joinpath('index.rst').touch()
+
+ # ``linkcheck_allowed_redirects = None`` is rejected
+ warning_stream = StringIO()
+ with pytest.raises(ConfigError):
+ make_app(
+ 'linkcheck',
+ srcdir=tmp_path,
+ confoverrides={'linkcheck_allowed_redirects': None},
+ warning=warning_stream,
+ )
+ assert strip_escape_sequences(warning_stream.getvalue()).splitlines() == [
+ "WARNING: The config value `linkcheck_allowed_redirects' has type `NoneType'; expected `dict'."
+ ]
+
+ # ``linkcheck_allowed_redirects = {}`` is permitted
+ app = make_app(
+ 'linkcheck',
+ srcdir=tmp_path,
+ confoverrides={'linkcheck_allowed_redirects': {}},
+ )
+ assert strip_escape_sequences(app.warning.getvalue()) == ''
+
+
@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-warn-redirects')
def test_linkcheck_allowed_redirects(app: SphinxTestApp) -> None:
with serve_application(app, make_redirect_handler(support_head=False)) as address:
From a6d7ae16739bf92a032a7c4df0297db7cf120ec9 Mon Sep 17 00:00:00 2001
From: James Addison <55152140+jayaddison@users.noreply.github.com>
Date: Tue, 1 Apr 2025 14:11:31 +0000
Subject: [PATCH 033/466] linkcheck: documentation and validation suggestions
for linkcheck_allowed_redirects (#13458)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
---
doc/usage/configuration.rst | 1 -
sphinx/builders/linkcheck.py | 6 +++++-
2 files changed, 5 insertions(+), 2 deletions(-)
diff --git a/doc/usage/configuration.rst b/doc/usage/configuration.rst
index 20912d1dc19..d14b5d4ec6b 100644
--- a/doc/usage/configuration.rst
+++ b/doc/usage/configuration.rst
@@ -3642,7 +3642,6 @@ and which failures and redirects it ignores.
.. confval:: linkcheck_allowed_redirects
:type: :code-py:`dict[str, str]`
- :default: :code-py:`{}`
A dictionary that maps a pattern of the source URI
to a pattern of the canonical URI.
diff --git a/sphinx/builders/linkcheck.py b/sphinx/builders/linkcheck.py
index e1a80a47c0f..ff6878f2acb 100644
--- a/sphinx/builders/linkcheck.py
+++ b/sphinx/builders/linkcheck.py
@@ -754,7 +754,11 @@ def compile_linkcheck_allowed_redirects(app: Sphinx, config: Config) -> None:
config.linkcheck_allowed_redirects = None
return
if not isinstance(config.linkcheck_allowed_redirects, dict):
- raise ConfigError
+ msg = __(
+ f'Invalid value `{config.linkcheck_allowed_redirects!r}` in '
+ 'linkcheck_allowed_redirects. Expected a dictionary.'
+ )
+ raise ConfigError(msg)
allowed_redirects = {}
for url, pattern in config.linkcheck_allowed_redirects.items():
try:
From 021d6a889aa7cbfede33a1c2b75eb68cd40b1554 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 9 Apr 2025 20:54:02 +0100
Subject: [PATCH 034/466] Bump types-requests to 2.32.0.20250328 (#13456)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 364a51dc186..5189219e456 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -100,7 +100,7 @@ lint = [
"types-docutils==0.21.0.20241128",
"types-Pillow==10.2.0.20240822",
"types-Pygments==2.19.0.20250305",
- "types-requests==2.32.0.20250306", # align with requests
+ "types-requests==2.32.0.20250328", # align with requests
"types-urllib3==1.26.25.14",
"pyright==1.1.397",
"pytest>=8.0",
@@ -168,7 +168,7 @@ type-stubs = [
"types-docutils==0.21.0.20241128",
"types-Pillow==10.2.0.20240822",
"types-Pygments==2.19.0.20250305",
- "types-requests==2.32.0.20250306",
+ "types-requests==2.32.0.20250328",
"types-urllib3==1.26.25.14",
]
From 14818ffc01530a6fe0c5c73cb75e8c25255467bb Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Sun, 27 Apr 2025 19:16:53 +0100
Subject: [PATCH 035/466] Bump Ruff to 0.11.3
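
Besides the version bump, the code changes below replace list
concatenation with iterable unpacking, the form preferred by Ruff's
RUF005 rule (rule name assumed):

    showed_sources = [str(i) for i in range(30)]
    # Before: concatenation builds intermediate lists.
    showed = showed_sources[:10] + ['...'] + showed_sources[-10:]
    # After: a single list display using unpacking.
    showed = [*showed_sources[:10], '...', *showed_sources[-10:]]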
---
pyproject.toml | 4 ++--
sphinx/ext/autosummary/generate.py | 2 +-
sphinx/ext/napoleon/docstring.py | 4 ++--
sphinx/pycode/parser.py | 9 +++++----
sphinx/transforms/__init__.py | 2 +-
tests/test_builders/test_build_latex.py | 2 +-
6 files changed, 12 insertions(+), 11 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 5189219e456..882a54f8e41 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -92,7 +92,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.2",
+ "ruff==0.11.3",
"mypy==1.15.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
@@ -135,7 +135,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.2",
+ "ruff==0.11.3",
"sphinx-lint>=0.9",
]
package = [
diff --git a/sphinx/ext/autosummary/generate.py b/sphinx/ext/autosummary/generate.py
index 62a106479ea..5ff9caf05e4 100644
--- a/sphinx/ext/autosummary/generate.py
+++ b/sphinx/ext/autosummary/generate.py
@@ -583,7 +583,7 @@ def generate_autosummary_docs(
showed_sources = sorted(sources)
if len(showed_sources) > 20:
- showed_sources = showed_sources[:10] + ['...'] + showed_sources[-10:]
+ showed_sources = [*showed_sources[:10], '...', *showed_sources[-10:]]
logger.info(
__('[autosummary] generating autosummary for: %s'), ', '.join(showed_sources)
)
diff --git a/sphinx/ext/napoleon/docstring.py b/sphinx/ext/napoleon/docstring.py
index 38325df1d94..ea991f72301 100644
--- a/sphinx/ext/napoleon/docstring.py
+++ b/sphinx/ext/napoleon/docstring.py
@@ -535,7 +535,7 @@ def _consume_returns_section(
if colon:
if after:
- _desc = [after] + lines[1:]
+ _desc = [after, *lines[1:]]
else:
_desc = lines[1:]
@@ -684,7 +684,7 @@ def _format_field(self, _name: str, _type: str, _desc: list[str]) -> list[str]:
if has_desc:
_desc = self._fix_field_desc(_desc)
if _desc[0]:
- return [field + _desc[0]] + _desc[1:]
+ return [field + _desc[0], *_desc[1:]]
else:
return [field, *_desc]
else:
diff --git a/sphinx/pycode/parser.py b/sphinx/pycode/parser.py
index 34d30200f75..2390b19d4d3 100644
--- a/sphinx/pycode/parser.py
+++ b/sphinx/pycode/parser.py
@@ -257,7 +257,7 @@ def get_qualname_for(self, name: str) -> list[str] | None:
if self.current_function:
if self.current_classes and self.context[-1] == '__init__':
# store variable comments inside __init__ method of classes
- return self.context[:-1] + [name]
+ return [*self.context[:-1], name]
else:
return None
else:
@@ -387,9 +387,10 @@ def visit_Assign(self, node: ast.Assign) -> None:
self.add_variable_annotation(varname, node.type_comment) # type: ignore[arg-type]
# check comments after assignment
- parser = AfterCommentParser(
- [current_line[node.col_offset :]] + self.buffers[node.lineno :]
- )
+ parser = AfterCommentParser([
+ current_line[node.col_offset :],
+ *self.buffers[node.lineno :],
+ ])
parser.parse()
if parser.comment and comment_re.match(parser.comment):
for varname in varnames:
diff --git a/sphinx/transforms/__init__.py b/sphinx/transforms/__init__.py
index e1f905d2d0f..c6620078e36 100644
--- a/sphinx/transforms/__init__.py
+++ b/sphinx/transforms/__init__.py
@@ -217,7 +217,7 @@ class SortIds(SphinxTransform):
def apply(self, **kwargs: Any) -> None:
for node in self.document.findall(nodes.section):
if len(node['ids']) > 1 and node['ids'][0].startswith('id'):
- node['ids'] = node['ids'][1:] + [node['ids'][0]]
+ node['ids'] = [*node['ids'][1:], node['ids'][0]]
TRANSLATABLE_NODES = {
diff --git a/tests/test_builders/test_build_latex.py b/tests/test_builders/test_build_latex.py
index ea585cd6f21..f1c19a5ab7f 100644
--- a/tests/test_builders/test_build_latex.py
+++ b/tests/test_builders/test_build_latex.py
@@ -143,7 +143,7 @@ def test_build_latex_doc(app, engine, docclass, python_maximum_signature_line_le
}
intersphinx_setup(app)
app.config.latex_engine = engine
- app.config.latex_documents = [app.config.latex_documents[0][:4] + (docclass,)]
+ app.config.latex_documents = [(*app.config.latex_documents[0][:4], docclass)]
if engine == 'xelatex':
app.config.latex_table_style = ['booktabs']
elif engine == 'lualatex':
From eb5ca6eb70c48815d6b414a49fc21a8725647ffa Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Sun, 27 Apr 2025 19:18:56 +0100
Subject: [PATCH 036/466] Bump Ruff to 0.11.4
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 882a54f8e41..f8ddce88ce2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -92,7 +92,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.3",
+ "ruff==0.11.4",
"mypy==1.15.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
@@ -135,7 +135,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.3",
+ "ruff==0.11.4",
"sphinx-lint>=0.9",
]
package = [
From 69286861f3f117d3cb753d03c42fcdeb2faf33d7 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Sun, 27 Apr 2025 19:21:22 +0100
Subject: [PATCH 037/466] Bump Ruff to 0.11.5
---
pyproject.toml | 4 ++--
tests/roots/test-ext-autodoc/target/enums.py | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index f8ddce88ce2..6399096879c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -92,7 +92,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.4",
+ "ruff==0.11.5",
"mypy==1.15.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
@@ -135,7 +135,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.4",
+ "ruff==0.11.5",
"sphinx-lint>=0.9",
]
package = [
diff --git a/tests/roots/test-ext-autodoc/target/enums.py b/tests/roots/test-ext-autodoc/target/enums.py
index 6b2731672d2..9d6bcdbc97f 100644
--- a/tests/roots/test-ext-autodoc/target/enums.py
+++ b/tests/roots/test-ext-autodoc/target/enums.py
@@ -1,4 +1,4 @@
-# ruff: NoQA: D403, PIE796
+# ruff: NoQA: PIE796
import enum
from typing import final
From 92263a02158c848dcf73aa1f6ad1bbcda9e9c6c5 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Sun, 27 Apr 2025 19:21:49 +0100
Subject: [PATCH 038/466] Bump Ruff to 0.11.6
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 6399096879c..f5b38c56d7a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -92,7 +92,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.5",
+ "ruff==0.11.6",
"mypy==1.15.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
@@ -135,7 +135,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.5",
+ "ruff==0.11.6",
"sphinx-lint>=0.9",
]
package = [
From 052e1beb3c6fbe8f7c0af44e976c9a5bd92cc5b9 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Sun, 27 Apr 2025 19:22:14 +0100
Subject: [PATCH 039/466] Bump Ruff to 0.11.7
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index f5b38c56d7a..cb22ecf9c6a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -92,7 +92,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.6",
+ "ruff==0.11.7",
"mypy==1.15.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
@@ -135,7 +135,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.6",
+ "ruff==0.11.7",
"sphinx-lint>=0.9",
]
package = [
From 8fef43c1977212fd498818195eada89f17f54279 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sun, 27 Apr 2025 19:26:06 +0100
Subject: [PATCH 040/466] Bump pyright to 1.1.400 (#13503)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index cb22ecf9c6a..c19476cf0d2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -102,7 +102,7 @@ lint = [
"types-Pygments==2.19.0.20250305",
"types-requests==2.32.0.20250328", # align with requests
"types-urllib3==1.26.25.14",
- "pyright==1.1.397",
+ "pyright==1.1.400",
"pytest>=8.0",
"pypi-attestations==0.0.22",
"betterproto==2.0.0b6",
@@ -158,7 +158,7 @@ translations = [
]
types = [
"mypy==1.15.0",
- "pyright==1.1.397",
+ "pyright==1.1.400",
{ include-group = "type-stubs" },
]
type-stubs = [
From 239a709662ae18ba79ddd6b51b9514e29985fdfe Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sun, 27 Apr 2025 19:34:46 +0100
Subject: [PATCH 041/466] Bump Twine to 6.1 (#13510)
---
.github/workflows/create-release.yml | 34 ++--------------------------
pyproject.toml | 2 +-
2 files changed, 3 insertions(+), 33 deletions(-)
diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml
index 5d07fcbd18d..b3820cc164c 100644
--- a/.github/workflows/create-release.yml
+++ b/.github/workflows/create-release.yml
@@ -47,8 +47,7 @@ jobs:
run: python -m build
- name: Check distribution
- run: |
- twine check dist/*
+ run: twine check dist/*
- name: Create Sigstore attestations for built distributions
uses: actions/attest@v1
@@ -87,39 +86,10 @@ jobs:
name: attestation-bundles
path: /tmp/attestation-bundles/
- - name: Mint PyPI API token
- id: mint-token
- uses: actions/github-script@v7
- with:
- # language=JavaScript
- script: |
- // retrieve the ambient OIDC token
- const oidc_request_token = process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN;
- const oidc_request_url = process.env.ACTIONS_ID_TOKEN_REQUEST_URL;
- const oidc_resp = await fetch(`${oidc_request_url}&audience=pypi`, {
- headers: {Authorization: `bearer ${oidc_request_token}`},
- });
- const oidc_token = (await oidc_resp.json()).value;
-
- // exchange the OIDC token for an API token
- const mint_resp = await fetch('https://pypi.org/_/oidc/github/mint-token', {
- method: 'post',
- body: `{"token": "${oidc_token}"}` ,
- headers: {'Content-Type': 'application/json'},
- });
- const api_token = (await mint_resp.json()).token;
-
- // mask the newly minted API token, so that we don't accidentally leak it
- core.setSecret(api_token)
- core.setOutput('api-token', api_token)
-
- name: Upload to PyPI
env:
TWINE_NON_INTERACTIVE: "true"
- TWINE_USERNAME: "__token__"
- TWINE_PASSWORD: "${{ steps.mint-token.outputs.api-token }}"
- run: |
- twine upload dist/* --attestations
+ run: twine upload dist/* --attestations
github-release:
runs-on: ubuntu-latest
diff --git a/pyproject.toml b/pyproject.toml
index c19476cf0d2..65dce1780d8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -142,7 +142,7 @@ package = [
"betterproto==2.0.0b6", # resolution fails without betterproto
"build",
"pypi-attestations==0.0.22",
- "twine>=5.1",
+ "twine>=6.1",
]
test = [
"pytest>=8.0",
From 586a6dcba92be4566b90941f534f805974ebd712 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Sun, 27 Apr 2025 19:57:18 +0100
Subject: [PATCH 042/466] Bump ``astral-sh/setup-uv`` to v6
---
.github/workflows/builddoc.yml | 2 +-
.github/workflows/create-release.yml | 2 +-
.github/workflows/lint.yml | 8 ++++----
.github/workflows/main.yml | 14 +++++++-------
.github/workflows/transifex.yml | 4 ++--
5 files changed, 15 insertions(+), 15 deletions(-)
diff --git a/.github/workflows/builddoc.yml b/.github/workflows/builddoc.yml
index 8955cf2988a..e049f34e23c 100644
--- a/.github/workflows/builddoc.yml
+++ b/.github/workflows/builddoc.yml
@@ -31,7 +31,7 @@ jobs:
- name: Install graphviz
run: sudo apt-get install --no-install-recommends --yes graphviz
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml
index b3820cc164c..d449e1446ae 100644
--- a/.github/workflows/create-release.yml
+++ b/.github/workflows/create-release.yml
@@ -35,7 +35,7 @@ jobs:
with:
python-version: "3"
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 7e72b6f6fd0..d051e626886 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -50,7 +50,7 @@ jobs:
with:
python-version: "3"
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
@@ -71,7 +71,7 @@ jobs:
with:
python-version: "3"
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
@@ -92,7 +92,7 @@ jobs:
with:
python-version: "3"
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
@@ -113,7 +113,7 @@ jobs:
with:
python-version: "3"
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 7e7bdb6dab2..11b7ee07a0c 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -62,7 +62,7 @@ jobs:
- name: Install graphviz
run: sudo apt-get install --no-install-recommends --yes graphviz
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
@@ -193,7 +193,7 @@ jobs:
- name: Install graphviz
run: choco install --no-progress graphviz
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
@@ -222,7 +222,7 @@ jobs:
- name: Install graphviz
run: brew install graphviz
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
@@ -257,7 +257,7 @@ jobs:
- name: Install graphviz
run: sudo apt-get install --no-install-recommends --yes graphviz
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
@@ -290,7 +290,7 @@ jobs:
- name: Install graphviz
run: sudo apt-get install --no-install-recommends --yes graphviz
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
@@ -321,7 +321,7 @@ jobs:
- name: Check Python version
run: python --version --version
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
@@ -352,7 +352,7 @@ jobs:
- name: Install graphviz
run: sudo apt-get install --no-install-recommends --yes graphviz
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
diff --git a/.github/workflows/transifex.yml b/.github/workflows/transifex.yml
index 56246266515..8f16784fac9 100644
--- a/.github/workflows/transifex.yml
+++ b/.github/workflows/transifex.yml
@@ -36,7 +36,7 @@ jobs:
curl -o- https://raw.githubusercontent.com/transifex/cli/master/install.sh | bash
shell: bash
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
@@ -72,7 +72,7 @@ jobs:
curl -o- https://raw.githubusercontent.com/transifex/cli/master/install.sh | bash
shell: bash
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
From 3f617a3de3d6f7cd45a4458ee230e7353922e513 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sun, 27 Apr 2025 20:29:43 +0100
Subject: [PATCH 043/466] Use dependency groups with pip 25.1 (#13512)
---
.github/workflows/main.yml | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 11b7ee07a0c..597e605b581 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -103,7 +103,7 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
- python -m pip install .[test]
+ python -m pip install . --group test
- name: Install Docutils ${{ matrix.docutils }}
run: python -m pip install --upgrade "docutils~=${{ matrix.docutils }}.0"
- name: Test with pytest
@@ -137,7 +137,7 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
- python -m pip install .[test]
+ python -m pip install . --group test
- name: Test with pytest
run: python -m pytest -n logical --dist=worksteal -vv --durations 25
env:
@@ -169,7 +169,7 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
- python -m pip install .[test]
+ python -m pip install . --group test
- name: Test with pytest
run: python -m pytest -n logical --dist=worksteal -vv --durations 25
env:
From 873b732de49fd71c6c62fc5546d2fb50309987e3 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sun, 27 Apr 2025 20:40:20 +0100
Subject: [PATCH 044/466] Use ``actions/setup-python`` for free-threaded
testing (#13511)
---
.github/workflows/main.yml | 37 +++++--------------------------------
1 file changed, 5 insertions(+), 32 deletions(-)
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 597e605b581..c4e96881d81 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -37,6 +37,7 @@ jobs:
- "3.11"
- "3.12"
- "3.13"
+ - "3.13t"
docutils:
- "0.20"
- "0.21"
@@ -68,8 +69,12 @@ jobs:
enable-cache: false
- name: Install dependencies
run: uv pip install . --group test
+ env:
+ UV_PYTHON: "python${{ matrix.python }}"
- name: Install Docutils ${{ matrix.docutils }}
run: uv pip install --upgrade "docutils~=${{ matrix.docutils }}.0"
+ env:
+ UV_PYTHON: "python${{ matrix.python }}"
- name: Test with pytest
run: python -m pytest -n logical --dist=worksteal -vv --durations 25
env:
@@ -111,38 +116,6 @@ jobs:
env:
PYTHONWARNINGS: "error" # treat all warnings as errors
- free-threaded:
- runs-on: ubuntu-latest
- name: Python ${{ matrix.python }} (free-threaded)
- timeout-minutes: 15
- strategy:
- fail-fast: false
- matrix:
- python:
- - "3.13"
-
- steps:
- - uses: actions/checkout@v4
- with:
- persist-credentials: false
- - name: Set up Python ${{ matrix.python }} (deadsnakes)
- uses: deadsnakes/action@v3.2.0
- with:
- python-version: ${{ matrix.python }}
- nogil: true
- - name: Check Python version
- run: python --version --version
- - name: Install graphviz
- run: sudo apt-get install --no-install-recommends --yes graphviz
- - name: Install dependencies
- run: |
- python -m pip install --upgrade pip
- python -m pip install . --group test
- - name: Test with pytest
- run: python -m pytest -n logical --dist=worksteal -vv --durations 25
- env:
- PYTHONWARNINGS: "error" # treat all warnings as errors
-
deadsnakes-free-threaded:
runs-on: ubuntu-latest
name: Python ${{ matrix.python }} (free-threaded)
From b6aefedfa74a14c76c0d0f588033cd2365ad0fc5 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sun, 27 Apr 2025 20:51:07 +0100
Subject: [PATCH 045/466] Use a faster temporary directory for CI on Windows
(#13513)
---
.github/workflows/main.yml | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index c4e96881d81..cf40554e6f4 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -154,6 +154,14 @@ jobs:
timeout-minutes: 15
steps:
+ # https://github.com/actions/runner-images/issues/8755
+ # On standard runners, the D: drive is much faster.
+ - name: Set %TMP% and %TEMP% to D:\\Tmp
+ run: |
+ mkdir "D:\\Tmp"
+ echo "TMP=D:\\Tmp" >> $env:GITHUB_ENV
+ echo "TEMP=D:\\Tmp" >> $env:GITHUB_ENV
+
- uses: actions/checkout@v4
with:
persist-credentials: false
From 5e07baf2a80f23fbe1f4e1e396534ab920866e33 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Sun, 27 Apr 2025 20:55:39 +0100
Subject: [PATCH 046/466] Bump pypi-attestations to 0.0.25
---
pyproject.toml | 4 ++--
utils/convert_attestations.py | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 65dce1780d8..0d86b2adff7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -104,7 +104,7 @@ lint = [
"types-urllib3==1.26.25.14",
"pyright==1.1.400",
"pytest>=8.0",
- "pypi-attestations==0.0.22",
+ "pypi-attestations==0.0.25",
"betterproto==2.0.0b6",
]
test = [
@@ -141,7 +141,7 @@ lint = [
package = [
"betterproto==2.0.0b6", # resolution fails without betterproto
"build",
- "pypi-attestations==0.0.22",
+ "pypi-attestations==0.0.25",
"twine>=6.1",
]
test = [
diff --git a/utils/convert_attestations.py b/utils/convert_attestations.py
index 0d013bf97ce..d4516c3c3f4 100644
--- a/utils/convert_attestations.py
+++ b/utils/convert_attestations.py
@@ -7,7 +7,7 @@
# /// script
# requires-python = ">=3.11"
# dependencies = [
-# "pypi-attestations==0.0.22",
+# "pypi-attestations==0.0.25",
# "betterproto==2.0.0b6",
# ]
# ///
From c4929d026c8d22ba229b39cfc2250a9eb1476282 Mon Sep 17 00:00:00 2001
From: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>
Date: Tue, 29 Apr 2025 19:30:38 +0300
Subject: [PATCH 047/466] Fix typos (#13520)
---
doc/man/sphinx-build.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/man/sphinx-build.rst b/doc/man/sphinx-build.rst
index 63af7e49b4c..055e3f366cc 100644
--- a/doc/man/sphinx-build.rst
+++ b/doc/man/sphinx-build.rst
@@ -272,13 +272,13 @@ Options
From Sphinx 8.1, :option:`!--keep-going` is always enabled.
Previously, it was only applicable whilst using :option:`--fail-on-warning`,
which by default exited :program:`sphinx-build` on the first warning.
- Using :option:`!--keep-going` runs :program:`!sphinx-build` to completion
+ Using :option:`!--keep-going` runs :program:`sphinx-build` to completion
and exits with exit status 1 if errors are encountered.
.. versionadded:: 1.8
.. versionchanged:: 8.1
:program:`sphinx-build` no longer exits on the first warning,
- meaning that in effect :option:`!--fail-on-warning` is always enabled.
+ meaning that in effect :option:`!--keep-going` is always enabled.
The option is retained for compatibility, but may be removed at some
later date.
From 97affba56c4de0c1d1e109da136ea7b2e06eb75b Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Mon, 12 May 2025 17:22:18 +0100
Subject: [PATCH 048/466] Bump Ruff to 0.11.8
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 0d86b2adff7..fb01508bae7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -92,7 +92,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.7",
+ "ruff==0.11.8",
"mypy==1.15.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
@@ -135,7 +135,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.7",
+ "ruff==0.11.8",
"sphinx-lint>=0.9",
]
package = [
From ad85bf8a4dd4edbb994d897a83aba8508d47e378 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Mon, 12 May 2025 17:24:18 +0100
Subject: [PATCH 049/466] Bump Ruff to 0.11.9
---
pyproject.toml | 4 ++--
tests/roots/test-ext-autodoc/target/need_mocks.py | 3 +--
2 files changed, 3 insertions(+), 4 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index fb01508bae7..0488b3b9d14 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -92,7 +92,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.8",
+ "ruff==0.11.9",
"mypy==1.15.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
@@ -135,7 +135,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.8",
+ "ruff==0.11.9",
"sphinx-lint>=0.9",
]
package = [
diff --git a/tests/roots/test-ext-autodoc/target/need_mocks.py b/tests/roots/test-ext-autodoc/target/need_mocks.py
index 1b8af7055d6..73782a2fde8 100644
--- a/tests/roots/test-ext-autodoc/target/need_mocks.py
+++ b/tests/roots/test-ext-autodoc/target/need_mocks.py
@@ -1,10 +1,9 @@
import missing_module
import missing_package1.missing_module1
+import sphinx.missing_module4
from missing_module import missing_name
from missing_package2 import missing_module2
from missing_package3.missing_module3 import missing_name # NoQA: F811
-
-import sphinx.missing_module4
from sphinx.missing_module4 import missing_name2
From 5a73bf6af0faa10f9e25e02a2d62c7e174492ecd Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Mon, 12 May 2025 17:31:12 +0100
Subject: [PATCH 050/466] Disallow untyped defs in
``tests/test_util/test_util_images.py`` (#13543)
Co-authored-by: Adam Turner <9087854+aa-turner@users.noreply.github.com>
---
pyproject.toml | 1 -
tests/test_util/test_util_images.py | 6 +++++-
2 files changed, 5 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 0488b3b9d14..fc429193a38 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -290,7 +290,6 @@ module = [
"tests.test_util.test_util",
"tests.test_util.test_util_display",
"tests.test_util.test_util_docutils",
- "tests.test_util.test_util_images",
"tests.test_util.test_util_inventory",
# tests/test_writers
"tests.test_writers.test_docutilsconf",
diff --git a/tests/test_util/test_util_images.py b/tests/test_util/test_util_images.py
index b56d68c1083..875fc0d98f4 100644
--- a/tests/test_util/test_util_images.py
+++ b/tests/test_util/test_util_images.py
@@ -11,13 +11,17 @@
parse_data_uri,
)
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+ from pathlib import Path
+
GIF_FILENAME = 'img.gif'
PNG_FILENAME = 'img.png'
PDF_FILENAME = 'img.pdf'
TXT_FILENAME = 'index.txt'
-def test_get_image_size(rootdir):
+def test_get_image_size(rootdir: Path) -> None:
assert get_image_size(rootdir / 'test-root' / GIF_FILENAME) == (200, 181)
assert get_image_size(rootdir / 'test-root' / PNG_FILENAME) == (200, 181)
assert get_image_size(rootdir / 'test-root' / PDF_FILENAME) is None
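The ``TYPE_CHECKING = False`` constant added above is a lightweight variant of ``typing.TYPE_CHECKING``: recent mypy and pyright treat the guard as true during analysis, while at runtime the guarded import is skipped entirely (and ``typing`` itself is never imported). A minimal sketch of the idiom, with assumed names:

    TYPE_CHECKING = False
    if TYPE_CHECKING:  # true only for static type checkers
        from pathlib import Path

    def read_text(path: 'Path') -> str:  # string annotation, no runtime import
        with open(path, encoding='utf-8') as f:
            return f.read()

(The test file itself can write the annotation unquoted because Sphinx enables ``from __future__ import annotations`` project-wide.)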
From 0227606e71dc765ed60cd0ad2c580a43b5ffca4f Mon Sep 17 00:00:00 2001
From: James Addison <55152140+jayaddison@users.noreply.github.com>
Date: Mon, 12 May 2025 16:52:52 +0000
Subject: [PATCH 051/466] Fix tests for Python 3.14.0a7 (#13527)
Authored-by: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Co-authored-by: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Co-authored-by: James Addison <55152140+jayaddison@users.noreply.github.com>
---
tests/test_extensions/test_ext_autodoc.py | 6 +++++-
tests/test_extensions/test_ext_autodoc_configs.py | 12 ++++++++++--
2 files changed, 15 insertions(+), 3 deletions(-)
diff --git a/tests/test_extensions/test_ext_autodoc.py b/tests/test_extensions/test_ext_autodoc.py
index a06c1bbe30d..7aa12db3c32 100644
--- a/tests/test_extensions/test_ext_autodoc.py
+++ b/tests/test_extensions/test_ext_autodoc.py
@@ -938,10 +938,14 @@ def test_autodoc_special_members(app):
}
if sys.version_info >= (3, 13, 0, 'alpha', 5):
options['exclude-members'] = '__static_attributes__,__firstlineno__'
+ if sys.version_info >= (3, 14, 0, 'alpha', 7):
+ ann_attr_name = '__annotations_cache__'
+ else:
+ ann_attr_name = '__annotations__'
actual = do_autodoc(app, 'class', 'target.Class', options)
assert list(filter(lambda l: '::' in l, actual)) == [
'.. py:class:: Class(arg)',
- ' .. py:attribute:: Class.__annotations__',
+ f' .. py:attribute:: Class.{ann_attr_name}',
' .. py:attribute:: Class.__dict__',
' .. py:method:: Class.__init__(arg)',
' .. py:attribute:: Class.__module__',
diff --git a/tests/test_extensions/test_ext_autodoc_configs.py b/tests/test_extensions/test_ext_autodoc_configs.py
index ab7539190e0..c88496ee506 100644
--- a/tests/test_extensions/test_ext_autodoc_configs.py
+++ b/tests/test_extensions/test_ext_autodoc_configs.py
@@ -1348,6 +1348,10 @@ def test_autodoc_type_aliases(app: SphinxTestApp) -> None:
# default
options = {'members': None}
actual = do_autodoc(app, 'module', 'target.autodoc_type_aliases', options)
+ if sys.version_info >= (3, 14, 0, 'alpha', 7):
+ attr2_typeinfo = ()
+ else:
+ attr2_typeinfo = (' :type: int',)
assert list(actual) == [
'',
'.. py:module:: target.autodoc_type_aliases',
@@ -1368,7 +1372,7 @@ def test_autodoc_type_aliases(app: SphinxTestApp) -> None:
'',
' .. py:attribute:: Foo.attr2',
' :module: target.autodoc_type_aliases',
- ' :type: int',
+ *attr2_typeinfo,
'',
' docstring',
'',
@@ -1421,6 +1425,10 @@ def test_autodoc_type_aliases(app: SphinxTestApp) -> None:
'io.StringIO': 'my.module.StringIO',
}
actual = do_autodoc(app, 'module', 'target.autodoc_type_aliases', options)
+ if sys.version_info >= (3, 14, 0, 'alpha', 7):
+ attr2_typeinfo = ()
+ else:
+ attr2_typeinfo = (' :type: myint',)
assert list(actual) == [
'',
'.. py:module:: target.autodoc_type_aliases',
@@ -1441,7 +1449,7 @@ def test_autodoc_type_aliases(app: SphinxTestApp) -> None:
'',
' .. py:attribute:: Foo.attr2',
' :module: target.autodoc_type_aliases',
- ' :type: myint',
+ *attr2_typeinfo,
'',
' docstring',
'',
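The ``(3, 14, 0, 'alpha', 7)`` comparison works because ``sys.version_info`` is a five-field sequence (major, minor, micro, releaselevel, serial) and tuples compare element-wise; conveniently, the release levels ``'alpha' < 'beta' < 'candidate' < 'final'`` also sort correctly as plain strings. An illustrative snippet (not from the patch):

    import sys

    print(sys.version_info >= (3, 14, 0, 'alpha', 7))
    # Element-wise comparison falls through to the release-level string:
    print((3, 14, 0, 'alpha', 7) < (3, 14, 0, 'final', 0))  # True
    print((3, 14, 0, 'final', 0) < (3, 15, 0, 'alpha', 1))  # True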
From 059ee9c6269dec3f12eb73c85dd9c6ebb5667427 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Mon, 12 May 2025 18:03:51 +0100
Subject: [PATCH 052/466] Fix mypy failures
---
tests/test_extensions/test_ext_autodoc_configs.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/tests/test_extensions/test_ext_autodoc_configs.py b/tests/test_extensions/test_ext_autodoc_configs.py
index c88496ee506..eb351442673 100644
--- a/tests/test_extensions/test_ext_autodoc_configs.py
+++ b/tests/test_extensions/test_ext_autodoc_configs.py
@@ -1348,6 +1348,7 @@ def test_autodoc_type_aliases(app: SphinxTestApp) -> None:
# default
options = {'members': None}
actual = do_autodoc(app, 'module', 'target.autodoc_type_aliases', options)
+ attr2_typeinfo: tuple[str, ...]
if sys.version_info >= (3, 14, 0, 'alpha', 7):
attr2_typeinfo = ()
else:
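Declaring ``attr2_typeinfo: tuple[str, ...]`` before the branch gives mypy a single declared type covering both assignments; without it, the type is inferred from the first assignment (the empty-tuple type) and the ``tuple[str]`` value in the ``else`` branch is rejected. An illustrative reduction, with made-up values:

    import sys

    values: tuple[str, ...]   # one declaration covering both branches
    if sys.version_info >= (3, 14):
        values = ()
    else:
        values = (' :type: int',)
    print(values)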
From e6d67ca53c11718023cd3964daa66e8eb15c4f40 Mon Sep 17 00:00:00 2001
From: James Addison <55152140+jayaddison@users.noreply.github.com>
Date: Mon, 12 May 2025 17:13:19 +0000
Subject: [PATCH 053/466] Ensure Python clock timezone is reset during test
teardown (#13537)
---
tests/test_builders/test_build_linkcheck.py | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/tests/test_builders/test_build_linkcheck.py b/tests/test_builders/test_build_linkcheck.py
index bdd8dea54c1..b0c74856b4e 100644
--- a/tests/test_builders/test_build_linkcheck.py
+++ b/tests/test_builders/test_build_linkcheck.py
@@ -1127,6 +1127,12 @@ def test_too_many_requests_retry_after_HTTP_date(tz, app, monkeypatch, capsys):
) as address:
app.build()
+ # Undo side-effects: the monkeypatch context manager clears the TZ environment
+ # variable, but we also need to reset Python's internal notion of the current
+ # timezone.
+ if sys.platform != 'win32':
+ time.tzset()
+
content = (app.outdir / 'output.json').read_text(encoding='utf8')
assert json.loads(content) == {
'filename': 'index.rst',
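``time.tzset()`` makes the C library re-read the ``TZ`` environment variable, so clearing ``TZ`` alone does not undo an earlier ``tzset()`` call made under a monkeypatched value. A self-contained sketch of the pattern (POSIX only; the ``TZ`` value is illustrative):

    import os
    import sys
    import time

    os.environ['TZ'] = 'UTC'
    if sys.platform != 'win32':
        time.tzset()        # apply the patched TZ
    print(time.tzname)

    os.environ.pop('TZ', None)
    if sys.platform != 'win32':
        time.tzset()        # restore the system default timezone
    print(time.tzname)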
From c6e39d858467427ba0255824932fbc8b41694a71 Mon Sep 17 00:00:00 2001
From: Shengyu Zhang
Date: Tue, 13 May 2025 02:29:18 +0800
Subject: [PATCH 054/466] Add missing backslashes in LaTeX documentation
(#13525)
---
doc/latex.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/latex.rst b/doc/latex.rst
index fce61480941..80762b1c2c1 100644
--- a/doc/latex.rst
+++ b/doc/latex.rst
@@ -500,7 +500,7 @@ Keys that don't need to be overridden unless in special cases are:
.. hint::
If the key value is set to
-      :code-tex:`r'\\newcommand\sphinxbackoftitlepage{<Extra material>}\\sphinxmaketitle'`, then ``<Extra material>`` will be
+      :code-tex:`r'\\newcommand\\sphinxbackoftitlepage{<Extra material>}\\sphinxmaketitle'`, then ``<Extra material>`` will be
       typeset on back of title page (``'manual'`` docclass only).
@@ -1694,7 +1694,7 @@ Macros
.. hint::
If adding to preamble the loading of ``tocloft`` package, also add to
- preamble :code-tex:`\\renewcommand\sphinxtableofcontentshook{}` else it
+ preamble :code-tex:`\\renewcommand\\sphinxtableofcontentshook{}` else it
will reset :code-tex:`\\l@section` and :code-tex:`\\l@subsection`
cancelling ``tocloft`` customization.
From 7838043f2c1951122592ac84e74f83db5a48ca3d Mon Sep 17 00:00:00 2001
From: Yuki Kobayashi
Date: Tue, 13 May 2025 04:42:42 +0900
Subject: [PATCH 055/466] Support annotations and default values in
``_pseudo_parse_arglist`` (#13536)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
---
sphinx/domains/javascript.py | 5 +-
sphinx/domains/python/_annotations.py | 30 +++++++++-
sphinx/domains/python/_object.py | 10 ++--
tests/test_domains/test_domain_py.py | 21 ++++---
.../test_domains/test_domain_py_pyfunction.py | 56 +++++++++++++++++++
5 files changed, 105 insertions(+), 17 deletions(-)
diff --git a/sphinx/domains/javascript.py b/sphinx/domains/javascript.py
index 968f73aa3d3..51a93bcf802 100644
--- a/sphinx/domains/javascript.py
+++ b/sphinx/domains/javascript.py
@@ -137,8 +137,9 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str]
_pseudo_parse_arglist(
signode,
arglist,
- multi_line_parameter_list,
- trailing_comma,
+ multi_line_parameter_list=multi_line_parameter_list,
+ trailing_comma=trailing_comma,
+ env=self.env,
)
return fullname, prefix
diff --git a/sphinx/domains/python/_annotations.py b/sphinx/domains/python/_annotations.py
index 29e47fa7151..60def00a533 100644
--- a/sphinx/domains/python/_annotations.py
+++ b/sphinx/domains/python/_annotations.py
@@ -552,8 +552,10 @@ def _keyword_only_separator() -> addnodes.desc_parameter:
def _pseudo_parse_arglist(
signode: desc_signature,
arglist: str,
+ *,
multi_line_parameter_list: bool = False,
trailing_comma: bool = True,
+ env: BuildEnvironment,
) -> None:
"""'Parse' a list of arguments separated by commas.
@@ -561,6 +563,7 @@ def _pseudo_parse_arglist(
brackets. Currently, this will split at any comma, even if it's inside a
string literal (e.g. default argument value).
"""
+ # TODO: decompose 'env' parameter into only the required bits
paramlist = addnodes.desc_parameterlist()
paramlist['multi_line_parameter_list'] = multi_line_parameter_list
paramlist['multi_line_trailing_comma'] = trailing_comma
@@ -583,9 +586,30 @@ def _pseudo_parse_arglist(
ends_open += 1
argument = argument[:-1].strip()
if argument:
- stack[-1] += addnodes.desc_parameter(
- '', '', addnodes.desc_sig_name(argument, argument)
- )
+ param_with_annotation, _, default_value = argument.partition('=')
+ param_name, _, annotation = param_with_annotation.partition(':')
+ del param_with_annotation
+
+ node = addnodes.desc_parameter()
+ node += addnodes.desc_sig_name('', param_name.strip())
+ if annotation:
+ children = _parse_annotation(annotation.strip(), env=env)
+ node += addnodes.desc_sig_punctuation('', ':')
+ node += addnodes.desc_sig_space()
+ node += addnodes.desc_sig_name('', '', *children) # type: ignore[arg-type]
+ if default_value:
+ if annotation:
+ node += addnodes.desc_sig_space()
+ node += addnodes.desc_sig_operator('', '=')
+ if annotation:
+ node += addnodes.desc_sig_space()
+ node += nodes.inline(
+ '',
+ default_value.strip(),
+ classes=['default_value'],
+ support_smartquotes=False,
+ )
+ stack[-1] += node
while ends_open:
stack.append(addnodes.desc_optional())
stack[-2] += stack[-1]
diff --git a/sphinx/domains/python/_object.py b/sphinx/domains/python/_object.py
index a858afe8a3e..fd4e62bbbe0 100644
--- a/sphinx/domains/python/_object.py
+++ b/sphinx/domains/python/_object.py
@@ -363,8 +363,9 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str]
_pseudo_parse_arglist(
signode,
arglist,
- multi_line_parameter_list,
- trailing_comma,
+ multi_line_parameter_list=multi_line_parameter_list,
+ trailing_comma=trailing_comma,
+ env=self.env,
)
except (NotImplementedError, ValueError) as exc:
# duplicated parameter names raise ValueError and not a SyntaxError
@@ -374,8 +375,9 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str]
_pseudo_parse_arglist(
signode,
arglist,
- multi_line_parameter_list,
- trailing_comma,
+ multi_line_parameter_list=multi_line_parameter_list,
+ trailing_comma=trailing_comma,
+ env=self.env,
)
else:
if self.needs_arglist():
diff --git a/tests/test_domains/test_domain_py.py b/tests/test_domains/test_domain_py.py
index 26c79ffd8fb..262773af35a 100644
--- a/tests/test_domains/test_domain_py.py
+++ b/tests/test_domains/test_domain_py.py
@@ -38,20 +38,25 @@
from sphinx.testing.util import assert_node
from sphinx.writers.text import STDINDENT
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+ from sphinx.application import Sphinx
+ from sphinx.environment import BuildEnvironment
-def parse(sig):
+
+def parse(sig: str, *, env: BuildEnvironment) -> str:
m = py_sig_re.match(sig)
if m is None:
raise ValueError
_name_prefix, _tp_list, _name, arglist, _retann = m.groups()
signode = addnodes.desc_signature(sig, '')
- _pseudo_parse_arglist(signode, arglist)
+ _pseudo_parse_arglist(signode, arglist, env=env)
return signode.astext()
-def test_function_signatures() -> None:
- rv = parse("compile(source : string, filename, symbol='file')")
- assert rv == "(source : string, filename, symbol='file')"
+def test_function_signatures(app: Sphinx) -> None:
+ rv = parse("compile(source : string, filename, symbol='file')", env=app.env)
+ assert rv == "(source: string, filename, symbol='file')"
for params, expect in [
('(a=1)', '(a=1)'),
@@ -60,9 +65,9 @@ def test_function_signatures() -> None:
('(a=1[, b=None])', '(a=1, [b=None])'),
('(a=[], [b=None])', '(a=[], [b=None])'),
('(a=[][, b=None])', '(a=[], [b=None])'),
- ('(a: Foo[Bar]=[][, b=None])', '(a: Foo[Bar]=[], [b=None])'),
+ ('(a: Foo[Bar]=[][, b=None])', '(a: Foo[Bar] = [], [b=None])'),
]:
- rv = parse(f'func{params}')
+ rv = parse(f'func{params}', env=app.env)
assert rv == expect
# Note: 'def f[Foo[Bar]]()' is not valid Python but people might write
@@ -70,7 +75,7 @@ def test_function_signatures() -> None:
# variable.
for tparams in ['', '[Foo]', '[Foo[Bar]]']:
for retann in ['', '-> Foo', '-> Foo[Bar]', '-> anything else']:
- rv = parse(f'func{tparams}{params} {retann}'.rstrip())
+ rv = parse(f'func{tparams}{params} {retann}'.rstrip(), env=app.env)
assert rv == expect
diff --git a/tests/test_domains/test_domain_py_pyfunction.py b/tests/test_domains/test_domain_py_pyfunction.py
index 32f4e669291..a240d52ec5d 100644
--- a/tests/test_domains/test_domain_py_pyfunction.py
+++ b/tests/test_domains/test_domain_py_pyfunction.py
@@ -27,6 +27,10 @@
from sphinx.testing import restructuredtext
from sphinx.testing.util import assert_node
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+ from sphinx.application import Sphinx
+
@pytest.mark.sphinx('html', testroot='_blank')
def test_pyfunction(app):
@@ -487,6 +491,58 @@ def test_optional_pyfunction_signature(app):
)
+@pytest.mark.sphinx('html', testroot='_blank')
+def test_pyfunction_signature_with_bracket(app: Sphinx) -> None:
+ text = '.. py:function:: hello(a : ~typing.Any = ) -> None'
+ doctree = restructuredtext.parse(app, text)
+ assert_node(
+ doctree,
+ (
+ addnodes.index,
+ [
+ desc,
+ (
+ [
+ desc_signature,
+ (
+ [desc_name, 'hello'],
+ desc_parameterlist,
+ [desc_returns, pending_xref, 'None'],
+ ),
+ ],
+ desc_content,
+ ),
+ ],
+ ),
+ )
+ assert_node(
+ doctree[1],
+ addnodes.desc,
+ desctype='function',
+ domain='py',
+ objtype='function',
+ no_index=False,
+ )
+ assert_node(
+ doctree[1][0][1], # type: ignore[index]
+ (
+ [
+ desc_parameter,
+ (
+ [desc_sig_name, 'a'],
+ [desc_sig_punctuation, ':'],
+ desc_sig_space,
+ [desc_sig_name, pending_xref, 'Any'],
+ desc_sig_space,
+ [desc_sig_operator, '='],
+ desc_sig_space,
+ [nodes.inline, ''],
+ ),
+ ],
+ ),
+ )
+
+
@pytest.mark.sphinx(
'html',
testroot='root',
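The core of the new behaviour is the pair of ``str.partition`` calls: each comma-separated pseudo-argument is split into name, optional annotation, and optional default value. A standalone model of just the splitting (not the actual Sphinx function, which also builds docutils nodes):

    def split_argument(argument: str) -> tuple[str, str, str]:
        param_with_annotation, _, default_value = argument.partition('=')
        param_name, _, annotation = param_with_annotation.partition(':')
        return param_name.strip(), annotation.strip(), default_value.strip()

    print(split_argument('a: Foo[Bar]=[]'))  # ('a', 'Foo[Bar]', '[]')
    print(split_argument("symbol='file'"))   # ('symbol', '', "'file'")
    print(split_argument('filename'))        # ('filename', '', '')

As the docstring warns, this remains a 'pseudo' parse: it splits at any comma and at the first ``=`` or ``:``, so values containing those characters can still be mis-split.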
From 4051354182034a2367051e5c71072c88771de5d8 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Mon, 12 May 2025 21:23:22 +0100
Subject: [PATCH 056/466] Mark tests as expected failures on Docutils 0.22.0rc2
(#13547)
---
tests/test_directives/test_directive_only.py | 7 +++++++
tests/test_environment/test_environment_toctree.py | 9 +++++++++
tests/test_util/test_util_docutils_sphinx_directive.py | 9 +++++++++
3 files changed, 25 insertions(+)
diff --git a/tests/test_directives/test_directive_only.py b/tests/test_directives/test_directive_only.py
index 9e62f4cb3eb..700d11588de 100644
--- a/tests/test_directives/test_directive_only.py
+++ b/tests/test_directives/test_directive_only.py
@@ -5,13 +5,20 @@
import re
from typing import TYPE_CHECKING
+import docutils
import pytest
from docutils import nodes
if TYPE_CHECKING:
from sphinx.testing.util import SphinxTestApp
+xfail_du_22 = pytest.mark.xfail(
+ docutils.__version_info__ >= (0, 22, 0, 'alpha', 0),
+ reason='expected failure on Docutils 0.22+',
+)
+
+@xfail_du_22
@pytest.mark.sphinx('text', testroot='directive-only')
def test_sectioning(app: SphinxTestApp) -> None:
def getsects(section):
diff --git a/tests/test_environment/test_environment_toctree.py b/tests/test_environment/test_environment_toctree.py
index f6b849c5bec..72558aaa6da 100644
--- a/tests/test_environment/test_environment_toctree.py
+++ b/tests/test_environment/test_environment_toctree.py
@@ -4,6 +4,7 @@
from typing import TYPE_CHECKING
+import docutils
import pytest
from docutils import nodes
from docutils.nodes import bullet_list, list_item, literal, reference, title
@@ -17,7 +18,13 @@
if TYPE_CHECKING:
from sphinx.testing.util import SphinxTestApp
+xfail_du_22 = pytest.mark.xfail(
+ docutils.__version_info__ >= (0, 22, 0, 'alpha', 0),
+ reason='expected failure on Docutils 0.22+',
+)
+
+@xfail_du_22
@pytest.mark.sphinx('xml', testroot='toctree')
@pytest.mark.test_params(shared_result='test_environment_toctree_basic')
def test_process_doc(app):
@@ -464,6 +471,7 @@ def test_domain_objects_document_scoping(app: SphinxTestApp) -> None:
)
+@xfail_du_22
@pytest.mark.sphinx('xml', testroot='toctree')
@pytest.mark.test_params(shared_result='test_environment_toctree_basic')
def test_document_toc(app):
@@ -521,6 +529,7 @@ def test_document_toc(app):
assert_node(toctree[2][0], [compact_paragraph, reference, 'Indices and tables'])
+@xfail_du_22
@pytest.mark.sphinx('xml', testroot='toctree')
@pytest.mark.test_params(shared_result='test_environment_toctree_basic')
def test_document_toc_only(app):
diff --git a/tests/test_util/test_util_docutils_sphinx_directive.py b/tests/test_util/test_util_docutils_sphinx_directive.py
index ecfcab0b489..5770b1860f7 100644
--- a/tests/test_util/test_util_docutils_sphinx_directive.py
+++ b/tests/test_util/test_util_docutils_sphinx_directive.py
@@ -2,6 +2,8 @@
from types import SimpleNamespace
+import docutils
+import pytest
from docutils import nodes
from docutils.parsers.rst.languages import en as english # type: ignore[attr-defined]
from docutils.parsers.rst.states import (
@@ -14,6 +16,11 @@
from sphinx.util.docutils import SphinxDirective, new_document
+xfail_du_22 = pytest.mark.xfail(
+ docutils.__version_info__ >= (0, 22, 0, 'alpha', 0),
+ reason='expected failure on Docutils 0.22+',
+)
+
def make_directive(
*, env: SimpleNamespace, input_lines: StringList | None = None
@@ -104,6 +111,7 @@ def test_sphinx_directive_get_location() -> None:
assert directive.get_location() == ':1'
+@xfail_du_22
def test_sphinx_directive_parse_content_to_nodes() -> None:
directive = make_directive(env=SimpleNamespace())
content = 'spam\n====\n\nEggs! *Lobster thermidor.*'
@@ -120,6 +128,7 @@ def test_sphinx_directive_parse_content_to_nodes() -> None:
assert node.children[1].astext() == 'Eggs! Lobster thermidor.'
+@xfail_du_22
def test_sphinx_directive_parse_text_to_nodes() -> None:
directive = make_directive(env=SimpleNamespace())
content = 'spam\n====\n\nEggs! *Lobster thermidor.*'
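The ``xfail_du_22`` marker is a standard conditionally-applied ``pytest.mark.xfail``: the condition is evaluated once at import time, and marked tests are reported as XFAIL rather than failures when it holds. A generic sketch of the pattern (condition and names are illustrative):

    import pytest

    CONDITION = True  # e.g. a dependency version check

    xfail_if_condition = pytest.mark.xfail(
        CONDITION, reason='known failure under this condition'
    )

    @xfail_if_condition
    def test_example() -> None:
        assert 1 + 1 == 3  # reported as XFAIL, not a failure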
From 5355a78e790cef630d19a2ab47107b85c5d626e1 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Mon, 12 May 2025 21:44:41 +0100
Subject: [PATCH 057/466] Fix ``test_util`` for Docutils 0.22+ (#13548)
---
.../test_util_docutils_sphinx_directive.py | 35 +++++++++----------
1 file changed, 17 insertions(+), 18 deletions(-)
diff --git a/tests/test_util/test_util_docutils_sphinx_directive.py b/tests/test_util/test_util_docutils_sphinx_directive.py
index 5770b1860f7..00ea5bc3fb5 100644
--- a/tests/test_util/test_util_docutils_sphinx_directive.py
+++ b/tests/test_util/test_util_docutils_sphinx_directive.py
@@ -3,7 +3,6 @@
from types import SimpleNamespace
import docutils
-import pytest
from docutils import nodes
from docutils.parsers.rst.languages import en as english # type: ignore[attr-defined]
from docutils.parsers.rst.states import (
@@ -16,11 +15,6 @@
from sphinx.util.docutils import SphinxDirective, new_document
-xfail_du_22 = pytest.mark.xfail(
- docutils.__version_info__ >= (0, 22, 0, 'alpha', 0),
- reason='expected failure on Docutils 0.22+',
-)
-
def make_directive(
*, env: SimpleNamespace, input_lines: StringList | None = None
@@ -37,23 +31,30 @@ def make_directive_and_state(
if input_lines is not None:
sm.input_lines = input_lines
state = RSTState(sm)
- state.document = new_document('')
- state.document.settings.env = env
- state.document.settings.tab_width = 4
- state.document.settings.pep_references = None
- state.document.settings.rfc_references = None
+ document = state.document = new_document('')
+ document.settings.env = env
+ document.settings.tab_width = 4
+ document.settings.pep_references = None
+ document.settings.rfc_references = None
inliner = Inliner()
- inliner.init_customizations(state.document.settings)
+ inliner.init_customizations(document.settings)
state.inliner = inliner
state.parent = None
state.memo = SimpleNamespace(
- document=state.document,
+ document=document,
+ reporter=document.reporter,
language=english,
- inliner=state.inliner,
- reporter=state.document.reporter,
- section_level=0,
title_styles=[],
+ # section_parents=[], # Docutils 0.22+
+ section_level=0,
+ section_bubble_up_kludge=False,
+ inliner=inliner,
)
+ if docutils.__version_info__ >= (0, 22, 0, 'alpha', 0):
+ # https://github.com/sphinx-doc/sphinx/issues/13539
+ # https://sourceforge.net/p/docutils/code/10093/
+ # https://sourceforge.net/p/docutils/patches/213/
+ state.memo.section_parents = []
directive = SphinxDirective(
name='test_directive',
arguments=[],
@@ -111,7 +112,6 @@ def test_sphinx_directive_get_location() -> None:
assert directive.get_location() == ':1'
-@xfail_du_22
def test_sphinx_directive_parse_content_to_nodes() -> None:
directive = make_directive(env=SimpleNamespace())
content = 'spam\n====\n\nEggs! *Lobster thermidor.*'
@@ -128,7 +128,6 @@ def test_sphinx_directive_parse_content_to_nodes() -> None:
assert node.children[1].astext() == 'Eggs! Lobster thermidor.'
-@xfail_du_22
def test_sphinx_directive_parse_text_to_nodes() -> None:
directive = make_directive(env=SimpleNamespace())
content = 'spam\n====\n\nEggs! *Lobster thermidor.*'
From f928da16337699ece42f5d7fc377870422eee3e3 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Mon, 12 May 2025 23:16:24 +0100
Subject: [PATCH 058/466] Fix tests for Docutils 0.22+ (#13549)
---
sphinx/directives/other.py | 14 +++++++++++++-
tests/test_directives/test_directive_only.py | 7 -------
tests/test_environment/test_environment_toctree.py | 9 ---------
3 files changed, 13 insertions(+), 17 deletions(-)
diff --git a/sphinx/directives/other.py b/sphinx/directives/other.py
index 8c66ed383b5..5d6f5b778a6 100644
--- a/sphinx/directives/other.py
+++ b/sphinx/directives/other.py
@@ -5,6 +5,7 @@
from pathlib import Path
from typing import TYPE_CHECKING, cast
+import docutils
from docutils import nodes
from docutils.parsers.rst import directives
from docutils.parsers.rst.directives.misc import Class
@@ -21,7 +22,7 @@
if TYPE_CHECKING:
from collections.abc import Sequence
- from typing import Any, ClassVar
+ from typing import Any, ClassVar, Final
from docutils.nodes import Element, Node
@@ -29,6 +30,7 @@
from sphinx.util.typing import ExtensionMetadata, OptionSpec
+DU_22_PLUS: Final = docutils.__version_info__ >= (0, 22, 0, 'alpha', 0)
glob_re = re.compile(r'.*[*?\[].*')
logger = logging.getLogger(__name__)
@@ -330,6 +332,14 @@ def run(self) -> list[Node]:
surrounding_section_level = memo.section_level
memo.title_styles = []
memo.section_level = 0
+ if DU_22_PLUS:
+ # https://github.com/sphinx-doc/sphinx/issues/13539
+ # https://sourceforge.net/p/docutils/code/10093/
+ # https://sourceforge.net/p/docutils/patches/213/
+ surrounding_section_parents = memo.section_parents
+ memo.section_parents = []
+ else:
+ surrounding_section_parents = []
try:
self.state.nested_parse(
self.content, self.content_offset, node, match_titles=True
@@ -365,6 +375,8 @@ def run(self) -> list[Node]:
return []
finally:
memo.title_styles = surrounding_title_styles
+ if DU_22_PLUS:
+ memo.section_parents = surrounding_section_parents
memo.section_level = surrounding_section_level
diff --git a/tests/test_directives/test_directive_only.py b/tests/test_directives/test_directive_only.py
index 700d11588de..9e62f4cb3eb 100644
--- a/tests/test_directives/test_directive_only.py
+++ b/tests/test_directives/test_directive_only.py
@@ -5,20 +5,13 @@
import re
from typing import TYPE_CHECKING
-import docutils
import pytest
from docutils import nodes
if TYPE_CHECKING:
from sphinx.testing.util import SphinxTestApp
-xfail_du_22 = pytest.mark.xfail(
- docutils.__version_info__ >= (0, 22, 0, 'alpha', 0),
- reason='expected failure on Docutils 0.22+',
-)
-
-@xfail_du_22
@pytest.mark.sphinx('text', testroot='directive-only')
def test_sectioning(app: SphinxTestApp) -> None:
def getsects(section):
diff --git a/tests/test_environment/test_environment_toctree.py b/tests/test_environment/test_environment_toctree.py
index 72558aaa6da..f6b849c5bec 100644
--- a/tests/test_environment/test_environment_toctree.py
+++ b/tests/test_environment/test_environment_toctree.py
@@ -4,7 +4,6 @@
from typing import TYPE_CHECKING
-import docutils
import pytest
from docutils import nodes
from docutils.nodes import bullet_list, list_item, literal, reference, title
@@ -18,13 +17,7 @@
if TYPE_CHECKING:
from sphinx.testing.util import SphinxTestApp
-xfail_du_22 = pytest.mark.xfail(
- docutils.__version_info__ >= (0, 22, 0, 'alpha', 0),
- reason='expected failure on Docutils 0.22+',
-)
-
-@xfail_du_22
@pytest.mark.sphinx('xml', testroot='toctree')
@pytest.mark.test_params(shared_result='test_environment_toctree_basic')
def test_process_doc(app):
@@ -471,7 +464,6 @@ def test_domain_objects_document_scoping(app: SphinxTestApp) -> None:
)
-@xfail_du_22
@pytest.mark.sphinx('xml', testroot='toctree')
@pytest.mark.test_params(shared_result='test_environment_toctree_basic')
def test_document_toc(app):
@@ -529,7 +521,6 @@ def test_document_toc(app):
assert_node(toctree[2][0], [compact_paragraph, reference, 'Indices and tables'])
-@xfail_du_22
@pytest.mark.sphinx('xml', testroot='toctree')
@pytest.mark.test_params(shared_result='test_environment_toctree_basic')
def test_document_toc_only(app):
From fefa2f26be4369a2cf81685fa20958c206a7f2af Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Mon, 12 May 2025 23:35:06 +0100
Subject: [PATCH 059/466] Add ``section_parents`` to
``_fresh_title_style_context()`` (#13551)
---
sphinx/util/parsing.py | 12 ++++++++++++
1 file changed, 12 insertions(+)
diff --git a/sphinx/util/parsing.py b/sphinx/util/parsing.py
index 4c4a6477683..ec6649fc247 100644
--- a/sphinx/util/parsing.py
+++ b/sphinx/util/parsing.py
@@ -5,15 +5,19 @@
import contextlib
from typing import TYPE_CHECKING
+import docutils
from docutils.nodes import Element
from docutils.statemachine import StringList, string2lines
if TYPE_CHECKING:
from collections.abc import Iterator
+ from typing import Final
from docutils.nodes import Node
from docutils.parsers.rst.states import RSTState
+DU_22_PLUS: Final = docutils.__version_info__ >= (0, 22, 0, 'alpha', 0)
+
def nested_parse_to_nodes(
state: RSTState,
@@ -75,15 +79,23 @@ def _fresh_title_style_context(state: RSTState) -> Iterator[None]:
memo = state.memo
surrounding_title_styles: list[str | tuple[str, str]] = memo.title_styles
surrounding_section_level: int = memo.section_level
+ if DU_22_PLUS:
+ surrounding_section_parents = memo.section_parents
+ else:
+ surrounding_section_parents = []
# clear current title styles
memo.title_styles = []
memo.section_level = 0
+ if DU_22_PLUS:
+ memo.section_parents = []
try:
yield
finally:
# reset title styles
memo.title_styles = surrounding_title_styles
memo.section_level = surrounding_section_level
+ if DU_22_PLUS:
+ memo.section_parents = surrounding_section_parents
def _text_to_string_list(
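All three of these Docutils 0.22 fixes follow the same save/clear/restore discipline around nested parsing, now extended to ``memo.section_parents``. A condensed sketch of the pattern (generic names, not the Sphinx API):

    import contextlib

    @contextlib.contextmanager
    def fresh_attribute(obj, name, fresh_value):
        """Temporarily replace an attribute, restoring it on exit."""
        saved = getattr(obj, name)
        setattr(obj, name, fresh_value)
        try:
            yield
        finally:
            setattr(obj, name, saved)

The ``DU_22_PLUS`` version gate is needed because ``section_parents`` only exists on the memo under Docutils 0.22 and later.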
From a27f37597a116bb376027fd6d044ac7fd1f0047e Mon Sep 17 00:00:00 2001
From: Steve Piercy
Date: Mon, 12 May 2025 15:43:12 -0700
Subject: [PATCH 060/466] Emend the version changed note for
``linkcheck_allowed_redirects`` (#13550)
---
AUTHORS.rst | 1 +
doc/usage/configuration.rst | 2 +-
2 files changed, 2 insertions(+), 1 deletion(-)
diff --git a/AUTHORS.rst b/AUTHORS.rst
index 5ff09219c02..43a8da3469d 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -102,6 +102,7 @@ Contributors
* Slawek Figiel -- additional warning suppression
* Stefan Seefeld -- toctree improvements
* Stefan van der Walt -- autosummary extension
+* Steve Piercy -- documentation improvements
* \T. Powers -- HTML output improvements
* Taku Shimizu -- epub3 builder
* Thomas Lamb -- linkcheck builder
diff --git a/doc/usage/configuration.rst b/doc/usage/configuration.rst
index d14b5d4ec6b..7cdf462c4ba 100644
--- a/doc/usage/configuration.rst
+++ b/doc/usage/configuration.rst
@@ -3668,7 +3668,7 @@ and which failures and redirects it ignores.
.. versionadded:: 4.1
.. versionchanged:: 8.3
- Setting :confval:`!linkcheck_allowed_redirects` to the empty directory
+ Setting :confval:`!linkcheck_allowed_redirects` to an empty dictionary
may now be used to warn on all redirects encountered
by the *linkcheck* builder.
From 491999f5699c12128c7e37b4e89f11ec9a9800db Mon Sep 17 00:00:00 2001
From: Yuki Kobayashi
Date: Tue, 13 May 2025 18:52:55 +0900
Subject: [PATCH 061/466] Docs: Fix ``nested_parse`` sample code (#13455)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
---
doc/extdev/markupapi.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/extdev/markupapi.rst b/doc/extdev/markupapi.rst
index 7aa632446da..184bd2bd8e4 100644
--- a/doc/extdev/markupapi.rst
+++ b/doc/extdev/markupapi.rst
@@ -173,9 +173,9 @@ The methods are used as follows:
def run(self) -> list[Node]:
container = docutils.nodes.Element()
# either
- nested_parse_with_titles(self.state, self.result, container)
+ nested_parse_with_titles(self.state, self.result, container, self.content_offset)
# or
- self.state.nested_parse(self.result, 0, container)
+ self.state.nested_parse(self.result, self.content_offset, container)
parsed = container.children
return parsed
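(Passing ``self.content_offset`` rather than ``0`` matters for diagnostics: it tells the parser where the directive body begins in the source file, so warnings and errors are reported with the correct line numbers.)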
From 05137c25acf99c07badd6de25379fcfd8a6a120c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Wed, 14 May 2025 09:48:56 +0200
Subject: [PATCH 062/466] LaTeX: fix the #13525 fix of code-tex markup in docs
---
doc/latex.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/latex.rst b/doc/latex.rst
index 80762b1c2c1..e8f8285f763 100644
--- a/doc/latex.rst
+++ b/doc/latex.rst
@@ -500,7 +500,7 @@ Keys that don't need to be overridden unless in special cases are:
.. hint::
If the key value is set to
-      :code-tex:`r'\\newcommand\\sphinxbackoftitlepage{<Extra material>}\\sphinxmaketitle'`, then ``<Extra material>`` will be
+      :code-tex:`'\\newcommand\\sphinxbackoftitlepage{<Extra material>}\\sphinxmaketitle'`, then ``<Extra material>`` will be
       typeset on back of title page (``'manual'`` docclass only).
From df171a93678d62fec2b95a878566adf4ef9ae406 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 14 May 2025 11:21:36 +0100
Subject: [PATCH 063/466] Bump types-docutils to 0.21.0.20250514 (#13555)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index fc429193a38..59b2dc30c1c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -97,7 +97,7 @@ lint = [
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
"types-defusedxml==0.7.0.20240218",
- "types-docutils==0.21.0.20241128",
+ "types-docutils==0.21.0.20250514",
"types-Pillow==10.2.0.20240822",
"types-Pygments==2.19.0.20250305",
"types-requests==2.32.0.20250328", # align with requests
@@ -165,7 +165,7 @@ type-stubs = [
# align with versions used elsewhere
"types-colorama==0.4.15.20240311",
"types-defusedxml==0.7.0.20240218",
- "types-docutils==0.21.0.20241128",
+ "types-docutils==0.21.0.20250514",
"types-Pillow==10.2.0.20240822",
"types-Pygments==2.19.0.20250305",
"types-requests==2.32.0.20250328",
From 0e3d50cb6d224c7fec30e666fe102b1be7416265 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 14 May 2025 11:26:28 +0100
Subject: [PATCH 064/466] Bump types-pygments to 2.19.0.20250514 (#13556)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 59b2dc30c1c..92f00d3a759 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -99,7 +99,7 @@ lint = [
"types-defusedxml==0.7.0.20240218",
"types-docutils==0.21.0.20250514",
"types-Pillow==10.2.0.20240822",
- "types-Pygments==2.19.0.20250305",
+ "types-Pygments==2.19.0.20250514",
"types-requests==2.32.0.20250328", # align with requests
"types-urllib3==1.26.25.14",
"pyright==1.1.400",
@@ -167,7 +167,7 @@ type-stubs = [
"types-defusedxml==0.7.0.20240218",
"types-docutils==0.21.0.20250514",
"types-Pillow==10.2.0.20240822",
- "types-Pygments==2.19.0.20250305",
+ "types-Pygments==2.19.0.20250514",
"types-requests==2.32.0.20250328",
"types-urllib3==1.26.25.14",
]
From 6210799bf5bb9fb5045aaa14465ebfc9fb1c8102 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Wed, 14 May 2025 13:40:37 +0200
Subject: [PATCH 065/466] Revert "LaTeX: fix the #13525 fix of code-tex markup
in docs"
This reverts commit 05137c25acf99c07badd6de25379fcfd8a6a120c.
Sorry about that.
---
doc/latex.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/latex.rst b/doc/latex.rst
index e8f8285f763..80762b1c2c1 100644
--- a/doc/latex.rst
+++ b/doc/latex.rst
@@ -500,7 +500,7 @@ Keys that don't need to be overridden unless in special cases are:
.. hint::
If the key value is set to
-      :code-tex:`'\\newcommand\\sphinxbackoftitlepage{<Extra material>}\\sphinxmaketitle'`, then ``<Extra material>`` will be
+      :code-tex:`r'\\newcommand\\sphinxbackoftitlepage{<Extra material>}\\sphinxmaketitle'`, then ``<Extra material>`` will be
       typeset on back of title page (``'manual'`` docclass only).
From 3b46823873977c2ed87879509b99714ca8bffa1d Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 15 May 2025 17:35:18 +0100
Subject: [PATCH 066/466] Bump types-requests to 2.32.0.20250515 (#13559)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 92f00d3a759..15520dc1841 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -100,7 +100,7 @@ lint = [
"types-docutils==0.21.0.20250514",
"types-Pillow==10.2.0.20240822",
"types-Pygments==2.19.0.20250514",
- "types-requests==2.32.0.20250328", # align with requests
+ "types-requests==2.32.0.20250515", # align with requests
"types-urllib3==1.26.25.14",
"pyright==1.1.400",
"pytest>=8.0",
@@ -168,7 +168,7 @@ type-stubs = [
"types-docutils==0.21.0.20250514",
"types-Pillow==10.2.0.20240822",
"types-Pygments==2.19.0.20250514",
- "types-requests==2.32.0.20250328",
+ "types-requests==2.32.0.20250515",
"types-urllib3==1.26.25.14",
]
From c4d37057f100862ea58ffcec95de9553263e0acd Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Karel=20Ko=C4=8D=C3=AD?=
Date: Fri, 16 May 2025 05:00:23 +0200
Subject: [PATCH 067/466] Support C domain objects in the table of contents
(#13497)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
---
CHANGES.rst | 1 +
sphinx/domains/c/__init__.py | 28 +++++++++++++++++++++++++++-
2 files changed, 28 insertions(+), 1 deletion(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index fede8b5177b..d26a93871a5 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -19,6 +19,7 @@ Features added
* #13439: linkcheck: Permit warning on every redirect with
``linkcheck_allowed_redirects = {}``.
Patch by Adam Turner.
+* #13497: Support C domain objects in the table of contents.
Bugs fixed
----------
diff --git a/sphinx/domains/c/__init__.py b/sphinx/domains/c/__init__.py
index 6dbbf70ac92..7fa1822e4ac 100644
--- a/sphinx/domains/c/__init__.py
+++ b/sphinx/domains/c/__init__.py
@@ -39,7 +39,7 @@
from docutils.nodes import Element, Node, TextElement, system_message
- from sphinx.addnodes import pending_xref
+ from sphinx.addnodes import desc_signature, pending_xref
from sphinx.application import Sphinx
from sphinx.builders import Builder
from sphinx.domains.c._symbol import LookupKey
@@ -309,6 +309,32 @@ def after_content(self) -> None:
self.env.current_document.c_parent_symbol = self.oldParentSymbol
self.env.ref_context['c:parent_key'] = self.oldParentKey
+ def _object_hierarchy_parts(self, sig_node: desc_signature) -> tuple[str, ...]:
+ last_symbol: Symbol = self.env.current_document.c_last_symbol
+ return tuple(map(str, last_symbol.get_full_nested_name().names))
+
+ def _toc_entry_name(self, sig_node: desc_signature) -> str:
+ if not sig_node.get('_toc_parts'):
+ return ''
+
+ config = self.config
+ objtype = sig_node.parent.get('objtype')
+ if config.add_function_parentheses and (
+ objtype in {'function', 'method'}
+ or (objtype == 'macro' and '(' in sig_node.rawsource)
+ ):
+ parens = '()'
+ else:
+ parens = ''
+ *parents, name = sig_node['_toc_parts']
+ if config.toc_object_entries_show_parents == 'domain':
+ return '::'.join((name + parens,))
+ if config.toc_object_entries_show_parents == 'hide':
+ return name + parens
+ if config.toc_object_entries_show_parents == 'all':
+ return '::'.join([*parents, name + parens])
+ return ''
+
class CMemberObject(CObject):
object_type = 'member'
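Stripped of the docutils plumbing, the new ``_toc_entry_name`` reduces to joining the hierarchy parts according to ``toc_object_entries_show_parents``; since C names carry no module prefix, the ``'domain'`` mode collapses to the bare name. A standalone model (illustrative, not the Sphinx API):

    def toc_entry_name(parts: tuple[str, ...], mode: str, parens: str = '') -> str:
        *parents, name = parts
        if mode in {'domain', 'hide'}:  # no module prefix for C objects
            return name + parens
        if mode == 'all':
            return '::'.join([*parents, name + parens])
        return ''

    print(toc_entry_name(('Outer', 'field'), 'all'))           # Outer::field
    print(toc_entry_name(('Outer', 'my_func'), 'hide', '()'))  # my_func()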
From c76d1bd1372a652eef236bf00f0a0f1ebc9bca7d Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Fri, 16 May 2025 04:00:41 +0100
Subject: [PATCH 068/466] Merge ``SearchLanguage.init()`` into ``__init__()``
(#13562)
---
sphinx/search/__init__.py | 5 +----
sphinx/search/da.py | 3 ++-
sphinx/search/de.py | 3 ++-
sphinx/search/en.py | 3 ++-
sphinx/search/es.py | 3 ++-
sphinx/search/fi.py | 3 ++-
sphinx/search/fr.py | 3 ++-
sphinx/search/hu.py | 3 ++-
sphinx/search/it.py | 3 ++-
sphinx/search/ja.py | 3 ++-
sphinx/search/nl.py | 3 ++-
sphinx/search/no.py | 3 ++-
sphinx/search/pt.py | 3 ++-
sphinx/search/ro.py | 3 ++-
sphinx/search/ru.py | 3 ++-
sphinx/search/sv.py | 3 ++-
sphinx/search/tr.py | 3 ++-
sphinx/search/zh.py | 2 --
18 files changed, 33 insertions(+), 22 deletions(-)
diff --git a/sphinx/search/__init__.py b/sphinx/search/__init__.py
index cd0aa0bbd8f..5563efceb4a 100644
--- a/sphinx/search/__init__.py
+++ b/sphinx/search/__init__.py
@@ -91,11 +91,8 @@ class SearchLanguage:
_word_re = re.compile(r'\w+')
def __init__(self, options: dict[str, str]) -> None:
- self.options = options
- self.init(options)
-
- def init(self, options: dict[str, str]) -> None:
"""Initialize the class with the options the user has given."""
+ self.options = options
def split(self, input: str) -> list[str]:
"""This method splits a sentence into words. Default splitter splits input
diff --git a/sphinx/search/da.py b/sphinx/search/da.py
index a56114bb6ba..b2cb66938fa 100644
--- a/sphinx/search/da.py
+++ b/sphinx/search/da.py
@@ -111,7 +111,8 @@ class SearchDanish(SearchLanguage):
js_stemmer_rawcode = 'danish-stemmer.js'
stopwords = danish_stopwords
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('danish')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/de.py b/sphinx/search/de.py
index 37aa9ec8890..5ed8062d688 100644
--- a/sphinx/search/de.py
+++ b/sphinx/search/de.py
@@ -294,7 +294,8 @@ class SearchGerman(SearchLanguage):
js_stemmer_rawcode = 'german-stemmer.js'
stopwords = german_stopwords
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('german')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/en.py b/sphinx/search/en.py
index 5173dc03fc0..51494a04a4d 100644
--- a/sphinx/search/en.py
+++ b/sphinx/search/en.py
@@ -211,7 +211,8 @@ class SearchEnglish(SearchLanguage):
js_stemmer_code = js_porter_stemmer
stopwords = english_stopwords
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('porter')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/es.py b/sphinx/search/es.py
index 5739c88172a..f4079adfdfa 100644
--- a/sphinx/search/es.py
+++ b/sphinx/search/es.py
@@ -354,7 +354,8 @@ class SearchSpanish(SearchLanguage):
js_stemmer_rawcode = 'spanish-stemmer.js'
stopwords = spanish_stopwords
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('spanish')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/fi.py b/sphinx/search/fi.py
index 24ef7502300..55a01586924 100644
--- a/sphinx/search/fi.py
+++ b/sphinx/search/fi.py
@@ -104,7 +104,8 @@ class SearchFinnish(SearchLanguage):
js_stemmer_rawcode = 'finnish-stemmer.js'
stopwords = finnish_stopwords
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('finnish')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/fr.py b/sphinx/search/fr.py
index 7662737d6e3..d78745c7991 100644
--- a/sphinx/search/fr.py
+++ b/sphinx/search/fr.py
@@ -190,7 +190,8 @@ class SearchFrench(SearchLanguage):
js_stemmer_rawcode = 'french-stemmer.js'
stopwords = french_stopwords
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('french')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/hu.py b/sphinx/search/hu.py
index 5c35b16fc65..7a6464c8e8b 100644
--- a/sphinx/search/hu.py
+++ b/sphinx/search/hu.py
@@ -217,7 +217,8 @@ class SearchHungarian(SearchLanguage):
js_stemmer_rawcode = 'hungarian-stemmer.js'
stopwords = hungarian_stopwords
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('hungarian')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/it.py b/sphinx/search/it.py
index 60a5cf57720..1158e388ed6 100644
--- a/sphinx/search/it.py
+++ b/sphinx/search/it.py
@@ -307,7 +307,8 @@ class SearchItalian(SearchLanguage):
js_stemmer_rawcode = 'italian-stemmer.js'
stopwords = italian_stopwords
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('italian')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/ja.py b/sphinx/search/ja.py
index f855fe4a67d..7045a314459 100644
--- a/sphinx/search/ja.py
+++ b/sphinx/search/ja.py
@@ -523,7 +523,8 @@ class SearchJapanese(SearchLanguage):
lang = 'ja'
language_name = 'Japanese'
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
dotted_path = options.get('type')
if dotted_path is None:
self.splitter = DefaultSplitter(options)
diff --git a/sphinx/search/nl.py b/sphinx/search/nl.py
index 2d2f2b8a8b6..d46b5ee3383 100644
--- a/sphinx/search/nl.py
+++ b/sphinx/search/nl.py
@@ -118,7 +118,8 @@ class SearchDutch(SearchLanguage):
js_stemmer_rawcode = 'dutch-stemmer.js'
stopwords = dutch_stopwords
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('dutch')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/no.py b/sphinx/search/no.py
index dfc7786d46a..93118f83307 100644
--- a/sphinx/search/no.py
+++ b/sphinx/search/no.py
@@ -193,7 +193,8 @@ class SearchNorwegian(SearchLanguage):
js_stemmer_rawcode = 'norwegian-stemmer.js'
stopwords = norwegian_stopwords
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('norwegian')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/pt.py b/sphinx/search/pt.py
index bf9b7a3a2f8..ff45b27bd95 100644
--- a/sphinx/search/pt.py
+++ b/sphinx/search/pt.py
@@ -252,7 +252,8 @@ class SearchPortuguese(SearchLanguage):
js_stemmer_rawcode = 'portuguese-stemmer.js'
stopwords = portuguese_stopwords
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('portuguese')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/ro.py b/sphinx/search/ro.py
index 0c00486319a..e08ce5a09e3 100644
--- a/sphinx/search/ro.py
+++ b/sphinx/search/ro.py
@@ -13,7 +13,8 @@ class SearchRomanian(SearchLanguage):
js_stemmer_rawcode = 'romanian-stemmer.js'
stopwords: set[str] = set()
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('romanian')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/ru.py b/sphinx/search/ru.py
index e93046cba94..bdeff001797 100644
--- a/sphinx/search/ru.py
+++ b/sphinx/search/ru.py
@@ -242,7 +242,8 @@ class SearchRussian(SearchLanguage):
js_stemmer_rawcode = 'russian-stemmer.js'
stopwords = russian_stopwords
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('russian')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/sv.py b/sphinx/search/sv.py
index b4fa1bd06a2..5a796165805 100644
--- a/sphinx/search/sv.py
+++ b/sphinx/search/sv.py
@@ -131,7 +131,8 @@ class SearchSwedish(SearchLanguage):
js_stemmer_rawcode = 'swedish-stemmer.js'
stopwords = swedish_stopwords
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('swedish')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/tr.py b/sphinx/search/tr.py
index b999e1d96d8..82080bf5c61 100644
--- a/sphinx/search/tr.py
+++ b/sphinx/search/tr.py
@@ -13,7 +13,8 @@ class SearchTurkish(SearchLanguage):
js_stemmer_rawcode = 'turkish-stemmer.js'
stopwords: set[str] = set()
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('turkish')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/zh.py b/sphinx/search/zh.py
index 0f7e4dfd5f9..c063631f865 100644
--- a/sphinx/search/zh.py
+++ b/sphinx/search/zh.py
@@ -243,8 +243,6 @@ class SearchChinese(SearchLanguage):
def __init__(self, options: dict[str, str]) -> None:
super().__init__(options)
self.latin_terms: set[str] = set()
-
- def init(self, options: dict[str, str]) -> None:
dict_path = options.get('dict', JIEBA_DEFAULT_DICT)
if dict_path and Path(dict_path).is_file():
jieba_load_userdict(str(dict_path))
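The refactor replaces the bespoke two-phase ``__init__``/``init()`` hook with ordinary cooperative initialisation: subclasses now override ``__init__`` and call ``super()``. A minimal model of the resulting shape (illustrative classes, not the real ones):

    class Base:
        def __init__(self, options: dict[str, str]) -> None:
            self.options = options  # previously set before calling init()

    class Danish(Base):
        def __init__(self, options: dict[str, str]) -> None:
            super().__init__(options)     # replaces the old init() hook
            self.stemmer_name = 'danish'  # per-language setup

    lang = Danish({})
    print(lang.options, lang.stemmer_name)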
From 9ab73b2494a41e008448df267d9c66b280678fba Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sun, 18 May 2025 04:18:37 +0100
Subject: [PATCH 069/466] Update URLs for the Snowball project (#13571)
---
sphinx/search/__init__.py | 2 +-
sphinx/search/da.py | 2 +-
sphinx/search/de.py | 2 +-
sphinx/search/es.py | 2 +-
sphinx/search/fi.py | 2 +-
sphinx/search/fr.py | 2 +-
sphinx/search/hu.py | 2 +-
sphinx/search/it.py | 2 +-
sphinx/search/nl.py | 2 +-
sphinx/search/no.py | 2 +-
sphinx/search/pt.py | 2 +-
sphinx/search/ru.py | 2 +-
sphinx/search/sv.py | 2 +-
13 files changed, 13 insertions(+), 13 deletions(-)
diff --git a/sphinx/search/__init__.py b/sphinx/search/__init__.py
index 5563efceb4a..66875adf5ec 100644
--- a/sphinx/search/__init__.py
+++ b/sphinx/search/__init__.py
@@ -130,7 +130,7 @@ def word_filter(self, word: str) -> bool:
def parse_stop_word(source: str) -> set[str]:
"""Parse snowball style word list like this:
- * https://snowball.tartarus.org/algorithms/finnish/stop.txt
+ * https://snowballstem.org/algorithms/finnish/stop.txt
"""
result: set[str] = set()
for line in source.splitlines():
diff --git a/sphinx/search/da.py b/sphinx/search/da.py
index b2cb66938fa..8be1c6a215e 100644
--- a/sphinx/search/da.py
+++ b/sphinx/search/da.py
@@ -7,7 +7,7 @@
from sphinx.search import SearchLanguage, parse_stop_word
danish_stopwords = parse_stop_word("""
-| source: https://snowball.tartarus.org/algorithms/danish/stop.txt
+| source: https://snowballstem.org/algorithms/danish/stop.txt
og | and
i | in
jeg | I
diff --git a/sphinx/search/de.py b/sphinx/search/de.py
index 5ed8062d688..ac5ac7ee131 100644
--- a/sphinx/search/de.py
+++ b/sphinx/search/de.py
@@ -7,7 +7,7 @@
from sphinx.search import SearchLanguage, parse_stop_word
german_stopwords = parse_stop_word("""
-|source: https://snowball.tartarus.org/algorithms/german/stop.txt
+|source: https://snowballstem.org/algorithms/german/stop.txt
aber | but
alle | all
diff --git a/sphinx/search/es.py b/sphinx/search/es.py
index f4079adfdfa..3cc41f600ac 100644
--- a/sphinx/search/es.py
+++ b/sphinx/search/es.py
@@ -7,7 +7,7 @@
from sphinx.search import SearchLanguage, parse_stop_word
spanish_stopwords = parse_stop_word("""
-|source: https://snowball.tartarus.org/algorithms/spanish/stop.txt
+|source: https://snowballstem.org/algorithms/spanish/stop.txt
de | from, of
la | the, her
que | who, that
diff --git a/sphinx/search/fi.py b/sphinx/search/fi.py
index 55a01586924..c8b048d4fc9 100644
--- a/sphinx/search/fi.py
+++ b/sphinx/search/fi.py
@@ -7,7 +7,7 @@
from sphinx.search import SearchLanguage, parse_stop_word
finnish_stopwords = parse_stop_word("""
-| source: https://snowball.tartarus.org/algorithms/finnish/stop.txt
+| source: https://snowballstem.org/algorithms/finnish/stop.txt
| forms of BE
olla
diff --git a/sphinx/search/fr.py b/sphinx/search/fr.py
index d78745c7991..bbdc56032ff 100644
--- a/sphinx/search/fr.py
+++ b/sphinx/search/fr.py
@@ -7,7 +7,7 @@
from sphinx.search import SearchLanguage, parse_stop_word
french_stopwords = parse_stop_word("""
-| source: https://snowball.tartarus.org/algorithms/french/stop.txt
+| source: https://snowballstem.org/algorithms/french/stop.txt
au | a + le
aux | a + les
avec | with
diff --git a/sphinx/search/hu.py b/sphinx/search/hu.py
index 7a6464c8e8b..4e30ca407ee 100644
--- a/sphinx/search/hu.py
+++ b/sphinx/search/hu.py
@@ -7,7 +7,7 @@
from sphinx.search import SearchLanguage, parse_stop_word
hungarian_stopwords = parse_stop_word("""
-| source: https://snowball.tartarus.org/algorithms/hungarian/stop.txt
+| source: https://snowballstem.org/algorithms/hungarian/stop.txt
| prepared by Anna Tordai
a
ahogy
diff --git a/sphinx/search/it.py b/sphinx/search/it.py
index 1158e388ed6..b42e9699b33 100644
--- a/sphinx/search/it.py
+++ b/sphinx/search/it.py
@@ -7,7 +7,7 @@
from sphinx.search import SearchLanguage, parse_stop_word
italian_stopwords = parse_stop_word("""
-| source: https://snowball.tartarus.org/algorithms/italian/stop.txt
+| source: https://snowballstem.org/algorithms/italian/stop.txt
ad | a (to) before vowel
al | a + il
allo | a + lo
diff --git a/sphinx/search/nl.py b/sphinx/search/nl.py
index d46b5ee3383..39c14c76664 100644
--- a/sphinx/search/nl.py
+++ b/sphinx/search/nl.py
@@ -7,7 +7,7 @@
from sphinx.search import SearchLanguage, parse_stop_word
dutch_stopwords = parse_stop_word("""
-| source: https://snowball.tartarus.org/algorithms/dutch/stop.txt
+| source: https://snowballstem.org/algorithms/dutch/stop.txt
de | the
en | and
van | of, from
diff --git a/sphinx/search/no.py b/sphinx/search/no.py
index 93118f83307..7a21e6728cb 100644
--- a/sphinx/search/no.py
+++ b/sphinx/search/no.py
@@ -7,7 +7,7 @@
from sphinx.search import SearchLanguage, parse_stop_word
norwegian_stopwords = parse_stop_word("""
-| source: https://snowball.tartarus.org/algorithms/norwegian/stop.txt
+| source: https://snowballstem.org/algorithms/norwegian/stop.txt
og | and
i | in
jeg | I
diff --git a/sphinx/search/pt.py b/sphinx/search/pt.py
index ff45b27bd95..82f1858f0de 100644
--- a/sphinx/search/pt.py
+++ b/sphinx/search/pt.py
@@ -7,7 +7,7 @@
from sphinx.search import SearchLanguage, parse_stop_word
portuguese_stopwords = parse_stop_word("""
-| source: https://snowball.tartarus.org/algorithms/portuguese/stop.txt
+| source: https://snowballstem.org/algorithms/portuguese/stop.txt
de | of, from
a | the; to, at; her
o | the; him
diff --git a/sphinx/search/ru.py b/sphinx/search/ru.py
index bdeff001797..aeab09fa624 100644
--- a/sphinx/search/ru.py
+++ b/sphinx/search/ru.py
@@ -7,7 +7,7 @@
from sphinx.search import SearchLanguage, parse_stop_word
russian_stopwords = parse_stop_word("""
-| source: https://snowball.tartarus.org/algorithms/russian/stop.txt
+| source: https://snowballstem.org/algorithms/russian/stop.txt
и | and
в | in/into
во | alternative form
diff --git a/sphinx/search/sv.py b/sphinx/search/sv.py
index 5a796165805..9a8232ef2bf 100644
--- a/sphinx/search/sv.py
+++ b/sphinx/search/sv.py
@@ -7,7 +7,7 @@
from sphinx.search import SearchLanguage, parse_stop_word
swedish_stopwords = parse_stop_word("""
-| source: https://snowball.tartarus.org/algorithms/swedish/stop.txt
+| source: https://snowballstem.org/algorithms/swedish/stop.txt
och | and
det | it, this/that
att | to (with infinitive)
From c30effe714ac79f1556b41fea4a1be80269a3141 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sun, 18 May 2025 04:42:23 +0100
Subject: [PATCH 070/466] Pre-parse stopword lists for HTML search (#13572)
---
.gitattributes | 1 +
sphinx/search/__init__.py | 10 +-
sphinx/search/_stopwords/__init__.py | 0
sphinx/search/_stopwords/da.py | 98 ++++++++
sphinx/search/_stopwords/da.txt | 95 ++++++++
sphinx/search/_stopwords/de.py | 235 ++++++++++++++++++
sphinx/search/_stopwords/de.txt | 278 +++++++++++++++++++++
sphinx/search/_stopwords/en.py | 37 +++
sphinx/search/_stopwords/es.py | 312 ++++++++++++++++++++++++
sphinx/search/_stopwords/es.txt | 338 ++++++++++++++++++++++++++
sphinx/search/_stopwords/fi.py | 233 ++++++++++++++++++
sphinx/search/_stopwords/fi.txt | 88 +++++++
sphinx/search/_stopwords/fr.py | 168 +++++++++++++
sphinx/search/_stopwords/fr.txt | 174 ++++++++++++++
sphinx/search/_stopwords/hu.py | 202 ++++++++++++++++
sphinx/search/_stopwords/hu.txt | 201 ++++++++++++++++
sphinx/search/_stopwords/it.py | 282 ++++++++++++++++++++++
sphinx/search/_stopwords/it.txt | 291 ++++++++++++++++++++++
sphinx/search/_stopwords/nl.py | 105 ++++++++
sphinx/search/_stopwords/nl.txt | 102 ++++++++
sphinx/search/_stopwords/no.py | 176 ++++++++++++++
sphinx/search/_stopwords/no.txt | 177 ++++++++++++++
sphinx/search/_stopwords/pt.py | 207 ++++++++++++++++
sphinx/search/_stopwords/pt.txt | 236 ++++++++++++++++++
sphinx/search/_stopwords/ru.py | 163 +++++++++++++
sphinx/search/_stopwords/ru.txt | 226 +++++++++++++++++
sphinx/search/_stopwords/sv.py | 118 +++++++++
sphinx/search/_stopwords/sv.txt | 115 +++++++++
sphinx/search/da.py | 103 +-------
sphinx/search/de.py | 286 +---------------------
sphinx/search/en.py | 15 +-
sphinx/search/es.py | 346 +--------------------------
sphinx/search/fi.py | 96 +-------
sphinx/search/fr.py | 182 +-------------
sphinx/search/hu.py | 209 +---------------
sphinx/search/it.py | 299 +----------------------
sphinx/search/nl.py | 110 +--------
sphinx/search/no.py | 185 +-------------
sphinx/search/pt.py | 244 +------------------
sphinx/search/ro.py | 2 +-
sphinx/search/ru.py | 234 +-----------------
sphinx/search/sv.py | 123 +---------
sphinx/search/tr.py | 2 +-
sphinx/search/zh.py | 15 +-
44 files changed, 4706 insertions(+), 2413 deletions(-)
create mode 100644 sphinx/search/_stopwords/__init__.py
create mode 100644 sphinx/search/_stopwords/da.py
create mode 100644 sphinx/search/_stopwords/da.txt
create mode 100644 sphinx/search/_stopwords/de.py
create mode 100644 sphinx/search/_stopwords/de.txt
create mode 100644 sphinx/search/_stopwords/en.py
create mode 100644 sphinx/search/_stopwords/es.py
create mode 100644 sphinx/search/_stopwords/es.txt
create mode 100644 sphinx/search/_stopwords/fi.py
create mode 100644 sphinx/search/_stopwords/fi.txt
create mode 100644 sphinx/search/_stopwords/fr.py
create mode 100644 sphinx/search/_stopwords/fr.txt
create mode 100644 sphinx/search/_stopwords/hu.py
create mode 100644 sphinx/search/_stopwords/hu.txt
create mode 100644 sphinx/search/_stopwords/it.py
create mode 100644 sphinx/search/_stopwords/it.txt
create mode 100644 sphinx/search/_stopwords/nl.py
create mode 100644 sphinx/search/_stopwords/nl.txt
create mode 100644 sphinx/search/_stopwords/no.py
create mode 100644 sphinx/search/_stopwords/no.txt
create mode 100644 sphinx/search/_stopwords/pt.py
create mode 100644 sphinx/search/_stopwords/pt.txt
create mode 100644 sphinx/search/_stopwords/ru.py
create mode 100644 sphinx/search/_stopwords/ru.txt
create mode 100644 sphinx/search/_stopwords/sv.py
create mode 100644 sphinx/search/_stopwords/sv.txt
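Each xx.txt source above is paired with a generated xx.py module, which suggests the stop word lists are parsed once, offline, rather than at import time. The generator itself is not part of this patch, so the sketch below only illustrates the idea; all names in it are hypothetical:

from pathlib import Path


def generate_stopword_module(txt_path: Path, constant_name: str) -> str:
    # Parse a snowball-style list ("words | optional comment" per line)
    # and render it as a Python module defining one frozenset constant.
    words: set[str] = set()
    for line in txt_path.read_text(encoding='utf-8').splitlines():
        words.update(line.partition('|')[0].split())
    body = '\n'.join(f'    {word!r},' for word in sorted(words))
    return (
        'from __future__ import annotations\n'
        '\n'
        f'{constant_name} = frozenset({{\n{body}\n}})\n'
    )


# Usage sketch (hypothetical paths and constant name):
#   source = Path('sphinx/search/_stopwords/da.txt')
#   Path('sphinx/search/_stopwords/da.py').write_text(
#       generate_stopword_module(source, 'DANISH_STOPWORDS'), encoding='utf-8')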
diff --git a/.gitattributes b/.gitattributes
index d0f6ad06464..c10128857f4 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -62,4 +62,5 @@ tests/roots/test-pycode/cp_1251_coded.py working-tree-encoding=windows-1251
tests/js/fixtures/**/*.js generated
sphinx/search/minified-js/*.js generated
+sphinx/search/_stopwords/ generated
sphinx/themes/bizstyle/static/css3-mediaqueries.js generated
diff --git a/sphinx/search/__init__.py b/sphinx/search/__init__.py
index 66875adf5ec..1cb05bea0e2 100644
--- a/sphinx/search/__init__.py
+++ b/sphinx/search/__init__.py
@@ -20,7 +20,7 @@
from sphinx.util.index_entries import split_index_msg
if TYPE_CHECKING:
- from collections.abc import Callable, Iterable
+ from collections.abc import Callable, Iterable, Set
from typing import Any, Protocol, TypeVar
from docutils.nodes import Node
@@ -74,7 +74,7 @@ class SearchLanguage:
lang: str = ''
language_name: str = ''
- stopwords: set[str] = set()
+ stopwords: Set[str] = frozenset()
js_splitter_code: str = ''
js_stemmer_rawcode: str = ''
js_stemmer_code = """
@@ -128,9 +128,11 @@ def word_filter(self, word: str) -> bool:
def parse_stop_word(source: str) -> set[str]:
- """Parse snowball style word list like this:
+ """Collect the stopwords from a snowball style word list:
- * https://snowballstem.org/algorithms/finnish/stop.txt
+ .. code:: text
+
+ list of space separated stop words | optional comment
"""
result: set[str] = set()
for line in source.splitlines():
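The remainder of parse_stop_word falls outside this hunk's context, but per the new docstring its behaviour amounts to the following self-contained sketch (a paraphrase, not the verbatim function body):

def parse_stop_word(source: str) -> set[str]:
    # Each line holds space-separated stop words; anything after '|' is
    # a comment. Comment-only and blank lines contribute nothing.
    result: set[str] = set()
    for line in source.splitlines():
        result.update(line.partition('|')[0].split())
    return result


assert parse_stop_word('og | and\n| a comment\nde den\n') == {'og', 'de', 'den'}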
diff --git a/sphinx/search/_stopwords/__init__.py b/sphinx/search/_stopwords/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/sphinx/search/_stopwords/da.py b/sphinx/search/_stopwords/da.py
new file mode 100644
index 00000000000..c31a51c6df2
--- /dev/null
+++ b/sphinx/search/_stopwords/da.py
@@ -0,0 +1,98 @@
+from __future__ import annotations
+
+DANISH_STOPWORDS = frozenset({
+ 'ad',
+ 'af',
+ 'alle',
+ 'alt',
+ 'anden',
+ 'at',
+ 'blev',
+ 'blive',
+ 'bliver',
+ 'da',
+ 'de',
+ 'dem',
+ 'den',
+ 'denne',
+ 'der',
+ 'deres',
+ 'det',
+ 'dette',
+ 'dig',
+ 'din',
+ 'disse',
+ 'dog',
+ 'du',
+ 'efter',
+ 'eller',
+ 'en',
+ 'end',
+ 'er',
+ 'et',
+ 'for',
+ 'fra',
+ 'ham',
+ 'han',
+ 'hans',
+ 'har',
+ 'havde',
+ 'have',
+ 'hende',
+ 'hendes',
+ 'her',
+ 'hos',
+ 'hun',
+ 'hvad',
+ 'hvis',
+ 'hvor',
+ 'i',
+ 'ikke',
+ 'ind',
+ 'jeg',
+ 'jer',
+ 'jo',
+ 'kunne',
+ 'man',
+ 'mange',
+ 'med',
+ 'meget',
+ 'men',
+ 'mig',
+ 'min',
+ 'mine',
+ 'mit',
+ 'mod',
+ 'ned',
+ 'noget',
+ 'nogle',
+ 'nu',
+ 'når',
+ 'og',
+ 'også',
+ 'om',
+ 'op',
+ 'os',
+ 'over',
+ 'på',
+ 'selv',
+ 'sig',
+ 'sin',
+ 'sine',
+ 'sit',
+ 'skal',
+ 'skulle',
+ 'som',
+ 'sådan',
+ 'thi',
+ 'til',
+ 'ud',
+ 'under',
+ 'var',
+ 'vi',
+ 'vil',
+ 'ville',
+ 'vor',
+ 'være',
+ 'været',
+})
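Per the diffstat, each per-language module such as sphinx/search/da.py shrinks to a thin wrapper, presumably importing this pre-built constant instead of calling parse_stop_word at import time. The exact da.py body is not included in this excerpt, so this is only a minimal sketch of the pattern, using the SearchLanguage attributes shown earlier:

from sphinx.search import SearchLanguage
from sphinx.search._stopwords.da import DANISH_STOPWORDS


class SearchDanish(SearchLanguage):
    lang = 'da'
    language_name = 'Danish'
    stopwords = DANISH_STOPWORDS  # already a frozenset; nothing parsed at runtime
    # (JS stemmer fields omitted for brevity)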
diff --git a/sphinx/search/_stopwords/da.txt b/sphinx/search/_stopwords/da.txt
new file mode 100644
index 00000000000..6f2bd01afc2
--- /dev/null
+++ b/sphinx/search/_stopwords/da.txt
@@ -0,0 +1,95 @@
+| source: https://snowballstem.org/algorithms/danish/stop.txt
+og | and
+i | in
+jeg | I
+det | that (dem. pronoun)/it (pers. pronoun)
+at | that (in front of a sentence)/to (with infinitive)
+en | a/an
+den | it (pers. pronoun)/that (dem. pronoun)
+til | to/at/for/until/against/by/of/into, more
+er | present tense of "to be"
+som | who, as
+på | on/upon/in/on/at/to/after/of/with/for, on
+de | they
+med | with/by/in, along
+han | he
+af | of/by/from/off/for/in/with/on, off
+for | at/for/to/from/by/of/ago, in front/before, because
+ikke | not
+der | who/which, there/those
+var | past tense of "to be"
+mig | me/myself
+sig | oneself/himself/herself/itself/themselves
+men | but
+et | a/an/one, one (number), someone/somebody/one
+har | present tense of "to have"
+om | round/about/for/in/a, about/around/down, if
+vi | we
+min | my
+havde | past tense of "to have"
+ham | him
+hun | she
+nu | now
+over | over/above/across/by/beyond/past/on/about, over/past
+da | then, when/as/since
+fra | from/off/since, off, since
+du | you
+ud | out
+sin | his/her/its/one's
+dem | them
+os | us/ourselves
+op | up
+man | you/one
+hans | his
+hvor | where
+eller | or
+hvad | what
+skal | must/shall etc.
+selv | myself/yourself/herself/ourselves etc., even
+her | here
+alle | all/everyone/everybody etc.
+vil | will (verb)
+blev | past tense of "to stay/to remain/to get/to become"
+kunne | could
+ind | in
+når | when
+være | present tense of "to be"
+dog | however/yet/after all
+noget | something
+ville | would
+jo | you know/you see (adv), yes
+deres | their/theirs
+efter | after/behind/according to/for/by/from, later/afterwards
+ned | down
+skulle | should
+denne | this
+end | than
+dette | this
+mit | my/mine
+også | also
+under | under/beneath/below/during, below/underneath
+have | have
+dig | you
+anden | other
+hende | her
+mine | my
+alt | everything
+meget | much/very, plenty of
+sit | his, her, its, one's
+sine | his, her, its, one's
+vor | our
+mod | against
+disse | these
+hvis | if
+din | your/yours
+nogle | some
+hos | by/at
+blive | be/become
+mange | many
+ad | by/through
+bliver | present tense of "to be/to become"
+hendes | her/hers
+været | be
+thi | for (conj)
+jer | you
+sådan | such, like this/like that
diff --git a/sphinx/search/_stopwords/de.py b/sphinx/search/_stopwords/de.py
new file mode 100644
index 00000000000..26ee3322ff3
--- /dev/null
+++ b/sphinx/search/_stopwords/de.py
@@ -0,0 +1,235 @@
+from __future__ import annotations
+
+GERMAN_STOPWORDS = frozenset({
+ 'aber',
+ 'alle',
+ 'allem',
+ 'allen',
+ 'aller',
+ 'alles',
+ 'als',
+ 'also',
+ 'am',
+ 'an',
+ 'ander',
+ 'andere',
+ 'anderem',
+ 'anderen',
+ 'anderer',
+ 'anderes',
+ 'anderm',
+ 'andern',
+ 'anderr',
+ 'anders',
+ 'auch',
+ 'auf',
+ 'aus',
+ 'bei',
+ 'bin',
+ 'bis',
+ 'bist',
+ 'da',
+ 'damit',
+ 'dann',
+ 'das',
+ 'daß',
+ 'dasselbe',
+ 'dazu',
+ 'dein',
+ 'deine',
+ 'deinem',
+ 'deinen',
+ 'deiner',
+ 'deines',
+ 'dem',
+ 'demselben',
+ 'den',
+ 'denn',
+ 'denselben',
+ 'der',
+ 'derer',
+ 'derselbe',
+ 'derselben',
+ 'des',
+ 'desselben',
+ 'dessen',
+ 'dich',
+ 'die',
+ 'dies',
+ 'diese',
+ 'dieselbe',
+ 'dieselben',
+ 'diesem',
+ 'diesen',
+ 'dieser',
+ 'dieses',
+ 'dir',
+ 'doch',
+ 'dort',
+ 'du',
+ 'durch',
+ 'ein',
+ 'eine',
+ 'einem',
+ 'einen',
+ 'einer',
+ 'eines',
+ 'einig',
+ 'einige',
+ 'einigem',
+ 'einigen',
+ 'einiger',
+ 'einiges',
+ 'einmal',
+ 'er',
+ 'es',
+ 'etwas',
+ 'euch',
+ 'euer',
+ 'eure',
+ 'eurem',
+ 'euren',
+ 'eurer',
+ 'eures',
+ 'für',
+ 'gegen',
+ 'gewesen',
+ 'hab',
+ 'habe',
+ 'haben',
+ 'hat',
+ 'hatte',
+ 'hatten',
+ 'hier',
+ 'hin',
+ 'hinter',
+ 'ich',
+ 'ihm',
+ 'ihn',
+ 'ihnen',
+ 'ihr',
+ 'ihre',
+ 'ihrem',
+ 'ihren',
+ 'ihrer',
+ 'ihres',
+ 'im',
+ 'in',
+ 'indem',
+ 'ins',
+ 'ist',
+ 'jede',
+ 'jedem',
+ 'jeden',
+ 'jeder',
+ 'jedes',
+ 'jene',
+ 'jenem',
+ 'jenen',
+ 'jener',
+ 'jenes',
+ 'jetzt',
+ 'kann',
+ 'kein',
+ 'keine',
+ 'keinem',
+ 'keinen',
+ 'keiner',
+ 'keines',
+ 'können',
+ 'könnte',
+ 'machen',
+ 'man',
+ 'manche',
+ 'manchem',
+ 'manchen',
+ 'mancher',
+ 'manches',
+ 'mein',
+ 'meine',
+ 'meinem',
+ 'meinen',
+ 'meiner',
+ 'meines',
+ 'mich',
+ 'mir',
+ 'mit',
+ 'muss',
+ 'musste',
+ 'nach',
+ 'nicht',
+ 'nichts',
+ 'noch',
+ 'nun',
+ 'nur',
+ 'ob',
+ 'oder',
+ 'ohne',
+ 'sehr',
+ 'sein',
+ 'seine',
+ 'seinem',
+ 'seinen',
+ 'seiner',
+ 'seines',
+ 'selbst',
+ 'sich',
+ 'sie',
+ 'sind',
+ 'so',
+ 'solche',
+ 'solchem',
+ 'solchen',
+ 'solcher',
+ 'solches',
+ 'soll',
+ 'sollte',
+ 'sondern',
+ 'sonst',
+ 'um',
+ 'und',
+ 'uns',
+ 'unse',
+ 'unsem',
+ 'unsen',
+ 'unser',
+ 'unses',
+ 'unter',
+ 'viel',
+ 'vom',
+ 'von',
+ 'vor',
+ 'war',
+ 'waren',
+ 'warst',
+ 'was',
+ 'weg',
+ 'weil',
+ 'weiter',
+ 'welche',
+ 'welchem',
+ 'welchen',
+ 'welcher',
+ 'welches',
+ 'wenn',
+ 'werde',
+ 'werden',
+ 'wie',
+ 'wieder',
+ 'will',
+ 'wir',
+ 'wird',
+ 'wirst',
+ 'wo',
+ 'wollen',
+ 'wollte',
+ 'während',
+ 'würde',
+ 'würden',
+ 'zu',
+ 'zum',
+ 'zur',
+ 'zwar',
+ 'zwischen',
+ 'über',
+})
diff --git a/sphinx/search/_stopwords/de.txt b/sphinx/search/_stopwords/de.txt
new file mode 100644
index 00000000000..94c4777bd05
--- /dev/null
+++ b/sphinx/search/_stopwords/de.txt
@@ -0,0 +1,278 @@
+|source: https://snowballstem.org/algorithms/german/stop.txt
+aber | but
+
+alle | all
+allem
+allen
+aller
+alles
+
+als | than, as
+also | so
+am | an + dem
+an | at
+
+ander | other
+andere
+anderem
+anderen
+anderer
+anderes
+anderm
+andern
+anderr
+anders
+
+auch | also
+auf | on
+aus | out of
+bei | by
+bin | am
+bis | until
+bist | art
+da | there
+damit | with it
+dann | then
+
+der | the
+den
+des
+dem
+die
+das
+
+daß | that
+
+derselbe | the same
+derselben
+denselben
+desselben
+demselben
+dieselbe
+dieselben
+dasselbe
+
+dazu | to that
+
+dein | thy
+deine
+deinem
+deinen
+deiner
+deines
+
+denn | because
+
+derer | of those
+dessen | of him
+
+dich | thee
+dir | to thee
+du | thou
+
+dies | this
+diese
+diesem
+diesen
+dieser
+dieses
+
+
+doch | (several meanings)
+dort | (over) there
+
+
+durch | through
+
+ein | a
+eine
+einem
+einen
+einer
+eines
+
+einig | some
+einige
+einigem
+einigen
+einiger
+einiges
+
+einmal | once
+
+er | he
+ihn | him
+ihm | to him
+
+es | it
+etwas | something
+
+euer | your
+eure
+eurem
+euren
+eurer
+eures
+
+für | for
+gegen | towards
+gewesen | p.p. of sein
+hab | have
+habe | have
+haben | have
+hat | has
+hatte | had
+hatten | had
+hier | here
+hin | there
+hinter | behind
+
+ich | I
+mich | me
+mir | to me
+
+
+ihr | you, to her
+ihre
+ihrem
+ihren
+ihrer
+ihres
+euch | to you
+
+im | in + dem
+in | in
+indem | while
+ins | in + das
+ist | is
+
+jede | each, every
+jedem
+jeden
+jeder
+jedes
+
+jene | that
+jenem
+jenen
+jener
+jenes
+
+jetzt | now
+kann | can
+
+kein | no
+keine
+keinem
+keinen
+keiner
+keines
+
+können | can
+könnte | could
+machen | do
+man | one
+
+manche | some, many a
+manchem
+manchen
+mancher
+manches
+
+mein | my
+meine
+meinem
+meinen
+meiner
+meines
+
+mit | with
+muss | must
+musste | had to
+nach | to(wards)
+nicht | not
+nichts | nothing
+noch | still, yet
+nun | now
+nur | only
+ob | whether
+oder | or
+ohne | without
+sehr | very
+
+sein | his
+seine
+seinem
+seinen
+seiner
+seines
+
+selbst | self
+sich | herself
+
+sie | they, she
+ihnen | to them
+
+sind | are
+so | so
+
+solche | such
+solchem
+solchen
+solcher
+solches
+
+soll | shall
+sollte | should
+sondern | but
+sonst | else
+über | over
+um | about, around
+und | and
+
+uns | us
+unse
+unsem
+unsen
+unser
+unses
+
+unter | under
+viel | much
+vom | von + dem
+von | from
+vor | before
+während | while
+war | was
+waren | were
+warst | wast
+was | what
+weg | away, off
+weil | because
+weiter | further
+
+welche | which
+welchem
+welchen
+welcher
+welches
+
+wenn | when
+werde | will
+werden | will
+wie | how
+wieder | again
+will | want
+wir | we
+wird | will
+wirst | willst
+wo | where
+wollen | want
+wollte | wanted
+würde | would
+würden | would
+zu | to
+zum | zu + dem
+zur | zu + der
+zwar | indeed
+zwischen | between
diff --git a/sphinx/search/_stopwords/en.py b/sphinx/search/_stopwords/en.py
new file mode 100644
index 00000000000..01bac4cf14e
--- /dev/null
+++ b/sphinx/search/_stopwords/en.py
@@ -0,0 +1,37 @@
+from __future__ import annotations
+
+ENGLISH_STOPWORDS = frozenset({
+ 'a',
+ 'and',
+ 'are',
+ 'as',
+ 'at',
+ 'be',
+ 'but',
+ 'by',
+ 'for',
+ 'if',
+ 'in',
+ 'into',
+ 'is',
+ 'it',
+ 'near',
+ 'no',
+ 'not',
+ 'of',
+ 'on',
+ 'or',
+ 'such',
+ 'that',
+ 'the',
+ 'their',
+ 'then',
+ 'there',
+ 'these',
+ 'they',
+ 'this',
+ 'to',
+ 'was',
+ 'will',
+ 'with',
+})
diff --git a/sphinx/search/_stopwords/es.py b/sphinx/search/_stopwords/es.py
new file mode 100644
index 00000000000..d70b317d032
--- /dev/null
+++ b/sphinx/search/_stopwords/es.py
@@ -0,0 +1,312 @@
+from __future__ import annotations
+
+SPANISH_STOPWORDS = frozenset({
+ 'a',
+ 'al',
+ 'algo',
+ 'algunas',
+ 'algunos',
+ 'ante',
+ 'antes',
+ 'como',
+ 'con',
+ 'contra',
+ 'cual',
+ 'cuando',
+ 'de',
+ 'del',
+ 'desde',
+ 'donde',
+ 'durante',
+ 'e',
+ 'el',
+ 'ella',
+ 'ellas',
+ 'ellos',
+ 'en',
+ 'entre',
+ 'era',
+ 'erais',
+ 'eran',
+ 'eras',
+ 'eres',
+ 'es',
+ 'esa',
+ 'esas',
+ 'ese',
+ 'eso',
+ 'esos',
+ 'esta',
+ 'estaba',
+ 'estabais',
+ 'estaban',
+ 'estabas',
+ 'estad',
+ 'estada',
+ 'estadas',
+ 'estado',
+ 'estados',
+ 'estamos',
+ 'estando',
+ 'estar',
+ 'estaremos',
+ 'estará',
+ 'estarán',
+ 'estarás',
+ 'estaré',
+ 'estaréis',
+ 'estaría',
+ 'estaríais',
+ 'estaríamos',
+ 'estarían',
+ 'estarías',
+ 'estas',
+ 'este',
+ 'estemos',
+ 'esto',
+ 'estos',
+ 'estoy',
+ 'estuve',
+ 'estuviera',
+ 'estuvierais',
+ 'estuvieran',
+ 'estuvieras',
+ 'estuvieron',
+ 'estuviese',
+ 'estuvieseis',
+ 'estuviesen',
+ 'estuvieses',
+ 'estuvimos',
+ 'estuviste',
+ 'estuvisteis',
+ 'estuviéramos',
+ 'estuviésemos',
+ 'estuvo',
+ 'está',
+ 'estábamos',
+ 'estáis',
+ 'están',
+ 'estás',
+ 'esté',
+ 'estéis',
+ 'estén',
+ 'estés',
+ 'fue',
+ 'fuera',
+ 'fuerais',
+ 'fueran',
+ 'fueras',
+ 'fueron',
+ 'fuese',
+ 'fueseis',
+ 'fuesen',
+ 'fueses',
+ 'fui',
+ 'fuimos',
+ 'fuiste',
+ 'fuisteis',
+ 'fuéramos',
+ 'fuésemos',
+ 'ha',
+ 'habida',
+ 'habidas',
+ 'habido',
+ 'habidos',
+ 'habiendo',
+ 'habremos',
+ 'habrá',
+ 'habrán',
+ 'habrás',
+ 'habré',
+ 'habréis',
+ 'habría',
+ 'habríais',
+ 'habríamos',
+ 'habrían',
+ 'habrías',
+ 'habéis',
+ 'había',
+ 'habíais',
+ 'habíamos',
+ 'habían',
+ 'habías',
+ 'han',
+ 'has',
+ 'hasta',
+ 'hay',
+ 'haya',
+ 'hayamos',
+ 'hayan',
+ 'hayas',
+ 'hayáis',
+ 'he',
+ 'hemos',
+ 'hube',
+ 'hubiera',
+ 'hubierais',
+ 'hubieran',
+ 'hubieras',
+ 'hubieron',
+ 'hubiese',
+ 'hubieseis',
+ 'hubiesen',
+ 'hubieses',
+ 'hubimos',
+ 'hubiste',
+ 'hubisteis',
+ 'hubiéramos',
+ 'hubiésemos',
+ 'hubo',
+ 'la',
+ 'las',
+ 'le',
+ 'les',
+ 'lo',
+ 'los',
+ 'me',
+ 'mi',
+ 'mis',
+ 'mucho',
+ 'muchos',
+ 'muy',
+ 'más',
+ 'mí',
+ 'mía',
+ 'mías',
+ 'mío',
+ 'míos',
+ 'nada',
+ 'ni',
+ 'no',
+ 'nos',
+ 'nosotras',
+ 'nosotros',
+ 'nuestra',
+ 'nuestras',
+ 'nuestro',
+ 'nuestros',
+ 'o',
+ 'os',
+ 'otra',
+ 'otras',
+ 'otro',
+ 'otros',
+ 'para',
+ 'pero',
+ 'poco',
+ 'por',
+ 'porque',
+ 'que',
+ 'quien',
+ 'quienes',
+ 'qué',
+ 'se',
+ 'sea',
+ 'seamos',
+ 'sean',
+ 'seas',
+ 'seremos',
+ 'será',
+ 'serán',
+ 'serás',
+ 'seré',
+ 'seréis',
+ 'sería',
+ 'seríais',
+ 'seríamos',
+ 'serían',
+ 'serías',
+ 'seáis',
+ 'sido',
+ 'siendo',
+ 'sin',
+ 'sobre',
+ 'sois',
+ 'somos',
+ 'son',
+ 'soy',
+ 'su',
+ 'sus',
+ 'suya',
+ 'suyas',
+ 'suyo',
+ 'suyos',
+ 'sí',
+ 'también',
+ 'tanto',
+ 'te',
+ 'tendremos',
+ 'tendrá',
+ 'tendrán',
+ 'tendrás',
+ 'tendré',
+ 'tendréis',
+ 'tendría',
+ 'tendríais',
+ 'tendríamos',
+ 'tendrían',
+ 'tendrías',
+ 'tened',
+ 'tenemos',
+ 'tenga',
+ 'tengamos',
+ 'tengan',
+ 'tengas',
+ 'tengo',
+ 'tengáis',
+ 'tenida',
+ 'tenidas',
+ 'tenido',
+ 'tenidos',
+ 'teniendo',
+ 'tenéis',
+ 'tenía',
+ 'teníais',
+ 'teníamos',
+ 'tenían',
+ 'tenías',
+ 'ti',
+ 'tiene',
+ 'tienen',
+ 'tienes',
+ 'todo',
+ 'todos',
+ 'tu',
+ 'tus',
+ 'tuve',
+ 'tuviera',
+ 'tuvierais',
+ 'tuvieran',
+ 'tuvieras',
+ 'tuvieron',
+ 'tuviese',
+ 'tuvieseis',
+ 'tuviesen',
+ 'tuvieses',
+ 'tuvimos',
+ 'tuviste',
+ 'tuvisteis',
+ 'tuviéramos',
+ 'tuviésemos',
+ 'tuvo',
+ 'tuya',
+ 'tuyas',
+ 'tuyo',
+ 'tuyos',
+ 'tú',
+ 'un',
+ 'una',
+ 'uno',
+ 'unos',
+ 'vosotras',
+ 'vosotros',
+ 'vuestra',
+ 'vuestras',
+ 'vuestro',
+ 'vuestros',
+ 'y',
+ 'ya',
+ 'yo',
+ 'él',
+ 'éramos',
+})
diff --git a/sphinx/search/_stopwords/es.txt b/sphinx/search/_stopwords/es.txt
new file mode 100644
index 00000000000..d7047b93164
--- /dev/null
+++ b/sphinx/search/_stopwords/es.txt
@@ -0,0 +1,338 @@
+|source: https://snowballstem.org/algorithms/spanish/stop.txt
+de | from, of
+la | the, her
+que | who, that
+el | the
+en | in
+y | and
+a | to
+los | the, them
+del | de + el
+se | himself, from him etc
+las | the, them
+por | for, by, etc
+un | a
+para | for
+con | with
+no | no
+una | a
+su | his, her
+al | a + el
+ | es from SER
+lo | him
+como | how
+más | more
+pero | pero
+sus | su plural
+le | to him, her
+ya | already
+o | or
+ | fue from SER
+este | this
+ | ha from HABER
+sí | himself etc
+porque | because
+esta | this
+ | son from SER
+entre | between
+ | está from ESTAR
+cuando | when
+muy | very
+sin | without
+sobre | on
+ | ser from SER
+ | tiene from TENER
+también | also
+me | me
+hasta | until
+hay | there is/are
+donde | where
+ | han from HABER
+quien | whom, that
+ | están from ESTAR
+ | estado from ESTAR
+desde | from
+todo | all
+nos | us
+durante | during
+ | estados from ESTAR
+todos | all
+uno | a
+les | to them
+ni | nor
+contra | against
+otros | other
+ | fueron from SER
+ese | that
+eso | that
+ | había from HABER
+ante | before
+ellos | they
+e | and (variant of y)
+esto | this
+mí | me
+antes | before
+algunos | some
+qué | what?
+unos | a
+yo | I
+otro | other
+otras | other
+otra | other
+él | he
+tanto | so much, many
+esa | that
+estos | these
+mucho | much, many
+quienes | who
+nada | nothing
+muchos | many
+cual | who
+ | sea from SER
+poco | few
+ella | she
+estar | to be
+ | haber from HABER
+estas | these
+ | estaba from ESTAR
+ | estamos from ESTAR
+algunas | some
+algo | something
+nosotros | we
+
+ | other forms
+
+mi | me
+mis | mi plural
+tú | thou
+te | thee
+ti | thee
+tu | thy
+tus | tu plural
+ellas | they
+nosotras | we
+vosotros | you
+vosotras | you
+os | you
+mío | mine
+mía |
+míos |
+mías |
+tuyo | thine
+tuya |
+tuyos |
+tuyas |
+suyo | his, hers, theirs
+suya |
+suyos |
+suyas |
+nuestro | ours
+nuestra |
+nuestros |
+nuestras |
+vuestro | yours
+vuestra |
+vuestros |
+vuestras |
+esos | those
+esas | those
+
+ | forms of estar, to be (not including the infinitive):
+estoy
+estás
+está
+estamos
+estáis
+están
+esté
+estés
+estemos
+estéis
+estén
+estaré
+estarás
+estará
+estaremos
+estaréis
+estarán
+estaría
+estarías
+estaríamos
+estaríais
+estarían
+estaba
+estabas
+estábamos
+estabais
+estaban
+estuve
+estuviste
+estuvo
+estuvimos
+estuvisteis
+estuvieron
+estuviera
+estuvieras
+estuviéramos
+estuvierais
+estuvieran
+estuviese
+estuvieses
+estuviésemos
+estuvieseis
+estuviesen
+estando
+estado
+estada
+estados
+estadas
+estad
+
+ | forms of haber, to have (not including the infinitive):
+he
+has
+ha
+hemos
+habéis
+han
+haya
+hayas
+hayamos
+hayáis
+hayan
+habré
+habrás
+habrá
+habremos
+habréis
+habrán
+habría
+habrías
+habríamos
+habríais
+habrían
+había
+habías
+habíamos
+habíais
+habían
+hube
+hubiste
+hubo
+hubimos
+hubisteis
+hubieron
+hubiera
+hubieras
+hubiéramos
+hubierais
+hubieran
+hubiese
+hubieses
+hubiésemos
+hubieseis
+hubiesen
+habiendo
+habido
+habida
+habidos
+habidas
+
+ | forms of ser, to be (not including the infinitive):
+soy
+eres
+es
+somos
+sois
+son
+sea
+seas
+seamos
+seáis
+sean
+seré
+serás
+será
+seremos
+seréis
+serán
+sería
+serías
+seríamos
+seríais
+serían
+era
+eras
+éramos
+erais
+eran
+fui
+fuiste
+fue
+fuimos
+fuisteis
+fueron
+fuera
+fueras
+fuéramos
+fuerais
+fueran
+fuese
+fueses
+fuésemos
+fueseis
+fuesen
+siendo
+sido
+ | sed also means 'thirst'
+
+ | forms of tener, to have (not including the infinitive):
+tengo
+tienes
+tiene
+tenemos
+tenéis
+tienen
+tenga
+tengas
+tengamos
+tengáis
+tengan
+tendré
+tendrás
+tendrá
+tendremos
+tendréis
+tendrán
+tendría
+tendrías
+tendríamos
+tendríais
+tendrían
+tenía
+tenías
+teníamos
+teníais
+tenían
+tuve
+tuviste
+tuvo
+tuvimos
+tuvisteis
+tuvieron
+tuviera
+tuvieras
+tuviéramos
+tuvierais
+tuvieran
+tuviese
+tuvieses
+tuviésemos
+tuvieseis
+tuviesen
+teniendo
+tenido
+tenida
+tenidos
+tenidas
+tened
diff --git a/sphinx/search/_stopwords/fi.py b/sphinx/search/_stopwords/fi.py
new file mode 100644
index 00000000000..d7586cba227
--- /dev/null
+++ b/sphinx/search/_stopwords/fi.py
@@ -0,0 +1,233 @@
+from __future__ import annotations
+
+FINNISH_STOPWORDS = frozenset({
+ 'ei',
+ 'eivät',
+ 'emme',
+ 'en',
+ 'et',
+ 'ette',
+ 'että',
+ 'he',
+ 'heidän',
+ 'heidät',
+ 'heihin',
+ 'heille',
+ 'heillä',
+ 'heiltä',
+ 'heissä',
+ 'heistä',
+ 'heitä',
+ 'hän',
+ 'häneen',
+ 'hänelle',
+ 'hänellä',
+ 'häneltä',
+ 'hänen',
+ 'hänessä',
+ 'hänestä',
+ 'hänet',
+ 'häntä',
+ 'itse',
+ 'ja',
+ 'johon',
+ 'joiden',
+ 'joihin',
+ 'joiksi',
+ 'joilla',
+ 'joille',
+ 'joilta',
+ 'joina',
+ 'joissa',
+ 'joista',
+ 'joita',
+ 'joka',
+ 'joksi',
+ 'jolla',
+ 'jolle',
+ 'jolta',
+ 'jona',
+ 'jonka',
+ 'jos',
+ 'jossa',
+ 'josta',
+ 'jota',
+ 'jotka',
+ 'kanssa',
+ 'keiden',
+ 'keihin',
+ 'keiksi',
+ 'keille',
+ 'keillä',
+ 'keiltä',
+ 'keinä',
+ 'keissä',
+ 'keistä',
+ 'keitä',
+ 'keneen',
+ 'keneksi',
+ 'kenelle',
+ 'kenellä',
+ 'keneltä',
+ 'kenen',
+ 'kenenä',
+ 'kenessä',
+ 'kenestä',
+ 'kenet',
+ 'ketkä',
+ 'ketä',
+ 'koska',
+ 'kuin',
+ 'kuka',
+ 'kun',
+ 'me',
+ 'meidän',
+ 'meidät',
+ 'meihin',
+ 'meille',
+ 'meillä',
+ 'meiltä',
+ 'meissä',
+ 'meistä',
+ 'meitä',
+ 'mihin',
+ 'miksi',
+ 'mikä',
+ 'mille',
+ 'millä',
+ 'miltä',
+ 'minkä',
+ 'minua',
+ 'minulla',
+ 'minulle',
+ 'minulta',
+ 'minun',
+ 'minussa',
+ 'minusta',
+ 'minut',
+ 'minuun',
+ 'minä',
+ 'missä',
+ 'mistä',
+ 'mitkä',
+ 'mitä',
+ 'mukaan',
+ 'mutta',
+ 'ne',
+ 'niiden',
+ 'niihin',
+ 'niiksi',
+ 'niille',
+ 'niillä',
+ 'niiltä',
+ 'niin',
+ 'niinä',
+ 'niissä',
+ 'niistä',
+ 'niitä',
+ 'noiden',
+ 'noihin',
+ 'noiksi',
+ 'noilla',
+ 'noille',
+ 'noilta',
+ 'noin',
+ 'noina',
+ 'noissa',
+ 'noista',
+ 'noita',
+ 'nuo',
+ 'nyt',
+ 'näiden',
+ 'näihin',
+ 'näiksi',
+ 'näille',
+ 'näillä',
+ 'näiltä',
+ 'näinä',
+ 'näissä',
+ 'näistä',
+ 'näitä',
+ 'nämä',
+ 'ole',
+ 'olemme',
+ 'olen',
+ 'olet',
+ 'olette',
+ 'oli',
+ 'olimme',
+ 'olin',
+ 'olisi',
+ 'olisimme',
+ 'olisin',
+ 'olisit',
+ 'olisitte',
+ 'olisivat',
+ 'olit',
+ 'olitte',
+ 'olivat',
+ 'olla',
+ 'olleet',
+ 'ollut',
+ 'on',
+ 'ovat',
+ 'poikki',
+ 'se',
+ 'sekä',
+ 'sen',
+ 'siihen',
+ 'siinä',
+ 'siitä',
+ 'siksi',
+ 'sille',
+ 'sillä',
+ 'siltä',
+ 'sinua',
+ 'sinulla',
+ 'sinulle',
+ 'sinulta',
+ 'sinun',
+ 'sinussa',
+ 'sinusta',
+ 'sinut',
+ 'sinuun',
+ 'sinä',
+ 'sitä',
+ 'tai',
+ 'te',
+ 'teidän',
+ 'teidät',
+ 'teihin',
+ 'teille',
+ 'teillä',
+ 'teiltä',
+ 'teissä',
+ 'teistä',
+ 'teitä',
+ 'tuo',
+ 'tuohon',
+ 'tuoksi',
+ 'tuolla',
+ 'tuolle',
+ 'tuolta',
+ 'tuon',
+ 'tuona',
+ 'tuossa',
+ 'tuosta',
+ 'tuota',
+ 'tähän',
+ 'täksi',
+ 'tälle',
+ 'tällä',
+ 'tältä',
+ 'tämä',
+ 'tämän',
+ 'tänä',
+ 'tässä',
+ 'tästä',
+ 'tätä',
+ 'vaan',
+ 'vai',
+ 'vaikka',
+ 'yli',
+})
diff --git a/sphinx/search/_stopwords/fi.txt b/sphinx/search/_stopwords/fi.txt
new file mode 100644
index 00000000000..9aff8a79929
--- /dev/null
+++ b/sphinx/search/_stopwords/fi.txt
@@ -0,0 +1,88 @@
+| source: https://snowballstem.org/algorithms/finnish/stop.txt
+| forms of BE
+
+olla
+olen
+olet
+on
+olemme
+olette
+ovat
+ole | negative form
+
+oli
+olisi
+olisit
+olisin
+olisimme
+olisitte
+olisivat
+olit
+olin
+olimme
+olitte
+olivat
+ollut
+olleet
+
+en | negation
+et
+ei
+emme
+ette
+eivät
+
+|Nom Gen Acc Part Iness Elat Illat Adess Ablat Allat Ess Trans
+minä minun minut minua minussa minusta minuun minulla minulta minulle | I
+sinä sinun sinut sinua sinussa sinusta sinuun sinulla sinulta sinulle | you
+hän hänen hänet häntä hänessä hänestä häneen hänellä häneltä hänelle | he she
+me meidän meidät meitä meissä meistä meihin meillä meiltä meille | we
+te teidän teidät teitä teissä teistä teihin teillä teiltä teille | you
+he heidän heidät heitä heissä heistä heihin heillä heiltä heille | they
+
+tämä tämän tätä tässä tästä tähän tällä tältä tälle tänä täksi | this
+tuo tuon tuota tuossa tuosta tuohon tuolla tuolta tuolle tuona tuoksi | that
+se sen sitä siinä siitä siihen sillä siltä sille sinä siksi | it
+nämä näiden näitä näissä näistä näihin näillä näiltä näille näinä näiksi | these
+nuo noiden noita noissa noista noihin noilla noilta noille noina noiksi | those
+ne niiden niitä niissä niistä niihin niillä niiltä niille niinä niiksi | they
+
+kuka kenen kenet ketä kenessä kenestä keneen kenellä keneltä kenelle kenenä keneksi| who
+ketkä keiden ketkä keitä keissä keistä keihin keillä keiltä keille keinä keiksi | (pl)
+mikä minkä minkä mitä missä mistä mihin millä miltä mille minä miksi | which what
+mitkä | (pl)
+
+joka jonka jota jossa josta johon jolla jolta jolle jona joksi | who which
+jotka joiden joita joissa joista joihin joilla joilta joille joina joiksi | (pl)
+
+| conjunctions
+
+että | that
+ja | and
+jos | if
+koska | because
+kuin | than
+mutta | but
+niin | so
+sekä | and
+sillä | for
+tai | or
+vaan | but
+vai | or
+vaikka | although
+
+
+| prepositions
+
+kanssa | with
+mukaan | according to
+noin | about
+poikki | across
+yli | over, across
+
+| other
+
+kun | when
+niin | so
+nyt | now
+itse | self
diff --git a/sphinx/search/_stopwords/fr.py b/sphinx/search/_stopwords/fr.py
new file mode 100644
index 00000000000..7dfd86d7445
--- /dev/null
+++ b/sphinx/search/_stopwords/fr.py
@@ -0,0 +1,168 @@
+from __future__ import annotations
+
+FRENCH_STOPWORDS = frozenset({
+ 'ai',
+ 'aie',
+ 'aient',
+ 'aies',
+ 'ait',
+ 'as',
+ 'au',
+ 'aura',
+ 'aurai',
+ 'auraient',
+ 'aurais',
+ 'aurait',
+ 'auras',
+ 'aurez',
+ 'auriez',
+ 'aurions',
+ 'aurons',
+ 'auront',
+ 'aux',
+ 'avaient',
+ 'avais',
+ 'avait',
+ 'avec',
+ 'avez',
+ 'aviez',
+ 'avions',
+ 'avons',
+ 'ayant',
+ 'ayez',
+ 'ayons',
+ 'c',
+ 'ce',
+ 'ceci',
+ 'cela',
+ 'celà',
+ 'ces',
+ 'cet',
+ 'cette',
+ 'd',
+ 'dans',
+ 'de',
+ 'des',
+ 'du',
+ 'elle',
+ 'en',
+ 'es',
+ 'est',
+ 'et',
+ 'eu',
+ 'eue',
+ 'eues',
+ 'eurent',
+ 'eus',
+ 'eusse',
+ 'eussent',
+ 'eusses',
+ 'eussiez',
+ 'eussions',
+ 'eut',
+ 'eux',
+ 'eûmes',
+ 'eût',
+ 'eûtes',
+ 'furent',
+ 'fus',
+ 'fusse',
+ 'fussent',
+ 'fusses',
+ 'fussiez',
+ 'fussions',
+ 'fut',
+ 'fûmes',
+ 'fût',
+ 'fûtes',
+ 'ici',
+ 'il',
+ 'ils',
+ 'j',
+ 'je',
+ 'l',
+ 'la',
+ 'le',
+ 'les',
+ 'leur',
+ 'leurs',
+ 'lui',
+ 'm',
+ 'ma',
+ 'mais',
+ 'me',
+ 'mes',
+ 'moi',
+ 'mon',
+ 'même',
+ 'n',
+ 'ne',
+ 'nos',
+ 'notre',
+ 'nous',
+ 'on',
+ 'ont',
+ 'ou',
+ 'par',
+ 'pas',
+ 'pour',
+ 'qu',
+ 'que',
+ 'quel',
+ 'quelle',
+ 'quelles',
+ 'quels',
+ 'qui',
+ 's',
+ 'sa',
+ 'sans',
+ 'se',
+ 'sera',
+ 'serai',
+ 'seraient',
+ 'serais',
+ 'serait',
+ 'seras',
+ 'serez',
+ 'seriez',
+ 'serions',
+ 'serons',
+ 'seront',
+ 'ses',
+ 'soi',
+ 'soient',
+ 'sois',
+ 'soit',
+ 'sommes',
+ 'son',
+ 'sont',
+ 'soyez',
+ 'soyons',
+ 'suis',
+ 'sur',
+ 't',
+ 'ta',
+ 'te',
+ 'tes',
+ 'toi',
+ 'ton',
+ 'tu',
+ 'un',
+ 'une',
+ 'vos',
+ 'votre',
+ 'vous',
+ 'y',
+ 'à',
+ 'étaient',
+ 'étais',
+ 'était',
+ 'étant',
+ 'étiez',
+ 'étions',
+ 'été',
+ 'étée',
+ 'étées',
+ 'étés',
+ 'êtes',
+})
diff --git a/sphinx/search/_stopwords/fr.txt b/sphinx/search/_stopwords/fr.txt
new file mode 100644
index 00000000000..7839ab57c86
--- /dev/null
+++ b/sphinx/search/_stopwords/fr.txt
@@ -0,0 +1,174 @@
+| source: https://snowballstem.org/algorithms/french/stop.txt
+au | a + le
+aux | a + les
+avec | with
+ce | this
+ces | these
+dans | with
+de | of
+des | de + les
+du | de + le
+elle | she
+en | `of them' etc
+et | and
+eux | them
+il | he
+je | I
+la | the
+le | the
+leur | their
+lui | him
+ma | my (fem)
+mais | but
+me | me
+même | same; as in moi-même (myself) etc
+mes | me (pl)
+moi | me
+mon | my (masc)
+ne | not
+nos | our (pl)
+notre | our
+nous | we
+on | one
+ou | where
+par | by
+pas | not
+pour | for
+qu | que before vowel
+que | that
+qui | who
+sa | his, her (fem)
+se | oneself
+ses | his (pl)
+son | his, her (masc)
+sur | on
+ta | thy (fem)
+te | thee
+tes | thy (pl)
+toi | thee
+ton | thy (masc)
+tu | thou
+un | a
+une | a
+vos | your (pl)
+votre | your
+vous | you
+
+ | single letter forms
+
+c | c'
+d | d'
+j | j'
+l | l'
+à | to, at
+m | m'
+n | n'
+s | s'
+t | t'
+y | there
+
+ | forms of être (not including the infinitive):
+été
+étée
+étées
+étés
+étant
+suis
+es
+est
+sommes
+êtes
+sont
+serai
+seras
+sera
+serons
+serez
+seront
+serais
+serait
+serions
+seriez
+seraient
+étais
+était
+étions
+étiez
+étaient
+fus
+fut
+fûmes
+fûtes
+furent
+sois
+soit
+soyons
+soyez
+soient
+fusse
+fusses
+fût
+fussions
+fussiez
+fussent
+
+ | forms of avoir (not including the infinitive):
+ayant
+eu
+eue
+eues
+eus
+ai
+as
+avons
+avez
+ont
+aurai
+auras
+aura
+aurons
+aurez
+auront
+aurais
+aurait
+aurions
+auriez
+auraient
+avais
+avait
+avions
+aviez
+avaient
+eut
+eûmes
+eûtes
+eurent
+aie
+aies
+ait
+ayons
+ayez
+aient
+eusse
+eusses
+eût
+eussions
+eussiez
+eussent
+
+ | Later additions (from Jean-Christophe Deschamps)
+ceci | this
+cela | that (added 11 Apr 2012. Omission reported by Adrien Grand)
+celà | that (incorrect, though common)
+cet | this
+cette | this
+ici | here
+ils | they
+les | the (pl)
+leurs | their (pl)
+quel | which
+quels | which
+quelle | which
+quelles | which
+sans | without
+soi | oneself
diff --git a/sphinx/search/_stopwords/hu.py b/sphinx/search/_stopwords/hu.py
new file mode 100644
index 00000000000..83bee011b0f
--- /dev/null
+++ b/sphinx/search/_stopwords/hu.py
@@ -0,0 +1,202 @@
+from __future__ import annotations
+
+HUNGARIAN_STOPWORDS = frozenset({
+ 'a',
+ 'abban',
+ 'ahhoz',
+ 'ahogy',
+ 'ahol',
+ 'aki',
+ 'akik',
+ 'akkor',
+ 'alatt',
+ 'amely',
+ 'amelyek',
+ 'amelyekben',
+ 'amelyeket',
+ 'amelyet',
+ 'amelynek',
+ 'ami',
+ 'amikor',
+ 'amit',
+ 'amolyan',
+ 'amíg',
+ 'annak',
+ 'arra',
+ 'arról',
+ 'az',
+ 'azok',
+ 'azon',
+ 'azonban',
+ 'azt',
+ 'aztán',
+ 'azután',
+ 'azzal',
+ 'azért',
+ 'be',
+ 'belül',
+ 'benne',
+ 'bár',
+ 'cikk',
+ 'cikkek',
+ 'cikkeket',
+ 'csak',
+ 'de',
+ 'e',
+ 'ebben',
+ 'eddig',
+ 'egy',
+ 'egyes',
+ 'egyetlen',
+ 'egyik',
+ 'egyre',
+ 'egyéb',
+ 'egész',
+ 'ehhez',
+ 'ekkor',
+ 'el',
+ 'ellen',
+ 'első',
+ 'elég',
+ 'elő',
+ 'először',
+ 'előtt',
+ 'emilyen',
+ 'ennek',
+ 'erre',
+ 'ez',
+ 'ezek',
+ 'ezen',
+ 'ezt',
+ 'ezzel',
+ 'ezért',
+ 'fel',
+ 'felé',
+ 'hanem',
+ 'hiszen',
+ 'hogy',
+ 'hogyan',
+ 'igen',
+ 'ill',
+ 'ill.',
+ 'illetve',
+ 'ilyen',
+ 'ilyenkor',
+ 'ismét',
+ 'ison',
+ 'itt',
+ 'jobban',
+ 'jó',
+ 'jól',
+ 'kell',
+ 'kellett',
+ 'keressünk',
+ 'keresztül',
+ 'ki',
+ 'kívül',
+ 'között',
+ 'közül',
+ 'legalább',
+ 'legyen',
+ 'lehet',
+ 'lehetett',
+ 'lenne',
+ 'lenni',
+ 'lesz',
+ 'lett',
+ 'maga',
+ 'magát',
+ 'majd',
+ 'meg',
+ 'mellett',
+ 'mely',
+ 'melyek',
+ 'mert',
+ 'mi',
+ 'mikor',
+ 'milyen',
+ 'minden',
+ 'mindenki',
+ 'mindent',
+ 'mindig',
+ 'mint',
+ 'mintha',
+ 'mit',
+ 'mivel',
+ 'miért',
+ 'most',
+ 'már',
+ 'más',
+ 'másik',
+ 'még',
+ 'míg',
+ 'nagy',
+ 'nagyobb',
+ 'nagyon',
+ 'ne',
+ 'nekem',
+ 'neki',
+ 'nem',
+ 'nincs',
+ 'néha',
+ 'néhány',
+ 'nélkül',
+ 'olyan',
+ 'ott',
+ 'pedig',
+ 'persze',
+ 'rá',
+ 's',
+ 'saját',
+ 'sem',
+ 'semmi',
+ 'sok',
+ 'sokat',
+ 'sokkal',
+ 'szemben',
+ 'szerint',
+ 'szinte',
+ 'számára',
+ 'talán',
+ 'tehát',
+ 'teljes',
+ 'tovább',
+ 'továbbá',
+ 'több',
+ 'ugyanis',
+ 'utolsó',
+ 'után',
+ 'utána',
+ 'vagy',
+ 'vagyis',
+ 'vagyok',
+ 'valaki',
+ 'valami',
+ 'valamint',
+ 'való',
+ 'van',
+ 'vannak',
+ 'vele',
+ 'vissza',
+ 'viszont',
+ 'volna',
+ 'volt',
+ 'voltak',
+ 'voltam',
+ 'voltunk',
+ 'által',
+ 'általában',
+ 'át',
+ 'én',
+ 'éppen',
+ 'és',
+ 'így',
+ 'össze',
+ 'úgy',
+ 'új',
+ 'újabb',
+ 'újra',
+ 'ő',
+ 'ők',
+ 'őket',
+})
diff --git a/sphinx/search/_stopwords/hu.txt b/sphinx/search/_stopwords/hu.txt
new file mode 100644
index 00000000000..658c6194f27
--- /dev/null
+++ b/sphinx/search/_stopwords/hu.txt
@@ -0,0 +1,201 @@
+| source: https://snowballstem.org/algorithms/hungarian/stop.txt
+| prepared by Anna Tordai
+a
+ahogy
+ahol
+aki
+akik
+akkor
+alatt
+által
+általában
+amely
+amelyek
+amelyekben
+amelyeket
+amelyet
+amelynek
+ami
+amit
+amolyan
+amíg
+amikor
+át
+abban
+ahhoz
+annak
+arra
+arról
+az
+azok
+azon
+azt
+azzal
+azért
+aztán
+azután
+azonban
+bár
+be
+belül
+benne
+cikk
+cikkek
+cikkeket
+csak
+de
+e
+eddig
+egész
+egy
+egyes
+egyetlen
+egyéb
+egyik
+egyre
+ekkor
+el
+elég
+ellen
+elő
+először
+előtt
+első
+én
+éppen
+ebben
+ehhez
+emilyen
+ennek
+erre
+ez
+ezt
+ezek
+ezen
+ezzel
+ezért
+és
+fel
+felé
+hanem
+hiszen
+hogy
+hogyan
+igen
+így
+illetve
+ill.
+ill
+ilyen
+ilyenkor
+ison
+ismét
+itt
+jó
+jól
+jobban
+kell
+kellett
+keresztül
+keressünk
+ki
+kívül
+között
+közül
+legalább
+lehet
+lehetett
+legyen
+lenne
+lenni
+lesz
+lett
+maga
+magát
+majd
+majd
+már
+más
+másik
+meg
+még
+mellett
+mert
+mely
+melyek
+mi
+mit
+míg
+miért
+milyen
+mikor
+minden
+mindent
+mindenki
+mindig
+mint
+mintha
+mivel
+most
+nagy
+nagyobb
+nagyon
+ne
+néha
+nekem
+neki
+nem
+néhány
+nélkül
+nincs
+olyan
+ott
+össze
+ő
+ők
+őket
+pedig
+persze
+rá
+s
+saját
+sem
+semmi
+sok
+sokat
+sokkal
+számára
+szemben
+szerint
+szinte
+talán
+tehát
+teljes
+tovább
+továbbá
+több
+úgy
+ugyanis
+új
+újabb
+újra
+után
+utána
+utolsó
+vagy
+vagyis
+valaki
+valami
+valamint
+való
+vagyok
+van
+vannak
+volt
+voltam
+voltak
+voltunk
+vissza
+vele
+viszont
+volna
diff --git a/sphinx/search/_stopwords/it.py b/sphinx/search/_stopwords/it.py
new file mode 100644
index 00000000000..4b0f522ac94
--- /dev/null
+++ b/sphinx/search/_stopwords/it.py
@@ -0,0 +1,282 @@
+from __future__ import annotations
+
+ITALIAN_STOPWORDS = frozenset({
+ 'a',
+ 'abbia',
+ 'abbiamo',
+ 'abbiano',
+ 'abbiate',
+ 'ad',
+ 'agl',
+ 'agli',
+ 'ai',
+ 'al',
+ 'all',
+ 'alla',
+ 'alle',
+ 'allo',
+ 'anche',
+ 'avemmo',
+ 'avendo',
+ 'avesse',
+ 'avessero',
+ 'avessi',
+ 'avessimo',
+ 'aveste',
+ 'avesti',
+ 'avete',
+ 'aveva',
+ 'avevamo',
+ 'avevano',
+ 'avevate',
+ 'avevi',
+ 'avevo',
+ 'avrai',
+ 'avranno',
+ 'avrebbe',
+ 'avrebbero',
+ 'avrei',
+ 'avremmo',
+ 'avremo',
+ 'avreste',
+ 'avresti',
+ 'avrete',
+ 'avrà',
+ 'avrò',
+ 'avuta',
+ 'avute',
+ 'avuti',
+ 'avuto',
+ 'c',
+ 'che',
+ 'chi',
+ 'ci',
+ 'coi',
+ 'col',
+ 'come',
+ 'con',
+ 'contro',
+ 'cui',
+ 'da',
+ 'dagl',
+ 'dagli',
+ 'dai',
+ 'dal',
+ 'dall',
+ 'dalla',
+ 'dalle',
+ 'dallo',
+ 'degl',
+ 'degli',
+ 'dei',
+ 'del',
+ 'dell',
+ 'della',
+ 'delle',
+ 'dello',
+ 'di',
+ 'dov',
+ 'dove',
+ 'e',
+ 'ebbe',
+ 'ebbero',
+ 'ebbi',
+ 'ed',
+ 'era',
+ 'erano',
+ 'eravamo',
+ 'eravate',
+ 'eri',
+ 'ero',
+ 'essendo',
+ 'faccia',
+ 'facciamo',
+ 'facciano',
+ 'facciate',
+ 'faccio',
+ 'facemmo',
+ 'facendo',
+ 'facesse',
+ 'facessero',
+ 'facessi',
+ 'facessimo',
+ 'faceste',
+ 'facesti',
+ 'faceva',
+ 'facevamo',
+ 'facevano',
+ 'facevate',
+ 'facevi',
+ 'facevo',
+ 'fai',
+ 'fanno',
+ 'farai',
+ 'faranno',
+ 'farebbe',
+ 'farebbero',
+ 'farei',
+ 'faremmo',
+ 'faremo',
+ 'fareste',
+ 'faresti',
+ 'farete',
+ 'farà',
+ 'farò',
+ 'fece',
+ 'fecero',
+ 'feci',
+ 'fosse',
+ 'fossero',
+ 'fossi',
+ 'fossimo',
+ 'foste',
+ 'fosti',
+ 'fu',
+ 'fui',
+ 'fummo',
+ 'furono',
+ 'gli',
+ 'ha',
+ 'hai',
+ 'hanno',
+ 'ho',
+ 'i',
+ 'il',
+ 'in',
+ 'io',
+ 'l',
+ 'la',
+ 'le',
+ 'lei',
+ 'li',
+ 'lo',
+ 'loro',
+ 'lui',
+ 'ma',
+ 'mi',
+ 'mia',
+ 'mie',
+ 'miei',
+ 'mio',
+ 'ne',
+ 'negl',
+ 'negli',
+ 'nei',
+ 'nel',
+ 'nell',
+ 'nella',
+ 'nelle',
+ 'nello',
+ 'noi',
+ 'non',
+ 'nostra',
+ 'nostre',
+ 'nostri',
+ 'nostro',
+ 'o',
+ 'per',
+ 'perché',
+ 'più',
+ 'quale',
+ 'quanta',
+ 'quante',
+ 'quanti',
+ 'quanto',
+ 'quella',
+ 'quelle',
+ 'quelli',
+ 'quello',
+ 'questa',
+ 'queste',
+ 'questi',
+ 'questo',
+ 'sarai',
+ 'saranno',
+ 'sarebbe',
+ 'sarebbero',
+ 'sarei',
+ 'saremmo',
+ 'saremo',
+ 'sareste',
+ 'saresti',
+ 'sarete',
+ 'sarà',
+ 'sarò',
+ 'se',
+ 'sei',
+ 'si',
+ 'sia',
+ 'siamo',
+ 'siano',
+ 'siate',
+ 'siete',
+ 'sono',
+ 'sta',
+ 'stai',
+ 'stanno',
+ 'starai',
+ 'staranno',
+ 'starebbe',
+ 'starebbero',
+ 'starei',
+ 'staremmo',
+ 'staremo',
+ 'stareste',
+ 'staresti',
+ 'starete',
+ 'starà',
+ 'starò',
+ 'stava',
+ 'stavamo',
+ 'stavano',
+ 'stavate',
+ 'stavi',
+ 'stavo',
+ 'stemmo',
+ 'stesse',
+ 'stessero',
+ 'stessi',
+ 'stessimo',
+ 'steste',
+ 'stesti',
+ 'stette',
+ 'stettero',
+ 'stetti',
+ 'stia',
+ 'stiamo',
+ 'stiano',
+ 'stiate',
+ 'sto',
+ 'su',
+ 'sua',
+ 'sue',
+ 'sugl',
+ 'sugli',
+ 'sui',
+ 'sul',
+ 'sull',
+ 'sulla',
+ 'sulle',
+ 'sullo',
+ 'suo',
+ 'suoi',
+ 'ti',
+ 'tra',
+ 'tu',
+ 'tua',
+ 'tue',
+ 'tuo',
+ 'tuoi',
+ 'tutti',
+ 'tutto',
+ 'un',
+ 'una',
+ 'uno',
+ 'vi',
+ 'voi',
+ 'vostra',
+ 'vostre',
+ 'vostri',
+ 'vostro',
+ 'è',
+})
diff --git a/sphinx/search/_stopwords/it.txt b/sphinx/search/_stopwords/it.txt
new file mode 100644
index 00000000000..c8776836110
--- /dev/null
+++ b/sphinx/search/_stopwords/it.txt
@@ -0,0 +1,291 @@
+| source: https://snowballstem.org/algorithms/italian/stop.txt
+ad | a (to) before vowel
+al | a + il
+allo | a + lo
+ai | a + i
+agli | a + gli
+all | a + l'
+agl | a + gl'
+alla | a + la
+alle | a + le
+con | with
+col | con + il
+coi | con + i (forms collo, cogli etc are now very rare)
+da | from
+dal | da + il
+dallo | da + lo
+dai | da + i
+dagli | da + gli
+dall | da + l'
+dagl | da + gll'
+dalla | da + la
+dalle | da + le
+di | of
+del | di + il
+dello | di + lo
+dei | di + i
+degli | di + gli
+dell | di + l'
+degl | di + gl'
+della | di + la
+delle | di + le
+in | in
+nel | in + el
+nello | in + lo
+nei | in + i
+negli | in + gli
+nell | in + l'
+negl | in + gl'
+nella | in + la
+nelle | in + le
+su | on
+sul | su + il
+sullo | su + lo
+sui | su + i
+sugli | su + gli
+sull | su + l'
+sugl | su + gl'
+sulla | su + la
+sulle | su + le
+per | through, by
+tra | among
+contro | against
+io | I
+tu | thou
+lui | he
+lei | she
+noi | we
+voi | you
+loro | they
+mio | my
+mia |
+miei |
+mie |
+tuo |
+tua |
+tuoi | thy
+tue |
+suo |
+sua |
+suoi | his, her
+sue |
+nostro | our
+nostra |
+nostri |
+nostre |
+vostro | your
+vostra |
+vostri |
+vostre |
+mi | me
+ti | thee
+ci | us, there
+vi | you, there
+lo | him, the
+la | her, the
+li | them
+le | them, the
+gli | to him, the
+ne | from there etc
+il | the
+un | a
+uno | a
+una | a
+ma | but
+ed | and
+se | if
+perché | why, because
+anche | also
+come | how
+dov | where (as dov')
+dove | where
+che | who, that
+chi | who
+cui | whom
+non | not
+più | more
+quale | who, that
+quanto | how much
+quanti |
+quanta |
+quante |
+quello | that
+quelli |
+quella |
+quelle |
+questo | this
+questi |
+questa |
+queste |
+si | yes
+tutto | all
+tutti | all
+
+ | single letter forms:
+
+a | at
+c | as c' for ce or ci
+e | and
+i | the
+l | as l'
+o | or
+
+ | forms of avere, to have (not including the infinitive):
+
+ho
+hai
+ha
+abbiamo
+avete
+hanno
+abbia
+abbiate
+abbiano
+avrò
+avrai
+avrà
+avremo
+avrete
+avranno
+avrei
+avresti
+avrebbe
+avremmo
+avreste
+avrebbero
+avevo
+avevi
+aveva
+avevamo
+avevate
+avevano
+ebbi
+avesti
+ebbe
+avemmo
+aveste
+ebbero
+avessi
+avesse
+avessimo
+avessero
+avendo
+avuto
+avuta
+avuti
+avute
+
+ | forms of essere, to be (not including the infinitive):
+sono
+sei
+è
+siamo
+siete
+sia
+siate
+siano
+sarò
+sarai
+sarà
+saremo
+sarete
+saranno
+sarei
+saresti
+sarebbe
+saremmo
+sareste
+sarebbero
+ero
+eri
+era
+eravamo
+eravate
+erano
+fui
+fosti
+fu
+fummo
+foste
+furono
+fossi
+fosse
+fossimo
+fossero
+essendo
+
+ | forms of fare, to do (not including the infinitive, fa, fat-):
+faccio
+fai
+facciamo
+fanno
+faccia
+facciate
+facciano
+farò
+farai
+farà
+faremo
+farete
+faranno
+farei
+faresti
+farebbe
+faremmo
+fareste
+farebbero
+facevo
+facevi
+faceva
+facevamo
+facevate
+facevano
+feci
+facesti
+fece
+facemmo
+faceste
+fecero
+facessi
+facesse
+facessimo
+facessero
+facendo
+
+ | forms of stare, to be (not including the infinitive):
+sto
+stai
+sta
+stiamo
+stanno
+stia
+stiate
+stiano
+starò
+starai
+starà
+staremo
+starete
+staranno
+starei
+staresti
+starebbe
+staremmo
+stareste
+starebbero
+stavo
+stavi
+stava
+stavamo
+stavate
+stavano
+stetti
+stesti
+stette
+stemmo
+steste
+stettero
+stessi
+stesse
+stessimo
+stessero
diff --git a/sphinx/search/_stopwords/nl.py b/sphinx/search/_stopwords/nl.py
new file mode 100644
index 00000000000..1742ec8dad2
--- /dev/null
+++ b/sphinx/search/_stopwords/nl.py
@@ -0,0 +1,105 @@
+from __future__ import annotations
+
+DUTCH_STOPWORDS = frozenset({
+ 'aan',
+ 'al',
+ 'alles',
+ 'als',
+ 'altijd',
+ 'andere',
+ 'ben',
+ 'bij',
+ 'daar',
+ 'dan',
+ 'dat',
+ 'de',
+ 'der',
+ 'deze',
+ 'die',
+ 'dit',
+ 'doch',
+ 'doen',
+ 'door',
+ 'dus',
+ 'een',
+ 'eens',
+ 'en',
+ 'er',
+ 'ge',
+ 'geen',
+ 'geweest',
+ 'haar',
+ 'had',
+ 'heb',
+ 'hebben',
+ 'heeft',
+ 'hem',
+ 'het',
+ 'hier',
+ 'hij',
+ 'hoe',
+ 'hun',
+ 'iemand',
+ 'iets',
+ 'ik',
+ 'in',
+ 'is',
+ 'ja',
+ 'je',
+ 'kan',
+ 'kon',
+ 'kunnen',
+ 'maar',
+ 'me',
+ 'meer',
+ 'men',
+ 'met',
+ 'mij',
+ 'mijn',
+ 'moet',
+ 'na',
+ 'naar',
+ 'niet',
+ 'niets',
+ 'nog',
+ 'nu',
+ 'of',
+ 'om',
+ 'omdat',
+ 'onder',
+ 'ons',
+ 'ook',
+ 'op',
+ 'over',
+ 'reeds',
+ 'te',
+ 'tegen',
+ 'toch',
+ 'toen',
+ 'tot',
+ 'u',
+ 'uit',
+ 'uw',
+ 'van',
+ 'veel',
+ 'voor',
+ 'want',
+ 'waren',
+ 'was',
+ 'wat',
+ 'werd',
+ 'wezen',
+ 'wie',
+ 'wil',
+ 'worden',
+ 'wordt',
+ 'zal',
+ 'ze',
+ 'zelf',
+ 'zich',
+ 'zij',
+ 'zijn',
+ 'zo',
+ 'zonder',
+ 'zou',
+})
diff --git a/sphinx/search/_stopwords/nl.txt b/sphinx/search/_stopwords/nl.txt
new file mode 100644
index 00000000000..64336d0623b
--- /dev/null
+++ b/sphinx/search/_stopwords/nl.txt
@@ -0,0 +1,102 @@
+| source: https://snowballstem.org/algorithms/dutch/stop.txt
+de | the
+en | and
+van | of, from
+ik | I, the ego
+te | (1) chez, at etc, (2) to, (3) too
+dat | that, which
+die | that, those, who, which
+in | in, inside
+een | a, an, one
+hij | he
+het | the, it
+niet | not, nothing, naught
+zijn | (1) to be, being, (2) his, one's, its
+is | is
+was | (1) was, past tense of all persons sing. of 'zijn' (to be) (2) wax, (3) the washing, (4) rise of river
+op | on, upon, at, in, up, used up
+aan | on, upon, to (as dative)
+met | with, by
+als | like, such as, when
+voor | (1) before, in front of, (2) furrow
+had | had, past tense all persons sing. of 'hebben' (have)
+er | there
+maar | but, only
+om | round, about, for etc
+hem | him
+dan | then
+zou | should/would, past tense all persons sing. of 'zullen'
+of | or, whether, if
+wat | what, something, anything
+mijn | possessive and noun 'mine'
+men | people, 'one'
+dit | this
+zo | so, thus, in this way
+door | through by
+over | over, across
+ze | she, her, they, them
+zich | oneself
+bij | (1) a bee, (2) by, near, at
+ook | also, too
+tot | till, until
+je | you
+mij | me
+uit | out of, from
+der | Old Dutch form of 'van der' still found in surnames
+daar | (1) there, (2) because
+haar | (1) her, their, them, (2) hair
+naar | (1) unpleasant, unwell etc, (2) towards, (3) as
+heb | present first person sing. of 'to have'
+hoe | how, why
+heeft | present third person sing. of 'to have'
+hebben | 'to have' and various parts thereof
+deze | this
+u | you
+want | (1) for, (2) mitten, (3) rigging
+nog | yet, still
+zal | 'shall', first and third person sing. of verb 'zullen' (will)
+me | me
+zij | she, they
+nu | now
+ge | 'thou', still used in Belgium and south Netherlands
+geen | none
+omdat | because
+iets | something, somewhat
+worden | to become, grow, get
+toch | yet, still
+al | all, every, each
+waren | (1) 'were' (2) to wander, (3) wares, (3)
+veel | much, many
+meer | (1) more, (2) lake
+doen | to do, to make
+toen | then, when
+moet | noun 'spot/mote' and present form of 'to must'
+ben | (1) am, (2) 'are' in interrogative second person singular of 'to be'
+zonder | without
+kan | noun 'can' and present form of 'to be able'
+hun | their, them
+dus | so, consequently
+alles | all, everything, anything
+onder | under, beneath
+ja | yes, of course
+eens | once, one day
+hier | here
+wie | who
+werd | imperfect third person sing. of 'become'
+altijd | always
+doch | yet, but etc
+wordt | present third person sing. of 'become'
+wezen | (1) to be, (2) 'been' as in 'been fishing', (3) orphans
+kunnen | to be able
+ons | us/our
+zelf | self
+tegen | against, towards, at
+na | after, near
+reeds | already
+wil | (1) present tense of 'want', (2) 'will', noun, (3) fender
+kon | could; past tense of 'to be able'
+niets | nothing
+uw | your
+iemand | somebody
+geweest | been; past participle of 'be'
+andere | other
diff --git a/sphinx/search/_stopwords/no.py b/sphinx/search/_stopwords/no.py
new file mode 100644
index 00000000000..9b9bfbea4c9
--- /dev/null
+++ b/sphinx/search/_stopwords/no.py
@@ -0,0 +1,176 @@
+from __future__ import annotations
+
+NORWEGIAN_STOPWORDS = frozenset({
+ 'alle',
+ 'at',
+ 'av',
+ 'bare',
+ 'begge',
+ 'ble',
+ 'blei',
+ 'bli',
+ 'blir',
+ 'blitt',
+ 'både',
+ 'båe',
+ 'da',
+ 'de',
+ 'deg',
+ 'dei',
+ 'deim',
+ 'deira',
+ 'deires',
+ 'dem',
+ 'den',
+ 'denne',
+ 'der',
+ 'dere',
+ 'deres',
+ 'det',
+ 'dette',
+ 'di',
+ 'din',
+ 'disse',
+ 'ditt',
+ 'du',
+ 'dykk',
+ 'dykkar',
+ 'då',
+ 'eg',
+ 'ein',
+ 'eit',
+ 'eitt',
+ 'eller',
+ 'elles',
+ 'en',
+ 'enn',
+ 'er',
+ 'et',
+ 'ett',
+ 'etter',
+ 'for',
+ 'fordi',
+ 'fra',
+ 'før',
+ 'ha',
+ 'hadde',
+ 'han',
+ 'hans',
+ 'har',
+ 'hennar',
+ 'henne',
+ 'hennes',
+ 'her',
+ 'hjå',
+ 'ho',
+ 'hoe',
+ 'honom',
+ 'hoss',
+ 'hossen',
+ 'hun',
+ 'hva',
+ 'hvem',
+ 'hver',
+ 'hvilke',
+ 'hvilken',
+ 'hvis',
+ 'hvor',
+ 'hvordan',
+ 'hvorfor',
+ 'i',
+ 'ikke',
+ 'ikkje',
+ 'ingen',
+ 'ingi',
+ 'inkje',
+ 'inn',
+ 'inni',
+ 'ja',
+ 'jeg',
+ 'kan',
+ 'kom',
+ 'korleis',
+ 'korso',
+ 'kun',
+ 'kunne',
+ 'kva',
+ 'kvar',
+ 'kvarhelst',
+ 'kven',
+ 'kvi',
+ 'kvifor',
+ 'man',
+ 'mange',
+ 'me',
+ 'med',
+ 'medan',
+ 'meg',
+ 'meget',
+ 'mellom',
+ 'men',
+ 'mi',
+ 'min',
+ 'mine',
+ 'mitt',
+ 'mot',
+ 'mykje',
+ 'ned',
+ 'no',
+ 'noe',
+ 'noen',
+ 'noka',
+ 'noko',
+ 'nokon',
+ 'nokor',
+ 'nokre',
+ 'nå',
+ 'når',
+ 'og',
+ 'også',
+ 'om',
+ 'opp',
+ 'oss',
+ 'over',
+ 'på',
+ 'samme',
+ 'seg',
+ 'selv',
+ 'si',
+ 'sia',
+ 'sidan',
+ 'siden',
+ 'sin',
+ 'sine',
+ 'sitt',
+ 'sjøl',
+ 'skal',
+ 'skulle',
+ 'slik',
+ 'so',
+ 'som',
+ 'somme',
+ 'somt',
+ 'så',
+ 'sånn',
+ 'til',
+ 'um',
+ 'upp',
+ 'ut',
+ 'uten',
+ 'var',
+ 'vart',
+ 'varte',
+ 'ved',
+ 'vere',
+ 'verte',
+ 'vi',
+ 'vil',
+ 'ville',
+ 'vore',
+ 'vors',
+ 'vort',
+ 'vår',
+ 'være',
+ 'vært',
+ 'å',
+})
diff --git a/sphinx/search/_stopwords/no.txt b/sphinx/search/_stopwords/no.txt
new file mode 100644
index 00000000000..552ad326a55
--- /dev/null
+++ b/sphinx/search/_stopwords/no.txt
@@ -0,0 +1,177 @@
+| source: https://snowballstem.org/algorithms/norwegian/stop.txt
+og | and
+i | in
+jeg | I
+det | it/this/that
+at | to (w. inf.)
+en | a/an
+et | a/an
+den | it/this/that
+til | to
+er | is/am/are
+som | who/that
+på | on
+de | they / you(formal)
+med | with
+han | he
+av | of
+ikke | not
+ikkje | not *
+der | there
+så | so
+var | was/were
+meg | me
+seg | you
+men | but
+ett | one
+har | have
+om | about
+vi | we
+min | my
+mitt | my
+ha | have
+hadde | had
+hun | she
+nå | now
+over | over
+da | when/as
+ved | by/know
+fra | from
+du | you
+ut | out
+sin | your
+dem | them
+oss | us
+opp | up
+man | you/one
+kan | can
+hans | his
+hvor | where
+eller | or
+hva | what
+skal | shall/must
+selv | self (reflective)
+sjøl | self (reflective)
+her | here
+alle | all
+vil | will
+bli | become
+ble | became
+blei | became *
+blitt | have become
+kunne | could
+inn | in
+når | when
+være | be
+kom | come
+noen | some
+noe | some
+ville | would
+dere | you
+som | who/which/that
+deres | their/theirs
+kun | only/just
+ja | yes
+etter | after
+ned | down
+skulle | should
+denne | this
+for | for/because
+deg | you
+si | hers/his
+sine | hers/his
+sitt | hers/his
+mot | against
+å | to
+meget | much
+hvorfor | why
+dette | this
+disse | these/those
+uten | without
+hvordan | how
+ingen | none
+din | your
+ditt | your
+blir | become
+samme | same
+hvilken | which
+hvilke | which (plural)
+sånn | such a
+inni | inside/within
+mellom | between
+vår | our
+hver | each
+hvem | who
+vors | us/ours
+hvis | whose
+både | both
+bare | only/just
+enn | than
+fordi | as/because
+før | before
+mange | many
+også | also
+slik | just
+vært | been
+være | to be
+båe | both *
+begge | both
+siden | since
+dykk | your *
+dykkar | yours *
+dei | they *
+deira | them *
+deires | theirs *
+deim | them *
+di | your (fem.) *
+då | as/when *
+eg | I *
+ein | a/an *
+eit | a/an *
+eitt | a/an *
+elles | or *
+honom | he *
+hjå | at *
+ho | she *
+hoe | she *
+henne | her
+hennar | her/hers
+hennes | hers
+hoss | how *
+hossen | how *
+ikkje | not *
+ingi | noone *
+inkje | noone *
+korleis | how *
+korso | how *
+kva | what/which *
+kvar | where *
+kvarhelst | where *
+kven | who/whom *
+kvi | why *
+kvifor | why *
+me | we *
+medan | while *
+mi | my *
+mine | my *
+mykje | much *
+no | now *
+nokon | some (masc./neut.) *
+noka | some (fem.) *
+nokor | some *
+noko | some *
+nokre | some *
+si | his/hers *
+sia | since *
+sidan | since *
+so | so *
+somt | some *
+somme | some *
+um | about*
+upp | up *
+vere | be *
+vore | was *
+verte | become *
+vort | become *
+varte | became *
+vart | became *
diff --git a/sphinx/search/_stopwords/pt.py b/sphinx/search/_stopwords/pt.py
new file mode 100644
index 00000000000..b79799d42a6
--- /dev/null
+++ b/sphinx/search/_stopwords/pt.py
@@ -0,0 +1,207 @@
+from __future__ import annotations
+
+PORTUGUESE_STOPWORDS = frozenset({
+ 'a',
+ 'ao',
+ 'aos',
+ 'aquela',
+ 'aquelas',
+ 'aquele',
+ 'aqueles',
+ 'aquilo',
+ 'as',
+ 'até',
+ 'com',
+ 'como',
+ 'da',
+ 'das',
+ 'de',
+ 'dela',
+ 'delas',
+ 'dele',
+ 'deles',
+ 'depois',
+ 'do',
+ 'dos',
+ 'e',
+ 'ela',
+ 'elas',
+ 'ele',
+ 'eles',
+ 'em',
+ 'entre',
+ 'era',
+ 'eram',
+ 'essa',
+ 'essas',
+ 'esse',
+ 'esses',
+ 'esta',
+ 'estamos',
+ 'estas',
+ 'estava',
+ 'estavam',
+ 'este',
+ 'esteja',
+ 'estejam',
+ 'estejamos',
+ 'estes',
+ 'esteve',
+ 'estive',
+ 'estivemos',
+ 'estiver',
+ 'estivera',
+ 'estiveram',
+ 'estiverem',
+ 'estivermos',
+ 'estivesse',
+ 'estivessem',
+ 'estivéramos',
+ 'estivéssemos',
+ 'estou',
+ 'está',
+ 'estávamos',
+ 'estão',
+ 'eu',
+ 'foi',
+ 'fomos',
+ 'for',
+ 'fora',
+ 'foram',
+ 'forem',
+ 'formos',
+ 'fosse',
+ 'fossem',
+ 'fui',
+ 'fôramos',
+ 'fôssemos',
+ 'haja',
+ 'hajam',
+ 'hajamos',
+ 'havemos',
+ 'hei',
+ 'houve',
+ 'houvemos',
+ 'houver',
+ 'houvera',
+ 'houveram',
+ 'houverei',
+ 'houverem',
+ 'houveremos',
+ 'houveria',
+ 'houveriam',
+ 'houvermos',
+ 'houverá',
+ 'houverão',
+ 'houveríamos',
+ 'houvesse',
+ 'houvessem',
+ 'houvéramos',
+ 'houvéssemos',
+ 'há',
+ 'hão',
+ 'isso',
+ 'isto',
+ 'já',
+ 'lhe',
+ 'lhes',
+ 'mais',
+ 'mas',
+ 'me',
+ 'mesmo',
+ 'meu',
+ 'meus',
+ 'minha',
+ 'minhas',
+ 'muito',
+ 'na',
+ 'nas',
+ 'nem',
+ 'no',
+ 'nos',
+ 'nossa',
+ 'nossas',
+ 'nosso',
+ 'nossos',
+ 'num',
+ 'numa',
+ 'não',
+ 'nós',
+ 'o',
+ 'os',
+ 'ou',
+ 'para',
+ 'pela',
+ 'pelas',
+ 'pelo',
+ 'pelos',
+ 'por',
+ 'qual',
+ 'quando',
+ 'que',
+ 'quem',
+ 'se',
+ 'seja',
+ 'sejam',
+ 'sejamos',
+ 'sem',
+ 'serei',
+ 'seremos',
+ 'seria',
+ 'seriam',
+ 'será',
+ 'serão',
+ 'seríamos',
+ 'seu',
+ 'seus',
+ 'somos',
+ 'sou',
+ 'sua',
+ 'suas',
+ 'são',
+ 'só',
+ 'também',
+ 'te',
+ 'tem',
+ 'temos',
+ 'tenha',
+ 'tenham',
+ 'tenhamos',
+ 'tenho',
+ 'terei',
+ 'teremos',
+ 'teria',
+ 'teriam',
+ 'terá',
+ 'terão',
+ 'teríamos',
+ 'teu',
+ 'teus',
+ 'teve',
+ 'tinha',
+ 'tinham',
+ 'tive',
+ 'tivemos',
+ 'tiver',
+ 'tivera',
+ 'tiveram',
+ 'tiverem',
+ 'tivermos',
+ 'tivesse',
+ 'tivessem',
+ 'tivéramos',
+ 'tivéssemos',
+ 'tu',
+ 'tua',
+ 'tuas',
+ 'tém',
+ 'tínhamos',
+ 'um',
+ 'uma',
+ 'você',
+ 'vocês',
+ 'vos',
+ 'à',
+ 'às',
+ 'éramos',
+})
diff --git a/sphinx/search/_stopwords/pt.txt b/sphinx/search/_stopwords/pt.txt
new file mode 100644
index 00000000000..5ef15633d81
--- /dev/null
+++ b/sphinx/search/_stopwords/pt.txt
@@ -0,0 +1,236 @@
+| source: https://snowballstem.org/algorithms/portuguese/stop.txt
+de | of, from
+a | the; to, at; her
+o | the; him
+que | who, that
+e | and
+do | de + o
+da | de + a
+em | in
+um | a
+para | for
+ | é from SER
+com | with
+não | not, no
+uma | a
+os | the; them
+no | em + o
+se | himself etc
+na | em + a
+por | for
+mais | more
+as | the; them
+dos | de + os
+como | as, like
+mas | but
+ | foi from SER
+ao | a + o
+ele | he
+das | de + as
+ | tem from TER
+à | a + a
+seu | his
+sua | her
+ou | or
+ | ser from SER
+quando | when
+muito | much
+ | há from HAV
+nos | em + os; us
+já | already, now
+ | está from EST
+eu | I
+também | also
+só | only, just
+pelo | per + o
+pela | per + a
+até | up to
+isso | that
+ela | he
+entre | between
+ | era from SER
+depois | after
+sem | without
+mesmo | same
+aos | a + os
+ | ter from TER
+seus | his
+quem | whom
+nas | em + as
+me | me
+esse | that
+eles | they
+ | estão from EST
+você | you
+ | tinha from TER
+ | foram from SER
+essa | that
+num | em + um
+nem | nor
+suas | her
+meu | my
+às | a + as
+minha | my
+ | têm from TER
+numa | em + uma
+pelos | per + os
+elas | they
+ | havia from HAV
+ | seja from SER
+qual | which
+ | será from SER
+nós | we
+ | tenho from TER
+lhe | to him, her
+deles | of them
+essas | those
+esses | those
+pelas | per + as
+este | this
+ | fosse from SER
+dele | of him
+
+ | other words. There are many contractions such as naquele = em+aquele,
+ | mo = me+o, but they are rare.
+ | Indefinite article plural forms are also rare.
+
+tu | thou
+te | thee
+vocês | you (plural)
+vos | you
+lhes | to them
+meus | my
+minhas
+teu | thy
+tua
+teus
+tuas
+nosso | our
+nossa
+nossos
+nossas
+
+dela | of her
+delas | of them
+
+esta | this
+estes | these
+estas | these
+aquele | that
+aquela | that
+aqueles | those
+aquelas | those
+isto | this
+aquilo | that
+
+ | forms of estar, to be (not including the infinitive):
+estou
+está
+estamos
+estão
+estive
+esteve
+estivemos
+estiveram
+estava
+estávamos
+estavam
+estivera
+estivéramos
+esteja
+estejamos
+estejam
+estivesse
+estivéssemos
+estivessem
+estiver
+estivermos
+estiverem
+
+ | forms of haver, to have (not including the infinitive):
+hei
+há
+havemos
+hão
+houve
+houvemos
+houveram
+houvera
+houvéramos
+haja
+hajamos
+hajam
+houvesse
+houvéssemos
+houvessem
+houver
+houvermos
+houverem
+houverei
+houverá
+houveremos
+houverão
+houveria
+houveríamos
+houveriam
+
+ | forms of ser, to be (not including the infinitive):
+sou
+somos
+são
+era
+éramos
+eram
+fui
+foi
+fomos
+foram
+fora
+fôramos
+seja
+sejamos
+sejam
+fosse
+fôssemos
+fossem
+for
+formos
+forem
+serei
+será
+seremos
+serão
+seria
+seríamos
+seriam
+
+ | forms of ter, to have (not including the infinitive):
+tenho
+tem
+temos
+tém
+tinha
+tínhamos
+tinham
+tive
+teve
+tivemos
+tiveram
+tivera
+tivéramos
+tenha
+tenhamos
+tenham
+tivesse
+tivéssemos
+tivessem
+tiver
+tivermos
+tiverem
+terei
+terá
+teremos
+terão
+teria
+teríamos
+teriam
diff --git a/sphinx/search/_stopwords/ru.py b/sphinx/search/_stopwords/ru.py
new file mode 100644
index 00000000000..cc275d5184a
--- /dev/null
+++ b/sphinx/search/_stopwords/ru.py
@@ -0,0 +1,163 @@
+from __future__ import annotations
+
+RUSSIAN_STOPWORDS = frozenset({
+ 'а',
+ 'без',
+ 'более',
+ 'больше',
+ 'будет',
+ 'будто',
+ 'бы',
+ 'был',
+ 'была',
+ 'были',
+ 'было',
+ 'быть',
+ 'в',
+ 'вам',
+ 'вас',
+ 'вдруг',
+ 'ведь',
+ 'во',
+ 'вот',
+ 'впрочем',
+ 'все',
+ 'всегда',
+ 'всего',
+ 'всех',
+ 'всю',
+ 'вы',
+ 'где',
+ 'говорил',
+ 'да',
+ 'даже',
+ 'два',
+ 'для',
+ 'до',
+ 'другой',
+ 'его',
+ 'ее',
+ 'ей',
+ 'ему',
+ 'если',
+ 'есть',
+ 'еще',
+ 'ж',
+ 'же',
+ 'жизнь',
+ 'за',
+ 'зачем',
+ 'здесь',
+ 'и',
+ 'из',
+ 'или',
+ 'им',
+ 'иногда',
+ 'их',
+ 'к',
+ 'кажется',
+ 'как',
+ 'какая',
+ 'какой',
+ 'когда',
+ 'конечно',
+ 'кто',
+ 'куда',
+ 'ли',
+ 'лучше',
+ 'между',
+ 'меня',
+ 'мне',
+ 'много',
+ 'может',
+ 'можно',
+ 'мой',
+ 'моя',
+ 'мы',
+ 'на',
+ 'над',
+ 'надо',
+ 'наконец',
+ 'нас',
+ 'не',
+ 'него',
+ 'нее',
+ 'ней',
+ 'нельзя',
+ 'нет',
+ 'ни',
+ 'нибудь',
+ 'никогда',
+ 'ним',
+ 'них',
+ 'ничего',
+ 'но',
+ 'ну',
+ 'о',
+ 'об',
+ 'один',
+ 'он',
+ 'она',
+ 'они',
+ 'опять',
+ 'от',
+ 'перед',
+ 'по',
+ 'под',
+ 'после',
+ 'потом',
+ 'потому',
+ 'почти',
+ 'при',
+ 'про',
+ 'раз',
+ 'разве',
+ 'с',
+ 'сам',
+ 'свою',
+ 'себе',
+ 'себя',
+ 'сегодня',
+ 'сейчас',
+ 'сказал',
+ 'сказала',
+ 'сказать',
+ 'со',
+ 'совсем',
+ 'так',
+ 'такой',
+ 'там',
+ 'тебя',
+ 'тем',
+ 'теперь',
+ 'то',
+ 'тогда',
+ 'того',
+ 'тоже',
+ 'только',
+ 'том',
+ 'тот',
+ 'три',
+ 'тут',
+ 'ты',
+ 'у',
+ 'уж',
+ 'уже',
+ 'хорошо',
+ 'хоть',
+ 'чего',
+ 'человек',
+ 'чем',
+ 'через',
+ 'что',
+ 'чтоб',
+ 'чтобы',
+ 'чуть',
+ 'эти',
+ 'этого',
+ 'этой',
+ 'этом',
+ 'этот',
+ 'эту',
+ 'я',
+})
diff --git a/sphinx/search/_stopwords/ru.txt b/sphinx/search/_stopwords/ru.txt
new file mode 100644
index 00000000000..43a73af0b55
--- /dev/null
+++ b/sphinx/search/_stopwords/ru.txt
@@ -0,0 +1,226 @@
+| source: https://snowballstem.org/algorithms/russian/stop.txt
+и | and
+в | in/into
+во | alternative form
+не | not
+что | what/that
+он | he
+на | on/onto
+я | i
+с | from
+со | alternative form
+как | how
+а | milder form of `no' (but)
+то | conjunction and form of `that'
+все | all
+она | she
+так | so, thus
+его | him
+но | but
+да | yes/and
+ты | thou
+к | towards, by
+у | around, chez
+же | intensifier particle
+вы | you
+за | beyond, behind
+бы | conditional/subj. particle
+по | up to, along
+только | only
+ее | her
+мне | to me
+было | it was
+вот | here is/are, particle
+от | away from
+меня | me
+еще | still, yet, more
+нет | no, there isn't/aren't
+о | about
+из | out of
+ему | to him
+теперь | now
+когда | when
+даже | even
+ну | so, well
+вдруг | suddenly
+ли | interrogative particle
+если | if
+уже | already, but homonym of `narrower'
+или | or
+ни | neither
+быть | to be
+был | he was
+него | prepositional form of его
+до | up to
+вас | you accusative
+нибудь | indef. suffix preceded by hyphen
+опять | again
+уж | already, but homonym of `adder'
+вам | to you
+сказал | he said
+ведь | particle `after all'
+там | there
+потом | then
+себя | oneself
+ничего | nothing
+ей | to her
+может | usually with `быть' as `maybe'
+они | they
+тут | here
+где | where
+есть | there is/are
+надо | got to, must
+ней | prepositional form of ей
+для | for
+мы | we
+тебя | thee
+их | them, their
+чем | than
+была | she was
+сам | self
+чтоб | in order to
+без | without
+будто | as if
+человек | man, person, one
+чего | genitive form of `what'
+раз | once
+тоже | also
+себе | to oneself
+под | beneath
+жизнь | life
+будет | will be
+ж | short form of intensifier particle `же'
+тогда | then
+кто | who
+этот | this
+говорил | was saying
+того | genitive form of `that'
+потому | for that reason
+этого | genitive form of `this'
+какой | which
+совсем | altogether
+ним | prepositional form of `его', `они'
+здесь | here
+этом | prepositional form of `этот'
+один | one
+почти | almost
+мой | my
+тем | instrumental/dative plural of `тот', `то'
+чтобы | full form of `in order that'
+нее | her (acc.)
+кажется | it seems
+сейчас | now
+были | they were
+куда | where to
+зачем | why
+сказать | to say
+всех | all (acc., gen. preposn. plural)
+никогда | never
+сегодня | today
+можно | possible, one can
+при | by
+наконец | finally
+два | two
+об | alternative form of `о', about
+другой | another
+хоть | even
+после | after
+над | above
+больше | more
+тот | that one (masc.)
+через | across, in
+эти | these
+нас | us
+про | about
+всего | in all, only, of all
+них | prepositional form of `они' (they)
+какая | which, feminine
+много | lots
+разве | interrogative particle
+сказала | she said
+три | three
+эту | this, acc. fem. sing.
+моя | my, feminine
+впрочем | moreover, besides
+хорошо | good
+свою | ones own, acc. fem. sing.
+этой | oblique form of `эта', fem. `this'
+перед | in front of
+иногда | sometimes
+лучше | better
+чуть | a little
+том | preposn. form of `that one'
+нельзя | one must not
+такой | such a one
+им | to them
+более | more
+всегда | always
+конечно | of course
+всю | acc. fem. sing of `all'
+между | between
+
+
+ | b: some paradigms
+ |
+ | personal pronouns
+ |
+ | я меня мне мной [мною]
+ | ты тебя тебе тобой [тобою]
+ | он его ему им [него, нему, ним]
+ | она ее эи ею [нее, нэи, нею]
+ | оно его ему им [него, нему, ним]
+ |
+ | мы нас нам нами
+ | вы вас вам вами
+ | они их им ими [них, ним, ними]
+ |
+ | себя себе собой [собою]
+ |
+ | demonstrative pronouns: этот (this), тот (that)
+ |
+ | этот эта это эти
+ | этого эты это эти
+ | этого этой этого этих
+ | этому этой этому этим
+ | этим этой этим [этою] этими
+ | этом этой этом этих
+ |
+ | тот та то те
+ | того ту то те
+ | того той того тех
+ | тому той тому тем
+ | тем той тем [тою] теми
+ | том той том тех
+ |
+ | determinative pronouns
+ |
+ | (a) весь (all)
+ |
+ | весь вся все все
+ | всего всю все все
+ | всего всей всего всех
+ | всему всей всему всем
+ | всем всей всем [всею] всеми
+ | всем всей всем всех
+ |
+ | (b) сам (himself etc)
+ |
+ | сам сама само сами
+ | самого саму само самих
+ | самого самой самого самих
+ | самому самой самому самим
+ | самим самой самим [самою] самими
+ | самом самой самом самих
+ |
+ | stems of verbs `to be', `to have', `to do' and modal
+ |
+ | быть бы буд быв есть суть
+ | име
+ | дел
+ | мог мож мочь
+ | уме
+ | хоч хот
+ | долж
+ | можн
+ | нужн
+ | нельзя
diff --git a/sphinx/search/_stopwords/sv.py b/sphinx/search/_stopwords/sv.py
new file mode 100644
index 00000000000..c1f10635e0b
--- /dev/null
+++ b/sphinx/search/_stopwords/sv.py
@@ -0,0 +1,118 @@
+from __future__ import annotations
+
+SWEDISH_STOPWORDS = frozenset({
+ 'alla',
+ 'allt',
+ 'att',
+ 'av',
+ 'blev',
+ 'bli',
+ 'blir',
+ 'blivit',
+ 'de',
+ 'dem',
+ 'den',
+ 'denna',
+ 'deras',
+ 'dess',
+ 'dessa',
+ 'det',
+ 'detta',
+ 'dig',
+ 'din',
+ 'dina',
+ 'ditt',
+ 'du',
+ 'där',
+ 'då',
+ 'efter',
+ 'ej',
+ 'eller',
+ 'en',
+ 'er',
+ 'era',
+ 'ert',
+ 'ett',
+ 'från',
+ 'för',
+ 'ha',
+ 'hade',
+ 'han',
+ 'hans',
+ 'har',
+ 'henne',
+ 'hennes',
+ 'hon',
+ 'honom',
+ 'hur',
+ 'här',
+ 'i',
+ 'icke',
+ 'ingen',
+ 'inom',
+ 'inte',
+ 'jag',
+ 'ju',
+ 'kan',
+ 'kunde',
+ 'man',
+ 'med',
+ 'mellan',
+ 'men',
+ 'mig',
+ 'min',
+ 'mina',
+ 'mitt',
+ 'mot',
+ 'mycket',
+ 'ni',
+ 'nu',
+ 'när',
+ 'någon',
+ 'något',
+ 'några',
+ 'och',
+ 'om',
+ 'oss',
+ 'på',
+ 'samma',
+ 'sedan',
+ 'sig',
+ 'sin',
+ 'sina',
+ 'sitta',
+ 'själv',
+ 'skulle',
+ 'som',
+ 'så',
+ 'sådan',
+ 'sådana',
+ 'sådant',
+ 'till',
+ 'under',
+ 'upp',
+ 'ut',
+ 'utan',
+ 'vad',
+ 'var',
+ 'vara',
+ 'varför',
+ 'varit',
+ 'varje',
+ 'vars',
+ 'vart',
+ 'vem',
+ 'vi',
+ 'vid',
+ 'vilka',
+ 'vilkas',
+ 'vilken',
+ 'vilket',
+ 'vår',
+ 'våra',
+ 'vårt',
+ 'än',
+ 'är',
+ 'åt',
+ 'över',
+})
diff --git a/sphinx/search/_stopwords/sv.txt b/sphinx/search/_stopwords/sv.txt
new file mode 100644
index 00000000000..850ae7474d6
--- /dev/null
+++ b/sphinx/search/_stopwords/sv.txt
@@ -0,0 +1,115 @@
+| source: https://snowballstem.org/algorithms/swedish/stop.txt
+och | and
+det | it, this/that
+att | to (with infinitive)
+i | in, at
+en | a
+jag | I
+hon | she
+som | who, that
+han | he
+på | on
+den | it, this/that
+med | with
+var | where, each
+sig | him(self) etc
+för | for
+så | so (also: seed)
+till | to
+är | is
+men | but
+ett | a
+om | if; around, about
+hade | had
+de | they, these/those
+av | of
+icke | not, no
+mig | me
+du | you
+henne | her
+då | then, when
+sin | his
+nu | now
+har | have
+inte | inte någon = no one
+hans | his
+honom | him
+skulle | 'sake'
+hennes | her
+där | there
+min | my
+man | one (pronoun)
+ej | not
+vid | at, by, on (also: vast)
+kunde | could
+något | some etc
+från | from, off
+ut | out
+när | when
+efter | after, behind
+upp | up
+vi | we
+dem | them
+vara | be
+vad | what
+över | over
+än | than
+dig | you
+kan | can
+sina | his
+här | here
+ha | have
+mot | towards
+alla | all
+under | under (also: wonder)
+någon | some etc
+eller | or (else)
+allt | all
+mycket | much
+sedan | since
+ju | why
+denna | this/that
+själv | myself, yourself etc
+detta | this/that
+åt | to
+utan | without
+varit | was
+hur | how
+ingen | no
+mitt | my
+ni | you
+bli | to be, become
+blev | from bli
+oss | us
+din | thy
+dessa | these/those
+några | some etc
+deras | their
+blir | from bli
+mina | my
+samma | (the) same
+vilken | who, that
+er | you, your
+sådan | such a
+vår | our
+blivit | from bli
+dess | its
+inom | within
+mellan | between
+sådant | such a
+varför | why
+varje | each
+vilka | who, that
+ditt | thy
+vem | who
+vilket | who, that
+sitta | his
+sådana | such a
+vart | each
+dina | thy
+vars | whose
+vårt | our
+våra | our
+ert | your
+era | your
+vilkas | whose
diff --git a/sphinx/search/da.py b/sphinx/search/da.py
index 8be1c6a215e..3eb997af1c3 100644
--- a/sphinx/search/da.py
+++ b/sphinx/search/da.py
@@ -4,112 +4,15 @@
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-danish_stopwords = parse_stop_word("""
-| source: https://snowballstem.org/algorithms/danish/stop.txt
-og | and
-i | in
-jeg | I
-det | that (dem. pronoun)/it (pers. pronoun)
-at | that (in front of a sentence)/to (with infinitive)
-en | a/an
-den | it (pers. pronoun)/that (dem. pronoun)
-til | to/at/for/until/against/by/of/into, more
-er | present tense of "to be"
-som | who, as
-på | on/upon/in/on/at/to/after/of/with/for, on
-de | they
-med | with/by/in, along
-han | he
-af | of/by/from/off/for/in/with/on, off
-for | at/for/to/from/by/of/ago, in front/before, because
-ikke | not
-der | who/which, there/those
-var | past tense of "to be"
-mig | me/myself
-sig | oneself/himself/herself/itself/themselves
-men | but
-et | a/an/one, one (number), someone/somebody/one
-har | present tense of "to have"
-om | round/about/for/in/a, about/around/down, if
-vi | we
-min | my
-havde | past tense of "to have"
-ham | him
-hun | she
-nu | now
-over | over/above/across/by/beyond/past/on/about, over/past
-da | then, when/as/since
-fra | from/off/since, off, since
-du | you
-ud | out
-sin | his/her/its/one's
-dem | them
-os | us/ourselves
-op | up
-man | you/one
-hans | his
-hvor | where
-eller | or
-hvad | what
-skal | must/shall etc.
-selv | myself/yourself/herself/ourselves etc., even
-her | here
-alle | all/everyone/everybody etc.
-vil | will (verb)
-blev | past tense of "to stay/to remain/to get/to become"
-kunne | could
-ind | in
-når | when
-være | present tense of "to be"
-dog | however/yet/after all
-noget | something
-ville | would
-jo | you know/you see (adv), yes
-deres | their/theirs
-efter | after/behind/according to/for/by/from, later/afterwards
-ned | down
-skulle | should
-denne | this
-end | than
-dette | this
-mit | my/mine
-også | also
-under | under/beneath/below/during, below/underneath
-have | have
-dig | you
-anden | other
-hende | her
-mine | my
-alt | everything
-meget | much/very, plenty of
-sit | his, her, its, one's
-sine | his, her, its, one's
-vor | our
-mod | against
-disse | these
-hvis | if
-din | your/yours
-nogle | some
-hos | by/at
-blive | be/become
-mange | many
-ad | by/through
-bliver | present tense of "to be/to become"
-hendes | her/hers
-været | be
-thi | for (conj)
-jer | you
-sådan | such, like this/like that
-""")
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.da import DANISH_STOPWORDS
class SearchDanish(SearchLanguage):
lang = 'da'
language_name = 'Danish'
js_stemmer_rawcode = 'danish-stemmer.js'
- stopwords = danish_stopwords
+ stopwords = DANISH_STOPWORDS
def __init__(self, options: dict[str, str]) -> None:
super().__init__(options)
diff --git a/sphinx/search/de.py b/sphinx/search/de.py
index ac5ac7ee131..6875b9c7535 100644
--- a/sphinx/search/de.py
+++ b/sphinx/search/de.py
@@ -4,295 +4,15 @@
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-german_stopwords = parse_stop_word("""
-|source: https://snowballstem.org/algorithms/german/stop.txt
-aber | but
-
-alle | all
-allem
-allen
-aller
-alles
-
-als | than, as
-also | so
-am | an + dem
-an | at
-
-ander | other
-andere
-anderem
-anderen
-anderer
-anderes
-anderm
-andern
-anderr
-anders
-
-auch | also
-auf | on
-aus | out of
-bei | by
-bin | am
-bis | until
-bist | art
-da | there
-damit | with it
-dann | then
-
-der | the
-den
-des
-dem
-die
-das
-
-daß | that
-
-derselbe | the same
-derselben
-denselben
-desselben
-demselben
-dieselbe
-dieselben
-dasselbe
-
-dazu | to that
-
-dein | thy
-deine
-deinem
-deinen
-deiner
-deines
-
-denn | because
-
-derer | of those
-dessen | of him
-
-dich | thee
-dir | to thee
-du | thou
-
-dies | this
-diese
-diesem
-diesen
-dieser
-dieses
-
-
-doch | (several meanings)
-dort | (over) there
-
-
-durch | through
-
-ein | a
-eine
-einem
-einen
-einer
-eines
-
-einig | some
-einige
-einigem
-einigen
-einiger
-einiges
-
-einmal | once
-
-er | he
-ihn | him
-ihm | to him
-
-es | it
-etwas | something
-
-euer | your
-eure
-eurem
-euren
-eurer
-eures
-
-für | for
-gegen | towards
-gewesen | p.p. of sein
-hab | have
-habe | have
-haben | have
-hat | has
-hatte | had
-hatten | had
-hier | here
-hin | there
-hinter | behind
-
-ich | I
-mich | me
-mir | to me
-
-
-ihr | you, to her
-ihre
-ihrem
-ihren
-ihrer
-ihres
-euch | to you
-
-im | in + dem
-in | in
-indem | while
-ins | in + das
-ist | is
-
-jede | each, every
-jedem
-jeden
-jeder
-jedes
-
-jene | that
-jenem
-jenen
-jener
-jenes
-
-jetzt | now
-kann | can
-
-kein | no
-keine
-keinem
-keinen
-keiner
-keines
-
-können | can
-könnte | could
-machen | do
-man | one
-
-manche | some, many a
-manchem
-manchen
-mancher
-manches
-
-mein | my
-meine
-meinem
-meinen
-meiner
-meines
-
-mit | with
-muss | must
-musste | had to
-nach | to(wards)
-nicht | not
-nichts | nothing
-noch | still, yet
-nun | now
-nur | only
-ob | whether
-oder | or
-ohne | without
-sehr | very
-
-sein | his
-seine
-seinem
-seinen
-seiner
-seines
-
-selbst | self
-sich | herself
-
-sie | they, she
-ihnen | to them
-
-sind | are
-so | so
-
-solche | such
-solchem
-solchen
-solcher
-solches
-
-soll | shall
-sollte | should
-sondern | but
-sonst | else
-über | over
-um | about, around
-und | and
-
-uns | us
-unse
-unsem
-unsen
-unser
-unses
-
-unter | under
-viel | much
-vom | von + dem
-von | from
-vor | before
-während | while
-war | was
-waren | were
-warst | wast
-was | what
-weg | away, off
-weil | because
-weiter | further
-
-welche | which
-welchem
-welchen
-welcher
-welches
-
-wenn | when
-werde | will
-werden | will
-wie | how
-wieder | again
-will | want
-wir | we
-wird | will
-wirst | willst
-wo | where
-wollen | want
-wollte | wanted
-würde | would
-würden | would
-zu | to
-zum | zu + dem
-zur | zu + der
-zwar | indeed
-zwischen | between
-""")
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.de import GERMAN_STOPWORDS
class SearchGerman(SearchLanguage):
lang = 'de'
language_name = 'German'
js_stemmer_rawcode = 'german-stemmer.js'
- stopwords = german_stopwords
+ stopwords = GERMAN_STOPWORDS
def __init__(self, options: dict[str, str]) -> None:
super().__init__(options)
diff --git a/sphinx/search/en.py b/sphinx/search/en.py
index 51494a04a4d..30324c8832a 100644
--- a/sphinx/search/en.py
+++ b/sphinx/search/en.py
@@ -5,18 +5,7 @@
import snowballstemmer
from sphinx.search import SearchLanguage
-
-english_stopwords = {
- 'a', 'and', 'are', 'as', 'at',
- 'be', 'but', 'by',
- 'for',
- 'if', 'in', 'into', 'is', 'it',
- 'near', 'no', 'not',
- 'of', 'on', 'or',
- 'such',
- 'that', 'the', 'their', 'then', 'there', 'these', 'they', 'this', 'to',
- 'was', 'will', 'with',
-} # fmt: skip
+from sphinx.search._stopwords.en import ENGLISH_STOPWORDS
js_porter_stemmer = """
/**
@@ -209,7 +198,7 @@ class SearchEnglish(SearchLanguage):
lang = 'en'
language_name = 'English'
js_stemmer_code = js_porter_stemmer
- stopwords = english_stopwords
+ stopwords = ENGLISH_STOPWORDS
def __init__(self, options: dict[str, str]) -> None:
super().__init__(options)
diff --git a/sphinx/search/es.py b/sphinx/search/es.py
index 3cc41f600ac..d11937ad0c6 100644
--- a/sphinx/search/es.py
+++ b/sphinx/search/es.py
@@ -4,355 +4,15 @@
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-spanish_stopwords = parse_stop_word("""
-|source: https://snowballstem.org/algorithms/spanish/stop.txt
-de | from, of
-la | the, her
-que | who, that
-el | the
-en | in
-y | and
-a | to
-los | the, them
-del | de + el
-se | himself, from him etc
-las | the, them
-por | for, by, etc
-un | a
-para | for
-con | with
-no | no
-una | a
-su | his, her
-al | a + el
- | es from SER
-lo | him
-como | how
-más | more
-pero | pero
-sus | su plural
-le | to him, her
-ya | already
-o | or
- | fue from SER
-este | this
- | ha from HABER
-sí | himself etc
-porque | because
-esta | this
- | son from SER
-entre | between
- | está from ESTAR
-cuando | when
-muy | very
-sin | without
-sobre | on
- | ser from SER
- | tiene from TENER
-también | also
-me | me
-hasta | until
-hay | there is/are
-donde | where
- | han from HABER
-quien | whom, that
- | están from ESTAR
- | estado from ESTAR
-desde | from
-todo | all
-nos | us
-durante | during
- | estados from ESTAR
-todos | all
-uno | a
-les | to them
-ni | nor
-contra | against
-otros | other
- | fueron from SER
-ese | that
-eso | that
- | había from HABER
-ante | before
-ellos | they
-e | and (variant of y)
-esto | this
-mí | me
-antes | before
-algunos | some
-qué | what?
-unos | a
-yo | I
-otro | other
-otras | other
-otra | other
-él | he
-tanto | so much, many
-esa | that
-estos | these
-mucho | much, many
-quienes | who
-nada | nothing
-muchos | many
-cual | who
- | sea from SER
-poco | few
-ella | she
-estar | to be
- | haber from HABER
-estas | these
- | estaba from ESTAR
- | estamos from ESTAR
-algunas | some
-algo | something
-nosotros | we
-
- | other forms
-
-mi | me
-mis | mi plural
-tú | thou
-te | thee
-ti | thee
-tu | thy
-tus | tu plural
-ellas | they
-nosotras | we
-vosotros | you
-vosotras | you
-os | you
-mío | mine
-mía |
-míos |
-mías |
-tuyo | thine
-tuya |
-tuyos |
-tuyas |
-suyo | his, hers, theirs
-suya |
-suyos |
-suyas |
-nuestro | ours
-nuestra |
-nuestros |
-nuestras |
-vuestro | yours
-vuestra |
-vuestros |
-vuestras |
-esos | those
-esas | those
-
- | forms of estar, to be (not including the infinitive):
-estoy
-estás
-está
-estamos
-estáis
-están
-esté
-estés
-estemos
-estéis
-estén
-estaré
-estarás
-estará
-estaremos
-estaréis
-estarán
-estaría
-estarías
-estaríamos
-estaríais
-estarían
-estaba
-estabas
-estábamos
-estabais
-estaban
-estuve
-estuviste
-estuvo
-estuvimos
-estuvisteis
-estuvieron
-estuviera
-estuvieras
-estuviéramos
-estuvierais
-estuvieran
-estuviese
-estuvieses
-estuviésemos
-estuvieseis
-estuviesen
-estando
-estado
-estada
-estados
-estadas
-estad
-
- | forms of haber, to have (not including the infinitive):
-he
-has
-ha
-hemos
-habéis
-han
-haya
-hayas
-hayamos
-hayáis
-hayan
-habré
-habrás
-habrá
-habremos
-habréis
-habrán
-habría
-habrías
-habríamos
-habríais
-habrían
-había
-habías
-habíamos
-habíais
-habían
-hube
-hubiste
-hubo
-hubimos
-hubisteis
-hubieron
-hubiera
-hubieras
-hubiéramos
-hubierais
-hubieran
-hubiese
-hubieses
-hubiésemos
-hubieseis
-hubiesen
-habiendo
-habido
-habida
-habidos
-habidas
-
- | forms of ser, to be (not including the infinitive):
-soy
-eres
-es
-somos
-sois
-son
-sea
-seas
-seamos
-seáis
-sean
-seré
-serás
-será
-seremos
-seréis
-serán
-sería
-serías
-seríamos
-seríais
-serían
-era
-eras
-éramos
-erais
-eran
-fui
-fuiste
-fue
-fuimos
-fuisteis
-fueron
-fuera
-fueras
-fuéramos
-fuerais
-fueran
-fuese
-fueses
-fuésemos
-fueseis
-fuesen
-siendo
-sido
- | sed also means 'thirst'
-
- | forms of tener, to have (not including the infinitive):
-tengo
-tienes
-tiene
-tenemos
-tenéis
-tienen
-tenga
-tengas
-tengamos
-tengáis
-tengan
-tendré
-tendrás
-tendrá
-tendremos
-tendréis
-tendrán
-tendría
-tendrías
-tendríamos
-tendríais
-tendrían
-tenía
-tenías
-teníamos
-teníais
-tenían
-tuve
-tuviste
-tuvo
-tuvimos
-tuvisteis
-tuvieron
-tuviera
-tuvieras
-tuviéramos
-tuvierais
-tuvieran
-tuviese
-tuvieses
-tuviésemos
-tuvieseis
-tuviesen
-teniendo
-tenido
-tenida
-tenidos
-tenidas
-tened
-""")
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.es import SPANISH_STOPWORDS
class SearchSpanish(SearchLanguage):
lang = 'es'
language_name = 'Spanish'
js_stemmer_rawcode = 'spanish-stemmer.js'
- stopwords = spanish_stopwords
+ stopwords = SPANISH_STOPWORDS
def __init__(self, options: dict[str, str]) -> None:
super().__init__(options)
diff --git a/sphinx/search/fi.py b/sphinx/search/fi.py
index c8b048d4fc9..cd044b71a80 100644
--- a/sphinx/search/fi.py
+++ b/sphinx/search/fi.py
@@ -4,105 +4,15 @@
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-finnish_stopwords = parse_stop_word("""
-| source: https://snowballstem.org/algorithms/finnish/stop.txt
-| forms of BE
-
-olla
-olen
-olet
-on
-olemme
-olette
-ovat
-ole | negative form
-
-oli
-olisi
-olisit
-olisin
-olisimme
-olisitte
-olisivat
-olit
-olin
-olimme
-olitte
-olivat
-ollut
-olleet
-
-en | negation
-et
-ei
-emme
-ette
-eivät
-
-|Nom Gen Acc Part Iness Elat Illat Adess Ablat Allat Ess Trans
-minä minun minut minua minussa minusta minuun minulla minulta minulle | I
-sinä sinun sinut sinua sinussa sinusta sinuun sinulla sinulta sinulle | you
-hän hänen hänet häntä hänessä hänestä häneen hänellä häneltä hänelle | he she
-me meidän meidät meitä meissä meistä meihin meillä meiltä meille | we
-te teidän teidät teitä teissä teistä teihin teillä teiltä teille | you
-he heidän heidät heitä heissä heistä heihin heillä heiltä heille | they
-
-tämä tämän tätä tässä tästä tähän tällä tältä tälle tänä täksi | this
-tuo tuon tuota tuossa tuosta tuohon tuolla tuolta tuolle tuona tuoksi | that
-se sen sitä siinä siitä siihen sillä siltä sille sinä siksi | it
-nämä näiden näitä näissä näistä näihin näillä näiltä näille näinä näiksi | these
-nuo noiden noita noissa noista noihin noilla noilta noille noina noiksi | those
-ne niiden niitä niissä niistä niihin niillä niiltä niille niinä niiksi | they
-
-kuka kenen kenet ketä kenessä kenestä keneen kenellä keneltä kenelle kenenä keneksi| who
-ketkä keiden ketkä keitä keissä keistä keihin keillä keiltä keille keinä keiksi | (pl)
-mikä minkä minkä mitä missä mistä mihin millä miltä mille minä miksi | which what
-mitkä | (pl)
-
-joka jonka jota jossa josta johon jolla jolta jolle jona joksi | who which
-jotka joiden joita joissa joista joihin joilla joilta joille joina joiksi | (pl)
-
-| conjunctions
-
-että | that
-ja | and
-jos | if
-koska | because
-kuin | than
-mutta | but
-niin | so
-sekä | and
-sillä | for
-tai | or
-vaan | but
-vai | or
-vaikka | although
-
-
-| prepositions
-
-kanssa | with
-mukaan | according to
-noin | about
-poikki | across
-yli | over, across
-
-| other
-
-kun | when
-niin | so
-nyt | now
-itse | self
-""") # NoQA: E501
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.fi import FINNISH_STOPWORDS
class SearchFinnish(SearchLanguage):
lang = 'fi'
language_name = 'Finnish'
js_stemmer_rawcode = 'finnish-stemmer.js'
- stopwords = finnish_stopwords
+ stopwords = FINNISH_STOPWORDS
def __init__(self, options: dict[str, str]) -> None:
super().__init__(options)
diff --git a/sphinx/search/fr.py b/sphinx/search/fr.py
index bbdc56032ff..11a2c70f5dc 100644
--- a/sphinx/search/fr.py
+++ b/sphinx/search/fr.py
@@ -4,191 +4,15 @@
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-french_stopwords = parse_stop_word("""
-| source: https://snowballstem.org/algorithms/french/stop.txt
-au | a + le
-aux | a + les
-avec | with
-ce | this
-ces | these
-dans | with
-de | of
-des | de + les
-du | de + le
-elle | she
-en | `of them' etc
-et | and
-eux | them
-il | he
-je | I
-la | the
-le | the
-leur | their
-lui | him
-ma | my (fem)
-mais | but
-me | me
-même | same; as in moi-même (myself) etc
-mes | me (pl)
-moi | me
-mon | my (masc)
-ne | not
-nos | our (pl)
-notre | our
-nous | we
-on | one
-ou | where
-par | by
-pas | not
-pour | for
-qu | que before vowel
-que | that
-qui | who
-sa | his, her (fem)
-se | oneself
-ses | his (pl)
-son | his, her (masc)
-sur | on
-ta | thy (fem)
-te | thee
-tes | thy (pl)
-toi | thee
-ton | thy (masc)
-tu | thou
-un | a
-une | a
-vos | your (pl)
-votre | your
-vous | you
-
- | single letter forms
-
-c | c'
-d | d'
-j | j'
-l | l'
-à | to, at
-m | m'
-n | n'
-s | s'
-t | t'
-y | there
-
- | forms of être (not including the infinitive):
-été
-étée
-étées
-étés
-étant
-suis
-es
-est
-sommes
-êtes
-sont
-serai
-seras
-sera
-serons
-serez
-seront
-serais
-serait
-serions
-seriez
-seraient
-étais
-était
-étions
-étiez
-étaient
-fus
-fut
-fûmes
-fûtes
-furent
-sois
-soit
-soyons
-soyez
-soient
-fusse
-fusses
-fût
-fussions
-fussiez
-fussent
-
- | forms of avoir (not including the infinitive):
-ayant
-eu
-eue
-eues
-eus
-ai
-as
-avons
-avez
-ont
-aurai
-auras
-aura
-aurons
-aurez
-auront
-aurais
-aurait
-aurions
-auriez
-auraient
-avais
-avait
-avions
-aviez
-avaient
-eut
-eûmes
-eûtes
-eurent
-aie
-aies
-ait
-ayons
-ayez
-aient
-eusse
-eusses
-eût
-eussions
-eussiez
-eussent
-
- | Later additions (from Jean-Christophe Deschamps)
-ceci | this
-cela | that (added 11 Apr 2012. Omission reported by Adrien Grand)
-celà | that (incorrect, though common)
-cet | this
-cette | this
-ici | here
-ils | they
-les | the (pl)
-leurs | their (pl)
-quel | which
-quels | which
-quelle | which
-quelles | which
-sans | without
-soi | oneself
-""")
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.fr import FRENCH_STOPWORDS
class SearchFrench(SearchLanguage):
lang = 'fr'
language_name = 'French'
js_stemmer_rawcode = 'french-stemmer.js'
- stopwords = french_stopwords
+ stopwords = FRENCH_STOPWORDS
def __init__(self, options: dict[str, str]) -> None:
super().__init__(options)
diff --git a/sphinx/search/hu.py b/sphinx/search/hu.py
index 4e30ca407ee..e86159cb604 100644
--- a/sphinx/search/hu.py
+++ b/sphinx/search/hu.py
@@ -4,218 +4,15 @@
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-hungarian_stopwords = parse_stop_word("""
-| source: https://snowballstem.org/algorithms/hungarian/stop.txt
-| prepared by Anna Tordai
-a
-ahogy
-ahol
-aki
-akik
-akkor
-alatt
-által
-általában
-amely
-amelyek
-amelyekben
-amelyeket
-amelyet
-amelynek
-ami
-amit
-amolyan
-amíg
-amikor
-át
-abban
-ahhoz
-annak
-arra
-arról
-az
-azok
-azon
-azt
-azzal
-azért
-aztán
-azután
-azonban
-bár
-be
-belül
-benne
-cikk
-cikkek
-cikkeket
-csak
-de
-e
-eddig
-egész
-egy
-egyes
-egyetlen
-egyéb
-egyik
-egyre
-ekkor
-el
-elég
-ellen
-elő
-először
-előtt
-első
-én
-éppen
-ebben
-ehhez
-emilyen
-ennek
-erre
-ez
-ezt
-ezek
-ezen
-ezzel
-ezért
-és
-fel
-felé
-hanem
-hiszen
-hogy
-hogyan
-igen
-így
-illetve
-ill.
-ill
-ilyen
-ilyenkor
-ison
-ismét
-itt
-jó
-jól
-jobban
-kell
-kellett
-keresztül
-keressünk
-ki
-kívül
-között
-közül
-legalább
-lehet
-lehetett
-legyen
-lenne
-lenni
-lesz
-lett
-maga
-magát
-majd
-majd
-már
-más
-másik
-meg
-még
-mellett
-mert
-mely
-melyek
-mi
-mit
-míg
-miért
-milyen
-mikor
-minden
-mindent
-mindenki
-mindig
-mint
-mintha
-mivel
-most
-nagy
-nagyobb
-nagyon
-ne
-néha
-nekem
-neki
-nem
-néhány
-nélkül
-nincs
-olyan
-ott
-össze
-ő
-ők
-őket
-pedig
-persze
-rá
-s
-saját
-sem
-semmi
-sok
-sokat
-sokkal
-számára
-szemben
-szerint
-szinte
-talán
-tehát
-teljes
-tovább
-továbbá
-több
-úgy
-ugyanis
-új
-újabb
-újra
-után
-utána
-utolsó
-vagy
-vagyis
-valaki
-valami
-valamint
-való
-vagyok
-van
-vannak
-volt
-voltam
-voltak
-voltunk
-vissza
-vele
-viszont
-volna
-""")
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.hu import HUNGARIAN_STOPWORDS
class SearchHungarian(SearchLanguage):
lang = 'hu'
language_name = 'Hungarian'
js_stemmer_rawcode = 'hungarian-stemmer.js'
- stopwords = hungarian_stopwords
+ stopwords = HUNGARIAN_STOPWORDS
def __init__(self, options: dict[str, str]) -> None:
super().__init__(options)
diff --git a/sphinx/search/it.py b/sphinx/search/it.py
index b42e9699b33..a7052c9ae82 100644
--- a/sphinx/search/it.py
+++ b/sphinx/search/it.py
@@ -4,308 +4,15 @@
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-italian_stopwords = parse_stop_word("""
-| source: https://snowballstem.org/algorithms/italian/stop.txt
-ad | a (to) before vowel
-al | a + il
-allo | a + lo
-ai | a + i
-agli | a + gli
-all | a + l'
-agl | a + gl'
-alla | a + la
-alle | a + le
-con | with
-col | con + il
-coi | con + i (forms collo, cogli etc are now very rare)
-da | from
-dal | da + il
-dallo | da + lo
-dai | da + i
-dagli | da + gli
-dall | da + l'
-dagl | da + gll'
-dalla | da + la
-dalle | da + le
-di | of
-del | di + il
-dello | di + lo
-dei | di + i
-degli | di + gli
-dell | di + l'
-degl | di + gl'
-della | di + la
-delle | di + le
-in | in
-nel | in + el
-nello | in + lo
-nei | in + i
-negli | in + gli
-nell | in + l'
-negl | in + gl'
-nella | in + la
-nelle | in + le
-su | on
-sul | su + il
-sullo | su + lo
-sui | su + i
-sugli | su + gli
-sull | su + l'
-sugl | su + gl'
-sulla | su + la
-sulle | su + le
-per | through, by
-tra | among
-contro | against
-io | I
-tu | thou
-lui | he
-lei | she
-noi | we
-voi | you
-loro | they
-mio | my
-mia |
-miei |
-mie |
-tuo |
-tua |
-tuoi | thy
-tue |
-suo |
-sua |
-suoi | his, her
-sue |
-nostro | our
-nostra |
-nostri |
-nostre |
-vostro | your
-vostra |
-vostri |
-vostre |
-mi | me
-ti | thee
-ci | us, there
-vi | you, there
-lo | him, the
-la | her, the
-li | them
-le | them, the
-gli | to him, the
-ne | from there etc
-il | the
-un | a
-uno | a
-una | a
-ma | but
-ed | and
-se | if
-perché | why, because
-anche | also
-come | how
-dov | where (as dov')
-dove | where
-che | who, that
-chi | who
-cui | whom
-non | not
-più | more
-quale | who, that
-quanto | how much
-quanti |
-quanta |
-quante |
-quello | that
-quelli |
-quella |
-quelle |
-questo | this
-questi |
-questa |
-queste |
-si | yes
-tutto | all
-tutti | all
-
- | single letter forms:
-
-a | at
-c | as c' for ce or ci
-e | and
-i | the
-l | as l'
-o | or
-
- | forms of avere, to have (not including the infinitive):
-
-ho
-hai
-ha
-abbiamo
-avete
-hanno
-abbia
-abbiate
-abbiano
-avrò
-avrai
-avrà
-avremo
-avrete
-avranno
-avrei
-avresti
-avrebbe
-avremmo
-avreste
-avrebbero
-avevo
-avevi
-aveva
-avevamo
-avevate
-avevano
-ebbi
-avesti
-ebbe
-avemmo
-aveste
-ebbero
-avessi
-avesse
-avessimo
-avessero
-avendo
-avuto
-avuta
-avuti
-avute
-
- | forms of essere, to be (not including the infinitive):
-sono
-sei
-è
-siamo
-siete
-sia
-siate
-siano
-sarò
-sarai
-sarà
-saremo
-sarete
-saranno
-sarei
-saresti
-sarebbe
-saremmo
-sareste
-sarebbero
-ero
-eri
-era
-eravamo
-eravate
-erano
-fui
-fosti
-fu
-fummo
-foste
-furono
-fossi
-fosse
-fossimo
-fossero
-essendo
-
- | forms of fare, to do (not including the infinitive, fa, fat-):
-faccio
-fai
-facciamo
-fanno
-faccia
-facciate
-facciano
-farò
-farai
-farà
-faremo
-farete
-faranno
-farei
-faresti
-farebbe
-faremmo
-fareste
-farebbero
-facevo
-facevi
-faceva
-facevamo
-facevate
-facevano
-feci
-facesti
-fece
-facemmo
-faceste
-fecero
-facessi
-facesse
-facessimo
-facessero
-facendo
-
- | forms of stare, to be (not including the infinitive):
-sto
-stai
-sta
-stiamo
-stanno
-stia
-stiate
-stiano
-starò
-starai
-starà
-staremo
-starete
-staranno
-starei
-staresti
-starebbe
-staremmo
-stareste
-starebbero
-stavo
-stavi
-stava
-stavamo
-stavate
-stavano
-stetti
-stesti
-stette
-stemmo
-steste
-stettero
-stessi
-stesse
-stessimo
-stessero
-""")
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.it import ITALIAN_STOPWORDS
class SearchItalian(SearchLanguage):
lang = 'it'
language_name = 'Italian'
js_stemmer_rawcode = 'italian-stemmer.js'
- stopwords = italian_stopwords
+ stopwords = ITALIAN_STOPWORDS
def __init__(self, options: dict[str, str]) -> None:
super().__init__(options)
diff --git a/sphinx/search/nl.py b/sphinx/search/nl.py
index 39c14c76664..0692920efc4 100644
--- a/sphinx/search/nl.py
+++ b/sphinx/search/nl.py
@@ -4,119 +4,15 @@
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-dutch_stopwords = parse_stop_word("""
-| source: https://snowballstem.org/algorithms/dutch/stop.txt
-de | the
-en | and
-van | of, from
-ik | I, the ego
-te | (1) chez, at etc, (2) to, (3) too
-dat | that, which
-die | that, those, who, which
-in | in, inside
-een | a, an, one
-hij | he
-het | the, it
-niet | not, nothing, naught
-zijn | (1) to be, being, (2) his, one's, its
-is | is
-was | (1) was, past tense of all persons sing. of 'zijn' (to be) (2) wax, (3) the washing, (4) rise of river
-op | on, upon, at, in, up, used up
-aan | on, upon, to (as dative)
-met | with, by
-als | like, such as, when
-voor | (1) before, in front of, (2) furrow
-had | had, past tense all persons sing. of 'hebben' (have)
-er | there
-maar | but, only
-om | round, about, for etc
-hem | him
-dan | then
-zou | should/would, past tense all persons sing. of 'zullen'
-of | or, whether, if
-wat | what, something, anything
-mijn | possessive and noun 'mine'
-men | people, 'one'
-dit | this
-zo | so, thus, in this way
-door | through by
-over | over, across
-ze | she, her, they, them
-zich | oneself
-bij | (1) a bee, (2) by, near, at
-ook | also, too
-tot | till, until
-je | you
-mij | me
-uit | out of, from
-der | Old Dutch form of 'van der' still found in surnames
-daar | (1) there, (2) because
-haar | (1) her, their, them, (2) hair
-naar | (1) unpleasant, unwell etc, (2) towards, (3) as
-heb | present first person sing. of 'to have'
-hoe | how, why
-heeft | present third person sing. of 'to have'
-hebben | 'to have' and various parts thereof
-deze | this
-u | you
-want | (1) for, (2) mitten, (3) rigging
-nog | yet, still
-zal | 'shall', first and third person sing. of verb 'zullen' (will)
-me | me
-zij | she, they
-nu | now
-ge | 'thou', still used in Belgium and south Netherlands
-geen | none
-omdat | because
-iets | something, somewhat
-worden | to become, grow, get
-toch | yet, still
-al | all, every, each
-waren | (1) 'were' (2) to wander, (3) wares, (3)
-veel | much, many
-meer | (1) more, (2) lake
-doen | to do, to make
-toen | then, when
-moet | noun 'spot/mote' and present form of 'to must'
-ben | (1) am, (2) 'are' in interrogative second person singular of 'to be'
-zonder | without
-kan | noun 'can' and present form of 'to be able'
-hun | their, them
-dus | so, consequently
-alles | all, everything, anything
-onder | under, beneath
-ja | yes, of course
-eens | once, one day
-hier | here
-wie | who
-werd | imperfect third person sing. of 'become'
-altijd | always
-doch | yet, but etc
-wordt | present third person sing. of 'become'
-wezen | (1) to be, (2) 'been' as in 'been fishing', (3) orphans
-kunnen | to be able
-ons | us/our
-zelf | self
-tegen | against, towards, at
-na | after, near
-reeds | already
-wil | (1) present tense of 'want', (2) 'will', noun, (3) fender
-kon | could; past tense of 'to be able'
-niets | nothing
-uw | your
-iemand | somebody
-geweest | been; past participle of 'be'
-andere | other
-""") # NoQA: E501
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.nl import DUTCH_STOPWORDS
class SearchDutch(SearchLanguage):
lang = 'nl'
language_name = 'Dutch'
js_stemmer_rawcode = 'dutch-stemmer.js'
- stopwords = dutch_stopwords
+ stopwords = DUTCH_STOPWORDS
def __init__(self, options: dict[str, str]) -> None:
super().__init__(options)
diff --git a/sphinx/search/no.py b/sphinx/search/no.py
index 7a21e6728cb..a2bb88ee9a4 100644
--- a/sphinx/search/no.py
+++ b/sphinx/search/no.py
@@ -4,194 +4,15 @@
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-norwegian_stopwords = parse_stop_word("""
-| source: https://snowballstem.org/algorithms/norwegian/stop.txt
-og | and
-i | in
-jeg | I
-det | it/this/that
-at | to (w. inf.)
-en | a/an
-et | a/an
-den | it/this/that
-til | to
-er | is/am/are
-som | who/that
-på | on
-de | they / you(formal)
-med | with
-han | he
-av | of
-ikke | not
-ikkje | not *
-der | there
-så | so
-var | was/were
-meg | me
-seg | you
-men | but
-ett | one
-har | have
-om | about
-vi | we
-min | my
-mitt | my
-ha | have
-hadde | had
-hun | she
-nå | now
-over | over
-da | when/as
-ved | by/know
-fra | from
-du | you
-ut | out
-sin | your
-dem | them
-oss | us
-opp | up
-man | you/one
-kan | can
-hans | his
-hvor | where
-eller | or
-hva | what
-skal | shall/must
-selv | self (reflective)
-sjøl | self (reflective)
-her | here
-alle | all
-vil | will
-bli | become
-ble | became
-blei | became *
-blitt | have become
-kunne | could
-inn | in
-når | when
-være | be
-kom | come
-noen | some
-noe | some
-ville | would
-dere | you
-som | who/which/that
-deres | their/theirs
-kun | only/just
-ja | yes
-etter | after
-ned | down
-skulle | should
-denne | this
-for | for/because
-deg | you
-si | hers/his
-sine | hers/his
-sitt | hers/his
-mot | against
-å | to
-meget | much
-hvorfor | why
-dette | this
-disse | these/those
-uten | without
-hvordan | how
-ingen | none
-din | your
-ditt | your
-blir | become
-samme | same
-hvilken | which
-hvilke | which (plural)
-sånn | such a
-inni | inside/within
-mellom | between
-vår | our
-hver | each
-hvem | who
-vors | us/ours
-hvis | whose
-både | both
-bare | only/just
-enn | than
-fordi | as/because
-før | before
-mange | many
-også | also
-slik | just
-vært | been
-være | to be
-båe | both *
-begge | both
-siden | since
-dykk | your *
-dykkar | yours *
-dei | they *
-deira | them *
-deires | theirs *
-deim | them *
-di | your (fem.) *
-då | as/when *
-eg | I *
-ein | a/an *
-eit | a/an *
-eitt | a/an *
-elles | or *
-honom | he *
-hjå | at *
-ho | she *
-hoe | she *
-henne | her
-hennar | her/hers
-hennes | hers
-hoss | how *
-hossen | how *
-ikkje | not *
-ingi | noone *
-inkje | noone *
-korleis | how *
-korso | how *
-kva | what/which *
-kvar | where *
-kvarhelst | where *
-kven | who/whom *
-kvi | why *
-kvifor | why *
-me | we *
-medan | while *
-mi | my *
-mine | my *
-mykje | much *
-no | now *
-nokon | some (masc./neut.) *
-noka | some (fem.) *
-nokor | some *
-noko | some *
-nokre | some *
-si | his/hers *
-sia | since *
-sidan | since *
-so | so *
-somt | some *
-somme | some *
-um | about*
-upp | up *
-vere | be *
-vore | was *
-verte | become *
-vort | become *
-varte | became *
-vart | became *
-""")
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.no import NORWEGIAN_STOPWORDS
class SearchNorwegian(SearchLanguage):
lang = 'no'
language_name = 'Norwegian'
js_stemmer_rawcode = 'norwegian-stemmer.js'
- stopwords = norwegian_stopwords
+ stopwords = NORWEGIAN_STOPWORDS
def __init__(self, options: dict[str, str]) -> None:
super().__init__(options)
diff --git a/sphinx/search/pt.py b/sphinx/search/pt.py
index 82f1858f0de..9c5dfa05774 100644
--- a/sphinx/search/pt.py
+++ b/sphinx/search/pt.py
@@ -4,253 +4,15 @@
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-portuguese_stopwords = parse_stop_word("""
-| source: https://snowballstem.org/algorithms/portuguese/stop.txt
-de | of, from
-a | the; to, at; her
-o | the; him
-que | who, that
-e | and
-do | de + o
-da | de + a
-em | in
-um | a
-para | for
- | é from SER
-com | with
-não | not, no
-uma | a
-os | the; them
-no | em + o
-se | himself etc
-na | em + a
-por | for
-mais | more
-as | the; them
-dos | de + os
-como | as, like
-mas | but
- | foi from SER
-ao | a + o
-ele | he
-das | de + as
- | tem from TER
-à | a + a
-seu | his
-sua | her
-ou | or
- | ser from SER
-quando | when
-muito | much
- | há from HAV
-nos | em + os; us
-já | already, now
- | está from EST
-eu | I
-também | also
-só | only, just
-pelo | per + o
-pela | per + a
-até | up to
-isso | that
-ela | he
-entre | between
- | era from SER
-depois | after
-sem | without
-mesmo | same
-aos | a + os
- | ter from TER
-seus | his
-quem | whom
-nas | em + as
-me | me
-esse | that
-eles | they
- | estão from EST
-você | you
- | tinha from TER
- | foram from SER
-essa | that
-num | em + um
-nem | nor
-suas | her
-meu | my
-às | a + as
-minha | my
- | têm from TER
-numa | em + uma
-pelos | per + os
-elas | they
- | havia from HAV
- | seja from SER
-qual | which
- | será from SER
-nós | we
- | tenho from TER
-lhe | to him, her
-deles | of them
-essas | those
-esses | those
-pelas | per + as
-este | this
- | fosse from SER
-dele | of him
-
- | other words. There are many contractions such as naquele = em+aquele,
- | mo = me+o, but they are rare.
- | Indefinite article plural forms are also rare.
-
-tu | thou
-te | thee
-vocês | you (plural)
-vos | you
-lhes | to them
-meus | my
-minhas
-teu | thy
-tua
-teus
-tuas
-nosso | our
-nossa
-nossos
-nossas
-
-dela | of her
-delas | of them
-
-esta | this
-estes | these
-estas | these
-aquele | that
-aquela | that
-aqueles | those
-aquelas | those
-isto | this
-aquilo | that
-
- | forms of estar, to be (not including the infinitive):
-estou
-está
-estamos
-estão
-estive
-esteve
-estivemos
-estiveram
-estava
-estávamos
-estavam
-estivera
-estivéramos
-esteja
-estejamos
-estejam
-estivesse
-estivéssemos
-estivessem
-estiver
-estivermos
-estiverem
-
- | forms of haver, to have (not including the infinitive):
-hei
-há
-havemos
-hão
-houve
-houvemos
-houveram
-houvera
-houvéramos
-haja
-hajamos
-hajam
-houvesse
-houvéssemos
-houvessem
-houver
-houvermos
-houverem
-houverei
-houverá
-houveremos
-houverão
-houveria
-houveríamos
-houveriam
-
- | forms of ser, to be (not including the infinitive):
-sou
-somos
-são
-era
-éramos
-eram
-fui
-foi
-fomos
-foram
-fora
-fôramos
-seja
-sejamos
-sejam
-fosse
-fôssemos
-fossem
-for
-formos
-forem
-serei
-será
-seremos
-serão
-seria
-seríamos
-seriam
-
- | forms of ter, to have (not including the infinitive):
-tenho
-tem
-temos
-tém
-tinha
-tínhamos
-tinham
-tive
-teve
-tivemos
-tiveram
-tivera
-tivéramos
-tenha
-tenhamos
-tenham
-tivesse
-tivéssemos
-tivessem
-tiver
-tivermos
-tiverem
-terei
-terá
-teremos
-terão
-teria
-teríamos
-teriam
-""")
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.pt import PORTUGUESE_STOPWORDS
class SearchPortuguese(SearchLanguage):
lang = 'pt'
language_name = 'Portuguese'
js_stemmer_rawcode = 'portuguese-stemmer.js'
- stopwords = portuguese_stopwords
+ stopwords = PORTUGUESE_STOPWORDS
def __init__(self, options: dict[str, str]) -> None:
super().__init__(options)
diff --git a/sphinx/search/ro.py b/sphinx/search/ro.py
index e08ce5a09e3..6aebdc13249 100644
--- a/sphinx/search/ro.py
+++ b/sphinx/search/ro.py
@@ -11,7 +11,7 @@ class SearchRomanian(SearchLanguage):
lang = 'ro'
language_name = 'Romanian'
js_stemmer_rawcode = 'romanian-stemmer.js'
- stopwords: set[str] = set()
+ stopwords = frozenset()
def __init__(self, options: dict[str, str]) -> None:
super().__init__(options)
diff --git a/sphinx/search/ru.py b/sphinx/search/ru.py
index aeab09fa624..52ff533832e 100644
--- a/sphinx/search/ru.py
+++ b/sphinx/search/ru.py
@@ -4,243 +4,15 @@
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-russian_stopwords = parse_stop_word("""
-| source: https://snowballstem.org/algorithms/russian/stop.txt
-и | and
-в | in/into
-во | alternative form
-не | not
-что | what/that
-он | he
-на | on/onto
-я | i
-с | from
-со | alternative form
-как | how
-а | milder form of `no' (but)
-то | conjunction and form of `that'
-все | all
-она | she
-так | so, thus
-его | him
-но | but
-да | yes/and
-ты | thou
-к | towards, by
-у | around, chez
-же | intensifier particle
-вы | you
-за | beyond, behind
-бы | conditional/subj. particle
-по | up to, along
-только | only
-ее | her
-мне | to me
-было | it was
-вот | here is/are, particle
-от | away from
-меня | me
-еще | still, yet, more
-нет | no, there isnt/arent
-о | about
-из | out of
-ему | to him
-теперь | now
-когда | when
-даже | even
-ну | so, well
-вдруг | suddenly
-ли | interrogative particle
-если | if
-уже | already, but homonym of `narrower'
-или | or
-ни | neither
-быть | to be
-был | he was
-него | prepositional form of его
-до | up to
-вас | you accusative
-нибудь | indef. suffix preceded by hyphen
-опять | again
-уж | already, but homonym of `adder'
-вам | to you
-сказал | he said
-ведь | particle `after all'
-там | there
-потом | then
-себя | oneself
-ничего | nothing
-ей | to her
-может | usually with `быть' as `maybe'
-они | they
-тут | here
-где | where
-есть | there is/are
-надо | got to, must
-ней | prepositional form of ей
-для | for
-мы | we
-тебя | thee
-их | them, their
-чем | than
-была | she was
-сам | self
-чтоб | in order to
-без | without
-будто | as if
-человек | man, person, one
-чего | genitive form of `what'
-раз | once
-тоже | also
-себе | to oneself
-под | beneath
-жизнь | life
-будет | will be
-ж | short form of intensifer particle `же'
-тогда | then
-кто | who
-этот | this
-говорил | was saying
-того | genitive form of `that'
-потому | for that reason
-этого | genitive form of `this'
-какой | which
-совсем | altogether
-ним | prepositional form of `его', `они'
-здесь | here
-этом | prepositional form of `этот'
-один | one
-почти | almost
-мой | my
-тем | instrumental/dative plural of `тот', `то'
-чтобы | full form of `in order that'
-нее | her (acc.)
-кажется | it seems
-сейчас | now
-были | they were
-куда | where to
-зачем | why
-сказать | to say
-всех | all (acc., gen. preposn. plural)
-никогда | never
-сегодня | today
-можно | possible, one can
-при | by
-наконец | finally
-два | two
-об | alternative form of `о', about
-другой | another
-хоть | even
-после | after
-над | above
-больше | more
-тот | that one (masc.)
-через | across, in
-эти | these
-нас | us
-про | about
-всего | in all, only, of all
-них | prepositional form of `они' (they)
-какая | which, feminine
-много | lots
-разве | interrogative particle
-сказала | she said
-три | three
-эту | this, acc. fem. sing.
-моя | my, feminine
-впрочем | moreover, besides
-хорошо | good
-свою | ones own, acc. fem. sing.
-этой | oblique form of `эта', fem. `this'
-перед | in front of
-иногда | sometimes
-лучше | better
-чуть | a little
-том | preposn. form of `that one'
-нельзя | one must not
-такой | such a one
-им | to them
-более | more
-всегда | always
-конечно | of course
-всю | acc. fem. sing of `all'
-между | between
-
-
- | b: some paradigms
- |
- | personal pronouns
- |
- | я меня мне мной [мною]
- | ты тебя тебе тобой [тобою]
- | он его ему им [него, нему, ним]
- | она ее эи ею [нее, нэи, нею]
- | оно его ему им [него, нему, ним]
- |
- | мы нас нам нами
- | вы вас вам вами
- | они их им ими [них, ним, ними]
- |
- | себя себе собой [собою]
- |
- | demonstrative pronouns: этот (this), тот (that)
- |
- | этот эта это эти
- | этого эты это эти
- | этого этой этого этих
- | этому этой этому этим
- | этим этой этим [этою] этими
- | этом этой этом этих
- |
- | тот та то те
- | того ту то те
- | того той того тех
- | тому той тому тем
- | тем той тем [тою] теми
- | том той том тех
- |
- | determinative pronouns
- |
- | (a) весь (all)
- |
- | весь вся все все
- | всего всю все все
- | всего всей всего всех
- | всему всей всему всем
- | всем всей всем [всею] всеми
- | всем всей всем всех
- |
- | (b) сам (himself etc)
- |
- | сам сама само сами
- | самого саму само самих
- | самого самой самого самих
- | самому самой самому самим
- | самим самой самим [самою] самими
- | самом самой самом самих
- |
- | stems of verbs `to be', `to have', `to do' and modal
- |
- | быть бы буд быв есть суть
- | име
- | дел
- | мог мож мочь
- | уме
- | хоч хот
- | долж
- | можн
- | нужн
- | нельзя
-""")
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.ru import RUSSIAN_STOPWORDS
class SearchRussian(SearchLanguage):
lang = 'ru'
language_name = 'Russian'
js_stemmer_rawcode = 'russian-stemmer.js'
- stopwords = russian_stopwords
+ stopwords = RUSSIAN_STOPWORDS
def __init__(self, options: dict[str, str]) -> None:
super().__init__(options)
diff --git a/sphinx/search/sv.py b/sphinx/search/sv.py
index 9a8232ef2bf..bcfac2ba528 100644
--- a/sphinx/search/sv.py
+++ b/sphinx/search/sv.py
@@ -4,132 +4,15 @@
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-swedish_stopwords = parse_stop_word("""
-| source: https://snowballstem.org/algorithms/swedish/stop.txt
-och | and
-det | it, this/that
-att | to (with infinitive)
-i | in, at
-en | a
-jag | I
-hon | she
-som | who, that
-han | he
-på | on
-den | it, this/that
-med | with
-var | where, each
-sig | him(self) etc
-för | for
-så | so (also: seed)
-till | to
-är | is
-men | but
-ett | a
-om | if; around, about
-hade | had
-de | they, these/those
-av | of
-icke | not, no
-mig | me
-du | you
-henne | her
-då | then, when
-sin | his
-nu | now
-har | have
-inte | inte någon = no one
-hans | his
-honom | him
-skulle | 'sake'
-hennes | her
-där | there
-min | my
-man | one (pronoun)
-ej | nor
-vid | at, by, on (also: vast)
-kunde | could
-något | some etc
-från | from, off
-ut | out
-när | when
-efter | after, behind
-upp | up
-vi | we
-dem | them
-vara | be
-vad | what
-över | over
-än | than
-dig | you
-kan | can
-sina | his
-här | here
-ha | have
-mot | towards
-alla | all
-under | under (also: wonder)
-någon | some etc
-eller | or (else)
-allt | all
-mycket | much
-sedan | since
-ju | why
-denna | this/that
-själv | myself, yourself etc
-detta | this/that
-åt | to
-utan | without
-varit | was
-hur | how
-ingen | no
-mitt | my
-ni | you
-bli | to be, become
-blev | from bli
-oss | us
-din | thy
-dessa | these/those
-några | some etc
-deras | their
-blir | from bli
-mina | my
-samma | (the) same
-vilken | who, that
-er | you, your
-sådan | such a
-vår | our
-blivit | from bli
-dess | its
-inom | within
-mellan | between
-sådant | such a
-varför | why
-varje | each
-vilka | who, that
-ditt | thy
-vem | who
-vilket | who, that
-sitta | his
-sådana | such a
-vart | each
-dina | thy
-vars | whose
-vårt | our
-våra | our
-ert | your
-era | your
-vilkas | whose
-""")
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.sv import SWEDISH_STOPWORDS
class SearchSwedish(SearchLanguage):
lang = 'sv'
language_name = 'Swedish'
js_stemmer_rawcode = 'swedish-stemmer.js'
- stopwords = swedish_stopwords
+ stopwords = SWEDISH_STOPWORDS
def __init__(self, options: dict[str, str]) -> None:
super().__init__(options)
diff --git a/sphinx/search/tr.py b/sphinx/search/tr.py
index 82080bf5c61..674264f1928 100644
--- a/sphinx/search/tr.py
+++ b/sphinx/search/tr.py
@@ -11,7 +11,7 @@ class SearchTurkish(SearchLanguage):
lang = 'tr'
language_name = 'Turkish'
js_stemmer_rawcode = 'turkish-stemmer.js'
- stopwords: set[str] = set()
+ stopwords = frozenset()
def __init__(self, options: dict[str, str]) -> None:
super().__init__(options)
diff --git a/sphinx/search/zh.py b/sphinx/search/zh.py
index c063631f865..d22f765d520 100644
--- a/sphinx/search/zh.py
+++ b/sphinx/search/zh.py
@@ -9,6 +9,7 @@
import snowballstemmer
from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.en import ENGLISH_STOPWORDS
if TYPE_CHECKING:
from collections.abc import Iterator
@@ -32,18 +33,6 @@ def cut_for_search(sentence: str, HMM: bool = True) -> Iterator[str]:
)
del jieba
-english_stopwords = {
- 'a', 'and', 'are', 'as', 'at',
- 'be', 'but', 'by',
- 'for',
- 'if', 'in', 'into', 'is', 'it',
- 'near', 'no', 'not',
- 'of', 'on', 'or',
- 'such',
- 'that', 'the', 'their', 'then', 'there', 'these', 'they', 'this', 'to',
- 'was', 'will', 'with',
-} # fmt: skip
-
js_porter_stemmer = """
/**
* Porter Stemmer
@@ -237,7 +226,7 @@ class SearchChinese(SearchLanguage):
lang = 'zh'
language_name = 'Chinese'
js_stemmer_code = js_porter_stemmer
- stopwords = english_stopwords
+ stopwords = ENGLISH_STOPWORDS
latin1_letters = re.compile(r'[a-zA-Z0-9_]+')
def __init__(self, options: dict[str, str]) -> None:
From 0296bbe3af1c4821752e60943a15a8b9fdf5667f Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sun, 18 May 2025 04:52:35 +0100
Subject: [PATCH 071/466] Bump types-defusedxml to 0.7.0.20250516 (#13563)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 15520dc1841..d2bc85e02b8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -96,7 +96,7 @@ lint = [
"mypy==1.15.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
- "types-defusedxml==0.7.0.20240218",
+ "types-defusedxml==0.7.0.20250516",
"types-docutils==0.21.0.20250514",
"types-Pillow==10.2.0.20240822",
"types-Pygments==2.19.0.20250514",
@@ -164,7 +164,7 @@ types = [
type-stubs = [
# align with versions used elsewhere
"types-colorama==0.4.15.20240311",
- "types-defusedxml==0.7.0.20240218",
+ "types-defusedxml==0.7.0.20250516",
"types-docutils==0.21.0.20250514",
"types-Pillow==10.2.0.20240822",
"types-Pygments==2.19.0.20250514",
From 54ca93372f6e22f563071fb39c9827f15543f1c4 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sun, 18 May 2025 04:52:48 +0100
Subject: [PATCH 072/466] Bump types-pygments to 2.19.0.20250516 (#13565)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index d2bc85e02b8..3d166f9427a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -99,7 +99,7 @@ lint = [
"types-defusedxml==0.7.0.20250516",
"types-docutils==0.21.0.20250514",
"types-Pillow==10.2.0.20240822",
- "types-Pygments==2.19.0.20250514",
+ "types-Pygments==2.19.0.20250516",
"types-requests==2.32.0.20250515", # align with requests
"types-urllib3==1.26.25.14",
"pyright==1.1.400",
@@ -167,7 +167,7 @@ type-stubs = [
"types-defusedxml==0.7.0.20250516",
"types-docutils==0.21.0.20250514",
"types-Pillow==10.2.0.20240822",
- "types-Pygments==2.19.0.20250514",
+ "types-Pygments==2.19.0.20250516",
"types-requests==2.32.0.20250515",
"types-urllib3==1.26.25.14",
]
From 647d9bdbc6396c3d52d6690f989b3addcee8581d Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sun, 18 May 2025 04:53:20 +0100
Subject: [PATCH 073/466] Bump Ruff to 0.11.10 (#13566)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 3d166f9427a..e18dbb8e91e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -92,7 +92,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.9",
+ "ruff==0.11.10",
"mypy==1.15.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
@@ -135,7 +135,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.9",
+ "ruff==0.11.10",
"sphinx-lint>=0.9",
]
package = [
From 63fdb590687ad431ef64c23c65492a3601ae5813 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Mon, 19 May 2025 00:48:32 +0100
Subject: [PATCH 074/466] Bump pypi-attestations to 0.0.26
---
pyproject.toml | 4 ++--
utils/convert_attestations.py | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index e18dbb8e91e..39b18f23104 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -104,7 +104,7 @@ lint = [
"types-urllib3==1.26.25.14",
"pyright==1.1.400",
"pytest>=8.0",
- "pypi-attestations==0.0.25",
+ "pypi-attestations==0.0.26",
"betterproto==2.0.0b6",
]
test = [
@@ -141,7 +141,7 @@ lint = [
package = [
"betterproto==2.0.0b6", # resolution fails without betterproto
"build",
- "pypi-attestations==0.0.25",
+ "pypi-attestations==0.0.26",
"twine>=6.1",
]
test = [
diff --git a/utils/convert_attestations.py b/utils/convert_attestations.py
index d4516c3c3f4..c62fd5a057c 100644
--- a/utils/convert_attestations.py
+++ b/utils/convert_attestations.py
@@ -7,7 +7,7 @@
# /// script
# requires-python = ">=3.11"
# dependencies = [
-# "pypi-attestations==0.0.25",
+# "pypi-attestations==0.0.26",
# "betterproto==2.0.0b6",
# ]
# ///
From d4c036a90c25d5122215c1a2673db3debf631c27 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Mon, 19 May 2025 01:31:50 +0100
Subject: [PATCH 075/466] Update JavaScript sources for language stemmers to
Snowball 3.0.1 (#13573)
---
sphinx/search/minified-js/README.rst | 7 +
sphinx/search/minified-js/arabic-stemmer.js | 1 +
sphinx/search/minified-js/armenian-stemmer.js | 1 +
sphinx/search/minified-js/base-stemmer.js | 2 +-
sphinx/search/minified-js/basque-stemmer.js | 1 +
sphinx/search/minified-js/catalan-stemmer.js | 1 +
sphinx/search/minified-js/danish-stemmer.js | 2 +-
sphinx/search/minified-js/dutch-stemmer.js | 2 +-
.../minified-js/dutch_porter-stemmer.js | 1 +
sphinx/search/minified-js/english-stemmer.js | 1 +
.../search/minified-js/esperanto-stemmer.js | 1 +
sphinx/search/minified-js/estonian-stemmer.js | 1 +
sphinx/search/minified-js/finnish-stemmer.js | 2 +-
sphinx/search/minified-js/french-stemmer.js | 2 +-
sphinx/search/minified-js/german-stemmer.js | 2 +-
sphinx/search/minified-js/greek-stemmer.js | 1 +
sphinx/search/minified-js/hindi-stemmer.js | 1 +
.../search/minified-js/hungarian-stemmer.js | 2 +-
.../search/minified-js/indonesian-stemmer.js | 1 +
sphinx/search/minified-js/irish-stemmer.js | 1 +
sphinx/search/minified-js/italian-stemmer.js | 2 +-
.../search/minified-js/lithuanian-stemmer.js | 1 +
sphinx/search/minified-js/nepali-stemmer.js | 1 +
.../search/minified-js/norwegian-stemmer.js | 2 +-
sphinx/search/minified-js/porter-stemmer.js | 2 +-
.../search/minified-js/portuguese-stemmer.js | 2 +-
sphinx/search/minified-js/romanian-stemmer.js | 2 +-
sphinx/search/minified-js/russian-stemmer.js | 2 +-
sphinx/search/minified-js/serbian-stemmer.js | 1 +
sphinx/search/minified-js/spanish-stemmer.js | 2 +-
sphinx/search/minified-js/swedish-stemmer.js | 2 +-
sphinx/search/minified-js/tamil-stemmer.js | 1 +
sphinx/search/minified-js/turkish-stemmer.js | 2 +-
sphinx/search/minified-js/yiddish-stemmer.js | 1 +
.../search/non-minified-js/arabic-stemmer.js | 1612 ++++++
.../non-minified-js/armenian-stemmer.js | 350 ++
sphinx/search/non-minified-js/base-stemmer.js | 186 +-
.../search/non-minified-js/basque-stemmer.js | 736 +++
.../search/non-minified-js/catalan-stemmer.js | 886 ++++
.../search/non-minified-js/danish-stemmer.js | 92 +-
.../search/non-minified-js/dutch-stemmer.js | 2208 ++++++--
.../non-minified-js/dutch_porter-stemmer.js | 637 +++
.../search/non-minified-js/english-stemmer.js | 1066 ++++
.../non-minified-js/esperanto-stemmer.js | 762 +++
.../non-minified-js/estonian-stemmer.js | 1088 ++++
.../search/non-minified-js/finnish-stemmer.js | 223 +-
.../search/non-minified-js/french-stemmer.js | 402 +-
.../search/non-minified-js/german-stemmer.js | 438 +-
.../search/non-minified-js/greek-stemmer.js | 2873 +++++++++++
.../search/non-minified-js/hindi-stemmer.js | 181 +
.../non-minified-js/hungarian-stemmer.js | 152 +-
.../non-minified-js/indonesian-stemmer.js | 409 ++
.../search/non-minified-js/irish-stemmer.js | 378 ++
.../search/non-minified-js/italian-stemmer.js | 227 +-
.../non-minified-js/lithuanian-stemmer.js | 534 ++
.../search/non-minified-js/nepali-stemmer.js | 282 +
.../non-minified-js/norwegian-stemmer.js | 154 +-
.../search/non-minified-js/porter-stemmer.js | 213 +-
.../non-minified-js/portuguese-stemmer.js | 225 +-
.../non-minified-js/romanian-stemmer.js | 381 +-
.../search/non-minified-js/russian-stemmer.js | 125 +-
.../search/non-minified-js/serbian-stemmer.js | 4516 +++++++++++++++++
.../search/non-minified-js/spanish-stemmer.js | 225 +-
.../search/non-minified-js/swedish-stemmer.js | 188 +-
.../search/non-minified-js/tamil-stemmer.js | 1189 +++++
.../search/non-minified-js/turkish-stemmer.js | 649 +--
.../search/non-minified-js/yiddish-stemmer.js | 1160 +++++
67 files changed, 22216 insertions(+), 2587 deletions(-)
create mode 100644 sphinx/search/minified-js/README.rst
create mode 100644 sphinx/search/minified-js/arabic-stemmer.js
create mode 100644 sphinx/search/minified-js/armenian-stemmer.js
create mode 100644 sphinx/search/minified-js/basque-stemmer.js
create mode 100644 sphinx/search/minified-js/catalan-stemmer.js
create mode 100644 sphinx/search/minified-js/dutch_porter-stemmer.js
create mode 100644 sphinx/search/minified-js/english-stemmer.js
create mode 100644 sphinx/search/minified-js/esperanto-stemmer.js
create mode 100644 sphinx/search/minified-js/estonian-stemmer.js
create mode 100644 sphinx/search/minified-js/greek-stemmer.js
create mode 100644 sphinx/search/minified-js/hindi-stemmer.js
create mode 100644 sphinx/search/minified-js/indonesian-stemmer.js
create mode 100644 sphinx/search/minified-js/irish-stemmer.js
create mode 100644 sphinx/search/minified-js/lithuanian-stemmer.js
create mode 100644 sphinx/search/minified-js/nepali-stemmer.js
create mode 100644 sphinx/search/minified-js/serbian-stemmer.js
create mode 100644 sphinx/search/minified-js/tamil-stemmer.js
create mode 100644 sphinx/search/minified-js/yiddish-stemmer.js
create mode 100644 sphinx/search/non-minified-js/arabic-stemmer.js
create mode 100644 sphinx/search/non-minified-js/armenian-stemmer.js
create mode 100644 sphinx/search/non-minified-js/basque-stemmer.js
create mode 100644 sphinx/search/non-minified-js/catalan-stemmer.js
create mode 100644 sphinx/search/non-minified-js/dutch_porter-stemmer.js
create mode 100644 sphinx/search/non-minified-js/english-stemmer.js
create mode 100644 sphinx/search/non-minified-js/esperanto-stemmer.js
create mode 100644 sphinx/search/non-minified-js/estonian-stemmer.js
create mode 100644 sphinx/search/non-minified-js/greek-stemmer.js
create mode 100644 sphinx/search/non-minified-js/hindi-stemmer.js
create mode 100644 sphinx/search/non-minified-js/indonesian-stemmer.js
create mode 100644 sphinx/search/non-minified-js/irish-stemmer.js
create mode 100644 sphinx/search/non-minified-js/lithuanian-stemmer.js
create mode 100644 sphinx/search/non-minified-js/nepali-stemmer.js
create mode 100644 sphinx/search/non-minified-js/serbian-stemmer.js
create mode 100644 sphinx/search/non-minified-js/tamil-stemmer.js
create mode 100644 sphinx/search/non-minified-js/yiddish-stemmer.js
diff --git a/sphinx/search/minified-js/README.rst b/sphinx/search/minified-js/README.rst
new file mode 100644
index 00000000000..e14b36aef3b
--- /dev/null
+++ b/sphinx/search/minified-js/README.rst
@@ -0,0 +1,7 @@
+Regenerate minified files with::
+
+ npm install -g uglify-js
+ for f in $(ls sphinx/search/non-minified-js/); \
+ do echo $f && \
+ npx uglifyjs sphinx/search/non-minified-js/$f --compress --mangle --output sphinx/search/minified-js/$f; \
+ done
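
For readers unfamiliar with the tooling: the uglifyjs pass above is what turns each readable file in non-minified-js/ into the single-line files added below. A hedged illustration of the effect on a made-up fragment (illustrative only, not taken from these files)::

    // before, in the style of the non-minified-js sources:
    function stemWord(word) {
        var stemmed = word.toLowerCase();
        return stemmed;
    }
    // after `uglifyjs --compress --mangle`, approximately:
    // function stemWord(r){return r.toLowerCase()}
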
diff --git a/sphinx/search/minified-js/arabic-stemmer.js b/sphinx/search/minified-js/arabic-stemmer.js
new file mode 100644
index 00000000000..c8e178a75b4
--- /dev/null
+++ b/sphinx/search/minified-js/arabic-stemmer.js
@@ -0,0 +1 @@
+var ArabicStemmer=function(){var o=new BaseStemmer,l=[["ـ",-1,1],["ً",-1,1],["ٌ",-1,1],["ٍ",-1,1],["َ",-1,1],["ُ",-1,1],["ِ",-1,1],["ّ",-1,1],["ْ",-1,1],["٠",-1,2],["١",-1,3],["٢",-1,4],["٣",-1,5],["٤",-1,6],["٥",-1,7],["٦",-1,8],["٧",-1,9],["٨",-1,10],["٩",-1,11],["ﺀ",-1,12],["ﺁ",-1,16],["ﺂ",-1,16],["ﺃ",-1,13],["ﺄ",-1,13],["ﺅ",-1,17],["ﺆ",-1,17],["ﺇ",-1,14],["ﺈ",-1,14],["ﺉ",-1,15],["ﺊ",-1,15],["ﺋ",-1,15],["ﺌ",-1,15],["ﺍ",-1,18],["ﺎ",-1,18],["ﺏ",-1,19],["ﺐ",-1,19],["ﺑ",-1,19],["ﺒ",-1,19],["ﺓ",-1,20],["ﺔ",-1,20],["ﺕ",-1,21],["ﺖ",-1,21],["ﺗ",-1,21],["ﺘ",-1,21],["ﺙ",-1,22],["ﺚ",-1,22],["ﺛ",-1,22],["ﺜ",-1,22],["ﺝ",-1,23],["ﺞ",-1,23],["ﺟ",-1,23],["ﺠ",-1,23],["ﺡ",-1,24],["ﺢ",-1,24],["ﺣ",-1,24],["ﺤ",-1,24],["ﺥ",-1,25],["ﺦ",-1,25],["ﺧ",-1,25],["ﺨ",-1,25],["ﺩ",-1,26],["ﺪ",-1,26],["ﺫ",-1,27],["ﺬ",-1,27],["ﺭ",-1,28],["ﺮ",-1,28],["ﺯ",-1,29],["ﺰ",-1,29],["ﺱ",-1,30],["ﺲ",-1,30],["ﺳ",-1,30],["ﺴ",-1,30],["ﺵ",-1,31],["ﺶ",-1,31],["ﺷ",-1,31],["ﺸ",-1,31],["ﺹ",-1,32],["ﺺ",-1,32],["ﺻ",-1,32],["ﺼ",-1,32],["ﺽ",-1,33],["ﺾ",-1,33],["ﺿ",-1,33],["ﻀ",-1,33],["ﻁ",-1,34],["ﻂ",-1,34],["ﻃ",-1,34],["ﻄ",-1,34],["ﻅ",-1,35],["ﻆ",-1,35],["ﻇ",-1,35],["ﻈ",-1,35],["ﻉ",-1,36],["ﻊ",-1,36],["ﻋ",-1,36],["ﻌ",-1,36],["ﻍ",-1,37],["ﻎ",-1,37],["ﻏ",-1,37],["ﻐ",-1,37],["ﻑ",-1,38],["ﻒ",-1,38],["ﻓ",-1,38],["ﻔ",-1,38],["ﻕ",-1,39],["ﻖ",-1,39],["ﻗ",-1,39],["ﻘ",-1,39],["ﻙ",-1,40],["ﻚ",-1,40],["ﻛ",-1,40],["ﻜ",-1,40],["ﻝ",-1,41],["ﻞ",-1,41],["ﻟ",-1,41],["ﻠ",-1,41],["ﻡ",-1,42],["ﻢ",-1,42],["ﻣ",-1,42],["ﻤ",-1,42],["ﻥ",-1,43],["ﻦ",-1,43],["ﻧ",-1,43],["ﻨ",-1,43],["ﻩ",-1,44],["ﻪ",-1,44],["ﻫ",-1,44],["ﻬ",-1,44],["ﻭ",-1,45],["ﻮ",-1,45],["ﻯ",-1,46],["ﻰ",-1,46],["ﻱ",-1,47],["ﻲ",-1,47],["ﻳ",-1,47],["ﻴ",-1,47],["ﻵ",-1,51],["ﻶ",-1,51],["ﻷ",-1,49],["ﻸ",-1,49],["ﻹ",-1,50],["ﻺ",-1,50],["ﻻ",-1,48],["ﻼ",-1,48]],b=[["آ",-1,1],["أ",-1,1],["ؤ",-1,1],["إ",-1,1],["ئ",-1,1]],m=[["آ",-1,1],["أ",-1,1],["ؤ",-1,2],["إ",-1,1],["ئ",-1,3]],_=[["ال",-1,2],["بال",-1,1],["كال",-1,1],["لل",-1,2]],k=[["أآ",-1,2],["أأ",-1,1],["أؤ",-1,1],["أإ",-1,4],["أا",-1,3]],g=[["ف",-1,1],["و",-1,1]],d=[["ال",-1,2],["بال",-1,1],["كال",-1,1],["لل",-1,2]],h=[["ب",-1,1],["با",0,-1],["بب",0,2],["كك",-1,3]],v=[["سأ",-1,4],["ست",-1,2],["سن",-1,3],["سي",-1,1]],w=[["تست",-1,1],["نست",-1,1],["يست",-1,1]],C=[["كما",-1,3],["هما",-1,3],["نا",-1,2],["ها",-1,2],["ك",-1,1],["كم",-1,2],["هم",-1,2],["هن",-1,2],["ه",-1,1],["ي",-1,1]],S=[["ن",-1,1]],r=[["ا",-1,1],["و",-1,1],["ي",-1,1]],e=[["ات",-1,1]],i=[["ت",-1,1]],q=[["ة",-1,1]],A=[["ي",-1,1]],B=[["كما",-1,3],["هما",-1,3],["نا",-1,2],["ها",-1,2],["ك",-1,1],["كم",-1,2],["هم",-1,2],["كن",-1,2],["هن",-1,2],["ه",-1,1],["كمو",-1,3],["ني",-1,2]],c=[["ا",-1,1],["تا",0,2],["تما",0,4],["نا",0,2],["ت",-1,1],["ن",-1,1],["ان",5,3],["تن",5,2],["ون",5,3],["ين",5,3],["ي",-1,1]],W=[["وا",-1,1],["تم",-1,1]],j=[["و",-1,1],["تمو",0,2]],p=[["ى",-1,1]],x=!1,y=!1,z=!1;function D(){return o.ket=o.cursor,0!=o.find_among_b(r)&&(o.bra=o.cursor,!(o.current.length<=4||!o.slice_del()))}function E(){return o.ket=o.cursor,0!=o.find_among_b(e)&&(o.bra=o.cursor,!(o.current.length<5||!o.slice_del()))}function F(){return o.ket=o.cursor,0!=o.find_among_b(i)&&(o.bra=o.cursor,!(o.current.length<4||!o.slice_del()))}function G(){var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(c))){switch(o.bra=o.cursor,r){case 1:if(o.current.length<4)return;if(o.slice_del())break;return;case 2:if(o.current.length<5)return;if(o.slice_del())break;return;case 3:if(o.current.length<=5)return;if(o.slice_del())break;return;case 4:if(o.current.length<6)return;if(o.slice_del())break;return}return 
1}}this.stem=function(){x=!(y=z=!0);var r=o.cursor,r=((()=>{var r;if(o.bra=o.cursor,0!=(r=o.find_among(_)))switch(o.ket=o.cursor,r){case 1:if(o.current.length<=4)return;x=!(y=!(z=!0));break;case 2:if(o.current.length<=3)return;x=!(y=!(z=!0))}})(),o.cursor=r,(()=>{for(var r,e=o.cursor;;){var i=o.cursor;r:{var c=o.cursor;if(o.bra=o.cursor,0!=(r=o.find_among(l)))switch(o.ket=o.cursor,r){case 1:if(o.slice_del())break;return;case 2:if(o.slice_from("0"))break;return;case 3:if(o.slice_from("1"))break;return;case 4:if(o.slice_from("2"))break;return;case 5:if(o.slice_from("3"))break;return;case 6:if(o.slice_from("4"))break;return;case 7:if(o.slice_from("5"))break;return;case 8:if(o.slice_from("6"))break;return;case 9:if(o.slice_from("7"))break;return;case 10:if(o.slice_from("8"))break;return;case 11:if(o.slice_from("9"))break;return;case 12:if(o.slice_from("ء"))break;return;case 13:if(o.slice_from("أ"))break;return;case 14:if(o.slice_from("إ"))break;return;case 15:if(o.slice_from("ئ"))break;return;case 16:if(o.slice_from("آ"))break;return;case 17:if(o.slice_from("ؤ"))break;return;case 18:if(o.slice_from("ا"))break;return;case 19:if(o.slice_from("ب"))break;return;case 20:if(o.slice_from("ة"))break;return;case 21:if(o.slice_from("ت"))break;return;case 22:if(o.slice_from("ث"))break;return;case 23:if(o.slice_from("ج"))break;return;case 24:if(o.slice_from("ح"))break;return;case 25:if(o.slice_from("خ"))break;return;case 26:if(o.slice_from("د"))break;return;case 27:if(o.slice_from("ذ"))break;return;case 28:if(o.slice_from("ر"))break;return;case 29:if(o.slice_from("ز"))break;return;case 30:if(o.slice_from("س"))break;return;case 31:if(o.slice_from("ش"))break;return;case 32:if(o.slice_from("ص"))break;return;case 33:if(o.slice_from("ض"))break;return;case 34:if(o.slice_from("ط"))break;return;case 35:if(o.slice_from("ظ"))break;return;case 36:if(o.slice_from("ع"))break;return;case 37:if(o.slice_from("غ"))break;return;case 38:if(o.slice_from("ف"))break;return;case 39:if(o.slice_from("ق"))break;return;case 40:if(o.slice_from("ك"))break;return;case 41:if(o.slice_from("ل"))break;return;case 42:if(o.slice_from("م"))break;return;case 43:if(o.slice_from("ن"))break;return;case 44:if(o.slice_from("ه"))break;return;case 45:if(o.slice_from("و"))break;return;case 46:if(o.slice_from("ى"))break;return;case 47:if(o.slice_from("ي"))break;return;case 48:if(o.slice_from("لا"))break;return;case 49:if(o.slice_from("لأ"))break;return;case 50:if(o.slice_from("لإ"))break;return;case 51:if(o.slice_from("لآ"))break;return}else{if(o.cursor=c,o.cursor>=o.limit)break r;o.cursor++}continue}o.cursor=i;break}o.cursor=e})(),o.limit_backward=o.cursor,o.cursor=o.limit,o.limit-o.cursor);r:e:{var e=o.limit-o.cursor;i:if(y){c:{var i=o.limit-o.cursor;s:{for(var c=1;;){var s=o.limit-o.cursor;if(!(()=>{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(B))){switch(o.bra=o.cursor,r){case 1:if(o.current.length<4)return;if(o.slice_del())break;return;case 2:if(o.current.length<5)return;if(o.slice_del())break;return;case 3:if(o.current.length<6)return;if(o.slice_del())break;return}return 1}})()){o.cursor=o.limit-s;break}c--}if(!(0{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(j))){switch(o.bra=o.cursor,r){case 1:if(o.current.length<4)return;if(o.slice_del())break;return;case 2:if(o.current.length<6)return;if(o.slice_del())break;return}return 1}})())){if(o.cursor=o.limit-t,o.cursor<=o.limit_backward)break s;o.cursor--}break 
c}}if(o.cursor=o.limit-i,(o.ket=o.cursor,0==o.find_among_b(W)||(o.bra=o.cursor,o.current.length<5)||!o.slice_del())&&(o.cursor=o.limit-i,!G()))break i}break e}if(o.cursor=o.limit-e,z){var u=o.limit-o.cursor;i:c:{var a=o.limit-o.cursor;if(o.ket=o.cursor,0==o.find_among_b(q)||(o.bra=o.cursor,o.current.length<4)||!o.slice_del()){o.cursor=o.limit-a;s:if(!x&&(()=>{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(C))){switch(o.bra=o.cursor,r){case 1:if(o.current.length<4)return;if(o.slice_del())break;return;case 2:if(o.current.length<5)return;if(o.slice_del())break;return;case 3:if(o.current.length<6)return;if(o.slice_del())break;return}return 1}})()){var n=o.limit-o.cursor;if(!D()&&(o.cursor=o.limit-n,!E())&&(o.cursor=o.limit-n,!F())){if(o.cursor=o.limit-n,o.cursor<=o.limit_backward)break s;o.cursor--}break c}if(o.cursor=o.limit-a,o.ket=o.cursor,!(0==o.find_among_b(S)||(o.bra=o.cursor,o.current.length<=5))&&o.slice_del()){n=o.limit-o.cursor;if(D()||(o.cursor=o.limit-n,E())||(o.cursor=o.limit-n,F()))break c}if(o.cursor=o.limit-a,(x||!D())&&(o.cursor=o.limit-a,!E())){o.cursor=o.limit-u;break i}}}if(o.ket=o.cursor,!(0==o.find_among_b(A)||(o.bra=o.cursor,o.current.length<3))&&o.slice_del())break e}if(o.cursor=o.limit-e,o.ket=o.cursor,0==o.find_among_b(p)||(o.bra=o.cursor,!o.slice_from("ي")))break r}o.cursor=o.limit-r,o.cursor=o.limit_backward;r=o.cursor;r:{var f=o.cursor,f=((()=>{var r;if(o.bra=o.cursor,0!=(r=o.find_among(k))){switch(o.ket=o.cursor,r){case 1:if(o.current.length<=3)return;if(o.slice_from("أ"))break;return;case 2:if(o.current.length<=3)return;if(o.slice_from("آ"))break;return;case 3:if(o.current.length<=3)return;if(o.slice_from("ا"))break;return;case 4:if(o.current.length<=3)return;if(o.slice_from("إ"))break;return}return 1}})()||(o.cursor=f),o.cursor),f=((()=>{var r;return o.bra=o.cursor,0==o.find_among(g)||(o.ket=o.cursor,o.current.length<=3)||(r=o.cursor,o.eq_s("ا"))?void 0:(o.cursor=r,!!o.slice_del())})()||(o.cursor=f),o.cursor);if(!(()=>{var r;if(o.bra=o.cursor,0!=(r=o.find_among(d))){switch(o.ket=o.cursor,r){case 1:if(o.current.length<=5)return;if(o.slice_del())break;return;case 2:if(o.current.length<=4)return;if(o.slice_del())break;return}return 1}})()&&(o.cursor=f,!z||!(()=>{var r;if(o.bra=o.cursor,0!=(r=o.find_among(h))){switch(o.ket=o.cursor,r){case 1:if(o.current.length<=3)return;if(o.slice_del())break;return;case 2:if(o.current.length<=3)return;if(o.slice_from("ب"))break;return;case 3:if(o.current.length<=3)return;if(o.slice_from("ك"))break;return}return 1}})())){if(o.cursor=f,!y)break r;f=o.cursor;if((()=>{var r;if(o.bra=o.cursor,0!=(r=o.find_among(v))){switch(o.ket=o.cursor,r){case 1:if(o.current.length<=4)return;if(o.slice_from("ي"))break;return;case 2:if(o.current.length<=4)return;if(o.slice_from("ت"))break;return;case 3:if(o.current.length<=4)return;if(o.slice_from("ن"))break;return;case 4:if(o.current.length<=4)return;if(o.slice_from("أ"))break;return}return 1}})()||(o.cursor=f),o.bra=o.cursor,0==o.find_among(w)||(o.ket=o.cursor,o.current.length<=4)||(z=!(y=!0),!o.slice_from("است")))break r}}return o.cursor=r,(()=>{var r,e=o.cursor;if(o.limit_backward=o.cursor,o.cursor=o.limit,o.ket=o.cursor,0!=o.find_among_b(b)){if(o.bra=o.cursor,!o.slice_from("ء"))return;o.cursor=o.limit_backward}for(o.cursor=e,e=o.cursor;;){var i=o.cursor;r:{var c=o.cursor;if(o.bra=o.cursor,0!=(r=o.find_among(m)))switch(o.ket=o.cursor,r){case 1:if(o.slice_from("ا"))break;return;case 2:if(o.slice_from("و"))break;return;case 3:if(o.slice_from("ي"))break;return}else{if(o.cursor=c,o.cursor>=o.limit)break 
r;o.cursor++}continue}o.cursor=i;break}o.cursor=e})(),!0},this.stemWord=function(r){return o.setCurrent(r),this.stem(),o.getCurrent()}};
\ No newline at end of file
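
Each generated stemmer ends with the same two entry points, visible at the end of the file above: stem(), and stemWord(word), which wraps setCurrent/stem/getCurrent on the internal BaseStemmer. A minimal usage sketch, assuming base-stemmer.js has been evaluated first (every stemmer calls `new BaseStemmer()` on construction); the sample word is illustrative::

    // Load order matters: base-stemmer.js first, then the language file.
    var stemmer = new ArabicStemmer();
    // stemWord(word) sets the internal buffer, runs stem(), and reads it back.
    var stemmed = stemmer.stemWord('والكتاب');
    console.log(stemmed);
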
diff --git a/sphinx/search/minified-js/armenian-stemmer.js b/sphinx/search/minified-js/armenian-stemmer.js
new file mode 100644
index 00000000000..6b5c33afba2
--- /dev/null
+++ b/sphinx/search/minified-js/armenian-stemmer.js
@@ -0,0 +1 @@
+var ArmenianStemmer=function(){var o=new BaseStemmer,u=[["րորդ",-1,1],["երորդ",0,1],["ալի",-1,1],["ակի",-1,1],["որակ",-1,1],["եղ",-1,1],["ական",-1,1],["արան",-1,1],["են",-1,1],["եկեն",8,1],["երեն",8,1],["որէն",-1,1],["ին",-1,1],["գին",12,1],["ովին",12,1],["լայն",-1,1],["վուն",-1,1],["պես",-1,1],["իվ",-1,1],["ատ",-1,1],["ավետ",-1,1],["կոտ",-1,1],["բար",-1,1]],c=[["ա",-1,1],["ացա",0,1],["եցա",0,1],["վե",-1,1],["ացրի",-1,1],["ացի",-1,1],["եցի",-1,1],["վեցի",6,1],["ալ",-1,1],["ըալ",8,1],["անալ",8,1],["ենալ",8,1],["ացնալ",8,1],["ել",-1,1],["ըել",13,1],["նել",13,1],["ցնել",15,1],["եցնել",16,1],["չել",13,1],["վել",13,1],["ացվել",19,1],["եցվել",19,1],["տել",13,1],["ատել",22,1],["ոտել",22,1],["կոտել",24,1],["ված",-1,1],["ում",-1,1],["վում",27,1],["ան",-1,1],["ցան",29,1],["ացան",30,1],["ացրին",-1,1],["ացին",-1,1],["եցին",-1,1],["վեցին",34,1],["ալիս",-1,1],["ելիս",-1,1],["ավ",-1,1],["ացավ",38,1],["եցավ",38,1],["ալով",-1,1],["ելով",-1,1],["ար",-1,1],["ացար",43,1],["եցար",43,1],["ացրիր",-1,1],["ացիր",-1,1],["եցիր",-1,1],["վեցիր",48,1],["աց",-1,1],["եց",-1,1],["ացրեց",51,1],["ալուց",-1,1],["ելուց",-1,1],["ալու",-1,1],["ելու",-1,1],["աք",-1,1],["ցաք",57,1],["ացաք",58,1],["ացրիք",-1,1],["ացիք",-1,1],["եցիք",-1,1],["վեցիք",62,1],["անք",-1,1],["ցանք",64,1],["ացանք",65,1],["ացրինք",-1,1],["ացինք",-1,1],["եցինք",-1,1],["վեցինք",69,1]],s=[["որդ",-1,1],["ույթ",-1,1],["ուհի",-1,1],["ցի",-1,1],["իլ",-1,1],["ակ",-1,1],["յակ",5,1],["անակ",5,1],["իկ",-1,1],["ուկ",-1,1],["ան",-1,1],["պան",10,1],["ստան",10,1],["արան",10,1],["եղէն",-1,1],["յուն",-1,1],["ություն",15,1],["ածո",-1,1],["իչ",-1,1],["ուս",-1,1],["ուստ",-1,1],["գար",-1,1],["վոր",-1,1],["ավոր",22,1],["ոց",-1,1],["անօց",-1,1],["ու",-1,1],["ք",-1,1],["չեք",27,1],["իք",27,1],["ալիք",29,1],["անիք",29,1],["վածք",27,1],["ույք",27,1],["ենք",27,1],["ոնք",27,1],["ունք",27,1],["մունք",36,1],["իչք",27,1],["արք",27,1]],r=[["սա",-1,1],["վա",-1,1],["ամբ",-1,1],["դ",-1,1],["անդ",3,1],["ությանդ",4,1],["վանդ",4,1],["ոջդ",3,1],["երդ",3,1],["ներդ",8,1],["ուդ",3,1],["ը",-1,1],["անը",11,1],["ությանը",12,1],["վանը",12,1],["ոջը",11,1],["երը",11,1],["ները",16,1],["ի",-1,1],["վի",18,1],["երի",18,1],["ների",20,1],["անում",-1,1],["երում",-1,1],["ներում",23,1],["ն",-1,1],["ան",25,1],["ության",26,1],["վան",26,1],["ին",25,1],["երին",29,1],["ներին",30,1],["ությանն",25,1],["երն",25,1],["ներն",33,1],["ուն",25,1],["ոջ",-1,1],["ությանս",-1,1],["վանս",-1,1],["ոջս",-1,1],["ով",-1,1],["անով",40,1],["վով",40,1],["երով",40,1],["ներով",43,1],["եր",-1,1],["ներ",45,1],["ց",-1,1],["ից",47,1],["վանից",48,1],["ոջից",48,1],["վից",48,1],["երից",48,1],["ներից",52,1],["ցից",48,1],["ոց",47,1],["ուց",47,1]],t=[209,4,128,0,18],n=0,e=0;function m(){o.ket=o.cursor,0!=o.find_among_b(r)&&(o.bra=o.cursor,n<=o.cursor)&&o.slice_del()}this.stem=function(){var r,i;return e=o.limit,n=o.limit,r=o.cursor,o.go_out_grouping(t,1377,1413)&&(o.cursor++,e=o.cursor,o.go_in_grouping(t,1377,1413))&&(o.cursor++,o.go_out_grouping(t,1377,1413))&&(o.cursor++,o.go_in_grouping(t,1377,1413))&&(o.cursor++,n=o.cursor),o.cursor=r,o.limit_backward=o.cursor,o.cursor=o.limit,!(o.cursor=this.limit)return false;var s=this.current.charCodeAt(this.cursor);if(s>i||s>>3]&1<<(s&7))==0)return false;this.cursor++;return true};this.in_grouping_b=function(r,t,i){if(this.cursor<=this.limit_backward)return false;var s=this.current.charCodeAt(this.cursor-1);if(s>i||s>>3]&1<<(s&7))==0)return false;this.cursor--;return true};this.out_grouping=function(r,t,i){if(this.cursor>=this.limit)return false;var 
s=this.current.charCodeAt(this.cursor);if(s>i||s>>3]&1<<(s&7))==0){this.cursor++;return true}return false};this.out_grouping_b=function(r,t,i){if(this.cursor<=this.limit_backward)return false;var s=this.current.charCodeAt(this.cursor-1);if(s>i||s>>3]&1<<(s&7))==0){this.cursor--;return true}return false};this.eq_s=function(r){if(this.limit-this.cursor>>1);var a=0;var f=h0)break;if(i==t)break;if(n)break;n=true}}do{var l=r[t];if(h>=l[0].length){this.cursor=s+l[0].length;if(l.length<4)return l[2];var v=l[3](this);this.cursor=s+l[0].length;if(v)return l[2]}t=l[1]}while(t>=0);return 0};this.find_among_b=function(r){var t=0;var i=r.length;var s=this.cursor;var e=this.limit_backward;var h=0;var u=0;var n=false;while(true){var c=t+(i-t>>1);var a=0;var f=h=0;o--){if(s-f==e){a=-1;break}a=this.current.charCodeAt(s-1-f)-l[0].charCodeAt(o);if(a!=0)break;f++}if(a<0){i=c;u=f}else{t=c;h=f}if(i-t<=1){if(t>0)break;if(i==t)break;if(n)break;n=true}}do{var l=r[t];if(h>=l[0].length){this.cursor=s-l[0].length;if(l.length<4)return l[2];var v=l[3](this);this.cursor=s-l[0].length;if(v)return l[2]}t=l[1]}while(t>=0);return 0};this.replace_s=function(r,t,i){var s=i.length-(t-r);this.current=this.current.slice(0,r)+i+this.current.slice(t);this.limit+=s;if(this.cursor>=t)this.cursor+=s;else if(this.cursor>r)this.cursor=r;return s};this.slice_check=function(){if(this.bra<0||this.bra>this.ket||this.ket>this.limit||this.limit>this.current.length){return false}return true};this.slice_from=function(r){var t=false;if(this.slice_check()){this.replace_s(this.bra,this.ket,r);t=true}return t};this.slice_del=function(){return this.slice_from("")};this.insert=function(r,t,i){var s=this.replace_s(r,t,i);if(r<=this.bra)this.bra+=s;if(r<=this.ket)this.ket+=s};this.slice_to=function(){var r="";if(this.slice_check()){r=this.current.slice(this.bra,this.ket)}return r};this.assign_to=function(){return this.current.slice(0,this.limit)}};
\ No newline at end of file
+let BaseStemmer=function(){this.current="",this.cursor=0,this.limit=0,this.limit_backward=0,this.bra=0,this.ket=0,this.setCurrent=function(t){this.current=t,this.cursor=0,this.limit=this.current.length,this.limit_backward=0,this.bra=this.cursor,this.ket=this.limit},this.getCurrent=function(){return this.current},this.copy_from=function(t){this.current=t.current,this.cursor=t.cursor,this.limit=t.limit,this.limit_backward=t.limit_backward,this.bra=t.bra,this.ket=t.ket},this.in_grouping=function(t,r,i){return!(this.cursor>=this.limit||i<(i=this.current.charCodeAt(this.cursor))||i>>3]&1<<(7&i))||(this.cursor++,0))},this.go_in_grouping=function(t,r,i){for(;this.cursor>>3]&1<<(7&s)))return!0;this.cursor++}return!1},this.in_grouping_b=function(t,r,i){return!(this.cursor<=this.limit_backward||i<(i=this.current.charCodeAt(this.cursor-1))||i>>3]&1<<(7&i))||(this.cursor--,0))},this.go_in_grouping_b=function(t,r,i){for(;this.cursor>this.limit_backward;){var s=this.current.charCodeAt(this.cursor-1);if(i>>3]&1<<(7&s)))return!0;this.cursor--}return!1},this.out_grouping=function(t,r,i){return!(this.cursor>=this.limit)&&(i<(i=this.current.charCodeAt(this.cursor))||i>>3]&1<<(7&i)))&&(this.cursor++,!0)},this.go_out_grouping=function(t,r,i){for(;this.cursor>>3]&1<<(7&s)))return!0;this.cursor++}return!1},this.out_grouping_b=function(t,r,i){return!(this.cursor<=this.limit_backward)&&(i<(i=this.current.charCodeAt(this.cursor-1))||i>>3]&1<<(7&i)))&&(this.cursor--,!0)},this.go_out_grouping_b=function(t,r,i){for(;this.cursor>this.limit_backward;){var s=this.current.charCodeAt(this.cursor-1);if(s<=i&&r<=s&&0!=(t[(s-=r)>>>3]&1<<(7&s)))return!0;this.cursor--}return!1},this.eq_s=function(t){return!(this.limit-this.cursor>>1),o=0,a=e=(l=t[r])[0].length){if(this.cursor=s+l[0].length,l.length<4)return l[2];var g=l[3](this);if(this.cursor=s+l[0].length,g)return l[2]}}while(0<=(r=l[1]));return 0},this.find_among_b=function(t){for(var r=0,i=t.length,s=this.cursor,h=this.limit_backward,e=0,n=0,c=!1;;){for(var u,o=r+(i-r>>1),a=0,l=e=(u=t[r])[0].length){if(this.cursor=s-u[0].length,u.length<4)return u[2];var g=u[3](this);if(this.cursor=s-u[0].length,g)return u[2]}}while(0<=(r=u[1]));return 0},this.replace_s=function(t,r,i){var s=i.length-(r-t);return this.current=this.current.slice(0,t)+i+this.current.slice(r),this.limit+=s,this.cursor>=r?this.cursor+=s:this.cursor>t&&(this.cursor=t),s},this.slice_check=function(){return!(this.bra<0||this.bra>this.ket||this.ket>this.limit||this.limit>this.current.length)},this.slice_from=function(t){var r=!1;return this.slice_check()&&(this.replace_s(this.bra,this.ket,t),r=!0),r},this.slice_del=function(){return this.slice_from("")},this.insert=function(t,r,i){r=this.replace_s(t,r,i);t<=this.bra&&(this.bra+=r),t<=this.ket&&(this.ket+=r)},this.slice_to=function(){var t="";return t=this.slice_check()?this.current.slice(this.bra,this.ket):t},this.assign_to=function(){return this.current.slice(0,this.limit)}};
\ No newline at end of file
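
All of the generated code drives BaseStemmer through one pattern: set ket at the cursor, call find_among_b over a table whose rows are [suffix, index of an entry that is itself a suffix of this one (-1 if none), result code], then set bra and slice. A hedged standalone sketch of that pattern, assuming base-stemmer.js has been evaluated; the word and table are illustrative, not taken from a real stemmer::

    var st = new BaseStemmer();
    st.setCurrent('hedens');             // illustrative input
    st.limit_backward = st.cursor;       // backward-mode setup, as in stem()
    st.cursor = st.limit;
    st.ket = st.cursor;
    var amongs = [['en', -1, 1], ['heden', 0, 1], ['s', -1, 2]];
    var code = st.find_among_b(amongs);  // longest suffix ending at the cursor
    if (code !== 0) {                    // here 's' matches, so code == 2
        st.bra = st.cursor;              // cursor was moved to before the match
        st.slice_del();                  // delete bra..ket
    }
    console.log(st.getCurrent());        // 'heden'

Note that the rows must stay sorted by reversed suffix for the binary search inside find_among_b to work.
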
diff --git a/sphinx/search/minified-js/basque-stemmer.js b/sphinx/search/minified-js/basque-stemmer.js
new file mode 100644
index 00000000000..3e1c4337a69
--- /dev/null
+++ b/sphinx/search/minified-js/basque-stemmer.js
@@ -0,0 +1 @@
+var BasqueStemmer=function(){var o=new BaseStemmer,u=[["idea",-1,1],["bidea",0,1],["kidea",0,1],["pidea",0,1],["kundea",-1,1],["galea",-1,1],["tailea",-1,1],["tzailea",-1,1],["gunea",-1,1],["kunea",-1,1],["tzaga",-1,1],["gaia",-1,1],["aldia",-1,1],["taldia",12,1],["karia",-1,1],["garria",-1,2],["karria",-1,1],["ka",-1,1],["tzaka",17,1],["la",-1,1],["mena",-1,1],["pena",-1,1],["kina",-1,1],["ezina",-1,1],["tezina",23,1],["kuna",-1,1],["tuna",-1,1],["kizuna",-1,1],["era",-1,1],["bera",28,1],["arabera",29,-1],["kera",28,1],["pera",28,1],["orra",-1,1],["korra",33,1],["dura",-1,1],["gura",-1,1],["kura",-1,1],["tura",-1,1],["eta",-1,1],["keta",39,1],["gailua",-1,1],["eza",-1,1],["erreza",42,1],["tza",-1,2],["gaitza",44,1],["kaitza",44,1],["kuntza",44,1],["ide",-1,1],["bide",48,1],["kide",48,1],["pide",48,1],["kunde",-1,1],["tzake",-1,1],["tzeke",-1,1],["le",-1,1],["gale",55,1],["taile",55,1],["tzaile",55,1],["gune",-1,1],["kune",-1,1],["tze",-1,1],["atze",61,1],["gai",-1,1],["aldi",-1,1],["taldi",64,1],["ki",-1,1],["ari",-1,1],["kari",67,1],["lari",67,1],["tari",67,1],["etari",70,1],["garri",-1,2],["karri",-1,1],["arazi",-1,1],["tarazi",74,1],["an",-1,1],["ean",76,1],["rean",77,1],["kan",76,1],["etan",76,1],["atseden",-1,-1],["men",-1,1],["pen",-1,1],["kin",-1,1],["rekin",84,1],["ezin",-1,1],["tezin",86,1],["tun",-1,1],["kizun",-1,1],["go",-1,1],["ago",90,1],["tio",-1,1],["dako",-1,1],["or",-1,1],["kor",94,1],["tzat",-1,1],["du",-1,1],["gailu",-1,1],["tu",-1,1],["atu",99,1],["aldatu",100,1],["tatu",100,1],["baditu",99,-1],["ez",-1,1],["errez",104,1],["tzez",104,1],["gaitz",-1,1],["kaitz",-1,1]],r=[["ada",-1,1],["kada",0,1],["anda",-1,1],["denda",-1,1],["gabea",-1,1],["kabea",-1,1],["aldea",-1,1],["kaldea",6,1],["taldea",6,1],["ordea",-1,1],["zalea",-1,1],["tzalea",10,1],["gilea",-1,1],["emea",-1,1],["kumea",-1,1],["nea",-1,1],["enea",15,1],["zionea",15,1],["unea",15,1],["gunea",18,1],["pea",-1,1],["aurrea",-1,1],["tea",-1,1],["kotea",22,1],["artea",22,1],["ostea",22,1],["etxea",-1,1],["ga",-1,1],["anga",27,1],["gaia",-1,1],["aldia",-1,1],["taldia",30,1],["handia",-1,1],["mendia",-1,1],["geia",-1,1],["egia",-1,1],["degia",35,1],["tegia",35,1],["nahia",-1,1],["ohia",-1,1],["kia",-1,1],["tokia",40,1],["oia",-1,1],["koia",42,1],["aria",-1,1],["karia",44,1],["laria",44,1],["taria",44,1],["eria",-1,1],["keria",48,1],["teria",48,1],["garria",-1,2],["larria",-1,1],["kirria",-1,1],["duria",-1,1],["asia",-1,1],["tia",-1,1],["ezia",-1,1],["bizia",-1,1],["ontzia",-1,1],["ka",-1,1],["joka",60,3],["aurka",60,-1],["ska",60,1],["xka",60,1],["zka",60,1],["gibela",-1,1],["gela",-1,1],["kaila",-1,1],["skila",-1,1],["tila",-1,1],["ola",-1,1],["na",-1,1],["kana",72,1],["ena",72,1],["garrena",74,1],["gerrena",74,1],["urrena",74,1],["zaina",72,1],["tzaina",78,1],["kina",72,1],["mina",72,1],["garna",72,1],["una",72,1],["duna",83,1],["asuna",83,1],["tasuna",85,1],["ondoa",-1,1],["kondoa",87,1],["ngoa",-1,1],["zioa",-1,1],["koa",-1,1],["takoa",91,1],["zkoa",91,1],["noa",-1,1],["zinoa",94,1],["aroa",-1,1],["taroa",96,1],["zaroa",96,1],["eroa",-1,1],["oroa",-1,1],["osoa",-1,1],["toa",-1,1],["ttoa",102,1],["ztoa",102,1],["txoa",-1,1],["tzoa",-1,1],["ñoa",-1,1],["ra",-1,1],["ara",108,1],["dara",109,1],["liara",109,1],["tiara",109,1],["tara",109,1],["etara",113,1],["tzara",109,1],["bera",108,1],["kera",108,1],["pera",108,1],["ora",108,2],["tzarra",108,1],["korra",108,1],["tra",108,1],["sa",-1,1],["osa",123,1],["ta",-1,1],["eta",125,1],["keta",126,1],["sta",125,1],["dua",-1,1],["mendua",129,1],["ordua",129,1],["lekua",-1,1],["
burua",-1,1],["durua",-1,1],["tsua",-1,1],["tua",-1,1],["mentua",136,1],["estua",136,1],["txua",-1,1],["zua",-1,1],["tzua",140,1],["za",-1,1],["eza",142,1],["eroza",142,1],["tza",142,2],["koitza",145,1],["antza",145,1],["gintza",145,1],["kintza",145,1],["kuntza",145,1],["gabe",-1,1],["kabe",-1,1],["kide",-1,1],["alde",-1,1],["kalde",154,1],["talde",154,1],["orde",-1,1],["ge",-1,1],["zale",-1,1],["tzale",159,1],["gile",-1,1],["eme",-1,1],["kume",-1,1],["ne",-1,1],["zione",164,1],["une",164,1],["gune",166,1],["pe",-1,1],["aurre",-1,1],["te",-1,1],["kote",170,1],["arte",170,1],["oste",170,1],["etxe",-1,1],["gai",-1,1],["di",-1,1],["aldi",176,1],["taldi",177,1],["geldi",176,-1],["handi",176,1],["mendi",176,1],["gei",-1,1],["egi",-1,1],["degi",183,1],["tegi",183,1],["nahi",-1,1],["ohi",-1,1],["ki",-1,1],["toki",188,1],["oi",-1,1],["goi",190,1],["koi",190,1],["ari",-1,1],["kari",193,1],["lari",193,1],["tari",193,1],["garri",-1,2],["larri",-1,1],["kirri",-1,1],["duri",-1,1],["asi",-1,1],["ti",-1,1],["ontzi",-1,1],["ñi",-1,1],["ak",-1,1],["ek",-1,1],["tarik",-1,1],["gibel",-1,1],["ail",-1,1],["kail",209,1],["kan",-1,1],["tan",-1,1],["etan",212,1],["en",-1,4],["ren",214,2],["garren",215,1],["gerren",215,1],["urren",215,1],["ten",214,4],["tzen",214,4],["zain",-1,1],["tzain",221,1],["kin",-1,1],["min",-1,1],["dun",-1,1],["asun",-1,1],["tasun",226,1],["aizun",-1,1],["ondo",-1,1],["kondo",229,1],["go",-1,1],["ngo",231,1],["zio",-1,1],["ko",-1,1],["trako",234,5],["tako",234,1],["etako",236,1],["eko",234,1],["tariko",234,1],["sko",234,1],["tuko",234,1],["minutuko",241,6],["zko",234,1],["no",-1,1],["zino",244,1],["ro",-1,1],["aro",246,1],["igaro",247,-1],["taro",247,1],["zaro",247,1],["ero",246,1],["giro",246,1],["oro",246,1],["oso",-1,1],["to",-1,1],["tto",255,1],["zto",255,1],["txo",-1,1],["tzo",-1,1],["gintzo",259,1],["ño",-1,1],["zp",-1,1],["ar",-1,1],["dar",263,1],["behar",263,1],["zehar",263,-1],["liar",263,1],["tiar",263,1],["tar",263,1],["tzar",263,1],["or",-1,2],["kor",271,1],["os",-1,1],["ket",-1,1],["du",-1,1],["mendu",275,1],["ordu",275,1],["leku",-1,1],["buru",-1,2],["duru",-1,1],["tsu",-1,1],["tu",-1,1],["tatu",282,4],["mentu",282,1],["estu",282,1],["txu",-1,1],["zu",-1,1],["tzu",287,1],["gintzu",288,1],["z",-1,1],["ez",290,1],["eroz",290,1],["tz",290,1],["koitz",293,1]],n=[["zlea",-1,2],["keria",-1,1],["la",-1,1],["era",-1,1],["dade",-1,1],["tade",-1,1],["date",-1,1],["tate",-1,1],["gi",-1,1],["ki",-1,1],["ik",-1,1],["lanik",10,1],["rik",10,1],["larik",12,1],["ztik",10,1],["go",-1,1],["ro",-1,1],["ero",16,1],["to",-1,1]],k=[17,65,16],g=0,s=0,z=0;function l(){return z<=o.cursor}function d(){return g<=o.cursor}function c(){var a;if(o.ket=o.cursor,0!=(a=o.find_among_b(r))){switch(o.bra=o.cursor,a){case 1:if(!l())return;if(o.slice_del())break;return;case 2:if(!d())return;if(o.slice_del())break;return;case 3:if(o.slice_from("jok"))break;return;case 4:if(!(s<=o.cursor))return;if(o.slice_del())break;return;case 5:if(o.slice_from("tra"))break;return;case 6:if(o.slice_from("minutu"))break;return}return 1}}this.stem=function(){z=o.limit,s=o.limit,g=o.limit;var a=o.cursor;a:{r:{var r=o.cursor;i:if(o.in_grouping(k,97,117)){var i=o.cursor;if(!o.out_grouping(k,97,117)||!o.go_out_grouping(k,97,117)){if(o.cursor=i,!o.in_grouping(k,97,117))break i;if(!o.go_in_grouping(k,97,117))break i}o.cursor++;break r}if(o.cursor=r,!o.out_grouping(k,97,117))break a;i=o.cursor;if(o.out_grouping(k,97,117)&&o.go_out_grouping(k,97,117));else{if(o.cursor=i,!o.in_grouping(k,97,117))break a;if(o.cursor>=o.limit)break 
a}o.cursor++}z=o.cursor}for(o.cursor=a,a=o.cursor,o.go_out_grouping(k,97,117)&&(o.cursor++,o.go_in_grouping(k,97,117))&&(o.cursor++,s=o.cursor,o.go_out_grouping(k,97,117))&&(o.cursor++,o.go_in_grouping(k,97,117))&&(o.cursor++,g=o.cursor),o.cursor=a,o.limit_backward=o.cursor,o.cursor=o.limit;;){var e=o.limit-o.cursor;if(!(()=>{var a;if(o.ket=o.cursor,0!=(a=o.find_among_b(u))){switch(o.bra=o.cursor,a){case 1:if(!l())return;if(o.slice_del())break;return;case 2:if(!d())return;if(o.slice_del())break;return}return 1}})()){o.cursor=o.limit-e;break}}for(;;){var t=o.limit-o.cursor;if(!c()){o.cursor=o.limit-t;break}}a=o.limit-o.cursor;return(()=>{var a;if(o.ket=o.cursor,0!=(a=o.find_among_b(n)))switch(o.bra=o.cursor,a){case 1:if(!l())return;if(o.slice_del())break;return;case 2:if(o.slice_from("z"))break}})(),o.cursor=o.limit-a,o.cursor=o.limit_backward,!0},this.stemWord=function(a){return o.setCurrent(a),this.stem(),o.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/catalan-stemmer.js b/sphinx/search/minified-js/catalan-stemmer.js
new file mode 100644
index 00000000000..75788216aa1
--- /dev/null
+++ b/sphinx/search/minified-js/catalan-stemmer.js
@@ -0,0 +1 @@
+var CatalanStemmer=function(){var e=new BaseStemmer,r=[["",-1,7],["·",0,6],["à",0,1],["á",0,1],["è",0,2],["é",0,2],["ì",0,3],["í",0,3],["ï",0,3],["ò",0,4],["ó",0,4],["ú",0,5],["ü",0,5]],a=[["la",-1,1],["-la",0,1],["sela",0,1],["le",-1,1],["me",-1,1],["-me",4,1],["se",-1,1],["-te",-1,1],["hi",-1,1],["'hi",8,1],["li",-1,1],["-li",10,1],["'l",-1,1],["'m",-1,1],["-m",-1,1],["'n",-1,1],["-n",-1,1],["ho",-1,1],["'ho",17,1],["lo",-1,1],["selo",19,1],["'s",-1,1],["las",-1,1],["selas",22,1],["les",-1,1],["-les",24,1],["'ls",-1,1],["-ls",-1,1],["'ns",-1,1],["-ns",-1,1],["ens",-1,1],["los",-1,1],["selos",31,1],["nos",-1,1],["-nos",33,1],["vos",-1,1],["us",-1,1],["-us",36,1],["'t",-1,1]],t=[["ica",-1,4],["lógica",0,3],["enca",-1,1],["ada",-1,2],["ancia",-1,1],["encia",-1,1],["ència",-1,1],["ícia",-1,1],["logia",-1,3],["inia",-1,1],["íinia",9,1],["eria",-1,1],["ària",-1,1],["atòria",-1,1],["alla",-1,1],["ella",-1,1],["ívola",-1,1],["ima",-1,1],["íssima",17,1],["quíssima",18,5],["ana",-1,1],["ina",-1,1],["era",-1,1],["sfera",22,1],["ora",-1,1],["dora",24,1],["adora",25,1],["adura",-1,1],["esa",-1,1],["osa",-1,1],["assa",-1,1],["essa",-1,1],["issa",-1,1],["eta",-1,1],["ita",-1,1],["ota",-1,1],["ista",-1,1],["ialista",36,1],["ionista",36,1],["iva",-1,1],["ativa",39,1],["nça",-1,1],["logía",-1,3],["ic",-1,4],["ístic",43,1],["enc",-1,1],["esc",-1,1],["ud",-1,1],["atge",-1,1],["ble",-1,1],["able",49,1],["ible",49,1],["isme",-1,1],["ialisme",52,1],["ionisme",52,1],["ivisme",52,1],["aire",-1,1],["icte",-1,1],["iste",-1,1],["ici",-1,1],["íci",-1,1],["logi",-1,3],["ari",-1,1],["tori",-1,1],["al",-1,1],["il",-1,1],["all",-1,1],["ell",-1,1],["ívol",-1,1],["isam",-1,1],["issem",-1,1],["ìssem",-1,1],["íssem",-1,1],["íssim",-1,1],["quíssim",73,5],["amen",-1,1],["ìssin",-1,1],["ar",-1,1],["ificar",77,1],["egar",77,1],["ejar",77,1],["itar",77,1],["itzar",77,1],["fer",-1,1],["or",-1,1],["dor",84,1],["dur",-1,1],["doras",-1,1],["ics",-1,4],["lógics",88,3],["uds",-1,1],["nces",-1,1],["ades",-1,2],["ancies",-1,1],["encies",-1,1],["ències",-1,1],["ícies",-1,1],["logies",-1,3],["inies",-1,1],["ínies",-1,1],["eries",-1,1],["àries",-1,1],["atòries",-1,1],["bles",-1,1],["ables",103,1],["ibles",103,1],["imes",-1,1],["íssimes",106,1],["quíssimes",107,5],["formes",-1,1],["ismes",-1,1],["ialismes",110,1],["ines",-1,1],["eres",-1,1],["ores",-1,1],["dores",114,1],["idores",115,1],["dures",-1,1],["eses",-1,1],["oses",-1,1],["asses",-1,1],["ictes",-1,1],["ites",-1,1],["otes",-1,1],["istes",-1,1],["ialistes",124,1],["ionistes",124,1],["iques",-1,4],["lógiques",127,3],["ives",-1,1],["atives",129,1],["logíes",-1,3],["allengües",-1,1],["icis",-1,1],["ícis",-1,1],["logis",-1,3],["aris",-1,1],["toris",-1,1],["ls",-1,1],["als",138,1],["ells",138,1],["ims",-1,1],["íssims",141,1],["quíssims",142,5],["ions",-1,1],["cions",144,1],["acions",145,2],["esos",-1,1],["osos",-1,1],["assos",-1,1],["issos",-1,1],["ers",-1,1],["ors",-1,1],["dors",152,1],["adors",153,1],["idors",153,1],["ats",-1,1],["itats",156,1],["bilitats",157,1],["ivitats",157,1],["ativitats",159,1],["ïtats",156,1],["ets",-1,1],["ants",-1,1],["ents",-1,1],["ments",164,1],["aments",165,1],["ots",-1,1],["uts",-1,1],["ius",-1,1],["trius",169,1],["atius",169,1],["ès",-1,1],["és",-1,1],["ís",-1,1],["dís",174,1],["ós",-1,1],["itat",-1,1],["bilitat",177,1],["ivitat",177,1],["ativitat",179,1],["ïtat",-1,1],["et",-1,1],["ant",-1,1],["ent",-1,1],["ient",184,1],["ment",184,1],["ament",186,1],["isament",187,1],["ot",-1,1],["isseu",-1,1],["ìsseu",-1,1],["ísseu",-1,1],["triu",-1,1],["íssiu",-1,1],["a
tiu",-1,1],["ó",-1,1],["ió",196,1],["ció",197,1],["ació",198,1]],n=[["aba",-1,1],["esca",-1,1],["isca",-1,1],["ïsca",-1,1],["ada",-1,1],["ida",-1,1],["uda",-1,1],["ïda",-1,1],["ia",-1,1],["aria",8,1],["iria",8,1],["ara",-1,1],["iera",-1,1],["ira",-1,1],["adora",-1,1],["ïra",-1,1],["ava",-1,1],["ixa",-1,1],["itza",-1,1],["ía",-1,1],["aría",19,1],["ería",19,1],["iría",19,1],["ïa",-1,1],["isc",-1,1],["ïsc",-1,1],["ad",-1,1],["ed",-1,1],["id",-1,1],["ie",-1,1],["re",-1,1],["dre",30,1],["ase",-1,1],["iese",-1,1],["aste",-1,1],["iste",-1,1],["ii",-1,1],["ini",-1,1],["esqui",-1,1],["eixi",-1,1],["itzi",-1,1],["am",-1,1],["em",-1,1],["arem",42,1],["irem",42,1],["àrem",42,1],["írem",42,1],["àssem",42,1],["éssem",42,1],["iguem",42,1],["ïguem",42,1],["avem",42,1],["àvem",42,1],["ávem",42,1],["irìem",42,1],["íem",42,1],["aríem",55,1],["iríem",55,1],["assim",-1,1],["essim",-1,1],["issim",-1,1],["àssim",-1,1],["èssim",-1,1],["éssim",-1,1],["íssim",-1,1],["ïm",-1,1],["an",-1,1],["aban",66,1],["arian",66,1],["aran",66,1],["ieran",66,1],["iran",66,1],["ían",66,1],["arían",72,1],["erían",72,1],["irían",72,1],["en",-1,1],["ien",76,1],["arien",77,1],["irien",77,1],["aren",76,1],["eren",76,1],["iren",76,1],["àren",76,1],["ïren",76,1],["asen",76,1],["iesen",76,1],["assen",76,1],["essen",76,1],["issen",76,1],["éssen",76,1],["ïssen",76,1],["esquen",76,1],["isquen",76,1],["ïsquen",76,1],["aven",76,1],["ixen",76,1],["eixen",96,1],["ïxen",76,1],["ïen",76,1],["in",-1,1],["inin",100,1],["sin",100,1],["isin",102,1],["assin",102,1],["essin",102,1],["issin",102,1],["ïssin",102,1],["esquin",100,1],["eixin",100,1],["aron",-1,1],["ieron",-1,1],["arán",-1,1],["erán",-1,1],["irán",-1,1],["iïn",-1,1],["ado",-1,1],["ido",-1,1],["ando",-1,2],["iendo",-1,1],["io",-1,1],["ixo",-1,1],["eixo",121,1],["ïxo",-1,1],["itzo",-1,1],["ar",-1,1],["tzar",125,1],["er",-1,1],["eixer",127,1],["ir",-1,1],["ador",-1,1],["as",-1,1],["abas",131,1],["adas",131,1],["idas",131,1],["aras",131,1],["ieras",131,1],["ías",131,1],["arías",137,1],["erías",137,1],["irías",137,1],["ids",-1,1],["es",-1,1],["ades",142,1],["ides",142,1],["udes",142,1],["ïdes",142,1],["atges",142,1],["ies",142,1],["aries",148,1],["iries",148,1],["ares",142,1],["ires",142,1],["adores",142,1],["ïres",142,1],["ases",142,1],["ieses",142,1],["asses",142,1],["esses",142,1],["isses",142,1],["ïsses",142,1],["ques",142,1],["esques",161,1],["ïsques",161,1],["aves",142,1],["ixes",142,1],["eixes",165,1],["ïxes",142,1],["ïes",142,1],["abais",-1,1],["arais",-1,1],["ierais",-1,1],["íais",-1,1],["aríais",172,1],["eríais",172,1],["iríais",172,1],["aseis",-1,1],["ieseis",-1,1],["asteis",-1,1],["isteis",-1,1],["inis",-1,1],["sis",-1,1],["isis",181,1],["assis",181,1],["essis",181,1],["issis",181,1],["ïssis",181,1],["esquis",-1,1],["eixis",-1,1],["itzis",-1,1],["áis",-1,1],["aréis",-1,1],["eréis",-1,1],["iréis",-1,1],["ams",-1,1],["ados",-1,1],["idos",-1,1],["amos",-1,1],["ábamos",197,1],["áramos",197,1],["iéramos",197,1],["íamos",197,1],["aríamos",201,1],["eríamos",201,1],["iríamos",201,1],["aremos",-1,1],["eremos",-1,1],["iremos",-1,1],["ásemos",-1,1],["iésemos",-1,1],["imos",-1,1],["adors",-1,1],["ass",-1,1],["erass",212,1],["ess",-1,1],["ats",-1,1],["its",-1,1],["ents",-1,1],["às",-1,1],["aràs",218,1],["iràs",218,1],["arás",-1,1],["erás",-1,1],["irás",-1,1],["és",-1,1],["arés",224,1],["ís",-1,1],["iïs",-1,1],["at",-1,1],["it",-1,1],["ant",-1,1],["ent",-1,1],["int",-1,1],["ut",-1,1],["ït",-1,1],["au",-1,1],["erau",235,1],["ieu",-1,1],["ineu",-1,1],["areu",-1,1],["ireu",-1,1],["àreu",-1,1],["íreu",-1
,1],["asseu",-1,1],["esseu",-1,1],["eresseu",244,1],["àsseu",-1,1],["ésseu",-1,1],["igueu",-1,1],["ïgueu",-1,1],["àveu",-1,1],["áveu",-1,1],["itzeu",-1,1],["ìeu",-1,1],["irìeu",253,1],["íeu",-1,1],["aríeu",255,1],["iríeu",255,1],["assiu",-1,1],["issiu",-1,1],["àssiu",-1,1],["èssiu",-1,1],["éssiu",-1,1],["íssiu",-1,1],["ïu",-1,1],["ix",-1,1],["eix",265,1],["ïx",-1,1],["itz",-1,1],["ià",-1,1],["arà",-1,1],["irà",-1,1],["itzà",-1,1],["ará",-1,1],["erá",-1,1],["irá",-1,1],["irè",-1,1],["aré",-1,1],["eré",-1,1],["iré",-1,1],["í",-1,1],["iï",-1,1],["ió",-1,1]],o=[["a",-1,1],["e",-1,1],["i",-1,1],["ïn",-1,1],["o",-1,1],["ir",-1,1],["s",-1,1],["is",6,1],["os",6,1],["ïs",6,1],["it",-1,1],["eu",-1,1],["iu",-1,1],["iqu",-1,2],["itz",-1,1],["à",-1,1],["á",-1,1],["é",-1,1],["ì",-1,1],["í",-1,1],["ï",-1,1],["ó",-1,1]],u=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,128,129,81,6,10],c=0,m=0;function l(){return m<=e.cursor}function d(){return c<=e.cursor}this.stem=function(){m=e.limit,c=e.limit,s=e.cursor,e.go_out_grouping(u,97,252)&&(e.cursor++,e.go_in_grouping(u,97,252))&&(e.cursor++,m=e.cursor,e.go_out_grouping(u,97,252))&&(e.cursor++,e.go_in_grouping(u,97,252))&&(e.cursor++,c=e.cursor),e.cursor=s,e.limit_backward=e.cursor,e.cursor=e.limit;var s=e.limit-e.cursor,s=(e.ket=e.cursor,0!=e.find_among_b(a)&&(e.bra=e.cursor,l())&&e.slice_del(),e.cursor=e.limit-s,e.limit-e.cursor),i=e.limit-e.cursor,i=((()=>{var s;if(e.ket=e.cursor,0!=(s=e.find_among_b(t))){switch(e.bra=e.cursor,s){case 1:if(!l())return;if(e.slice_del())break;return;case 2:if(!d())return;if(e.slice_del())break;return;case 3:if(!d())return;if(e.slice_from("log"))break;return;case 4:if(!d())return;if(e.slice_from("ic"))break;return;case 5:if(!l())return;if(e.slice_from("c"))break;return}return 1}})()||(e.cursor=e.limit-i,(()=>{var s;if(e.ket=e.cursor,0!=(s=e.find_among_b(n))){switch(e.bra=e.cursor,s){case 1:if(!l())return;if(e.slice_del())break;return;case 2:if(!d())return;if(e.slice_del())break;return}}})()),e.cursor=e.limit-s,e.limit-e.cursor),s=((()=>{var s;if(e.ket=e.cursor,0!=(s=e.find_among_b(o)))switch(e.bra=e.cursor,s){case 1:if(!l())return;if(e.slice_del())break;return;case 2:if(!l())return;if(e.slice_from("ic"))break}})(),e.cursor=e.limit-i,e.cursor=e.limit_backward,e.cursor);return(()=>{for(var s;;){var i=e.cursor;s:{switch(e.bra=e.cursor,s=e.find_among(r),e.ket=e.cursor,s){case 1:if(e.slice_from("a"))break;return;case 2:if(e.slice_from("e"))break;return;case 3:if(e.slice_from("i"))break;return;case 4:if(e.slice_from("o"))break;return;case 5:if(e.slice_from("u"))break;return;case 6:if(e.slice_from("."))break;return;case 7:if(e.cursor>=e.limit)break s;e.cursor++}continue}e.cursor=i;break}})(),e.cursor=s,!0},this.stemWord=function(s){return e.setCurrent(s),this.stem(),e.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/danish-stemmer.js b/sphinx/search/minified-js/danish-stemmer.js
index f3fc600033c..7a577f3eb26 100644
--- a/sphinx/search/minified-js/danish-stemmer.js
+++ b/sphinx/search/minified-js/danish-stemmer.js
@@ -1 +1 @@
-DanishStemmer=function(){var r=new BaseStemmer;var e=[["hed",-1,1],["ethed",0,1],["ered",-1,1],["e",-1,1],["erede",3,1],["ende",3,1],["erende",5,1],["ene",3,1],["erne",3,1],["ere",3,1],["en",-1,1],["heden",10,1],["eren",10,1],["er",-1,1],["heder",13,1],["erer",13,1],["s",-1,2],["heds",16,1],["es",16,1],["endes",18,1],["erendes",19,1],["enes",18,1],["ernes",18,1],["eres",18,1],["ens",16,1],["hedens",24,1],["erens",24,1],["ers",16,1],["ets",16,1],["erets",28,1],["et",-1,1],["eret",30,1]];var i=[["gd",-1,-1],["dt",-1,-1],["gt",-1,-1],["kt",-1,-1]];var s=[["ig",-1,1],["lig",0,1],["elig",1,1],["els",-1,1],["løst",-1,2]];var t=[119,223,119,1];var a=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,48,0,128];var u=[239,254,42,3,0,0,0,0,0,0,0,0,0,0,0,0,16];var c=0;var l=0;var n="";function o(){l=r.limit;var e=r.cursor;{var i=r.cursor+3;if(i>r.limit){return false}r.cursor=i}c=r.cursor;r.cursor=e;r:while(true){var s=r.cursor;e:{if(!r.in_grouping(a,97,248)){break e}r.cursor=s;break r}r.cursor=s;if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(a,97,248)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}l=r.cursor;r:{if(!(ls.limit||(s.cursor=i,l=s.cursor,s.cursor=e,s.go_out_grouping(o,97,248)&&(s.cursor++,s.go_in_grouping(o,97,248))&&(s.cursor++,m=s.cursor,l<=m||(m=l))),s.cursor=r,s.limit_backward=s.cursor,s.cursor=s.limit,s.limit-s.cursor),e=((()=>{var r;if(!(s.cursor{var r,i=s.limit-s.cursor;if(s.ket=s.cursor,(!s.eq_s_b("st")||(s.bra=s.cursor,!s.eq_s_b("ig"))||s.slice_del())&&(s.cursor=s.limit-i,!(s.cursor=r.limit){break r}r.cursor++;break}continue}r.cursor=s;break}r.cursor=u;var a=r.cursor;r:{r.bra=r.cursor;if(!r.eq_s("y")){r.cursor=a;break r}r.ket=r.cursor;if(!r.slice_from("Y")){return false}}while(true){var t=r.cursor;r:{e:while(true){var o=r.cursor;i:{if(!r.in_grouping(c,97,232)){break i}r.bra=r.cursor;u:{var f=r.cursor;s:{if(!r.eq_s("i")){break s}r.ket=r.cursor;if(!r.in_grouping(c,97,232)){break s}if(!r.slice_from("I")){return false}break u}r.cursor=f;if(!r.eq_s("y")){break i}r.ket=r.cursor;if(!r.slice_from("Y")){return false}}r.cursor=o;break e}r.cursor=o;if(r.cursor>=r.limit){break r}r.cursor++}continue}r.cursor=t;break}return true}function _(){n=r.limit;l=r.limit;r:while(true){e:{if(!r.in_grouping(c,97,232)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(c,97,232)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}n=r.cursor;r:{if(!(n<3)){break r}n=3}r:while(true){e:{if(!r.in_grouping(c,97,232)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(c,97,232)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}l=r.cursor;return true}function m(){var e;while(true){var u=r.cursor;r:{r.bra=r.cursor;e=r.find_among(i);if(e==0){break r}r.ket=r.cursor;switch(e){case 1:if(!r.slice_from("y")){return false}break;case 2:if(!r.slice_from("i")){return false}break;case 3:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=u;break}return true}function v(){if(!(n<=r.cursor)){return false}return true}function g(){if(!(l<=r.cursor)){return false}return true}function d(){var e=r.limit-r.cursor;if(r.find_among_b(u)==0){return false}r.cursor=r.limit-e;r.ket=r.cursor;if(r.cursor<=r.limit_backward){return false}r.cursor--;r.bra=r.cursor;if(!r.slice_del()){return false}return true}function h(){b=false;r.ket=r.cursor;if(!r.eq_s_b("e")){return false}r.bra=r.cursor;if(!v()){return false}var e=r.limit-r.cursor;if(!r.out_grouping_b(c,97,232)){return 
false}r.cursor=r.limit-e;if(!r.slice_del()){return false}b=true;if(!d()){return false}return true}function w(){if(!v()){return false}var e=r.limit-r.cursor;if(!r.out_grouping_b(c,97,232)){return false}r.cursor=r.limit-e;{var i=r.limit-r.cursor;r:{if(!r.eq_s_b("gem")){break r}return false}r.cursor=r.limit-i}if(!r.slice_del()){return false}if(!d()){return false}return true}function p(){var e;var i=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(s);if(e==0){break r}r.bra=r.cursor;switch(e){case 1:if(!v()){break r}if(!r.slice_from("heid")){return false}break;case 2:if(!w()){break r}break;case 3:if(!v()){break r}if(!r.out_grouping_b(f,97,232)){break r}if(!r.slice_del()){return false}break}}r.cursor=r.limit-i;var u=r.limit-r.cursor;h();r.cursor=r.limit-u;var l=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("heid")){break r}r.bra=r.cursor;if(!g()){break r}{var n=r.limit-r.cursor;e:{if(!r.eq_s_b("c")){break e}break r}r.cursor=r.limit-n}if(!r.slice_del()){return false}r.ket=r.cursor;if(!r.eq_s_b("en")){break r}r.bra=r.cursor;if(!w()){break r}}r.cursor=r.limit-l;var k=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(a);if(e==0){break r}r.bra=r.cursor;switch(e){case 1:if(!g()){break r}if(!r.slice_del()){return false}e:{var _=r.limit-r.cursor;i:{r.ket=r.cursor;if(!r.eq_s_b("ig")){break i}r.bra=r.cursor;if(!g()){break i}{var m=r.limit-r.cursor;u:{if(!r.eq_s_b("e")){break u}break i}r.cursor=r.limit-m}if(!r.slice_del()){return false}break e}r.cursor=r.limit-_;if(!d()){break r}}break;case 2:if(!g()){break r}{var p=r.limit-r.cursor;e:{if(!r.eq_s_b("e")){break e}break r}r.cursor=r.limit-p}if(!r.slice_del()){return false}break;case 3:if(!g()){break r}if(!r.slice_del()){return false}if(!h()){break r}break;case 4:if(!g()){break r}if(!r.slice_del()){return false}break;case 5:if(!g()){break r}if(!b){break r}if(!r.slice_del()){return false}break}}r.cursor=r.limit-k;var q=r.limit-r.cursor;r:{if(!r.out_grouping_b(o,73,232)){break r}var y=r.limit-r.cursor;if(r.find_among_b(t)==0){break r}if(!r.out_grouping_b(c,97,232)){break r}r.cursor=r.limit-y;r.ket=r.cursor;if(r.cursor<=r.limit_backward){break r}r.cursor--;r.bra=r.cursor;if(!r.slice_del()){return false}}r.cursor=r.limit-q;return true}this.stem=function(){var e=r.cursor;k();r.cursor=e;var i=r.cursor;_();r.cursor=i;r.limit_backward=r.cursor;r.cursor=r.limit;p();r.cursor=r.limit_backward;var u=r.cursor;m();r.cursor=u;return true};this["stemWord"]=function(e){r.setCurrent(e);this.stem();return r.getCurrent()}};
\ No newline at end of file
+var DutchStemmer=function(){var o=new BaseStemmer,a=[["a",-1,1],["e",-1,2],["o",-1,1],["u",-1,1],["à",-1,1],["á",-1,1],["â",-1,1],["ä",-1,1],["è",-1,2],["é",-1,2],["ê",-1,2],["eë",-1,3],["ië",-1,4],["ò",-1,1],["ó",-1,1],["ô",-1,1],["ö",-1,1],["ù",-1,1],["ú",-1,1],["û",-1,1],["ü",-1,1]],t=[["nde",-1,8],["en",-1,7],["s",-1,2],["'s",2,1],["es",2,4],["ies",4,3],["aus",2,6],["és",2,5]],e=[["de",-1,5],["ge",-1,2],["ische",-1,4],["je",-1,1],["lijke",-1,3],["le",-1,9],["ene",-1,10],["re",-1,8],["se",-1,7],["te",-1,6],["ieve",-1,11]],s=[["heid",-1,3],["fie",-1,7],["gie",-1,8],["atie",-1,1],["isme",-1,5],["ing",-1,5],["arij",-1,6],["erij",-1,5],["sel",-1,3],["rder",-1,4],["ster",-1,3],["iteit",-1,2],["dst",-1,10],["tst",-1,9]],c=[["end",-1,9],["atief",-1,2],["erig",-1,9],["achtig",-1,3],["ioneel",-1,1],["baar",-1,3],["laar",-1,5],["naar",-1,4],["raar",-1,6],["eriger",-1,9],["achtiger",-1,3],["lijker",-1,8],["tant",-1,7],["erigst",-1,9],["achtigst",-1,3],["lijkst",-1,8]],u=[["ig",-1,1],["iger",-1,1],["igst",-1,1]],f=[["ft",-1,2],["kt",-1,1],["pt",-1,3]],n=[["bb",-1,1],["cc",-1,2],["dd",-1,3],["ff",-1,4],["gg",-1,5],["hh",-1,6],["jj",-1,7],["kk",-1,8],["ll",-1,9],["mm",-1,10],["nn",-1,11],["pp",-1,12],["qq",-1,13],["rr",-1,14],["ss",-1,15],["tt",-1,16],["v",-1,4],["vv",16,17],["ww",-1,18],["xx",-1,19],["z",-1,15],["zz",20,20]],l=[["d",-1,1],["t",-1,2]],_=[["",-1,-1],["eft",0,1],["vaa",0,1],["val",0,1],["vali",3,-1],["vare",0,1]],m=[["ë",-1,1],["ï",-1,2]],b=[["ë",-1,1],["ï",-1,2]],k=[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,120],d=[1,65,16,0,0,0,0,0,0,0,0,0,0,0,0,128,11,120,46,15],g=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,128,139,127,46,15],v=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,128,139,127,46,15],q=[17,65,208,1,0,0,0,0,0,0,0,0,0,0,0,128,139,127,46,15],w=!1,h=!1,p=0,j=0,z="";function x(){return j<=o.cursor}function C(){return p<=o.cursor}function S(){var r=o.limit-o.cursor,i=o.limit-o.cursor;return(o.in_grouping_b(v,97,252)||(o.cursor=o.limit-i,o.eq_s_b("ij")))&&(o.cursor=o.limit-r,1)}function B(){var r=o.limit-o.cursor,i=o.limit-o.cursor;return!o.eq_s_b("ij")&&(o.cursor=o.limit-i,o.out_grouping_b(v,97,252))&&(o.cursor=o.limit-r,1)}function D(){var r,i=o.limit-o.cursor;r:if(o.out_grouping_b(q,97,252)&&(o.ket=o.cursor,0!=(r=o.find_among_b(a))))switch(o.bra=o.cursor,r){case 1:var e=o.limit-o.cursor,s=o.limit-o.cursor;if(o.out_grouping_b(g,97,252)||(o.cursor=o.limit-s,!(o.cursor>o.limit_backward))){if(o.cursor=o.limit-e,""==(z=o.slice_to()))return;s=o.cursor;o.insert(o.cursor,o.cursor,z),o.cursor=s}break;case 2:var c=o.limit-o.cursor,e=o.limit-o.cursor;if(o.out_grouping_b(g,97,252)||(o.cursor=o.limit-e,!(o.cursor>o.limit_backward))){var u=o.limit-o.cursor;i:{var t=o.limit-o.cursor;if(!o.in_grouping_b(d,97,252)){if(o.cursor=o.limit-t,!o.in_grouping_b(k,101,235))break i;if(o.cursor>o.limit_backward)break i}break r}o.cursor=o.limit-u;t=o.limit-o.cursor;if(o.cursor<=o.limit_backward||(o.cursor--,!o.in_grouping_b(d,97,252))||!o.out_grouping_b(g,97,252)){if(o.cursor=o.limit-t,o.cursor=o.limit-c,""==(z=o.slice_to()))return;t=o.cursor;o.insert(o.cursor,o.cursor,z),o.cursor=t}}break;case 3:if(o.slice_from("eëe"))break;return;case 4:if(o.slice_from("iee"))break;return}o.cursor=o.limit-i}function W(){var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(e))){switch(o.bra=o.cursor,r){case 1:r:{var i=o.limit-o.cursor;if(o.eq_s_b("'t")){if(o.bra=o.cursor,o.slice_del())break r;return}if(o.cursor=o.limit-i,o.eq_s_b("et")&&(o.bra=o.cursor,x())&&B()){if(o.slice_del())break 
r;return}if(o.cursor=o.limit-i,o.eq_s_b("rnt")){if(o.bra=o.cursor,o.slice_from("rn"))break r;return}if(o.cursor=o.limit-i,o.eq_s_b("t")&&(o.bra=o.cursor,x())&&(()=>{var r,i=o.limit-o.cursor;return!(o.cursor<=o.limit_backward)&&(o.cursor--,r=o.limit-o.cursor,o.in_grouping_b(v,97,252)||(o.cursor=o.limit-r,o.eq_s_b("ij")))?(o.cursor=o.limit-i,1):void 0})()){if(o.slice_del())break r;return}if(o.cursor=o.limit-i,o.eq_s_b("ink")){if(o.bra=o.cursor,o.slice_from("ing"))break r;return}if(o.cursor=o.limit-i,o.eq_s_b("mp")){if(o.bra=o.cursor,o.slice_from("m"))break r;return}if(o.cursor=o.limit-i,o.eq_s_b("'")&&(o.bra=o.cursor,x())){if(o.slice_del())break r;return}if(o.cursor=o.limit-i,o.bra=o.cursor,!x())return;if(!B())return;if(!o.slice_del())return}break;case 2:if(!x())return;if(o.slice_from("g"))break;return;case 3:if(!x())return;if(o.slice_from("lijk"))break;return;case 4:if(!x())return;if(o.slice_from("isch"))break;return;case 5:if(!x())return;if(!B())return;if(o.slice_del())break;return;case 6:if(!x())return;if(o.slice_from("t"))break;return;case 7:if(!x())return;if(o.slice_from("s"))break;return;case 8:if(!x())return;if(o.slice_from("r"))break;return;case 9:if(!x())return;if(!o.slice_del())return;o.insert(o.cursor,o.cursor,"l"),D();break;case 10:if(!x())return;if(!B())return;if(!o.slice_del())return;o.insert(o.cursor,o.cursor,"en"),D();break;case 11:if(!x())return;if(!B())return;if(o.slice_from("ief"))break;return}return 1}}function y(){var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(l))&&(o.bra=o.cursor,x())&&B()){switch(r){case 1:var i=o.limit-o.cursor;if(o.eq_s_b("n")&&x())return;o.cursor=o.limit-i;r:{var e=o.limit-o.cursor;if(o.eq_s_b("in")&&!(o.cursor>o.limit_backward)){if(o.slice_from("n"))break r;return}if(o.cursor=o.limit-e,!o.slice_del())return}break;case 2:i=o.limit-o.cursor;if(o.eq_s_b("h")&&x())return;o.cursor=o.limit-i;i=o.limit-o.cursor;if(o.eq_s_b("en")&&!(o.cursor>o.limit_backward))return;if(o.cursor=o.limit-i,o.slice_del())break;return}return 1}}function A(){j=o.limit,p=o.limit;for(var r=o.cursor;o.out_grouping(v,97,252););for(var i=1;;){var e=o.cursor,s=o.cursor;if(!o.eq_s("ij")&&(o.cursor=s,!o.in_grouping(v,97,252))){o.cursor=e;break}i--}if(!(0{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(t))){switch(o.bra=o.cursor,r){case 1:if(o.slice_del())break;return;case 2:if(!x())return;var i=o.limit-o.cursor;if(o.eq_s_b("t")&&x())return;if(o.cursor=o.limit-i,!B())return;if(o.slice_del())break;return;case 3:if(!x())return;if(o.slice_from("ie"))break;return;case 4:r:{var e=o.limit-o.cursor,s=o.limit-o.cursor;if(o.eq_s_b("ar")&&x()&&B()){if(o.cursor=o.limit-s,!o.slice_del())return;D()}else{o.cursor=o.limit-e;s=o.limit-o.cursor;if(o.eq_s_b("er")&&x()&&B()){if(o.cursor=o.limit-s,o.slice_del())break r;return}if(o.cursor=o.limit-e,!x())return;if(!B())return;if(!o.slice_from("e"))return}}break;case 5:if(!x())return;if(o.slice_from("é"))break;return;case 6:if(!x())return;if(!S())return;if(o.slice_from("au"))break;return;case 7:r:{var c=o.limit-o.cursor;if(o.eq_s_b("hed")&&x()){if(o.bra=o.cursor,o.slice_from("heid"))break r;return}if(o.cursor=o.limit-c,o.eq_s_b("nd")){if(o.slice_del())break r;return}if(o.cursor=o.limit-c,o.eq_s_b("d")&&x()&&B()){if(o.bra=o.cursor,o.slice_del())break r;return}o.cursor=o.limit-c;var u=o.limit-o.cursor;if((o.eq_s_b("i")||(o.cursor=o.limit-u,o.eq_s_b("j")))&&S()){if(o.slice_del())break r;return}if(o.cursor=o.limit-c,!x())return;if(!B())return;if(!o.slice_del())return;D()}break;case 8:if(o.slice_from("nd"))break;return}return 
1}})()&&(h=!0),o.cursor=o.limit-r,o.limit-o.cursor),r=(W()&&(h=!0),o.cursor=o.limit-r,o.limit-o.cursor),r=((()=>{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(s))){switch(o.bra=o.cursor,r){case 1:if(!x())return;if(o.slice_from("eer"))break;return;case 2:if(!x())return;if(!o.slice_del())return;D();break;case 3:if(!x())return;if(o.slice_del())break;return;case 4:if(o.slice_from("r"))break;return;case 5:r:{var i=o.limit-o.cursor;if(o.eq_s_b("ild")){if(o.slice_from("er"))break r;return}if(o.cursor=o.limit-i,!x())return;if(!o.slice_del())return;D()}break;case 6:if(!x())return;if(!B())return;if(o.slice_from("aar"))break;return;case 7:if(!C())return;if(!o.slice_del())return;o.insert(o.cursor,o.cursor,"f"),D();break;case 8:if(!C())return;if(!o.slice_del())return;o.insert(o.cursor,o.cursor,"g"),D();break;case 9:if(!x())return;if(!B())return;if(o.slice_from("t"))break;return;case 10:if(!x())return;if(!B())return;if(o.slice_from("d"))break;return}return 1}})()&&(h=!0),o.cursor=o.limit-r,o.limit-o.cursor),r=((()=>{r:{var r=o.limit-o.cursor;i:if(o.ket=o.cursor,0!=(i=o.find_among_b(c))){switch(o.bra=o.cursor,i){case 1:if(!x())break i;if(o.slice_from("ie"))break;return;case 2:if(!x())break i;if(o.slice_from("eer"))break;return;case 3:if(!x())break i;if(o.slice_del())break;return;case 4:if(!x())break i;if(!S())break i;if(o.slice_from("n"))break;return;case 5:if(!x())break i;if(!S())break i;if(o.slice_from("l"))break;return;case 6:if(!x())break i;if(!S())break i;if(o.slice_from("r"))break;return;case 7:if(!x())break i;if(o.slice_from("teer"))break;return;case 8:if(!x())break i;if(o.slice_from("lijk"))break;return;case 9:if(!x())break i;if(!B())break i;if(!o.slice_del())return;D()}break r}if(o.cursor=o.limit-r,o.ket=o.cursor,0==o.find_among_b(u))return;if(o.bra=o.cursor,!x())return;var i=o.limit-o.cursor;if(o.eq_s_b("inn")&&!(o.cursor>o.limit_backward))return;if(o.cursor=o.limit-i,!B())return;if(!o.slice_del())return;D()}return 1})()&&(h=!0),o.cursor=o.limit-r,o.cursor=o.limit_backward,w=!1,o.cursor),i=o.cursor,i=((()=>{if(o.bra=o.cursor,o.eq_s("ge")){o.ket=o.cursor;var r=o.cursor,i=o.cursor+3;if(!(i>o.limit)){o.cursor=i,o.cursor=r;for(var i=o.cursor;;){var e=o.cursor,s=o.cursor;if(o.eq_s("ij")||(o.cursor=s,o.in_grouping(v,97,252)))break;if(o.cursor=e,o.cursor>=o.limit)return;o.cursor++}for(;;){var c=o.cursor,u=o.cursor;if(!o.eq_s("ij")&&(o.cursor=u,!o.in_grouping(v,97,252))){o.cursor=c;break}}if(o.cursor{if(!(o.cursor>=o.limit)){for(o.cursor++;;){if(o.bra=o.cursor,o.eq_s("ge")){o.ket=o.cursor;break}if(o.cursor>=o.limit)return;o.cursor++}var r=o.cursor,i=o.cursor+3;if(!(i>o.limit)){o.cursor=i,o.cursor=r;for(var i=o.cursor;;){var e=o.cursor,s=o.cursor;if(o.eq_s("ij")||(o.cursor=s,o.in_grouping(v,97,252)))break;if(o.cursor=e,o.cursor>=o.limit)return;o.cursor++}for(;;){var c=o.cursor,u=o.cursor;if(!o.eq_s("ij")&&(o.cursor=u,!o.in_grouping(v,97,252))){o.cursor=c;break}}if(o.cursor{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(f))){switch(o.bra=o.cursor,r){case 1:if(o.slice_from("k"))break;return;case 2:if(o.slice_from("f"))break;return;case 3:if(o.slice_from("p"))break;return}return 1}})()&&(h=!0),o.cursor=o.limit-r,o.limit-o.cursor);return h&&(()=>{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(n))){switch(o.bra=o.cursor,r){case 1:if(o.slice_from("b"))break;return;case 2:if(o.slice_from("c"))break;return;case 3:if(o.slice_from("d"))break;return;case 4:if(o.slice_from("f"))break;return;case 5:if(o.slice_from("g"))break;return;case 6:if(o.slice_from("h"))break;return;case 7:if(o.slice_from("j"))break;return;case 
8:if(o.slice_from("k"))break;return;case 9:if(o.slice_from("l"))break;return;case 10:if(o.slice_from("m"))break;return;case 11:var i=o.limit-o.cursor;if(o.eq_s_b("i")&&!(o.cursor>o.limit_backward))return;if(o.cursor=o.limit-i,o.slice_from("n"))break;return;case 12:if(o.slice_from("p"))break;return;case 13:if(o.slice_from("q"))break;return;case 14:if(o.slice_from("r"))break;return;case 15:if(o.slice_from("s"))break;return;case 16:if(o.slice_from("t"))break;return;case 17:if(o.slice_from("v"))break;return;case 18:if(o.slice_from("w"))break;return;case 19:if(o.slice_from("x"))break;return;case 20:if(o.slice_from("z"))break;return}}})(),o.cursor=o.limit-i,o.cursor=o.limit_backward,!0},this.stemWord=function(r){return o.setCurrent(r),this.stem(),o.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/dutch_porter-stemmer.js b/sphinx/search/minified-js/dutch_porter-stemmer.js
new file mode 100644
index 00000000000..32f195914c2
--- /dev/null
+++ b/sphinx/search/minified-js/dutch_porter-stemmer.js
@@ -0,0 +1 @@
+var DutchPorterStemmer=function(){var t=new BaseStemmer,a=[["",-1,6],["á",0,1],["ä",0,1],["é",0,2],["ë",0,2],["í",0,3],["ï",0,3],["ó",0,4],["ö",0,4],["ú",0,5],["ü",0,5]],s=[["",-1,3],["I",0,2],["Y",0,1]],i=[["dd",-1,-1],["kk",-1,-1],["tt",-1,-1]],c=[["ene",-1,2],["se",-1,3],["en",-1,2],["heden",2,1],["s",-1,3]],n=[["end",-1,1],["ig",-1,2],["ing",-1,1],["lijk",-1,3],["baar",-1,4],["bar",-1,5]],_=[["aa",-1,-1],["ee",-1,-1],["oo",-1,-1],["uu",-1,-1]],l=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],f=[1,0,0,17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],b=[17,67,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],u=0,o=0,m=0,k=!1;function g(){return m<=t.cursor}function d(){return o<=t.cursor}function v(){var r=t.limit-t.cursor;return 0!=t.find_among_b(i)&&(t.cursor=t.limit-r,t.ket=t.cursor,!(t.cursor<=t.limit_backward||(t.cursor--,t.bra=t.cursor,!t.slice_del())))}function h(){var r;if((k=!1,t.ket=t.cursor,t.eq_s_b("e"))&&(t.bra=t.cursor,g()))return r=t.limit-t.cursor,t.out_grouping_b(l,97,232)?(t.cursor=t.limit-r,t.slice_del()&&(k=!0,!!v())):void 0}function p(){if(g()){var r=t.limit-t.cursor;if(t.out_grouping_b(l,97,232))return t.cursor=t.limit-r,r=t.limit-t.cursor,t.eq_s_b("gem")?void 0:(t.cursor=t.limit-r,t.slice_del()&&!!v())}}this.stem=function(){var r,i=t.cursor,i=((()=>{for(var r,i=t.cursor;;){var e=t.cursor;r:{switch(t.bra=t.cursor,r=t.find_among(a),t.ket=t.cursor,r){case 1:if(t.slice_from("a"))break;return;case 2:if(t.slice_from("e"))break;return;case 3:if(t.slice_from("i"))break;return;case 4:if(t.slice_from("o"))break;return;case 5:if(t.slice_from("u"))break;return;case 6:if(t.cursor>=t.limit)break r;t.cursor++}continue}t.cursor=e;break}if(t.cursor=i,i=t.cursor,t.bra=t.cursor,t.eq_s("y")){if(t.ket=t.cursor,!t.slice_from("Y"))return}else t.cursor=i;for(;;){var s=t.cursor;if(!t.go_out_grouping(l,97,232)){t.cursor=s;break}t.cursor++;var u=t.cursor;r:{t.bra=t.cursor;var o=t.cursor;if(t.eq_s("i")){t.ket=t.cursor;var c=t.cursor;if(t.in_grouping(l,97,232)&&!t.slice_from("I"))return;t.cursor=c}else{if(t.cursor=o,!t.eq_s("y")){t.cursor=u;break r}if(t.ket=t.cursor,!t.slice_from("Y"))return}}}})(),t.cursor=i,t.cursor),e=(m=t.limit,o=t.limit,r=t.cursor,(e=t.cursor+3)>t.limit||(t.cursor=e,u=t.cursor,t.cursor=r,t.go_out_grouping(l,97,232)&&(t.cursor++,t.go_in_grouping(l,97,232))&&(t.cursor++,m=t.cursor,u<=m||(m=u),t.go_out_grouping(l,97,232))&&(t.cursor++,t.go_in_grouping(l,97,232))&&(t.cursor++,o=t.cursor)),t.cursor=i,t.limit_backward=t.cursor,t.cursor=t.limit,(()=>{var r,i=t.limit-t.cursor;r:if(t.ket=t.cursor,0!=(r=t.find_among_b(c)))switch(t.bra=t.cursor,r){case 1:if(!g())break r;if(t.slice_from("heid"))break;return;case 2:p();break;case 3:if(!g())break r;if(!t.out_grouping_b(b,97,232))break r;if(t.slice_del())break;return}if(t.cursor=t.limit-i,i=t.limit-t.cursor,h(),t.cursor=t.limit-i,i=t.limit-t.cursor,t.ket=t.cursor,t.eq_s_b("heid")&&(t.bra=t.cursor,d())){var e=t.limit-t.cursor;if(!t.eq_s_b("c")){if(t.cursor=t.limit-e,!t.slice_del())return;t.ket=t.cursor,t.eq_s_b("en")&&(t.bra=t.cursor,p())}}t.cursor=t.limit-i,e=t.limit-t.cursor;r:if(t.ket=t.cursor,0!=(r=t.find_among_b(n)))switch(t.bra=t.cursor,r){case 1:if(d()){if(!t.slice_del())return;i:{var s=t.limit-t.cursor;if(t.ket=t.cursor,t.eq_s_b("ig")&&(t.bra=t.cursor,d())){var u=t.limit-t.cursor;if(!t.eq_s_b("e")){if(t.cursor=t.limit-u,t.slice_del())break i;return}}if(t.cursor=t.limit-s,!v())break r}}break;case 2:if(!d())break r;var o=t.limit-t.cursor;if(t.eq_s_b("e"))break r;if(t.cursor=t.limit-o,t.slice_del())break;return;case 
3:if(d()){if(!t.slice_del())return;h()}break;case 4:if(!d())break r;if(t.slice_del())break;return;case 5:if(!d())break r;if(!k)break r;if(t.slice_del())break;return}if(t.cursor=t.limit-e,i=t.limit-t.cursor,t.out_grouping_b(f,73,232)){e=t.limit-t.cursor;if(0!=t.find_among_b(_)&&t.out_grouping_b(l,97,232)&&(t.cursor=t.limit-e,t.ket=t.cursor,!(t.cursor<=t.limit_backward||(t.cursor--,t.bra=t.cursor,t.slice_del()))))return}t.cursor=t.limit-i})(),t.cursor=t.limit_backward,t.cursor);return(()=>{for(var r;;){var i=t.cursor;r:{switch(t.bra=t.cursor,r=t.find_among(s),t.ket=t.cursor,r){case 1:if(t.slice_from("y"))break;return;case 2:if(t.slice_from("i"))break;return;case 3:if(t.cursor>=t.limit)break r;t.cursor++}continue}t.cursor=i;break}})(),t.cursor=e,!0},this.stemWord=function(r){return t.setCurrent(r),this.stem(),t.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/english-stemmer.js b/sphinx/search/minified-js/english-stemmer.js
new file mode 100644
index 00000000000..e005f991e6a
--- /dev/null
+++ b/sphinx/search/minified-js/english-stemmer.js
@@ -0,0 +1 @@
+var EnglishStemmer=function(){var a=new BaseStemmer,c=[["arsen",-1,-1],["commun",-1,-1],["emerg",-1,-1],["gener",-1,-1],["later",-1,-1],["organ",-1,-1],["past",-1,-1],["univers",-1,-1]],o=[["'",-1,1],["'s'",0,1],["'s",-1,1]],u=[["ied",-1,2],["s",-1,3],["ies",1,2],["sses",1,1],["ss",1,-1],["us",1,-1]],t=[["succ",-1,1],["proc",-1,1],["exc",-1,1]],l=[["even",-1,2],["cann",-1,2],["inn",-1,2],["earr",-1,2],["herr",-1,2],["out",-1,2],["y",-1,1]],n=[["",-1,-1],["ed",0,2],["eed",1,1],["ing",0,3],["edly",0,2],["eedly",4,1],["ingly",0,2]],f=[["",-1,3],["bb",0,2],["dd",0,2],["ff",0,2],["gg",0,2],["bl",0,1],["mm",0,2],["nn",0,2],["pp",0,2],["rr",0,2],["at",0,1],["tt",0,2],["iz",0,1]],_=[["anci",-1,3],["enci",-1,2],["ogi",-1,14],["li",-1,16],["bli",3,12],["abli",4,4],["alli",3,8],["fulli",3,9],["lessli",3,15],["ousli",3,10],["entli",3,5],["aliti",-1,8],["biliti",-1,12],["iviti",-1,11],["tional",-1,1],["ational",14,7],["alism",-1,8],["ation",-1,7],["ization",17,6],["izer",-1,6],["ator",-1,7],["iveness",-1,11],["fulness",-1,9],["ousness",-1,10],["ogist",-1,13]],m=[["icate",-1,4],["ative",-1,6],["alize",-1,3],["iciti",-1,4],["ical",-1,4],["tional",-1,1],["ational",5,2],["ful",-1,5],["ness",-1,5]],b=[["ic",-1,1],["ance",-1,1],["ence",-1,1],["able",-1,1],["ible",-1,1],["ate",-1,1],["ive",-1,1],["ize",-1,1],["iti",-1,1],["al",-1,1],["ism",-1,1],["ion",-1,2],["er",-1,1],["ous",-1,1],["ant",-1,1],["ent",-1,1],["ment",15,1],["ement",16,1]],k=[["e",-1,1],["l",-1,2]],g=[["andes",-1,-1],["atlas",-1,-1],["bias",-1,-1],["cosmos",-1,-1],["early",-1,5],["gently",-1,3],["howe",-1,-1],["idly",-1,2],["news",-1,-1],["only",-1,6],["singly",-1,7],["skies",-1,1],["sky",-1,-1],["ugly",-1,4]],d=[17,64],v=[17,65,16,1],i=[1,17,65,208,1],w=[55,141,2],p=!1,y=0,h=0;function q(){var r=a.limit-a.cursor;return!!(a.out_grouping_b(i,89,121)&&a.in_grouping_b(v,97,121)&&a.out_grouping_b(v,97,121)||(a.cursor=a.limit-r,a.out_grouping_b(v,97,121)&&a.in_grouping_b(v,97,121)&&!(a.cursor>a.limit_backward))||(a.cursor=a.limit-r,a.eq_s_b("past")))}function z(){return h<=a.cursor}function Y(){return y<=a.cursor}this.stem=function(){var r=a.cursor;if(!(()=>{var r;if(a.bra=a.cursor,0!=(r=a.find_among(g))&&(a.ket=a.cursor,!(a.cursora.limit)a.cursor=i;else{a.cursor=e,a.cursor=r,(()=>{p=!1;var r=a.cursor;if(a.bra=a.cursor,!a.eq_s("'")||(a.ket=a.cursor,a.slice_del())){a.cursor=r;r=a.cursor;if(a.bra=a.cursor,a.eq_s("y")){if(a.ket=a.cursor,!a.slice_from("Y"))return;p=!0}a.cursor=r;for(r=a.cursor;;){var i=a.cursor;r:{for(;;){var e=a.cursor;if(a.in_grouping(v,97,121)&&(a.bra=a.cursor,a.eq_s("y"))){a.ket=a.cursor,a.cursor=e;break}if(a.cursor=e,a.cursor>=a.limit)break r;a.cursor++}if(!a.slice_from("Y"))return;p=!0;continue}a.cursor=i;break}a.cursor=r}})(),h=a.limit,y=a.limit;i=a.cursor;r:{var s=a.cursor;if(0==a.find_among(c)){if(a.cursor=s,!a.go_out_grouping(v,97,121))break r;if(a.cursor++,!a.go_in_grouping(v,97,121))break r;a.cursor++}h=a.cursor,a.go_out_grouping(v,97,121)&&(a.cursor++,a.go_in_grouping(v,97,121))&&(a.cursor++,y=a.cursor)}a.cursor=i,a.limit_backward=a.cursor,a.cursor=a.limit;var e=a.limit-a.cursor,r=((()=>{var r=a.limit-a.cursor;if(a.ket=a.cursor,0==a.find_among_b(o))a.cursor=a.limit-r;else if(a.bra=a.cursor,!a.slice_del())return;if(a.ket=a.cursor,0!=(r=a.find_among_b(u)))switch(a.bra=a.cursor,r){case 1:if(a.slice_from("ss"))break;return;case 2:r:{var i=a.limit-a.cursor,e=a.cursor-2;if(!(e{a.ket=a.cursor,o=a.find_among_b(n),a.bra=a.cursor;r:{var r=a.limit-a.cursor;i:{switch(o){case 1:var i=a.limit-a.cursor;e:{var 
e=a.limit-a.cursor;if(0==a.find_among_b(t)||a.cursor>a.limit_backward){if(a.cursor=a.limit-e,!z())break e;if(!a.slice_from("ee"))return}}a.cursor=a.limit-i;break;case 2:break i;case 3:if(0==(o=a.find_among_b(l)))break i;switch(o){case 1:var s=a.limit-a.cursor;if(!a.out_grouping_b(v,97,121))break i;if(a.cursor>a.limit_backward)break i;if(a.cursor=a.limit-s,a.bra=a.cursor,a.slice_from("ie"))break;return;case 2:if(a.cursor>a.limit_backward)break i}}break r}a.cursor=a.limit-r;var c=a.limit-a.cursor;if(!a.go_out_grouping_b(v,97,121))return;if(a.cursor--,a.cursor=a.limit-c,!a.slice_del())return;a.ket=a.cursor,a.bra=a.cursor;var o,c=a.limit-a.cursor;switch(o=a.find_among_b(f)){case 1:return a.slice_from("e");case 2:var u=a.limit-a.cursor;if(a.in_grouping_b(d,97,111)&&!(a.cursor>a.limit_backward))return;a.cursor=a.limit-u;break;case 3:return a.cursor!=h||(u=a.limit-a.cursor,q()&&(a.cursor=a.limit-u,a.slice_from("e")))}if(a.cursor=a.limit-c,a.ket=a.cursor,a.cursor<=a.limit_backward)return;if(a.cursor--,a.bra=a.cursor,!a.slice_del())return}})(),a.cursor=a.limit-r,a.limit-a.cursor),r=(a.ket=a.cursor,e=a.limit-a.cursor,(a.eq_s_b("y")||(a.cursor=a.limit-e,a.eq_s_b("Y")))&&(a.bra=a.cursor,a.out_grouping_b(v,97,121))&&a.cursor>a.limit_backward&&a.slice_from("i"),a.cursor=a.limit-i,a.limit-a.cursor),e=((()=>{var r;if(a.ket=a.cursor,0!=(r=a.find_among_b(_))&&(a.bra=a.cursor,z()))switch(r){case 1:if(a.slice_from("tion"))break;return;case 2:if(a.slice_from("ence"))break;return;case 3:if(a.slice_from("ance"))break;return;case 4:if(a.slice_from("able"))break;return;case 5:if(a.slice_from("ent"))break;return;case 6:if(a.slice_from("ize"))break;return;case 7:if(a.slice_from("ate"))break;return;case 8:if(a.slice_from("al"))break;return;case 9:if(a.slice_from("ful"))break;return;case 10:if(a.slice_from("ous"))break;return;case 11:if(a.slice_from("ive"))break;return;case 12:if(a.slice_from("ble"))break;return;case 13:if(a.slice_from("og"))break;return;case 14:if(!a.eq_s_b("l"))return;if(a.slice_from("og"))break;return;case 15:if(a.slice_from("less"))break;return;case 16:if(!a.in_grouping_b(w,99,116))return;if(a.slice_del())break}})(),a.cursor=a.limit-r,a.limit-a.cursor),i=((()=>{var r;if(a.ket=a.cursor,0!=(r=a.find_among_b(m))&&(a.bra=a.cursor,z()))switch(r){case 1:if(a.slice_from("tion"))break;return;case 2:if(a.slice_from("ate"))break;return;case 3:if(a.slice_from("al"))break;return;case 4:if(a.slice_from("ic"))break;return;case 5:if(a.slice_del())break;return;case 6:if(!Y())return;if(a.slice_del())break}})(),a.cursor=a.limit-e,a.limit-a.cursor),r=((()=>{var r;if(a.ket=a.cursor,0!=(r=a.find_among_b(b))&&(a.bra=a.cursor,Y()))switch(r){case 1:if(a.slice_del())break;return;case 2:var i=a.limit-a.cursor;if(!a.eq_s_b("s")&&(a.cursor=a.limit-i,!a.eq_s_b("t")))return;if(a.slice_del())break}})(),a.cursor=a.limit-i,a.limit-a.cursor),e=((()=>{var r;if(a.ket=a.cursor,0!=(r=a.find_among_b(k)))switch(a.bra=a.cursor,r){case 1:if(!Y()){if(!z())return;var i=a.limit-a.cursor;if(q())return;a.cursor=a.limit-i}if(a.slice_del())break;return;case 2:if(!Y())return;if(!a.eq_s_b("l"))return;if(a.slice_del())break}})(),a.cursor=a.limit-r,a.cursor=a.limit_backward,a.cursor);(()=>{if(p)for(;;){var r=a.cursor;r:{for(;;){var i=a.cursor;if(a.bra=a.cursor,a.eq_s("Y")){a.ket=a.cursor,a.cursor=i;break}if(a.cursor=i,a.cursor>=a.limit)break r;a.cursor++}if(a.slice_from("y"))continue;return}a.cursor=r;break}})(),a.cursor=e}}return!0},this.stemWord=function(r){return a.setCurrent(r),this.stem(),a.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/esperanto-stemmer.js b/sphinx/search/minified-js/esperanto-stemmer.js
new file mode 100644
index 00000000000..44353e6a150
--- /dev/null
+++ b/sphinx/search/minified-js/esperanto-stemmer.js
@@ -0,0 +1 @@
+var EsperantoStemmer=function(){var o=new BaseStemmer,s=[["",-1,14],["-",0,13],["cx",0,1],["gx",0,2],["hx",0,3],["jx",0,4],["q",0,12],["sx",0,5],["ux",0,6],["w",0,12],["x",0,12],["y",0,12],["á",0,7],["é",0,8],["í",0,9],["ó",0,10],["ú",0,11]],i=[["as",-1,-1],["i",-1,-1],["is",1,-1],["os",-1,-1],["u",-1,-1],["us",4,-1]],u=[["ci",-1,-1],["gi",-1,-1],["hi",-1,-1],["li",-1,-1],["ili",3,-1],["ŝli",3,-1],["mi",-1,-1],["ni",-1,-1],["oni",7,-1],["ri",-1,-1],["si",-1,-1],["vi",-1,-1],["ivi",11,-1],["ĝi",-1,-1],["ŝi",-1,-1],["iŝi",14,-1],["malŝi",14,-1]],e=[["amb",-1,-1],["bald",-1,-1],["malbald",1,-1],["morg",-1,-1],["postmorg",3,-1],["adi",-1,-1],["hodi",-1,-1],["ank",-1,-1],["ĉirk",-1,-1],["tutĉirk",8,-1],["presk",-1,-1],["almen",-1,-1],["apen",-1,-1],["hier",-1,-1],["antaŭhier",13,-1],["malgr",-1,-1],["ankor",-1,-1],["kontr",-1,-1],["anstat",-1,-1],["kvaz",-1,-1]],c=[["aliu",-1,-1],["unu",-1,-1]],a=[["aha",-1,-1],["haha",0,-1],["haleluja",-1,-1],["hola",-1,-1],["hosana",-1,-1],["maltra",-1,-1],["hura",-1,-1],["ĥaĥa",-1,-1],["ekde",-1,-1],["elde",-1,-1],["disde",-1,-1],["ehe",-1,-1],["maltre",-1,-1],["dirlididi",-1,-1],["malpli",-1,-1],["malĉi",-1,-1],["malkaj",-1,-1],["amen",-1,-1],["tamen",17,-1],["oho",-1,-1],["maltro",-1,-1],["minus",-1,-1],["uhu",-1,-1],["muu",-1,-1]],t=[["tri",-1,-1],["du",-1,-1],["unu",-1,-1]],m=[["dek",-1,-1],["cent",-1,-1]],l=[["k",-1,-1],["kelk",0,-1],["nen",-1,-1],["t",-1,-1],["mult",3,-1],["samt",3,-1],["ĉ",-1,-1]],n=[["a",-1,-1],["e",-1,-1],["i",-1,-1],["j",-1,-1,r],["aj",3,-1],["oj",3,-1],["n",-1,-1,r],["an",6,-1],["en",6,-1],["jn",6,-1,r],["ajn",9,-1],["ojn",9,-1],["on",6,-1],["o",-1,-1],["as",-1,-1],["is",-1,-1],["os",-1,-1],["us",-1,-1],["u",-1,-1]],_=[17,65,16],b=[1,64,16],f=[255,3],k=!1;function r(){var r=o.limit-o.cursor;return!(!o.eq_s_b("-")&&(o.cursor=o.limit-r,!o.in_grouping_b(f,48,57)))}this.stem=function(){var r=o.cursor;if(!(()=>{var r;for(k=!1;;){var i=o.cursor;r:{switch(o.bra=o.cursor,r=o.find_among(s),o.ket=o.cursor,r){case 1:if(o.slice_from("ĉ"))break;return;case 2:if(o.slice_from("ĝ"))break;return;case 3:if(o.slice_from("ĥ"))break;return;case 4:if(o.slice_from("ĵ"))break;return;case 5:if(o.slice_from("ŝ"))break;return;case 6:if(o.slice_from("ŭ"))break;return;case 7:if(!o.slice_from("a"))return;k=!0;break;case 8:if(!o.slice_from("e"))return;k=!0;break;case 9:if(!o.slice_from("i"))return;k=!0;break;case 10:if(!o.slice_from("o"))return;k=!0;break;case 11:if(!o.slice_from("u"))return;k=!0;break;case 12:k=!0;break;case 13:k=!1;break;case 14:if(o.cursor>=o.limit)break r;o.cursor++}continue}o.cursor=i;break}return!k})())return!1;o.cursor=r;r=o.cursor,o.bra=o.cursor,o.eq_s("'")&&(o.ket=o.cursor,!o.eq_s("st")||0==o.find_among(i)||o.cursor{o.ket=o.cursor;var r=o.limit-o.cursor;return o.eq_s_b("n")||(o.cursor=o.limit-r),o.bra=o.cursor,0==o.find_among_b(u)||(r=o.limit-o.cursor,o.cursor>o.limit_backward&&(o.cursor=o.limit-r,!o.eq_s_b("-")))?void 0:!!o.slice_del()})())return!1;o.cursor=o.limit-r;r=o.limit-o.cursor,(()=>{if(o.ket=o.cursor,o.eq_s_b("'")){o.bra=o.cursor;r:{var r=o.limit-o.cursor;if(o.eq_s_b("l")&&!(o.cursor>o.limit_backward)){if(o.slice_from("a"))break r;return}if(o.cursor=o.limit-r,o.eq_s_b("un")&&!(o.cursor>o.limit_backward)){if(o.slice_from("u"))break r;return}if(o.cursor=o.limit-r,0!=o.find_among_b(e)){var i=o.limit-o.cursor;if(!(o.cursor>o.limit_backward)||(o.cursor=o.limit-i,o.eq_s_b("-"))){if(o.slice_from("aŭ"))break 
r;return}}if(o.cursor=o.limit-r,!o.slice_from("o"))return}}})(),o.cursor=o.limit-r,r=o.limit-o.cursor;if((()=>{o.ket=o.cursor,o.bra=o.cursor;var r=o.limit-o.cursor,i=o.limit-o.cursor,s=o.limit-o.cursor;if(o.eq_s_b("n")||(o.cursor=o.limit-s),o.bra=o.cursor,!o.eq_s_b("e")){o.cursor=o.limit-i;var s=o.limit-o.cursor,i=(o.eq_s_b("n")||(o.cursor=o.limit-s),o.limit-o.cursor);if(o.eq_s_b("j")||(o.cursor=o.limit-i),o.bra=o.cursor,!o.in_grouping_b(b,97,117))return}return!o.eq_s_b("i")||(s=o.limit-o.cursor,0==o.find_among_b(l)&&(o.cursor=o.limit-s),i=o.limit-o.cursor,o.cursor>o.limit_backward&&(o.cursor=o.limit-i,!o.eq_s_b("-")))?void 0:(o.cursor=o.limit-r,!!o.slice_del())})())return!1;o.cursor=o.limit-r;r=o.limit-o.cursor;if((()=>{var r;return 0==o.find_among_b(a)||(r=o.limit-o.cursor,o.cursor>o.limit_backward&&(o.cursor=o.limit-r,!o.eq_s_b("-")))?void 0:1})())return!1;o.cursor=o.limit-r;r=o.limit-o.cursor;if(0!=o.find_among_b(t)&&0!=o.find_among_b(m))return!1;o.cursor=o.limit-r;r=o.limit-o.cursor;if((()=>{o.ket=o.cursor;var r=o.limit-o.cursor,r=(o.eq_s_b("n")||(o.cursor=o.limit-r),o.limit-o.cursor);return o.eq_s_b("j")||(o.cursor=o.limit-r),o.bra=o.cursor,0==o.find_among_b(c)||(r=o.limit-o.cursor,o.cursor>o.limit_backward&&(o.cursor=o.limit-r,!o.eq_s_b("-")))?void 0:!!o.slice_del()})())return!1;o.cursor=o.limit-r;r=o.limit-o.cursor;return!!(()=>{r:{var r=o.limit-o.cursor;i:{for(var i=2;0{var r;return o.ket=o.cursor,0!=o.find_among_b(n)&&(r=o.limit-o.cursor,o.eq_s_b("-")||(o.cursor=o.limit-r),o.bra=o.cursor,o.slice_del())?1:void 0})())&&(o.cursor=o.limit_backward,!0)},this.stemWord=function(r){return o.setCurrent(r),this.stem(),o.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/estonian-stemmer.js b/sphinx/search/minified-js/estonian-stemmer.js
new file mode 100644
index 00000000000..d27e90fcd3b
--- /dev/null
+++ b/sphinx/search/minified-js/estonian-stemmer.js
@@ -0,0 +1 @@
+var EstonianStemmer=function(){var t=new BaseStemmer,a=[["gi",-1,1],["ki",-1,2]],r=[["da",-1,3],["mata",-1,1],["b",-1,3],["ksid",-1,1],["nuksid",3,1],["me",-1,3],["sime",5,1],["ksime",6,1],["nuksime",7,1],["akse",-1,2],["dakse",9,1],["takse",9,1],["site",-1,1],["ksite",12,1],["nuksite",13,1],["n",-1,3],["sin",15,1],["ksin",16,1],["nuksin",17,1],["daks",-1,1],["taks",-1,1]],i=[["aa",-1,-1],["ee",-1,-1],["ii",-1,-1],["oo",-1,-1],["uu",-1,-1],["ää",-1,-1],["õõ",-1,-1],["öö",-1,-1],["üü",-1,-1]],s=[["i",-1,1]],o=[["lane",-1,1],["line",-1,3],["mine",-1,2],["lasse",-1,1],["lisse",-1,3],["misse",-1,2],["lasi",-1,1],["lisi",-1,3],["misi",-1,2],["last",-1,1],["list",-1,3],["mist",-1,2]],k=[["ga",-1,1],["ta",-1,1],["le",-1,1],["sse",-1,1],["l",-1,1],["s",-1,1],["ks",5,1],["t",-1,2],["lt",7,1],["st",7,1]],m=[["",-1,2],["las",0,1],["lis",0,1],["mis",0,1],["t",0,-1]],l=[["d",-1,4],["sid",0,2],["de",-1,4],["ikkude",2,1],["ike",-1,1],["ikke",-1,1],["te",-1,3]],c=[["va",-1,-1],["du",-1,-1],["nu",-1,-1],["tu",-1,-1]],n=[["kk",-1,1],["pp",-1,2],["tt",-1,3]],u=[["ma",-1,2],["mai",-1,1],["m",-1,1]],d=[["joob",-1,1],["jood",-1,1],["joodakse",1,1],["jooma",-1,1],["joomata",3,1],["joome",-1,1],["joon",-1,1],["joote",-1,1],["joovad",-1,1],["juua",-1,1],["juuakse",9,1],["jäi",-1,12],["jäid",11,12],["jäime",11,12],["jäin",11,12],["jäite",11,12],["jääb",-1,12],["jääd",-1,12],["jääda",17,12],["jäädakse",18,12],["jäädi",17,12],["jääks",-1,12],["jääksid",21,12],["jääksime",21,12],["jääksin",21,12],["jääksite",21,12],["jääma",-1,12],["jäämata",26,12],["jääme",-1,12],["jään",-1,12],["jääte",-1,12],["jäävad",-1,12],["jõi",-1,1],["jõid",32,1],["jõime",32,1],["jõin",32,1],["jõite",32,1],["keeb",-1,4],["keed",-1,4],["keedakse",38,4],["keeks",-1,4],["keeksid",40,4],["keeksime",40,4],["keeksin",40,4],["keeksite",40,4],["keema",-1,4],["keemata",45,4],["keeme",-1,4],["keen",-1,4],["kees",-1,4],["keeta",-1,4],["keete",-1,4],["keevad",-1,4],["käia",-1,8],["käiakse",53,8],["käib",-1,8],["käid",-1,8],["käidi",56,8],["käiks",-1,8],["käiksid",58,8],["käiksime",58,8],["käiksin",58,8],["käiksite",58,8],["käima",-1,8],["käimata",63,8],["käime",-1,8],["käin",-1,8],["käis",-1,8],["käite",-1,8],["käivad",-1,8],["laob",-1,16],["laod",-1,16],["laoks",-1,16],["laoksid",72,16],["laoksime",72,16],["laoksin",72,16],["laoksite",72,16],["laome",-1,16],["laon",-1,16],["laote",-1,16],["laovad",-1,16],["loeb",-1,14],["loed",-1,14],["loeks",-1,14],["loeksid",83,14],["loeksime",83,14],["loeksin",83,14],["loeksite",83,14],["loeme",-1,14],["loen",-1,14],["loete",-1,14],["loevad",-1,14],["loob",-1,7],["lood",-1,7],["loodi",93,7],["looks",-1,7],["looksid",95,7],["looksime",95,7],["looksin",95,7],["looksite",95,7],["looma",-1,7],["loomata",100,7],["loome",-1,7],["loon",-1,7],["loote",-1,7],["loovad",-1,7],["luua",-1,7],["luuakse",106,7],["lõi",-1,6],["lõid",108,6],["lõime",108,6],["lõin",108,6],["lõite",108,6],["lööb",-1,5],["lööd",-1,5],["löödakse",114,5],["löödi",114,5],["lööks",-1,5],["lööksid",117,5],["lööksime",117,5],["lööksin",117,5],["lööksite",117,5],["lööma",-1,5],["löömata",122,5],["lööme",-1,5],["löön",-1,5],["lööte",-1,5],["löövad",-1,5],["lüüa",-1,5],["lüüakse",128,5],["müüa",-1,13],["müüakse",130,13],["müüb",-1,13],["müüd",-1,13],["müüdi",133,13],["müüks",-1,13],["müüksid",135,13],["müüksime",135,13],["müüksin",135,13],["müüksite",135,13],["müüma",-1,13],["müümata",140,13],["müüme",-1,13],["müün",-1,13],["müüs",-1,13],["müüte",-1,13],["müüvad",-1,13],["näeb",-1,18],["näed",-1,18],["näeks",-1,18],["näeksid",149,18],["näeksime",149,18],["näeksin
",149,18],["näeksite",149,18],["näeme",-1,18],["näen",-1,18],["näete",-1,18],["näevad",-1,18],["nägema",-1,18],["nägemata",158,18],["näha",-1,18],["nähakse",160,18],["nähti",-1,18],["põeb",-1,15],["põed",-1,15],["põeks",-1,15],["põeksid",165,15],["põeksime",165,15],["põeksin",165,15],["põeksite",165,15],["põeme",-1,15],["põen",-1,15],["põete",-1,15],["põevad",-1,15],["saab",-1,2],["saad",-1,2],["saada",175,2],["saadakse",176,2],["saadi",175,2],["saaks",-1,2],["saaksid",179,2],["saaksime",179,2],["saaksin",179,2],["saaksite",179,2],["saama",-1,2],["saamata",184,2],["saame",-1,2],["saan",-1,2],["saate",-1,2],["saavad",-1,2],["sai",-1,2],["said",190,2],["saime",190,2],["sain",190,2],["saite",190,2],["sõi",-1,9],["sõid",195,9],["sõime",195,9],["sõin",195,9],["sõite",195,9],["sööb",-1,9],["sööd",-1,9],["söödakse",201,9],["söödi",201,9],["sööks",-1,9],["sööksid",204,9],["sööksime",204,9],["sööksin",204,9],["sööksite",204,9],["sööma",-1,9],["söömata",209,9],["sööme",-1,9],["söön",-1,9],["sööte",-1,9],["söövad",-1,9],["süüa",-1,9],["süüakse",215,9],["teeb",-1,17],["teed",-1,17],["teeks",-1,17],["teeksid",219,17],["teeksime",219,17],["teeksin",219,17],["teeksite",219,17],["teeme",-1,17],["teen",-1,17],["teete",-1,17],["teevad",-1,17],["tegema",-1,17],["tegemata",228,17],["teha",-1,17],["tehakse",230,17],["tehti",-1,17],["toob",-1,10],["tood",-1,10],["toodi",234,10],["tooks",-1,10],["tooksid",236,10],["tooksime",236,10],["tooksin",236,10],["tooksite",236,10],["tooma",-1,10],["toomata",241,10],["toome",-1,10],["toon",-1,10],["toote",-1,10],["toovad",-1,10],["tuua",-1,10],["tuuakse",247,10],["tõi",-1,10],["tõid",249,10],["tõime",249,10],["tõin",249,10],["tõite",249,10],["viia",-1,3],["viiakse",254,3],["viib",-1,3],["viid",-1,3],["viidi",257,3],["viiks",-1,3],["viiksid",259,3],["viiksime",259,3],["viiksin",259,3],["viiksite",259,3],["viima",-1,3],["viimata",264,3],["viime",-1,3],["viin",-1,3],["viisime",-1,3],["viisin",-1,3],["viisite",-1,3],["viite",-1,3],["viivad",-1,3],["võib",-1,11],["võid",-1,11],["võida",274,11],["võidakse",275,11],["võidi",274,11],["võiks",-1,11],["võiksid",278,11],["võiksime",278,11],["võiksin",278,11],["võiksite",278,11],["võima",-1,11],["võimata",283,11],["võime",-1,11],["võin",-1,11],["võis",-1,11],["võite",-1,11],["võivad",-1,11]],b=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,48,8],_=[17,65,16],f=[117,66,6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,128,0,0,0,16],v=[21,123,243,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,48,8],w=0;function g(){return 0!=t.find_among_b(i)}function j(){var i=t.limit-t.cursor,i=((()=>{var i;if(!(t.cursor{var i;if(!(t.cursor{var i;if(!(t.cursor{var i;if(!(t.cursor{var i;if(t.bra=t.cursor,0!=(i=t.find_among(d))&&(t.ket=t.cursor,!(t.cursor{var i;if(!(t.cursor{var i;if(!(t.cursor{var i;if(t.in_grouping_b(b,97,252)&&!(w>t.cursor)&&(t.ket=t.cursor,0!=(i=t.find_among_b(n))))switch(t.bra=t.cursor,i){case 1:if(t.slice_from("k"))break;return;case 2:if(t.slice_from("p"))break;return;case 3:if(t.slice_from("t"))break}})(),t.cursor=t.limit-e,t.cursor=t.limit_backward,!0},this.stemWord=function(i){return t.setCurrent(i),this.stem(),t.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/finnish-stemmer.js b/sphinx/search/minified-js/finnish-stemmer.js
index f6301fdfa56..160fb267a05 100644
--- a/sphinx/search/minified-js/finnish-stemmer.js
+++ b/sphinx/search/minified-js/finnish-stemmer.js
@@ -1 +1 @@
-FinnishStemmer=function(){var r=new BaseStemmer;var i=[["pa",-1,1],["sti",-1,2],["kaan",-1,1],["han",-1,1],["kin",-1,1],["hän",-1,1],["kään",-1,1],["ko",-1,1],["pä",-1,1],["kö",-1,1]];var e=[["lla",-1,-1],["na",-1,-1],["ssa",-1,-1],["ta",-1,-1],["lta",3,-1],["sta",3,-1]];var a=[["llä",-1,-1],["nä",-1,-1],["ssä",-1,-1],["tä",-1,-1],["ltä",3,-1],["stä",3,-1]];var s=[["lle",-1,-1],["ine",-1,-1]];var t=[["nsa",-1,3],["mme",-1,3],["nne",-1,3],["ni",-1,2],["si",-1,1],["an",-1,4],["en",-1,6],["än",-1,5],["nsä",-1,3]];var u=[["aa",-1,-1],["ee",-1,-1],["ii",-1,-1],["oo",-1,-1],["uu",-1,-1],["ää",-1,-1],["öö",-1,-1]];var l=[["a",-1,8],["lla",0,-1],["na",0,-1],["ssa",0,-1],["ta",0,-1],["lta",4,-1],["sta",4,-1],["tta",4,2],["lle",-1,-1],["ine",-1,-1],["ksi",-1,-1],["n",-1,7],["han",11,1],["den",11,-1,S],["seen",11,-1,C],["hen",11,2],["tten",11,-1,S],["hin",11,3],["siin",11,-1,S],["hon",11,4],["hän",11,5],["hön",11,6],["ä",-1,8],["llä",22,-1],["nä",22,-1],["ssä",22,-1],["tä",22,-1],["ltä",26,-1],["stä",26,-1],["ttä",26,2]];var c=[["eja",-1,-1],["mma",-1,1],["imma",1,-1],["mpa",-1,1],["impa",3,-1],["mmi",-1,1],["immi",5,-1],["mpi",-1,1],["impi",7,-1],["ejä",-1,-1],["mmä",-1,1],["immä",10,-1],["mpä",-1,1],["impä",12,-1]];var n=[["i",-1,-1],["j",-1,-1]];var f=[["mma",-1,1],["imma",0,-1]];var o=[17,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8];var b=[119,223,119,1];var _=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32];var m=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32];var k=[17,97,24,1,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32];var d=false;var v="";var w=0;var g=0;function p(){g=r.limit;w=r.limit;r:while(true){var i=r.cursor;i:{if(!r.in_grouping(_,97,246)){break i}r.cursor=i;break r}r.cursor=i;if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){i:{if(!r.out_grouping(_,97,246)){break i}break r}if(r.cursor>=r.limit){return false}r.cursor++}g=r.cursor;r:while(true){var e=r.cursor;i:{if(!r.in_grouping(_,97,246)){break i}r.cursor=e;break r}r.cursor=e;if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){i:{if(!r.out_grouping(_,97,246)){break i}break r}if(r.cursor>=r.limit){return false}r.cursor++}w=r.cursor;return true}function h(){if(!(w<=r.cursor)){return false}return true}function q(){var e;if(r.cursor{var r;if(!(c.cursor{var r;if(!(c.cursor
{var r;if(!(c.cursor{var r;if(!(c.cursor{if(!(c.cursor
=r.limit){break r}r.cursor++}continue}r.cursor=e;break}return true}function v(){m=r.limit;k=r.limit;b=r.limit;var i=r.cursor;r:{e:{var s=r.cursor;i:{if(!r.in_grouping(o,97,251)){break i}if(!r.in_grouping(o,97,251)){break i}if(r.cursor>=r.limit){break i}r.cursor++;break e}r.cursor=s;i:{if(r.find_among(e)==0){break i}break e}r.cursor=s;if(r.cursor>=r.limit){break r}r.cursor++;i:while(true){s:{if(!r.in_grouping(o,97,251)){break s}break i}if(r.cursor>=r.limit){break r}r.cursor++}}m=r.cursor}r.cursor=i;var a=r.cursor;r:{e:while(true){i:{if(!r.in_grouping(o,97,251)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(o,97,251)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}k=r.cursor;e:while(true){i:{if(!r.in_grouping(o,97,251)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(o,97,251)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}b=r.cursor}r.cursor=a;return true}function d(){var e;while(true){var s=r.cursor;r:{r.bra=r.cursor;e=r.find_among(i);if(e==0){break r}r.ket=r.cursor;switch(e){case 1:if(!r.slice_from("i")){return false}break;case 2:if(!r.slice_from("u")){return false}break;case 3:if(!r.slice_from("y")){return false}break;case 4:if(!r.slice_from("ë")){return false}break;case 5:if(!r.slice_from("ï")){return false}break;case 6:if(!r.slice_del()){return false}break;case 7:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=s;break}return true}function g(){if(!(m<=r.cursor)){return false}return true}function w(){if(!(k<=r.cursor)){return false}return true}function q(){if(!(b<=r.cursor)){return false}return true}function h(){var e;r.ket=r.cursor;e=r.find_among_b(u);if(e==0){return false}r.bra=r.cursor;switch(e){case 1:if(!q()){return false}if(!r.slice_del()){return false}break;case 2:if(!q()){return false}if(!r.slice_del()){return false}var i=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("ic")){r.cursor=r.limit-i;break r}r.bra=r.cursor;e:{var t=r.limit-r.cursor;i:{if(!q()){break i}if(!r.slice_del()){return false}break e}r.cursor=r.limit-t;if(!r.slice_from("iqU")){return false}}}break;case 3:if(!q()){return false}if(!r.slice_from("log")){return false}break;case 4:if(!q()){return false}if(!r.slice_from("u")){return false}break;case 5:if(!q()){return false}if(!r.slice_from("ent")){return false}break;case 6:if(!g()){return false}if(!r.slice_del()){return false}var c=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(s);if(e==0){r.cursor=r.limit-c;break r}r.bra=r.cursor;switch(e){case 1:if(!q()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-c;break r}r.bra=r.cursor;if(!q()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}break;case 2:e:{var f=r.limit-r.cursor;i:{if(!q()){break i}if(!r.slice_del()){return false}break e}r.cursor=r.limit-f;if(!w()){r.cursor=r.limit-c;break r}if(!r.slice_from("eux")){return false}}break;case 3:if(!q()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}break;case 4:if(!g()){r.cursor=r.limit-c;break r}if(!r.slice_from("i")){return false}break}}break;case 7:if(!q()){return false}if(!r.slice_del()){return false}var l=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(a);if(e==0){r.cursor=r.limit-l;break r}r.bra=r.cursor;switch(e){case 1:e:{var n=r.limit-r.cursor;i:{if(!q()){break i}if(!r.slice_del()){return false}break e}r.cursor=r.limit-n;if(!r.slice_from("abl")){return false}}break;case 2:e:{var b=r.limit-r.cursor;i:{if(!q()){break i}if(!r.slice_del()){return 
false}break e}r.cursor=r.limit-b;if(!r.slice_from("iqU")){return false}}break;case 3:if(!q()){r.cursor=r.limit-l;break r}if(!r.slice_del()){return false}break}}break;case 8:if(!q()){return false}if(!r.slice_del()){return false}var k=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-k;break r}r.bra=r.cursor;if(!q()){r.cursor=r.limit-k;break r}if(!r.slice_del()){return false}r.ket=r.cursor;if(!r.eq_s_b("ic")){r.cursor=r.limit-k;break r}r.bra=r.cursor;e:{var m=r.limit-r.cursor;i:{if(!q()){break i}if(!r.slice_del()){return false}break e}r.cursor=r.limit-m;if(!r.slice_from("iqU")){return false}}}break;case 9:if(!r.slice_from("eau")){return false}break;case 10:if(!w()){return false}if(!r.slice_from("al")){return false}break;case 11:r:{var _=r.limit-r.cursor;e:{if(!q()){break e}if(!r.slice_del()){return false}break r}r.cursor=r.limit-_;if(!w()){return false}if(!r.slice_from("eux")){return false}}break;case 12:if(!w()){return false}if(!r.out_grouping_b(o,97,251)){return false}if(!r.slice_del()){return false}break;case 13:if(!g()){return false}if(!r.slice_from("ant")){return false}return false;case 14:if(!g()){return false}if(!r.slice_from("ent")){return false}return false;case 15:var v=r.limit-r.cursor;if(!r.in_grouping_b(o,97,251)){return false}if(!g()){return false}r.cursor=r.limit-v;if(!r.slice_del()){return false}return false}return true}function p(){if(r.cursor0){return false}}r.ket=r.cursor;r:{var i=r.limit-r.cursor;e:{if(!r.eq_s_b("é")){break e}break r}r.cursor=r.limit-i;if(!r.eq_s_b("è")){return false}}r.bra=r.cursor;if(!r.slice_from("e")){return false}return true}this.stem=function(){var e=r.cursor;_();r.cursor=e;v();r.limit_backward=r.cursor;r.cursor=r.limit;var i=r.limit-r.cursor;r:{e:{var s=r.limit-r.cursor;i:{var a=r.limit-r.cursor;s:{var u=r.limit-r.cursor;a:{if(!h()){break a}break s}r.cursor=r.limit-u;a:{if(!p()){break a}break s}r.cursor=r.limit-u;if(!z()){break i}}r.cursor=r.limit-a;var t=r.limit-r.cursor;s:{r.ket=r.cursor;a:{var c=r.limit-r.cursor;u:{if(!r.eq_s_b("Y")){break u}r.bra=r.cursor;if(!r.slice_from("i")){return false}break a}r.cursor=r.limit-c;if(!r.eq_s_b("ç")){r.cursor=r.limit-t;break s}r.bra=r.cursor;if(!r.slice_from("c")){return false}}}break e}r.cursor=r.limit-s;if(!I()){break r}}}r.cursor=r.limit-i;var f=r.limit-r.cursor;U();r.cursor=r.limit-f;var l=r.limit-r.cursor;H();r.cursor=r.limit-l;r.cursor=r.limit_backward;var o=r.cursor;d();r.cursor=o;return true};this["stemWord"]=function(e){r.setCurrent(e);this.stem();return r.getCurrent()}};
\ No newline at end of file
+var FrenchStemmer=function(){var n=new BaseStemmer,f=[["col",-1,-1],["ni",-1,1],["par",-1,-1],["tap",-1,-1]],_=[["",-1,7],["H",0,6],["He",1,4],["Hi",1,5],["I",0,1],["U",0,2],["Y",0,3]],m=[["iqU",-1,3],["abl",-1,3],["Ièr",-1,4],["ièr",-1,4],["eus",-1,2],["iv",-1,1]],b=[["ic",-1,2],["abil",-1,1],["iv",-1,3]],k=[["iqUe",-1,1],["atrice",-1,2],["ance",-1,1],["ence",-1,5],["logie",-1,3],["able",-1,1],["isme",-1,1],["euse",-1,12],["iste",-1,1],["ive",-1,8],["if",-1,8],["usion",-1,4],["ation",-1,2],["ution",-1,4],["ateur",-1,2],["iqUes",-1,1],["atrices",-1,2],["ances",-1,1],["ences",-1,5],["logies",-1,3],["ables",-1,1],["ismes",-1,1],["euses",-1,12],["istes",-1,1],["ives",-1,8],["ifs",-1,8],["usions",-1,4],["ations",-1,2],["utions",-1,4],["ateurs",-1,2],["ments",-1,16],["ements",30,6],["issements",31,13],["ités",-1,7],["ment",-1,16],["ement",34,6],["issement",35,13],["amment",34,14],["emment",34,15],["aux",-1,10],["eaux",39,9],["eux",-1,1],["oux",-1,11],["ité",-1,7]],d=[["ira",-1,1],["ie",-1,1],["isse",-1,1],["issante",-1,1],["i",-1,1],["irai",4,1],["ir",-1,1],["iras",-1,1],["ies",-1,1],["îmes",-1,1],["isses",-1,1],["issantes",-1,1],["îtes",-1,1],["is",-1,1],["irais",13,1],["issais",13,1],["irions",-1,1],["issions",-1,1],["irons",-1,1],["issons",-1,1],["issants",-1,1],["it",-1,1],["irait",21,1],["issait",21,1],["issant",-1,1],["iraIent",-1,1],["issaIent",-1,1],["irent",-1,1],["issent",-1,1],["iront",-1,1],["ît",-1,1],["iriez",-1,1],["issiez",-1,1],["irez",-1,1],["issez",-1,1]],g=[["al",-1,1],["épl",-1,-1],["auv",-1,-1]],v=[["a",-1,3],["era",0,2],["aise",-1,4],["asse",-1,3],["ante",-1,3],["ée",-1,2],["ai",-1,3],["erai",6,2],["er",-1,2],["as",-1,3],["eras",9,2],["âmes",-1,3],["aises",-1,4],["asses",-1,3],["antes",-1,3],["âtes",-1,3],["ées",-1,2],["ais",-1,4],["eais",17,2],["erais",17,2],["ions",-1,1],["erions",20,2],["assions",20,3],["erons",-1,2],["ants",-1,3],["és",-1,2],["ait",-1,3],["erait",26,2],["ant",-1,3],["aIent",-1,3],["eraIent",29,2],["èrent",-1,2],["assent",-1,3],["eront",-1,2],["ât",-1,3],["ez",-1,2],["iez",35,2],["eriez",36,2],["assiez",36,3],["erez",35,2],["é",-1,2]],q=[["e",-1,3],["Ière",0,2],["ière",0,2],["ion",-1,1],["Ier",-1,2],["ier",-1,2]],w=[["ell",-1,-1],["eill",-1,-1],["enn",-1,-1],["onn",-1,-1],["ett",-1,-1]],p=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,128,130,103,8,5],h=[65,85],z=[131,14,3],I=[1,65,20,0,0,0,0,0,0,0,0,0,0,0,0,0,128],U=0,H=0,x=0;function Y(){return x<=n.cursor}function y(){return H<=n.cursor}function C(){return U<=n.cursor}this.stem=function(){var r=n.cursor,i=(n.bra=n.cursor,i=n.cursor,(n.in_grouping(z,99,116)||(n.cursor=i,n.eq_s("qu")))&&n.eq_s("'")&&(n.ket=n.cursor,n.cursor{for(;;){var r=n.cursor;r:{for(;;){var i=n.cursor;i:{e:{var e=n.cursor;s:if(n.in_grouping(p,97,251)){n.bra=n.cursor;c:{var s=n.cursor;if(n.eq_s("u")&&(n.ket=n.cursor,n.in_grouping(p,97,251))){if(n.slice_from("U"))break c;return}if(n.cursor=s,n.eq_s("i")&&(n.ket=n.cursor,n.in_grouping(p,97,251))){if(n.slice_from("I"))break c;return}if(n.cursor=s,!n.eq_s("y"))break s;if(n.ket=n.cursor,!n.slice_from("Y"))return}break e}if(n.cursor=e,n.bra=n.cursor,n.eq_s("ë")){if(n.ket=n.cursor,n.slice_from("He"))break e;return}if(n.cursor=e,n.bra=n.cursor,n.eq_s("ï")){if(n.ket=n.cursor,n.slice_from("Hi"))break e;return}if(n.cursor=e,n.bra=n.cursor,n.eq_s("y")&&(n.ket=n.cursor,n.in_grouping(p,97,251))){if(n.slice_from("Y"))break e;return}if(n.cursor=e,!n.eq_s("q"))break i;if(n.bra=n.cursor,!n.eq_s("u"))break i;if(n.ket=n.cursor,!n.slice_from("U"))return}n.cursor=i;break}if(n.cursor=i,n.cursor>=n.limit)break 
r;n.cursor++}continue}n.cursor=r;break}})(),n.cursor=i,x=n.limit,H=n.limit,U=n.limit;var e,r=n.cursor;r:{i:{var s=n.cursor;if(!n.in_grouping(p,97,251)||!n.in_grouping(p,97,251)||n.cursor>=n.limit){n.cursor=s;e:if(0!=(e=n.find_among(f))){switch(e){case 1:if(n.in_grouping(p,97,251))break;break e}break i}if(n.cursor=s,n.cursor>=n.limit)break r;if(n.cursor++,!n.go_out_grouping(p,97,251))break r}n.cursor++}x=n.cursor}n.cursor=r,r=n.cursor,n.go_out_grouping(p,97,251)&&(n.cursor++,n.go_in_grouping(p,97,251))&&(n.cursor++,H=n.cursor,n.go_out_grouping(p,97,251))&&(n.cursor++,n.go_in_grouping(p,97,251))&&(n.cursor++,U=n.cursor),n.cursor=r,n.limit_backward=n.cursor,n.cursor=n.limit;i=n.limit-n.cursor;r:{var c=n.limit-n.cursor,u=n.limit-n.cursor,t=n.limit-n.cursor;if((()=>{var r;if(n.ket=n.cursor,0!=(r=n.find_among_b(k))){switch(n.bra=n.cursor,r){case 1:if(!C())return;if(n.slice_del())break;return;case 2:if(!C())return;if(!n.slice_del())return;var i=n.limit-n.cursor;if(n.ket=n.cursor,n.eq_s_b("ic")){n.bra=n.cursor;i:{var e=n.limit-n.cursor;if(C()){if(n.slice_del())break i;return}if(n.cursor=n.limit-e,!n.slice_from("iqU"))return}}else n.cursor=n.limit-i;break;case 3:if(!C())return;if(n.slice_from("log"))break;return;case 4:if(!C())return;if(n.slice_from("u"))break;return;case 5:if(!C())return;if(n.slice_from("ent"))break;return;case 6:if(!Y())return;if(!n.slice_del())return;var s=n.limit-n.cursor;i:if(n.ket=n.cursor,0==(r=n.find_among_b(m)))n.cursor=n.limit-s;else switch(n.bra=n.cursor,r){case 1:if(!C()){n.cursor=n.limit-s;break i}if(!n.slice_del())return;if(n.ket=n.cursor,!n.eq_s_b("at")){n.cursor=n.limit-s;break i}if(n.bra=n.cursor,!C()){n.cursor=n.limit-s;break i}if(n.slice_del())break;return;case 2:e:{var c=n.limit-n.cursor;if(C()){if(n.slice_del())break e;return}if(n.cursor=n.limit-c,!y()){n.cursor=n.limit-s;break i}if(!n.slice_from("eux"))return}break;case 3:if(!C()){n.cursor=n.limit-s;break i}if(n.slice_del())break;return;case 4:if(!Y()){n.cursor=n.limit-s;break i}if(n.slice_from("i"))break;return}break;case 7:if(!C())return;if(!n.slice_del())return;var u=n.limit-n.cursor;i:if(n.ket=n.cursor,0==(r=n.find_among_b(b)))n.cursor=n.limit-u;else switch(n.bra=n.cursor,r){case 1:e:{var t=n.limit-n.cursor;if(C()){if(n.slice_del())break e;return}if(n.cursor=n.limit-t,!n.slice_from("abl"))return}break;case 2:e:{var o=n.limit-n.cursor;if(C()){if(n.slice_del())break e;return}if(n.cursor=n.limit-o,!n.slice_from("iqU"))return}break;case 3:if(!C()){n.cursor=n.limit-u;break i}if(n.slice_del())break;return}break;case 8:if(!C())return;if(!n.slice_del())return;i=n.limit-n.cursor;if(n.ket=n.cursor,n.eq_s_b("at"))if(n.bra=n.cursor,C()){if(!n.slice_del())return;if(n.ket=n.cursor,n.eq_s_b("ic")){n.bra=n.cursor;i:{var a=n.limit-n.cursor;if(C()){if(n.slice_del())break i;return}if(n.cursor=n.limit-a,!n.slice_from("iqU"))return}}else n.cursor=n.limit-i}else n.cursor=n.limit-i;else n.cursor=n.limit-i;break;case 9:if(n.slice_from("eau"))break;return;case 10:if(!y())return;if(n.slice_from("al"))break;return;case 11:if(!n.in_grouping_b(h,98,112))return;if(n.slice_from("ou"))break;return;case 12:i:{var l=n.limit-n.cursor;if(C()){if(n.slice_del())break i;return}if(n.cursor=n.limit-l,!y())return;if(!n.slice_from("eux"))return}break;case 13:if(!y())return;if(!n.out_grouping_b(p,97,251))return;if(n.slice_del())break;return;case 14:return Y()?void n.slice_from("ant"):void 0;case 15:return Y()?void n.slice_from("ent"):void 0;case 16:i=n.limit-n.cursor;return n.in_grouping_b(p,97,251)?Y()&&(n.cursor=n.limit-i,void n.slice_del()):void 
0}return 1}})()||(n.cursor=n.limit-t,(()=>{if(!(n.cursor{var r;if(!(n.cursorn.limit_backward)break i}return}if(n.cursor=n.limit-e,n.slice_del())break;return}return 1}n.limit_backward=i}})())){n.cursor=n.limit-u;var o=n.limit-n.cursor;i:{n.ket=n.cursor;e:{var a=n.limit-n.cursor;if(n.eq_s_b("Y")){if(n.bra=n.cursor,n.slice_from("i"))break e;return!1}if(n.cursor=n.limit-a,!n.eq_s_b("ç")){n.cursor=n.limit-o;break i}if(n.bra=n.cursor,!n.slice_from("c"))return!1}}}else if(n.cursor=n.limit-c,!(()=>{var r=n.limit-n.cursor;if(n.ket=n.cursor,n.eq_s_b("s")){n.bra=n.cursor;var i=n.limit-n.cursor,e=n.limit-n.cursor;if(n.eq_s_b("Hi")||(n.cursor=n.limit-e,n.out_grouping_b(I,97,232))){if(n.cursor=n.limit-i,!n.slice_del())return}else n.cursor=n.limit-r}else n.cursor=n.limit-r;if(!(n.cursor{for(var r;;){var i=n.cursor;r:{switch(n.bra=n.cursor,r=n.find_among(_),n.ket=n.cursor,r){case 1:if(n.slice_from("i"))break;return;case 2:if(n.slice_from("u"))break;return;case 3:if(n.slice_from("y"))break;return;case 4:if(n.slice_from("ë"))break;return;case 5:if(n.slice_from("ï"))break;return;case 6:if(n.slice_del())break;return;case 7:if(n.cursor>=n.limit)break r;n.cursor++}continue}n.cursor=i;break}})(),n.cursor=r,!0},this.stemWord=function(r){return n.setCurrent(r),this.stem(),n.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/german-stemmer.js b/sphinx/search/minified-js/german-stemmer.js
index e2a335d15e0..da15f9e8f71 100644
--- a/sphinx/search/minified-js/german-stemmer.js
+++ b/sphinx/search/minified-js/german-stemmer.js
@@ -1 +1 @@
-GermanStemmer=function(){var r=new BaseStemmer;var e=[["",-1,5],["U",0,2],["Y",0,1],["ä",0,3],["ö",0,4],["ü",0,2]];var i=[["e",-1,2],["em",-1,1],["en",-1,2],["ern",-1,1],["er",-1,1],["s",-1,3],["es",5,2]];var s=[["en",-1,1],["er",-1,1],["st",-1,2],["est",2,1]];var u=[["ig",-1,1],["lich",-1,1]];var a=[["end",-1,1],["ig",-1,2],["ung",-1,1],["lich",-1,3],["isch",-1,2],["ik",-1,2],["heit",-1,3],["keit",-1,4]];var c=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32,8];var t=[117,30,5];var o=[117,30,4];var f=0;var l=0;var n=0;function b(){var e=r.cursor;while(true){var i=r.cursor;r:{e:{var s=r.cursor;i:{r.bra=r.cursor;if(!r.eq_s("ß")){break i}r.ket=r.cursor;if(!r.slice_from("ss")){return false}break e}r.cursor=s;if(r.cursor>=r.limit){break r}r.cursor++}continue}r.cursor=i;break}r.cursor=e;while(true){var u=r.cursor;r:{e:while(true){var a=r.cursor;i:{if(!r.in_grouping(c,97,252)){break i}r.bra=r.cursor;s:{var t=r.cursor;u:{if(!r.eq_s("u")){break u}r.ket=r.cursor;if(!r.in_grouping(c,97,252)){break u}if(!r.slice_from("U")){return false}break s}r.cursor=t;if(!r.eq_s("y")){break i}r.ket=r.cursor;if(!r.in_grouping(c,97,252)){break i}if(!r.slice_from("Y")){return false}}r.cursor=a;break e}r.cursor=a;if(r.cursor>=r.limit){break r}r.cursor++}continue}r.cursor=u;break}return true}function k(){n=r.limit;l=r.limit;var e=r.cursor;{var i=r.cursor+3;if(i>r.limit){return false}r.cursor=i}f=r.cursor;r.cursor=e;r:while(true){e:{if(!r.in_grouping(c,97,252)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(c,97,252)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}n=r.cursor;r:{if(!(n=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(c,97,252)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}l=r.cursor;return true}function m(){var i;while(true){var s=r.cursor;r:{r.bra=r.cursor;i=r.find_among(e);if(i==0){break r}r.ket=r.cursor;switch(i){case 1:if(!r.slice_from("y")){return false}break;case 2:if(!r.slice_from("u")){return false}break;case 3:if(!r.slice_from("a")){return false}break;case 4:if(!r.slice_from("o")){return false}break;case 5:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=s;break}return true}function _(){if(!(n<=r.cursor)){return false}return true}function v(){if(!(l<=r.cursor)){return false}return true}function g(){var e;var c=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(i);if(e==0){break r}r.bra=r.cursor;if(!_()){break r}switch(e){case 1:if(!r.slice_del()){return false}break;case 2:if(!r.slice_del()){return false}var f=r.limit-r.cursor;e:{r.ket=r.cursor;if(!r.eq_s_b("s")){r.cursor=r.limit-f;break e}r.bra=r.cursor;if(!r.eq_s_b("nis")){r.cursor=r.limit-f;break e}if(!r.slice_del()){return false}}break;case 3:if(!r.in_grouping_b(t,98,116)){break r}if(!r.slice_del()){return false}break}}r.cursor=r.limit-c;var l=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(s);if(e==0){break r}r.bra=r.cursor;if(!_()){break r}switch(e){case 1:if(!r.slice_del()){return false}break;case 2:if(!r.in_grouping_b(o,98,116)){break r}{var n=r.cursor-3;if(n{for(var r,i=n.cursor;;){var e=n.cursor;r:{for(;;){var s=n.cursor;i:if(n.in_grouping(t,97,252)){n.bra=n.cursor;e:{var c=n.cursor;if(n.eq_s("u")&&(n.ket=n.cursor,n.in_grouping(t,97,252))){if(n.slice_from("U"))break e;return}if(n.cursor=c,!n.eq_s("y"))break i;if(n.ket=n.cursor,!n.in_grouping(t,97,252))break i;if(!n.slice_from("Y"))return}n.cursor=s;break}if(n.cursor=s,n.cursor>=n.limit)break r;n.cursor++}continue}n.cursor=e;break}for(n.cursor=i;;){var 
u=n.cursor;r:{switch(n.bra=n.cursor,r=n.find_among(o),n.ket=n.cursor,r){case 1:if(n.slice_from("ss"))break;return;case 2:if(n.slice_from("ä"))break;return;case 3:if(n.slice_from("ö"))break;return;case 4:if(n.slice_from("ü"))break;return;case 5:if(n.cursor>=n.limit)break r;n.cursor++}continue}n.cursor=u;break}})(),n.cursor=i,n.cursor),e=(a=n.limit,u=n.limit,r=n.cursor,(e=n.cursor+3)>n.limit||(n.cursor=e,c=n.cursor,n.cursor=r,n.go_out_grouping(t,97,252)&&(n.cursor++,n.go_in_grouping(t,97,252))&&(n.cursor++,a=n.cursor,c<=a||(a=c),n.go_out_grouping(t,97,252))&&(n.cursor++,n.go_in_grouping(t,97,252))&&(n.cursor++,u=n.cursor)),n.cursor=i,n.limit_backward=n.cursor,n.cursor=n.limit,(()=>{var r,i=n.limit-n.cursor;r:if(n.ket=n.cursor,0!=(r=n.find_among_b(l))&&(n.bra=n.cursor,v()))switch(r){case 1:var e=n.limit-n.cursor;if(n.eq_s_b("syst"))break r;if(n.cursor=n.limit-e,n.slice_del())break;return;case 2:if(n.slice_del())break;return;case 3:if(!n.slice_del())return;e=n.limit-n.cursor;if(n.ket=n.cursor,n.eq_s_b("s"))if(n.bra=n.cursor,n.eq_s_b("nis")){if(!n.slice_del())return}else n.cursor=n.limit-e;else n.cursor=n.limit-e;break;case 4:if(!n.in_grouping_b(g,98,116))break r;if(n.slice_del())break;return;case 5:if(n.slice_from("l"))break;return}n.cursor=n.limit-i,i=n.limit-n.cursor;r:if(n.ket=n.cursor,0!=(r=n.find_among_b(_))&&(n.bra=n.cursor,v()))switch(r){case 1:if(n.slice_del())break;return;case 2:if(!n.in_grouping_b(d,98,116))break r;var s=n.cursor-3;if(s{for(var r;;){var i=n.cursor;r:{switch(n.bra=n.cursor,r=n.find_among(s),n.ket=n.cursor,r){case 1:if(n.slice_from("y"))break;return;case 2:if(n.slice_from("u"))break;return;case 3:if(n.slice_from("a"))break;return;case 4:if(n.slice_from("o"))break;return;case 5:if(n.cursor>=n.limit)break r;n.cursor++}continue}n.cursor=i;break}})(),n.cursor=e,!0},this.stemWord=function(r){return n.setCurrent(r),this.stem(),n.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/greek-stemmer.js b/sphinx/search/minified-js/greek-stemmer.js
new file mode 100644
index 00000000000..d02a30bf1fe
--- /dev/null
+++ b/sphinx/search/minified-js/greek-stemmer.js
@@ -0,0 +1 @@
+var GreekStemmer=function(){var s=new BaseStemmer,e=[["",-1,25],["Ά",0,1],["Έ",0,5],["Ή",0,7],["Ί",0,9],["Ό",0,15],["Ύ",0,20],["Ώ",0,24],["ΐ",0,7],["Α",0,1],["Β",0,2],["Γ",0,3],["Δ",0,4],["Ε",0,5],["Ζ",0,6],["Η",0,7],["Θ",0,8],["Ι",0,9],["Κ",0,10],["Λ",0,11],["Μ",0,12],["Ν",0,13],["Ξ",0,14],["Ο",0,15],["Π",0,16],["Ρ",0,17],["Σ",0,18],["Τ",0,19],["Υ",0,20],["Φ",0,21],["Χ",0,22],["Ψ",0,23],["Ω",0,24],["Ϊ",0,9],["Ϋ",0,20],["ά",0,1],["έ",0,5],["ή",0,7],["ί",0,9],["ΰ",0,20],["ς",0,18],["ϊ",0,7],["ϋ",0,20],["ό",0,15],["ύ",0,20],["ώ",0,24]],o=[["σκαγια",-1,2],["φαγια",-1,1],["ολογια",-1,3],["σογια",-1,4],["τατογια",-1,5],["κρεατα",-1,6],["περατα",-1,7],["τερατα",-1,8],["γεγονοτα",-1,11],["καθεστωτα",-1,10],["φωτα",-1,9],["περατη",-1,7],["σκαγιων",-1,2],["φαγιων",-1,1],["ολογιων",-1,3],["σογιων",-1,4],["τατογιων",-1,5],["κρεατων",-1,6],["περατων",-1,7],["τερατων",-1,8],["γεγονοτων",-1,11],["καθεστωτων",-1,10],["φωτων",-1,9],["κρεασ",-1,6],["περασ",-1,7],["τερασ",-1,8],["γεγονοσ",-1,11],["κρεατοσ",-1,6],["περατοσ",-1,7],["τερατοσ",-1,8],["γεγονοτοσ",-1,11],["καθεστωτοσ",-1,10],["φωτοσ",-1,9],["καθεστωσ",-1,10],["φωσ",-1,9],["σκαγιου",-1,2],["φαγιου",-1,1],["ολογιου",-1,3],["σογιου",-1,4],["τατογιου",-1,5]],u=[["πα",-1,1],["ξαναπα",0,1],["επα",0,1],["περιπα",0,1],["αναμπα",0,1],["εμπα",0,1],["β",-1,2],["δανε",-1,1],["βαθυρι",-1,2],["βαρκ",-1,2],["μαρκ",-1,2],["λ",-1,2],["μ",-1,2],["κορν",-1,2],["αθρο",-1,1],["συναθρο",14,1],["π",-1,2],["ιμπ",16,2],["ρ",-1,2],["μαρ",18,2],["αμπαρ",18,2],["γκρ",18,2],["βολβορ",18,2],["γλυκορ",18,2],["πιπερορ",18,2],["πρ",18,2],["μπρ",25,2],["αρρ",18,2],["γλυκυρ",18,2],["πολυρ",18,2],["λου",-1,2]],a=[["ιζα",-1,1],["ιζε",-1,1],["ιζαμε",-1,1],["ιζουμε",-1,1],["ιζανε",-1,1],["ιζουνε",-1,1],["ιζατε",-1,1],["ιζετε",-1,1],["ιζει",-1,1],["ιζαν",-1,1],["ιζουν",-1,1],["ιζεσ",-1,1],["ιζεισ",-1,1],["ιζω",-1,1]],t=[["βι",-1,1],["λι",-1,1],["αλ",-1,1],["εν",-1,1],["σ",-1,1],["χ",-1,1],["υψ",-1,1],["ζω",-1,1]],_=[["ωθηκα",-1,1],["ωθηκε",-1,1],["ωθηκαμε",-1,1],["ωθηκανε",-1,1],["ωθηκατε",-1,1],["ωθηκαν",-1,1],["ωθηκεσ",-1,1]],l=[["ξαναπα",-1,1],["επα",-1,1],["περιπα",-1,1],["αναμπα",-1,1],["εμπα",-1,1],["χαρτοπα",-1,1],["εξαρχα",-1,1],["γε",-1,2],["γκε",-1,2],["κλε",-1,1],["εκλε",9,1],["απεκλε",10,1],["αποκλε",9,1],["εσωκλε",9,1],["δανε",-1,1],["πε",-1,1],["επε",15,1],["μετεπε",16,1],["εσε",-1,1],["γκ",-1,2],["μ",-1,2],["πουκαμ",20,2],["κομ",20,2],["αν",-1,2],["ολο",-1,2],["αθρο",-1,1],["συναθρο",25,1],["π",-1,2],["λαρ",-1,2],["δημοκρατ",-1,2],["αφ",-1,2],["γιγαντοαφ",30,2]],m=[["ισα",-1,1],["ισαμε",-1,1],["ισανε",-1,1],["ισε",-1,1],["ισατε",-1,1],["ισαν",-1,1],["ισεσ",-1,1]],f=[["ξαναπα",-1,1],["επα",-1,1],["περιπα",-1,1],["αναμπα",-1,1],["εμπα",-1,1],["χαρτοπα",-1,1],["εξαρχα",-1,1],["κλε",-1,1],["εκλε",7,1],["απεκλε",8,1],["αποκλε",7,1],["εσωκλε",7,1],["δανε",-1,1],["πε",-1,1],["επε",13,1],["μετεπε",14,1],["εσε",-1,1],["αθρο",-1,1],["συναθρο",17,1]],b=[["ισουμε",-1,1],["ισουνε",-1,1],["ισετε",-1,1],["ισει",-1,1],["ισουν",-1,1],["ισεισ",-1,1],["ισω",-1,1]],n=[["ατα",-1,2],["φα",-1,2],["ηφα",1,2],["μεγ",-1,2],["λυγ",-1,2],["ηδ",-1,2],["κλε",-1,1],["εσωκλε",6,1],["πλε",-1,1],["δανε",-1,1],["σε",-1,1],["ασε",10,1],["καθ",-1,2],["εχθ",-1,2],["κακ",-1,2],["μακ",-1,2],["σκ",-1,2],["φιλ",-1,2],["κυλ",-1,2],["μ",-1,2],["γεμ",19,2],["αχν",-1,2],["συναθρο",-1,1],["π",-1,2],["απ",23,2],["εμπ",23,2],["ευπ",23,2],["αρ",-1,2],["αορ",-1,2],["γυρ",-1,2],["χρ",-1,2],["χωρ",-1,2],["κτ",-1,2],["ακτ",32,2],["χτ",-1,2],["αχτ",34,2],["ταχ",-1,2],["σχ",-1,2],["ασχ",37,2],["υψ",-1,2]],k=[["ιστα",-1,1],["ι
στε",-1,1],["ιστη",-1,1],["ιστοι",-1,1],["ιστων",-1,1],["ιστο",-1,1],["ιστεσ",-1,1],["ιστησ",-1,1],["ιστοσ",-1,1],["ιστουσ",-1,1],["ιστου",-1,1]],d=[["εγκλε",-1,1],["αποκλε",-1,1],["δανε",-1,2],["αντιδανε",2,2],["σε",-1,1],["μετασε",4,1],["μικροσε",4,1]],g=[["ατομικ",-1,2],["εθνικ",-1,4],["τοπικ",-1,7],["εκλεκτικ",-1,5],["σκεπτικ",-1,6],["γνωστικ",-1,3],["αγνωστικ",5,1],["αλεξανδριν",-1,8],["θεατριν",-1,10],["βυζαντιν",-1,9]],w=[["ισμοι",-1,1],["ισμων",-1,1],["ισμο",-1,1],["ισμοσ",-1,1],["ισμουσ",-1,1],["ισμου",-1,1]],v=[["σ",-1,1],["χ",-1,1]],h=[["ουδακια",-1,1],["αρακια",-1,1],["ουδακι",-1,1],["αρακι",-1,1]],q=[["β",-1,2],["βαμβ",0,1],["σλοβ",0,1],["τσεχοσλοβ",2,1],["καρδ",-1,2],["ζ",-1,2],["τζ",5,1],["κ",-1,1],["καπακ",7,1],["σοκ",7,1],["σκ",7,1],["βαλ",-1,2],["μαλ",-1,1],["γλ",-1,2],["τριπολ",-1,2],["πλ",-1,1],["λουλ",-1,1],["φυλ",-1,1],["καιμ",-1,1],["κλιμ",-1,1],["φαρμ",-1,1],["γιαν",-1,2],["σπαν",-1,1],["ηγουμεν",-1,2],["κον",-1,1],["μακρυν",-1,2],["π",-1,2],["κατραπ",26,1],["ρ",-1,1],["βρ",28,1],["λαβρ",29,1],["αμβρ",29,1],["μερ",28,1],["πατερ",28,2],["ανθρ",28,1],["κορ",28,1],["σ",-1,1],["ναγκασ",36,1],["τοσ",36,2],["μουστ",-1,1],["ρυ",-1,1],["φ",-1,1],["σφ",41,1],["αλισφ",42,1],["νυφ",41,2],["χ",-1,1]],p=[["ακια",-1,1],["αρακια",0,1],["ιτσα",-1,1],["ακι",-1,1],["αρακι",3,1],["ιτσων",-1,1],["ιτσασ",-1,1],["ιτσεσ",-1,1]],C=[["ψαλ",-1,1],["αιφν",-1,1],["ολο",-1,1],["ιρ",-1,1]],S=[["ε",-1,1],["παιχν",-1,1]],B=[["ιδια",-1,1],["ιδιων",-1,1],["ιδιο",-1,1]],G=[["ιβ",-1,1],["δ",-1,1],["φραγκ",-1,1],["λυκ",-1,1],["οβελ",-1,1],["μην",-1,1],["ρ",-1,1]],W=[["ισκε",-1,1],["ισκο",-1,1],["ισκοσ",-1,1],["ισκου",-1,1]],j=[["αδων",-1,1],["αδεσ",-1,1]],x=[["γιαγι",-1,-1],["θει",-1,-1],["οκ",-1,-1],["μαμ",-1,-1],["μαν",-1,-1],["μπαμπ",-1,-1],["πεθερ",-1,-1],["πατερ",-1,-1],["κυρ",-1,-1],["νταντ",-1,-1]],y=[["εδων",-1,1],["εδεσ",-1,1]],z=[["μιλ",-1,1],["δαπ",-1,1],["γηπ",-1,1],["ιπ",-1,1],["εμπ",-1,1],["οπ",-1,1],["κρασπ",-1,1],["υπ",-1,1]],A=[["ουδων",-1,1],["ουδεσ",-1,1]],D=[["τραγ",-1,1],["φε",-1,1],["καλιακ",-1,1],["αρκ",-1,1],["σκ",-1,1],["πεταλ",-1,1],["βελ",-1,1],["λουλ",-1,1],["φλ",-1,1],["χν",-1,1],["πλεξ",-1,1],["σπ",-1,1],["φρ",-1,1],["σ",-1,1],["λιχ",-1,1]],E=[["εων",-1,1],["εωσ",-1,1]],F=[["δ",-1,1],["ιδ",0,1],["θ",-1,1],["γαλ",-1,1],["ελ",-1,1],["ν",-1,1],["π",-1,1],["παρ",-1,1]],K=[["ια",-1,1],["ιων",-1,1],["ιου",-1,1]],L=[["ικα",-1,1],["ικων",-1,1],["ικο",-1,1],["ικου",-1,1]],M=[["αδ",-1,1],["συναδ",0,1],["καταδ",0,1],["αντιδ",-1,1],["ενδ",-1,1],["φυλοδ",-1,1],["υποδ",-1,1],["πρωτοδ",-1,1],["εξωδ",-1,1],["ηθ",-1,1],["ανηθ",9,1],["ξικ",-1,1],["αλ",-1,1],["αμμοχαλ",12,1],["συνομηλ",-1,1],["μπολ",-1,1],["μουλ",-1,1],["τσαμ",-1,1],["βρωμ",-1,1],["αμαν",-1,1],["μπαν",-1,1],["καλλιν",-1,1],["ποστελν",-1,1],["φιλον",-1,1],["καλπ",-1,1],["γερ",-1,1],["χασ",-1,1],["μποσ",-1,1],["πλιατσ",-1,1],["πετσ",-1,1],["πιτσ",-1,1],["φυσ",-1,1],["μπαγιατ",-1,1],["νιτ",-1,1],["πικαντ",-1,1],["σερτ",-1,1]],N=[["αγαμε",-1,1],["ηκαμε",-1,1],["ηθηκαμε",1,1],["ησαμε",-1,1],["ουσαμε",-1,1]],O=[["βουβ",-1,1],["ξεθ",-1,1],["πεθ",-1,1],["αποθ",-1,1],["αποκ",-1,1],["ουλ",-1,1],["αναπ",-1,1],["πικρ",-1,1],["ποτ",-1,1],["αποστ",-1,1],["χ",-1,1],["σιχ",10,1]],P=[["τρ",-1,1],["τσ",-1,1]],Q=[["αγανε",-1,1],["ηκανε",-1,1],["ηθηκανε",1,1],["ησανε",-1,1],["ουσανε",-1,1],["οντανε",-1,1],["ιοντανε",5,1],["ουντανε",-1,1],["ιουντανε",7,1],["οτανε",-1,1],["ιοτανε",9,1]],R=[["ταβ",-1,1],["νταβ",0,1],["ψηλοταβ",0,1],["λιβ",-1,1],["κλιβ",3,1],["ξηροκλιβ",4,1],["γ",-1,1],["αγ",6,1],["τραγ",7,1],["τσαγ",7,1],["αθιγγ",6,1],["τσιγγ",6,1]
,["ατσιγγ",11,1],["στεγ",6,1],["απηγ",6,1],["σιγ",6,1],["ανοργ",6,1],["ενοργ",6,1],["καλπουζ",-1,1],["θ",-1,1],["μωαμεθ",19,1],["πιθ",19,1],["απιθ",21,1],["δεκ",-1,1],["πελεκ",-1,1],["ικ",-1,1],["ανικ",25,1],["βουλκ",-1,1],["βασκ",-1,1],["βραχυκ",-1,1],["γαλ",-1,1],["καταγαλ",30,1],["ολογαλ",30,1],["βαθυγαλ",30,1],["μελ",-1,1],["καστελ",-1,1],["πορτολ",-1,1],["πλ",-1,1],["διπλ",37,1],["λαοπλ",37,1],["ψυχοπλ",37,1],["ουλ",-1,1],["μ",-1,1],["ολιγοδαμ",42,1],["μουσουλμ",42,1],["δραδουμ",42,1],["βραχμ",42,1],["ν",-1,1],["αμερικαν",47,1],["π",-1,1],["αδαπ",49,1],["χαμηλοδαπ",49,1],["πολυδαπ",49,1],["κοπ",49,1],["υποκοπ",53,1],["τσοπ",49,1],["σπ",49,1],["ερ",-1,1],["γερ",57,1],["βετερ",57,1],["λουθηρ",-1,1],["κορμορ",-1,1],["περιτρ",-1,1],["ουρ",-1,1],["σ",-1,1],["βασ",64,1],["πολισ",64,1],["σαρακατσ",64,1],["θυσ",64,1],["διατ",-1,1],["πλατ",-1,1],["τσαρλατ",-1,1],["τετ",-1,1],["πουριτ",-1,1],["σουλτ",-1,1],["μαιντ",-1,1],["ζωντ",-1,1],["καστ",-1,1],["φ",-1,1],["διαφ",78,1],["στεφ",78,1],["φωτοστεφ",80,1],["περηφ",78,1],["υπερηφ",82,1],["κοιλαρφ",78,1],["πενταρφ",78,1],["ορφ",78,1],["χ",-1,1],["αμηχ",87,1],["βιομηχ",87,1],["μεγλοβιομηχ",89,1],["καπνοβιομηχ",89,1],["μικροβιομηχ",89,1],["πολυμηχ",87,1],["λιχ",87,1]],T=[["ησετε",-1,1]],U=[["ενδ",-1,1],["συνδ",-1,1],["οδ",-1,1],["διαθ",-1,1],["καθ",-1,1],["ραθ",-1,1],["ταθ",-1,1],["τιθ",-1,1],["εκθ",-1,1],["ενθ",-1,1],["συνθ",-1,1],["ροθ",-1,1],["υπερθ",-1,1],["σθ",-1,1],["ευθ",-1,1],["αρκ",-1,1],["ωφελ",-1,1],["βολ",-1,1],["αιν",-1,1],["πον",-1,1],["ρον",-1,1],["συν",-1,1],["βαρ",-1,1],["βρ",-1,1],["αιρ",-1,1],["φορ",-1,1],["ευρ",-1,1],["πυρ",-1,1],["χωρ",-1,1],["νετ",-1,1],["σχ",-1,1]],V=[["παγ",-1,1],["δ",-1,1],["αδ",1,1],["θ",-1,1],["αθ",3,1],["τοκ",-1,1],["σκ",-1,1],["παρακαλ",-1,1],["σκελ",-1,1],["απλ",-1,1],["εμ",-1,1],["αν",-1,1],["βεν",-1,1],["βαρον",-1,1],["κοπ",-1,1],["σερπ",-1,1],["αβαρ",-1,1],["εναρ",-1,1],["αβρ",-1,1],["μπορ",-1,1],["θαρρ",-1,1],["ντρ",-1,1],["υ",-1,1],["νιφ",-1,1],["συρφ",-1,1]],X=[["οντασ",-1,1],["ωντασ",-1,1]],Y=[["ομαστε",-1,1],["ιομαστε",0,1]],Z=[["π",-1,1],["απ",0,1],["ακαταπ",1,1],["συμπ",0,1],["ασυμπ",3,1],["αμεταμφ",-1,1]],$=[["ζ",-1,1],["αλ",-1,1],["παρακαλ",1,1],["εκτελ",-1,1],["μ",-1,1],["ξ",-1,1],["προ",-1,1],["αρ",-1,1],["νισ",-1,1]],r1=[["ηθηκα",-1,1],["ηθηκε",-1,1],["ηθηκεσ",-1,1]],i1=[["πιθ",-1,1],["οθ",-1,1],["ναρθ",-1,1],["σκουλ",-1,1],["σκωλ",-1,1],["σφ",-1,1]],c1=[["θ",-1,1],["διαθ",0,1],["παρακαταθ",0,1],["συνθ",0,1],["προσθ",0,1]],s1=[["ηκα",-1,1],["ηκε",-1,1],["ηκεσ",-1,1]],e1=[["φαγ",-1,1],["ληγ",-1,1],["φρυδ",-1,1],["μαντιλ",-1,1],["μαλλ",-1,1],["ομ",-1,1],["βλεπ",-1,1],["ποδαρ",-1,1],["κυματ",-1,1],["πρωτ",-1,1],["λαχ",-1,1],["πανταχ",-1,1]],o1=[["τσα",-1,1],["χαδ",-1,1],["μεδ",-1,1],["λαμπιδ",-1,1],["δε",-1,1],["πλε",-1,1],["μεσαζ",-1,1],["δεσποζ",-1,1],["αιθ",-1,1],["φαρμακ",-1,1],["αγκ",-1,1],["ανηκ",-1,1],["λ",-1,1],["μ",-1,1],["αμ",13,1],["βρομ",13,1],["υποτειν",-1,1],["εκλιπ",-1,1],["ρ",-1,1],["ενδιαφερ",18,1],["αναρρ",18,1],["πατ",-1,1],["καθαρευ",-1,1],["δευτερευ",-1,1],["λεχ",-1,1]],u1=[["ουσα",-1,1],["ουσε",-1,1],["ουσεσ",-1,1]],a1=[["πελ",-1,1],["λλ",-1,1],["σμην",-1,1],["ρπ",-1,1],["πρ",-1,1],["φρ",-1,1],["χορτ",-1,1],["οφ",-1,1],["ψοφ",7,-1],["σφ",-1,1],["λοχ",-1,1],["ναυλοχ",10,-1]],t1=[["αμαλλι",-1,1],["λ",-1,1],["αμαλ",1,1],["μ",-1,1],["ουλαμ",3,1],["εν",-1,1],["δερβεν",5,1],["π",-1,1],["αειπ",7,1],["αρτιπ",7,1],["συμπ",7,1],["νεοπ",7,1],["κροκαλοπ",7,1],["ολοπ",7,1],["προσωποπ",7,1],["σιδηροπ",7,1],["δροσοπ",7,1],["ασπ",7,1],["ανυπ",7,1],["ρ",-1,1],["ασπαρ",19,1],["χαρ",19,1],
["αχαρ",21,1],["απερ",19,1],["τρ",19,1],["ουρ",19,1],["τ",-1,1],["διατ",26,1],["επιτ",26,1],["συντ",26,1],["ομοτ",26,1],["νομοτ",30,1],["αποτ",26,1],["υποτ",26,1],["αβαστ",26,1],["αιμοστ",26,1],["προστ",26,1],["ανυστ",26,1],["ναυ",-1,1],["αφ",-1,1],["ξεφ",-1,1],["αδηφ",-1,1],["παμφ",-1,1],["πολυφ",-1,1]],_1=[["αγα",-1,1],["αγε",-1,1],["αγεσ",-1,1]],l1=[["ησα",-1,1],["ησε",-1,1],["ησου",-1,1]],m1=[["ν",-1,1],["δωδεκαν",0,1],["επταν",0,1],["μεγαλον",0,1],["ερημον",0,1],["χερσον",0,1]],f1=[["ηστε",-1,1]],b1=[["σβ",-1,1],["ασβ",0,1],["απλ",-1,1],["αειμν",-1,1],["χρ",-1,1],["αχρ",4,1],["κοινοχρ",4,1],["δυσχρ",4,1],["ευχρ",4,1],["παλιμψ",-1,1]],n1=[["ουνε",-1,1],["ηθουνε",0,1],["ησουνε",0,1]],k1=[["σπι",-1,1],["ν",-1,1],["εξων",1,1],["ρ",-1,1],["στραβομουτσ",-1,1],["κακομουτσ",-1,1]],d1=[["ουμε",-1,1],["ηθουμε",0,1],["ησουμε",0,1]],g1=[["αζ",-1,1],["ωριοπλ",-1,1],["ασουσ",-1,1],["παρασουσ",2,1],["αλλοσουσ",-1,1],["φ",-1,1],["χ",-1,1]],w1=[["ματα",-1,1],["ματων",-1,1],["ματοσ",-1,1]],v1=[["α",-1,1],["ιουμα",0,1],["ομουνα",0,1],["ιομουνα",2,1],["οσουνα",0,1],["ιοσουνα",4,1],["ε",-1,1],["αγατε",6,1],["ηκατε",6,1],["ηθηκατε",8,1],["ησατε",6,1],["ουσατε",6,1],["ειτε",6,1],["ηθειτε",12,1],["ιεμαστε",6,1],["ουμαστε",6,1],["ιουμαστε",15,1],["ιεσαστε",6,1],["οσαστε",6,1],["ιοσαστε",18,1],["η",-1,1],["ι",-1,1],["αμαι",21,1],["ιεμαι",21,1],["ομαι",21,1],["ουμαι",21,1],["ασαι",21,1],["εσαι",21,1],["ιεσαι",27,1],["αται",21,1],["εται",21,1],["ιεται",30,1],["ονται",21,1],["ουνται",21,1],["ιουνται",33,1],["ει",21,1],["αει",35,1],["ηθει",35,1],["ησει",35,1],["οι",21,1],["αν",-1,1],["αγαν",40,1],["ηκαν",40,1],["ηθηκαν",42,1],["ησαν",40,1],["ουσαν",40,1],["οντουσαν",45,1],["ιοντουσαν",46,1],["ονταν",40,1],["ιονταν",48,1],["ουνταν",40,1],["ιουνταν",50,1],["οταν",40,1],["ιοταν",52,1],["ομασταν",40,1],["ιομασταν",54,1],["οσασταν",40,1],["ιοσασταν",56,1],["ουν",-1,1],["ηθουν",58,1],["ομουν",58,1],["ιομουν",60,1],["ησουν",58,1],["οσουν",58,1],["ιοσουν",63,1],["ων",-1,1],["ηδων",65,1],["ο",-1,1],["ασ",-1,1],["εσ",-1,1],["ηδεσ",69,1],["ησεσ",69,1],["ησ",-1,1],["εισ",-1,1],["ηθεισ",73,1],["οσ",-1,1],["υσ",-1,1],["ουσ",76,1],["υ",-1,1],["ου",78,1],["ω",-1,1],["αω",80,1],["ηθω",80,1],["ησω",80,1]],h1=[["οτερ",-1,1],["εστερ",-1,1],["υτερ",-1,1],["ωτερ",-1,1],["οτατ",-1,1],["εστατ",-1,1],["υτατ",-1,1],["ωτατ",-1,1]],H=[81,65,16,1],I=[81,65,0,1],J=!1;this.stem=function(){s.limit_backward=s.cursor,s.cursor=s.limit;var r=s.limit-s.cursor;if((()=>{for(var r;;){var i=s.limit-s.cursor;r:{switch(s.ket=s.cursor,r=s.find_among_b(e),s.bra=s.cursor,r){case 1:if(s.slice_from("α"))break;return;case 2:if(s.slice_from("β"))break;return;case 3:if(s.slice_from("γ"))break;return;case 4:if(s.slice_from("δ"))break;return;case 5:if(s.slice_from("ε"))break;return;case 6:if(s.slice_from("ζ"))break;return;case 7:if(s.slice_from("η"))break;return;case 8:if(s.slice_from("θ"))break;return;case 9:if(s.slice_from("ι"))break;return;case 10:if(s.slice_from("κ"))break;return;case 11:if(s.slice_from("λ"))break;return;case 12:if(s.slice_from("μ"))break;return;case 13:if(s.slice_from("ν"))break;return;case 14:if(s.slice_from("ξ"))break;return;case 15:if(s.slice_from("ο"))break;return;case 16:if(s.slice_from("π"))break;return;case 17:if(s.slice_from("ρ"))break;return;case 18:if(s.slice_from("σ"))break;return;case 19:if(s.slice_from("τ"))break;return;case 20:if(s.slice_from("υ"))break;return;case 21:if(s.slice_from("φ"))break;return;case 22:if(s.slice_from("χ"))break;return;case 23:if(s.slice_from("ψ"))break;return;case 24:if(s.slice_from("ω"))break;return;case 
25:if(s.cursor<=s.limit_backward)break r;s.cursor--}continue}s.cursor=s.limit-i;break}})(),s.cursor=s.limit-r,!(3<=s.current.length))return!1;J=!0;var r=s.limit-s.cursor,r=((()=>{var r;if(s.ket=s.cursor,0!=(r=s.find_among_b(o))){switch(s.bra=s.cursor,r){case 1:if(s.slice_from("φα"))break;return;case 2:if(s.slice_from("σκα"))break;return;case 3:if(s.slice_from("ολο"))break;return;case 4:if(s.slice_from("σο"))break;return;case 5:if(s.slice_from("τατο"))break;return;case 6:if(s.slice_from("κρε"))break;return;case 7:if(s.slice_from("περ"))break;return;case 8:if(s.slice_from("τερ"))break;return;case 9:if(s.slice_from("φω"))break;return;case 10:if(s.slice_from("καθεστ"))break;return;case 11:if(s.slice_from("γεγον"))break;return}J=!1}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r;if(s.ket=s.cursor,0!=s.find_among_b(a)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0!=(r=s.find_among_b(u)))&&!(s.cursor>s.limit_backward))switch(r){case 1:if(s.slice_from("ι"))break;return;case 2:if(s.slice_from("ιζ"))break}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(_)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(t)||s.cursor>s.limit_backward||s.slice_from("ων")),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r;r:{var i=s.limit-s.cursor;if(s.ket=s.cursor,s.eq_s_b("ισα")&&(s.bra=s.cursor,!(s.cursor>s.limit_backward))){if(s.slice_from("ισ"))break r;return}s.cursor=s.limit-i,s.ket=s.cursor}if(0!=s.find_among_b(m)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0!=(r=s.find_among_b(l)))&&!(s.cursor>s.limit_backward))switch(r){case 1:if(s.slice_from("ι"))break;return;case 2:if(s.slice_from("ισ"))break}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(b)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(f)||s.cursor>s.limit_backward||s.slice_from("ι")),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r;if(s.ket=s.cursor,0!=s.find_among_b(k)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0!=(r=s.find_among_b(n)))&&!(s.cursor>s.limit_backward))switch(r){case 1:if(s.slice_from("ι"))break;return;case 2:if(s.slice_from("ιστ"))break}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r;if(s.ket=s.cursor,0!=s.find_among_b(w)&&(s.bra=s.cursor,s.slice_del())){J=!1;var i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,0==(r=s.find_among_b(d))||s.cursor>s.limit_backward){if(s.cursor=s.limit-i,s.ket=s.cursor,0==(r=s.find_among_b(g)))return;switch(s.bra=s.cursor,r){case 1:if(s.slice_from("αγνωστ"))break;return;case 2:if(s.slice_from("ατομ"))break;return;case 3:if(s.slice_from("γνωστ"))break;return;case 4:if(s.slice_from("εθν"))break;return;case 5:if(s.slice_from("εκλεκτ"))break;return;case 6:if(s.slice_from("σκεπτ"))break;return;case 7:if(s.slice_from("τοπ"))break;return;case 8:if(s.slice_from("αλεξανδρ"))break;return;case 9:if(s.slice_from("βυζαντ"))break;return;case 10:if(s.slice_from("θεατρ"))break}}else switch(r){case 1:if(s.slice_from("ισμ"))break;return;case 2:if(s.slice_from("ι"))break}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(h)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(v)||s.cursor>s.limit_backward||s.slice_from("αρακ")),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r;if(s.ket=s.cursor,0!=s.find_among_b(p)&&(s.bra=s.cursor,s.slice_del())){J=!1;var 
i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,0==(r=s.find_among_b(q))||s.cursor>s.limit_backward){if(s.cursor=s.limit-i,s.ket=s.cursor,s.bra=s.cursor,!s.eq_s_b("κορ"))return;if(!s.slice_from("ιτσ"));}else switch(r){case 1:if(s.slice_from("ακ"))break;return;case 2:if(s.slice_from("ιτσ"))break}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{if(s.ket=s.cursor,0!=s.find_among_b(B)&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var r=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(C)&&!(s.cursor>s.limit_backward)){if(s.slice_from("ιδ"))break r;return}if(s.cursor=s.limit-r,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(S))return;if(!s.slice_from("ιδ"))return}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(W)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(G)||s.cursor>s.limit_backward||s.slice_from("ισκ")),s.cursor=s.limit-r,s.limit-s.cursor),i=(s.ket=s.cursor,0!=s.find_among_b(j)&&(s.bra=s.cursor,s.slice_del())&&(i=s.limit-s.cursor,0==s.find_among_b(x))&&(s.cursor=s.limit-i,i=s.cursor,s.insert(s.cursor,s.cursor,"αδ"),s.cursor=i),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(y)&&(s.bra=s.cursor,s.slice_del())&&(s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(z))&&s.slice_from("εδ"),s.cursor=s.limit-i,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(A)&&(s.bra=s.cursor,s.slice_del())&&(s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(D))&&s.slice_from("ουδ"),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(E)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(F)||s.cursor>s.limit_backward||s.slice_from("ε")),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(K)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,s.in_grouping_b(H,945,969))&&s.slice_from("ι"),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{if(s.ket=s.cursor,0!=s.find_among_b(L)&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var r=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,s.in_grouping_b(H,945,969)){if(s.slice_from("ικ"))break r;return}s.cursor=s.limit-r,s.ket=s.cursor}s.bra=s.cursor,0==s.find_among_b(M)||s.cursor>s.limit_backward||s.slice_from("ικ")}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r=s.limit-s.cursor;if(s.ket=s.cursor,!s.eq_s_b("αγαμε")||(s.bra=s.cursor,s.cursor>s.limit_backward||s.slice_from("αγαμ"))){s.cursor=s.limit-r;r=s.limit-s.cursor;if(s.ket=s.cursor,0!=s.find_among_b(N)){if(s.bra=s.cursor,!s.slice_del())return;J=!1}s.cursor=s.limit-r,s.ket=s.cursor,s.eq_s_b("αμε")&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(O)||s.cursor>s.limit_backward||s.slice_from("αμ"))}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r=s.limit-s.cursor;if(s.ket=s.cursor,0!=s.find_among_b(Q)){if(s.bra=s.cursor,!s.slice_del())return;if(J=!1,s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(P)&&!(s.cursor>s.limit_backward||s.slice_from("αγαν")))return}if(s.cursor=s.limit-r,s.ket=s.cursor,s.eq_s_b("ανε")&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,s.in_grouping_b(I,945,969)){if(s.slice_from("αν"))break r;return}s.cursor=s.limit-i,s.ket=s.cursor}s.bra=s.cursor,0==s.find_among_b(R)||s.cursor>s.limit_backward||s.slice_from("αν")}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var 
r=s.limit-s.cursor;if(s.ket=s.cursor,0!=s.find_among_b(T)){if(s.bra=s.cursor,!s.slice_del())return;J=!1}if(s.cursor=s.limit-r,s.ket=s.cursor,s.eq_s_b("ετε")&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,s.in_grouping_b(I,945,969)){if(s.slice_from("ετ"))break r;return}if(s.cursor=s.limit-i,s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(U)){if(s.slice_from("ετ"))break r;return}s.cursor=s.limit-i,s.ket=s.cursor}s.bra=s.cursor,0==s.find_among_b(V)||s.cursor>s.limit_backward||s.slice_from("ετ")}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{if(s.ket=s.cursor,0!=s.find_among_b(X)&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var r=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,s.eq_s_b("αρχ")&&!(s.cursor>s.limit_backward)){if(s.slice_from("οντ"))break r;return}if(s.cursor=s.limit-r,s.ket=s.cursor,s.bra=s.cursor,!s.eq_s_b("κρε"))return;if(!s.slice_from("ωντ"))return}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(Y)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,!s.eq_s_b("ον")||s.cursor>s.limit_backward||s.slice_from("ομαστ")),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r=s.limit-s.cursor;if(s.ket=s.cursor,s.eq_s_b("ιεστε")){if(s.bra=s.cursor,!s.slice_del())return;if(J=!1,s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(Z)&&!(s.cursor>s.limit_backward||s.slice_from("ιεστ")))return}s.cursor=s.limit-r,s.ket=s.cursor,s.eq_s_b("εστε")&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b($)||s.cursor>s.limit_backward||s.slice_from("ιεστ"))})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r=s.limit-s.cursor;if(s.ket=s.cursor,0!=s.find_among_b(r1)){if(s.bra=s.cursor,!s.slice_del())return;J=!1}if(s.cursor=s.limit-r,s.ket=s.cursor,0!=s.find_among_b(s1)&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(i1)){if(s.slice_from("ηκ"))break r;return}if(s.cursor=s.limit-i,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(c1))return;if(s.cursor>s.limit_backward)return;if(!s.slice_from("ηκ"))return}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{if(s.ket=s.cursor,0!=s.find_among_b(u1)&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var r=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(e1)){if(s.slice_from("ουσ"))break r;return}if(s.cursor=s.limit-r,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(o1))return;if(s.cursor>s.limit_backward)return;if(!s.slice_from("ουσ"))return}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(l1)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(m1)||s.cursor>s.limit_backward||s.slice_from("ησ")),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r;if(s.ket=s.cursor,0!=s.find_among_b(_1)&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,s.eq_s_b("κολλ")){if(s.slice_from("αγ"))break r;return}s.cursor=s.limit-i;i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,0!=(r=s.find_among_b(a1)))switch(r){case 
1:if(s.slice_from("αγ"))break;return}else{if(s.cursor=s.limit-i,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(t1))return;if(s.cursor>s.limit_backward)return;if(!s.slice_from("αγ"))return}}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(f1)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(b1)||s.cursor>s.limit_backward||s.slice_from("ηστ")),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(n1)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(k1)||s.cursor>s.limit_backward||s.slice_from("ουν")),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(d1)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(g1)||s.cursor>s.limit_backward||s.slice_from("ουμ")),s.cursor=s.limit-r,s.limit-s.cursor),c=(c=s.limit-s.cursor,s.ket=s.cursor,0!=s.find_among_b(w1)&&(s.bra=s.cursor,!s.slice_from("μα"))||(s.cursor=s.limit-c,J&&(s.ket=s.cursor,0!=s.find_among_b(v1))&&(s.bra=s.cursor,s.slice_del())),s.cursor=s.limit-r,s.limit-s.cursor);return s.ket=s.cursor,0!=s.find_among_b(h1)&&(s.bra=s.cursor,s.slice_del()),s.cursor=s.limit-c,s.cursor=s.limit_backward,!0},this.stemWord=function(r){return s.setCurrent(r),this.stem(),s.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/hindi-stemmer.js b/sphinx/search/minified-js/hindi-stemmer.js
new file mode 100644
index 00000000000..850b0430cdd
--- /dev/null
+++ b/sphinx/search/minified-js/hindi-stemmer.js
@@ -0,0 +1 @@
+var HindiStemmer=function(){var t=new BaseStemmer,r=[["आँ",-1,-1],["ाँ",-1,-1],["इयाँ",1,-1],["आइयाँ",2,-1],["ाइयाँ",2,-1],["ियाँ",1,-1],["आं",-1,-1],["उआं",6,-1],["ुआं",6,-1],["ईं",-1,-1],["आईं",9,-1],["ाईं",9,-1],["एं",-1,-1],["आएं",12,-1],["उएं",12,-1],["ाएं",12,-1],["ताएं",15,-1,e],["अताएं",16,-1],["नाएं",15,-1,e],["अनाएं",18,-1],["ुएं",12,-1],["ओं",-1,-1],["आओं",21,-1],["उओं",21,-1],["ाओं",21,-1],["ताओं",24,-1,e],["अताओं",25,-1],["नाओं",24,-1,e],["अनाओं",27,-1],["ुओं",21,-1],["ां",-1,-1],["इयां",30,-1],["आइयां",31,-1],["ाइयां",31,-1],["ियां",30,-1],["ीं",-1,-1],["तीं",35,-1,e],["अतीं",36,-1],["आतीं",36,-1],["ातीं",36,-1],["ें",-1,-1],["ों",-1,-1],["इयों",41,-1],["आइयों",42,-1],["ाइयों",42,-1],["ियों",41,-1],["अ",-1,-1],["आ",-1,-1],["इ",-1,-1],["ई",-1,-1],["आई",49,-1],["ाई",49,-1],["उ",-1,-1],["ऊ",-1,-1],["ए",-1,-1],["आए",54,-1],["इए",54,-1],["आइए",56,-1],["ाइए",56,-1],["ाए",54,-1],["िए",54,-1],["ओ",-1,-1],["आओ",61,-1],["ाओ",61,-1],["कर",-1,-1,e],["अकर",64,-1],["आकर",64,-1],["ाकर",64,-1],["ा",-1,-1],["ऊंगा",68,-1],["आऊंगा",69,-1],["ाऊंगा",69,-1],["ूंगा",68,-1],["एगा",68,-1],["आएगा",73,-1],["ाएगा",73,-1],["ेगा",68,-1],["ता",68,-1,e],["अता",77,-1],["आता",77,-1],["ाता",77,-1],["ना",68,-1,e],["अना",81,-1],["आना",81,-1],["ाना",81,-1],["आया",68,-1],["ाया",68,-1],["ि",-1,-1],["ी",-1,-1],["ऊंगी",88,-1],["आऊंगी",89,-1],["ाऊंगी",89,-1],["एंगी",88,-1],["आएंगी",92,-1],["ाएंगी",92,-1],["ूंगी",88,-1],["ेंगी",88,-1],["एगी",88,-1],["आएगी",97,-1],["ाएगी",97,-1],["ओगी",88,-1],["आओगी",100,-1],["ाओगी",100,-1],["ेगी",88,-1],["ोगी",88,-1],["ती",88,-1,e],["अती",105,-1],["आती",105,-1],["ाती",105,-1],["नी",88,-1,e],["अनी",109,-1],["ु",-1,-1],["ू",-1,-1],["े",-1,-1],["एंगे",113,-1],["आएंगे",114,-1],["ाएंगे",114,-1],["ेंगे",113,-1],["ओगे",113,-1],["आओगे",118,-1],["ाओगे",118,-1],["ोगे",113,-1],["ते",113,-1,e],["अते",122,-1],["आते",122,-1],["ाते",122,-1],["ने",113,-1,e],["अने",126,-1],["आने",126,-1],["ाने",126,-1],["ो",-1,-1],["्",-1,-1]],i=[255,255,255,255,159,0,0,0,248,7];function e(){return!!t.in_grouping_b(i,2325,2399)}this.stem=function(){return!(t.cursor>=t.limit||(t.cursor++,t.limit_backward=t.cursor,t.cursor=t.limit,t.ket=t.cursor,0==t.find_among_b(r))||(t.bra=t.cursor,!t.slice_del())||(t.cursor=t.limit_backward,0))},this.stemWord=function(r){return t.setCurrent(r),this.stem(),t.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/hungarian-stemmer.js b/sphinx/search/minified-js/hungarian-stemmer.js
index e1fca971f79..a7f3926cacf 100644
--- a/sphinx/search/minified-js/hungarian-stemmer.js
+++ b/sphinx/search/minified-js/hungarian-stemmer.js
@@ -1 +1 @@
-HungarianStemmer=function(){var r=new BaseStemmer;var e=[["cs",-1,-1],["dzs",-1,-1],["gy",-1,-1],["ly",-1,-1],["ny",-1,-1],["sz",-1,-1],["ty",-1,-1],["zs",-1,-1]];var i=[["á",-1,1],["é",-1,2]];var a=[["bb",-1,-1],["cc",-1,-1],["dd",-1,-1],["ff",-1,-1],["gg",-1,-1],["jj",-1,-1],["kk",-1,-1],["ll",-1,-1],["mm",-1,-1],["nn",-1,-1],["pp",-1,-1],["rr",-1,-1],["ccs",-1,-1],["ss",-1,-1],["zzs",-1,-1],["tt",-1,-1],["vv",-1,-1],["ggy",-1,-1],["lly",-1,-1],["nny",-1,-1],["tty",-1,-1],["ssz",-1,-1],["zz",-1,-1]];var t=[["al",-1,1],["el",-1,1]];var s=[["ba",-1,-1],["ra",-1,-1],["be",-1,-1],["re",-1,-1],["ig",-1,-1],["nak",-1,-1],["nek",-1,-1],["val",-1,-1],["vel",-1,-1],["ul",-1,-1],["nál",-1,-1],["nél",-1,-1],["ból",-1,-1],["ról",-1,-1],["tól",-1,-1],["ül",-1,-1],["ből",-1,-1],["ről",-1,-1],["től",-1,-1],["n",-1,-1],["an",19,-1],["ban",20,-1],["en",19,-1],["ben",22,-1],["képpen",22,-1],["on",19,-1],["ön",19,-1],["képp",-1,-1],["kor",-1,-1],["t",-1,-1],["at",29,-1],["et",29,-1],["ként",29,-1],["anként",32,-1],["enként",32,-1],["onként",32,-1],["ot",29,-1],["ért",29,-1],["öt",29,-1],["hez",-1,-1],["hoz",-1,-1],["höz",-1,-1],["vá",-1,-1],["vé",-1,-1]];var u=[["án",-1,2],["én",-1,1],["ánként",-1,2]];var n=[["stul",-1,1],["astul",0,1],["ástul",0,2],["stül",-1,1],["estül",3,1],["éstül",3,3]];var f=[["á",-1,1],["é",-1,1]];var c=[["k",-1,3],["ak",0,3],["ek",0,3],["ok",0,3],["ák",0,1],["ék",0,2],["ök",0,3]];var l=[["éi",-1,1],["áéi",0,3],["ééi",0,2],["é",-1,1],["ké",3,1],["aké",4,1],["eké",4,1],["oké",4,1],["áké",4,3],["éké",4,2],["öké",4,1],["éé",3,2]];var o=[["a",-1,1],["ja",0,1],["d",-1,1],["ad",2,1],["ed",2,1],["od",2,1],["ád",2,2],["éd",2,3],["öd",2,1],["e",-1,1],["je",9,1],["nk",-1,1],["unk",11,1],["ánk",11,2],["énk",11,3],["ünk",11,1],["uk",-1,1],["juk",16,1],["ájuk",17,2],["ük",-1,1],["jük",19,1],["éjük",20,3],["m",-1,1],["am",22,1],["em",22,1],["om",22,1],["ám",22,2],["ém",22,3],["o",-1,1],["á",-1,2],["é",-1,3]];var k=[["id",-1,1],["aid",0,1],["jaid",1,1],["eid",0,1],["jeid",3,1],["áid",0,2],["éid",0,3],["i",-1,1],["ai",7,1],["jai",8,1],["ei",7,1],["jei",10,1],["ái",7,2],["éi",7,3],["itek",-1,1],["eitek",14,1],["jeitek",15,1],["éitek",14,3],["ik",-1,1],["aik",18,1],["jaik",19,1],["eik",18,1],["jeik",21,1],["áik",18,2],["éik",18,3],["ink",-1,1],["aink",25,1],["jaink",26,1],["eink",25,1],["jeink",28,1],["áink",25,2],["éink",25,3],["aitok",-1,1],["jaitok",32,1],["áitok",-1,2],["im",-1,1],["aim",35,1],["jaim",36,1],["eim",35,1],["jeim",38,1],["áim",35,2],["éim",35,3]];var m=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,1,17,36,10,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1];var b=0;function _(){b=r.limit;r:{var i=r.cursor;e:{if(!r.in_grouping(m,97,369)){break e}i:while(true){var a=r.cursor;a:{if(!r.out_grouping(m,97,369)){break a}r.cursor=a;break i}r.cursor=a;if(r.cursor>=r.limit){break e}r.cursor++}i:{var t=r.cursor;a:{if(r.find_among(e)==0){break a}break i}r.cursor=t;if(r.cursor>=r.limit){break e}r.cursor++}b=r.cursor;break r}r.cursor=i;if(!r.out_grouping(m,97,369)){return false}e:while(true){i:{if(!r.in_grouping(m,97,369)){break i}break e}if(r.cursor>=r.limit){return false}r.cursor++}b=r.cursor}return true}function d(){if(!(b<=r.cursor)){return false}return true}function v(){var e;r.ket=r.cursor;e=r.find_among_b(i);if(e==0){return false}r.bra=r.cursor;if(!d()){return false}switch(e){case 1:if(!r.slice_from("a")){return false}break;case 2:if(!r.slice_from("e")){return false}break}return true}function g(){var e=r.limit-r.cursor;if(r.find_among_b(a)==0){return false}r.cursor=r.limit-e;return true}function 
j(){if(r.cursor<=r.limit_backward){return false}r.cursor--;r.ket=r.cursor;{var e=r.cursor-1;if(e{_=e.limit;var r=e.cursor;if(e.in_grouping(f,97,369)){var i=e.cursor;e.go_in_grouping(f,97,369)&&(e.cursor++,_=e.cursor),e.cursor=i}else{if(e.cursor=r,!e.go_out_grouping(f,97,369))return;e.cursor++,_=e.cursor}})(),e.cursor=r,e.limit_backward=e.cursor,e.cursor=e.limit,e.limit-e.cursor),r=(e.ket=e.cursor,0!=e.find_among_b(c)&&(e.bra=e.cursor,b())&&d()&&e.slice_del()&&g(),e.cursor=e.limit-r,e.limit-e.cursor),r=(j(),e.cursor=e.limit-r,e.limit-e.cursor),r=((()=>{var r;if(e.ket=e.cursor,0!=(r=e.find_among_b(t))&&(e.bra=e.cursor,b()))switch(r){case 1:if(e.slice_from("e"))break;return;case 2:if(e.slice_from("a"))break}})(),e.cursor=e.limit-r,e.limit-e.cursor),r=((()=>{var r;if(e.ket=e.cursor,0!=(r=e.find_among_b(o))&&(e.bra=e.cursor,b()))switch(r){case 1:if(e.slice_del())break;return;case 2:if(e.slice_from("a"))break;return;case 3:if(e.slice_from("e"))break}})(),e.cursor=e.limit-r,e.limit-e.cursor),r=(e.ket=e.cursor,0!=e.find_among_b(n)&&(e.bra=e.cursor,b())&&d()&&e.slice_del()&&g(),e.cursor=e.limit-r,e.limit-e.cursor),r=((()=>{var r;if(e.ket=e.cursor,0!=(r=e.find_among_b(k))&&(e.bra=e.cursor,b()))switch(r){case 1:if(e.slice_del())break;return;case 2:if(e.slice_from("e"))break;return;case 3:if(e.slice_from("a"))break}})(),e.cursor=e.limit-r,e.limit-e.cursor),r=((()=>{var r;if(e.ket=e.cursor,0!=(r=e.find_among_b(l))&&(e.bra=e.cursor,b()))switch(r){case 1:if(e.slice_del())break;return;case 2:if(e.slice_from("a"))break;return;case 3:if(e.slice_from("e"))break}})(),e.cursor=e.limit-r,e.limit-e.cursor),r=((()=>{var r;if(e.ket=e.cursor,0!=(r=e.find_among_b(m))&&(e.bra=e.cursor,b()))switch(r){case 1:if(e.slice_del())break;return;case 2:if(e.slice_from("a"))break;return;case 3:if(e.slice_from("e"))break}})(),e.cursor=e.limit-r,e.limit-e.cursor);return(()=>{var r;if(e.ket=e.cursor,0!=(r=e.find_among_b(u))&&(e.bra=e.cursor,b()))switch(r){case 1:if(e.slice_from("a"))break;return;case 2:if(e.slice_from("e"))break;return;case 3:if(e.slice_del())break}})(),e.cursor=e.limit-r,e.cursor=e.limit_backward,!0},this.stemWord=function(r){return e.setCurrent(r),this.stem(),e.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/indonesian-stemmer.js b/sphinx/search/minified-js/indonesian-stemmer.js
new file mode 100644
index 00000000000..89339d9783d
--- /dev/null
+++ b/sphinx/search/minified-js/indonesian-stemmer.js
@@ -0,0 +1 @@
+var IndonesianStemmer=function(){var s=new BaseStemmer,c=[["kah",-1,1],["lah",-1,1],["pun",-1,1]],o=[["nya",-1,1],["ku",-1,1],["mu",-1,1]],r=[["i",-1,1,function(){if(2{var r;if(s.bra=s.cursor,0!=(r=s.find_among(n))){switch(s.ket=s.cursor,r){case 1:if(!s.slice_del())return;a=1,--l;break;case 2:if(!s.slice_del())return;a=3,--l;break;case 3:if(a=1,!s.slice_from("s"))return;--l;break;case 4:if(a=3,!s.slice_from("s"))return;--l;break;case 5:a=1,--l;r:{var e=s.cursor,i=s.cursor;if(s.in_grouping(t,97,117)){if(s.cursor=i,s.slice_from("p"))break r;return}if(s.cursor=e,!s.slice_del())return}break;case 6:a=3,--l;r:{var u=s.cursor,c=s.cursor;if(s.in_grouping(t,97,117)){if(s.cursor=c,s.slice_from("p"))break r;return}if(s.cursor=u,!s.slice_del())return}}return 1}})()?(u=s.cursor,i=s.cursor,l<=2||(s.limit_backward=s.cursor,s.cursor=s.limit,f()&&(s.cursor=s.limit_backward,s.cursor=i,l<=2||m())),s.cursor=u,s.cursor=e):(s.cursor=r,i=s.cursor,m(),s.cursor=i,u=s.cursor,l<=2||(s.limit_backward=s.cursor,s.cursor=s.limit,f()&&(s.cursor=s.limit_backward)),s.cursor=u),0))},this.stemWord=function(r){return s.setCurrent(r),this.stem(),s.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/irish-stemmer.js b/sphinx/search/minified-js/irish-stemmer.js
new file mode 100644
index 00000000000..c90c92292cc
--- /dev/null
+++ b/sphinx/search/minified-js/irish-stemmer.js
@@ -0,0 +1 @@
+var IrishStemmer=function(){var i=new BaseStemmer,e=[["b'",-1,1],["bh",-1,4],["bhf",1,2],["bp",-1,8],["ch",-1,5],["d'",-1,1],["d'fh",5,2],["dh",-1,6],["dt",-1,9],["fh",-1,2],["gc",-1,5],["gh",-1,7],["h-",-1,1],["m'",-1,1],["mb",-1,4],["mh",-1,10],["n-",-1,1],["nd",-1,6],["ng",-1,7],["ph",-1,8],["sh",-1,3],["t-",-1,1],["th",-1,9],["ts",-1,3]],a=[["íochta",-1,1],["aíochta",0,1],["ire",-1,2],["aire",2,2],["abh",-1,1],["eabh",4,1],["ibh",-1,1],["aibh",6,1],["amh",-1,1],["eamh",8,1],["imh",-1,1],["aimh",10,1],["íocht",-1,1],["aíocht",12,1],["irí",-1,2],["airí",14,2]],c=[["óideacha",-1,6],["patacha",-1,5],["achta",-1,1],["arcachta",2,2],["eachta",2,1],["grafaíochta",-1,4],["paite",-1,5],["ach",-1,1],["each",7,1],["óideach",8,6],["gineach",8,3],["patach",7,5],["grafaíoch",-1,4],["pataigh",-1,5],["óidigh",-1,6],["achtúil",-1,1],["eachtúil",15,1],["gineas",-1,3],["ginis",-1,3],["acht",-1,1],["arcacht",19,2],["eacht",19,1],["grafaíocht",-1,4],["arcachtaí",-1,2],["grafaíochtaí",-1,4]],t=[["imid",-1,1],["aimid",0,1],["ímid",-1,1],["aímid",2,1],["adh",-1,2],["eadh",4,2],["faidh",-1,1],["fidh",-1,1],["áil",-1,2],["ain",-1,2],["tear",-1,2],["tar",-1,2]],s=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,1,17,4,2],o=0,u=0,f=0;function n(){return u<=i.cursor}function h(){return o<=i.cursor}function m(){var r;if(i.ket=i.cursor,0!=(r=i.find_among_b(t)))switch(i.bra=i.cursor,r){case 1:if(!(f<=i.cursor))return;if(i.slice_del())break;return;case 2:if(!n())return;if(i.slice_del())break;return}}this.stem=function(){var r=i.cursor,r=((()=>{var r;if(i.bra=i.cursor,0!=(r=i.find_among(e)))switch(i.ket=i.cursor,r){case 1:if(i.slice_del())break;return;case 2:if(i.slice_from("f"))break;return;case 3:if(i.slice_from("s"))break;return;case 4:if(i.slice_from("b"))break;return;case 5:if(i.slice_from("c"))break;return;case 6:if(i.slice_from("d"))break;return;case 7:if(i.slice_from("g"))break;return;case 8:if(i.slice_from("p"))break;return;case 9:if(i.slice_from("t"))break;return;case 10:if(i.slice_from("m"))break}})(),i.cursor=r,f=i.limit,u=i.limit,o=i.limit,r=i.cursor,i.go_out_grouping(s,97,250)&&(i.cursor++,f=i.cursor,i.go_in_grouping(s,97,250))&&(i.cursor++,u=i.cursor,i.go_out_grouping(s,97,250))&&(i.cursor++,i.go_in_grouping(s,97,250))&&(i.cursor++,o=i.cursor),i.cursor=r,i.limit_backward=i.cursor,i.cursor=i.limit,i.limit-i.cursor),r=((()=>{var r;if(i.ket=i.cursor,0!=(r=i.find_among_b(a)))switch(i.bra=i.cursor,r){case 1:if(!n())return;if(i.slice_del())break;return;case 2:if(!h())return;if(i.slice_del())break}})(),i.cursor=i.limit-r,i.limit-i.cursor),r=((()=>{var r;if(i.ket=i.cursor,0!=(r=i.find_among_b(c)))switch(i.bra=i.cursor,r){case 1:if(!h())return;if(i.slice_del())break;return;case 2:if(i.slice_from("arc"))break;return;case 3:if(i.slice_from("gin"))break;return;case 4:if(i.slice_from("graf"))break;return;case 5:if(i.slice_from("paite"))break;return;case 6:if(i.slice_from("óid"))break}})(),i.cursor=i.limit-r,i.limit-i.cursor);return m(),i.cursor=i.limit-r,i.cursor=i.limit_backward,!0},this.stemWord=function(r){return i.setCurrent(r),this.stem(),i.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/italian-stemmer.js b/sphinx/search/minified-js/italian-stemmer.js
index a3a5c4265e4..ac46b1d415e 100644
--- a/sphinx/search/minified-js/italian-stemmer.js
+++ b/sphinx/search/minified-js/italian-stemmer.js
@@ -1 +1 @@
-ItalianStemmer=function(){var r=new BaseStemmer;var e=[["",-1,7],["qu",0,6],["á",0,1],["é",0,2],["í",0,3],["ó",0,4],["ú",0,5]];var i=[["",-1,3],["I",0,1],["U",0,2]];var a=[["la",-1,-1],["cela",0,-1],["gliela",0,-1],["mela",0,-1],["tela",0,-1],["vela",0,-1],["le",-1,-1],["cele",6,-1],["gliele",6,-1],["mele",6,-1],["tele",6,-1],["vele",6,-1],["ne",-1,-1],["cene",12,-1],["gliene",12,-1],["mene",12,-1],["sene",12,-1],["tene",12,-1],["vene",12,-1],["ci",-1,-1],["li",-1,-1],["celi",20,-1],["glieli",20,-1],["meli",20,-1],["teli",20,-1],["veli",20,-1],["gli",20,-1],["mi",-1,-1],["si",-1,-1],["ti",-1,-1],["vi",-1,-1],["lo",-1,-1],["celo",31,-1],["glielo",31,-1],["melo",31,-1],["telo",31,-1],["velo",31,-1]];var s=[["ando",-1,1],["endo",-1,1],["ar",-1,2],["er",-1,2],["ir",-1,2]];var o=[["ic",-1,-1],["abil",-1,-1],["os",-1,-1],["iv",-1,1]];var u=[["ic",-1,1],["abil",-1,1],["iv",-1,1]];var t=[["ica",-1,1],["logia",-1,3],["osa",-1,1],["ista",-1,1],["iva",-1,9],["anza",-1,1],["enza",-1,5],["ice",-1,1],["atrice",7,1],["iche",-1,1],["logie",-1,3],["abile",-1,1],["ibile",-1,1],["usione",-1,4],["azione",-1,2],["uzione",-1,4],["atore",-1,2],["ose",-1,1],["ante",-1,1],["mente",-1,1],["amente",19,7],["iste",-1,1],["ive",-1,9],["anze",-1,1],["enze",-1,5],["ici",-1,1],["atrici",25,1],["ichi",-1,1],["abili",-1,1],["ibili",-1,1],["ismi",-1,1],["usioni",-1,4],["azioni",-1,2],["uzioni",-1,4],["atori",-1,2],["osi",-1,1],["anti",-1,1],["amenti",-1,6],["imenti",-1,6],["isti",-1,1],["ivi",-1,9],["ico",-1,1],["ismo",-1,1],["oso",-1,1],["amento",-1,6],["imento",-1,6],["ivo",-1,9],["ità",-1,8],["istà",-1,1],["istè",-1,1],["istì",-1,1]];var c=[["isca",-1,1],["enda",-1,1],["ata",-1,1],["ita",-1,1],["uta",-1,1],["ava",-1,1],["eva",-1,1],["iva",-1,1],["erebbe",-1,1],["irebbe",-1,1],["isce",-1,1],["ende",-1,1],["are",-1,1],["ere",-1,1],["ire",-1,1],["asse",-1,1],["ate",-1,1],["avate",16,1],["evate",16,1],["ivate",16,1],["ete",-1,1],["erete",20,1],["irete",20,1],["ite",-1,1],["ereste",-1,1],["ireste",-1,1],["ute",-1,1],["erai",-1,1],["irai",-1,1],["isci",-1,1],["endi",-1,1],["erei",-1,1],["irei",-1,1],["assi",-1,1],["ati",-1,1],["iti",-1,1],["eresti",-1,1],["iresti",-1,1],["uti",-1,1],["avi",-1,1],["evi",-1,1],["ivi",-1,1],["isco",-1,1],["ando",-1,1],["endo",-1,1],["Yamo",-1,1],["iamo",-1,1],["avamo",-1,1],["evamo",-1,1],["ivamo",-1,1],["eremo",-1,1],["iremo",-1,1],["assimo",-1,1],["ammo",-1,1],["emmo",-1,1],["eremmo",54,1],["iremmo",54,1],["immo",-1,1],["ano",-1,1],["iscano",58,1],["avano",58,1],["evano",58,1],["ivano",58,1],["eranno",-1,1],["iranno",-1,1],["ono",-1,1],["iscono",65,1],["arono",65,1],["erono",65,1],["irono",65,1],["erebbero",-1,1],["irebbero",-1,1],["assero",-1,1],["essero",-1,1],["issero",-1,1],["ato",-1,1],["ito",-1,1],["uto",-1,1],["avo",-1,1],["evo",-1,1],["ivo",-1,1],["ar",-1,1],["ir",-1,1],["erà",-1,1],["irà",-1,1],["erò",-1,1],["irò",-1,1]];var l=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,128,128,8,2,1];var n=[17,65,0,0,0,0,0,0,0,0,0,0,0,0,0,128,128,8,2];var f=[17];var b=0;var m=0;var k=0;function _(){var i;var a=r.cursor;while(true){var s=r.cursor;r:{r.bra=r.cursor;i=r.find_among(e);if(i==0){break r}r.ket=r.cursor;switch(i){case 1:if(!r.slice_from("à")){return false}break;case 2:if(!r.slice_from("è")){return false}break;case 3:if(!r.slice_from("ì")){return false}break;case 4:if(!r.slice_from("ò")){return false}break;case 5:if(!r.slice_from("ù")){return false}break;case 6:if(!r.slice_from("qU")){return false}break;case 7:if(r.cursor>=r.limit){break 
r}r.cursor++;break}continue}r.cursor=s;break}r.cursor=a;while(true){var o=r.cursor;r:{e:while(true){var u=r.cursor;i:{if(!r.in_grouping(l,97,249)){break i}r.bra=r.cursor;a:{var t=r.cursor;s:{if(!r.eq_s("u")){break s}r.ket=r.cursor;if(!r.in_grouping(l,97,249)){break s}if(!r.slice_from("U")){return false}break a}r.cursor=t;if(!r.eq_s("i")){break i}r.ket=r.cursor;if(!r.in_grouping(l,97,249)){break i}if(!r.slice_from("I")){return false}}r.cursor=u;break e}r.cursor=u;if(r.cursor>=r.limit){break r}r.cursor++}continue}r.cursor=o;break}return true}function v(){k=r.limit;m=r.limit;b=r.limit;var e=r.cursor;r:{e:{var i=r.cursor;i:{if(!r.in_grouping(l,97,249)){break i}a:{var a=r.cursor;s:{if(!r.out_grouping(l,97,249)){break s}o:while(true){u:{if(!r.in_grouping(l,97,249)){break u}break o}if(r.cursor>=r.limit){break s}r.cursor++}break a}r.cursor=a;if(!r.in_grouping(l,97,249)){break i}s:while(true){o:{if(!r.out_grouping(l,97,249)){break o}break s}if(r.cursor>=r.limit){break i}r.cursor++}}break e}r.cursor=i;if(!r.out_grouping(l,97,249)){break r}i:{var s=r.cursor;a:{if(!r.out_grouping(l,97,249)){break a}s:while(true){o:{if(!r.in_grouping(l,97,249)){break o}break s}if(r.cursor>=r.limit){break a}r.cursor++}break i}r.cursor=s;if(!r.in_grouping(l,97,249)){break r}if(r.cursor>=r.limit){break r}r.cursor++}}k=r.cursor}r.cursor=e;var o=r.cursor;r:{e:while(true){i:{if(!r.in_grouping(l,97,249)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(l,97,249)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}m=r.cursor;e:while(true){i:{if(!r.in_grouping(l,97,249)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(l,97,249)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}b=r.cursor}r.cursor=o;return true}function g(){var e;while(true){var a=r.cursor;r:{r.bra=r.cursor;e=r.find_among(i);if(e==0){break r}r.ket=r.cursor;switch(e){case 1:if(!r.slice_from("i")){return false}break;case 2:if(!r.slice_from("u")){return false}break;case 3:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=a;break}return true}function d(){if(!(k<=r.cursor)){return false}return true}function w(){if(!(m<=r.cursor)){return false}return true}function h(){if(!(b<=r.cursor)){return false}return true}function p(){var e;r.ket=r.cursor;if(r.find_among_b(a)==0){return false}r.bra=r.cursor;e=r.find_among_b(s);if(e==0){return false}if(!d()){return false}switch(e){case 1:if(!r.slice_del()){return false}break;case 2:if(!r.slice_from("e")){return false}break}return true}function q(){var e;r.ket=r.cursor;e=r.find_among_b(t);if(e==0){return false}r.bra=r.cursor;switch(e){case 1:if(!h()){return false}if(!r.slice_del()){return false}break;case 2:if(!h()){return false}if(!r.slice_del()){return false}var i=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("ic")){r.cursor=r.limit-i;break r}r.bra=r.cursor;if(!h()){r.cursor=r.limit-i;break r}if(!r.slice_del()){return false}}break;case 3:if(!h()){return false}if(!r.slice_from("log")){return false}break;case 4:if(!h()){return false}if(!r.slice_from("u")){return false}break;case 5:if(!h()){return false}if(!r.slice_from("ente")){return false}break;case 6:if(!d()){return false}if(!r.slice_del()){return false}break;case 7:if(!w()){return false}if(!r.slice_del()){return false}var a=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(o);if(e==0){r.cursor=r.limit-a;break r}r.bra=r.cursor;if(!h()){r.cursor=r.limit-a;break r}if(!r.slice_del()){return false}switch(e){case 
1:r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-a;break r}r.bra=r.cursor;if(!h()){r.cursor=r.limit-a;break r}if(!r.slice_del()){return false}break}}break;case 8:if(!h()){return false}if(!r.slice_del()){return false}var s=r.limit-r.cursor;r:{r.ket=r.cursor;if(r.find_among_b(u)==0){r.cursor=r.limit-s;break r}r.bra=r.cursor;if(!h()){r.cursor=r.limit-s;break r}if(!r.slice_del()){return false}}break;case 9:if(!h()){return false}if(!r.slice_del()){return false}var c=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-c;break r}r.bra=r.cursor;if(!h()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}r.ket=r.cursor;if(!r.eq_s_b("ic")){r.cursor=r.limit-c;break r}r.bra=r.cursor;if(!h()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}}break}return true}function z(){if(r.cursor{for(var r,i=u.cursor;;){var e=u.cursor;r:{switch(u.bra=u.cursor,r=u.find_among(a),u.ket=u.cursor,r){case 1:if(u.slice_from("à"))break;return;case 2:if(u.slice_from("è"))break;return;case 3:if(u.slice_from("ì"))break;return;case 4:if(u.slice_from("ò"))break;return;case 5:if(u.slice_from("ù"))break;return;case 6:if(u.slice_from("qU"))break;return;case 7:if(u.cursor>=u.limit)break r;u.cursor++}continue}u.cursor=e;break}for(u.cursor=i;;){var o=u.cursor;r:{for(;;){var s=u.cursor;i:if(u.in_grouping(m,97,249)){u.bra=u.cursor;e:{var c=u.cursor;if(u.eq_s("u")&&(u.ket=u.cursor,u.in_grouping(m,97,249))){if(u.slice_from("U"))break e;return}if(u.cursor=c,!u.eq_s("i"))break i;if(u.ket=u.cursor,!u.in_grouping(m,97,249))break i;if(!u.slice_from("I"))return}u.cursor=s;break}if(u.cursor=s,u.cursor>=u.limit)break r;u.cursor++}continue}u.cursor=o;break}})(),u.cursor=r,v=u.limit,k=u.limit,g=u.limit,u.cursor);r:{i:{var i=u.cursor;e:if(u.in_grouping(m,97,249)){var e=u.cursor;if(!u.out_grouping(m,97,249)||!u.go_out_grouping(m,97,249)){if(u.cursor=e,!u.in_grouping(m,97,249))break e;if(!u.go_in_grouping(m,97,249))break e}u.cursor++;break i}if(u.cursor=i,!u.eq_s("divan")){if(u.cursor=i,!u.out_grouping(m,97,249))break r;e=u.cursor;if(!u.out_grouping(m,97,249)||!u.go_out_grouping(m,97,249)){if(u.cursor=e,!u.in_grouping(m,97,249))break r;if(u.cursor>=u.limit)break r}u.cursor++}}v=u.cursor}u.cursor=r,r=u.cursor,u.go_out_grouping(m,97,249)&&(u.cursor++,u.go_in_grouping(m,97,249))&&(u.cursor++,k=u.cursor,u.go_out_grouping(m,97,249))&&(u.cursor++,u.go_in_grouping(m,97,249))&&(u.cursor++,g=u.cursor),u.cursor=r,u.limit_backward=u.cursor,u.cursor=u.limit;var r=u.limit-u.cursor,r=((()=>{var r;if(u.ket=u.cursor,0!=u.find_among_b(c)&&(u.bra=u.cursor,0!=(r=u.find_among_b(t)))&&d())switch(r){case 1:if(u.slice_del())break;return;case 2:if(u.slice_from("e"))break}})(),u.cursor=u.limit-r,u.limit-u.cursor),o=u.limit-u.cursor,o=(w()||(u.cursor=u.limit-o,(()=>{if(!(u.cursor{var r=u.limit-u.cursor;if(u.ket=u.cursor,u.in_grouping_b(_,97,242))if(u.bra=u.cursor,d()){if(!u.slice_del())return;if(u.ket=u.cursor,u.eq_s_b("i"))if(u.bra=u.cursor,d()){if(!u.slice_del())return}else u.cursor=u.limit-r;else u.cursor=u.limit-r}else u.cursor=u.limit-r;else u.cursor=u.limit-r;if(r=u.limit-u.cursor,u.ket=u.cursor,u.eq_s_b("h"))if(u.bra=u.cursor,u.in_grouping_b(b,99,103))if(d()){if(!u.slice_del());}else u.cursor=u.limit-r;else u.cursor=u.limit-r;else u.cursor=u.limit-r})(),u.cursor=u.limit-o,u.cursor=u.limit_backward,u.cursor);return(()=>{for(var r;;){var i=u.cursor;r:{switch(u.bra=u.cursor,r=u.find_among(s),u.ket=u.cursor,r){case 1:if(u.slice_from("i"))break;return;case 2:if(u.slice_from("u"))break;return;case 3:if(u.cursor>=u.limit)break 
r;u.cursor++}continue}u.cursor=i;break}})(),u.cursor=r,!0},this.stemWord=function(r){return u.setCurrent(r),this.stem(),u.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/lithuanian-stemmer.js b/sphinx/search/minified-js/lithuanian-stemmer.js
new file mode 100644
index 00000000000..6d48ddac94c
--- /dev/null
+++ b/sphinx/search/minified-js/lithuanian-stemmer.js
@@ -0,0 +1 @@
+var LithuanianStemmer=function(){var e=new BaseStemmer,t=[["a",-1,-1],["ia",0,-1],["eria",1,-1],["osna",0,-1],["iosna",3,-1],["uosna",3,-1],["iuosna",5,-1],["ysna",0,-1],["ėsna",0,-1],["e",-1,-1],["ie",9,-1],["enie",10,-1],["erie",10,-1],["oje",9,-1],["ioje",13,-1],["uje",9,-1],["iuje",15,-1],["yje",9,-1],["enyje",17,-1],["eryje",17,-1],["ėje",9,-1],["ame",9,-1],["iame",21,-1],["sime",9,-1],["ome",9,-1],["ėme",9,-1],["tumėme",25,-1],["ose",9,-1],["iose",27,-1],["uose",27,-1],["iuose",29,-1],["yse",9,-1],["enyse",31,-1],["eryse",31,-1],["ėse",9,-1],["ate",9,-1],["iate",35,-1],["ite",9,-1],["kite",37,-1],["site",37,-1],["ote",9,-1],["tute",9,-1],["ėte",9,-1],["tumėte",42,-1],["i",-1,-1],["ai",44,-1],["iai",45,-1],["eriai",46,-1],["ei",44,-1],["tumei",48,-1],["ki",44,-1],["imi",44,-1],["erimi",51,-1],["umi",44,-1],["iumi",53,-1],["si",44,-1],["asi",55,-1],["iasi",56,-1],["esi",55,-1],["iesi",58,-1],["siesi",59,-1],["isi",55,-1],["aisi",61,-1],["eisi",61,-1],["tumeisi",63,-1],["uisi",61,-1],["osi",55,-1],["ėjosi",66,-1],["uosi",66,-1],["iuosi",68,-1],["siuosi",69,-1],["usi",55,-1],["ausi",71,-1],["čiausi",72,-1],["ąsi",55,-1],["ėsi",55,-1],["ųsi",55,-1],["tųsi",76,-1],["ti",44,-1],["enti",78,-1],["inti",78,-1],["oti",78,-1],["ioti",81,-1],["uoti",81,-1],["iuoti",83,-1],["auti",78,-1],["iauti",85,-1],["yti",78,-1],["ėti",78,-1],["telėti",88,-1],["inėti",88,-1],["terėti",88,-1],["ui",44,-1],["iui",92,-1],["eniui",93,-1],["oj",-1,-1],["ėj",-1,-1],["k",-1,-1],["am",-1,-1],["iam",98,-1],["iem",-1,-1],["im",-1,-1],["sim",101,-1],["om",-1,-1],["tum",-1,-1],["ėm",-1,-1],["tumėm",105,-1],["an",-1,-1],["on",-1,-1],["ion",108,-1],["un",-1,-1],["iun",110,-1],["ėn",-1,-1],["o",-1,-1],["io",113,-1],["enio",114,-1],["ėjo",113,-1],["uo",113,-1],["s",-1,-1],["as",118,-1],["ias",119,-1],["es",118,-1],["ies",121,-1],["is",118,-1],["ais",123,-1],["iais",124,-1],["tumeis",123,-1],["imis",123,-1],["enimis",127,-1],["omis",123,-1],["iomis",129,-1],["umis",123,-1],["ėmis",123,-1],["enis",123,-1],["asis",123,-1],["ysis",123,-1],["ams",118,-1],["iams",136,-1],["iems",118,-1],["ims",118,-1],["enims",139,-1],["erims",139,-1],["oms",118,-1],["ioms",142,-1],["ums",118,-1],["ėms",118,-1],["ens",118,-1],["os",118,-1],["ios",147,-1],["uos",147,-1],["iuos",149,-1],["ers",118,-1],["us",118,-1],["aus",152,-1],["iaus",153,-1],["ius",152,-1],["ys",118,-1],["enys",156,-1],["erys",156,-1],["ąs",118,-1],["iąs",159,-1],["ės",118,-1],["amės",161,-1],["iamės",162,-1],["imės",161,-1],["kimės",164,-1],["simės",164,-1],["omės",161,-1],["ėmės",161,-1],["tumėmės",168,-1],["atės",161,-1],["iatės",170,-1],["sitės",161,-1],["otės",161,-1],["ėtės",161,-1],["tumėtės",174,-1],["įs",118,-1],["ūs",118,-1],["tųs",118,-1],["at",-1,-1],["iat",179,-1],["it",-1,-1],["sit",181,-1],["ot",-1,-1],["ėt",-1,-1],["tumėt",184,-1],["u",-1,-1],["au",186,-1],["iau",187,-1],["čiau",188,-1],["iu",186,-1],["eniu",190,-1],["siu",190,-1],["y",-1,-1],["ą",-1,-1],["ią",194,-1],["ė",-1,-1],["ę",-1,-1],["į",-1,-1],["enį",198,-1],["erį",198,-1],["ų",-1,-1],["ių",201,-1],["erų",201,-1]],a=[["ing",-1,-1],["aj",-1,-1],["iaj",1,-1],["iej",-1,-1],["oj",-1,-1],["ioj",4,-1],["uoj",4,-1],["iuoj",6,-1],["auj",-1,-1],["ąj",-1,-1],["iąj",9,-1],["ėj",-1,-1],["ųj",-1,-1],["iųj",12,-1],["ok",-1,-1],["iok",14,-1],["iuk",-1,-1],["uliuk",16,-1],["učiuk",16,-1],["išk",-1,-1],["iul",-1,-1],["yl",-1,-1],["ėl",-1,-1],["am",-1,-1],["dam",23,-1],["jam",23,-1],["zgan",-1,-1],["ain",-1,-1],["esn",-1,-1],["op",-1,-1],["iop",29,-1],["ias",-1,-1],["ies",-1,-1],["ais",-1,-1],["iais",33,-1],["os",-1,-1],
["ios",35,-1],["uos",35,-1],["iuos",37,-1],["aus",-1,-1],["iaus",39,-1],["ąs",-1,-1],["iąs",41,-1],["ęs",-1,-1],["utėait",-1,-1],["ant",-1,-1],["iant",45,-1],["siant",46,-1],["int",-1,-1],["ot",-1,-1],["uot",49,-1],["iuot",50,-1],["yt",-1,-1],["ėt",-1,-1],["ykšt",-1,-1],["iau",-1,-1],["dav",-1,-1],["sv",-1,-1],["šv",-1,-1],["ykšč",-1,-1],["ę",-1,-1],["ėję",60,-1]],u=[["ojime",-1,7],["ėjime",-1,3],["avime",-1,6],["okate",-1,8],["aite",-1,1],["uote",-1,2],["asius",-1,5],["okatės",-1,8],["aitės",-1,1],["uotės",-1,2],["esiu",-1,4]],s=[["č",-1,1],["dž",-1,2]],o=[["gd",-1,1]],m=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16,0,64,1,0,64,0,0,0,0,0,0,0,4,4],c=0;function n(){var i;if(e.ket=e.cursor,0!=(i=e.find_among_b(s)))switch(e.bra=e.cursor,i){case 1:if(e.slice_from("t"))break;return;case 2:if(e.slice_from("d"))break;return}}this.stem=function(){c=e.limit;var i=e.cursor,s=e.cursor,r=e.cursor,r=(e.eq_s("a")?(e.cursor=r,e.current.length<=6||e.cursor>=e.limit?e.cursor=s:e.cursor++):e.cursor=s,e.go_out_grouping(m,97,371)&&(e.cursor++,e.go_in_grouping(m,97,371))&&(e.cursor++,c=e.cursor),e.cursor=i,e.limit_backward=e.cursor,e.cursor=e.limit,e.limit-e.cursor),s=((()=>{var i;if(e.ket=e.cursor,0!=(i=e.find_among_b(u)))switch(e.bra=e.cursor,i){case 1:if(e.slice_from("aitė"))break;return;case 2:if(e.slice_from("uotė"))break;return;case 3:if(e.slice_from("ėjimas"))break;return;case 4:if(e.slice_from("esys"))break;return;case 5:if(e.slice_from("asys"))break;return;case 6:if(e.slice_from("avimas"))break;return;case 7:if(e.slice_from("ojimas"))break;return;case 8:if(e.slice_from("okatė"))break}})(),e.cursor=e.limit-r,e.limit-e.cursor),r=(e.cursor{for(;;){var i=e.limit-e.cursor;if(!(e.cursor{var r;if(s.ket=s.cursor,0!=(r=s.find_among_b(t)))switch(s.bra=s.cursor,r){case 1:if(s.slice_del())break;return;case 2:var i=s.limit-s.cursor;if(s.eq_s_b("ए")||(s.cursor=s.limit-i,s.eq_s_b("े"))||(s.cursor=s.limit-i,s.slice_del()))break}})(),s.cursor=s.limit-r;;){var i=s.limit-s.cursor,e=s.limit-s.cursor;if((()=>{var r;if(s.ket=s.cursor,0!=(r=s.find_among_b(c)))switch(s.bra=s.cursor,r){case 1:var i=s.limit-s.cursor;if(!s.eq_s_b("यौ")&&(s.cursor=s.limit-i,!s.eq_s_b("छौ")&&(s.cursor=s.limit-i,!s.eq_s_b("नौ"))&&(s.cursor=s.limit-i,!s.eq_s_b("थे"))))return;if(s.slice_del())break;return;case 2:if(!s.eq_s_b("त्र"))return;if(s.slice_del())break}})(),s.cursor=s.limit-e,s.ket=s.cursor,0==s.find_among_b(u)||(s.bra=s.cursor,!s.slice_del())){s.cursor=s.limit-i;break}}return s.cursor=s.limit_backward,!0},this.stemWord=function(r){return s.setCurrent(r),this.stem(),s.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/norwegian-stemmer.js b/sphinx/search/minified-js/norwegian-stemmer.js
index c8ec76cc1ca..5cf580e3ed8 100644
--- a/sphinx/search/minified-js/norwegian-stemmer.js
+++ b/sphinx/search/minified-js/norwegian-stemmer.js
@@ -1 +1 @@
-NorwegianStemmer=function(){var r=new BaseStemmer;var e=[["a",-1,1],["e",-1,1],["ede",1,1],["ande",1,1],["ende",1,1],["ane",1,1],["ene",1,1],["hetene",6,1],["erte",1,3],["en",-1,1],["heten",9,1],["ar",-1,1],["er",-1,1],["heter",12,1],["s",-1,2],["as",14,1],["es",14,1],["edes",16,1],["endes",16,1],["enes",16,1],["hetenes",19,1],["ens",14,1],["hetens",21,1],["ers",14,1],["ets",14,1],["et",-1,1],["het",25,1],["ert",-1,3],["ast",-1,1]];var i=[["dt",-1,-1],["vt",-1,-1]];var t=[["leg",-1,1],["eleg",0,1],["ig",-1,1],["eig",2,1],["lig",2,1],["elig",4,1],["els",-1,1],["lov",-1,1],["elov",7,1],["slov",7,1],["hetslov",9,1]];var a=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,48,0,128];var s=[119,125,149,1];var u=0;var c=0;function l(){c=r.limit;var e=r.cursor;{var i=r.cursor+3;if(i>r.limit){return false}r.cursor=i}u=r.cursor;r.cursor=e;r:while(true){var t=r.cursor;e:{if(!r.in_grouping(a,97,248)){break e}r.cursor=t;break r}r.cursor=t;if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(a,97,248)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}c=r.cursor;r:{if(!(ct.limit||(t.cursor=i,m=t.cursor,t.cursor=e,t.go_out_grouping(u,97,248)&&(t.cursor++,t.go_in_grouping(u,97,248))&&(t.cursor++,n=t.cursor,m<=n||(n=m))),t.cursor=r,t.limit_backward=t.cursor,t.cursor=t.limit,t.limit-t.cursor),e=((()=>{var r;if(!(t.cursor=r.limit){break e}r.cursor++}if(!r.slice_from("Y")){return false}l=true;continue}r.cursor=s;break}}r.cursor=i;n=r.limit;o=r.limit;var u=r.cursor;r:{e:while(true){i:{if(!r.in_grouping(c,97,121)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(c,97,121)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}n=r.cursor;e:while(true){i:{if(!r.in_grouping(c,97,121)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(c,97,121)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}o=r.cursor}r.cursor=u;r.limit_backward=r.cursor;r.cursor=r.limit;var t=r.limit-r.cursor;k();r.cursor=r.limit-t;var f=r.limit-r.cursor;v();r.cursor=r.limit-f;var b=r.limit-r.cursor;g();r.cursor=r.limit-b;var m=r.limit-r.cursor;d();r.cursor=r.limit-m;var _=r.limit-r.cursor;w();r.cursor=r.limit-_;var z=r.limit-r.cursor;h();r.cursor=r.limit-z;var y=r.limit-r.cursor;p();r.cursor=r.limit-y;var Y=r.limit-r.cursor;q();r.cursor=r.limit-Y;r.cursor=r.limit_backward;var C=r.cursor;r:{if(!l){break r}while(true){var S=r.cursor;e:{i:while(true){var B=r.cursor;s:{r.bra=r.cursor;if(!r.eq_s("Y")){break s}r.ket=r.cursor;r.cursor=B;break i}r.cursor=B;if(r.cursor>=r.limit){break e}r.cursor++}if(!r.slice_from("y")){return false}continue}r.cursor=S;break}}r.cursor=C;return true};this["stemWord"]=function(e){r.setCurrent(e);this.stem();return r.getCurrent()}};
\ No newline at end of file
+var PorterStemmer=function(){var u=new BaseStemmer,t=[["s",-1,3],["ies",0,2],["sses",0,1],["ss",0,-1]],a=[["",-1,3],["bb",0,2],["dd",0,2],["ff",0,2],["gg",0,2],["bl",0,1],["mm",0,2],["nn",0,2],["pp",0,2],["rr",0,2],["at",0,1],["tt",0,2],["iz",0,1]],n=[["ed",-1,2],["eed",0,1],["ing",-1,2]],l=[["anci",-1,3],["enci",-1,2],["abli",-1,4],["eli",-1,6],["alli",-1,9],["ousli",-1,11],["entli",-1,5],["aliti",-1,9],["biliti",-1,13],["iviti",-1,12],["tional",-1,1],["ational",10,8],["alism",-1,9],["ation",-1,8],["ization",13,7],["izer",-1,7],["ator",-1,8],["iveness",-1,12],["fulness",-1,10],["ousness",-1,11]],f=[["icate",-1,2],["ative",-1,3],["alize",-1,1],["iciti",-1,2],["ical",-1,2],["ful",-1,3],["ness",-1,3]],_=[["ic",-1,1],["ance",-1,1],["ence",-1,1],["able",-1,1],["ible",-1,1],["ate",-1,1],["ive",-1,1],["ize",-1,1],["iti",-1,1],["al",-1,1],["ism",-1,1],["ion",-1,2],["er",-1,1],["ous",-1,1],["ant",-1,1],["ent",-1,1],["ment",15,1],["ement",16,1],["ou",-1,1]],m=[17,65,16,1],r=[1,17,65,208,1],b=!1,k=0,g=0;function d(){return u.out_grouping_b(r,89,121)&&u.in_grouping_b(m,97,121)&&!!u.out_grouping_b(m,97,121)}function v(){return g<=u.cursor}function p(){return k<=u.cursor}this.stem=function(){b=!1;var r=u.cursor;if(u.bra=u.cursor,u.eq_s("y")){if(u.ket=u.cursor,!u.slice_from("Y"))return!1;b=!0}u.cursor=r;for(r=u.cursor;;){var i=u.cursor;r:{for(;;){var e=u.cursor;if(u.in_grouping(m,97,121)&&(u.bra=u.cursor,u.eq_s("y"))){u.ket=u.cursor,u.cursor=e;break}if(u.cursor=e,u.cursor>=u.limit)break r;u.cursor++}if(!u.slice_from("Y"))return!1;b=!0;continue}u.cursor=i;break}u.cursor=r,g=u.limit,k=u.limit;var r=u.cursor,r=(u.go_out_grouping(m,97,121)&&(u.cursor++,u.go_in_grouping(m,97,121))&&(u.cursor++,g=u.cursor,u.go_out_grouping(m,97,121))&&(u.cursor++,u.go_in_grouping(m,97,121))&&(u.cursor++,k=u.cursor),u.cursor=r,u.limit_backward=u.cursor,u.cursor=u.limit,u.limit-u.cursor),r=((()=>{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(t)))switch(u.bra=u.cursor,r){case 1:if(u.slice_from("ss"))break;return;case 2:if(u.slice_from("i"))break;return;case 3:if(u.slice_del())break}})(),u.cursor=u.limit-r,u.limit-u.cursor),r=((()=>{if(u.ket=u.cursor,0!=(i=u.find_among_b(n)))switch(u.bra=u.cursor,i){case 1:if(!v())return;if(u.slice_from("ee"))break;return;case 2:var r=u.limit-u.cursor;if(!u.go_out_grouping_b(m,97,121))return;if(u.cursor--,u.cursor=u.limit-r,!u.slice_del())return;var r=u.limit-u.cursor,i=u.find_among_b(a);switch(u.cursor=u.limit-r,i){case 1:var e=u.cursor;u.insert(u.cursor,u.cursor,"e"),u.cursor=e;break;case 2:if(u.ket=u.cursor,u.cursor<=u.limit_backward)return;if(u.cursor--,u.bra=u.cursor,u.slice_del())break;return;case 3:if(u.cursor!=g)return;e=u.limit-u.cursor;if(!d())return;u.cursor=u.limit-e;e=u.cursor;u.insert(u.cursor,u.cursor,"e"),u.cursor=e}}})(),u.cursor=u.limit-r,u.limit-u.cursor),s=(u.ket=u.cursor,s=u.limit-u.cursor,(u.eq_s_b("y")||(u.cursor=u.limit-s,u.eq_s_b("Y")))&&(u.bra=u.cursor,u.go_out_grouping_b(m,97,121))&&(u.cursor--,u.slice_from("i")),u.cursor=u.limit-r,u.limit-u.cursor),r=((()=>{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(l))&&(u.bra=u.cursor,v()))switch(r){case 1:if(u.slice_from("tion"))break;return;case 2:if(u.slice_from("ence"))break;return;case 3:if(u.slice_from("ance"))break;return;case 4:if(u.slice_from("able"))break;return;case 5:if(u.slice_from("ent"))break;return;case 6:if(u.slice_from("e"))break;return;case 7:if(u.slice_from("ize"))break;return;case 8:if(u.slice_from("ate"))break;return;case 9:if(u.slice_from("al"))break;return;case 10:if(u.slice_from("ful"))break;return;case 
11:if(u.slice_from("ous"))break;return;case 12:if(u.slice_from("ive"))break;return;case 13:if(u.slice_from("ble"))break}})(),u.cursor=u.limit-s,u.limit-u.cursor),s=((()=>{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(f))&&(u.bra=u.cursor,v()))switch(r){case 1:if(u.slice_from("al"))break;return;case 2:if(u.slice_from("ic"))break;return;case 3:if(u.slice_del())break}})(),u.cursor=u.limit-r,u.limit-u.cursor),r=((()=>{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(_))&&(u.bra=u.cursor,p()))switch(r){case 1:if(u.slice_del())break;return;case 2:var i=u.limit-u.cursor;if(!u.eq_s_b("s")&&(u.cursor=u.limit-i,!u.eq_s_b("t")))return;if(u.slice_del())break}})(),u.cursor=u.limit-s,u.limit-u.cursor),s=((()=>{if(u.ket=u.cursor,u.eq_s_b("e")){if(u.bra=u.cursor,!p()){if(!v())return;var r=u.limit-u.cursor;if(d())return;u.cursor=u.limit-r}u.slice_del()}})(),u.cursor=u.limit-r,u.limit-u.cursor),r=(u.ket=u.cursor,u.eq_s_b("l")&&(u.bra=u.cursor,p())&&u.eq_s_b("l")&&u.slice_del(),u.cursor=u.limit-s,u.cursor=u.limit_backward,u.cursor);if(b)for(;;){var c=u.cursor;r:{for(;;){var o=u.cursor;if(u.bra=u.cursor,u.eq_s("Y")){u.ket=u.cursor,u.cursor=o;break}if(u.cursor=o,u.cursor>=u.limit)break r;u.cursor++}if(u.slice_from("y"))continue;return!1}u.cursor=c;break}return u.cursor=r,!0},this.stemWord=function(r){return u.setCurrent(r),this.stem(),u.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/portuguese-stemmer.js b/sphinx/search/minified-js/portuguese-stemmer.js
index 022d860e6b3..9cc42155d49 100644
--- a/sphinx/search/minified-js/portuguese-stemmer.js
+++ b/sphinx/search/minified-js/portuguese-stemmer.js
@@ -1 +1 @@
-PortugueseStemmer=function(){var r=new BaseStemmer;var e=[["",-1,3],["ã",0,1],["õ",0,2]];var i=[["",-1,3],["a~",0,1],["o~",0,2]];var s=[["ic",-1,-1],["ad",-1,-1],["os",-1,-1],["iv",-1,1]];var a=[["ante",-1,1],["avel",-1,1],["ível",-1,1]];var u=[["ic",-1,1],["abil",-1,1],["iv",-1,1]];var o=[["ica",-1,1],["ância",-1,1],["ência",-1,4],["logia",-1,2],["ira",-1,9],["adora",-1,1],["osa",-1,1],["ista",-1,1],["iva",-1,8],["eza",-1,1],["idade",-1,7],["ante",-1,1],["mente",-1,6],["amente",12,5],["ável",-1,1],["ível",-1,1],["ico",-1,1],["ismo",-1,1],["oso",-1,1],["amento",-1,1],["imento",-1,1],["ivo",-1,8],["aça~o",-1,1],["uça~o",-1,3],["ador",-1,1],["icas",-1,1],["ências",-1,4],["logias",-1,2],["iras",-1,9],["adoras",-1,1],["osas",-1,1],["istas",-1,1],["ivas",-1,8],["ezas",-1,1],["idades",-1,7],["adores",-1,1],["antes",-1,1],["aço~es",-1,1],["uço~es",-1,3],["icos",-1,1],["ismos",-1,1],["osos",-1,1],["amentos",-1,1],["imentos",-1,1],["ivos",-1,8]];var t=[["ada",-1,1],["ida",-1,1],["ia",-1,1],["aria",2,1],["eria",2,1],["iria",2,1],["ara",-1,1],["era",-1,1],["ira",-1,1],["ava",-1,1],["asse",-1,1],["esse",-1,1],["isse",-1,1],["aste",-1,1],["este",-1,1],["iste",-1,1],["ei",-1,1],["arei",16,1],["erei",16,1],["irei",16,1],["am",-1,1],["iam",20,1],["ariam",21,1],["eriam",21,1],["iriam",21,1],["aram",20,1],["eram",20,1],["iram",20,1],["avam",20,1],["em",-1,1],["arem",29,1],["erem",29,1],["irem",29,1],["assem",29,1],["essem",29,1],["issem",29,1],["ado",-1,1],["ido",-1,1],["ando",-1,1],["endo",-1,1],["indo",-1,1],["ara~o",-1,1],["era~o",-1,1],["ira~o",-1,1],["ar",-1,1],["er",-1,1],["ir",-1,1],["as",-1,1],["adas",47,1],["idas",47,1],["ias",47,1],["arias",50,1],["erias",50,1],["irias",50,1],["aras",47,1],["eras",47,1],["iras",47,1],["avas",47,1],["es",-1,1],["ardes",58,1],["erdes",58,1],["irdes",58,1],["ares",58,1],["eres",58,1],["ires",58,1],["asses",58,1],["esses",58,1],["isses",58,1],["astes",58,1],["estes",58,1],["istes",58,1],["is",-1,1],["ais",71,1],["eis",71,1],["areis",73,1],["ereis",73,1],["ireis",73,1],["áreis",73,1],["éreis",73,1],["íreis",73,1],["ásseis",73,1],["ésseis",73,1],["ísseis",73,1],["áveis",73,1],["íeis",73,1],["aríeis",84,1],["eríeis",84,1],["iríeis",84,1],["ados",-1,1],["idos",-1,1],["amos",-1,1],["áramos",90,1],["éramos",90,1],["íramos",90,1],["ávamos",90,1],["íamos",90,1],["aríamos",95,1],["eríamos",95,1],["iríamos",95,1],["emos",-1,1],["aremos",99,1],["eremos",99,1],["iremos",99,1],["ássemos",99,1],["êssemos",99,1],["íssemos",99,1],["imos",-1,1],["armos",-1,1],["ermos",-1,1],["irmos",-1,1],["ámos",-1,1],["arás",-1,1],["erás",-1,1],["irás",-1,1],["eu",-1,1],["iu",-1,1],["ou",-1,1],["ará",-1,1],["erá",-1,1],["irá",-1,1]];var c=[["a",-1,1],["i",-1,1],["o",-1,1],["os",-1,1],["á",-1,1],["í",-1,1],["ó",-1,1]];var f=[["e",-1,1],["ç",-1,2],["é",-1,1],["ê",-1,1]];var l=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,3,19,12,2];var n=0;var m=0;var b=0;function k(){var i;while(true){var s=r.cursor;r:{r.bra=r.cursor;i=r.find_among(e);if(i==0){break r}r.ket=r.cursor;switch(i){case 1:if(!r.slice_from("a~")){return false}break;case 2:if(!r.slice_from("o~")){return false}break;case 3:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=s;break}return true}function _(){b=r.limit;m=r.limit;n=r.limit;var e=r.cursor;r:{e:{var i=r.cursor;i:{if(!r.in_grouping(l,97,250)){break i}s:{var s=r.cursor;a:{if(!r.out_grouping(l,97,250)){break a}u:while(true){o:{if(!r.in_grouping(l,97,250)){break o}break u}if(r.cursor>=r.limit){break a}r.cursor++}break s}r.cursor=s;if(!r.in_grouping(l,97,250)){break 
i}a:while(true){u:{if(!r.out_grouping(l,97,250)){break u}break a}if(r.cursor>=r.limit){break i}r.cursor++}}break e}r.cursor=i;if(!r.out_grouping(l,97,250)){break r}i:{var a=r.cursor;s:{if(!r.out_grouping(l,97,250)){break s}a:while(true){u:{if(!r.in_grouping(l,97,250)){break u}break a}if(r.cursor>=r.limit){break s}r.cursor++}break i}r.cursor=a;if(!r.in_grouping(l,97,250)){break r}if(r.cursor>=r.limit){break r}r.cursor++}}b=r.cursor}r.cursor=e;var u=r.cursor;r:{e:while(true){i:{if(!r.in_grouping(l,97,250)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(l,97,250)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}m=r.cursor;e:while(true){i:{if(!r.in_grouping(l,97,250)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(l,97,250)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}n=r.cursor}r.cursor=u;return true}function v(){var e;while(true){var s=r.cursor;r:{r.bra=r.cursor;e=r.find_among(i);if(e==0){break r}r.ket=r.cursor;switch(e){case 1:if(!r.slice_from("ã")){return false}break;case 2:if(!r.slice_from("õ")){return false}break;case 3:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=s;break}return true}function d(){if(!(b<=r.cursor)){return false}return true}function g(){if(!(m<=r.cursor)){return false}return true}function w(){if(!(n<=r.cursor)){return false}return true}function h(){var e;r.ket=r.cursor;e=r.find_among_b(o);if(e==0){return false}r.bra=r.cursor;switch(e){case 1:if(!w()){return false}if(!r.slice_del()){return false}break;case 2:if(!w()){return false}if(!r.slice_from("log")){return false}break;case 3:if(!w()){return false}if(!r.slice_from("u")){return false}break;case 4:if(!w()){return false}if(!r.slice_from("ente")){return false}break;case 5:if(!g()){return false}if(!r.slice_del()){return false}var i=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(s);if(e==0){r.cursor=r.limit-i;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-i;break r}if(!r.slice_del()){return false}switch(e){case 1:r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-i;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-i;break r}if(!r.slice_del()){return false}break}}break;case 6:if(!w()){return false}if(!r.slice_del()){return false}var t=r.limit-r.cursor;r:{r.ket=r.cursor;if(r.find_among_b(a)==0){r.cursor=r.limit-t;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-t;break r}if(!r.slice_del()){return false}}break;case 7:if(!w()){return false}if(!r.slice_del()){return false}var c=r.limit-r.cursor;r:{r.ket=r.cursor;if(r.find_among_b(u)==0){r.cursor=r.limit-c;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}}break;case 8:if(!w()){return false}if(!r.slice_del()){return false}var f=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-f;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-f;break r}if(!r.slice_del()){return false}}break;case 9:if(!d()){return false}if(!r.eq_s_b("e")){return false}if(!r.slice_from("ir")){return false}break}return true}function p(){if(r.cursor{for(var r;;){var i=u.cursor;r:{switch(u.bra=u.cursor,r=u.find_among(c),u.ket=u.cursor,r){case 1:if(u.slice_from("a~"))break;return;case 2:if(u.slice_from("o~"))break;return;case 3:if(u.cursor>=u.limit)break r;u.cursor++}continue}u.cursor=i;break}})(),u.cursor=r,k=u.limit,d=u.limit,b=u.limit,u.cursor);r:{i:{var i=u.cursor;s:if(u.in_grouping(_,97,250)){var s=u.cursor;if(!u.out_grouping(_,97,250)||!u.go_out_grouping(_,97,250)){if(u.cursor=s,!u.in_grouping(_,97,250))break 
s;if(!u.go_in_grouping(_,97,250))break s}u.cursor++;break i}if(u.cursor=i,!u.out_grouping(_,97,250))break r;s=u.cursor;if(u.out_grouping(_,97,250)&&u.go_out_grouping(_,97,250));else{if(u.cursor=s,!u.in_grouping(_,97,250))break r;if(u.cursor>=u.limit)break r}u.cursor++}k=u.cursor}u.cursor=r,r=u.cursor,u.go_out_grouping(_,97,250)&&(u.cursor++,u.go_in_grouping(_,97,250))&&(u.cursor++,d=u.cursor,u.go_out_grouping(_,97,250))&&(u.cursor++,u.go_in_grouping(_,97,250))&&(u.cursor++,b=u.cursor),u.cursor=r,u.limit_backward=u.cursor,u.cursor=u.limit;r=u.limit-u.cursor;r:{var e=u.limit-u.cursor,o=u.limit-u.cursor,a=u.limit-u.cursor;if(p()||(u.cursor=u.limit-a,(()=>{if(!(u.cursor{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(f)))switch(u.bra=u.cursor,r){case 1:if(!g())return;if(!u.slice_del())return;u.ket=u.cursor;r:{var i=u.limit-u.cursor;if(u.eq_s_b("u")){u.bra=u.cursor;var s=u.limit-u.cursor;if(u.eq_s_b("g")){u.cursor=u.limit-s;break r}}if(u.cursor=u.limit-i,!u.eq_s_b("i"))return;u.bra=u.cursor;s=u.limit-u.cursor;if(!u.eq_s_b("c"))return;u.cursor=u.limit-s}if(!g())return;if(u.slice_del())break;return;case 2:if(u.slice_from("c"))break}})(),u.cursor=u.limit-r,u.cursor=u.limit_backward,r=u.cursor;return(()=>{for(var r;;){var i=u.cursor;r:{switch(u.bra=u.cursor,r=u.find_among(t),u.ket=u.cursor,r){case 1:if(u.slice_from("ã"))break;return;case 2:if(u.slice_from("õ"))break;return;case 3:if(u.cursor>=u.limit)break r;u.cursor++}continue}u.cursor=i;break}})(),u.cursor=r,!0},this.stemWord=function(r){return u.setCurrent(r),this.stem(),u.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/romanian-stemmer.js b/sphinx/search/minified-js/romanian-stemmer.js
index 01c54d0185c..aefb071550d 100644
--- a/sphinx/search/minified-js/romanian-stemmer.js
+++ b/sphinx/search/minified-js/romanian-stemmer.js
@@ -1 +1 @@
-RomanianStemmer=function(){var r=new BaseStemmer;var i=[["",-1,3],["I",0,1],["U",0,2]];var e=[["ea",-1,3],["aţia",-1,7],["aua",-1,2],["iua",-1,4],["aţie",-1,7],["ele",-1,3],["ile",-1,5],["iile",6,4],["iei",-1,4],["atei",-1,6],["ii",-1,4],["ului",-1,1],["ul",-1,1],["elor",-1,3],["ilor",-1,4],["iilor",14,4]];var a=[["icala",-1,4],["iciva",-1,4],["ativa",-1,5],["itiva",-1,6],["icale",-1,4],["aţiune",-1,5],["iţiune",-1,6],["atoare",-1,5],["itoare",-1,6],["ătoare",-1,5],["icitate",-1,4],["abilitate",-1,1],["ibilitate",-1,2],["ivitate",-1,3],["icive",-1,4],["ative",-1,5],["itive",-1,6],["icali",-1,4],["atori",-1,5],["icatori",18,4],["itori",-1,6],["ători",-1,5],["icitati",-1,4],["abilitati",-1,1],["ivitati",-1,3],["icivi",-1,4],["ativi",-1,5],["itivi",-1,6],["icităi",-1,4],["abilităi",-1,1],["ivităi",-1,3],["icităţi",-1,4],["abilităţi",-1,1],["ivităţi",-1,3],["ical",-1,4],["ator",-1,5],["icator",35,4],["itor",-1,6],["ător",-1,5],["iciv",-1,4],["ativ",-1,5],["itiv",-1,6],["icală",-1,4],["icivă",-1,4],["ativă",-1,5],["itivă",-1,6]];var t=[["ica",-1,1],["abila",-1,1],["ibila",-1,1],["oasa",-1,1],["ata",-1,1],["ita",-1,1],["anta",-1,1],["ista",-1,3],["uta",-1,1],["iva",-1,1],["ic",-1,1],["ice",-1,1],["abile",-1,1],["ibile",-1,1],["isme",-1,3],["iune",-1,2],["oase",-1,1],["ate",-1,1],["itate",17,1],["ite",-1,1],["ante",-1,1],["iste",-1,3],["ute",-1,1],["ive",-1,1],["ici",-1,1],["abili",-1,1],["ibili",-1,1],["iuni",-1,2],["atori",-1,1],["osi",-1,1],["ati",-1,1],["itati",30,1],["iti",-1,1],["anti",-1,1],["isti",-1,3],["uti",-1,1],["işti",-1,3],["ivi",-1,1],["ităi",-1,1],["oşi",-1,1],["ităţi",-1,1],["abil",-1,1],["ibil",-1,1],["ism",-1,3],["ator",-1,1],["os",-1,1],["at",-1,1],["it",-1,1],["ant",-1,1],["ist",-1,3],["ut",-1,1],["iv",-1,1],["ică",-1,1],["abilă",-1,1],["ibilă",-1,1],["oasă",-1,1],["ată",-1,1],["ită",-1,1],["antă",-1,1],["istă",-1,3],["ută",-1,1],["ivă",-1,1]];var s=[["ea",-1,1],["ia",-1,1],["esc",-1,1],["ăsc",-1,1],["ind",-1,1],["ând",-1,1],["are",-1,1],["ere",-1,1],["ire",-1,1],["âre",-1,1],["se",-1,2],["ase",10,1],["sese",10,2],["ise",10,1],["use",10,1],["âse",10,1],["eşte",-1,1],["ăşte",-1,1],["eze",-1,1],["ai",-1,1],["eai",19,1],["iai",19,1],["sei",-1,2],["eşti",-1,1],["ăşti",-1,1],["ui",-1,1],["ezi",-1,1],["âi",-1,1],["aşi",-1,1],["seşi",-1,2],["aseşi",29,1],["seseşi",29,2],["iseşi",29,1],["useşi",29,1],["âseşi",29,1],["işi",-1,1],["uşi",-1,1],["âşi",-1,1],["aţi",-1,2],["eaţi",38,1],["iaţi",38,1],["eţi",-1,2],["iţi",-1,2],["âţi",-1,2],["arăţi",-1,1],["serăţi",-1,2],["aserăţi",45,1],["seserăţi",45,2],["iserăţi",45,1],["userăţi",45,1],["âserăţi",45,1],["irăţi",-1,1],["urăţi",-1,1],["ârăţi",-1,1],["am",-1,1],["eam",54,1],["iam",54,1],["em",-1,2],["asem",57,1],["sesem",57,2],["isem",57,1],["usem",57,1],["âsem",57,1],["im",-1,2],["âm",-1,2],["ăm",-1,2],["arăm",65,1],["serăm",65,2],["aserăm",67,1],["seserăm",67,2],["iserăm",67,1],["userăm",67,1],["âserăm",67,1],["irăm",65,1],["urăm",65,1],["ârăm",65,1],["au",-1,1],["eau",76,1],["iau",76,1],["indu",-1,1],["ându",-1,1],["ez",-1,1],["ească",-1,1],["ară",-1,1],["seră",-1,2],["aseră",84,1],["seseră",84,2],["iseră",84,1],["useră",84,1],["âseră",84,1],["iră",-1,1],["ură",-1,1],["âră",-1,1],["ează",-1,1]];var u=[["a",-1,1],["e",-1,1],["ie",1,1],["i",-1,1],["ă",-1,1]];var c=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,2,32,0,0,4];var o=false;var f=0;var l=0;var n=0;function b(){while(true){var i=r.cursor;r:{i:while(true){var e=r.cursor;e:{if(!r.in_grouping(c,97,259)){break e}r.bra=r.cursor;a:{var a=r.cursor;t:{if(!r.eq_s("u")){break 
t}r.ket=r.cursor;if(!r.in_grouping(c,97,259)){break t}if(!r.slice_from("U")){return false}break a}r.cursor=a;if(!r.eq_s("i")){break e}r.ket=r.cursor;if(!r.in_grouping(c,97,259)){break e}if(!r.slice_from("I")){return false}}r.cursor=e;break i}r.cursor=e;if(r.cursor>=r.limit){break r}r.cursor++}continue}r.cursor=i;break}return true}function m(){n=r.limit;l=r.limit;f=r.limit;var i=r.cursor;r:{i:{var e=r.cursor;e:{if(!r.in_grouping(c,97,259)){break e}a:{var a=r.cursor;t:{if(!r.out_grouping(c,97,259)){break t}s:while(true){u:{if(!r.in_grouping(c,97,259)){break u}break s}if(r.cursor>=r.limit){break t}r.cursor++}break a}r.cursor=a;if(!r.in_grouping(c,97,259)){break e}t:while(true){s:{if(!r.out_grouping(c,97,259)){break s}break t}if(r.cursor>=r.limit){break e}r.cursor++}}break i}r.cursor=e;if(!r.out_grouping(c,97,259)){break r}e:{var t=r.cursor;a:{if(!r.out_grouping(c,97,259)){break a}t:while(true){s:{if(!r.in_grouping(c,97,259)){break s}break t}if(r.cursor>=r.limit){break a}r.cursor++}break e}r.cursor=t;if(!r.in_grouping(c,97,259)){break r}if(r.cursor>=r.limit){break r}r.cursor++}}n=r.cursor}r.cursor=i;var s=r.cursor;r:{i:while(true){e:{if(!r.in_grouping(c,97,259)){break e}break i}if(r.cursor>=r.limit){break r}r.cursor++}i:while(true){e:{if(!r.out_grouping(c,97,259)){break e}break i}if(r.cursor>=r.limit){break r}r.cursor++}l=r.cursor;i:while(true){e:{if(!r.in_grouping(c,97,259)){break e}break i}if(r.cursor>=r.limit){break r}r.cursor++}i:while(true){e:{if(!r.out_grouping(c,97,259)){break e}break i}if(r.cursor>=r.limit){break r}r.cursor++}f=r.cursor}r.cursor=s;return true}function k(){var e;while(true){var a=r.cursor;r:{r.bra=r.cursor;e=r.find_among(i);if(e==0){break r}r.ket=r.cursor;switch(e){case 1:if(!r.slice_from("i")){return false}break;case 2:if(!r.slice_from("u")){return false}break;case 3:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=a;break}return true}function _(){if(!(n<=r.cursor)){return false}return true}function v(){if(!(l<=r.cursor)){return false}return true}function g(){if(!(f<=r.cursor)){return false}return true}function w(){var i;r.ket=r.cursor;i=r.find_among_b(e);if(i==0){return false}r.bra=r.cursor;if(!v()){return false}switch(i){case 1:if(!r.slice_del()){return false}break;case 2:if(!r.slice_from("a")){return false}break;case 3:if(!r.slice_from("e")){return false}break;case 4:if(!r.slice_from("i")){return false}break;case 5:{var a=r.limit-r.cursor;r:{if(!r.eq_s_b("ab")){break r}return false}r.cursor=r.limit-a}if(!r.slice_from("i")){return false}break;case 6:if(!r.slice_from("at")){return false}break;case 7:if(!r.slice_from("aţi")){return false}break}return true}function d(){var i;var e=r.limit-r.cursor;r.ket=r.cursor;i=r.find_among_b(a);if(i==0){return false}r.bra=r.cursor;if(!v()){return false}switch(i){case 1:if(!r.slice_from("abil")){return false}break;case 2:if(!r.slice_from("ibil")){return false}break;case 3:if(!r.slice_from("iv")){return false}break;case 4:if(!r.slice_from("ic")){return false}break;case 5:if(!r.slice_from("at")){return false}break;case 6:if(!r.slice_from("it")){return false}break}o=true;r.cursor=r.limit-e;return true}function h(){var i;o=false;while(true){var e=r.limit-r.cursor;r:{if(!d()){break r}continue}r.cursor=r.limit-e;break}r.ket=r.cursor;i=r.find_among_b(t);if(i==0){return false}r.bra=r.cursor;if(!g()){return false}switch(i){case 1:if(!r.slice_del()){return false}break;case 2:if(!r.eq_s_b("ţ")){return false}r.bra=r.cursor;if(!r.slice_from("t")){return false}break;case 3:if(!r.slice_from("ist")){return false}break}o=true;return 
true}function p(){var i;if(r.cursor{var i,r=s.limit-s.cursor;if(s.ket=s.cursor,0!=(i=s.find_among_b(e))&&(s.bra=s.cursor,_())){switch(i){case 1:if(s.slice_from("abil"))break;return;case 2:if(s.slice_from("ibil"))break;return;case 3:if(s.slice_from("iv"))break;return;case 4:if(s.slice_from("ic"))break;return;case 5:if(s.slice_from("at"))break;return;case 6:if(s.slice_from("it"))break;return}return l=!0,s.cursor=s.limit-r,1}})()){s.cursor=s.limit-r;break}}if(s.ket=s.cursor,0!=(i=s.find_among_b(a))&&(s.bra=s.cursor,m<=s.cursor)){switch(i){case 1:if(s.slice_del())break;return;case 2:if(!s.eq_s_b("ț"))return;if(s.bra=s.cursor,s.slice_from("t"))break;return;case 3:if(s.slice_from("ist"))break;return}l=!0}}function g(){s.ket=s.cursor,0!=s.find_among_b(i)&&(s.bra=s.cursor,b<=s.cursor)&&s.slice_del()}this.stem=function(){(()=>{for(var i,r=s.cursor;;){var e=s.cursor;i:{for(;;){var a=s.cursor;if(s.bra=s.cursor,0!=(i=s.find_among(t))){switch(s.ket=s.cursor,i){case 1:if(s.slice_from("ș"))break;return;case 2:if(s.slice_from("ț"))break;return}s.cursor=a;break}if(s.cursor=a,s.cursor>=s.limit)break i;s.cursor++}continue}s.cursor=e;break}s.cursor=r})();var i=s.cursor,i=((()=>{for(;;){var i=s.cursor;i:{for(;;){var r=s.cursor;r:if(s.in_grouping(n,97,259)){s.bra=s.cursor;e:{var e=s.cursor;if(s.eq_s("u")&&(s.ket=s.cursor,s.in_grouping(n,97,259))){if(s.slice_from("U"))break e;return}if(s.cursor=e,!s.eq_s("i"))break r;if(s.ket=s.cursor,!s.in_grouping(n,97,259))break r;if(!s.slice_from("I"))return}s.cursor=r;break}if(s.cursor=r,s.cursor>=s.limit)break i;s.cursor++}continue}s.cursor=i;break}})(),s.cursor=i,b=s.limit,f=s.limit,m=s.limit,s.cursor);i:{r:{var r=s.cursor;e:if(s.in_grouping(n,97,259)){var e=s.cursor;if(!s.out_grouping(n,97,259)||!s.go_out_grouping(n,97,259)){if(s.cursor=e,!s.in_grouping(n,97,259))break e;if(!s.go_in_grouping(n,97,259))break e}s.cursor++;break r}if(s.cursor=r,!s.out_grouping(n,97,259))break i;e=s.cursor;if(s.out_grouping(n,97,259)&&s.go_out_grouping(n,97,259));else{if(s.cursor=e,!s.in_grouping(n,97,259))break i;if(s.cursor>=s.limit)break i}s.cursor++}b=s.cursor}s.cursor=i,i=s.cursor,s.go_out_grouping(n,97,259)&&(s.cursor++,s.go_in_grouping(n,97,259))&&(s.cursor++,f=s.cursor,s.go_out_grouping(n,97,259))&&(s.cursor++,s.go_in_grouping(n,97,259))&&(s.cursor++,m=s.cursor),s.cursor=i,s.limit_backward=s.cursor,s.cursor=s.limit;var i=s.limit-s.cursor,i=((()=>{var i;if(s.ket=s.cursor,0!=(i=s.find_among_b(o))&&(s.bra=s.cursor,_()))switch(i){case 1:if(s.slice_del())break;return;case 2:if(s.slice_from("a"))break;return;case 3:if(s.slice_from("e"))break;return;case 4:if(s.slice_from("i"))break;return;case 5:var r=s.limit-s.cursor;if(s.eq_s_b("ab"))return;if(s.cursor=s.limit-r,s.slice_from("i"))break;return;case 6:if(s.slice_from("at"))break;return;case 7:if(s.slice_from("ați"))break}})(),s.cursor=s.limit-i,s.limit-s.cursor),i=(k(),s.cursor=s.limit-i,s.limit-s.cursor),a=s.limit-s.cursor,a=(l||(s.cursor=s.limit-a,(()=>{var i;if(!(s.cursor{for(var i;;){var r=s.cursor;i:{switch(s.bra=s.cursor,i=s.find_among(u),s.ket=s.cursor,i){case 1:if(s.slice_from("i"))break;return;case 2:if(s.slice_from("u"))break;return;case 3:if(s.cursor>=s.limit)break i;s.cursor++}continue}s.cursor=r;break}})(),s.cursor=i,!0},this.stemWord=function(i){return s.setCurrent(i),this.stem(),s.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/russian-stemmer.js b/sphinx/search/minified-js/russian-stemmer.js
index 698d92bcdb8..7b5410c5e82 100644
--- a/sphinx/search/minified-js/russian-stemmer.js
+++ b/sphinx/search/minified-js/russian-stemmer.js
@@ -1 +1 @@
-RussianStemmer=function(){var r=new BaseStemmer;var e=[["в",-1,1],["ив",0,2],["ыв",0,2],["вши",-1,1],["ивши",3,2],["ывши",3,2],["вшись",-1,1],["ившись",6,2],["ывшись",6,2]];var i=[["ее",-1,1],["ие",-1,1],["ое",-1,1],["ые",-1,1],["ими",-1,1],["ыми",-1,1],["ей",-1,1],["ий",-1,1],["ой",-1,1],["ый",-1,1],["ем",-1,1],["им",-1,1],["ом",-1,1],["ым",-1,1],["его",-1,1],["ого",-1,1],["ему",-1,1],["ому",-1,1],["их",-1,1],["ых",-1,1],["ею",-1,1],["ою",-1,1],["ую",-1,1],["юю",-1,1],["ая",-1,1],["яя",-1,1]];var u=[["ем",-1,1],["нн",-1,1],["вш",-1,1],["ивш",2,2],["ывш",2,2],["щ",-1,1],["ющ",5,1],["ующ",6,2]];var s=[["сь",-1,1],["ся",-1,1]];var a=[["ла",-1,1],["ила",0,2],["ыла",0,2],["на",-1,1],["ена",3,2],["ете",-1,1],["ите",-1,2],["йте",-1,1],["ейте",7,2],["уйте",7,2],["ли",-1,1],["или",10,2],["ыли",10,2],["й",-1,1],["ей",13,2],["уй",13,2],["л",-1,1],["ил",16,2],["ыл",16,2],["ем",-1,1],["им",-1,2],["ым",-1,2],["н",-1,1],["ен",22,2],["ло",-1,1],["ило",24,2],["ыло",24,2],["но",-1,1],["ено",27,2],["нно",27,1],["ет",-1,1],["ует",30,2],["ит",-1,2],["ыт",-1,2],["ют",-1,1],["уют",34,2],["ят",-1,2],["ны",-1,1],["ены",37,2],["ть",-1,1],["ить",39,2],["ыть",39,2],["ешь",-1,1],["ишь",-1,2],["ю",-1,2],["ую",44,2]];var t=[["а",-1,1],["ев",-1,1],["ов",-1,1],["е",-1,1],["ие",3,1],["ье",3,1],["и",-1,1],["еи",6,1],["ии",6,1],["ами",6,1],["ями",6,1],["иями",10,1],["й",-1,1],["ей",12,1],["ией",13,1],["ий",12,1],["ой",12,1],["ам",-1,1],["ем",-1,1],["ием",18,1],["ом",-1,1],["ям",-1,1],["иям",21,1],["о",-1,1],["у",-1,1],["ах",-1,1],["ях",-1,1],["иях",26,1],["ы",-1,1],["ь",-1,1],["ю",-1,1],["ию",30,1],["ью",30,1],["я",-1,1],["ия",33,1],["ья",33,1]];var c=[["ост",-1,1],["ость",-1,1]];var f=[["ейше",-1,1],["н",-1,2],["ейш",-1,1],["ь",-1,3]];var l=[33,65,8,232];var o=0;var n=0;function b(){n=r.limit;o=r.limit;var e=r.cursor;r:{e:while(true){i:{if(!r.in_grouping(l,1072,1103)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}n=r.cursor;e:while(true){i:{if(!r.out_grouping(l,1072,1103)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.in_grouping(l,1072,1103)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(l,1072,1103)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}o=r.cursor}r.cursor=e;return true}function _(){if(!(o<=r.cursor)){return false}return true}function k(){var i;r.ket=r.cursor;i=r.find_among_b(e);if(i==0){return false}r.bra=r.cursor;switch(i){case 1:r:{var u=r.limit-r.cursor;e:{if(!r.eq_s_b("а")){break e}break r}r.cursor=r.limit-u;if(!r.eq_s_b("я")){return false}}if(!r.slice_del()){return false}break;case 2:if(!r.slice_del()){return false}break}return true}function m(){r.ket=r.cursor;if(r.find_among_b(i)==0){return false}r.bra=r.cursor;if(!r.slice_del()){return false}return true}function v(){var e;if(!m()){return false}var i=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(u);if(e==0){r.cursor=r.limit-i;break r}r.bra=r.cursor;switch(e){case 1:e:{var s=r.limit-r.cursor;i:{if(!r.eq_s_b("а")){break i}break e}r.cursor=r.limit-s;if(!r.eq_s_b("я")){r.cursor=r.limit-i;break r}}if(!r.slice_del()){return false}break;case 2:if(!r.slice_del()){return false}break}}return true}function d(){r.ket=r.cursor;if(r.find_among_b(s)==0){return false}r.bra=r.cursor;if(!r.slice_del()){return false}return true}function g(){var e;r.ket=r.cursor;e=r.find_among_b(a);if(e==0){return false}r.bra=r.cursor;switch(e){case 1:r:{var i=r.limit-r.cursor;e:{if(!r.eq_s_b("а")){break e}break r}r.cursor=r.limit-i;if(!r.eq_s_b("я")){return 
false}}if(!r.slice_del()){return false}break;case 2:if(!r.slice_del()){return false}break}return true}function w(){r.ket=r.cursor;if(r.find_among_b(t)==0){return false}r.bra=r.cursor;if(!r.slice_del()){return false}return true}function h(){r.ket=r.cursor;if(r.find_among_b(c)==0){return false}r.bra=r.cursor;if(!_()){return false}if(!r.slice_del()){return false}return true}function q(){var e;r.ket=r.cursor;e=r.find_among_b(f);if(e==0){return false}r.bra=r.cursor;switch(e){case 1:if(!r.slice_del()){return false}r.ket=r.cursor;if(!r.eq_s_b("н")){return false}r.bra=r.cursor;if(!r.eq_s_b("н")){return false}if(!r.slice_del()){return false}break;case 2:if(!r.eq_s_b("н")){return false}if(!r.slice_del()){return false}break;case 3:if(!r.slice_del()){return false}break}return true}this.stem=function(){var e=r.cursor;r:{while(true){var i=r.cursor;e:{i:while(true){var u=r.cursor;u:{r.bra=r.cursor;if(!r.eq_s("ё")){break u}r.ket=r.cursor;r.cursor=u;break i}r.cursor=u;if(r.cursor>=r.limit){break e}r.cursor++}if(!r.slice_from("е")){return false}continue}r.cursor=i;break}}r.cursor=e;b();r.limit_backward=r.cursor;r.cursor=r.limit;if(r.cursor=u.limit)break r;u.cursor++}if(u.slice_from("е"))continue;return!1}u.cursor=i;break}if(u.cursor=r,b=u.limit,m=u.limit,r=u.cursor,u.go_out_grouping(n,1072,1103)&&(u.cursor++,b=u.cursor,u.go_in_grouping(n,1072,1103))&&(u.cursor++,u.go_out_grouping(n,1072,1103))&&(u.cursor++,u.go_in_grouping(n,1072,1103))&&(u.cursor++,m=u.cursor),u.cursor=r,u.limit_backward=u.cursor,u.cursor=u.limit,u.cursor{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(o))){switch(u.bra=u.cursor,r){case 1:var i=u.limit-u.cursor;if(!u.eq_s_b("а")&&(u.cursor=u.limit-i,!u.eq_s_b("я")))return;if(u.slice_del())break;return;case 2:if(u.slice_del())break;return}return 1}})()){u.cursor=u.limit-c;c=u.limit-u.cursor,c=(u.ket=u.cursor,0!=u.find_among_b(t)&&(u.bra=u.cursor,u.slice_del())||(u.cursor=u.limit-c),u.limit-u.cursor);if(!f()&&(u.cursor=u.limit-c,!(()=>{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(a))){switch(u.bra=u.cursor,r){case 1:var i=u.limit-u.cursor;if(!u.eq_s_b("а")&&(u.cursor=u.limit-i,!u.eq_s_b("я")))return;if(u.slice_del())break;return;case 2:if(u.slice_del())break;return}return 1}})())&&(u.cursor=u.limit-c,u.ket=u.cursor,0==u.find_among_b(l)||(u.bra=u.cursor,!u.slice_del())))break r}}u.cursor=u.limit-e;e=u.limit-u.cursor;if(u.ket=u.cursor,u.eq_s_b("и")){if(u.bra=u.cursor,!u.slice_del())return!1}else u.cursor=u.limit-e;e=u.limit-u.cursor,k(),u.cursor=u.limit-e,e=u.limit-u.cursor;return(()=>{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(_)))switch(u.bra=u.cursor,r){case 1:if(!u.slice_del())return;if(u.ket=u.cursor,!u.eq_s_b("н"))return;if(u.bra=u.cursor,!u.eq_s_b("н"))return;if(u.slice_del())break;return;case 2:if(!u.eq_s_b("н"))return;if(u.slice_del())break;return;case 3:if(u.slice_del())break}})(),u.cursor=u.limit-e,u.limit_backward=r,u.cursor=u.limit_backward,!0},this.stemWord=function(r){return u.setCurrent(r),this.stem(),u.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/serbian-stemmer.js b/sphinx/search/minified-js/serbian-stemmer.js
new file mode 100644
index 00000000000..0ac2621eb22
--- /dev/null
+++ b/sphinx/search/minified-js/serbian-stemmer.js
@@ -0,0 +1 @@
+var SerbianStemmer=function(){var m=new BaseStemmer,s=[["а",-1,1],["б",-1,2],["в",-1,3],["г",-1,4],["д",-1,5],["е",-1,7],["ж",-1,8],["з",-1,9],["и",-1,10],["к",-1,12],["л",-1,13],["м",-1,15],["н",-1,16],["о",-1,18],["п",-1,19],["р",-1,20],["с",-1,21],["т",-1,22],["у",-1,24],["ф",-1,25],["х",-1,26],["ц",-1,27],["ч",-1,28],["ш",-1,30],["ђ",-1,6],["ј",-1,11],["љ",-1,14],["њ",-1,17],["ћ",-1,23],["џ",-1,29]],r=[["daba",-1,73],["ajaca",-1,12],["ejaca",-1,14],["ljaca",-1,13],["njaca",-1,85],["ojaca",-1,15],["alaca",-1,82],["elaca",-1,83],["olaca",-1,84],["maca",-1,75],["naca",-1,76],["raca",-1,81],["saca",-1,80],["vaca",-1,79],["šaca",-1,18],["aoca",-1,82],["acaka",-1,55],["ajaka",-1,16],["ojaka",-1,17],["anaka",-1,78],["ataka",-1,58],["etaka",-1,59],["itaka",-1,60],["otaka",-1,61],["utaka",-1,62],["ačaka",-1,54],["esama",-1,67],["izama",-1,87],["jacima",-1,5],["nicima",-1,23],["ticima",-1,24],["teticima",30,21],["zicima",-1,25],["atcima",-1,58],["utcima",-1,62],["čcima",-1,74],["pesima",-1,2],["inzima",-1,19],["lozima",-1,1],["metara",-1,68],["centara",-1,69],["istara",-1,70],["ekata",-1,86],["anata",-1,53],["nstava",-1,22],["kustava",-1,29],["ajac",-1,12],["ejac",-1,14],["ljac",-1,13],["njac",-1,85],["anjac",49,11],["ojac",-1,15],["alac",-1,82],["elac",-1,83],["olac",-1,84],["mac",-1,75],["nac",-1,76],["rac",-1,81],["sac",-1,80],["vac",-1,79],["šac",-1,18],["jebe",-1,88],["olce",-1,84],["kuse",-1,27],["rave",-1,42],["save",-1,52],["šave",-1,51],["baci",-1,89],["jaci",-1,5],["tvenici",-1,20],["snici",-1,26],["tetici",-1,21],["bojci",-1,4],["vojci",-1,3],["ojsci",-1,66],["atci",-1,58],["itci",-1,60],["utci",-1,62],["čci",-1,74],["pesi",-1,2],["inzi",-1,19],["lozi",-1,1],["acak",-1,55],["usak",-1,57],["atak",-1,58],["etak",-1,59],["itak",-1,60],["otak",-1,61],["utak",-1,62],["ačak",-1,54],["ušak",-1,56],["izam",-1,87],["tican",-1,65],["cajan",-1,7],["čajan",-1,6],["voljan",-1,77],["eskan",-1,63],["alan",-1,40],["bilan",-1,33],["gilan",-1,37],["nilan",-1,39],["rilan",-1,38],["silan",-1,36],["tilan",-1,34],["avilan",-1,35],["laran",-1,9],["eran",-1,8],["asan",-1,91],["esan",-1,10],["dusan",-1,31],["kusan",-1,28],["atan",-1,47],["pletan",-1,50],["tetan",-1,49],["antan",-1,32],["pravan",-1,44],["stavan",-1,43],["sivan",-1,46],["tivan",-1,45],["ozan",-1,41],["tičan",-1,64],["ašan",-1,90],["dušan",-1,30],["metar",-1,68],["centar",-1,69],["istar",-1,70],["ekat",-1,86],["enat",-1,48],["oscu",-1,72],["ošću",-1,71]],o=[["aca",-1,124],["eca",-1,125],["uca",-1,126],["ga",-1,20],["acega",3,124],["ecega",3,125],["ucega",3,126],["anjijega",3,84],["enjijega",3,85],["snjijega",3,122],["šnjijega",3,86],["kijega",3,95],["skijega",11,1],["škijega",11,2],["elijega",3,83],["nijega",3,13],["osijega",3,123],["atijega",3,120],["evitijega",3,92],["ovitijega",3,93],["astijega",3,94],["avijega",3,77],["evijega",3,78],["ivijega",3,79],["ovijega",3,80],["ošijega",3,91],["anjega",3,84],["enjega",3,85],["snjega",3,122],["šnjega",3,86],["kega",3,95],["skega",30,1],["škega",30,2],["elega",3,83],["nega",3,13],["anega",34,10],["enega",34,87],["snega",34,159],["šnega",34,88],["osega",3,123],["atega",3,120],["evitega",3,92],["ovitega",3,93],["astega",3,94],["avega",3,77],["evega",3,78],["ivega",3,79],["ovega",3,80],["aćega",3,14],["ećega",3,15],["ućega",3,16],["ošega",3,91],["acoga",3,124],["ecoga",3,125],["ucoga",3,126],["anjoga",3,84],["enjoga",3,85],["snjoga",3,122],["šnjoga",3,86],["koga",3,95],["skoga",59,1],["škoga",59,2],["loga",3,19],["eloga",62,83],["noga",3,13],["cinoga",64,137],["činoga",64,89],["osoga",3,123],["atoga",3,12
0],["evitoga",3,92],["ovitoga",3,93],["astoga",3,94],["avoga",3,77],["evoga",3,78],["ivoga",3,79],["ovoga",3,80],["aćoga",3,14],["ećoga",3,15],["ućoga",3,16],["ošoga",3,91],["uga",3,18],["aja",-1,109],["caja",81,26],["laja",81,30],["raja",81,31],["ćaja",81,28],["čaja",81,27],["đaja",81,29],["bija",-1,32],["cija",-1,33],["dija",-1,34],["fija",-1,40],["gija",-1,39],["anjija",-1,84],["enjija",-1,85],["snjija",-1,122],["šnjija",-1,86],["kija",-1,95],["skija",97,1],["škija",97,2],["lija",-1,24],["elija",100,83],["mija",-1,37],["nija",-1,13],["ganija",103,9],["manija",103,6],["panija",103,7],["ranija",103,8],["tanija",103,5],["pija",-1,41],["rija",-1,42],["rarija",110,21],["sija",-1,23],["osija",112,123],["tija",-1,44],["atija",114,120],["evitija",114,92],["ovitija",114,93],["otija",114,22],["astija",114,94],["avija",-1,77],["evija",-1,78],["ivija",-1,79],["ovija",-1,80],["zija",-1,45],["ošija",-1,91],["žija",-1,38],["anja",-1,84],["enja",-1,85],["snja",-1,122],["šnja",-1,86],["ka",-1,95],["ska",131,1],["ška",131,2],["ala",-1,104],["acala",134,128],["astajala",134,106],["istajala",134,107],["ostajala",134,108],["ijala",134,47],["injala",134,114],["nala",134,46],["irala",134,100],["urala",134,105],["tala",134,113],["astala",144,110],["istala",144,111],["ostala",144,112],["avala",134,97],["evala",134,96],["ivala",134,98],["ovala",134,76],["uvala",134,99],["ačala",134,102],["ela",-1,83],["ila",-1,116],["acila",155,124],["lucila",155,121],["nila",155,103],["astanila",158,110],["istanila",158,111],["ostanila",158,112],["rosila",155,127],["jetila",155,118],["ozila",155,48],["ačila",155,101],["lučila",155,117],["rošila",155,90],["ola",-1,50],["asla",-1,115],["nula",-1,13],["gama",-1,20],["logama",171,19],["ugama",171,18],["ajama",-1,109],["cajama",174,26],["lajama",174,30],["rajama",174,31],["ćajama",174,28],["čajama",174,27],["đajama",174,29],["bijama",-1,32],["cijama",-1,33],["dijama",-1,34],["fijama",-1,40],["gijama",-1,39],["lijama",-1,35],["mijama",-1,37],["nijama",-1,36],["ganijama",188,9],["manijama",188,6],["panijama",188,7],["ranijama",188,8],["tanijama",188,5],["pijama",-1,41],["rijama",-1,42],["sijama",-1,43],["tijama",-1,44],["zijama",-1,45],["žijama",-1,38],["alama",-1,104],["ijalama",200,47],["nalama",200,46],["elama",-1,119],["ilama",-1,116],["ramama",-1,52],["lemama",-1,51],["inama",-1,11],["cinama",207,137],["činama",207,89],["rama",-1,52],["arama",210,53],["drama",210,54],["erama",210,55],["orama",210,56],["basama",-1,135],["gasama",-1,131],["jasama",-1,129],["kasama",-1,133],["nasama",-1,132],["tasama",-1,130],["vasama",-1,134],["esama",-1,152],["isama",-1,154],["etama",-1,70],["estama",-1,71],["istama",-1,72],["kstama",-1,73],["ostama",-1,74],["avama",-1,77],["evama",-1,78],["ivama",-1,79],["bašama",-1,63],["gašama",-1,64],["jašama",-1,61],["kašama",-1,62],["našama",-1,60],["tašama",-1,59],["vašama",-1,65],["ešama",-1,66],["išama",-1,67],["lema",-1,51],["acima",-1,124],["ecima",-1,125],["ucima",-1,126],["ajima",-1,109],["cajima",245,26],["lajima",245,30],["rajima",245,31],["ćajima",245,28],["čajima",245,27],["đajima",245,29],["bijima",-1,32],["cijima",-1,33],["dijima",-1,34],["fijima",-1,40],["gijima",-1,39],["anjijima",-1,84],["enjijima",-1,85],["snjijima",-1,122],["šnjijima",-1,86],["kijima",-1,95],["skijima",261,1],["škijima",261,2],["lijima",-1,35],["elijima",264,83],["mijima",-1,37],["nijima",-1,13],["ganijima",267,9],["manijima",267,6],["panijima",267,7],["ranijima",267,8],["tanijima",267,5],["pijima",-1,41],["rijima",-1,42],["sijima",-1,43],["osijima",275,123],["tijima",-1,44]
,["atijima",277,120],["evitijima",277,92],["ovitijima",277,93],["astijima",277,94],["avijima",-1,77],["evijima",-1,78],["ivijima",-1,79],["ovijima",-1,80],["zijima",-1,45],["ošijima",-1,91],["žijima",-1,38],["anjima",-1,84],["enjima",-1,85],["snjima",-1,122],["šnjima",-1,86],["kima",-1,95],["skima",293,1],["škima",293,2],["alima",-1,104],["ijalima",296,47],["nalima",296,46],["elima",-1,83],["ilima",-1,116],["ozilima",300,48],["olima",-1,50],["lemima",-1,51],["nima",-1,13],["anima",304,10],["inima",304,11],["cinima",306,137],["činima",306,89],["onima",304,12],["arima",-1,53],["drima",-1,54],["erima",-1,55],["orima",-1,56],["basima",-1,135],["gasima",-1,131],["jasima",-1,129],["kasima",-1,133],["nasima",-1,132],["tasima",-1,130],["vasima",-1,134],["esima",-1,57],["isima",-1,58],["osima",-1,123],["atima",-1,120],["ikatima",324,68],["latima",324,69],["etima",-1,70],["evitima",-1,92],["ovitima",-1,93],["astima",-1,94],["estima",-1,71],["istima",-1,72],["kstima",-1,73],["ostima",-1,74],["ištima",-1,75],["avima",-1,77],["evima",-1,78],["ajevima",337,109],["cajevima",338,26],["lajevima",338,30],["rajevima",338,31],["ćajevima",338,28],["čajevima",338,27],["đajevima",338,29],["ivima",-1,79],["ovima",-1,80],["govima",346,20],["ugovima",347,17],["lovima",346,82],["olovima",349,49],["movima",346,81],["onovima",346,12],["stvima",-1,3],["štvima",-1,4],["aćima",-1,14],["ećima",-1,15],["ućima",-1,16],["bašima",-1,63],["gašima",-1,64],["jašima",-1,61],["kašima",-1,62],["našima",-1,60],["tašima",-1,59],["vašima",-1,65],["ešima",-1,66],["išima",-1,67],["ošima",-1,91],["na",-1,13],["ana",368,10],["acana",369,128],["urana",369,105],["tana",369,113],["avana",369,97],["evana",369,96],["ivana",369,98],["uvana",369,99],["ačana",369,102],["acena",368,124],["lucena",368,121],["ačena",368,101],["lučena",368,117],["ina",368,11],["cina",382,137],["anina",382,10],["čina",382,89],["ona",368,12],["ara",-1,53],["dra",-1,54],["era",-1,55],["ora",-1,56],["basa",-1,135],["gasa",-1,131],["jasa",-1,129],["kasa",-1,133],["nasa",-1,132],["tasa",-1,130],["vasa",-1,134],["esa",-1,57],["isa",-1,58],["osa",-1,123],["ata",-1,120],["ikata",401,68],["lata",401,69],["eta",-1,70],["evita",-1,92],["ovita",-1,93],["asta",-1,94],["esta",-1,71],["ista",-1,72],["ksta",-1,73],["osta",-1,74],["nuta",-1,13],["išta",-1,75],["ava",-1,77],["eva",-1,78],["ajeva",415,109],["cajeva",416,26],["lajeva",416,30],["rajeva",416,31],["ćajeva",416,28],["čajeva",416,27],["đajeva",416,29],["iva",-1,79],["ova",-1,80],["gova",424,20],["ugova",425,17],["lova",424,82],["olova",427,49],["mova",424,81],["onova",424,12],["stva",-1,3],["štva",-1,4],["aća",-1,14],["eća",-1,15],["uća",-1,16],["baša",-1,63],["gaša",-1,64],["jaša",-1,61],["kaša",-1,62],["naša",-1,60],["taša",-1,59],["vaša",-1,65],["eša",-1,66],["iša",-1,67],["oša",-1,91],["ace",-1,124],["ece",-1,125],["uce",-1,126],["luce",448,121],["astade",-1,110],["istade",-1,111],["ostade",-1,112],["ge",-1,20],["loge",453,19],["uge",453,18],["aje",-1,104],["caje",456,26],["laje",456,30],["raje",456,31],["astaje",456,106],["istaje",456,107],["ostaje",456,108],["ćaje",456,28],["čaje",456,27],["đaje",456,29],["ije",-1,116],["bije",466,32],["cije",466,33],["dije",466,34],["fije",466,40],["gije",466,39],["anjije",466,84],["enjije",466,85],["snjije",466,122],["šnjije",466,86],["kije",466,95],["skije",476,1],["škije",476,2],["lije",466,35],["elije",479,83],["mije",466,37],["nije",466,13],["ganije",482,9],["manije",482,6],["panije",482,7],["ranije",482,8],["tanije",482,5],["pije",466,41],["rije",466,42],["sije",466,43],["osije",4
90,123],["tije",466,44],["atije",492,120],["evitije",492,92],["ovitije",492,93],["astije",492,94],["avije",466,77],["evije",466,78],["ivije",466,79],["ovije",466,80],["zije",466,45],["ošije",466,91],["žije",466,38],["anje",-1,84],["enje",-1,85],["snje",-1,122],["šnje",-1,86],["uje",-1,25],["lucuje",508,121],["iruje",508,100],["lučuje",508,117],["ke",-1,95],["ske",512,1],["ške",512,2],["ale",-1,104],["acale",515,128],["astajale",515,106],["istajale",515,107],["ostajale",515,108],["ijale",515,47],["injale",515,114],["nale",515,46],["irale",515,100],["urale",515,105],["tale",515,113],["astale",525,110],["istale",525,111],["ostale",525,112],["avale",515,97],["evale",515,96],["ivale",515,98],["ovale",515,76],["uvale",515,99],["ačale",515,102],["ele",-1,83],["ile",-1,116],["acile",536,124],["lucile",536,121],["nile",536,103],["rosile",536,127],["jetile",536,118],["ozile",536,48],["ačile",536,101],["lučile",536,117],["rošile",536,90],["ole",-1,50],["asle",-1,115],["nule",-1,13],["rame",-1,52],["leme",-1,51],["acome",-1,124],["ecome",-1,125],["ucome",-1,126],["anjome",-1,84],["enjome",-1,85],["snjome",-1,122],["šnjome",-1,86],["kome",-1,95],["skome",558,1],["škome",558,2],["elome",-1,83],["nome",-1,13],["cinome",562,137],["činome",562,89],["osome",-1,123],["atome",-1,120],["evitome",-1,92],["ovitome",-1,93],["astome",-1,94],["avome",-1,77],["evome",-1,78],["ivome",-1,79],["ovome",-1,80],["aćome",-1,14],["ećome",-1,15],["ućome",-1,16],["ošome",-1,91],["ne",-1,13],["ane",578,10],["acane",579,128],["urane",579,105],["tane",579,113],["astane",582,110],["istane",582,111],["ostane",582,112],["avane",579,97],["evane",579,96],["ivane",579,98],["uvane",579,99],["ačane",579,102],["acene",578,124],["lucene",578,121],["ačene",578,101],["lučene",578,117],["ine",578,11],["cine",595,137],["anine",595,10],["čine",595,89],["one",578,12],["are",-1,53],["dre",-1,54],["ere",-1,55],["ore",-1,56],["ase",-1,161],["base",604,135],["acase",604,128],["gase",604,131],["jase",604,129],["astajase",608,138],["istajase",608,139],["ostajase",608,140],["injase",608,150],["kase",604,133],["nase",604,132],["irase",604,155],["urase",604,156],["tase",604,130],["vase",604,134],["avase",618,144],["evase",618,145],["ivase",618,146],["ovase",618,148],["uvase",618,147],["ese",-1,57],["ise",-1,58],["acise",625,124],["lucise",625,121],["rosise",625,127],["jetise",625,149],["ose",-1,123],["astadose",630,141],["istadose",630,142],["ostadose",630,143],["ate",-1,104],["acate",634,128],["ikate",634,68],["late",634,69],["irate",634,100],["urate",634,105],["tate",634,113],["avate",634,97],["evate",634,96],["ivate",634,98],["uvate",634,99],["ačate",634,102],["ete",-1,70],["astadete",646,110],["istadete",646,111],["ostadete",646,112],["astajete",646,106],["istajete",646,107],["ostajete",646,108],["ijete",646,116],["injete",646,114],["ujete",646,25],["lucujete",655,121],["irujete",655,100],["lučujete",655,117],["nete",646,13],["astanete",659,110],["istanete",659,111],["ostanete",659,112],["astete",646,115],["ite",-1,116],["acite",664,124],["lucite",664,121],["nite",664,13],["astanite",667,110],["istanite",667,111],["ostanite",667,112],["rosite",664,127],["jetite",664,118],["astite",664,115],["evite",664,92],["ovite",664,93],["ačite",664,101],["lučite",664,117],["rošite",664,90],["ajte",-1,104],["urajte",679,105],["tajte",679,113],["astajte",681,106],["istajte",681,107],["ostajte",681,108],["avajte",679,97],["evajte",679,96],["ivajte",679,98],["uvajte",679,99],["ijte",-1,116],["lucujte",-1,121],["irujte",-1,100],["lučujte",-1,117],["aste",-1,94],["acas
te",693,128],["astajaste",693,106],["istajaste",693,107],["ostajaste",693,108],["injaste",693,114],["iraste",693,100],["uraste",693,105],["taste",693,113],["avaste",693,97],["evaste",693,96],["ivaste",693,98],["ovaste",693,76],["uvaste",693,99],["ačaste",693,102],["este",-1,71],["iste",-1,72],["aciste",709,124],["luciste",709,121],["niste",709,103],["rosiste",709,127],["jetiste",709,118],["ačiste",709,101],["lučiste",709,117],["rošiste",709,90],["kste",-1,73],["oste",-1,74],["astadoste",719,110],["istadoste",719,111],["ostadoste",719,112],["nuste",-1,13],["ište",-1,75],["ave",-1,77],["eve",-1,78],["ajeve",726,109],["cajeve",727,26],["lajeve",727,30],["rajeve",727,31],["ćajeve",727,28],["čajeve",727,27],["đajeve",727,29],["ive",-1,79],["ove",-1,80],["gove",735,20],["ugove",736,17],["love",735,82],["olove",738,49],["move",735,81],["onove",735,12],["aće",-1,14],["eće",-1,15],["uće",-1,16],["ače",-1,101],["luče",-1,117],["aše",-1,104],["baše",747,63],["gaše",747,64],["jaše",747,61],["astajaše",750,106],["istajaše",750,107],["ostajaše",750,108],["injaše",750,114],["kaše",747,62],["naše",747,60],["iraše",747,100],["uraše",747,105],["taše",747,59],["vaše",747,65],["avaše",760,97],["evaše",760,96],["ivaše",760,98],["ovaše",760,76],["uvaše",760,99],["ačaše",747,102],["eše",-1,66],["iše",-1,67],["jetiše",768,118],["ačiše",768,101],["lučiše",768,117],["rošiše",768,90],["oše",-1,91],["astadoše",773,110],["istadoše",773,111],["ostadoše",773,112],["aceg",-1,124],["eceg",-1,125],["uceg",-1,126],["anjijeg",-1,84],["enjijeg",-1,85],["snjijeg",-1,122],["šnjijeg",-1,86],["kijeg",-1,95],["skijeg",784,1],["škijeg",784,2],["elijeg",-1,83],["nijeg",-1,13],["osijeg",-1,123],["atijeg",-1,120],["evitijeg",-1,92],["ovitijeg",-1,93],["astijeg",-1,94],["avijeg",-1,77],["evijeg",-1,78],["ivijeg",-1,79],["ovijeg",-1,80],["ošijeg",-1,91],["anjeg",-1,84],["enjeg",-1,85],["snjeg",-1,122],["šnjeg",-1,86],["keg",-1,95],["eleg",-1,83],["neg",-1,13],["aneg",805,10],["eneg",805,87],["sneg",805,159],["šneg",805,88],["oseg",-1,123],["ateg",-1,120],["aveg",-1,77],["eveg",-1,78],["iveg",-1,79],["oveg",-1,80],["aćeg",-1,14],["ećeg",-1,15],["ućeg",-1,16],["ošeg",-1,91],["acog",-1,124],["ecog",-1,125],["ucog",-1,126],["anjog",-1,84],["enjog",-1,85],["snjog",-1,122],["šnjog",-1,86],["kog",-1,95],["skog",827,1],["škog",827,2],["elog",-1,83],["nog",-1,13],["cinog",831,137],["činog",831,89],["osog",-1,123],["atog",-1,120],["evitog",-1,92],["ovitog",-1,93],["astog",-1,94],["avog",-1,77],["evog",-1,78],["ivog",-1,79],["ovog",-1,80],["aćog",-1,14],["ećog",-1,15],["ućog",-1,16],["ošog",-1,91],["ah",-1,104],["acah",847,128],["astajah",847,106],["istajah",847,107],["ostajah",847,108],["injah",847,114],["irah",847,100],["urah",847,105],["tah",847,113],["avah",847,97],["evah",847,96],["ivah",847,98],["ovah",847,76],["uvah",847,99],["ačah",847,102],["ih",-1,116],["acih",862,124],["ecih",862,125],["ucih",862,126],["lucih",865,121],["anjijih",862,84],["enjijih",862,85],["snjijih",862,122],["šnjijih",862,86],["kijih",862,95],["skijih",871,1],["škijih",871,2],["elijih",862,83],["nijih",862,13],["osijih",862,123],["atijih",862,120],["evitijih",862,92],["ovitijih",862,93],["astijih",862,94],["avijih",862,77],["evijih",862,78],["ivijih",862,79],["ovijih",862,80],["ošijih",862,91],["anjih",862,84],["enjih",862,85],["snjih",862,122],["šnjih",862,86],["kih",862,95],["skih",890,1],["ških",890,2],["elih",862,83],["nih",862,13],["cinih",894,137],["činih",894,89],["osih",862,123],["rosih",897,127],["atih",862,120],["jetih",862,118],["evitih",862,92],["ovitih",86
2,93],["astih",862,94],["avih",862,77],["evih",862,78],["ivih",862,79],["ovih",862,80],["aćih",862,14],["ećih",862,15],["ućih",862,16],["ačih",862,101],["lučih",862,117],["oših",862,91],["roših",913,90],["astadoh",-1,110],["istadoh",-1,111],["ostadoh",-1,112],["acuh",-1,124],["ecuh",-1,125],["ucuh",-1,126],["aćuh",-1,14],["ećuh",-1,15],["ućuh",-1,16],["aci",-1,124],["aceci",-1,124],["ieci",-1,162],["ajuci",-1,161],["irajuci",927,155],["urajuci",927,156],["astajuci",927,138],["istajuci",927,139],["ostajuci",927,140],["avajuci",927,144],["evajuci",927,145],["ivajuci",927,146],["uvajuci",927,147],["ujuci",-1,157],["lucujuci",937,121],["irujuci",937,155],["luci",-1,121],["nuci",-1,164],["etuci",-1,153],["astuci",-1,136],["gi",-1,20],["ugi",944,18],["aji",-1,109],["caji",946,26],["laji",946,30],["raji",946,31],["ćaji",946,28],["čaji",946,27],["đaji",946,29],["biji",-1,32],["ciji",-1,33],["diji",-1,34],["fiji",-1,40],["giji",-1,39],["anjiji",-1,84],["enjiji",-1,85],["snjiji",-1,122],["šnjiji",-1,86],["kiji",-1,95],["skiji",962,1],["škiji",962,2],["liji",-1,35],["eliji",965,83],["miji",-1,37],["niji",-1,13],["ganiji",968,9],["maniji",968,6],["paniji",968,7],["raniji",968,8],["taniji",968,5],["piji",-1,41],["riji",-1,42],["siji",-1,43],["osiji",976,123],["tiji",-1,44],["atiji",978,120],["evitiji",978,92],["ovitiji",978,93],["astiji",978,94],["aviji",-1,77],["eviji",-1,78],["iviji",-1,79],["oviji",-1,80],["ziji",-1,45],["ošiji",-1,91],["žiji",-1,38],["anji",-1,84],["enji",-1,85],["snji",-1,122],["šnji",-1,86],["ki",-1,95],["ski",994,1],["ški",994,2],["ali",-1,104],["acali",997,128],["astajali",997,106],["istajali",997,107],["ostajali",997,108],["ijali",997,47],["injali",997,114],["nali",997,46],["irali",997,100],["urali",997,105],["tali",997,113],["astali",1007,110],["istali",1007,111],["ostali",1007,112],["avali",997,97],["evali",997,96],["ivali",997,98],["ovali",997,76],["uvali",997,99],["ačali",997,102],["eli",-1,83],["ili",-1,116],["acili",1018,124],["lucili",1018,121],["nili",1018,103],["rosili",1018,127],["jetili",1018,118],["ozili",1018,48],["ačili",1018,101],["lučili",1018,117],["rošili",1018,90],["oli",-1,50],["asli",-1,115],["nuli",-1,13],["rami",-1,52],["lemi",-1,51],["ni",-1,13],["ani",1033,10],["acani",1034,128],["urani",1034,105],["tani",1034,113],["avani",1034,97],["evani",1034,96],["ivani",1034,98],["uvani",1034,99],["ačani",1034,102],["aceni",1033,124],["luceni",1033,121],["ačeni",1033,101],["lučeni",1033,117],["ini",1033,11],["cini",1047,137],["čini",1047,89],["oni",1033,12],["ari",-1,53],["dri",-1,54],["eri",-1,55],["ori",-1,56],["basi",-1,135],["gasi",-1,131],["jasi",-1,129],["kasi",-1,133],["nasi",-1,132],["tasi",-1,130],["vasi",-1,134],["esi",-1,152],["isi",-1,154],["osi",-1,123],["avsi",-1,161],["acavsi",1065,128],["iravsi",1065,155],["tavsi",1065,160],["etavsi",1068,153],["astavsi",1068,141],["istavsi",1068,142],["ostavsi",1068,143],["ivsi",-1,162],["nivsi",1073,158],["rosivsi",1073,127],["nuvsi",-1,164],["ati",-1,104],["acati",1077,128],["astajati",1077,106],["istajati",1077,107],["ostajati",1077,108],["injati",1077,114],["ikati",1077,68],["lati",1077,69],["irati",1077,100],["urati",1077,105],["tati",1077,113],["astati",1087,110],["istati",1087,111],["ostati",1087,112],["avati",1077,97],["evati",1077,96],["ivati",1077,98],["ovati",1077,76],["uvati",1077,99],["ačati",1077,102],["eti",-1,70],["iti",-1,116],["aciti",1098,124],["luciti",1098,121],["niti",1098,103],["rositi",1098,127],["jetiti",1098,118],["eviti",1098,92],["oviti",1098,93],["ačiti",1098,101],["lučiti",1098,117],[
"rošiti",1098,90],["asti",-1,94],["esti",-1,71],["isti",-1,72],["ksti",-1,73],["osti",-1,74],["nuti",-1,13],["avi",-1,77],["evi",-1,78],["ajevi",1116,109],["cajevi",1117,26],["lajevi",1117,30],["rajevi",1117,31],["ćajevi",1117,28],["čajevi",1117,27],["đajevi",1117,29],["ivi",-1,79],["ovi",-1,80],["govi",1125,20],["ugovi",1126,17],["lovi",1125,82],["olovi",1128,49],["movi",1125,81],["onovi",1125,12],["ieći",-1,116],["ačeći",-1,101],["ajući",-1,104],["irajući",1134,100],["urajući",1134,105],["astajući",1134,106],["istajući",1134,107],["ostajući",1134,108],["avajući",1134,97],["evajući",1134,96],["ivajući",1134,98],["uvajući",1134,99],["ujući",-1,25],["irujući",1144,100],["lučujući",1144,117],["nući",-1,13],["etući",-1,70],["astući",-1,115],["ači",-1,101],["luči",-1,117],["baši",-1,63],["gaši",-1,64],["jaši",-1,61],["kaši",-1,62],["naši",-1,60],["taši",-1,59],["vaši",-1,65],["eši",-1,66],["iši",-1,67],["oši",-1,91],["avši",-1,104],["iravši",1162,100],["tavši",1162,113],["etavši",1164,70],["astavši",1164,110],["istavši",1164,111],["ostavši",1164,112],["ačavši",1162,102],["ivši",-1,116],["nivši",1170,103],["rošivši",1170,90],["nuvši",-1,13],["aj",-1,104],["uraj",1174,105],["taj",1174,113],["avaj",1174,97],["evaj",1174,96],["ivaj",1174,98],["uvaj",1174,99],["ij",-1,116],["acoj",-1,124],["ecoj",-1,125],["ucoj",-1,126],["anjijoj",-1,84],["enjijoj",-1,85],["snjijoj",-1,122],["šnjijoj",-1,86],["kijoj",-1,95],["skijoj",1189,1],["škijoj",1189,2],["elijoj",-1,83],["nijoj",-1,13],["osijoj",-1,123],["evitijoj",-1,92],["ovitijoj",-1,93],["astijoj",-1,94],["avijoj",-1,77],["evijoj",-1,78],["ivijoj",-1,79],["ovijoj",-1,80],["ošijoj",-1,91],["anjoj",-1,84],["enjoj",-1,85],["snjoj",-1,122],["šnjoj",-1,86],["koj",-1,95],["skoj",1207,1],["škoj",1207,2],["aloj",-1,104],["eloj",-1,83],["noj",-1,13],["cinoj",1212,137],["činoj",1212,89],["osoj",-1,123],["atoj",-1,120],["evitoj",-1,92],["ovitoj",-1,93],["astoj",-1,94],["avoj",-1,77],["evoj",-1,78],["ivoj",-1,79],["ovoj",-1,80],["aćoj",-1,14],["ećoj",-1,15],["ućoj",-1,16],["ošoj",-1,91],["lucuj",-1,121],["iruj",-1,100],["lučuj",-1,117],["al",-1,104],["iral",1231,100],["ural",1231,105],["el",-1,119],["il",-1,116],["am",-1,104],["acam",1236,128],["iram",1236,100],["uram",1236,105],["tam",1236,113],["avam",1236,97],["evam",1236,96],["ivam",1236,98],["uvam",1236,99],["ačam",1236,102],["em",-1,119],["acem",1246,124],["ecem",1246,125],["ucem",1246,126],["astadem",1246,110],["istadem",1246,111],["ostadem",1246,112],["ajem",1246,104],["cajem",1253,26],["lajem",1253,30],["rajem",1253,31],["astajem",1253,106],["istajem",1253,107],["ostajem",1253,108],["ćajem",1253,28],["čajem",1253,27],["đajem",1253,29],["ijem",1246,116],["anjijem",1263,84],["enjijem",1263,85],["snjijem",1263,123],["šnjijem",1263,86],["kijem",1263,95],["skijem",1268,1],["škijem",1268,2],["lijem",1263,24],["elijem",1271,83],["nijem",1263,13],["rarijem",1263,21],["sijem",1263,23],["osijem",1275,123],["atijem",1263,120],["evitijem",1263,92],["ovitijem",1263,93],["otijem",1263,22],["astijem",1263,94],["avijem",1263,77],["evijem",1263,78],["ivijem",1263,79],["ovijem",1263,80],["ošijem",1263,91],["anjem",1246,84],["enjem",1246,85],["injem",1246,114],["snjem",1246,122],["šnjem",1246,86],["ujem",1246,25],["lucujem",1292,121],["irujem",1292,100],["lučujem",1292,117],["kem",1246,95],["skem",1296,1],["škem",1296,2],["elem",1246,83],["nem",1246,13],["anem",1300,10],["astanem",1301,110],["istanem",1301,111],["ostanem",1301,112],["enem",1300,87],["snem",1300,159],["šnem",1300,88],["basem",1246,135],["gasem",1246,131],["jasem
",1246,129],["kasem",1246,133],["nasem",1246,132],["tasem",1246,130],["vasem",1246,134],["esem",1246,152],["isem",1246,154],["osem",1246,123],["atem",1246,120],["etem",1246,70],["evitem",1246,92],["ovitem",1246,93],["astem",1246,94],["istem",1246,151],["ištem",1246,75],["avem",1246,77],["evem",1246,78],["ivem",1246,79],["aćem",1246,14],["ećem",1246,15],["ućem",1246,16],["bašem",1246,63],["gašem",1246,64],["jašem",1246,61],["kašem",1246,62],["našem",1246,60],["tašem",1246,59],["vašem",1246,65],["ešem",1246,66],["išem",1246,67],["ošem",1246,91],["im",-1,116],["acim",1341,124],["ecim",1341,125],["ucim",1341,126],["lucim",1344,121],["anjijim",1341,84],["enjijim",1341,85],["snjijim",1341,122],["šnjijim",1341,86],["kijim",1341,95],["skijim",1350,1],["škijim",1350,2],["elijim",1341,83],["nijim",1341,13],["osijim",1341,123],["atijim",1341,120],["evitijim",1341,92],["ovitijim",1341,93],["astijim",1341,94],["avijim",1341,77],["evijim",1341,78],["ivijim",1341,79],["ovijim",1341,80],["ošijim",1341,91],["anjim",1341,84],["enjim",1341,85],["snjim",1341,122],["šnjim",1341,86],["kim",1341,95],["skim",1369,1],["škim",1369,2],["elim",1341,83],["nim",1341,13],["cinim",1373,137],["činim",1373,89],["osim",1341,123],["rosim",1376,127],["atim",1341,120],["jetim",1341,118],["evitim",1341,92],["ovitim",1341,93],["astim",1341,94],["avim",1341,77],["evim",1341,78],["ivim",1341,79],["ovim",1341,80],["aćim",1341,14],["ećim",1341,15],["ućim",1341,16],["ačim",1341,101],["lučim",1341,117],["ošim",1341,91],["rošim",1392,90],["acom",-1,124],["ecom",-1,125],["ucom",-1,126],["gom",-1,20],["logom",1397,19],["ugom",1397,18],["bijom",-1,32],["cijom",-1,33],["dijom",-1,34],["fijom",-1,40],["gijom",-1,39],["lijom",-1,35],["mijom",-1,37],["nijom",-1,36],["ganijom",1407,9],["manijom",1407,6],["panijom",1407,7],["ranijom",1407,8],["tanijom",1407,5],["pijom",-1,41],["rijom",-1,42],["sijom",-1,43],["tijom",-1,44],["zijom",-1,45],["žijom",-1,38],["anjom",-1,84],["enjom",-1,85],["snjom",-1,122],["šnjom",-1,86],["kom",-1,95],["skom",1423,1],["škom",1423,2],["alom",-1,104],["ijalom",1426,47],["nalom",1426,46],["elom",-1,83],["ilom",-1,116],["ozilom",1430,48],["olom",-1,50],["ramom",-1,52],["lemom",-1,51],["nom",-1,13],["anom",1435,10],["inom",1435,11],["cinom",1437,137],["aninom",1437,10],["činom",1437,89],["onom",1435,12],["arom",-1,53],["drom",-1,54],["erom",-1,55],["orom",-1,56],["basom",-1,135],["gasom",-1,131],["jasom",-1,129],["kasom",-1,133],["nasom",-1,132],["tasom",-1,130],["vasom",-1,134],["esom",-1,57],["isom",-1,58],["osom",-1,123],["atom",-1,120],["ikatom",1456,68],["latom",1456,69],["etom",-1,70],["evitom",-1,92],["ovitom",-1,93],["astom",-1,94],["estom",-1,71],["istom",-1,72],["kstom",-1,73],["ostom",-1,74],["avom",-1,77],["evom",-1,78],["ivom",-1,79],["ovom",-1,80],["lovom",1470,82],["movom",1470,81],["stvom",-1,3],["štvom",-1,4],["aćom",-1,14],["ećom",-1,15],["ućom",-1,16],["bašom",-1,63],["gašom",-1,64],["jašom",-1,61],["kašom",-1,62],["našom",-1,60],["tašom",-1,59],["vašom",-1,65],["ešom",-1,66],["išom",-1,67],["ošom",-1,91],["an",-1,104],["acan",1488,128],["iran",1488,100],["uran",1488,105],["tan",1488,113],["avan",1488,97],["evan",1488,96],["ivan",1488,98],["uvan",1488,99],["ačan",1488,102],["acen",-1,124],["lucen",-1,121],["ačen",-1,101],["lučen",-1,117],["anin",-1,10],["ao",-1,104],["acao",1503,128],["astajao",1503,106],["istajao",1503,107],["ostajao",1503,108],["injao",1503,114],["irao",1503,100],["urao",1503,105],["tao",1503,113],["astao",1511,110],["istao",1511,111],["ostao",1511,112],["avao",1503,97],["evao",1503
,96],["ivao",1503,98],["ovao",1503,76],["uvao",1503,99],["ačao",1503,102],["go",-1,20],["ugo",1521,18],["io",-1,116],["acio",1523,124],["lucio",1523,121],["lio",1523,24],["nio",1523,103],["rario",1523,21],["sio",1523,23],["rosio",1529,127],["jetio",1523,118],["otio",1523,22],["ačio",1523,101],["lučio",1523,117],["rošio",1523,90],["bijo",-1,32],["cijo",-1,33],["dijo",-1,34],["fijo",-1,40],["gijo",-1,39],["lijo",-1,35],["mijo",-1,37],["nijo",-1,36],["pijo",-1,41],["rijo",-1,42],["sijo",-1,43],["tijo",-1,44],["zijo",-1,45],["žijo",-1,38],["anjo",-1,84],["enjo",-1,85],["snjo",-1,122],["šnjo",-1,86],["ko",-1,95],["sko",1554,1],["ško",1554,2],["alo",-1,104],["acalo",1557,128],["astajalo",1557,106],["istajalo",1557,107],["ostajalo",1557,108],["ijalo",1557,47],["injalo",1557,114],["nalo",1557,46],["iralo",1557,100],["uralo",1557,105],["talo",1557,113],["astalo",1567,110],["istalo",1567,111],["ostalo",1567,112],["avalo",1557,97],["evalo",1557,96],["ivalo",1557,98],["ovalo",1557,76],["uvalo",1557,99],["ačalo",1557,102],["elo",-1,83],["ilo",-1,116],["acilo",1578,124],["lucilo",1578,121],["nilo",1578,103],["rosilo",1578,127],["jetilo",1578,118],["ačilo",1578,101],["lučilo",1578,117],["rošilo",1578,90],["aslo",-1,115],["nulo",-1,13],["amo",-1,104],["acamo",1589,128],["ramo",1589,52],["iramo",1591,100],["uramo",1591,105],["tamo",1589,113],["avamo",1589,97],["evamo",1589,96],["ivamo",1589,98],["uvamo",1589,99],["ačamo",1589,102],["emo",-1,119],["astademo",1600,110],["istademo",1600,111],["ostademo",1600,112],["astajemo",1600,106],["istajemo",1600,107],["ostajemo",1600,108],["ijemo",1600,116],["injemo",1600,114],["ujemo",1600,25],["lucujemo",1609,121],["irujemo",1609,100],["lučujemo",1609,117],["lemo",1600,51],["nemo",1600,13],["astanemo",1614,110],["istanemo",1614,111],["ostanemo",1614,112],["etemo",1600,70],["astemo",1600,115],["imo",-1,116],["acimo",1620,124],["lucimo",1620,121],["nimo",1620,13],["astanimo",1623,110],["istanimo",1623,111],["ostanimo",1623,112],["rosimo",1620,127],["etimo",1620,70],["jetimo",1628,118],["astimo",1620,115],["ačimo",1620,101],["lučimo",1620,117],["rošimo",1620,90],["ajmo",-1,104],["urajmo",1634,105],["tajmo",1634,113],["astajmo",1636,106],["istajmo",1636,107],["ostajmo",1636,108],["avajmo",1634,97],["evajmo",1634,96],["ivajmo",1634,98],["uvajmo",1634,99],["ijmo",-1,116],["ujmo",-1,25],["lucujmo",1645,121],["irujmo",1645,100],["lučujmo",1645,117],["asmo",-1,104],["acasmo",1649,128],["astajasmo",1649,106],["istajasmo",1649,107],["ostajasmo",1649,108],["injasmo",1649,114],["irasmo",1649,100],["urasmo",1649,105],["tasmo",1649,113],["avasmo",1649,97],["evasmo",1649,96],["ivasmo",1649,98],["ovasmo",1649,76],["uvasmo",1649,99],["ačasmo",1649,102],["ismo",-1,116],["acismo",1664,124],["lucismo",1664,121],["nismo",1664,103],["rosismo",1664,127],["jetismo",1664,118],["ačismo",1664,101],["lučismo",1664,117],["rošismo",1664,90],["astadosmo",-1,110],["istadosmo",-1,111],["ostadosmo",-1,112],["nusmo",-1,13],["no",-1,13],["ano",1677,104],["acano",1678,128],["urano",1678,105],["tano",1678,113],["avano",1678,97],["evano",1678,96],["ivano",1678,98],["uvano",1678,99],["ačano",1678,102],["aceno",1677,124],["luceno",1677,121],["ačeno",1677,101],["lučeno",1677,117],["ino",1677,11],["cino",1691,137],["čino",1691,89],["ato",-1,120],["ikato",1694,68],["lato",1694,69],["eto",-1,70],["evito",-1,92],["ovito",-1,93],["asto",-1,94],["esto",-1,71],["isto",-1,72],["ksto",-1,73],["osto",-1,74],["nuto",-1,13],["nuo",-1,13],["avo",-1,77],["evo",-1,78],["ivo",-1,79],["ovo",-1,80],["stvo",-1,3],["štvo",-1,4],["
as",-1,161],["acas",1713,128],["iras",1713,155],["uras",1713,156],["tas",1713,160],["avas",1713,144],["evas",1713,145],["ivas",1713,146],["uvas",1713,147],["es",-1,163],["astades",1722,141],["istades",1722,142],["ostades",1722,143],["astajes",1722,138],["istajes",1722,139],["ostajes",1722,140],["ijes",1722,162],["injes",1722,150],["ujes",1722,157],["lucujes",1731,121],["irujes",1731,155],["nes",1722,164],["astanes",1734,141],["istanes",1734,142],["ostanes",1734,143],["etes",1722,153],["astes",1722,136],["is",-1,162],["acis",1740,124],["lucis",1740,121],["nis",1740,158],["rosis",1740,127],["jetis",1740,149],["at",-1,104],["acat",1746,128],["astajat",1746,106],["istajat",1746,107],["ostajat",1746,108],["injat",1746,114],["irat",1746,100],["urat",1746,105],["tat",1746,113],["astat",1754,110],["istat",1754,111],["ostat",1754,112],["avat",1746,97],["evat",1746,96],["ivat",1746,98],["irivat",1760,100],["ovat",1746,76],["uvat",1746,99],["ačat",1746,102],["it",-1,116],["acit",1765,124],["lucit",1765,121],["rosit",1765,127],["jetit",1765,118],["ačit",1765,101],["lučit",1765,117],["rošit",1765,90],["nut",-1,13],["astadu",-1,110],["istadu",-1,111],["ostadu",-1,112],["gu",-1,20],["logu",1777,19],["ugu",1777,18],["ahu",-1,104],["acahu",1780,128],["astajahu",1780,106],["istajahu",1780,107],["ostajahu",1780,108],["injahu",1780,114],["irahu",1780,100],["urahu",1780,105],["avahu",1780,97],["evahu",1780,96],["ivahu",1780,98],["ovahu",1780,76],["uvahu",1780,99],["ačahu",1780,102],["aju",-1,104],["caju",1794,26],["acaju",1795,128],["laju",1794,30],["raju",1794,31],["iraju",1798,100],["uraju",1798,105],["taju",1794,113],["astaju",1801,106],["istaju",1801,107],["ostaju",1801,108],["avaju",1794,97],["evaju",1794,96],["ivaju",1794,98],["uvaju",1794,99],["ćaju",1794,28],["čaju",1794,27],["ačaju",1810,102],["đaju",1794,29],["iju",-1,116],["biju",1813,32],["ciju",1813,33],["diju",1813,34],["fiju",1813,40],["giju",1813,39],["anjiju",1813,84],["enjiju",1813,85],["snjiju",1813,122],["šnjiju",1813,86],["kiju",1813,95],["liju",1813,24],["eliju",1824,83],["miju",1813,37],["niju",1813,13],["ganiju",1827,9],["maniju",1827,6],["paniju",1827,7],["raniju",1827,8],["taniju",1827,5],["piju",1813,41],["riju",1813,42],["rariju",1834,21],["siju",1813,23],["osiju",1836,123],["tiju",1813,44],["atiju",1838,120],["otiju",1838,22],["aviju",1813,77],["eviju",1813,78],["iviju",1813,79],["oviju",1813,80],["ziju",1813,45],["ošiju",1813,91],["žiju",1813,38],["anju",-1,84],["enju",-1,85],["snju",-1,122],["šnju",-1,86],["uju",-1,25],["lucuju",1852,121],["iruju",1852,100],["lučuju",1852,117],["ku",-1,95],["sku",1856,1],["šku",1856,2],["alu",-1,104],["ijalu",1859,47],["nalu",1859,46],["elu",-1,83],["ilu",-1,116],["ozilu",1863,48],["olu",-1,50],["ramu",-1,52],["acemu",-1,124],["ecemu",-1,125],["ucemu",-1,126],["anjijemu",-1,84],["enjijemu",-1,85],["snjijemu",-1,122],["šnjijemu",-1,86],["kijemu",-1,95],["skijemu",1874,1],["škijemu",1874,2],["elijemu",-1,83],["nijemu",-1,13],["osijemu",-1,123],["atijemu",-1,120],["evitijemu",-1,92],["ovitijemu",-1,93],["astijemu",-1,94],["avijemu",-1,77],["evijemu",-1,78],["ivijemu",-1,79],["ovijemu",-1,80],["ošijemu",-1,91],["anjemu",-1,84],["enjemu",-1,85],["snjemu",-1,122],["šnjemu",-1,86],["kemu",-1,95],["skemu",1893,1],["škemu",1893,2],["lemu",-1,51],["elemu",1896,83],["nemu",-1,13],["anemu",1898,10],["enemu",1898,87],["snemu",1898,159],["šnemu",1898,88],["osemu",-1,123],["atemu",-1,120],["evitemu",-1,92],["ovitemu",-1,93],["astemu",-1,94],["avemu",-1,77],["evemu",-1,78],["ivemu",-1,79],["ovemu",-1,80],["aćemu"
,-1,14],["ećemu",-1,15],["ućemu",-1,16],["ošemu",-1,91],["acomu",-1,124],["ecomu",-1,125],["ucomu",-1,126],["anjomu",-1,84],["enjomu",-1,85],["snjomu",-1,122],["šnjomu",-1,86],["komu",-1,95],["skomu",1923,1],["škomu",1923,2],["elomu",-1,83],["nomu",-1,13],["cinomu",1927,137],["činomu",1927,89],["osomu",-1,123],["atomu",-1,120],["evitomu",-1,92],["ovitomu",-1,93],["astomu",-1,94],["avomu",-1,77],["evomu",-1,78],["ivomu",-1,79],["ovomu",-1,80],["aćomu",-1,14],["ećomu",-1,15],["ućomu",-1,16],["ošomu",-1,91],["nu",-1,13],["anu",1943,10],["astanu",1944,110],["istanu",1944,111],["ostanu",1944,112],["inu",1943,11],["cinu",1948,137],["aninu",1948,10],["činu",1948,89],["onu",1943,12],["aru",-1,53],["dru",-1,54],["eru",-1,55],["oru",-1,56],["basu",-1,135],["gasu",-1,131],["jasu",-1,129],["kasu",-1,133],["nasu",-1,132],["tasu",-1,130],["vasu",-1,134],["esu",-1,57],["isu",-1,58],["osu",-1,123],["atu",-1,120],["ikatu",1967,68],["latu",1967,69],["etu",-1,70],["evitu",-1,92],["ovitu",-1,93],["astu",-1,94],["estu",-1,71],["istu",-1,72],["kstu",-1,73],["ostu",-1,74],["ištu",-1,75],["avu",-1,77],["evu",-1,78],["ivu",-1,79],["ovu",-1,80],["lovu",1982,82],["movu",1982,81],["stvu",-1,3],["štvu",-1,4],["bašu",-1,63],["gašu",-1,64],["jašu",-1,61],["kašu",-1,62],["našu",-1,60],["tašu",-1,59],["vašu",-1,65],["ešu",-1,66],["išu",-1,67],["ošu",-1,91],["avav",-1,97],["evav",-1,96],["ivav",-1,98],["uvav",-1,99],["kov",-1,95],["aš",-1,104],["iraš",2002,100],["uraš",2002,105],["taš",2002,113],["avaš",2002,97],["evaš",2002,96],["ivaš",2002,98],["uvaš",2002,99],["ačaš",2002,102],["eš",-1,119],["astadeš",2011,110],["istadeš",2011,111],["ostadeš",2011,112],["astaješ",2011,106],["istaješ",2011,107],["ostaješ",2011,108],["iješ",2011,116],["inješ",2011,114],["uješ",2011,25],["iruješ",2020,100],["lučuješ",2020,117],["neš",2011,13],["astaneš",2023,110],["istaneš",2023,111],["ostaneš",2023,112],["eteš",2011,70],["asteš",2011,115],["iš",-1,116],["niš",2029,103],["jetiš",2029,118],["ačiš",2029,101],["lučiš",2029,117],["rošiš",2029,90]],t=[["a",-1,1],["oga",0,1],["ama",0,1],["ima",0,1],["ena",0,1],["e",-1,1],["og",-1,1],["anog",6,1],["enog",6,1],["anih",-1,1],["enih",-1,1],["i",-1,1],["ani",11,1],["eni",11,1],["anoj",-1,1],["enoj",-1,1],["anim",-1,1],["enim",-1,1],["om",-1,1],["enom",18,1],["o",-1,1],["ano",20,1],["eno",20,1],["ost",-1,1],["u",-1,1],["enu",24,1]],u=[17,65,16],n=[65,4,0,0,0,0,0,0,0,0,0,4,0,0,128],j=[119,95,23,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,136,0,0,0,0,0,0,0,0,0,128,0,0,0,16],c=[1],f=0,l=!1;function v(){return f<=m.cursor}this.stem=function(){(()=>{for(var a,e=m.cursor;;){var i=m.cursor;a:{for(;;){var r=m.cursor;if(m.bra=m.cursor,0!=(a=m.find_among(s))){switch(m.ket=m.cursor,a){case 1:if(m.slice_from("a"))break;return;case 2:if(m.slice_from("b"))break;return;case 3:if(m.slice_from("v"))break;return;case 4:if(m.slice_from("g"))break;return;case 5:if(m.slice_from("d"))break;return;case 6:if(m.slice_from("đ"))break;return;case 7:if(m.slice_from("e"))break;return;case 8:if(m.slice_from("ž"))break;return;case 9:if(m.slice_from("z"))break;return;case 10:if(m.slice_from("i"))break;return;case 11:if(m.slice_from("j"))break;return;case 12:if(m.slice_from("k"))break;return;case 13:if(m.slice_from("l"))break;return;case 14:if(m.slice_from("lj"))break;return;case 15:if(m.slice_from("m"))break;return;case 16:if(m.slice_from("n"))break;return;case 17:if(m.slice_from("nj"))break;return;case 18:if(m.slice_from("o"))break;return;case 19:if(m.slice_from("p"))break;return;case 20:if(m.slice_from("r"))break;return;case 
21:if(m.slice_from("s"))break;return;case 22:if(m.slice_from("t"))break;return;case 23:if(m.slice_from("ć"))break;return;case 24:if(m.slice_from("u"))break;return;case 25:if(m.slice_from("f"))break;return;case 26:if(m.slice_from("h"))break;return;case 27:if(m.slice_from("c"))break;return;case 28:if(m.slice_from("č"))break;return;case 29:if(m.slice_from("dž"))break;return;case 30:if(m.slice_from("š"))break;return}m.cursor=r;break}if(m.cursor=r,m.cursor>=m.limit)break a;m.cursor++}continue}m.cursor=i;break}m.cursor=e})(),(()=>{for(var a=m.cursor;;){var e=m.cursor;a:{for(;;){var i=m.cursor;if(m.in_grouping(j,98,382)&&(m.bra=m.cursor,m.eq_s("ije"))&&(m.ket=m.cursor,m.in_grouping(j,98,382))){if(!m.slice_from("e"))return;m.cursor=i;break}if(m.cursor=i,m.cursor>=m.limit)break a;m.cursor++}continue}m.cursor=e;break}for(m.cursor=a,a=m.cursor;;){var r=m.cursor;a:{for(;;){var s=m.cursor;if(m.in_grouping(j,98,382)&&(m.bra=m.cursor,m.eq_s("je"))&&(m.ket=m.cursor,m.in_grouping(j,98,382))){if(!m.slice_from("e"))return;m.cursor=s;break}if(m.cursor=s,m.cursor>=m.limit)break a;m.cursor++}continue}m.cursor=r;break}for(m.cursor=a,a=m.cursor;;){var o=m.cursor;a:{for(;;){var t=m.cursor;if(m.bra=m.cursor,m.eq_s("dj")){if(m.ket=m.cursor,!m.slice_from("đ"))return;m.cursor=t;break}if(m.cursor=t,m.cursor>=m.limit)break a;m.cursor++}continue}m.cursor=o;break}m.cursor=a})(),l=!0;var a=m.cursor,a=(m.go_out_grouping(n,263,382)&&(m.cursor++,l=!1),m.cursor=a,f=m.limit,m.cursor),a=(m.go_out_grouping(u,97,117)&&(m.cursor++,2<=(f=m.cursor)||m.go_in_grouping(u,97,117)&&(m.cursor++,f=m.cursor)),m.cursor=a,m.cursor);a:{for(;;){if(m.eq_s("r"))break;if(m.cursor>=m.limit)break a;m.cursor++}var e=m.cursor;if(m.cursor<2){if(m.cursor=e,!m.go_in_grouping(c,114,114))break a;m.cursor++}f-m.cursor<=1||(f=m.cursor)}m.cursor=a,m.limit_backward=m.cursor,m.cursor=m.limit;var a=m.limit-m.cursor,a=((()=>{var a;if(m.ket=m.cursor,0!=(a=m.find_among_b(r)))switch(m.bra=m.cursor,a){case 1:if(m.slice_from("loga"))break;return;case 2:if(m.slice_from("peh"))break;return;case 3:if(m.slice_from("vojka"))break;return;case 4:if(m.slice_from("bojka"))break;return;case 5:if(m.slice_from("jak"))break;return;case 6:if(m.slice_from("čajni"))break;return;case 7:if(!l)return;if(m.slice_from("cajni"))break;return;case 8:if(m.slice_from("erni"))break;return;case 9:if(m.slice_from("larni"))break;return;case 10:if(m.slice_from("esni"))break;return;case 11:if(m.slice_from("anjca"))break;return;case 12:if(m.slice_from("ajca"))break;return;case 13:if(m.slice_from("ljca"))break;return;case 14:if(m.slice_from("ejca"))break;return;case 15:if(m.slice_from("ojca"))break;return;case 16:if(m.slice_from("ajka"))break;return;case 17:if(m.slice_from("ojka"))break;return;case 18:if(m.slice_from("šca"))break;return;case 19:if(m.slice_from("ing"))break;return;case 20:if(m.slice_from("tvenik"))break;return;case 21:if(m.slice_from("tetika"))break;return;case 22:if(m.slice_from("nstva"))break;return;case 23:if(m.slice_from("nik"))break;return;case 24:if(m.slice_from("tik"))break;return;case 25:if(m.slice_from("zik"))break;return;case 26:if(m.slice_from("snik"))break;return;case 27:if(m.slice_from("kusi"))break;return;case 28:if(m.slice_from("kusni"))break;return;case 29:if(m.slice_from("kustva"))break;return;case 30:if(m.slice_from("dušni"))break;return;case 31:if(!l)return;if(m.slice_from("dusni"))break;return;case 32:if(m.slice_from("antni"))break;return;case 33:if(m.slice_from("bilni"))break;return;case 34:if(m.slice_from("tilni"))break;return;case 
35:if(m.slice_from("avilni"))break;return;case 36:if(m.slice_from("silni"))break;return;case 37:if(m.slice_from("gilni"))break;return;case 38:if(m.slice_from("rilni"))break;return;case 39:if(m.slice_from("nilni"))break;return;case 40:if(m.slice_from("alni"))break;return;case 41:if(m.slice_from("ozni"))break;return;case 42:if(m.slice_from("ravi"))break;return;case 43:if(m.slice_from("stavni"))break;return;case 44:if(m.slice_from("pravni"))break;return;case 45:if(m.slice_from("tivni"))break;return;case 46:if(m.slice_from("sivni"))break;return;case 47:if(m.slice_from("atni"))break;return;case 48:if(m.slice_from("enta"))break;return;case 49:if(m.slice_from("tetni"))break;return;case 50:if(m.slice_from("pletni"))break;return;case 51:if(m.slice_from("šavi"))break;return;case 52:if(!l)return;if(m.slice_from("savi"))break;return;case 53:if(m.slice_from("anta"))break;return;case 54:if(m.slice_from("ačka"))break;return;case 55:if(!l)return;if(m.slice_from("acka"))break;return;case 56:if(m.slice_from("uška"))break;return;case 57:if(!l)return;if(m.slice_from("uska"))break;return;case 58:if(m.slice_from("atka"))break;return;case 59:if(m.slice_from("etka"))break;return;case 60:if(m.slice_from("itka"))break;return;case 61:if(m.slice_from("otka"))break;return;case 62:if(m.slice_from("utka"))break;return;case 63:if(m.slice_from("eskna"))break;return;case 64:if(m.slice_from("tični"))break;return;case 65:if(!l)return;if(m.slice_from("ticni"))break;return;case 66:if(m.slice_from("ojska"))break;return;case 67:if(m.slice_from("esma"))break;return;case 68:if(m.slice_from("metra"))break;return;case 69:if(m.slice_from("centra"))break;return;case 70:if(m.slice_from("istra"))break;return;case 71:if(m.slice_from("osti"))break;return;case 72:if(!l)return;if(m.slice_from("osti"))break;return;case 73:if(m.slice_from("dba"))break;return;case 74:if(m.slice_from("čka"))break;return;case 75:if(m.slice_from("mca"))break;return;case 76:if(m.slice_from("nca"))break;return;case 77:if(m.slice_from("voljni"))break;return;case 78:if(m.slice_from("anki"))break;return;case 79:if(m.slice_from("vca"))break;return;case 80:if(m.slice_from("sca"))break;return;case 81:if(m.slice_from("rca"))break;return;case 82:if(m.slice_from("alca"))break;return;case 83:if(m.slice_from("elca"))break;return;case 84:if(m.slice_from("olca"))break;return;case 85:if(m.slice_from("njca"))break;return;case 86:if(m.slice_from("ekta"))break;return;case 87:if(m.slice_from("izma"))break;return;case 88:if(m.slice_from("jebi"))break;return;case 89:if(m.slice_from("baci"))break;return;case 90:if(m.slice_from("ašni"))break;return;case 91:if(!l)return;if(m.slice_from("asni"))break}})(),m.cursor=m.limit-a,m.limit-m.cursor),i=m.limit-m.cursor;return(()=>{var a;if(m.ket=m.cursor,0!=(a=m.find_among_b(o))&&(m.bra=m.cursor,v())){switch(a){case 1:if(m.slice_from("sk"))break;return;case 2:if(m.slice_from("šk"))break;return;case 3:if(m.slice_from("stv"))break;return;case 4:if(m.slice_from("štv"))break;return;case 5:if(m.slice_from("tanij"))break;return;case 6:if(m.slice_from("manij"))break;return;case 7:if(m.slice_from("panij"))break;return;case 8:if(m.slice_from("ranij"))break;return;case 9:if(m.slice_from("ganij"))break;return;case 10:if(m.slice_from("an"))break;return;case 11:if(m.slice_from("in"))break;return;case 12:if(m.slice_from("on"))break;return;case 13:if(m.slice_from("n"))break;return;case 14:if(m.slice_from("ać"))break;return;case 15:if(m.slice_from("eć"))break;return;case 16:if(m.slice_from("uć"))break;return;case 17:if(m.slice_from("ugov"))break;return;case 
18:if(m.slice_from("ug"))break;return;case 19:if(m.slice_from("log"))break;return;case 20:if(m.slice_from("g"))break;return;case 21:if(m.slice_from("rari"))break;return;case 22:if(m.slice_from("oti"))break;return;case 23:if(m.slice_from("si"))break;return;case 24:if(m.slice_from("li"))break;return;case 25:if(m.slice_from("uj"))break;return;case 26:if(m.slice_from("caj"))break;return;case 27:if(m.slice_from("čaj"))break;return;case 28:if(m.slice_from("ćaj"))break;return;case 29:if(m.slice_from("đaj"))break;return;case 30:if(m.slice_from("laj"))break;return;case 31:if(m.slice_from("raj"))break;return;case 32:if(m.slice_from("bij"))break;return;case 33:if(m.slice_from("cij"))break;return;case 34:if(m.slice_from("dij"))break;return;case 35:if(m.slice_from("lij"))break;return;case 36:if(m.slice_from("nij"))break;return;case 37:if(m.slice_from("mij"))break;return;case 38:if(m.slice_from("žij"))break;return;case 39:if(m.slice_from("gij"))break;return;case 40:if(m.slice_from("fij"))break;return;case 41:if(m.slice_from("pij"))break;return;case 42:if(m.slice_from("rij"))break;return;case 43:if(m.slice_from("sij"))break;return;case 44:if(m.slice_from("tij"))break;return;case 45:if(m.slice_from("zij"))break;return;case 46:if(m.slice_from("nal"))break;return;case 47:if(m.slice_from("ijal"))break;return;case 48:if(m.slice_from("ozil"))break;return;case 49:if(m.slice_from("olov"))break;return;case 50:if(m.slice_from("ol"))break;return;case 51:if(m.slice_from("lem"))break;return;case 52:if(m.slice_from("ram"))break;return;case 53:if(m.slice_from("ar"))break;return;case 54:if(m.slice_from("dr"))break;return;case 55:if(m.slice_from("er"))break;return;case 56:if(m.slice_from("or"))break;return;case 57:if(m.slice_from("es"))break;return;case 58:if(m.slice_from("is"))break;return;case 59:if(m.slice_from("taš"))break;return;case 60:if(m.slice_from("naš"))break;return;case 61:if(m.slice_from("jaš"))break;return;case 62:if(m.slice_from("kaš"))break;return;case 63:if(m.slice_from("baš"))break;return;case 64:if(m.slice_from("gaš"))break;return;case 65:if(m.slice_from("vaš"))break;return;case 66:if(m.slice_from("eš"))break;return;case 67:if(m.slice_from("iš"))break;return;case 68:if(m.slice_from("ikat"))break;return;case 69:if(m.slice_from("lat"))break;return;case 70:if(m.slice_from("et"))break;return;case 71:if(m.slice_from("est"))break;return;case 72:if(m.slice_from("ist"))break;return;case 73:if(m.slice_from("kst"))break;return;case 74:if(m.slice_from("ost"))break;return;case 75:if(m.slice_from("išt"))break;return;case 76:if(m.slice_from("ova"))break;return;case 77:if(m.slice_from("av"))break;return;case 78:if(m.slice_from("ev"))break;return;case 79:if(m.slice_from("iv"))break;return;case 80:if(m.slice_from("ov"))break;return;case 81:if(m.slice_from("mov"))break;return;case 82:if(m.slice_from("lov"))break;return;case 83:if(m.slice_from("el"))break;return;case 84:if(m.slice_from("anj"))break;return;case 85:if(m.slice_from("enj"))break;return;case 86:if(m.slice_from("šnj"))break;return;case 87:if(m.slice_from("en"))break;return;case 88:if(m.slice_from("šn"))break;return;case 89:if(m.slice_from("čin"))break;return;case 90:if(m.slice_from("roši"))break;return;case 91:if(m.slice_from("oš"))break;return;case 92:if(m.slice_from("evit"))break;return;case 93:if(m.slice_from("ovit"))break;return;case 94:if(m.slice_from("ast"))break;return;case 95:if(m.slice_from("k"))break;return;case 96:if(m.slice_from("eva"))break;return;case 97:if(m.slice_from("ava"))break;return;case 98:if(m.slice_from("iva"))break;return;case 
99:if(m.slice_from("uva"))break;return;case 100:if(m.slice_from("ir"))break;return;case 101:if(m.slice_from("ač"))break;return;case 102:if(m.slice_from("ača"))break;return;case 103:if(m.slice_from("ni"))break;return;case 104:if(m.slice_from("a"))break;return;case 105:if(m.slice_from("ur"))break;return;case 106:if(m.slice_from("astaj"))break;return;case 107:if(m.slice_from("istaj"))break;return;case 108:if(m.slice_from("ostaj"))break;return;case 109:if(m.slice_from("aj"))break;return;case 110:if(m.slice_from("asta"))break;return;case 111:if(m.slice_from("ista"))break;return;case 112:if(m.slice_from("osta"))break;return;case 113:if(m.slice_from("ta"))break;return;case 114:if(m.slice_from("inj"))break;return;case 115:if(m.slice_from("as"))break;return;case 116:if(m.slice_from("i"))break;return;case 117:if(m.slice_from("luč"))break;return;case 118:if(m.slice_from("jeti"))break;return;case 119:if(m.slice_from("e"))break;return;case 120:if(m.slice_from("at"))break;return;case 121:if(!l)return;if(m.slice_from("luc"))break;return;case 122:if(!l)return;if(m.slice_from("snj"))break;return;case 123:if(!l)return;if(m.slice_from("os"))break;return;case 124:if(!l)return;if(m.slice_from("ac"))break;return;case 125:if(!l)return;if(m.slice_from("ec"))break;return;case 126:if(!l)return;if(m.slice_from("uc"))break;return;case 127:if(!l)return;if(m.slice_from("rosi"))break;return;case 128:if(!l)return;if(m.slice_from("aca"))break;return;case 129:if(!l)return;if(m.slice_from("jas"))break;return;case 130:if(!l)return;if(m.slice_from("tas"))break;return;case 131:if(!l)return;if(m.slice_from("gas"))break;return;case 132:if(!l)return;if(m.slice_from("nas"))break;return;case 133:if(!l)return;if(m.slice_from("kas"))break;return;case 134:if(!l)return;if(m.slice_from("vas"))break;return;case 135:if(!l)return;if(m.slice_from("bas"))break;return;case 136:if(!l)return;if(m.slice_from("as"))break;return;case 137:if(!l)return;if(m.slice_from("cin"))break;return;case 138:if(!l)return;if(m.slice_from("astaj"))break;return;case 139:if(!l)return;if(m.slice_from("istaj"))break;return;case 140:if(!l)return;if(m.slice_from("ostaj"))break;return;case 141:if(!l)return;if(m.slice_from("asta"))break;return;case 142:if(!l)return;if(m.slice_from("ista"))break;return;case 143:if(!l)return;if(m.slice_from("osta"))break;return;case 144:if(!l)return;if(m.slice_from("ava"))break;return;case 145:if(!l)return;if(m.slice_from("eva"))break;return;case 146:if(!l)return;if(m.slice_from("iva"))break;return;case 147:if(!l)return;if(m.slice_from("uva"))break;return;case 148:if(!l)return;if(m.slice_from("ova"))break;return;case 149:if(!l)return;if(m.slice_from("jeti"))break;return;case 150:if(!l)return;if(m.slice_from("inj"))break;return;case 151:if(!l)return;if(m.slice_from("ist"))break;return;case 152:if(!l)return;if(m.slice_from("es"))break;return;case 153:if(!l)return;if(m.slice_from("et"))break;return;case 154:if(!l)return;if(m.slice_from("is"))break;return;case 155:if(!l)return;if(m.slice_from("ir"))break;return;case 156:if(!l)return;if(m.slice_from("ur"))break;return;case 157:if(!l)return;if(m.slice_from("uj"))break;return;case 158:if(!l)return;if(m.slice_from("ni"))break;return;case 159:if(!l)return;if(m.slice_from("sn"))break;return;case 160:if(!l)return;if(m.slice_from("ta"))break;return;case 161:if(!l)return;if(m.slice_from("a"))break;return;case 162:if(!l)return;if(m.slice_from("i"))break;return;case 163:if(!l)return;if(m.slice_from("e"))break;return;case 164:if(!l)return;if(m.slice_from("n"))break;return}return 
1}})()||(m.cursor=m.limit-i,m.ket=m.cursor,0!=m.find_among_b(t)&&(m.bra=m.cursor,v())&&m.slice_from("")),m.cursor=m.limit-a,m.cursor=m.limit_backward,!0},this.stemWord=function(a){return m.setCurrent(a),this.stem(),m.getCurrent()}};
\ No newline at end of file
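Each regenerated stemmer above exposes the same two-method surface: stem() runs the compiled Snowball program against whatever string is currently loaded into the shared BaseStemmer instance, and stemWord(word) wraps setCurrent/stem/getCurrent, as the closing lines of the minified file show. A minimal usage sketch follows; the SerbianStemmer constructor name and the sample word are illustrative assumptions inferred from the file naming pattern, and the accompanying base-stemmer.js must be loaded first.

// Hypothetical usage sketch: stemWord() loads the word into the
// underlying BaseStemmer buffer, runs stem(), and returns the
// (possibly shortened) buffer; words matching no rule pass through.
var stemmer = new SerbianStemmer();
var stem = stemmer.stemWord("zgradama");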
diff --git a/sphinx/search/minified-js/spanish-stemmer.js b/sphinx/search/minified-js/spanish-stemmer.js
index ef634a8b75c..c47ed2e414a 100644
--- a/sphinx/search/minified-js/spanish-stemmer.js
+++ b/sphinx/search/minified-js/spanish-stemmer.js
@@ -1 +1 @@
-SpanishStemmer=function(){var r=new BaseStemmer;var e=[["",-1,6],["á",0,1],["é",0,2],["í",0,3],["ó",0,4],["ú",0,5]];var i=[["la",-1,-1],["sela",0,-1],["le",-1,-1],["me",-1,-1],["se",-1,-1],["lo",-1,-1],["selo",5,-1],["las",-1,-1],["selas",7,-1],["les",-1,-1],["los",-1,-1],["selos",10,-1],["nos",-1,-1]];var a=[["ando",-1,6],["iendo",-1,6],["yendo",-1,7],["ándo",-1,2],["iéndo",-1,1],["ar",-1,6],["er",-1,6],["ir",-1,6],["ár",-1,3],["ér",-1,4],["ír",-1,5]];var s=[["ic",-1,-1],["ad",-1,-1],["os",-1,-1],["iv",-1,1]];var u=[["able",-1,1],["ible",-1,1],["ante",-1,1]];var o=[["ic",-1,1],["abil",-1,1],["iv",-1,1]];var t=[["ica",-1,1],["ancia",-1,2],["encia",-1,5],["adora",-1,2],["osa",-1,1],["ista",-1,1],["iva",-1,9],["anza",-1,1],["logía",-1,3],["idad",-1,8],["able",-1,1],["ible",-1,1],["ante",-1,2],["mente",-1,7],["amente",13,6],["ación",-1,2],["ución",-1,4],["ico",-1,1],["ismo",-1,1],["oso",-1,1],["amiento",-1,1],["imiento",-1,1],["ivo",-1,9],["ador",-1,2],["icas",-1,1],["ancias",-1,2],["encias",-1,5],["adoras",-1,2],["osas",-1,1],["istas",-1,1],["ivas",-1,9],["anzas",-1,1],["logías",-1,3],["idades",-1,8],["ables",-1,1],["ibles",-1,1],["aciones",-1,2],["uciones",-1,4],["adores",-1,2],["antes",-1,2],["icos",-1,1],["ismos",-1,1],["osos",-1,1],["amientos",-1,1],["imientos",-1,1],["ivos",-1,9]];var c=[["ya",-1,1],["ye",-1,1],["yan",-1,1],["yen",-1,1],["yeron",-1,1],["yendo",-1,1],["yo",-1,1],["yas",-1,1],["yes",-1,1],["yais",-1,1],["yamos",-1,1],["yó",-1,1]];var l=[["aba",-1,2],["ada",-1,2],["ida",-1,2],["ara",-1,2],["iera",-1,2],["ía",-1,2],["aría",5,2],["ería",5,2],["iría",5,2],["ad",-1,2],["ed",-1,2],["id",-1,2],["ase",-1,2],["iese",-1,2],["aste",-1,2],["iste",-1,2],["an",-1,2],["aban",16,2],["aran",16,2],["ieran",16,2],["ían",16,2],["arían",20,2],["erían",20,2],["irían",20,2],["en",-1,1],["asen",24,2],["iesen",24,2],["aron",-1,2],["ieron",-1,2],["arán",-1,2],["erán",-1,2],["irán",-1,2],["ado",-1,2],["ido",-1,2],["ando",-1,2],["iendo",-1,2],["ar",-1,2],["er",-1,2],["ir",-1,2],["as",-1,2],["abas",39,2],["adas",39,2],["idas",39,2],["aras",39,2],["ieras",39,2],["ías",39,2],["arías",45,2],["erías",45,2],["irías",45,2],["es",-1,1],["ases",49,2],["ieses",49,2],["abais",-1,2],["arais",-1,2],["ierais",-1,2],["íais",-1,2],["aríais",55,2],["eríais",55,2],["iríais",55,2],["aseis",-1,2],["ieseis",-1,2],["asteis",-1,2],["isteis",-1,2],["áis",-1,2],["éis",-1,1],["aréis",64,2],["eréis",64,2],["iréis",64,2],["ados",-1,2],["idos",-1,2],["amos",-1,2],["ábamos",70,2],["áramos",70,2],["iéramos",70,2],["íamos",70,2],["aríamos",74,2],["eríamos",74,2],["iríamos",74,2],["emos",-1,1],["aremos",78,2],["eremos",78,2],["iremos",78,2],["ásemos",78,2],["iésemos",78,2],["imos",-1,2],["arás",-1,2],["erás",-1,2],["irás",-1,2],["ís",-1,2],["ará",-1,2],["erá",-1,2],["irá",-1,2],["aré",-1,2],["eré",-1,2],["iré",-1,2],["ió",-1,2]];var f=[["a",-1,1],["e",-1,2],["o",-1,1],["os",-1,1],["á",-1,1],["é",-1,2],["í",-1,1],["ó",-1,1]];var n=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,1,17,4,10];var b=0;var m=0;var k=0;function _(){k=r.limit;m=r.limit;b=r.limit;var e=r.cursor;r:{e:{var i=r.cursor;i:{if(!r.in_grouping(n,97,252)){break i}a:{var a=r.cursor;s:{if(!r.out_grouping(n,97,252)){break s}u:while(true){o:{if(!r.in_grouping(n,97,252)){break o}break u}if(r.cursor>=r.limit){break s}r.cursor++}break a}r.cursor=a;if(!r.in_grouping(n,97,252)){break i}s:while(true){u:{if(!r.out_grouping(n,97,252)){break u}break s}if(r.cursor>=r.limit){break i}r.cursor++}}break e}r.cursor=i;if(!r.out_grouping(n,97,252)){break r}i:{var 
s=r.cursor;a:{if(!r.out_grouping(n,97,252)){break a}s:while(true){u:{if(!r.in_grouping(n,97,252)){break u}break s}if(r.cursor>=r.limit){break a}r.cursor++}break i}r.cursor=s;if(!r.in_grouping(n,97,252)){break r}if(r.cursor>=r.limit){break r}r.cursor++}}k=r.cursor}r.cursor=e;var u=r.cursor;r:{e:while(true){i:{if(!r.in_grouping(n,97,252)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(n,97,252)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}m=r.cursor;e:while(true){i:{if(!r.in_grouping(n,97,252)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(n,97,252)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}b=r.cursor}r.cursor=u;return true}function d(){var i;while(true){var a=r.cursor;r:{r.bra=r.cursor;i=r.find_among(e);if(i==0){break r}r.ket=r.cursor;switch(i){case 1:if(!r.slice_from("a")){return false}break;case 2:if(!r.slice_from("e")){return false}break;case 3:if(!r.slice_from("i")){return false}break;case 4:if(!r.slice_from("o")){return false}break;case 5:if(!r.slice_from("u")){return false}break;case 6:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=a;break}return true}function v(){if(!(k<=r.cursor)){return false}return true}function g(){if(!(m<=r.cursor)){return false}return true}function w(){if(!(b<=r.cursor)){return false}return true}function h(){var e;r.ket=r.cursor;if(r.find_among_b(i)==0){return false}r.bra=r.cursor;e=r.find_among_b(a);if(e==0){return false}if(!v()){return false}switch(e){case 1:r.bra=r.cursor;if(!r.slice_from("iendo")){return false}break;case 2:r.bra=r.cursor;if(!r.slice_from("ando")){return false}break;case 3:r.bra=r.cursor;if(!r.slice_from("ar")){return false}break;case 4:r.bra=r.cursor;if(!r.slice_from("er")){return false}break;case 5:r.bra=r.cursor;if(!r.slice_from("ir")){return false}break;case 6:if(!r.slice_del()){return false}break;case 7:if(!r.eq_s_b("u")){return false}if(!r.slice_del()){return false}break}return true}function p(){var e;r.ket=r.cursor;e=r.find_among_b(t);if(e==0){return false}r.bra=r.cursor;switch(e){case 1:if(!w()){return false}if(!r.slice_del()){return false}break;case 2:if(!w()){return false}if(!r.slice_del()){return false}var i=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("ic")){r.cursor=r.limit-i;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-i;break r}if(!r.slice_del()){return false}}break;case 3:if(!w()){return false}if(!r.slice_from("log")){return false}break;case 4:if(!w()){return false}if(!r.slice_from("u")){return false}break;case 5:if(!w()){return false}if(!r.slice_from("ente")){return false}break;case 6:if(!g()){return false}if(!r.slice_del()){return false}var a=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(s);if(e==0){r.cursor=r.limit-a;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-a;break r}if(!r.slice_del()){return false}switch(e){case 1:r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-a;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-a;break r}if(!r.slice_del()){return false}break}}break;case 7:if(!w()){return false}if(!r.slice_del()){return false}var c=r.limit-r.cursor;r:{r.ket=r.cursor;if(r.find_among_b(u)==0){r.cursor=r.limit-c;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}}break;case 8:if(!w()){return false}if(!r.slice_del()){return false}var l=r.limit-r.cursor;r:{r.ket=r.cursor;if(r.find_among_b(o)==0){r.cursor=r.limit-l;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-l;break r}if(!r.slice_del()){return false}}break;case 
9:if(!w()){return false}if(!r.slice_del()){return false}var f=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-f;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-f;break r}if(!r.slice_del()){return false}}break}return true}function y(){if(r.cursor=a.limit)break r}a.cursor++}g=a.cursor}a.cursor=r,r=a.cursor,a.go_out_grouping(b,97,252)&&(a.cursor++,a.go_in_grouping(b,97,252))&&(a.cursor++,k=a.cursor,a.go_out_grouping(b,97,252))&&(a.cursor++,a.go_in_grouping(b,97,252))&&(a.cursor++,d=a.cursor),a.cursor=r,a.limit_backward=a.cursor,a.cursor=a.limit;var r=a.limit-a.cursor,r=((()=>{var r;if(a.ket=a.cursor,0!=a.find_among_b(c)&&(a.bra=a.cursor,0!=(r=a.find_among_b(u)))&&v())switch(r){case 1:if(a.bra=a.cursor,a.slice_from("iendo"))break;return;case 2:if(a.bra=a.cursor,a.slice_from("ando"))break;return;case 3:if(a.bra=a.cursor,a.slice_from("ar"))break;return;case 4:if(a.bra=a.cursor,a.slice_from("er"))break;return;case 5:if(a.bra=a.cursor,a.slice_from("ir"))break;return;case 6:if(a.slice_del())break;return;case 7:if(!a.eq_s_b("u"))return;if(a.slice_del())break}})(),a.cursor=a.limit-r,a.limit-a.cursor),s=a.limit-a.cursor,s=(p()||(a.cursor=a.limit-s,(()=>{if(!(a.cursor{var r;if(!(a.cursor{var r;if(a.ket=a.cursor,0!=(r=a.find_among_b(_)))switch(a.bra=a.cursor,r){case 1:if(!v())return;if(a.slice_del())break;return;case 2:if(!v())return;if(!a.slice_del())return;var i=a.limit-a.cursor;if(a.ket=a.cursor,a.eq_s_b("u")){a.bra=a.cursor;var e=a.limit-a.cursor;if(a.eq_s_b("g"))if(a.cursor=a.limit-e,v()){if(!a.slice_del());}else a.cursor=a.limit-i;else a.cursor=a.limit-i}else a.cursor=a.limit-i}})(),a.cursor=a.limit-s,a.cursor=a.limit_backward,a.cursor);return(()=>{for(var r;;){var i=a.cursor;r:{switch(a.bra=a.cursor,r=a.find_among(o),a.ket=a.cursor,r){case 1:if(a.slice_from("a"))break;return;case 2:if(a.slice_from("e"))break;return;case 3:if(a.slice_from("i"))break;return;case 4:if(a.slice_from("o"))break;return;case 5:if(a.slice_from("u"))break;return;case 6:if(a.cursor>=a.limit)break r;a.cursor++}continue}a.cursor=i;break}})(),a.cursor=r,!0},this.stemWord=function(r){return a.setCurrent(r),this.stem(),a.getCurrent()}};
\ No newline at end of file
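The large [suffix, link, result] arrays in every stemmer are Snowball "among" tables: find_among_b searches backwards from the cursor for the longest entry matching as a suffix and returns that entry's result code, which the surrounding switch statements dispatch on; the second field links an entry to the shorter entry it extends, which the real implementation uses to avoid re-comparing shared characters. A simplified, readable stand-in under the assumption of a plain linear scan:

// Sketch of find_among_b semantics: return the result code of the
// longest table entry that is a suffix of `word`, or 0 when nothing
// matches. The real routine is a binary search over the sorted table.
function findAmongB(word, among) {
  var best = -1;
  for (var i = 0; i < among.length; i++) {
    var s = among[i][0];
    var matches = s === "" || word.slice(-s.length) === s;
    if (matches && (best === -1 || s.length > among[best][0].length)) {
      best = i;
    }
  }
  return best === -1 ? 0 : among[best][2];
}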
diff --git a/sphinx/search/minified-js/swedish-stemmer.js b/sphinx/search/minified-js/swedish-stemmer.js
index b975f54284d..d66010809c6 100644
--- a/sphinx/search/minified-js/swedish-stemmer.js
+++ b/sphinx/search/minified-js/swedish-stemmer.js
@@ -1 +1 @@
-SwedishStemmer=function(){var r=new BaseStemmer;var e=[["a",-1,1],["arna",0,1],["erna",0,1],["heterna",2,1],["orna",0,1],["ad",-1,1],["e",-1,1],["ade",6,1],["ande",6,1],["arne",6,1],["are",6,1],["aste",6,1],["en",-1,1],["anden",12,1],["aren",12,1],["heten",12,1],["ern",-1,1],["ar",-1,1],["er",-1,1],["heter",18,1],["or",-1,1],["s",-1,2],["as",21,1],["arnas",22,1],["ernas",22,1],["ornas",22,1],["es",21,1],["ades",26,1],["andes",26,1],["ens",21,1],["arens",29,1],["hetens",29,1],["erns",21,1],["at",-1,1],["andet",-1,1],["het",-1,1],["ast",-1,1]];var a=[["dd",-1,-1],["gd",-1,-1],["nn",-1,-1],["dt",-1,-1],["gt",-1,-1],["kt",-1,-1],["tt",-1,-1]];var i=[["ig",-1,1],["lig",0,1],["els",-1,1],["fullt",-1,3],["löst",-1,2]];var t=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,24,0,32];var s=[119,127,149];var u=0;var n=0;function c(){n=r.limit;var e=r.cursor;{var a=r.cursor+3;if(a>r.limit){return false}r.cursor=a}u=r.cursor;r.cursor=e;r:while(true){var i=r.cursor;e:{if(!r.in_grouping(t,97,246)){break e}r.cursor=i;break r}r.cursor=i;if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(t,97,246)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}n=r.cursor;r:{if(!(na.limit_backward))return a.cursor=a.limit-r,r=a.limit-a.cursor,0==a.find_among_b(i)?(a.cursor=a.limit-r,1):void 0}this.stem=function(){var r=a.cursor,i=(m=a.limit,t=a.cursor,(i=a.cursor+3)>a.limit||(a.cursor=i,l=a.cursor,a.cursor=t,a.go_out_grouping(u,97,246)&&(a.cursor++,a.go_in_grouping(u,97,246))&&(a.cursor++,m=a.cursor,l<=m||(m=l))),a.cursor=r,a.limit_backward=a.cursor,a.cursor=a.limit,a.limit-a.cursor),t=((()=>{var r;if(!(a.cursor{var r;if(!(a.cursor{var r;if(z=!1,A()){l.limit_backward=l.cursor,l.cursor=l.limit;var i=l.limit-l.cursor;r:{var c=l.limit-l.cursor;if(l.ket=l.cursor,0!=(r=l.find_among_b(p))){switch(l.bra=l.cursor,r){case 1:if(l.slice_del())break;return;case 2:var e=l.limit-l.cursor;if(0!=l.find_among_b(W))break r;if(l.cursor=l.limit-e,l.slice_del())break;return;case 3:var s=l.limit-l.cursor;if(0!=l.find_among_b(j))break r;if(l.cursor=l.limit-s,l.slice_del())break;return;case 4:var o=l.limit-l.cursor;if(l.eq_s_b("ச"))break r;if(l.cursor=l.limit-o,l.slice_from("்"))break;return;case 5:if(l.slice_from("்"))break;return;case 6:var u=l.limit-l.cursor;if(!l.eq_s_b("்"))break r;if(l.cursor=l.limit-u,l.slice_del())break;return}z=!0,l.cursor=l.limit-c}}if(l.cursor=l.limit-i,i=l.limit-l.cursor,l.ket=l.cursor,0!=l.find_among_b(x)){if(l.bra=l.cursor,!l.slice_del())return;z=!0}return l.cursor=l.limit-i,l.cursor=l.limit_backward,E(),!!z}})()){l.cursor=r;break}}}this.stem=function(){y=!1;var r,i,c=l.cursor;return F(),l.cursor=c,!!A()&&(c=l.cursor,l.bra=l.cursor,l.eq_s("எ")&&0!=l.find_among(e)&&l.eq_s("்")&&(l.ket=l.cursor,l.slice_del())&&(r=l.cursor,D(),l.cursor=r),l.cursor=c,r=l.cursor,l.bra=l.cursor,0!=l.find_among(s)&&0!=l.find_among(o)&&l.eq_s("்")&&(l.ket=l.cursor,l.slice_del())&&(c=l.cursor,D(),l.cursor=c),l.cursor=r,c=l.cursor,A()&&(l.limit_backward=l.cursor,l.cursor=l.limit,i=l.limit-l.cursor,l.ket=l.cursor,0!=l.find_among_b(v)&&(l.bra=l.cursor,!l.slice_from("்"))||(l.cursor=l.limit-i,l.cursor=l.limit_backward,E())),l.cursor=c,i=l.cursor,A()&&(l.limit_backward=l.cursor,l.cursor=l.limit,l.ket=l.cursor,l.eq_s_b("ும்"))&&(l.bra=l.cursor,l.slice_from("்"))&&(l.cursor=l.limit_backward,c=l.cursor,F(),l.cursor=c),l.cursor=i,c=l.cursor,(()=>{var r;if(A()&&(l.limit_backward=l.cursor,l.cursor=l.limit,l.ket=l.cursor,0!=(r=l.find_among_b(q)))){switch(l.bra=l.cursor,r){case 1:if(l.slice_from("்"))break;return;case 
2:var i=l.limit-l.cursor;if(0!=l.find_among_b(w))return;if(l.cursor=l.limit-i,l.slice_from("்"))break;return;case 3:if(l.slice_del())break;return}l.cursor=l.limit_backward,E()}})(),l.cursor=c,c=l.cursor,(()=>{var r;if(y=!1,A()){l.limit_backward=l.cursor,l.cursor=l.limit;r:{var i=l.limit-l.cursor;i:{var c=l.limit-l.cursor;if(l.ket=l.cursor,0!=(r=l.find_among_b(S))){switch(l.bra=l.cursor,r){case 1:if(l.slice_del())break;return;case 2:if(l.slice_from("்"))break;return;case 3:var e=l.limit-l.cursor;if(l.eq_s_b("ம"))break i;if(l.cursor=l.limit-e,l.slice_from("்"))break;return;case 4:if(l.current.length<7)break i;if(l.slice_from("்"))break;return;case 5:var s=l.limit-l.cursor;if(0!=l.find_among_b(h))break i;if(l.cursor=l.limit-s,l.slice_from("்"))break;return;case 6:var o=l.limit-l.cursor;if(0!=l.find_among_b(C))break i;if(l.cursor=l.limit-o,l.slice_del())break;return;case 7:if(l.slice_from("ி"))break;return}l.cursor=l.limit-c;break r}}l.cursor=l.limit-i;i=l.limit-l.cursor;if(l.ket=l.cursor,!l.eq_s_b("ை"))return;var u=l.limit-l.cursor,a=l.limit-l.cursor;if(0==l.find_among_b(B))l.cursor=l.limit-a;else{l.cursor=l.limit-u;a=l.limit-l.cursor;if(0==l.find_among_b(T))return;if(!l.eq_s_b("்"))return;l.cursor=l.limit-a}if(l.bra=l.cursor,!l.slice_from("்"))return;l.cursor=l.limit-i}y=!0;var t=l.limit-l.cursor;l.ket=l.cursor,l.eq_s_b("ின்")&&(l.bra=l.cursor,!l.slice_from("்"))||(l.cursor=l.limit-t,l.cursor=l.limit_backward,E())}})(),l.cursor=c,c=l.cursor,(()=>{var r;if(l.limit_backward=l.cursor,l.cursor=l.limit,l.ket=l.cursor,0!=(r=l.find_among_b(d))){switch(l.bra=l.cursor,r){case 1:r:{var i=l.limit-l.cursor;if(0!=l.find_among_b(u)){if(l.slice_from("ுங்"))break r;return}if(l.cursor=l.limit-i,!l.slice_from("்"))return}break;case 2:if(l.slice_from("ல்"))break;return;case 3:if(l.slice_from("ள்"))break;return;case 4:if(l.slice_del())break;return}l.cursor=l.limit_backward}})(),l.cursor=c,c=l.cursor,A()&&(l.limit_backward=l.cursor,l.cursor=l.limit,l.ket=l.cursor,0!=l.find_among_b(g))&&(l.bra=l.cursor,l.slice_del())&&(l.cursor=l.limit_backward),l.cursor=c,c=l.cursor,G(),l.cursor=c,!0)},this.stemWord=function(r){return l.setCurrent(r),this.stem(),l.getCurrent()}};
\ No newline at end of file
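Before any suffix removal, each stem() prologue computes the Snowball R1 boundary, here via go_out_grouping/go_in_grouping over a vowel bit-grouping (97-246 spans a-ö for Swedish): R1 begins after the first non-vowel that follows a vowel, and the Swedish rules additionally keep it from starting inside the first three characters (the cursor+3 clamp visible above). A sketch of that computation, assuming an isVowel predicate standing in for the bit-grouping table:

// R1 starts after the first non-vowel following a vowel; for Swedish
// it is clamped so it never begins before index 3 (when the word is
// that long). Suffixes are only removable when they lie inside R1.
function r1Start(word, isVowel) {
  var r1 = word.length;
  for (var i = 0; i + 1 < word.length; i++) {
    if (isVowel(word.charAt(i)) && !isVowel(word.charAt(i + 1))) {
      r1 = i + 2;
      break;
    }
  }
  return Math.max(r1, Math.min(3, word.length));
}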
diff --git a/sphinx/search/minified-js/turkish-stemmer.js b/sphinx/search/minified-js/turkish-stemmer.js
index 4c0a699bfbe..476e4abc4ad 100644
--- a/sphinx/search/minified-js/turkish-stemmer.js
+++ b/sphinx/search/minified-js/turkish-stemmer.js
@@ -1 +1 @@
-TurkishStemmer=function(){var r=new BaseStemmer;var i=[["m",-1,-1],["n",-1,-1],["miz",-1,-1],["niz",-1,-1],["muz",-1,-1],["nuz",-1,-1],["müz",-1,-1],["nüz",-1,-1],["mız",-1,-1],["nız",-1,-1]];var e=[["leri",-1,-1],["ları",-1,-1]];var u=[["ni",-1,-1],["nu",-1,-1],["nü",-1,-1],["nı",-1,-1]];var a=[["in",-1,-1],["un",-1,-1],["ün",-1,-1],["ın",-1,-1]];var s=[["a",-1,-1],["e",-1,-1]];var t=[["na",-1,-1],["ne",-1,-1]];var l=[["da",-1,-1],["ta",-1,-1],["de",-1,-1],["te",-1,-1]];var c=[["nda",-1,-1],["nde",-1,-1]];var o=[["dan",-1,-1],["tan",-1,-1],["den",-1,-1],["ten",-1,-1]];var f=[["ndan",-1,-1],["nden",-1,-1]];var n=[["la",-1,-1],["le",-1,-1]];var b=[["ca",-1,-1],["ce",-1,-1]];var m=[["im",-1,-1],["um",-1,-1],["üm",-1,-1],["ım",-1,-1]];var k=[["sin",-1,-1],["sun",-1,-1],["sün",-1,-1],["sın",-1,-1]];var _=[["iz",-1,-1],["uz",-1,-1],["üz",-1,-1],["ız",-1,-1]];var v=[["siniz",-1,-1],["sunuz",-1,-1],["sünüz",-1,-1],["sınız",-1,-1]];var d=[["lar",-1,-1],["ler",-1,-1]];var g=[["niz",-1,-1],["nuz",-1,-1],["nüz",-1,-1],["nız",-1,-1]];var w=[["dir",-1,-1],["tir",-1,-1],["dur",-1,-1],["tur",-1,-1],["dür",-1,-1],["tür",-1,-1],["dır",-1,-1],["tır",-1,-1]];var q=[["casına",-1,-1],["cesine",-1,-1]];var p=[["di",-1,-1],["ti",-1,-1],["dik",-1,-1],["tik",-1,-1],["duk",-1,-1],["tuk",-1,-1],["dük",-1,-1],["tük",-1,-1],["dık",-1,-1],["tık",-1,-1],["dim",-1,-1],["tim",-1,-1],["dum",-1,-1],["tum",-1,-1],["düm",-1,-1],["tüm",-1,-1],["dım",-1,-1],["tım",-1,-1],["din",-1,-1],["tin",-1,-1],["dun",-1,-1],["tun",-1,-1],["dün",-1,-1],["tün",-1,-1],["dın",-1,-1],["tın",-1,-1],["du",-1,-1],["tu",-1,-1],["dü",-1,-1],["tü",-1,-1],["dı",-1,-1],["tı",-1,-1]];var h=[["sa",-1,-1],["se",-1,-1],["sak",-1,-1],["sek",-1,-1],["sam",-1,-1],["sem",-1,-1],["san",-1,-1],["sen",-1,-1]];var z=[["miş",-1,-1],["muş",-1,-1],["müş",-1,-1],["mış",-1,-1]];var y=[["b",-1,1],["c",-1,2],["d",-1,3],["ğ",-1,4]];var C=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,8,0,0,0,0,0,0,1];var S=[1,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0,0,0,0,0,1];var B=[1,64,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1];var T=[17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,130];var W=[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1];var j=[17];var x=[65];var A=[65];var D=false;function E(){var i=r.limit-r.cursor;r:while(true){var e=r.limit-r.cursor;i:{if(!r.in_grouping_b(C,97,305)){break i}r.cursor=r.limit-e;break r}r.cursor=r.limit-e;if(r.cursor<=r.limit_backward){return false}r.cursor--}r:{var u=r.limit-r.cursor;i:{if(!r.eq_s_b("a")){break i}e:while(true){var a=r.limit-r.cursor;u:{if(!r.in_grouping_b(B,97,305)){break u}r.cursor=r.limit-a;break e}r.cursor=r.limit-a;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;i:{if(!r.eq_s_b("e")){break i}e:while(true){var s=r.limit-r.cursor;u:{if(!r.in_grouping_b(T,101,252)){break u}r.cursor=r.limit-s;break e}r.cursor=r.limit-s;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;i:{if(!r.eq_s_b("ı")){break i}e:while(true){var t=r.limit-r.cursor;u:{if(!r.in_grouping_b(W,97,305)){break u}r.cursor=r.limit-t;break e}r.cursor=r.limit-t;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;i:{if(!r.eq_s_b("i")){break i}e:while(true){var l=r.limit-r.cursor;u:{if(!r.in_grouping_b(j,101,105)){break u}r.cursor=r.limit-l;break e}r.cursor=r.limit-l;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;i:{if(!r.eq_s_b("o")){break i}e:while(true){var c=r.limit-r.cursor;u:{if(!r.in_grouping_b(x,111,117)){break u}r.cursor=r.limit-c;break 
e}r.cursor=r.limit-c;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;i:{if(!r.eq_s_b("ö")){break i}e:while(true){var o=r.limit-r.cursor;u:{if(!r.in_grouping_b(A,246,252)){break u}r.cursor=r.limit-o;break e}r.cursor=r.limit-o;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;i:{if(!r.eq_s_b("u")){break i}e:while(true){var f=r.limit-r.cursor;u:{if(!r.in_grouping_b(x,111,117)){break u}r.cursor=r.limit-f;break e}r.cursor=r.limit-f;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;if(!r.eq_s_b("ü")){return false}i:while(true){var n=r.limit-r.cursor;e:{if(!r.in_grouping_b(A,246,252)){break e}r.cursor=r.limit-n;break i}r.cursor=r.limit-n;if(r.cursor<=r.limit_backward){return false}r.cursor--}}r.cursor=r.limit-i;return true}function F(){r:{var i=r.limit-r.cursor;i:{if(!r.eq_s_b("n")){break i}var e=r.limit-r.cursor;if(!r.in_grouping_b(C,97,305)){break i}r.cursor=r.limit-e;break r}r.cursor=r.limit-i;{var u=r.limit-r.cursor;i:{var a=r.limit-r.cursor;if(!r.eq_s_b("n")){break i}r.cursor=r.limit-a;return false}r.cursor=r.limit-u}var s=r.limit-r.cursor;if(r.cursor<=r.limit_backward){return false}r.cursor--;if(!r.in_grouping_b(C,97,305)){return false}r.cursor=r.limit-s}return true}function G(){r:{var i=r.limit-r.cursor;i:{if(!r.eq_s_b("s")){break i}var e=r.limit-r.cursor;if(!r.in_grouping_b(C,97,305)){break i}r.cursor=r.limit-e;break r}r.cursor=r.limit-i;{var u=r.limit-r.cursor;i:{var a=r.limit-r.cursor;if(!r.eq_s_b("s")){break i}r.cursor=r.limit-a;return false}r.cursor=r.limit-u}var s=r.limit-r.cursor;if(r.cursor<=r.limit_backward){return false}r.cursor--;if(!r.in_grouping_b(C,97,305)){return false}r.cursor=r.limit-s}return true}function H(){r:{var i=r.limit-r.cursor;i:{if(!r.eq_s_b("y")){break i}var e=r.limit-r.cursor;if(!r.in_grouping_b(C,97,305)){break i}r.cursor=r.limit-e;break r}r.cursor=r.limit-i;{var u=r.limit-r.cursor;i:{var a=r.limit-r.cursor;if(!r.eq_s_b("y")){break i}r.cursor=r.limit-a;return false}r.cursor=r.limit-u}var s=r.limit-r.cursor;if(r.cursor<=r.limit_backward){return false}r.cursor--;if(!r.in_grouping_b(C,97,305)){return false}r.cursor=r.limit-s}return true}function I(){r:{var i=r.limit-r.cursor;i:{if(!r.in_grouping_b(S,105,305)){break i}var e=r.limit-r.cursor;if(!r.out_grouping_b(C,97,305)){break i}r.cursor=r.limit-e;break r}r.cursor=r.limit-i;{var u=r.limit-r.cursor;i:{var a=r.limit-r.cursor;if(!r.in_grouping_b(S,105,305)){break i}r.cursor=r.limit-a;return false}r.cursor=r.limit-u}var s=r.limit-r.cursor;if(r.cursor<=r.limit_backward){return false}r.cursor--;if(!r.out_grouping_b(C,97,305)){return false}r.cursor=r.limit-s}return true}function J(){if(r.find_among_b(i)==0){return false}if(!I()){return false}return true}function K(){if(!E()){return false}if(!r.in_grouping_b(S,105,305)){return false}if(!G()){return false}return true}function L(){if(r.find_among_b(e)==0){return false}return true}function M(){if(!E()){return false}if(!r.in_grouping_b(S,105,305)){return false}if(!H()){return false}return true}function N(){if(!E()){return false}if(r.find_among_b(u)==0){return false}return true}function O(){if(!E()){return false}if(r.find_among_b(a)==0){return false}if(!F()){return false}return true}function P(){if(!E()){return false}if(r.find_among_b(s)==0){return false}if(!H()){return false}return true}function Q(){if(!E()){return false}if(r.find_among_b(t)==0){return false}return true}function R(){if(!E()){return false}if(r.find_among_b(l)==0){return false}return true}function U(){if(!E()){return 
false}if(r.find_among_b(c)==0){return false}return true}function V(){if(!E()){return false}if(r.find_among_b(o)==0){return false}return true}function X(){if(!E()){return false}if(r.find_among_b(f)==0){return false}return true}function Y(){if(!E()){return false}if(r.find_among_b(n)==0){return false}if(!H()){return false}return true}function Z(){if(!r.eq_s_b("ki")){return false}return true}function $(){if(!E()){return false}if(r.find_among_b(b)==0){return false}if(!F()){return false}return true}function rr(){if(!E()){return false}if(r.find_among_b(m)==0){return false}if(!H()){return false}return true}function ir(){if(!E()){return false}if(r.find_among_b(k)==0){return false}return true}function er(){if(!E()){return false}if(r.find_among_b(_)==0){return false}if(!H()){return false}return true}function ur(){if(r.find_among_b(v)==0){return false}return true}function ar(){if(!E()){return false}if(r.find_among_b(d)==0){return false}return true}function sr(){if(!E()){return false}if(r.find_among_b(g)==0){return false}return true}function tr(){if(!E()){return false}if(r.find_among_b(w)==0){return false}return true}function lr(){if(r.find_among_b(q)==0){return false}return true}function cr(){if(!E()){return false}if(r.find_among_b(p)==0){return false}if(!H()){return false}return true}function or(){if(r.find_among_b(h)==0){return false}if(!H()){return false}return true}function fr(){if(!E()){return false}if(r.find_among_b(z)==0){return false}if(!H()){return false}return true}function nr(){if(!r.eq_s_b("ken")){return false}if(!H()){return false}return true}function br(){r.ket=r.cursor;D=true;r:{var i=r.limit-r.cursor;i:{e:{var e=r.limit-r.cursor;u:{if(!fr()){break u}break e}r.cursor=r.limit-e;u:{if(!cr()){break u}break e}r.cursor=r.limit-e;u:{if(!or()){break u}break e}r.cursor=r.limit-e;if(!nr()){break i}}break r}r.cursor=r.limit-i;i:{if(!lr()){break i}e:{var u=r.limit-r.cursor;u:{if(!ur()){break u}break e}r.cursor=r.limit-u;u:{if(!ar()){break u}break e}r.cursor=r.limit-u;u:{if(!rr()){break u}break e}r.cursor=r.limit-u;u:{if(!ir()){break u}break e}r.cursor=r.limit-u;u:{if(!er()){break u}break e}r.cursor=r.limit-u}if(!fr()){break i}break r}r.cursor=r.limit-i;i:{if(!ar()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}var a=r.limit-r.cursor;e:{r.ket=r.cursor;u:{var s=r.limit-r.cursor;a:{if(!tr()){break a}break u}r.cursor=r.limit-s;a:{if(!cr()){break a}break u}r.cursor=r.limit-s;a:{if(!or()){break a}break u}r.cursor=r.limit-s;if(!fr()){r.cursor=r.limit-a;break e}}}D=false;break r}r.cursor=r.limit-i;i:{if(!sr()){break i}e:{var t=r.limit-r.cursor;u:{if(!cr()){break u}break e}r.cursor=r.limit-t;if(!or()){break i}}break r}r.cursor=r.limit-i;i:{e:{var l=r.limit-r.cursor;u:{if(!ur()){break u}break e}r.cursor=r.limit-l;u:{if(!er()){break u}break e}r.cursor=r.limit-l;u:{if(!ir()){break u}break e}r.cursor=r.limit-l;if(!rr()){break i}}r.bra=r.cursor;if(!r.slice_del()){return false}var c=r.limit-r.cursor;e:{r.ket=r.cursor;if(!fr()){r.cursor=r.limit-c;break e}}break r}r.cursor=r.limit-i;if(!tr()){return false}r.bra=r.cursor;if(!r.slice_del()){return false}var o=r.limit-r.cursor;i:{r.ket=r.cursor;e:{var f=r.limit-r.cursor;u:{if(!ur()){break u}break e}r.cursor=r.limit-f;u:{if(!ar()){break u}break e}r.cursor=r.limit-f;u:{if(!rr()){break u}break e}r.cursor=r.limit-f;u:{if(!ir()){break u}break e}r.cursor=r.limit-f;u:{if(!er()){break u}break e}r.cursor=r.limit-f}if(!fr()){r.cursor=r.limit-o;break i}}}r.bra=r.cursor;if(!r.slice_del()){return false}return true}function mr(){r.ket=r.cursor;if(!Z()){return false}r:{var 
i=r.limit-r.cursor;i:{if(!R()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}var e=r.limit-r.cursor;e:{r.ket=r.cursor;u:{var u=r.limit-r.cursor;a:{if(!ar()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}var a=r.limit-r.cursor;s:{if(!mr()){r.cursor=r.limit-a;break s}}break u}r.cursor=r.limit-u;if(!J()){r.cursor=r.limit-e;break e}r.bra=r.cursor;if(!r.slice_del()){return false}var s=r.limit-r.cursor;a:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-s;break a}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-s;break a}}}}break r}r.cursor=r.limit-i;i:{if(!O()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}var t=r.limit-r.cursor;e:{r.ket=r.cursor;u:{var l=r.limit-r.cursor;a:{if(!L()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}break u}r.cursor=r.limit-l;a:{r.ket=r.cursor;s:{var c=r.limit-r.cursor;t:{if(!J()){break t}break s}r.cursor=r.limit-c;if(!K()){break a}}r.bra=r.cursor;if(!r.slice_del()){return false}var o=r.limit-r.cursor;s:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-o;break s}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-o;break s}}break u}r.cursor=r.limit-l;if(!mr()){r.cursor=r.limit-t;break e}}}break r}r.cursor=r.limit-i;if(!U()){return false}i:{var f=r.limit-r.cursor;e:{if(!L()){break e}r.bra=r.cursor;if(!r.slice_del()){return false}break i}r.cursor=r.limit-f;e:{if(!K()){break e}r.bra=r.cursor;if(!r.slice_del()){return false}var n=r.limit-r.cursor;u:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-n;break u}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-n;break u}}break i}r.cursor=r.limit-f;if(!mr()){return false}}}return true}function kr(){r:{var i=r.limit-r.cursor;i:{r.ket=r.cursor;if(!ar()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}var e=r.limit-r.cursor;e:{if(!mr()){r.cursor=r.limit-e;break e}}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;if(!$()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}var u=r.limit-r.cursor;e:{u:{var a=r.limit-r.cursor;a:{r.ket=r.cursor;if(!L()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}break u}r.cursor=r.limit-a;a:{r.ket=r.cursor;s:{var s=r.limit-r.cursor;t:{if(!J()){break t}break s}r.cursor=r.limit-s;if(!K()){break a}}r.bra=r.cursor;if(!r.slice_del()){return false}var t=r.limit-r.cursor;s:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-t;break s}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-t;break s}}break u}r.cursor=r.limit-a;r.ket=r.cursor;if(!ar()){r.cursor=r.limit-u;break e}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-u;break e}}}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;e:{var l=r.limit-r.cursor;u:{if(!U()){break u}break e}r.cursor=r.limit-l;if(!Q()){break i}}e:{var c=r.limit-r.cursor;u:{if(!L()){break u}r.bra=r.cursor;if(!r.slice_del()){return false}break e}r.cursor=r.limit-c;u:{if(!K()){break u}r.bra=r.cursor;if(!r.slice_del()){return false}var o=r.limit-r.cursor;a:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-o;break a}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-o;break a}}break e}r.cursor=r.limit-c;if(!mr()){break i}}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;e:{var f=r.limit-r.cursor;u:{if(!X()){break u}break e}r.cursor=r.limit-f;if(!N()){break i}}e:{var n=r.limit-r.cursor;u:{if(!K()){break u}r.bra=r.cursor;if(!r.slice_del()){return false}var b=r.limit-r.cursor;a:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-b;break a}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-b;break a}}break 
e}r.cursor=r.limit-n;if(!L()){break i}}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;if(!V()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}var m=r.limit-r.cursor;e:{r.ket=r.cursor;u:{var k=r.limit-r.cursor;a:{if(!J()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}var _=r.limit-r.cursor;s:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-_;break s}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-_;break s}}break u}r.cursor=r.limit-k;a:{if(!ar()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}var v=r.limit-r.cursor;s:{if(!mr()){r.cursor=r.limit-v;break s}}break u}r.cursor=r.limit-k;if(!mr()){r.cursor=r.limit-m;break e}}}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;e:{var d=r.limit-r.cursor;u:{if(!O()){break u}break e}r.cursor=r.limit-d;if(!Y()){break i}}r.bra=r.cursor;if(!r.slice_del()){return false}var g=r.limit-r.cursor;e:{u:{var w=r.limit-r.cursor;a:{r.ket=r.cursor;if(!ar()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){break a}break u}r.cursor=r.limit-w;a:{r.ket=r.cursor;s:{var q=r.limit-r.cursor;t:{if(!J()){break t}break s}r.cursor=r.limit-q;if(!K()){break a}}r.bra=r.cursor;if(!r.slice_del()){return false}var p=r.limit-r.cursor;s:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-p;break s}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-p;break s}}break u}r.cursor=r.limit-w;if(!mr()){r.cursor=r.limit-g;break e}}}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;if(!L()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}break r}r.cursor=r.limit-i;i:{if(!mr()){break i}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;e:{var h=r.limit-r.cursor;u:{if(!R()){break u}break e}r.cursor=r.limit-h;u:{if(!M()){break u}break e}r.cursor=r.limit-h;if(!P()){break i}}r.bra=r.cursor;if(!r.slice_del()){return false}var z=r.limit-r.cursor;e:{r.ket=r.cursor;u:{var y=r.limit-r.cursor;a:{if(!J()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}var C=r.limit-r.cursor;s:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-C;break s}}break u}r.cursor=r.limit-y;if(!ar()){r.cursor=r.limit-z;break e}}r.bra=r.cursor;if(!r.slice_del()){return false}r.ket=r.cursor;if(!mr()){r.cursor=r.limit-z;break e}}break r}r.cursor=r.limit-i;r.ket=r.cursor;i:{var S=r.limit-r.cursor;e:{if(!J()){break e}break i}r.cursor=r.limit-S;if(!K()){return false}}r.bra=r.cursor;if(!r.slice_del()){return false}var B=r.limit-r.cursor;i:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-B;break i}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-B;break i}}}return true}function _r(){var i;r.ket=r.cursor;i=r.find_among_b(y);if(i==0){return false}r.bra=r.cursor;switch(i){case 1:if(!r.slice_from("p")){return false}break;case 2:if(!r.slice_from("ç")){return false}break;case 3:if(!r.slice_from("t")){return false}break;case 4:if(!r.slice_from("k")){return false}break}return true}function vr(){var i=r.limit-r.cursor;r:{var e=r.limit-r.cursor;i:{if(!r.eq_s_b("d")){break i}break r}r.cursor=r.limit-e;if(!r.eq_s_b("g")){return false}}r.cursor=r.limit-i;r:{var u=r.limit-r.cursor;i:{var a=r.limit-r.cursor;e:while(true){var s=r.limit-r.cursor;u:{if(!r.in_grouping_b(C,97,305)){break u}r.cursor=r.limit-s;break e}r.cursor=r.limit-s;if(r.cursor<=r.limit_backward){break i}r.cursor--}e:{var t=r.limit-r.cursor;u:{if(!r.eq_s_b("a")){break u}break e}r.cursor=r.limit-t;if(!r.eq_s_b("ı")){break i}}r.cursor=r.limit-a;{var l=r.cursor;r.insert(r.cursor,r.cursor,"ı");r.cursor=l}break r}r.cursor=r.limit-u;i:{var c=r.limit-r.cursor;e:while(true){var 
o=r.limit-r.cursor;u:{if(!r.in_grouping_b(C,97,305)){break u}r.cursor=r.limit-o;break e}r.cursor=r.limit-o;if(r.cursor<=r.limit_backward){break i}r.cursor--}e:{var f=r.limit-r.cursor;u:{if(!r.eq_s_b("e")){break u}break e}r.cursor=r.limit-f;if(!r.eq_s_b("i")){break i}}r.cursor=r.limit-c;{var n=r.cursor;r.insert(r.cursor,r.cursor,"i");r.cursor=n}break r}r.cursor=r.limit-u;i:{var b=r.limit-r.cursor;e:while(true){var m=r.limit-r.cursor;u:{if(!r.in_grouping_b(C,97,305)){break u}r.cursor=r.limit-m;break e}r.cursor=r.limit-m;if(r.cursor<=r.limit_backward){break i}r.cursor--}e:{var k=r.limit-r.cursor;u:{if(!r.eq_s_b("o")){break u}break e}r.cursor=r.limit-k;if(!r.eq_s_b("u")){break i}}r.cursor=r.limit-b;{var _=r.cursor;r.insert(r.cursor,r.cursor,"u");r.cursor=_}break r}r.cursor=r.limit-u;var v=r.limit-r.cursor;i:while(true){var d=r.limit-r.cursor;e:{if(!r.in_grouping_b(C,97,305)){break e}r.cursor=r.limit-d;break i}r.cursor=r.limit-d;if(r.cursor<=r.limit_backward){return false}r.cursor--}i:{var g=r.limit-r.cursor;e:{if(!r.eq_s_b("ö")){break e}break i}r.cursor=r.limit-g;if(!r.eq_s_b("ü")){return false}}r.cursor=r.limit-v;{var w=r.cursor;r.insert(r.cursor,r.cursor,"ü");r.cursor=w}}return true}function dr(){if(!r.eq_s_b("ad")){return false}var i=r.limit-r.cursor;r:{if(!r.eq_s_b("soy")){r.cursor=r.limit-i;break r}}if(r.cursor>r.limit_backward){return false}return true}function gr(){var i=r.cursor;{var e=2;while(true){var u=r.cursor;r:{i:while(true){e:{if(!r.in_grouping(C,97,305)){break e}break i}if(r.cursor>=r.limit){break r}r.cursor++}e--;continue}r.cursor=u;break}if(e>0){return false}}r.cursor=i;return true}function wr(){r.limit_backward=r.cursor;r.cursor=r.limit;{var i=r.limit-r.cursor;r:{if(!dr()){break r}return false}r.cursor=r.limit-i}var e=r.limit-r.cursor;vr();r.cursor=r.limit-e;var u=r.limit-r.cursor;_r();r.cursor=r.limit-u;r.cursor=r.limit_backward;return true}this.stem=function(){if(!gr()){return false}r.limit_backward=r.cursor;r.cursor=r.limit;var i=r.limit-r.cursor;br();r.cursor=r.limit-i;if(!D){return false}var e=r.limit-r.cursor;kr();r.cursor=r.limit-e;r.cursor=r.limit_backward;if(!wr()){return false}return true};this["stemWord"]=function(i){r.setCurrent(i);this.stem();return r.getCurrent()}};
\ No newline at end of file
+var TurkishStemmer=function(){var q=new BaseStemmer,u=[["m",-1,-1],["n",-1,-1],["miz",-1,-1],["niz",-1,-1],["muz",-1,-1],["nuz",-1,-1],["müz",-1,-1],["nüz",-1,-1],["mız",-1,-1],["nız",-1,-1]],r=[["leri",-1,-1],["ları",-1,-1]],p=[["ni",-1,-1],["nu",-1,-1],["nü",-1,-1],["nı",-1,-1]],i=[["in",-1,-1],["un",-1,-1],["ün",-1,-1],["ın",-1,-1]],z=[["a",-1,-1],["e",-1,-1]],w=[["na",-1,-1],["ne",-1,-1]],s=[["da",-1,-1],["ta",-1,-1],["de",-1,-1],["te",-1,-1]],o=[["nda",-1,-1],["nde",-1,-1]],h=[["dan",-1,-1],["tan",-1,-1],["den",-1,-1],["ten",-1,-1]],y=[["ndan",-1,-1],["nden",-1,-1]],C=[["la",-1,-1],["le",-1,-1]],I=[["ca",-1,-1],["ce",-1,-1]],g=[["im",-1,-1],["um",-1,-1],["üm",-1,-1],["ım",-1,-1]],v=[["sin",-1,-1],["sun",-1,-1],["sün",-1,-1],["sın",-1,-1]],J=[["iz",-1,-1],["uz",-1,-1],["üz",-1,-1],["ız",-1,-1]],K=[["siniz",-1,-1],["sunuz",-1,-1],["sünüz",-1,-1],["sınız",-1,-1]],L=[["lar",-1,-1],["ler",-1,-1]],M=[["niz",-1,-1],["nuz",-1,-1],["nüz",-1,-1],["nız",-1,-1]],N=[["dir",-1,-1],["tir",-1,-1],["dur",-1,-1],["tur",-1,-1],["dür",-1,-1],["tür",-1,-1],["dır",-1,-1],["tır",-1,-1]],O=[["casına",-1,-1],["cesine",-1,-1]],P=[["di",-1,-1],["ti",-1,-1],["dik",-1,-1],["tik",-1,-1],["duk",-1,-1],["tuk",-1,-1],["dük",-1,-1],["tük",-1,-1],["dık",-1,-1],["tık",-1,-1],["dim",-1,-1],["tim",-1,-1],["dum",-1,-1],["tum",-1,-1],["düm",-1,-1],["tüm",-1,-1],["dım",-1,-1],["tım",-1,-1],["din",-1,-1],["tin",-1,-1],["dun",-1,-1],["tun",-1,-1],["dün",-1,-1],["tün",-1,-1],["dın",-1,-1],["tın",-1,-1],["du",-1,-1],["tu",-1,-1],["dü",-1,-1],["tü",-1,-1],["dı",-1,-1],["tı",-1,-1]],Q=[["sa",-1,-1],["se",-1,-1],["sak",-1,-1],["sek",-1,-1],["sam",-1,-1],["sem",-1,-1],["san",-1,-1],["sen",-1,-1]],R=[["miş",-1,-1],["muş",-1,-1],["müş",-1,-1],["mış",-1,-1]],U=[["b",-1,1],["c",-1,2],["d",-1,3],["ğ",-1,4]],t=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,8,0,0,0,0,0,0,1],S=[1,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0,0,0,0,0,1],V=[1,64,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1],X=[17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,130],Y=[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1],Z=[17],c=[65],e=[65],l=!1;function B(){var r=q.limit-q.cursor;if(q.go_out_grouping_b(t,97,305)){var i=q.limit-q.cursor;if(!(q.eq_s_b("a")&&q.go_out_grouping_b(V,97,305)||(q.cursor=q.limit-i,q.eq_s_b("e")&&q.go_out_grouping_b(X,101,252))||(q.cursor=q.limit-i,q.eq_s_b("ı")&&q.go_out_grouping_b(Y,97,305))||(q.cursor=q.limit-i,q.eq_s_b("i")&&q.go_out_grouping_b(Z,101,105))||(q.cursor=q.limit-i,q.eq_s_b("o")&&q.go_out_grouping_b(c,111,117))||(q.cursor=q.limit-i,q.eq_s_b("ö")&&q.go_out_grouping_b(e,246,252))||(q.cursor=q.limit-i,q.eq_s_b("u")&&q.go_out_grouping_b(c,111,117)))){if(q.cursor=q.limit-i,!q.eq_s_b("ü"))return;if(!q.go_out_grouping_b(e,246,252))return}return q.cursor=q.limit-r,1}}function T(){r:{var r=q.limit-q.cursor;if(q.eq_s_b("n")){var i=q.limit-q.cursor;if(q.in_grouping_b(t,97,305)){q.cursor=q.limit-i;break r}}q.cursor=q.limit-r;i=q.limit-q.cursor,r=q.limit-q.cursor;if(q.eq_s_b("n"))return void(q.cursor=q.limit-r);q.cursor=q.limit-i;r=q.limit-q.cursor;if(q.cursor<=q.limit_backward)return;if(q.cursor--,!q.in_grouping_b(t,97,305))return;q.cursor=q.limit-r}return 1}function W(){r:{var r=q.limit-q.cursor;if(q.eq_s_b("y")){var i=q.limit-q.cursor;if(q.in_grouping_b(t,97,305)){q.cursor=q.limit-i;break r}}q.cursor=q.limit-r;i=q.limit-q.cursor,r=q.limit-q.cursor;if(q.eq_s_b("y"))return void(q.cursor=q.limit-r);q.cursor=q.limit-i;r=q.limit-q.cursor;if(q.cursor<=q.limit_backward)return;if(q.cursor--,!q.in_grouping_b(t,97,305))return;q.cursor=q.limit-r}return 
1}function j(){if(0!=q.find_among_b(u)){r:{var r=q.limit-q.cursor;if(q.in_grouping_b(S,105,305)){var i=q.limit-q.cursor;if(q.out_grouping_b(t,97,305)){q.cursor=q.limit-i;break r}}q.cursor=q.limit-r;i=q.limit-q.cursor,r=q.limit-q.cursor;if(q.in_grouping_b(S,105,305))return!!(q.cursor=q.limit-r,0);q.cursor=q.limit-i;r=q.limit-q.cursor;if(q.cursor<=q.limit_backward)return!!void 0;if(q.cursor--,!q.out_grouping_b(t,97,305))return!!void 0;q.cursor=q.limit-r}return!!1}}function x(){if(B()&&q.in_grouping_b(S,105,305)){r:{var r=q.limit-q.cursor;if(q.eq_s_b("s")){var i=q.limit-q.cursor;if(q.in_grouping_b(t,97,305)){q.cursor=q.limit-i;break r}}q.cursor=q.limit-r;i=q.limit-q.cursor,r=q.limit-q.cursor;if(q.eq_s_b("s"))return!!(q.cursor=q.limit-r,0);q.cursor=q.limit-i;r=q.limit-q.cursor;if(q.cursor<=q.limit_backward)return!!void 0;if(q.cursor--,!q.in_grouping_b(t,97,305))return!!void 0;q.cursor=q.limit-r}return!!1}}function A(){return 0!=q.find_among_b(r)}function D(){return B()&&0!=q.find_among_b(i)&&!!T()}function E(){return B()&&0!=q.find_among_b(s)}function F(){return B()&&0!=q.find_among_b(o)}function m(){return B()&&0!=q.find_among_b(g)&&!!W()}function n(){return B()&&0!=q.find_among_b(v)}function _(){return B()&&0!=q.find_among_b(J)&&!!W()}function f(){return 0!=q.find_among_b(K)}function G(){return B()&&0!=q.find_among_b(L)}function a(){return B()&&0!=q.find_among_b(N)}function b(){return B()&&0!=q.find_among_b(P)&&!!W()}function d(){return 0!=q.find_among_b(Q)&&!!W()}function k(){return B()&&0!=q.find_among_b(R)&&!!W()}function $(){q.ket=q.cursor,l=!0;r:{var r=q.limit-q.cursor,i=q.limit-q.cursor;if(!(k()||(q.cursor=q.limit-i,b())||(q.cursor=q.limit-i,d())||(q.cursor=q.limit-i,q.eq_s_b("ken")&&W()))){if(q.cursor=q.limit-r,0!=q.find_among_b(O)){i=q.limit-q.cursor;if(f()||(q.cursor=q.limit-i,G())||(q.cursor=q.limit-i,m())||(q.cursor=q.limit-i,n())||(q.cursor=q.limit-i,_())||(q.cursor=q.limit-i),k())break r}if(q.cursor=q.limit-r,G()){if(q.bra=q.cursor,!q.slice_del())return;var i=q.limit-q.cursor,u=(q.ket=q.cursor,q.limit-q.cursor);a()||(q.cursor=q.limit-u,b())||(q.cursor=q.limit-u,d())||(q.cursor=q.limit-u,k())||(q.cursor=q.limit-i),l=!1}else{if(q.cursor=q.limit-r,B()&&0!=q.find_among_b(M)){u=q.limit-q.cursor;if(b()||(q.cursor=q.limit-u,d()))break r}q.cursor=q.limit-r;i=q.limit-q.cursor;if(f()||(q.cursor=q.limit-i,_())||(q.cursor=q.limit-i,n())||(q.cursor=q.limit-i,m())){if(q.bra=q.cursor,!q.slice_del())return;u=q.limit-q.cursor;q.ket=q.cursor,k()||(q.cursor=q.limit-u)}else{if(q.cursor=q.limit-r,!a())return;if(q.bra=q.cursor,!q.slice_del())return;i=q.limit-q.cursor,u=(q.ket=q.cursor,q.limit-q.cursor);f()||(q.cursor=q.limit-u,G())||(q.cursor=q.limit-u,m())||(q.cursor=q.limit-u,n())||(q.cursor=q.limit-u,_())||(q.cursor=q.limit-u),k()||(q.cursor=q.limit-i)}}}}q.bra=q.cursor,q.slice_del()}function H(){if(q.ket=q.cursor,q.eq_s_b("ki")){var r=q.limit-q.cursor;if(E()){if(q.bra=q.cursor,!q.slice_del())return;var i=q.limit-q.cursor;r:{q.ket=q.cursor;var u=q.limit-q.cursor;if(G()){if(q.bra=q.cursor,!q.slice_del())return;var s=q.limit-q.cursor;H()||(q.cursor=q.limit-s)}else{if(q.cursor=q.limit-u,!j()){q.cursor=q.limit-i;break r}if(q.bra=q.cursor,!q.slice_del())return;s=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-s)}else q.cursor=q.limit-s}}}else if(q.cursor=q.limit-r,D()){if(q.bra=q.cursor,!q.slice_del())return;var o=q.limit-q.cursor;r:{q.ket=q.cursor;i:{var t=q.limit-q.cursor;if(A()){if(q.bra=q.cursor,q.slice_del())break 
i;return}q.cursor=q.limit-t,q.ket=q.cursor;var c=q.limit-q.cursor;if(j()||(q.cursor=q.limit-c,x())){if(q.bra=q.cursor,!q.slice_del())return;c=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-c)}else q.cursor=q.limit-c}else if(q.cursor=q.limit-t,!H()){q.cursor=q.limit-o;break r}}}}else{if(q.cursor=q.limit-r,!F())return;r:{var e=q.limit-q.cursor;if(A()){if(q.bra=q.cursor,q.slice_del())break r;return}if(q.cursor=q.limit-e,x()){if(q.bra=q.cursor,!q.slice_del())return;var l=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-l)}else q.cursor=q.limit-l}else if(q.cursor=q.limit-e,!H())return}}return 1}}function rr(){r:{var r=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;var i=q.limit-q.cursor;H()||(q.cursor=q.limit-i)}else if(q.cursor=q.limit-r,q.ket=q.cursor,B()&&0!=q.find_among_b(I)&&T()){if(q.bra=q.cursor,!q.slice_del())return;var u=q.limit-q.cursor;i:u:{var s=q.limit-q.cursor;if(q.ket=q.cursor,A()){if(q.bra=q.cursor,q.slice_del())break u;return}q.cursor=q.limit-s,q.ket=q.cursor;var o=q.limit-q.cursor;if(j()||(q.cursor=q.limit-o,x())){if(q.bra=q.cursor,!q.slice_del())return;o=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-o)}else q.cursor=q.limit-o}else{if(q.cursor=q.limit-s,q.ket=q.cursor,!G()){q.cursor=q.limit-u;break i}if(q.bra=q.cursor,!q.slice_del())return;if(!H()){q.cursor=q.limit-u;break i}}}}else{q.cursor=q.limit-r;i:{q.ket=q.cursor;var t=q.limit-q.cursor;if(F()||(q.cursor=q.limit-t,B()&&0!=q.find_among_b(w))){u:{var c=q.limit-q.cursor;if(A()){if(q.bra=q.cursor,q.slice_del())break u;return}if(q.cursor=q.limit-c,x()){if(q.bra=q.cursor,!q.slice_del())return;var e=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-e)}else q.cursor=q.limit-e}else if(q.cursor=q.limit-c,!H())break i}break r}}q.cursor=q.limit-r;i:{q.ket=q.cursor;var l=q.limit-q.cursor;if(B()&&0!=q.find_among_b(y)||(q.cursor=q.limit-l,B()&&0!=q.find_among_b(p))){l=q.limit-q.cursor;if(x()){if(q.bra=q.cursor,!q.slice_del())return;var m=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-m)}else q.cursor=q.limit-m}else if(q.cursor=q.limit-l,!A())break i;break r}}if(q.cursor=q.limit-r,q.ket=q.cursor,B()&&0!=q.find_among_b(h)){if(q.bra=q.cursor,!q.slice_del())return;var n=q.limit-q.cursor;i:{q.ket=q.cursor;var _=q.limit-q.cursor;if(j()){if(q.bra=q.cursor,!q.slice_del())return;var f=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-f)}else q.cursor=q.limit-f}else if(q.cursor=q.limit-_,G()){if(q.bra=q.cursor,!q.slice_del())return;f=q.limit-q.cursor;H()||(q.cursor=q.limit-f)}else if(q.cursor=q.limit-_,!H()){q.cursor=q.limit-n;break i}}}else{q.cursor=q.limit-r,q.ket=q.cursor;m=q.limit-q.cursor;if(D()||(q.cursor=q.limit-m,B()&&0!=q.find_among_b(C)&&W())){if(q.bra=q.cursor,!q.slice_del())return;var a=q.limit-q.cursor;i:u:{var b=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;if(H())break u}q.cursor=q.limit-b,q.ket=q.cursor;var d=q.limit-q.cursor;if(j()||(q.cursor=q.limit-d,x())){if(q.bra=q.cursor,!q.slice_del())return;d=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-d)}else q.cursor=q.limit-d}else if(q.cursor=q.limit-b,!H()){q.cursor=q.limit-a;break 
i}}}else{if(q.cursor=q.limit-r,q.ket=q.cursor,A()){if(q.bra=q.cursor,q.slice_del())break r;return}if(q.cursor=q.limit-r,!H()){q.cursor=q.limit-r,q.ket=q.cursor;l=q.limit-q.cursor;if(E()||(q.cursor=q.limit-l,B()&&q.in_grouping_b(S,105,305)&&W())||(q.cursor=q.limit-l,B()&&0!=q.find_among_b(z)&&W())){if(q.bra=q.cursor,!q.slice_del())return;var k=q.limit-q.cursor;i:{q.ket=q.cursor;var g=q.limit-q.cursor;if(j()){if(q.bra=q.cursor,!q.slice_del())return;var v=q.limit-q.cursor;q.ket=q.cursor,G()||(q.cursor=q.limit-v)}else if(q.cursor=q.limit-g,!G()){q.cursor=q.limit-k;break i}if(q.bra=q.cursor,!q.slice_del())return;q.ket=q.cursor,H()||(q.cursor=q.limit-k)}}else{q.cursor=q.limit-r,q.ket=q.cursor;m=q.limit-q.cursor;if(!j()&&(q.cursor=q.limit-m,!x()))return;if(q.bra=q.cursor,!q.slice_del())return;l=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-l)}else q.cursor=q.limit-l}}}}}}}function ir(){q.limit_backward=q.cursor,q.cursor=q.limit;var r=q.limit-q.cursor;if(!(()=>{var r;return!q.eq_s_b("ad")||(r=q.limit-q.cursor,q.eq_s_b("soy")||(q.cursor=q.limit-r),q.cursor>q.limit_backward)?void 0:1})())return q.cursor=q.limit-r,r=q.limit-q.cursor,(()=>{q.ket=q.cursor,q.bra=q.cursor;var r=q.limit-q.cursor;if((q.eq_s_b("d")||(q.cursor=q.limit-r,q.eq_s_b("g")))&&q.go_out_grouping_b(t,97,305))r:{var i=q.limit-q.cursor,u=q.limit-q.cursor;if(q.eq_s_b("a")||(q.cursor=q.limit-u,q.eq_s_b("ı"))){if(q.slice_from("ı"))break r;return}q.cursor=q.limit-i;u=q.limit-q.cursor;if(q.eq_s_b("e")||(q.cursor=q.limit-u,q.eq_s_b("i"))){if(q.slice_from("i"))break r;return}q.cursor=q.limit-i;u=q.limit-q.cursor;if(q.eq_s_b("o")||(q.cursor=q.limit-u,q.eq_s_b("u"))){if(q.slice_from("u"))break r;return}q.cursor=q.limit-i;u=q.limit-q.cursor;if(!q.eq_s_b("ö")&&(q.cursor=q.limit-u,!q.eq_s_b("ü")))return;if(!q.slice_from("ü"))return}})(),q.cursor=q.limit-r,r=q.limit-q.cursor,(()=>{var r;if(q.ket=q.cursor,0!=(r=q.find_among_b(U)))switch(q.bra=q.cursor,r){case 1:if(q.slice_from("p"))break;return;case 2:if(q.slice_from("ç"))break;return;case 3:if(q.slice_from("t"))break;return;case 4:if(q.slice_from("k"))break}})(),q.cursor=q.limit-r,q.cursor=q.limit_backward,1}this.stem=function(){if((()=>{var r=q.cursor;r:{for(q.bra=q.cursor;;){var i=q.cursor,u=q.cursor;if(!q.eq_s("'")){q.cursor=u,q.cursor=i;break}if(q.cursor=i,q.cursor>=q.limit)break r;q.cursor++}if(q.ket=q.cursor,!q.slice_del())return}q.cursor=r,r=q.cursor;r:{var s=q.cursor+2;if(!(s>q.limit)){for(q.cursor=s;;){var o=q.cursor;if(q.eq_s("'")){q.cursor=o;break}if(q.cursor=o,q.cursor>=q.limit)break r;q.cursor++}if(q.bra=q.cursor,q.cursor=q.limit,q.ket=q.cursor,!q.slice_del())return}}q.cursor=r})(),!(()=>{for(var r=q.cursor,i=2;0{for(var r,e=t.cursor;;){var s=t.cursor;r:{for(;;){var i=t.cursor;e:if(t.bra=t.cursor,0!=(r=t.find_among(l))){switch(t.ket=t.cursor,r){case 1:var c=t.cursor;if(t.eq_s("ּ"))break e;if(t.cursor=c,t.slice_from("װ"))break;return;case 2:var o=t.cursor;if(t.eq_s("ִ"))break e;if(t.cursor=o,t.slice_from("ױ"))break;return;case 3:var u=t.cursor;if(t.eq_s("ִ"))break e;if(t.cursor=u,t.slice_from("ײ"))break;return;case 4:if(t.slice_from("כ"))break;return;case 5:if(t.slice_from("מ"))break;return;case 6:if(t.slice_from("נ"))break;return;case 7:if(t.slice_from("פ"))break;return;case 8:if(t.slice_from("צ"))break;return}t.cursor=i;break}if(t.cursor=i,t.cursor>=t.limit)break r;t.cursor++}continue}t.cursor=s;break}for(t.cursor=e,e=t.cursor;;){var a=t.cursor;r:{for(;;){var 
f=t.cursor;if(t.bra=t.cursor,t.in_grouping(k,1456,1474)){if(t.ket=t.cursor,!t.slice_del())return;t.cursor=f;break}if(t.cursor=f,t.cursor>=t.limit)break r;t.cursor++}continue}t.cursor=a;break}t.cursor=e})();var r=t.cursor;return(()=>{q=t.limit;var r=t.cursor;if(t.bra=t.cursor,t.eq_s("גע")){t.ket=t.cursor;var e=t.cursor,s=t.cursor;if(!t.eq_s("לט")&&(t.cursor=s,!t.eq_s("בנ"))&&(t.cursor=s,t.cursort.limit||(t.cursor=e,v=t.cursor,t.cursor=s,r=t.cursor,0==t.find_among(f)&&(t.cursor=r),e=t.cursor,t.in_grouping(d,1489,1520)&&t.in_grouping(d,1489,1520)&&t.in_grouping(d,1489,1520)?q=t.cursor:(t.cursor=e,t.go_out_grouping(g,1488,1522)&&(t.cursor++,t.go_in_grouping(g,1488,1522))&&(q=t.cursor,v<=q||(q=v))))})(),t.cursor=r,t.limit_backward=t.cursor,t.cursor=t.limit,e(),t.cursor=t.limit_backward,!0},this.stemWord=function(r){return t.setCurrent(r),this.stem(),t.getCurrent()}};
\ No newline at end of file
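All of the stemmers in this patch, minified and non-minified alike, expose the same two-method surface: stem(), which transforms the internal buffer in place, and stemWord(word), which wraps setCurrent/stem/getCurrent. A minimal usage sketch, assuming base-stemmer.js and the relevant stemmer file have been loaded first; the sample word and result are illustrative, not taken from the patch:

    var stemmer = new TurkishStemmer();
    // stemWord(word) calls setCurrent(word), runs stem(), and returns getCurrent().
    console.log(stemmer.stemWord('kitaplar'));  // e.g. the plural suffix '-lar' is stripped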
diff --git a/sphinx/search/non-minified-js/arabic-stemmer.js b/sphinx/search/non-minified-js/arabic-stemmer.js
new file mode 100644
index 00000000000..dbab12d81e1
--- /dev/null
+++ b/sphinx/search/non-minified-js/arabic-stemmer.js
@@ -0,0 +1,1612 @@
+// Generated from arabic.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var ArabicStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["\u0640", -1, 1],
+ ["\u064B", -1, 1],
+ ["\u064C", -1, 1],
+ ["\u064D", -1, 1],
+ ["\u064E", -1, 1],
+ ["\u064F", -1, 1],
+ ["\u0650", -1, 1],
+ ["\u0651", -1, 1],
+ ["\u0652", -1, 1],
+ ["\u0660", -1, 2],
+ ["\u0661", -1, 3],
+ ["\u0662", -1, 4],
+ ["\u0663", -1, 5],
+ ["\u0664", -1, 6],
+ ["\u0665", -1, 7],
+ ["\u0666", -1, 8],
+ ["\u0667", -1, 9],
+ ["\u0668", -1, 10],
+ ["\u0669", -1, 11],
+ ["\uFE80", -1, 12],
+ ["\uFE81", -1, 16],
+ ["\uFE82", -1, 16],
+ ["\uFE83", -1, 13],
+ ["\uFE84", -1, 13],
+ ["\uFE85", -1, 17],
+ ["\uFE86", -1, 17],
+ ["\uFE87", -1, 14],
+ ["\uFE88", -1, 14],
+ ["\uFE89", -1, 15],
+ ["\uFE8A", -1, 15],
+ ["\uFE8B", -1, 15],
+ ["\uFE8C", -1, 15],
+ ["\uFE8D", -1, 18],
+ ["\uFE8E", -1, 18],
+ ["\uFE8F", -1, 19],
+ ["\uFE90", -1, 19],
+ ["\uFE91", -1, 19],
+ ["\uFE92", -1, 19],
+ ["\uFE93", -1, 20],
+ ["\uFE94", -1, 20],
+ ["\uFE95", -1, 21],
+ ["\uFE96", -1, 21],
+ ["\uFE97", -1, 21],
+ ["\uFE98", -1, 21],
+ ["\uFE99", -1, 22],
+ ["\uFE9A", -1, 22],
+ ["\uFE9B", -1, 22],
+ ["\uFE9C", -1, 22],
+ ["\uFE9D", -1, 23],
+ ["\uFE9E", -1, 23],
+ ["\uFE9F", -1, 23],
+ ["\uFEA0", -1, 23],
+ ["\uFEA1", -1, 24],
+ ["\uFEA2", -1, 24],
+ ["\uFEA3", -1, 24],
+ ["\uFEA4", -1, 24],
+ ["\uFEA5", -1, 25],
+ ["\uFEA6", -1, 25],
+ ["\uFEA7", -1, 25],
+ ["\uFEA8", -1, 25],
+ ["\uFEA9", -1, 26],
+ ["\uFEAA", -1, 26],
+ ["\uFEAB", -1, 27],
+ ["\uFEAC", -1, 27],
+ ["\uFEAD", -1, 28],
+ ["\uFEAE", -1, 28],
+ ["\uFEAF", -1, 29],
+ ["\uFEB0", -1, 29],
+ ["\uFEB1", -1, 30],
+ ["\uFEB2", -1, 30],
+ ["\uFEB3", -1, 30],
+ ["\uFEB4", -1, 30],
+ ["\uFEB5", -1, 31],
+ ["\uFEB6", -1, 31],
+ ["\uFEB7", -1, 31],
+ ["\uFEB8", -1, 31],
+ ["\uFEB9", -1, 32],
+ ["\uFEBA", -1, 32],
+ ["\uFEBB", -1, 32],
+ ["\uFEBC", -1, 32],
+ ["\uFEBD", -1, 33],
+ ["\uFEBE", -1, 33],
+ ["\uFEBF", -1, 33],
+ ["\uFEC0", -1, 33],
+ ["\uFEC1", -1, 34],
+ ["\uFEC2", -1, 34],
+ ["\uFEC3", -1, 34],
+ ["\uFEC4", -1, 34],
+ ["\uFEC5", -1, 35],
+ ["\uFEC6", -1, 35],
+ ["\uFEC7", -1, 35],
+ ["\uFEC8", -1, 35],
+ ["\uFEC9", -1, 36],
+ ["\uFECA", -1, 36],
+ ["\uFECB", -1, 36],
+ ["\uFECC", -1, 36],
+ ["\uFECD", -1, 37],
+ ["\uFECE", -1, 37],
+ ["\uFECF", -1, 37],
+ ["\uFED0", -1, 37],
+ ["\uFED1", -1, 38],
+ ["\uFED2", -1, 38],
+ ["\uFED3", -1, 38],
+ ["\uFED4", -1, 38],
+ ["\uFED5", -1, 39],
+ ["\uFED6", -1, 39],
+ ["\uFED7", -1, 39],
+ ["\uFED8", -1, 39],
+ ["\uFED9", -1, 40],
+ ["\uFEDA", -1, 40],
+ ["\uFEDB", -1, 40],
+ ["\uFEDC", -1, 40],
+ ["\uFEDD", -1, 41],
+ ["\uFEDE", -1, 41],
+ ["\uFEDF", -1, 41],
+ ["\uFEE0", -1, 41],
+ ["\uFEE1", -1, 42],
+ ["\uFEE2", -1, 42],
+ ["\uFEE3", -1, 42],
+ ["\uFEE4", -1, 42],
+ ["\uFEE5", -1, 43],
+ ["\uFEE6", -1, 43],
+ ["\uFEE7", -1, 43],
+ ["\uFEE8", -1, 43],
+ ["\uFEE9", -1, 44],
+ ["\uFEEA", -1, 44],
+ ["\uFEEB", -1, 44],
+ ["\uFEEC", -1, 44],
+ ["\uFEED", -1, 45],
+ ["\uFEEE", -1, 45],
+ ["\uFEEF", -1, 46],
+ ["\uFEF0", -1, 46],
+ ["\uFEF1", -1, 47],
+ ["\uFEF2", -1, 47],
+ ["\uFEF3", -1, 47],
+ ["\uFEF4", -1, 47],
+ ["\uFEF5", -1, 51],
+ ["\uFEF6", -1, 51],
+ ["\uFEF7", -1, 49],
+ ["\uFEF8", -1, 49],
+ ["\uFEF9", -1, 50],
+ ["\uFEFA", -1, 50],
+ ["\uFEFB", -1, 48],
+ ["\uFEFC", -1, 48]
+ ];
+
+ /** @const */ var a_1 = [
+ ["\u0622", -1, 1],
+ ["\u0623", -1, 1],
+ ["\u0624", -1, 1],
+ ["\u0625", -1, 1],
+ ["\u0626", -1, 1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["\u0622", -1, 1],
+ ["\u0623", -1, 1],
+ ["\u0624", -1, 2],
+ ["\u0625", -1, 1],
+ ["\u0626", -1, 3]
+ ];
+
+ /** @const */ var a_3 = [
+ ["\u0627\u0644", -1, 2],
+ ["\u0628\u0627\u0644", -1, 1],
+ ["\u0643\u0627\u0644", -1, 1],
+ ["\u0644\u0644", -1, 2]
+ ];
+
+ /** @const */ var a_4 = [
+ ["\u0623\u0622", -1, 2],
+ ["\u0623\u0623", -1, 1],
+ ["\u0623\u0624", -1, 1],
+ ["\u0623\u0625", -1, 4],
+ ["\u0623\u0627", -1, 3]
+ ];
+
+ /** @const */ var a_5 = [
+ ["\u0641", -1, 1],
+ ["\u0648", -1, 1]
+ ];
+
+ /** @const */ var a_6 = [
+ ["\u0627\u0644", -1, 2],
+ ["\u0628\u0627\u0644", -1, 1],
+ ["\u0643\u0627\u0644", -1, 1],
+ ["\u0644\u0644", -1, 2]
+ ];
+
+ /** @const */ var a_7 = [
+ ["\u0628", -1, 1],
+ ["\u0628\u0627", 0, -1],
+ ["\u0628\u0628", 0, 2],
+ ["\u0643\u0643", -1, 3]
+ ];
+
+ /** @const */ var a_8 = [
+ ["\u0633\u0623", -1, 4],
+ ["\u0633\u062A", -1, 2],
+ ["\u0633\u0646", -1, 3],
+ ["\u0633\u064A", -1, 1]
+ ];
+
+ /** @const */ var a_9 = [
+ ["\u062A\u0633\u062A", -1, 1],
+ ["\u0646\u0633\u062A", -1, 1],
+ ["\u064A\u0633\u062A", -1, 1]
+ ];
+
+ /** @const */ var a_10 = [
+ ["\u0643\u0645\u0627", -1, 3],
+ ["\u0647\u0645\u0627", -1, 3],
+ ["\u0646\u0627", -1, 2],
+ ["\u0647\u0627", -1, 2],
+ ["\u0643", -1, 1],
+ ["\u0643\u0645", -1, 2],
+ ["\u0647\u0645", -1, 2],
+ ["\u0647\u0646", -1, 2],
+ ["\u0647", -1, 1],
+ ["\u064A", -1, 1]
+ ];
+
+ /** @const */ var a_11 = [
+ ["\u0646", -1, 1]
+ ];
+
+ /** @const */ var a_12 = [
+ ["\u0627", -1, 1],
+ ["\u0648", -1, 1],
+ ["\u064A", -1, 1]
+ ];
+
+ /** @const */ var a_13 = [
+ ["\u0627\u062A", -1, 1]
+ ];
+
+ /** @const */ var a_14 = [
+ ["\u062A", -1, 1]
+ ];
+
+ /** @const */ var a_15 = [
+ ["\u0629", -1, 1]
+ ];
+
+ /** @const */ var a_16 = [
+ ["\u064A", -1, 1]
+ ];
+
+ /** @const */ var a_17 = [
+ ["\u0643\u0645\u0627", -1, 3],
+ ["\u0647\u0645\u0627", -1, 3],
+ ["\u0646\u0627", -1, 2],
+ ["\u0647\u0627", -1, 2],
+ ["\u0643", -1, 1],
+ ["\u0643\u0645", -1, 2],
+ ["\u0647\u0645", -1, 2],
+ ["\u0643\u0646", -1, 2],
+ ["\u0647\u0646", -1, 2],
+ ["\u0647", -1, 1],
+ ["\u0643\u0645\u0648", -1, 3],
+ ["\u0646\u064A", -1, 2]
+ ];
+
+ /** @const */ var a_18 = [
+ ["\u0627", -1, 1],
+ ["\u062A\u0627", 0, 2],
+ ["\u062A\u0645\u0627", 0, 4],
+ ["\u0646\u0627", 0, 2],
+ ["\u062A", -1, 1],
+ ["\u0646", -1, 1],
+ ["\u0627\u0646", 5, 3],
+ ["\u062A\u0646", 5, 2],
+ ["\u0648\u0646", 5, 3],
+ ["\u064A\u0646", 5, 3],
+ ["\u064A", -1, 1]
+ ];
+
+ /** @const */ var a_19 = [
+ ["\u0648\u0627", -1, 1],
+ ["\u062A\u0645", -1, 1]
+ ];
+
+ /** @const */ var a_20 = [
+ ["\u0648", -1, 1],
+ ["\u062A\u0645\u0648", 0, 2]
+ ];
+
+ /** @const */ var a_21 = [
+ ["\u0649", -1, 1]
+ ];
+
+ var /** boolean */ B_is_defined = false;
+ var /** boolean */ B_is_verb = false;
+ var /** boolean */ B_is_noun = false;
+
+
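+    // r_Normalize_pre strips tatweel and harakat (the a_0 entries with result 1),
+    // folds Arabic-Indic digits U+0660-U+0669 to ASCII "0"-"9", and rewrites the
+    // presentation forms U+FE80-U+FEFC back to their base letters.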
+ /** @return {boolean} */
+ function r_Normalize_pre() {
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ lab2: {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab3: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_0);
+ if (among_var == 0)
+ {
+ break lab3;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("0"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("1"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("2"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("3"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("4"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("5"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("6"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("7"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("8"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ if (!base.slice_from("9"))
+ {
+ return false;
+ }
+ break;
+ case 12:
+ if (!base.slice_from("\u0621"))
+ {
+ return false;
+ }
+ break;
+ case 13:
+ if (!base.slice_from("\u0623"))
+ {
+ return false;
+ }
+ break;
+ case 14:
+ if (!base.slice_from("\u0625"))
+ {
+ return false;
+ }
+ break;
+ case 15:
+ if (!base.slice_from("\u0626"))
+ {
+ return false;
+ }
+ break;
+ case 16:
+ if (!base.slice_from("\u0622"))
+ {
+ return false;
+ }
+ break;
+ case 17:
+ if (!base.slice_from("\u0624"))
+ {
+ return false;
+ }
+ break;
+ case 18:
+ if (!base.slice_from("\u0627"))
+ {
+ return false;
+ }
+ break;
+ case 19:
+ if (!base.slice_from("\u0628"))
+ {
+ return false;
+ }
+ break;
+ case 20:
+ if (!base.slice_from("\u0629"))
+ {
+ return false;
+ }
+ break;
+ case 21:
+ if (!base.slice_from("\u062A"))
+ {
+ return false;
+ }
+ break;
+ case 22:
+ if (!base.slice_from("\u062B"))
+ {
+ return false;
+ }
+ break;
+ case 23:
+ if (!base.slice_from("\u062C"))
+ {
+ return false;
+ }
+ break;
+ case 24:
+ if (!base.slice_from("\u062D"))
+ {
+ return false;
+ }
+ break;
+ case 25:
+ if (!base.slice_from("\u062E"))
+ {
+ return false;
+ }
+ break;
+ case 26:
+ if (!base.slice_from("\u062F"))
+ {
+ return false;
+ }
+ break;
+ case 27:
+ if (!base.slice_from("\u0630"))
+ {
+ return false;
+ }
+ break;
+ case 28:
+ if (!base.slice_from("\u0631"))
+ {
+ return false;
+ }
+ break;
+ case 29:
+ if (!base.slice_from("\u0632"))
+ {
+ return false;
+ }
+ break;
+ case 30:
+ if (!base.slice_from("\u0633"))
+ {
+ return false;
+ }
+ break;
+ case 31:
+ if (!base.slice_from("\u0634"))
+ {
+ return false;
+ }
+ break;
+ case 32:
+ if (!base.slice_from("\u0635"))
+ {
+ return false;
+ }
+ break;
+ case 33:
+ if (!base.slice_from("\u0636"))
+ {
+ return false;
+ }
+ break;
+ case 34:
+ if (!base.slice_from("\u0637"))
+ {
+ return false;
+ }
+ break;
+ case 35:
+ if (!base.slice_from("\u0638"))
+ {
+ return false;
+ }
+ break;
+ case 36:
+ if (!base.slice_from("\u0639"))
+ {
+ return false;
+ }
+ break;
+ case 37:
+ if (!base.slice_from("\u063A"))
+ {
+ return false;
+ }
+ break;
+ case 38:
+ if (!base.slice_from("\u0641"))
+ {
+ return false;
+ }
+ break;
+ case 39:
+ if (!base.slice_from("\u0642"))
+ {
+ return false;
+ }
+ break;
+ case 40:
+ if (!base.slice_from("\u0643"))
+ {
+ return false;
+ }
+ break;
+ case 41:
+ if (!base.slice_from("\u0644"))
+ {
+ return false;
+ }
+ break;
+ case 42:
+ if (!base.slice_from("\u0645"))
+ {
+ return false;
+ }
+ break;
+ case 43:
+ if (!base.slice_from("\u0646"))
+ {
+ return false;
+ }
+ break;
+ case 44:
+ if (!base.slice_from("\u0647"))
+ {
+ return false;
+ }
+ break;
+ case 45:
+ if (!base.slice_from("\u0648"))
+ {
+ return false;
+ }
+ break;
+ case 46:
+ if (!base.slice_from("\u0649"))
+ {
+ return false;
+ }
+ break;
+ case 47:
+ if (!base.slice_from("\u064A"))
+ {
+ return false;
+ }
+ break;
+ case 48:
+ if (!base.slice_from("\u0644\u0627"))
+ {
+ return false;
+ }
+ break;
+ case 49:
+ if (!base.slice_from("\u0644\u0623"))
+ {
+ return false;
+ }
+ break;
+ case 50:
+ if (!base.slice_from("\u0644\u0625"))
+ {
+ return false;
+ }
+ break;
+ case 51:
+ if (!base.slice_from("\u0644\u0622"))
+ {
+ return false;
+ }
+ break;
+ }
+ break lab2;
+ }
+ base.cursor = v_3;
+ if (base.cursor >= base.limit)
+ {
+ break lab1;
+ }
+ base.cursor++;
+ }
+ continue;
+ }
+ base.cursor = v_2;
+ break;
+ }
+ }
+ base.cursor = v_1;
+ return true;
+ };
+
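+    // r_Normalize_post rewrites hamza carriers: a word-final carrier from a_1
+    // becomes bare hamza U+0621; elsewhere a_2 maps them to plain alef, waw or yeh.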
+ /** @return {boolean} */
+ function r_Normalize_post() {
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_1) == 0)
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("\u0621"))
+ {
+ return false;
+ }
+ base.cursor = base.limit_backward;
+ }
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab2: {
+ lab3: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab4: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_2);
+ if (among_var == 0)
+ {
+ break lab4;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u0627"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u0648"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u064A"))
+ {
+ return false;
+ }
+ break;
+ }
+ break lab3;
+ }
+ base.cursor = v_4;
+ if (base.cursor >= base.limit)
+ {
+ break lab2;
+ }
+ base.cursor++;
+ }
+ continue;
+ }
+ base.cursor = v_3;
+ break;
+ }
+ }
+ base.cursor = v_2;
+ return true;
+ };
+
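+    // r_Checks1 only classifies: a leading definite article from a_3 marks the
+    // word as a defined noun (is_noun/is_defined), subject to minimum word lengths.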
+ /** @return {boolean} */
+ function r_Checks1() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_3);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (base.current.length <= 4)
+ {
+ return false;
+ }
+ B_is_noun = true;
+ B_is_verb = false;
+ B_is_defined = true;
+ break;
+ case 2:
+ if (base.current.length <= 3)
+ {
+ return false;
+ }
+ B_is_noun = true;
+ B_is_verb = false;
+ B_is_defined = true;
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Prefix_Step1() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_4);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (base.current.length <= 3)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u0623"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (base.current.length <= 3)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u0622"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (base.current.length <= 3)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u0627"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (base.current.length <= 3)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u0625"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Prefix_Step2() {
+ base.bra = base.cursor;
+ if (base.find_among(a_5) == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ if (base.current.length <= 3)
+ {
+ return false;
+ }
+ {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ if (!(base.eq_s("\u0627")))
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = v_1;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Prefix_Step3a_Noun() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_6);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (base.current.length <= 5)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (base.current.length <= 4)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Prefix_Step3b_Noun() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_7);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (base.current.length <= 3)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (base.current.length <= 3)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u0628"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (base.current.length <= 3)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u0643"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Prefix_Step3_Verb() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_8);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (base.current.length <= 4)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u064A"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (base.current.length <= 4)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u062A"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (base.current.length <= 4)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u0646"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (base.current.length <= 4)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u0623"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Prefix_Step4_Verb() {
+ base.bra = base.cursor;
+ if (base.find_among(a_9) == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ if (base.current.length <= 4)
+ {
+ return false;
+ }
+ B_is_verb = true;
+ B_is_noun = false;
+ if (!base.slice_from("\u0627\u0633\u062A"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Noun_Step1a() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_10);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (base.current.length < 4)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (base.current.length < 5)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (base.current.length < 6)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Noun_Step1b() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_11) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (base.current.length <= 5)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Noun_Step2a() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_12) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (base.current.length <= 4)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Noun_Step2b() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_13) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (base.current.length < 5)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Noun_Step2c1() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_14) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (base.current.length < 4)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Noun_Step2c2() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_15) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (base.current.length < 4)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Noun_Step3() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_16) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (base.current.length < 3)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Verb_Step1() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_17);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (base.current.length < 4)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (base.current.length < 5)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (base.current.length < 6)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Verb_Step2a() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_18);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (base.current.length < 4)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (base.current.length < 5)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (base.current.length <= 5)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (base.current.length < 6)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Verb_Step2b() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_19) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (base.current.length < 5)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Verb_Step2c() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_20);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (base.current.length < 4)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (base.current.length < 6)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_All_alef_maqsura() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_21) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("\u064A"))
+ {
+ return false;
+ }
+ return true;
+ };
+
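+    // Driver: start by assuming both noun and verb, let r_Checks1 refine that,
+    // normalise, strip suffixes right-to-left, strip prefixes, then re-normalise.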
+ this.stem = /** @return {boolean} */ function() {
+ B_is_noun = true;
+ B_is_verb = true;
+ B_is_defined = false;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ r_Checks1();
+ base.cursor = v_1;
+ r_Normalize_pre();
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab0: {
+ lab1: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab2: {
+ if (!B_is_verb)
+ {
+ break lab2;
+ }
+ lab3: {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab4: {
+ {
+ var v_5 = 1;
+ while(true)
+ {
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ lab5: {
+ if (!r_Suffix_Verb_Step1())
+ {
+ break lab5;
+ }
+ v_5--;
+ continue;
+ }
+ base.cursor = base.limit - v_6;
+ break;
+ }
+ if (v_5 > 0)
+ {
+ break lab4;
+ }
+ }
+ lab6: {
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab7: {
+ if (!r_Suffix_Verb_Step2a())
+ {
+ break lab7;
+ }
+ break lab6;
+ }
+ base.cursor = base.limit - v_7;
+ lab8: {
+ if (!r_Suffix_Verb_Step2c())
+ {
+ break lab8;
+ }
+ break lab6;
+ }
+ base.cursor = base.limit - v_7;
+ if (base.cursor <= base.limit_backward)
+ {
+ break lab4;
+ }
+ base.cursor--;
+ }
+ break lab3;
+ }
+ base.cursor = base.limit - v_4;
+ lab9: {
+ if (!r_Suffix_Verb_Step2b())
+ {
+ break lab9;
+ }
+ break lab3;
+ }
+ base.cursor = base.limit - v_4;
+ if (!r_Suffix_Verb_Step2a())
+ {
+ break lab2;
+ }
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_3;
+ lab10: {
+ if (!B_is_noun)
+ {
+ break lab10;
+ }
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ lab11: {
+ lab12: {
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
+ lab13: {
+ if (!r_Suffix_Noun_Step2c2())
+ {
+ break lab13;
+ }
+ break lab12;
+ }
+ base.cursor = base.limit - v_9;
+ lab14: {
+ lab15: {
+ if (!B_is_defined)
+ {
+ break lab15;
+ }
+ break lab14;
+ }
+ if (!r_Suffix_Noun_Step1a())
+ {
+ break lab14;
+ }
+ lab16: {
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
+ lab17: {
+ if (!r_Suffix_Noun_Step2a())
+ {
+ break lab17;
+ }
+ break lab16;
+ }
+ base.cursor = base.limit - v_10;
+ lab18: {
+ if (!r_Suffix_Noun_Step2b())
+ {
+ break lab18;
+ }
+ break lab16;
+ }
+ base.cursor = base.limit - v_10;
+ lab19: {
+ if (!r_Suffix_Noun_Step2c1())
+ {
+ break lab19;
+ }
+ break lab16;
+ }
+ base.cursor = base.limit - v_10;
+ if (base.cursor <= base.limit_backward)
+ {
+ break lab14;
+ }
+ base.cursor--;
+ }
+ break lab12;
+ }
+ base.cursor = base.limit - v_9;
+ lab20: {
+ if (!r_Suffix_Noun_Step1b())
+ {
+ break lab20;
+ }
+ lab21: {
+ /** @const */ var /** number */ v_11 = base.limit - base.cursor;
+ lab22: {
+ if (!r_Suffix_Noun_Step2a())
+ {
+ break lab22;
+ }
+ break lab21;
+ }
+ base.cursor = base.limit - v_11;
+ lab23: {
+ if (!r_Suffix_Noun_Step2b())
+ {
+ break lab23;
+ }
+ break lab21;
+ }
+ base.cursor = base.limit - v_11;
+ if (!r_Suffix_Noun_Step2c1())
+ {
+ break lab20;
+ }
+ }
+ break lab12;
+ }
+ base.cursor = base.limit - v_9;
+ lab24: {
+ lab25: {
+ if (!B_is_defined)
+ {
+ break lab25;
+ }
+ break lab24;
+ }
+ if (!r_Suffix_Noun_Step2a())
+ {
+ break lab24;
+ }
+ break lab12;
+ }
+ base.cursor = base.limit - v_9;
+ if (!r_Suffix_Noun_Step2b())
+ {
+ base.cursor = base.limit - v_8;
+ break lab11;
+ }
+ }
+ }
+ if (!r_Suffix_Noun_Step3())
+ {
+ break lab10;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_3;
+ if (!r_Suffix_All_alef_maqsura())
+ {
+ break lab0;
+ }
+ }
+ }
+ base.cursor = base.limit - v_2;
+ base.cursor = base.limit_backward;
+ /** @const */ var /** number */ v_12 = base.cursor;
+ lab26: {
+ /** @const */ var /** number */ v_13 = base.cursor;
+ lab27: {
+ if (!r_Prefix_Step1())
+ {
+ base.cursor = v_13;
+ break lab27;
+ }
+ }
+ /** @const */ var /** number */ v_14 = base.cursor;
+ lab28: {
+ if (!r_Prefix_Step2())
+ {
+ base.cursor = v_14;
+ break lab28;
+ }
+ }
+ lab29: {
+ /** @const */ var /** number */ v_15 = base.cursor;
+ lab30: {
+ if (!r_Prefix_Step3a_Noun())
+ {
+ break lab30;
+ }
+ break lab29;
+ }
+ base.cursor = v_15;
+ lab31: {
+ if (!B_is_noun)
+ {
+ break lab31;
+ }
+ if (!r_Prefix_Step3b_Noun())
+ {
+ break lab31;
+ }
+ break lab29;
+ }
+ base.cursor = v_15;
+ if (!B_is_verb)
+ {
+ break lab26;
+ }
+ /** @const */ var /** number */ v_16 = base.cursor;
+ lab32: {
+ if (!r_Prefix_Step3_Verb())
+ {
+ base.cursor = v_16;
+ break lab32;
+ }
+ }
+ if (!r_Prefix_Step4_Verb())
+ {
+ break lab26;
+ }
+ }
+ }
+ base.cursor = v_12;
+ r_Normalize_post();
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
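The a_N tables consumed by find_among and find_among_b above are Snowball "among" lists: each entry is [form, substring_i, result], where substring_i points at another entry in the same table whose form is a proper prefix (forward matching) or suffix (backward matching) of this one, -1 if there is none, and result becomes among_var on a match. A standalone sketch that decodes one entry; describeAmong is a hypothetical helper, not part of the patch:

    // Decode one Snowball among entry: [form, substring_i, result].
    function describeAmong(table, i) {
        var entry = table[i];
        var shorter = entry[1] === -1 ? '(none)' : table[entry[1]][0];
        return entry[0] + ' -> among_var ' + entry[2] + ', linked shorter form: ' + shorter;
    }

    // a_7 from arabic-stemmer.js above: "\u0628\u0627" links back to entry 0 ("\u0628"),
    // which is its one-character prefix.
    var a_7 = [["\u0628", -1, 1], ["\u0628\u0627", 0, -1], ["\u0628\u0628", 0, 2], ["\u0643\u0643", -1, 3]];
    console.log(describeAmong(a_7, 1));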
diff --git a/sphinx/search/non-minified-js/armenian-stemmer.js b/sphinx/search/non-minified-js/armenian-stemmer.js
new file mode 100644
index 00000000000..915146dbd0a
--- /dev/null
+++ b/sphinx/search/non-minified-js/armenian-stemmer.js
@@ -0,0 +1,350 @@
+// Generated from armenian.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var ArmenianStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["\u0580\u0578\u0580\u0564", -1, 1],
+ ["\u0565\u0580\u0578\u0580\u0564", 0, 1],
+ ["\u0561\u056C\u056B", -1, 1],
+ ["\u0561\u056F\u056B", -1, 1],
+ ["\u0578\u0580\u0561\u056F", -1, 1],
+ ["\u0565\u0572", -1, 1],
+ ["\u0561\u056F\u0561\u0576", -1, 1],
+ ["\u0561\u0580\u0561\u0576", -1, 1],
+ ["\u0565\u0576", -1, 1],
+ ["\u0565\u056F\u0565\u0576", 8, 1],
+ ["\u0565\u0580\u0565\u0576", 8, 1],
+ ["\u0578\u0580\u0567\u0576", -1, 1],
+ ["\u056B\u0576", -1, 1],
+ ["\u0563\u056B\u0576", 12, 1],
+ ["\u0578\u057E\u056B\u0576", 12, 1],
+ ["\u056C\u0561\u0575\u0576", -1, 1],
+ ["\u057E\u0578\u0582\u0576", -1, 1],
+ ["\u057A\u0565\u057D", -1, 1],
+ ["\u056B\u057E", -1, 1],
+ ["\u0561\u057F", -1, 1],
+ ["\u0561\u057E\u0565\u057F", -1, 1],
+ ["\u056F\u0578\u057F", -1, 1],
+ ["\u0562\u0561\u0580", -1, 1]
+ ];
+
+ /** @const */ var a_1 = [
+ ["\u0561", -1, 1],
+ ["\u0561\u0581\u0561", 0, 1],
+ ["\u0565\u0581\u0561", 0, 1],
+ ["\u057E\u0565", -1, 1],
+ ["\u0561\u0581\u0580\u056B", -1, 1],
+ ["\u0561\u0581\u056B", -1, 1],
+ ["\u0565\u0581\u056B", -1, 1],
+ ["\u057E\u0565\u0581\u056B", 6, 1],
+ ["\u0561\u056C", -1, 1],
+ ["\u0568\u0561\u056C", 8, 1],
+ ["\u0561\u0576\u0561\u056C", 8, 1],
+ ["\u0565\u0576\u0561\u056C", 8, 1],
+ ["\u0561\u0581\u0576\u0561\u056C", 8, 1],
+ ["\u0565\u056C", -1, 1],
+ ["\u0568\u0565\u056C", 13, 1],
+ ["\u0576\u0565\u056C", 13, 1],
+ ["\u0581\u0576\u0565\u056C", 15, 1],
+ ["\u0565\u0581\u0576\u0565\u056C", 16, 1],
+ ["\u0579\u0565\u056C", 13, 1],
+ ["\u057E\u0565\u056C", 13, 1],
+ ["\u0561\u0581\u057E\u0565\u056C", 19, 1],
+ ["\u0565\u0581\u057E\u0565\u056C", 19, 1],
+ ["\u057F\u0565\u056C", 13, 1],
+ ["\u0561\u057F\u0565\u056C", 22, 1],
+ ["\u0578\u057F\u0565\u056C", 22, 1],
+ ["\u056F\u0578\u057F\u0565\u056C", 24, 1],
+ ["\u057E\u0561\u056E", -1, 1],
+ ["\u0578\u0582\u0574", -1, 1],
+ ["\u057E\u0578\u0582\u0574", 27, 1],
+ ["\u0561\u0576", -1, 1],
+ ["\u0581\u0561\u0576", 29, 1],
+ ["\u0561\u0581\u0561\u0576", 30, 1],
+ ["\u0561\u0581\u0580\u056B\u0576", -1, 1],
+ ["\u0561\u0581\u056B\u0576", -1, 1],
+ ["\u0565\u0581\u056B\u0576", -1, 1],
+ ["\u057E\u0565\u0581\u056B\u0576", 34, 1],
+ ["\u0561\u056C\u056B\u057D", -1, 1],
+ ["\u0565\u056C\u056B\u057D", -1, 1],
+ ["\u0561\u057E", -1, 1],
+ ["\u0561\u0581\u0561\u057E", 38, 1],
+ ["\u0565\u0581\u0561\u057E", 38, 1],
+ ["\u0561\u056C\u0578\u057E", -1, 1],
+ ["\u0565\u056C\u0578\u057E", -1, 1],
+ ["\u0561\u0580", -1, 1],
+ ["\u0561\u0581\u0561\u0580", 43, 1],
+ ["\u0565\u0581\u0561\u0580", 43, 1],
+ ["\u0561\u0581\u0580\u056B\u0580", -1, 1],
+ ["\u0561\u0581\u056B\u0580", -1, 1],
+ ["\u0565\u0581\u056B\u0580", -1, 1],
+ ["\u057E\u0565\u0581\u056B\u0580", 48, 1],
+ ["\u0561\u0581", -1, 1],
+ ["\u0565\u0581", -1, 1],
+ ["\u0561\u0581\u0580\u0565\u0581", 51, 1],
+ ["\u0561\u056C\u0578\u0582\u0581", -1, 1],
+ ["\u0565\u056C\u0578\u0582\u0581", -1, 1],
+ ["\u0561\u056C\u0578\u0582", -1, 1],
+ ["\u0565\u056C\u0578\u0582", -1, 1],
+ ["\u0561\u0584", -1, 1],
+ ["\u0581\u0561\u0584", 57, 1],
+ ["\u0561\u0581\u0561\u0584", 58, 1],
+ ["\u0561\u0581\u0580\u056B\u0584", -1, 1],
+ ["\u0561\u0581\u056B\u0584", -1, 1],
+ ["\u0565\u0581\u056B\u0584", -1, 1],
+ ["\u057E\u0565\u0581\u056B\u0584", 62, 1],
+ ["\u0561\u0576\u0584", -1, 1],
+ ["\u0581\u0561\u0576\u0584", 64, 1],
+ ["\u0561\u0581\u0561\u0576\u0584", 65, 1],
+ ["\u0561\u0581\u0580\u056B\u0576\u0584", -1, 1],
+ ["\u0561\u0581\u056B\u0576\u0584", -1, 1],
+ ["\u0565\u0581\u056B\u0576\u0584", -1, 1],
+ ["\u057E\u0565\u0581\u056B\u0576\u0584", 69, 1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["\u0578\u0580\u0564", -1, 1],
+ ["\u0578\u0582\u0575\u0569", -1, 1],
+ ["\u0578\u0582\u0570\u056B", -1, 1],
+ ["\u0581\u056B", -1, 1],
+ ["\u056B\u056C", -1, 1],
+ ["\u0561\u056F", -1, 1],
+ ["\u0575\u0561\u056F", 5, 1],
+ ["\u0561\u0576\u0561\u056F", 5, 1],
+ ["\u056B\u056F", -1, 1],
+ ["\u0578\u0582\u056F", -1, 1],
+ ["\u0561\u0576", -1, 1],
+ ["\u057A\u0561\u0576", 10, 1],
+ ["\u057D\u057F\u0561\u0576", 10, 1],
+ ["\u0561\u0580\u0561\u0576", 10, 1],
+ ["\u0565\u0572\u0567\u0576", -1, 1],
+ ["\u0575\u0578\u0582\u0576", -1, 1],
+ ["\u0578\u0582\u0569\u0575\u0578\u0582\u0576", 15, 1],
+ ["\u0561\u056E\u0578", -1, 1],
+ ["\u056B\u0579", -1, 1],
+ ["\u0578\u0582\u057D", -1, 1],
+ ["\u0578\u0582\u057D\u057F", -1, 1],
+ ["\u0563\u0561\u0580", -1, 1],
+ ["\u057E\u0578\u0580", -1, 1],
+ ["\u0561\u057E\u0578\u0580", 22, 1],
+ ["\u0578\u0581", -1, 1],
+ ["\u0561\u0576\u0585\u0581", -1, 1],
+ ["\u0578\u0582", -1, 1],
+ ["\u0584", -1, 1],
+ ["\u0579\u0565\u0584", 27, 1],
+ ["\u056B\u0584", 27, 1],
+ ["\u0561\u056C\u056B\u0584", 29, 1],
+ ["\u0561\u0576\u056B\u0584", 29, 1],
+ ["\u057E\u0561\u056E\u0584", 27, 1],
+ ["\u0578\u0582\u0575\u0584", 27, 1],
+ ["\u0565\u0576\u0584", 27, 1],
+ ["\u0578\u0576\u0584", 27, 1],
+ ["\u0578\u0582\u0576\u0584", 27, 1],
+ ["\u0574\u0578\u0582\u0576\u0584", 36, 1],
+ ["\u056B\u0579\u0584", 27, 1],
+ ["\u0561\u0580\u0584", 27, 1]
+ ];
+
+ /** @const */ var a_3 = [
+ ["\u057D\u0561", -1, 1],
+ ["\u057E\u0561", -1, 1],
+ ["\u0561\u0574\u0562", -1, 1],
+ ["\u0564", -1, 1],
+ ["\u0561\u0576\u0564", 3, 1],
+ ["\u0578\u0582\u0569\u0575\u0561\u0576\u0564", 4, 1],
+ ["\u057E\u0561\u0576\u0564", 4, 1],
+ ["\u0578\u057B\u0564", 3, 1],
+ ["\u0565\u0580\u0564", 3, 1],
+ ["\u0576\u0565\u0580\u0564", 8, 1],
+ ["\u0578\u0582\u0564", 3, 1],
+ ["\u0568", -1, 1],
+ ["\u0561\u0576\u0568", 11, 1],
+ ["\u0578\u0582\u0569\u0575\u0561\u0576\u0568", 12, 1],
+ ["\u057E\u0561\u0576\u0568", 12, 1],
+ ["\u0578\u057B\u0568", 11, 1],
+ ["\u0565\u0580\u0568", 11, 1],
+ ["\u0576\u0565\u0580\u0568", 16, 1],
+ ["\u056B", -1, 1],
+ ["\u057E\u056B", 18, 1],
+ ["\u0565\u0580\u056B", 18, 1],
+ ["\u0576\u0565\u0580\u056B", 20, 1],
+ ["\u0561\u0576\u0578\u0582\u0574", -1, 1],
+ ["\u0565\u0580\u0578\u0582\u0574", -1, 1],
+ ["\u0576\u0565\u0580\u0578\u0582\u0574", 23, 1],
+ ["\u0576", -1, 1],
+ ["\u0561\u0576", 25, 1],
+ ["\u0578\u0582\u0569\u0575\u0561\u0576", 26, 1],
+ ["\u057E\u0561\u0576", 26, 1],
+ ["\u056B\u0576", 25, 1],
+ ["\u0565\u0580\u056B\u0576", 29, 1],
+ ["\u0576\u0565\u0580\u056B\u0576", 30, 1],
+ ["\u0578\u0582\u0569\u0575\u0561\u0576\u0576", 25, 1],
+ ["\u0565\u0580\u0576", 25, 1],
+ ["\u0576\u0565\u0580\u0576", 33, 1],
+ ["\u0578\u0582\u0576", 25, 1],
+ ["\u0578\u057B", -1, 1],
+ ["\u0578\u0582\u0569\u0575\u0561\u0576\u057D", -1, 1],
+ ["\u057E\u0561\u0576\u057D", -1, 1],
+ ["\u0578\u057B\u057D", -1, 1],
+ ["\u0578\u057E", -1, 1],
+ ["\u0561\u0576\u0578\u057E", 40, 1],
+ ["\u057E\u0578\u057E", 40, 1],
+ ["\u0565\u0580\u0578\u057E", 40, 1],
+ ["\u0576\u0565\u0580\u0578\u057E", 43, 1],
+ ["\u0565\u0580", -1, 1],
+ ["\u0576\u0565\u0580", 45, 1],
+ ["\u0581", -1, 1],
+ ["\u056B\u0581", 47, 1],
+ ["\u057E\u0561\u0576\u056B\u0581", 48, 1],
+ ["\u0578\u057B\u056B\u0581", 48, 1],
+ ["\u057E\u056B\u0581", 48, 1],
+ ["\u0565\u0580\u056B\u0581", 48, 1],
+ ["\u0576\u0565\u0580\u056B\u0581", 52, 1],
+ ["\u0581\u056B\u0581", 48, 1],
+ ["\u0578\u0581", 47, 1],
+ ["\u0578\u0582\u0581", 47, 1]
+ ];
+
+ /** @const */ var /** Array */ g_v = [209, 4, 128, 0, 18];
+
+ var /** number */ I_p2 = 0;
+ var /** number */ I_pV = 0;
+
+
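+    // r_mark_regions sets pV just past the first vowel and p2 to the usual
+    // Snowball R2 (after the second vowel-consonant sequence); r_ending needs R2.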
+ /** @return {boolean} */
+ function r_mark_regions() {
+ I_pV = base.limit;
+ I_p2 = base.limit;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ if (!base.go_out_grouping(g_v, 1377, 1413))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ I_pV = base.cursor;
+ if (!base.go_in_grouping(g_v, 1377, 1413))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ if (!base.go_out_grouping(g_v, 1377, 1413))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 1377, 1413))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ I_p2 = base.cursor;
+ }
+ base.cursor = v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_R2() {
+ return I_p2 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_adjective() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_0) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_verb() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_1) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_noun() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_2) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_ending() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_3) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ r_mark_regions();
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ if (base.cursor < I_pV)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit_backward;
+ base.limit_backward = I_pV;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ r_ending();
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ r_verb();
+ base.cursor = base.limit - v_3;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ r_adjective();
+ base.cursor = base.limit - v_4;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ r_noun();
+ base.cursor = base.limit - v_5;
+ base.limit_backward = v_1;
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
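Grouping vectors such as g_v = [209, 4, 128, 0, 18] above are bitsets over the character range [min, max] passed to in_grouping and friends: bit (ch - min) of the packed array is set when ch belongs to the group. Decoding the Armenian vowel group is a quick sanity check; this is a standalone sketch, not part of the generated file:

    // Expand a Snowball grouping bitset into the characters it contains.
    function expandGrouping(s, min, max) {
        var chars = [];
        for (var ch = min; ch <= max; ch++) {
            var bit = ch - min;
            if ((s[bit >>> 3] & (1 << (bit & 7))) !== 0) {
                chars.push(String.fromCharCode(ch));
            }
        }
        return chars;
    }

    console.log(expandGrouping([209, 4, 128, 0, 18], 1377, 1413).join(' '));
    // -> 'ա ե է ը ի ո ւ օ', the eight Armenian vowels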
diff --git a/sphinx/search/non-minified-js/base-stemmer.js b/sphinx/search/non-minified-js/base-stemmer.js
index ca6cca156a3..8cf2d585582 100644
--- a/sphinx/search/non-minified-js/base-stemmer.js
+++ b/sphinx/search/non-minified-js/base-stemmer.js
@@ -1,5 +1,18 @@
+// @ts-check
+
/**@constructor*/
-BaseStemmer = function() {
+const BaseStemmer = function() {
+ /** @protected */
+ this.current = '';
+ this.cursor = 0;
+ this.limit = 0;
+ this.limit_backward = 0;
+ this.bra = 0;
+ this.ket = 0;
+
+ /**
+ * @param {string} value
+ */
this.setCurrent = function(value) {
this.current = value;
this.cursor = 0;
@@ -9,11 +22,18 @@ BaseStemmer = function() {
this.ket = this.limit;
};
+ /**
+ * @return {string}
+ */
this.getCurrent = function() {
return this.current;
};
+ /**
+ * @param {BaseStemmer} other
+ */
this.copy_from = function(other) {
+ /** @protected */
this.current = other.current;
this.cursor = other.cursor;
this.limit = other.limit;
@@ -22,7 +42,14 @@ BaseStemmer = function() {
this.ket = other.ket;
};
+ /**
+ * @param {number[]} s
+ * @param {number} min
+ * @param {number} max
+ * @return {boolean}
+ */
this.in_grouping = function(s, min, max) {
+ /** @protected */
if (this.cursor >= this.limit) return false;
var ch = this.current.charCodeAt(this.cursor);
if (ch > max || ch < min) return false;
@@ -32,7 +59,34 @@ BaseStemmer = function() {
return true;
};
+ /**
+ * @param {number[]} s
+ * @param {number} min
+ * @param {number} max
+ * @return {boolean}
+ */
+ this.go_in_grouping = function(s, min, max) {
+ /** @protected */
+ while (this.cursor < this.limit) {
+ var ch = this.current.charCodeAt(this.cursor);
+ if (ch > max || ch < min)
+ return true;
+ ch -= min;
+ if ((s[ch >>> 3] & (0x1 << (ch & 0x7))) == 0)
+ return true;
+ this.cursor++;
+ }
+ return false;
+ };
+
+ /**
+ * @param {number[]} s
+ * @param {number} min
+ * @param {number} max
+ * @return {boolean}
+ */
this.in_grouping_b = function(s, min, max) {
+ /** @protected */
if (this.cursor <= this.limit_backward) return false;
var ch = this.current.charCodeAt(this.cursor - 1);
if (ch > max || ch < min) return false;
@@ -42,7 +96,32 @@ BaseStemmer = function() {
return true;
};
+ /**
+ * @param {number[]} s
+ * @param {number} min
+ * @param {number} max
+ * @return {boolean}
+ */
+ this.go_in_grouping_b = function(s, min, max) {
+ /** @protected */
+ while (this.cursor > this.limit_backward) {
+ var ch = this.current.charCodeAt(this.cursor - 1);
+ if (ch > max || ch < min) return true;
+ ch -= min;
+ if ((s[ch >>> 3] & (0x1 << (ch & 0x7))) == 0) return true;
+ this.cursor--;
+ }
+ return false;
+ };
+
+ /**
+ * @param {number[]} s
+ * @param {number} min
+ * @param {number} max
+ * @return {boolean}
+ */
this.out_grouping = function(s, min, max) {
+ /** @protected */
if (this.cursor >= this.limit) return false;
var ch = this.current.charCodeAt(this.cursor);
if (ch > max || ch < min) {
@@ -57,7 +136,35 @@ BaseStemmer = function() {
return false;
};
+ /**
+ * @param {number[]} s
+ * @param {number} min
+ * @param {number} max
+ * @return {boolean}
+ */
+ this.go_out_grouping = function(s, min, max) {
+ /** @protected */
+ while (this.cursor < this.limit) {
+ var ch = this.current.charCodeAt(this.cursor);
+ if (ch <= max && ch >= min) {
+ ch -= min;
+ if ((s[ch >>> 3] & (0x1 << (ch & 0x7))) != 0) {
+ return true;
+ }
+ }
+ this.cursor++;
+ }
+ return false;
+ };
+
+ /**
+ * @param {number[]} s
+ * @param {number} min
+ * @param {number} max
+ * @return {boolean}
+ */
this.out_grouping_b = function(s, min, max) {
+ /** @protected */
if (this.cursor <= this.limit_backward) return false;
var ch = this.current.charCodeAt(this.cursor - 1);
if (ch > max || ch < min) {
@@ -72,8 +179,34 @@ BaseStemmer = function() {
return false;
};
+ /**
+ * @param {number[]} s
+ * @param {number} min
+ * @param {number} max
+ * @return {boolean}
+ */
+ this.go_out_grouping_b = function(s, min, max) {
+ /** @protected */
+ while (this.cursor > this.limit_backward) {
+ var ch = this.current.charCodeAt(this.cursor - 1);
+ if (ch <= max && ch >= min) {
+ ch -= min;
+ if ((s[ch >>> 3] & (0x1 << (ch & 0x7))) != 0) {
+ return true;
+ }
+ }
+ this.cursor--;
+ }
+ return false;
+ };
+
+ /**
+ * @param {string} s
+ * @return {boolean}
+ */
this.eq_s = function(s)
{
+ /** @protected */
if (this.limit - this.cursor < s.length) return false;
if (this.current.slice(this.cursor, this.cursor + s.length) != s)
{
@@ -83,8 +216,13 @@ BaseStemmer = function() {
return true;
};
+ /**
+ * @param {string} s
+ * @return {boolean}
+ */
this.eq_s_b = function(s)
{
+ /** @protected */
if (this.cursor - this.limit_backward < s.length) return false;
if (this.current.slice(this.cursor - s.length, this.cursor) != s)
{
@@ -94,8 +232,13 @@ BaseStemmer = function() {
return true;
};
- /** @return {number} */ this.find_among = function(v)
+ /**
+ * @param {Among[]} v
+ * @return {number}
+ */
+ this.find_among = function(v)
{
+ /** @protected */
var i = 0;
var j = v.length;
@@ -165,8 +308,13 @@ BaseStemmer = function() {
};
// find_among_b is for backwards processing. Same comments apply
+ /**
+ * @param {Among[]} v
+ * @return {number}
+ */
this.find_among_b = function(v)
{
+ /** @protected */
var i = 0;
var j = v.length
@@ -232,8 +380,15 @@ BaseStemmer = function() {
/* to replace chars between c_bra and c_ket in this.current by the
* chars in s.
*/
+ /**
+ * @param {number} c_bra
+ * @param {number} c_ket
+ * @param {string} s
+ * @return {number}
+ */
this.replace_s = function(c_bra, c_ket, s)
{
+ /** @protected */
var adjustment = s.length - (c_ket - c_bra);
this.current = this.current.slice(0, c_bra) + s + this.current.slice(c_ket);
this.limit += adjustment;
@@ -242,8 +397,12 @@ BaseStemmer = function() {
return adjustment;
};
+ /**
+ * @return {boolean}
+ */
this.slice_check = function()
{
+ /** @protected */
if (this.bra < 0 ||
this.bra > this.ket ||
this.ket > this.limit ||
@@ -254,8 +413,13 @@ BaseStemmer = function() {
return true;
};
+ /**
+     * @param {string} s
+ * @return {boolean}
+ */
this.slice_from = function(s)
{
+ /** @protected */
var result = false;
if (this.slice_check())
{
@@ -265,20 +429,34 @@ BaseStemmer = function() {
return result;
};
+ /**
+ * @return {boolean}
+ */
this.slice_del = function()
{
+ /** @protected */
return this.slice_from("");
};
+ /**
+ * @param {number} c_bra
+ * @param {number} c_ket
+ * @param {string} s
+ */
this.insert = function(c_bra, c_ket, s)
{
+ /** @protected */
var adjustment = this.replace_s(c_bra, c_ket, s);
if (c_bra <= this.bra) this.bra += adjustment;
if (c_bra <= this.ket) this.ket += adjustment;
};
+ /**
+ * @return {string}
+ */
this.slice_to = function()
{
+ /** @protected */
var result = '';
if (this.slice_check())
{
@@ -287,8 +465,12 @@ BaseStemmer = function() {
return result;
};
+ /**
+ * @return {string}
+ */
this.assign_to = function()
{
+ /** @protected */
return this.current.slice(0, this.limit);
};
};
diff --git a/sphinx/search/non-minified-js/basque-stemmer.js b/sphinx/search/non-minified-js/basque-stemmer.js
new file mode 100644
index 00000000000..5ed3a26af26
--- /dev/null
+++ b/sphinx/search/non-minified-js/basque-stemmer.js
@@ -0,0 +1,736 @@
+// Generated from basque.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var BasqueStemmer = function() {
+ var base = new BaseStemmer();
+
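+    // Among-table rows are [suffix, index of the shorter entry this suffix
+    // extends (-1 if none), result code dispatched on by the calling rule;
+    // a result of -1 matches but triggers no action].
+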
+ /** @const */ var a_0 = [
+ ["idea", -1, 1],
+ ["bidea", 0, 1],
+ ["kidea", 0, 1],
+ ["pidea", 0, 1],
+ ["kundea", -1, 1],
+ ["galea", -1, 1],
+ ["tailea", -1, 1],
+ ["tzailea", -1, 1],
+ ["gunea", -1, 1],
+ ["kunea", -1, 1],
+ ["tzaga", -1, 1],
+ ["gaia", -1, 1],
+ ["aldia", -1, 1],
+ ["taldia", 12, 1],
+ ["karia", -1, 1],
+ ["garria", -1, 2],
+ ["karria", -1, 1],
+ ["ka", -1, 1],
+ ["tzaka", 17, 1],
+ ["la", -1, 1],
+ ["mena", -1, 1],
+ ["pena", -1, 1],
+ ["kina", -1, 1],
+ ["ezina", -1, 1],
+ ["tezina", 23, 1],
+ ["kuna", -1, 1],
+ ["tuna", -1, 1],
+ ["kizuna", -1, 1],
+ ["era", -1, 1],
+ ["bera", 28, 1],
+ ["arabera", 29, -1],
+ ["kera", 28, 1],
+ ["pera", 28, 1],
+ ["orra", -1, 1],
+ ["korra", 33, 1],
+ ["dura", -1, 1],
+ ["gura", -1, 1],
+ ["kura", -1, 1],
+ ["tura", -1, 1],
+ ["eta", -1, 1],
+ ["keta", 39, 1],
+ ["gailua", -1, 1],
+ ["eza", -1, 1],
+ ["erreza", 42, 1],
+ ["tza", -1, 2],
+ ["gaitza", 44, 1],
+ ["kaitza", 44, 1],
+ ["kuntza", 44, 1],
+ ["ide", -1, 1],
+ ["bide", 48, 1],
+ ["kide", 48, 1],
+ ["pide", 48, 1],
+ ["kunde", -1, 1],
+ ["tzake", -1, 1],
+ ["tzeke", -1, 1],
+ ["le", -1, 1],
+ ["gale", 55, 1],
+ ["taile", 55, 1],
+ ["tzaile", 55, 1],
+ ["gune", -1, 1],
+ ["kune", -1, 1],
+ ["tze", -1, 1],
+ ["atze", 61, 1],
+ ["gai", -1, 1],
+ ["aldi", -1, 1],
+ ["taldi", 64, 1],
+ ["ki", -1, 1],
+ ["ari", -1, 1],
+ ["kari", 67, 1],
+ ["lari", 67, 1],
+ ["tari", 67, 1],
+ ["etari", 70, 1],
+ ["garri", -1, 2],
+ ["karri", -1, 1],
+ ["arazi", -1, 1],
+ ["tarazi", 74, 1],
+ ["an", -1, 1],
+ ["ean", 76, 1],
+ ["rean", 77, 1],
+ ["kan", 76, 1],
+ ["etan", 76, 1],
+ ["atseden", -1, -1],
+ ["men", -1, 1],
+ ["pen", -1, 1],
+ ["kin", -1, 1],
+ ["rekin", 84, 1],
+ ["ezin", -1, 1],
+ ["tezin", 86, 1],
+ ["tun", -1, 1],
+ ["kizun", -1, 1],
+ ["go", -1, 1],
+ ["ago", 90, 1],
+ ["tio", -1, 1],
+ ["dako", -1, 1],
+ ["or", -1, 1],
+ ["kor", 94, 1],
+ ["tzat", -1, 1],
+ ["du", -1, 1],
+ ["gailu", -1, 1],
+ ["tu", -1, 1],
+ ["atu", 99, 1],
+ ["aldatu", 100, 1],
+ ["tatu", 100, 1],
+ ["baditu", 99, -1],
+ ["ez", -1, 1],
+ ["errez", 104, 1],
+ ["tzez", 104, 1],
+ ["gaitz", -1, 1],
+ ["kaitz", -1, 1]
+ ];
+
+ /** @const */ var a_1 = [
+ ["ada", -1, 1],
+ ["kada", 0, 1],
+ ["anda", -1, 1],
+ ["denda", -1, 1],
+ ["gabea", -1, 1],
+ ["kabea", -1, 1],
+ ["aldea", -1, 1],
+ ["kaldea", 6, 1],
+ ["taldea", 6, 1],
+ ["ordea", -1, 1],
+ ["zalea", -1, 1],
+ ["tzalea", 10, 1],
+ ["gilea", -1, 1],
+ ["emea", -1, 1],
+ ["kumea", -1, 1],
+ ["nea", -1, 1],
+ ["enea", 15, 1],
+ ["zionea", 15, 1],
+ ["unea", 15, 1],
+ ["gunea", 18, 1],
+ ["pea", -1, 1],
+ ["aurrea", -1, 1],
+ ["tea", -1, 1],
+ ["kotea", 22, 1],
+ ["artea", 22, 1],
+ ["ostea", 22, 1],
+ ["etxea", -1, 1],
+ ["ga", -1, 1],
+ ["anga", 27, 1],
+ ["gaia", -1, 1],
+ ["aldia", -1, 1],
+ ["taldia", 30, 1],
+ ["handia", -1, 1],
+ ["mendia", -1, 1],
+ ["geia", -1, 1],
+ ["egia", -1, 1],
+ ["degia", 35, 1],
+ ["tegia", 35, 1],
+ ["nahia", -1, 1],
+ ["ohia", -1, 1],
+ ["kia", -1, 1],
+ ["tokia", 40, 1],
+ ["oia", -1, 1],
+ ["koia", 42, 1],
+ ["aria", -1, 1],
+ ["karia", 44, 1],
+ ["laria", 44, 1],
+ ["taria", 44, 1],
+ ["eria", -1, 1],
+ ["keria", 48, 1],
+ ["teria", 48, 1],
+ ["garria", -1, 2],
+ ["larria", -1, 1],
+ ["kirria", -1, 1],
+ ["duria", -1, 1],
+ ["asia", -1, 1],
+ ["tia", -1, 1],
+ ["ezia", -1, 1],
+ ["bizia", -1, 1],
+ ["ontzia", -1, 1],
+ ["ka", -1, 1],
+ ["joka", 60, 3],
+ ["aurka", 60, -1],
+ ["ska", 60, 1],
+ ["xka", 60, 1],
+ ["zka", 60, 1],
+ ["gibela", -1, 1],
+ ["gela", -1, 1],
+ ["kaila", -1, 1],
+ ["skila", -1, 1],
+ ["tila", -1, 1],
+ ["ola", -1, 1],
+ ["na", -1, 1],
+ ["kana", 72, 1],
+ ["ena", 72, 1],
+ ["garrena", 74, 1],
+ ["gerrena", 74, 1],
+ ["urrena", 74, 1],
+ ["zaina", 72, 1],
+ ["tzaina", 78, 1],
+ ["kina", 72, 1],
+ ["mina", 72, 1],
+ ["garna", 72, 1],
+ ["una", 72, 1],
+ ["duna", 83, 1],
+ ["asuna", 83, 1],
+ ["tasuna", 85, 1],
+ ["ondoa", -1, 1],
+ ["kondoa", 87, 1],
+ ["ngoa", -1, 1],
+ ["zioa", -1, 1],
+ ["koa", -1, 1],
+ ["takoa", 91, 1],
+ ["zkoa", 91, 1],
+ ["noa", -1, 1],
+ ["zinoa", 94, 1],
+ ["aroa", -1, 1],
+ ["taroa", 96, 1],
+ ["zaroa", 96, 1],
+ ["eroa", -1, 1],
+ ["oroa", -1, 1],
+ ["osoa", -1, 1],
+ ["toa", -1, 1],
+ ["ttoa", 102, 1],
+ ["ztoa", 102, 1],
+ ["txoa", -1, 1],
+ ["tzoa", -1, 1],
+ ["\u00F1oa", -1, 1],
+ ["ra", -1, 1],
+ ["ara", 108, 1],
+ ["dara", 109, 1],
+ ["liara", 109, 1],
+ ["tiara", 109, 1],
+ ["tara", 109, 1],
+ ["etara", 113, 1],
+ ["tzara", 109, 1],
+ ["bera", 108, 1],
+ ["kera", 108, 1],
+ ["pera", 108, 1],
+ ["ora", 108, 2],
+ ["tzarra", 108, 1],
+ ["korra", 108, 1],
+ ["tra", 108, 1],
+ ["sa", -1, 1],
+ ["osa", 123, 1],
+ ["ta", -1, 1],
+ ["eta", 125, 1],
+ ["keta", 126, 1],
+ ["sta", 125, 1],
+ ["dua", -1, 1],
+ ["mendua", 129, 1],
+ ["ordua", 129, 1],
+ ["lekua", -1, 1],
+ ["burua", -1, 1],
+ ["durua", -1, 1],
+ ["tsua", -1, 1],
+ ["tua", -1, 1],
+ ["mentua", 136, 1],
+ ["estua", 136, 1],
+ ["txua", -1, 1],
+ ["zua", -1, 1],
+ ["tzua", 140, 1],
+ ["za", -1, 1],
+ ["eza", 142, 1],
+ ["eroza", 142, 1],
+ ["tza", 142, 2],
+ ["koitza", 145, 1],
+ ["antza", 145, 1],
+ ["gintza", 145, 1],
+ ["kintza", 145, 1],
+ ["kuntza", 145, 1],
+ ["gabe", -1, 1],
+ ["kabe", -1, 1],
+ ["kide", -1, 1],
+ ["alde", -1, 1],
+ ["kalde", 154, 1],
+ ["talde", 154, 1],
+ ["orde", -1, 1],
+ ["ge", -1, 1],
+ ["zale", -1, 1],
+ ["tzale", 159, 1],
+ ["gile", -1, 1],
+ ["eme", -1, 1],
+ ["kume", -1, 1],
+ ["ne", -1, 1],
+ ["zione", 164, 1],
+ ["une", 164, 1],
+ ["gune", 166, 1],
+ ["pe", -1, 1],
+ ["aurre", -1, 1],
+ ["te", -1, 1],
+ ["kote", 170, 1],
+ ["arte", 170, 1],
+ ["oste", 170, 1],
+ ["etxe", -1, 1],
+ ["gai", -1, 1],
+ ["di", -1, 1],
+ ["aldi", 176, 1],
+ ["taldi", 177, 1],
+ ["geldi", 176, -1],
+ ["handi", 176, 1],
+ ["mendi", 176, 1],
+ ["gei", -1, 1],
+ ["egi", -1, 1],
+ ["degi", 183, 1],
+ ["tegi", 183, 1],
+ ["nahi", -1, 1],
+ ["ohi", -1, 1],
+ ["ki", -1, 1],
+ ["toki", 188, 1],
+ ["oi", -1, 1],
+ ["goi", 190, 1],
+ ["koi", 190, 1],
+ ["ari", -1, 1],
+ ["kari", 193, 1],
+ ["lari", 193, 1],
+ ["tari", 193, 1],
+ ["garri", -1, 2],
+ ["larri", -1, 1],
+ ["kirri", -1, 1],
+ ["duri", -1, 1],
+ ["asi", -1, 1],
+ ["ti", -1, 1],
+ ["ontzi", -1, 1],
+ ["\u00F1i", -1, 1],
+ ["ak", -1, 1],
+ ["ek", -1, 1],
+ ["tarik", -1, 1],
+ ["gibel", -1, 1],
+ ["ail", -1, 1],
+ ["kail", 209, 1],
+ ["kan", -1, 1],
+ ["tan", -1, 1],
+ ["etan", 212, 1],
+ ["en", -1, 4],
+ ["ren", 214, 2],
+ ["garren", 215, 1],
+ ["gerren", 215, 1],
+ ["urren", 215, 1],
+ ["ten", 214, 4],
+ ["tzen", 214, 4],
+ ["zain", -1, 1],
+ ["tzain", 221, 1],
+ ["kin", -1, 1],
+ ["min", -1, 1],
+ ["dun", -1, 1],
+ ["asun", -1, 1],
+ ["tasun", 226, 1],
+ ["aizun", -1, 1],
+ ["ondo", -1, 1],
+ ["kondo", 229, 1],
+ ["go", -1, 1],
+ ["ngo", 231, 1],
+ ["zio", -1, 1],
+ ["ko", -1, 1],
+ ["trako", 234, 5],
+ ["tako", 234, 1],
+ ["etako", 236, 1],
+ ["eko", 234, 1],
+ ["tariko", 234, 1],
+ ["sko", 234, 1],
+ ["tuko", 234, 1],
+ ["minutuko", 241, 6],
+ ["zko", 234, 1],
+ ["no", -1, 1],
+ ["zino", 244, 1],
+ ["ro", -1, 1],
+ ["aro", 246, 1],
+ ["igaro", 247, -1],
+ ["taro", 247, 1],
+ ["zaro", 247, 1],
+ ["ero", 246, 1],
+ ["giro", 246, 1],
+ ["oro", 246, 1],
+ ["oso", -1, 1],
+ ["to", -1, 1],
+ ["tto", 255, 1],
+ ["zto", 255, 1],
+ ["txo", -1, 1],
+ ["tzo", -1, 1],
+ ["gintzo", 259, 1],
+ ["\u00F1o", -1, 1],
+ ["zp", -1, 1],
+ ["ar", -1, 1],
+ ["dar", 263, 1],
+ ["behar", 263, 1],
+ ["zehar", 263, -1],
+ ["liar", 263, 1],
+ ["tiar", 263, 1],
+ ["tar", 263, 1],
+ ["tzar", 263, 1],
+ ["or", -1, 2],
+ ["kor", 271, 1],
+ ["os", -1, 1],
+ ["ket", -1, 1],
+ ["du", -1, 1],
+ ["mendu", 275, 1],
+ ["ordu", 275, 1],
+ ["leku", -1, 1],
+ ["buru", -1, 2],
+ ["duru", -1, 1],
+ ["tsu", -1, 1],
+ ["tu", -1, 1],
+ ["tatu", 282, 4],
+ ["mentu", 282, 1],
+ ["estu", 282, 1],
+ ["txu", -1, 1],
+ ["zu", -1, 1],
+ ["tzu", 287, 1],
+ ["gintzu", 288, 1],
+ ["z", -1, 1],
+ ["ez", 290, 1],
+ ["eroz", 290, 1],
+ ["tz", 290, 1],
+ ["koitz", 293, 1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["zlea", -1, 2],
+ ["keria", -1, 1],
+ ["la", -1, 1],
+ ["era", -1, 1],
+ ["dade", -1, 1],
+ ["tade", -1, 1],
+ ["date", -1, 1],
+ ["tate", -1, 1],
+ ["gi", -1, 1],
+ ["ki", -1, 1],
+ ["ik", -1, 1],
+ ["lanik", 10, 1],
+ ["rik", 10, 1],
+ ["larik", 12, 1],
+ ["ztik", 10, 1],
+ ["go", -1, 1],
+ ["ro", -1, 1],
+ ["ero", 16, 1],
+ ["to", -1, 1]
+ ];
+
+ /** @const */ var /** Array */ g_v = [17, 65, 16];
+
+ var /** number */ I_p2 = 0;
+ var /** number */ I_p1 = 0;
+ var /** number */ I_pV = 0;
+
+
+ /** @return {boolean} */
+ function r_mark_regions() {
+ I_pV = base.limit;
+ I_p1 = base.limit;
+ I_p2 = base.limit;
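+        // Marks the standard Snowball regions: I_pV (RV), I_p1 (R1) and
+        // I_p2 (R2); the suffix rules below only fire at or after these points.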
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab2: {
+ if (!(base.in_grouping(g_v, 97, 117)))
+ {
+ break lab2;
+ }
+ lab3: {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab4: {
+ if (!(base.out_grouping(g_v, 97, 117)))
+ {
+ break lab4;
+ }
+ if (!base.go_out_grouping(g_v, 97, 117))
+ {
+ break lab4;
+ }
+ base.cursor++;
+ break lab3;
+ }
+ base.cursor = v_3;
+ if (!(base.in_grouping(g_v, 97, 117)))
+ {
+ break lab2;
+ }
+ if (!base.go_in_grouping(g_v, 97, 117))
+ {
+ break lab2;
+ }
+ base.cursor++;
+ }
+ break lab1;
+ }
+ base.cursor = v_2;
+ if (!(base.out_grouping(g_v, 97, 117)))
+ {
+ break lab0;
+ }
+ lab5: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab6: {
+ if (!(base.out_grouping(g_v, 97, 117)))
+ {
+ break lab6;
+ }
+ if (!base.go_out_grouping(g_v, 97, 117))
+ {
+ break lab6;
+ }
+ base.cursor++;
+ break lab5;
+ }
+ base.cursor = v_4;
+ if (!(base.in_grouping(g_v, 97, 117)))
+ {
+ break lab0;
+ }
+ if (base.cursor >= base.limit)
+ {
+ break lab0;
+ }
+ base.cursor++;
+ }
+ }
+ I_pV = base.cursor;
+ }
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab7: {
+ if (!base.go_out_grouping(g_v, 97, 117))
+ {
+ break lab7;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 117))
+ {
+ break lab7;
+ }
+ base.cursor++;
+ I_p1 = base.cursor;
+ if (!base.go_out_grouping(g_v, 97, 117))
+ {
+ break lab7;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 117))
+ {
+ break lab7;
+ }
+ base.cursor++;
+ I_p2 = base.cursor;
+ }
+ base.cursor = v_5;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_RV() {
+ return I_pV <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_R2() {
+ return I_p2 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_R1() {
+ return I_p1 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_aditzak() {
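+        // Verb suffixes (table a_0): case 1 deletes within RV, case 2 within R2.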
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_0);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_RV())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_izenak() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_1);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_RV())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("jok"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("tra"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("minutu"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_adjetiboak() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_2);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_RV())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("z"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
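+        // Strips verb suffixes (r_aditzak) and then noun suffixes (r_izenak)
+        // repeatedly, finishing with a single adjective pass (r_adjetiboak).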
+ r_mark_regions();
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ while(true)
+ {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (!r_aditzak())
+ {
+ break lab0;
+ }
+ continue;
+ }
+ base.cursor = base.limit - v_1;
+ break;
+ }
+ while(true)
+ {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ if (!r_izenak())
+ {
+ break lab1;
+ }
+ continue;
+ }
+ base.cursor = base.limit - v_2;
+ break;
+ }
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ r_adjetiboak();
+ base.cursor = base.limit - v_3;
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/catalan-stemmer.js b/sphinx/search/non-minified-js/catalan-stemmer.js
new file mode 100644
index 00000000000..441e655adda
--- /dev/null
+++ b/sphinx/search/non-minified-js/catalan-stemmer.js
@@ -0,0 +1,886 @@
+// Generated from catalan.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var CatalanStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["", -1, 7],
+ ["\u00B7", 0, 6],
+ ["\u00E0", 0, 1],
+ ["\u00E1", 0, 1],
+ ["\u00E8", 0, 2],
+ ["\u00E9", 0, 2],
+ ["\u00EC", 0, 3],
+ ["\u00ED", 0, 3],
+ ["\u00EF", 0, 3],
+ ["\u00F2", 0, 4],
+ ["\u00F3", 0, 4],
+ ["\u00FA", 0, 5],
+ ["\u00FC", 0, 5]
+ ];
+
+ /** @const */ var a_1 = [
+ ["la", -1, 1],
+ ["-la", 0, 1],
+ ["sela", 0, 1],
+ ["le", -1, 1],
+ ["me", -1, 1],
+ ["-me", 4, 1],
+ ["se", -1, 1],
+ ["-te", -1, 1],
+ ["hi", -1, 1],
+ ["'hi", 8, 1],
+ ["li", -1, 1],
+ ["-li", 10, 1],
+ ["'l", -1, 1],
+ ["'m", -1, 1],
+ ["-m", -1, 1],
+ ["'n", -1, 1],
+ ["-n", -1, 1],
+ ["ho", -1, 1],
+ ["'ho", 17, 1],
+ ["lo", -1, 1],
+ ["selo", 19, 1],
+ ["'s", -1, 1],
+ ["las", -1, 1],
+ ["selas", 22, 1],
+ ["les", -1, 1],
+ ["-les", 24, 1],
+ ["'ls", -1, 1],
+ ["-ls", -1, 1],
+ ["'ns", -1, 1],
+ ["-ns", -1, 1],
+ ["ens", -1, 1],
+ ["los", -1, 1],
+ ["selos", 31, 1],
+ ["nos", -1, 1],
+ ["-nos", 33, 1],
+ ["vos", -1, 1],
+ ["us", -1, 1],
+ ["-us", 36, 1],
+ ["'t", -1, 1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["ica", -1, 4],
+ ["l\u00F3gica", 0, 3],
+ ["enca", -1, 1],
+ ["ada", -1, 2],
+ ["ancia", -1, 1],
+ ["encia", -1, 1],
+ ["\u00E8ncia", -1, 1],
+ ["\u00EDcia", -1, 1],
+ ["logia", -1, 3],
+ ["inia", -1, 1],
+ ["\u00EDinia", 9, 1],
+ ["eria", -1, 1],
+ ["\u00E0ria", -1, 1],
+ ["at\u00F2ria", -1, 1],
+ ["alla", -1, 1],
+ ["ella", -1, 1],
+ ["\u00EDvola", -1, 1],
+ ["ima", -1, 1],
+ ["\u00EDssima", 17, 1],
+ ["qu\u00EDssima", 18, 5],
+ ["ana", -1, 1],
+ ["ina", -1, 1],
+ ["era", -1, 1],
+ ["sfera", 22, 1],
+ ["ora", -1, 1],
+ ["dora", 24, 1],
+ ["adora", 25, 1],
+ ["adura", -1, 1],
+ ["esa", -1, 1],
+ ["osa", -1, 1],
+ ["assa", -1, 1],
+ ["essa", -1, 1],
+ ["issa", -1, 1],
+ ["eta", -1, 1],
+ ["ita", -1, 1],
+ ["ota", -1, 1],
+ ["ista", -1, 1],
+ ["ialista", 36, 1],
+ ["ionista", 36, 1],
+ ["iva", -1, 1],
+ ["ativa", 39, 1],
+ ["n\u00E7a", -1, 1],
+ ["log\u00EDa", -1, 3],
+ ["ic", -1, 4],
+ ["\u00EDstic", 43, 1],
+ ["enc", -1, 1],
+ ["esc", -1, 1],
+ ["ud", -1, 1],
+ ["atge", -1, 1],
+ ["ble", -1, 1],
+ ["able", 49, 1],
+ ["ible", 49, 1],
+ ["isme", -1, 1],
+ ["ialisme", 52, 1],
+ ["ionisme", 52, 1],
+ ["ivisme", 52, 1],
+ ["aire", -1, 1],
+ ["icte", -1, 1],
+ ["iste", -1, 1],
+ ["ici", -1, 1],
+ ["\u00EDci", -1, 1],
+ ["logi", -1, 3],
+ ["ari", -1, 1],
+ ["tori", -1, 1],
+ ["al", -1, 1],
+ ["il", -1, 1],
+ ["all", -1, 1],
+ ["ell", -1, 1],
+ ["\u00EDvol", -1, 1],
+ ["isam", -1, 1],
+ ["issem", -1, 1],
+ ["\u00ECssem", -1, 1],
+ ["\u00EDssem", -1, 1],
+ ["\u00EDssim", -1, 1],
+ ["qu\u00EDssim", 73, 5],
+ ["amen", -1, 1],
+ ["\u00ECssin", -1, 1],
+ ["ar", -1, 1],
+ ["ificar", 77, 1],
+ ["egar", 77, 1],
+ ["ejar", 77, 1],
+ ["itar", 77, 1],
+ ["itzar", 77, 1],
+ ["fer", -1, 1],
+ ["or", -1, 1],
+ ["dor", 84, 1],
+ ["dur", -1, 1],
+ ["doras", -1, 1],
+ ["ics", -1, 4],
+ ["l\u00F3gics", 88, 3],
+ ["uds", -1, 1],
+ ["nces", -1, 1],
+ ["ades", -1, 2],
+ ["ancies", -1, 1],
+ ["encies", -1, 1],
+ ["\u00E8ncies", -1, 1],
+ ["\u00EDcies", -1, 1],
+ ["logies", -1, 3],
+ ["inies", -1, 1],
+ ["\u00EDnies", -1, 1],
+ ["eries", -1, 1],
+ ["\u00E0ries", -1, 1],
+ ["at\u00F2ries", -1, 1],
+ ["bles", -1, 1],
+ ["ables", 103, 1],
+ ["ibles", 103, 1],
+ ["imes", -1, 1],
+ ["\u00EDssimes", 106, 1],
+ ["qu\u00EDssimes", 107, 5],
+ ["formes", -1, 1],
+ ["ismes", -1, 1],
+ ["ialismes", 110, 1],
+ ["ines", -1, 1],
+ ["eres", -1, 1],
+ ["ores", -1, 1],
+ ["dores", 114, 1],
+ ["idores", 115, 1],
+ ["dures", -1, 1],
+ ["eses", -1, 1],
+ ["oses", -1, 1],
+ ["asses", -1, 1],
+ ["ictes", -1, 1],
+ ["ites", -1, 1],
+ ["otes", -1, 1],
+ ["istes", -1, 1],
+ ["ialistes", 124, 1],
+ ["ionistes", 124, 1],
+ ["iques", -1, 4],
+ ["l\u00F3giques", 127, 3],
+ ["ives", -1, 1],
+ ["atives", 129, 1],
+ ["log\u00EDes", -1, 3],
+ ["alleng\u00FCes", -1, 1],
+ ["icis", -1, 1],
+ ["\u00EDcis", -1, 1],
+ ["logis", -1, 3],
+ ["aris", -1, 1],
+ ["toris", -1, 1],
+ ["ls", -1, 1],
+ ["als", 138, 1],
+ ["ells", 138, 1],
+ ["ims", -1, 1],
+ ["\u00EDssims", 141, 1],
+ ["qu\u00EDssims", 142, 5],
+ ["ions", -1, 1],
+ ["cions", 144, 1],
+ ["acions", 145, 2],
+ ["esos", -1, 1],
+ ["osos", -1, 1],
+ ["assos", -1, 1],
+ ["issos", -1, 1],
+ ["ers", -1, 1],
+ ["ors", -1, 1],
+ ["dors", 152, 1],
+ ["adors", 153, 1],
+ ["idors", 153, 1],
+ ["ats", -1, 1],
+ ["itats", 156, 1],
+ ["bilitats", 157, 1],
+ ["ivitats", 157, 1],
+ ["ativitats", 159, 1],
+ ["\u00EFtats", 156, 1],
+ ["ets", -1, 1],
+ ["ants", -1, 1],
+ ["ents", -1, 1],
+ ["ments", 164, 1],
+ ["aments", 165, 1],
+ ["ots", -1, 1],
+ ["uts", -1, 1],
+ ["ius", -1, 1],
+ ["trius", 169, 1],
+ ["atius", 169, 1],
+ ["\u00E8s", -1, 1],
+ ["\u00E9s", -1, 1],
+ ["\u00EDs", -1, 1],
+ ["d\u00EDs", 174, 1],
+ ["\u00F3s", -1, 1],
+ ["itat", -1, 1],
+ ["bilitat", 177, 1],
+ ["ivitat", 177, 1],
+ ["ativitat", 179, 1],
+ ["\u00EFtat", -1, 1],
+ ["et", -1, 1],
+ ["ant", -1, 1],
+ ["ent", -1, 1],
+ ["ient", 184, 1],
+ ["ment", 184, 1],
+ ["ament", 186, 1],
+ ["isament", 187, 1],
+ ["ot", -1, 1],
+ ["isseu", -1, 1],
+ ["\u00ECsseu", -1, 1],
+ ["\u00EDsseu", -1, 1],
+ ["triu", -1, 1],
+ ["\u00EDssiu", -1, 1],
+ ["atiu", -1, 1],
+ ["\u00F3", -1, 1],
+ ["i\u00F3", 196, 1],
+ ["ci\u00F3", 197, 1],
+ ["aci\u00F3", 198, 1]
+ ];
+
+ /** @const */ var a_3 = [
+ ["aba", -1, 1],
+ ["esca", -1, 1],
+ ["isca", -1, 1],
+ ["\u00EFsca", -1, 1],
+ ["ada", -1, 1],
+ ["ida", -1, 1],
+ ["uda", -1, 1],
+ ["\u00EFda", -1, 1],
+ ["ia", -1, 1],
+ ["aria", 8, 1],
+ ["iria", 8, 1],
+ ["ara", -1, 1],
+ ["iera", -1, 1],
+ ["ira", -1, 1],
+ ["adora", -1, 1],
+ ["\u00EFra", -1, 1],
+ ["ava", -1, 1],
+ ["ixa", -1, 1],
+ ["itza", -1, 1],
+ ["\u00EDa", -1, 1],
+ ["ar\u00EDa", 19, 1],
+ ["er\u00EDa", 19, 1],
+ ["ir\u00EDa", 19, 1],
+ ["\u00EFa", -1, 1],
+ ["isc", -1, 1],
+ ["\u00EFsc", -1, 1],
+ ["ad", -1, 1],
+ ["ed", -1, 1],
+ ["id", -1, 1],
+ ["ie", -1, 1],
+ ["re", -1, 1],
+ ["dre", 30, 1],
+ ["ase", -1, 1],
+ ["iese", -1, 1],
+ ["aste", -1, 1],
+ ["iste", -1, 1],
+ ["ii", -1, 1],
+ ["ini", -1, 1],
+ ["esqui", -1, 1],
+ ["eixi", -1, 1],
+ ["itzi", -1, 1],
+ ["am", -1, 1],
+ ["em", -1, 1],
+ ["arem", 42, 1],
+ ["irem", 42, 1],
+ ["\u00E0rem", 42, 1],
+ ["\u00EDrem", 42, 1],
+ ["\u00E0ssem", 42, 1],
+ ["\u00E9ssem", 42, 1],
+ ["iguem", 42, 1],
+ ["\u00EFguem", 42, 1],
+ ["avem", 42, 1],
+ ["\u00E0vem", 42, 1],
+ ["\u00E1vem", 42, 1],
+ ["ir\u00ECem", 42, 1],
+ ["\u00EDem", 42, 1],
+ ["ar\u00EDem", 55, 1],
+ ["ir\u00EDem", 55, 1],
+ ["assim", -1, 1],
+ ["essim", -1, 1],
+ ["issim", -1, 1],
+ ["\u00E0ssim", -1, 1],
+ ["\u00E8ssim", -1, 1],
+ ["\u00E9ssim", -1, 1],
+ ["\u00EDssim", -1, 1],
+ ["\u00EFm", -1, 1],
+ ["an", -1, 1],
+ ["aban", 66, 1],
+ ["arian", 66, 1],
+ ["aran", 66, 1],
+ ["ieran", 66, 1],
+ ["iran", 66, 1],
+ ["\u00EDan", 66, 1],
+ ["ar\u00EDan", 72, 1],
+ ["er\u00EDan", 72, 1],
+ ["ir\u00EDan", 72, 1],
+ ["en", -1, 1],
+ ["ien", 76, 1],
+ ["arien", 77, 1],
+ ["irien", 77, 1],
+ ["aren", 76, 1],
+ ["eren", 76, 1],
+ ["iren", 76, 1],
+ ["\u00E0ren", 76, 1],
+ ["\u00EFren", 76, 1],
+ ["asen", 76, 1],
+ ["iesen", 76, 1],
+ ["assen", 76, 1],
+ ["essen", 76, 1],
+ ["issen", 76, 1],
+ ["\u00E9ssen", 76, 1],
+ ["\u00EFssen", 76, 1],
+ ["esquen", 76, 1],
+ ["isquen", 76, 1],
+ ["\u00EFsquen", 76, 1],
+ ["aven", 76, 1],
+ ["ixen", 76, 1],
+ ["eixen", 96, 1],
+ ["\u00EFxen", 76, 1],
+ ["\u00EFen", 76, 1],
+ ["in", -1, 1],
+ ["inin", 100, 1],
+ ["sin", 100, 1],
+ ["isin", 102, 1],
+ ["assin", 102, 1],
+ ["essin", 102, 1],
+ ["issin", 102, 1],
+ ["\u00EFssin", 102, 1],
+ ["esquin", 100, 1],
+ ["eixin", 100, 1],
+ ["aron", -1, 1],
+ ["ieron", -1, 1],
+ ["ar\u00E1n", -1, 1],
+ ["er\u00E1n", -1, 1],
+ ["ir\u00E1n", -1, 1],
+ ["i\u00EFn", -1, 1],
+ ["ado", -1, 1],
+ ["ido", -1, 1],
+ ["ando", -1, 2],
+ ["iendo", -1, 1],
+ ["io", -1, 1],
+ ["ixo", -1, 1],
+ ["eixo", 121, 1],
+ ["\u00EFxo", -1, 1],
+ ["itzo", -1, 1],
+ ["ar", -1, 1],
+ ["tzar", 125, 1],
+ ["er", -1, 1],
+ ["eixer", 127, 1],
+ ["ir", -1, 1],
+ ["ador", -1, 1],
+ ["as", -1, 1],
+ ["abas", 131, 1],
+ ["adas", 131, 1],
+ ["idas", 131, 1],
+ ["aras", 131, 1],
+ ["ieras", 131, 1],
+ ["\u00EDas", 131, 1],
+ ["ar\u00EDas", 137, 1],
+ ["er\u00EDas", 137, 1],
+ ["ir\u00EDas", 137, 1],
+ ["ids", -1, 1],
+ ["es", -1, 1],
+ ["ades", 142, 1],
+ ["ides", 142, 1],
+ ["udes", 142, 1],
+ ["\u00EFdes", 142, 1],
+ ["atges", 142, 1],
+ ["ies", 142, 1],
+ ["aries", 148, 1],
+ ["iries", 148, 1],
+ ["ares", 142, 1],
+ ["ires", 142, 1],
+ ["adores", 142, 1],
+ ["\u00EFres", 142, 1],
+ ["ases", 142, 1],
+ ["ieses", 142, 1],
+ ["asses", 142, 1],
+ ["esses", 142, 1],
+ ["isses", 142, 1],
+ ["\u00EFsses", 142, 1],
+ ["ques", 142, 1],
+ ["esques", 161, 1],
+ ["\u00EFsques", 161, 1],
+ ["aves", 142, 1],
+ ["ixes", 142, 1],
+ ["eixes", 165, 1],
+ ["\u00EFxes", 142, 1],
+ ["\u00EFes", 142, 1],
+ ["abais", -1, 1],
+ ["arais", -1, 1],
+ ["ierais", -1, 1],
+ ["\u00EDais", -1, 1],
+ ["ar\u00EDais", 172, 1],
+ ["er\u00EDais", 172, 1],
+ ["ir\u00EDais", 172, 1],
+ ["aseis", -1, 1],
+ ["ieseis", -1, 1],
+ ["asteis", -1, 1],
+ ["isteis", -1, 1],
+ ["inis", -1, 1],
+ ["sis", -1, 1],
+ ["isis", 181, 1],
+ ["assis", 181, 1],
+ ["essis", 181, 1],
+ ["issis", 181, 1],
+ ["\u00EFssis", 181, 1],
+ ["esquis", -1, 1],
+ ["eixis", -1, 1],
+ ["itzis", -1, 1],
+ ["\u00E1is", -1, 1],
+ ["ar\u00E9is", -1, 1],
+ ["er\u00E9is", -1, 1],
+ ["ir\u00E9is", -1, 1],
+ ["ams", -1, 1],
+ ["ados", -1, 1],
+ ["idos", -1, 1],
+ ["amos", -1, 1],
+ ["\u00E1bamos", 197, 1],
+ ["\u00E1ramos", 197, 1],
+ ["i\u00E9ramos", 197, 1],
+ ["\u00EDamos", 197, 1],
+ ["ar\u00EDamos", 201, 1],
+ ["er\u00EDamos", 201, 1],
+ ["ir\u00EDamos", 201, 1],
+ ["aremos", -1, 1],
+ ["eremos", -1, 1],
+ ["iremos", -1, 1],
+ ["\u00E1semos", -1, 1],
+ ["i\u00E9semos", -1, 1],
+ ["imos", -1, 1],
+ ["adors", -1, 1],
+ ["ass", -1, 1],
+ ["erass", 212, 1],
+ ["ess", -1, 1],
+ ["ats", -1, 1],
+ ["its", -1, 1],
+ ["ents", -1, 1],
+ ["\u00E0s", -1, 1],
+ ["ar\u00E0s", 218, 1],
+ ["ir\u00E0s", 218, 1],
+ ["ar\u00E1s", -1, 1],
+ ["er\u00E1s", -1, 1],
+ ["ir\u00E1s", -1, 1],
+ ["\u00E9s", -1, 1],
+ ["ar\u00E9s", 224, 1],
+ ["\u00EDs", -1, 1],
+ ["i\u00EFs", -1, 1],
+ ["at", -1, 1],
+ ["it", -1, 1],
+ ["ant", -1, 1],
+ ["ent", -1, 1],
+ ["int", -1, 1],
+ ["ut", -1, 1],
+ ["\u00EFt", -1, 1],
+ ["au", -1, 1],
+ ["erau", 235, 1],
+ ["ieu", -1, 1],
+ ["ineu", -1, 1],
+ ["areu", -1, 1],
+ ["ireu", -1, 1],
+ ["\u00E0reu", -1, 1],
+ ["\u00EDreu", -1, 1],
+ ["asseu", -1, 1],
+ ["esseu", -1, 1],
+ ["eresseu", 244, 1],
+ ["\u00E0sseu", -1, 1],
+ ["\u00E9sseu", -1, 1],
+ ["igueu", -1, 1],
+ ["\u00EFgueu", -1, 1],
+ ["\u00E0veu", -1, 1],
+ ["\u00E1veu", -1, 1],
+ ["itzeu", -1, 1],
+ ["\u00ECeu", -1, 1],
+ ["ir\u00ECeu", 253, 1],
+ ["\u00EDeu", -1, 1],
+ ["ar\u00EDeu", 255, 1],
+ ["ir\u00EDeu", 255, 1],
+ ["assiu", -1, 1],
+ ["issiu", -1, 1],
+ ["\u00E0ssiu", -1, 1],
+ ["\u00E8ssiu", -1, 1],
+ ["\u00E9ssiu", -1, 1],
+ ["\u00EDssiu", -1, 1],
+ ["\u00EFu", -1, 1],
+ ["ix", -1, 1],
+ ["eix", 265, 1],
+ ["\u00EFx", -1, 1],
+ ["itz", -1, 1],
+ ["i\u00E0", -1, 1],
+ ["ar\u00E0", -1, 1],
+ ["ir\u00E0", -1, 1],
+ ["itz\u00E0", -1, 1],
+ ["ar\u00E1", -1, 1],
+ ["er\u00E1", -1, 1],
+ ["ir\u00E1", -1, 1],
+ ["ir\u00E8", -1, 1],
+ ["ar\u00E9", -1, 1],
+ ["er\u00E9", -1, 1],
+ ["ir\u00E9", -1, 1],
+ ["\u00ED", -1, 1],
+ ["i\u00EF", -1, 1],
+ ["i\u00F3", -1, 1]
+ ];
+
+ /** @const */ var a_4 = [
+ ["a", -1, 1],
+ ["e", -1, 1],
+ ["i", -1, 1],
+ ["\u00EFn", -1, 1],
+ ["o", -1, 1],
+ ["ir", -1, 1],
+ ["s", -1, 1],
+ ["is", 6, 1],
+ ["os", 6, 1],
+ ["\u00EFs", 6, 1],
+ ["it", -1, 1],
+ ["eu", -1, 1],
+ ["iu", -1, 1],
+ ["iqu", -1, 2],
+ ["itz", -1, 1],
+ ["\u00E0", -1, 1],
+ ["\u00E1", -1, 1],
+ ["\u00E9", -1, 1],
+ ["\u00EC", -1, 1],
+ ["\u00ED", -1, 1],
+ ["\u00EF", -1, 1],
+ ["\u00F3", -1, 1]
+ ];
+
+ /** @const */ var /** Array */ g_v = [17, 65, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 129, 81, 6, 10];
+
+ var /** number */ I_p2 = 0;
+ var /** number */ I_p1 = 0;
+
+
+ /** @return {boolean} */
+ function r_mark_regions() {
+ I_p1 = base.limit;
+ I_p2 = base.limit;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ if (!base.go_out_grouping(g_v, 97, 252))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 252))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ I_p1 = base.cursor;
+ if (!base.go_out_grouping(g_v, 97, 252))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 252))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ I_p2 = base.cursor;
+ }
+ base.cursor = v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_cleaning() {
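+        // Forward pass over the whole word: folds accented vowels to their
+        // base letters and "\u00B7" to "." via table a_0.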
+ var /** number */ among_var;
+ while(true)
+ {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_0);
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("a"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("i"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("o"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("u"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("."))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (base.cursor >= base.limit)
+ {
+ break lab0;
+ }
+ base.cursor++;
+ break;
+ }
+ continue;
+ }
+ base.cursor = v_1;
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_R1() {
+ return I_p1 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_R2() {
+ return I_p2 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_attached_pronoun() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_1) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_standard_suffix() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_2);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_from("log"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_from("ic"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("c"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_verb_suffix() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_3);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_residual_suffix() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_4);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("ic"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
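+        // Pipeline: attached pronoun, then standard suffix or (failing that)
+        // verb suffix, then residual suffix, ending with the r_cleaning pass.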
+ r_mark_regions();
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ r_attached_pronoun();
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab0: {
+ lab1: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab2: {
+ if (!r_standard_suffix())
+ {
+ break lab2;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_3;
+ if (!r_verb_suffix())
+ {
+ break lab0;
+ }
+ }
+ }
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ r_residual_suffix();
+ base.cursor = base.limit - v_4;
+ base.cursor = base.limit_backward;
+ /** @const */ var /** number */ v_5 = base.cursor;
+ r_cleaning();
+ base.cursor = v_5;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/danish-stemmer.js b/sphinx/search/non-minified-js/danish-stemmer.js
index 46b5d55b0e1..b0867495bbf 100644
--- a/sphinx/search/non-minified-js/danish-stemmer.js
+++ b/sphinx/search/non-minified-js/danish-stemmer.js
@@ -1,8 +1,9 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from danish.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-DanishStemmer = function() {
+var DanishStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
["hed", -1, 1],
["ethed", 0, 1],
@@ -67,9 +68,9 @@ DanishStemmer = function() {
/** @return {boolean} */
function r_mark_regions() {
I_p1 = base.limit;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
{
- var /** number */ c1 = base.cursor + 3;
+ /** @const */ var /** number */ c1 = base.cursor + 3;
if (c1 > base.limit)
{
return false;
@@ -78,44 +79,21 @@ DanishStemmer = function() {
}
I_x = base.cursor;
base.cursor = v_1;
- golab0: while(true)
+ if (!base.go_out_grouping(g_v, 97, 248))
{
- var /** number */ v_2 = base.cursor;
- lab1: {
- if (!(base.in_grouping(g_v, 97, 248)))
- {
- break lab1;
- }
- base.cursor = v_2;
- break golab0;
- }
- base.cursor = v_2;
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
- golab2: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 248))
{
- lab3: {
- if (!(base.out_grouping(g_v, 97, 248)))
- {
- break lab3;
- }
- break golab2;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
+ base.cursor++;
I_p1 = base.cursor;
- lab4: {
- if (!(I_p1 < I_x))
+ lab0: {
+ if (I_p1 >= I_x)
{
- break lab4;
+ break lab0;
}
I_p1 = I_x;
}
@@ -129,17 +107,17 @@ DanishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
among_var = base.find_among_b(a_0);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
switch (among_var) {
case 1:
if (!base.slice_del())
@@ -163,21 +141,21 @@ DanishStemmer = function() {
/** @return {boolean} */
function r_consonant_pair() {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
if (base.cursor < I_p1)
{
return false;
}
- var /** number */ v_3 = base.limit_backward;
+ /** @const */ var /** number */ v_2 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
if (base.find_among_b(a_1) == 0)
{
- base.limit_backward = v_3;
+ base.limit_backward = v_2;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_3;
+ base.limit_backward = v_2;
base.cursor = base.limit - v_1;
if (base.cursor <= base.limit_backward)
{
@@ -195,7 +173,7 @@ DanishStemmer = function() {
/** @return {boolean} */
function r_other_suffix() {
var /** number */ among_var;
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
base.ket = base.cursor;
if (!(base.eq_s_b("st")))
@@ -217,26 +195,26 @@ DanishStemmer = function() {
{
return false;
}
- var /** number */ v_3 = base.limit_backward;
+ /** @const */ var /** number */ v_2 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
among_var = base.find_among_b(a_2);
if (among_var == 0)
{
- base.limit_backward = v_3;
+ base.limit_backward = v_2;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_3;
+ base.limit_backward = v_2;
switch (among_var) {
case 1:
if (!base.slice_del())
{
return false;
}
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
r_consonant_pair();
- base.cursor = base.limit - v_4;
+ base.cursor = base.limit - v_3;
break;
case 2:
if (!base.slice_from("l\u00F8s"))
@@ -254,12 +232,12 @@ DanishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
if (!(base.in_grouping_b(g_c, 98, 122)))
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
@@ -268,7 +246,7 @@ DanishStemmer = function() {
{
return false;
}
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
if (!(base.eq_s_b(S_ch)))
{
return false;
@@ -281,20 +259,20 @@ DanishStemmer = function() {
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
r_mark_regions();
base.cursor = v_1;
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
r_main_suffix();
base.cursor = base.limit - v_2;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
r_consonant_pair();
base.cursor = base.limit - v_3;
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
r_other_suffix();
base.cursor = base.limit - v_4;
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
r_undouble();
base.cursor = base.limit - v_5;
base.cursor = base.limit_backward;
diff --git a/sphinx/search/non-minified-js/dutch-stemmer.js b/sphinx/search/non-minified-js/dutch-stemmer.js
index 0ad11e212cc..50e53e7b510 100644
--- a/sphinx/search/non-minified-js/dutch-stemmer.js
+++ b/sphinx/search/non-minified-js/dutch-stemmer.js
@@ -1,678 +1,1960 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from dutch.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-DutchStemmer = function() {
+var DutchStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
- ["", -1, 6],
- ["\u00E1", 0, 1],
- ["\u00E4", 0, 1],
- ["\u00E9", 0, 2],
- ["\u00EB", 0, 2],
- ["\u00ED", 0, 3],
- ["\u00EF", 0, 3],
- ["\u00F3", 0, 4],
- ["\u00F6", 0, 4],
- ["\u00FA", 0, 5],
- ["\u00FC", 0, 5]
+ ["a", -1, 1],
+ ["e", -1, 2],
+ ["o", -1, 1],
+ ["u", -1, 1],
+ ["\u00E0", -1, 1],
+ ["\u00E1", -1, 1],
+ ["\u00E2", -1, 1],
+ ["\u00E4", -1, 1],
+ ["\u00E8", -1, 2],
+ ["\u00E9", -1, 2],
+ ["\u00EA", -1, 2],
+ ["e\u00EB", -1, 3],
+ ["i\u00EB", -1, 4],
+ ["\u00F2", -1, 1],
+ ["\u00F3", -1, 1],
+ ["\u00F4", -1, 1],
+ ["\u00F6", -1, 1],
+ ["\u00F9", -1, 1],
+ ["\u00FA", -1, 1],
+ ["\u00FB", -1, 1],
+ ["\u00FC", -1, 1]
];
/** @const */ var a_1 = [
- ["", -1, 3],
- ["I", 0, 2],
- ["Y", 0, 1]
+ ["nde", -1, 8],
+ ["en", -1, 7],
+ ["s", -1, 2],
+ ["'s", 2, 1],
+ ["es", 2, 4],
+ ["ies", 4, 3],
+ ["aus", 2, 6],
+ ["\u00E9s", 2, 5]
];
/** @const */ var a_2 = [
- ["dd", -1, -1],
- ["kk", -1, -1],
- ["tt", -1, -1]
+ ["de", -1, 5],
+ ["ge", -1, 2],
+ ["ische", -1, 4],
+ ["je", -1, 1],
+ ["lijke", -1, 3],
+ ["le", -1, 9],
+ ["ene", -1, 10],
+ ["re", -1, 8],
+ ["se", -1, 7],
+ ["te", -1, 6],
+ ["ieve", -1, 11]
];
/** @const */ var a_3 = [
- ["ene", -1, 2],
- ["se", -1, 3],
- ["en", -1, 2],
- ["heden", 2, 1],
- ["s", -1, 3]
+ ["heid", -1, 3],
+ ["fie", -1, 7],
+ ["gie", -1, 8],
+ ["atie", -1, 1],
+ ["isme", -1, 5],
+ ["ing", -1, 5],
+ ["arij", -1, 6],
+ ["erij", -1, 5],
+ ["sel", -1, 3],
+ ["rder", -1, 4],
+ ["ster", -1, 3],
+ ["iteit", -1, 2],
+ ["dst", -1, 10],
+ ["tst", -1, 9]
];
/** @const */ var a_4 = [
- ["end", -1, 1],
- ["ig", -1, 2],
- ["ing", -1, 1],
- ["lijk", -1, 3],
- ["baar", -1, 4],
- ["bar", -1, 5]
+ ["end", -1, 9],
+ ["atief", -1, 2],
+ ["erig", -1, 9],
+ ["achtig", -1, 3],
+ ["ioneel", -1, 1],
+ ["baar", -1, 3],
+ ["laar", -1, 5],
+ ["naar", -1, 4],
+ ["raar", -1, 6],
+ ["eriger", -1, 9],
+ ["achtiger", -1, 3],
+ ["lijker", -1, 8],
+ ["tant", -1, 7],
+ ["erigst", -1, 9],
+ ["achtigst", -1, 3],
+ ["lijkst", -1, 8]
];
/** @const */ var a_5 = [
- ["aa", -1, -1],
- ["ee", -1, -1],
- ["oo", -1, -1],
- ["uu", -1, -1]
+ ["ig", -1, 1],
+ ["iger", -1, 1],
+ ["igst", -1, 1]
+ ];
+
+ /** @const */ var a_6 = [
+ ["ft", -1, 2],
+ ["kt", -1, 1],
+ ["pt", -1, 3]
+ ];
+
+ /** @const */ var a_7 = [
+ ["bb", -1, 1],
+ ["cc", -1, 2],
+ ["dd", -1, 3],
+ ["ff", -1, 4],
+ ["gg", -1, 5],
+ ["hh", -1, 6],
+ ["jj", -1, 7],
+ ["kk", -1, 8],
+ ["ll", -1, 9],
+ ["mm", -1, 10],
+ ["nn", -1, 11],
+ ["pp", -1, 12],
+ ["qq", -1, 13],
+ ["rr", -1, 14],
+ ["ss", -1, 15],
+ ["tt", -1, 16],
+ ["v", -1, 4],
+ ["vv", 16, 17],
+ ["ww", -1, 18],
+ ["xx", -1, 19],
+ ["z", -1, 15],
+ ["zz", 20, 20]
+ ];
+
+ /** @const */ var a_8 = [
+ ["d", -1, 1],
+ ["t", -1, 2]
+ ];
+
+ /** @const */ var a_9 = [
+ ["", -1, -1],
+ ["eft", 0, 1],
+ ["vaa", 0, 1],
+ ["val", 0, 1],
+ ["vali", 3, -1],
+ ["vare", 0, 1]
+ ];
+
+ /** @const */ var a_10 = [
+ ["\u00EB", -1, 1],
+ ["\u00EF", -1, 2]
+ ];
+
+ /** @const */ var a_11 = [
+ ["\u00EB", -1, 1],
+ ["\u00EF", -1, 2]
];
- /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128];
+ /** @const */ var /** Array */ g_E = [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 120];
+
+ /** @const */ var /** Array */ g_AIOU = [1, 65, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 11, 120, 46, 15];
- /** @const */ var /** Array */ g_v_I = [1, 0, 0, 17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128];
+ /** @const */ var /** Array */ g_AEIOU = [17, 65, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 139, 127, 46, 15];
- /** @const */ var /** Array */ g_v_j = [17, 67, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128];
+ /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 139, 127, 46, 15];
+ /** @const */ var /** Array */ g_v_WX = [17, 65, 208, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 139, 127, 46, 15];
+
+ var /** boolean */ B_GE_removed = false;
+ var /** boolean */ B_stemmed = false;
var /** number */ I_p2 = 0;
var /** number */ I_p1 = 0;
- var /** boolean */ B_e_found = false;
+ var /** string */ S_ch = '';
/** @return {boolean} */
- function r_prelude() {
- var /** number */ among_var;
- var /** number */ v_1 = base.cursor;
- while(true)
+ function r_R1() {
+ return I_p1 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_R2() {
+ return I_p2 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_V() {
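+        // True when the text before the cursor ends in a vowel or in "ij";
+        // the cursor itself is left unchanged.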
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.in_grouping_b(g_v, 97, 252)))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_2;
+ if (!(base.eq_s_b("ij")))
+ {
+ return false;
+ }
+ }
+ base.cursor = base.limit - v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_VX() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ if (base.cursor <= base.limit_backward)
+ {
+ return false;
+ }
+ base.cursor--;
+ lab0: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.in_grouping_b(g_v, 97, 252)))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_2;
+ if (!(base.eq_s_b("ij")))
+ {
+ return false;
+ }
+ }
+ base.cursor = base.limit - v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_C() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
{
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab0: {
- base.bra = base.cursor;
- among_var = base.find_among(a_0);
- if (among_var == 0)
+ if (!(base.eq_s_b("ij")))
{
break lab0;
}
- base.ket = base.cursor;
- switch (among_var) {
- case 1:
- if (!base.slice_from("a"))
- {
- return false;
- }
- break;
- case 2:
- if (!base.slice_from("e"))
- {
- return false;
- }
- break;
- case 3:
- if (!base.slice_from("i"))
- {
- return false;
- }
- break;
- case 4:
- if (!base.slice_from("o"))
- {
- return false;
- }
- break;
- case 5:
- if (!base.slice_from("u"))
- {
- return false;
- }
- break;
- case 6:
- if (base.cursor >= base.limit)
- {
- break lab0;
- }
- base.cursor++;
- break;
- }
- continue;
+ return false;
}
- base.cursor = v_2;
- break;
+ base.cursor = base.limit - v_2;
}
- base.cursor = v_1;
- var /** number */ v_3 = base.cursor;
- lab1: {
- base.bra = base.cursor;
- if (!(base.eq_s("y")))
+ if (!(base.out_grouping_b(g_v, 97, 252)))
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_lengthen_V() {
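+        // Re-lengthens a vowel: slice_to copies the matched vowel into S_ch
+        // and insert() writes it back a second time (cases 3 and 4 rewrite
+        // "e\u00EB" and "i\u00EB" directly).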
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (!(base.out_grouping_b(g_v_WX, 97, 252)))
{
- base.cursor = v_3;
- break lab1;
+ break lab0;
}
base.ket = base.cursor;
- if (!base.slice_from("Y"))
+ among_var = base.find_among_b(a_0);
+ if (among_var == 0)
{
- return false;
+ break lab0;
}
- }
- while(true)
- {
- var /** number */ v_4 = base.cursor;
- lab2: {
- golab3: while(true)
- {
- var /** number */ v_5 = base.cursor;
- lab4: {
- if (!(base.in_grouping(g_v, 97, 232)))
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab2: {
+ if (!(base.out_grouping_b(g_AEIOU, 97, 252)))
+ {
+ break lab2;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_3;
+ if (base.cursor > base.limit_backward)
{
- break lab4;
+ break lab0;
}
- base.bra = base.cursor;
+ }
+ base.cursor = base.limit - v_2;
+ S_ch = base.slice_to();
+ if (S_ch == '')
+ {
+ return false;
+ }
+ {
+ /** @const */ var /** number */ c1 = base.cursor;
+ base.insert(base.cursor, base.cursor, S_ch);
+ base.cursor = c1;
+ }
+ break;
+ case 2:
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab3: {
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab4: {
+ if (!(base.out_grouping_b(g_AEIOU, 97, 252)))
+ {
+ break lab4;
+ }
+ break lab3;
+ }
+ base.cursor = base.limit - v_5;
+ if (base.cursor > base.limit_backward)
+ {
+ break lab0;
+ }
+ }
+ {
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
lab5: {
- var /** number */ v_6 = base.cursor;
lab6: {
- if (!(base.eq_s("i")))
- {
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab7: {
+ if (!(base.in_grouping_b(g_AIOU, 97, 252)))
+ {
+ break lab7;
+ }
break lab6;
}
- base.ket = base.cursor;
- if (!(base.in_grouping(g_v, 97, 232)))
+ base.cursor = base.limit - v_7;
+ if (!(base.in_grouping_b(g_E, 101, 235)))
{
- break lab6;
+ break lab5;
}
- if (!base.slice_from("I"))
+ if (base.cursor > base.limit_backward)
{
- return false;
+ break lab5;
}
- break lab5;
}
- base.cursor = v_6;
- if (!(base.eq_s("y")))
+ break lab0;
+ }
+ base.cursor = base.limit - v_6;
+ }
+ {
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ lab8: {
+ if (base.cursor <= base.limit_backward)
{
- break lab4;
+ break lab8;
+ }
+ base.cursor--;
+ if (!(base.in_grouping_b(g_AIOU, 97, 252)))
+ {
+ break lab8;
}
- base.ket = base.cursor;
- if (!base.slice_from("Y"))
+ if (!(base.out_grouping_b(g_AEIOU, 97, 252)))
{
- return false;
+ break lab8;
}
+ break lab0;
}
- base.cursor = v_5;
- break golab3;
+ base.cursor = base.limit - v_8;
}
- base.cursor = v_5;
- if (base.cursor >= base.limit)
+ base.cursor = base.limit - v_4;
+ S_ch = base.slice_to();
+ if (S_ch == '')
{
- break lab2;
+ return false;
}
- base.cursor++;
- }
- continue;
+ {
+ /** @const */ var /** number */ c2 = base.cursor;
+ base.insert(base.cursor, base.cursor, S_ch);
+ base.cursor = c2;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("e\u00EBe"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("iee"))
+ {
+ return false;
+ }
+ break;
}
- base.cursor = v_4;
- break;
}
+ base.cursor = base.limit - v_1;
return true;
};
/** @return {boolean} */
- function r_mark_regions() {
- I_p1 = base.limit;
- I_p2 = base.limit;
- golab0: while(true)
+ function r_Step_1() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_1);
+ if (among_var == 0)
{
- lab1: {
- if (!(base.in_grouping(g_v, 97, 232)))
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
{
- break lab1;
+ return false;
}
- break golab0;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
- }
- golab2: while(true)
- {
- lab3: {
- if (!(base.out_grouping(g_v, 97, 232)))
+ break;
+ case 2:
+ if (!r_R1())
{
- break lab3;
+ return false;
}
- break golab2;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
- }
- I_p1 = base.cursor;
- lab4: {
- if (!(I_p1 < 3))
- {
- break lab4;
- }
- I_p1 = 3;
- }
- golab5: while(true)
- {
- lab6: {
- if (!(base.in_grouping(g_v, 97, 232)))
{
- break lab6;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (!(base.eq_s_b("t")))
+ {
+ break lab0;
+ }
+ if (!r_R1())
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_1;
}
- break golab5;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
- }
- golab7: while(true)
- {
- lab8: {
- if (!(base.out_grouping(g_v, 97, 232)))
+ if (!r_C())
{
- break lab8;
+ return false;
}
- break golab7;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
- }
- I_p2 = base.cursor;
- return true;
- };
-
- /** @return {boolean} */
- function r_postlude() {
- var /** number */ among_var;
- while(true)
- {
- var /** number */ v_1 = base.cursor;
- lab0: {
- base.bra = base.cursor;
- among_var = base.find_among(a_1);
- if (among_var == 0)
+ if (!base.slice_del())
{
- break lab0;
+ return false;
+ }
+ break;
+ case 3:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("ie"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab2: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ if (!(base.eq_s_b("ar")))
+ {
+ break lab2;
+ }
+ if (!r_R1())
+ {
+ break lab2;
+ }
+ if (!r_C())
+ {
+ break lab2;
+ }
+ base.cursor = base.limit - v_3;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ r_lengthen_V();
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ lab3: {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ if (!(base.eq_s_b("er")))
+ {
+ break lab3;
+ }
+ if (!r_R1())
+ {
+ break lab3;
+ }
+ if (!r_C())
+ {
+ break lab3;
+ }
+ base.cursor = base.limit - v_4;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!r_C())
+ {
+ return false;
+ }
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ }
+ break;
+ case 5:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("\u00E9"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!r_V())
+ {
+ return false;
+ }
+ if (!base.slice_from("au"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ lab4: {
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab5: {
+ if (!(base.eq_s_b("hed")))
+ {
+ break lab5;
+ }
+ if (!r_R1())
+ {
+ break lab5;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("heid"))
+ {
+ return false;
+ }
+ break lab4;
+ }
+ base.cursor = base.limit - v_5;
+ lab6: {
+ if (!(base.eq_s_b("nd")))
+ {
+ break lab6;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab4;
+ }
+ base.cursor = base.limit - v_5;
+ lab7: {
+ if (!(base.eq_s_b("d")))
+ {
+ break lab7;
+ }
+ if (!r_R1())
+ {
+ break lab7;
+ }
+ if (!r_C())
+ {
+ break lab7;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab4;
+ }
+ base.cursor = base.limit - v_5;
+ lab8: {
+ lab9: {
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ lab10: {
+ if (!(base.eq_s_b("i")))
+ {
+ break lab10;
+ }
+ break lab9;
+ }
+ base.cursor = base.limit - v_6;
+ if (!(base.eq_s_b("j")))
+ {
+ break lab8;
+ }
+ }
+ if (!r_V())
+ {
+ break lab8;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab4;
+ }
+ base.cursor = base.limit - v_5;
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!r_C())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ r_lengthen_V();
+ }
+ break;
+ case 8:
+ if (!base.slice_from("nd"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_2() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_2);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("'t")))
+ {
+ break lab1;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab2: {
+ if (!(base.eq_s_b("et")))
+ {
+ break lab2;
+ }
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ break lab2;
+ }
+ if (!r_C())
+ {
+ break lab2;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab3: {
+ if (!(base.eq_s_b("rnt")))
+ {
+ break lab3;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("rn"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab4: {
+ if (!(base.eq_s_b("t")))
+ {
+ break lab4;
+ }
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ break lab4;
+ }
+ if (!r_VX())
+ {
+ break lab4;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab5: {
+ if (!(base.eq_s_b("ink")))
+ {
+ break lab5;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("ing"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab6: {
+ if (!(base.eq_s_b("mp")))
+ {
+ break lab6;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("m"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab7: {
+ if (!(base.eq_s_b("'")))
+ {
+ break lab7;
+ }
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ break lab7;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!r_C())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ break;
+ case 2:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("g"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("lijk"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("isch"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!r_C())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("t"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("s"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("r"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.insert(base.cursor, base.cursor, "l");
+ r_lengthen_V();
+ break;
+ case 10:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!r_C())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.insert(base.cursor, base.cursor, "en");
+ r_lengthen_V();
+ break;
+ case 11:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!r_C())
+ {
+ return false;
+ }
+ if (!base.slice_from("ief"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_3() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_3);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("eer"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ r_lengthen_V();
+ break;
+ case 3:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("r"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("ild")))
+ {
+ break lab1;
+ }
+ if (!base.slice_from("er"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ r_lengthen_V();
+ }
+ break;
+ case 6:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!r_C())
+ {
+ return false;
+ }
+ if (!base.slice_from("aar"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.insert(base.cursor, base.cursor, "f");
+ r_lengthen_V();
+ break;
+ case 8:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.insert(base.cursor, base.cursor, "g");
+ r_lengthen_V();
+ break;
+ case 9:
+ if (!r_R1())
+ {
+ return false;
}
+ if (!r_C())
+ {
+ return false;
+ }
+ if (!base.slice_from("t"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!r_C())
+ {
+ return false;
+ }
+ if (!base.slice_from("d"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_4() {
+ var /** number */ among_var;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
base.ket = base.cursor;
+ among_var = base.find_among_b(a_4);
+ if (among_var == 0)
+ {
+ break lab1;
+ }
+ base.bra = base.cursor;
switch (among_var) {
case 1:
- if (!base.slice_from("y"))
+ if (!r_R1())
+ {
+ break lab1;
+ }
+ if (!base.slice_from("ie"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R1())
+ {
+ break lab1;
+ }
+ if (!base.slice_from("eer"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!r_R1())
+ {
+ break lab1;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!r_R1())
+ {
+ break lab1;
+ }
+ if (!r_V())
+ {
+ break lab1;
+ }
+ if (!base.slice_from("n"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!r_R1())
+ {
+ break lab1;
+ }
+ if (!r_V())
+ {
+ break lab1;
+ }
+ if (!base.slice_from("l"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!r_R1())
+ {
+ break lab1;
+ }
+ if (!r_V())
+ {
+ break lab1;
+ }
+ if (!base.slice_from("r"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!r_R1())
+ {
+ break lab1;
+ }
+ if (!base.slice_from("teer"))
{
return false;
}
break;
- case 2:
- if (!base.slice_from("i"))
+ case 8:
+ if (!r_R1())
+ {
+ break lab1;
+ }
+ if (!base.slice_from("lijk"))
{
return false;
}
break;
- case 3:
- if (base.cursor >= base.limit)
+ case 9:
+ if (!r_R1())
{
- break lab0;
+ break lab1;
+ }
+ if (!r_C())
+ {
+ break lab1;
+ }
+ if (!base.slice_del())
+ {
+ return false;
}
- base.cursor++;
+ r_lengthen_V();
break;
}
- continue;
+ break lab0;
}
- base.cursor = v_1;
- break;
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_5) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ return false;
+ }
+ {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab2: {
+ if (!(base.eq_s_b("inn")))
+ {
+ break lab2;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab2;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_2;
+ }
+ if (!r_C())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ r_lengthen_V();
}
return true;
};
/** @return {boolean} */
- function r_R1() {
- if (!(I_p1 <= base.cursor))
+ function r_Step_7() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_6);
+ if (among_var == 0)
{
return false;
}
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("k"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("f"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("p"))
+ {
+ return false;
+ }
+ break;
+ }
return true;
};
/** @return {boolean} */
- function r_R2() {
- if (!(I_p2 <= base.cursor))
+ function r_Step_6() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_7);
+ if (among_var == 0)
{
return false;
}
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("b"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("c"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("d"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("f"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("g"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("h"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("j"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("k"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("l"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("m"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (!(base.eq_s_b("i")))
+ {
+ break lab0;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_1;
+ }
+ if (!base.slice_from("n"))
+ {
+ return false;
+ }
+ break;
+ case 12:
+ if (!base.slice_from("p"))
+ {
+ return false;
+ }
+ break;
+ case 13:
+ if (!base.slice_from("q"))
+ {
+ return false;
+ }
+ break;
+ case 14:
+ if (!base.slice_from("r"))
+ {
+ return false;
+ }
+ break;
+ case 15:
+ if (!base.slice_from("s"))
+ {
+ return false;
+ }
+ break;
+ case 16:
+ if (!base.slice_from("t"))
+ {
+ return false;
+ }
+ break;
+ case 17:
+ if (!base.slice_from("v"))
+ {
+ return false;
+ }
+ break;
+ case 18:
+ if (!base.slice_from("w"))
+ {
+ return false;
+ }
+ break;
+ case 19:
+ if (!base.slice_from("x"))
+ {
+ return false;
+ }
+ break;
+ case 20:
+ if (!base.slice_from("z"))
+ {
+ return false;
+ }
+ break;
+ }
return true;
};
/** @return {boolean} */
- function r_undouble() {
- var /** number */ v_1 = base.limit - base.cursor;
- if (base.find_among_b(a_2) == 0)
+ function r_Step_1c() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_8);
+ if (among_var == 0)
{
return false;
}
- base.cursor = base.limit - v_1;
- base.ket = base.cursor;
- if (base.cursor <= base.limit_backward)
+ base.bra = base.cursor;
+ if (!r_R1())
{
return false;
}
- base.cursor--;
- base.bra = base.cursor;
- if (!base.slice_del())
+ if (!r_C())
{
return false;
}
+ switch (among_var) {
+ case 1:
+ {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (!(base.eq_s_b("n")))
+ {
+ break lab0;
+ }
+ if (!r_R1())
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_1;
+ }
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab2: {
+ if (!(base.eq_s_b("in")))
+ {
+ break lab2;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab2;
+ }
+ if (!base.slice_from("n"))
+ {
+ return false;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ break;
+ case 2:
+ {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab3: {
+ if (!(base.eq_s_b("h")))
+ {
+ break lab3;
+ }
+ if (!r_R1())
+ {
+ break lab3;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_3;
+ }
+ {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab4: {
+ if (!(base.eq_s_b("en")))
+ {
+ break lab4;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab4;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_4;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
return true;
};
/** @return {boolean} */
- function r_e_ending() {
- B_e_found = false;
- base.ket = base.cursor;
- if (!(base.eq_s_b("e")))
+ function r_Lose_prefix() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ if (!(base.eq_s("ge")))
{
return false;
}
- base.bra = base.cursor;
- if (!r_R1())
+ base.ket = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
{
- return false;
+ /** @const */ var /** number */ c1 = base.cursor + 3;
+ if (c1 > base.limit)
+ {
+ return false;
+ }
+ base.cursor = c1;
}
- var /** number */ v_1 = base.limit - base.cursor;
- if (!(base.out_grouping_b(g_v, 97, 232)))
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_2 = base.cursor;
+ golab0: while(true)
{
- return false;
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab1: {
+ lab2: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab3: {
+ if (!(base.eq_s("ij")))
+ {
+ break lab3;
+ }
+ break lab2;
+ }
+ base.cursor = v_4;
+ if (!(base.in_grouping(g_v, 97, 252)))
+ {
+ break lab1;
+ }
+ }
+ break golab0;
+ }
+ base.cursor = v_3;
+ if (base.cursor >= base.limit)
+ {
+ return false;
+ }
+ base.cursor++;
}
- base.cursor = base.limit - v_1;
- if (!base.slice_del())
+ while(true)
{
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab4: {
+ lab5: {
+ /** @const */ var /** number */ v_6 = base.cursor;
+ lab6: {
+ if (!(base.eq_s("ij")))
+ {
+ break lab6;
+ }
+ break lab5;
+ }
+ base.cursor = v_6;
+ if (!(base.in_grouping(g_v, 97, 252)))
+ {
+ break lab4;
+ }
+ }
+ continue;
+ }
+ base.cursor = v_5;
+ break;
+ }
+ lab7: {
+ if (base.cursor < base.limit)
+ {
+ break lab7;
+ }
return false;
}
- B_e_found = true;
- if (!r_undouble())
+ base.cursor = v_2;
+ among_var = base.find_among(a_9);
+ switch (among_var) {
+ case 1:
+ return false;
+ }
+ B_GE_removed = true;
+ if (!base.slice_del())
{
return false;
}
+ /** @const */ var /** number */ v_7 = base.cursor;
+ lab8: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_10);
+ if (among_var == 0)
+ {
+ break lab8;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("i"))
+ {
+ return false;
+ }
+ break;
+ }
+ }
+ base.cursor = v_7;
return true;
};
/** @return {boolean} */
- function r_en_ending() {
- if (!r_R1())
+ function r_Lose_infix() {
+ var /** number */ among_var;
+ if (base.cursor >= base.limit)
{
return false;
}
- var /** number */ v_1 = base.limit - base.cursor;
- if (!(base.out_grouping_b(g_v, 97, 232)))
+ base.cursor++;
+ golab0: while(true)
{
- return false;
+ lab1: {
+ base.bra = base.cursor;
+ if (!(base.eq_s("ge")))
+ {
+ break lab1;
+ }
+ base.ket = base.cursor;
+ break golab0;
+ }
+ if (base.cursor >= base.limit)
+ {
+ return false;
+ }
+ base.cursor++;
}
- base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ {
+ /** @const */ var /** number */ c1 = base.cursor + 3;
+ if (c1 > base.limit)
+ {
+ return false;
+ }
+ base.cursor = c1;
+ }
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_2 = base.cursor;
+ golab2: while(true)
{
- var /** number */ v_2 = base.limit - base.cursor;
- lab0: {
- if (!(base.eq_s_b("gem")))
- {
- break lab0;
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab3: {
+ lab4: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab5: {
+ if (!(base.eq_s("ij")))
+ {
+ break lab5;
+ }
+ break lab4;
+ }
+ base.cursor = v_4;
+ if (!(base.in_grouping(g_v, 97, 252)))
+ {
+ break lab3;
+ }
}
+ break golab2;
+ }
+ base.cursor = v_3;
+ if (base.cursor >= base.limit)
+ {
return false;
}
- base.cursor = base.limit - v_2;
+ base.cursor++;
}
- if (!base.slice_del())
+ while(true)
{
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab6: {
+ lab7: {
+ /** @const */ var /** number */ v_6 = base.cursor;
+ lab8: {
+ if (!(base.eq_s("ij")))
+ {
+ break lab8;
+ }
+ break lab7;
+ }
+ base.cursor = v_6;
+ if (!(base.in_grouping(g_v, 97, 252)))
+ {
+ break lab6;
+ }
+ }
+ continue;
+ }
+ base.cursor = v_5;
+ break;
+ }
+ lab9: {
+ if (base.cursor < base.limit)
+ {
+ break lab9;
+ }
return false;
}
- if (!r_undouble())
+ base.cursor = v_2;
+ B_GE_removed = true;
+ if (!base.slice_del())
{
return false;
}
- return true;
- };
-
- /** @return {boolean} */
- function r_standard_suffix() {
- var /** number */ among_var;
- var /** number */ v_1 = base.limit - base.cursor;
- lab0: {
- base.ket = base.cursor;
- among_var = base.find_among_b(a_3);
+ /** @const */ var /** number */ v_7 = base.cursor;
+ lab10: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_11);
if (among_var == 0)
{
- break lab0;
+ break lab10;
}
- base.bra = base.cursor;
+ base.ket = base.cursor;
switch (among_var) {
case 1:
- if (!r_R1())
- {
- break lab0;
- }
- if (!base.slice_from("heid"))
+ if (!base.slice_from("e"))
{
return false;
}
break;
case 2:
- if (!r_en_ending())
+ if (!base.slice_from("i"))
{
- break lab0;
+ return false;
}
break;
- case 3:
- if (!r_R1())
- {
- break lab0;
- }
- if (!(base.out_grouping_b(g_v_j, 97, 232)))
+ }
+ }
+ base.cursor = v_7;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_measure() {
+ I_p1 = base.limit;
+ I_p2 = base.limit;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ while(true)
+ {
+ lab1: {
+ if (!(base.out_grouping(g_v, 97, 252)))
{
- break lab0;
+ break lab1;
}
- if (!base.slice_del())
- {
- return false;
+ continue;
+ }
+ break;
+ }
+ {
+ var v_2 = 1;
+ while(true)
+ {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab2: {
+ lab3: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab4: {
+ if (!(base.eq_s("ij")))
+ {
+ break lab4;
+ }
+ break lab3;
+ }
+ base.cursor = v_4;
+ if (!(base.in_grouping(g_v, 97, 252)))
+ {
+ break lab2;
+ }
+ }
+ v_2--;
+ continue;
}
+ base.cursor = v_3;
break;
+ }
+ if (v_2 > 0)
+ {
+ break lab0;
+ }
}
- }
- base.cursor = base.limit - v_1;
- var /** number */ v_2 = base.limit - base.cursor;
- r_e_ending();
- base.cursor = base.limit - v_2;
- var /** number */ v_3 = base.limit - base.cursor;
- lab1: {
- base.ket = base.cursor;
- if (!(base.eq_s_b("heid")))
+ if (!(base.out_grouping(g_v, 97, 252)))
{
- break lab1;
+ break lab0;
}
- base.bra = base.cursor;
- if (!r_R2())
+ I_p1 = base.cursor;
+ while(true)
{
- break lab1;
+ lab5: {
+ if (!(base.out_grouping(g_v, 97, 252)))
+ {
+ break lab5;
+ }
+ continue;
+ }
+ break;
}
{
- var /** number */ v_4 = base.limit - base.cursor;
- lab2: {
- if (!(base.eq_s_b("c")))
- {
- break lab2;
+ var v_5 = 1;
+ while(true)
+ {
+ /** @const */ var /** number */ v_6 = base.cursor;
+ lab6: {
+ lab7: {
+ /** @const */ var /** number */ v_7 = base.cursor;
+ lab8: {
+ if (!(base.eq_s("ij")))
+ {
+ break lab8;
+ }
+ break lab7;
+ }
+ base.cursor = v_7;
+ if (!(base.in_grouping(g_v, 97, 252)))
+ {
+ break lab6;
+ }
+ }
+ v_5--;
+ continue;
}
- break lab1;
+ base.cursor = v_6;
+ break;
+ }
+ if (v_5 > 0)
+ {
+ break lab0;
}
- base.cursor = base.limit - v_4;
}
- if (!base.slice_del())
+ if (!(base.out_grouping(g_v, 97, 252)))
{
- return false;
+ break lab0;
}
- base.ket = base.cursor;
- if (!(base.eq_s_b("en")))
+ I_p2 = base.cursor;
+ }
+ base.cursor = v_1;
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ B_stemmed = false;
+ r_measure();
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (!r_Step_1())
{
- break lab1;
+ break lab0;
}
- base.bra = base.cursor;
- if (!r_en_ending())
+ B_stemmed = true;
+ }
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ if (!r_Step_2())
{
break lab1;
}
+ B_stemmed = true;
+ }
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab2: {
+ if (!r_Step_3())
+ {
+ break lab2;
+ }
+ B_stemmed = true;
}
base.cursor = base.limit - v_3;
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab3: {
- base.ket = base.cursor;
- among_var = base.find_among_b(a_4);
- if (among_var == 0)
+ if (!r_Step_4())
{
break lab3;
}
- base.bra = base.cursor;
- switch (among_var) {
- case 1:
- if (!r_R2())
- {
- break lab3;
- }
- if (!base.slice_del())
- {
- return false;
- }
- lab4: {
- var /** number */ v_6 = base.limit - base.cursor;
- lab5: {
- base.ket = base.cursor;
- if (!(base.eq_s_b("ig")))
- {
- break lab5;
- }
- base.bra = base.cursor;
- if (!r_R2())
- {
- break lab5;
- }
- {
- var /** number */ v_7 = base.limit - base.cursor;
- lab6: {
- if (!(base.eq_s_b("e")))
- {
- break lab6;
- }
- break lab5;
- }
- base.cursor = base.limit - v_7;
- }
- if (!base.slice_del())
- {
- return false;
- }
- break lab4;
- }
- base.cursor = base.limit - v_6;
- if (!r_undouble())
- {
- break lab3;
- }
- }
- break;
- case 2:
- if (!r_R2())
- {
- break lab3;
- }
- {
- var /** number */ v_8 = base.limit - base.cursor;
- lab7: {
- if (!(base.eq_s_b("e")))
- {
- break lab7;
- }
- break lab3;
- }
- base.cursor = base.limit - v_8;
- }
- if (!base.slice_del())
- {
- return false;
- }
- break;
- case 3:
- if (!r_R2())
- {
- break lab3;
- }
- if (!base.slice_del())
- {
- return false;
- }
- if (!r_e_ending())
- {
- break lab3;
- }
- break;
- case 4:
- if (!r_R2())
- {
- break lab3;
- }
- if (!base.slice_del())
- {
- return false;
- }
- break;
- case 5:
- if (!r_R2())
- {
- break lab3;
- }
- if (!B_e_found)
- {
- break lab3;
- }
- if (!base.slice_del())
- {
- return false;
- }
- break;
+ B_stemmed = true;
+ }
+ base.cursor = base.limit - v_4;
+ base.cursor = base.limit_backward;
+ B_GE_removed = false;
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab4: {
+ /** @const */ var /** number */ v_6 = base.cursor;
+ if (!r_Lose_prefix())
+ {
+ break lab4;
}
+ base.cursor = v_6;
+ r_measure();
}
- base.cursor = base.limit - v_5;
- var /** number */ v_9 = base.limit - base.cursor;
- lab8: {
- if (!(base.out_grouping_b(g_v_I, 73, 232)))
+ base.cursor = v_5;
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab5: {
+ if (!B_GE_removed)
{
- break lab8;
+ break lab5;
}
- var /** number */ v_10 = base.limit - base.cursor;
- if (base.find_among_b(a_5) == 0)
+ B_stemmed = true;
+ if (!r_Step_1c())
{
- break lab8;
+ break lab5;
}
- if (!(base.out_grouping_b(g_v, 97, 232)))
+ }
+ base.cursor = base.limit - v_7;
+ base.cursor = base.limit_backward;
+ B_GE_removed = false;
+ /** @const */ var /** number */ v_8 = base.cursor;
+ lab6: {
+ /** @const */ var /** number */ v_9 = base.cursor;
+ if (!r_Lose_infix())
{
- break lab8;
+ break lab6;
}
- base.cursor = base.limit - v_10;
- base.ket = base.cursor;
- if (base.cursor <= base.limit_backward)
+ base.cursor = v_9;
+ r_measure();
+ }
+ base.cursor = v_8;
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
+ lab7: {
+ if (!B_GE_removed)
{
- break lab8;
+ break lab7;
}
- base.cursor--;
- base.bra = base.cursor;
- if (!base.slice_del())
+ B_stemmed = true;
+ if (!r_Step_1c())
{
- return false;
+ break lab7;
}
}
- base.cursor = base.limit - v_9;
- return true;
- };
-
- this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
- r_prelude();
- base.cursor = v_1;
- var /** number */ v_2 = base.cursor;
- r_mark_regions();
- base.cursor = v_2;
+ base.cursor = base.limit - v_10;
+ base.cursor = base.limit_backward;
base.limit_backward = base.cursor; base.cursor = base.limit;
- r_standard_suffix();
+ /** @const */ var /** number */ v_11 = base.limit - base.cursor;
+ lab8: {
+ if (!r_Step_7())
+ {
+ break lab8;
+ }
+ B_stemmed = true;
+ }
+ base.cursor = base.limit - v_11;
+ /** @const */ var /** number */ v_12 = base.limit - base.cursor;
+ lab9: {
+ if (!B_stemmed)
+ {
+ break lab9;
+ }
+ if (!r_Step_6())
+ {
+ break lab9;
+ }
+ }
+ base.cursor = base.limit - v_12;
base.cursor = base.limit_backward;
- var /** number */ v_4 = base.cursor;
- r_postlude();
- base.cursor = v_4;
return true;
};
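
A reading aid for the generated code in this patch: the Snowball compiler emulates its source language's try/or backtracking with labelled blocks plus cursor snapshots. A minimal sketch of the recurring pattern, using names from the stem() function above (this sketch is not part of the generated output):

    /** @const */ var v = base.limit - base.cursor;  // snapshot (backward mode)
    lab0: {
        if (!r_Step_1())
        {
            break lab0;            // rule failed: abandon this branch
        }
        B_stemmed = true;          // rule succeeded
    }
    base.cursor = base.limit - v;  // restore the snapshot either way

Every v_n/lab<n> pair in the generated stemmers is an instance of this pattern, and the golab<n>: while(true) loops implement Snowball's goto/gopast scans.
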
diff --git a/sphinx/search/non-minified-js/dutch_porter-stemmer.js b/sphinx/search/non-minified-js/dutch_porter-stemmer.js
new file mode 100644
index 00000000000..6bbf2bf2e8e
--- /dev/null
+++ b/sphinx/search/non-minified-js/dutch_porter-stemmer.js
@@ -0,0 +1,637 @@
+// Generated from dutch_porter.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var DutchPorterStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["", -1, 6],
+ ["\u00E1", 0, 1],
+ ["\u00E4", 0, 1],
+ ["\u00E9", 0, 2],
+ ["\u00EB", 0, 2],
+ ["\u00ED", 0, 3],
+ ["\u00EF", 0, 3],
+ ["\u00F3", 0, 4],
+ ["\u00F6", 0, 4],
+ ["\u00FA", 0, 5],
+ ["\u00FC", 0, 5]
+ ];
+
+ /** @const */ var a_1 = [
+ ["", -1, 3],
+ ["I", 0, 2],
+ ["Y", 0, 1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["dd", -1, -1],
+ ["kk", -1, -1],
+ ["tt", -1, -1]
+ ];
+
+ /** @const */ var a_3 = [
+ ["ene", -1, 2],
+ ["se", -1, 3],
+ ["en", -1, 2],
+ ["heden", 2, 1],
+ ["s", -1, 3]
+ ];
+
+ /** @const */ var a_4 = [
+ ["end", -1, 1],
+ ["ig", -1, 2],
+ ["ing", -1, 1],
+ ["lijk", -1, 3],
+ ["baar", -1, 4],
+ ["bar", -1, 5]
+ ];
+
+ /** @const */ var a_5 = [
+ ["aa", -1, -1],
+ ["ee", -1, -1],
+ ["oo", -1, -1],
+ ["uu", -1, -1]
+ ];
+
+ /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128];
+
+ /** @const */ var /** Array */ g_v_I = [1, 0, 0, 17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128];
+
+ /** @const */ var /** Array */ g_v_j = [17, 67, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128];
+
+ var /** number */ I_x = 0;
+ var /** number */ I_p2 = 0;
+ var /** number */ I_p1 = 0;
+ var /** boolean */ B_e_found = false;
+
+
+ /** @return {boolean} */
+ function r_prelude() {
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ while(true)
+ {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab0: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_0);
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("a"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("i"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("o"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("u"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (base.cursor >= base.limit)
+ {
+ break lab0;
+ }
+ base.cursor++;
+ break;
+ }
+ continue;
+ }
+ base.cursor = v_2;
+ break;
+ }
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab1: {
+ base.bra = base.cursor;
+ if (!(base.eq_s("y")))
+ {
+ base.cursor = v_3;
+ break lab1;
+ }
+ base.ket = base.cursor;
+ if (!base.slice_from("Y"))
+ {
+ return false;
+ }
+ }
+ while(true)
+ {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab2: {
+ if (!base.go_out_grouping(g_v, 97, 232))
+ {
+ break lab2;
+ }
+ base.cursor++;
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab3: {
+ base.bra = base.cursor;
+ lab4: {
+ /** @const */ var /** number */ v_6 = base.cursor;
+ lab5: {
+ if (!(base.eq_s("i")))
+ {
+ break lab5;
+ }
+ base.ket = base.cursor;
+ /** @const */ var /** number */ v_7 = base.cursor;
+ lab6: {
+ if (!(base.in_grouping(g_v, 97, 232)))
+ {
+ break lab6;
+ }
+ if (!base.slice_from("I"))
+ {
+ return false;
+ }
+ }
+ base.cursor = v_7;
+ break lab4;
+ }
+ base.cursor = v_6;
+ if (!(base.eq_s("y")))
+ {
+ base.cursor = v_5;
+ break lab3;
+ }
+ base.ket = base.cursor;
+ if (!base.slice_from("Y"))
+ {
+ return false;
+ }
+ }
+ }
+ continue;
+ }
+ base.cursor = v_4;
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_mark_regions() {
+ I_p1 = base.limit;
+ I_p2 = base.limit;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ {
+ /** @const */ var /** number */ c1 = base.cursor + 3;
+ if (c1 > base.limit)
+ {
+ return false;
+ }
+ base.cursor = c1;
+ }
+ I_x = base.cursor;
+ base.cursor = v_1;
+ if (!base.go_out_grouping(g_v, 97, 232))
+ {
+ return false;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 232))
+ {
+ return false;
+ }
+ base.cursor++;
+ I_p1 = base.cursor;
+ lab0: {
+ if (I_p1 >= I_x)
+ {
+ break lab0;
+ }
+ I_p1 = I_x;
+ }
+ if (!base.go_out_grouping(g_v, 97, 232))
+ {
+ return false;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 232))
+ {
+ return false;
+ }
+ base.cursor++;
+ I_p2 = base.cursor;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_postlude() {
+ var /** number */ among_var;
+ while(true)
+ {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_1);
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("y"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("i"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (base.cursor >= base.limit)
+ {
+ break lab0;
+ }
+ base.cursor++;
+ break;
+ }
+ continue;
+ }
+ base.cursor = v_1;
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_R1() {
+ return I_p1 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_R2() {
+ return I_p2 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_undouble() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ if (base.find_among_b(a_2) == 0)
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ if (base.cursor <= base.limit_backward)
+ {
+ return false;
+ }
+ base.cursor--;
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_e_ending() {
+ B_e_found = false;
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("e")))
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ if (!(base.out_grouping_b(g_v, 97, 232)))
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_1;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_e_found = true;
+ if (!r_undouble())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_en_ending() {
+ if (!r_R1())
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ if (!(base.out_grouping_b(g_v, 97, 232)))
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_1;
+ {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab0: {
+ if (!(base.eq_s_b("gem")))
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_2;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ if (!r_undouble())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_standard_suffix() {
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_3);
+ if (among_var == 0)
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R1())
+ {
+ break lab0;
+ }
+ if (!base.slice_from("heid"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_en_ending())
+ {
+ break lab0;
+ }
+ break;
+ case 3:
+ if (!r_R1())
+ {
+ break lab0;
+ }
+ if (!(base.out_grouping_b(g_v_j, 97, 232)))
+ {
+ break lab0;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ }
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ r_e_ending();
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("heid")))
+ {
+ break lab1;
+ }
+ base.bra = base.cursor;
+ if (!r_R2())
+ {
+ break lab1;
+ }
+ {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab2: {
+ if (!(base.eq_s_b("c")))
+ {
+ break lab2;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_4;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("en")))
+ {
+ break lab1;
+ }
+ base.bra = base.cursor;
+ if (!r_en_ending())
+ {
+ break lab1;
+ }
+ }
+ base.cursor = base.limit - v_3;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab3: {
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_4);
+ if (among_var == 0)
+ {
+ break lab3;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R2())
+ {
+ break lab3;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ lab4: {
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ lab5: {
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("ig")))
+ {
+ break lab5;
+ }
+ base.bra = base.cursor;
+ if (!r_R2())
+ {
+ break lab5;
+ }
+ {
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab6: {
+ if (!(base.eq_s_b("e")))
+ {
+ break lab6;
+ }
+ break lab5;
+ }
+ base.cursor = base.limit - v_7;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab4;
+ }
+ base.cursor = base.limit - v_6;
+ if (!r_undouble())
+ {
+ break lab3;
+ }
+ }
+ break;
+ case 2:
+ if (!r_R2())
+ {
+ break lab3;
+ }
+ {
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ lab7: {
+ if (!(base.eq_s_b("e")))
+ {
+ break lab7;
+ }
+ break lab3;
+ }
+ base.cursor = base.limit - v_8;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!r_R2())
+ {
+ break lab3;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ if (!r_e_ending())
+ {
+ break lab3;
+ }
+ break;
+ case 4:
+ if (!r_R2())
+ {
+ break lab3;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!r_R2())
+ {
+ break lab3;
+ }
+ if (!B_e_found)
+ {
+ break lab3;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ }
+ base.cursor = base.limit - v_5;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
+ lab8: {
+ if (!(base.out_grouping_b(g_v_I, 73, 232)))
+ {
+ break lab8;
+ }
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
+ if (base.find_among_b(a_5) == 0)
+ {
+ break lab8;
+ }
+ if (!(base.out_grouping_b(g_v, 97, 232)))
+ {
+ break lab8;
+ }
+ base.cursor = base.limit - v_10;
+ base.ket = base.cursor;
+ if (base.cursor <= base.limit_backward)
+ {
+ break lab8;
+ }
+ base.cursor--;
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ base.cursor = base.limit - v_9;
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ r_prelude();
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_2 = base.cursor;
+ r_mark_regions();
+ base.cursor = v_2;
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ r_standard_suffix();
+ base.cursor = base.limit_backward;
+ /** @const */ var /** number */ v_3 = base.cursor;
+ r_postlude();
+ base.cursor = v_3;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
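
The public surface of the stemmer above is the stemWord() method assigned at the end of the constructor. A minimal usage sketch (assuming BaseStemmer, defined in the accompanying base-stemmer.js, has been loaded first, as Sphinx's search machinery arranges):

    var stemmer = new DutchPorterStemmer();
    var stem = stemmer.stemWord('woorden');  // returns the stemmed token
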
diff --git a/sphinx/search/non-minified-js/english-stemmer.js b/sphinx/search/non-minified-js/english-stemmer.js
new file mode 100644
index 00000000000..056760ee8aa
--- /dev/null
+++ b/sphinx/search/non-minified-js/english-stemmer.js
@@ -0,0 +1,1066 @@
+// Generated from english.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var EnglishStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["arsen", -1, -1],
+ ["commun", -1, -1],
+ ["emerg", -1, -1],
+ ["gener", -1, -1],
+ ["later", -1, -1],
+ ["organ", -1, -1],
+ ["past", -1, -1],
+ ["univers", -1, -1]
+ ];
+
+ /** @const */ var a_1 = [
+ ["'", -1, 1],
+ ["'s'", 0, 1],
+ ["'s", -1, 1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["ied", -1, 2],
+ ["s", -1, 3],
+ ["ies", 1, 2],
+ ["sses", 1, 1],
+ ["ss", 1, -1],
+ ["us", 1, -1]
+ ];
+
+ /** @const */ var a_3 = [
+ ["succ", -1, 1],
+ ["proc", -1, 1],
+ ["exc", -1, 1]
+ ];
+
+ /** @const */ var a_4 = [
+ ["even", -1, 2],
+ ["cann", -1, 2],
+ ["inn", -1, 2],
+ ["earr", -1, 2],
+ ["herr", -1, 2],
+ ["out", -1, 2],
+ ["y", -1, 1]
+ ];
+
+ /** @const */ var a_5 = [
+ ["", -1, -1],
+ ["ed", 0, 2],
+ ["eed", 1, 1],
+ ["ing", 0, 3],
+ ["edly", 0, 2],
+ ["eedly", 4, 1],
+ ["ingly", 0, 2]
+ ];
+
+ /** @const */ var a_6 = [
+ ["", -1, 3],
+ ["bb", 0, 2],
+ ["dd", 0, 2],
+ ["ff", 0, 2],
+ ["gg", 0, 2],
+ ["bl", 0, 1],
+ ["mm", 0, 2],
+ ["nn", 0, 2],
+ ["pp", 0, 2],
+ ["rr", 0, 2],
+ ["at", 0, 1],
+ ["tt", 0, 2],
+ ["iz", 0, 1]
+ ];
+
+ /** @const */ var a_7 = [
+ ["anci", -1, 3],
+ ["enci", -1, 2],
+ ["ogi", -1, 14],
+ ["li", -1, 16],
+ ["bli", 3, 12],
+ ["abli", 4, 4],
+ ["alli", 3, 8],
+ ["fulli", 3, 9],
+ ["lessli", 3, 15],
+ ["ousli", 3, 10],
+ ["entli", 3, 5],
+ ["aliti", -1, 8],
+ ["biliti", -1, 12],
+ ["iviti", -1, 11],
+ ["tional", -1, 1],
+ ["ational", 14, 7],
+ ["alism", -1, 8],
+ ["ation", -1, 7],
+ ["ization", 17, 6],
+ ["izer", -1, 6],
+ ["ator", -1, 7],
+ ["iveness", -1, 11],
+ ["fulness", -1, 9],
+ ["ousness", -1, 10],
+ ["ogist", -1, 13]
+ ];
+
+ /** @const */ var a_8 = [
+ ["icate", -1, 4],
+ ["ative", -1, 6],
+ ["alize", -1, 3],
+ ["iciti", -1, 4],
+ ["ical", -1, 4],
+ ["tional", -1, 1],
+ ["ational", 5, 2],
+ ["ful", -1, 5],
+ ["ness", -1, 5]
+ ];
+
+ /** @const */ var a_9 = [
+ ["ic", -1, 1],
+ ["ance", -1, 1],
+ ["ence", -1, 1],
+ ["able", -1, 1],
+ ["ible", -1, 1],
+ ["ate", -1, 1],
+ ["ive", -1, 1],
+ ["ize", -1, 1],
+ ["iti", -1, 1],
+ ["al", -1, 1],
+ ["ism", -1, 1],
+ ["ion", -1, 2],
+ ["er", -1, 1],
+ ["ous", -1, 1],
+ ["ant", -1, 1],
+ ["ent", -1, 1],
+ ["ment", 15, 1],
+ ["ement", 16, 1]
+ ];
+
+ /** @const */ var a_10 = [
+ ["e", -1, 1],
+ ["l", -1, 2]
+ ];
+
+ /** @const */ var a_11 = [
+ ["andes", -1, -1],
+ ["atlas", -1, -1],
+ ["bias", -1, -1],
+ ["cosmos", -1, -1],
+ ["early", -1, 5],
+ ["gently", -1, 3],
+ ["howe", -1, -1],
+ ["idly", -1, 2],
+ ["news", -1, -1],
+ ["only", -1, 6],
+ ["singly", -1, 7],
+ ["skies", -1, 1],
+ ["sky", -1, -1],
+ ["ugly", -1, 4]
+ ];
+
+ /** @const */ var /** Array */ g_aeo = [17, 64];
+
+ /** @const */ var /** Array */ g_v = [17, 65, 16, 1];
+
+ /** @const */ var /** Array */ g_v_WXY = [1, 17, 65, 208, 1];
+
+ /** @const */ var /** Array */ g_valid_LI = [55, 141, 2];
+
+ var /** boolean */ B_Y_found = false;
+ var /** number */ I_p2 = 0;
+ var /** number */ I_p1 = 0;
+
+
+ /** @return {boolean} */
+ function r_prelude() {
+ B_Y_found = false;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ base.bra = base.cursor;
+ if (!(base.eq_s("'")))
+ {
+ break lab0;
+ }
+ base.ket = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ base.bra = base.cursor;
+ if (!(base.eq_s("y")))
+ {
+ break lab1;
+ }
+ base.ket = base.cursor;
+ if (!base.slice_from("Y"))
+ {
+ return false;
+ }
+ B_Y_found = true;
+ }
+ base.cursor = v_2;
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab2: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab3: {
+ golab4: while(true)
+ {
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab5: {
+ if (!(base.in_grouping(g_v, 97, 121)))
+ {
+ break lab5;
+ }
+ base.bra = base.cursor;
+ if (!(base.eq_s("y")))
+ {
+ break lab5;
+ }
+ base.ket = base.cursor;
+ base.cursor = v_5;
+ break golab4;
+ }
+ base.cursor = v_5;
+ if (base.cursor >= base.limit)
+ {
+ break lab3;
+ }
+ base.cursor++;
+ }
+ if (!base.slice_from("Y"))
+ {
+ return false;
+ }
+ B_Y_found = true;
+ continue;
+ }
+ base.cursor = v_4;
+ break;
+ }
+ }
+ base.cursor = v_3;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_mark_regions() {
+ I_p1 = base.limit;
+ I_p2 = base.limit;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab2: {
+ if (base.find_among(a_0) == 0)
+ {
+ break lab2;
+ }
+ break lab1;
+ }
+ base.cursor = v_2;
+ if (!base.go_out_grouping(g_v, 97, 121))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 121))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ }
+ I_p1 = base.cursor;
+ if (!base.go_out_grouping(g_v, 97, 121))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 121))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ I_p2 = base.cursor;
+ }
+ base.cursor = v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_shortv() {
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.out_grouping_b(g_v_WXY, 89, 121)))
+ {
+ break lab1;
+ }
+ if (!(base.in_grouping_b(g_v, 97, 121)))
+ {
+ break lab1;
+ }
+ if (!(base.out_grouping_b(g_v, 97, 121)))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab2: {
+ if (!(base.out_grouping_b(g_v, 97, 121)))
+ {
+ break lab2;
+ }
+ if (!(base.in_grouping_b(g_v, 97, 121)))
+ {
+ break lab2;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab2;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!(base.eq_s_b("past")))
+ {
+ return false;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_R1() {
+ return I_p1 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_R2() {
+ return I_p2 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_Step_1a() {
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_1) == 0)
+ {
+ base.cursor = base.limit - v_1;
+ break lab0;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_2);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("ss"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab2: {
+ {
+ /** @const */ var /** number */ c1 = base.cursor - 2;
+ if (c1 < base.limit_backward)
+ {
+ break lab2;
+ }
+ base.cursor = c1;
+ }
+ if (!base.slice_from("i"))
+ {
+ return false;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ if (!base.slice_from("ie"))
+ {
+ return false;
+ }
+ }
+ break;
+ case 3:
+ if (base.cursor <= base.limit_backward)
+ {
+ return false;
+ }
+ base.cursor--;
+ if (!base.go_out_grouping_b(g_v, 97, 121))
+ {
+ return false;
+ }
+ base.cursor--;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_1b() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_5);
+ base.bra = base.cursor;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ switch (among_var) {
+ case 1:
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab2: {
+ lab3: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab4: {
+ if (base.find_among_b(a_3) == 0)
+ {
+ break lab4;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab4;
+ }
+ break lab3;
+ }
+ base.cursor = base.limit - v_3;
+ if (!r_R1())
+ {
+ break lab2;
+ }
+ if (!base.slice_from("ee"))
+ {
+ return false;
+ }
+ }
+ }
+ base.cursor = base.limit - v_2;
+ break;
+ case 2:
+ break lab1;
+ case 3:
+ among_var = base.find_among_b(a_4);
+ if (among_var == 0)
+ {
+ break lab1;
+ }
+ switch (among_var) {
+ case 1:
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ if (!(base.out_grouping_b(g_v, 97, 121)))
+ {
+ break lab1;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab1;
+ }
+ base.cursor = base.limit - v_4;
+ base.bra = base.cursor;
+ if (!base.slice_from("ie"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (base.cursor > base.limit_backward)
+ {
+ break lab1;
+ }
+ break;
+ }
+ break;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ if (!base.go_out_grouping_b(g_v, 97, 121))
+ {
+ return false;
+ }
+ base.cursor--;
+ base.cursor = base.limit - v_5;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ among_var = base.find_among_b(a_6);
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ return false;
+ case 2:
+ {
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab5: {
+ if (!(base.in_grouping_b(g_aeo, 97, 111)))
+ {
+ break lab5;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab5;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_7;
+ }
+ break;
+ case 3:
+ if (base.cursor != I_p1)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ if (!r_shortv())
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_8;
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_6;
+ base.ket = base.cursor;
+ if (base.cursor <= base.limit_backward)
+ {
+ return false;
+ }
+ base.cursor--;
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_1c() {
+ base.ket = base.cursor;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("y")))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!(base.eq_s_b("Y")))
+ {
+ return false;
+ }
+ }
+ base.bra = base.cursor;
+ if (!(base.out_grouping_b(g_v, 97, 121)))
+ {
+ return false;
+ }
+ lab2: {
+ if (base.cursor > base.limit_backward)
+ {
+ break lab2;
+ }
+ return false;
+ }
+ if (!base.slice_from("i"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_2() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_7);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ return false;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("tion"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("ence"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("ance"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("able"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("ent"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("ize"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("ate"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("al"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("ful"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("ous"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ if (!base.slice_from("ive"))
+ {
+ return false;
+ }
+ break;
+ case 12:
+ if (!base.slice_from("ble"))
+ {
+ return false;
+ }
+ break;
+ case 13:
+ if (!base.slice_from("og"))
+ {
+ return false;
+ }
+ break;
+ case 14:
+ if (!(base.eq_s_b("l")))
+ {
+ return false;
+ }
+ if (!base.slice_from("og"))
+ {
+ return false;
+ }
+ break;
+ case 15:
+ if (!base.slice_from("less"))
+ {
+ return false;
+ }
+ break;
+ case 16:
+ if (!(base.in_grouping_b(g_valid_LI, 99, 116)))
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_3() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_8);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ return false;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("tion"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("ate"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("al"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("ic"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_4() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_9);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!r_R2())
+ {
+ return false;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("s")))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!(base.eq_s_b("t")))
+ {
+ return false;
+ }
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_5() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_10);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ lab0: {
+ lab1: {
+ if (!r_R2())
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ if (!r_R1())
+ {
+ return false;
+ }
+ {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab2: {
+ if (!r_shortv())
+ {
+ break lab2;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_1;
+ }
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!(base.eq_s_b("l")))
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_exception1() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_11);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ if (base.cursor < base.limit)
+ {
+ return false;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("sky"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("idl"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("gentl"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("ugli"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("earli"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("onli"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("singl"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_postlude() {
+ if (!B_Y_found)
+ {
+ return false;
+ }
+ while(true)
+ {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ golab1: while(true)
+ {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab2: {
+ base.bra = base.cursor;
+ if (!(base.eq_s("Y")))
+ {
+ break lab2;
+ }
+ base.ket = base.cursor;
+ base.cursor = v_2;
+ break golab1;
+ }
+ base.cursor = v_2;
+ if (base.cursor >= base.limit)
+ {
+ break lab0;
+ }
+ base.cursor++;
+ }
+ if (!base.slice_from("y"))
+ {
+ return false;
+ }
+ continue;
+ }
+ base.cursor = v_1;
+ break;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab1: {
+ if (!r_exception1())
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = v_1;
+ lab2: {
+ {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab3: {
+ {
+ /** @const */ var /** number */ c1 = base.cursor + 3;
+ if (c1 > base.limit)
+ {
+ break lab3;
+ }
+ base.cursor = c1;
+ }
+ break lab2;
+ }
+ base.cursor = v_2;
+ }
+ break lab0;
+ }
+ base.cursor = v_1;
+ r_prelude();
+ r_mark_regions();
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ r_Step_1a();
+ base.cursor = base.limit - v_3;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ r_Step_1b();
+ base.cursor = base.limit - v_4;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ r_Step_1c();
+ base.cursor = base.limit - v_5;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ r_Step_2();
+ base.cursor = base.limit - v_6;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ r_Step_3();
+ base.cursor = base.limit - v_7;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ r_Step_4();
+ base.cursor = base.limit - v_8;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
+ r_Step_5();
+ base.cursor = base.limit - v_9;
+ base.cursor = base.limit_backward;
+ /** @const */ var /** number */ v_10 = base.cursor;
+ r_postlude();
+ base.cursor = v_10;
+ }
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
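
The English stemmer exposes the same stemWord() entry point; under the same assumptions, a usage sketch:

    var stemmer = new EnglishStemmer();
    var stem = stemmer.stemWord('consistency');  // Porter2-style stemming
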
diff --git a/sphinx/search/non-minified-js/esperanto-stemmer.js b/sphinx/search/non-minified-js/esperanto-stemmer.js
new file mode 100644
index 00000000000..8fc6af00f1a
--- /dev/null
+++ b/sphinx/search/non-minified-js/esperanto-stemmer.js
@@ -0,0 +1,762 @@
+// Generated from esperanto.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var EsperantoStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["", -1, 14],
+ ["-", 0, 13],
+ ["cx", 0, 1],
+ ["gx", 0, 2],
+ ["hx", 0, 3],
+ ["jx", 0, 4],
+ ["q", 0, 12],
+ ["sx", 0, 5],
+ ["ux", 0, 6],
+ ["w", 0, 12],
+ ["x", 0, 12],
+ ["y", 0, 12],
+ ["\u00E1", 0, 7],
+ ["\u00E9", 0, 8],
+ ["\u00ED", 0, 9],
+ ["\u00F3", 0, 10],
+ ["\u00FA", 0, 11]
+ ];
+
+ /** @const */ var a_1 = [
+ ["as", -1, -1],
+ ["i", -1, -1],
+ ["is", 1, -1],
+ ["os", -1, -1],
+ ["u", -1, -1],
+ ["us", 4, -1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["ci", -1, -1],
+ ["gi", -1, -1],
+ ["hi", -1, -1],
+ ["li", -1, -1],
+ ["ili", 3, -1],
+ ["\u015Dli", 3, -1],
+ ["mi", -1, -1],
+ ["ni", -1, -1],
+ ["oni", 7, -1],
+ ["ri", -1, -1],
+ ["si", -1, -1],
+ ["vi", -1, -1],
+ ["ivi", 11, -1],
+ ["\u011Di", -1, -1],
+ ["\u015Di", -1, -1],
+ ["i\u015Di", 14, -1],
+ ["mal\u015Di", 14, -1]
+ ];
+
+ /** @const */ var a_3 = [
+ ["amb", -1, -1],
+ ["bald", -1, -1],
+ ["malbald", 1, -1],
+ ["morg", -1, -1],
+ ["postmorg", 3, -1],
+ ["adi", -1, -1],
+ ["hodi", -1, -1],
+ ["ank", -1, -1],
+ ["\u0109irk", -1, -1],
+ ["tut\u0109irk", 8, -1],
+ ["presk", -1, -1],
+ ["almen", -1, -1],
+ ["apen", -1, -1],
+ ["hier", -1, -1],
+ ["anta\u016Dhier", 13, -1],
+ ["malgr", -1, -1],
+ ["ankor", -1, -1],
+ ["kontr", -1, -1],
+ ["anstat", -1, -1],
+ ["kvaz", -1, -1]
+ ];
+
+ /** @const */ var a_4 = [
+ ["aliu", -1, -1],
+ ["unu", -1, -1]
+ ];
+
+ /** @const */ var a_5 = [
+ ["aha", -1, -1],
+ ["haha", 0, -1],
+ ["haleluja", -1, -1],
+ ["hola", -1, -1],
+ ["hosana", -1, -1],
+ ["maltra", -1, -1],
+ ["hura", -1, -1],
+ ["\u0125a\u0125a", -1, -1],
+ ["ekde", -1, -1],
+ ["elde", -1, -1],
+ ["disde", -1, -1],
+ ["ehe", -1, -1],
+ ["maltre", -1, -1],
+ ["dirlididi", -1, -1],
+ ["malpli", -1, -1],
+ ["mal\u0109i", -1, -1],
+ ["malkaj", -1, -1],
+ ["amen", -1, -1],
+ ["tamen", 17, -1],
+ ["oho", -1, -1],
+ ["maltro", -1, -1],
+ ["minus", -1, -1],
+ ["uhu", -1, -1],
+ ["muu", -1, -1]
+ ];
+
+ /** @const */ var a_6 = [
+ ["tri", -1, -1],
+ ["du", -1, -1],
+ ["unu", -1, -1]
+ ];
+
+ /** @const */ var a_7 = [
+ ["dek", -1, -1],
+ ["cent", -1, -1]
+ ];
+
+ /** @const */ var a_8 = [
+ ["k", -1, -1],
+ ["kelk", 0, -1],
+ ["nen", -1, -1],
+ ["t", -1, -1],
+ ["mult", 3, -1],
+ ["samt", 3, -1],
+ ["\u0109", -1, -1]
+ ];
+
+ /** @const */ var a_9 = [
+ ["a", -1, -1],
+ ["e", -1, -1],
+ ["i", -1, -1],
+ ["j", -1, -1, r_not_after_letter],
+ ["aj", 3, -1],
+ ["oj", 3, -1],
+ ["n", -1, -1, r_not_after_letter],
+ ["an", 6, -1],
+ ["en", 6, -1],
+ ["jn", 6, -1, r_not_after_letter],
+ ["ajn", 9, -1],
+ ["ojn", 9, -1],
+ ["on", 6, -1],
+ ["o", -1, -1],
+ ["as", -1, -1],
+ ["is", -1, -1],
+ ["os", -1, -1],
+ ["us", -1, -1],
+ ["u", -1, -1]
+ ];
+
+ /** @const */ var /** Array */ g_vowel = [17, 65, 16];
+
+ /** @const */ var /** Array */ g_aou = [1, 64, 16];
+
+ /** @const */ var /** Array */ g_digit = [255, 3];
+
+ var /** boolean */ B_foreign = false;
+
+
+ /** @return {boolean} */
+ function r_canonical_form() {
+ var /** number */ among_var;
+ B_foreign = false;
+ while(true)
+ {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_0);
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u0109"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u011D"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u0125"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("\u0135"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("\u015D"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("\u016D"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("a"))
+ {
+ return false;
+ }
+ B_foreign = true;
+ break;
+ case 8:
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ B_foreign = true;
+ break;
+ case 9:
+ if (!base.slice_from("i"))
+ {
+ return false;
+ }
+ B_foreign = true;
+ break;
+ case 10:
+ if (!base.slice_from("o"))
+ {
+ return false;
+ }
+ B_foreign = true;
+ break;
+ case 11:
+ if (!base.slice_from("u"))
+ {
+ return false;
+ }
+ B_foreign = true;
+ break;
+ case 12:
+ B_foreign = true;
+ break;
+ case 13:
+ B_foreign = false;
+ break;
+ case 14:
+ if (base.cursor >= base.limit)
+ {
+ break lab0;
+ }
+ base.cursor++;
+ break;
+ }
+ continue;
+ }
+ base.cursor = v_1;
+ break;
+ }
+ lab1: {
+ if (!B_foreign)
+ {
+ break lab1;
+ }
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_initial_apostrophe() {
+ base.bra = base.cursor;
+ if (!(base.eq_s("'")))
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ if (!(base.eq_s("st")))
+ {
+ return false;
+ }
+ if (base.find_among(a_1) == 0)
+ {
+ return false;
+ }
+ if (base.cursor < base.limit)
+ {
+ return false;
+ }
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_pronoun() {
+ base.ket = base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (!(base.eq_s_b("n")))
+ {
+ base.cursor = base.limit - v_1;
+ break lab0;
+ }
+ }
+ base.bra = base.cursor;
+ if (base.find_among_b(a_2) == 0)
+ {
+ return false;
+ }
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab2: {
+ if (base.cursor > base.limit_backward)
+ {
+ break lab2;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ if (!(base.eq_s_b("-")))
+ {
+ return false;
+ }
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_final_apostrophe() {
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("'")))
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("l")))
+ {
+ break lab1;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab1;
+ }
+ if (!base.slice_from("a"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab2: {
+ if (!(base.eq_s_b("un")))
+ {
+ break lab2;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab2;
+ }
+ if (!base.slice_from("u"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab3: {
+ if (base.find_among_b(a_3) == 0)
+ {
+ break lab3;
+ }
+ lab4: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab5: {
+ if (base.cursor > base.limit_backward)
+ {
+ break lab5;
+ }
+ break lab4;
+ }
+ base.cursor = base.limit - v_2;
+ if (!(base.eq_s_b("-")))
+ {
+ break lab3;
+ }
+ }
+ if (!base.slice_from("a\u016D"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!base.slice_from("o"))
+ {
+ return false;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_ujn_suffix() {
+ base.ket = base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (!(base.eq_s_b("n")))
+ {
+ base.cursor = base.limit - v_1;
+ break lab0;
+ }
+ }
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("j")))
+ {
+ base.cursor = base.limit - v_2;
+ break lab1;
+ }
+ }
+ base.bra = base.cursor;
+ if (base.find_among_b(a_4) == 0)
+ {
+ return false;
+ }
+ lab2: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab3: {
+ if (base.cursor > base.limit_backward)
+ {
+ break lab3;
+ }
+ break lab2;
+ }
+ base.cursor = base.limit - v_3;
+ if (!(base.eq_s_b("-")))
+ {
+ return false;
+ }
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_uninflected() {
+ if (base.find_among_b(a_5) == 0)
+ {
+ return false;
+ }
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (base.cursor > base.limit_backward)
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!(base.eq_s_b("-")))
+ {
+ return false;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_merged_numeral() {
+ if (base.find_among_b(a_6) == 0)
+ {
+ return false;
+ }
+ if (base.find_among_b(a_7) == 0)
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_correlative() {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab2: {
+ if (!(base.eq_s_b("n")))
+ {
+ base.cursor = base.limit - v_3;
+ break lab2;
+ }
+ }
+ base.bra = base.cursor;
+ if (!(base.eq_s_b("e")))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab3: {
+ if (!(base.eq_s_b("n")))
+ {
+ base.cursor = base.limit - v_4;
+ break lab3;
+ }
+ }
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab4: {
+ if (!(base.eq_s_b("j")))
+ {
+ base.cursor = base.limit - v_5;
+ break lab4;
+ }
+ }
+ base.bra = base.cursor;
+ if (!(base.in_grouping_b(g_aou, 97, 117)))
+ {
+ return false;
+ }
+ }
+ if (!(base.eq_s_b("i")))
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ lab5: {
+ if (base.find_among_b(a_8) == 0)
+ {
+ base.cursor = base.limit - v_6;
+ break lab5;
+ }
+ }
+ lab6: {
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab7: {
+ if (base.cursor > base.limit_backward)
+ {
+ break lab7;
+ }
+ break lab6;
+ }
+ base.cursor = base.limit - v_7;
+ if (!(base.eq_s_b("-")))
+ {
+ return false;
+ }
+ }
+ base.cursor = base.limit - v_1;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_long_word() {
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ for (var /** number */ v_2 = 2; v_2 > 0; v_2--)
+ {
+ if (!base.go_out_grouping_b(g_vowel, 97, 117))
+ {
+ break lab1;
+ }
+ base.cursor--;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab2: {
+ golab3: while(true)
+ {
+ lab4: {
+ if (!(base.eq_s_b("-")))
+ {
+ break lab4;
+ }
+ break golab3;
+ }
+ if (base.cursor <= base.limit_backward)
+ {
+ break lab2;
+ }
+ base.cursor--;
+ }
+ if (base.cursor <= base.limit_backward)
+ {
+ break lab2;
+ }
+ base.cursor--;
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!base.go_out_grouping_b(g_digit, 48, 57))
+ {
+ return false;
+ }
+ base.cursor--;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_not_after_letter() {
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("-")))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!(base.in_grouping_b(g_digit, 48, 57)))
+ {
+ return false;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_standard_suffix() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_9) == 0)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (!(base.eq_s_b("-")))
+ {
+ base.cursor = base.limit - v_1;
+ break lab0;
+ }
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ if (!r_canonical_form())
+ {
+ return false;
+ }
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_2 = base.cursor;
+ r_initial_apostrophe();
+ base.cursor = v_2;
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab0: {
+ if (!r_pronoun())
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_3;
+ }
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ r_final_apostrophe();
+ base.cursor = base.limit - v_4;
+ {
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab1: {
+ if (!r_correlative())
+ {
+ break lab1;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_5;
+ }
+ {
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ lab2: {
+ if (!r_uninflected())
+ {
+ break lab2;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_6;
+ }
+ {
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab3: {
+ if (!r_merged_numeral())
+ {
+ break lab3;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_7;
+ }
+ {
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ lab4: {
+ if (!r_ujn_suffix())
+ {
+ break lab4;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_8;
+ }
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
+ if (!r_long_word())
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_9;
+ if (!r_standard_suffix())
+ {
+ return false;
+ }
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/estonian-stemmer.js b/sphinx/search/non-minified-js/estonian-stemmer.js
new file mode 100644
index 00000000000..2700c0b3379
--- /dev/null
+++ b/sphinx/search/non-minified-js/estonian-stemmer.js
@@ -0,0 +1,1088 @@
+// Generated from estonian.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var EstonianStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["gi", -1, 1],
+ ["ki", -1, 2]
+ ];
+
+ /** @const */ var a_1 = [
+ ["da", -1, 3],
+ ["mata", -1, 1],
+ ["b", -1, 3],
+ ["ksid", -1, 1],
+ ["nuksid", 3, 1],
+ ["me", -1, 3],
+ ["sime", 5, 1],
+ ["ksime", 6, 1],
+ ["nuksime", 7, 1],
+ ["akse", -1, 2],
+ ["dakse", 9, 1],
+ ["takse", 9, 1],
+ ["site", -1, 1],
+ ["ksite", 12, 1],
+ ["nuksite", 13, 1],
+ ["n", -1, 3],
+ ["sin", 15, 1],
+ ["ksin", 16, 1],
+ ["nuksin", 17, 1],
+ ["daks", -1, 1],
+ ["taks", -1, 1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["aa", -1, -1],
+ ["ee", -1, -1],
+ ["ii", -1, -1],
+ ["oo", -1, -1],
+ ["uu", -1, -1],
+ ["\u00E4\u00E4", -1, -1],
+ ["\u00F5\u00F5", -1, -1],
+ ["\u00F6\u00F6", -1, -1],
+ ["\u00FC\u00FC", -1, -1]
+ ];
+
+ /** @const */ var a_3 = [
+ ["i", -1, 1]
+ ];
+
+ /** @const */ var a_4 = [
+ ["lane", -1, 1],
+ ["line", -1, 3],
+ ["mine", -1, 2],
+ ["lasse", -1, 1],
+ ["lisse", -1, 3],
+ ["misse", -1, 2],
+ ["lasi", -1, 1],
+ ["lisi", -1, 3],
+ ["misi", -1, 2],
+ ["last", -1, 1],
+ ["list", -1, 3],
+ ["mist", -1, 2]
+ ];
+
+ /** @const */ var a_5 = [
+ ["ga", -1, 1],
+ ["ta", -1, 1],
+ ["le", -1, 1],
+ ["sse", -1, 1],
+ ["l", -1, 1],
+ ["s", -1, 1],
+ ["ks", 5, 1],
+ ["t", -1, 2],
+ ["lt", 7, 1],
+ ["st", 7, 1]
+ ];
+
+ /** @const */ var a_6 = [
+ ["", -1, 2],
+ ["las", 0, 1],
+ ["lis", 0, 1],
+ ["mis", 0, 1],
+ ["t", 0, -1]
+ ];
+
+ /** @const */ var a_7 = [
+ ["d", -1, 4],
+ ["sid", 0, 2],
+ ["de", -1, 4],
+ ["ikkude", 2, 1],
+ ["ike", -1, 1],
+ ["ikke", -1, 1],
+ ["te", -1, 3]
+ ];
+
+ /** @const */ var a_8 = [
+ ["va", -1, -1],
+ ["du", -1, -1],
+ ["nu", -1, -1],
+ ["tu", -1, -1]
+ ];
+
+ /** @const */ var a_9 = [
+ ["kk", -1, 1],
+ ["pp", -1, 2],
+ ["tt", -1, 3]
+ ];
+
+ /** @const */ var a_10 = [
+ ["ma", -1, 2],
+ ["mai", -1, 1],
+ ["m", -1, 1]
+ ];
+
+ /** @const */ var a_11 = [
+ ["joob", -1, 1],
+ ["jood", -1, 1],
+ ["joodakse", 1, 1],
+ ["jooma", -1, 1],
+ ["joomata", 3, 1],
+ ["joome", -1, 1],
+ ["joon", -1, 1],
+ ["joote", -1, 1],
+ ["joovad", -1, 1],
+ ["juua", -1, 1],
+ ["juuakse", 9, 1],
+ ["j\u00E4i", -1, 12],
+ ["j\u00E4id", 11, 12],
+ ["j\u00E4ime", 11, 12],
+ ["j\u00E4in", 11, 12],
+ ["j\u00E4ite", 11, 12],
+ ["j\u00E4\u00E4b", -1, 12],
+ ["j\u00E4\u00E4d", -1, 12],
+ ["j\u00E4\u00E4da", 17, 12],
+ ["j\u00E4\u00E4dakse", 18, 12],
+ ["j\u00E4\u00E4di", 17, 12],
+ ["j\u00E4\u00E4ks", -1, 12],
+ ["j\u00E4\u00E4ksid", 21, 12],
+ ["j\u00E4\u00E4ksime", 21, 12],
+ ["j\u00E4\u00E4ksin", 21, 12],
+ ["j\u00E4\u00E4ksite", 21, 12],
+ ["j\u00E4\u00E4ma", -1, 12],
+ ["j\u00E4\u00E4mata", 26, 12],
+ ["j\u00E4\u00E4me", -1, 12],
+ ["j\u00E4\u00E4n", -1, 12],
+ ["j\u00E4\u00E4te", -1, 12],
+ ["j\u00E4\u00E4vad", -1, 12],
+ ["j\u00F5i", -1, 1],
+ ["j\u00F5id", 32, 1],
+ ["j\u00F5ime", 32, 1],
+ ["j\u00F5in", 32, 1],
+ ["j\u00F5ite", 32, 1],
+ ["keeb", -1, 4],
+ ["keed", -1, 4],
+ ["keedakse", 38, 4],
+ ["keeks", -1, 4],
+ ["keeksid", 40, 4],
+ ["keeksime", 40, 4],
+ ["keeksin", 40, 4],
+ ["keeksite", 40, 4],
+ ["keema", -1, 4],
+ ["keemata", 45, 4],
+ ["keeme", -1, 4],
+ ["keen", -1, 4],
+ ["kees", -1, 4],
+ ["keeta", -1, 4],
+ ["keete", -1, 4],
+ ["keevad", -1, 4],
+ ["k\u00E4ia", -1, 8],
+ ["k\u00E4iakse", 53, 8],
+ ["k\u00E4ib", -1, 8],
+ ["k\u00E4id", -1, 8],
+ ["k\u00E4idi", 56, 8],
+ ["k\u00E4iks", -1, 8],
+ ["k\u00E4iksid", 58, 8],
+ ["k\u00E4iksime", 58, 8],
+ ["k\u00E4iksin", 58, 8],
+ ["k\u00E4iksite", 58, 8],
+ ["k\u00E4ima", -1, 8],
+ ["k\u00E4imata", 63, 8],
+ ["k\u00E4ime", -1, 8],
+ ["k\u00E4in", -1, 8],
+ ["k\u00E4is", -1, 8],
+ ["k\u00E4ite", -1, 8],
+ ["k\u00E4ivad", -1, 8],
+ ["laob", -1, 16],
+ ["laod", -1, 16],
+ ["laoks", -1, 16],
+ ["laoksid", 72, 16],
+ ["laoksime", 72, 16],
+ ["laoksin", 72, 16],
+ ["laoksite", 72, 16],
+ ["laome", -1, 16],
+ ["laon", -1, 16],
+ ["laote", -1, 16],
+ ["laovad", -1, 16],
+ ["loeb", -1, 14],
+ ["loed", -1, 14],
+ ["loeks", -1, 14],
+ ["loeksid", 83, 14],
+ ["loeksime", 83, 14],
+ ["loeksin", 83, 14],
+ ["loeksite", 83, 14],
+ ["loeme", -1, 14],
+ ["loen", -1, 14],
+ ["loete", -1, 14],
+ ["loevad", -1, 14],
+ ["loob", -1, 7],
+ ["lood", -1, 7],
+ ["loodi", 93, 7],
+ ["looks", -1, 7],
+ ["looksid", 95, 7],
+ ["looksime", 95, 7],
+ ["looksin", 95, 7],
+ ["looksite", 95, 7],
+ ["looma", -1, 7],
+ ["loomata", 100, 7],
+ ["loome", -1, 7],
+ ["loon", -1, 7],
+ ["loote", -1, 7],
+ ["loovad", -1, 7],
+ ["luua", -1, 7],
+ ["luuakse", 106, 7],
+ ["l\u00F5i", -1, 6],
+ ["l\u00F5id", 108, 6],
+ ["l\u00F5ime", 108, 6],
+ ["l\u00F5in", 108, 6],
+ ["l\u00F5ite", 108, 6],
+ ["l\u00F6\u00F6b", -1, 5],
+ ["l\u00F6\u00F6d", -1, 5],
+ ["l\u00F6\u00F6dakse", 114, 5],
+ ["l\u00F6\u00F6di", 114, 5],
+ ["l\u00F6\u00F6ks", -1, 5],
+ ["l\u00F6\u00F6ksid", 117, 5],
+ ["l\u00F6\u00F6ksime", 117, 5],
+ ["l\u00F6\u00F6ksin", 117, 5],
+ ["l\u00F6\u00F6ksite", 117, 5],
+ ["l\u00F6\u00F6ma", -1, 5],
+ ["l\u00F6\u00F6mata", 122, 5],
+ ["l\u00F6\u00F6me", -1, 5],
+ ["l\u00F6\u00F6n", -1, 5],
+ ["l\u00F6\u00F6te", -1, 5],
+ ["l\u00F6\u00F6vad", -1, 5],
+ ["l\u00FC\u00FCa", -1, 5],
+ ["l\u00FC\u00FCakse", 128, 5],
+ ["m\u00FC\u00FCa", -1, 13],
+ ["m\u00FC\u00FCakse", 130, 13],
+ ["m\u00FC\u00FCb", -1, 13],
+ ["m\u00FC\u00FCd", -1, 13],
+ ["m\u00FC\u00FCdi", 133, 13],
+ ["m\u00FC\u00FCks", -1, 13],
+ ["m\u00FC\u00FCksid", 135, 13],
+ ["m\u00FC\u00FCksime", 135, 13],
+ ["m\u00FC\u00FCksin", 135, 13],
+ ["m\u00FC\u00FCksite", 135, 13],
+ ["m\u00FC\u00FCma", -1, 13],
+ ["m\u00FC\u00FCmata", 140, 13],
+ ["m\u00FC\u00FCme", -1, 13],
+ ["m\u00FC\u00FCn", -1, 13],
+ ["m\u00FC\u00FCs", -1, 13],
+ ["m\u00FC\u00FCte", -1, 13],
+ ["m\u00FC\u00FCvad", -1, 13],
+ ["n\u00E4eb", -1, 18],
+ ["n\u00E4ed", -1, 18],
+ ["n\u00E4eks", -1, 18],
+ ["n\u00E4eksid", 149, 18],
+ ["n\u00E4eksime", 149, 18],
+ ["n\u00E4eksin", 149, 18],
+ ["n\u00E4eksite", 149, 18],
+ ["n\u00E4eme", -1, 18],
+ ["n\u00E4en", -1, 18],
+ ["n\u00E4ete", -1, 18],
+ ["n\u00E4evad", -1, 18],
+ ["n\u00E4gema", -1, 18],
+ ["n\u00E4gemata", 158, 18],
+ ["n\u00E4ha", -1, 18],
+ ["n\u00E4hakse", 160, 18],
+ ["n\u00E4hti", -1, 18],
+ ["p\u00F5eb", -1, 15],
+ ["p\u00F5ed", -1, 15],
+ ["p\u00F5eks", -1, 15],
+ ["p\u00F5eksid", 165, 15],
+ ["p\u00F5eksime", 165, 15],
+ ["p\u00F5eksin", 165, 15],
+ ["p\u00F5eksite", 165, 15],
+ ["p\u00F5eme", -1, 15],
+ ["p\u00F5en", -1, 15],
+ ["p\u00F5ete", -1, 15],
+ ["p\u00F5evad", -1, 15],
+ ["saab", -1, 2],
+ ["saad", -1, 2],
+ ["saada", 175, 2],
+ ["saadakse", 176, 2],
+ ["saadi", 175, 2],
+ ["saaks", -1, 2],
+ ["saaksid", 179, 2],
+ ["saaksime", 179, 2],
+ ["saaksin", 179, 2],
+ ["saaksite", 179, 2],
+ ["saama", -1, 2],
+ ["saamata", 184, 2],
+ ["saame", -1, 2],
+ ["saan", -1, 2],
+ ["saate", -1, 2],
+ ["saavad", -1, 2],
+ ["sai", -1, 2],
+ ["said", 190, 2],
+ ["saime", 190, 2],
+ ["sain", 190, 2],
+ ["saite", 190, 2],
+ ["s\u00F5i", -1, 9],
+ ["s\u00F5id", 195, 9],
+ ["s\u00F5ime", 195, 9],
+ ["s\u00F5in", 195, 9],
+ ["s\u00F5ite", 195, 9],
+ ["s\u00F6\u00F6b", -1, 9],
+ ["s\u00F6\u00F6d", -1, 9],
+ ["s\u00F6\u00F6dakse", 201, 9],
+ ["s\u00F6\u00F6di", 201, 9],
+ ["s\u00F6\u00F6ks", -1, 9],
+ ["s\u00F6\u00F6ksid", 204, 9],
+ ["s\u00F6\u00F6ksime", 204, 9],
+ ["s\u00F6\u00F6ksin", 204, 9],
+ ["s\u00F6\u00F6ksite", 204, 9],
+ ["s\u00F6\u00F6ma", -1, 9],
+ ["s\u00F6\u00F6mata", 209, 9],
+ ["s\u00F6\u00F6me", -1, 9],
+ ["s\u00F6\u00F6n", -1, 9],
+ ["s\u00F6\u00F6te", -1, 9],
+ ["s\u00F6\u00F6vad", -1, 9],
+ ["s\u00FC\u00FCa", -1, 9],
+ ["s\u00FC\u00FCakse", 215, 9],
+ ["teeb", -1, 17],
+ ["teed", -1, 17],
+ ["teeks", -1, 17],
+ ["teeksid", 219, 17],
+ ["teeksime", 219, 17],
+ ["teeksin", 219, 17],
+ ["teeksite", 219, 17],
+ ["teeme", -1, 17],
+ ["teen", -1, 17],
+ ["teete", -1, 17],
+ ["teevad", -1, 17],
+ ["tegema", -1, 17],
+ ["tegemata", 228, 17],
+ ["teha", -1, 17],
+ ["tehakse", 230, 17],
+ ["tehti", -1, 17],
+ ["toob", -1, 10],
+ ["tood", -1, 10],
+ ["toodi", 234, 10],
+ ["tooks", -1, 10],
+ ["tooksid", 236, 10],
+ ["tooksime", 236, 10],
+ ["tooksin", 236, 10],
+ ["tooksite", 236, 10],
+ ["tooma", -1, 10],
+ ["toomata", 241, 10],
+ ["toome", -1, 10],
+ ["toon", -1, 10],
+ ["toote", -1, 10],
+ ["toovad", -1, 10],
+ ["tuua", -1, 10],
+ ["tuuakse", 247, 10],
+ ["t\u00F5i", -1, 10],
+ ["t\u00F5id", 249, 10],
+ ["t\u00F5ime", 249, 10],
+ ["t\u00F5in", 249, 10],
+ ["t\u00F5ite", 249, 10],
+ ["viia", -1, 3],
+ ["viiakse", 254, 3],
+ ["viib", -1, 3],
+ ["viid", -1, 3],
+ ["viidi", 257, 3],
+ ["viiks", -1, 3],
+ ["viiksid", 259, 3],
+ ["viiksime", 259, 3],
+ ["viiksin", 259, 3],
+ ["viiksite", 259, 3],
+ ["viima", -1, 3],
+ ["viimata", 264, 3],
+ ["viime", -1, 3],
+ ["viin", -1, 3],
+ ["viisime", -1, 3],
+ ["viisin", -1, 3],
+ ["viisite", -1, 3],
+ ["viite", -1, 3],
+ ["viivad", -1, 3],
+ ["v\u00F5ib", -1, 11],
+ ["v\u00F5id", -1, 11],
+ ["v\u00F5ida", 274, 11],
+ ["v\u00F5idakse", 275, 11],
+ ["v\u00F5idi", 274, 11],
+ ["v\u00F5iks", -1, 11],
+ ["v\u00F5iksid", 278, 11],
+ ["v\u00F5iksime", 278, 11],
+ ["v\u00F5iksin", 278, 11],
+ ["v\u00F5iksite", 278, 11],
+ ["v\u00F5ima", -1, 11],
+ ["v\u00F5imata", 283, 11],
+ ["v\u00F5ime", -1, 11],
+ ["v\u00F5in", -1, 11],
+ ["v\u00F5is", -1, 11],
+ ["v\u00F5ite", -1, 11],
+ ["v\u00F5ivad", -1, 11]
+ ];
+
+ /** @const */ var /** Array */ g_V1 = [17, 65, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 48, 8];
+
+ /** @const */ var /** Array */ g_RV = [17, 65, 16];
+
+ /** @const */ var /** Array */ g_KI = [117, 66, 6, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 0, 0, 0, 16];
+
+ /** @const */ var /** Array */ g_GI = [21, 123, 243, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 48, 8];
+
+ var /** number */ I_p1 = 0;
+
+
+ /** @return {boolean} */
+ function r_mark_regions() {
+ I_p1 = base.limit;
+ if (!base.go_out_grouping(g_V1, 97, 252))
+ {
+ return false;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_V1, 97, 252))
+ {
+ return false;
+ }
+ base.cursor++;
+ I_p1 = base.cursor;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_emphasis() {
+ var /** number */ among_var;
+ if (base.cursor < I_p1)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit_backward;
+ base.limit_backward = I_p1;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_0);
+ if (among_var == 0)
+ {
+ base.limit_backward = v_1;
+ return false;
+ }
+ base.bra = base.cursor;
+ base.limit_backward = v_1;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ {
+ /** @const */ var /** number */ c1 = base.cursor - 4;
+ if (c1 < base.limit_backward)
+ {
+ return false;
+ }
+ base.cursor = c1;
+ }
+ base.cursor = base.limit - v_2;
+ switch (among_var) {
+ case 1:
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ if (!(base.in_grouping_b(g_GI, 97, 252)))
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_3;
+ {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab0: {
+ if (!r_LONGV())
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_4;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!(base.in_grouping_b(g_KI, 98, 382)))
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_verb() {
+ var /** number */ among_var;
+ if (base.cursor < I_p1)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit_backward;
+ base.limit_backward = I_p1;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_1);
+ if (among_var == 0)
+ {
+ base.limit_backward = v_1;
+ return false;
+ }
+ base.bra = base.cursor;
+ base.limit_backward = v_1;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("a"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!(base.in_grouping_b(g_V1, 97, 252)))
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_LONGV() {
+ if (base.find_among_b(a_2) == 0)
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_i_plural() {
+ if (base.cursor < I_p1)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit_backward;
+ base.limit_backward = I_p1;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_3) == 0)
+ {
+ base.limit_backward = v_1;
+ return false;
+ }
+ base.bra = base.cursor;
+ base.limit_backward = v_1;
+ if (!(base.in_grouping_b(g_RV, 97, 117)))
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_special_noun_endings() {
+ var /** number */ among_var;
+ if (base.cursor < I_p1)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit_backward;
+ base.limit_backward = I_p1;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_4);
+ if (among_var == 0)
+ {
+ base.limit_backward = v_1;
+ return false;
+ }
+ base.bra = base.cursor;
+ base.limit_backward = v_1;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("lase"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("mise"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("lise"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_case_ending() {
+ var /** number */ among_var;
+ if (base.cursor < I_p1)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit_backward;
+ base.limit_backward = I_p1;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_5);
+ if (among_var == 0)
+ {
+ base.limit_backward = v_1;
+ return false;
+ }
+ base.bra = base.cursor;
+ base.limit_backward = v_1;
+ switch (among_var) {
+ case 1:
+ lab0: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.in_grouping_b(g_RV, 97, 117)))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_2;
+ if (!r_LONGV())
+ {
+ return false;
+ }
+ }
+ break;
+ case 2:
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ {
+ /** @const */ var /** number */ c1 = base.cursor - 4;
+ if (c1 < base.limit_backward)
+ {
+ return false;
+ }
+ base.cursor = c1;
+ }
+ base.cursor = base.limit - v_3;
+ break;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_plural_three_first_cases() {
+ var /** number */ among_var;
+ if (base.cursor < I_p1)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit_backward;
+ base.limit_backward = I_p1;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_7);
+ if (among_var == 0)
+ {
+ base.limit_backward = v_1;
+ return false;
+ }
+ base.bra = base.cursor;
+ base.limit_backward = v_1;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("iku"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab0: {
+ if (!r_LONGV())
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_2;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 3:
+ lab1: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab2: {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ {
+ /** @const */ var /** number */ c1 = base.cursor - 4;
+ if (c1 < base.limit_backward)
+ {
+ break lab2;
+ }
+ base.cursor = c1;
+ }
+ base.cursor = base.limit - v_4;
+ among_var = base.find_among_b(a_6);
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_3;
+ if (!base.slice_from("t"))
+ {
+ return false;
+ }
+ }
+ break;
+ case 4:
+ lab3: {
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab4: {
+ if (!(base.in_grouping_b(g_RV, 97, 117)))
+ {
+ break lab4;
+ }
+ break lab3;
+ }
+ base.cursor = base.limit - v_5;
+ if (!r_LONGV())
+ {
+ return false;
+ }
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_nu() {
+ if (base.cursor < I_p1)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit_backward;
+ base.limit_backward = I_p1;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_8) == 0)
+ {
+ base.limit_backward = v_1;
+ return false;
+ }
+ base.bra = base.cursor;
+ base.limit_backward = v_1;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_undouble_kpt() {
+ var /** number */ among_var;
+ if (!(base.in_grouping_b(g_V1, 97, 252)))
+ {
+ return false;
+ }
+ if (I_p1 > base.cursor)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_9);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("k"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("p"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("t"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_degrees() {
+ var /** number */ among_var;
+ if (base.cursor < I_p1)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit_backward;
+ base.limit_backward = I_p1;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_10);
+ if (among_var == 0)
+ {
+ base.limit_backward = v_1;
+ return false;
+ }
+ base.bra = base.cursor;
+ base.limit_backward = v_1;
+ switch (among_var) {
+ case 1:
+ if (!(base.in_grouping_b(g_RV, 97, 117)))
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_substantive() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ r_special_noun_endings();
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ r_case_ending();
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ r_plural_three_first_cases();
+ base.cursor = base.limit - v_3;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ r_degrees();
+ base.cursor = base.limit - v_4;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ r_i_plural();
+ base.cursor = base.limit - v_5;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ r_nu();
+ base.cursor = base.limit - v_6;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_verb_exceptions() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_11);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ if (base.cursor < base.limit)
+ {
+ return false;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("joo"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("saa"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("viima"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("keesi"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("l\u00F6\u00F6"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("l\u00F5i"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("loo"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("k\u00E4isi"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("s\u00F6\u00F6"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("too"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ if (!base.slice_from("v\u00F5isi"))
+ {
+ return false;
+ }
+ break;
+ case 12:
+ if (!base.slice_from("j\u00E4\u00E4ma"))
+ {
+ return false;
+ }
+ break;
+ case 13:
+ if (!base.slice_from("m\u00FC\u00FCsi"))
+ {
+ return false;
+ }
+ break;
+ case 14:
+ if (!base.slice_from("luge"))
+ {
+ return false;
+ }
+ break;
+ case 15:
+ if (!base.slice_from("p\u00F5de"))
+ {
+ return false;
+ }
+ break;
+ case 16:
+ if (!base.slice_from("ladu"))
+ {
+ return false;
+ }
+ break;
+ case 17:
+ if (!base.slice_from("tegi"))
+ {
+ return false;
+ }
+ break;
+ case 18:
+ if (!base.slice_from("n\u00E4gi"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ if (!r_verb_exceptions())
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = v_1;
+ }
+ /** @const */ var /** number */ v_2 = base.cursor;
+ r_mark_regions();
+ base.cursor = v_2;
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ r_emphasis();
+ base.cursor = base.limit - v_3;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab1: {
+ lab2: {
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab3: {
+ if (!r_verb())
+ {
+ break lab3;
+ }
+ break lab2;
+ }
+ base.cursor = base.limit - v_5;
+ r_substantive();
+ }
+ }
+ base.cursor = base.limit - v_4;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ r_undouble_kpt();
+ base.cursor = base.limit - v_6;
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/finnish-stemmer.js b/sphinx/search/non-minified-js/finnish-stemmer.js
index c907c4a70b4..07fd78516f6 100644
--- a/sphinx/search/non-minified-js/finnish-stemmer.js
+++ b/sphinx/search/non-minified-js/finnish-stemmer.js
@@ -1,8 +1,9 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from finnish.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-FinnishStemmer = function() {
+var FinnishStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
["pa", -1, 1],
["sti", -1, 2],
@@ -141,84 +142,34 @@ FinnishStemmer = function() {
function r_mark_regions() {
I_p1 = base.limit;
I_p2 = base.limit;
- golab0: while(true)
+ if (!base.go_out_grouping(g_V1, 97, 246))
{
- var /** number */ v_1 = base.cursor;
- lab1: {
- if (!(base.in_grouping(g_V1, 97, 246)))
- {
- break lab1;
- }
- base.cursor = v_1;
- break golab0;
- }
- base.cursor = v_1;
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
- golab2: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_V1, 97, 246))
{
- lab3: {
- if (!(base.out_grouping(g_V1, 97, 246)))
- {
- break lab3;
- }
- break golab2;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
+ base.cursor++;
I_p1 = base.cursor;
- golab4: while(true)
+ if (!base.go_out_grouping(g_V1, 97, 246))
{
- var /** number */ v_3 = base.cursor;
- lab5: {
- if (!(base.in_grouping(g_V1, 97, 246)))
- {
- break lab5;
- }
- base.cursor = v_3;
- break golab4;
- }
- base.cursor = v_3;
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
- golab6: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_V1, 97, 246))
{
- lab7: {
- if (!(base.out_grouping(g_V1, 97, 246)))
- {
- break lab7;
- }
- break golab6;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
+ base.cursor++;
I_p2 = base.cursor;
return true;
};
/** @return {boolean} */
function r_R2() {
- if (!(I_p2 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p2 <= base.cursor;
};
/** @return {boolean} */
@@ -228,17 +179,17 @@ FinnishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
among_var = base.find_among_b(a_0);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
switch (among_var) {
case 1:
if (!(base.in_grouping_b(g_particle_end, 97, 246)))
@@ -267,21 +218,21 @@ FinnishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
among_var = base.find_among_b(a_4);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
switch (among_var) {
case 1:
{
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab0: {
if (!(base.eq_s_b("k")))
{
@@ -289,7 +240,7 @@ FinnishStemmer = function() {
}
return false;
}
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
}
if (!base.slice_del())
{
@@ -381,17 +332,17 @@ FinnishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
among_var = base.find_among_b(a_6);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
switch (among_var) {
case 1:
if (!(base.eq_s_b("a")))
@@ -430,11 +381,11 @@ FinnishStemmer = function() {
}
break;
case 7:
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab0: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab1: {
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab2: {
if (!r_LONG())
{
@@ -442,17 +393,17 @@ FinnishStemmer = function() {
}
break lab1;
}
- base.cursor = base.limit - v_5;
+ base.cursor = base.limit - v_4;
if (!(base.eq_s_b("ie")))
{
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
break lab0;
}
}
- base.cursor = base.limit - v_4;
+ base.cursor = base.limit - v_3;
if (base.cursor <= base.limit_backward)
{
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
break lab0;
}
base.cursor--;
@@ -485,21 +436,21 @@ FinnishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p2;
base.ket = base.cursor;
among_var = base.find_among_b(a_7);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
switch (among_var) {
case 1:
{
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab0: {
if (!(base.eq_s_b("po")))
{
@@ -507,7 +458,7 @@ FinnishStemmer = function() {
}
return false;
}
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
}
break;
}
@@ -524,16 +475,16 @@ FinnishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
if (base.find_among_b(a_8) == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
if (!base.slice_del())
{
return false;
@@ -548,46 +499,46 @@ FinnishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
if (!(base.eq_s_b("t")))
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
if (!(base.in_grouping_b(g_V1, 97, 246)))
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
if (!base.slice_del())
{
return false;
}
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
if (base.cursor < I_p2)
{
return false;
}
- var /** number */ v_5 = base.limit_backward;
+ /** @const */ var /** number */ v_3 = base.limit_backward;
base.limit_backward = I_p2;
base.ket = base.cursor;
among_var = base.find_among_b(a_9);
if (among_var == 0)
{
- base.limit_backward = v_5;
+ base.limit_backward = v_3;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_5;
+ base.limit_backward = v_3;
switch (among_var) {
case 1:
{
- var /** number */ v_6 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab0: {
if (!(base.eq_s_b("po")))
{
@@ -595,7 +546,7 @@ FinnishStemmer = function() {
}
return false;
}
- base.cursor = base.limit - v_6;
+ base.cursor = base.limit - v_4;
}
break;
}
@@ -612,16 +563,16 @@ FinnishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab0: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
if (!r_LONG())
{
break lab0;
}
- base.cursor = base.limit - v_4;
+ base.cursor = base.limit - v_3;
base.ket = base.cursor;
if (base.cursor <= base.limit_backward)
{
@@ -634,8 +585,8 @@ FinnishStemmer = function() {
return false;
}
}
- base.cursor = base.limit - v_3;
- var /** number */ v_5 = base.limit - base.cursor;
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab1: {
base.ket = base.cursor;
if (!(base.in_grouping_b(g_AEI, 97, 228)))
@@ -652,8 +603,8 @@ FinnishStemmer = function() {
return false;
}
}
- base.cursor = base.limit - v_5;
- var /** number */ v_6 = base.limit - base.cursor;
+ base.cursor = base.limit - v_4;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab2: {
base.ket = base.cursor;
if (!(base.eq_s_b("j")))
@@ -662,7 +613,7 @@ FinnishStemmer = function() {
}
base.bra = base.cursor;
lab3: {
- var /** number */ v_7 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
lab4: {
if (!(base.eq_s_b("o")))
{
@@ -670,7 +621,7 @@ FinnishStemmer = function() {
}
break lab3;
}
- base.cursor = base.limit - v_7;
+ base.cursor = base.limit - v_6;
if (!(base.eq_s_b("u")))
{
break lab2;
@@ -681,8 +632,8 @@ FinnishStemmer = function() {
return false;
}
}
- base.cursor = base.limit - v_6;
- var /** number */ v_8 = base.limit - base.cursor;
+ base.cursor = base.limit - v_5;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
lab5: {
base.ket = base.cursor;
if (!(base.eq_s_b("o")))
@@ -699,25 +650,11 @@ FinnishStemmer = function() {
return false;
}
}
- base.cursor = base.limit - v_8;
- base.limit_backward = v_2;
- golab6: while(true)
+ base.cursor = base.limit - v_7;
+ base.limit_backward = v_1;
+ if (!base.go_in_grouping_b(g_V1, 97, 246))
{
- var /** number */ v_9 = base.limit - base.cursor;
- lab7: {
- if (!(base.out_grouping_b(g_V1, 97, 246)))
- {
- break lab7;
- }
- base.cursor = base.limit - v_9;
- break golab6;
- }
- base.cursor = base.limit - v_9;
- if (base.cursor <= base.limit_backward)
- {
- return false;
- }
- base.cursor--;
+ return false;
}
base.ket = base.cursor;
if (!(base.in_grouping_b(g_C, 98, 122)))
@@ -742,21 +679,21 @@ FinnishStemmer = function() {
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
r_mark_regions();
base.cursor = v_1;
B_ending_removed = false;
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
r_particle_etc();
base.cursor = base.limit - v_2;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
r_possessive();
base.cursor = base.limit - v_3;
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
r_case_ending();
base.cursor = base.limit - v_4;
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
r_other_endings();
base.cursor = base.limit - v_5;
lab0: {
@@ -765,18 +702,18 @@ FinnishStemmer = function() {
{
break lab1;
}
- var /** number */ v_7 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
r_i_plural();
- base.cursor = base.limit - v_7;
+ base.cursor = base.limit - v_6;
break lab0;
}
- var /** number */ v_8 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
r_t_plural();
- base.cursor = base.limit - v_8;
+ base.cursor = base.limit - v_7;
}
- var /** number */ v_9 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
r_tidy();
- base.cursor = base.limit - v_9;
+ base.cursor = base.limit - v_8;
base.cursor = base.limit_backward;
return true;
};
diff --git a/sphinx/search/non-minified-js/french-stemmer.js b/sphinx/search/non-minified-js/french-stemmer.js
index c9708312a8f..0e7b0655494 100644
--- a/sphinx/search/non-minified-js/french-stemmer.js
+++ b/sphinx/search/non-minified-js/french-stemmer.js
@@ -1,10 +1,12 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from french.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-FrenchStemmer = function() {
+var FrenchStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
["col", -1, -1],
+ ["ni", -1, 1],
["par", -1, -1],
["tap", -1, -1]
];
@@ -42,7 +44,7 @@ FrenchStemmer = function() {
["logie", -1, 3],
["able", -1, 1],
["isme", -1, 1],
- ["euse", -1, 11],
+ ["euse", -1, 12],
["iste", -1, 1],
["ive", -1, 8],
["if", -1, 8],
@@ -57,7 +59,7 @@ FrenchStemmer = function() {
["logies", -1, 3],
["ables", -1, 1],
["ismes", -1, 1],
- ["euses", -1, 11],
+ ["euses", -1, 12],
["istes", -1, 1],
["ives", -1, 8],
["ifs", -1, 8],
@@ -65,18 +67,19 @@ FrenchStemmer = function() {
["ations", -1, 2],
["utions", -1, 4],
["ateurs", -1, 2],
- ["ments", -1, 15],
+ ["ments", -1, 16],
["ements", 30, 6],
- ["issements", 31, 12],
+ ["issements", 31, 13],
["it\u00E9s", -1, 7],
- ["ment", -1, 15],
+ ["ment", -1, 16],
["ement", 34, 6],
- ["issement", 35, 12],
- ["amment", 34, 13],
- ["emment", 34, 14],
+ ["issement", 35, 13],
+ ["amment", 34, 14],
+ ["emment", 34, 15],
["aux", -1, 10],
["eaux", 39, 9],
["eux", -1, 1],
+ ["oux", -1, 11],
["it\u00E9", -1, 7]
];
@@ -119,47 +122,56 @@ FrenchStemmer = function() {
];
/** @const */ var a_6 = [
+ ["al", -1, 1],
+ ["\u00E9pl", -1, -1],
+ ["auv", -1, -1]
+ ];
+
+ /** @const */ var a_7 = [
["a", -1, 3],
["era", 0, 2],
+ ["aise", -1, 4],
["asse", -1, 3],
["ante", -1, 3],
["\u00E9e", -1, 2],
["ai", -1, 3],
- ["erai", 5, 2],
+ ["erai", 6, 2],
["er", -1, 2],
["as", -1, 3],
- ["eras", 8, 2],
+ ["eras", 9, 2],
["\u00E2mes", -1, 3],
+ ["aises", -1, 4],
["asses", -1, 3],
["antes", -1, 3],
["\u00E2tes", -1, 3],
["\u00E9es", -1, 2],
- ["ais", -1, 3],
- ["erais", 15, 2],
+ ["ais", -1, 4],
+ ["eais", 17, 2],
+ ["erais", 17, 2],
["ions", -1, 1],
- ["erions", 17, 2],
- ["assions", 17, 3],
+ ["erions", 20, 2],
+ ["assions", 20, 3],
["erons", -1, 2],
["ants", -1, 3],
["\u00E9s", -1, 2],
["ait", -1, 3],
- ["erait", 23, 2],
+ ["erait", 26, 2],
["ant", -1, 3],
["aIent", -1, 3],
- ["eraIent", 26, 2],
+ ["eraIent", 29, 2],
["\u00E8rent", -1, 2],
["assent", -1, 3],
["eront", -1, 2],
["\u00E2t", -1, 3],
["ez", -1, 2],
- ["iez", 32, 2],
- ["eriez", 33, 2],
- ["assiez", 33, 3],
- ["erez", 32, 2],
+ ["iez", 35, 2],
+ ["eriez", 36, 2],
+ ["assiez", 36, 3],
+ ["erez", 35, 2],
["\u00E9", -1, 2]
];
- /** @const */ var a_7 = [
+ /** @const */ var a_8 = [
["e", -1, 3],
["I\u00E8re", 0, 2],
["i\u00E8re", 0, 2],
@@ -168,7 +180,7 @@ FrenchStemmer = function() {
["ier", -1, 2]
];
- /** @const */ var a_8 = [
+ /** @const */ var a_9 = [
["ell", -1, -1],
["eill", -1, -1],
["enn", -1, -1],
@@ -178,6 +190,10 @@ FrenchStemmer = function() {
/** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 130, 103, 8, 5];
+ /** @const */ var /** Array */ g_oux_ending = [65, 85];
+
+ /** @const */ var /** Array */ g_elision_char = [131, 14, 3];
+
/** @const */ var /** Array */ g_keep_with_s = [1, 65, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128];
var /** number */ I_p2 = 0;
@@ -185,18 +201,55 @@ FrenchStemmer = function() {
var /** number */ I_pV = 0;
+ /** @return {boolean} */
+ function r_elisions() {
+ base.bra = base.cursor;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab1: {
+ if (!(base.in_grouping(g_elision_char, 99, 116)))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = v_1;
+ if (!(base.eq_s("qu")))
+ {
+ return false;
+ }
+ }
+ if (!(base.eq_s("'")))
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ lab2: {
+ if (base.cursor < base.limit)
+ {
+ break lab2;
+ }
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
/** @return {boolean} */
function r_prelude() {
while(true)
{
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
golab1: while(true)
{
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab2: {
lab3: {
- var /** number */ v_3 = base.cursor;
+ /** @const */ var /** number */ v_3 = base.cursor;
lab4: {
if (!(base.in_grouping(g_v, 97, 251)))
{
@@ -204,7 +257,7 @@ FrenchStemmer = function() {
}
base.bra = base.cursor;
lab5: {
- var /** number */ v_4 = base.cursor;
+ /** @const */ var /** number */ v_4 = base.cursor;
lab6: {
if (!(base.eq_s("u")))
{
@@ -333,13 +386,14 @@ FrenchStemmer = function() {
/** @return {boolean} */
function r_mark_regions() {
+ var /** number */ among_var;
I_pV = base.limit;
I_p1 = base.limit;
I_p2 = base.limit;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
lab1: {
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab2: {
if (!(base.in_grouping(g_v, 97, 251)))
{
@@ -358,10 +412,19 @@ FrenchStemmer = function() {
}
base.cursor = v_2;
lab3: {
- if (base.find_among(a_0) == 0)
+ among_var = base.find_among(a_0);
+ if (among_var == 0)
{
break lab3;
}
+ switch (among_var) {
+ case 1:
+ if (!(base.in_grouping(g_v, 97, 251)))
+ {
+ break lab3;
+ }
+ break;
+ }
break lab1;
}
base.cursor = v_2;
@@ -370,91 +433,41 @@ FrenchStemmer = function() {
break lab0;
}
base.cursor++;
- golab4: while(true)
+ if (!base.go_out_grouping(g_v, 97, 251))
{
- lab5: {
- if (!(base.in_grouping(g_v, 97, 251)))
- {
- break lab5;
- }
- break golab4;
- }
- if (base.cursor >= base.limit)
- {
- break lab0;
- }
- base.cursor++;
+ break lab0;
}
+ base.cursor++;
}
I_pV = base.cursor;
}
base.cursor = v_1;
- var /** number */ v_4 = base.cursor;
- lab6: {
- golab7: while(true)
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab4: {
+ if (!base.go_out_grouping(g_v, 97, 251))
{
- lab8: {
- if (!(base.in_grouping(g_v, 97, 251)))
- {
- break lab8;
- }
- break golab7;
- }
- if (base.cursor >= base.limit)
- {
- break lab6;
- }
- base.cursor++;
+ break lab4;
}
- golab9: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 251))
{
- lab10: {
- if (!(base.out_grouping(g_v, 97, 251)))
- {
- break lab10;
- }
- break golab9;
- }
- if (base.cursor >= base.limit)
- {
- break lab6;
- }
- base.cursor++;
+ break lab4;
}
+ base.cursor++;
I_p1 = base.cursor;
- golab11: while(true)
+ if (!base.go_out_grouping(g_v, 97, 251))
{
- lab12: {
- if (!(base.in_grouping(g_v, 97, 251)))
- {
- break lab12;
- }
- break golab11;
- }
- if (base.cursor >= base.limit)
- {
- break lab6;
- }
- base.cursor++;
+ break lab4;
}
- golab13: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 251))
{
- lab14: {
- if (!(base.out_grouping(g_v, 97, 251)))
- {
- break lab14;
- }
- break golab13;
- }
- if (base.cursor >= base.limit)
- {
- break lab6;
- }
- base.cursor++;
+ break lab4;
}
+ base.cursor++;
I_p2 = base.cursor;
}
- base.cursor = v_4;
+ base.cursor = v_3;
return true;
};
@@ -463,14 +476,10 @@ FrenchStemmer = function() {
var /** number */ among_var;
while(true)
{
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
base.bra = base.cursor;
among_var = base.find_among(a_1);
- if (among_var == 0)
- {
- break lab0;
- }
base.ket = base.cursor;
switch (among_var) {
case 1:
@@ -527,29 +536,17 @@ FrenchStemmer = function() {
/** @return {boolean} */
function r_RV() {
- if (!(I_pV <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_pV <= base.cursor;
};
/** @return {boolean} */
function r_R1() {
- if (!(I_p1 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p1 <= base.cursor;
};
/** @return {boolean} */
function r_R2() {
- if (!(I_p2 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p2 <= base.cursor;
};
/** @return {boolean} */
@@ -582,7 +579,7 @@ FrenchStemmer = function() {
{
return false;
}
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
base.ket = base.cursor;
if (!(base.eq_s_b("ic")))
@@ -592,7 +589,7 @@ FrenchStemmer = function() {
}
base.bra = base.cursor;
lab1: {
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab2: {
if (!r_R2())
{
@@ -651,7 +648,7 @@ FrenchStemmer = function() {
{
return false;
}
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab3: {
base.ket = base.cursor;
among_var = base.find_among_b(a_2);
@@ -691,7 +688,7 @@ FrenchStemmer = function() {
break;
case 2:
lab4: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab5: {
if (!r_R2())
{
@@ -749,7 +746,7 @@ FrenchStemmer = function() {
{
return false;
}
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab6: {
base.ket = base.cursor;
among_var = base.find_among_b(a_3);
@@ -762,7 +759,7 @@ FrenchStemmer = function() {
switch (among_var) {
case 1:
lab7: {
- var /** number */ v_6 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
lab8: {
if (!r_R2())
{
@@ -783,7 +780,7 @@ FrenchStemmer = function() {
break;
case 2:
lab9: {
- var /** number */ v_7 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
lab10: {
if (!r_R2())
{
@@ -825,7 +822,7 @@ FrenchStemmer = function() {
{
return false;
}
- var /** number */ v_8 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
lab11: {
base.ket = base.cursor;
if (!(base.eq_s_b("at")))
@@ -851,7 +848,7 @@ FrenchStemmer = function() {
}
base.bra = base.cursor;
lab12: {
- var /** number */ v_9 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
lab13: {
if (!r_R2())
{
@@ -888,8 +885,18 @@ FrenchStemmer = function() {
}
break;
case 11:
+ if (!(base.in_grouping_b(g_oux_ending, 98, 112)))
+ {
+ return false;
+ }
+ if (!base.slice_from("ou"))
+ {
+ return false;
+ }
+ break;
+ case 12:
lab14: {
- var /** number */ v_10 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
lab15: {
if (!r_R2())
{
@@ -912,7 +919,7 @@ FrenchStemmer = function() {
}
}
break;
- case 12:
+ case 13:
if (!r_R1())
{
return false;
@@ -926,7 +933,7 @@ FrenchStemmer = function() {
return false;
}
break;
- case 13:
+ case 14:
if (!r_RV())
{
return false;
@@ -936,7 +943,7 @@ FrenchStemmer = function() {
return false;
}
return false;
- case 14:
+ case 15:
if (!r_RV())
{
return false;
@@ -946,8 +953,8 @@ FrenchStemmer = function() {
return false;
}
return false;
- case 15:
- var /** number */ v_11 = base.limit - base.cursor;
+ case 16:
+ /** @const */ var /** number */ v_11 = base.limit - base.cursor;
if (!(base.in_grouping_b(g_v, 97, 251)))
{
return false;
@@ -972,37 +979,37 @@ FrenchStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_pV;
base.ket = base.cursor;
if (base.find_among_b(a_5) == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
{
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab0: {
if (!(base.eq_s_b("H")))
{
break lab0;
}
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
}
if (!(base.out_grouping_b(g_v, 97, 251)))
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
if (!base.slice_del())
{
return false;
}
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return true;
};
@@ -1013,21 +1020,21 @@ FrenchStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_pV;
base.ket = base.cursor;
- among_var = base.find_among_b(a_6);
+ among_var = base.find_among_b(a_7);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
+ base.limit_backward = v_1;
switch (among_var) {
case 1:
if (!r_R2())
{
- base.limit_backward = v_2;
return false;
}
if (!base.slice_del())
@@ -1042,34 +1049,64 @@ FrenchStemmer = function() {
}
break;
case 3:
- if (!base.slice_del())
- {
- return false;
- }
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab0: {
- base.ket = base.cursor;
if (!(base.eq_s_b("e")))
{
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
break lab0;
}
- base.bra = base.cursor;
- if (!base.slice_del())
+ if (!r_RV())
{
+ base.cursor = base.limit - v_2;
+ break lab0;
+ }
+ base.bra = base.cursor;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 4:
+ {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab1: {
+ among_var = base.find_among_b(a_6);
+ if (among_var == 0)
+ {
+ break lab1;
+ }
+ switch (among_var) {
+ case 1:
+ if (base.cursor <= base.limit_backward)
+ {
+ break lab1;
+ }
+ base.cursor--;
+ if (base.cursor > base.limit_backward)
+ {
+ break lab1;
+ }
+ break;
+ }
return false;
}
+ base.cursor = base.limit - v_3;
+ }
+ if (!base.slice_del())
+ {
+ return false;
}
break;
}
- base.limit_backward = v_2;
return true;
};
/** @return {boolean} */
function r_residual_suffix() {
var /** number */ among_var;
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
base.ket = base.cursor;
if (!(base.eq_s_b("s")))
@@ -1078,9 +1115,9 @@ FrenchStemmer = function() {
break lab0;
}
base.bra = base.cursor;
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab1: {
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab2: {
if (!(base.eq_s_b("Hi")))
{
@@ -1105,13 +1142,13 @@ FrenchStemmer = function() {
{
return false;
}
- var /** number */ v_5 = base.limit_backward;
+ /** @const */ var /** number */ v_4 = base.limit_backward;
base.limit_backward = I_pV;
base.ket = base.cursor;
- among_var = base.find_among_b(a_7);
+ among_var = base.find_among_b(a_8);
if (among_var == 0)
{
- base.limit_backward = v_5;
+ base.limit_backward = v_4;
return false;
}
base.bra = base.cursor;
@@ -1119,11 +1156,11 @@ FrenchStemmer = function() {
case 1:
if (!r_R2())
{
- base.limit_backward = v_5;
+ base.limit_backward = v_4;
return false;
}
lab3: {
- var /** number */ v_6 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab4: {
if (!(base.eq_s_b("s")))
{
@@ -1131,10 +1168,10 @@ FrenchStemmer = function() {
}
break lab3;
}
- base.cursor = base.limit - v_6;
+ base.cursor = base.limit - v_5;
if (!(base.eq_s_b("t")))
{
- base.limit_backward = v_5;
+ base.limit_backward = v_4;
return false;
}
}
@@ -1156,14 +1193,14 @@ FrenchStemmer = function() {
}
break;
}
- base.limit_backward = v_5;
+ base.limit_backward = v_4;
return true;
};
/** @return {boolean} */
function r_un_double() {
- var /** number */ v_1 = base.limit - base.cursor;
- if (base.find_among_b(a_8) == 0)
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ if (base.find_among_b(a_9) == 0)
{
return false;
}
@@ -1205,7 +1242,7 @@ FrenchStemmer = function() {
}
base.ket = base.cursor;
lab1: {
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab2: {
if (!(base.eq_s_b("\u00E9")))
{
@@ -1213,7 +1250,7 @@ FrenchStemmer = function() {
}
break lab1;
}
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
if (!(base.eq_s_b("\u00E8")))
{
return false;
@@ -1228,19 +1265,22 @@ FrenchStemmer = function() {
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
- r_prelude();
+ /** @const */ var /** number */ v_1 = base.cursor;
+ r_elisions();
base.cursor = v_1;
+ /** @const */ var /** number */ v_2 = base.cursor;
+ r_prelude();
+ base.cursor = v_2;
r_mark_regions();
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab0: {
lab1: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab2: {
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab3: {
- var /** number */ v_6 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
lab4: {
if (!r_standard_suffix())
{
@@ -1263,11 +1303,11 @@ FrenchStemmer = function() {
}
}
base.cursor = base.limit - v_5;
- var /** number */ v_7 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
lab6: {
base.ket = base.cursor;
lab7: {
- var /** number */ v_8 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
lab8: {
if (!(base.eq_s_b("Y")))
{
@@ -1303,14 +1343,14 @@ FrenchStemmer = function() {
}
}
base.cursor = base.limit - v_3;
- var /** number */ v_9 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
r_un_double();
base.cursor = base.limit - v_9;
- var /** number */ v_10 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
r_un_accent();
base.cursor = base.limit - v_10;
base.cursor = base.limit_backward;
- var /** number */ v_11 = base.cursor;
+ /** @const */ var /** number */ v_11 = base.cursor;
r_postlude();
base.cursor = v_11;
return true;
diff --git a/sphinx/search/non-minified-js/german-stemmer.js b/sphinx/search/non-minified-js/german-stemmer.js
index f5ff81bc9d7..007a8668575 100644
--- a/sphinx/search/non-minified-js/german-stemmer.js
+++ b/sphinx/search/non-minified-js/german-stemmer.js
@@ -1,9 +1,19 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from german.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-GermanStemmer = function() {
+var GermanStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
+ ["", -1, 5],
+ ["ae", 0, 2],
+ ["oe", 0, 3],
+ ["qu", 0, -1],
+ ["ue", 0, 4],
+ ["\u00DF", 0, 1]
+ ];
+
+ /** @const */ var a_1 = [
["", -1, 5],
["U", 0, 2],
["Y", 0, 1],
@@ -12,29 +22,42 @@ GermanStemmer = function() {
["\u00FC", 0, 2]
];
- /** @const */ var a_1 = [
- ["e", -1, 2],
+ /** @const */ var a_2 = [
+ ["e", -1, 3],
["em", -1, 1],
- ["en", -1, 2],
- ["ern", -1, 1],
- ["er", -1, 1],
- ["s", -1, 3],
- ["es", 5, 2]
+ ["en", -1, 3],
+ ["erinnen", 2, 2],
+ ["erin", -1, 2],
+ ["ln", -1, 5],
+ ["ern", -1, 2],
+ ["er", -1, 2],
+ ["s", -1, 4],
+ ["es", 8, 3],
+ ["lns", 8, 5]
];
- /** @const */ var a_2 = [
+ /** @const */ var a_3 = [
+ ["tick", -1, -1],
+ ["plan", -1, -1],
+ ["geordn", -1, -1],
+ ["intern", -1, -1],
+ ["tr", -1, -1]
+ ];
+
+ /** @const */ var a_4 = [
["en", -1, 1],
["er", -1, 1],
+ ["et", -1, 3],
["st", -1, 2],
- ["est", 2, 1]
+ ["est", 3, 1]
];
- /** @const */ var a_3 = [
+ /** @const */ var a_5 = [
["ig", -1, 1],
["lich", -1, 1]
];
- /** @const */ var a_4 = [
+ /** @const */ var a_6 = [
["end", -1, 1],
["ig", -1, 2],
["ung", -1, 1],
@@ -47,6 +70,8 @@ GermanStemmer = function() {
/** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 32, 8];
+ /** @const */ var /** Array */ g_et_ending = [1, 128, 198, 227, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128];
+
/** @const */ var /** Array */ g_s_ending = [117, 30, 5];
/** @const */ var /** Array */ g_st_ending = [117, 30, 4];
@@ -58,98 +83,113 @@ GermanStemmer = function() {
/** @return {boolean} */
function r_prelude() {
- var /** number */ v_1 = base.cursor;
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.cursor;
while(true)
{
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab0: {
- lab1: {
- var /** number */ v_3 = base.cursor;
- lab2: {
- base.bra = base.cursor;
- if (!(base.eq_s("\u00DF")))
- {
- break lab2;
- }
- base.ket = base.cursor;
- if (!base.slice_from("ss"))
- {
- return false;
- }
- break lab1;
- }
- base.cursor = v_3;
- if (base.cursor >= base.limit)
- {
- break lab0;
- }
- base.cursor++;
- }
- continue;
- }
- base.cursor = v_2;
- break;
- }
- base.cursor = v_1;
- while(true)
- {
- var /** number */ v_4 = base.cursor;
- lab3: {
- golab4: while(true)
+ golab1: while(true)
{
- var /** number */ v_5 = base.cursor;
- lab5: {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab2: {
if (!(base.in_grouping(g_v, 97, 252)))
{
- break lab5;
+ break lab2;
}
base.bra = base.cursor;
- lab6: {
- var /** number */ v_6 = base.cursor;
- lab7: {
+ lab3: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab4: {
if (!(base.eq_s("u")))
{
- break lab7;
+ break lab4;
}
base.ket = base.cursor;
if (!(base.in_grouping(g_v, 97, 252)))
{
- break lab7;
+ break lab4;
}
if (!base.slice_from("U"))
{
return false;
}
- break lab6;
+ break lab3;
}
- base.cursor = v_6;
+ base.cursor = v_4;
if (!(base.eq_s("y")))
{
- break lab5;
+ break lab2;
}
base.ket = base.cursor;
if (!(base.in_grouping(g_v, 97, 252)))
{
- break lab5;
+ break lab2;
}
if (!base.slice_from("Y"))
{
return false;
}
}
- base.cursor = v_5;
- break golab4;
+ base.cursor = v_3;
+ break golab1;
}
- base.cursor = v_5;
+ base.cursor = v_3;
if (base.cursor >= base.limit)
{
- break lab3;
+ break lab0;
}
base.cursor++;
}
continue;
}
- base.cursor = v_4;
+ base.cursor = v_2;
+ break;
+ }
+ base.cursor = v_1;
+ while(true)
+ {
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab5: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_0);
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("ss"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u00E4"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u00F6"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("\u00FC"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (base.cursor >= base.limit)
+ {
+ break lab5;
+ }
+ base.cursor++;
+ break;
+ }
+ continue;
+ }
+ base.cursor = v_5;
break;
}
return true;
@@ -159,9 +199,9 @@ GermanStemmer = function() {
function r_mark_regions() {
I_p1 = base.limit;
I_p2 = base.limit;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
{
- var /** number */ c1 = base.cursor + 3;
+ /** @const */ var /** number */ c1 = base.cursor + 3;
if (c1 > base.limit)
{
return false;
@@ -170,74 +210,34 @@ GermanStemmer = function() {
}
I_x = base.cursor;
base.cursor = v_1;
- golab0: while(true)
+ if (!base.go_out_grouping(g_v, 97, 252))
{
- lab1: {
- if (!(base.in_grouping(g_v, 97, 252)))
- {
- break lab1;
- }
- break golab0;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
- golab2: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 252))
{
- lab3: {
- if (!(base.out_grouping(g_v, 97, 252)))
- {
- break lab3;
- }
- break golab2;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
+ base.cursor++;
I_p1 = base.cursor;
- lab4: {
- if (!(I_p1 < I_x))
+ lab0: {
+ if (I_p1 >= I_x)
{
- break lab4;
+ break lab0;
}
I_p1 = I_x;
}
- golab5: while(true)
+ if (!base.go_out_grouping(g_v, 97, 252))
{
- lab6: {
- if (!(base.in_grouping(g_v, 97, 252)))
- {
- break lab6;
- }
- break golab5;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
- golab7: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 252))
{
- lab8: {
- if (!(base.out_grouping(g_v, 97, 252)))
- {
- break lab8;
- }
- break golab7;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
+ base.cursor++;
I_p2 = base.cursor;
return true;
};
@@ -247,14 +247,10 @@ GermanStemmer = function() {
var /** number */ among_var;
while(true)
{
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
base.bra = base.cursor;
- among_var = base.find_among(a_0);
- if (among_var == 0)
- {
- break lab0;
- }
+ among_var = base.find_among(a_1);
base.ket = base.cursor;
switch (among_var) {
case 1:
@@ -299,29 +295,21 @@ GermanStemmer = function() {
/** @return {boolean} */
function r_R1() {
- if (!(I_p1 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p1 <= base.cursor;
};
/** @return {boolean} */
function r_R2() {
- if (!(I_p2 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p2 <= base.cursor;
};
/** @return {boolean} */
function r_standard_suffix() {
var /** number */ among_var;
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
base.ket = base.cursor;
- among_var = base.find_among_b(a_1);
+ among_var = base.find_among_b(a_2);
if (among_var == 0)
{
break lab0;
@@ -333,6 +321,17 @@ GermanStemmer = function() {
}
switch (among_var) {
case 1:
+ {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("syst")))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_2;
+ }
if (!base.slice_del())
{
return false;
@@ -343,19 +342,25 @@ GermanStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit - base.cursor;
- lab1: {
+ break;
+ case 3:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab2: {
base.ket = base.cursor;
if (!(base.eq_s_b("s")))
{
- base.cursor = base.limit - v_2;
- break lab1;
+ base.cursor = base.limit - v_3;
+ break lab2;
}
base.bra = base.cursor;
if (!(base.eq_s_b("nis")))
{
- base.cursor = base.limit - v_2;
- break lab1;
+ base.cursor = base.limit - v_3;
+ break lab2;
}
if (!base.slice_del())
{
@@ -363,7 +368,7 @@ GermanStemmer = function() {
}
}
break;
- case 3:
+ case 4:
if (!(base.in_grouping_b(g_s_ending, 98, 116)))
{
break lab0;
@@ -373,21 +378,27 @@ GermanStemmer = function() {
return false;
}
break;
+ case 5:
+ if (!base.slice_from("l"))
+ {
+ return false;
+ }
+ break;
}
}
base.cursor = base.limit - v_1;
- var /** number */ v_3 = base.limit - base.cursor;
- lab2: {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab3: {
base.ket = base.cursor;
- among_var = base.find_among_b(a_2);
+ among_var = base.find_among_b(a_4);
if (among_var == 0)
{
- break lab2;
+ break lab3;
}
base.bra = base.cursor;
if (!r_R1())
{
- break lab2;
+ break lab3;
}
switch (among_var) {
case 1:
@@ -399,13 +410,13 @@ GermanStemmer = function() {
case 2:
if (!(base.in_grouping_b(g_st_ending, 98, 116)))
{
- break lab2;
+ break lab3;
}
{
- var /** number */ c1 = base.cursor - 3;
+ /** @const */ var /** number */ c1 = base.cursor - 3;
if (c1 < base.limit_backward)
{
- break lab2;
+ break lab3;
}
base.cursor = c1;
}
@@ -414,21 +425,44 @@ GermanStemmer = function() {
return false;
}
break;
+ case 3:
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ if (!(base.in_grouping_b(g_et_ending, 85, 228)))
+ {
+ break lab3;
+ }
+ base.cursor = base.limit - v_5;
+ {
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ lab4: {
+ if (base.find_among_b(a_3) == 0)
+ {
+ break lab4;
+ }
+ break lab3;
+ }
+ base.cursor = base.limit - v_6;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
}
}
- base.cursor = base.limit - v_3;
- var /** number */ v_4 = base.limit - base.cursor;
- lab3: {
+ base.cursor = base.limit - v_4;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab5: {
base.ket = base.cursor;
- among_var = base.find_among_b(a_4);
+ among_var = base.find_among_b(a_6);
if (among_var == 0)
{
- break lab3;
+ break lab5;
}
base.bra = base.cursor;
if (!r_R2())
{
- break lab3;
+ break lab5;
}
switch (among_var) {
case 1:
@@ -436,31 +470,31 @@ GermanStemmer = function() {
{
return false;
}
- var /** number */ v_5 = base.limit - base.cursor;
- lab4: {
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ lab6: {
base.ket = base.cursor;
if (!(base.eq_s_b("ig")))
{
- base.cursor = base.limit - v_5;
- break lab4;
+ base.cursor = base.limit - v_8;
+ break lab6;
}
base.bra = base.cursor;
{
- var /** number */ v_6 = base.limit - base.cursor;
- lab5: {
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
+ lab7: {
if (!(base.eq_s_b("e")))
{
- break lab5;
+ break lab7;
}
- base.cursor = base.limit - v_5;
- break lab4;
+ base.cursor = base.limit - v_8;
+ break lab6;
}
- base.cursor = base.limit - v_6;
+ base.cursor = base.limit - v_9;
}
if (!r_R2())
{
- base.cursor = base.limit - v_5;
- break lab4;
+ base.cursor = base.limit - v_8;
+ break lab6;
}
if (!base.slice_del())
{
@@ -470,15 +504,15 @@ GermanStemmer = function() {
break;
case 2:
{
- var /** number */ v_7 = base.limit - base.cursor;
- lab6: {
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
+ lab8: {
if (!(base.eq_s_b("e")))
{
- break lab6;
+ break lab8;
}
- break lab3;
+ break lab5;
}
- base.cursor = base.limit - v_7;
+ base.cursor = base.limit - v_10;
}
if (!base.slice_del())
{
@@ -490,30 +524,30 @@ GermanStemmer = function() {
{
return false;
}
- var /** number */ v_8 = base.limit - base.cursor;
- lab7: {
+ /** @const */ var /** number */ v_11 = base.limit - base.cursor;
+ lab9: {
base.ket = base.cursor;
- lab8: {
- var /** number */ v_9 = base.limit - base.cursor;
- lab9: {
+ lab10: {
+ /** @const */ var /** number */ v_12 = base.limit - base.cursor;
+ lab11: {
if (!(base.eq_s_b("er")))
{
- break lab9;
+ break lab11;
}
- break lab8;
+ break lab10;
}
- base.cursor = base.limit - v_9;
+ base.cursor = base.limit - v_12;
if (!(base.eq_s_b("en")))
{
- base.cursor = base.limit - v_8;
- break lab7;
+ base.cursor = base.limit - v_11;
+ break lab9;
}
}
base.bra = base.cursor;
if (!r_R1())
{
- base.cursor = base.limit - v_8;
- break lab7;
+ base.cursor = base.limit - v_11;
+ break lab9;
}
if (!base.slice_del())
{
@@ -526,19 +560,19 @@ GermanStemmer = function() {
{
return false;
}
- var /** number */ v_10 = base.limit - base.cursor;
- lab10: {
+ /** @const */ var /** number */ v_13 = base.limit - base.cursor;
+ lab12: {
base.ket = base.cursor;
- if (base.find_among_b(a_3) == 0)
+ if (base.find_among_b(a_5) == 0)
{
- base.cursor = base.limit - v_10;
- break lab10;
+ base.cursor = base.limit - v_13;
+ break lab12;
}
base.bra = base.cursor;
if (!r_R2())
{
- base.cursor = base.limit - v_10;
- break lab10;
+ base.cursor = base.limit - v_13;
+ break lab12;
}
if (!base.slice_del())
{
@@ -548,23 +582,23 @@ GermanStemmer = function() {
break;
}
}
- base.cursor = base.limit - v_4;
+ base.cursor = base.limit - v_7;
return true;
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
r_prelude();
base.cursor = v_1;
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
r_mark_regions();
base.cursor = v_2;
base.limit_backward = base.cursor; base.cursor = base.limit;
r_standard_suffix();
base.cursor = base.limit_backward;
- var /** number */ v_4 = base.cursor;
+ /** @const */ var /** number */ v_3 = base.cursor;
r_postlude();
- base.cursor = v_4;
+ base.cursor = v_3;
return true;
};
diff --git a/sphinx/search/non-minified-js/greek-stemmer.js b/sphinx/search/non-minified-js/greek-stemmer.js
new file mode 100644
index 00000000000..06ad1692fb1
--- /dev/null
+++ b/sphinx/search/non-minified-js/greek-stemmer.js
@@ -0,0 +1,2873 @@
+// Generated from greek.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var GreekStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["", -1, 25],
+ ["\u0386", 0, 1],
+ ["\u0388", 0, 5],
+ ["\u0389", 0, 7],
+ ["\u038A", 0, 9],
+ ["\u038C", 0, 15],
+ ["\u038E", 0, 20],
+ ["\u038F", 0, 24],
+ ["\u0390", 0, 7],
+ ["\u0391", 0, 1],
+ ["\u0392", 0, 2],
+ ["\u0393", 0, 3],
+ ["\u0394", 0, 4],
+ ["\u0395", 0, 5],
+ ["\u0396", 0, 6],
+ ["\u0397", 0, 7],
+ ["\u0398", 0, 8],
+ ["\u0399", 0, 9],
+ ["\u039A", 0, 10],
+ ["\u039B", 0, 11],
+ ["\u039C", 0, 12],
+ ["\u039D", 0, 13],
+ ["\u039E", 0, 14],
+ ["\u039F", 0, 15],
+ ["\u03A0", 0, 16],
+ ["\u03A1", 0, 17],
+ ["\u03A3", 0, 18],
+ ["\u03A4", 0, 19],
+ ["\u03A5", 0, 20],
+ ["\u03A6", 0, 21],
+ ["\u03A7", 0, 22],
+ ["\u03A8", 0, 23],
+ ["\u03A9", 0, 24],
+ ["\u03AA", 0, 9],
+ ["\u03AB", 0, 20],
+ ["\u03AC", 0, 1],
+ ["\u03AD", 0, 5],
+ ["\u03AE", 0, 7],
+ ["\u03AF", 0, 9],
+ ["\u03B0", 0, 20],
+ ["\u03C2", 0, 18],
+ ["\u03CA", 0, 7],
+ ["\u03CB", 0, 20],
+ ["\u03CC", 0, 15],
+ ["\u03CD", 0, 20],
+ ["\u03CE", 0, 24]
+ ];
+
+ /** @const */ var a_1 = [
+ ["\u03C3\u03BA\u03B1\u03B3\u03B9\u03B1", -1, 2],
+ ["\u03C6\u03B1\u03B3\u03B9\u03B1", -1, 1],
+ ["\u03BF\u03BB\u03BF\u03B3\u03B9\u03B1", -1, 3],
+ ["\u03C3\u03BF\u03B3\u03B9\u03B1", -1, 4],
+ ["\u03C4\u03B1\u03C4\u03BF\u03B3\u03B9\u03B1", -1, 5],
+ ["\u03BA\u03C1\u03B5\u03B1\u03C4\u03B1", -1, 6],
+ ["\u03C0\u03B5\u03C1\u03B1\u03C4\u03B1", -1, 7],
+ ["\u03C4\u03B5\u03C1\u03B1\u03C4\u03B1", -1, 8],
+ ["\u03B3\u03B5\u03B3\u03BF\u03BD\u03BF\u03C4\u03B1", -1, 11],
+ ["\u03BA\u03B1\u03B8\u03B5\u03C3\u03C4\u03C9\u03C4\u03B1", -1, 10],
+ ["\u03C6\u03C9\u03C4\u03B1", -1, 9],
+ ["\u03C0\u03B5\u03C1\u03B1\u03C4\u03B7", -1, 7],
+ ["\u03C3\u03BA\u03B1\u03B3\u03B9\u03C9\u03BD", -1, 2],
+ ["\u03C6\u03B1\u03B3\u03B9\u03C9\u03BD", -1, 1],
+ ["\u03BF\u03BB\u03BF\u03B3\u03B9\u03C9\u03BD", -1, 3],
+ ["\u03C3\u03BF\u03B3\u03B9\u03C9\u03BD", -1, 4],
+ ["\u03C4\u03B1\u03C4\u03BF\u03B3\u03B9\u03C9\u03BD", -1, 5],
+ ["\u03BA\u03C1\u03B5\u03B1\u03C4\u03C9\u03BD", -1, 6],
+ ["\u03C0\u03B5\u03C1\u03B1\u03C4\u03C9\u03BD", -1, 7],
+ ["\u03C4\u03B5\u03C1\u03B1\u03C4\u03C9\u03BD", -1, 8],
+ ["\u03B3\u03B5\u03B3\u03BF\u03BD\u03BF\u03C4\u03C9\u03BD", -1, 11],
+ ["\u03BA\u03B1\u03B8\u03B5\u03C3\u03C4\u03C9\u03C4\u03C9\u03BD", -1, 10],
+ ["\u03C6\u03C9\u03C4\u03C9\u03BD", -1, 9],
+ ["\u03BA\u03C1\u03B5\u03B1\u03C3", -1, 6],
+ ["\u03C0\u03B5\u03C1\u03B1\u03C3", -1, 7],
+ ["\u03C4\u03B5\u03C1\u03B1\u03C3", -1, 8],
+ ["\u03B3\u03B5\u03B3\u03BF\u03BD\u03BF\u03C3", -1, 11],
+ ["\u03BA\u03C1\u03B5\u03B1\u03C4\u03BF\u03C3", -1, 6],
+ ["\u03C0\u03B5\u03C1\u03B1\u03C4\u03BF\u03C3", -1, 7],
+ ["\u03C4\u03B5\u03C1\u03B1\u03C4\u03BF\u03C3", -1, 8],
+ ["\u03B3\u03B5\u03B3\u03BF\u03BD\u03BF\u03C4\u03BF\u03C3", -1, 11],
+ ["\u03BA\u03B1\u03B8\u03B5\u03C3\u03C4\u03C9\u03C4\u03BF\u03C3", -1, 10],
+ ["\u03C6\u03C9\u03C4\u03BF\u03C3", -1, 9],
+ ["\u03BA\u03B1\u03B8\u03B5\u03C3\u03C4\u03C9\u03C3", -1, 10],
+ ["\u03C6\u03C9\u03C3", -1, 9],
+ ["\u03C3\u03BA\u03B1\u03B3\u03B9\u03BF\u03C5", -1, 2],
+ ["\u03C6\u03B1\u03B3\u03B9\u03BF\u03C5", -1, 1],
+ ["\u03BF\u03BB\u03BF\u03B3\u03B9\u03BF\u03C5", -1, 3],
+ ["\u03C3\u03BF\u03B3\u03B9\u03BF\u03C5", -1, 4],
+ ["\u03C4\u03B1\u03C4\u03BF\u03B3\u03B9\u03BF\u03C5", -1, 5]
+ ];
+
+ /** @const */ var a_2 = [
+ ["\u03C0\u03B1", -1, 1],
+ ["\u03BE\u03B1\u03BD\u03B1\u03C0\u03B1", 0, 1],
+ ["\u03B5\u03C0\u03B1", 0, 1],
+ ["\u03C0\u03B5\u03C1\u03B9\u03C0\u03B1", 0, 1],
+ ["\u03B1\u03BD\u03B1\u03BC\u03C0\u03B1", 0, 1],
+ ["\u03B5\u03BC\u03C0\u03B1", 0, 1],
+ ["\u03B2", -1, 2],
+ ["\u03B4\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03B2\u03B1\u03B8\u03C5\u03C1\u03B9", -1, 2],
+ ["\u03B2\u03B1\u03C1\u03BA", -1, 2],
+ ["\u03BC\u03B1\u03C1\u03BA", -1, 2],
+ ["\u03BB", -1, 2],
+ ["\u03BC", -1, 2],
+ ["\u03BA\u03BF\u03C1\u03BD", -1, 2],
+ ["\u03B1\u03B8\u03C1\u03BF", -1, 1],
+ ["\u03C3\u03C5\u03BD\u03B1\u03B8\u03C1\u03BF", 14, 1],
+ ["\u03C0", -1, 2],
+ ["\u03B9\u03BC\u03C0", 16, 2],
+ ["\u03C1", -1, 2],
+ ["\u03BC\u03B1\u03C1", 18, 2],
+ ["\u03B1\u03BC\u03C0\u03B1\u03C1", 18, 2],
+ ["\u03B3\u03BA\u03C1", 18, 2],
+ ["\u03B2\u03BF\u03BB\u03B2\u03BF\u03C1", 18, 2],
+ ["\u03B3\u03BB\u03C5\u03BA\u03BF\u03C1", 18, 2],
+ ["\u03C0\u03B9\u03C0\u03B5\u03C1\u03BF\u03C1", 18, 2],
+ ["\u03C0\u03C1", 18, 2],
+ ["\u03BC\u03C0\u03C1", 25, 2],
+ ["\u03B1\u03C1\u03C1", 18, 2],
+ ["\u03B3\u03BB\u03C5\u03BA\u03C5\u03C1", 18, 2],
+ ["\u03C0\u03BF\u03BB\u03C5\u03C1", 18, 2],
+ ["\u03BB\u03BF\u03C5", -1, 2]
+ ];
+
+ /** @const */ var a_3 = [
+ ["\u03B9\u03B6\u03B1", -1, 1],
+ ["\u03B9\u03B6\u03B5", -1, 1],
+ ["\u03B9\u03B6\u03B1\u03BC\u03B5", -1, 1],
+ ["\u03B9\u03B6\u03BF\u03C5\u03BC\u03B5", -1, 1],
+ ["\u03B9\u03B6\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03B9\u03B6\u03BF\u03C5\u03BD\u03B5", -1, 1],
+ ["\u03B9\u03B6\u03B1\u03C4\u03B5", -1, 1],
+ ["\u03B9\u03B6\u03B5\u03C4\u03B5", -1, 1],
+ ["\u03B9\u03B6\u03B5\u03B9", -1, 1],
+ ["\u03B9\u03B6\u03B1\u03BD", -1, 1],
+ ["\u03B9\u03B6\u03BF\u03C5\u03BD", -1, 1],
+ ["\u03B9\u03B6\u03B5\u03C3", -1, 1],
+ ["\u03B9\u03B6\u03B5\u03B9\u03C3", -1, 1],
+ ["\u03B9\u03B6\u03C9", -1, 1]
+ ];
+
+ /** @const */ var a_4 = [
+ ["\u03B2\u03B9", -1, 1],
+ ["\u03BB\u03B9", -1, 1],
+ ["\u03B1\u03BB", -1, 1],
+ ["\u03B5\u03BD", -1, 1],
+ ["\u03C3", -1, 1],
+ ["\u03C7", -1, 1],
+ ["\u03C5\u03C8", -1, 1],
+ ["\u03B6\u03C9", -1, 1]
+ ];
+
+ /** @const */ var a_5 = [
+ ["\u03C9\u03B8\u03B7\u03BA\u03B1", -1, 1],
+ ["\u03C9\u03B8\u03B7\u03BA\u03B5", -1, 1],
+ ["\u03C9\u03B8\u03B7\u03BA\u03B1\u03BC\u03B5", -1, 1],
+ ["\u03C9\u03B8\u03B7\u03BA\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03C9\u03B8\u03B7\u03BA\u03B1\u03C4\u03B5", -1, 1],
+ ["\u03C9\u03B8\u03B7\u03BA\u03B1\u03BD", -1, 1],
+ ["\u03C9\u03B8\u03B7\u03BA\u03B5\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_6 = [
+ ["\u03BE\u03B1\u03BD\u03B1\u03C0\u03B1", -1, 1],
+ ["\u03B5\u03C0\u03B1", -1, 1],
+ ["\u03C0\u03B5\u03C1\u03B9\u03C0\u03B1", -1, 1],
+ ["\u03B1\u03BD\u03B1\u03BC\u03C0\u03B1", -1, 1],
+ ["\u03B5\u03BC\u03C0\u03B1", -1, 1],
+ ["\u03C7\u03B1\u03C1\u03C4\u03BF\u03C0\u03B1", -1, 1],
+ ["\u03B5\u03BE\u03B1\u03C1\u03C7\u03B1", -1, 1],
+ ["\u03B3\u03B5", -1, 2],
+ ["\u03B3\u03BA\u03B5", -1, 2],
+ ["\u03BA\u03BB\u03B5", -1, 1],
+ ["\u03B5\u03BA\u03BB\u03B5", 9, 1],
+ ["\u03B1\u03C0\u03B5\u03BA\u03BB\u03B5", 10, 1],
+ ["\u03B1\u03C0\u03BF\u03BA\u03BB\u03B5", 9, 1],
+ ["\u03B5\u03C3\u03C9\u03BA\u03BB\u03B5", 9, 1],
+ ["\u03B4\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03C0\u03B5", -1, 1],
+ ["\u03B5\u03C0\u03B5", 15, 1],
+ ["\u03BC\u03B5\u03C4\u03B5\u03C0\u03B5", 16, 1],
+ ["\u03B5\u03C3\u03B5", -1, 1],
+ ["\u03B3\u03BA", -1, 2],
+ ["\u03BC", -1, 2],
+ ["\u03C0\u03BF\u03C5\u03BA\u03B1\u03BC", 20, 2],
+ ["\u03BA\u03BF\u03BC", 20, 2],
+ ["\u03B1\u03BD", -1, 2],
+ ["\u03BF\u03BB\u03BF", -1, 2],
+ ["\u03B1\u03B8\u03C1\u03BF", -1, 1],
+ ["\u03C3\u03C5\u03BD\u03B1\u03B8\u03C1\u03BF", 25, 1],
+ ["\u03C0", -1, 2],
+ ["\u03BB\u03B1\u03C1", -1, 2],
+ ["\u03B4\u03B7\u03BC\u03BF\u03BA\u03C1\u03B1\u03C4", -1, 2],
+ ["\u03B1\u03C6", -1, 2],
+ ["\u03B3\u03B9\u03B3\u03B1\u03BD\u03C4\u03BF\u03B1\u03C6", 30, 2]
+ ];
+
+ /** @const */ var a_7 = [
+ ["\u03B9\u03C3\u03B1", -1, 1],
+ ["\u03B9\u03C3\u03B1\u03BC\u03B5", -1, 1],
+ ["\u03B9\u03C3\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03B9\u03C3\u03B5", -1, 1],
+ ["\u03B9\u03C3\u03B1\u03C4\u03B5", -1, 1],
+ ["\u03B9\u03C3\u03B1\u03BD", -1, 1],
+ ["\u03B9\u03C3\u03B5\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_8 = [
+ ["\u03BE\u03B1\u03BD\u03B1\u03C0\u03B1", -1, 1],
+ ["\u03B5\u03C0\u03B1", -1, 1],
+ ["\u03C0\u03B5\u03C1\u03B9\u03C0\u03B1", -1, 1],
+ ["\u03B1\u03BD\u03B1\u03BC\u03C0\u03B1", -1, 1],
+ ["\u03B5\u03BC\u03C0\u03B1", -1, 1],
+ ["\u03C7\u03B1\u03C1\u03C4\u03BF\u03C0\u03B1", -1, 1],
+ ["\u03B5\u03BE\u03B1\u03C1\u03C7\u03B1", -1, 1],
+ ["\u03BA\u03BB\u03B5", -1, 1],
+ ["\u03B5\u03BA\u03BB\u03B5", 7, 1],
+ ["\u03B1\u03C0\u03B5\u03BA\u03BB\u03B5", 8, 1],
+ ["\u03B1\u03C0\u03BF\u03BA\u03BB\u03B5", 7, 1],
+ ["\u03B5\u03C3\u03C9\u03BA\u03BB\u03B5", 7, 1],
+ ["\u03B4\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03C0\u03B5", -1, 1],
+ ["\u03B5\u03C0\u03B5", 13, 1],
+ ["\u03BC\u03B5\u03C4\u03B5\u03C0\u03B5", 14, 1],
+ ["\u03B5\u03C3\u03B5", -1, 1],
+ ["\u03B1\u03B8\u03C1\u03BF", -1, 1],
+ ["\u03C3\u03C5\u03BD\u03B1\u03B8\u03C1\u03BF", 17, 1]
+ ];
+
+ /** @const */ var a_9 = [
+ ["\u03B9\u03C3\u03BF\u03C5\u03BC\u03B5", -1, 1],
+ ["\u03B9\u03C3\u03BF\u03C5\u03BD\u03B5", -1, 1],
+ ["\u03B9\u03C3\u03B5\u03C4\u03B5", -1, 1],
+ ["\u03B9\u03C3\u03B5\u03B9", -1, 1],
+ ["\u03B9\u03C3\u03BF\u03C5\u03BD", -1, 1],
+ ["\u03B9\u03C3\u03B5\u03B9\u03C3", -1, 1],
+ ["\u03B9\u03C3\u03C9", -1, 1]
+ ];
+
+ /** @const */ var a_10 = [
+ ["\u03B1\u03C4\u03B1", -1, 2],
+ ["\u03C6\u03B1", -1, 2],
+ ["\u03B7\u03C6\u03B1", 1, 2],
+ ["\u03BC\u03B5\u03B3", -1, 2],
+ ["\u03BB\u03C5\u03B3", -1, 2],
+ ["\u03B7\u03B4", -1, 2],
+ ["\u03BA\u03BB\u03B5", -1, 1],
+ ["\u03B5\u03C3\u03C9\u03BA\u03BB\u03B5", 6, 1],
+ ["\u03C0\u03BB\u03B5", -1, 1],
+ ["\u03B4\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03C3\u03B5", -1, 1],
+ ["\u03B1\u03C3\u03B5", 10, 1],
+ ["\u03BA\u03B1\u03B8", -1, 2],
+ ["\u03B5\u03C7\u03B8", -1, 2],
+ ["\u03BA\u03B1\u03BA", -1, 2],
+ ["\u03BC\u03B1\u03BA", -1, 2],
+ ["\u03C3\u03BA", -1, 2],
+ ["\u03C6\u03B9\u03BB", -1, 2],
+ ["\u03BA\u03C5\u03BB", -1, 2],
+ ["\u03BC", -1, 2],
+ ["\u03B3\u03B5\u03BC", 19, 2],
+ ["\u03B1\u03C7\u03BD", -1, 2],
+ ["\u03C3\u03C5\u03BD\u03B1\u03B8\u03C1\u03BF", -1, 1],
+ ["\u03C0", -1, 2],
+ ["\u03B1\u03C0", 23, 2],
+ ["\u03B5\u03BC\u03C0", 23, 2],
+ ["\u03B5\u03C5\u03C0", 23, 2],
+ ["\u03B1\u03C1", -1, 2],
+ ["\u03B1\u03BF\u03C1", -1, 2],
+ ["\u03B3\u03C5\u03C1", -1, 2],
+ ["\u03C7\u03C1", -1, 2],
+ ["\u03C7\u03C9\u03C1", -1, 2],
+ ["\u03BA\u03C4", -1, 2],
+ ["\u03B1\u03BA\u03C4", 32, 2],
+ ["\u03C7\u03C4", -1, 2],
+ ["\u03B1\u03C7\u03C4", 34, 2],
+ ["\u03C4\u03B1\u03C7", -1, 2],
+ ["\u03C3\u03C7", -1, 2],
+ ["\u03B1\u03C3\u03C7", 37, 2],
+ ["\u03C5\u03C8", -1, 2]
+ ];
+
+ /** @const */ var a_11 = [
+ ["\u03B9\u03C3\u03C4\u03B1", -1, 1],
+ ["\u03B9\u03C3\u03C4\u03B5", -1, 1],
+ ["\u03B9\u03C3\u03C4\u03B7", -1, 1],
+ ["\u03B9\u03C3\u03C4\u03BF\u03B9", -1, 1],
+ ["\u03B9\u03C3\u03C4\u03C9\u03BD", -1, 1],
+ ["\u03B9\u03C3\u03C4\u03BF", -1, 1],
+ ["\u03B9\u03C3\u03C4\u03B5\u03C3", -1, 1],
+ ["\u03B9\u03C3\u03C4\u03B7\u03C3", -1, 1],
+ ["\u03B9\u03C3\u03C4\u03BF\u03C3", -1, 1],
+ ["\u03B9\u03C3\u03C4\u03BF\u03C5\u03C3", -1, 1],
+ ["\u03B9\u03C3\u03C4\u03BF\u03C5", -1, 1]
+ ];
+
+ /** @const */ var a_12 = [
+ ["\u03B5\u03B3\u03BA\u03BB\u03B5", -1, 1],
+ ["\u03B1\u03C0\u03BF\u03BA\u03BB\u03B5", -1, 1],
+ ["\u03B4\u03B1\u03BD\u03B5", -1, 2],
+ ["\u03B1\u03BD\u03C4\u03B9\u03B4\u03B1\u03BD\u03B5", 2, 2],
+ ["\u03C3\u03B5", -1, 1],
+ ["\u03BC\u03B5\u03C4\u03B1\u03C3\u03B5", 4, 1],
+ ["\u03BC\u03B9\u03BA\u03C1\u03BF\u03C3\u03B5", 4, 1]
+ ];
+
+ /** @const */ var a_13 = [
+ ["\u03B1\u03C4\u03BF\u03BC\u03B9\u03BA", -1, 2],
+ ["\u03B5\u03B8\u03BD\u03B9\u03BA", -1, 4],
+ ["\u03C4\u03BF\u03C0\u03B9\u03BA", -1, 7],
+ ["\u03B5\u03BA\u03BB\u03B5\u03BA\u03C4\u03B9\u03BA", -1, 5],
+ ["\u03C3\u03BA\u03B5\u03C0\u03C4\u03B9\u03BA", -1, 6],
+ ["\u03B3\u03BD\u03C9\u03C3\u03C4\u03B9\u03BA", -1, 3],
+ ["\u03B1\u03B3\u03BD\u03C9\u03C3\u03C4\u03B9\u03BA", 5, 1],
+ ["\u03B1\u03BB\u03B5\u03BE\u03B1\u03BD\u03B4\u03C1\u03B9\u03BD", -1, 8],
+ ["\u03B8\u03B5\u03B1\u03C4\u03C1\u03B9\u03BD", -1, 10],
+ ["\u03B2\u03C5\u03B6\u03B1\u03BD\u03C4\u03B9\u03BD", -1, 9]
+ ];
+
+ /** @const */ var a_14 = [
+ ["\u03B9\u03C3\u03BC\u03BF\u03B9", -1, 1],
+ ["\u03B9\u03C3\u03BC\u03C9\u03BD", -1, 1],
+ ["\u03B9\u03C3\u03BC\u03BF", -1, 1],
+ ["\u03B9\u03C3\u03BC\u03BF\u03C3", -1, 1],
+ ["\u03B9\u03C3\u03BC\u03BF\u03C5\u03C3", -1, 1],
+ ["\u03B9\u03C3\u03BC\u03BF\u03C5", -1, 1]
+ ];
+
+ /** @const */ var a_15 = [
+ ["\u03C3", -1, 1],
+ ["\u03C7", -1, 1]
+ ];
+
+ /** @const */ var a_16 = [
+ ["\u03BF\u03C5\u03B4\u03B1\u03BA\u03B9\u03B1", -1, 1],
+ ["\u03B1\u03C1\u03B1\u03BA\u03B9\u03B1", -1, 1],
+ ["\u03BF\u03C5\u03B4\u03B1\u03BA\u03B9", -1, 1],
+ ["\u03B1\u03C1\u03B1\u03BA\u03B9", -1, 1]
+ ];
+
+ /** @const */ var a_17 = [
+ ["\u03B2", -1, 2],
+ ["\u03B2\u03B1\u03BC\u03B2", 0, 1],
+ ["\u03C3\u03BB\u03BF\u03B2", 0, 1],
+ ["\u03C4\u03C3\u03B5\u03C7\u03BF\u03C3\u03BB\u03BF\u03B2", 2, 1],
+ ["\u03BA\u03B1\u03C1\u03B4", -1, 2],
+ ["\u03B6", -1, 2],
+ ["\u03C4\u03B6", 5, 1],
+ ["\u03BA", -1, 1],
+ ["\u03BA\u03B1\u03C0\u03B1\u03BA", 7, 1],
+ ["\u03C3\u03BF\u03BA", 7, 1],
+ ["\u03C3\u03BA", 7, 1],
+ ["\u03B2\u03B1\u03BB", -1, 2],
+ ["\u03BC\u03B1\u03BB", -1, 1],
+ ["\u03B3\u03BB", -1, 2],
+ ["\u03C4\u03C1\u03B9\u03C0\u03BF\u03BB", -1, 2],
+ ["\u03C0\u03BB", -1, 1],
+ ["\u03BB\u03BF\u03C5\u03BB", -1, 1],
+ ["\u03C6\u03C5\u03BB", -1, 1],
+ ["\u03BA\u03B1\u03B9\u03BC", -1, 1],
+ ["\u03BA\u03BB\u03B9\u03BC", -1, 1],
+ ["\u03C6\u03B1\u03C1\u03BC", -1, 1],
+ ["\u03B3\u03B9\u03B1\u03BD", -1, 2],
+ ["\u03C3\u03C0\u03B1\u03BD", -1, 1],
+ ["\u03B7\u03B3\u03BF\u03C5\u03BC\u03B5\u03BD", -1, 2],
+ ["\u03BA\u03BF\u03BD", -1, 1],
+ ["\u03BC\u03B1\u03BA\u03C1\u03C5\u03BD", -1, 2],
+ ["\u03C0", -1, 2],
+ ["\u03BA\u03B1\u03C4\u03C1\u03B1\u03C0", 26, 1],
+ ["\u03C1", -1, 1],
+ ["\u03B2\u03C1", 28, 1],
+ ["\u03BB\u03B1\u03B2\u03C1", 29, 1],
+ ["\u03B1\u03BC\u03B2\u03C1", 29, 1],
+ ["\u03BC\u03B5\u03C1", 28, 1],
+ ["\u03C0\u03B1\u03C4\u03B5\u03C1", 28, 2],
+ ["\u03B1\u03BD\u03B8\u03C1", 28, 1],
+ ["\u03BA\u03BF\u03C1", 28, 1],
+ ["\u03C3", -1, 1],
+ ["\u03BD\u03B1\u03B3\u03BA\u03B1\u03C3", 36, 1],
+ ["\u03C4\u03BF\u03C3", 36, 2],
+ ["\u03BC\u03BF\u03C5\u03C3\u03C4", -1, 1],
+ ["\u03C1\u03C5", -1, 1],
+ ["\u03C6", -1, 1],
+ ["\u03C3\u03C6", 41, 1],
+ ["\u03B1\u03BB\u03B9\u03C3\u03C6", 42, 1],
+ ["\u03BD\u03C5\u03C6", 41, 2],
+ ["\u03C7", -1, 1]
+ ];
+
+ /** @const */ var a_18 = [
+ ["\u03B1\u03BA\u03B9\u03B1", -1, 1],
+ ["\u03B1\u03C1\u03B1\u03BA\u03B9\u03B1", 0, 1],
+ ["\u03B9\u03C4\u03C3\u03B1", -1, 1],
+ ["\u03B1\u03BA\u03B9", -1, 1],
+ ["\u03B1\u03C1\u03B1\u03BA\u03B9", 3, 1],
+ ["\u03B9\u03C4\u03C3\u03C9\u03BD", -1, 1],
+ ["\u03B9\u03C4\u03C3\u03B1\u03C3", -1, 1],
+ ["\u03B9\u03C4\u03C3\u03B5\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_19 = [
+ ["\u03C8\u03B1\u03BB", -1, 1],
+ ["\u03B1\u03B9\u03C6\u03BD", -1, 1],
+ ["\u03BF\u03BB\u03BF", -1, 1],
+ ["\u03B9\u03C1", -1, 1]
+ ];
+
+ /** @const */ var a_20 = [
+ ["\u03B5", -1, 1],
+ ["\u03C0\u03B1\u03B9\u03C7\u03BD", -1, 1]
+ ];
+
+ /** @const */ var a_21 = [
+ ["\u03B9\u03B4\u03B9\u03B1", -1, 1],
+ ["\u03B9\u03B4\u03B9\u03C9\u03BD", -1, 1],
+ ["\u03B9\u03B4\u03B9\u03BF", -1, 1]
+ ];
+
+ /** @const */ var a_22 = [
+ ["\u03B9\u03B2", -1, 1],
+ ["\u03B4", -1, 1],
+ ["\u03C6\u03C1\u03B1\u03B3\u03BA", -1, 1],
+ ["\u03BB\u03C5\u03BA", -1, 1],
+ ["\u03BF\u03B2\u03B5\u03BB", -1, 1],
+ ["\u03BC\u03B7\u03BD", -1, 1],
+ ["\u03C1", -1, 1]
+ ];
+
+ /** @const */ var a_23 = [
+ ["\u03B9\u03C3\u03BA\u03B5", -1, 1],
+ ["\u03B9\u03C3\u03BA\u03BF", -1, 1],
+ ["\u03B9\u03C3\u03BA\u03BF\u03C3", -1, 1],
+ ["\u03B9\u03C3\u03BA\u03BF\u03C5", -1, 1]
+ ];
+
+ /** @const */ var a_24 = [
+ ["\u03B1\u03B4\u03C9\u03BD", -1, 1],
+ ["\u03B1\u03B4\u03B5\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_25 = [
+ ["\u03B3\u03B9\u03B1\u03B3\u03B9", -1, -1],
+ ["\u03B8\u03B5\u03B9", -1, -1],
+ ["\u03BF\u03BA", -1, -1],
+ ["\u03BC\u03B1\u03BC", -1, -1],
+ ["\u03BC\u03B1\u03BD", -1, -1],
+ ["\u03BC\u03C0\u03B1\u03BC\u03C0", -1, -1],
+ ["\u03C0\u03B5\u03B8\u03B5\u03C1", -1, -1],
+ ["\u03C0\u03B1\u03C4\u03B5\u03C1", -1, -1],
+ ["\u03BA\u03C5\u03C1", -1, -1],
+ ["\u03BD\u03C4\u03B1\u03BD\u03C4", -1, -1]
+ ];
+
+ /** @const */ var a_26 = [
+ ["\u03B5\u03B4\u03C9\u03BD", -1, 1],
+ ["\u03B5\u03B4\u03B5\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_27 = [
+ ["\u03BC\u03B9\u03BB", -1, 1],
+ ["\u03B4\u03B1\u03C0", -1, 1],
+ ["\u03B3\u03B7\u03C0", -1, 1],
+ ["\u03B9\u03C0", -1, 1],
+ ["\u03B5\u03BC\u03C0", -1, 1],
+ ["\u03BF\u03C0", -1, 1],
+ ["\u03BA\u03C1\u03B1\u03C3\u03C0", -1, 1],
+ ["\u03C5\u03C0", -1, 1]
+ ];
+
+ /** @const */ var a_28 = [
+ ["\u03BF\u03C5\u03B4\u03C9\u03BD", -1, 1],
+ ["\u03BF\u03C5\u03B4\u03B5\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_29 = [
+ ["\u03C4\u03C1\u03B1\u03B3", -1, 1],
+ ["\u03C6\u03B5", -1, 1],
+ ["\u03BA\u03B1\u03BB\u03B9\u03B1\u03BA", -1, 1],
+ ["\u03B1\u03C1\u03BA", -1, 1],
+ ["\u03C3\u03BA", -1, 1],
+ ["\u03C0\u03B5\u03C4\u03B1\u03BB", -1, 1],
+ ["\u03B2\u03B5\u03BB", -1, 1],
+ ["\u03BB\u03BF\u03C5\u03BB", -1, 1],
+ ["\u03C6\u03BB", -1, 1],
+ ["\u03C7\u03BD", -1, 1],
+ ["\u03C0\u03BB\u03B5\u03BE", -1, 1],
+ ["\u03C3\u03C0", -1, 1],
+ ["\u03C6\u03C1", -1, 1],
+ ["\u03C3", -1, 1],
+ ["\u03BB\u03B9\u03C7", -1, 1]
+ ];
+
+ /** @const */ var a_30 = [
+ ["\u03B5\u03C9\u03BD", -1, 1],
+ ["\u03B5\u03C9\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_31 = [
+ ["\u03B4", -1, 1],
+ ["\u03B9\u03B4", 0, 1],
+ ["\u03B8", -1, 1],
+ ["\u03B3\u03B1\u03BB", -1, 1],
+ ["\u03B5\u03BB", -1, 1],
+ ["\u03BD", -1, 1],
+ ["\u03C0", -1, 1],
+ ["\u03C0\u03B1\u03C1", -1, 1]
+ ];
+
+ /** @const */ var a_32 = [
+ ["\u03B9\u03B1", -1, 1],
+ ["\u03B9\u03C9\u03BD", -1, 1],
+ ["\u03B9\u03BF\u03C5", -1, 1]
+ ];
+
+ /** @const */ var a_33 = [
+ ["\u03B9\u03BA\u03B1", -1, 1],
+ ["\u03B9\u03BA\u03C9\u03BD", -1, 1],
+ ["\u03B9\u03BA\u03BF", -1, 1],
+ ["\u03B9\u03BA\u03BF\u03C5", -1, 1]
+ ];
+
+ /** @const */ var a_34 = [
+ ["\u03B1\u03B4", -1, 1],
+ ["\u03C3\u03C5\u03BD\u03B1\u03B4", 0, 1],
+ ["\u03BA\u03B1\u03C4\u03B1\u03B4", 0, 1],
+ ["\u03B1\u03BD\u03C4\u03B9\u03B4", -1, 1],
+ ["\u03B5\u03BD\u03B4", -1, 1],
+ ["\u03C6\u03C5\u03BB\u03BF\u03B4", -1, 1],
+ ["\u03C5\u03C0\u03BF\u03B4", -1, 1],
+ ["\u03C0\u03C1\u03C9\u03C4\u03BF\u03B4", -1, 1],
+ ["\u03B5\u03BE\u03C9\u03B4", -1, 1],
+ ["\u03B7\u03B8", -1, 1],
+ ["\u03B1\u03BD\u03B7\u03B8", 9, 1],
+ ["\u03BE\u03B9\u03BA", -1, 1],
+ ["\u03B1\u03BB", -1, 1],
+ ["\u03B1\u03BC\u03BC\u03BF\u03C7\u03B1\u03BB", 12, 1],
+ ["\u03C3\u03C5\u03BD\u03BF\u03BC\u03B7\u03BB", -1, 1],
+ ["\u03BC\u03C0\u03BF\u03BB", -1, 1],
+ ["\u03BC\u03BF\u03C5\u03BB", -1, 1],
+ ["\u03C4\u03C3\u03B1\u03BC", -1, 1],
+ ["\u03B2\u03C1\u03C9\u03BC", -1, 1],
+ ["\u03B1\u03BC\u03B1\u03BD", -1, 1],
+ ["\u03BC\u03C0\u03B1\u03BD", -1, 1],
+ ["\u03BA\u03B1\u03BB\u03BB\u03B9\u03BD", -1, 1],
+ ["\u03C0\u03BF\u03C3\u03C4\u03B5\u03BB\u03BD", -1, 1],
+ ["\u03C6\u03B9\u03BB\u03BF\u03BD", -1, 1],
+ ["\u03BA\u03B1\u03BB\u03C0", -1, 1],
+ ["\u03B3\u03B5\u03C1", -1, 1],
+ ["\u03C7\u03B1\u03C3", -1, 1],
+ ["\u03BC\u03C0\u03BF\u03C3", -1, 1],
+ ["\u03C0\u03BB\u03B9\u03B1\u03C4\u03C3", -1, 1],
+ ["\u03C0\u03B5\u03C4\u03C3", -1, 1],
+ ["\u03C0\u03B9\u03C4\u03C3", -1, 1],
+ ["\u03C6\u03C5\u03C3", -1, 1],
+ ["\u03BC\u03C0\u03B1\u03B3\u03B9\u03B1\u03C4", -1, 1],
+ ["\u03BD\u03B9\u03C4", -1, 1],
+ ["\u03C0\u03B9\u03BA\u03B1\u03BD\u03C4", -1, 1],
+ ["\u03C3\u03B5\u03C1\u03C4", -1, 1]
+ ];
+
+ /** @const */ var a_35 = [
+ ["\u03B1\u03B3\u03B1\u03BC\u03B5", -1, 1],
+ ["\u03B7\u03BA\u03B1\u03BC\u03B5", -1, 1],
+ ["\u03B7\u03B8\u03B7\u03BA\u03B1\u03BC\u03B5", 1, 1],
+ ["\u03B7\u03C3\u03B1\u03BC\u03B5", -1, 1],
+ ["\u03BF\u03C5\u03C3\u03B1\u03BC\u03B5", -1, 1]
+ ];
+
+ /** @const */ var a_36 = [
+ ["\u03B2\u03BF\u03C5\u03B2", -1, 1],
+ ["\u03BE\u03B5\u03B8", -1, 1],
+ ["\u03C0\u03B5\u03B8", -1, 1],
+ ["\u03B1\u03C0\u03BF\u03B8", -1, 1],
+ ["\u03B1\u03C0\u03BF\u03BA", -1, 1],
+ ["\u03BF\u03C5\u03BB", -1, 1],
+ ["\u03B1\u03BD\u03B1\u03C0", -1, 1],
+ ["\u03C0\u03B9\u03BA\u03C1", -1, 1],
+ ["\u03C0\u03BF\u03C4", -1, 1],
+ ["\u03B1\u03C0\u03BF\u03C3\u03C4", -1, 1],
+ ["\u03C7", -1, 1],
+ ["\u03C3\u03B9\u03C7", 10, 1]
+ ];
+
+ /** @const */ var a_37 = [
+ ["\u03C4\u03C1", -1, 1],
+ ["\u03C4\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_38 = [
+ ["\u03B1\u03B3\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03B7\u03BA\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03B7\u03B8\u03B7\u03BA\u03B1\u03BD\u03B5", 1, 1],
+ ["\u03B7\u03C3\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03BF\u03C5\u03C3\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03BF\u03BD\u03C4\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03B9\u03BF\u03BD\u03C4\u03B1\u03BD\u03B5", 5, 1],
+ ["\u03BF\u03C5\u03BD\u03C4\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03B9\u03BF\u03C5\u03BD\u03C4\u03B1\u03BD\u03B5", 7, 1],
+ ["\u03BF\u03C4\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03B9\u03BF\u03C4\u03B1\u03BD\u03B5", 9, 1]
+ ];
+
+ /** @const */ var a_39 = [
+ ["\u03C4\u03B1\u03B2", -1, 1],
+ ["\u03BD\u03C4\u03B1\u03B2", 0, 1],
+ ["\u03C8\u03B7\u03BB\u03BF\u03C4\u03B1\u03B2", 0, 1],
+ ["\u03BB\u03B9\u03B2", -1, 1],
+ ["\u03BA\u03BB\u03B9\u03B2", 3, 1],
+ ["\u03BE\u03B7\u03C1\u03BF\u03BA\u03BB\u03B9\u03B2", 4, 1],
+ ["\u03B3", -1, 1],
+ ["\u03B1\u03B3", 6, 1],
+ ["\u03C4\u03C1\u03B1\u03B3", 7, 1],
+ ["\u03C4\u03C3\u03B1\u03B3", 7, 1],
+ ["\u03B1\u03B8\u03B9\u03B3\u03B3", 6, 1],
+ ["\u03C4\u03C3\u03B9\u03B3\u03B3", 6, 1],
+ ["\u03B1\u03C4\u03C3\u03B9\u03B3\u03B3", 11, 1],
+ ["\u03C3\u03C4\u03B5\u03B3", 6, 1],
+ ["\u03B1\u03C0\u03B7\u03B3", 6, 1],
+ ["\u03C3\u03B9\u03B3", 6, 1],
+ ["\u03B1\u03BD\u03BF\u03C1\u03B3", 6, 1],
+ ["\u03B5\u03BD\u03BF\u03C1\u03B3", 6, 1],
+ ["\u03BA\u03B1\u03BB\u03C0\u03BF\u03C5\u03B6", -1, 1],
+ ["\u03B8", -1, 1],
+ ["\u03BC\u03C9\u03B1\u03BC\u03B5\u03B8", 19, 1],
+ ["\u03C0\u03B9\u03B8", 19, 1],
+ ["\u03B1\u03C0\u03B9\u03B8", 21, 1],
+ ["\u03B4\u03B5\u03BA", -1, 1],
+ ["\u03C0\u03B5\u03BB\u03B5\u03BA", -1, 1],
+ ["\u03B9\u03BA", -1, 1],
+ ["\u03B1\u03BD\u03B9\u03BA", 25, 1],
+ ["\u03B2\u03BF\u03C5\u03BB\u03BA", -1, 1],
+ ["\u03B2\u03B1\u03C3\u03BA", -1, 1],
+ ["\u03B2\u03C1\u03B1\u03C7\u03C5\u03BA", -1, 1],
+ ["\u03B3\u03B1\u03BB", -1, 1],
+ ["\u03BA\u03B1\u03C4\u03B1\u03B3\u03B1\u03BB", 30, 1],
+ ["\u03BF\u03BB\u03BF\u03B3\u03B1\u03BB", 30, 1],
+ ["\u03B2\u03B1\u03B8\u03C5\u03B3\u03B1\u03BB", 30, 1],
+ ["\u03BC\u03B5\u03BB", -1, 1],
+ ["\u03BA\u03B1\u03C3\u03C4\u03B5\u03BB", -1, 1],
+ ["\u03C0\u03BF\u03C1\u03C4\u03BF\u03BB", -1, 1],
+ ["\u03C0\u03BB", -1, 1],
+ ["\u03B4\u03B9\u03C0\u03BB", 37, 1],
+ ["\u03BB\u03B1\u03BF\u03C0\u03BB", 37, 1],
+ ["\u03C8\u03C5\u03C7\u03BF\u03C0\u03BB", 37, 1],
+ ["\u03BF\u03C5\u03BB", -1, 1],
+ ["\u03BC", -1, 1],
+ ["\u03BF\u03BB\u03B9\u03B3\u03BF\u03B4\u03B1\u03BC", 42, 1],
+ ["\u03BC\u03BF\u03C5\u03C3\u03BF\u03C5\u03BB\u03BC", 42, 1],
+ ["\u03B4\u03C1\u03B1\u03B4\u03BF\u03C5\u03BC", 42, 1],
+ ["\u03B2\u03C1\u03B1\u03C7\u03BC", 42, 1],
+ ["\u03BD", -1, 1],
+ ["\u03B1\u03BC\u03B5\u03C1\u03B9\u03BA\u03B1\u03BD", 47, 1],
+ ["\u03C0", -1, 1],
+ ["\u03B1\u03B4\u03B1\u03C0", 49, 1],
+ ["\u03C7\u03B1\u03BC\u03B7\u03BB\u03BF\u03B4\u03B1\u03C0", 49, 1],
+ ["\u03C0\u03BF\u03BB\u03C5\u03B4\u03B1\u03C0", 49, 1],
+ ["\u03BA\u03BF\u03C0", 49, 1],
+ ["\u03C5\u03C0\u03BF\u03BA\u03BF\u03C0", 53, 1],
+ ["\u03C4\u03C3\u03BF\u03C0", 49, 1],
+ ["\u03C3\u03C0", 49, 1],
+ ["\u03B5\u03C1", -1, 1],
+ ["\u03B3\u03B5\u03C1", 57, 1],
+ ["\u03B2\u03B5\u03C4\u03B5\u03C1", 57, 1],
+ ["\u03BB\u03BF\u03C5\u03B8\u03B7\u03C1", -1, 1],
+ ["\u03BA\u03BF\u03C1\u03BC\u03BF\u03C1", -1, 1],
+ ["\u03C0\u03B5\u03C1\u03B9\u03C4\u03C1", -1, 1],
+ ["\u03BF\u03C5\u03C1", -1, 1],
+ ["\u03C3", -1, 1],
+ ["\u03B2\u03B1\u03C3", 64, 1],
+ ["\u03C0\u03BF\u03BB\u03B9\u03C3", 64, 1],
+ ["\u03C3\u03B1\u03C1\u03B1\u03BA\u03B1\u03C4\u03C3", 64, 1],
+ ["\u03B8\u03C5\u03C3", 64, 1],
+ ["\u03B4\u03B9\u03B1\u03C4", -1, 1],
+ ["\u03C0\u03BB\u03B1\u03C4", -1, 1],
+ ["\u03C4\u03C3\u03B1\u03C1\u03BB\u03B1\u03C4", -1, 1],
+ ["\u03C4\u03B5\u03C4", -1, 1],
+ ["\u03C0\u03BF\u03C5\u03C1\u03B9\u03C4", -1, 1],
+ ["\u03C3\u03BF\u03C5\u03BB\u03C4", -1, 1],
+ ["\u03BC\u03B1\u03B9\u03BD\u03C4", -1, 1],
+ ["\u03B6\u03C9\u03BD\u03C4", -1, 1],
+ ["\u03BA\u03B1\u03C3\u03C4", -1, 1],
+ ["\u03C6", -1, 1],
+ ["\u03B4\u03B9\u03B1\u03C6", 78, 1],
+ ["\u03C3\u03C4\u03B5\u03C6", 78, 1],
+ ["\u03C6\u03C9\u03C4\u03BF\u03C3\u03C4\u03B5\u03C6", 80, 1],
+ ["\u03C0\u03B5\u03C1\u03B7\u03C6", 78, 1],
+ ["\u03C5\u03C0\u03B5\u03C1\u03B7\u03C6", 82, 1],
+ ["\u03BA\u03BF\u03B9\u03BB\u03B1\u03C1\u03C6", 78, 1],
+ ["\u03C0\u03B5\u03BD\u03C4\u03B1\u03C1\u03C6", 78, 1],
+ ["\u03BF\u03C1\u03C6", 78, 1],
+ ["\u03C7", -1, 1],
+ ["\u03B1\u03BC\u03B7\u03C7", 87, 1],
+ ["\u03B2\u03B9\u03BF\u03BC\u03B7\u03C7", 87, 1],
+ ["\u03BC\u03B5\u03B3\u03BB\u03BF\u03B2\u03B9\u03BF\u03BC\u03B7\u03C7", 89, 1],
+ ["\u03BA\u03B1\u03C0\u03BD\u03BF\u03B2\u03B9\u03BF\u03BC\u03B7\u03C7", 89, 1],
+ ["\u03BC\u03B9\u03BA\u03C1\u03BF\u03B2\u03B9\u03BF\u03BC\u03B7\u03C7", 89, 1],
+ ["\u03C0\u03BF\u03BB\u03C5\u03BC\u03B7\u03C7", 87, 1],
+ ["\u03BB\u03B9\u03C7", 87, 1]
+ ];
+
+ /** @const */ var a_40 = [
+ ["\u03B7\u03C3\u03B5\u03C4\u03B5", -1, 1]
+ ];
+
+ /** @const */ var a_41 = [
+ ["\u03B5\u03BD\u03B4", -1, 1],
+ ["\u03C3\u03C5\u03BD\u03B4", -1, 1],
+ ["\u03BF\u03B4", -1, 1],
+ ["\u03B4\u03B9\u03B1\u03B8", -1, 1],
+ ["\u03BA\u03B1\u03B8", -1, 1],
+ ["\u03C1\u03B1\u03B8", -1, 1],
+ ["\u03C4\u03B1\u03B8", -1, 1],
+ ["\u03C4\u03B9\u03B8", -1, 1],
+ ["\u03B5\u03BA\u03B8", -1, 1],
+ ["\u03B5\u03BD\u03B8", -1, 1],
+ ["\u03C3\u03C5\u03BD\u03B8", -1, 1],
+ ["\u03C1\u03BF\u03B8", -1, 1],
+ ["\u03C5\u03C0\u03B5\u03C1\u03B8", -1, 1],
+ ["\u03C3\u03B8", -1, 1],
+ ["\u03B5\u03C5\u03B8", -1, 1],
+ ["\u03B1\u03C1\u03BA", -1, 1],
+ ["\u03C9\u03C6\u03B5\u03BB", -1, 1],
+ ["\u03B2\u03BF\u03BB", -1, 1],
+ ["\u03B1\u03B9\u03BD", -1, 1],
+ ["\u03C0\u03BF\u03BD", -1, 1],
+ ["\u03C1\u03BF\u03BD", -1, 1],
+ ["\u03C3\u03C5\u03BD", -1, 1],
+ ["\u03B2\u03B1\u03C1", -1, 1],
+ ["\u03B2\u03C1", -1, 1],
+ ["\u03B1\u03B9\u03C1", -1, 1],
+ ["\u03C6\u03BF\u03C1", -1, 1],
+ ["\u03B5\u03C5\u03C1", -1, 1],
+ ["\u03C0\u03C5\u03C1", -1, 1],
+ ["\u03C7\u03C9\u03C1", -1, 1],
+ ["\u03BD\u03B5\u03C4", -1, 1],
+ ["\u03C3\u03C7", -1, 1]
+ ];
+
+ /** @const */ var a_42 = [
+ ["\u03C0\u03B1\u03B3", -1, 1],
+ ["\u03B4", -1, 1],
+ ["\u03B1\u03B4", 1, 1],
+ ["\u03B8", -1, 1],
+ ["\u03B1\u03B8", 3, 1],
+ ["\u03C4\u03BF\u03BA", -1, 1],
+ ["\u03C3\u03BA", -1, 1],
+ ["\u03C0\u03B1\u03C1\u03B1\u03BA\u03B1\u03BB", -1, 1],
+ ["\u03C3\u03BA\u03B5\u03BB", -1, 1],
+ ["\u03B1\u03C0\u03BB", -1, 1],
+ ["\u03B5\u03BC", -1, 1],
+ ["\u03B1\u03BD", -1, 1],
+ ["\u03B2\u03B5\u03BD", -1, 1],
+ ["\u03B2\u03B1\u03C1\u03BF\u03BD", -1, 1],
+ ["\u03BA\u03BF\u03C0", -1, 1],
+ ["\u03C3\u03B5\u03C1\u03C0", -1, 1],
+ ["\u03B1\u03B2\u03B1\u03C1", -1, 1],
+ ["\u03B5\u03BD\u03B1\u03C1", -1, 1],
+ ["\u03B1\u03B2\u03C1", -1, 1],
+ ["\u03BC\u03C0\u03BF\u03C1", -1, 1],
+ ["\u03B8\u03B1\u03C1\u03C1", -1, 1],
+ ["\u03BD\u03C4\u03C1", -1, 1],
+ ["\u03C5", -1, 1],
+ ["\u03BD\u03B9\u03C6", -1, 1],
+ ["\u03C3\u03C5\u03C1\u03C6", -1, 1]
+ ];
+
+ /** @const */ var a_43 = [
+ ["\u03BF\u03BD\u03C4\u03B1\u03C3", -1, 1],
+ ["\u03C9\u03BD\u03C4\u03B1\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_44 = [
+ ["\u03BF\u03BC\u03B1\u03C3\u03C4\u03B5", -1, 1],
+ ["\u03B9\u03BF\u03BC\u03B1\u03C3\u03C4\u03B5", 0, 1]
+ ];
+
+ /** @const */ var a_45 = [
+ ["\u03C0", -1, 1],
+ ["\u03B1\u03C0", 0, 1],
+ ["\u03B1\u03BA\u03B1\u03C4\u03B1\u03C0", 1, 1],
+ ["\u03C3\u03C5\u03BC\u03C0", 0, 1],
+ ["\u03B1\u03C3\u03C5\u03BC\u03C0", 3, 1],
+ ["\u03B1\u03BC\u03B5\u03C4\u03B1\u03BC\u03C6", -1, 1]
+ ];
+
+ /** @const */ var a_46 = [
+ ["\u03B6", -1, 1],
+ ["\u03B1\u03BB", -1, 1],
+ ["\u03C0\u03B1\u03C1\u03B1\u03BA\u03B1\u03BB", 1, 1],
+ ["\u03B5\u03BA\u03C4\u03B5\u03BB", -1, 1],
+ ["\u03BC", -1, 1],
+ ["\u03BE", -1, 1],
+ ["\u03C0\u03C1\u03BF", -1, 1],
+ ["\u03B1\u03C1", -1, 1],
+ ["\u03BD\u03B9\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_47 = [
+ ["\u03B7\u03B8\u03B7\u03BA\u03B1", -1, 1],
+ ["\u03B7\u03B8\u03B7\u03BA\u03B5", -1, 1],
+ ["\u03B7\u03B8\u03B7\u03BA\u03B5\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_48 = [
+ ["\u03C0\u03B9\u03B8", -1, 1],
+ ["\u03BF\u03B8", -1, 1],
+ ["\u03BD\u03B1\u03C1\u03B8", -1, 1],
+ ["\u03C3\u03BA\u03BF\u03C5\u03BB", -1, 1],
+ ["\u03C3\u03BA\u03C9\u03BB", -1, 1],
+ ["\u03C3\u03C6", -1, 1]
+ ];
+
+ /** @const */ var a_49 = [
+ ["\u03B8", -1, 1],
+ ["\u03B4\u03B9\u03B1\u03B8", 0, 1],
+ ["\u03C0\u03B1\u03C1\u03B1\u03BA\u03B1\u03C4\u03B1\u03B8", 0, 1],
+ ["\u03C3\u03C5\u03BD\u03B8", 0, 1],
+ ["\u03C0\u03C1\u03BF\u03C3\u03B8", 0, 1]
+ ];
+
+ /** @const */ var a_50 = [
+ ["\u03B7\u03BA\u03B1", -1, 1],
+ ["\u03B7\u03BA\u03B5", -1, 1],
+ ["\u03B7\u03BA\u03B5\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_51 = [
+ ["\u03C6\u03B1\u03B3", -1, 1],
+ ["\u03BB\u03B7\u03B3", -1, 1],
+ ["\u03C6\u03C1\u03C5\u03B4", -1, 1],
+ ["\u03BC\u03B1\u03BD\u03C4\u03B9\u03BB", -1, 1],
+ ["\u03BC\u03B1\u03BB\u03BB", -1, 1],
+ ["\u03BF\u03BC", -1, 1],
+ ["\u03B2\u03BB\u03B5\u03C0", -1, 1],
+ ["\u03C0\u03BF\u03B4\u03B1\u03C1", -1, 1],
+ ["\u03BA\u03C5\u03BC\u03B1\u03C4", -1, 1],
+ ["\u03C0\u03C1\u03C9\u03C4", -1, 1],
+ ["\u03BB\u03B1\u03C7", -1, 1],
+ ["\u03C0\u03B1\u03BD\u03C4\u03B1\u03C7", -1, 1]
+ ];
+
+ /** @const */ var a_52 = [
+ ["\u03C4\u03C3\u03B1", -1, 1],
+ ["\u03C7\u03B1\u03B4", -1, 1],
+ ["\u03BC\u03B5\u03B4", -1, 1],
+ ["\u03BB\u03B1\u03BC\u03C0\u03B9\u03B4", -1, 1],
+ ["\u03B4\u03B5", -1, 1],
+ ["\u03C0\u03BB\u03B5", -1, 1],
+ ["\u03BC\u03B5\u03C3\u03B1\u03B6", -1, 1],
+ ["\u03B4\u03B5\u03C3\u03C0\u03BF\u03B6", -1, 1],
+ ["\u03B1\u03B9\u03B8", -1, 1],
+ ["\u03C6\u03B1\u03C1\u03BC\u03B1\u03BA", -1, 1],
+ ["\u03B1\u03B3\u03BA", -1, 1],
+ ["\u03B1\u03BD\u03B7\u03BA", -1, 1],
+ ["\u03BB", -1, 1],
+ ["\u03BC", -1, 1],
+ ["\u03B1\u03BC", 13, 1],
+ ["\u03B2\u03C1\u03BF\u03BC", 13, 1],
+ ["\u03C5\u03C0\u03BF\u03C4\u03B5\u03B9\u03BD", -1, 1],
+ ["\u03B5\u03BA\u03BB\u03B9\u03C0", -1, 1],
+ ["\u03C1", -1, 1],
+ ["\u03B5\u03BD\u03B4\u03B9\u03B1\u03C6\u03B5\u03C1", 18, 1],
+ ["\u03B1\u03BD\u03B1\u03C1\u03C1", 18, 1],
+ ["\u03C0\u03B1\u03C4", -1, 1],
+ ["\u03BA\u03B1\u03B8\u03B1\u03C1\u03B5\u03C5", -1, 1],
+ ["\u03B4\u03B5\u03C5\u03C4\u03B5\u03C1\u03B5\u03C5", -1, 1],
+ ["\u03BB\u03B5\u03C7", -1, 1]
+ ];
+
+ /** @const */ var a_53 = [
+ ["\u03BF\u03C5\u03C3\u03B1", -1, 1],
+ ["\u03BF\u03C5\u03C3\u03B5", -1, 1],
+ ["\u03BF\u03C5\u03C3\u03B5\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_54 = [
+ ["\u03C0\u03B5\u03BB", -1, 1],
+ ["\u03BB\u03BB", -1, 1],
+ ["\u03C3\u03BC\u03B7\u03BD", -1, 1],
+ ["\u03C1\u03C0", -1, 1],
+ ["\u03C0\u03C1", -1, 1],
+ ["\u03C6\u03C1", -1, 1],
+ ["\u03C7\u03BF\u03C1\u03C4", -1, 1],
+ ["\u03BF\u03C6", -1, 1],
+ ["\u03C8\u03BF\u03C6", 7, -1],
+ ["\u03C3\u03C6", -1, 1],
+ ["\u03BB\u03BF\u03C7", -1, 1],
+ ["\u03BD\u03B1\u03C5\u03BB\u03BF\u03C7", 10, -1]
+ ];
+
+ /** @const */ var a_55 = [
+ ["\u03B1\u03BC\u03B1\u03BB\u03BB\u03B9", -1, 1],
+ ["\u03BB", -1, 1],
+ ["\u03B1\u03BC\u03B1\u03BB", 1, 1],
+ ["\u03BC", -1, 1],
+ ["\u03BF\u03C5\u03BB\u03B1\u03BC", 3, 1],
+ ["\u03B5\u03BD", -1, 1],
+ ["\u03B4\u03B5\u03C1\u03B2\u03B5\u03BD", 5, 1],
+ ["\u03C0", -1, 1],
+ ["\u03B1\u03B5\u03B9\u03C0", 7, 1],
+ ["\u03B1\u03C1\u03C4\u03B9\u03C0", 7, 1],
+ ["\u03C3\u03C5\u03BC\u03C0", 7, 1],
+ ["\u03BD\u03B5\u03BF\u03C0", 7, 1],
+ ["\u03BA\u03C1\u03BF\u03BA\u03B1\u03BB\u03BF\u03C0", 7, 1],
+ ["\u03BF\u03BB\u03BF\u03C0", 7, 1],
+ ["\u03C0\u03C1\u03BF\u03C3\u03C9\u03C0\u03BF\u03C0", 7, 1],
+ ["\u03C3\u03B9\u03B4\u03B7\u03C1\u03BF\u03C0", 7, 1],
+ ["\u03B4\u03C1\u03BF\u03C3\u03BF\u03C0", 7, 1],
+ ["\u03B1\u03C3\u03C0", 7, 1],
+ ["\u03B1\u03BD\u03C5\u03C0", 7, 1],
+ ["\u03C1", -1, 1],
+ ["\u03B1\u03C3\u03C0\u03B1\u03C1", 19, 1],
+ ["\u03C7\u03B1\u03C1", 19, 1],
+ ["\u03B1\u03C7\u03B1\u03C1", 21, 1],
+ ["\u03B1\u03C0\u03B5\u03C1", 19, 1],
+ ["\u03C4\u03C1", 19, 1],
+ ["\u03BF\u03C5\u03C1", 19, 1],
+ ["\u03C4", -1, 1],
+ ["\u03B4\u03B9\u03B1\u03C4", 26, 1],
+ ["\u03B5\u03C0\u03B9\u03C4", 26, 1],
+ ["\u03C3\u03C5\u03BD\u03C4", 26, 1],
+ ["\u03BF\u03BC\u03BF\u03C4", 26, 1],
+ ["\u03BD\u03BF\u03BC\u03BF\u03C4", 30, 1],
+ ["\u03B1\u03C0\u03BF\u03C4", 26, 1],
+ ["\u03C5\u03C0\u03BF\u03C4", 26, 1],
+ ["\u03B1\u03B2\u03B1\u03C3\u03C4", 26, 1],
+ ["\u03B1\u03B9\u03BC\u03BF\u03C3\u03C4", 26, 1],
+ ["\u03C0\u03C1\u03BF\u03C3\u03C4", 26, 1],
+ ["\u03B1\u03BD\u03C5\u03C3\u03C4", 26, 1],
+ ["\u03BD\u03B1\u03C5", -1, 1],
+ ["\u03B1\u03C6", -1, 1],
+ ["\u03BE\u03B5\u03C6", -1, 1],
+ ["\u03B1\u03B4\u03B7\u03C6", -1, 1],
+ ["\u03C0\u03B1\u03BC\u03C6", -1, 1],
+ ["\u03C0\u03BF\u03BB\u03C5\u03C6", -1, 1]
+ ];
+
+ /** @const */ var a_56 = [
+ ["\u03B1\u03B3\u03B1", -1, 1],
+ ["\u03B1\u03B3\u03B5", -1, 1],
+ ["\u03B1\u03B3\u03B5\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_57 = [
+ ["\u03B7\u03C3\u03B1", -1, 1],
+ ["\u03B7\u03C3\u03B5", -1, 1],
+ ["\u03B7\u03C3\u03BF\u03C5", -1, 1]
+ ];
+
+ /** @const */ var a_58 = [
+ ["\u03BD", -1, 1],
+ ["\u03B4\u03C9\u03B4\u03B5\u03BA\u03B1\u03BD", 0, 1],
+ ["\u03B5\u03C0\u03C4\u03B1\u03BD", 0, 1],
+ ["\u03BC\u03B5\u03B3\u03B1\u03BB\u03BF\u03BD", 0, 1],
+ ["\u03B5\u03C1\u03B7\u03BC\u03BF\u03BD", 0, 1],
+ ["\u03C7\u03B5\u03C1\u03C3\u03BF\u03BD", 0, 1]
+ ];
+
+ /** @const */ var a_59 = [
+ ["\u03B7\u03C3\u03C4\u03B5", -1, 1]
+ ];
+
+ /** @const */ var a_60 = [
+ ["\u03C3\u03B2", -1, 1],
+ ["\u03B1\u03C3\u03B2", 0, 1],
+ ["\u03B1\u03C0\u03BB", -1, 1],
+ ["\u03B1\u03B5\u03B9\u03BC\u03BD", -1, 1],
+ ["\u03C7\u03C1", -1, 1],
+ ["\u03B1\u03C7\u03C1", 4, 1],
+ ["\u03BA\u03BF\u03B9\u03BD\u03BF\u03C7\u03C1", 4, 1],
+ ["\u03B4\u03C5\u03C3\u03C7\u03C1", 4, 1],
+ ["\u03B5\u03C5\u03C7\u03C1", 4, 1],
+ ["\u03C0\u03B1\u03BB\u03B9\u03BC\u03C8", -1, 1]
+ ];
+
+ /** @const */ var a_61 = [
+ ["\u03BF\u03C5\u03BD\u03B5", -1, 1],
+ ["\u03B7\u03B8\u03BF\u03C5\u03BD\u03B5", 0, 1],
+ ["\u03B7\u03C3\u03BF\u03C5\u03BD\u03B5", 0, 1]
+ ];
+
+ /** @const */ var a_62 = [
+ ["\u03C3\u03C0\u03B9", -1, 1],
+ ["\u03BD", -1, 1],
+ ["\u03B5\u03BE\u03C9\u03BD", 1, 1],
+ ["\u03C1", -1, 1],
+ ["\u03C3\u03C4\u03C1\u03B1\u03B2\u03BF\u03BC\u03BF\u03C5\u03C4\u03C3", -1, 1],
+ ["\u03BA\u03B1\u03BA\u03BF\u03BC\u03BF\u03C5\u03C4\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_63 = [
+ ["\u03BF\u03C5\u03BC\u03B5", -1, 1],
+ ["\u03B7\u03B8\u03BF\u03C5\u03BC\u03B5", 0, 1],
+ ["\u03B7\u03C3\u03BF\u03C5\u03BC\u03B5", 0, 1]
+ ];
+
+ /** @const */ var a_64 = [
+ ["\u03B1\u03B6", -1, 1],
+ ["\u03C9\u03C1\u03B9\u03BF\u03C0\u03BB", -1, 1],
+ ["\u03B1\u03C3\u03BF\u03C5\u03C3", -1, 1],
+ ["\u03C0\u03B1\u03C1\u03B1\u03C3\u03BF\u03C5\u03C3", 2, 1],
+ ["\u03B1\u03BB\u03BB\u03BF\u03C3\u03BF\u03C5\u03C3", -1, 1],
+ ["\u03C6", -1, 1],
+ ["\u03C7", -1, 1]
+ ];
+
+ /** @const */ var a_65 = [
+ ["\u03BC\u03B1\u03C4\u03B1", -1, 1],
+ ["\u03BC\u03B1\u03C4\u03C9\u03BD", -1, 1],
+ ["\u03BC\u03B1\u03C4\u03BF\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_66 = [
+ ["\u03B1", -1, 1],
+ ["\u03B9\u03BF\u03C5\u03BC\u03B1", 0, 1],
+ ["\u03BF\u03BC\u03BF\u03C5\u03BD\u03B1", 0, 1],
+ ["\u03B9\u03BF\u03BC\u03BF\u03C5\u03BD\u03B1", 2, 1],
+ ["\u03BF\u03C3\u03BF\u03C5\u03BD\u03B1", 0, 1],
+ ["\u03B9\u03BF\u03C3\u03BF\u03C5\u03BD\u03B1", 4, 1],
+ ["\u03B5", -1, 1],
+ ["\u03B1\u03B3\u03B1\u03C4\u03B5", 6, 1],
+ ["\u03B7\u03BA\u03B1\u03C4\u03B5", 6, 1],
+ ["\u03B7\u03B8\u03B7\u03BA\u03B1\u03C4\u03B5", 8, 1],
+ ["\u03B7\u03C3\u03B1\u03C4\u03B5", 6, 1],
+ ["\u03BF\u03C5\u03C3\u03B1\u03C4\u03B5", 6, 1],
+ ["\u03B5\u03B9\u03C4\u03B5", 6, 1],
+ ["\u03B7\u03B8\u03B5\u03B9\u03C4\u03B5", 12, 1],
+ ["\u03B9\u03B5\u03BC\u03B1\u03C3\u03C4\u03B5", 6, 1],
+ ["\u03BF\u03C5\u03BC\u03B1\u03C3\u03C4\u03B5", 6, 1],
+ ["\u03B9\u03BF\u03C5\u03BC\u03B1\u03C3\u03C4\u03B5", 15, 1],
+ ["\u03B9\u03B5\u03C3\u03B1\u03C3\u03C4\u03B5", 6, 1],
+ ["\u03BF\u03C3\u03B1\u03C3\u03C4\u03B5", 6, 1],
+ ["\u03B9\u03BF\u03C3\u03B1\u03C3\u03C4\u03B5", 18, 1],
+ ["\u03B7", -1, 1],
+ ["\u03B9", -1, 1],
+ ["\u03B1\u03BC\u03B1\u03B9", 21, 1],
+ ["\u03B9\u03B5\u03BC\u03B1\u03B9", 21, 1],
+ ["\u03BF\u03BC\u03B1\u03B9", 21, 1],
+ ["\u03BF\u03C5\u03BC\u03B1\u03B9", 21, 1],
+ ["\u03B1\u03C3\u03B1\u03B9", 21, 1],
+ ["\u03B5\u03C3\u03B1\u03B9", 21, 1],
+ ["\u03B9\u03B5\u03C3\u03B1\u03B9", 27, 1],
+ ["\u03B1\u03C4\u03B1\u03B9", 21, 1],
+ ["\u03B5\u03C4\u03B1\u03B9", 21, 1],
+ ["\u03B9\u03B5\u03C4\u03B1\u03B9", 30, 1],
+ ["\u03BF\u03BD\u03C4\u03B1\u03B9", 21, 1],
+ ["\u03BF\u03C5\u03BD\u03C4\u03B1\u03B9", 21, 1],
+ ["\u03B9\u03BF\u03C5\u03BD\u03C4\u03B1\u03B9", 33, 1],
+ ["\u03B5\u03B9", 21, 1],
+ ["\u03B1\u03B5\u03B9", 35, 1],
+ ["\u03B7\u03B8\u03B5\u03B9", 35, 1],
+ ["\u03B7\u03C3\u03B5\u03B9", 35, 1],
+ ["\u03BF\u03B9", 21, 1],
+ ["\u03B1\u03BD", -1, 1],
+ ["\u03B1\u03B3\u03B1\u03BD", 40, 1],
+ ["\u03B7\u03BA\u03B1\u03BD", 40, 1],
+ ["\u03B7\u03B8\u03B7\u03BA\u03B1\u03BD", 42, 1],
+ ["\u03B7\u03C3\u03B1\u03BD", 40, 1],
+ ["\u03BF\u03C5\u03C3\u03B1\u03BD", 40, 1],
+ ["\u03BF\u03BD\u03C4\u03BF\u03C5\u03C3\u03B1\u03BD", 45, 1],
+ ["\u03B9\u03BF\u03BD\u03C4\u03BF\u03C5\u03C3\u03B1\u03BD", 46, 1],
+ ["\u03BF\u03BD\u03C4\u03B1\u03BD", 40, 1],
+ ["\u03B9\u03BF\u03BD\u03C4\u03B1\u03BD", 48, 1],
+ ["\u03BF\u03C5\u03BD\u03C4\u03B1\u03BD", 40, 1],
+ ["\u03B9\u03BF\u03C5\u03BD\u03C4\u03B1\u03BD", 50, 1],
+ ["\u03BF\u03C4\u03B1\u03BD", 40, 1],
+ ["\u03B9\u03BF\u03C4\u03B1\u03BD", 52, 1],
+ ["\u03BF\u03BC\u03B1\u03C3\u03C4\u03B1\u03BD", 40, 1],
+ ["\u03B9\u03BF\u03BC\u03B1\u03C3\u03C4\u03B1\u03BD", 54, 1],
+ ["\u03BF\u03C3\u03B1\u03C3\u03C4\u03B1\u03BD", 40, 1],
+ ["\u03B9\u03BF\u03C3\u03B1\u03C3\u03C4\u03B1\u03BD", 56, 1],
+ ["\u03BF\u03C5\u03BD", -1, 1],
+ ["\u03B7\u03B8\u03BF\u03C5\u03BD", 58, 1],
+ ["\u03BF\u03BC\u03BF\u03C5\u03BD", 58, 1],
+ ["\u03B9\u03BF\u03BC\u03BF\u03C5\u03BD", 60, 1],
+ ["\u03B7\u03C3\u03BF\u03C5\u03BD", 58, 1],
+ ["\u03BF\u03C3\u03BF\u03C5\u03BD", 58, 1],
+ ["\u03B9\u03BF\u03C3\u03BF\u03C5\u03BD", 63, 1],
+ ["\u03C9\u03BD", -1, 1],
+ ["\u03B7\u03B4\u03C9\u03BD", 65, 1],
+ ["\u03BF", -1, 1],
+ ["\u03B1\u03C3", -1, 1],
+ ["\u03B5\u03C3", -1, 1],
+ ["\u03B7\u03B4\u03B5\u03C3", 69, 1],
+ ["\u03B7\u03C3\u03B5\u03C3", 69, 1],
+ ["\u03B7\u03C3", -1, 1],
+ ["\u03B5\u03B9\u03C3", -1, 1],
+ ["\u03B7\u03B8\u03B5\u03B9\u03C3", 73, 1],
+ ["\u03BF\u03C3", -1, 1],
+ ["\u03C5\u03C3", -1, 1],
+ ["\u03BF\u03C5\u03C3", 76, 1],
+ ["\u03C5", -1, 1],
+ ["\u03BF\u03C5", 78, 1],
+ ["\u03C9", -1, 1],
+ ["\u03B1\u03C9", 80, 1],
+ ["\u03B7\u03B8\u03C9", 80, 1],
+ ["\u03B7\u03C3\u03C9", 80, 1]
+ ];
+
+ /** @const */ var a_67 = [
+ ["\u03BF\u03C4\u03B5\u03C1", -1, 1],
+ ["\u03B5\u03C3\u03C4\u03B5\u03C1", -1, 1],
+ ["\u03C5\u03C4\u03B5\u03C1", -1, 1],
+ ["\u03C9\u03C4\u03B5\u03C1", -1, 1],
+ ["\u03BF\u03C4\u03B1\u03C4", -1, 1],
+ ["\u03B5\u03C3\u03C4\u03B1\u03C4", -1, 1],
+ ["\u03C5\u03C4\u03B1\u03C4", -1, 1],
+ ["\u03C9\u03C4\u03B1\u03C4", -1, 1]
+ ];
+
+ /** @const */ var /** Array */ g_v = [81, 65, 16, 1];
+
+ /** @const */ var /** Array */ g_v2 = [81, 65, 0, 1];
+
+ var /** boolean */ B_test1 = false;
+
+
+ /** @return {boolean} */
+ function r_has_min_length() {
+ return base.current.length >= 3;
+ };
+
+ /** @return {boolean} */
+ function r_tolower() {
+ var /** number */ among_var;
+ while(true)
+ {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_0);
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u03B1"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u03B2"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u03B3"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("\u03B4"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("\u03B5"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("\u03B6"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("\u03B7"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("\u03B8"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("\u03B9"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("\u03BA"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ if (!base.slice_from("\u03BB"))
+ {
+ return false;
+ }
+ break;
+ case 12:
+ if (!base.slice_from("\u03BC"))
+ {
+ return false;
+ }
+ break;
+ case 13:
+ if (!base.slice_from("\u03BD"))
+ {
+ return false;
+ }
+ break;
+ case 14:
+ if (!base.slice_from("\u03BE"))
+ {
+ return false;
+ }
+ break;
+ case 15:
+ if (!base.slice_from("\u03BF"))
+ {
+ return false;
+ }
+ break;
+ case 16:
+ if (!base.slice_from("\u03C0"))
+ {
+ return false;
+ }
+ break;
+ case 17:
+ if (!base.slice_from("\u03C1"))
+ {
+ return false;
+ }
+ break;
+ case 18:
+ if (!base.slice_from("\u03C3"))
+ {
+ return false;
+ }
+ break;
+ case 19:
+ if (!base.slice_from("\u03C4"))
+ {
+ return false;
+ }
+ break;
+ case 20:
+ if (!base.slice_from("\u03C5"))
+ {
+ return false;
+ }
+ break;
+ case 21:
+ if (!base.slice_from("\u03C6"))
+ {
+ return false;
+ }
+ break;
+ case 22:
+ if (!base.slice_from("\u03C7"))
+ {
+ return false;
+ }
+ break;
+ case 23:
+ if (!base.slice_from("\u03C8"))
+ {
+ return false;
+ }
+ break;
+ case 24:
+ if (!base.slice_from("\u03C9"))
+ {
+ return false;
+ }
+ break;
+ case 25:
+ if (base.cursor <= base.limit_backward)
+ {
+ break lab0;
+ }
+ base.cursor--;
+ break;
+ }
+ continue;
+ }
+ base.cursor = base.limit - v_1;
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_1() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_1);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u03C6\u03B1"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u03C3\u03BA\u03B1"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u03BF\u03BB\u03BF"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("\u03C3\u03BF"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("\u03C4\u03B1\u03C4\u03BF"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("\u03BA\u03C1\u03B5"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("\u03C0\u03B5\u03C1"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("\u03C4\u03B5\u03C1"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("\u03C6\u03C9"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("\u03BA\u03B1\u03B8\u03B5\u03C3\u03C4"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ if (!base.slice_from("\u03B3\u03B5\u03B3\u03BF\u03BD"))
+ {
+ return false;
+ }
+ break;
+ }
+ B_test1 = false;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_s1() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_3) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ among_var = base.find_among_b(a_2);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u03B9"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u03B9\u03B6"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_s2() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_5) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_4) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03C9\u03BD"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_s3() {
+ var /** number */ among_var;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u03B9\u03C3\u03B1")))
+ {
+ break lab1;
+ }
+ base.bra = base.cursor;
+ if (base.cursor > base.limit_backward)
+ {
+ break lab1;
+ }
+ if (!base.slice_from("\u03B9\u03C3"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ }
+ if (base.find_among_b(a_7) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ among_var = base.find_among_b(a_6);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u03B9"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u03B9\u03C3"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_s4() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_9) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_8) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B9"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_s5() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_11) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ among_var = base.find_among_b(a_10);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u03B9"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u03B9\u03C3\u03C4"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_s6() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_14) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ among_var = base.find_among_b(a_12);
+ if (among_var == 0)
+ {
+ break lab1;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab1;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u03B9\u03C3\u03BC"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u03B9"))
+ {
+ return false;
+ }
+ break;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_13);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u03B1\u03B3\u03BD\u03C9\u03C3\u03C4"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u03B1\u03C4\u03BF\u03BC"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u03B3\u03BD\u03C9\u03C3\u03C4"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("\u03B5\u03B8\u03BD"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("\u03B5\u03BA\u03BB\u03B5\u03BA\u03C4"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("\u03C3\u03BA\u03B5\u03C0\u03C4"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("\u03C4\u03BF\u03C0"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("\u03B1\u03BB\u03B5\u03BE\u03B1\u03BD\u03B4\u03C1"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("\u03B2\u03C5\u03B6\u03B1\u03BD\u03C4"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("\u03B8\u03B5\u03B1\u03C4\u03C1"))
+ {
+ return false;
+ }
+ break;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_s7() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_16) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_15) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B1\u03C1\u03B1\u03BA"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_s8() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_18) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ among_var = base.find_among_b(a_17);
+ if (among_var == 0)
+ {
+ break lab1;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab1;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u03B1\u03BA"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u03B9\u03C4\u03C3"))
+ {
+ return false;
+ }
+ break;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (!(base.eq_s_b("\u03BA\u03BF\u03C1")))
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B9\u03C4\u03C3"))
+ {
+ return false;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_s9() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_21) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_19) == 0)
+ {
+ break lab1;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab1;
+ }
+ if (!base.slice_from("\u03B9\u03B4"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_20) == 0)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B9\u03B4"))
+ {
+ return false;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_s10() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_23) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_22) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B9\u03C3\u03BA"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_2a() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_24) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (base.find_among_b(a_25) == 0)
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_1;
+ }
+ {
+ /** @const */ var /** number */ c1 = base.cursor;
+ base.insert(base.cursor, base.cursor, "\u03B1\u03B4");
+ base.cursor = c1;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_2b() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_26) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_27) == 0)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B5\u03B4"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_2c() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_28) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_29) == 0)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03BF\u03C5\u03B4"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_2d() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_30) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_31) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B5"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_3() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_32) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (!(base.in_grouping_b(g_v, 945, 969)))
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B9"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_4() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_33) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (!(base.in_grouping_b(g_v, 945, 969)))
+ {
+ break lab1;
+ }
+ if (!base.slice_from("\u03B9\u03BA"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ }
+ base.bra = base.cursor;
+ if (base.find_among_b(a_34) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B9\u03BA"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5a() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u03B1\u03B3\u03B1\u03BC\u03B5")))
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ if (base.cursor > base.limit_backward)
+ {
+ break lab0;
+ }
+ if (!base.slice_from("\u03B1\u03B3\u03B1\u03BC"))
+ {
+ return false;
+ }
+ }
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_35) == 0)
+ {
+ break lab1;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ }
+ base.cursor = base.limit - v_2;
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u03B1\u03BC\u03B5")))
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_36) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B1\u03BC"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5b() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_38) == 0)
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_37) == 0)
+ {
+ break lab0;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab0;
+ }
+ if (!base.slice_from("\u03B1\u03B3\u03B1\u03BD"))
+ {
+ return false;
+ }
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u03B1\u03BD\u03B5")))
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab2: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (!(base.in_grouping_b(g_v2, 945, 969)))
+ {
+ break lab2;
+ }
+ if (!base.slice_from("\u03B1\u03BD"))
+ {
+ return false;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ base.ket = base.cursor;
+ }
+ base.bra = base.cursor;
+ if (base.find_among_b(a_39) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B1\u03BD"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5c() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_40) == 0)
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u03B5\u03C4\u03B5")))
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab2: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (!(base.in_grouping_b(g_v2, 945, 969)))
+ {
+ break lab2;
+ }
+ if (!base.slice_from("\u03B5\u03C4"))
+ {
+ return false;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ lab3: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_41) == 0)
+ {
+ break lab3;
+ }
+ if (!base.slice_from("\u03B5\u03C4"))
+ {
+ return false;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ base.ket = base.cursor;
+ }
+ base.bra = base.cursor;
+ if (base.find_among_b(a_42) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B5\u03C4"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5d() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_43) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (!(base.eq_s_b("\u03B1\u03C1\u03C7")))
+ {
+ break lab1;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab1;
+ }
+ if (!base.slice_from("\u03BF\u03BD\u03C4"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (!(base.eq_s_b("\u03BA\u03C1\u03B5")))
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03C9\u03BD\u03C4"))
+ {
+ return false;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5e() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_44) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (!(base.eq_s_b("\u03BF\u03BD")))
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03BF\u03BC\u03B1\u03C3\u03C4"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5f() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u03B9\u03B5\u03C3\u03C4\u03B5")))
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_45) == 0)
+ {
+ break lab0;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab0;
+ }
+ if (!base.slice_from("\u03B9\u03B5\u03C3\u03C4"))
+ {
+ return false;
+ }
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u03B5\u03C3\u03C4\u03B5")))
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_46) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B9\u03B5\u03C3\u03C4"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5g() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_47) == 0)
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_50) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab2: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_48) == 0)
+ {
+ break lab2;
+ }
+ if (!base.slice_from("\u03B7\u03BA"))
+ {
+ return false;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_49) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B7\u03BA"))
+ {
+ return false;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5h() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_53) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_51) == 0)
+ {
+ break lab1;
+ }
+ if (!base.slice_from("\u03BF\u03C5\u03C3"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_52) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03BF\u03C5\u03C3"))
+ {
+ return false;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5i() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_56) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (!(base.eq_s_b("\u03BA\u03BF\u03BB\u03BB")))
+ {
+ break lab1;
+ }
+ if (!base.slice_from("\u03B1\u03B3"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab2: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab3: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ among_var = base.find_among_b(a_54);
+ if (among_var == 0)
+ {
+ break lab3;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u03B1\u03B3"))
+ {
+ return false;
+ }
+ break;
+ }
+ break lab2;
+ }
+ base.cursor = base.limit - v_2;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_55) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B1\u03B3"))
+ {
+ return false;
+ }
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5j() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_57) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_58) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B7\u03C3"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5k() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_59) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_60) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B7\u03C3\u03C4"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5l() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_61) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_62) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03BF\u03C5\u03BD"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5m() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_63) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_64) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03BF\u03C5\u03BC"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_6() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_65) == 0)
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("\u03BC\u03B1"))
+ {
+ return false;
+ }
+ }
+ base.cursor = base.limit - v_1;
+ if (!B_test1)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ if (base.find_among_b(a_66) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_7() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_67) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ r_tolower();
+ base.cursor = base.limit - v_1;
+ if (!r_has_min_length())
+ {
+ return false;
+ }
+ B_test1 = true;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ r_step_1();
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ r_step_s1();
+ base.cursor = base.limit - v_3;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ r_step_s2();
+ base.cursor = base.limit - v_4;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ r_step_s3();
+ base.cursor = base.limit - v_5;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ r_step_s4();
+ base.cursor = base.limit - v_6;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ r_step_s5();
+ base.cursor = base.limit - v_7;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ r_step_s6();
+ base.cursor = base.limit - v_8;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
+ r_step_s7();
+ base.cursor = base.limit - v_9;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
+ r_step_s8();
+ base.cursor = base.limit - v_10;
+ /** @const */ var /** number */ v_11 = base.limit - base.cursor;
+ r_step_s9();
+ base.cursor = base.limit - v_11;
+ /** @const */ var /** number */ v_12 = base.limit - base.cursor;
+ r_step_s10();
+ base.cursor = base.limit - v_12;
+ /** @const */ var /** number */ v_13 = base.limit - base.cursor;
+ r_step_2a();
+ base.cursor = base.limit - v_13;
+ /** @const */ var /** number */ v_14 = base.limit - base.cursor;
+ r_step_2b();
+ base.cursor = base.limit - v_14;
+ /** @const */ var /** number */ v_15 = base.limit - base.cursor;
+ r_step_2c();
+ base.cursor = base.limit - v_15;
+ /** @const */ var /** number */ v_16 = base.limit - base.cursor;
+ r_step_2d();
+ base.cursor = base.limit - v_16;
+ /** @const */ var /** number */ v_17 = base.limit - base.cursor;
+ r_step_3();
+ base.cursor = base.limit - v_17;
+ /** @const */ var /** number */ v_18 = base.limit - base.cursor;
+ r_step_4();
+ base.cursor = base.limit - v_18;
+ /** @const */ var /** number */ v_19 = base.limit - base.cursor;
+ r_step_5a();
+ base.cursor = base.limit - v_19;
+ /** @const */ var /** number */ v_20 = base.limit - base.cursor;
+ r_step_5b();
+ base.cursor = base.limit - v_20;
+ /** @const */ var /** number */ v_21 = base.limit - base.cursor;
+ r_step_5c();
+ base.cursor = base.limit - v_21;
+ /** @const */ var /** number */ v_22 = base.limit - base.cursor;
+ r_step_5d();
+ base.cursor = base.limit - v_22;
+ /** @const */ var /** number */ v_23 = base.limit - base.cursor;
+ r_step_5e();
+ base.cursor = base.limit - v_23;
+ /** @const */ var /** number */ v_24 = base.limit - base.cursor;
+ r_step_5f();
+ base.cursor = base.limit - v_24;
+ /** @const */ var /** number */ v_25 = base.limit - base.cursor;
+ r_step_5g();
+ base.cursor = base.limit - v_25;
+ /** @const */ var /** number */ v_26 = base.limit - base.cursor;
+ r_step_5h();
+ base.cursor = base.limit - v_26;
+ /** @const */ var /** number */ v_27 = base.limit - base.cursor;
+ r_step_5j();
+ base.cursor = base.limit - v_27;
+ /** @const */ var /** number */ v_28 = base.limit - base.cursor;
+ r_step_5i();
+ base.cursor = base.limit - v_28;
+ /** @const */ var /** number */ v_29 = base.limit - base.cursor;
+ r_step_5k();
+ base.cursor = base.limit - v_29;
+ /** @const */ var /** number */ v_30 = base.limit - base.cursor;
+ r_step_5l();
+ base.cursor = base.limit - v_30;
+ /** @const */ var /** number */ v_31 = base.limit - base.cursor;
+ r_step_5m();
+ base.cursor = base.limit - v_31;
+ /** @const */ var /** number */ v_32 = base.limit - base.cursor;
+ r_step_6();
+ base.cursor = base.limit - v_32;
+ /** @const */ var /** number */ v_33 = base.limit - base.cursor;
+ r_step_7();
+ base.cursor = base.limit - v_33;
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
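A note on the driver pattern in the Greek stemmer's stem() above, since every generated file here follows it: each r_step_* call is bracketed by saving base.limit - base.cursor and restoring it afterwards, so a step that fails (or matches nothing) leaves the cursor where the next step expects it. The B_test1 flag is set before step 1 and cleared by every successful removal; r_step_6 consults it and only strips its ending when none of the earlier steps fired. A minimal usage sketch, assuming the BaseStemmer runtime these generated files require is already in scope (the input word is hypothetical):

    var stemmer = new GreekStemmer();
    // stem() lower-cases via r_tolower, checks the minimum length,
    // then runs steps 1 through 7 in sequence over the reversed cursor.
    var stem = stemmer.stemWord('\u03B3\u03C1\u03B1\u03C6\u03B5\u03AF\u03BF');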
diff --git a/sphinx/search/non-minified-js/hindi-stemmer.js b/sphinx/search/non-minified-js/hindi-stemmer.js
new file mode 100644
index 00000000000..26a715e7e77
--- /dev/null
+++ b/sphinx/search/non-minified-js/hindi-stemmer.js
@@ -0,0 +1,181 @@
+// Generated from hindi.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var HindiStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["\u0906\u0901", -1, -1],
+ ["\u093E\u0901", -1, -1],
+ ["\u0907\u092F\u093E\u0901", 1, -1],
+ ["\u0906\u0907\u092F\u093E\u0901", 2, -1],
+ ["\u093E\u0907\u092F\u093E\u0901", 2, -1],
+ ["\u093F\u092F\u093E\u0901", 1, -1],
+ ["\u0906\u0902", -1, -1],
+ ["\u0909\u0906\u0902", 6, -1],
+ ["\u0941\u0906\u0902", 6, -1],
+ ["\u0908\u0902", -1, -1],
+ ["\u0906\u0908\u0902", 9, -1],
+ ["\u093E\u0908\u0902", 9, -1],
+ ["\u090F\u0902", -1, -1],
+ ["\u0906\u090F\u0902", 12, -1],
+ ["\u0909\u090F\u0902", 12, -1],
+ ["\u093E\u090F\u0902", 12, -1],
+ ["\u0924\u093E\u090F\u0902", 15, -1, r_CONSONANT],
+ ["\u0905\u0924\u093E\u090F\u0902", 16, -1],
+ ["\u0928\u093E\u090F\u0902", 15, -1, r_CONSONANT],
+ ["\u0905\u0928\u093E\u090F\u0902", 18, -1],
+ ["\u0941\u090F\u0902", 12, -1],
+ ["\u0913\u0902", -1, -1],
+ ["\u0906\u0913\u0902", 21, -1],
+ ["\u0909\u0913\u0902", 21, -1],
+ ["\u093E\u0913\u0902", 21, -1],
+ ["\u0924\u093E\u0913\u0902", 24, -1, r_CONSONANT],
+ ["\u0905\u0924\u093E\u0913\u0902", 25, -1],
+ ["\u0928\u093E\u0913\u0902", 24, -1, r_CONSONANT],
+ ["\u0905\u0928\u093E\u0913\u0902", 27, -1],
+ ["\u0941\u0913\u0902", 21, -1],
+ ["\u093E\u0902", -1, -1],
+ ["\u0907\u092F\u093E\u0902", 30, -1],
+ ["\u0906\u0907\u092F\u093E\u0902", 31, -1],
+ ["\u093E\u0907\u092F\u093E\u0902", 31, -1],
+ ["\u093F\u092F\u093E\u0902", 30, -1],
+ ["\u0940\u0902", -1, -1],
+ ["\u0924\u0940\u0902", 35, -1, r_CONSONANT],
+ ["\u0905\u0924\u0940\u0902", 36, -1],
+ ["\u0906\u0924\u0940\u0902", 36, -1],
+ ["\u093E\u0924\u0940\u0902", 36, -1],
+ ["\u0947\u0902", -1, -1],
+ ["\u094B\u0902", -1, -1],
+ ["\u0907\u092F\u094B\u0902", 41, -1],
+ ["\u0906\u0907\u092F\u094B\u0902", 42, -1],
+ ["\u093E\u0907\u092F\u094B\u0902", 42, -1],
+ ["\u093F\u092F\u094B\u0902", 41, -1],
+ ["\u0905", -1, -1],
+ ["\u0906", -1, -1],
+ ["\u0907", -1, -1],
+ ["\u0908", -1, -1],
+ ["\u0906\u0908", 49, -1],
+ ["\u093E\u0908", 49, -1],
+ ["\u0909", -1, -1],
+ ["\u090A", -1, -1],
+ ["\u090F", -1, -1],
+ ["\u0906\u090F", 54, -1],
+ ["\u0907\u090F", 54, -1],
+ ["\u0906\u0907\u090F", 56, -1],
+ ["\u093E\u0907\u090F", 56, -1],
+ ["\u093E\u090F", 54, -1],
+ ["\u093F\u090F", 54, -1],
+ ["\u0913", -1, -1],
+ ["\u0906\u0913", 61, -1],
+ ["\u093E\u0913", 61, -1],
+ ["\u0915\u0930", -1, -1, r_CONSONANT],
+ ["\u0905\u0915\u0930", 64, -1],
+ ["\u0906\u0915\u0930", 64, -1],
+ ["\u093E\u0915\u0930", 64, -1],
+ ["\u093E", -1, -1],
+ ["\u090A\u0902\u0917\u093E", 68, -1],
+ ["\u0906\u090A\u0902\u0917\u093E", 69, -1],
+ ["\u093E\u090A\u0902\u0917\u093E", 69, -1],
+ ["\u0942\u0902\u0917\u093E", 68, -1],
+ ["\u090F\u0917\u093E", 68, -1],
+ ["\u0906\u090F\u0917\u093E", 73, -1],
+ ["\u093E\u090F\u0917\u093E", 73, -1],
+ ["\u0947\u0917\u093E", 68, -1],
+ ["\u0924\u093E", 68, -1, r_CONSONANT],
+ ["\u0905\u0924\u093E", 77, -1],
+ ["\u0906\u0924\u093E", 77, -1],
+ ["\u093E\u0924\u093E", 77, -1],
+ ["\u0928\u093E", 68, -1, r_CONSONANT],
+ ["\u0905\u0928\u093E", 81, -1],
+ ["\u0906\u0928\u093E", 81, -1],
+ ["\u093E\u0928\u093E", 81, -1],
+ ["\u0906\u092F\u093E", 68, -1],
+ ["\u093E\u092F\u093E", 68, -1],
+ ["\u093F", -1, -1],
+ ["\u0940", -1, -1],
+ ["\u090A\u0902\u0917\u0940", 88, -1],
+ ["\u0906\u090A\u0902\u0917\u0940", 89, -1],
+ ["\u093E\u090A\u0902\u0917\u0940", 89, -1],
+ ["\u090F\u0902\u0917\u0940", 88, -1],
+ ["\u0906\u090F\u0902\u0917\u0940", 92, -1],
+ ["\u093E\u090F\u0902\u0917\u0940", 92, -1],
+ ["\u0942\u0902\u0917\u0940", 88, -1],
+ ["\u0947\u0902\u0917\u0940", 88, -1],
+ ["\u090F\u0917\u0940", 88, -1],
+ ["\u0906\u090F\u0917\u0940", 97, -1],
+ ["\u093E\u090F\u0917\u0940", 97, -1],
+ ["\u0913\u0917\u0940", 88, -1],
+ ["\u0906\u0913\u0917\u0940", 100, -1],
+ ["\u093E\u0913\u0917\u0940", 100, -1],
+ ["\u0947\u0917\u0940", 88, -1],
+ ["\u094B\u0917\u0940", 88, -1],
+ ["\u0924\u0940", 88, -1, r_CONSONANT],
+ ["\u0905\u0924\u0940", 105, -1],
+ ["\u0906\u0924\u0940", 105, -1],
+ ["\u093E\u0924\u0940", 105, -1],
+ ["\u0928\u0940", 88, -1, r_CONSONANT],
+ ["\u0905\u0928\u0940", 109, -1],
+ ["\u0941", -1, -1],
+ ["\u0942", -1, -1],
+ ["\u0947", -1, -1],
+ ["\u090F\u0902\u0917\u0947", 113, -1],
+ ["\u0906\u090F\u0902\u0917\u0947", 114, -1],
+ ["\u093E\u090F\u0902\u0917\u0947", 114, -1],
+ ["\u0947\u0902\u0917\u0947", 113, -1],
+ ["\u0913\u0917\u0947", 113, -1],
+ ["\u0906\u0913\u0917\u0947", 118, -1],
+ ["\u093E\u0913\u0917\u0947", 118, -1],
+ ["\u094B\u0917\u0947", 113, -1],
+ ["\u0924\u0947", 113, -1, r_CONSONANT],
+ ["\u0905\u0924\u0947", 122, -1],
+ ["\u0906\u0924\u0947", 122, -1],
+ ["\u093E\u0924\u0947", 122, -1],
+ ["\u0928\u0947", 113, -1, r_CONSONANT],
+ ["\u0905\u0928\u0947", 126, -1],
+ ["\u0906\u0928\u0947", 126, -1],
+ ["\u093E\u0928\u0947", 126, -1],
+ ["\u094B", -1, -1],
+ ["\u094D", -1, -1]
+ ];
+
+ /** @const */ var /** Array */ g_consonant = [255, 255, 255, 255, 159, 0, 0, 0, 248, 7];
+
+
+ /** @return {boolean} */
+ function r_CONSONANT() {
+ if (!(base.in_grouping_b(g_consonant, 2325, 2399)))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ if (base.cursor >= base.limit)
+ {
+ return false;
+ }
+ base.cursor++;
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_0) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
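For readers skimming the among tables in these generated files: as I read the Snowball runtime, each entry is [pattern, index of the entry it extends (-1 for none), result code, optional guard function], and find_among_b picks the longest matching suffix, returning that entry's result (a result of -1 still counts as a match; only 0 means failure). Annotated against a_0 above:

    // [ "\u0924\u093E\u090F\u0902", // suffix pattern, matched right-to-left
    //   15,                         // extends a_0[15] ("\u093E\u090F\u0902")
    //   -1,                         // result code; non-zero, so still a match
    //   r_CONSONANT ]               // guard: the character just before the
    //                               // suffix must be a Devanagari consonant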
diff --git a/sphinx/search/non-minified-js/hungarian-stemmer.js b/sphinx/search/non-minified-js/hungarian-stemmer.js
index 2c550ac0d0e..886e1cf39b2 100644
--- a/sphinx/search/non-minified-js/hungarian-stemmer.js
+++ b/sphinx/search/non-minified-js/hungarian-stemmer.js
@@ -1,25 +1,15 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from hungarian.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-HungarianStemmer = function() {
+var HungarianStemmer = function() {
var base = new BaseStemmer();
- /** @const */ var a_0 = [
- ["cs", -1, -1],
- ["dzs", -1, -1],
- ["gy", -1, -1],
- ["ly", -1, -1],
- ["ny", -1, -1],
- ["sz", -1, -1],
- ["ty", -1, -1],
- ["zs", -1, -1]
- ];
- /** @const */ var a_1 = [
+ /** @const */ var a_0 = [
["\u00E1", -1, 1],
["\u00E9", -1, 2]
];
- /** @const */ var a_2 = [
+ /** @const */ var a_1 = [
["bb", -1, -1],
["cc", -1, -1],
["dd", -1, -1],
@@ -45,12 +35,12 @@ HungarianStemmer = function() {
["zz", -1, -1]
];
- /** @const */ var a_3 = [
+ /** @const */ var a_2 = [
["al", -1, 1],
["el", -1, 1]
];
- /** @const */ var a_4 = [
+ /** @const */ var a_3 = [
["ba", -1, -1],
["ra", -1, -1],
["be", -1, -1],
@@ -97,13 +87,13 @@ HungarianStemmer = function() {
["v\u00E9", -1, -1]
];
- /** @const */ var a_5 = [
+ /** @const */ var a_4 = [
["\u00E1n", -1, 2],
["\u00E9n", -1, 1],
["\u00E1nk\u00E9nt", -1, 2]
];
- /** @const */ var a_6 = [
+ /** @const */ var a_5 = [
["stul", -1, 1],
["astul", 0, 1],
["\u00E1stul", 0, 2],
@@ -112,12 +102,12 @@ HungarianStemmer = function() {
["\u00E9st\u00FCl", 3, 3]
];
- /** @const */ var a_7 = [
+ /** @const */ var a_6 = [
["\u00E1", -1, 1],
["\u00E9", -1, 1]
];
- /** @const */ var a_8 = [
+ /** @const */ var a_7 = [
["k", -1, 3],
["ak", 0, 3],
["ek", 0, 3],
@@ -127,7 +117,7 @@ HungarianStemmer = function() {
["\u00F6k", 0, 3]
];
- /** @const */ var a_9 = [
+ /** @const */ var a_8 = [
["\u00E9i", -1, 1],
["\u00E1\u00E9i", 0, 3],
["\u00E9\u00E9i", 0, 2],
@@ -142,7 +132,7 @@ HungarianStemmer = function() {
["\u00E9\u00E9", 3, 2]
];
- /** @const */ var a_10 = [
+ /** @const */ var a_9 = [
["a", -1, 1],
["ja", 0, 1],
["d", -1, 1],
@@ -176,7 +166,7 @@ HungarianStemmer = function() {
["\u00E9", -1, 3]
];
- /** @const */ var a_11 = [
+ /** @const */ var a_10 = [
["id", -1, 1],
["aid", 0, 1],
["jaid", 1, 1],
@@ -230,69 +220,30 @@ HungarianStemmer = function() {
function r_mark_regions() {
I_p1 = base.limit;
lab0: {
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab1: {
if (!(base.in_grouping(g_v, 97, 369)))
{
break lab1;
}
- golab2: while(true)
- {
- var /** number */ v_2 = base.cursor;
- lab3: {
- if (!(base.out_grouping(g_v, 97, 369)))
- {
- break lab3;
- }
- base.cursor = v_2;
- break golab2;
- }
- base.cursor = v_2;
- if (base.cursor >= base.limit)
- {
- break lab1;
- }
- base.cursor++;
- }
- lab4: {
- var /** number */ v_3 = base.cursor;
- lab5: {
- if (base.find_among(a_0) == 0)
- {
- break lab5;
- }
- break lab4;
- }
- base.cursor = v_3;
- if (base.cursor >= base.limit)
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab2: {
+ if (!base.go_in_grouping(g_v, 97, 369))
{
- break lab1;
+ break lab2;
}
base.cursor++;
+ I_p1 = base.cursor;
}
- I_p1 = base.cursor;
+ base.cursor = v_2;
break lab0;
}
base.cursor = v_1;
- if (!(base.out_grouping(g_v, 97, 369)))
+ if (!base.go_out_grouping(g_v, 97, 369))
{
return false;
}
- golab6: while(true)
- {
- lab7: {
- if (!(base.in_grouping(g_v, 97, 369)))
- {
- break lab7;
- }
- break golab6;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
- }
+ base.cursor++;
I_p1 = base.cursor;
}
return true;
@@ -300,18 +251,14 @@ HungarianStemmer = function() {
/** @return {boolean} */
function r_R1() {
- if (!(I_p1 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p1 <= base.cursor;
};
/** @return {boolean} */
function r_v_ending() {
var /** number */ among_var;
base.ket = base.cursor;
- among_var = base.find_among_b(a_1);
+ among_var = base.find_among_b(a_0);
if (among_var == 0)
{
return false;
@@ -340,8 +287,8 @@ HungarianStemmer = function() {
/** @return {boolean} */
function r_double() {
- var /** number */ v_1 = base.limit - base.cursor;
- if (base.find_among_b(a_2) == 0)
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ if (base.find_among_b(a_1) == 0)
{
return false;
}
@@ -357,14 +304,11 @@ HungarianStemmer = function() {
}
base.cursor--;
base.ket = base.cursor;
+ if (base.cursor <= base.limit_backward)
{
- var /** number */ c1 = base.cursor - 1;
- if (c1 < base.limit_backward)
- {
- return false;
- }
- base.cursor = c1;
+ return false;
}
+ base.cursor--;
base.bra = base.cursor;
if (!base.slice_del())
{
@@ -376,7 +320,7 @@ HungarianStemmer = function() {
/** @return {boolean} */
function r_instrum() {
base.ket = base.cursor;
- if (base.find_among_b(a_3) == 0)
+ if (base.find_among_b(a_2) == 0)
{
return false;
}
@@ -403,7 +347,7 @@ HungarianStemmer = function() {
/** @return {boolean} */
function r_case() {
base.ket = base.cursor;
- if (base.find_among_b(a_4) == 0)
+ if (base.find_among_b(a_3) == 0)
{
return false;
}
@@ -427,7 +371,7 @@ HungarianStemmer = function() {
function r_case_special() {
var /** number */ among_var;
base.ket = base.cursor;
- among_var = base.find_among_b(a_5);
+ among_var = base.find_among_b(a_4);
if (among_var == 0)
{
return false;
@@ -458,7 +402,7 @@ HungarianStemmer = function() {
function r_case_other() {
var /** number */ among_var;
base.ket = base.cursor;
- among_var = base.find_among_b(a_6);
+ among_var = base.find_among_b(a_5);
if (among_var == 0)
{
return false;
@@ -494,7 +438,7 @@ HungarianStemmer = function() {
/** @return {boolean} */
function r_factive() {
base.ket = base.cursor;
- if (base.find_among_b(a_7) == 0)
+ if (base.find_among_b(a_6) == 0)
{
return false;
}
@@ -522,7 +466,7 @@ HungarianStemmer = function() {
function r_plural() {
var /** number */ among_var;
base.ket = base.cursor;
- among_var = base.find_among_b(a_8);
+ among_var = base.find_among_b(a_7);
if (among_var == 0)
{
return false;
@@ -559,7 +503,7 @@ HungarianStemmer = function() {
function r_owned() {
var /** number */ among_var;
base.ket = base.cursor;
- among_var = base.find_among_b(a_9);
+ among_var = base.find_among_b(a_8);
if (among_var == 0)
{
return false;
@@ -596,7 +540,7 @@ HungarianStemmer = function() {
function r_sing_owner() {
var /** number */ among_var;
base.ket = base.cursor;
- among_var = base.find_among_b(a_10);
+ among_var = base.find_among_b(a_9);
if (among_var == 0)
{
return false;
@@ -633,7 +577,7 @@ HungarianStemmer = function() {
function r_plur_owner() {
var /** number */ among_var;
base.ket = base.cursor;
- among_var = base.find_among_b(a_11);
+ among_var = base.find_among_b(a_10);
if (among_var == 0)
{
return false;
@@ -667,35 +611,35 @@ HungarianStemmer = function() {
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
r_mark_regions();
base.cursor = v_1;
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
r_instrum();
base.cursor = base.limit - v_2;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
r_case();
base.cursor = base.limit - v_3;
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
r_case_special();
base.cursor = base.limit - v_4;
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
r_case_other();
base.cursor = base.limit - v_5;
- var /** number */ v_6 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
r_factive();
base.cursor = base.limit - v_6;
- var /** number */ v_7 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
r_owned();
base.cursor = base.limit - v_7;
- var /** number */ v_8 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
r_sing_owner();
base.cursor = base.limit - v_8;
- var /** number */ v_9 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
r_plur_owner();
base.cursor = base.limit - v_9;
- var /** number */ v_10 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
r_plural();
base.cursor = base.limit - v_10;
base.cursor = base.limit_backward;
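The recurring change in this regeneration, visible in the Hungarian hunks above, is that Snowball 3.x emits go_in_grouping/go_out_grouping calls where 2.1.0 emitted golab scanning loops. A rough standalone sketch of the semantics as I read them from the call sites (a hypothetical simplification over a plain character set rather than the runtime's bitmask groupings):

    // Advance over characters outside the set and stop on the first
    // member without consuming it; callers then do cursor++ themselves.
    // Returns the stopping index, or -1 if the limit is hit first.
    function goOutGrouping(s, cursor, limit, members) {
        while (cursor < limit) {
            if (members.indexOf(s.charAt(cursor)) >= 0) return cursor;
            cursor++;
        }
        return -1;
    }
    // go_in_grouping is the mirror image: skip members, stop on the
    // first non-member.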
diff --git a/sphinx/search/non-minified-js/indonesian-stemmer.js b/sphinx/search/non-minified-js/indonesian-stemmer.js
new file mode 100644
index 00000000000..714c410e738
--- /dev/null
+++ b/sphinx/search/non-minified-js/indonesian-stemmer.js
@@ -0,0 +1,409 @@
+// Generated from indonesian.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var IndonesianStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["kah", -1, 1],
+ ["lah", -1, 1],
+ ["pun", -1, 1]
+ ];
+
+ /** @const */ var a_1 = [
+ ["nya", -1, 1],
+ ["ku", -1, 1],
+ ["mu", -1, 1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["i", -1, 1, r_SUFFIX_I_OK],
+ ["an", -1, 1, r_SUFFIX_AN_OK],
+ ["kan", 1, 1, r_SUFFIX_KAN_OK]
+ ];
+
+ /** @const */ var a_3 = [
+ ["di", -1, 1],
+ ["ke", -1, 2],
+ ["me", -1, 1],
+ ["mem", 2, 5],
+ ["men", 2, 1],
+ ["meng", 4, 1],
+ ["meny", 4, 3, r_VOWEL],
+ ["pem", -1, 6],
+ ["pen", -1, 2],
+ ["peng", 8, 2],
+ ["peny", 8, 4, r_VOWEL],
+ ["ter", -1, 1]
+ ];
+
+ /** @const */ var a_4 = [
+ ["be", -1, 3, r_KER],
+ ["belajar", 0, 4],
+ ["ber", 0, 3],
+ ["pe", -1, 1],
+ ["pelajar", 3, 2],
+ ["per", 3, 1]
+ ];
+
+ /** @const */ var /** Array */ g_vowel = [17, 65, 16];
+
+ var /** number */ I_prefix = 0;
+ var /** number */ I_measure = 0;
+
+
+ /** @return {boolean} */
+ function r_remove_particle() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_0) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ I_measure -= 1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_possessive_pronoun() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_1) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ I_measure -= 1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_SUFFIX_KAN_OK() {
+ if (I_prefix == 3)
+ {
+ return false;
+ }
+ if (I_prefix == 2)
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_SUFFIX_AN_OK() {
+ return I_prefix != 1;
+ };
+
+ /** @return {boolean} */
+ function r_SUFFIX_I_OK() {
+ if (I_prefix > 2)
+ {
+ return false;
+ }
+ {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (!(base.eq_s_b("s")))
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_1;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_suffix() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_2) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ I_measure -= 1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_VOWEL() {
+ if (!(base.in_grouping(g_vowel, 97, 117)))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_KER() {
+ if (!(base.out_grouping(g_vowel, 97, 117)))
+ {
+ return false;
+ }
+ if (!(base.eq_s("er")))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_first_order_prefix() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_3);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ I_prefix = 1;
+ I_measure -= 1;
+ break;
+ case 2:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ I_prefix = 3;
+ I_measure -= 1;
+ break;
+ case 3:
+ I_prefix = 1;
+ if (!base.slice_from("s"))
+ {
+ return false;
+ }
+ I_measure -= 1;
+ break;
+ case 4:
+ I_prefix = 3;
+ if (!base.slice_from("s"))
+ {
+ return false;
+ }
+ I_measure -= 1;
+ break;
+ case 5:
+ I_prefix = 1;
+ I_measure -= 1;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ if (!(base.in_grouping(g_vowel, 97, 117)))
+ {
+ break lab1;
+ }
+ base.cursor = v_2;
+ if (!base.slice_from("p"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = v_1;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ break;
+ case 6:
+ I_prefix = 3;
+ I_measure -= 1;
+ lab2: {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab3: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ if (!(base.in_grouping(g_vowel, 97, 117)))
+ {
+ break lab3;
+ }
+ base.cursor = v_4;
+ if (!base.slice_from("p"))
+ {
+ return false;
+ }
+ break lab2;
+ }
+ base.cursor = v_3;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_second_order_prefix() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_4);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ I_prefix = 2;
+ I_measure -= 1;
+ break;
+ case 2:
+ if (!base.slice_from("ajar"))
+ {
+ return false;
+ }
+ I_measure -= 1;
+ break;
+ case 3:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ I_prefix = 4;
+ I_measure -= 1;
+ break;
+ case 4:
+ if (!base.slice_from("ajar"))
+ {
+ return false;
+ }
+ I_prefix = 4;
+ I_measure -= 1;
+ break;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ I_measure = 0;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ while(true)
+ {
+ lab1: {
+ if (!base.go_out_grouping(g_vowel, 97, 117))
+ {
+ break lab1;
+ }
+ base.cursor++;
+ I_measure += 1;
+ continue;
+ }
+ break;
+ }
+ }
+ base.cursor = v_1;
+ if (I_measure <= 2)
+ {
+ return false;
+ }
+ I_prefix = 0;
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ r_remove_particle();
+ base.cursor = base.limit - v_2;
+ if (I_measure <= 2)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ r_remove_possessive_pronoun();
+ base.cursor = base.limit - v_3;
+ base.cursor = base.limit_backward;
+ if (I_measure <= 2)
+ {
+ return false;
+ }
+ lab2: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab3: {
+ /** @const */ var /** number */ v_5 = base.cursor;
+ if (!r_remove_first_order_prefix())
+ {
+ break lab3;
+ }
+ /** @const */ var /** number */ v_6 = base.cursor;
+ lab4: {
+ /** @const */ var /** number */ v_7 = base.cursor;
+ if (I_measure <= 2)
+ {
+ break lab4;
+ }
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ if (!r_remove_suffix())
+ {
+ break lab4;
+ }
+ base.cursor = base.limit_backward;
+ base.cursor = v_7;
+ if (I_measure <= 2)
+ {
+ break lab4;
+ }
+ if (!r_remove_second_order_prefix())
+ {
+ break lab4;
+ }
+ }
+ base.cursor = v_6;
+ base.cursor = v_5;
+ break lab2;
+ }
+ base.cursor = v_4;
+ /** @const */ var /** number */ v_8 = base.cursor;
+ r_remove_second_order_prefix();
+ base.cursor = v_8;
+ /** @const */ var /** number */ v_9 = base.cursor;
+ lab5: {
+ if (I_measure <= 2)
+ {
+ break lab5;
+ }
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ if (!r_remove_suffix())
+ {
+ break lab5;
+ }
+ base.cursor = base.limit_backward;
+ }
+ base.cursor = v_9;
+ }
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
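Unlike the region-based stemmers, the Indonesian stemmer above is gated on a vowel count: the loop at the top of stem() adds one to I_measure per vowel (the bitmask g_vowel = [17, 65, 16] over char codes 97..117 decodes to a, e, i, o, u), each removal rule decrements it, and stemming stops once it would drop to 2 or below. A standalone illustration of that count (hypothetical helper, not part of the generated code):

    // I_measure as computed by the go_out_grouping loop in stem():
    // one per vowel in the word.
    function measure(word) {
        var m = 0;
        for (var i = 0; i < word.length; i++) {
            if ('aeiou'.indexOf(word.charAt(i)) >= 0) m++;
        }
        return m;
    }
    // e.g. measure('kelahiran') === 4, so the word clears the
    // I_measure > 2 threshold and stemming proceeds.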
diff --git a/sphinx/search/non-minified-js/irish-stemmer.js b/sphinx/search/non-minified-js/irish-stemmer.js
new file mode 100644
index 00000000000..ec6601efb04
--- /dev/null
+++ b/sphinx/search/non-minified-js/irish-stemmer.js
@@ -0,0 +1,378 @@
+// Generated from irish.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var IrishStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["b'", -1, 1],
+ ["bh", -1, 4],
+ ["bhf", 1, 2],
+ ["bp", -1, 8],
+ ["ch", -1, 5],
+ ["d'", -1, 1],
+ ["d'fh", 5, 2],
+ ["dh", -1, 6],
+ ["dt", -1, 9],
+ ["fh", -1, 2],
+ ["gc", -1, 5],
+ ["gh", -1, 7],
+ ["h-", -1, 1],
+ ["m'", -1, 1],
+ ["mb", -1, 4],
+ ["mh", -1, 10],
+ ["n-", -1, 1],
+ ["nd", -1, 6],
+ ["ng", -1, 7],
+ ["ph", -1, 8],
+ ["sh", -1, 3],
+ ["t-", -1, 1],
+ ["th", -1, 9],
+ ["ts", -1, 3]
+ ];
+
+ /** @const */ var a_1 = [
+ ["\u00EDochta", -1, 1],
+ ["a\u00EDochta", 0, 1],
+ ["ire", -1, 2],
+ ["aire", 2, 2],
+ ["abh", -1, 1],
+ ["eabh", 4, 1],
+ ["ibh", -1, 1],
+ ["aibh", 6, 1],
+ ["amh", -1, 1],
+ ["eamh", 8, 1],
+ ["imh", -1, 1],
+ ["aimh", 10, 1],
+ ["\u00EDocht", -1, 1],
+ ["a\u00EDocht", 12, 1],
+ ["ir\u00ED", -1, 2],
+ ["air\u00ED", 14, 2]
+ ];
+
+ /** @const */ var a_2 = [
+ ["\u00F3ideacha", -1, 6],
+ ["patacha", -1, 5],
+ ["achta", -1, 1],
+ ["arcachta", 2, 2],
+ ["eachta", 2, 1],
+ ["grafa\u00EDochta", -1, 4],
+ ["paite", -1, 5],
+ ["ach", -1, 1],
+ ["each", 7, 1],
+ ["\u00F3ideach", 8, 6],
+ ["gineach", 8, 3],
+ ["patach", 7, 5],
+ ["grafa\u00EDoch", -1, 4],
+ ["pataigh", -1, 5],
+ ["\u00F3idigh", -1, 6],
+ ["acht\u00FAil", -1, 1],
+ ["eacht\u00FAil", 15, 1],
+ ["gineas", -1, 3],
+ ["ginis", -1, 3],
+ ["acht", -1, 1],
+ ["arcacht", 19, 2],
+ ["eacht", 19, 1],
+ ["grafa\u00EDocht", -1, 4],
+ ["arcachta\u00ED", -1, 2],
+ ["grafa\u00EDochta\u00ED", -1, 4]
+ ];
+
+ /** @const */ var a_3 = [
+ ["imid", -1, 1],
+ ["aimid", 0, 1],
+ ["\u00EDmid", -1, 1],
+ ["a\u00EDmid", 2, 1],
+ ["adh", -1, 2],
+ ["eadh", 4, 2],
+ ["faidh", -1, 1],
+ ["fidh", -1, 1],
+ ["\u00E1il", -1, 2],
+ ["ain", -1, 2],
+ ["tear", -1, 2],
+ ["tar", -1, 2]
+ ];
+
+ /** @const */ var /** Array */ g_v = [17, 65, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 17, 4, 2];
+
+ var /** number */ I_p2 = 0;
+ var /** number */ I_p1 = 0;
+ var /** number */ I_pV = 0;
+
+
+ /** @return {boolean} */
+ function r_mark_regions() {
+ I_pV = base.limit;
+ I_p1 = base.limit;
+ I_p2 = base.limit;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ if (!base.go_out_grouping(g_v, 97, 250))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ I_pV = base.cursor;
+ if (!base.go_in_grouping(g_v, 97, 250))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ I_p1 = base.cursor;
+ if (!base.go_out_grouping(g_v, 97, 250))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 250))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ I_p2 = base.cursor;
+ }
+ base.cursor = v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_initial_morph() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_0);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("f"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("s"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("b"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("c"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("d"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("g"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("p"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("t"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("m"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_RV() {
+ return I_pV <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_R1() {
+ return I_p1 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_R2() {
+ return I_p2 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_noun_sfx() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_1);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_deriv() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_2);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("arc"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("gin"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("graf"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("paite"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("\u00F3id"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_verb_sfx() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_3);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_RV())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ r_initial_morph();
+ base.cursor = v_1;
+ r_mark_regions();
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ r_noun_sfx();
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ r_deriv();
+ base.cursor = base.limit - v_3;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ r_verb_sfx();
+ base.cursor = base.limit - v_4;
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
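r_mark_regions in the Irish stemmer computes the usual Snowball boundaries in one pass: I_pV lands just past the first vowel, I_p1 just past the first non-vowel that follows it, and I_p2 one vowel/non-vowel cycle further; r_RV, r_R1 and r_R2 then simply test whether the cursor is at or beyond those marks. Worked through on a hypothetical input:

    // Hypothetical input "amadáin" (indices 0..6):
    //   go_out_grouping stops on 'a' (0), cursor++  -> I_pV = 1
    //   go_in_grouping  stops on 'm' (1), cursor++  -> I_p1 = 2
    //   go_out_grouping stops on 'a' (2), cursor++
    //   go_in_grouping  stops on 'd' (3), cursor++  -> I_p2 = 4
    // so a suffix rule guarded by r_R2() only fires once base.cursor >= 4.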
diff --git a/sphinx/search/non-minified-js/italian-stemmer.js b/sphinx/search/non-minified-js/italian-stemmer.js
index df6ddfd332a..f5ab55ecc8d 100644
--- a/sphinx/search/non-minified-js/italian-stemmer.js
+++ b/sphinx/search/non-minified-js/italian-stemmer.js
@@ -1,8 +1,9 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from italian.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-ItalianStemmer = function() {
+var ItalianStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
["", -1, 7],
["qu", 0, 6],
@@ -238,17 +239,13 @@ ItalianStemmer = function() {
/** @return {boolean} */
function r_prelude() {
var /** number */ among_var;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
while(true)
{
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab0: {
base.bra = base.cursor;
among_var = base.find_among(a_0);
- if (among_var == 0)
- {
- break lab0;
- }
base.ket = base.cursor;
switch (among_var) {
case 1:
@@ -303,11 +300,11 @@ ItalianStemmer = function() {
base.cursor = v_1;
while(true)
{
- var /** number */ v_3 = base.cursor;
+ /** @const */ var /** number */ v_3 = base.cursor;
lab1: {
golab2: while(true)
{
- var /** number */ v_4 = base.cursor;
+ /** @const */ var /** number */ v_4 = base.cursor;
lab3: {
if (!(base.in_grouping(g_v, 97, 249)))
{
@@ -315,7 +312,7 @@ ItalianStemmer = function() {
}
base.bra = base.cursor;
lab4: {
- var /** number */ v_5 = base.cursor;
+ /** @const */ var /** number */ v_5 = base.cursor;
lab5: {
if (!(base.eq_s("u")))
{
@@ -370,37 +367,27 @@ ItalianStemmer = function() {
I_pV = base.limit;
I_p1 = base.limit;
I_p2 = base.limit;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
lab1: {
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab2: {
if (!(base.in_grouping(g_v, 97, 249)))
{
break lab2;
}
lab3: {
- var /** number */ v_3 = base.cursor;
+ /** @const */ var /** number */ v_3 = base.cursor;
lab4: {
if (!(base.out_grouping(g_v, 97, 249)))
{
break lab4;
}
- golab5: while(true)
+ if (!base.go_out_grouping(g_v, 97, 249))
{
- lab6: {
- if (!(base.in_grouping(g_v, 97, 249)))
- {
- break lab6;
- }
- break golab5;
- }
- if (base.cursor >= base.limit)
- {
- break lab4;
- }
- base.cursor++;
+ break lab4;
}
+ base.cursor++;
break lab3;
}
base.cursor = v_3;
@@ -408,21 +395,19 @@ ItalianStemmer = function() {
{
break lab2;
}
- golab7: while(true)
+ if (!base.go_in_grouping(g_v, 97, 249))
{
- lab8: {
- if (!(base.out_grouping(g_v, 97, 249)))
- {
- break lab8;
- }
- break golab7;
- }
- if (base.cursor >= base.limit)
- {
- break lab2;
- }
- base.cursor++;
+ break lab2;
}
+ base.cursor++;
+ }
+ break lab1;
+ }
+ base.cursor = v_2;
+ lab5: {
+ if (!(base.eq_s("divan")))
+ {
+ break lab5;
}
break lab1;
}
@@ -431,31 +416,21 @@ ItalianStemmer = function() {
{
break lab0;
}
- lab9: {
- var /** number */ v_6 = base.cursor;
- lab10: {
+ lab6: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab7: {
if (!(base.out_grouping(g_v, 97, 249)))
{
- break lab10;
+ break lab7;
}
- golab11: while(true)
+ if (!base.go_out_grouping(g_v, 97, 249))
{
- lab12: {
- if (!(base.in_grouping(g_v, 97, 249)))
- {
- break lab12;
- }
- break golab11;
- }
- if (base.cursor >= base.limit)
- {
- break lab10;
- }
- base.cursor++;
+ break lab7;
}
- break lab9;
+ base.cursor++;
+ break lab6;
}
- base.cursor = v_6;
+ base.cursor = v_4;
if (!(base.in_grouping(g_v, 97, 249)))
{
break lab0;
@@ -470,72 +445,32 @@ ItalianStemmer = function() {
I_pV = base.cursor;
}
base.cursor = v_1;
- var /** number */ v_8 = base.cursor;
- lab13: {
- golab14: while(true)
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab8: {
+ if (!base.go_out_grouping(g_v, 97, 249))
{
- lab15: {
- if (!(base.in_grouping(g_v, 97, 249)))
- {
- break lab15;
- }
- break golab14;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab8;
}
- golab16: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 249))
{
- lab17: {
- if (!(base.out_grouping(g_v, 97, 249)))
- {
- break lab17;
- }
- break golab16;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab8;
}
+ base.cursor++;
I_p1 = base.cursor;
- golab18: while(true)
+ if (!base.go_out_grouping(g_v, 97, 249))
{
- lab19: {
- if (!(base.in_grouping(g_v, 97, 249)))
- {
- break lab19;
- }
- break golab18;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab8;
}
- golab20: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 249))
{
- lab21: {
- if (!(base.out_grouping(g_v, 97, 249)))
- {
- break lab21;
- }
- break golab20;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab8;
}
+ base.cursor++;
I_p2 = base.cursor;
}
- base.cursor = v_8;
+ base.cursor = v_5;
return true;
};
@@ -544,14 +479,10 @@ ItalianStemmer = function() {
var /** number */ among_var;
while(true)
{
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
base.bra = base.cursor;
among_var = base.find_among(a_1);
- if (among_var == 0)
- {
- break lab0;
- }
base.ket = base.cursor;
switch (among_var) {
case 1:
@@ -584,29 +515,17 @@ ItalianStemmer = function() {
/** @return {boolean} */
function r_RV() {
- if (!(I_pV <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_pV <= base.cursor;
};
/** @return {boolean} */
function r_R1() {
- if (!(I_p1 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p1 <= base.cursor;
};
/** @return {boolean} */
function r_R2() {
- if (!(I_p2 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p2 <= base.cursor;
};
/** @return {boolean} */
@@ -674,7 +593,7 @@ ItalianStemmer = function() {
{
return false;
}
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
base.ket = base.cursor;
if (!(base.eq_s_b("ic")))
@@ -743,7 +662,7 @@ ItalianStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab1: {
base.ket = base.cursor;
among_var = base.find_among_b(a_4);
@@ -793,7 +712,7 @@ ItalianStemmer = function() {
{
return false;
}
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab2: {
base.ket = base.cursor;
if (base.find_among_b(a_5) == 0)
@@ -822,7 +741,7 @@ ItalianStemmer = function() {
{
return false;
}
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab3: {
base.ket = base.cursor;
if (!(base.eq_s_b("at")))
@@ -868,12 +787,12 @@ ItalianStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_pV;
base.ket = base.cursor;
if (base.find_among_b(a_7) == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
@@ -881,13 +800,13 @@ ItalianStemmer = function() {
{
return false;
}
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return true;
};
/** @return {boolean} */
function r_vowel_suffix() {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
base.ket = base.cursor;
if (!(base.in_grouping_b(g_AEIO, 97, 242)))
@@ -922,7 +841,7 @@ ItalianStemmer = function() {
return false;
}
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab1: {
base.ket = base.cursor;
if (!(base.eq_s_b("h")))
@@ -950,18 +869,18 @@ ItalianStemmer = function() {
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
r_prelude();
base.cursor = v_1;
r_mark_regions();
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
r_attached_pronoun();
- base.cursor = base.limit - v_3;
- var /** number */ v_4 = base.limit - base.cursor;
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab0: {
lab1: {
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab2: {
if (!r_standard_suffix())
{
@@ -969,21 +888,21 @@ ItalianStemmer = function() {
}
break lab1;
}
- base.cursor = base.limit - v_5;
+ base.cursor = base.limit - v_4;
if (!r_verb_suffix())
{
break lab0;
}
}
}
- base.cursor = base.limit - v_4;
- var /** number */ v_6 = base.limit - base.cursor;
+ base.cursor = base.limit - v_3;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
r_vowel_suffix();
- base.cursor = base.limit - v_6;
+ base.cursor = base.limit - v_5;
base.cursor = base.limit_backward;
- var /** number */ v_7 = base.cursor;
+ /** @const */ var /** number */ v_6 = base.cursor;
r_postlude();
- base.cursor = v_7;
+ base.cursor = v_6;
return true;
};
diff --git a/sphinx/search/non-minified-js/lithuanian-stemmer.js b/sphinx/search/non-minified-js/lithuanian-stemmer.js
new file mode 100644
index 00000000000..213ff5979b2
--- /dev/null
+++ b/sphinx/search/non-minified-js/lithuanian-stemmer.js
@@ -0,0 +1,534 @@
+// Generated from lithuanian.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var LithuanianStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["a", -1, -1],
+ ["ia", 0, -1],
+ ["eria", 1, -1],
+ ["osna", 0, -1],
+ ["iosna", 3, -1],
+ ["uosna", 3, -1],
+ ["iuosna", 5, -1],
+ ["ysna", 0, -1],
+ ["\u0117sna", 0, -1],
+ ["e", -1, -1],
+ ["ie", 9, -1],
+ ["enie", 10, -1],
+ ["erie", 10, -1],
+ ["oje", 9, -1],
+ ["ioje", 13, -1],
+ ["uje", 9, -1],
+ ["iuje", 15, -1],
+ ["yje", 9, -1],
+ ["enyje", 17, -1],
+ ["eryje", 17, -1],
+ ["\u0117je", 9, -1],
+ ["ame", 9, -1],
+ ["iame", 21, -1],
+ ["sime", 9, -1],
+ ["ome", 9, -1],
+ ["\u0117me", 9, -1],
+ ["tum\u0117me", 25, -1],
+ ["ose", 9, -1],
+ ["iose", 27, -1],
+ ["uose", 27, -1],
+ ["iuose", 29, -1],
+ ["yse", 9, -1],
+ ["enyse", 31, -1],
+ ["eryse", 31, -1],
+ ["\u0117se", 9, -1],
+ ["ate", 9, -1],
+ ["iate", 35, -1],
+ ["ite", 9, -1],
+ ["kite", 37, -1],
+ ["site", 37, -1],
+ ["ote", 9, -1],
+ ["tute", 9, -1],
+ ["\u0117te", 9, -1],
+ ["tum\u0117te", 42, -1],
+ ["i", -1, -1],
+ ["ai", 44, -1],
+ ["iai", 45, -1],
+ ["eriai", 46, -1],
+ ["ei", 44, -1],
+ ["tumei", 48, -1],
+ ["ki", 44, -1],
+ ["imi", 44, -1],
+ ["erimi", 51, -1],
+ ["umi", 44, -1],
+ ["iumi", 53, -1],
+ ["si", 44, -1],
+ ["asi", 55, -1],
+ ["iasi", 56, -1],
+ ["esi", 55, -1],
+ ["iesi", 58, -1],
+ ["siesi", 59, -1],
+ ["isi", 55, -1],
+ ["aisi", 61, -1],
+ ["eisi", 61, -1],
+ ["tumeisi", 63, -1],
+ ["uisi", 61, -1],
+ ["osi", 55, -1],
+ ["\u0117josi", 66, -1],
+ ["uosi", 66, -1],
+ ["iuosi", 68, -1],
+ ["siuosi", 69, -1],
+ ["usi", 55, -1],
+ ["ausi", 71, -1],
+ ["\u010Diausi", 72, -1],
+ ["\u0105si", 55, -1],
+ ["\u0117si", 55, -1],
+ ["\u0173si", 55, -1],
+ ["t\u0173si", 76, -1],
+ ["ti", 44, -1],
+ ["enti", 78, -1],
+ ["inti", 78, -1],
+ ["oti", 78, -1],
+ ["ioti", 81, -1],
+ ["uoti", 81, -1],
+ ["iuoti", 83, -1],
+ ["auti", 78, -1],
+ ["iauti", 85, -1],
+ ["yti", 78, -1],
+ ["\u0117ti", 78, -1],
+ ["tel\u0117ti", 88, -1],
+ ["in\u0117ti", 88, -1],
+ ["ter\u0117ti", 88, -1],
+ ["ui", 44, -1],
+ ["iui", 92, -1],
+ ["eniui", 93, -1],
+ ["oj", -1, -1],
+ ["\u0117j", -1, -1],
+ ["k", -1, -1],
+ ["am", -1, -1],
+ ["iam", 98, -1],
+ ["iem", -1, -1],
+ ["im", -1, -1],
+ ["sim", 101, -1],
+ ["om", -1, -1],
+ ["tum", -1, -1],
+ ["\u0117m", -1, -1],
+ ["tum\u0117m", 105, -1],
+ ["an", -1, -1],
+ ["on", -1, -1],
+ ["ion", 108, -1],
+ ["un", -1, -1],
+ ["iun", 110, -1],
+ ["\u0117n", -1, -1],
+ ["o", -1, -1],
+ ["io", 113, -1],
+ ["enio", 114, -1],
+ ["\u0117jo", 113, -1],
+ ["uo", 113, -1],
+ ["s", -1, -1],
+ ["as", 118, -1],
+ ["ias", 119, -1],
+ ["es", 118, -1],
+ ["ies", 121, -1],
+ ["is", 118, -1],
+ ["ais", 123, -1],
+ ["iais", 124, -1],
+ ["tumeis", 123, -1],
+ ["imis", 123, -1],
+ ["enimis", 127, -1],
+ ["omis", 123, -1],
+ ["iomis", 129, -1],
+ ["umis", 123, -1],
+ ["\u0117mis", 123, -1],
+ ["enis", 123, -1],
+ ["asis", 123, -1],
+ ["ysis", 123, -1],
+ ["ams", 118, -1],
+ ["iams", 136, -1],
+ ["iems", 118, -1],
+ ["ims", 118, -1],
+ ["enims", 139, -1],
+ ["erims", 139, -1],
+ ["oms", 118, -1],
+ ["ioms", 142, -1],
+ ["ums", 118, -1],
+ ["\u0117ms", 118, -1],
+ ["ens", 118, -1],
+ ["os", 118, -1],
+ ["ios", 147, -1],
+ ["uos", 147, -1],
+ ["iuos", 149, -1],
+ ["ers", 118, -1],
+ ["us", 118, -1],
+ ["aus", 152, -1],
+ ["iaus", 153, -1],
+ ["ius", 152, -1],
+ ["ys", 118, -1],
+ ["enys", 156, -1],
+ ["erys", 156, -1],
+ ["\u0105s", 118, -1],
+ ["i\u0105s", 159, -1],
+ ["\u0117s", 118, -1],
+ ["am\u0117s", 161, -1],
+ ["iam\u0117s", 162, -1],
+ ["im\u0117s", 161, -1],
+ ["kim\u0117s", 164, -1],
+ ["sim\u0117s", 164, -1],
+ ["om\u0117s", 161, -1],
+ ["\u0117m\u0117s", 161, -1],
+ ["tum\u0117m\u0117s", 168, -1],
+ ["at\u0117s", 161, -1],
+ ["iat\u0117s", 170, -1],
+ ["sit\u0117s", 161, -1],
+ ["ot\u0117s", 161, -1],
+ ["\u0117t\u0117s", 161, -1],
+ ["tum\u0117t\u0117s", 174, -1],
+ ["\u012Fs", 118, -1],
+ ["\u016Bs", 118, -1],
+ ["t\u0173s", 118, -1],
+ ["at", -1, -1],
+ ["iat", 179, -1],
+ ["it", -1, -1],
+ ["sit", 181, -1],
+ ["ot", -1, -1],
+ ["\u0117t", -1, -1],
+ ["tum\u0117t", 184, -1],
+ ["u", -1, -1],
+ ["au", 186, -1],
+ ["iau", 187, -1],
+ ["\u010Diau", 188, -1],
+ ["iu", 186, -1],
+ ["eniu", 190, -1],
+ ["siu", 190, -1],
+ ["y", -1, -1],
+ ["\u0105", -1, -1],
+ ["i\u0105", 194, -1],
+ ["\u0117", -1, -1],
+ ["\u0119", -1, -1],
+ ["\u012F", -1, -1],
+ ["en\u012F", 198, -1],
+ ["er\u012F", 198, -1],
+ ["\u0173", -1, -1],
+ ["i\u0173", 201, -1],
+ ["er\u0173", 201, -1]
+ ];
+
+ /** @const */ var a_1 = [
+ ["ing", -1, -1],
+ ["aj", -1, -1],
+ ["iaj", 1, -1],
+ ["iej", -1, -1],
+ ["oj", -1, -1],
+ ["ioj", 4, -1],
+ ["uoj", 4, -1],
+ ["iuoj", 6, -1],
+ ["auj", -1, -1],
+ ["\u0105j", -1, -1],
+ ["i\u0105j", 9, -1],
+ ["\u0117j", -1, -1],
+ ["\u0173j", -1, -1],
+ ["i\u0173j", 12, -1],
+ ["ok", -1, -1],
+ ["iok", 14, -1],
+ ["iuk", -1, -1],
+ ["uliuk", 16, -1],
+ ["u\u010Diuk", 16, -1],
+ ["i\u0161k", -1, -1],
+ ["iul", -1, -1],
+ ["yl", -1, -1],
+ ["\u0117l", -1, -1],
+ ["am", -1, -1],
+ ["dam", 23, -1],
+ ["jam", 23, -1],
+ ["zgan", -1, -1],
+ ["ain", -1, -1],
+ ["esn", -1, -1],
+ ["op", -1, -1],
+ ["iop", 29, -1],
+ ["ias", -1, -1],
+ ["ies", -1, -1],
+ ["ais", -1, -1],
+ ["iais", 33, -1],
+ ["os", -1, -1],
+ ["ios", 35, -1],
+ ["uos", 35, -1],
+ ["iuos", 37, -1],
+ ["aus", -1, -1],
+ ["iaus", 39, -1],
+ ["\u0105s", -1, -1],
+ ["i\u0105s", 41, -1],
+ ["\u0119s", -1, -1],
+ ["ut\u0117ait", -1, -1],
+ ["ant", -1, -1],
+ ["iant", 45, -1],
+ ["siant", 46, -1],
+ ["int", -1, -1],
+ ["ot", -1, -1],
+ ["uot", 49, -1],
+ ["iuot", 50, -1],
+ ["yt", -1, -1],
+ ["\u0117t", -1, -1],
+ ["yk\u0161t", -1, -1],
+ ["iau", -1, -1],
+ ["dav", -1, -1],
+ ["sv", -1, -1],
+ ["\u0161v", -1, -1],
+ ["yk\u0161\u010D", -1, -1],
+ ["\u0119", -1, -1],
+ ["\u0117j\u0119", 60, -1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["ojime", -1, 7],
+ ["\u0117jime", -1, 3],
+ ["avime", -1, 6],
+ ["okate", -1, 8],
+ ["aite", -1, 1],
+ ["uote", -1, 2],
+ ["asius", -1, 5],
+ ["okat\u0117s", -1, 8],
+ ["ait\u0117s", -1, 1],
+ ["uot\u0117s", -1, 2],
+ ["esiu", -1, 4]
+ ];
+
+ /** @const */ var a_3 = [
+ ["\u010D", -1, 1],
+ ["d\u017E", -1, 2]
+ ];
+
+ /** @const */ var a_4 = [
+ ["gd", -1, 1]
+ ];
+
+ /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 0, 64, 1, 0, 64, 0, 0, 0, 0, 0, 0, 0, 4, 4];
+
+ var /** number */ I_p1 = 0;
+
+
+ /** @return {boolean} */
+ function r_step1() {
+ if (base.cursor < I_p1)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit_backward;
+ base.limit_backward = I_p1;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_0) == 0)
+ {
+ base.limit_backward = v_1;
+ return false;
+ }
+ base.bra = base.cursor;
+ base.limit_backward = v_1;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step2() {
+ while(true)
+ {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (base.cursor < I_p1)
+ {
+ break lab0;
+ }
+ /** @const */ var /** number */ v_2 = base.limit_backward;
+ base.limit_backward = I_p1;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_1) == 0)
+ {
+ base.limit_backward = v_2;
+ break lab0;
+ }
+ base.bra = base.cursor;
+ base.limit_backward = v_2;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ continue;
+ }
+ base.cursor = base.limit - v_1;
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_fix_conflicts() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_2);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("ait\u0117"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("uot\u0117"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u0117jimas"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("esys"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("asys"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("avimas"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("ojimas"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("okat\u0117"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_fix_chdz() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_3);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("t"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("d"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_fix_gd() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_4) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("g"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ I_p1 = base.limit;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ if (!(base.eq_s("a")))
+ {
+ base.cursor = v_2;
+ break lab1;
+ }
+ base.cursor = v_3;
+ if (base.current.length <= 6)
+ {
+ base.cursor = v_2;
+ break lab1;
+ }
+ if (base.cursor >= base.limit)
+ {
+ base.cursor = v_2;
+ break lab1;
+ }
+ base.cursor++;
+ }
+ if (!base.go_out_grouping(g_v, 97, 371))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 371))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ I_p1 = base.cursor;
+ }
+ base.cursor = v_1;
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ r_fix_conflicts();
+ base.cursor = base.limit - v_4;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ r_step1();
+ base.cursor = base.limit - v_5;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ r_fix_chdz();
+ base.cursor = base.limit - v_6;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ r_step2();
+ base.cursor = base.limit - v_7;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ r_fix_chdz();
+ base.cursor = base.limit - v_8;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
+ r_fix_gd();
+ base.cursor = base.limit - v_9;
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/nepali-stemmer.js b/sphinx/search/non-minified-js/nepali-stemmer.js
new file mode 100644
index 00000000000..d6352d00a4d
--- /dev/null
+++ b/sphinx/search/non-minified-js/nepali-stemmer.js
@@ -0,0 +1,282 @@
+// Generated from nepali.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var NepaliStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["\u0932\u093E\u0907", -1, 1],
+ ["\u0932\u093E\u0908", -1, 1],
+ ["\u0938\u0901\u0917", -1, 1],
+ ["\u0938\u0902\u0917", -1, 1],
+ ["\u092E\u093E\u0930\u094D\u092B\u0924", -1, 1],
+ ["\u0930\u0924", -1, 1],
+ ["\u0915\u093E", -1, 2],
+ ["\u092E\u093E", -1, 1],
+ ["\u0926\u094D\u0935\u093E\u0930\u093E", -1, 1],
+ ["\u0915\u093F", -1, 2],
+ ["\u092A\u091B\u093F", -1, 1],
+ ["\u0915\u0940", -1, 2],
+ ["\u0932\u0947", -1, 1],
+ ["\u0915\u0948", -1, 2],
+ ["\u0938\u0901\u0917\u0948", -1, 1],
+ ["\u092E\u0948", -1, 1],
+ ["\u0915\u094B", -1, 2]
+ ];
+
+ /** @const */ var a_1 = [
+ ["\u0901", -1, 1],
+ ["\u0902", -1, 1],
+ ["\u0948", -1, 2]
+ ];
+
+ /** @const */ var a_2 = [
+ ["\u0925\u093F\u090F", -1, 1],
+ ["\u091B", -1, 1],
+ ["\u0907\u091B", 1, 1],
+ ["\u090F\u091B", 1, 1],
+ ["\u093F\u091B", 1, 1],
+ ["\u0947\u091B", 1, 1],
+ ["\u0928\u0947\u091B", 5, 1],
+ ["\u0939\u0941\u0928\u0947\u091B", 6, 1],
+ ["\u0907\u0928\u094D\u091B", 1, 1],
+ ["\u093F\u0928\u094D\u091B", 1, 1],
+ ["\u0939\u0941\u0928\u094D\u091B", 1, 1],
+ ["\u090F\u0915\u093E", -1, 1],
+ ["\u0907\u090F\u0915\u093E", 11, 1],
+ ["\u093F\u090F\u0915\u093E", 11, 1],
+ ["\u0947\u0915\u093E", -1, 1],
+ ["\u0928\u0947\u0915\u093E", 14, 1],
+ ["\u0926\u093E", -1, 1],
+ ["\u0907\u0926\u093E", 16, 1],
+ ["\u093F\u0926\u093E", 16, 1],
+ ["\u0926\u0947\u0916\u093F", -1, 1],
+ ["\u092E\u093E\u0925\u093F", -1, 1],
+ ["\u090F\u0915\u0940", -1, 1],
+ ["\u0907\u090F\u0915\u0940", 21, 1],
+ ["\u093F\u090F\u0915\u0940", 21, 1],
+ ["\u0947\u0915\u0940", -1, 1],
+ ["\u0926\u0947\u0916\u0940", -1, 1],
+ ["\u0925\u0940", -1, 1],
+ ["\u0926\u0940", -1, 1],
+ ["\u091B\u0941", -1, 1],
+ ["\u090F\u091B\u0941", 28, 1],
+ ["\u0947\u091B\u0941", 28, 1],
+ ["\u0928\u0947\u091B\u0941", 30, 1],
+ ["\u0928\u0941", -1, 1],
+ ["\u0939\u0930\u0941", -1, 1],
+ ["\u0939\u0930\u0942", -1, 1],
+ ["\u091B\u0947", -1, 1],
+ ["\u0925\u0947", -1, 1],
+ ["\u0928\u0947", -1, 1],
+ ["\u090F\u0915\u0948", -1, 1],
+ ["\u0947\u0915\u0948", -1, 1],
+ ["\u0928\u0947\u0915\u0948", 39, 1],
+ ["\u0926\u0948", -1, 1],
+ ["\u0907\u0926\u0948", 41, 1],
+ ["\u093F\u0926\u0948", 41, 1],
+ ["\u090F\u0915\u094B", -1, 1],
+ ["\u0907\u090F\u0915\u094B", 44, 1],
+ ["\u093F\u090F\u0915\u094B", 44, 1],
+ ["\u0947\u0915\u094B", -1, 1],
+ ["\u0928\u0947\u0915\u094B", 47, 1],
+ ["\u0926\u094B", -1, 1],
+ ["\u0907\u0926\u094B", 49, 1],
+ ["\u093F\u0926\u094B", 49, 1],
+ ["\u092F\u094B", -1, 1],
+ ["\u0907\u092F\u094B", 52, 1],
+ ["\u092D\u092F\u094B", 52, 1],
+ ["\u093F\u092F\u094B", 52, 1],
+ ["\u0925\u093F\u092F\u094B", 55, 1],
+ ["\u0926\u093F\u092F\u094B", 55, 1],
+ ["\u0925\u094D\u092F\u094B", 52, 1],
+ ["\u091B\u094C", -1, 1],
+ ["\u0907\u091B\u094C", 59, 1],
+ ["\u090F\u091B\u094C", 59, 1],
+ ["\u093F\u091B\u094C", 59, 1],
+ ["\u0947\u091B\u094C", 59, 1],
+ ["\u0928\u0947\u091B\u094C", 63, 1],
+ ["\u092F\u094C", -1, 1],
+ ["\u0925\u093F\u092F\u094C", 65, 1],
+ ["\u091B\u094D\u092F\u094C", 65, 1],
+ ["\u0925\u094D\u092F\u094C", 65, 1],
+ ["\u091B\u0928\u094D", -1, 1],
+ ["\u0907\u091B\u0928\u094D", 69, 1],
+ ["\u090F\u091B\u0928\u094D", 69, 1],
+ ["\u093F\u091B\u0928\u094D", 69, 1],
+ ["\u0947\u091B\u0928\u094D", 69, 1],
+ ["\u0928\u0947\u091B\u0928\u094D", 73, 1],
+ ["\u0932\u093E\u0928\u094D", -1, 1],
+ ["\u091B\u093F\u0928\u094D", -1, 1],
+ ["\u0925\u093F\u0928\u094D", -1, 1],
+ ["\u092A\u0930\u094D", -1, 1],
+ ["\u0907\u0938\u094D", -1, 1],
+ ["\u0925\u093F\u0907\u0938\u094D", 79, 1],
+ ["\u091B\u0938\u094D", -1, 1],
+ ["\u0907\u091B\u0938\u094D", 81, 1],
+ ["\u090F\u091B\u0938\u094D", 81, 1],
+ ["\u093F\u091B\u0938\u094D", 81, 1],
+ ["\u0947\u091B\u0938\u094D", 81, 1],
+ ["\u0928\u0947\u091B\u0938\u094D", 85, 1],
+ ["\u093F\u0938\u094D", -1, 1],
+ ["\u0925\u093F\u0938\u094D", 87, 1],
+ ["\u091B\u0947\u0938\u094D", -1, 1],
+ ["\u0939\u094B\u0938\u094D", -1, 1]
+ ];
+
+
+ /** @return {boolean} */
+ function r_remove_category_1() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_0);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("\u090F")))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab2: {
+ if (!(base.eq_s_b("\u0947")))
+ {
+ break lab2;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_category_2() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_1);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("\u092F\u094C")))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab2: {
+ if (!(base.eq_s_b("\u091B\u094C")))
+ {
+ break lab2;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab3: {
+ if (!(base.eq_s_b("\u0928\u094C")))
+ {
+ break lab3;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!(base.eq_s_b("\u0925\u0947")))
+ {
+ return false;
+ }
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!(base.eq_s_b("\u0924\u094D\u0930")))
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_category_3() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_2) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ r_remove_category_1();
+ base.cursor = base.limit - v_1;
+ while(true)
+ {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab0: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ r_remove_category_2();
+ base.cursor = base.limit - v_3;
+ if (!r_remove_category_3())
+ {
+ break lab0;
+ }
+ continue;
+ }
+ base.cursor = base.limit - v_2;
+ break;
+ }
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/norwegian-stemmer.js b/sphinx/search/non-minified-js/norwegian-stemmer.js
index e1760631ab9..149e63c1a32 100644
--- a/sphinx/search/non-minified-js/norwegian-stemmer.js
+++ b/sphinx/search/non-minified-js/norwegian-stemmer.js
@@ -1,9 +1,28 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from norwegian.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-NorwegianStemmer = function() {
+var NorwegianStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
+ ["", -1, 1],
+ ["ind", 0, -1],
+ ["kk", 0, -1],
+ ["nk", 0, -1],
+ ["amm", 0, -1],
+ ["omm", 0, -1],
+ ["kap", 0, -1],
+ ["skap", 6, 1],
+ ["pp", 0, -1],
+ ["lt", 0, -1],
+ ["ast", 0, -1],
+ ["\u00F8st", 0, -1],
+ ["v", 0, -1],
+ ["hav", 12, 1],
+ ["giv", 12, 1]
+ ];
+
+ /** @const */ var a_1 = [
["a", -1, 1],
["e", -1, 1],
["ede", 1, 1],
@@ -12,13 +31,13 @@ NorwegianStemmer = function() {
["ane", 1, 1],
["ene", 1, 1],
["hetene", 6, 1],
- ["erte", 1, 3],
+ ["erte", 1, 4],
["en", -1, 1],
["heten", 9, 1],
["ar", -1, 1],
["er", -1, 1],
["heter", 12, 1],
- ["s", -1, 2],
+ ["s", -1, 3],
["as", 14, 1],
["es", 14, 1],
["edes", 16, 1],
@@ -27,20 +46,20 @@ NorwegianStemmer = function() {
["hetenes", 19, 1],
["ens", 14, 1],
["hetens", 21, 1],
- ["ers", 14, 1],
+ ["ers", 14, 2],
["ets", 14, 1],
["et", -1, 1],
["het", 25, 1],
- ["ert", -1, 3],
+ ["ert", -1, 4],
["ast", -1, 1]
];
- /** @const */ var a_1 = [
+ /** @const */ var a_2 = [
["dt", -1, -1],
["vt", -1, -1]
];
- /** @const */ var a_2 = [
+ /** @const */ var a_3 = [
["leg", -1, 1],
["eleg", 0, 1],
["ig", -1, 1],
@@ -54,9 +73,9 @@ NorwegianStemmer = function() {
["hetslov", 9, 1]
];
- /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 48, 0, 128];
+ /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 48, 2, 142];
- /** @const */ var /** Array */ g_s_ending = [119, 125, 149, 1];
+ /** @const */ var /** Array */ g_s_ending = [119, 125, 148, 1];
var /** number */ I_x = 0;
var /** number */ I_p1 = 0;
@@ -65,9 +84,9 @@ NorwegianStemmer = function() {
/** @return {boolean} */
function r_mark_regions() {
I_p1 = base.limit;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
{
- var /** number */ c1 = base.cursor + 3;
+ /** @const */ var /** number */ c1 = base.cursor + 3;
if (c1 > base.limit)
{
return false;
@@ -76,44 +95,21 @@ NorwegianStemmer = function() {
}
I_x = base.cursor;
base.cursor = v_1;
- golab0: while(true)
+ if (!base.go_out_grouping(g_v, 97, 248))
{
- var /** number */ v_2 = base.cursor;
- lab1: {
- if (!(base.in_grouping(g_v, 97, 248)))
- {
- break lab1;
- }
- base.cursor = v_2;
- break golab0;
- }
- base.cursor = v_2;
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
- golab2: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 248))
{
- lab3: {
- if (!(base.out_grouping(g_v, 97, 248)))
- {
- break lab3;
- }
- break golab2;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
+ base.cursor++;
I_p1 = base.cursor;
- lab4: {
- if (!(I_p1 < I_x))
+ lab0: {
+ if (I_p1 >= I_x)
{
- break lab4;
+ break lab0;
}
I_p1 = I_x;
}
@@ -127,17 +123,17 @@ NorwegianStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
- among_var = base.find_among_b(a_0);
+ among_var = base.find_among_b(a_1);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
switch (among_var) {
case 1:
if (!base.slice_del())
@@ -146,8 +142,19 @@ NorwegianStemmer = function() {
}
break;
case 2:
+ among_var = base.find_among_b(a_0);
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ break;
+ case 3:
lab0: {
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab1: {
if (!(base.in_grouping_b(g_s_ending, 98, 122)))
{
@@ -155,7 +162,26 @@ NorwegianStemmer = function() {
}
break lab0;
}
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
+ lab2: {
+ if (!(base.eq_s_b("r")))
+ {
+ break lab2;
+ }
+ {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab3: {
+ if (!(base.eq_s_b("e")))
+ {
+ break lab3;
+ }
+ break lab2;
+ }
+ base.cursor = base.limit - v_3;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_2;
if (!(base.eq_s_b("k")))
{
return false;
@@ -170,7 +196,7 @@ NorwegianStemmer = function() {
return false;
}
break;
- case 3:
+ case 4:
if (!base.slice_from("er"))
{
return false;
@@ -182,21 +208,21 @@ NorwegianStemmer = function() {
/** @return {boolean} */
function r_consonant_pair() {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
if (base.cursor < I_p1)
{
return false;
}
- var /** number */ v_3 = base.limit_backward;
+ /** @const */ var /** number */ v_2 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
- if (base.find_among_b(a_1) == 0)
+ if (base.find_among_b(a_2) == 0)
{
- base.limit_backward = v_3;
+ base.limit_backward = v_2;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_3;
+ base.limit_backward = v_2;
base.cursor = base.limit - v_1;
if (base.cursor <= base.limit_backward)
{
@@ -217,16 +243,16 @@ NorwegianStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
- if (base.find_among_b(a_2) == 0)
+ if (base.find_among_b(a_3) == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
if (!base.slice_del())
{
return false;
@@ -235,17 +261,17 @@ NorwegianStemmer = function() {
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
r_mark_regions();
base.cursor = v_1;
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
r_main_suffix();
base.cursor = base.limit - v_2;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
r_consonant_pair();
base.cursor = base.limit - v_3;
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
r_other_suffix();
base.cursor = base.limit - v_4;
base.cursor = base.limit_backward;
diff --git a/sphinx/search/non-minified-js/porter-stemmer.js b/sphinx/search/non-minified-js/porter-stemmer.js
index 0747d2cf3ea..182b9d5edfb 100644
--- a/sphinx/search/non-minified-js/porter-stemmer.js
+++ b/sphinx/search/non-minified-js/porter-stemmer.js
@@ -1,8 +1,9 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from porter.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-PorterStemmer = function() {
+var PorterStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
["s", -1, 3],
["ies", 0, 2],
@@ -115,20 +116,12 @@ PorterStemmer = function() {
/** @return {boolean} */
function r_R1() {
- if (!(I_p1 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p1 <= base.cursor;
};
/** @return {boolean} */
function r_R2() {
- if (!(I_p2 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p2 <= base.cursor;
};
/** @return {boolean} */
@@ -186,38 +179,24 @@ PorterStemmer = function() {
}
break;
case 2:
- var /** number */ v_1 = base.limit - base.cursor;
- golab0: while(true)
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ if (!base.go_out_grouping_b(g_v, 97, 121))
{
- lab1: {
- if (!(base.in_grouping_b(g_v, 97, 121)))
- {
- break lab1;
- }
- break golab0;
- }
- if (base.cursor <= base.limit_backward)
- {
- return false;
- }
- base.cursor--;
+ return false;
}
+ base.cursor--;
base.cursor = base.limit - v_1;
if (!base.slice_del())
{
return false;
}
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
among_var = base.find_among_b(a_1);
- if (among_var == 0)
- {
- return false;
- }
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
switch (among_var) {
case 1:
{
- var /** number */ c1 = base.cursor;
+ /** @const */ var /** number */ c1 = base.cursor;
base.insert(base.cursor, base.cursor, "e");
base.cursor = c1;
}
@@ -240,14 +219,14 @@ PorterStemmer = function() {
{
return false;
}
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
if (!r_shortv())
{
return false;
}
- base.cursor = base.limit - v_4;
+ base.cursor = base.limit - v_3;
{
- var /** number */ c2 = base.cursor;
+ /** @const */ var /** number */ c2 = base.cursor;
base.insert(base.cursor, base.cursor, "e");
base.cursor = c2;
}
@@ -262,7 +241,7 @@ PorterStemmer = function() {
function r_Step_1c() {
base.ket = base.cursor;
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!(base.eq_s_b("y")))
{
@@ -277,21 +256,11 @@ PorterStemmer = function() {
}
}
base.bra = base.cursor;
- golab2: while(true)
+ if (!base.go_out_grouping_b(g_v, 97, 121))
{
- lab3: {
- if (!(base.in_grouping_b(g_v, 97, 121)))
- {
- break lab3;
- }
- break golab2;
- }
- if (base.cursor <= base.limit_backward)
- {
- return false;
- }
- base.cursor--;
+ return false;
}
+ base.cursor--;
if (!base.slice_from("i"))
{
return false;
@@ -456,7 +425,7 @@ PorterStemmer = function() {
break;
case 2:
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!(base.eq_s_b("s")))
{
@@ -488,7 +457,6 @@ PorterStemmer = function() {
}
base.bra = base.cursor;
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!r_R2())
{
@@ -496,13 +464,12 @@ PorterStemmer = function() {
}
break lab0;
}
- base.cursor = base.limit - v_1;
if (!r_R1())
{
return false;
}
{
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab2: {
if (!r_shortv())
{
@@ -510,7 +477,7 @@ PorterStemmer = function() {
}
return false;
}
- base.cursor = base.limit - v_2;
+ base.cursor = base.limit - v_1;
}
}
if (!base.slice_del())
@@ -545,7 +512,7 @@ PorterStemmer = function() {
this.stem = /** @return {boolean} */ function() {
B_Y_found = false;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
base.bra = base.cursor;
if (!(base.eq_s("y")))
@@ -560,15 +527,15 @@ PorterStemmer = function() {
B_Y_found = true;
}
base.cursor = v_1;
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab1: {
while(true)
{
- var /** number */ v_3 = base.cursor;
+ /** @const */ var /** number */ v_3 = base.cursor;
lab2: {
golab3: while(true)
{
- var /** number */ v_4 = base.cursor;
+ /** @const */ var /** number */ v_4 = base.cursor;
lab4: {
if (!(base.in_grouping(g_v, 97, 121)))
{
@@ -604,125 +571,85 @@ PorterStemmer = function() {
base.cursor = v_2;
I_p1 = base.limit;
I_p2 = base.limit;
- var /** number */ v_5 = base.cursor;
+ /** @const */ var /** number */ v_5 = base.cursor;
lab5: {
- golab6: while(true)
+ if (!base.go_out_grouping(g_v, 97, 121))
{
- lab7: {
- if (!(base.in_grouping(g_v, 97, 121)))
- {
- break lab7;
- }
- break golab6;
- }
- if (base.cursor >= base.limit)
- {
- break lab5;
- }
- base.cursor++;
+ break lab5;
}
- golab8: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 121))
{
- lab9: {
- if (!(base.out_grouping(g_v, 97, 121)))
- {
- break lab9;
- }
- break golab8;
- }
- if (base.cursor >= base.limit)
- {
- break lab5;
- }
- base.cursor++;
+ break lab5;
}
+ base.cursor++;
I_p1 = base.cursor;
- golab10: while(true)
+ if (!base.go_out_grouping(g_v, 97, 121))
{
- lab11: {
- if (!(base.in_grouping(g_v, 97, 121)))
- {
- break lab11;
- }
- break golab10;
- }
- if (base.cursor >= base.limit)
- {
- break lab5;
- }
- base.cursor++;
+ break lab5;
}
- golab12: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 121))
{
- lab13: {
- if (!(base.out_grouping(g_v, 97, 121)))
- {
- break lab13;
- }
- break golab12;
- }
- if (base.cursor >= base.limit)
- {
- break lab5;
- }
- base.cursor++;
+ break lab5;
}
+ base.cursor++;
I_p2 = base.cursor;
}
base.cursor = v_5;
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_10 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
r_Step_1a();
- base.cursor = base.limit - v_10;
- var /** number */ v_11 = base.limit - base.cursor;
+ base.cursor = base.limit - v_6;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
r_Step_1b();
- base.cursor = base.limit - v_11;
- var /** number */ v_12 = base.limit - base.cursor;
+ base.cursor = base.limit - v_7;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
r_Step_1c();
- base.cursor = base.limit - v_12;
- var /** number */ v_13 = base.limit - base.cursor;
+ base.cursor = base.limit - v_8;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
r_Step_2();
- base.cursor = base.limit - v_13;
- var /** number */ v_14 = base.limit - base.cursor;
+ base.cursor = base.limit - v_9;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
r_Step_3();
- base.cursor = base.limit - v_14;
- var /** number */ v_15 = base.limit - base.cursor;
+ base.cursor = base.limit - v_10;
+ /** @const */ var /** number */ v_11 = base.limit - base.cursor;
r_Step_4();
- base.cursor = base.limit - v_15;
- var /** number */ v_16 = base.limit - base.cursor;
+ base.cursor = base.limit - v_11;
+ /** @const */ var /** number */ v_12 = base.limit - base.cursor;
r_Step_5a();
- base.cursor = base.limit - v_16;
- var /** number */ v_17 = base.limit - base.cursor;
+ base.cursor = base.limit - v_12;
+ /** @const */ var /** number */ v_13 = base.limit - base.cursor;
r_Step_5b();
- base.cursor = base.limit - v_17;
+ base.cursor = base.limit - v_13;
base.cursor = base.limit_backward;
- var /** number */ v_18 = base.cursor;
- lab14: {
+ /** @const */ var /** number */ v_14 = base.cursor;
+ lab6: {
if (!B_Y_found)
{
- break lab14;
+ break lab6;
}
while(true)
{
- var /** number */ v_19 = base.cursor;
- lab15: {
- golab16: while(true)
+ /** @const */ var /** number */ v_15 = base.cursor;
+ lab7: {
+ golab8: while(true)
{
- var /** number */ v_20 = base.cursor;
- lab17: {
+ /** @const */ var /** number */ v_16 = base.cursor;
+ lab9: {
base.bra = base.cursor;
if (!(base.eq_s("Y")))
{
- break lab17;
+ break lab9;
}
base.ket = base.cursor;
- base.cursor = v_20;
- break golab16;
+ base.cursor = v_16;
+ break golab8;
}
- base.cursor = v_20;
+ base.cursor = v_16;
if (base.cursor >= base.limit)
{
- break lab15;
+ break lab7;
}
base.cursor++;
}
@@ -732,11 +659,11 @@ PorterStemmer = function() {
}
continue;
}
- base.cursor = v_19;
+ base.cursor = v_15;
break;
}
}
- base.cursor = v_18;
+ base.cursor = v_14;
return true;
};
diff --git a/sphinx/search/non-minified-js/portuguese-stemmer.js b/sphinx/search/non-minified-js/portuguese-stemmer.js
index 662b976565a..2b4a63fafe6 100644
--- a/sphinx/search/non-minified-js/portuguese-stemmer.js
+++ b/sphinx/search/non-minified-js/portuguese-stemmer.js
@@ -1,8 +1,9 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from portuguese.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-PortugueseStemmer = function() {
+var PortugueseStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
["", -1, 3],
["\u00E3", 0, 1],
@@ -234,14 +235,10 @@ PortugueseStemmer = function() {
var /** number */ among_var;
while(true)
{
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
base.bra = base.cursor;
among_var = base.find_among(a_0);
- if (among_var == 0)
- {
- break lab0;
- }
base.ket = base.cursor;
switch (among_var) {
case 1:
@@ -277,37 +274,27 @@ PortugueseStemmer = function() {
I_pV = base.limit;
I_p1 = base.limit;
I_p2 = base.limit;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
lab1: {
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab2: {
if (!(base.in_grouping(g_v, 97, 250)))
{
break lab2;
}
lab3: {
- var /** number */ v_3 = base.cursor;
+ /** @const */ var /** number */ v_3 = base.cursor;
lab4: {
if (!(base.out_grouping(g_v, 97, 250)))
{
break lab4;
}
- golab5: while(true)
+ if (!base.go_out_grouping(g_v, 97, 250))
{
- lab6: {
- if (!(base.in_grouping(g_v, 97, 250)))
- {
- break lab6;
- }
- break golab5;
- }
- if (base.cursor >= base.limit)
- {
- break lab4;
- }
- base.cursor++;
+ break lab4;
}
+ base.cursor++;
break lab3;
}
base.cursor = v_3;
@@ -315,21 +302,11 @@ PortugueseStemmer = function() {
{
break lab2;
}
- golab7: while(true)
+ if (!base.go_in_grouping(g_v, 97, 250))
{
- lab8: {
- if (!(base.out_grouping(g_v, 97, 250)))
- {
- break lab8;
- }
- break golab7;
- }
- if (base.cursor >= base.limit)
- {
- break lab2;
- }
- base.cursor++;
+ break lab2;
}
+ base.cursor++;
}
break lab1;
}
@@ -338,31 +315,21 @@ PortugueseStemmer = function() {
{
break lab0;
}
- lab9: {
- var /** number */ v_6 = base.cursor;
- lab10: {
+ lab5: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab6: {
if (!(base.out_grouping(g_v, 97, 250)))
{
- break lab10;
+ break lab6;
}
- golab11: while(true)
+ if (!base.go_out_grouping(g_v, 97, 250))
{
- lab12: {
- if (!(base.in_grouping(g_v, 97, 250)))
- {
- break lab12;
- }
- break golab11;
- }
- if (base.cursor >= base.limit)
- {
- break lab10;
- }
- base.cursor++;
+ break lab6;
}
- break lab9;
+ base.cursor++;
+ break lab5;
}
- base.cursor = v_6;
+ base.cursor = v_4;
if (!(base.in_grouping(g_v, 97, 250)))
{
break lab0;
@@ -377,72 +344,32 @@ PortugueseStemmer = function() {
I_pV = base.cursor;
}
base.cursor = v_1;
- var /** number */ v_8 = base.cursor;
- lab13: {
- golab14: while(true)
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab7: {
+ if (!base.go_out_grouping(g_v, 97, 250))
{
- lab15: {
- if (!(base.in_grouping(g_v, 97, 250)))
- {
- break lab15;
- }
- break golab14;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
- golab16: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 250))
{
- lab17: {
- if (!(base.out_grouping(g_v, 97, 250)))
- {
- break lab17;
- }
- break golab16;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
+ base.cursor++;
I_p1 = base.cursor;
- golab18: while(true)
+ if (!base.go_out_grouping(g_v, 97, 250))
{
- lab19: {
- if (!(base.in_grouping(g_v, 97, 250)))
- {
- break lab19;
- }
- break golab18;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
- golab20: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 250))
{
- lab21: {
- if (!(base.out_grouping(g_v, 97, 250)))
- {
- break lab21;
- }
- break golab20;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
+ base.cursor++;
I_p2 = base.cursor;
}
- base.cursor = v_8;
+ base.cursor = v_5;
return true;
};
@@ -451,14 +378,10 @@ PortugueseStemmer = function() {
var /** number */ among_var;
while(true)
{
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
base.bra = base.cursor;
among_var = base.find_among(a_1);
- if (among_var == 0)
- {
- break lab0;
- }
base.ket = base.cursor;
switch (among_var) {
case 1:
@@ -491,29 +414,17 @@ PortugueseStemmer = function() {
/** @return {boolean} */
function r_RV() {
- if (!(I_pV <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_pV <= base.cursor;
};
/** @return {boolean} */
function r_R1() {
- if (!(I_p1 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p1 <= base.cursor;
};
/** @return {boolean} */
function r_R2() {
- if (!(I_p2 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p2 <= base.cursor;
};
/** @return {boolean} */
@@ -576,7 +487,7 @@ PortugueseStemmer = function() {
{
return false;
}
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
base.ket = base.cursor;
among_var = base.find_among_b(a_2);
@@ -626,7 +537,7 @@ PortugueseStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab1: {
base.ket = base.cursor;
if (base.find_among_b(a_3) == 0)
@@ -655,7 +566,7 @@ PortugueseStemmer = function() {
{
return false;
}
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab2: {
base.ket = base.cursor;
if (base.find_among_b(a_4) == 0)
@@ -684,7 +595,7 @@ PortugueseStemmer = function() {
{
return false;
}
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab3: {
base.ket = base.cursor;
if (!(base.eq_s_b("at")))
@@ -728,12 +639,12 @@ PortugueseStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_pV;
base.ket = base.cursor;
if (base.find_among_b(a_6) == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
@@ -741,7 +652,7 @@ PortugueseStemmer = function() {
{
return false;
}
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return true;
};
@@ -786,14 +697,14 @@ PortugueseStemmer = function() {
}
base.ket = base.cursor;
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!(base.eq_s_b("u")))
{
break lab1;
}
base.bra = base.cursor;
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
if (!(base.eq_s_b("g")))
{
break lab1;
@@ -807,7 +718,7 @@ PortugueseStemmer = function() {
return false;
}
base.bra = base.cursor;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
if (!(base.eq_s_b("c")))
{
return false;
@@ -834,19 +745,19 @@ PortugueseStemmer = function() {
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
r_prelude();
base.cursor = v_1;
r_mark_regions();
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab0: {
lab1: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab2: {
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab3: {
- var /** number */ v_6 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab4: {
if (!r_standard_suffix())
{
@@ -854,14 +765,14 @@ PortugueseStemmer = function() {
}
break lab3;
}
- base.cursor = base.limit - v_6;
+ base.cursor = base.limit - v_5;
if (!r_verb_suffix())
{
break lab2;
}
}
- base.cursor = base.limit - v_5;
- var /** number */ v_7 = base.limit - base.cursor;
+ base.cursor = base.limit - v_4;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
lab5: {
base.ket = base.cursor;
if (!(base.eq_s_b("i")))
@@ -869,12 +780,12 @@ PortugueseStemmer = function() {
break lab5;
}
base.bra = base.cursor;
- var /** number */ v_8 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
if (!(base.eq_s_b("c")))
{
break lab5;
}
- base.cursor = base.limit - v_8;
+ base.cursor = base.limit - v_7;
if (!r_RV())
{
break lab5;
@@ -884,24 +795,24 @@ PortugueseStemmer = function() {
return false;
}
}
- base.cursor = base.limit - v_7;
+ base.cursor = base.limit - v_6;
break lab1;
}
- base.cursor = base.limit - v_4;
+ base.cursor = base.limit - v_3;
if (!r_residual_suffix())
{
break lab0;
}
}
}
- base.cursor = base.limit - v_3;
- var /** number */ v_9 = base.limit - base.cursor;
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
r_residual_form();
- base.cursor = base.limit - v_9;
+ base.cursor = base.limit - v_8;
base.cursor = base.limit_backward;
- var /** number */ v_10 = base.cursor;
+ /** @const */ var /** number */ v_9 = base.cursor;
r_postlude();
- base.cursor = v_10;
+ base.cursor = v_9;
return true;
};
diff --git a/sphinx/search/non-minified-js/romanian-stemmer.js b/sphinx/search/non-minified-js/romanian-stemmer.js
index 67538f1c008..4c58e819ec2 100644
--- a/sphinx/search/non-minified-js/romanian-stemmer.js
+++ b/sphinx/search/non-minified-js/romanian-stemmer.js
@@ -1,20 +1,26 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from romanian.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-RomanianStemmer = function() {
+var RomanianStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
+ ["\u015F", -1, 1],
+ ["\u0163", -1, 2]
+ ];
+
+ /** @const */ var a_1 = [
["", -1, 3],
["I", 0, 1],
["U", 0, 2]
];
- /** @const */ var a_1 = [
+ /** @const */ var a_2 = [
["ea", -1, 3],
- ["a\u0163ia", -1, 7],
+ ["a\u021Bia", -1, 7],
["aua", -1, 2],
["iua", -1, 4],
- ["a\u0163ie", -1, 7],
+ ["a\u021Bie", -1, 7],
["ele", -1, 3],
["ile", -1, 5],
["iile", 6, 4],
@@ -28,14 +34,14 @@ RomanianStemmer = function() {
["iilor", 14, 4]
];
- /** @const */ var a_2 = [
+ /** @const */ var a_3 = [
["icala", -1, 4],
["iciva", -1, 4],
["ativa", -1, 5],
["itiva", -1, 6],
["icale", -1, 4],
- ["a\u0163iune", -1, 5],
- ["i\u0163iune", -1, 6],
+ ["a\u021Biune", -1, 5],
+ ["i\u021Biune", -1, 6],
["atoare", -1, 5],
["itoare", -1, 6],
["\u0103toare", -1, 5],
@@ -60,9 +66,9 @@ RomanianStemmer = function() {
["icit\u0103i", -1, 4],
["abilit\u0103i", -1, 1],
["ivit\u0103i", -1, 3],
- ["icit\u0103\u0163i", -1, 4],
- ["abilit\u0103\u0163i", -1, 1],
- ["ivit\u0103\u0163i", -1, 3],
+ ["icit\u0103\u021Bi", -1, 4],
+ ["abilit\u0103\u021Bi", -1, 1],
+ ["ivit\u0103\u021Bi", -1, 3],
["ical", -1, 4],
["ator", -1, 5],
["icator", 35, 4],
@@ -77,7 +83,7 @@ RomanianStemmer = function() {
["itiv\u0103", -1, 6]
];
- /** @const */ var a_3 = [
+ /** @const */ var a_4 = [
["ica", -1, 1],
["abila", -1, 1],
["ibila", -1, 1],
@@ -114,11 +120,11 @@ RomanianStemmer = function() {
["anti", -1, 1],
["isti", -1, 3],
["uti", -1, 1],
- ["i\u015Fti", -1, 3],
+ ["i\u0219ti", -1, 3],
["ivi", -1, 1],
["it\u0103i", -1, 1],
- ["o\u015Fi", -1, 1],
- ["it\u0103\u0163i", -1, 1],
+ ["o\u0219i", -1, 1],
+ ["it\u0103\u021Bi", -1, 1],
["abil", -1, 1],
["ibil", -1, 1],
["ism", -1, 3],
@@ -142,7 +148,7 @@ RomanianStemmer = function() {
["iv\u0103", -1, 1]
];
- /** @const */ var a_4 = [
+ /** @const */ var a_5 = [
["ea", -1, 1],
["ia", -1, 1],
["esc", -1, 1],
@@ -159,44 +165,44 @@ RomanianStemmer = function() {
["ise", 10, 1],
["use", 10, 1],
["\u00E2se", 10, 1],
- ["e\u015Fte", -1, 1],
- ["\u0103\u015Fte", -1, 1],
+ ["e\u0219te", -1, 1],
+ ["\u0103\u0219te", -1, 1],
["eze", -1, 1],
["ai", -1, 1],
["eai", 19, 1],
["iai", 19, 1],
["sei", -1, 2],
- ["e\u015Fti", -1, 1],
- ["\u0103\u015Fti", -1, 1],
+ ["e\u0219ti", -1, 1],
+ ["\u0103\u0219ti", -1, 1],
["ui", -1, 1],
["ezi", -1, 1],
["\u00E2i", -1, 1],
- ["a\u015Fi", -1, 1],
- ["se\u015Fi", -1, 2],
- ["ase\u015Fi", 29, 1],
- ["sese\u015Fi", 29, 2],
- ["ise\u015Fi", 29, 1],
- ["use\u015Fi", 29, 1],
- ["\u00E2se\u015Fi", 29, 1],
- ["i\u015Fi", -1, 1],
- ["u\u015Fi", -1, 1],
- ["\u00E2\u015Fi", -1, 1],
- ["a\u0163i", -1, 2],
- ["ea\u0163i", 38, 1],
- ["ia\u0163i", 38, 1],
- ["e\u0163i", -1, 2],
- ["i\u0163i", -1, 2],
- ["\u00E2\u0163i", -1, 2],
- ["ar\u0103\u0163i", -1, 1],
- ["ser\u0103\u0163i", -1, 2],
- ["aser\u0103\u0163i", 45, 1],
- ["seser\u0103\u0163i", 45, 2],
- ["iser\u0103\u0163i", 45, 1],
- ["user\u0103\u0163i", 45, 1],
- ["\u00E2ser\u0103\u0163i", 45, 1],
- ["ir\u0103\u0163i", -1, 1],
- ["ur\u0103\u0163i", -1, 1],
- ["\u00E2r\u0103\u0163i", -1, 1],
+ ["a\u0219i", -1, 1],
+ ["se\u0219i", -1, 2],
+ ["ase\u0219i", 29, 1],
+ ["sese\u0219i", 29, 2],
+ ["ise\u0219i", 29, 1],
+ ["use\u0219i", 29, 1],
+ ["\u00E2se\u0219i", 29, 1],
+ ["i\u0219i", -1, 1],
+ ["u\u0219i", -1, 1],
+ ["\u00E2\u0219i", -1, 1],
+ ["a\u021Bi", -1, 2],
+ ["ea\u021Bi", 38, 1],
+ ["ia\u021Bi", 38, 1],
+ ["e\u021Bi", -1, 2],
+ ["i\u021Bi", -1, 2],
+ ["\u00E2\u021Bi", -1, 2],
+ ["ar\u0103\u021Bi", -1, 1],
+ ["ser\u0103\u021Bi", -1, 2],
+ ["aser\u0103\u021Bi", 45, 1],
+ ["seser\u0103\u021Bi", 45, 2],
+ ["iser\u0103\u021Bi", 45, 1],
+ ["user\u0103\u021Bi", 45, 1],
+ ["\u00E2ser\u0103\u021Bi", 45, 1],
+ ["ir\u0103\u021Bi", -1, 1],
+ ["ur\u0103\u021Bi", -1, 1],
+ ["\u00E2r\u0103\u021Bi", -1, 1],
["am", -1, 1],
["eam", 54, 1],
["iam", 54, 1],
@@ -239,7 +245,7 @@ RomanianStemmer = function() {
["eaz\u0103", -1, 1]
];
- /** @const */ var a_5 = [
+ /** @const */ var a_6 = [
["a", -1, 1],
["e", -1, 1],
["ie", 1, 1],
@@ -255,15 +261,69 @@ RomanianStemmer = function() {
var /** number */ I_pV = 0;
+ /** @return {boolean} */
+ function r_norm() {
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ golab2: while(true)
+ {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab3: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_0);
+ if (among_var == 0)
+ {
+ break lab3;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u0219"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u021B"))
+ {
+ return false;
+ }
+ break;
+ }
+ base.cursor = v_3;
+ break golab2;
+ }
+ base.cursor = v_3;
+ if (base.cursor >= base.limit)
+ {
+ break lab1;
+ }
+ base.cursor++;
+ }
+ continue;
+ }
+ base.cursor = v_2;
+ break;
+ }
+ }
+ base.cursor = v_1;
+ return true;
+ };
+
/** @return {boolean} */
function r_prelude() {
while(true)
{
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
golab1: while(true)
{
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab2: {
if (!(base.in_grouping(g_v, 97, 259)))
{
@@ -271,7 +331,7 @@ RomanianStemmer = function() {
}
base.bra = base.cursor;
lab3: {
- var /** number */ v_3 = base.cursor;
+ /** @const */ var /** number */ v_3 = base.cursor;
lab4: {
if (!(base.eq_s("u")))
{
@@ -326,37 +386,27 @@ RomanianStemmer = function() {
I_pV = base.limit;
I_p1 = base.limit;
I_p2 = base.limit;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
lab1: {
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab2: {
if (!(base.in_grouping(g_v, 97, 259)))
{
break lab2;
}
lab3: {
- var /** number */ v_3 = base.cursor;
+ /** @const */ var /** number */ v_3 = base.cursor;
lab4: {
if (!(base.out_grouping(g_v, 97, 259)))
{
break lab4;
}
- golab5: while(true)
+ if (!base.go_out_grouping(g_v, 97, 259))
{
- lab6: {
- if (!(base.in_grouping(g_v, 97, 259)))
- {
- break lab6;
- }
- break golab5;
- }
- if (base.cursor >= base.limit)
- {
- break lab4;
- }
- base.cursor++;
+ break lab4;
}
+ base.cursor++;
break lab3;
}
base.cursor = v_3;
@@ -364,21 +414,11 @@ RomanianStemmer = function() {
{
break lab2;
}
- golab7: while(true)
+ if (!base.go_in_grouping(g_v, 97, 259))
{
- lab8: {
- if (!(base.out_grouping(g_v, 97, 259)))
- {
- break lab8;
- }
- break golab7;
- }
- if (base.cursor >= base.limit)
- {
- break lab2;
- }
- base.cursor++;
+ break lab2;
}
+ base.cursor++;
}
break lab1;
}
@@ -387,31 +427,21 @@ RomanianStemmer = function() {
{
break lab0;
}
- lab9: {
- var /** number */ v_6 = base.cursor;
- lab10: {
+ lab5: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab6: {
if (!(base.out_grouping(g_v, 97, 259)))
{
- break lab10;
+ break lab6;
}
- golab11: while(true)
+ if (!base.go_out_grouping(g_v, 97, 259))
{
- lab12: {
- if (!(base.in_grouping(g_v, 97, 259)))
- {
- break lab12;
- }
- break golab11;
- }
- if (base.cursor >= base.limit)
- {
- break lab10;
- }
- base.cursor++;
+ break lab6;
}
- break lab9;
+ base.cursor++;
+ break lab5;
}
- base.cursor = v_6;
+ base.cursor = v_4;
if (!(base.in_grouping(g_v, 97, 259)))
{
break lab0;
@@ -426,72 +456,32 @@ RomanianStemmer = function() {
I_pV = base.cursor;
}
base.cursor = v_1;
- var /** number */ v_8 = base.cursor;
- lab13: {
- golab14: while(true)
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab7: {
+ if (!base.go_out_grouping(g_v, 97, 259))
{
- lab15: {
- if (!(base.in_grouping(g_v, 97, 259)))
- {
- break lab15;
- }
- break golab14;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
- golab16: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 259))
{
- lab17: {
- if (!(base.out_grouping(g_v, 97, 259)))
- {
- break lab17;
- }
- break golab16;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
+ base.cursor++;
I_p1 = base.cursor;
- golab18: while(true)
+ if (!base.go_out_grouping(g_v, 97, 259))
{
- lab19: {
- if (!(base.in_grouping(g_v, 97, 259)))
- {
- break lab19;
- }
- break golab18;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
- golab20: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 259))
{
- lab21: {
- if (!(base.out_grouping(g_v, 97, 259)))
- {
- break lab21;
- }
- break golab20;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
+ base.cursor++;
I_p2 = base.cursor;
}
- base.cursor = v_8;
+ base.cursor = v_5;
return true;
};
@@ -500,14 +490,10 @@ RomanianStemmer = function() {
var /** number */ among_var;
while(true)
{
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
base.bra = base.cursor;
- among_var = base.find_among(a_0);
- if (among_var == 0)
- {
- break lab0;
- }
+ among_var = base.find_among(a_1);
base.ket = base.cursor;
switch (among_var) {
case 1:
@@ -540,36 +526,24 @@ RomanianStemmer = function() {
/** @return {boolean} */
function r_RV() {
- if (!(I_pV <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_pV <= base.cursor;
};
/** @return {boolean} */
function r_R1() {
- if (!(I_p1 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p1 <= base.cursor;
};
/** @return {boolean} */
function r_R2() {
- if (!(I_p2 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p2 <= base.cursor;
};
/** @return {boolean} */
function r_step_0() {
var /** number */ among_var;
base.ket = base.cursor;
- among_var = base.find_among_b(a_1);
+ among_var = base.find_among_b(a_2);
if (among_var == 0)
{
return false;
@@ -606,7 +580,7 @@ RomanianStemmer = function() {
break;
case 5:
{
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
if (!(base.eq_s_b("ab")))
{
@@ -628,7 +602,7 @@ RomanianStemmer = function() {
}
break;
case 7:
- if (!base.slice_from("a\u0163i"))
+ if (!base.slice_from("a\u021Bi"))
{
return false;
}
@@ -640,9 +614,9 @@ RomanianStemmer = function() {
/** @return {boolean} */
function r_combo_suffix() {
var /** number */ among_var;
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
base.ket = base.cursor;
- among_var = base.find_among_b(a_2);
+ among_var = base.find_among_b(a_3);
if (among_var == 0)
{
return false;
@@ -701,7 +675,7 @@ RomanianStemmer = function() {
B_standard_suffix_removed = false;
while(true)
{
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
if (!r_combo_suffix())
{
@@ -713,7 +687,7 @@ RomanianStemmer = function() {
break;
}
base.ket = base.cursor;
- among_var = base.find_among_b(a_3);
+ among_var = base.find_among_b(a_4);
if (among_var == 0)
{
return false;
@@ -731,7 +705,7 @@ RomanianStemmer = function() {
}
break;
case 2:
- if (!(base.eq_s_b("\u0163")))
+ if (!(base.eq_s_b("\u021B")))
{
return false;
}
@@ -759,20 +733,20 @@ RomanianStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_pV;
base.ket = base.cursor;
- among_var = base.find_among_b(a_4);
+ among_var = base.find_among_b(a_5);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
switch (among_var) {
case 1:
lab0: {
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab1: {
if (!(base.out_grouping_b(g_v, 97, 259)))
{
@@ -780,10 +754,10 @@ RomanianStemmer = function() {
}
break lab0;
}
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
if (!(base.eq_s_b("u")))
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
}
@@ -799,14 +773,14 @@ RomanianStemmer = function() {
}
break;
}
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return true;
};
/** @return {boolean} */
function r_vowel_suffix() {
base.ket = base.cursor;
- if (base.find_among_b(a_5) == 0)
+ if (base.find_among_b(a_6) == 0)
{
return false;
}
@@ -823,21 +797,22 @@ RomanianStemmer = function() {
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
+ r_norm();
+ /** @const */ var /** number */ v_1 = base.cursor;
r_prelude();
base.cursor = v_1;
r_mark_regions();
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
r_step_0();
- base.cursor = base.limit - v_3;
- var /** number */ v_4 = base.limit - base.cursor;
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
r_standard_suffix();
- base.cursor = base.limit - v_4;
- var /** number */ v_5 = base.limit - base.cursor;
+ base.cursor = base.limit - v_3;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab0: {
lab1: {
- var /** number */ v_6 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab2: {
if (!B_standard_suffix_removed)
{
@@ -845,21 +820,21 @@ RomanianStemmer = function() {
}
break lab1;
}
- base.cursor = base.limit - v_6;
+ base.cursor = base.limit - v_5;
if (!r_verb_suffix())
{
break lab0;
}
}
}
- base.cursor = base.limit - v_5;
- var /** number */ v_7 = base.limit - base.cursor;
+ base.cursor = base.limit - v_4;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
r_vowel_suffix();
- base.cursor = base.limit - v_7;
+ base.cursor = base.limit - v_6;
base.cursor = base.limit_backward;
- var /** number */ v_8 = base.cursor;
+ /** @const */ var /** number */ v_7 = base.cursor;
r_postlude();
- base.cursor = v_8;
+ base.cursor = v_7;
return true;
};
diff --git a/sphinx/search/non-minified-js/russian-stemmer.js b/sphinx/search/non-minified-js/russian-stemmer.js
index 28ded5fc816..36c655d6bd7 100644
--- a/sphinx/search/non-minified-js/russian-stemmer.js
+++ b/sphinx/search/non-minified-js/russian-stemmer.js
@@ -1,8 +1,9 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from russian.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-RussianStemmer = function() {
+var RussianStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
["\u0432", -1, 1],
["\u0438\u0432", 0, 2],
@@ -170,69 +171,29 @@ RussianStemmer = function() {
function r_mark_regions() {
I_pV = base.limit;
I_p2 = base.limit;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
- golab1: while(true)
+ if (!base.go_out_grouping(g_v, 1072, 1103))
{
- lab2: {
- if (!(base.in_grouping(g_v, 1072, 1103)))
- {
- break lab2;
- }
- break golab1;
- }
- if (base.cursor >= base.limit)
- {
- break lab0;
- }
- base.cursor++;
+ break lab0;
}
+ base.cursor++;
I_pV = base.cursor;
- golab3: while(true)
+ if (!base.go_in_grouping(g_v, 1072, 1103))
{
- lab4: {
- if (!(base.out_grouping(g_v, 1072, 1103)))
- {
- break lab4;
- }
- break golab3;
- }
- if (base.cursor >= base.limit)
- {
- break lab0;
- }
- base.cursor++;
+ break lab0;
}
- golab5: while(true)
+ base.cursor++;
+ if (!base.go_out_grouping(g_v, 1072, 1103))
{
- lab6: {
- if (!(base.in_grouping(g_v, 1072, 1103)))
- {
- break lab6;
- }
- break golab5;
- }
- if (base.cursor >= base.limit)
- {
- break lab0;
- }
- base.cursor++;
+ break lab0;
}
- golab7: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 1072, 1103))
{
- lab8: {
- if (!(base.out_grouping(g_v, 1072, 1103)))
- {
- break lab8;
- }
- break golab7;
- }
- if (base.cursor >= base.limit)
- {
- break lab0;
- }
- base.cursor++;
+ break lab0;
}
+ base.cursor++;
I_p2 = base.cursor;
}
base.cursor = v_1;
@@ -241,11 +202,7 @@ RussianStemmer = function() {
/** @return {boolean} */
function r_R2() {
- if (!(I_p2 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p2 <= base.cursor;
};
/** @return {boolean} */
@@ -261,7 +218,7 @@ RussianStemmer = function() {
switch (among_var) {
case 1:
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!(base.eq_s_b("\u0430")))
{
@@ -312,7 +269,7 @@ RussianStemmer = function() {
{
return false;
}
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
base.ket = base.cursor;
among_var = base.find_among_b(a_2);
@@ -325,7 +282,7 @@ RussianStemmer = function() {
switch (among_var) {
case 1:
lab1: {
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab2: {
if (!(base.eq_s_b("\u0430")))
{
@@ -384,7 +341,7 @@ RussianStemmer = function() {
switch (among_var) {
case 1:
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!(base.eq_s_b("\u0430")))
{
@@ -499,15 +456,15 @@ RussianStemmer = function() {
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
while(true)
{
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab1: {
golab2: while(true)
{
- var /** number */ v_3 = base.cursor;
+ /** @const */ var /** number */ v_3 = base.cursor;
lab3: {
base.bra = base.cursor;
if (!(base.eq_s("\u0451")))
@@ -542,12 +499,12 @@ RussianStemmer = function() {
{
return false;
}
- var /** number */ v_6 = base.limit_backward;
+ /** @const */ var /** number */ v_4 = base.limit_backward;
base.limit_backward = I_pV;
- var /** number */ v_7 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab4: {
lab5: {
- var /** number */ v_8 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
lab6: {
if (!r_perfective_gerund())
{
@@ -555,17 +512,17 @@ RussianStemmer = function() {
}
break lab5;
}
- base.cursor = base.limit - v_8;
- var /** number */ v_9 = base.limit - base.cursor;
+ base.cursor = base.limit - v_6;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
lab7: {
if (!r_reflexive())
{
- base.cursor = base.limit - v_9;
+ base.cursor = base.limit - v_7;
break lab7;
}
}
lab8: {
- var /** number */ v_10 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
lab9: {
if (!r_adjectival())
{
@@ -573,7 +530,7 @@ RussianStemmer = function() {
}
break lab8;
}
- base.cursor = base.limit - v_10;
+ base.cursor = base.limit - v_8;
lab10: {
if (!r_verb())
{
@@ -581,7 +538,7 @@ RussianStemmer = function() {
}
break lab8;
}
- base.cursor = base.limit - v_10;
+ base.cursor = base.limit - v_8;
if (!r_noun())
{
break lab4;
@@ -589,13 +546,13 @@ RussianStemmer = function() {
}
}
}
- base.cursor = base.limit - v_7;
- var /** number */ v_11 = base.limit - base.cursor;
+ base.cursor = base.limit - v_5;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
lab11: {
base.ket = base.cursor;
if (!(base.eq_s_b("\u0438")))
{
- base.cursor = base.limit - v_11;
+ base.cursor = base.limit - v_9;
break lab11;
}
base.bra = base.cursor;
@@ -604,13 +561,13 @@ RussianStemmer = function() {
return false;
}
}
- var /** number */ v_12 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
r_derivational();
- base.cursor = base.limit - v_12;
- var /** number */ v_13 = base.limit - base.cursor;
+ base.cursor = base.limit - v_10;
+ /** @const */ var /** number */ v_11 = base.limit - base.cursor;
r_tidy_up();
- base.cursor = base.limit - v_13;
- base.limit_backward = v_6;
+ base.cursor = base.limit - v_11;
+ base.limit_backward = v_4;
base.cursor = base.limit_backward;
return true;
};
diff --git a/sphinx/search/non-minified-js/serbian-stemmer.js b/sphinx/search/non-minified-js/serbian-stemmer.js
new file mode 100644
index 00000000000..7d6d0ce32e9
--- /dev/null
+++ b/sphinx/search/non-minified-js/serbian-stemmer.js
@@ -0,0 +1,4516 @@
+// Generated from serbian.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var SerbianStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["\u0430", -1, 1],
+ ["\u0431", -1, 2],
+ ["\u0432", -1, 3],
+ ["\u0433", -1, 4],
+ ["\u0434", -1, 5],
+ ["\u0435", -1, 7],
+ ["\u0436", -1, 8],
+ ["\u0437", -1, 9],
+ ["\u0438", -1, 10],
+ ["\u043A", -1, 12],
+ ["\u043B", -1, 13],
+ ["\u043C", -1, 15],
+ ["\u043D", -1, 16],
+ ["\u043E", -1, 18],
+ ["\u043F", -1, 19],
+ ["\u0440", -1, 20],
+ ["\u0441", -1, 21],
+ ["\u0442", -1, 22],
+ ["\u0443", -1, 24],
+ ["\u0444", -1, 25],
+ ["\u0445", -1, 26],
+ ["\u0446", -1, 27],
+ ["\u0447", -1, 28],
+ ["\u0448", -1, 30],
+ ["\u0452", -1, 6],
+ ["\u0458", -1, 11],
+ ["\u0459", -1, 14],
+ ["\u045A", -1, 17],
+ ["\u045B", -1, 23],
+ ["\u045F", -1, 29]
+ ];
+
+ /** @const */ var a_1 = [
+ ["daba", -1, 73],
+ ["ajaca", -1, 12],
+ ["ejaca", -1, 14],
+ ["ljaca", -1, 13],
+ ["njaca", -1, 85],
+ ["ojaca", -1, 15],
+ ["alaca", -1, 82],
+ ["elaca", -1, 83],
+ ["olaca", -1, 84],
+ ["maca", -1, 75],
+ ["naca", -1, 76],
+ ["raca", -1, 81],
+ ["saca", -1, 80],
+ ["vaca", -1, 79],
+ ["\u0161aca", -1, 18],
+ ["aoca", -1, 82],
+ ["acaka", -1, 55],
+ ["ajaka", -1, 16],
+ ["ojaka", -1, 17],
+ ["anaka", -1, 78],
+ ["ataka", -1, 58],
+ ["etaka", -1, 59],
+ ["itaka", -1, 60],
+ ["otaka", -1, 61],
+ ["utaka", -1, 62],
+ ["a\u010Daka", -1, 54],
+ ["esama", -1, 67],
+ ["izama", -1, 87],
+ ["jacima", -1, 5],
+ ["nicima", -1, 23],
+ ["ticima", -1, 24],
+ ["teticima", 30, 21],
+ ["zicima", -1, 25],
+ ["atcima", -1, 58],
+ ["utcima", -1, 62],
+ ["\u010Dcima", -1, 74],
+ ["pesima", -1, 2],
+ ["inzima", -1, 19],
+ ["lozima", -1, 1],
+ ["metara", -1, 68],
+ ["centara", -1, 69],
+ ["istara", -1, 70],
+ ["ekata", -1, 86],
+ ["anata", -1, 53],
+ ["nstava", -1, 22],
+ ["kustava", -1, 29],
+ ["ajac", -1, 12],
+ ["ejac", -1, 14],
+ ["ljac", -1, 13],
+ ["njac", -1, 85],
+ ["anjac", 49, 11],
+ ["ojac", -1, 15],
+ ["alac", -1, 82],
+ ["elac", -1, 83],
+ ["olac", -1, 84],
+ ["mac", -1, 75],
+ ["nac", -1, 76],
+ ["rac", -1, 81],
+ ["sac", -1, 80],
+ ["vac", -1, 79],
+ ["\u0161ac", -1, 18],
+ ["jebe", -1, 88],
+ ["olce", -1, 84],
+ ["kuse", -1, 27],
+ ["rave", -1, 42],
+ ["save", -1, 52],
+ ["\u0161ave", -1, 51],
+ ["baci", -1, 89],
+ ["jaci", -1, 5],
+ ["tvenici", -1, 20],
+ ["snici", -1, 26],
+ ["tetici", -1, 21],
+ ["bojci", -1, 4],
+ ["vojci", -1, 3],
+ ["ojsci", -1, 66],
+ ["atci", -1, 58],
+ ["itci", -1, 60],
+ ["utci", -1, 62],
+ ["\u010Dci", -1, 74],
+ ["pesi", -1, 2],
+ ["inzi", -1, 19],
+ ["lozi", -1, 1],
+ ["acak", -1, 55],
+ ["usak", -1, 57],
+ ["atak", -1, 58],
+ ["etak", -1, 59],
+ ["itak", -1, 60],
+ ["otak", -1, 61],
+ ["utak", -1, 62],
+ ["a\u010Dak", -1, 54],
+ ["u\u0161ak", -1, 56],
+ ["izam", -1, 87],
+ ["tican", -1, 65],
+ ["cajan", -1, 7],
+ ["\u010Dajan", -1, 6],
+ ["voljan", -1, 77],
+ ["eskan", -1, 63],
+ ["alan", -1, 40],
+ ["bilan", -1, 33],
+ ["gilan", -1, 37],
+ ["nilan", -1, 39],
+ ["rilan", -1, 38],
+ ["silan", -1, 36],
+ ["tilan", -1, 34],
+ ["avilan", -1, 35],
+ ["laran", -1, 9],
+ ["eran", -1, 8],
+ ["asan", -1, 91],
+ ["esan", -1, 10],
+ ["dusan", -1, 31],
+ ["kusan", -1, 28],
+ ["atan", -1, 47],
+ ["pletan", -1, 50],
+ ["tetan", -1, 49],
+ ["antan", -1, 32],
+ ["pravan", -1, 44],
+ ["stavan", -1, 43],
+ ["sivan", -1, 46],
+ ["tivan", -1, 45],
+ ["ozan", -1, 41],
+ ["ti\u010Dan", -1, 64],
+ ["a\u0161an", -1, 90],
+ ["du\u0161an", -1, 30],
+ ["metar", -1, 68],
+ ["centar", -1, 69],
+ ["istar", -1, 70],
+ ["ekat", -1, 86],
+ ["enat", -1, 48],
+ ["oscu", -1, 72],
+ ["o\u0161\u0107u", -1, 71]
+ ];
+
+ /** @const */ var a_2 = [
+ ["aca", -1, 124],
+ ["eca", -1, 125],
+ ["uca", -1, 126],
+ ["ga", -1, 20],
+ ["acega", 3, 124],
+ ["ecega", 3, 125],
+ ["ucega", 3, 126],
+ ["anjijega", 3, 84],
+ ["enjijega", 3, 85],
+ ["snjijega", 3, 122],
+ ["\u0161njijega", 3, 86],
+ ["kijega", 3, 95],
+ ["skijega", 11, 1],
+ ["\u0161kijega", 11, 2],
+ ["elijega", 3, 83],
+ ["nijega", 3, 13],
+ ["osijega", 3, 123],
+ ["atijega", 3, 120],
+ ["evitijega", 3, 92],
+ ["ovitijega", 3, 93],
+ ["astijega", 3, 94],
+ ["avijega", 3, 77],
+ ["evijega", 3, 78],
+ ["ivijega", 3, 79],
+ ["ovijega", 3, 80],
+ ["o\u0161ijega", 3, 91],
+ ["anjega", 3, 84],
+ ["enjega", 3, 85],
+ ["snjega", 3, 122],
+ ["\u0161njega", 3, 86],
+ ["kega", 3, 95],
+ ["skega", 30, 1],
+ ["\u0161kega", 30, 2],
+ ["elega", 3, 83],
+ ["nega", 3, 13],
+ ["anega", 34, 10],
+ ["enega", 34, 87],
+ ["snega", 34, 159],
+ ["\u0161nega", 34, 88],
+ ["osega", 3, 123],
+ ["atega", 3, 120],
+ ["evitega", 3, 92],
+ ["ovitega", 3, 93],
+ ["astega", 3, 94],
+ ["avega", 3, 77],
+ ["evega", 3, 78],
+ ["ivega", 3, 79],
+ ["ovega", 3, 80],
+ ["a\u0107ega", 3, 14],
+ ["e\u0107ega", 3, 15],
+ ["u\u0107ega", 3, 16],
+ ["o\u0161ega", 3, 91],
+ ["acoga", 3, 124],
+ ["ecoga", 3, 125],
+ ["ucoga", 3, 126],
+ ["anjoga", 3, 84],
+ ["enjoga", 3, 85],
+ ["snjoga", 3, 122],
+ ["\u0161njoga", 3, 86],
+ ["koga", 3, 95],
+ ["skoga", 59, 1],
+ ["\u0161koga", 59, 2],
+ ["loga", 3, 19],
+ ["eloga", 62, 83],
+ ["noga", 3, 13],
+ ["cinoga", 64, 137],
+ ["\u010Dinoga", 64, 89],
+ ["osoga", 3, 123],
+ ["atoga", 3, 120],
+ ["evitoga", 3, 92],
+ ["ovitoga", 3, 93],
+ ["astoga", 3, 94],
+ ["avoga", 3, 77],
+ ["evoga", 3, 78],
+ ["ivoga", 3, 79],
+ ["ovoga", 3, 80],
+ ["a\u0107oga", 3, 14],
+ ["e\u0107oga", 3, 15],
+ ["u\u0107oga", 3, 16],
+ ["o\u0161oga", 3, 91],
+ ["uga", 3, 18],
+ ["aja", -1, 109],
+ ["caja", 81, 26],
+ ["laja", 81, 30],
+ ["raja", 81, 31],
+ ["\u0107aja", 81, 28],
+ ["\u010Daja", 81, 27],
+ ["\u0111aja", 81, 29],
+ ["bija", -1, 32],
+ ["cija", -1, 33],
+ ["dija", -1, 34],
+ ["fija", -1, 40],
+ ["gija", -1, 39],
+ ["anjija", -1, 84],
+ ["enjija", -1, 85],
+ ["snjija", -1, 122],
+ ["\u0161njija", -1, 86],
+ ["kija", -1, 95],
+ ["skija", 97, 1],
+ ["\u0161kija", 97, 2],
+ ["lija", -1, 24],
+ ["elija", 100, 83],
+ ["mija", -1, 37],
+ ["nija", -1, 13],
+ ["ganija", 103, 9],
+ ["manija", 103, 6],
+ ["panija", 103, 7],
+ ["ranija", 103, 8],
+ ["tanija", 103, 5],
+ ["pija", -1, 41],
+ ["rija", -1, 42],
+ ["rarija", 110, 21],
+ ["sija", -1, 23],
+ ["osija", 112, 123],
+ ["tija", -1, 44],
+ ["atija", 114, 120],
+ ["evitija", 114, 92],
+ ["ovitija", 114, 93],
+ ["otija", 114, 22],
+ ["astija", 114, 94],
+ ["avija", -1, 77],
+ ["evija", -1, 78],
+ ["ivija", -1, 79],
+ ["ovija", -1, 80],
+ ["zija", -1, 45],
+ ["o\u0161ija", -1, 91],
+ ["\u017Eija", -1, 38],
+ ["anja", -1, 84],
+ ["enja", -1, 85],
+ ["snja", -1, 122],
+ ["\u0161nja", -1, 86],
+ ["ka", -1, 95],
+ ["ska", 131, 1],
+ ["\u0161ka", 131, 2],
+ ["ala", -1, 104],
+ ["acala", 134, 128],
+ ["astajala", 134, 106],
+ ["istajala", 134, 107],
+ ["ostajala", 134, 108],
+ ["ijala", 134, 47],
+ ["injala", 134, 114],
+ ["nala", 134, 46],
+ ["irala", 134, 100],
+ ["urala", 134, 105],
+ ["tala", 134, 113],
+ ["astala", 144, 110],
+ ["istala", 144, 111],
+ ["ostala", 144, 112],
+ ["avala", 134, 97],
+ ["evala", 134, 96],
+ ["ivala", 134, 98],
+ ["ovala", 134, 76],
+ ["uvala", 134, 99],
+ ["a\u010Dala", 134, 102],
+ ["ela", -1, 83],
+ ["ila", -1, 116],
+ ["acila", 155, 124],
+ ["lucila", 155, 121],
+ ["nila", 155, 103],
+ ["astanila", 158, 110],
+ ["istanila", 158, 111],
+ ["ostanila", 158, 112],
+ ["rosila", 155, 127],
+ ["jetila", 155, 118],
+ ["ozila", 155, 48],
+ ["a\u010Dila", 155, 101],
+ ["lu\u010Dila", 155, 117],
+ ["ro\u0161ila", 155, 90],
+ ["ola", -1, 50],
+ ["asla", -1, 115],
+ ["nula", -1, 13],
+ ["gama", -1, 20],
+ ["logama", 171, 19],
+ ["ugama", 171, 18],
+ ["ajama", -1, 109],
+ ["cajama", 174, 26],
+ ["lajama", 174, 30],
+ ["rajama", 174, 31],
+ ["\u0107ajama", 174, 28],
+ ["\u010Dajama", 174, 27],
+ ["\u0111ajama", 174, 29],
+ ["bijama", -1, 32],
+ ["cijama", -1, 33],
+ ["dijama", -1, 34],
+ ["fijama", -1, 40],
+ ["gijama", -1, 39],
+ ["lijama", -1, 35],
+ ["mijama", -1, 37],
+ ["nijama", -1, 36],
+ ["ganijama", 188, 9],
+ ["manijama", 188, 6],
+ ["panijama", 188, 7],
+ ["ranijama", 188, 8],
+ ["tanijama", 188, 5],
+ ["pijama", -1, 41],
+ ["rijama", -1, 42],
+ ["sijama", -1, 43],
+ ["tijama", -1, 44],
+ ["zijama", -1, 45],
+ ["\u017Eijama", -1, 38],
+ ["alama", -1, 104],
+ ["ijalama", 200, 47],
+ ["nalama", 200, 46],
+ ["elama", -1, 119],
+ ["ilama", -1, 116],
+ ["ramama", -1, 52],
+ ["lemama", -1, 51],
+ ["inama", -1, 11],
+ ["cinama", 207, 137],
+ ["\u010Dinama", 207, 89],
+ ["rama", -1, 52],
+ ["arama", 210, 53],
+ ["drama", 210, 54],
+ ["erama", 210, 55],
+ ["orama", 210, 56],
+ ["basama", -1, 135],
+ ["gasama", -1, 131],
+ ["jasama", -1, 129],
+ ["kasama", -1, 133],
+ ["nasama", -1, 132],
+ ["tasama", -1, 130],
+ ["vasama", -1, 134],
+ ["esama", -1, 152],
+ ["isama", -1, 154],
+ ["etama", -1, 70],
+ ["estama", -1, 71],
+ ["istama", -1, 72],
+ ["kstama", -1, 73],
+ ["ostama", -1, 74],
+ ["avama", -1, 77],
+ ["evama", -1, 78],
+ ["ivama", -1, 79],
+ ["ba\u0161ama", -1, 63],
+ ["ga\u0161ama", -1, 64],
+ ["ja\u0161ama", -1, 61],
+ ["ka\u0161ama", -1, 62],
+ ["na\u0161ama", -1, 60],
+ ["ta\u0161ama", -1, 59],
+ ["va\u0161ama", -1, 65],
+ ["e\u0161ama", -1, 66],
+ ["i\u0161ama", -1, 67],
+ ["lema", -1, 51],
+ ["acima", -1, 124],
+ ["ecima", -1, 125],
+ ["ucima", -1, 126],
+ ["ajima", -1, 109],
+ ["cajima", 245, 26],
+ ["lajima", 245, 30],
+ ["rajima", 245, 31],
+ ["\u0107ajima", 245, 28],
+ ["\u010Dajima", 245, 27],
+ ["\u0111ajima", 245, 29],
+ ["bijima", -1, 32],
+ ["cijima", -1, 33],
+ ["dijima", -1, 34],
+ ["fijima", -1, 40],
+ ["gijima", -1, 39],
+ ["anjijima", -1, 84],
+ ["enjijima", -1, 85],
+ ["snjijima", -1, 122],
+ ["\u0161njijima", -1, 86],
+ ["kijima", -1, 95],
+ ["skijima", 261, 1],
+ ["\u0161kijima", 261, 2],
+ ["lijima", -1, 35],
+ ["elijima", 264, 83],
+ ["mijima", -1, 37],
+ ["nijima", -1, 13],
+ ["ganijima", 267, 9],
+ ["manijima", 267, 6],
+ ["panijima", 267, 7],
+ ["ranijima", 267, 8],
+ ["tanijima", 267, 5],
+ ["pijima", -1, 41],
+ ["rijima", -1, 42],
+ ["sijima", -1, 43],
+ ["osijima", 275, 123],
+ ["tijima", -1, 44],
+ ["atijima", 277, 120],
+ ["evitijima", 277, 92],
+ ["ovitijima", 277, 93],
+ ["astijima", 277, 94],
+ ["avijima", -1, 77],
+ ["evijima", -1, 78],
+ ["ivijima", -1, 79],
+ ["ovijima", -1, 80],
+ ["zijima", -1, 45],
+ ["o\u0161ijima", -1, 91],
+ ["\u017Eijima", -1, 38],
+ ["anjima", -1, 84],
+ ["enjima", -1, 85],
+ ["snjima", -1, 122],
+ ["\u0161njima", -1, 86],
+ ["kima", -1, 95],
+ ["skima", 293, 1],
+ ["\u0161kima", 293, 2],
+ ["alima", -1, 104],
+ ["ijalima", 296, 47],
+ ["nalima", 296, 46],
+ ["elima", -1, 83],
+ ["ilima", -1, 116],
+ ["ozilima", 300, 48],
+ ["olima", -1, 50],
+ ["lemima", -1, 51],
+ ["nima", -1, 13],
+ ["anima", 304, 10],
+ ["inima", 304, 11],
+ ["cinima", 306, 137],
+ ["\u010Dinima", 306, 89],
+ ["onima", 304, 12],
+ ["arima", -1, 53],
+ ["drima", -1, 54],
+ ["erima", -1, 55],
+ ["orima", -1, 56],
+ ["basima", -1, 135],
+ ["gasima", -1, 131],
+ ["jasima", -1, 129],
+ ["kasima", -1, 133],
+ ["nasima", -1, 132],
+ ["tasima", -1, 130],
+ ["vasima", -1, 134],
+ ["esima", -1, 57],
+ ["isima", -1, 58],
+ ["osima", -1, 123],
+ ["atima", -1, 120],
+ ["ikatima", 324, 68],
+ ["latima", 324, 69],
+ ["etima", -1, 70],
+ ["evitima", -1, 92],
+ ["ovitima", -1, 93],
+ ["astima", -1, 94],
+ ["estima", -1, 71],
+ ["istima", -1, 72],
+ ["kstima", -1, 73],
+ ["ostima", -1, 74],
+ ["i\u0161tima", -1, 75],
+ ["avima", -1, 77],
+ ["evima", -1, 78],
+ ["ajevima", 337, 109],
+ ["cajevima", 338, 26],
+ ["lajevima", 338, 30],
+ ["rajevima", 338, 31],
+ ["\u0107ajevima", 338, 28],
+ ["\u010Dajevima", 338, 27],
+ ["\u0111ajevima", 338, 29],
+ ["ivima", -1, 79],
+ ["ovima", -1, 80],
+ ["govima", 346, 20],
+ ["ugovima", 347, 17],
+ ["lovima", 346, 82],
+ ["olovima", 349, 49],
+ ["movima", 346, 81],
+ ["onovima", 346, 12],
+ ["stvima", -1, 3],
+ ["\u0161tvima", -1, 4],
+ ["a\u0107ima", -1, 14],
+ ["e\u0107ima", -1, 15],
+ ["u\u0107ima", -1, 16],
+ ["ba\u0161ima", -1, 63],
+ ["ga\u0161ima", -1, 64],
+ ["ja\u0161ima", -1, 61],
+ ["ka\u0161ima", -1, 62],
+ ["na\u0161ima", -1, 60],
+ ["ta\u0161ima", -1, 59],
+ ["va\u0161ima", -1, 65],
+ ["e\u0161ima", -1, 66],
+ ["i\u0161ima", -1, 67],
+ ["o\u0161ima", -1, 91],
+ ["na", -1, 13],
+ ["ana", 368, 10],
+ ["acana", 369, 128],
+ ["urana", 369, 105],
+ ["tana", 369, 113],
+ ["avana", 369, 97],
+ ["evana", 369, 96],
+ ["ivana", 369, 98],
+ ["uvana", 369, 99],
+ ["a\u010Dana", 369, 102],
+ ["acena", 368, 124],
+ ["lucena", 368, 121],
+ ["a\u010Dena", 368, 101],
+ ["lu\u010Dena", 368, 117],
+ ["ina", 368, 11],
+ ["cina", 382, 137],
+ ["anina", 382, 10],
+ ["\u010Dina", 382, 89],
+ ["ona", 368, 12],
+ ["ara", -1, 53],
+ ["dra", -1, 54],
+ ["era", -1, 55],
+ ["ora", -1, 56],
+ ["basa", -1, 135],
+ ["gasa", -1, 131],
+ ["jasa", -1, 129],
+ ["kasa", -1, 133],
+ ["nasa", -1, 132],
+ ["tasa", -1, 130],
+ ["vasa", -1, 134],
+ ["esa", -1, 57],
+ ["isa", -1, 58],
+ ["osa", -1, 123],
+ ["ata", -1, 120],
+ ["ikata", 401, 68],
+ ["lata", 401, 69],
+ ["eta", -1, 70],
+ ["evita", -1, 92],
+ ["ovita", -1, 93],
+ ["asta", -1, 94],
+ ["esta", -1, 71],
+ ["ista", -1, 72],
+ ["ksta", -1, 73],
+ ["osta", -1, 74],
+ ["nuta", -1, 13],
+ ["i\u0161ta", -1, 75],
+ ["ava", -1, 77],
+ ["eva", -1, 78],
+ ["ajeva", 415, 109],
+ ["cajeva", 416, 26],
+ ["lajeva", 416, 30],
+ ["rajeva", 416, 31],
+ ["\u0107ajeva", 416, 28],
+ ["\u010Dajeva", 416, 27],
+ ["\u0111ajeva", 416, 29],
+ ["iva", -1, 79],
+ ["ova", -1, 80],
+ ["gova", 424, 20],
+ ["ugova", 425, 17],
+ ["lova", 424, 82],
+ ["olova", 427, 49],
+ ["mova", 424, 81],
+ ["onova", 424, 12],
+ ["stva", -1, 3],
+ ["\u0161tva", -1, 4],
+ ["a\u0107a", -1, 14],
+ ["e\u0107a", -1, 15],
+ ["u\u0107a", -1, 16],
+ ["ba\u0161a", -1, 63],
+ ["ga\u0161a", -1, 64],
+ ["ja\u0161a", -1, 61],
+ ["ka\u0161a", -1, 62],
+ ["na\u0161a", -1, 60],
+ ["ta\u0161a", -1, 59],
+ ["va\u0161a", -1, 65],
+ ["e\u0161a", -1, 66],
+ ["i\u0161a", -1, 67],
+ ["o\u0161a", -1, 91],
+ ["ace", -1, 124],
+ ["ece", -1, 125],
+ ["uce", -1, 126],
+ ["luce", 448, 121],
+ ["astade", -1, 110],
+ ["istade", -1, 111],
+ ["ostade", -1, 112],
+ ["ge", -1, 20],
+ ["loge", 453, 19],
+ ["uge", 453, 18],
+ ["aje", -1, 104],
+ ["caje", 456, 26],
+ ["laje", 456, 30],
+ ["raje", 456, 31],
+ ["astaje", 456, 106],
+ ["istaje", 456, 107],
+ ["ostaje", 456, 108],
+ ["\u0107aje", 456, 28],
+ ["\u010Daje", 456, 27],
+ ["\u0111aje", 456, 29],
+ ["ije", -1, 116],
+ ["bije", 466, 32],
+ ["cije", 466, 33],
+ ["dije", 466, 34],
+ ["fije", 466, 40],
+ ["gije", 466, 39],
+ ["anjije", 466, 84],
+ ["enjije", 466, 85],
+ ["snjije", 466, 122],
+ ["\u0161njije", 466, 86],
+ ["kije", 466, 95],
+ ["skije", 476, 1],
+ ["\u0161kije", 476, 2],
+ ["lije", 466, 35],
+ ["elije", 479, 83],
+ ["mije", 466, 37],
+ ["nije", 466, 13],
+ ["ganije", 482, 9],
+ ["manije", 482, 6],
+ ["panije", 482, 7],
+ ["ranije", 482, 8],
+ ["tanije", 482, 5],
+ ["pije", 466, 41],
+ ["rije", 466, 42],
+ ["sije", 466, 43],
+ ["osije", 490, 123],
+ ["tije", 466, 44],
+ ["atije", 492, 120],
+ ["evitije", 492, 92],
+ ["ovitije", 492, 93],
+ ["astije", 492, 94],
+ ["avije", 466, 77],
+ ["evije", 466, 78],
+ ["ivije", 466, 79],
+ ["ovije", 466, 80],
+ ["zije", 466, 45],
+ ["o\u0161ije", 466, 91],
+ ["\u017Eije", 466, 38],
+ ["anje", -1, 84],
+ ["enje", -1, 85],
+ ["snje", -1, 122],
+ ["\u0161nje", -1, 86],
+ ["uje", -1, 25],
+ ["lucuje", 508, 121],
+ ["iruje", 508, 100],
+ ["lu\u010Duje", 508, 117],
+ ["ke", -1, 95],
+ ["ske", 512, 1],
+ ["\u0161ke", 512, 2],
+ ["ale", -1, 104],
+ ["acale", 515, 128],
+ ["astajale", 515, 106],
+ ["istajale", 515, 107],
+ ["ostajale", 515, 108],
+ ["ijale", 515, 47],
+ ["injale", 515, 114],
+ ["nale", 515, 46],
+ ["irale", 515, 100],
+ ["urale", 515, 105],
+ ["tale", 515, 113],
+ ["astale", 525, 110],
+ ["istale", 525, 111],
+ ["ostale", 525, 112],
+ ["avale", 515, 97],
+ ["evale", 515, 96],
+ ["ivale", 515, 98],
+ ["ovale", 515, 76],
+ ["uvale", 515, 99],
+ ["a\u010Dale", 515, 102],
+ ["ele", -1, 83],
+ ["ile", -1, 116],
+ ["acile", 536, 124],
+ ["lucile", 536, 121],
+ ["nile", 536, 103],
+ ["rosile", 536, 127],
+ ["jetile", 536, 118],
+ ["ozile", 536, 48],
+ ["a\u010Dile", 536, 101],
+ ["lu\u010Dile", 536, 117],
+ ["ro\u0161ile", 536, 90],
+ ["ole", -1, 50],
+ ["asle", -1, 115],
+ ["nule", -1, 13],
+ ["rame", -1, 52],
+ ["leme", -1, 51],
+ ["acome", -1, 124],
+ ["ecome", -1, 125],
+ ["ucome", -1, 126],
+ ["anjome", -1, 84],
+ ["enjome", -1, 85],
+ ["snjome", -1, 122],
+ ["\u0161njome", -1, 86],
+ ["kome", -1, 95],
+ ["skome", 558, 1],
+ ["\u0161kome", 558, 2],
+ ["elome", -1, 83],
+ ["nome", -1, 13],
+ ["cinome", 562, 137],
+ ["\u010Dinome", 562, 89],
+ ["osome", -1, 123],
+ ["atome", -1, 120],
+ ["evitome", -1, 92],
+ ["ovitome", -1, 93],
+ ["astome", -1, 94],
+ ["avome", -1, 77],
+ ["evome", -1, 78],
+ ["ivome", -1, 79],
+ ["ovome", -1, 80],
+ ["a\u0107ome", -1, 14],
+ ["e\u0107ome", -1, 15],
+ ["u\u0107ome", -1, 16],
+ ["o\u0161ome", -1, 91],
+ ["ne", -1, 13],
+ ["ane", 578, 10],
+ ["acane", 579, 128],
+ ["urane", 579, 105],
+ ["tane", 579, 113],
+ ["astane", 582, 110],
+ ["istane", 582, 111],
+ ["ostane", 582, 112],
+ ["avane", 579, 97],
+ ["evane", 579, 96],
+ ["ivane", 579, 98],
+ ["uvane", 579, 99],
+ ["a\u010Dane", 579, 102],
+ ["acene", 578, 124],
+ ["lucene", 578, 121],
+ ["a\u010Dene", 578, 101],
+ ["lu\u010Dene", 578, 117],
+ ["ine", 578, 11],
+ ["cine", 595, 137],
+ ["anine", 595, 10],
+ ["\u010Dine", 595, 89],
+ ["one", 578, 12],
+ ["are", -1, 53],
+ ["dre", -1, 54],
+ ["ere", -1, 55],
+ ["ore", -1, 56],
+ ["ase", -1, 161],
+ ["base", 604, 135],
+ ["acase", 604, 128],
+ ["gase", 604, 131],
+ ["jase", 604, 129],
+ ["astajase", 608, 138],
+ ["istajase", 608, 139],
+ ["ostajase", 608, 140],
+ ["injase", 608, 150],
+ ["kase", 604, 133],
+ ["nase", 604, 132],
+ ["irase", 604, 155],
+ ["urase", 604, 156],
+ ["tase", 604, 130],
+ ["vase", 604, 134],
+ ["avase", 618, 144],
+ ["evase", 618, 145],
+ ["ivase", 618, 146],
+ ["ovase", 618, 148],
+ ["uvase", 618, 147],
+ ["ese", -1, 57],
+ ["ise", -1, 58],
+ ["acise", 625, 124],
+ ["lucise", 625, 121],
+ ["rosise", 625, 127],
+ ["jetise", 625, 149],
+ ["ose", -1, 123],
+ ["astadose", 630, 141],
+ ["istadose", 630, 142],
+ ["ostadose", 630, 143],
+ ["ate", -1, 104],
+ ["acate", 634, 128],
+ ["ikate", 634, 68],
+ ["late", 634, 69],
+ ["irate", 634, 100],
+ ["urate", 634, 105],
+ ["tate", 634, 113],
+ ["avate", 634, 97],
+ ["evate", 634, 96],
+ ["ivate", 634, 98],
+ ["uvate", 634, 99],
+ ["a\u010Date", 634, 102],
+ ["ete", -1, 70],
+ ["astadete", 646, 110],
+ ["istadete", 646, 111],
+ ["ostadete", 646, 112],
+ ["astajete", 646, 106],
+ ["istajete", 646, 107],
+ ["ostajete", 646, 108],
+ ["ijete", 646, 116],
+ ["injete", 646, 114],
+ ["ujete", 646, 25],
+ ["lucujete", 655, 121],
+ ["irujete", 655, 100],
+ ["lu\u010Dujete", 655, 117],
+ ["nete", 646, 13],
+ ["astanete", 659, 110],
+ ["istanete", 659, 111],
+ ["ostanete", 659, 112],
+ ["astete", 646, 115],
+ ["ite", -1, 116],
+ ["acite", 664, 124],
+ ["lucite", 664, 121],
+ ["nite", 664, 13],
+ ["astanite", 667, 110],
+ ["istanite", 667, 111],
+ ["ostanite", 667, 112],
+ ["rosite", 664, 127],
+ ["jetite", 664, 118],
+ ["astite", 664, 115],
+ ["evite", 664, 92],
+ ["ovite", 664, 93],
+ ["a\u010Dite", 664, 101],
+ ["lu\u010Dite", 664, 117],
+ ["ro\u0161ite", 664, 90],
+ ["ajte", -1, 104],
+ ["urajte", 679, 105],
+ ["tajte", 679, 113],
+ ["astajte", 681, 106],
+ ["istajte", 681, 107],
+ ["ostajte", 681, 108],
+ ["avajte", 679, 97],
+ ["evajte", 679, 96],
+ ["ivajte", 679, 98],
+ ["uvajte", 679, 99],
+ ["ijte", -1, 116],
+ ["lucujte", -1, 121],
+ ["irujte", -1, 100],
+ ["lu\u010Dujte", -1, 117],
+ ["aste", -1, 94],
+ ["acaste", 693, 128],
+ ["astajaste", 693, 106],
+ ["istajaste", 693, 107],
+ ["ostajaste", 693, 108],
+ ["injaste", 693, 114],
+ ["iraste", 693, 100],
+ ["uraste", 693, 105],
+ ["taste", 693, 113],
+ ["avaste", 693, 97],
+ ["evaste", 693, 96],
+ ["ivaste", 693, 98],
+ ["ovaste", 693, 76],
+ ["uvaste", 693, 99],
+ ["a\u010Daste", 693, 102],
+ ["este", -1, 71],
+ ["iste", -1, 72],
+ ["aciste", 709, 124],
+ ["luciste", 709, 121],
+ ["niste", 709, 103],
+ ["rosiste", 709, 127],
+ ["jetiste", 709, 118],
+ ["a\u010Diste", 709, 101],
+ ["lu\u010Diste", 709, 117],
+ ["ro\u0161iste", 709, 90],
+ ["kste", -1, 73],
+ ["oste", -1, 74],
+ ["astadoste", 719, 110],
+ ["istadoste", 719, 111],
+ ["ostadoste", 719, 112],
+ ["nuste", -1, 13],
+ ["i\u0161te", -1, 75],
+ ["ave", -1, 77],
+ ["eve", -1, 78],
+ ["ajeve", 726, 109],
+ ["cajeve", 727, 26],
+ ["lajeve", 727, 30],
+ ["rajeve", 727, 31],
+ ["\u0107ajeve", 727, 28],
+ ["\u010Dajeve", 727, 27],
+ ["\u0111ajeve", 727, 29],
+ ["ive", -1, 79],
+ ["ove", -1, 80],
+ ["gove", 735, 20],
+ ["ugove", 736, 17],
+ ["love", 735, 82],
+ ["olove", 738, 49],
+ ["move", 735, 81],
+ ["onove", 735, 12],
+ ["a\u0107e", -1, 14],
+ ["e\u0107e", -1, 15],
+ ["u\u0107e", -1, 16],
+ ["a\u010De", -1, 101],
+ ["lu\u010De", -1, 117],
+ ["a\u0161e", -1, 104],
+ ["ba\u0161e", 747, 63],
+ ["ga\u0161e", 747, 64],
+ ["ja\u0161e", 747, 61],
+ ["astaja\u0161e", 750, 106],
+ ["istaja\u0161e", 750, 107],
+ ["ostaja\u0161e", 750, 108],
+ ["inja\u0161e", 750, 114],
+ ["ka\u0161e", 747, 62],
+ ["na\u0161e", 747, 60],
+ ["ira\u0161e", 747, 100],
+ ["ura\u0161e", 747, 105],
+ ["ta\u0161e", 747, 59],
+ ["va\u0161e", 747, 65],
+ ["ava\u0161e", 760, 97],
+ ["eva\u0161e", 760, 96],
+ ["iva\u0161e", 760, 98],
+ ["ova\u0161e", 760, 76],
+ ["uva\u0161e", 760, 99],
+ ["a\u010Da\u0161e", 747, 102],
+ ["e\u0161e", -1, 66],
+ ["i\u0161e", -1, 67],
+ ["jeti\u0161e", 768, 118],
+ ["a\u010Di\u0161e", 768, 101],
+ ["lu\u010Di\u0161e", 768, 117],
+ ["ro\u0161i\u0161e", 768, 90],
+ ["o\u0161e", -1, 91],
+ ["astado\u0161e", 773, 110],
+ ["istado\u0161e", 773, 111],
+ ["ostado\u0161e", 773, 112],
+ ["aceg", -1, 124],
+ ["eceg", -1, 125],
+ ["uceg", -1, 126],
+ ["anjijeg", -1, 84],
+ ["enjijeg", -1, 85],
+ ["snjijeg", -1, 122],
+ ["\u0161njijeg", -1, 86],
+ ["kijeg", -1, 95],
+ ["skijeg", 784, 1],
+ ["\u0161kijeg", 784, 2],
+ ["elijeg", -1, 83],
+ ["nijeg", -1, 13],
+ ["osijeg", -1, 123],
+ ["atijeg", -1, 120],
+ ["evitijeg", -1, 92],
+ ["ovitijeg", -1, 93],
+ ["astijeg", -1, 94],
+ ["avijeg", -1, 77],
+ ["evijeg", -1, 78],
+ ["ivijeg", -1, 79],
+ ["ovijeg", -1, 80],
+ ["o\u0161ijeg", -1, 91],
+ ["anjeg", -1, 84],
+ ["enjeg", -1, 85],
+ ["snjeg", -1, 122],
+ ["\u0161njeg", -1, 86],
+ ["keg", -1, 95],
+ ["eleg", -1, 83],
+ ["neg", -1, 13],
+ ["aneg", 805, 10],
+ ["eneg", 805, 87],
+ ["sneg", 805, 159],
+ ["\u0161neg", 805, 88],
+ ["oseg", -1, 123],
+ ["ateg", -1, 120],
+ ["aveg", -1, 77],
+ ["eveg", -1, 78],
+ ["iveg", -1, 79],
+ ["oveg", -1, 80],
+ ["a\u0107eg", -1, 14],
+ ["e\u0107eg", -1, 15],
+ ["u\u0107eg", -1, 16],
+ ["o\u0161eg", -1, 91],
+ ["acog", -1, 124],
+ ["ecog", -1, 125],
+ ["ucog", -1, 126],
+ ["anjog", -1, 84],
+ ["enjog", -1, 85],
+ ["snjog", -1, 122],
+ ["\u0161njog", -1, 86],
+ ["kog", -1, 95],
+ ["skog", 827, 1],
+ ["\u0161kog", 827, 2],
+ ["elog", -1, 83],
+ ["nog", -1, 13],
+ ["cinog", 831, 137],
+ ["\u010Dinog", 831, 89],
+ ["osog", -1, 123],
+ ["atog", -1, 120],
+ ["evitog", -1, 92],
+ ["ovitog", -1, 93],
+ ["astog", -1, 94],
+ ["avog", -1, 77],
+ ["evog", -1, 78],
+ ["ivog", -1, 79],
+ ["ovog", -1, 80],
+ ["a\u0107og", -1, 14],
+ ["e\u0107og", -1, 15],
+ ["u\u0107og", -1, 16],
+ ["o\u0161og", -1, 91],
+ ["ah", -1, 104],
+ ["acah", 847, 128],
+ ["astajah", 847, 106],
+ ["istajah", 847, 107],
+ ["ostajah", 847, 108],
+ ["injah", 847, 114],
+ ["irah", 847, 100],
+ ["urah", 847, 105],
+ ["tah", 847, 113],
+ ["avah", 847, 97],
+ ["evah", 847, 96],
+ ["ivah", 847, 98],
+ ["ovah", 847, 76],
+ ["uvah", 847, 99],
+ ["a\u010Dah", 847, 102],
+ ["ih", -1, 116],
+ ["acih", 862, 124],
+ ["ecih", 862, 125],
+ ["ucih", 862, 126],
+ ["lucih", 865, 121],
+ ["anjijih", 862, 84],
+ ["enjijih", 862, 85],
+ ["snjijih", 862, 122],
+ ["\u0161njijih", 862, 86],
+ ["kijih", 862, 95],
+ ["skijih", 871, 1],
+ ["\u0161kijih", 871, 2],
+ ["elijih", 862, 83],
+ ["nijih", 862, 13],
+ ["osijih", 862, 123],
+ ["atijih", 862, 120],
+ ["evitijih", 862, 92],
+ ["ovitijih", 862, 93],
+ ["astijih", 862, 94],
+ ["avijih", 862, 77],
+ ["evijih", 862, 78],
+ ["ivijih", 862, 79],
+ ["ovijih", 862, 80],
+ ["o\u0161ijih", 862, 91],
+ ["anjih", 862, 84],
+ ["enjih", 862, 85],
+ ["snjih", 862, 122],
+ ["\u0161njih", 862, 86],
+ ["kih", 862, 95],
+ ["skih", 890, 1],
+ ["\u0161kih", 890, 2],
+ ["elih", 862, 83],
+ ["nih", 862, 13],
+ ["cinih", 894, 137],
+ ["\u010Dinih", 894, 89],
+ ["osih", 862, 123],
+ ["rosih", 897, 127],
+ ["atih", 862, 120],
+ ["jetih", 862, 118],
+ ["evitih", 862, 92],
+ ["ovitih", 862, 93],
+ ["astih", 862, 94],
+ ["avih", 862, 77],
+ ["evih", 862, 78],
+ ["ivih", 862, 79],
+ ["ovih", 862, 80],
+ ["a\u0107ih", 862, 14],
+ ["e\u0107ih", 862, 15],
+ ["u\u0107ih", 862, 16],
+ ["a\u010Dih", 862, 101],
+ ["lu\u010Dih", 862, 117],
+ ["o\u0161ih", 862, 91],
+ ["ro\u0161ih", 913, 90],
+ ["astadoh", -1, 110],
+ ["istadoh", -1, 111],
+ ["ostadoh", -1, 112],
+ ["acuh", -1, 124],
+ ["ecuh", -1, 125],
+ ["ucuh", -1, 126],
+ ["a\u0107uh", -1, 14],
+ ["e\u0107uh", -1, 15],
+ ["u\u0107uh", -1, 16],
+ ["aci", -1, 124],
+ ["aceci", -1, 124],
+ ["ieci", -1, 162],
+ ["ajuci", -1, 161],
+ ["irajuci", 927, 155],
+ ["urajuci", 927, 156],
+ ["astajuci", 927, 138],
+ ["istajuci", 927, 139],
+ ["ostajuci", 927, 140],
+ ["avajuci", 927, 144],
+ ["evajuci", 927, 145],
+ ["ivajuci", 927, 146],
+ ["uvajuci", 927, 147],
+ ["ujuci", -1, 157],
+ ["lucujuci", 937, 121],
+ ["irujuci", 937, 155],
+ ["luci", -1, 121],
+ ["nuci", -1, 164],
+ ["etuci", -1, 153],
+ ["astuci", -1, 136],
+ ["gi", -1, 20],
+ ["ugi", 944, 18],
+ ["aji", -1, 109],
+ ["caji", 946, 26],
+ ["laji", 946, 30],
+ ["raji", 946, 31],
+ ["\u0107aji", 946, 28],
+ ["\u010Daji", 946, 27],
+ ["\u0111aji", 946, 29],
+ ["biji", -1, 32],
+ ["ciji", -1, 33],
+ ["diji", -1, 34],
+ ["fiji", -1, 40],
+ ["giji", -1, 39],
+ ["anjiji", -1, 84],
+ ["enjiji", -1, 85],
+ ["snjiji", -1, 122],
+ ["\u0161njiji", -1, 86],
+ ["kiji", -1, 95],
+ ["skiji", 962, 1],
+ ["\u0161kiji", 962, 2],
+ ["liji", -1, 35],
+ ["eliji", 965, 83],
+ ["miji", -1, 37],
+ ["niji", -1, 13],
+ ["ganiji", 968, 9],
+ ["maniji", 968, 6],
+ ["paniji", 968, 7],
+ ["raniji", 968, 8],
+ ["taniji", 968, 5],
+ ["piji", -1, 41],
+ ["riji", -1, 42],
+ ["siji", -1, 43],
+ ["osiji", 976, 123],
+ ["tiji", -1, 44],
+ ["atiji", 978, 120],
+ ["evitiji", 978, 92],
+ ["ovitiji", 978, 93],
+ ["astiji", 978, 94],
+ ["aviji", -1, 77],
+ ["eviji", -1, 78],
+ ["iviji", -1, 79],
+ ["oviji", -1, 80],
+ ["ziji", -1, 45],
+ ["o\u0161iji", -1, 91],
+ ["\u017Eiji", -1, 38],
+ ["anji", -1, 84],
+ ["enji", -1, 85],
+ ["snji", -1, 122],
+ ["\u0161nji", -1, 86],
+ ["ki", -1, 95],
+ ["ski", 994, 1],
+ ["\u0161ki", 994, 2],
+ ["ali", -1, 104],
+ ["acali", 997, 128],
+ ["astajali", 997, 106],
+ ["istajali", 997, 107],
+ ["ostajali", 997, 108],
+ ["ijali", 997, 47],
+ ["injali", 997, 114],
+ ["nali", 997, 46],
+ ["irali", 997, 100],
+ ["urali", 997, 105],
+ ["tali", 997, 113],
+ ["astali", 1007, 110],
+ ["istali", 1007, 111],
+ ["ostali", 1007, 112],
+ ["avali", 997, 97],
+ ["evali", 997, 96],
+ ["ivali", 997, 98],
+ ["ovali", 997, 76],
+ ["uvali", 997, 99],
+ ["a\u010Dali", 997, 102],
+ ["eli", -1, 83],
+ ["ili", -1, 116],
+ ["acili", 1018, 124],
+ ["lucili", 1018, 121],
+ ["nili", 1018, 103],
+ ["rosili", 1018, 127],
+ ["jetili", 1018, 118],
+ ["ozili", 1018, 48],
+ ["a\u010Dili", 1018, 101],
+ ["lu\u010Dili", 1018, 117],
+ ["ro\u0161ili", 1018, 90],
+ ["oli", -1, 50],
+ ["asli", -1, 115],
+ ["nuli", -1, 13],
+ ["rami", -1, 52],
+ ["lemi", -1, 51],
+ ["ni", -1, 13],
+ ["ani", 1033, 10],
+ ["acani", 1034, 128],
+ ["urani", 1034, 105],
+ ["tani", 1034, 113],
+ ["avani", 1034, 97],
+ ["evani", 1034, 96],
+ ["ivani", 1034, 98],
+ ["uvani", 1034, 99],
+ ["a\u010Dani", 1034, 102],
+ ["aceni", 1033, 124],
+ ["luceni", 1033, 121],
+ ["a\u010Deni", 1033, 101],
+ ["lu\u010Deni", 1033, 117],
+ ["ini", 1033, 11],
+ ["cini", 1047, 137],
+ ["\u010Dini", 1047, 89],
+ ["oni", 1033, 12],
+ ["ari", -1, 53],
+ ["dri", -1, 54],
+ ["eri", -1, 55],
+ ["ori", -1, 56],
+ ["basi", -1, 135],
+ ["gasi", -1, 131],
+ ["jasi", -1, 129],
+ ["kasi", -1, 133],
+ ["nasi", -1, 132],
+ ["tasi", -1, 130],
+ ["vasi", -1, 134],
+ ["esi", -1, 152],
+ ["isi", -1, 154],
+ ["osi", -1, 123],
+ ["avsi", -1, 161],
+ ["acavsi", 1065, 128],
+ ["iravsi", 1065, 155],
+ ["tavsi", 1065, 160],
+ ["etavsi", 1068, 153],
+ ["astavsi", 1068, 141],
+ ["istavsi", 1068, 142],
+ ["ostavsi", 1068, 143],
+ ["ivsi", -1, 162],
+ ["nivsi", 1073, 158],
+ ["rosivsi", 1073, 127],
+ ["nuvsi", -1, 164],
+ ["ati", -1, 104],
+ ["acati", 1077, 128],
+ ["astajati", 1077, 106],
+ ["istajati", 1077, 107],
+ ["ostajati", 1077, 108],
+ ["injati", 1077, 114],
+ ["ikati", 1077, 68],
+ ["lati", 1077, 69],
+ ["irati", 1077, 100],
+ ["urati", 1077, 105],
+ ["tati", 1077, 113],
+ ["astati", 1087, 110],
+ ["istati", 1087, 111],
+ ["ostati", 1087, 112],
+ ["avati", 1077, 97],
+ ["evati", 1077, 96],
+ ["ivati", 1077, 98],
+ ["ovati", 1077, 76],
+ ["uvati", 1077, 99],
+ ["a\u010Dati", 1077, 102],
+ ["eti", -1, 70],
+ ["iti", -1, 116],
+ ["aciti", 1098, 124],
+ ["luciti", 1098, 121],
+ ["niti", 1098, 103],
+ ["rositi", 1098, 127],
+ ["jetiti", 1098, 118],
+ ["eviti", 1098, 92],
+ ["oviti", 1098, 93],
+ ["a\u010Diti", 1098, 101],
+ ["lu\u010Diti", 1098, 117],
+ ["ro\u0161iti", 1098, 90],
+ ["asti", -1, 94],
+ ["esti", -1, 71],
+ ["isti", -1, 72],
+ ["ksti", -1, 73],
+ ["osti", -1, 74],
+ ["nuti", -1, 13],
+ ["avi", -1, 77],
+ ["evi", -1, 78],
+ ["ajevi", 1116, 109],
+ ["cajevi", 1117, 26],
+ ["lajevi", 1117, 30],
+ ["rajevi", 1117, 31],
+ ["\u0107ajevi", 1117, 28],
+ ["\u010Dajevi", 1117, 27],
+ ["\u0111ajevi", 1117, 29],
+ ["ivi", -1, 79],
+ ["ovi", -1, 80],
+ ["govi", 1125, 20],
+ ["ugovi", 1126, 17],
+ ["lovi", 1125, 82],
+ ["olovi", 1128, 49],
+ ["movi", 1125, 81],
+ ["onovi", 1125, 12],
+ ["ie\u0107i", -1, 116],
+ ["a\u010De\u0107i", -1, 101],
+ ["aju\u0107i", -1, 104],
+ ["iraju\u0107i", 1134, 100],
+ ["uraju\u0107i", 1134, 105],
+ ["astaju\u0107i", 1134, 106],
+ ["istaju\u0107i", 1134, 107],
+ ["ostaju\u0107i", 1134, 108],
+ ["avaju\u0107i", 1134, 97],
+ ["evaju\u0107i", 1134, 96],
+ ["ivaju\u0107i", 1134, 98],
+ ["uvaju\u0107i", 1134, 99],
+ ["uju\u0107i", -1, 25],
+ ["iruju\u0107i", 1144, 100],
+ ["lu\u010Duju\u0107i", 1144, 117],
+ ["nu\u0107i", -1, 13],
+ ["etu\u0107i", -1, 70],
+ ["astu\u0107i", -1, 115],
+ ["a\u010Di", -1, 101],
+ ["lu\u010Di", -1, 117],
+ ["ba\u0161i", -1, 63],
+ ["ga\u0161i", -1, 64],
+ ["ja\u0161i", -1, 61],
+ ["ka\u0161i", -1, 62],
+ ["na\u0161i", -1, 60],
+ ["ta\u0161i", -1, 59],
+ ["va\u0161i", -1, 65],
+ ["e\u0161i", -1, 66],
+ ["i\u0161i", -1, 67],
+ ["o\u0161i", -1, 91],
+ ["av\u0161i", -1, 104],
+ ["irav\u0161i", 1162, 100],
+ ["tav\u0161i", 1162, 113],
+ ["etav\u0161i", 1164, 70],
+ ["astav\u0161i", 1164, 110],
+ ["istav\u0161i", 1164, 111],
+ ["ostav\u0161i", 1164, 112],
+ ["a\u010Dav\u0161i", 1162, 102],
+ ["iv\u0161i", -1, 116],
+ ["niv\u0161i", 1170, 103],
+ ["ro\u0161iv\u0161i", 1170, 90],
+ ["nuv\u0161i", -1, 13],
+ ["aj", -1, 104],
+ ["uraj", 1174, 105],
+ ["taj", 1174, 113],
+ ["avaj", 1174, 97],
+ ["evaj", 1174, 96],
+ ["ivaj", 1174, 98],
+ ["uvaj", 1174, 99],
+ ["ij", -1, 116],
+ ["acoj", -1, 124],
+ ["ecoj", -1, 125],
+ ["ucoj", -1, 126],
+ ["anjijoj", -1, 84],
+ ["enjijoj", -1, 85],
+ ["snjijoj", -1, 122],
+ ["\u0161njijoj", -1, 86],
+ ["kijoj", -1, 95],
+ ["skijoj", 1189, 1],
+ ["\u0161kijoj", 1189, 2],
+ ["elijoj", -1, 83],
+ ["nijoj", -1, 13],
+ ["osijoj", -1, 123],
+ ["evitijoj", -1, 92],
+ ["ovitijoj", -1, 93],
+ ["astijoj", -1, 94],
+ ["avijoj", -1, 77],
+ ["evijoj", -1, 78],
+ ["ivijoj", -1, 79],
+ ["ovijoj", -1, 80],
+ ["o\u0161ijoj", -1, 91],
+ ["anjoj", -1, 84],
+ ["enjoj", -1, 85],
+ ["snjoj", -1, 122],
+ ["\u0161njoj", -1, 86],
+ ["koj", -1, 95],
+ ["skoj", 1207, 1],
+ ["\u0161koj", 1207, 2],
+ ["aloj", -1, 104],
+ ["eloj", -1, 83],
+ ["noj", -1, 13],
+ ["cinoj", 1212, 137],
+ ["\u010Dinoj", 1212, 89],
+ ["osoj", -1, 123],
+ ["atoj", -1, 120],
+ ["evitoj", -1, 92],
+ ["ovitoj", -1, 93],
+ ["astoj", -1, 94],
+ ["avoj", -1, 77],
+ ["evoj", -1, 78],
+ ["ivoj", -1, 79],
+ ["ovoj", -1, 80],
+ ["a\u0107oj", -1, 14],
+ ["e\u0107oj", -1, 15],
+ ["u\u0107oj", -1, 16],
+ ["o\u0161oj", -1, 91],
+ ["lucuj", -1, 121],
+ ["iruj", -1, 100],
+ ["lu\u010Duj", -1, 117],
+ ["al", -1, 104],
+ ["iral", 1231, 100],
+ ["ural", 1231, 105],
+ ["el", -1, 119],
+ ["il", -1, 116],
+ ["am", -1, 104],
+ ["acam", 1236, 128],
+ ["iram", 1236, 100],
+ ["uram", 1236, 105],
+ ["tam", 1236, 113],
+ ["avam", 1236, 97],
+ ["evam", 1236, 96],
+ ["ivam", 1236, 98],
+ ["uvam", 1236, 99],
+ ["a\u010Dam", 1236, 102],
+ ["em", -1, 119],
+ ["acem", 1246, 124],
+ ["ecem", 1246, 125],
+ ["ucem", 1246, 126],
+ ["astadem", 1246, 110],
+ ["istadem", 1246, 111],
+ ["ostadem", 1246, 112],
+ ["ajem", 1246, 104],
+ ["cajem", 1253, 26],
+ ["lajem", 1253, 30],
+ ["rajem", 1253, 31],
+ ["astajem", 1253, 106],
+ ["istajem", 1253, 107],
+ ["ostajem", 1253, 108],
+ ["\u0107ajem", 1253, 28],
+ ["\u010Dajem", 1253, 27],
+ ["\u0111ajem", 1253, 29],
+ ["ijem", 1246, 116],
+ ["anjijem", 1263, 84],
+ ["enjijem", 1263, 85],
+ ["snjijem", 1263, 123],
+ ["\u0161njijem", 1263, 86],
+ ["kijem", 1263, 95],
+ ["skijem", 1268, 1],
+ ["\u0161kijem", 1268, 2],
+ ["lijem", 1263, 24],
+ ["elijem", 1271, 83],
+ ["nijem", 1263, 13],
+ ["rarijem", 1263, 21],
+ ["sijem", 1263, 23],
+ ["osijem", 1275, 123],
+ ["atijem", 1263, 120],
+ ["evitijem", 1263, 92],
+ ["ovitijem", 1263, 93],
+ ["otijem", 1263, 22],
+ ["astijem", 1263, 94],
+ ["avijem", 1263, 77],
+ ["evijem", 1263, 78],
+ ["ivijem", 1263, 79],
+ ["ovijem", 1263, 80],
+ ["o\u0161ijem", 1263, 91],
+ ["anjem", 1246, 84],
+ ["enjem", 1246, 85],
+ ["injem", 1246, 114],
+ ["snjem", 1246, 122],
+ ["\u0161njem", 1246, 86],
+ ["ujem", 1246, 25],
+ ["lucujem", 1292, 121],
+ ["irujem", 1292, 100],
+ ["lu\u010Dujem", 1292, 117],
+ ["kem", 1246, 95],
+ ["skem", 1296, 1],
+ ["\u0161kem", 1296, 2],
+ ["elem", 1246, 83],
+ ["nem", 1246, 13],
+ ["anem", 1300, 10],
+ ["astanem", 1301, 110],
+ ["istanem", 1301, 111],
+ ["ostanem", 1301, 112],
+ ["enem", 1300, 87],
+ ["snem", 1300, 159],
+ ["\u0161nem", 1300, 88],
+ ["basem", 1246, 135],
+ ["gasem", 1246, 131],
+ ["jasem", 1246, 129],
+ ["kasem", 1246, 133],
+ ["nasem", 1246, 132],
+ ["tasem", 1246, 130],
+ ["vasem", 1246, 134],
+ ["esem", 1246, 152],
+ ["isem", 1246, 154],
+ ["osem", 1246, 123],
+ ["atem", 1246, 120],
+ ["etem", 1246, 70],
+ ["evitem", 1246, 92],
+ ["ovitem", 1246, 93],
+ ["astem", 1246, 94],
+ ["istem", 1246, 151],
+ ["i\u0161tem", 1246, 75],
+ ["avem", 1246, 77],
+ ["evem", 1246, 78],
+ ["ivem", 1246, 79],
+ ["a\u0107em", 1246, 14],
+ ["e\u0107em", 1246, 15],
+ ["u\u0107em", 1246, 16],
+ ["ba\u0161em", 1246, 63],
+ ["ga\u0161em", 1246, 64],
+ ["ja\u0161em", 1246, 61],
+ ["ka\u0161em", 1246, 62],
+ ["na\u0161em", 1246, 60],
+ ["ta\u0161em", 1246, 59],
+ ["va\u0161em", 1246, 65],
+ ["e\u0161em", 1246, 66],
+ ["i\u0161em", 1246, 67],
+ ["o\u0161em", 1246, 91],
+ ["im", -1, 116],
+ ["acim", 1341, 124],
+ ["ecim", 1341, 125],
+ ["ucim", 1341, 126],
+ ["lucim", 1344, 121],
+ ["anjijim", 1341, 84],
+ ["enjijim", 1341, 85],
+ ["snjijim", 1341, 122],
+ ["\u0161njijim", 1341, 86],
+ ["kijim", 1341, 95],
+ ["skijim", 1350, 1],
+ ["\u0161kijim", 1350, 2],
+ ["elijim", 1341, 83],
+ ["nijim", 1341, 13],
+ ["osijim", 1341, 123],
+ ["atijim", 1341, 120],
+ ["evitijim", 1341, 92],
+ ["ovitijim", 1341, 93],
+ ["astijim", 1341, 94],
+ ["avijim", 1341, 77],
+ ["evijim", 1341, 78],
+ ["ivijim", 1341, 79],
+ ["ovijim", 1341, 80],
+ ["o\u0161ijim", 1341, 91],
+ ["anjim", 1341, 84],
+ ["enjim", 1341, 85],
+ ["snjim", 1341, 122],
+ ["\u0161njim", 1341, 86],
+ ["kim", 1341, 95],
+ ["skim", 1369, 1],
+ ["\u0161kim", 1369, 2],
+ ["elim", 1341, 83],
+ ["nim", 1341, 13],
+ ["cinim", 1373, 137],
+ ["\u010Dinim", 1373, 89],
+ ["osim", 1341, 123],
+ ["rosim", 1376, 127],
+ ["atim", 1341, 120],
+ ["jetim", 1341, 118],
+ ["evitim", 1341, 92],
+ ["ovitim", 1341, 93],
+ ["astim", 1341, 94],
+ ["avim", 1341, 77],
+ ["evim", 1341, 78],
+ ["ivim", 1341, 79],
+ ["ovim", 1341, 80],
+ ["a\u0107im", 1341, 14],
+ ["e\u0107im", 1341, 15],
+ ["u\u0107im", 1341, 16],
+ ["a\u010Dim", 1341, 101],
+ ["lu\u010Dim", 1341, 117],
+ ["o\u0161im", 1341, 91],
+ ["ro\u0161im", 1392, 90],
+ ["acom", -1, 124],
+ ["ecom", -1, 125],
+ ["ucom", -1, 126],
+ ["gom", -1, 20],
+ ["logom", 1397, 19],
+ ["ugom", 1397, 18],
+ ["bijom", -1, 32],
+ ["cijom", -1, 33],
+ ["dijom", -1, 34],
+ ["fijom", -1, 40],
+ ["gijom", -1, 39],
+ ["lijom", -1, 35],
+ ["mijom", -1, 37],
+ ["nijom", -1, 36],
+ ["ganijom", 1407, 9],
+ ["manijom", 1407, 6],
+ ["panijom", 1407, 7],
+ ["ranijom", 1407, 8],
+ ["tanijom", 1407, 5],
+ ["pijom", -1, 41],
+ ["rijom", -1, 42],
+ ["sijom", -1, 43],
+ ["tijom", -1, 44],
+ ["zijom", -1, 45],
+ ["\u017Eijom", -1, 38],
+ ["anjom", -1, 84],
+ ["enjom", -1, 85],
+ ["snjom", -1, 122],
+ ["\u0161njom", -1, 86],
+ ["kom", -1, 95],
+ ["skom", 1423, 1],
+ ["\u0161kom", 1423, 2],
+ ["alom", -1, 104],
+ ["ijalom", 1426, 47],
+ ["nalom", 1426, 46],
+ ["elom", -1, 83],
+ ["ilom", -1, 116],
+ ["ozilom", 1430, 48],
+ ["olom", -1, 50],
+ ["ramom", -1, 52],
+ ["lemom", -1, 51],
+ ["nom", -1, 13],
+ ["anom", 1435, 10],
+ ["inom", 1435, 11],
+ ["cinom", 1437, 137],
+ ["aninom", 1437, 10],
+ ["\u010Dinom", 1437, 89],
+ ["onom", 1435, 12],
+ ["arom", -1, 53],
+ ["drom", -1, 54],
+ ["erom", -1, 55],
+ ["orom", -1, 56],
+ ["basom", -1, 135],
+ ["gasom", -1, 131],
+ ["jasom", -1, 129],
+ ["kasom", -1, 133],
+ ["nasom", -1, 132],
+ ["tasom", -1, 130],
+ ["vasom", -1, 134],
+ ["esom", -1, 57],
+ ["isom", -1, 58],
+ ["osom", -1, 123],
+ ["atom", -1, 120],
+ ["ikatom", 1456, 68],
+ ["latom", 1456, 69],
+ ["etom", -1, 70],
+ ["evitom", -1, 92],
+ ["ovitom", -1, 93],
+ ["astom", -1, 94],
+ ["estom", -1, 71],
+ ["istom", -1, 72],
+ ["kstom", -1, 73],
+ ["ostom", -1, 74],
+ ["avom", -1, 77],
+ ["evom", -1, 78],
+ ["ivom", -1, 79],
+ ["ovom", -1, 80],
+ ["lovom", 1470, 82],
+ ["movom", 1470, 81],
+ ["stvom", -1, 3],
+ ["\u0161tvom", -1, 4],
+ ["a\u0107om", -1, 14],
+ ["e\u0107om", -1, 15],
+ ["u\u0107om", -1, 16],
+ ["ba\u0161om", -1, 63],
+ ["ga\u0161om", -1, 64],
+ ["ja\u0161om", -1, 61],
+ ["ka\u0161om", -1, 62],
+ ["na\u0161om", -1, 60],
+ ["ta\u0161om", -1, 59],
+ ["va\u0161om", -1, 65],
+ ["e\u0161om", -1, 66],
+ ["i\u0161om", -1, 67],
+ ["o\u0161om", -1, 91],
+ ["an", -1, 104],
+ ["acan", 1488, 128],
+ ["iran", 1488, 100],
+ ["uran", 1488, 105],
+ ["tan", 1488, 113],
+ ["avan", 1488, 97],
+ ["evan", 1488, 96],
+ ["ivan", 1488, 98],
+ ["uvan", 1488, 99],
+ ["a\u010Dan", 1488, 102],
+ ["acen", -1, 124],
+ ["lucen", -1, 121],
+ ["a\u010Den", -1, 101],
+ ["lu\u010Den", -1, 117],
+ ["anin", -1, 10],
+ ["ao", -1, 104],
+ ["acao", 1503, 128],
+ ["astajao", 1503, 106],
+ ["istajao", 1503, 107],
+ ["ostajao", 1503, 108],
+ ["injao", 1503, 114],
+ ["irao", 1503, 100],
+ ["urao", 1503, 105],
+ ["tao", 1503, 113],
+ ["astao", 1511, 110],
+ ["istao", 1511, 111],
+ ["ostao", 1511, 112],
+ ["avao", 1503, 97],
+ ["evao", 1503, 96],
+ ["ivao", 1503, 98],
+ ["ovao", 1503, 76],
+ ["uvao", 1503, 99],
+ ["a\u010Dao", 1503, 102],
+ ["go", -1, 20],
+ ["ugo", 1521, 18],
+ ["io", -1, 116],
+ ["acio", 1523, 124],
+ ["lucio", 1523, 121],
+ ["lio", 1523, 24],
+ ["nio", 1523, 103],
+ ["rario", 1523, 21],
+ ["sio", 1523, 23],
+ ["rosio", 1529, 127],
+ ["jetio", 1523, 118],
+ ["otio", 1523, 22],
+ ["a\u010Dio", 1523, 101],
+ ["lu\u010Dio", 1523, 117],
+ ["ro\u0161io", 1523, 90],
+ ["bijo", -1, 32],
+ ["cijo", -1, 33],
+ ["dijo", -1, 34],
+ ["fijo", -1, 40],
+ ["gijo", -1, 39],
+ ["lijo", -1, 35],
+ ["mijo", -1, 37],
+ ["nijo", -1, 36],
+ ["pijo", -1, 41],
+ ["rijo", -1, 42],
+ ["sijo", -1, 43],
+ ["tijo", -1, 44],
+ ["zijo", -1, 45],
+ ["\u017Eijo", -1, 38],
+ ["anjo", -1, 84],
+ ["enjo", -1, 85],
+ ["snjo", -1, 122],
+ ["\u0161njo", -1, 86],
+ ["ko", -1, 95],
+ ["sko", 1554, 1],
+ ["\u0161ko", 1554, 2],
+ ["alo", -1, 104],
+ ["acalo", 1557, 128],
+ ["astajalo", 1557, 106],
+ ["istajalo", 1557, 107],
+ ["ostajalo", 1557, 108],
+ ["ijalo", 1557, 47],
+ ["injalo", 1557, 114],
+ ["nalo", 1557, 46],
+ ["iralo", 1557, 100],
+ ["uralo", 1557, 105],
+ ["talo", 1557, 113],
+ ["astalo", 1567, 110],
+ ["istalo", 1567, 111],
+ ["ostalo", 1567, 112],
+ ["avalo", 1557, 97],
+ ["evalo", 1557, 96],
+ ["ivalo", 1557, 98],
+ ["ovalo", 1557, 76],
+ ["uvalo", 1557, 99],
+ ["a\u010Dalo", 1557, 102],
+ ["elo", -1, 83],
+ ["ilo", -1, 116],
+ ["acilo", 1578, 124],
+ ["lucilo", 1578, 121],
+ ["nilo", 1578, 103],
+ ["rosilo", 1578, 127],
+ ["jetilo", 1578, 118],
+ ["a\u010Dilo", 1578, 101],
+ ["lu\u010Dilo", 1578, 117],
+ ["ro\u0161ilo", 1578, 90],
+ ["aslo", -1, 115],
+ ["nulo", -1, 13],
+ ["amo", -1, 104],
+ ["acamo", 1589, 128],
+ ["ramo", 1589, 52],
+ ["iramo", 1591, 100],
+ ["uramo", 1591, 105],
+ ["tamo", 1589, 113],
+ ["avamo", 1589, 97],
+ ["evamo", 1589, 96],
+ ["ivamo", 1589, 98],
+ ["uvamo", 1589, 99],
+ ["a\u010Damo", 1589, 102],
+ ["emo", -1, 119],
+ ["astademo", 1600, 110],
+ ["istademo", 1600, 111],
+ ["ostademo", 1600, 112],
+ ["astajemo", 1600, 106],
+ ["istajemo", 1600, 107],
+ ["ostajemo", 1600, 108],
+ ["ijemo", 1600, 116],
+ ["injemo", 1600, 114],
+ ["ujemo", 1600, 25],
+ ["lucujemo", 1609, 121],
+ ["irujemo", 1609, 100],
+ ["lu\u010Dujemo", 1609, 117],
+ ["lemo", 1600, 51],
+ ["nemo", 1600, 13],
+ ["astanemo", 1614, 110],
+ ["istanemo", 1614, 111],
+ ["ostanemo", 1614, 112],
+ ["etemo", 1600, 70],
+ ["astemo", 1600, 115],
+ ["imo", -1, 116],
+ ["acimo", 1620, 124],
+ ["lucimo", 1620, 121],
+ ["nimo", 1620, 13],
+ ["astanimo", 1623, 110],
+ ["istanimo", 1623, 111],
+ ["ostanimo", 1623, 112],
+ ["rosimo", 1620, 127],
+ ["etimo", 1620, 70],
+ ["jetimo", 1628, 118],
+ ["astimo", 1620, 115],
+ ["a\u010Dimo", 1620, 101],
+ ["lu\u010Dimo", 1620, 117],
+ ["ro\u0161imo", 1620, 90],
+ ["ajmo", -1, 104],
+ ["urajmo", 1634, 105],
+ ["tajmo", 1634, 113],
+ ["astajmo", 1636, 106],
+ ["istajmo", 1636, 107],
+ ["ostajmo", 1636, 108],
+ ["avajmo", 1634, 97],
+ ["evajmo", 1634, 96],
+ ["ivajmo", 1634, 98],
+ ["uvajmo", 1634, 99],
+ ["ijmo", -1, 116],
+ ["ujmo", -1, 25],
+ ["lucujmo", 1645, 121],
+ ["irujmo", 1645, 100],
+ ["lu\u010Dujmo", 1645, 117],
+ ["asmo", -1, 104],
+ ["acasmo", 1649, 128],
+ ["astajasmo", 1649, 106],
+ ["istajasmo", 1649, 107],
+ ["ostajasmo", 1649, 108],
+ ["injasmo", 1649, 114],
+ ["irasmo", 1649, 100],
+ ["urasmo", 1649, 105],
+ ["tasmo", 1649, 113],
+ ["avasmo", 1649, 97],
+ ["evasmo", 1649, 96],
+ ["ivasmo", 1649, 98],
+ ["ovasmo", 1649, 76],
+ ["uvasmo", 1649, 99],
+ ["a\u010Dasmo", 1649, 102],
+ ["ismo", -1, 116],
+ ["acismo", 1664, 124],
+ ["lucismo", 1664, 121],
+ ["nismo", 1664, 103],
+ ["rosismo", 1664, 127],
+ ["jetismo", 1664, 118],
+ ["a\u010Dismo", 1664, 101],
+ ["lu\u010Dismo", 1664, 117],
+ ["ro\u0161ismo", 1664, 90],
+ ["astadosmo", -1, 110],
+ ["istadosmo", -1, 111],
+ ["ostadosmo", -1, 112],
+ ["nusmo", -1, 13],
+ ["no", -1, 13],
+ ["ano", 1677, 104],
+ ["acano", 1678, 128],
+ ["urano", 1678, 105],
+ ["tano", 1678, 113],
+ ["avano", 1678, 97],
+ ["evano", 1678, 96],
+ ["ivano", 1678, 98],
+ ["uvano", 1678, 99],
+ ["a\u010Dano", 1678, 102],
+ ["aceno", 1677, 124],
+ ["luceno", 1677, 121],
+ ["a\u010Deno", 1677, 101],
+ ["lu\u010Deno", 1677, 117],
+ ["ino", 1677, 11],
+ ["cino", 1691, 137],
+ ["\u010Dino", 1691, 89],
+ ["ato", -1, 120],
+ ["ikato", 1694, 68],
+ ["lato", 1694, 69],
+ ["eto", -1, 70],
+ ["evito", -1, 92],
+ ["ovito", -1, 93],
+ ["asto", -1, 94],
+ ["esto", -1, 71],
+ ["isto", -1, 72],
+ ["ksto", -1, 73],
+ ["osto", -1, 74],
+ ["nuto", -1, 13],
+ ["nuo", -1, 13],
+ ["avo", -1, 77],
+ ["evo", -1, 78],
+ ["ivo", -1, 79],
+ ["ovo", -1, 80],
+ ["stvo", -1, 3],
+ ["\u0161tvo", -1, 4],
+ ["as", -1, 161],
+ ["acas", 1713, 128],
+ ["iras", 1713, 155],
+ ["uras", 1713, 156],
+ ["tas", 1713, 160],
+ ["avas", 1713, 144],
+ ["evas", 1713, 145],
+ ["ivas", 1713, 146],
+ ["uvas", 1713, 147],
+ ["es", -1, 163],
+ ["astades", 1722, 141],
+ ["istades", 1722, 142],
+ ["ostades", 1722, 143],
+ ["astajes", 1722, 138],
+ ["istajes", 1722, 139],
+ ["ostajes", 1722, 140],
+ ["ijes", 1722, 162],
+ ["injes", 1722, 150],
+ ["ujes", 1722, 157],
+ ["lucujes", 1731, 121],
+ ["irujes", 1731, 155],
+ ["nes", 1722, 164],
+ ["astanes", 1734, 141],
+ ["istanes", 1734, 142],
+ ["ostanes", 1734, 143],
+ ["etes", 1722, 153],
+ ["astes", 1722, 136],
+ ["is", -1, 162],
+ ["acis", 1740, 124],
+ ["lucis", 1740, 121],
+ ["nis", 1740, 158],
+ ["rosis", 1740, 127],
+ ["jetis", 1740, 149],
+ ["at", -1, 104],
+ ["acat", 1746, 128],
+ ["astajat", 1746, 106],
+ ["istajat", 1746, 107],
+ ["ostajat", 1746, 108],
+ ["injat", 1746, 114],
+ ["irat", 1746, 100],
+ ["urat", 1746, 105],
+ ["tat", 1746, 113],
+ ["astat", 1754, 110],
+ ["istat", 1754, 111],
+ ["ostat", 1754, 112],
+ ["avat", 1746, 97],
+ ["evat", 1746, 96],
+ ["ivat", 1746, 98],
+ ["irivat", 1760, 100],
+ ["ovat", 1746, 76],
+ ["uvat", 1746, 99],
+ ["a\u010Dat", 1746, 102],
+ ["it", -1, 116],
+ ["acit", 1765, 124],
+ ["lucit", 1765, 121],
+ ["rosit", 1765, 127],
+ ["jetit", 1765, 118],
+ ["a\u010Dit", 1765, 101],
+ ["lu\u010Dit", 1765, 117],
+ ["ro\u0161it", 1765, 90],
+ ["nut", -1, 13],
+ ["astadu", -1, 110],
+ ["istadu", -1, 111],
+ ["ostadu", -1, 112],
+ ["gu", -1, 20],
+ ["logu", 1777, 19],
+ ["ugu", 1777, 18],
+ ["ahu", -1, 104],
+ ["acahu", 1780, 128],
+ ["astajahu", 1780, 106],
+ ["istajahu", 1780, 107],
+ ["ostajahu", 1780, 108],
+ ["injahu", 1780, 114],
+ ["irahu", 1780, 100],
+ ["urahu", 1780, 105],
+ ["avahu", 1780, 97],
+ ["evahu", 1780, 96],
+ ["ivahu", 1780, 98],
+ ["ovahu", 1780, 76],
+ ["uvahu", 1780, 99],
+ ["a\u010Dahu", 1780, 102],
+ ["aju", -1, 104],
+ ["caju", 1794, 26],
+ ["acaju", 1795, 128],
+ ["laju", 1794, 30],
+ ["raju", 1794, 31],
+ ["iraju", 1798, 100],
+ ["uraju", 1798, 105],
+ ["taju", 1794, 113],
+ ["astaju", 1801, 106],
+ ["istaju", 1801, 107],
+ ["ostaju", 1801, 108],
+ ["avaju", 1794, 97],
+ ["evaju", 1794, 96],
+ ["ivaju", 1794, 98],
+ ["uvaju", 1794, 99],
+ ["\u0107aju", 1794, 28],
+ ["\u010Daju", 1794, 27],
+ ["a\u010Daju", 1810, 102],
+ ["\u0111aju", 1794, 29],
+ ["iju", -1, 116],
+ ["biju", 1813, 32],
+ ["ciju", 1813, 33],
+ ["diju", 1813, 34],
+ ["fiju", 1813, 40],
+ ["giju", 1813, 39],
+ ["anjiju", 1813, 84],
+ ["enjiju", 1813, 85],
+ ["snjiju", 1813, 122],
+ ["\u0161njiju", 1813, 86],
+ ["kiju", 1813, 95],
+ ["liju", 1813, 24],
+ ["eliju", 1824, 83],
+ ["miju", 1813, 37],
+ ["niju", 1813, 13],
+ ["ganiju", 1827, 9],
+ ["maniju", 1827, 6],
+ ["paniju", 1827, 7],
+ ["raniju", 1827, 8],
+ ["taniju", 1827, 5],
+ ["piju", 1813, 41],
+ ["riju", 1813, 42],
+ ["rariju", 1834, 21],
+ ["siju", 1813, 23],
+ ["osiju", 1836, 123],
+ ["tiju", 1813, 44],
+ ["atiju", 1838, 120],
+ ["otiju", 1838, 22],
+ ["aviju", 1813, 77],
+ ["eviju", 1813, 78],
+ ["iviju", 1813, 79],
+ ["oviju", 1813, 80],
+ ["ziju", 1813, 45],
+ ["o\u0161iju", 1813, 91],
+ ["\u017Eiju", 1813, 38],
+ ["anju", -1, 84],
+ ["enju", -1, 85],
+ ["snju", -1, 122],
+ ["\u0161nju", -1, 86],
+ ["uju", -1, 25],
+ ["lucuju", 1852, 121],
+ ["iruju", 1852, 100],
+ ["lu\u010Duju", 1852, 117],
+ ["ku", -1, 95],
+ ["sku", 1856, 1],
+ ["\u0161ku", 1856, 2],
+ ["alu", -1, 104],
+ ["ijalu", 1859, 47],
+ ["nalu", 1859, 46],
+ ["elu", -1, 83],
+ ["ilu", -1, 116],
+ ["ozilu", 1863, 48],
+ ["olu", -1, 50],
+ ["ramu", -1, 52],
+ ["acemu", -1, 124],
+ ["ecemu", -1, 125],
+ ["ucemu", -1, 126],
+ ["anjijemu", -1, 84],
+ ["enjijemu", -1, 85],
+ ["snjijemu", -1, 122],
+ ["\u0161njijemu", -1, 86],
+ ["kijemu", -1, 95],
+ ["skijemu", 1874, 1],
+ ["\u0161kijemu", 1874, 2],
+ ["elijemu", -1, 83],
+ ["nijemu", -1, 13],
+ ["osijemu", -1, 123],
+ ["atijemu", -1, 120],
+ ["evitijemu", -1, 92],
+ ["ovitijemu", -1, 93],
+ ["astijemu", -1, 94],
+ ["avijemu", -1, 77],
+ ["evijemu", -1, 78],
+ ["ivijemu", -1, 79],
+ ["ovijemu", -1, 80],
+ ["o\u0161ijemu", -1, 91],
+ ["anjemu", -1, 84],
+ ["enjemu", -1, 85],
+ ["snjemu", -1, 122],
+ ["\u0161njemu", -1, 86],
+ ["kemu", -1, 95],
+ ["skemu", 1893, 1],
+ ["\u0161kemu", 1893, 2],
+ ["lemu", -1, 51],
+ ["elemu", 1896, 83],
+ ["nemu", -1, 13],
+ ["anemu", 1898, 10],
+ ["enemu", 1898, 87],
+ ["snemu", 1898, 159],
+ ["\u0161nemu", 1898, 88],
+ ["osemu", -1, 123],
+ ["atemu", -1, 120],
+ ["evitemu", -1, 92],
+ ["ovitemu", -1, 93],
+ ["astemu", -1, 94],
+ ["avemu", -1, 77],
+ ["evemu", -1, 78],
+ ["ivemu", -1, 79],
+ ["ovemu", -1, 80],
+ ["a\u0107emu", -1, 14],
+ ["e\u0107emu", -1, 15],
+ ["u\u0107emu", -1, 16],
+ ["o\u0161emu", -1, 91],
+ ["acomu", -1, 124],
+ ["ecomu", -1, 125],
+ ["ucomu", -1, 126],
+ ["anjomu", -1, 84],
+ ["enjomu", -1, 85],
+ ["snjomu", -1, 122],
+ ["\u0161njomu", -1, 86],
+ ["komu", -1, 95],
+ ["skomu", 1923, 1],
+ ["\u0161komu", 1923, 2],
+ ["elomu", -1, 83],
+ ["nomu", -1, 13],
+ ["cinomu", 1927, 137],
+ ["\u010Dinomu", 1927, 89],
+ ["osomu", -1, 123],
+ ["atomu", -1, 120],
+ ["evitomu", -1, 92],
+ ["ovitomu", -1, 93],
+ ["astomu", -1, 94],
+ ["avomu", -1, 77],
+ ["evomu", -1, 78],
+ ["ivomu", -1, 79],
+ ["ovomu", -1, 80],
+ ["a\u0107omu", -1, 14],
+ ["e\u0107omu", -1, 15],
+ ["u\u0107omu", -1, 16],
+ ["o\u0161omu", -1, 91],
+ ["nu", -1, 13],
+ ["anu", 1943, 10],
+ ["astanu", 1944, 110],
+ ["istanu", 1944, 111],
+ ["ostanu", 1944, 112],
+ ["inu", 1943, 11],
+ ["cinu", 1948, 137],
+ ["aninu", 1948, 10],
+ ["\u010Dinu", 1948, 89],
+ ["onu", 1943, 12],
+ ["aru", -1, 53],
+ ["dru", -1, 54],
+ ["eru", -1, 55],
+ ["oru", -1, 56],
+ ["basu", -1, 135],
+ ["gasu", -1, 131],
+ ["jasu", -1, 129],
+ ["kasu", -1, 133],
+ ["nasu", -1, 132],
+ ["tasu", -1, 130],
+ ["vasu", -1, 134],
+ ["esu", -1, 57],
+ ["isu", -1, 58],
+ ["osu", -1, 123],
+ ["atu", -1, 120],
+ ["ikatu", 1967, 68],
+ ["latu", 1967, 69],
+ ["etu", -1, 70],
+ ["evitu", -1, 92],
+ ["ovitu", -1, 93],
+ ["astu", -1, 94],
+ ["estu", -1, 71],
+ ["istu", -1, 72],
+ ["kstu", -1, 73],
+ ["ostu", -1, 74],
+ ["i\u0161tu", -1, 75],
+ ["avu", -1, 77],
+ ["evu", -1, 78],
+ ["ivu", -1, 79],
+ ["ovu", -1, 80],
+ ["lovu", 1982, 82],
+ ["movu", 1982, 81],
+ ["stvu", -1, 3],
+ ["\u0161tvu", -1, 4],
+ ["ba\u0161u", -1, 63],
+ ["ga\u0161u", -1, 64],
+ ["ja\u0161u", -1, 61],
+ ["ka\u0161u", -1, 62],
+ ["na\u0161u", -1, 60],
+ ["ta\u0161u", -1, 59],
+ ["va\u0161u", -1, 65],
+ ["e\u0161u", -1, 66],
+ ["i\u0161u", -1, 67],
+ ["o\u0161u", -1, 91],
+ ["avav", -1, 97],
+ ["evav", -1, 96],
+ ["ivav", -1, 98],
+ ["uvav", -1, 99],
+ ["kov", -1, 95],
+ ["a\u0161", -1, 104],
+ ["ira\u0161", 2002, 100],
+ ["ura\u0161", 2002, 105],
+ ["ta\u0161", 2002, 113],
+ ["ava\u0161", 2002, 97],
+ ["eva\u0161", 2002, 96],
+ ["iva\u0161", 2002, 98],
+ ["uva\u0161", 2002, 99],
+ ["a\u010Da\u0161", 2002, 102],
+ ["e\u0161", -1, 119],
+ ["astade\u0161", 2011, 110],
+ ["istade\u0161", 2011, 111],
+ ["ostade\u0161", 2011, 112],
+ ["astaje\u0161", 2011, 106],
+ ["istaje\u0161", 2011, 107],
+ ["ostaje\u0161", 2011, 108],
+ ["ije\u0161", 2011, 116],
+ ["inje\u0161", 2011, 114],
+ ["uje\u0161", 2011, 25],
+ ["iruje\u0161", 2020, 100],
+ ["lu\u010Duje\u0161", 2020, 117],
+ ["ne\u0161", 2011, 13],
+ ["astane\u0161", 2023, 110],
+ ["istane\u0161", 2023, 111],
+ ["ostane\u0161", 2023, 112],
+ ["ete\u0161", 2011, 70],
+ ["aste\u0161", 2011, 115],
+ ["i\u0161", -1, 116],
+ ["ni\u0161", 2029, 103],
+ ["jeti\u0161", 2029, 118],
+ ["a\u010Di\u0161", 2029, 101],
+ ["lu\u010Di\u0161", 2029, 117],
+ ["ro\u0161i\u0161", 2029, 90]
+ ];
+
+ /** @const */ var a_3 = [
+ ["a", -1, 1],
+ ["oga", 0, 1],
+ ["ama", 0, 1],
+ ["ima", 0, 1],
+ ["ena", 0, 1],
+ ["e", -1, 1],
+ ["og", -1, 1],
+ ["anog", 6, 1],
+ ["enog", 6, 1],
+ ["anih", -1, 1],
+ ["enih", -1, 1],
+ ["i", -1, 1],
+ ["ani", 11, 1],
+ ["eni", 11, 1],
+ ["anoj", -1, 1],
+ ["enoj", -1, 1],
+ ["anim", -1, 1],
+ ["enim", -1, 1],
+ ["om", -1, 1],
+ ["enom", 18, 1],
+ ["o", -1, 1],
+ ["ano", 20, 1],
+ ["eno", 20, 1],
+ ["ost", -1, 1],
+ ["u", -1, 1],
+ ["enu", 24, 1]
+ ];
+
+ /** @const */ var /** Array */ g_v = [17, 65, 16];
+
+ /** @const */ var /** Array */ g_sa = [65, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 128];
+
+ /** @const */ var /** Array */ g_ca = [119, 95, 23, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, 136, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 0, 0, 0, 16];
+
+ /** @const */ var /** Array */ g_rg = [1];
+
+ var /** number */ I_p1 = 0;
+ var /** boolean */ B_no_diacritics = false;
+
+
+ /** @return {boolean} */
+ function r_cyr_to_lat() {
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ golab2: while(true)
+ {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab3: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_0);
+ if (among_var == 0)
+ {
+ break lab3;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("a"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("b"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("v"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("g"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("d"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("\u0111"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("\u017E"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("z"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("i"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ if (!base.slice_from("j"))
+ {
+ return false;
+ }
+ break;
+ case 12:
+ if (!base.slice_from("k"))
+ {
+ return false;
+ }
+ break;
+ case 13:
+ if (!base.slice_from("l"))
+ {
+ return false;
+ }
+ break;
+ case 14:
+ if (!base.slice_from("lj"))
+ {
+ return false;
+ }
+ break;
+ case 15:
+ if (!base.slice_from("m"))
+ {
+ return false;
+ }
+ break;
+ case 16:
+ if (!base.slice_from("n"))
+ {
+ return false;
+ }
+ break;
+ case 17:
+ if (!base.slice_from("nj"))
+ {
+ return false;
+ }
+ break;
+ case 18:
+ if (!base.slice_from("o"))
+ {
+ return false;
+ }
+ break;
+ case 19:
+ if (!base.slice_from("p"))
+ {
+ return false;
+ }
+ break;
+ case 20:
+ if (!base.slice_from("r"))
+ {
+ return false;
+ }
+ break;
+ case 21:
+ if (!base.slice_from("s"))
+ {
+ return false;
+ }
+ break;
+ case 22:
+ if (!base.slice_from("t"))
+ {
+ return false;
+ }
+ break;
+ case 23:
+ if (!base.slice_from("\u0107"))
+ {
+ return false;
+ }
+ break;
+ case 24:
+ if (!base.slice_from("u"))
+ {
+ return false;
+ }
+ break;
+ case 25:
+ if (!base.slice_from("f"))
+ {
+ return false;
+ }
+ break;
+ case 26:
+ if (!base.slice_from("h"))
+ {
+ return false;
+ }
+ break;
+ case 27:
+ if (!base.slice_from("c"))
+ {
+ return false;
+ }
+ break;
+ case 28:
+ if (!base.slice_from("\u010D"))
+ {
+ return false;
+ }
+ break;
+ case 29:
+ if (!base.slice_from("d\u017E"))
+ {
+ return false;
+ }
+ break;
+ case 30:
+ if (!base.slice_from("\u0161"))
+ {
+ return false;
+ }
+ break;
+ }
+ base.cursor = v_3;
+ break golab2;
+ }
+ base.cursor = v_3;
+ if (base.cursor >= base.limit)
+ {
+ break lab1;
+ }
+ base.cursor++;
+ }
+ continue;
+ }
+ base.cursor = v_2;
+ break;
+ }
+ }
+ base.cursor = v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_prelude() {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ golab2: while(true)
+ {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab3: {
+ if (!(base.in_grouping(g_ca, 98, 382)))
+ {
+ break lab3;
+ }
+ base.bra = base.cursor;
+ if (!(base.eq_s("ije")))
+ {
+ break lab3;
+ }
+ base.ket = base.cursor;
+ if (!(base.in_grouping(g_ca, 98, 382)))
+ {
+ break lab3;
+ }
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ base.cursor = v_3;
+ break golab2;
+ }
+ base.cursor = v_3;
+ if (base.cursor >= base.limit)
+ {
+ break lab1;
+ }
+ base.cursor++;
+ }
+ continue;
+ }
+ base.cursor = v_2;
+ break;
+ }
+ }
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab4: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab5: {
+ golab6: while(true)
+ {
+ /** @const */ var /** number */ v_6 = base.cursor;
+ lab7: {
+ if (!(base.in_grouping(g_ca, 98, 382)))
+ {
+ break lab7;
+ }
+ base.bra = base.cursor;
+ if (!(base.eq_s("je")))
+ {
+ break lab7;
+ }
+ base.ket = base.cursor;
+ if (!(base.in_grouping(g_ca, 98, 382)))
+ {
+ break lab7;
+ }
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ base.cursor = v_6;
+ break golab6;
+ }
+ base.cursor = v_6;
+ if (base.cursor >= base.limit)
+ {
+ break lab5;
+ }
+ base.cursor++;
+ }
+ continue;
+ }
+ base.cursor = v_5;
+ break;
+ }
+ }
+ base.cursor = v_4;
+ /** @const */ var /** number */ v_7 = base.cursor;
+ lab8: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_8 = base.cursor;
+ lab9: {
+ golab10: while(true)
+ {
+ /** @const */ var /** number */ v_9 = base.cursor;
+ lab11: {
+ base.bra = base.cursor;
+ if (!(base.eq_s("dj")))
+ {
+ break lab11;
+ }
+ base.ket = base.cursor;
+ if (!base.slice_from("\u0111"))
+ {
+ return false;
+ }
+ base.cursor = v_9;
+ break golab10;
+ }
+ base.cursor = v_9;
+ if (base.cursor >= base.limit)
+ {
+ break lab9;
+ }
+ base.cursor++;
+ }
+ continue;
+ }
+ base.cursor = v_8;
+ break;
+ }
+ }
+ base.cursor = v_7;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_mark_regions() {
+ B_no_diacritics = true;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ if (!base.go_out_grouping(g_sa, 263, 382))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ B_no_diacritics = false;
+ }
+ base.cursor = v_1;
+ I_p1 = base.limit;
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ if (!base.go_out_grouping(g_v, 97, 117))
+ {
+ break lab1;
+ }
+ base.cursor++;
+ I_p1 = base.cursor;
+ if (I_p1 >= 2)
+ {
+ break lab1;
+ }
+ if (!base.go_in_grouping(g_v, 97, 117))
+ {
+ break lab1;
+ }
+ base.cursor++;
+ I_p1 = base.cursor;
+ }
+ base.cursor = v_2;
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab2: {
+ golab3: while(true)
+ {
+ lab4: {
+ if (!(base.eq_s("r")))
+ {
+ break lab4;
+ }
+ break golab3;
+ }
+ if (base.cursor >= base.limit)
+ {
+ break lab2;
+ }
+ base.cursor++;
+ }
+ lab5: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab6: {
+ if (base.cursor < 2)
+ {
+ break lab6;
+ }
+ break lab5;
+ }
+ base.cursor = v_4;
+ if (!base.go_in_grouping(g_rg, 114, 114))
+ {
+ break lab2;
+ }
+ base.cursor++;
+ }
+ if ((I_p1 - base.cursor) <= 1)
+ {
+ break lab2;
+ }
+ I_p1 = base.cursor;
+ }
+ base.cursor = v_3;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_R1() {
+ return I_p1 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_Step_1() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_1);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("loga"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("peh"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("vojka"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("bojka"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("jak"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("\u010Dajni"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("cajni"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("erni"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("larni"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("esni"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ if (!base.slice_from("anjca"))
+ {
+ return false;
+ }
+ break;
+ case 12:
+ if (!base.slice_from("ajca"))
+ {
+ return false;
+ }
+ break;
+ case 13:
+ if (!base.slice_from("ljca"))
+ {
+ return false;
+ }
+ break;
+ case 14:
+ if (!base.slice_from("ejca"))
+ {
+ return false;
+ }
+ break;
+ case 15:
+ if (!base.slice_from("ojca"))
+ {
+ return false;
+ }
+ break;
+ case 16:
+ if (!base.slice_from("ajka"))
+ {
+ return false;
+ }
+ break;
+ case 17:
+ if (!base.slice_from("ojka"))
+ {
+ return false;
+ }
+ break;
+ case 18:
+ if (!base.slice_from("\u0161ca"))
+ {
+ return false;
+ }
+ break;
+ case 19:
+ if (!base.slice_from("ing"))
+ {
+ return false;
+ }
+ break;
+ case 20:
+ if (!base.slice_from("tvenik"))
+ {
+ return false;
+ }
+ break;
+ case 21:
+ if (!base.slice_from("tetika"))
+ {
+ return false;
+ }
+ break;
+ case 22:
+ if (!base.slice_from("nstva"))
+ {
+ return false;
+ }
+ break;
+ case 23:
+ if (!base.slice_from("nik"))
+ {
+ return false;
+ }
+ break;
+ case 24:
+ if (!base.slice_from("tik"))
+ {
+ return false;
+ }
+ break;
+ case 25:
+ if (!base.slice_from("zik"))
+ {
+ return false;
+ }
+ break;
+ case 26:
+ if (!base.slice_from("snik"))
+ {
+ return false;
+ }
+ break;
+ case 27:
+ if (!base.slice_from("kusi"))
+ {
+ return false;
+ }
+ break;
+ case 28:
+ if (!base.slice_from("kusni"))
+ {
+ return false;
+ }
+ break;
+ case 29:
+ if (!base.slice_from("kustva"))
+ {
+ return false;
+ }
+ break;
+ case 30:
+ if (!base.slice_from("du\u0161ni"))
+ {
+ return false;
+ }
+ break;
+ case 31:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("dusni"))
+ {
+ return false;
+ }
+ break;
+ case 32:
+ if (!base.slice_from("antni"))
+ {
+ return false;
+ }
+ break;
+ case 33:
+ if (!base.slice_from("bilni"))
+ {
+ return false;
+ }
+ break;
+ case 34:
+ if (!base.slice_from("tilni"))
+ {
+ return false;
+ }
+ break;
+ case 35:
+ if (!base.slice_from("avilni"))
+ {
+ return false;
+ }
+ break;
+ case 36:
+ if (!base.slice_from("silni"))
+ {
+ return false;
+ }
+ break;
+ case 37:
+ if (!base.slice_from("gilni"))
+ {
+ return false;
+ }
+ break;
+ case 38:
+ if (!base.slice_from("rilni"))
+ {
+ return false;
+ }
+ break;
+ case 39:
+ if (!base.slice_from("nilni"))
+ {
+ return false;
+ }
+ break;
+ case 40:
+ if (!base.slice_from("alni"))
+ {
+ return false;
+ }
+ break;
+ case 41:
+ if (!base.slice_from("ozni"))
+ {
+ return false;
+ }
+ break;
+ case 42:
+ if (!base.slice_from("ravi"))
+ {
+ return false;
+ }
+ break;
+ case 43:
+ if (!base.slice_from("stavni"))
+ {
+ return false;
+ }
+ break;
+ case 44:
+ if (!base.slice_from("pravni"))
+ {
+ return false;
+ }
+ break;
+ case 45:
+ if (!base.slice_from("tivni"))
+ {
+ return false;
+ }
+ break;
+ case 46:
+ if (!base.slice_from("sivni"))
+ {
+ return false;
+ }
+ break;
+ case 47:
+ if (!base.slice_from("atni"))
+ {
+ return false;
+ }
+ break;
+ case 48:
+ if (!base.slice_from("enta"))
+ {
+ return false;
+ }
+ break;
+ case 49:
+ if (!base.slice_from("tetni"))
+ {
+ return false;
+ }
+ break;
+ case 50:
+ if (!base.slice_from("pletni"))
+ {
+ return false;
+ }
+ break;
+ case 51:
+ if (!base.slice_from("\u0161avi"))
+ {
+ return false;
+ }
+ break;
+ case 52:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("savi"))
+ {
+ return false;
+ }
+ break;
+ case 53:
+ if (!base.slice_from("anta"))
+ {
+ return false;
+ }
+ break;
+ case 54:
+ if (!base.slice_from("a\u010Dka"))
+ {
+ return false;
+ }
+ break;
+ case 55:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("acka"))
+ {
+ return false;
+ }
+ break;
+ case 56:
+ if (!base.slice_from("u\u0161ka"))
+ {
+ return false;
+ }
+ break;
+ case 57:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("uska"))
+ {
+ return false;
+ }
+ break;
+ case 58:
+ if (!base.slice_from("atka"))
+ {
+ return false;
+ }
+ break;
+ case 59:
+ if (!base.slice_from("etka"))
+ {
+ return false;
+ }
+ break;
+ case 60:
+ if (!base.slice_from("itka"))
+ {
+ return false;
+ }
+ break;
+ case 61:
+ if (!base.slice_from("otka"))
+ {
+ return false;
+ }
+ break;
+ case 62:
+ if (!base.slice_from("utka"))
+ {
+ return false;
+ }
+ break;
+ case 63:
+ if (!base.slice_from("eskna"))
+ {
+ return false;
+ }
+ break;
+ case 64:
+ if (!base.slice_from("ti\u010Dni"))
+ {
+ return false;
+ }
+ break;
+ case 65:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ticni"))
+ {
+ return false;
+ }
+ break;
+ case 66:
+ if (!base.slice_from("ojska"))
+ {
+ return false;
+ }
+ break;
+ case 67:
+ if (!base.slice_from("esma"))
+ {
+ return false;
+ }
+ break;
+ case 68:
+ if (!base.slice_from("metra"))
+ {
+ return false;
+ }
+ break;
+ case 69:
+ if (!base.slice_from("centra"))
+ {
+ return false;
+ }
+ break;
+ case 70:
+ if (!base.slice_from("istra"))
+ {
+ return false;
+ }
+ break;
+ case 71:
+ if (!base.slice_from("osti"))
+ {
+ return false;
+ }
+ break;
+ case 72:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("osti"))
+ {
+ return false;
+ }
+ break;
+ case 73:
+ if (!base.slice_from("dba"))
+ {
+ return false;
+ }
+ break;
+ case 74:
+ if (!base.slice_from("\u010Dka"))
+ {
+ return false;
+ }
+ break;
+ case 75:
+ if (!base.slice_from("mca"))
+ {
+ return false;
+ }
+ break;
+ case 76:
+ if (!base.slice_from("nca"))
+ {
+ return false;
+ }
+ break;
+ case 77:
+ if (!base.slice_from("voljni"))
+ {
+ return false;
+ }
+ break;
+ case 78:
+ if (!base.slice_from("anki"))
+ {
+ return false;
+ }
+ break;
+ case 79:
+ if (!base.slice_from("vca"))
+ {
+ return false;
+ }
+ break;
+ case 80:
+ if (!base.slice_from("sca"))
+ {
+ return false;
+ }
+ break;
+ case 81:
+ if (!base.slice_from("rca"))
+ {
+ return false;
+ }
+ break;
+ case 82:
+ if (!base.slice_from("alca"))
+ {
+ return false;
+ }
+ break;
+ case 83:
+ if (!base.slice_from("elca"))
+ {
+ return false;
+ }
+ break;
+ case 84:
+ if (!base.slice_from("olca"))
+ {
+ return false;
+ }
+ break;
+ case 85:
+ if (!base.slice_from("njca"))
+ {
+ return false;
+ }
+ break;
+ case 86:
+ if (!base.slice_from("ekta"))
+ {
+ return false;
+ }
+ break;
+ case 87:
+ if (!base.slice_from("izma"))
+ {
+ return false;
+ }
+ break;
+ case 88:
+ if (!base.slice_from("jebi"))
+ {
+ return false;
+ }
+ break;
+ case 89:
+ if (!base.slice_from("baci"))
+ {
+ return false;
+ }
+ break;
+ case 90:
+ if (!base.slice_from("a\u0161ni"))
+ {
+ return false;
+ }
+ break;
+ case 91:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("asni"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_2() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_2);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ return false;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("sk"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u0161k"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("stv"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("\u0161tv"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("tanij"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("manij"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("panij"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("ranij"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("ganij"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("an"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ if (!base.slice_from("in"))
+ {
+ return false;
+ }
+ break;
+ case 12:
+ if (!base.slice_from("on"))
+ {
+ return false;
+ }
+ break;
+ case 13:
+ if (!base.slice_from("n"))
+ {
+ return false;
+ }
+ break;
+ case 14:
+ if (!base.slice_from("a\u0107"))
+ {
+ return false;
+ }
+ break;
+ case 15:
+ if (!base.slice_from("e\u0107"))
+ {
+ return false;
+ }
+ break;
+ case 16:
+ if (!base.slice_from("u\u0107"))
+ {
+ return false;
+ }
+ break;
+ case 17:
+ if (!base.slice_from("ugov"))
+ {
+ return false;
+ }
+ break;
+ case 18:
+ if (!base.slice_from("ug"))
+ {
+ return false;
+ }
+ break;
+ case 19:
+ if (!base.slice_from("log"))
+ {
+ return false;
+ }
+ break;
+ case 20:
+ if (!base.slice_from("g"))
+ {
+ return false;
+ }
+ break;
+ case 21:
+ if (!base.slice_from("rari"))
+ {
+ return false;
+ }
+ break;
+ case 22:
+ if (!base.slice_from("oti"))
+ {
+ return false;
+ }
+ break;
+ case 23:
+ if (!base.slice_from("si"))
+ {
+ return false;
+ }
+ break;
+ case 24:
+ if (!base.slice_from("li"))
+ {
+ return false;
+ }
+ break;
+ case 25:
+ if (!base.slice_from("uj"))
+ {
+ return false;
+ }
+ break;
+ case 26:
+ if (!base.slice_from("caj"))
+ {
+ return false;
+ }
+ break;
+ case 27:
+ if (!base.slice_from("\u010Daj"))
+ {
+ return false;
+ }
+ break;
+ case 28:
+ if (!base.slice_from("\u0107aj"))
+ {
+ return false;
+ }
+ break;
+ case 29:
+ if (!base.slice_from("\u0111aj"))
+ {
+ return false;
+ }
+ break;
+ case 30:
+ if (!base.slice_from("laj"))
+ {
+ return false;
+ }
+ break;
+ case 31:
+ if (!base.slice_from("raj"))
+ {
+ return false;
+ }
+ break;
+ case 32:
+ if (!base.slice_from("bij"))
+ {
+ return false;
+ }
+ break;
+ case 33:
+ if (!base.slice_from("cij"))
+ {
+ return false;
+ }
+ break;
+ case 34:
+ if (!base.slice_from("dij"))
+ {
+ return false;
+ }
+ break;
+ case 35:
+ if (!base.slice_from("lij"))
+ {
+ return false;
+ }
+ break;
+ case 36:
+ if (!base.slice_from("nij"))
+ {
+ return false;
+ }
+ break;
+ case 37:
+ if (!base.slice_from("mij"))
+ {
+ return false;
+ }
+ break;
+ case 38:
+ if (!base.slice_from("\u017Eij"))
+ {
+ return false;
+ }
+ break;
+ case 39:
+ if (!base.slice_from("gij"))
+ {
+ return false;
+ }
+ break;
+ case 40:
+ if (!base.slice_from("fij"))
+ {
+ return false;
+ }
+ break;
+ case 41:
+ if (!base.slice_from("pij"))
+ {
+ return false;
+ }
+ break;
+ case 42:
+ if (!base.slice_from("rij"))
+ {
+ return false;
+ }
+ break;
+ case 43:
+ if (!base.slice_from("sij"))
+ {
+ return false;
+ }
+ break;
+ case 44:
+ if (!base.slice_from("tij"))
+ {
+ return false;
+ }
+ break;
+ case 45:
+ if (!base.slice_from("zij"))
+ {
+ return false;
+ }
+ break;
+ case 46:
+ if (!base.slice_from("nal"))
+ {
+ return false;
+ }
+ break;
+ case 47:
+ if (!base.slice_from("ijal"))
+ {
+ return false;
+ }
+ break;
+ case 48:
+ if (!base.slice_from("ozil"))
+ {
+ return false;
+ }
+ break;
+ case 49:
+ if (!base.slice_from("olov"))
+ {
+ return false;
+ }
+ break;
+ case 50:
+ if (!base.slice_from("ol"))
+ {
+ return false;
+ }
+ break;
+ case 51:
+ if (!base.slice_from("lem"))
+ {
+ return false;
+ }
+ break;
+ case 52:
+ if (!base.slice_from("ram"))
+ {
+ return false;
+ }
+ break;
+ case 53:
+ if (!base.slice_from("ar"))
+ {
+ return false;
+ }
+ break;
+ case 54:
+ if (!base.slice_from("dr"))
+ {
+ return false;
+ }
+ break;
+ case 55:
+ if (!base.slice_from("er"))
+ {
+ return false;
+ }
+ break;
+ case 56:
+ if (!base.slice_from("or"))
+ {
+ return false;
+ }
+ break;
+ case 57:
+ if (!base.slice_from("es"))
+ {
+ return false;
+ }
+ break;
+ case 58:
+ if (!base.slice_from("is"))
+ {
+ return false;
+ }
+ break;
+ case 59:
+ if (!base.slice_from("ta\u0161"))
+ {
+ return false;
+ }
+ break;
+ case 60:
+ if (!base.slice_from("na\u0161"))
+ {
+ return false;
+ }
+ break;
+ case 61:
+ if (!base.slice_from("ja\u0161"))
+ {
+ return false;
+ }
+ break;
+ case 62:
+ if (!base.slice_from("ka\u0161"))
+ {
+ return false;
+ }
+ break;
+ case 63:
+ if (!base.slice_from("ba\u0161"))
+ {
+ return false;
+ }
+ break;
+ case 64:
+ if (!base.slice_from("ga\u0161"))
+ {
+ return false;
+ }
+ break;
+ case 65:
+ if (!base.slice_from("va\u0161"))
+ {
+ return false;
+ }
+ break;
+ case 66:
+ if (!base.slice_from("e\u0161"))
+ {
+ return false;
+ }
+ break;
+ case 67:
+ if (!base.slice_from("i\u0161"))
+ {
+ return false;
+ }
+ break;
+ case 68:
+ if (!base.slice_from("ikat"))
+ {
+ return false;
+ }
+ break;
+ case 69:
+ if (!base.slice_from("lat"))
+ {
+ return false;
+ }
+ break;
+ case 70:
+ if (!base.slice_from("et"))
+ {
+ return false;
+ }
+ break;
+ case 71:
+ if (!base.slice_from("est"))
+ {
+ return false;
+ }
+ break;
+ case 72:
+ if (!base.slice_from("ist"))
+ {
+ return false;
+ }
+ break;
+ case 73:
+ if (!base.slice_from("kst"))
+ {
+ return false;
+ }
+ break;
+ case 74:
+ if (!base.slice_from("ost"))
+ {
+ return false;
+ }
+ break;
+ case 75:
+ if (!base.slice_from("i\u0161t"))
+ {
+ return false;
+ }
+ break;
+ case 76:
+ if (!base.slice_from("ova"))
+ {
+ return false;
+ }
+ break;
+ case 77:
+ if (!base.slice_from("av"))
+ {
+ return false;
+ }
+ break;
+ case 78:
+ if (!base.slice_from("ev"))
+ {
+ return false;
+ }
+ break;
+ case 79:
+ if (!base.slice_from("iv"))
+ {
+ return false;
+ }
+ break;
+ case 80:
+ if (!base.slice_from("ov"))
+ {
+ return false;
+ }
+ break;
+ case 81:
+ if (!base.slice_from("mov"))
+ {
+ return false;
+ }
+ break;
+ case 82:
+ if (!base.slice_from("lov"))
+ {
+ return false;
+ }
+ break;
+ case 83:
+ if (!base.slice_from("el"))
+ {
+ return false;
+ }
+ break;
+ case 84:
+ if (!base.slice_from("anj"))
+ {
+ return false;
+ }
+ break;
+ case 85:
+ if (!base.slice_from("enj"))
+ {
+ return false;
+ }
+ break;
+ case 86:
+ if (!base.slice_from("\u0161nj"))
+ {
+ return false;
+ }
+ break;
+ case 87:
+ if (!base.slice_from("en"))
+ {
+ return false;
+ }
+ break;
+ case 88:
+ if (!base.slice_from("\u0161n"))
+ {
+ return false;
+ }
+ break;
+ case 89:
+ if (!base.slice_from("\u010Din"))
+ {
+ return false;
+ }
+ break;
+ case 90:
+ if (!base.slice_from("ro\u0161i"))
+ {
+ return false;
+ }
+ break;
+ case 91:
+ if (!base.slice_from("o\u0161"))
+ {
+ return false;
+ }
+ break;
+ case 92:
+ if (!base.slice_from("evit"))
+ {
+ return false;
+ }
+ break;
+ case 93:
+ if (!base.slice_from("ovit"))
+ {
+ return false;
+ }
+ break;
+ case 94:
+ if (!base.slice_from("ast"))
+ {
+ return false;
+ }
+ break;
+ case 95:
+ if (!base.slice_from("k"))
+ {
+ return false;
+ }
+ break;
+ case 96:
+ if (!base.slice_from("eva"))
+ {
+ return false;
+ }
+ break;
+ case 97:
+ if (!base.slice_from("ava"))
+ {
+ return false;
+ }
+ break;
+ case 98:
+ if (!base.slice_from("iva"))
+ {
+ return false;
+ }
+ break;
+ case 99:
+ if (!base.slice_from("uva"))
+ {
+ return false;
+ }
+ break;
+ case 100:
+ if (!base.slice_from("ir"))
+ {
+ return false;
+ }
+ break;
+ case 101:
+ if (!base.slice_from("a\u010D"))
+ {
+ return false;
+ }
+ break;
+ case 102:
+ if (!base.slice_from("a\u010Da"))
+ {
+ return false;
+ }
+ break;
+ case 103:
+ if (!base.slice_from("ni"))
+ {
+ return false;
+ }
+ break;
+ case 104:
+ if (!base.slice_from("a"))
+ {
+ return false;
+ }
+ break;
+ case 105:
+ if (!base.slice_from("ur"))
+ {
+ return false;
+ }
+ break;
+ case 106:
+ if (!base.slice_from("astaj"))
+ {
+ return false;
+ }
+ break;
+ case 107:
+ if (!base.slice_from("istaj"))
+ {
+ return false;
+ }
+ break;
+ case 108:
+ if (!base.slice_from("ostaj"))
+ {
+ return false;
+ }
+ break;
+ case 109:
+ if (!base.slice_from("aj"))
+ {
+ return false;
+ }
+ break;
+ case 110:
+ if (!base.slice_from("asta"))
+ {
+ return false;
+ }
+ break;
+ case 111:
+ if (!base.slice_from("ista"))
+ {
+ return false;
+ }
+ break;
+ case 112:
+ if (!base.slice_from("osta"))
+ {
+ return false;
+ }
+ break;
+ case 113:
+ if (!base.slice_from("ta"))
+ {
+ return false;
+ }
+ break;
+ case 114:
+ if (!base.slice_from("inj"))
+ {
+ return false;
+ }
+ break;
+ case 115:
+ if (!base.slice_from("as"))
+ {
+ return false;
+ }
+ break;
+ case 116:
+ if (!base.slice_from("i"))
+ {
+ return false;
+ }
+ break;
+ case 117:
+ if (!base.slice_from("lu\u010D"))
+ {
+ return false;
+ }
+ break;
+ case 118:
+ if (!base.slice_from("jeti"))
+ {
+ return false;
+ }
+ break;
+ case 119:
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ break;
+ case 120:
+ if (!base.slice_from("at"))
+ {
+ return false;
+ }
+ break;
+ case 121:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("luc"))
+ {
+ return false;
+ }
+ break;
+ case 122:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("snj"))
+ {
+ return false;
+ }
+ break;
+ case 123:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("os"))
+ {
+ return false;
+ }
+ break;
+ case 124:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ac"))
+ {
+ return false;
+ }
+ break;
+ case 125:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ec"))
+ {
+ return false;
+ }
+ break;
+ case 126:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("uc"))
+ {
+ return false;
+ }
+ break;
+ case 127:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("rosi"))
+ {
+ return false;
+ }
+ break;
+ case 128:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("aca"))
+ {
+ return false;
+ }
+ break;
+ case 129:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("jas"))
+ {
+ return false;
+ }
+ break;
+ case 130:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("tas"))
+ {
+ return false;
+ }
+ break;
+ case 131:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("gas"))
+ {
+ return false;
+ }
+ break;
+ case 132:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("nas"))
+ {
+ return false;
+ }
+ break;
+ case 133:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("kas"))
+ {
+ return false;
+ }
+ break;
+ case 134:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("vas"))
+ {
+ return false;
+ }
+ break;
+ case 135:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("bas"))
+ {
+ return false;
+ }
+ break;
+ case 136:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("as"))
+ {
+ return false;
+ }
+ break;
+ case 137:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("cin"))
+ {
+ return false;
+ }
+ break;
+ case 138:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("astaj"))
+ {
+ return false;
+ }
+ break;
+ case 139:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("istaj"))
+ {
+ return false;
+ }
+ break;
+ case 140:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ostaj"))
+ {
+ return false;
+ }
+ break;
+ case 141:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("asta"))
+ {
+ return false;
+ }
+ break;
+ case 142:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ista"))
+ {
+ return false;
+ }
+ break;
+ case 143:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("osta"))
+ {
+ return false;
+ }
+ break;
+ case 144:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ava"))
+ {
+ return false;
+ }
+ break;
+ case 145:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("eva"))
+ {
+ return false;
+ }
+ break;
+ case 146:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("iva"))
+ {
+ return false;
+ }
+ break;
+ case 147:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("uva"))
+ {
+ return false;
+ }
+ break;
+ case 148:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ova"))
+ {
+ return false;
+ }
+ break;
+ case 149:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("jeti"))
+ {
+ return false;
+ }
+ break;
+ case 150:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("inj"))
+ {
+ return false;
+ }
+ break;
+ case 151:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ist"))
+ {
+ return false;
+ }
+ break;
+ case 152:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("es"))
+ {
+ return false;
+ }
+ break;
+ case 153:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("et"))
+ {
+ return false;
+ }
+ break;
+ case 154:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("is"))
+ {
+ return false;
+ }
+ break;
+ case 155:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ir"))
+ {
+ return false;
+ }
+ break;
+ case 156:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ur"))
+ {
+ return false;
+ }
+ break;
+ case 157:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("uj"))
+ {
+ return false;
+ }
+ break;
+ case 158:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ni"))
+ {
+ return false;
+ }
+ break;
+ case 159:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("sn"))
+ {
+ return false;
+ }
+ break;
+ case 160:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ta"))
+ {
+ return false;
+ }
+ break;
+ case 161:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("a"))
+ {
+ return false;
+ }
+ break;
+ case 162:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("i"))
+ {
+ return false;
+ }
+ break;
+ case 163:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ break;
+ case 164:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("n"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_3() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_3) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from(""))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ r_cyr_to_lat();
+ r_prelude();
+ r_mark_regions();
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ r_Step_1();
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab0: {
+ lab1: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab2: {
+ if (!r_Step_2())
+ {
+ break lab2;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_3;
+ if (!r_Step_3())
+ {
+ break lab0;
+ }
+ }
+ }
+ base.cursor = base.limit - v_2;
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/spanish-stemmer.js b/sphinx/search/non-minified-js/spanish-stemmer.js
index fffd6160b13..f800db7467d 100644
--- a/sphinx/search/non-minified-js/spanish-stemmer.js
+++ b/sphinx/search/non-minified-js/spanish-stemmer.js
@@ -1,8 +1,9 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from spanish.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-SpanishStemmer = function() {
+var SpanishStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
["", -1, 6],
["\u00E1", 0, 1],
@@ -77,6 +78,8 @@ SpanishStemmer = function() {
["ante", -1, 2],
["mente", -1, 7],
["amente", 13, 6],
+ ["acion", -1, 2],
+ ["ucion", -1, 4],
["aci\u00F3n", -1, 2],
["uci\u00F3n", -1, 4],
["ico", -1, 1],
@@ -247,37 +250,27 @@ SpanishStemmer = function() {
I_pV = base.limit;
I_p1 = base.limit;
I_p2 = base.limit;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
lab1: {
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab2: {
if (!(base.in_grouping(g_v, 97, 252)))
{
break lab2;
}
lab3: {
- var /** number */ v_3 = base.cursor;
+ /** @const */ var /** number */ v_3 = base.cursor;
lab4: {
if (!(base.out_grouping(g_v, 97, 252)))
{
break lab4;
}
- golab5: while(true)
+ if (!base.go_out_grouping(g_v, 97, 252))
{
- lab6: {
- if (!(base.in_grouping(g_v, 97, 252)))
- {
- break lab6;
- }
- break golab5;
- }
- if (base.cursor >= base.limit)
- {
- break lab4;
- }
- base.cursor++;
+ break lab4;
}
+ base.cursor++;
break lab3;
}
base.cursor = v_3;
@@ -285,21 +278,11 @@ SpanishStemmer = function() {
{
break lab2;
}
- golab7: while(true)
+ if (!base.go_in_grouping(g_v, 97, 252))
{
- lab8: {
- if (!(base.out_grouping(g_v, 97, 252)))
- {
- break lab8;
- }
- break golab7;
- }
- if (base.cursor >= base.limit)
- {
- break lab2;
- }
- base.cursor++;
+ break lab2;
}
+ base.cursor++;
}
break lab1;
}
@@ -308,31 +291,21 @@ SpanishStemmer = function() {
{
break lab0;
}
- lab9: {
- var /** number */ v_6 = base.cursor;
- lab10: {
+ lab5: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab6: {
if (!(base.out_grouping(g_v, 97, 252)))
{
- break lab10;
+ break lab6;
}
- golab11: while(true)
+ if (!base.go_out_grouping(g_v, 97, 252))
{
- lab12: {
- if (!(base.in_grouping(g_v, 97, 252)))
- {
- break lab12;
- }
- break golab11;
- }
- if (base.cursor >= base.limit)
- {
- break lab10;
- }
- base.cursor++;
+ break lab6;
}
- break lab9;
+ base.cursor++;
+ break lab5;
}
- base.cursor = v_6;
+ base.cursor = v_4;
if (!(base.in_grouping(g_v, 97, 252)))
{
break lab0;
@@ -347,72 +320,32 @@ SpanishStemmer = function() {
I_pV = base.cursor;
}
base.cursor = v_1;
- var /** number */ v_8 = base.cursor;
- lab13: {
- golab14: while(true)
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab7: {
+ if (!base.go_out_grouping(g_v, 97, 252))
{
- lab15: {
- if (!(base.in_grouping(g_v, 97, 252)))
- {
- break lab15;
- }
- break golab14;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
- golab16: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 252))
{
- lab17: {
- if (!(base.out_grouping(g_v, 97, 252)))
- {
- break lab17;
- }
- break golab16;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
+ base.cursor++;
I_p1 = base.cursor;
- golab18: while(true)
+ if (!base.go_out_grouping(g_v, 97, 252))
{
- lab19: {
- if (!(base.in_grouping(g_v, 97, 252)))
- {
- break lab19;
- }
- break golab18;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
- golab20: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 252))
{
- lab21: {
- if (!(base.out_grouping(g_v, 97, 252)))
- {
- break lab21;
- }
- break golab20;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
+ base.cursor++;
I_p2 = base.cursor;
}
- base.cursor = v_8;
+ base.cursor = v_5;
return true;
};
@@ -421,14 +354,10 @@ SpanishStemmer = function() {
var /** number */ among_var;
while(true)
{
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
base.bra = base.cursor;
among_var = base.find_among(a_0);
- if (among_var == 0)
- {
- break lab0;
- }
base.ket = base.cursor;
switch (among_var) {
case 1:
@@ -479,29 +408,17 @@ SpanishStemmer = function() {
/** @return {boolean} */
function r_RV() {
- if (!(I_pV <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_pV <= base.cursor;
};
/** @return {boolean} */
function r_R1() {
- if (!(I_p1 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p1 <= base.cursor;
};
/** @return {boolean} */
function r_R2() {
- if (!(I_p2 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p2 <= base.cursor;
};
/** @return {boolean} */
@@ -608,7 +525,7 @@ SpanishStemmer = function() {
{
return false;
}
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
base.ket = base.cursor;
if (!(base.eq_s_b("ic")))
@@ -667,7 +584,7 @@ SpanishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab1: {
base.ket = base.cursor;
among_var = base.find_among_b(a_3);
@@ -717,7 +634,7 @@ SpanishStemmer = function() {
{
return false;
}
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab2: {
base.ket = base.cursor;
if (base.find_among_b(a_4) == 0)
@@ -746,7 +663,7 @@ SpanishStemmer = function() {
{
return false;
}
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab3: {
base.ket = base.cursor;
if (base.find_among_b(a_5) == 0)
@@ -775,7 +692,7 @@ SpanishStemmer = function() {
{
return false;
}
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab4: {
base.ket = base.cursor;
if (!(base.eq_s_b("at")))
@@ -805,16 +722,16 @@ SpanishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_pV;
base.ket = base.cursor;
if (base.find_among_b(a_7) == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
if (!(base.eq_s_b("u")))
{
return false;
@@ -833,33 +750,33 @@ SpanishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_pV;
base.ket = base.cursor;
among_var = base.find_among_b(a_8);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
switch (among_var) {
case 1:
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab0: {
if (!(base.eq_s_b("u")))
{
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
break lab0;
}
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
if (!(base.eq_s_b("g")))
{
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
break lab0;
}
- base.cursor = base.limit - v_4;
+ base.cursor = base.limit - v_3;
}
base.bra = base.cursor;
if (!base.slice_del())
@@ -907,7 +824,7 @@ SpanishStemmer = function() {
{
return false;
}
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
base.ket = base.cursor;
if (!(base.eq_s_b("u")))
@@ -916,7 +833,7 @@ SpanishStemmer = function() {
break lab0;
}
base.bra = base.cursor;
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
if (!(base.eq_s_b("g")))
{
base.cursor = base.limit - v_1;
@@ -941,13 +858,13 @@ SpanishStemmer = function() {
this.stem = /** @return {boolean} */ function() {
r_mark_regions();
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
r_attached_pronoun();
- base.cursor = base.limit - v_2;
- var /** number */ v_3 = base.limit - base.cursor;
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab0: {
lab1: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab2: {
if (!r_standard_suffix())
{
@@ -955,7 +872,7 @@ SpanishStemmer = function() {
}
break lab1;
}
- base.cursor = base.limit - v_4;
+ base.cursor = base.limit - v_3;
lab3: {
if (!r_y_verb_suffix())
{
@@ -963,21 +880,21 @@ SpanishStemmer = function() {
}
break lab1;
}
- base.cursor = base.limit - v_4;
+ base.cursor = base.limit - v_3;
if (!r_verb_suffix())
{
break lab0;
}
}
}
- base.cursor = base.limit - v_3;
- var /** number */ v_5 = base.limit - base.cursor;
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
r_residual_suffix();
- base.cursor = base.limit - v_5;
+ base.cursor = base.limit - v_4;
base.cursor = base.limit_backward;
- var /** number */ v_6 = base.cursor;
+ /** @const */ var /** number */ v_5 = base.cursor;
r_postlude();
- base.cursor = v_6;
+ base.cursor = v_5;
return true;
};
diff --git a/sphinx/search/non-minified-js/swedish-stemmer.js b/sphinx/search/non-minified-js/swedish-stemmer.js
index 4d7d49fc0e5..bf1a64268d2 100644
--- a/sphinx/search/non-minified-js/swedish-stemmer.js
+++ b/sphinx/search/non-minified-js/swedish-stemmer.js
@@ -1,9 +1,34 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from swedish.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-SwedishStemmer = function() {
+var SwedishStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
+ ["fab", -1, -1],
+ ["h", -1, -1],
+ ["pak", -1, -1],
+ ["rak", -1, -1],
+ ["stak", -1, -1],
+ ["kom", -1, -1],
+ ["iet", -1, -1],
+ ["cit", -1, -1],
+ ["dit", -1, -1],
+ ["alit", -1, -1],
+ ["ilit", -1, -1],
+ ["mit", -1, -1],
+ ["nit", -1, -1],
+ ["pit", -1, -1],
+ ["rit", -1, -1],
+ ["sit", -1, -1],
+ ["tit", -1, -1],
+ ["uit", -1, -1],
+ ["ivit", -1, -1],
+ ["kvit", -1, -1],
+ ["xit", -1, -1]
+ ];
+
+ /** @const */ var a_1 = [
["a", -1, 1],
["arna", 0, 1],
["erna", 0, 1],
@@ -38,12 +63,13 @@ SwedishStemmer = function() {
["hetens", 29, 1],
["erns", 21, 1],
["at", -1, 1],
- ["andet", -1, 1],
- ["het", -1, 1],
+ ["et", -1, 3],
+ ["andet", 34, 1],
+ ["het", 34, 1],
["ast", -1, 1]
];
- /** @const */ var a_1 = [
+ /** @const */ var a_2 = [
["dd", -1, -1],
["gd", -1, -1],
["nn", -1, -1],
@@ -53,18 +79,20 @@ SwedishStemmer = function() {
["tt", -1, -1]
];
- /** @const */ var a_2 = [
+ /** @const */ var a_3 = [
["ig", -1, 1],
["lig", 0, 1],
["els", -1, 1],
["fullt", -1, 3],
- ["l\u00F6st", -1, 2]
+ ["\u00F6st", -1, 2]
];
/** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 0, 32];
/** @const */ var /** Array */ g_s_ending = [119, 127, 149];
+ /** @const */ var /** Array */ g_ost_ending = [173, 58];
+
var /** number */ I_x = 0;
var /** number */ I_p1 = 0;
@@ -72,9 +100,9 @@ SwedishStemmer = function() {
/** @return {boolean} */
function r_mark_regions() {
I_p1 = base.limit;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
{
- var /** number */ c1 = base.cursor + 3;
+ /** @const */ var /** number */ c1 = base.cursor + 3;
if (c1 > base.limit)
{
return false;
@@ -83,46 +111,56 @@ SwedishStemmer = function() {
}
I_x = base.cursor;
base.cursor = v_1;
- golab0: while(true)
+ if (!base.go_out_grouping(g_v, 97, 246))
{
- var /** number */ v_2 = base.cursor;
- lab1: {
- if (!(base.in_grouping(g_v, 97, 246)))
- {
- break lab1;
- }
- base.cursor = v_2;
- break golab0;
+ return false;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 246))
+ {
+ return false;
+ }
+ base.cursor++;
+ I_p1 = base.cursor;
+ lab0: {
+ if (I_p1 >= I_x)
+ {
+ break lab0;
}
- base.cursor = v_2;
- if (base.cursor >= base.limit)
+ I_p1 = I_x;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_et_condition() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ if (!(base.out_grouping_b(g_v, 97, 246)))
+ {
+ return false;
+ }
+ if (!(base.in_grouping_b(g_v, 97, 246)))
+ {
+ return false;
+ }
+ lab0: {
+ if (base.cursor > base.limit_backward)
{
- return false;
+ break lab0;
}
- base.cursor++;
+ return false;
}
- golab2: while(true)
+ base.cursor = base.limit - v_1;
{
- lab3: {
- if (!(base.out_grouping(g_v, 97, 246)))
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ if (base.find_among_b(a_0) == 0)
{
- break lab3;
+ break lab1;
}
- break golab2;
- }
- if (base.cursor >= base.limit)
- {
return false;
}
- base.cursor++;
- }
- I_p1 = base.cursor;
- lab4: {
- if (!(I_p1 < I_x))
- {
- break lab4;
- }
- I_p1 = I_x;
+ base.cursor = base.limit - v_2;
}
return true;
};
@@ -134,17 +172,17 @@ SwedishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
- among_var = base.find_among_b(a_0);
+ among_var = base.find_among_b(a_1);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
switch (among_var) {
case 1:
if (!base.slice_del())
@@ -153,7 +191,33 @@ SwedishStemmer = function() {
}
break;
case 2:
- if (!(base.in_grouping_b(g_s_ending, 98, 121)))
+ lab0: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("et")))
+ {
+ break lab1;
+ }
+ if (!r_et_condition())
+ {
+ break lab1;
+ }
+ base.bra = base.cursor;
+ break lab0;
+ }
+ base.cursor = base.limit - v_2;
+ if (!(base.in_grouping_b(g_s_ending, 98, 121)))
+ {
+ return false;
+ }
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!r_et_condition())
{
return false;
}
@@ -172,19 +236,19 @@ SwedishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
- var /** number */ v_3 = base.limit - base.cursor;
- if (base.find_among_b(a_1) == 0)
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ if (base.find_among_b(a_2) == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
base.ket = base.cursor;
if (base.cursor <= base.limit_backward)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.cursor--;
@@ -193,7 +257,7 @@ SwedishStemmer = function() {
{
return false;
}
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return true;
};
@@ -204,16 +268,17 @@ SwedishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
- among_var = base.find_among_b(a_2);
+ among_var = base.find_among_b(a_3);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
+ base.limit_backward = v_1;
switch (among_var) {
case 1:
if (!base.slice_del())
@@ -222,7 +287,11 @@ SwedishStemmer = function() {
}
break;
case 2:
- if (!base.slice_from("l\u00F6s"))
+ if (!(base.in_grouping_b(g_ost_ending, 105, 118)))
+ {
+ return false;
+ }
+ if (!base.slice_from("\u00F6s"))
{
return false;
}
@@ -234,22 +303,21 @@ SwedishStemmer = function() {
}
break;
}
- base.limit_backward = v_2;
return true;
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
r_mark_regions();
base.cursor = v_1;
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
r_main_suffix();
base.cursor = base.limit - v_2;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
r_consonant_pair();
base.cursor = base.limit - v_3;
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
r_other_suffix();
base.cursor = base.limit - v_4;
base.cursor = base.limit_backward;
diff --git a/sphinx/search/non-minified-js/tamil-stemmer.js b/sphinx/search/non-minified-js/tamil-stemmer.js
new file mode 100644
index 00000000000..2ae474784d7
--- /dev/null
+++ b/sphinx/search/non-minified-js/tamil-stemmer.js
@@ -0,0 +1,1189 @@
+// Generated from tamil.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var TamilStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["\u0BB5\u0BC1", -1, 3],
+ ["\u0BB5\u0BC2", -1, 4],
+ ["\u0BB5\u0BCA", -1, 2],
+ ["\u0BB5\u0BCB", -1, 1]
+ ];
+
+ /** @const */ var a_1 = [
+ ["\u0B95", -1, -1],
+ ["\u0B99", -1, -1],
+ ["\u0B9A", -1, -1],
+ ["\u0B9E", -1, -1],
+ ["\u0BA4", -1, -1],
+ ["\u0BA8", -1, -1],
+ ["\u0BAA", -1, -1],
+ ["\u0BAE", -1, -1],
+ ["\u0BAF", -1, -1],
+ ["\u0BB5", -1, -1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["\u0BBF", -1, -1],
+ ["\u0BC0", -1, -1],
+ ["\u0BC8", -1, -1]
+ ];
+
+ /** @const */ var a_3 = [
+ ["\u0BBE", -1, -1],
+ ["\u0BBF", -1, -1],
+ ["\u0BC0", -1, -1],
+ ["\u0BC1", -1, -1],
+ ["\u0BC2", -1, -1],
+ ["\u0BC6", -1, -1],
+ ["\u0BC7", -1, -1],
+ ["\u0BC8", -1, -1]
+ ];
+
+ /** @const */ var a_4 = [
+ ["", -1, 2],
+ ["\u0BC8", 0, 1],
+ ["\u0BCD", 0, 1]
+ ];
+
+ /** @const */ var a_5 = [
+ ["\u0BA8\u0BCD\u0BA4", -1, 1],
+ ["\u0BAF", -1, 1],
+ ["\u0BB5", -1, 1],
+ ["\u0BA9\u0BC1", -1, 8],
+ ["\u0BC1\u0B95\u0BCD", -1, 7],
+ ["\u0BC1\u0B95\u0BCD\u0B95\u0BCD", -1, 7],
+ ["\u0B9F\u0BCD\u0B95\u0BCD", -1, 3],
+ ["\u0BB1\u0BCD\u0B95\u0BCD", -1, 4],
+ ["\u0B99\u0BCD", -1, 9],
+ ["\u0B9F\u0BCD\u0B9F\u0BCD", -1, 5],
+ ["\u0BA4\u0BCD\u0BA4\u0BCD", -1, 6],
+ ["\u0BA8\u0BCD\u0BA4\u0BCD", -1, 1],
+ ["\u0BA8\u0BCD", -1, 1],
+ ["\u0B9F\u0BCD\u0BAA\u0BCD", -1, 3],
+ ["\u0BAF\u0BCD", -1, 2],
+ ["\u0BA9\u0BCD\u0BB1\u0BCD", -1, 4],
+ ["\u0BB5\u0BCD", -1, 1]
+ ];
+
+ /** @const */ var a_6 = [
+ ["\u0B95", -1, -1],
+ ["\u0B9A", -1, -1],
+ ["\u0B9F", -1, -1],
+ ["\u0BA4", -1, -1],
+ ["\u0BAA", -1, -1],
+ ["\u0BB1", -1, -1]
+ ];
+
+ /** @const */ var a_7 = [
+ ["\u0B95", -1, -1],
+ ["\u0B9A", -1, -1],
+ ["\u0B9F", -1, -1],
+ ["\u0BA4", -1, -1],
+ ["\u0BAA", -1, -1],
+ ["\u0BB1", -1, -1]
+ ];
+
+ /** @const */ var a_8 = [
+ ["\u0B9E", -1, -1],
+ ["\u0BA3", -1, -1],
+ ["\u0BA8", -1, -1],
+ ["\u0BA9", -1, -1],
+ ["\u0BAE", -1, -1],
+ ["\u0BAF", -1, -1],
+ ["\u0BB0", -1, -1],
+ ["\u0BB2", -1, -1],
+ ["\u0BB3", -1, -1],
+ ["\u0BB4", -1, -1],
+ ["\u0BB5", -1, -1]
+ ];
+
+ /** @const */ var a_9 = [
+ ["\u0BBE", -1, -1],
+ ["\u0BBF", -1, -1],
+ ["\u0BC0", -1, -1],
+ ["\u0BC1", -1, -1],
+ ["\u0BC2", -1, -1],
+ ["\u0BC6", -1, -1],
+ ["\u0BC7", -1, -1],
+ ["\u0BC8", -1, -1],
+ ["\u0BCD", -1, -1]
+ ];
+
+ /** @const */ var a_10 = [
+ ["\u0B85", -1, -1],
+ ["\u0B87", -1, -1],
+ ["\u0B89", -1, -1]
+ ];
+
+ /** @const */ var a_11 = [
+ ["\u0B95", -1, -1],
+ ["\u0B99", -1, -1],
+ ["\u0B9A", -1, -1],
+ ["\u0B9E", -1, -1],
+ ["\u0BA4", -1, -1],
+ ["\u0BA8", -1, -1],
+ ["\u0BAA", -1, -1],
+ ["\u0BAE", -1, -1],
+ ["\u0BAF", -1, -1],
+ ["\u0BB5", -1, -1]
+ ];
+
+ /** @const */ var a_12 = [
+ ["\u0B95", -1, -1],
+ ["\u0B9A", -1, -1],
+ ["\u0B9F", -1, -1],
+ ["\u0BA4", -1, -1],
+ ["\u0BAA", -1, -1],
+ ["\u0BB1", -1, -1]
+ ];
+
+ /** @const */ var a_13 = [
+ ["\u0B95\u0BB3\u0BCD", -1, 4],
+ ["\u0BC1\u0B99\u0BCD\u0B95\u0BB3\u0BCD", 0, 1],
+ ["\u0B9F\u0BCD\u0B95\u0BB3\u0BCD", 0, 3],
+ ["\u0BB1\u0BCD\u0B95\u0BB3\u0BCD", 0, 2]
+ ];
+
+ /** @const */ var a_14 = [
+ ["\u0BBE", -1, -1],
+ ["\u0BC7", -1, -1],
+ ["\u0BCB", -1, -1]
+ ];
+
+ /** @const */ var a_15 = [
+ ["\u0BAA\u0BBF", -1, -1],
+ ["\u0BB5\u0BBF", -1, -1]
+ ];
+
+ /** @const */ var a_16 = [
+ ["\u0BBE", -1, -1],
+ ["\u0BBF", -1, -1],
+ ["\u0BC0", -1, -1],
+ ["\u0BC1", -1, -1],
+ ["\u0BC2", -1, -1],
+ ["\u0BC6", -1, -1],
+ ["\u0BC7", -1, -1],
+ ["\u0BC8", -1, -1]
+ ];
+
+ /** @const */ var a_17 = [
+ ["\u0BAA\u0B9F\u0BCD\u0B9F", -1, 3],
+ ["\u0BAA\u0B9F\u0BCD\u0B9F\u0BA3", -1, 3],
+ ["\u0BA4\u0BBE\u0BA9", -1, 3],
+ ["\u0BAA\u0B9F\u0BBF\u0BA4\u0BBE\u0BA9", 2, 3],
+ ["\u0BC6\u0BA9", -1, 1],
+ ["\u0BBE\u0B95\u0BBF\u0BAF", -1, 1],
+ ["\u0B95\u0BC1\u0BB0\u0BBF\u0BAF", -1, 3],
+ ["\u0BC1\u0B9F\u0BC8\u0BAF", -1, 1],
+ ["\u0BB2\u0BCD\u0BB2", -1, 2],
+ ["\u0BC1\u0BB3\u0BCD\u0BB3", -1, 1],
+ ["\u0BBE\u0B95\u0BBF", -1, 1],
+ ["\u0BAA\u0B9F\u0BBF", -1, 3],
+ ["\u0BBF\u0BA9\u0BCD\u0BB1\u0BBF", -1, 1],
+ ["\u0BAA\u0BB1\u0BCD\u0BB1\u0BBF", -1, 3],
+ ["\u0BAA\u0B9F\u0BC1", -1, 3],
+ ["\u0BB5\u0BBF\u0B9F\u0BC1", -1, 3],
+ ["\u0BAA\u0B9F\u0BCD\u0B9F\u0BC1", -1, 3],
+ ["\u0BB5\u0BBF\u0B9F\u0BCD\u0B9F\u0BC1", -1, 3],
+ ["\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1", -1, 3],
+ ["\u0BC6\u0BA9\u0BCD\u0BB1\u0BC1", -1, 1],
+ ["\u0BC1\u0B9F\u0BC8", -1, 1],
+ ["\u0BBF\u0BB2\u0BCD\u0BB2\u0BC8", -1, 1],
+ ["\u0BC1\u0B9F\u0BA9\u0BCD", -1, 1],
+ ["\u0BBF\u0B9F\u0BAE\u0BCD", -1, 1],
+ ["\u0BC6\u0BB2\u0BCD\u0BB2\u0BBE\u0BAE\u0BCD", -1, 3],
+ ["\u0BC6\u0BA9\u0BC1\u0BAE\u0BCD", -1, 1]
+ ];
+
+ /** @const */ var a_18 = [
+ ["\u0BBE", -1, -1],
+ ["\u0BBF", -1, -1],
+ ["\u0BC0", -1, -1],
+ ["\u0BC1", -1, -1],
+ ["\u0BC2", -1, -1],
+ ["\u0BC6", -1, -1],
+ ["\u0BC7", -1, -1],
+ ["\u0BC8", -1, -1]
+ ];
+
+ /** @const */ var a_19 = [
+ ["\u0BBE", -1, -1],
+ ["\u0BBF", -1, -1],
+ ["\u0BC0", -1, -1],
+ ["\u0BC1", -1, -1],
+ ["\u0BC2", -1, -1],
+ ["\u0BC6", -1, -1],
+ ["\u0BC7", -1, -1],
+ ["\u0BC8", -1, -1]
+ ];
+
+ /** @const */ var a_20 = [
+ ["\u0BB5\u0BBF\u0B9F", -1, 2],
+ ["\u0BC0", -1, 7],
+ ["\u0BCA\u0B9F\u0BC1", -1, 2],
+ ["\u0BCB\u0B9F\u0BC1", -1, 2],
+ ["\u0BA4\u0BC1", -1, 6],
+ ["\u0BBF\u0BB0\u0BC1\u0BA8\u0BCD\u0BA4\u0BC1", 4, 2],
+ ["\u0BBF\u0BA9\u0BCD\u0BB1\u0BC1", -1, 2],
+ ["\u0BC1\u0B9F\u0BC8", -1, 2],
+ ["\u0BA9\u0BC8", -1, 1],
+ ["\u0B95\u0BA3\u0BCD", -1, 1],
+ ["\u0BBF\u0BA9\u0BCD", -1, 3],
+ ["\u0BAE\u0BC1\u0BA9\u0BCD", -1, 1],
+ ["\u0BBF\u0B9F\u0BAE\u0BCD", -1, 4],
+ ["\u0BBF\u0BB1\u0BCD", -1, 2],
+ ["\u0BAE\u0BC7\u0BB1\u0BCD", -1, 1],
+ ["\u0BB2\u0BCD", -1, 5],
+ ["\u0BBE\u0BAE\u0BB2\u0BCD", 15, 2],
+ ["\u0BBE\u0BB2\u0BCD", 15, 2],
+ ["\u0BBF\u0BB2\u0BCD", 15, 2],
+ ["\u0BAE\u0BC7\u0BB2\u0BCD", 15, 1],
+ ["\u0BC1\u0BB3\u0BCD", -1, 2],
+ ["\u0B95\u0BC0\u0BB4\u0BCD", -1, 1]
+ ];
+
+ /** @const */ var a_21 = [
+ ["\u0B95", -1, -1],
+ ["\u0B9A", -1, -1],
+ ["\u0B9F", -1, -1],
+ ["\u0BA4", -1, -1],
+ ["\u0BAA", -1, -1],
+ ["\u0BB1", -1, -1]
+ ];
+
+ /** @const */ var a_22 = [
+ ["\u0B95", -1, -1],
+ ["\u0B9A", -1, -1],
+ ["\u0B9F", -1, -1],
+ ["\u0BA4", -1, -1],
+ ["\u0BAA", -1, -1],
+ ["\u0BB1", -1, -1]
+ ];
+
+ /** @const */ var a_23 = [
+ ["\u0B85", -1, -1],
+ ["\u0B86", -1, -1],
+ ["\u0B87", -1, -1],
+ ["\u0B88", -1, -1],
+ ["\u0B89", -1, -1],
+ ["\u0B8A", -1, -1],
+ ["\u0B8E", -1, -1],
+ ["\u0B8F", -1, -1],
+ ["\u0B90", -1, -1],
+ ["\u0B92", -1, -1],
+ ["\u0B93", -1, -1],
+ ["\u0B94", -1, -1]
+ ];
+
+ /** @const */ var a_24 = [
+ ["\u0BBE", -1, -1],
+ ["\u0BBF", -1, -1],
+ ["\u0BC0", -1, -1],
+ ["\u0BC1", -1, -1],
+ ["\u0BC2", -1, -1],
+ ["\u0BC6", -1, -1],
+ ["\u0BC7", -1, -1],
+ ["\u0BC8", -1, -1]
+ ];
+
+ /** @const */ var a_25 = [
+ ["\u0B95", -1, 1],
+ ["\u0BA4", -1, 1],
+ ["\u0BA9", -1, 1],
+ ["\u0BAA", -1, 1],
+ ["\u0BAF", -1, 1],
+ ["\u0BBE", -1, 5],
+ ["\u0B95\u0BC1", -1, 6],
+ ["\u0BAA\u0B9F\u0BC1", -1, 1],
+ ["\u0BA4\u0BC1", -1, 3],
+ ["\u0BBF\u0BB1\u0BCD\u0BB1\u0BC1", -1, 1],
+ ["\u0BA9\u0BC8", -1, 1],
+ ["\u0BB5\u0BC8", -1, 1],
+ ["\u0BA9\u0BA9\u0BCD", -1, 1],
+ ["\u0BAA\u0BA9\u0BCD", -1, 1],
+ ["\u0BB5\u0BA9\u0BCD", -1, 2],
+ ["\u0BBE\u0BA9\u0BCD", -1, 4],
+ ["\u0BA9\u0BBE\u0BA9\u0BCD", 15, 1],
+ ["\u0BAE\u0BBF\u0BA9\u0BCD", -1, 1],
+ ["\u0BA9\u0BC6\u0BA9\u0BCD", -1, 1],
+ ["\u0BC7\u0BA9\u0BCD", -1, 5],
+ ["\u0BA9\u0BAE\u0BCD", -1, 1],
+ ["\u0BAA\u0BAE\u0BCD", -1, 1],
+ ["\u0BBE\u0BAE\u0BCD", -1, 5],
+ ["\u0B95\u0BC1\u0BAE\u0BCD", -1, 1],
+ ["\u0B9F\u0BC1\u0BAE\u0BCD", -1, 5],
+ ["\u0BA4\u0BC1\u0BAE\u0BCD", -1, 1],
+ ["\u0BB1\u0BC1\u0BAE\u0BCD", -1, 1],
+ ["\u0BC6\u0BAE\u0BCD", -1, 5],
+ ["\u0BC7\u0BAE\u0BCD", -1, 5],
+ ["\u0BCB\u0BAE\u0BCD", -1, 5],
+ ["\u0BBE\u0BAF\u0BCD", -1, 5],
+ ["\u0BA9\u0BB0\u0BCD", -1, 1],
+ ["\u0BAA\u0BB0\u0BCD", -1, 1],
+ ["\u0BC0\u0BAF\u0BB0\u0BCD", -1, 5],
+ ["\u0BB5\u0BB0\u0BCD", -1, 1],
+ ["\u0BBE\u0BB0\u0BCD", -1, 5],
+ ["\u0BA9\u0BBE\u0BB0\u0BCD", 35, 1],
+ ["\u0BAE\u0BBE\u0BB0\u0BCD", 35, 1],
+ ["\u0B95\u0BCA\u0BA3\u0BCD\u0B9F\u0BBF\u0BB0\u0BCD", -1, 1],
+ ["\u0BA9\u0BBF\u0BB0\u0BCD", -1, 5],
+ ["\u0BC0\u0BB0\u0BCD", -1, 5],
+ ["\u0BA9\u0BB3\u0BCD", -1, 1],
+ ["\u0BAA\u0BB3\u0BCD", -1, 1],
+ ["\u0BB5\u0BB3\u0BCD", -1, 1],
+ ["\u0BBE\u0BB3\u0BCD", -1, 5],
+ ["\u0BA9\u0BBE\u0BB3\u0BCD", 44, 1]
+ ];
+
+ /** @const */ var a_26 = [
+ ["\u0B95\u0BBF\u0BB1", -1, -1],
+ ["\u0B95\u0BBF\u0BA9\u0BCD\u0BB1", -1, -1],
+ ["\u0BBE\u0BA8\u0BBF\u0BA9\u0BCD\u0BB1", -1, -1],
+ ["\u0B95\u0BBF\u0BB1\u0BCD", -1, -1],
+ ["\u0B95\u0BBF\u0BA9\u0BCD\u0BB1\u0BCD", -1, -1],
+ ["\u0BBE\u0BA8\u0BBF\u0BA9\u0BCD\u0BB1\u0BCD", -1, -1]
+ ];
+
+ var /** boolean */ B_found_vetrumai_urupu = false;
+ var /** boolean */ B_found_a_match = false;
+
+
+ /** @return {boolean} */
+ function r_has_min_length() {
+ return base.current.length > 4;
+ };
+
+ /** @return {boolean} */
+ function r_fix_va_start() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_0);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u0B93"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u0B92"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u0B89"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("\u0B8A"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_fix_endings() {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ if (!r_fix_ending())
+ {
+ break lab1;
+ }
+ continue;
+ }
+ base.cursor = v_2;
+ break;
+ }
+ }
+ base.cursor = v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_question_prefixes() {
+ base.bra = base.cursor;
+ if (!(base.eq_s("\u0B8E")))
+ {
+ return false;
+ }
+ if (base.find_among(a_1) == 0)
+ {
+ return false;
+ }
+ if (!(base.eq_s("\u0BCD")))
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.cursor;
+ r_fix_va_start();
+ base.cursor = v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_fix_ending() {
+ var /** number */ among_var;
+ if (base.current.length <= 3)
+ {
+ return false;
+ }
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_5);
+ if (among_var == 0)
+ {
+ break lab1;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ if (base.find_among_b(a_2) == 0)
+ {
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u0BB3\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("\u0BB2\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("\u0B9F\u0BC1"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!B_found_vetrumai_urupu)
+ {
+ break lab1;
+ }
+ {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab2: {
+ if (!(base.eq_s_b("\u0BC8")))
+ {
+ break lab2;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_3;
+ }
+ if (!base.slice_from("\u0BAE\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab3: {
+ if (base.find_among_b(a_3) == 0)
+ {
+ break lab3;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_4;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 9:
+ among_var = base.find_among_b(a_4);
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u0BAE\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ }
+ break;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u0BCD")))
+ {
+ return false;
+ }
+ lab4: {
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab5: {
+ if (base.find_among_b(a_6) == 0)
+ {
+ break lab5;
+ }
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ lab6: {
+ if (!(base.eq_s_b("\u0BCD")))
+ {
+ base.cursor = base.limit - v_6;
+ break lab6;
+ }
+ if (base.find_among_b(a_7) == 0)
+ {
+ base.cursor = base.limit - v_6;
+ break lab6;
+ }
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab4;
+ }
+ base.cursor = base.limit - v_5;
+ lab7: {
+ if (base.find_among_b(a_8) == 0)
+ {
+ break lab7;
+ }
+ base.bra = base.cursor;
+ if (!(base.eq_s_b("\u0BCD")))
+ {
+ break lab7;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab4;
+ }
+ base.cursor = base.limit - v_5;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ if (base.find_among_b(a_9) == 0)
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_7;
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ }
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_pronoun_prefixes() {
+ base.bra = base.cursor;
+ if (base.find_among(a_10) == 0)
+ {
+ return false;
+ }
+ if (base.find_among(a_11) == 0)
+ {
+ return false;
+ }
+ if (!(base.eq_s("\u0BCD")))
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.cursor;
+ r_fix_va_start();
+ base.cursor = v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_plural_suffix() {
+ var /** number */ among_var;
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_13);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (base.find_among_b(a_12) == 0)
+ {
+ break lab1;
+ }
+ if (!base.slice_from("\u0BC1\u0B99\u0BCD"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u0BB2\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u0BB3\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_question_suffixes() {
+ if (!r_has_min_length())
+ {
+ return false;
+ }
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_14) == 0)
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ }
+ base.cursor = base.limit - v_1;
+ base.cursor = base.limit_backward;
+ r_fix_endings();
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_command_suffixes() {
+ if (!r_has_min_length())
+ {
+ return false;
+ }
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_15) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_um() {
+ if (!r_has_min_length())
+ {
+ return false;
+ }
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u0BC1\u0BAE\u0BCD")))
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ base.cursor = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ r_fix_ending();
+ base.cursor = v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_common_word_endings() {
+ var /** number */ among_var;
+ if (!r_has_min_length())
+ {
+ return false;
+ }
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_17);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (base.find_among_b(a_16) == 0)
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_1;
+ }
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ base.cursor = base.limit_backward;
+ r_fix_endings();
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_vetrumai_urupukal() {
+ var /** number */ among_var;
+ B_found_vetrumai_urupu = false;
+ if (!r_has_min_length())
+ {
+ return false;
+ }
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_20);
+ if (among_var == 0)
+ {
+ break lab1;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab2: {
+ if (!(base.eq_s_b("\u0BAE")))
+ {
+ break lab2;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_3;
+ }
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (base.current.length < 7)
+ {
+ break lab1;
+ }
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab3: {
+ if (base.find_among_b(a_18) == 0)
+ {
+ break lab3;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_4;
+ }
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ {
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab4: {
+ if (base.find_among_b(a_19) == 0)
+ {
+ break lab4;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_5;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("\u0BBF"))
+ {
+ return false;
+ }
+ break;
+ }
+ base.cursor = base.limit - v_2;
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u0BC8")))
+ {
+ return false;
+ }
+ lab5: {
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab6: {
+ {
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ lab7: {
+ if (base.find_among_b(a_21) == 0)
+ {
+ break lab7;
+ }
+ break lab6;
+ }
+ base.cursor = base.limit - v_8;
+ }
+ break lab5;
+ }
+ base.cursor = base.limit - v_7;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
+ if (base.find_among_b(a_22) == 0)
+ {
+ return false;
+ }
+ if (!(base.eq_s_b("\u0BCD")))
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_9;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_6;
+ }
+ B_found_vetrumai_urupu = true;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
+ lab8: {
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u0BBF\u0BA9\u0BCD")))
+ {
+ break lab8;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ }
+ base.cursor = base.limit - v_10;
+ base.cursor = base.limit_backward;
+ r_fix_endings();
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_tense_suffixes() {
+ while(true)
+ {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ if (!r_remove_tense_suffix())
+ {
+ break lab0;
+ }
+ continue;
+ }
+ base.cursor = v_1;
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_tense_suffix() {
+ var /** number */ among_var;
+ B_found_a_match = false;
+ if (!r_has_min_length())
+ {
+ return false;
+ }
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_25);
+ if (among_var == 0)
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab1: {
+ if (base.find_among_b(a_23) == 0)
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_3;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 3:
+ {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab2: {
+ if (base.find_among_b(a_24) == 0)
+ {
+ break lab2;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_4;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 4:
+ {
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab3: {
+ if (!(base.eq_s_b("\u0B9A")))
+ {
+ break lab3;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_5;
+ }
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ if (!(base.eq_s_b("\u0BCD")))
+ {
+ break lab0;
+ }
+ base.cursor = base.limit - v_6;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ B_found_a_match = true;
+ base.cursor = base.limit - v_2;
+ }
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab4: {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_26) == 0)
+ {
+ break lab4;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_found_a_match = true;
+ }
+ base.cursor = base.limit - v_7;
+ base.cursor = base.limit_backward;
+ r_fix_endings();
+ if (!B_found_a_match)
+ {
+ return false;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ B_found_vetrumai_urupu = false;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ r_fix_ending();
+ base.cursor = v_1;
+ if (!r_has_min_length())
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_2 = base.cursor;
+ r_remove_question_prefixes();
+ base.cursor = v_2;
+ /** @const */ var /** number */ v_3 = base.cursor;
+ r_remove_pronoun_prefixes();
+ base.cursor = v_3;
+ /** @const */ var /** number */ v_4 = base.cursor;
+ r_remove_question_suffixes();
+ base.cursor = v_4;
+ /** @const */ var /** number */ v_5 = base.cursor;
+ r_remove_um();
+ base.cursor = v_5;
+ /** @const */ var /** number */ v_6 = base.cursor;
+ r_remove_common_word_endings();
+ base.cursor = v_6;
+ /** @const */ var /** number */ v_7 = base.cursor;
+ r_remove_vetrumai_urupukal();
+ base.cursor = v_7;
+ /** @const */ var /** number */ v_8 = base.cursor;
+ r_remove_plural_suffix();
+ base.cursor = v_8;
+ /** @const */ var /** number */ v_9 = base.cursor;
+ r_remove_command_suffixes();
+ base.cursor = v_9;
+ /** @const */ var /** number */ v_10 = base.cursor;
+ r_remove_tense_suffixes();
+ base.cursor = v_10;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
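The a_N tables in the new Tamil stemmer follow the layout every Snowball among table uses: [literal, link, result]. The literal is the string to match, the link (or -1) points at a shorter related entry and only serves to speed up the real search, and the result is the code that the `switch (among_var)` blocks dispatch on. find_among_b returns the result of the longest literal ending at the cursor and moves the cursor past it. A minimal conceptual sketch of that lookup, assuming a BaseStemmer-like object (a linear scan for clarity; the shipped implementation searches the sorted table instead):

    // Conceptual sketch of the longest-suffix lookup behind find_among_b.
    // rows is one of the a_N tables above: [literal, link, result].
    function findAmongBSketch(base, rows) {
      var best = 0, bestLen = -1;
      for (var i = 0; i < rows.length; i++) {
        var lit = rows[i][0];
        var start = base.cursor - lit.length;
        if (start >= base.limit_backward &&
            base.current.slice(start, base.cursor) === lit &&
            lit.length > bestLen) {
          bestLen = lit.length;
          best = rows[i][2];    // result code; 0 is reserved for "no match"
        }
      }
      if (best !== 0) {
        base.cursor -= bestLen; // consume the matched suffix
      }
      return best;
    }

For membership-only tests (result -1 throughout, as in a_2 or a_9 above) the callers simply compare the return value against 0.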
diff --git a/sphinx/search/non-minified-js/turkish-stemmer.js b/sphinx/search/non-minified-js/turkish-stemmer.js
index 8ba74b9218e..c57ba798066 100644
--- a/sphinx/search/non-minified-js/turkish-stemmer.js
+++ b/sphinx/search/non-minified-js/turkish-stemmer.js
@@ -1,8 +1,9 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from turkish.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-TurkishStemmer = function() {
+var TurkishStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
["m", -1, -1],
["n", -1, -1],
@@ -216,230 +217,104 @@ TurkishStemmer = function() {
/** @return {boolean} */
function r_check_vowel_harmony() {
- var /** number */ v_1 = base.limit - base.cursor;
- golab0: while(true)
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ if (!base.go_out_grouping_b(g_vowel, 97, 305))
{
- var /** number */ v_2 = base.limit - base.cursor;
- lab1: {
- if (!(base.in_grouping_b(g_vowel, 97, 305)))
- {
- break lab1;
- }
- base.cursor = base.limit - v_2;
- break golab0;
- }
- base.cursor = base.limit - v_2;
- if (base.cursor <= base.limit_backward)
- {
- return false;
- }
- base.cursor--;
+ return false;
}
- lab2: {
- var /** number */ v_3 = base.limit - base.cursor;
- lab3: {
+ lab0: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
if (!(base.eq_s_b("a")))
{
- break lab3;
+ break lab1;
}
- golab4: while(true)
+ if (!base.go_out_grouping_b(g_vowel1, 97, 305))
{
- var /** number */ v_4 = base.limit - base.cursor;
- lab5: {
- if (!(base.in_grouping_b(g_vowel1, 97, 305)))
- {
- break lab5;
- }
- base.cursor = base.limit - v_4;
- break golab4;
- }
- base.cursor = base.limit - v_4;
- if (base.cursor <= base.limit_backward)
- {
- break lab3;
- }
- base.cursor--;
+ break lab1;
}
- break lab2;
+ break lab0;
}
- base.cursor = base.limit - v_3;
- lab6: {
+ base.cursor = base.limit - v_2;
+ lab2: {
if (!(base.eq_s_b("e")))
{
- break lab6;
+ break lab2;
}
- golab7: while(true)
+ if (!base.go_out_grouping_b(g_vowel2, 101, 252))
{
- var /** number */ v_5 = base.limit - base.cursor;
- lab8: {
- if (!(base.in_grouping_b(g_vowel2, 101, 252)))
- {
- break lab8;
- }
- base.cursor = base.limit - v_5;
- break golab7;
- }
- base.cursor = base.limit - v_5;
- if (base.cursor <= base.limit_backward)
- {
- break lab6;
- }
- base.cursor--;
+ break lab2;
}
- break lab2;
+ break lab0;
}
- base.cursor = base.limit - v_3;
- lab9: {
+ base.cursor = base.limit - v_2;
+ lab3: {
if (!(base.eq_s_b("\u0131")))
{
- break lab9;
+ break lab3;
}
- golab10: while(true)
+ if (!base.go_out_grouping_b(g_vowel3, 97, 305))
{
- var /** number */ v_6 = base.limit - base.cursor;
- lab11: {
- if (!(base.in_grouping_b(g_vowel3, 97, 305)))
- {
- break lab11;
- }
- base.cursor = base.limit - v_6;
- break golab10;
- }
- base.cursor = base.limit - v_6;
- if (base.cursor <= base.limit_backward)
- {
- break lab9;
- }
- base.cursor--;
+ break lab3;
}
- break lab2;
+ break lab0;
}
- base.cursor = base.limit - v_3;
- lab12: {
+ base.cursor = base.limit - v_2;
+ lab4: {
if (!(base.eq_s_b("i")))
{
- break lab12;
+ break lab4;
}
- golab13: while(true)
+ if (!base.go_out_grouping_b(g_vowel4, 101, 105))
{
- var /** number */ v_7 = base.limit - base.cursor;
- lab14: {
- if (!(base.in_grouping_b(g_vowel4, 101, 105)))
- {
- break lab14;
- }
- base.cursor = base.limit - v_7;
- break golab13;
- }
- base.cursor = base.limit - v_7;
- if (base.cursor <= base.limit_backward)
- {
- break lab12;
- }
- base.cursor--;
+ break lab4;
}
- break lab2;
+ break lab0;
}
- base.cursor = base.limit - v_3;
- lab15: {
+ base.cursor = base.limit - v_2;
+ lab5: {
if (!(base.eq_s_b("o")))
{
- break lab15;
+ break lab5;
}
- golab16: while(true)
+ if (!base.go_out_grouping_b(g_vowel5, 111, 117))
{
- var /** number */ v_8 = base.limit - base.cursor;
- lab17: {
- if (!(base.in_grouping_b(g_vowel5, 111, 117)))
- {
- break lab17;
- }
- base.cursor = base.limit - v_8;
- break golab16;
- }
- base.cursor = base.limit - v_8;
- if (base.cursor <= base.limit_backward)
- {
- break lab15;
- }
- base.cursor--;
+ break lab5;
}
- break lab2;
+ break lab0;
}
- base.cursor = base.limit - v_3;
- lab18: {
+ base.cursor = base.limit - v_2;
+ lab6: {
if (!(base.eq_s_b("\u00F6")))
{
- break lab18;
+ break lab6;
}
- golab19: while(true)
+ if (!base.go_out_grouping_b(g_vowel6, 246, 252))
{
- var /** number */ v_9 = base.limit - base.cursor;
- lab20: {
- if (!(base.in_grouping_b(g_vowel6, 246, 252)))
- {
- break lab20;
- }
- base.cursor = base.limit - v_9;
- break golab19;
- }
- base.cursor = base.limit - v_9;
- if (base.cursor <= base.limit_backward)
- {
- break lab18;
- }
- base.cursor--;
+ break lab6;
}
- break lab2;
+ break lab0;
}
- base.cursor = base.limit - v_3;
- lab21: {
+ base.cursor = base.limit - v_2;
+ lab7: {
if (!(base.eq_s_b("u")))
{
- break lab21;
+ break lab7;
}
- golab22: while(true)
+ if (!base.go_out_grouping_b(g_vowel5, 111, 117))
{
- var /** number */ v_10 = base.limit - base.cursor;
- lab23: {
- if (!(base.in_grouping_b(g_vowel5, 111, 117)))
- {
- break lab23;
- }
- base.cursor = base.limit - v_10;
- break golab22;
- }
- base.cursor = base.limit - v_10;
- if (base.cursor <= base.limit_backward)
- {
- break lab21;
- }
- base.cursor--;
+ break lab7;
}
- break lab2;
+ break lab0;
}
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
if (!(base.eq_s_b("\u00FC")))
{
return false;
}
- golab24: while(true)
+ if (!base.go_out_grouping_b(g_vowel6, 246, 252))
{
- var /** number */ v_11 = base.limit - base.cursor;
- lab25: {
- if (!(base.in_grouping_b(g_vowel6, 246, 252)))
- {
- break lab25;
- }
- base.cursor = base.limit - v_11;
- break golab24;
- }
- base.cursor = base.limit - v_11;
- if (base.cursor <= base.limit_backward)
- {
- return false;
- }
- base.cursor--;
+ return false;
}
}
base.cursor = base.limit - v_1;
@@ -449,13 +324,13 @@ TurkishStemmer = function() {
/** @return {boolean} */
function r_mark_suffix_with_optional_n_consonant() {
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!(base.eq_s_b("n")))
{
break lab1;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
if (!(base.in_grouping_b(g_vowel, 97, 305)))
{
break lab1;
@@ -465,9 +340,9 @@ TurkishStemmer = function() {
}
base.cursor = base.limit - v_1;
{
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab2: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
if (!(base.eq_s_b("n")))
{
break lab2;
@@ -477,7 +352,7 @@ TurkishStemmer = function() {
}
base.cursor = base.limit - v_3;
}
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
if (base.cursor <= base.limit_backward)
{
return false;
@@ -495,13 +370,13 @@ TurkishStemmer = function() {
/** @return {boolean} */
function r_mark_suffix_with_optional_s_consonant() {
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!(base.eq_s_b("s")))
{
break lab1;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
if (!(base.in_grouping_b(g_vowel, 97, 305)))
{
break lab1;
@@ -511,9 +386,9 @@ TurkishStemmer = function() {
}
base.cursor = base.limit - v_1;
{
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab2: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
if (!(base.eq_s_b("s")))
{
break lab2;
@@ -523,7 +398,7 @@ TurkishStemmer = function() {
}
base.cursor = base.limit - v_3;
}
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
if (base.cursor <= base.limit_backward)
{
return false;
@@ -541,13 +416,13 @@ TurkishStemmer = function() {
/** @return {boolean} */
function r_mark_suffix_with_optional_y_consonant() {
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!(base.eq_s_b("y")))
{
break lab1;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
if (!(base.in_grouping_b(g_vowel, 97, 305)))
{
break lab1;
@@ -557,9 +432,9 @@ TurkishStemmer = function() {
}
base.cursor = base.limit - v_1;
{
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab2: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
if (!(base.eq_s_b("y")))
{
break lab2;
@@ -569,7 +444,7 @@ TurkishStemmer = function() {
}
base.cursor = base.limit - v_3;
}
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
if (base.cursor <= base.limit_backward)
{
return false;
@@ -587,13 +462,13 @@ TurkishStemmer = function() {
/** @return {boolean} */
function r_mark_suffix_with_optional_U_vowel() {
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!(base.in_grouping_b(g_U, 105, 305)))
{
break lab1;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
if (!(base.out_grouping_b(g_vowel, 97, 305)))
{
break lab1;
@@ -603,9 +478,9 @@ TurkishStemmer = function() {
}
base.cursor = base.limit - v_1;
{
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab2: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
if (!(base.in_grouping_b(g_U, 105, 305)))
{
break lab2;
@@ -615,7 +490,7 @@ TurkishStemmer = function() {
}
base.cursor = base.limit - v_3;
}
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
if (base.cursor <= base.limit_backward)
{
return false;
@@ -1010,10 +885,10 @@ TurkishStemmer = function() {
base.ket = base.cursor;
B_continue_stemming_noun_suffixes = true;
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
lab2: {
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab3: {
if (!r_mark_ymUs_())
{
@@ -1052,7 +927,7 @@ TurkishStemmer = function() {
break lab6;
}
lab7: {
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab8: {
if (!r_mark_sUnUz())
{
@@ -1111,11 +986,11 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab14: {
base.ket = base.cursor;
lab15: {
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab16: {
if (!r_mark_DUr())
{
@@ -1157,7 +1032,7 @@ TurkishStemmer = function() {
break lab19;
}
lab20: {
- var /** number */ v_6 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
lab21: {
if (!r_mark_yDU())
{
@@ -1176,7 +1051,7 @@ TurkishStemmer = function() {
base.cursor = base.limit - v_1;
lab22: {
lab23: {
- var /** number */ v_7 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
lab24: {
if (!r_mark_sUnUz())
{
@@ -1211,7 +1086,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_8 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
lab27: {
base.ket = base.cursor;
if (!r_mark_ymUs_())
@@ -1232,11 +1107,11 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_9 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
lab28: {
base.ket = base.cursor;
lab29: {
- var /** number */ v_10 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
lab30: {
if (!r_mark_sUnUz())
{
@@ -1301,7 +1176,7 @@ TurkishStemmer = function() {
return false;
}
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!r_mark_DA())
{
@@ -1312,11 +1187,11 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab2: {
base.ket = base.cursor;
lab3: {
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab4: {
if (!r_mark_lAr())
{
@@ -1327,7 +1202,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab5: {
if (!r_stem_suffix_chain_before_ki())
{
@@ -1348,7 +1223,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab6: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -1382,11 +1257,11 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_6 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
lab8: {
base.ket = base.cursor;
lab9: {
- var /** number */ v_7 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
lab10: {
if (!r_mark_lArI())
{
@@ -1403,7 +1278,7 @@ TurkishStemmer = function() {
lab11: {
base.ket = base.cursor;
lab12: {
- var /** number */ v_8 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
lab13: {
if (!r_mark_possessives())
{
@@ -1422,7 +1297,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_9 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
lab14: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -1459,7 +1334,7 @@ TurkishStemmer = function() {
return false;
}
lab15: {
- var /** number */ v_10 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
lab16: {
if (!r_mark_lArI())
{
@@ -1483,7 +1358,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_11 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_11 = base.limit - base.cursor;
lab18: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -1517,7 +1392,7 @@ TurkishStemmer = function() {
/** @return {boolean} */
function r_stem_noun_suffixes() {
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -1529,7 +1404,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab2: {
if (!r_stem_suffix_chain_before_ki())
{
@@ -1551,10 +1426,10 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab4: {
lab5: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab6: {
base.ket = base.cursor;
if (!r_mark_lArI())
@@ -1572,7 +1447,7 @@ TurkishStemmer = function() {
lab7: {
base.ket = base.cursor;
lab8: {
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab9: {
if (!r_mark_possessives())
{
@@ -1591,7 +1466,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_6 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
lab10: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -1637,7 +1512,7 @@ TurkishStemmer = function() {
lab11: {
base.ket = base.cursor;
lab12: {
- var /** number */ v_7 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
lab13: {
if (!r_mark_ndA())
{
@@ -1652,7 +1527,7 @@ TurkishStemmer = function() {
}
}
lab14: {
- var /** number */ v_8 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
lab15: {
if (!r_mark_lArI())
{
@@ -1676,7 +1551,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_9 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
lab17: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -1709,7 +1584,7 @@ TurkishStemmer = function() {
lab18: {
base.ket = base.cursor;
lab19: {
- var /** number */ v_10 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
lab20: {
if (!r_mark_ndAn())
{
@@ -1724,7 +1599,7 @@ TurkishStemmer = function() {
}
}
lab21: {
- var /** number */ v_11 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_11 = base.limit - base.cursor;
lab22: {
if (!r_mark_sU())
{
@@ -1735,7 +1610,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_12 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_12 = base.limit - base.cursor;
lab23: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -1776,11 +1651,11 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_13 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_13 = base.limit - base.cursor;
lab25: {
base.ket = base.cursor;
lab26: {
- var /** number */ v_14 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_14 = base.limit - base.cursor;
lab27: {
if (!r_mark_possessives())
{
@@ -1791,7 +1666,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_15 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_15 = base.limit - base.cursor;
lab28: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -1823,7 +1698,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_16 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_16 = base.limit - base.cursor;
lab30: {
if (!r_stem_suffix_chain_before_ki())
{
@@ -1847,7 +1722,7 @@ TurkishStemmer = function() {
lab31: {
base.ket = base.cursor;
lab32: {
- var /** number */ v_17 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_17 = base.limit - base.cursor;
lab33: {
if (!r_mark_nUn())
{
@@ -1866,10 +1741,10 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_18 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_18 = base.limit - base.cursor;
lab34: {
lab35: {
- var /** number */ v_19 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_19 = base.limit - base.cursor;
lab36: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -1891,7 +1766,7 @@ TurkishStemmer = function() {
lab37: {
base.ket = base.cursor;
lab38: {
- var /** number */ v_20 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_20 = base.limit - base.cursor;
lab39: {
if (!r_mark_possessives())
{
@@ -1910,7 +1785,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_21 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_21 = base.limit - base.cursor;
lab40: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -1967,7 +1842,7 @@ TurkishStemmer = function() {
lab43: {
base.ket = base.cursor;
lab44: {
- var /** number */ v_22 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_22 = base.limit - base.cursor;
lab45: {
if (!r_mark_DA())
{
@@ -1994,11 +1869,11 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_23 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_23 = base.limit - base.cursor;
lab47: {
base.ket = base.cursor;
lab48: {
- var /** number */ v_24 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_24 = base.limit - base.cursor;
lab49: {
if (!r_mark_possessives())
{
@@ -2009,7 +1884,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_25 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_25 = base.limit - base.cursor;
lab50: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -2044,7 +1919,7 @@ TurkishStemmer = function() {
base.cursor = base.limit - v_1;
base.ket = base.cursor;
lab51: {
- var /** number */ v_26 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_26 = base.limit - base.cursor;
lab52: {
if (!r_mark_possessives())
{
@@ -2063,7 +1938,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_27 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_27 = base.limit - base.cursor;
lab53: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -2127,9 +2002,10 @@ TurkishStemmer = function() {
/** @return {boolean} */
function r_append_U_to_stems_ending_with_d_or_g() {
- var /** number */ v_1 = base.limit - base.cursor;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
lab0: {
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!(base.eq_s_b("d")))
{
@@ -2137,186 +2013,105 @@ TurkishStemmer = function() {
}
break lab0;
}
- base.cursor = base.limit - v_2;
+ base.cursor = base.limit - v_1;
if (!(base.eq_s_b("g")))
{
return false;
}
}
- base.cursor = base.limit - v_1;
+ if (!base.go_out_grouping_b(g_vowel, 97, 305))
+ {
+ return false;
+ }
lab2: {
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab3: {
- var /** number */ v_4 = base.limit - base.cursor;
- golab4: while(true)
- {
- var /** number */ v_5 = base.limit - base.cursor;
+ lab4: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab5: {
- if (!(base.in_grouping_b(g_vowel, 97, 305)))
- {
- break lab5;
- }
- base.cursor = base.limit - v_5;
- break golab4;
- }
- base.cursor = base.limit - v_5;
- if (base.cursor <= base.limit_backward)
- {
- break lab3;
- }
- base.cursor--;
- }
- lab6: {
- var /** number */ v_6 = base.limit - base.cursor;
- lab7: {
if (!(base.eq_s_b("a")))
{
- break lab7;
+ break lab5;
}
- break lab6;
+ break lab4;
}
- base.cursor = base.limit - v_6;
+ base.cursor = base.limit - v_3;
if (!(base.eq_s_b("\u0131")))
{
break lab3;
}
}
- base.cursor = base.limit - v_4;
+ if (!base.slice_from("\u0131"))
{
- var /** number */ c1 = base.cursor;
- base.insert(base.cursor, base.cursor, "\u0131");
- base.cursor = c1;
+ return false;
}
break lab2;
}
- base.cursor = base.limit - v_3;
- lab8: {
- var /** number */ v_7 = base.limit - base.cursor;
- golab9: while(true)
- {
- var /** number */ v_8 = base.limit - base.cursor;
- lab10: {
- if (!(base.in_grouping_b(g_vowel, 97, 305)))
- {
- break lab10;
- }
- base.cursor = base.limit - v_8;
- break golab9;
- }
- base.cursor = base.limit - v_8;
- if (base.cursor <= base.limit_backward)
- {
- break lab8;
- }
- base.cursor--;
- }
- lab11: {
- var /** number */ v_9 = base.limit - base.cursor;
- lab12: {
+ base.cursor = base.limit - v_2;
+ lab6: {
+ lab7: {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab8: {
if (!(base.eq_s_b("e")))
{
- break lab12;
+ break lab8;
}
- break lab11;
+ break lab7;
}
- base.cursor = base.limit - v_9;
+ base.cursor = base.limit - v_4;
if (!(base.eq_s_b("i")))
{
- break lab8;
+ break lab6;
}
}
- base.cursor = base.limit - v_7;
+ if (!base.slice_from("i"))
{
- var /** number */ c2 = base.cursor;
- base.insert(base.cursor, base.cursor, "i");
- base.cursor = c2;
+ return false;
}
break lab2;
}
- base.cursor = base.limit - v_3;
- lab13: {
- var /** number */ v_10 = base.limit - base.cursor;
- golab14: while(true)
- {
- var /** number */ v_11 = base.limit - base.cursor;
- lab15: {
- if (!(base.in_grouping_b(g_vowel, 97, 305)))
- {
- break lab15;
- }
- base.cursor = base.limit - v_11;
- break golab14;
- }
- base.cursor = base.limit - v_11;
- if (base.cursor <= base.limit_backward)
- {
- break lab13;
- }
- base.cursor--;
- }
- lab16: {
- var /** number */ v_12 = base.limit - base.cursor;
- lab17: {
+ base.cursor = base.limit - v_2;
+ lab9: {
+ lab10: {
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab11: {
if (!(base.eq_s_b("o")))
{
- break lab17;
+ break lab11;
}
- break lab16;
+ break lab10;
}
- base.cursor = base.limit - v_12;
+ base.cursor = base.limit - v_5;
if (!(base.eq_s_b("u")))
{
- break lab13;
- }
- }
- base.cursor = base.limit - v_10;
- {
- var /** number */ c3 = base.cursor;
- base.insert(base.cursor, base.cursor, "u");
- base.cursor = c3;
- }
- break lab2;
- }
- base.cursor = base.limit - v_3;
- var /** number */ v_13 = base.limit - base.cursor;
- golab18: while(true)
- {
- var /** number */ v_14 = base.limit - base.cursor;
- lab19: {
- if (!(base.in_grouping_b(g_vowel, 97, 305)))
- {
- break lab19;
+ break lab9;
}
- base.cursor = base.limit - v_14;
- break golab18;
}
- base.cursor = base.limit - v_14;
- if (base.cursor <= base.limit_backward)
+ if (!base.slice_from("u"))
{
return false;
}
- base.cursor--;
+ break lab2;
}
- lab20: {
- var /** number */ v_15 = base.limit - base.cursor;
- lab21: {
+ base.cursor = base.limit - v_2;
+ lab12: {
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ lab13: {
if (!(base.eq_s_b("\u00F6")))
{
- break lab21;
+ break lab13;
}
- break lab20;
+ break lab12;
}
- base.cursor = base.limit - v_15;
+ base.cursor = base.limit - v_6;
if (!(base.eq_s_b("\u00FC")))
{
return false;
}
}
- base.cursor = base.limit - v_13;
+ if (!base.slice_from("\u00FC"))
{
- var /** number */ c4 = base.cursor;
- base.insert(base.cursor, base.cursor, "\u00FC");
- base.cursor = c4;
+ return false;
}
}
return true;
@@ -2328,7 +2123,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
if (!(base.eq_s_b("soy")))
{
@@ -2344,39 +2139,92 @@ TurkishStemmer = function() {
};
/** @return {boolean} */
- function r_more_than_one_syllable_word() {
- var /** number */ v_1 = base.cursor;
- {
- var v_2 = 2;
- while(true)
+ function r_remove_proper_noun_suffix() {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ base.bra = base.cursor;
+ golab1: while(true)
{
- var /** number */ v_3 = base.cursor;
- lab0: {
- golab1: while(true)
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab2: {
{
- lab2: {
- if (!(base.in_grouping(g_vowel, 97, 305)))
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab3: {
+ if (!(base.eq_s("'")))
{
- break lab2;
+ break lab3;
}
- break golab1;
- }
- if (base.cursor >= base.limit)
- {
- break lab0;
+ break lab2;
}
- base.cursor++;
+ base.cursor = v_3;
}
- v_2--;
- continue;
+ base.cursor = v_2;
+ break golab1;
}
- base.cursor = v_3;
- break;
+ base.cursor = v_2;
+ if (base.cursor >= base.limit)
+ {
+ break lab0;
+ }
+ base.cursor++;
}
- if (v_2 > 0)
+ base.ket = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab4: {
+ {
+ /** @const */ var /** number */ c1 = base.cursor + 2;
+ if (c1 > base.limit)
+ {
+ break lab4;
+ }
+ base.cursor = c1;
+ }
+ golab5: while(true)
+ {
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab6: {
+ if (!(base.eq_s("'")))
+ {
+ break lab6;
+ }
+ base.cursor = v_5;
+ break golab5;
+ }
+ base.cursor = v_5;
+ if (base.cursor >= base.limit)
+ {
+ break lab4;
+ }
+ base.cursor++;
+ }
+ base.bra = base.cursor;
+ base.cursor = base.limit;
+ base.ket = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ base.cursor = v_4;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_more_than_one_syllable_word() {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ for (var /** number */ v_2 = 2; v_2 > 0; v_2--)
+ {
+ if (!base.go_out_grouping(g_vowel, 97, 305))
{
return false;
}
+ base.cursor++;
}
base.cursor = v_1;
return true;
@@ -2386,7 +2234,7 @@ TurkishStemmer = function() {
function r_postlude() {
base.limit_backward = base.cursor; base.cursor = base.limit;
{
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
if (!r_is_reserved_word())
{
@@ -2396,10 +2244,10 @@ TurkishStemmer = function() {
}
base.cursor = base.limit - v_1;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
r_append_U_to_stems_ending_with_d_or_g();
base.cursor = base.limit - v_2;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
r_post_process_last_consonants();
base.cursor = base.limit - v_3;
base.cursor = base.limit_backward;
@@ -2407,19 +2255,20 @@ TurkishStemmer = function() {
};
this.stem = /** @return {boolean} */ function() {
+ r_remove_proper_noun_suffix();
if (!r_more_than_one_syllable_word())
{
return false;
}
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
r_stem_nominal_verb_suffixes();
base.cursor = base.limit - v_1;
if (!B_continue_stemming_noun_suffixes)
{
return false;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
r_stem_noun_suffixes();
base.cursor = base.limit - v_2;
base.cursor = base.limit_backward;
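Most of the churn in the Turkish diff above comes from one Snowball 3.x change: the unrolled golabN scanning loops from 2.1.0 collapse into single go_out_grouping_b / go_out_grouping calls. As the deleted loops show, the helper walks the cursor over characters outside a grouping (a bitset over the char-code range [min, max]) and succeeds once it reaches a character inside it, without consuming it. A sketch of the backward variant, reconstructed from the loops it replaces (the shipped BaseStemmer may differ in detail):

    // go_out_grouping_b, as implied by the 2.1.0 loops deleted above:
    // skip characters not in the grouping, stop at the first one that is.
    function goOutGroupingBSketch(base, group, min, max) {
      while (base.cursor > base.limit_backward) {
        var ch = base.current.charCodeAt(base.cursor - 1);
        if (ch >= min && ch <= max &&
            (group[(ch - min) >> 3] & (1 << ((ch - min) & 7))) !== 0) {
          return true;  // grouping character is adjacent; leave it unconsumed
        }
        base.cursor--;  // skip a character outside the grouping
      }
      return false;     // hit the backward limit first
    }

This is also why r_more_than_one_syllable_word now reduces to the two-iteration for loop above: each pass finds the next vowel with the forward variant and then steps past it with base.cursor++.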
diff --git a/sphinx/search/non-minified-js/yiddish-stemmer.js b/sphinx/search/non-minified-js/yiddish-stemmer.js
new file mode 100644
index 00000000000..b9a7ddb411c
--- /dev/null
+++ b/sphinx/search/non-minified-js/yiddish-stemmer.js
@@ -0,0 +1,1160 @@
+// Generated from yiddish.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var YiddishStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["\u05D5\u05D5", -1, 1],
+ ["\u05D5\u05D9", -1, 2],
+ ["\u05D9\u05D9", -1, 3],
+ ["\u05DA", -1, 4],
+ ["\u05DD", -1, 5],
+ ["\u05DF", -1, 6],
+ ["\u05E3", -1, 7],
+ ["\u05E5", -1, 8]
+ ];
+
+ /** @const */ var a_1 = [
+ ["\u05D0\u05D3\u05D5\u05E8\u05DB", -1, 1],
+ ["\u05D0\u05D4\u05D9\u05E0", -1, 1],
+ ["\u05D0\u05D4\u05E2\u05E8", -1, 1],
+ ["\u05D0\u05D4\u05F2\u05DE", -1, 1],
+ ["\u05D0\u05D5\u05DE", -1, 1],
+ ["\u05D0\u05D5\u05E0\u05D8\u05E2\u05E8", -1, 1],
+ ["\u05D0\u05D9\u05D1\u05E2\u05E8", -1, 1],
+ ["\u05D0\u05E0", -1, 1],
+ ["\u05D0\u05E0\u05D8", 7, 1],
+ ["\u05D0\u05E0\u05D8\u05E7\u05E2\u05D2\u05E0", 8, 1],
+ ["\u05D0\u05E0\u05D9\u05D3\u05E2\u05E8", 7, 1],
+ ["\u05D0\u05E4", -1, 1],
+ ["\u05D0\u05E4\u05D9\u05E8", 11, 1],
+ ["\u05D0\u05E7\u05E2\u05D2\u05E0", -1, 1],
+ ["\u05D0\u05E8\u05D0\u05E4", -1, 1],
+ ["\u05D0\u05E8\u05D5\u05DE", -1, 1],
+ ["\u05D0\u05E8\u05D5\u05E0\u05D8\u05E2\u05E8", -1, 1],
+ ["\u05D0\u05E8\u05D9\u05D1\u05E2\u05E8", -1, 1],
+ ["\u05D0\u05E8\u05F1\u05E1", -1, 1],
+ ["\u05D0\u05E8\u05F1\u05E4", -1, 1],
+ ["\u05D0\u05E8\u05F2\u05E0", -1, 1],
+ ["\u05D0\u05F0\u05E2\u05E7", -1, 1],
+ ["\u05D0\u05F1\u05E1", -1, 1],
+ ["\u05D0\u05F1\u05E4", -1, 1],
+ ["\u05D0\u05F2\u05E0", -1, 1],
+ ["\u05D1\u05D0", -1, 1],
+ ["\u05D1\u05F2", -1, 1],
+ ["\u05D3\u05D5\u05E8\u05DB", -1, 1],
+ ["\u05D3\u05E2\u05E8", -1, 1],
+ ["\u05DE\u05D9\u05D8", -1, 1],
+ ["\u05E0\u05D0\u05DB", -1, 1],
+ ["\u05E4\u05D0\u05E8", -1, 1],
+ ["\u05E4\u05D0\u05E8\u05D1\u05F2", 31, 1],
+ ["\u05E4\u05D0\u05E8\u05F1\u05E1", 31, 1],
+ ["\u05E4\u05D5\u05E0\u05D0\u05E0\u05D3\u05E2\u05E8", -1, 1],
+ ["\u05E6\u05D5", -1, 1],
+ ["\u05E6\u05D5\u05D6\u05D0\u05DE\u05E2\u05E0", 35, 1],
+ ["\u05E6\u05D5\u05E0\u05F1\u05E4", 35, 1],
+ ["\u05E6\u05D5\u05E8\u05D9\u05E7", 35, 1],
+ ["\u05E6\u05E2", -1, 1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["\u05D3\u05D6\u05E9", -1, -1],
+ ["\u05E9\u05D8\u05E8", -1, -1],
+ ["\u05E9\u05D8\u05E9", -1, -1],
+ ["\u05E9\u05E4\u05E8", -1, -1]
+ ];
+
+ /** @const */ var a_3 = [
+ ["\u05E7\u05DC\u05D9\u05D1", -1, 9],
+ ["\u05E8\u05D9\u05D1", -1, 10],
+ ["\u05D8\u05E8\u05D9\u05D1", 1, 7],
+ ["\u05E9\u05E8\u05D9\u05D1", 1, 15],
+ ["\u05D4\u05F1\u05D1", -1, 23],
+ ["\u05E9\u05F0\u05D9\u05D2", -1, 12],
+ ["\u05D2\u05D0\u05E0\u05D2", -1, 1],
+ ["\u05D6\u05D5\u05E0\u05D2", -1, 18],
+ ["\u05E9\u05DC\u05D5\u05E0\u05D2", -1, 21],
+ ["\u05E6\u05F0\u05D5\u05E0\u05D2", -1, 20],
+ ["\u05D1\u05F1\u05D2", -1, 22],
+ ["\u05D1\u05D5\u05E0\u05D3", -1, 16],
+ ["\u05F0\u05D9\u05D6", -1, 6],
+ ["\u05D1\u05D9\u05D8", -1, 4],
+ ["\u05DC\u05D9\u05D8", -1, 8],
+ ["\u05DE\u05D9\u05D8", -1, 3],
+ ["\u05E9\u05E0\u05D9\u05D8", -1, 14],
+ ["\u05E0\u05D5\u05DE", -1, 2],
+ ["\u05E9\u05D8\u05D0\u05E0", -1, 25],
+ ["\u05D1\u05D9\u05E1", -1, 5],
+ ["\u05E9\u05DE\u05D9\u05E1", -1, 13],
+ ["\u05E8\u05D9\u05E1", -1, 11],
+ ["\u05D8\u05E8\u05D5\u05E0\u05E7", -1, 19],
+ ["\u05E4\u05D0\u05E8\u05DC\u05F1\u05E8", -1, 24],
+ ["\u05E9\u05F0\u05F1\u05E8", -1, 26],
+ ["\u05F0\u05D5\u05D8\u05E9", -1, 17]
+ ];
+
+ /** @const */ var a_4 = [
+ ["\u05D5\u05E0\u05D2", -1, 1],
+ ["\u05E1\u05D8\u05D5", -1, 1],
+ ["\u05D8", -1, 1],
+ ["\u05D1\u05E8\u05D0\u05DB\u05D8", 2, 31],
+ ["\u05E1\u05D8", 2, 1],
+ ["\u05D9\u05E1\u05D8", 4, 33],
+ ["\u05E2\u05D8", 2, 1],
+ ["\u05E9\u05D0\u05E4\u05D8", 2, 1],
+ ["\u05D4\u05F2\u05D8", 2, 1],
+ ["\u05E7\u05F2\u05D8", 2, 1],
+ ["\u05D9\u05E7\u05F2\u05D8", 9, 1],
+ ["\u05DC\u05E2\u05DB", -1, 1],
+ ["\u05E2\u05DC\u05E2\u05DB", 11, 1],
+ ["\u05D9\u05D6\u05DE", -1, 1],
+ ["\u05D9\u05DE", -1, 1],
+ ["\u05E2\u05DE", -1, 1],
+ ["\u05E2\u05E0\u05E2\u05DE", 15, 3],
+ ["\u05D8\u05E2\u05E0\u05E2\u05DE", 16, 4],
+ ["\u05E0", -1, 1],
+ ["\u05E7\u05DC\u05D9\u05D1\u05E0", 18, 14],
+ ["\u05E8\u05D9\u05D1\u05E0", 18, 15],
+ ["\u05D8\u05E8\u05D9\u05D1\u05E0", 20, 12],
+ ["\u05E9\u05E8\u05D9\u05D1\u05E0", 20, 7],
+ ["\u05D4\u05F1\u05D1\u05E0", 18, 27],
+ ["\u05E9\u05F0\u05D9\u05D2\u05E0", 18, 17],
+ ["\u05D6\u05D5\u05E0\u05D2\u05E0", 18, 22],
+ ["\u05E9\u05DC\u05D5\u05E0\u05D2\u05E0", 18, 25],
+ ["\u05E6\u05F0\u05D5\u05E0\u05D2\u05E0", 18, 24],
+ ["\u05D1\u05F1\u05D2\u05E0", 18, 26],
+ ["\u05D1\u05D5\u05E0\u05D3\u05E0", 18, 20],
+ ["\u05F0\u05D9\u05D6\u05E0", 18, 11],
+ ["\u05D8\u05E0", 18, 4],
+ ["GE\u05D1\u05D9\u05D8\u05E0", 31, 9],
+ ["GE\u05DC\u05D9\u05D8\u05E0", 31, 13],
+ ["GE\u05DE\u05D9\u05D8\u05E0", 31, 8],
+ ["\u05E9\u05E0\u05D9\u05D8\u05E0", 31, 19],
+ ["\u05E1\u05D8\u05E0", 31, 1],
+ ["\u05D9\u05E1\u05D8\u05E0", 36, 1],
+ ["\u05E2\u05D8\u05E0", 31, 1],
+ ["GE\u05D1\u05D9\u05E1\u05E0", 18, 10],
+ ["\u05E9\u05DE\u05D9\u05E1\u05E0", 18, 18],
+ ["GE\u05E8\u05D9\u05E1\u05E0", 18, 16],
+ ["\u05E2\u05E0", 18, 1],
+ ["\u05D2\u05D0\u05E0\u05D2\u05E2\u05E0", 42, 5],
+ ["\u05E2\u05DC\u05E2\u05E0", 42, 1],
+ ["\u05E0\u05D5\u05DE\u05E2\u05E0", 42, 6],
+ ["\u05D9\u05D6\u05DE\u05E2\u05E0", 42, 1],
+ ["\u05E9\u05D8\u05D0\u05E0\u05E2\u05E0", 42, 29],
+ ["\u05D8\u05E8\u05D5\u05E0\u05E7\u05E0", 18, 23],
+ ["\u05E4\u05D0\u05E8\u05DC\u05F1\u05E8\u05E0", 18, 28],
+ ["\u05E9\u05F0\u05F1\u05E8\u05E0", 18, 30],
+ ["\u05F0\u05D5\u05D8\u05E9\u05E0", 18, 21],
+ ["\u05D2\u05F2\u05E0", 18, 5],
+ ["\u05E1", -1, 1],
+ ["\u05D8\u05E1", 53, 4],
+ ["\u05E2\u05D8\u05E1", 54, 1],
+ ["\u05E0\u05E1", 53, 1],
+ ["\u05D8\u05E0\u05E1", 56, 4],
+ ["\u05E2\u05E0\u05E1", 56, 3],
+ ["\u05E2\u05E1", 53, 1],
+ ["\u05D9\u05E2\u05E1", 59, 2],
+ ["\u05E2\u05DC\u05E2\u05E1", 59, 1],
+ ["\u05E2\u05E8\u05E1", 53, 1],
+ ["\u05E2\u05E0\u05E2\u05E8\u05E1", 62, 1],
+ ["\u05E2", -1, 1],
+ ["\u05D8\u05E2", 64, 4],
+ ["\u05E1\u05D8\u05E2", 65, 1],
+ ["\u05E2\u05D8\u05E2", 65, 1],
+ ["\u05D9\u05E2", 64, -1],
+ ["\u05E2\u05DC\u05E2", 64, 1],
+ ["\u05E2\u05E0\u05E2", 64, 3],
+ ["\u05D8\u05E2\u05E0\u05E2", 70, 4],
+ ["\u05E2\u05E8", -1, 1],
+ ["\u05D8\u05E2\u05E8", 72, 4],
+ ["\u05E1\u05D8\u05E2\u05E8", 73, 1],
+ ["\u05E2\u05D8\u05E2\u05E8", 73, 1],
+ ["\u05E2\u05E0\u05E2\u05E8", 72, 3],
+ ["\u05D8\u05E2\u05E0\u05E2\u05E8", 76, 4],
+ ["\u05D5\u05EA", -1, 32]
+ ];
+
+ /** @const */ var a_5 = [
+ ["\u05D5\u05E0\u05D2", -1, 1],
+ ["\u05E9\u05D0\u05E4\u05D8", -1, 1],
+ ["\u05D4\u05F2\u05D8", -1, 1],
+ ["\u05E7\u05F2\u05D8", -1, 1],
+ ["\u05D9\u05E7\u05F2\u05D8", 3, 1],
+ ["\u05DC", -1, 2]
+ ];
+
+ /** @const */ var a_6 = [
+ ["\u05D9\u05D2", -1, 1],
+ ["\u05D9\u05E7", -1, 1],
+ ["\u05D3\u05D9\u05E7", 1, 1],
+ ["\u05E0\u05D3\u05D9\u05E7", 2, 1],
+ ["\u05E2\u05E0\u05D3\u05D9\u05E7", 3, 1],
+ ["\u05D1\u05DC\u05D9\u05E7", 1, -1],
+ ["\u05D2\u05DC\u05D9\u05E7", 1, -1],
+ ["\u05E0\u05D9\u05E7", 1, 1],
+ ["\u05D9\u05E9", -1, 1]
+ ];
+
+ /** @const */ var /** Array */ g_niked = [255, 155, 6];
+
+ /** @const */ var /** Array */ g_vowel = [33, 2, 4, 0, 6];
+
+ /** @const */ var /** Array */ g_consonant = [239, 254, 253, 131];
+
+ var /** number */ I_x = 0;
+ var /** number */ I_p1 = 0;
+
+
+ /** @return {boolean} */
+ function r_prelude() {
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ golab2: while(true)
+ {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab3: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_0);
+ if (among_var == 0)
+ {
+ break lab3;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab4: {
+ if (!(base.eq_s("\u05BC")))
+ {
+ break lab4;
+ }
+ break lab3;
+ }
+ base.cursor = v_4;
+ }
+ if (!base.slice_from("\u05F0"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ {
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab5: {
+ if (!(base.eq_s("\u05B4")))
+ {
+ break lab5;
+ }
+ break lab3;
+ }
+ base.cursor = v_5;
+ }
+ if (!base.slice_from("\u05F1"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ {
+ /** @const */ var /** number */ v_6 = base.cursor;
+ lab6: {
+ if (!(base.eq_s("\u05B4")))
+ {
+ break lab6;
+ }
+ break lab3;
+ }
+ base.cursor = v_6;
+ }
+ if (!base.slice_from("\u05F2"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("\u05DB"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("\u05DE"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("\u05E0"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("\u05E4"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("\u05E6"))
+ {
+ return false;
+ }
+ break;
+ }
+ base.cursor = v_3;
+ break golab2;
+ }
+ base.cursor = v_3;
+ if (base.cursor >= base.limit)
+ {
+ break lab1;
+ }
+ base.cursor++;
+ }
+ continue;
+ }
+ base.cursor = v_2;
+ break;
+ }
+ }
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_7 = base.cursor;
+ lab7: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_8 = base.cursor;
+ lab8: {
+ golab9: while(true)
+ {
+ /** @const */ var /** number */ v_9 = base.cursor;
+ lab10: {
+ base.bra = base.cursor;
+ if (!(base.in_grouping(g_niked, 1456, 1474)))
+ {
+ break lab10;
+ }
+ base.ket = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.cursor = v_9;
+ break golab9;
+ }
+ base.cursor = v_9;
+ if (base.cursor >= base.limit)
+ {
+ break lab8;
+ }
+ base.cursor++;
+ }
+ continue;
+ }
+ base.cursor = v_8;
+ break;
+ }
+ }
+ base.cursor = v_7;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_mark_regions() {
+ I_p1 = base.limit;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ base.bra = base.cursor;
+ if (!(base.eq_s("\u05D2\u05E2")))
+ {
+ base.cursor = v_1;
+ break lab0;
+ }
+ base.ket = base.cursor;
+ {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ lab2: {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab3: {
+ if (!(base.eq_s("\u05DC\u05D8")))
+ {
+ break lab3;
+ }
+ break lab2;
+ }
+ base.cursor = v_3;
+ lab4: {
+ if (!(base.eq_s("\u05D1\u05E0")))
+ {
+ break lab4;
+ }
+ break lab2;
+ }
+ base.cursor = v_3;
+ if (base.cursor < base.limit)
+ {
+ break lab1;
+ }
+ }
+ base.cursor = v_1;
+ break lab0;
+ }
+ base.cursor = v_2;
+ }
+ if (!base.slice_from("GE"))
+ {
+ return false;
+ }
+ }
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab5: {
+ if (base.find_among(a_1) == 0)
+ {
+ base.cursor = v_4;
+ break lab5;
+ }
+ lab6: {
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab7: {
+ /** @const */ var /** number */ v_6 = base.cursor;
+ lab8: {
+ /** @const */ var /** number */ v_7 = base.cursor;
+ lab9: {
+ if (!(base.eq_s("\u05E6\u05D5\u05D2\u05E0")))
+ {
+ break lab9;
+ }
+ break lab8;
+ }
+ base.cursor = v_7;
+ lab10: {
+ if (!(base.eq_s("\u05E6\u05D5\u05E7\u05D8")))
+ {
+ break lab10;
+ }
+ break lab8;
+ }
+ base.cursor = v_7;
+ if (!(base.eq_s("\u05E6\u05D5\u05E7\u05E0")))
+ {
+ break lab7;
+ }
+ }
+ if (base.cursor < base.limit)
+ {
+ break lab7;
+ }
+ base.cursor = v_6;
+ break lab6;
+ }
+ base.cursor = v_5;
+ lab11: {
+ /** @const */ var /** number */ v_8 = base.cursor;
+ if (!(base.eq_s("\u05D2\u05E2\u05D1\u05E0")))
+ {
+ break lab11;
+ }
+ base.cursor = v_8;
+ break lab6;
+ }
+ base.cursor = v_5;
+ lab12: {
+ base.bra = base.cursor;
+ if (!(base.eq_s("\u05D2\u05E2")))
+ {
+ break lab12;
+ }
+ base.ket = base.cursor;
+ if (!base.slice_from("GE"))
+ {
+ return false;
+ }
+ break lab6;
+ }
+ base.cursor = v_5;
+ base.bra = base.cursor;
+ if (!(base.eq_s("\u05E6\u05D5")))
+ {
+ base.cursor = v_4;
+ break lab5;
+ }
+ base.ket = base.cursor;
+ if (!base.slice_from("TSU"))
+ {
+ return false;
+ }
+ }
+ }
+ /** @const */ var /** number */ v_9 = base.cursor;
+ {
+ /** @const */ var /** number */ c1 = base.cursor + 3;
+ if (c1 > base.limit)
+ {
+ return false;
+ }
+ base.cursor = c1;
+ }
+ I_x = base.cursor;
+ base.cursor = v_9;
+ /** @const */ var /** number */ v_10 = base.cursor;
+ lab13: {
+ if (base.find_among(a_2) == 0)
+ {
+ base.cursor = v_10;
+ break lab13;
+ }
+ }
+ {
+ /** @const */ var /** number */ v_11 = base.cursor;
+ lab14: {
+ if (!(base.in_grouping(g_consonant, 1489, 1520)))
+ {
+ break lab14;
+ }
+ if (!(base.in_grouping(g_consonant, 1489, 1520)))
+ {
+ break lab14;
+ }
+ if (!(base.in_grouping(g_consonant, 1489, 1520)))
+ {
+ break lab14;
+ }
+ I_p1 = base.cursor;
+ return false;
+ }
+ base.cursor = v_11;
+ }
+ if (!base.go_out_grouping(g_vowel, 1488, 1522))
+ {
+ return false;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_vowel, 1488, 1522))
+ {
+ return false;
+ }
+ I_p1 = base.cursor;
+ lab15: {
+ if (I_p1 >= I_x)
+ {
+ break lab15;
+ }
+ I_p1 = I_x;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_R1() {
+ return I_p1 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_R1plus3() {
+ return I_p1 <= (base.cursor + 3);
+ };
+
+ /** @return {boolean} */
+ function r_standard_suffix() {
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_4);
+ if (among_var == 0)
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R1())
+ {
+ break lab0;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R1())
+ {
+ break lab0;
+ }
+ if (!base.slice_from("\u05D9\u05E2"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!r_R1())
+ {
+ break lab0;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_3);
+ if (among_var == 0)
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u05D2\u05F2"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u05E0\u05E2\u05DE"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u05DE\u05F2\u05D3"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("\u05D1\u05F2\u05D8"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("\u05D1\u05F2\u05E1"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("\u05F0\u05F2\u05D6"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("\u05D8\u05E8\u05F2\u05D1"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("\u05DC\u05F2\u05D8"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("\u05E7\u05DC\u05F2\u05D1"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("\u05E8\u05F2\u05D1"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ if (!base.slice_from("\u05E8\u05F2\u05E1"))
+ {
+ return false;
+ }
+ break;
+ case 12:
+ if (!base.slice_from("\u05E9\u05F0\u05F2\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 13:
+ if (!base.slice_from("\u05E9\u05DE\u05F2\u05E1"))
+ {
+ return false;
+ }
+ break;
+ case 14:
+ if (!base.slice_from("\u05E9\u05E0\u05F2\u05D3"))
+ {
+ return false;
+ }
+ break;
+ case 15:
+ if (!base.slice_from("\u05E9\u05E8\u05F2\u05D1"))
+ {
+ return false;
+ }
+ break;
+ case 16:
+ if (!base.slice_from("\u05D1\u05D9\u05E0\u05D3"))
+ {
+ return false;
+ }
+ break;
+ case 17:
+ if (!base.slice_from("\u05F0\u05D9\u05D8\u05E9"))
+ {
+ return false;
+ }
+ break;
+ case 18:
+ if (!base.slice_from("\u05D6\u05D9\u05E0\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 19:
+ if (!base.slice_from("\u05D8\u05E8\u05D9\u05E0\u05E7"))
+ {
+ return false;
+ }
+ break;
+ case 20:
+ if (!base.slice_from("\u05E6\u05F0\u05D9\u05E0\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 21:
+ if (!base.slice_from("\u05E9\u05DC\u05D9\u05E0\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 22:
+ if (!base.slice_from("\u05D1\u05F2\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 23:
+ if (!base.slice_from("\u05D4\u05F2\u05D1"))
+ {
+ return false;
+ }
+ break;
+ case 24:
+ if (!base.slice_from("\u05E4\u05D0\u05E8\u05DC\u05D9\u05E8"))
+ {
+ return false;
+ }
+ break;
+ case 25:
+ if (!base.slice_from("\u05E9\u05D8\u05F2"))
+ {
+ return false;
+ }
+ break;
+ case 26:
+ if (!base.slice_from("\u05E9\u05F0\u05E2\u05E8"))
+ {
+ return false;
+ }
+ break;
+ }
+ break;
+ case 4:
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab2: {
+ if (!r_R1())
+ {
+ break lab2;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ if (!base.slice_from("\u05D8"))
+ {
+ return false;
+ }
+ }
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u05D1\u05E8\u05D0\u05DB")))
+ {
+ break lab0;
+ }
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab3: {
+ if (!(base.eq_s_b("\u05D2\u05E2")))
+ {
+ base.cursor = base.limit - v_3;
+ break lab3;
+ }
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("\u05D1\u05E8\u05E2\u05E0\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("\u05D2\u05F2"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("\u05E0\u05E2\u05DE"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("\u05E9\u05E8\u05F2\u05D1"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("\u05DE\u05F2\u05D3"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("\u05D1\u05F2\u05D8"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("\u05D1\u05F2\u05E1"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ if (!base.slice_from("\u05F0\u05F2\u05D6"))
+ {
+ return false;
+ }
+ break;
+ case 12:
+ if (!base.slice_from("\u05D8\u05E8\u05F2\u05D1"))
+ {
+ return false;
+ }
+ break;
+ case 13:
+ if (!base.slice_from("\u05DC\u05F2\u05D8"))
+ {
+ return false;
+ }
+ break;
+ case 14:
+ if (!base.slice_from("\u05E7\u05DC\u05F2\u05D1"))
+ {
+ return false;
+ }
+ break;
+ case 15:
+ if (!base.slice_from("\u05E8\u05F2\u05D1"))
+ {
+ return false;
+ }
+ break;
+ case 16:
+ if (!base.slice_from("\u05E8\u05F2\u05E1"))
+ {
+ return false;
+ }
+ break;
+ case 17:
+ if (!base.slice_from("\u05E9\u05F0\u05F2\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 18:
+ if (!base.slice_from("\u05E9\u05DE\u05F2\u05E1"))
+ {
+ return false;
+ }
+ break;
+ case 19:
+ if (!base.slice_from("\u05E9\u05E0\u05F2\u05D3"))
+ {
+ return false;
+ }
+ break;
+ case 20:
+ if (!base.slice_from("\u05D1\u05D9\u05E0\u05D3"))
+ {
+ return false;
+ }
+ break;
+ case 21:
+ if (!base.slice_from("\u05F0\u05D9\u05D8\u05E9"))
+ {
+ return false;
+ }
+ break;
+ case 22:
+ if (!base.slice_from("\u05D6\u05D9\u05E0\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 23:
+ if (!base.slice_from("\u05D8\u05E8\u05D9\u05E0\u05E7"))
+ {
+ return false;
+ }
+ break;
+ case 24:
+ if (!base.slice_from("\u05E6\u05F0\u05D9\u05E0\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 25:
+ if (!base.slice_from("\u05E9\u05DC\u05D9\u05E0\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 26:
+ if (!base.slice_from("\u05D1\u05F2\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 27:
+ if (!base.slice_from("\u05D4\u05F2\u05D1"))
+ {
+ return false;
+ }
+ break;
+ case 28:
+ if (!base.slice_from("\u05E4\u05D0\u05E8\u05DC\u05D9\u05E8"))
+ {
+ return false;
+ }
+ break;
+ case 29:
+ if (!base.slice_from("\u05E9\u05D8\u05F2"))
+ {
+ return false;
+ }
+ break;
+ case 30:
+ if (!base.slice_from("\u05E9\u05F0\u05E2\u05E8"))
+ {
+ return false;
+ }
+ break;
+ case 31:
+ if (!base.slice_from("\u05D1\u05E8\u05E2\u05E0\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 32:
+ if (!r_R1())
+ {
+ break lab0;
+ }
+ if (!base.slice_from("\u05D4"))
+ {
+ return false;
+ }
+ break;
+ case 33:
+ lab4: {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab5: {
+ lab6: {
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab7: {
+ if (!(base.eq_s_b("\u05D2")))
+ {
+ break lab7;
+ }
+ break lab6;
+ }
+ base.cursor = base.limit - v_5;
+ if (!(base.eq_s_b("\u05E9")))
+ {
+ break lab5;
+ }
+ }
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ lab8: {
+ if (!r_R1plus3())
+ {
+ base.cursor = base.limit - v_6;
+ break lab8;
+ }
+ if (!base.slice_from("\u05D9\u05E1"))
+ {
+ return false;
+ }
+ }
+ break lab4;
+ }
+ base.cursor = base.limit - v_4;
+ if (!r_R1())
+ {
+ break lab0;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ break;
+ }
+ }
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab9: {
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_5);
+ if (among_var == 0)
+ {
+ break lab9;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R1())
+ {
+ break lab9;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R1())
+ {
+ break lab9;
+ }
+ if (!(base.in_grouping_b(g_consonant, 1489, 1520)))
+ {
+ break lab9;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ }
+ base.cursor = base.limit - v_7;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ lab10: {
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_6);
+ if (among_var == 0)
+ {
+ break lab10;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R1())
+ {
+ break lab10;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ }
+ base.cursor = base.limit - v_8;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
+ lab11: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
+ lab12: {
+ golab13: while(true)
+ {
+ /** @const */ var /** number */ v_11 = base.limit - base.cursor;
+ lab14: {
+ base.ket = base.cursor;
+ lab15: {
+ /** @const */ var /** number */ v_12 = base.limit - base.cursor;
+ lab16: {
+ if (!(base.eq_s_b("GE")))
+ {
+ break lab16;
+ }
+ break lab15;
+ }
+ base.cursor = base.limit - v_12;
+ if (!(base.eq_s_b("TSU")))
+ {
+ break lab14;
+ }
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_11;
+ break golab13;
+ }
+ base.cursor = base.limit - v_11;
+ if (base.cursor <= base.limit_backward)
+ {
+ break lab12;
+ }
+ base.cursor--;
+ }
+ continue;
+ }
+ base.cursor = base.limit - v_10;
+ break;
+ }
+ }
+ base.cursor = base.limit - v_9;
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ r_prelude();
+ /** @const */ var /** number */ v_1 = base.cursor;
+ r_mark_regions();
+ base.cursor = v_1;
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ r_standard_suffix();
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
From 4532958b9405d82a14da328d85d08991978464a5 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Mon, 19 May 2025 22:14:16 +0100
Subject: [PATCH 076/466] Remove ``const`` from ``BaseStemmer``
---
sphinx/search/minified-js/base-stemmer.js | 2 +-
sphinx/search/non-minified-js/base-stemmer.js | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/sphinx/search/minified-js/base-stemmer.js b/sphinx/search/minified-js/base-stemmer.js
index 69a4ad03787..9736db91588 100644
--- a/sphinx/search/minified-js/base-stemmer.js
+++ b/sphinx/search/minified-js/base-stemmer.js
@@ -1 +1 @@
-let BaseStemmer=function(){this.current="",this.cursor=0,this.limit=0,this.limit_backward=0,this.bra=0,this.ket=0,this.setCurrent=function(t){this.current=t,this.cursor=0,this.limit=this.current.length,this.limit_backward=0,this.bra=this.cursor,this.ket=this.limit},this.getCurrent=function(){return this.current},this.copy_from=function(t){this.current=t.current,this.cursor=t.cursor,this.limit=t.limit,this.limit_backward=t.limit_backward,this.bra=t.bra,this.ket=t.ket},this.in_grouping=function(t,r,i){return!(this.cursor>=this.limit||i<(i=this.current.charCodeAt(this.cursor))||i>>3]&1<<(7&i))||(this.cursor++,0))},this.go_in_grouping=function(t,r,i){for(;this.cursor>>3]&1<<(7&s)))return!0;this.cursor++}return!1},this.in_grouping_b=function(t,r,i){return!(this.cursor<=this.limit_backward||i<(i=this.current.charCodeAt(this.cursor-1))||i>>3]&1<<(7&i))||(this.cursor--,0))},this.go_in_grouping_b=function(t,r,i){for(;this.cursor>this.limit_backward;){var s=this.current.charCodeAt(this.cursor-1);if(i>>3]&1<<(7&s)))return!0;this.cursor--}return!1},this.out_grouping=function(t,r,i){return!(this.cursor>=this.limit)&&(i<(i=this.current.charCodeAt(this.cursor))||i>>3]&1<<(7&i)))&&(this.cursor++,!0)},this.go_out_grouping=function(t,r,i){for(;this.cursor>>3]&1<<(7&s)))return!0;this.cursor++}return!1},this.out_grouping_b=function(t,r,i){return!(this.cursor<=this.limit_backward)&&(i<(i=this.current.charCodeAt(this.cursor-1))||i>>3]&1<<(7&i)))&&(this.cursor--,!0)},this.go_out_grouping_b=function(t,r,i){for(;this.cursor>this.limit_backward;){var s=this.current.charCodeAt(this.cursor-1);if(s<=i&&r<=s&&0!=(t[(s-=r)>>>3]&1<<(7&s)))return!0;this.cursor--}return!1},this.eq_s=function(t){return!(this.limit-this.cursor>>1),o=0,a=e=(l=t[r])[0].length){if(this.cursor=s+l[0].length,l.length<4)return l[2];var g=l[3](this);if(this.cursor=s+l[0].length,g)return l[2]}}while(0<=(r=l[1]));return 0},this.find_among_b=function(t){for(var r=0,i=t.length,s=this.cursor,h=this.limit_backward,e=0,n=0,c=!1;;){for(var u,o=r+(i-r>>1),a=0,l=e=(u=t[r])[0].length){if(this.cursor=s-u[0].length,u.length<4)return u[2];var g=u[3](this);if(this.cursor=s-u[0].length,g)return u[2]}}while(0<=(r=u[1]));return 0},this.replace_s=function(t,r,i){var s=i.length-(r-t);return this.current=this.current.slice(0,t)+i+this.current.slice(r),this.limit+=s,this.cursor>=r?this.cursor+=s:this.cursor>t&&(this.cursor=t),s},this.slice_check=function(){return!(this.bra<0||this.bra>this.ket||this.ket>this.limit||this.limit>this.current.length)},this.slice_from=function(t){var r=!1;return this.slice_check()&&(this.replace_s(this.bra,this.ket,t),r=!0),r},this.slice_del=function(){return this.slice_from("")},this.insert=function(t,r,i){r=this.replace_s(t,r,i);t<=this.bra&&(this.bra+=r),t<=this.ket&&(this.ket+=r)},this.slice_to=function(){var t="";return t=this.slice_check()?this.current.slice(this.bra,this.ket):t},this.assign_to=function(){return this.current.slice(0,this.limit)}};
\ No newline at end of file
+BaseStemmer=function(){this.current="",this.cursor=0,this.limit=0,this.limit_backward=0,this.bra=0,this.ket=0,this.setCurrent=function(t){this.current=t,this.cursor=0,this.limit=this.current.length,this.limit_backward=0,this.bra=this.cursor,this.ket=this.limit},this.getCurrent=function(){return this.current},this.copy_from=function(t){this.current=t.current,this.cursor=t.cursor,this.limit=t.limit,this.limit_backward=t.limit_backward,this.bra=t.bra,this.ket=t.ket},this.in_grouping=function(t,r,i){return!(this.cursor>=this.limit||i<(i=this.current.charCodeAt(this.cursor))||i>>3]&1<<(7&i))||(this.cursor++,0))},this.go_in_grouping=function(t,r,i){for(;this.cursor>>3]&1<<(7&s)))return!0;this.cursor++}return!1},this.in_grouping_b=function(t,r,i){return!(this.cursor<=this.limit_backward||i<(i=this.current.charCodeAt(this.cursor-1))||i>>3]&1<<(7&i))||(this.cursor--,0))},this.go_in_grouping_b=function(t,r,i){for(;this.cursor>this.limit_backward;){var s=this.current.charCodeAt(this.cursor-1);if(i>>3]&1<<(7&s)))return!0;this.cursor--}return!1},this.out_grouping=function(t,r,i){return!(this.cursor>=this.limit)&&(i<(i=this.current.charCodeAt(this.cursor))||i>>3]&1<<(7&i)))&&(this.cursor++,!0)},this.go_out_grouping=function(t,r,i){for(;this.cursor>>3]&1<<(7&s)))return!0;this.cursor++}return!1},this.out_grouping_b=function(t,r,i){return!(this.cursor<=this.limit_backward)&&(i<(i=this.current.charCodeAt(this.cursor-1))||i>>3]&1<<(7&i)))&&(this.cursor--,!0)},this.go_out_grouping_b=function(t,r,i){for(;this.cursor>this.limit_backward;){var s=this.current.charCodeAt(this.cursor-1);if(s<=i&&r<=s&&0!=(t[(s-=r)>>>3]&1<<(7&s)))return!0;this.cursor--}return!1},this.eq_s=function(t){return!(this.limit-this.cursor>>1),o=0,a=e=(l=t[r])[0].length){if(this.cursor=s+l[0].length,l.length<4)return l[2];var g=l[3](this);if(this.cursor=s+l[0].length,g)return l[2]}}while(0<=(r=l[1]));return 0},this.find_among_b=function(t){for(var r=0,i=t.length,s=this.cursor,h=this.limit_backward,e=0,n=0,c=!1;;){for(var u,o=r+(i-r>>1),a=0,l=e=(u=t[r])[0].length){if(this.cursor=s-u[0].length,u.length<4)return u[2];var g=u[3](this);if(this.cursor=s-u[0].length,g)return u[2]}}while(0<=(r=u[1]));return 0},this.replace_s=function(t,r,i){var s=i.length-(r-t);return this.current=this.current.slice(0,t)+i+this.current.slice(r),this.limit+=s,this.cursor>=r?this.cursor+=s:this.cursor>t&&(this.cursor=t),s},this.slice_check=function(){return!(this.bra<0||this.bra>this.ket||this.ket>this.limit||this.limit>this.current.length)},this.slice_from=function(t){var r=!1;return this.slice_check()&&(this.replace_s(this.bra,this.ket,t),r=!0),r},this.slice_del=function(){return this.slice_from("")},this.insert=function(t,r,i){r=this.replace_s(t,r,i);t<=this.bra&&(this.bra+=r),t<=this.ket&&(this.ket+=r)},this.slice_to=function(){var t="";return t=this.slice_check()?this.current.slice(this.bra,this.ket):t},this.assign_to=function(){return this.current.slice(0,this.limit)}};
\ No newline at end of file
diff --git a/sphinx/search/non-minified-js/base-stemmer.js b/sphinx/search/non-minified-js/base-stemmer.js
index 8cf2d585582..e6fa0c49260 100644
--- a/sphinx/search/non-minified-js/base-stemmer.js
+++ b/sphinx/search/non-minified-js/base-stemmer.js
@@ -1,7 +1,7 @@
// @ts-check
/**@constructor*/
-const BaseStemmer = function() {
+BaseStemmer = function() {
/** @protected */
this.current = '';
this.cursor = 0;
From 954839afe3a3204a713d40fa4fa9a95da46c305b Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Mon, 19 May 2025 22:59:34 +0100
Subject: [PATCH 077/466] Use the more modern English stemmer (#13574)
The 'Porter' stemmer is frozen upstream (kept only for backwards
compatibility), so switch to Snowball's current 'english' stemmer,
also known as Porter2.
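As a quick illustration (a sketch using the ``snowballstemmer``
package that both code paths wrap; the sample word is taken from the
test-suite changes in this patch)::

    import snowballstemmer

    # Porter (1980) rewrites a final 'y' to 'i' whenever the stem
    # contains a vowel, so a term like 'findthiskey' is indexed
    # as 'findthiskei'.
    snowballstemmer.stemmer('porter').stemWord('findthiskey')
    # -> 'findthiskei'

    # The 'english' (Porter2) algorithm only makes that rewrite when
    # the 'y' follows a consonant, so the term keeps its spelling.
    snowballstemmer.stemmer('english').stemWord('findthiskey')
    # -> 'findthiskey'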
---
sphinx/search/da.py | 2 +-
sphinx/search/de.py | 2 +-
sphinx/search/en.py | 192 +--------------------
sphinx/search/es.py | 2 +-
sphinx/search/fi.py | 2 +-
sphinx/search/fr.py | 2 +-
sphinx/search/hu.py | 2 +-
sphinx/search/it.py | 2 +-
sphinx/search/nl.py | 2 +-
sphinx/search/no.py | 2 +-
sphinx/search/pt.py | 2 +-
sphinx/search/ro.py | 2 +-
sphinx/search/ru.py | 2 +-
sphinx/search/sv.py | 2 +-
sphinx/search/tr.py | 2 +-
sphinx/search/zh.py | 188 +-------------------
tests/js/fixtures/cpp/searchindex.js | 2 +-
tests/js/fixtures/multiterm/searchindex.js | 2 +-
tests/js/fixtures/partial/searchindex.js | 2 +-
tests/js/fixtures/titles/searchindex.js | 2 +-
tests/test_search.py | 12 +-
21 files changed, 26 insertions(+), 402 deletions(-)
diff --git a/sphinx/search/da.py b/sphinx/search/da.py
index 3eb997af1c3..e632a97fb78 100644
--- a/sphinx/search/da.py
+++ b/sphinx/search/da.py
@@ -1,4 +1,4 @@
-"""Danish search language: includes the JS Danish stemmer."""
+"""Danish search language."""
from __future__ import annotations
diff --git a/sphinx/search/de.py b/sphinx/search/de.py
index 6875b9c7535..278d78fb487 100644
--- a/sphinx/search/de.py
+++ b/sphinx/search/de.py
@@ -1,4 +1,4 @@
-"""German search language: includes the JS German stemmer."""
+"""German search language."""
from __future__ import annotations
diff --git a/sphinx/search/en.py b/sphinx/search/en.py
index 30324c8832a..273a25a0272 100644
--- a/sphinx/search/en.py
+++ b/sphinx/search/en.py
@@ -1,4 +1,4 @@
-"""English search language: includes the JS porter stemmer."""
+"""English search language."""
from __future__ import annotations
@@ -7,202 +7,16 @@
from sphinx.search import SearchLanguage
from sphinx.search._stopwords.en import ENGLISH_STOPWORDS
-js_porter_stemmer = """
-/**
- * Porter Stemmer
- */
-var Stemmer = function() {
-
- var step2list = {
- ational: 'ate',
- tional: 'tion',
- enci: 'ence',
- anci: 'ance',
- izer: 'ize',
- bli: 'ble',
- alli: 'al',
- entli: 'ent',
- eli: 'e',
- ousli: 'ous',
- ization: 'ize',
- ation: 'ate',
- ator: 'ate',
- alism: 'al',
- iveness: 'ive',
- fulness: 'ful',
- ousness: 'ous',
- aliti: 'al',
- iviti: 'ive',
- biliti: 'ble',
- logi: 'log'
- };
-
- var step3list = {
- icate: 'ic',
- ative: '',
- alize: 'al',
- iciti: 'ic',
- ical: 'ic',
- ful: '',
- ness: ''
- };
-
- var c = "[^aeiou]"; // consonant
- var v = "[aeiouy]"; // vowel
- var C = c + "[^aeiouy]*"; // consonant sequence
- var V = v + "[aeiou]*"; // vowel sequence
-
- var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0
- var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1
- var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1
- var s_v = "^(" + C + ")?" + v; // vowel in stem
-
- this.stemWord = function (w) {
- var stem;
- var suffix;
- var firstch;
- var origword = w;
-
- if (w.length < 3)
- return w;
-
- var re;
- var re2;
- var re3;
- var re4;
-
- firstch = w.substr(0,1);
- if (firstch == "y")
- w = firstch.toUpperCase() + w.substr(1);
-
- // Step 1a
- re = /^(.+?)(ss|i)es$/;
- re2 = /^(.+?)([^s])s$/;
-
- if (re.test(w))
- w = w.replace(re,"$1$2");
- else if (re2.test(w))
- w = w.replace(re2,"$1$2");
-
- // Step 1b
- re = /^(.+?)eed$/;
- re2 = /^(.+?)(ed|ing)$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- re = new RegExp(mgr0);
- if (re.test(fp[1])) {
- re = /.$/;
- w = w.replace(re,"");
- }
- }
- else if (re2.test(w)) {
- var fp = re2.exec(w);
- stem = fp[1];
- re2 = new RegExp(s_v);
- if (re2.test(stem)) {
- w = stem;
- re2 = /(at|bl|iz)$/;
- re3 = new RegExp("([^aeiouylsz])\\\\1$");
- re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
- if (re2.test(w))
- w = w + "e";
- else if (re3.test(w)) {
- re = /.$/;
- w = w.replace(re,"");
- }
- else if (re4.test(w))
- w = w + "e";
- }
- }
-
- // Step 1c
- re = /^(.+?)y$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- stem = fp[1];
- re = new RegExp(s_v);
- if (re.test(stem))
- w = stem + "i";
- }
-
- // Step 2
- re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|\
-ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- stem = fp[1];
- suffix = fp[2];
- re = new RegExp(mgr0);
- if (re.test(stem))
- w = stem + step2list[suffix];
- }
-
- // Step 3
- re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- stem = fp[1];
- suffix = fp[2];
- re = new RegExp(mgr0);
- if (re.test(stem))
- w = stem + step3list[suffix];
- }
-
- // Step 4
- re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|\
-iti|ous|ive|ize)$/;
- re2 = /^(.+?)(s|t)(ion)$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- stem = fp[1];
- re = new RegExp(mgr1);
- if (re.test(stem))
- w = stem;
- }
- else if (re2.test(w)) {
- var fp = re2.exec(w);
- stem = fp[1] + fp[2];
- re2 = new RegExp(mgr1);
- if (re2.test(stem))
- w = stem;
- }
-
- // Step 5
- re = /^(.+?)e$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- stem = fp[1];
- re = new RegExp(mgr1);
- re2 = new RegExp(meq1);
- re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
- if (re.test(stem) || (re2.test(stem) && !(re3.test(stem))))
- w = stem;
- }
- re = /ll$/;
- re2 = new RegExp(mgr1);
- if (re.test(w) && re2.test(w)) {
- re = /.$/;
- w = w.replace(re,"");
- }
-
- // and turn initial Y back to y
- if (firstch == "y")
- w = firstch.toLowerCase() + w.substr(1);
- return w;
- }
-}
-"""
-
class SearchEnglish(SearchLanguage):
lang = 'en'
language_name = 'English'
- js_stemmer_code = js_porter_stemmer
+ js_stemmer_rawcode = 'english-stemmer.js'
stopwords = ENGLISH_STOPWORDS
def __init__(self, options: dict[str, str]) -> None:
super().__init__(options)
- self.stemmer = snowballstemmer.stemmer('porter')
+ self.stemmer = snowballstemmer.stemmer('english')
def stem(self, word: str) -> str:
return self.stemmer.stemWord(word.lower())
diff --git a/sphinx/search/es.py b/sphinx/search/es.py
index d11937ad0c6..c1b08ab1bad 100644
--- a/sphinx/search/es.py
+++ b/sphinx/search/es.py
@@ -1,4 +1,4 @@
-"""Spanish search language: includes the JS Spanish stemmer."""
+"""Spanish search language."""
from __future__ import annotations
diff --git a/sphinx/search/fi.py b/sphinx/search/fi.py
index cd044b71a80..01c7e0ba126 100644
--- a/sphinx/search/fi.py
+++ b/sphinx/search/fi.py
@@ -1,4 +1,4 @@
-"""Finnish search language: includes the JS Finnish stemmer."""
+"""Finnish search language."""
from __future__ import annotations
diff --git a/sphinx/search/fr.py b/sphinx/search/fr.py
index 11a2c70f5dc..e79976dfea1 100644
--- a/sphinx/search/fr.py
+++ b/sphinx/search/fr.py
@@ -1,4 +1,4 @@
-"""French search language: includes the JS French stemmer."""
+"""French search language."""
from __future__ import annotations
diff --git a/sphinx/search/hu.py b/sphinx/search/hu.py
index e86159cb604..254ad488d78 100644
--- a/sphinx/search/hu.py
+++ b/sphinx/search/hu.py
@@ -1,4 +1,4 @@
-"""Hungarian search language: includes the JS Hungarian stemmer."""
+"""Hungarian search language."""
from __future__ import annotations
diff --git a/sphinx/search/it.py b/sphinx/search/it.py
index a7052c9ae82..d8a583f9d85 100644
--- a/sphinx/search/it.py
+++ b/sphinx/search/it.py
@@ -1,4 +1,4 @@
-"""Italian search language: includes the JS Italian stemmer."""
+"""Italian search language."""
from __future__ import annotations
diff --git a/sphinx/search/nl.py b/sphinx/search/nl.py
index 0692920efc4..de1a7d1f17d 100644
--- a/sphinx/search/nl.py
+++ b/sphinx/search/nl.py
@@ -1,4 +1,4 @@
-"""Dutch search language: includes the JS porter stemmer."""
+"""Dutch search language."""
from __future__ import annotations
diff --git a/sphinx/search/no.py b/sphinx/search/no.py
index a2bb88ee9a4..45b202f0926 100644
--- a/sphinx/search/no.py
+++ b/sphinx/search/no.py
@@ -1,4 +1,4 @@
-"""Norwegian search language: includes the JS Norwegian stemmer."""
+"""Norwegian search language."""
from __future__ import annotations
diff --git a/sphinx/search/pt.py b/sphinx/search/pt.py
index 9c5dfa05774..a10e4cd2b53 100644
--- a/sphinx/search/pt.py
+++ b/sphinx/search/pt.py
@@ -1,4 +1,4 @@
-"""Portuguese search language: includes the JS Portuguese stemmer."""
+"""Portuguese search language."""
from __future__ import annotations
diff --git a/sphinx/search/ro.py b/sphinx/search/ro.py
index 6aebdc13249..e9d29602f4e 100644
--- a/sphinx/search/ro.py
+++ b/sphinx/search/ro.py
@@ -1,4 +1,4 @@
-"""Romanian search language: includes the JS Romanian stemmer."""
+"""Romanian search language."""
from __future__ import annotations
diff --git a/sphinx/search/ru.py b/sphinx/search/ru.py
index 52ff533832e..584b19b9f79 100644
--- a/sphinx/search/ru.py
+++ b/sphinx/search/ru.py
@@ -1,4 +1,4 @@
-"""Russian search language: includes the JS Russian stemmer."""
+"""Russian search language."""
from __future__ import annotations
diff --git a/sphinx/search/sv.py b/sphinx/search/sv.py
index bcfac2ba528..8b138ebdf80 100644
--- a/sphinx/search/sv.py
+++ b/sphinx/search/sv.py
@@ -1,4 +1,4 @@
-"""Swedish search language: includes the JS Swedish stemmer."""
+"""Swedish search language."""
from __future__ import annotations
diff --git a/sphinx/search/tr.py b/sphinx/search/tr.py
index 674264f1928..40131f0e372 100644
--- a/sphinx/search/tr.py
+++ b/sphinx/search/tr.py
@@ -1,4 +1,4 @@
-"""Turkish search language: includes the JS Turkish stemmer."""
+"""Turkish search language."""
from __future__ import annotations
diff --git a/sphinx/search/zh.py b/sphinx/search/zh.py
index d22f765d520..464e1e1fd47 100644
--- a/sphinx/search/zh.py
+++ b/sphinx/search/zh.py
@@ -33,199 +33,13 @@ def cut_for_search(sentence: str, HMM: bool = True) -> Iterator[str]:
)
del jieba
-js_porter_stemmer = """
-/**
- * Porter Stemmer
- */
-var Stemmer = function() {
-
- var step2list = {
- ational: 'ate',
- tional: 'tion',
- enci: 'ence',
- anci: 'ance',
- izer: 'ize',
- bli: 'ble',
- alli: 'al',
- entli: 'ent',
- eli: 'e',
- ousli: 'ous',
- ization: 'ize',
- ation: 'ate',
- ator: 'ate',
- alism: 'al',
- iveness: 'ive',
- fulness: 'ful',
- ousness: 'ous',
- aliti: 'al',
- iviti: 'ive',
- biliti: 'ble',
- logi: 'log'
- };
-
- var step3list = {
- icate: 'ic',
- ative: '',
- alize: 'al',
- iciti: 'ic',
- ical: 'ic',
- ful: '',
- ness: ''
- };
-
- var c = "[^aeiou]"; // consonant
- var v = "[aeiouy]"; // vowel
- var C = c + "[^aeiouy]*"; // consonant sequence
- var V = v + "[aeiou]*"; // vowel sequence
-
- var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0
- var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1
- var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1
- var s_v = "^(" + C + ")?" + v; // vowel in stem
-
- this.stemWord = function (w) {
- var stem;
- var suffix;
- var firstch;
- var origword = w;
-
- if (w.length < 3)
- return w;
-
- var re;
- var re2;
- var re3;
- var re4;
-
- firstch = w.substr(0,1);
- if (firstch == "y")
- w = firstch.toUpperCase() + w.substr(1);
-
- // Step 1a
- re = /^(.+?)(ss|i)es$/;
- re2 = /^(.+?)([^s])s$/;
-
- if (re.test(w))
- w = w.replace(re,"$1$2");
- else if (re2.test(w))
- w = w.replace(re2,"$1$2");
-
- // Step 1b
- re = /^(.+?)eed$/;
- re2 = /^(.+?)(ed|ing)$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- re = new RegExp(mgr0);
- if (re.test(fp[1])) {
- re = /.$/;
- w = w.replace(re,"");
- }
- }
- else if (re2.test(w)) {
- var fp = re2.exec(w);
- stem = fp[1];
- re2 = new RegExp(s_v);
- if (re2.test(stem)) {
- w = stem;
- re2 = /(at|bl|iz)$/;
- re3 = new RegExp("([^aeiouylsz])\\\\1$");
- re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
- if (re2.test(w))
- w = w + "e";
- else if (re3.test(w)) {
- re = /.$/;
- w = w.replace(re,"");
- }
- else if (re4.test(w))
- w = w + "e";
- }
- }
-
- // Step 1c
- re = /^(.+?)y$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- stem = fp[1];
- re = new RegExp(s_v);
- if (re.test(stem))
- w = stem + "i";
- }
-
- // Step 2
- re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|\
-ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- stem = fp[1];
- suffix = fp[2];
- re = new RegExp(mgr0);
- if (re.test(stem))
- w = stem + step2list[suffix];
- }
-
- // Step 3
- re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- stem = fp[1];
- suffix = fp[2];
- re = new RegExp(mgr0);
- if (re.test(stem))
- w = stem + step3list[suffix];
- }
-
- // Step 4
- re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|\
-iti|ous|ive|ize)$/;
- re2 = /^(.+?)(s|t)(ion)$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- stem = fp[1];
- re = new RegExp(mgr1);
- if (re.test(stem))
- w = stem;
- }
- else if (re2.test(w)) {
- var fp = re2.exec(w);
- stem = fp[1] + fp[2];
- re2 = new RegExp(mgr1);
- if (re2.test(stem))
- w = stem;
- }
-
- // Step 5
- re = /^(.+?)e$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- stem = fp[1];
- re = new RegExp(mgr1);
- re2 = new RegExp(meq1);
- re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
- if (re.test(stem) || (re2.test(stem) && !(re3.test(stem))))
- w = stem;
- }
- re = /ll$/;
- re2 = new RegExp(mgr1);
- if (re.test(w) && re2.test(w)) {
- re = /.$/;
- w = w.replace(re,"");
- }
-
- // and turn initial Y back to y
- if (firstch == "y")
- w = firstch.toLowerCase() + w.substr(1);
- return w;
- }
-}
-"""
-
class SearchChinese(SearchLanguage):
"""Chinese search implementation"""
lang = 'zh'
language_name = 'Chinese'
- js_stemmer_code = js_porter_stemmer
+ js_stemmer_rawcode = 'english-stemmer.js'
stopwords = ENGLISH_STOPWORDS
latin1_letters = re.compile(r'[a-zA-Z0-9_]+')
diff --git a/tests/js/fixtures/cpp/searchindex.js b/tests/js/fixtures/cpp/searchindex.js
index 42adb88db92..81f14cc1895 100644
--- a/tests/js/fixtures/cpp/searchindex.js
+++ b/tests/js/fixtures/cpp/searchindex.js
@@ -1 +1 @@
-Search.setIndex({"alltitles":{},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{"sphinx (c++ class)":[[0,"_CPPv46Sphinx",false]]},"objects":{"":[[0,0,1,"_CPPv46Sphinx","Sphinx"]]},"objnames":{"0":["cpp","class","C++ class"]},"objtypes":{"0":"cpp:class"},"terms":{"The":0,"becaus":0,"c":0,"can":0,"cardin":0,"challeng":0,"charact":0,"class":0,"descript":0,"drop":0,"engin":0,"fixtur":0,"frequent":0,"gener":0,"i":0,"index":0,"inflat":0,"mathemat":0,"occur":0,"often":0,"project":0,"punctuat":0,"queri":0,"relat":0,"sampl":0,"search":0,"size":0,"sphinx":0,"term":0,"thei":0,"thi":0,"token":0,"us":0,"web":0,"would":0},"titles":["<no title>"],"titleterms":{}})
\ No newline at end of file
+Search.setIndex({"alltitles":{},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{"sphinx (c++ class)":[[0,"_CPPv46Sphinx",false]]},"objects":{"":[[0,0,1,"_CPPv46Sphinx","Sphinx"]]},"objnames":{"0":["cpp","class","C++ class"]},"objtypes":{"0":"cpp:class"},"terms":{"The":0,"This":0,"becaus":0,"c":0,"can":0,"cardin":0,"challeng":0,"charact":0,"class":0,"descript":0,"drop":0,"engin":0,"fixtur":0,"frequent":0,"generat":0,"index":0,"inflat":0,"mathemat":0,"occur":0,"often":0,"project":0,"punctuat":0,"queri":0,"relat":0,"sampl":0,"search":0,"size":0,"sphinx":0,"term":0,"token":0,"use":0,"web":0,"would":0},"titles":["<no title>"],"titleterms":{}})
\ No newline at end of file
diff --git a/tests/js/fixtures/multiterm/searchindex.js b/tests/js/fixtures/multiterm/searchindex.js
index 6f27d39329b..2f3f5ec39a1 100644
--- a/tests/js/fixtures/multiterm/searchindex.js
+++ b/tests/js/fixtures/multiterm/searchindex.js
@@ -1 +1 @@
-Search.setIndex({"alltitles":{"Main Page":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"At":0,"adjac":0,"all":0,"an":0,"appear":0,"applic":0,"ar":0,"built":0,"can":0,"check":0,"contain":0,"do":0,"document":0,"doesn":0,"each":0,"fixtur":0,"format":0,"function":0,"futur":0,"html":0,"i":0,"includ":0,"match":0,"messag":0,"multipl":0,"multiterm":0,"order":0,"other":0,"output":0,"perform":0,"perhap":0,"phrase":0,"project":0,"queri":0,"requir":0,"same":0,"search":0,"successfulli":0,"support":0,"t":0,"term":0,"test":0,"thi":0,"time":0,"us":0,"when":0,"write":0},"titles":["Main Page"],"titleterms":{"main":0,"page":0}})
\ No newline at end of file
+Search.setIndex({"alltitles":{"Main Page":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"At":0,"This":0,"adjac":0,"all":0,"an":0,"appear":0,"applic":0,"built":0,"can":0,"check":0,"contain":0,"do":0,"document":0,"doesn":0,"each":0,"fixtur":0,"format":0,"function":0,"futur":0,"html":0,"includ":0,"match":0,"messag":0,"multipl":0,"multiterm":0,"order":0,"other":0,"output":0,"perform":0,"perhap":0,"phrase":0,"project":0,"queri":0,"requir":0,"same":0,"search":0,"success":0,"support":0,"t":0,"term":0,"test":0,"time":0,"use":0,"when":0,"write":0},"titles":["Main Page"],"titleterms":{"main":0,"page":0}})
\ No newline at end of file
diff --git a/tests/js/fixtures/partial/searchindex.js b/tests/js/fixtures/partial/searchindex.js
index cd9dbabb149..5eb299eea63 100644
--- a/tests/js/fixtures/partial/searchindex.js
+++ b/tests/js/fixtures/partial/searchindex.js
@@ -1 +1 @@
-Search.setIndex({"alltitles":{"sphinx_utils module":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"ar":0,"both":0,"built":0,"confirm":0,"document":0,"function":0,"html":0,"i":0,"includ":0,"input":0,"javascript":0,"match":0,"partial":0,"possibl":0,"project":0,"provid":0,"restructuredtext":0,"sampl":0,"search":0,"should":0,"term":0,"thi":0,"titl":0,"us":0,"when":0},"titles":["sphinx_utils module"],"titleterms":{"modul":0,"sphinx_util":0}})
\ No newline at end of file
+Search.setIndex({"alltitles":{"sphinx_utils module":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"This":0,"both":0,"built":0,"confirm":0,"document":0,"function":0,"html":0,"includ":0,"input":0,"javascript":0,"match":0,"partial":0,"possibl":0,"project":0,"provid":0,"restructuredtext":0,"sampl":0,"search":0,"should":0,"term":0,"titl":0,"use":0,"when":0},"titles":["sphinx_utils module"],"titleterms":{"modul":0,"sphinx_util":0}})
\ No newline at end of file
diff --git a/tests/js/fixtures/titles/searchindex.js b/tests/js/fixtures/titles/searchindex.js
index cb9abd1da07..fa59e11c884 100644
--- a/tests/js/fixtures/titles/searchindex.js
+++ b/tests/js/fixtures/titles/searchindex.js
@@ -1 +1 @@
-Search.setIndex({"alltitles":{"Main Page":[[0,null]],"Relevance":[[0,"relevance"],[1,null]],"Result Scoring":[[0,"result-scoring"]]},"docnames":["index","relevance"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst","relevance.rst"],"indexentries":{"example (class in relevance)":[[0,"relevance.Example",false]],"module":[[0,"module-relevance",false]],"relevance":[[0,"index-1",false],[0,"module-relevance",false]],"relevance (relevance.example attribute)":[[0,"relevance.Example.relevance",false]],"scoring":[[0,"index-0",true]]},"objects":{"":[[0,0,0,"-","relevance"]],"relevance":[[0,1,1,"","Example"]],"relevance.Example":[[0,2,1,"","relevance"]]},"objnames":{"0":["py","module","Python module"],"1":["py","class","Python class"],"2":["py","attribute","Python attribute"]},"objtypes":{"0":"py:module","1":"py:class","2":"py:attribute"},"terms":{"":[0,1],"A":1,"By":0,"For":[0,1],"In":[0,1],"against":0,"align":0,"also":1,"an":0,"answer":0,"appear":1,"ar":1,"area":0,"ask":0,"assign":0,"attempt":0,"attribut":0,"both":0,"built":1,"can":[0,1],"class":0,"code":[0,1],"collect":0,"consid":1,"contain":0,"context":0,"corpu":1,"could":1,"demonstr":0,"describ":1,"detail":1,"determin":[0,1],"docstr":0,"document":[0,1],"domain":1,"dure":0,"engin":0,"evalu":0,"exampl":[0,1],"extract":0,"feedback":0,"find":0,"found":0,"from":0,"function":1,"ha":1,"handl":0,"happen":1,"head":0,"help":0,"highli":[0,1],"how":0,"i":[0,1],"improv":0,"inform":0,"intend":0,"issu":[0,1],"itself":1,"knowledg":0,"languag":1,"less":1,"like":[0,1],"mani":0,"match":0,"mention":1,"more":0,"name":[0,1],"numer":0,"object":0,"often":0,"one":[0,1],"onli":[0,1],"order":0,"other":0,"over":0,"page":1,"part":1,"particular":0,"present":0,"printf":1,"program":1,"project":0,"queri":[0,1],"question":0,"re":0,"rel":0,"research":0,"result":1,"retriev":0,"sai":0,"same":1,"search":[0,1],"seem":0,"softwar":1,"some":1,"sphinx":0,"straightforward":1,"subject":0,"subsect":0,"term":[0,1],"test":0,"text":0,"than":[0,1],"thei":0,"them":0,"thi":0,"time":0,"titl":0,"two":0,"typic":0,"us":0,"user":[0,1],"we":[0,1],"when":0,"whether":1,"which":0,"within":0,"word":0,"would":[0,1]},"titles":["Main Page","Relevance"],"titleterms":{"main":0,"page":0,"relev":[0,1],"result":0,"score":0}})
\ No newline at end of file
+Search.setIndex({"alltitles":{"Main Page":[[0,null]],"Relevance":[[0,"relevance"],[1,null]],"Result Scoring":[[0,"result-scoring"]]},"docnames":["index","relevance"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst","relevance.rst"],"indexentries":{"example (class in relevance)":[[0,"relevance.Example",false]],"module":[[0,"module-relevance",false]],"relevance":[[0,"index-1",false],[0,"module-relevance",false]],"relevance (relevance.example attribute)":[[0,"relevance.Example.relevance",false]],"scoring":[[0,"index-0",true]]},"objects":{"":[[0,0,0,"-","relevance"]],"relevance":[[0,1,1,"","Example"]],"relevance.Example":[[0,2,1,"","relevance"]]},"objnames":{"0":["py","module","Python module"],"1":["py","class","Python class"],"2":["py","attribute","Python attribute"]},"objtypes":{"0":"py:module","1":"py:class","2":"py:attribute"},"terms":{"A":1,"By":0,"For":[0,1],"In":[0,1],"This":0,"against":0,"align":0,"also":1,"an":0,"answer":0,"appear":1,"area":0,"ask":0,"assign":0,"attempt":0,"attribut":0,"both":0,"built":1,"can":[0,1],"class":0,"code":[0,1],"collect":0,"consid":1,"contain":0,"context":0,"corpus":1,"could":1,"demonstr":0,"describ":1,"detail":1,"determin":[0,1],"docstr":0,"document":[0,1],"domain":1,"dure":0,"engin":0,"evalu":0,"exampl":[0,1],"extract":0,"feedback":0,"find":0,"found":0,"from":0,"function":1,"handl":0,"happen":1,"has":1,"head":0,"help":0,"high":[0,1],"how":0,"improv":0,"inform":0,"intend":0,"issu":[0,1],"itself":1,"knowledg":0,"languag":1,"less":1,"like":[0,1],"mani":0,"match":0,"mention":1,"more":0,"name":[0,1],"numer":0,"object":0,"often":0,"one":[0,1],"onli":[0,1],"order":0,"other":0,"over":0,"page":1,"part":1,"particular":0,"present":0,"printf":1,"program":1,"project":0,"queri":[0,1],"question":0,"re":0,"relat":0,"research":0,"result":1,"retriev":0,"s":[0,1],"same":1,"say":0,"search":[0,1],"seem":0,"softwar":1,"some":1,"sphinx":0,"straightforward":1,"subject":0,"subsect":0,"term":[0,1],"test":0,"text":0,"than":[0,1],"them":0,"time":0,"titl":0,"two":0,"typic":0,"use":0,"user":[0,1],"we":[0,1],"when":0,"whether":1,"which":0,"within":0,"word":0,"would":[0,1]},"titles":["Main Page","Relevance"],"titleterms":{"main":0,"page":0,"relev":[0,1],"result":0,"score":0}})
\ No newline at end of file
diff --git a/tests/test_search.py b/tests/test_search.py
index 22fa6ab7616..a8ad186a533 100644
--- a/tests/test_search.py
+++ b/tests/test_search.py
@@ -107,7 +107,7 @@ def test_meta_keys_are_handled_for_language_en(app: SphinxTestApp) -> None:
searchindex = load_searchindex(app.outdir / 'searchindex.js')
assert not is_registered_term(searchindex, 'thisnoteith')
assert is_registered_term(searchindex, 'thisonetoo')
- assert is_registered_term(searchindex, 'findthiskei')
+ assert is_registered_term(searchindex, 'findthiskey')
assert is_registered_term(searchindex, 'thistoo')
assert not is_registered_term(searchindex, 'onlygerman')
assert is_registered_term(searchindex, 'notgerman')
@@ -125,7 +125,7 @@ def test_meta_keys_are_handled_for_language_de(app: SphinxTestApp) -> None:
searchindex = load_searchindex(app.outdir / 'searchindex.js')
assert not is_registered_term(searchindex, 'thisnoteith')
assert is_registered_term(searchindex, 'thisonetoo')
- assert not is_registered_term(searchindex, 'findthiskei')
+ assert not is_registered_term(searchindex, 'findthiskey')
assert not is_registered_term(searchindex, 'thistoo')
assert is_registered_term(searchindex, 'onlygerman')
assert not is_registered_term(searchindex, 'notgerman')
@@ -144,7 +144,7 @@ def test_stemmer(app: SphinxTestApp) -> None:
app.build(force_all=True)
searchindex = load_searchindex(app.outdir / 'searchindex.js')
print(searchindex)
- assert is_registered_term(searchindex, 'findthisstemmedkei')
+ assert is_registered_term(searchindex, 'findthisstemmedkey')
assert is_registered_term(searchindex, 'intern')
@@ -219,7 +219,6 @@ def test_IndexBuilder():
# dictionaries below may be iterated in arbitrary order by Python at
# runtime.
assert index._mapping == {
- 'ar': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'},
'fermion': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'},
'comment': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'},
'non': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'},
@@ -250,7 +249,6 @@ def test_IndexBuilder():
},
'objtypes': {0: 'dummy1:objtype1', 1: 'dummy2:objtype1'},
'terms': {
- 'ar': [0, 1, 2, 3],
'comment': [0, 1, 2, 3],
'fermion': [0, 1, 2, 3],
'index': [0, 1, 2, 3],
@@ -309,7 +307,6 @@ def test_IndexBuilder():
'docname2_2': 'filename2_2',
}
assert index._mapping == {
- 'ar': {'docname1_2', 'docname2_2'},
'fermion': {'docname1_2', 'docname2_2'},
'comment': {'docname1_2', 'docname2_2'},
'non': {'docname1_2', 'docname2_2'},
@@ -338,7 +335,6 @@ def test_IndexBuilder():
},
'objtypes': {0: 'dummy1:objtype1', 1: 'dummy2:objtype1'},
'terms': {
- 'ar': [0, 1],
'comment': [0, 1],
'fermion': [0, 1],
'index': [0, 1],
@@ -466,7 +462,7 @@ def assert_is_sorted(
assert_is_sorted(child, f'{path}[{i}]')
-@pytest.mark.parametrize('directory', JAVASCRIPT_TEST_ROOTS)
+@pytest.mark.parametrize('directory', JAVASCRIPT_TEST_ROOTS, ids=lambda p: p.name)
def test_check_js_search_indexes(make_app, sphinx_test_tempdir, directory):
app = make_app(
'html',
From 75400aff91758c3f605a81a8d2afb4ff6a304d49 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Mon, 19 May 2025 23:49:41 +0100
Subject: [PATCH 078/466] Update stemming and Snowball (#13561)
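Regenerate the JavaScript stemmers and stopword lists from Snowball
3.0.1 via the new ``utils/generate_snowball.py`` helper, and simplify
``word_filter()``: digits and stopwords are excluded, everything else
is indexed. A minimal sketch of the new filter logic (the stopword
set here is illustrative, not the real generated list)::

    STOPWORDS = frozenset({'the', 'and', 'of'})

    def word_filter(word: str) -> bool:
        # Digits never make useful search terms; stopwords are now
        # filtered for every script, not only when ord(word[0]) < 256.
        return not word.isdigit() and word not in STOPWORDS

    word_filter('2024')    # False -- all-digit token is dropped
    word_filter('the')     # False -- stopword
    word_filter('sphinx')  # True  -- indexed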
---
CHANGES.rst | 2 +
doc/internals/contributing.rst | 12 +-
sphinx/search/__init__.py | 23 +-
sphinx/search/_stopwords/da.py | 3 +
sphinx/search/_stopwords/da.txt | 9 +-
sphinx/search/_stopwords/de.py | 3 +
sphinx/search/_stopwords/de.txt | 9 +-
sphinx/search/_stopwords/en.py | 148 +++++++++-
sphinx/search/_stopwords/en.txt | 310 +++++++++++++++++++++
sphinx/search/_stopwords/es.py | 3 +
sphinx/search/_stopwords/es.txt | 11 +-
sphinx/search/_stopwords/fi.py | 4 +
sphinx/search/_stopwords/fi.txt | 4 +-
sphinx/search/_stopwords/fr.py | 13 +-
sphinx/search/_stopwords/fr.txt | 25 +-
sphinx/search/_stopwords/hu.py | 3 +
sphinx/search/_stopwords/hu.txt | 4 +-
sphinx/search/_stopwords/it.py | 4 +
sphinx/search/_stopwords/it.txt | 6 +-
sphinx/search/_stopwords/nl.py | 3 +
sphinx/search/_stopwords/nl.txt | 12 +-
sphinx/search/_stopwords/no.py | 3 +
sphinx/search/_stopwords/no.txt | 16 +-
sphinx/search/_stopwords/pt.py | 3 +
sphinx/search/_stopwords/pt.txt | 11 +-
sphinx/search/_stopwords/ru.py | 3 +
sphinx/search/_stopwords/ru.txt | 11 +-
sphinx/search/_stopwords/sv.py | 5 +-
sphinx/search/_stopwords/sv.txt | 13 +-
tests/js/fixtures/cpp/searchindex.js | 2 +-
tests/js/fixtures/multiterm/searchindex.js | 2 +-
tests/js/fixtures/partial/searchindex.js | 2 +-
tests/js/fixtures/titles/searchindex.js | 2 +-
utils/generate_snowball.py | 131 +++++++++
34 files changed, 751 insertions(+), 64 deletions(-)
create mode 100644 sphinx/search/_stopwords/en.txt
create mode 100755 utils/generate_snowball.py
diff --git a/CHANGES.rst b/CHANGES.rst
index d26a93871a5..c0ed8089a60 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -20,6 +20,8 @@ Features added
``linkcheck_allowed_redirects = {}``.
Patch by Adam Turner.
* #13497: Support C domain objects in the table of contents.
+* #13535: html search: Update to the latest version of Snowball (v3.0.1).
+ Patch by Adam Turner.
Bugs fixed
----------
diff --git a/doc/internals/contributing.rst b/doc/internals/contributing.rst
index 4b8ca84a945..de4224d7bc3 100644
--- a/doc/internals/contributing.rst
+++ b/doc/internals/contributing.rst
@@ -337,13 +337,15 @@ Updating generated files
------------------------
* JavaScript stemming algorithms in :file:`sphinx/search/non-minified-js/*.js`
- are generated using `snowball <https://github.com/snowballstem/snowball>`_
- by cloning the repository, executing ``make dist_libstemmer_js`` and then
- unpacking the tarball which is generated in :file:`dist` directory.
+ and stopword files in :file:`sphinx/search/_stopwords/`
+ are generated from the `Snowball project`_
+ by running :file:`utils/generate_snowball.py`.
Minified files in :file:`sphinx/search/minified-js/*.js` are generated from
- non-minified ones using :program:`uglifyjs` (installed via npm), with ``-m``
- option to enable mangling.
+ non-minified ones using :program:`uglifyjs` (installed via npm).
+ See :file:`sphinx/search/minified-js/README.rst`.
+
+ .. _Snowball project: https://snowballstem.org/
* The :file:`searchindex.js` files found in
the :file:`tests/js/fixtures/*` directories
diff --git a/sphinx/search/__init__.py b/sphinx/search/__init__.py
index 1cb05bea0e2..cc997bf6456 100644
--- a/sphinx/search/__init__.py
+++ b/sphinx/search/__init__.py
@@ -117,10 +117,7 @@ def word_filter(self, word: str) -> bool:
"""Return true if the target word should be registered in the search index.
This method is called after stemming.
"""
- return len(word) == 0 or not (
- ((len(word) < 3) and (12353 < ord(word[0]) < 12436))
- or (ord(word[0]) < 256 and (word in self.stopwords))
- )
+ return not word.isdigit() and word not in self.stopwords
# SearchEnglish imported after SearchLanguage is defined due to circular import
@@ -583,17 +580,17 @@ def get_js_stemmer_rawcode(self) -> str | None:
def get_js_stemmer_code(self) -> str:
"""Returns JS code that will be inserted into language_data.js."""
- if self.lang.js_stemmer_rawcode:
- base_js_path = _NON_MINIFIED_JS_PATH / 'base-stemmer.js'
- language_js_path = _NON_MINIFIED_JS_PATH / self.lang.js_stemmer_rawcode
- base_js = base_js_path.read_text(encoding='utf-8')
- language_js = language_js_path.read_text(encoding='utf-8')
- return (
- f'{base_js}\n{language_js}\nStemmer = {self.lang.language_name}Stemmer;'
- )
- else:
+ if not self.lang.js_stemmer_rawcode:
return self.lang.js_stemmer_code
+ base_js_path = _MINIFIED_JS_PATH / 'base-stemmer.js'
+ language_js_path = _MINIFIED_JS_PATH / self.lang.js_stemmer_rawcode
+ return '\n'.join((
+ base_js_path.read_text(encoding='utf-8'),
+ language_js_path.read_text(encoding='utf-8'),
+ f'window.Stemmer = {self.lang.language_name}Stemmer;',
+ ))
+
def _feed_visit_nodes(
node: nodes.Node,
diff --git a/sphinx/search/_stopwords/da.py b/sphinx/search/_stopwords/da.py
index c31a51c6df2..de8fa937b8a 100644
--- a/sphinx/search/_stopwords/da.py
+++ b/sphinx/search/_stopwords/da.py
@@ -1,3 +1,6 @@
+# automatically generated by utils/generate_snowball.py
+# from https://snowballstem.org/algorithms/danish/stop.txt
+
from __future__ import annotations
DANISH_STOPWORDS = frozenset({
diff --git a/sphinx/search/_stopwords/da.txt b/sphinx/search/_stopwords/da.txt
index 6f2bd01afc2..37052042642 100644
--- a/sphinx/search/_stopwords/da.txt
+++ b/sphinx/search/_stopwords/da.txt
@@ -1,4 +1,11 @@
-| source: https://snowballstem.org/algorithms/danish/stop.txt
+
+ | A Danish stop word list. Comments begin with vertical bar. Each stop
+ | word is at the start of a line.
+
+ | This is a ranked list (commonest to rarest) of stopwords derived from
+ | a large text sample.
+
+
og | and
i | in
jeg | I
diff --git a/sphinx/search/_stopwords/de.py b/sphinx/search/_stopwords/de.py
index 26ee3322ff3..d37e2105288 100644
--- a/sphinx/search/_stopwords/de.py
+++ b/sphinx/search/_stopwords/de.py
@@ -1,3 +1,6 @@
+# automatically generated by utils/generate_snowball.py
+# from https://snowballstem.org/algorithms/german/stop.txt
+
from __future__ import annotations
GERMAN_STOPWORDS = frozenset({
diff --git a/sphinx/search/_stopwords/de.txt b/sphinx/search/_stopwords/de.txt
index 94c4777bd05..c8935ae61c7 100644
--- a/sphinx/search/_stopwords/de.txt
+++ b/sphinx/search/_stopwords/de.txt
@@ -1,4 +1,11 @@
-|source: https://snowballstem.org/algorithms/german/stop.txt
+
+ | A German stop word list. Comments begin with vertical bar. Each stop
+ | word is at the start of a line.
+
+ | The number of forms in this list is reduced significantly by passing it
+ | through the German stemmer.
+
+
aber | but
alle | all
diff --git a/sphinx/search/_stopwords/en.py b/sphinx/search/_stopwords/en.py
index 01bac4cf14e..a4d9f800a02 100644
--- a/sphinx/search/_stopwords/en.py
+++ b/sphinx/search/_stopwords/en.py
@@ -1,37 +1,181 @@
+# automatically generated by utils/generate_snowball.py
+# from https://snowballstem.org/algorithms/english/stop.txt
+
from __future__ import annotations
ENGLISH_STOPWORDS = frozenset({
'a',
+ 'about',
+ 'above',
+ 'after',
+ 'again',
+ 'against',
+ 'all',
+ 'am',
+ 'an',
'and',
+ 'any',
'are',
+ "aren't",
'as',
'at',
'be',
+ 'because',
+ 'been',
+ 'before',
+ 'being',
+ 'below',
+ 'between',
+ 'both',
'but',
'by',
+ "can't",
+ 'cannot',
+ 'could',
+ "couldn't",
+ 'did',
+ "didn't",
+ 'do',
+ 'does',
+ "doesn't",
+ 'doing',
+ "don't",
+ 'down',
+ 'during',
+ 'each',
+ 'few',
'for',
+ 'from',
+ 'further',
+ 'had',
+ "hadn't",
+ 'has',
+ "hasn't",
+ 'have',
+ "haven't",
+ 'having',
+ 'he',
+ "he'd",
+ "he'll",
+ "he's",
+ 'her',
+ 'here',
+ "here's",
+ 'hers',
+ 'herself',
+ 'him',
+ 'himself',
+ 'his',
+ 'how',
+ "how's",
+ 'i',
+ "i'd",
+ "i'll",
+ "i'm",
+ "i've",
'if',
'in',
'into',
'is',
+ "isn't",
'it',
- 'near',
+ "it's",
+ 'its',
+ 'itself',
+ "let's",
+ 'me',
+ 'more',
+ 'most',
+ "mustn't",
+ 'my',
+ 'myself',
'no',
+ 'nor',
'not',
'of',
+ 'off',
'on',
+ 'once',
+ 'only',
'or',
+ 'other',
+ 'ought',
+ 'our',
+ 'ours',
+ 'ourselves',
+ 'out',
+ 'over',
+ 'own',
+ 'same',
+ "shan't",
+ 'she',
+ "she'd",
+ "she'll",
+ "she's",
+ 'should',
+ "shouldn't",
+ 'so',
+ 'some',
'such',
+ 'than',
'that',
+ "that's",
'the',
'their',
+ 'theirs',
+ 'them',
+ 'themselves',
'then',
'there',
+ "there's",
'these',
'they',
+ "they'd",
+ "they'll",
+ "they're",
+ "they've",
'this',
+ 'those',
+ 'through',
'to',
+ 'too',
+ 'under',
+ 'until',
+ 'up',
+ 'very',
'was',
- 'will',
+ "wasn't",
+ 'we',
+ "we'd",
+ "we'll",
+ "we're",
+ "we've",
+ 'were',
+ "weren't",
+ 'what',
+ "what's",
+ 'when',
+ "when's",
+ 'where',
+ "where's",
+ 'which',
+ 'while',
+ 'who',
+ "who's",
+ 'whom',
+ 'why',
+ "why's",
'with',
+ "won't",
+ 'would',
+ "wouldn't",
+ 'you',
+ "you'd",
+ "you'll",
+ "you're",
+ "you've",
+ 'your',
+ 'yours',
+ 'yourself',
+ 'yourselves',
})
diff --git a/sphinx/search/_stopwords/en.txt b/sphinx/search/_stopwords/en.txt
new file mode 100644
index 00000000000..e40c8c8cd6e
--- /dev/null
+++ b/sphinx/search/_stopwords/en.txt
@@ -0,0 +1,310 @@
+
+ | An English stop word list. Comments begin with vertical bar. Each stop
+ | word is at the start of a line.
+
+ | Many of the forms below are quite rare (e.g. "yourselves") but included for
+ | completeness.
+
+ | PRONOUNS FORMS
+ | 1st person sing
+
+i | subject, always in upper case of course
+
+me | object
+my | possessive adjective
+ | the possessive pronoun `mine' is best suppressed, because of the
+ | sense of coal-mine etc.
+myself | reflexive
+ | 1st person plural
+we | subject
+
+| us | object
+ | care is required here because US = United States. It is usually
+ | safe to remove it if it is in lower case.
+our | possessive adjective
+ours | possessive pronoun
+ourselves | reflexive
+ | second person (archaic `thou' forms not included)
+you | subject and object
+your | possessive adjective
+yours | possessive pronoun
+yourself | reflexive (singular)
+yourselves | reflexive (plural)
+ | third person singular
+he | subject
+him | object
+his | possessive adjective and pronoun
+himself | reflexive
+
+she | subject
+her | object and possessive adjective
+hers | possessive pronoun
+herself | reflexive
+
+it | subject and object
+its | possessive adjective
+itself | reflexive
+ | third person plural
+they | subject
+them | object
+their | possessive adjective
+theirs | possessive pronoun
+themselves | reflexive
+ | other forms (demonstratives, interrogatives)
+what
+which
+who
+whom
+this
+that
+these
+those
+
+ | VERB FORMS (using F.R. Palmer's nomenclature)
+ | BE
+am | 1st person, present
+is | -s form (3rd person, present)
+are | present
+was | 1st person, past
+were | past
+be | infinitive
+been | past participle
+being | -ing form
+ | HAVE
+have | simple
+has | -s form
+had | past
+having | -ing form
+ | DO
+do | simple
+does | -s form
+did | past
+doing | -ing form
+
+ | The forms below are best omitted, because of the significant homonym forms:
+
+ | He made a WILL
+ | old tin CAN
+ | merry month of MAY
+ | a smell of MUST
+ | fight the good fight with all thy MIGHT
+
+ | would, could, should, ought might however be included
+
+ | | AUXILIARIES
+ | | WILL
+ |will
+
+would
+
+ | | SHALL
+ |shall
+
+should
+
+ | | CAN
+ |can
+
+could
+
+ | | MAY
+ |may
+ |might
+ | | MUST
+ |must
+ | | OUGHT
+
+ought
+
+ | COMPOUND FORMS, increasingly encountered nowadays in 'formal' writing
+ | pronoun + verb
+
+i'm
+you're
+he's
+she's
+it's
+we're
+they're
+i've
+you've
+we've
+they've
+i'd
+you'd
+he'd
+she'd
+we'd
+they'd
+i'll
+you'll
+he'll
+she'll
+we'll
+they'll
+
+ | verb + negation
+
+isn't
+aren't
+wasn't
+weren't
+hasn't
+haven't
+hadn't
+doesn't
+don't
+didn't
+
+ | auxiliary + negation
+
+won't
+wouldn't
+shan't
+shouldn't
+can't
+cannot
+couldn't
+mustn't
+
+ | miscellaneous forms
+
+let's
+that's
+who's
+what's
+here's
+there's
+when's
+where's
+why's
+how's
+
+ | rarer forms
+
+ | daren't needn't
+
+ | doubtful forms
+
+ | oughtn't mightn't
+
+ | ARTICLES
+a
+an
+the
+
+ | THE REST (Overlap among prepositions, conjunctions, adverbs etc is so
+ | high, that classification is pointless.)
+and
+but
+if
+or
+because
+as
+until
+while
+
+of
+at
+by
+for
+with
+about
+against
+between
+into
+through
+during
+before
+after
+above
+below
+to
+from
+up
+down
+in
+out
+on
+off
+over
+under
+
+again
+further
+then
+once
+
+here
+there
+when
+where
+why
+how
+
+all
+any
+both
+each
+few
+more
+most
+other
+some
+such
+
+no
+nor
+not
+only
+own
+same
+so
+than
+too
+very
+
+ | Just for the record, the following words are among the commonest in English
+
+ | one
+ | every
+ | least
+ | less
+ | many
+ | now
+ | ever
+ | never
+ | say
+ | says
+ | said
+ | also
+ | get
+ | go
+ | goes
+ | just
+ | made
+ | make
+ | put
+ | see
+ | seen
+ | whether
+ | like
+ | well
+ | back
+ | even
+ | still
+ | way
+ | take
+ | since
+ | another
+ | however
+ | two
+ | three
+ | four
+ | five
+ | first
+ | second
+ | new
+ | old
+ | high
+ | long
diff --git a/sphinx/search/_stopwords/es.py b/sphinx/search/_stopwords/es.py
index d70b317d032..5db38b0cd5b 100644
--- a/sphinx/search/_stopwords/es.py
+++ b/sphinx/search/_stopwords/es.py
@@ -1,3 +1,6 @@
+# automatically generated by utils/generate_snowball.py
+# from https://snowballstem.org/algorithms/spanish/stop.txt
+
from __future__ import annotations
SPANISH_STOPWORDS = frozenset({
diff --git a/sphinx/search/_stopwords/es.txt b/sphinx/search/_stopwords/es.txt
index d7047b93164..416c84d225a 100644
--- a/sphinx/search/_stopwords/es.txt
+++ b/sphinx/search/_stopwords/es.txt
@@ -1,4 +1,13 @@
-|source: https://snowballstem.org/algorithms/spanish/stop.txt
+
+ | A Spanish stop word list. Comments begin with vertical bar. Each stop
+ | word is at the start of a line.
+
+
+ | The following is a ranked list (commonest to rarest) of stopwords
+ | deriving from a large sample of text.
+
+ | Extra words have been added at the end.
+
de | from, of
la | the, her
que | who, that
diff --git a/sphinx/search/_stopwords/fi.py b/sphinx/search/_stopwords/fi.py
index d7586cba227..ed29c8a6f0a 100644
--- a/sphinx/search/_stopwords/fi.py
+++ b/sphinx/search/_stopwords/fi.py
@@ -1,3 +1,6 @@
+# automatically generated by utils/generate_snowball.py
+# from https://snowballstem.org/algorithms/finnish/stop.txt
+
from __future__ import annotations
FINNISH_STOPWORDS = frozenset({
@@ -52,6 +55,7 @@
'jossa',
'josta',
'jota',
+ 'joten',
'jotka',
'kanssa',
'keiden',
diff --git a/sphinx/search/_stopwords/fi.txt b/sphinx/search/_stopwords/fi.txt
index 9aff8a79929..5ad14064e58 100644
--- a/sphinx/search/_stopwords/fi.txt
+++ b/sphinx/search/_stopwords/fi.txt
@@ -1,4 +1,4 @@
-| source: https://snowballstem.org/algorithms/finnish/stop.txt
+
| forms of BE
olla
@@ -60,6 +60,7 @@ jotka joiden joita joissa joista joihin joilla joilta joille joina
että | that
ja | and
jos | if
+joten | so
koska | because
kuin | than
mutta | but
@@ -83,6 +84,5 @@ yli | over, across
| other
kun | when
-niin | so
nyt | now
itse | self
diff --git a/sphinx/search/_stopwords/fr.py b/sphinx/search/_stopwords/fr.py
index 7dfd86d7445..cb2e5ef9501 100644
--- a/sphinx/search/_stopwords/fr.py
+++ b/sphinx/search/_stopwords/fr.py
@@ -1,3 +1,6 @@
+# automatically generated by utils/generate_snowball.py
+# from https://snowballstem.org/algorithms/french/stop.txt
+
from __future__ import annotations
FRENCH_STOPWORDS = frozenset({
@@ -6,14 +9,11 @@
'aient',
'aies',
'ait',
- 'as',
'au',
- 'aura',
'aurai',
'auraient',
'aurais',
'aurait',
- 'auras',
'aurez',
'auriez',
'aurions',
@@ -26,7 +26,6 @@
'avec',
'avez',
'aviez',
- 'avions',
'avons',
'ayant',
'ayez',
@@ -47,7 +46,6 @@
'elle',
'en',
'es',
- 'est',
'et',
'eu',
'eue',
@@ -73,7 +71,6 @@
'fussions',
'fut',
'fûmes',
- 'fût',
'fûtes',
'ici',
'il',
@@ -133,8 +130,6 @@
'soient',
'sois',
'soit',
- 'sommes',
- 'son',
'sont',
'soyez',
'soyons',
@@ -160,9 +155,7 @@
'étant',
'étiez',
'étions',
- 'été',
'étée',
'étées',
- 'étés',
'êtes',
})
diff --git a/sphinx/search/_stopwords/fr.txt b/sphinx/search/_stopwords/fr.txt
index 7839ab57c86..9cb744c3c25 100644
--- a/sphinx/search/_stopwords/fr.txt
+++ b/sphinx/search/_stopwords/fr.txt
@@ -1,4 +1,7 @@
-| source: https://snowballstem.org/algorithms/french/stop.txt
+
+ | A French stop word list. Comments begin with vertical bar. Each stop
+ | word is at the start of a line.
+
au | a + le
aux | a + les
avec | with
@@ -40,7 +43,7 @@ qui | who
sa | his, her (fem)
se | oneself
ses | his (pl)
-son | his, her (masc)
+ | son | his, her (masc). Omitted because it is homonym of "sound"
sur | on
ta | thy (fem)
te | thee
@@ -68,15 +71,15 @@ t | t'
y | there
| forms of être (not including the infinitive):
-été
+ | été - Omitted because it is homonym of "summer"
étée
étées
-étés
+ | étés - Omitted because it is homonym of "summers"
étant
suis
es
-est
-sommes
+ | est - Omitted because it is homonym of "east"
+ | sommes - Omitted because it is homonym of "sums"
êtes
sont
serai
@@ -107,7 +110,7 @@ soyez
soient
fusse
fusses
-fût
+ | fût - Omitted because it is homonym of "tap", like in "beer on tap"
fussions
fussiez
fussent
@@ -119,13 +122,13 @@ eue
eues
eus
ai
-as
+ | as - Omitted because it is homonym of "ace"
avons
avez
ont
aurai
-auras
-aura
+ | auras - Omitted because it is also the name of a kind of wind
+ | aura - Omitted because it is also the name of a kind of wind and homonym of "aura"
aurons
aurez
auront
@@ -136,7 +139,7 @@ auriez
auraient
avais
avait
-avions
+ | avions - Omitted because it is homonym of "planes"
aviez
avaient
eut
diff --git a/sphinx/search/_stopwords/hu.py b/sphinx/search/_stopwords/hu.py
index 83bee011b0f..8f41b67b28a 100644
--- a/sphinx/search/_stopwords/hu.py
+++ b/sphinx/search/_stopwords/hu.py
@@ -1,3 +1,6 @@
+# automatically generated by utils/generate_snowball.py
+# from https://snowballstem.org/algorithms/hungarian/stop.txt
+
from __future__ import annotations
HUNGARIAN_STOPWORDS = frozenset({
diff --git a/sphinx/search/_stopwords/hu.txt b/sphinx/search/_stopwords/hu.txt
index 658c6194f27..2599a8d1b96 100644
--- a/sphinx/search/_stopwords/hu.txt
+++ b/sphinx/search/_stopwords/hu.txt
@@ -1,5 +1,7 @@
-| source: https://snowballstem.org/algorithms/hungarian/stop.txt
+
+| Hungarian stop word list
| prepared by Anna Tordai
+
a
ahogy
ahol
diff --git a/sphinx/search/_stopwords/it.py b/sphinx/search/_stopwords/it.py
index 4b0f522ac94..873a2c1f77b 100644
--- a/sphinx/search/_stopwords/it.py
+++ b/sphinx/search/_stopwords/it.py
@@ -1,3 +1,6 @@
+# automatically generated by utils/generate_snowball.py
+# from https://snowballstem.org/algorithms/italian/stop.txt
+
from __future__ import annotations
ITALIAN_STOPWORDS = frozenset({
@@ -213,6 +216,7 @@
'sono',
'sta',
'stai',
+ 'stando',
'stanno',
'starai',
'staranno',
diff --git a/sphinx/search/_stopwords/it.txt b/sphinx/search/_stopwords/it.txt
index c8776836110..a20bb9528a5 100644
--- a/sphinx/search/_stopwords/it.txt
+++ b/sphinx/search/_stopwords/it.txt
@@ -1,4 +1,7 @@
-| source: https://snowballstem.org/algorithms/italian/stop.txt
+
+ | An Italian stop word list. Comments begin with vertical bar. Each stop
+ | word is at the start of a line.
+
ad | a (to) before vowel
al | a + il
allo | a + lo
@@ -289,3 +292,4 @@ stessi
stesse
stessimo
stessero
+stando
diff --git a/sphinx/search/_stopwords/nl.py b/sphinx/search/_stopwords/nl.py
index 1742ec8dad2..b8b9a4dcfcd 100644
--- a/sphinx/search/_stopwords/nl.py
+++ b/sphinx/search/_stopwords/nl.py
@@ -1,3 +1,6 @@
+# automatically generated by utils/generate_snowball.py
+# from https://snowballstem.org/algorithms/dutch/stop.txt
+
from __future__ import annotations
DUTCH_STOPWORDS = frozenset({
diff --git a/sphinx/search/_stopwords/nl.txt b/sphinx/search/_stopwords/nl.txt
index 64336d0623b..edf99730a2c 100644
--- a/sphinx/search/_stopwords/nl.txt
+++ b/sphinx/search/_stopwords/nl.txt
@@ -1,4 +1,14 @@
-| source: https://snowballstem.org/algorithms/dutch/stop.txt
+
+
+ | A Dutch stop word list. Comments begin with vertical bar. Each stop
+ | word is at the start of a line.
+
+ | This is a ranked list (commonest to rarest) of stopwords derived from
+ | a large sample of Dutch text.
+
+ | Dutch stop words frequently exhibit homonym clashes. These are indicated
+ | clearly below.
+
de | the
en | and
van | of, from
diff --git a/sphinx/search/_stopwords/no.py b/sphinx/search/_stopwords/no.py
index 9b9bfbea4c9..d06cfc4d798 100644
--- a/sphinx/search/_stopwords/no.py
+++ b/sphinx/search/_stopwords/no.py
@@ -1,3 +1,6 @@
+# automatically generated by utils/generate_snowball.py
+# from https://snowballstem.org/algorithms/norwegian/stop.txt
+
from __future__ import annotations
NORWEGIAN_STOPWORDS = frozenset({
diff --git a/sphinx/search/_stopwords/no.txt b/sphinx/search/_stopwords/no.txt
index 552ad326a55..c3d5da01e72 100644
--- a/sphinx/search/_stopwords/no.txt
+++ b/sphinx/search/_stopwords/no.txt
@@ -1,4 +1,12 @@
-| source: https://snowballstem.org/algorithms/norwegian/stop.txt
+
+ | A Norwegian stop word list. Comments begin with vertical bar. Each stop
+ | word is at the start of a line.
+
+ | This stop word list is for the dominant bokmål dialect. Words unique
+ | to nynorsk are marked *.
+
+ | Revised by Jan Bruusgaard, Jan 2005
+
og | and
i | in
jeg | I
@@ -9,7 +17,7 @@ et | a/an
den | it/this/that
til | to
er | is/am/are
-som | who/that
+som | who/which/that
på | on
de | they / you(formal)
med | with
@@ -68,7 +76,6 @@ noen | some
noe | some
ville | would
dere | you
-som | who/which/that
deres | their/theirs
kun | only/just
ja | yes
@@ -113,7 +120,6 @@ mange | many
også | also
slik | just
vært | been
-være | to be
båe | both *
begge | both
siden | since
@@ -139,7 +145,6 @@ hennar | her/hers
hennes | hers
hoss | how *
hossen | how *
-ikkje | not *
ingi | noone *
inkje | noone *
korleis | how *
@@ -161,7 +166,6 @@ noka | some (fem.) *
nokor | some *
noko | some *
nokre | some *
-si | his/hers *
sia | since *
sidan | since *
so | so *
diff --git a/sphinx/search/_stopwords/pt.py b/sphinx/search/_stopwords/pt.py
index b79799d42a6..17b7f8ec733 100644
--- a/sphinx/search/_stopwords/pt.py
+++ b/sphinx/search/_stopwords/pt.py
@@ -1,3 +1,6 @@
+# automatically generated by utils/generate_snowball.py
+# from https://snowballstem.org/algorithms/portuguese/stop.txt
+
from __future__ import annotations
PORTUGUESE_STOPWORDS = frozenset({
diff --git a/sphinx/search/_stopwords/pt.txt b/sphinx/search/_stopwords/pt.txt
index 5ef15633d81..9c3c9ac76d7 100644
--- a/sphinx/search/_stopwords/pt.txt
+++ b/sphinx/search/_stopwords/pt.txt
@@ -1,4 +1,13 @@
-| source: https://snowballstem.org/algorithms/portuguese/stop.txt
+
+ | A Portuguese stop word list. Comments begin with vertical bar. Each stop
+ | word is at the start of a line.
+
+
+ | The following is a ranked list (commonest to rarest) of stopwords
+ | deriving from a large sample of text.
+
+ | Extra words have been added at the end.
+
de | of, from
a | the; to, at; her
o | the; him
diff --git a/sphinx/search/_stopwords/ru.py b/sphinx/search/_stopwords/ru.py
index cc275d5184a..ccd0be5badd 100644
--- a/sphinx/search/_stopwords/ru.py
+++ b/sphinx/search/_stopwords/ru.py
@@ -1,3 +1,6 @@
+# automatically generated by utils/generate_snowball.py
+# from https://snowballstem.org/algorithms/russian/stop.txt
+
from __future__ import annotations
RUSSIAN_STOPWORDS = frozenset({
diff --git a/sphinx/search/_stopwords/ru.txt b/sphinx/search/_stopwords/ru.txt
index 43a73af0b55..96abb77073e 100644
--- a/sphinx/search/_stopwords/ru.txt
+++ b/sphinx/search/_stopwords/ru.txt
@@ -1,4 +1,13 @@
-| source: https://snowballstem.org/algorithms/russian/stop.txt
+
+
+ | a russian stop word list. comments begin with vertical bar. each stop
+ | word is at the start of a line.
+
+ | this is a ranked list (commonest to rarest) of stopwords derived from
+ | a large text sample.
+
+ | letter `ё' is translated to `е'.
+
и | and
в | in/into
во | alternative form
diff --git a/sphinx/search/_stopwords/sv.py b/sphinx/search/_stopwords/sv.py
index c1f10635e0b..0c6f48d2703 100644
--- a/sphinx/search/_stopwords/sv.py
+++ b/sphinx/search/_stopwords/sv.py
@@ -1,3 +1,6 @@
+# automatically generated by utils/generate_snowball.py
+# from https://snowballstem.org/algorithms/swedish/stop.txt
+
from __future__ import annotations
SWEDISH_STOPWORDS = frozenset({
@@ -80,7 +83,7 @@
'sig',
'sin',
'sina',
- 'sitta',
+ 'sitt',
'själv',
'skulle',
'som',
diff --git a/sphinx/search/_stopwords/sv.txt b/sphinx/search/_stopwords/sv.txt
index 850ae7474d6..77924c68dfe 100644
--- a/sphinx/search/_stopwords/sv.txt
+++ b/sphinx/search/_stopwords/sv.txt
@@ -1,4 +1,13 @@
-| source: https://snowballstem.org/algorithms/swedish/stop.txt
+
+ | A Swedish stop word list. Comments begin with vertical bar. Each stop
+ | word is at the start of a line.
+
+ | This is a ranked list (commonest to rarest) of stopwords derived from
+ | a large text sample.
+
+ | Swedish stop words occasionally exhibit homonym clashes. For example
+ | så = so, but also seed. These are indicated clearly below.
+
och | and
det | it, this/that
att | to (with infinitive)
@@ -103,7 +112,7 @@ vilka | who, that
ditt | thy
vem | who
vilket | who, that
-sitta | his
+sitt | his
sådana | such a
vart | each
dina | thy
diff --git a/tests/js/fixtures/cpp/searchindex.js b/tests/js/fixtures/cpp/searchindex.js
index 81f14cc1895..6c50cc9d99d 100644
--- a/tests/js/fixtures/cpp/searchindex.js
+++ b/tests/js/fixtures/cpp/searchindex.js
@@ -1 +1 @@
-Search.setIndex({"alltitles":{},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{"sphinx (c++ class)":[[0,"_CPPv46Sphinx",false]]},"objects":{"":[[0,0,1,"_CPPv46Sphinx","Sphinx"]]},"objnames":{"0":["cpp","class","C++ class"]},"objtypes":{"0":"cpp:class"},"terms":{"The":0,"This":0,"becaus":0,"c":0,"can":0,"cardin":0,"challeng":0,"charact":0,"class":0,"descript":0,"drop":0,"engin":0,"fixtur":0,"frequent":0,"generat":0,"index":0,"inflat":0,"mathemat":0,"occur":0,"often":0,"project":0,"punctuat":0,"queri":0,"relat":0,"sampl":0,"search":0,"size":0,"sphinx":0,"term":0,"token":0,"use":0,"web":0,"would":0},"titles":["<no title>"],"titleterms":{}})
\ No newline at end of file
+Search.setIndex({"alltitles":{},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{"sphinx (c++ class)":[[0,"_CPPv46Sphinx",false]]},"objects":{"":[[0,0,1,"_CPPv46Sphinx","Sphinx"]]},"objnames":{"0":["cpp","class","C++ class"]},"objtypes":{"0":"cpp:class"},"terms":{"The":0,"This":0,"becaus":0,"c":0,"can":0,"cardin":0,"challeng":0,"charact":0,"class":0,"descript":0,"drop":0,"engin":0,"fixtur":0,"frequent":0,"generat":0,"index":0,"inflat":0,"mathemat":0,"occur":0,"often":0,"project":0,"punctuat":0,"queri":0,"relat":0,"sampl":0,"search":0,"size":0,"sphinx":0,"term":0,"token":0,"use":0,"web":0},"titles":["<no title>"],"titleterms":{}})
\ No newline at end of file
diff --git a/tests/js/fixtures/multiterm/searchindex.js b/tests/js/fixtures/multiterm/searchindex.js
index 2f3f5ec39a1..a3a52b8cf14 100644
--- a/tests/js/fixtures/multiterm/searchindex.js
+++ b/tests/js/fixtures/multiterm/searchindex.js
@@ -1 +1 @@
-Search.setIndex({"alltitles":{"Main Page":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"At":0,"This":0,"adjac":0,"all":0,"an":0,"appear":0,"applic":0,"built":0,"can":0,"check":0,"contain":0,"do":0,"document":0,"doesn":0,"each":0,"fixtur":0,"format":0,"function":0,"futur":0,"html":0,"includ":0,"match":0,"messag":0,"multipl":0,"multiterm":0,"order":0,"other":0,"output":0,"perform":0,"perhap":0,"phrase":0,"project":0,"queri":0,"requir":0,"same":0,"search":0,"success":0,"support":0,"t":0,"term":0,"test":0,"time":0,"use":0,"when":0,"write":0},"titles":["Main Page"],"titleterms":{"main":0,"page":0}})
\ No newline at end of file
+Search.setIndex({"alltitles":{"Main Page":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"At":0,"This":0,"adjac":0,"appear":0,"applic":0,"built":0,"can":0,"check":0,"contain":0,"document":0,"doesn":0,"fixtur":0,"format":0,"function":0,"futur":0,"html":0,"includ":0,"match":0,"messag":0,"multipl":0,"multiterm":0,"order":0,"output":0,"perform":0,"perhap":0,"phrase":0,"project":0,"queri":0,"requir":0,"search":0,"success":0,"support":0,"t":0,"term":0,"test":0,"time":0,"use":0,"will":0,"write":0},"titles":["Main Page"],"titleterms":{"main":0,"page":0}})
\ No newline at end of file
diff --git a/tests/js/fixtures/partial/searchindex.js b/tests/js/fixtures/partial/searchindex.js
index 5eb299eea63..02863d73d83 100644
--- a/tests/js/fixtures/partial/searchindex.js
+++ b/tests/js/fixtures/partial/searchindex.js
@@ -1 +1 @@
-Search.setIndex({"alltitles":{"sphinx_utils module":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"This":0,"both":0,"built":0,"confirm":0,"document":0,"function":0,"html":0,"includ":0,"input":0,"javascript":0,"match":0,"partial":0,"possibl":0,"project":0,"provid":0,"restructuredtext":0,"sampl":0,"search":0,"should":0,"term":0,"titl":0,"use":0,"when":0},"titles":["sphinx_utils module"],"titleterms":{"modul":0,"sphinx_util":0}})
\ No newline at end of file
+Search.setIndex({"alltitles":{"sphinx_utils module":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"This":0,"built":0,"confirm":0,"document":0,"function":0,"html":0,"includ":0,"input":0,"javascript":0,"match":0,"partial":0,"possibl":0,"project":0,"provid":0,"restructuredtext":0,"sampl":0,"search":0,"term":0,"titl":0,"use":0},"titles":["sphinx_utils module"],"titleterms":{"modul":0,"sphinx_util":0}})
\ No newline at end of file
diff --git a/tests/js/fixtures/titles/searchindex.js b/tests/js/fixtures/titles/searchindex.js
index fa59e11c884..9faeadf76c6 100644
--- a/tests/js/fixtures/titles/searchindex.js
+++ b/tests/js/fixtures/titles/searchindex.js
@@ -1 +1 @@
-Search.setIndex({"alltitles":{"Main Page":[[0,null]],"Relevance":[[0,"relevance"],[1,null]],"Result Scoring":[[0,"result-scoring"]]},"docnames":["index","relevance"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst","relevance.rst"],"indexentries":{"example (class in relevance)":[[0,"relevance.Example",false]],"module":[[0,"module-relevance",false]],"relevance":[[0,"index-1",false],[0,"module-relevance",false]],"relevance (relevance.example attribute)":[[0,"relevance.Example.relevance",false]],"scoring":[[0,"index-0",true]]},"objects":{"":[[0,0,0,"-","relevance"]],"relevance":[[0,1,1,"","Example"]],"relevance.Example":[[0,2,1,"","relevance"]]},"objnames":{"0":["py","module","Python module"],"1":["py","class","Python class"],"2":["py","attribute","Python attribute"]},"objtypes":{"0":"py:module","1":"py:class","2":"py:attribute"},"terms":{"A":1,"By":0,"For":[0,1],"In":[0,1],"This":0,"against":0,"align":0,"also":1,"an":0,"answer":0,"appear":1,"area":0,"ask":0,"assign":0,"attempt":0,"attribut":0,"both":0,"built":1,"can":[0,1],"class":0,"code":[0,1],"collect":0,"consid":1,"contain":0,"context":0,"corpus":1,"could":1,"demonstr":0,"describ":1,"detail":1,"determin":[0,1],"docstr":0,"document":[0,1],"domain":1,"dure":0,"engin":0,"evalu":0,"exampl":[0,1],"extract":0,"feedback":0,"find":0,"found":0,"from":0,"function":1,"handl":0,"happen":1,"has":1,"head":0,"help":0,"high":[0,1],"how":0,"improv":0,"inform":0,"intend":0,"issu":[0,1],"itself":1,"knowledg":0,"languag":1,"less":1,"like":[0,1],"mani":0,"match":0,"mention":1,"more":0,"name":[0,1],"numer":0,"object":0,"often":0,"one":[0,1],"onli":[0,1],"order":0,"other":0,"over":0,"page":1,"part":1,"particular":0,"present":0,"printf":1,"program":1,"project":0,"queri":[0,1],"question":0,"re":0,"relat":0,"research":0,"result":1,"retriev":0,"s":[0,1],"same":1,"say":0,"search":[0,1],"seem":0,"softwar":1,"some":1,"sphinx":0,"straightforward":1,"subject":0,"subsect":0,"term":[0,1],"test":0,"text":0,"than":[0,1],"them":0,"time":0,"titl":0,"two":0,"typic":0,"use":0,"user":[0,1],"we":[0,1],"when":0,"whether":1,"which":0,"within":0,"word":0,"would":[0,1]},"titles":["Main Page","Relevance"],"titleterms":{"main":0,"page":0,"relev":[0,1],"result":0,"score":0}})
\ No newline at end of file
+Search.setIndex({"alltitles":{"Main Page":[[0,null]],"Relevance":[[0,"relevance"],[1,null]],"Result Scoring":[[0,"result-scoring"]]},"docnames":["index","relevance"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst","relevance.rst"],"indexentries":{"example (class in relevance)":[[0,"relevance.Example",false]],"module":[[0,"module-relevance",false]],"relevance":[[0,"index-1",false],[0,"module-relevance",false]],"relevance (relevance.example attribute)":[[0,"relevance.Example.relevance",false]],"scoring":[[0,"index-0",true]]},"objects":{"":[[0,0,0,"-","relevance"]],"relevance":[[0,1,1,"","Example"]],"relevance.Example":[[0,2,1,"","relevance"]]},"objnames":{"0":["py","module","Python module"],"1":["py","class","Python class"],"2":["py","attribute","Python attribute"]},"objtypes":{"0":"py:module","1":"py:class","2":"py:attribute"},"terms":{"A":1,"By":0,"For":[0,1],"In":[0,1],"This":0,"align":0,"also":1,"answer":0,"appear":1,"area":0,"ask":0,"assign":0,"attempt":0,"attribut":0,"built":1,"can":[0,1],"class":0,"code":[0,1],"collect":0,"consid":1,"contain":0,"context":0,"corpus":1,"demonstr":0,"describ":1,"detail":1,"determin":[0,1],"docstr":0,"document":[0,1],"domain":1,"dure":0,"engin":0,"evalu":0,"exampl":[0,1],"extract":0,"feedback":0,"find":0,"found":0,"function":1,"handl":0,"happen":1,"head":0,"help":0,"high":[0,1],"improv":0,"inform":0,"intend":0,"issu":[0,1],"knowledg":0,"languag":1,"less":1,"like":[0,1],"mani":0,"match":0,"mention":1,"name":[0,1],"numer":0,"object":0,"often":0,"one":[0,1],"onli":[0,1],"order":0,"page":1,"part":1,"particular":0,"present":0,"printf":1,"program":1,"project":0,"queri":[0,1],"question":0,"re":0,"relat":0,"research":0,"result":1,"retriev":0,"s":[0,1],"say":0,"search":[0,1],"seem":0,"softwar":1,"sphinx":0,"straightforward":1,"subject":0,"subsect":0,"term":[0,1],"test":0,"text":0,"time":0,"titl":0,"two":0,"typic":0,"use":0,"user":[0,1],"whether":1,"will":0,"within":0,"word":0},"titles":["Main Page","Relevance"],"titleterms":{"main":0,"page":0,"relev":[0,1],"result":0,"score":0}})
\ No newline at end of file
diff --git a/utils/generate_snowball.py b/utils/generate_snowball.py
new file mode 100755
index 00000000000..f59183d7f21
--- /dev/null
+++ b/utils/generate_snowball.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python3
+
+"""Refresh and update language stemming data from the Snowball project."""
+
+# /// script
+# requires-python = ">=3.11"
+# dependencies = [
+# "requests>=2.30",
+# ]
+# ///
+
+from __future__ import annotations
+
+import hashlib
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+from io import BytesIO
+from pathlib import Path
+
+import requests
+
+SNOWBALL_VERSION = '3.0.1'
+SNOWBALL_URL = f'https://github.com/snowballstem/snowball/archive/refs/tags/v{SNOWBALL_VERSION}.tar.gz'
+SNOWBALL_SHA256 = '80ac10ce40dc4fcfbfed8d085c457b5613da0e86a73611a3d5527d044a142d60'
+
+ROOT = Path(__file__).resolve().parent.parent
+SEARCH_DIR = ROOT / 'sphinx' / 'search'
+STOPWORDS_DIR = SEARCH_DIR / '_stopwords'
+NON_MINIFIED_JS_DIR = SEARCH_DIR / 'non-minified-js'
+
+STOPWORD_URLS = (
+ ('da', 'danish', 'https://snowballstem.org/algorithms/danish/stop.txt'),
+ ('de', 'german', 'https://snowballstem.org/algorithms/german/stop.txt'),
+ ('en', 'english', 'https://snowballstem.org/algorithms/english/stop.txt'),
+ ('es', 'spanish', 'https://snowballstem.org/algorithms/spanish/stop.txt'),
+ ('fi', 'finnish', 'https://snowballstem.org/algorithms/finnish/stop.txt'),
+ ('fr', 'french', 'https://snowballstem.org/algorithms/french/stop.txt'),
+ ('hu', 'hungarian', 'https://snowballstem.org/algorithms/hungarian/stop.txt'),
+ ('it', 'italian', 'https://snowballstem.org/algorithms/italian/stop.txt'),
+ ('nl', 'dutch', 'https://snowballstem.org/algorithms/dutch/stop.txt'),
+ ('no', 'norwegian', 'https://snowballstem.org/algorithms/norwegian/stop.txt'),
+ ('pt', 'portuguese', 'https://snowballstem.org/algorithms/portuguese/stop.txt'),
+ # ('ro', 'romanian', ''),
+ ('ru', 'russian', 'https://snowballstem.org/algorithms/russian/stop.txt'),
+ ('sv', 'swedish', 'https://snowballstem.org/algorithms/swedish/stop.txt'),
+ # ('tr', 'turkish', ''),
+)
+
+
+def regenerate_stopwords() -> None:
+ STOPWORDS_DIR.mkdir(parents=True, exist_ok=True)
+ STOPWORDS_DIR.joinpath('__init__.py').touch()
+
+ for lang_code, lang_name, url in STOPWORD_URLS:
+ data = requests.get(url, timeout=5).content.decode('utf-8')
+
+ # record the original source of the stopwords list
+ txt_path = STOPWORDS_DIR / f'{lang_code}.txt'
+ txt_path.write_text(data.rstrip() + '\n', encoding='utf-8')
+
+ # generate the Python stopwords set
+ stopwords = parse_stop_word(data)
+ with (STOPWORDS_DIR / f'{lang_code}.py').open('w', encoding='utf-8') as f:
+ f.write('# automatically generated by utils/generate_snowball.py\n')
+ f.write(f'# from {url}\n\n')
+ f.write('from __future__ import annotations\n\n')
+ f.write(f'{lang_name.upper()}_STOPWORDS = frozenset(')
+ if stopwords:
+ f.write('{\n')
+ for word in sorted(stopwords, key=str.casefold):
+ f.write(f' {word!r},\n')
+ f.write('}')
+ f.write(')\n')
+
+
+def parse_stop_word(source: str) -> frozenset[str]:
+ """Collect the stopwords from a snowball style word list:
+
+ .. code:: text
+
+ list of space separated stop words | optional comment
+ """
+ stop_words: set[str] = set()
+ for line in source.splitlines():
+ stop_words.update(line.partition('|')[0].split()) # remove comment
+ return frozenset(stop_words)
+
+
+def regenerate_javascript() -> None:
+ tmp_root = Path(tempfile.mkdtemp())
+
+ # Download and verify the snowball release
+ archive = requests.get(SNOWBALL_URL, timeout=60).content
+ digest = hashlib.sha256(archive).hexdigest()
+ if digest != SNOWBALL_SHA256:
+ msg = (
+ f'data does not match expected checksum '
+ f'(expected {SNOWBALL_SHA256}, saw {digest}).'
+ )
+ raise RuntimeError(msg)
+
+ # Extract the release archive
+ with tarfile.TarFile.gzopen(
+ 'snowball.tar.gz', mode='r', fileobj=BytesIO(archive)
+ ) as tar:
+ tar.extractall(tmp_root, filter='data')
+ snowball_root = tmp_root / f'snowball-{SNOWBALL_VERSION}'
+ snowball_dist = snowball_root / 'dist'
+
+ # Generate JS stemmer files
+ cmd = ('make', '--jobs=8', 'dist_libstemmer_js')
+ subprocess.run(cmd, check=True, cwd=snowball_root)
+ with tarfile.open(snowball_dist / f'jsstemmer-{SNOWBALL_VERSION}.tar.gz') as tar:
+ tar.extractall(snowball_dist, filter='data')
+
+ # Copy generated JS to sphinx/search/
+ NON_MINIFIED_JS_DIR.mkdir(exist_ok=True)
+ js_dir = snowball_dist / f'jsstemmer-{SNOWBALL_VERSION}' / 'javascript'
+ shutil.copytree(js_dir, NON_MINIFIED_JS_DIR, dirs_exist_ok=True)
+
+ # Clean up
+ shutil.rmtree(snowball_root)
+
+
+if __name__ == '__main__':
+ regenerate_stopwords()
+ if sys.platform != 'win32':
+ regenerate_javascript()
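
In ``parse_stop_word`` above, everything after the first ``|`` on a line
is a comment, so commented-out entries such as ``|will`` in ``en.txt``
contribute nothing to the set. A minimal self-contained restatement of
that rule, checked against a fabricated sample::

    def parse_stop_word(source: str) -> frozenset[str]:
        stop_words: set[str] = set()
        for line in source.splitlines():
            stop_words.update(line.partition('|')[0].split())  # drop comment
        return frozenset(stop_words)

    sample = 'og | and\ni | in\n| us | omitted: clashes with US\n'
    # Words before the first bar are kept; fully commented lines are skipped.
    assert parse_stop_word(sample) == frozenset({'og', 'i'})
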
From 0eae573102502030de6523def99f0b52d2ca2f41 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Tue, 20 May 2025 00:05:07 +0100
Subject: [PATCH 079/466] Remove ``section_parents`` (#13560)
Docutils revision r10129 [1] removes the member from the namespace.
[1]: https://sourceforge.net/p/docutils/code/10129/
---
sphinx/directives/other.py | 14 +-------------
sphinx/util/parsing.py | 12 ------------
.../test_util_docutils_sphinx_directive.py | 7 -------
3 files changed, 1 insertion(+), 32 deletions(-)
diff --git a/sphinx/directives/other.py b/sphinx/directives/other.py
index 5d6f5b778a6..8c66ed383b5 100644
--- a/sphinx/directives/other.py
+++ b/sphinx/directives/other.py
@@ -5,7 +5,6 @@
from pathlib import Path
from typing import TYPE_CHECKING, cast
-import docutils
from docutils import nodes
from docutils.parsers.rst import directives
from docutils.parsers.rst.directives.misc import Class
@@ -22,7 +21,7 @@
if TYPE_CHECKING:
from collections.abc import Sequence
- from typing import Any, ClassVar, Final
+ from typing import Any, ClassVar
from docutils.nodes import Element, Node
@@ -30,7 +29,6 @@
from sphinx.util.typing import ExtensionMetadata, OptionSpec
-DU_22_PLUS: Final = docutils.__version_info__ >= (0, 22, 0, 'alpha', 0)
glob_re = re.compile(r'.*[*?\[].*')
logger = logging.getLogger(__name__)
@@ -332,14 +330,6 @@ def run(self) -> list[Node]:
surrounding_section_level = memo.section_level
memo.title_styles = []
memo.section_level = 0
- if DU_22_PLUS:
- # https://github.com/sphinx-doc/sphinx/issues/13539
- # https://sourceforge.net/p/docutils/code/10093/
- # https://sourceforge.net/p/docutils/patches/213/
- surrounding_section_parents = memo.section_parents
- memo.section_parents = []
- else:
- surrounding_section_parents = []
try:
self.state.nested_parse(
self.content, self.content_offset, node, match_titles=True
@@ -375,8 +365,6 @@ def run(self) -> list[Node]:
return []
finally:
memo.title_styles = surrounding_title_styles
- if DU_22_PLUS:
- memo.section_parents = surrounding_section_parents
memo.section_level = surrounding_section_level
diff --git a/sphinx/util/parsing.py b/sphinx/util/parsing.py
index ec6649fc247..4c4a6477683 100644
--- a/sphinx/util/parsing.py
+++ b/sphinx/util/parsing.py
@@ -5,19 +5,15 @@
import contextlib
from typing import TYPE_CHECKING
-import docutils
from docutils.nodes import Element
from docutils.statemachine import StringList, string2lines
if TYPE_CHECKING:
from collections.abc import Iterator
- from typing import Final
from docutils.nodes import Node
from docutils.parsers.rst.states import RSTState
-DU_22_PLUS: Final = docutils.__version_info__ >= (0, 22, 0, 'alpha', 0)
-
def nested_parse_to_nodes(
state: RSTState,
@@ -79,23 +75,15 @@ def _fresh_title_style_context(state: RSTState) -> Iterator[None]:
memo = state.memo
surrounding_title_styles: list[str | tuple[str, str]] = memo.title_styles
surrounding_section_level: int = memo.section_level
- if DU_22_PLUS:
- surrounding_section_parents = memo.section_parents
- else:
- surrounding_section_parents = []
# clear current title styles
memo.title_styles = []
memo.section_level = 0
- if DU_22_PLUS:
- memo.section_parents = []
try:
yield
finally:
# reset title styles
memo.title_styles = surrounding_title_styles
memo.section_level = surrounding_section_level
- if DU_22_PLUS:
- memo.section_parents = surrounding_section_parents
def _text_to_string_list(
diff --git a/tests/test_util/test_util_docutils_sphinx_directive.py b/tests/test_util/test_util_docutils_sphinx_directive.py
index 00ea5bc3fb5..8c24a3c4a83 100644
--- a/tests/test_util/test_util_docutils_sphinx_directive.py
+++ b/tests/test_util/test_util_docutils_sphinx_directive.py
@@ -2,7 +2,6 @@
from types import SimpleNamespace
-import docutils
from docutils import nodes
from docutils.parsers.rst.languages import en as english # type: ignore[attr-defined]
from docutils.parsers.rst.states import (
@@ -45,16 +44,10 @@ def make_directive_and_state(
reporter=document.reporter,
language=english,
title_styles=[],
- # section_parents=[], # Docutils 0.22+
section_level=0,
section_bubble_up_kludge=False,
inliner=inliner,
)
- if docutils.__version_info__ >= (0, 22, 0, 'alpha', 0):
- # https://github.com/sphinx-doc/sphinx/issues/13539
- # https://sourceforge.net/p/docutils/code/10093/
- # https://sourceforge.net/p/docutils/patches/213/
- state.memo.section_parents = []
directive = SphinxDirective(
name='test_directive',
arguments=[],
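
With the Docutils 0.22 branches gone, the helper reduces to saving and
restoring two ``state.memo`` members around the nested parse. A minimal
sketch of that save/restore pattern, using a plain namespace in place of
a real docutils memo object::

    import contextlib
    from types import SimpleNamespace

    @contextlib.contextmanager
    def fresh_title_style_context(memo):
        saved_styles, saved_level = memo.title_styles, memo.section_level
        memo.title_styles, memo.section_level = [], 0
        try:
            yield
        finally:
            memo.title_styles, memo.section_level = saved_styles, saved_level

    memo = SimpleNamespace(title_styles=['='], section_level=1)
    with fresh_title_style_context(memo):
        assert memo.section_level == 0  # fresh context while parsing
    assert memo.title_styles == ['=']   # restored afterwards
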
From fb628ccc3dfe72f36baa76a220c4f414e657ff38 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Tue, 20 May 2025 01:14:14 +0100
Subject: [PATCH 080/466] Convert stopwords to a JavaScript set (#13575)
---
sphinx/themes/basic/static/language_data.js.jinja | 7 ++++---
sphinx/themes/basic/static/searchtools.js | 7 ++-----
tests/js/language_data.js | 5 +++--
3 files changed, 9 insertions(+), 10 deletions(-)
diff --git a/sphinx/themes/basic/static/language_data.js.jinja b/sphinx/themes/basic/static/language_data.js.jinja
index 64aefa798c7..daefea7eb9e 100644
--- a/sphinx/themes/basic/static/language_data.js.jinja
+++ b/sphinx/themes/basic/static/language_data.js.jinja
@@ -1,12 +1,13 @@
/*
* This script contains the language-specific data used by searchtools.js,
- * namely the list of stopwords, stemmer, scorer and splitter.
+ * namely the set of stopwords, stemmer, scorer and splitter.
*/
-var stopwords = {{ search_language_stop_words }};
+const stopwords = new Set({{ search_language_stop_words }});
+window.stopwords = stopwords; // Export to global scope
{% if search_language_stemming_code %}
-/* Non-minified version is copied as a separate JS file, if available */
+/* Non-minified versions are copied as separate JavaScript files, if available */
{{ search_language_stemming_code|safe }}
{% endif -%}
diff --git a/sphinx/themes/basic/static/searchtools.js b/sphinx/themes/basic/static/searchtools.js
index 91f4be57fc8..ba5e67aa75e 100644
--- a/sphinx/themes/basic/static/searchtools.js
+++ b/sphinx/themes/basic/static/searchtools.js
@@ -287,11 +287,8 @@ const Search = {
const queryTermLower = queryTerm.toLowerCase();
// maybe skip this "word"
- // stopwords array is from language_data.js
- if (
- stopwords.indexOf(queryTermLower) !== -1 ||
- queryTerm.match(/^\d+$/)
- )
+ // stopwords set is from language_data.js
+ if (stopwords.has(queryTermLower) || queryTerm.match(/^\d+$/))
return;
// stem the word
diff --git a/tests/js/language_data.js b/tests/js/language_data.js
index 15e4a8447f0..47c81f4a2a5 100644
--- a/tests/js/language_data.js
+++ b/tests/js/language_data.js
@@ -3,10 +3,11 @@
* namely the list of stopwords, stemmer, scorer and splitter.
*/
-var stopwords = [];
+const stopwords = new Set([]);
+window.stopwords = stopwords; // Export to global scope
-/* Non-minified version is copied as a separate JS file, if available */
+/* Non-minified versions are copied as separate JavaScript files, if available */
/**
* Dummy stemmer for languages without stemming rules.
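
The ``Set`` conversion is the usual membership-test trade-off:
``Array.prototype.indexOf`` scans linearly, while ``Set.has`` is a hash
lookup. A rough Python analogue of the before/after::

    stopword_list = ['a', 'and', 'are', 'as', 'at']  # old: Array + indexOf
    stopword_set = set(stopword_list)                # new: Set + has

    term = 'and'
    assert (term in stopword_list) == (term in stopword_set)
    # Both agree, but only the set lookup stays O(1) on average as the
    # English list grows to the full Snowball stop-word list above.
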
From d71d399d06aed77a01c18953d6192aab4b05eb89 Mon Sep 17 00:00:00 2001
From: Spencer Brown
Date: Tue, 20 May 2025 11:58:25 +1000
Subject: [PATCH 081/466] autodoc: Support
``typing_extensions.{final,overload}`` (#13509)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
---
CHANGES.rst | 3 ++
sphinx/pycode/parser.py | 42 +++++++++----------
tests/roots/test-ext-autodoc/target/final.py | 11 +++++
.../test-ext-autodoc/target/overload3.py | 18 ++++++++
tests/test_extensions/test_ext_autodoc.py | 34 +++++++++++++++
5 files changed, 85 insertions(+), 23 deletions(-)
create mode 100644 tests/roots/test-ext-autodoc/target/overload3.py
diff --git a/CHANGES.rst b/CHANGES.rst
index c0ed8089a60..9b86d2df25e 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -22,6 +22,9 @@ Features added
* #13497: Support C domain objects in the table of contents.
* #13535: html search: Update to the latest version of Snowball (v3.0.1).
Patch by Adam Turner.
+* #13704: autodoc: Detect :py:func:`typing_extensions.overload`
+ and :py:func:`~typing.final` decorators.
+ Patch by Spencer Brown.
Bugs fixed
----------
diff --git a/sphinx/pycode/parser.py b/sphinx/pycode/parser.py
index 2390b19d4d3..43081c61f13 100644
--- a/sphinx/pycode/parser.py
+++ b/sphinx/pycode/parser.py
@@ -247,9 +247,9 @@ def __init__(self, buffers: list[str], encoding: str) -> None:
self.deforders: dict[str, int] = {}
self.finals: list[str] = []
self.overloads: dict[str, list[Signature]] = {}
- self.typing: str | None = None
- self.typing_final: str | None = None
- self.typing_overload: str | None = None
+ self.typing_mods: set[str] = set()
+ self.typing_final_names: set[str] = set()
+ self.typing_overload_names: set[str] = set()
super().__init__()
def get_qualname_for(self, name: str) -> list[str] | None:
@@ -295,11 +295,8 @@ def add_variable_annotation(self, name: str, annotation: ast.AST) -> None:
self.annotations[basename, name] = ast_unparse(annotation)
def is_final(self, decorators: list[ast.expr]) -> bool:
- final = []
- if self.typing:
- final.append('%s.final' % self.typing)
- if self.typing_final:
- final.append(self.typing_final)
+ final = {f'{modname}.final' for modname in self.typing_mods}
+ final |= self.typing_final_names
for decorator in decorators:
try:
@@ -311,11 +308,8 @@ def is_final(self, decorators: list[ast.expr]) -> bool:
return False
def is_overload(self, decorators: list[ast.expr]) -> bool:
- overload = []
- if self.typing:
- overload.append('%s.overload' % self.typing)
- if self.typing_overload:
- overload.append(self.typing_overload)
+ overload = {f'{modname}.overload' for modname in self.typing_mods}
+ overload |= self.typing_overload_names
for decorator in decorators:
try:
@@ -348,22 +342,24 @@ def visit_Import(self, node: ast.Import) -> None:
for name in node.names:
self.add_entry(name.asname or name.name)
- if name.name == 'typing':
- self.typing = name.asname or name.name
- elif name.name == 'typing.final':
- self.typing_final = name.asname or name.name
- elif name.name == 'typing.overload':
- self.typing_overload = name.asname or name.name
+ if name.name in {'typing', 'typing_extensions'}:
+ self.typing_mods.add(name.asname or name.name)
+ elif name.name in {'typing.final', 'typing_extensions.final'}:
+ self.typing_final_names.add(name.asname or name.name)
+ elif name.name in {'typing.overload', 'typing_extensions.overload'}:
+ self.typing_overload_names.add(name.asname or name.name)
def visit_ImportFrom(self, node: ast.ImportFrom) -> None:
"""Handles Import node and record the order of definitions."""
for name in node.names:
self.add_entry(name.asname or name.name)
- if node.module == 'typing' and name.name == 'final':
- self.typing_final = name.asname or name.name
- elif node.module == 'typing' and name.name == 'overload':
- self.typing_overload = name.asname or name.name
+ if node.module not in {'typing', 'typing_extensions'}:
+ continue
+ if name.name == 'final':
+ self.typing_final_names.add(name.asname or name.name)
+ elif name.name == 'overload':
+ self.typing_overload_names.add(name.asname or name.name)
def visit_Assign(self, node: ast.Assign) -> None:
"""Handles Assign node and pick up a variable comment."""
diff --git a/tests/roots/test-ext-autodoc/target/final.py b/tests/roots/test-ext-autodoc/target/final.py
index a8c3860e384..bd233abb580 100644
--- a/tests/roots/test-ext-autodoc/target/final.py
+++ b/tests/roots/test-ext-autodoc/target/final.py
@@ -3,6 +3,9 @@
import typing
from typing import final
+import typing_extensions
+from typing_extensions import final as final_ext # noqa: UP035
+
@typing.final
class Class:
@@ -14,3 +17,11 @@ def meth1(self):
def meth2(self):
"""docstring"""
+
+ @final_ext
+ def meth3(self):
+ """docstring"""
+
+ @typing_extensions.final
+ def meth4(self):
+ """docstring"""
diff --git a/tests/roots/test-ext-autodoc/target/overload3.py b/tests/roots/test-ext-autodoc/target/overload3.py
new file mode 100644
index 00000000000..a3cc34a9f85
--- /dev/null
+++ b/tests/roots/test-ext-autodoc/target/overload3.py
@@ -0,0 +1,18 @@
+import typing
+from typing import TYPE_CHECKING, overload
+
+import typing_extensions
+from typing_extensions import overload as over_ext # noqa: UP035
+
+
+@overload
+def test(x: int) -> int: ...
+@typing.overload
+def test(x: list[int]) -> list[int]: ...
+@over_ext
+def test(x: str) -> str: ...
+@typing_extensions.overload
+def test(x: float) -> float: ...
+def test(x):
+ """Documentation."""
+ return x
diff --git a/tests/test_extensions/test_ext_autodoc.py b/tests/test_extensions/test_ext_autodoc.py
index 7aa12db3c32..d7c41291e01 100644
--- a/tests/test_extensions/test_ext_autodoc.py
+++ b/tests/test_extensions/test_ext_autodoc.py
@@ -2823,6 +2823,20 @@ def test_final(app):
'',
' docstring',
'',
+ '',
+ ' .. py:method:: Class.meth3()',
+ ' :module: target.final',
+ ' :final:',
+ '',
+ ' docstring',
+ '',
+ '',
+ ' .. py:method:: Class.meth4()',
+ ' :module: target.final',
+ ' :final:',
+ '',
+ ' docstring',
+ '',
]
@@ -2896,6 +2910,26 @@ def test_overload2(app):
]
+@pytest.mark.sphinx('html', testroot='ext-autodoc')
+def test_overload3(app):
+ options = {'members': None}
+ actual = do_autodoc(app, 'module', 'target.overload3', options)
+ assert list(actual) == [
+ '',
+ '.. py:module:: target.overload3',
+ '',
+ '',
+ '.. py:function:: test(x: int) -> int',
+ ' test(x: list[int]) -> list[int]',
+ ' test(x: str) -> str',
+ ' test(x: float) -> float',
+ ' :module: target.overload3',
+ '',
+ ' Documentation.',
+ '',
+ ]
+
+
@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_pymodule_for_ModuleLevelDocumenter(app):
app.env.ref_context['py:module'] = 'target.classes'
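
The parser change records every alias under which ``typing`` or
``typing_extensions`` (or their ``final``/``overload`` members) are
imported, so all four decorator spellings in ``overload3.py`` are
recognised. A condensed stand-alone model of that bookkeeping — not
Sphinx's actual class::

    import ast

    typing_mods: set[str] = set()
    overload_names: set[str] = set()

    src = (
        'import typing\n'
        'import typing_extensions as te\n'
        'from typing_extensions import overload as over_ext\n'
    )
    for node in ast.walk(ast.parse(src)):
        if isinstance(node, ast.Import):
            for name in node.names:
                if name.name in {'typing', 'typing_extensions'}:
                    typing_mods.add(name.asname or name.name)
        elif isinstance(node, ast.ImportFrom) and node.module in {
            'typing', 'typing_extensions'
        }:
            overload_names.update(
                name.asname or name.name
                for name in node.names
                if name.name == 'overload'
            )

    overload = {f'{mod}.overload' for mod in typing_mods} | overload_names
    assert overload == {'typing.overload', 'te.overload', 'over_ext'}
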
From ee0e576aef3779dc330baa0f337e5d2c343d0c00 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Wed, 21 May 2025 00:32:00 +0100
Subject: [PATCH 082/466] Remove mypy overrides for
``test_transforms_move_module_targets`` (#13553)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
---
pyproject.toml | 1 -
.../test_transforms_move_module_targets.py | 11 ++++++++---
2 files changed, 8 insertions(+), 4 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 39b18f23104..9e1c15f859b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -283,7 +283,6 @@ module = [
"tests.test_theming.test_templating",
"tests.test_theming.test_theming",
# tests/test_transforms
- "tests.test_transforms.test_transforms_move_module_targets",
"tests.test_transforms.test_transforms_post_transforms_images",
"tests.test_transforms.test_transforms_reorder_nodes",
# tests/test_util
diff --git a/tests/test_transforms/test_transforms_move_module_targets.py b/tests/test_transforms/test_transforms_move_module_targets.py
index f64b7d6a500..839eb615adc 100644
--- a/tests/test_transforms/test_transforms_move_module_targets.py
+++ b/tests/test_transforms/test_transforms_move_module_targets.py
@@ -1,5 +1,7 @@
from __future__ import annotations
+from typing import TYPE_CHECKING
+
import pytest
from docutils import nodes
@@ -7,6 +9,9 @@
from sphinx.testing.util import SphinxTestApp
from sphinx.transforms import MoveModuleTargets
+if TYPE_CHECKING:
+ from pathlib import Path
+
CONTENT_PY = """\
move-module-targets
===================
@@ -29,7 +34,7 @@
],
)
@pytest.mark.usefixtures('rollback_sysmodules')
-def test_move_module_targets(tmp_path, content):
+def test_move_module_targets(tmp_path: Path, content: str) -> None:
# Test for the MoveModuleTargets transform
tmp_path.joinpath('conf.py').touch()
tmp_path.joinpath('index.rst').write_text(content, encoding='utf-8')
@@ -48,7 +53,7 @@ def test_move_module_targets(tmp_path, content):
@pytest.mark.usefixtures('rollback_sysmodules')
-def test_move_module_targets_no_section(tmp_path):
+def test_move_module_targets_no_section(tmp_path: Path) -> None:
# Test for the MoveModuleTargets transform
tmp_path.joinpath('conf.py').touch()
tmp_path.joinpath('index.rst').write_text(
@@ -63,7 +68,7 @@ def test_move_module_targets_no_section(tmp_path):
@pytest.mark.usefixtures('rollback_sysmodules')
-def test_move_module_targets_disabled(tmp_path):
+def test_move_module_targets_disabled(tmp_path: Path) -> None:
# Test for the MoveModuleTargets transform
tmp_path.joinpath('conf.py').touch()
tmp_path.joinpath('index.rst').write_text(CONTENT_PY, encoding='utf-8')
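
The ``Path`` import moves under ``TYPE_CHECKING`` because it is needed
only for annotations; with ``from __future__ import annotations``, the
annotations are never evaluated at runtime, so the module is not
imported there. The pattern in isolation::

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from pathlib import Path  # imported for type checkers only

    def touch_conf(tmp_path: Path) -> None:
        tmp_path.joinpath('conf.py').touch()
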
From 4dbdf802ce38a48a95e9648f190c00d0349f0443 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Wed, 21 May 2025 00:47:52 +0100
Subject: [PATCH 083/466] Remove mypy overrides for ``test_util_fileutil``
(#13552)
---
pyproject.toml | 1 -
tests/test_util/test_util_fileutil.py | 9 +++++----
2 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 9e1c15f859b..2a58ec2abe4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -338,7 +338,6 @@ module = [
# tests/test_transforms
"tests.test_transforms.test_transforms_post_transforms",
# tests/test_util
- "tests.test_util.test_util_fileutil",
"tests.test_util.test_util_i18n",
"tests.test_util.test_util_inspect",
"tests.test_util.test_util_logging",
diff --git a/tests/test_util/test_util_fileutil.py b/tests/test_util/test_util_fileutil.py
index 26b75d82e05..9311be58153 100644
--- a/tests/test_util/test_util_fileutil.py
+++ b/tests/test_util/test_util_fileutil.py
@@ -12,12 +12,13 @@
from sphinx._cli.util.errors import strip_escape_sequences
from sphinx.jinja2glue import BuiltinTemplateLoader
from sphinx.util.fileutil import _template_basename, copy_asset, copy_asset_file
+from sphinx.util.template import BaseRenderer
if TYPE_CHECKING:
from sphinx.testing.util import SphinxTestApp
-class DummyTemplateLoader(BuiltinTemplateLoader):
+class DummyTemplateLoader(BuiltinTemplateLoader, BaseRenderer):
def __init__(self) -> None:
super().__init__()
builder = mock.Mock()
@@ -26,7 +27,7 @@ def __init__(self) -> None:
self.init(builder)
-def test_copy_asset_file(tmp_path):
+def test_copy_asset_file(tmp_path: Path) -> None:
renderer = DummyTemplateLoader()
# copy normal file
@@ -69,7 +70,7 @@ def test_copy_asset_file(tmp_path):
assert (subdir2 / 'asset.txt.jinja').read_text(encoding='utf8') == '# {{var1}} data'
-def test_copy_asset(tmp_path):
+def test_copy_asset(tmp_path: Path) -> None:
renderer = DummyTemplateLoader()
# prepare source files
@@ -113,7 +114,7 @@ def test_copy_asset(tmp_path):
assert sidebar == 'sidebar: baz'
# copy with exclusion
- def excluded(path):
+ def excluded(path: str) -> bool:
return 'sidebar.html' in path or 'basic.css' in path
destdir = tmp_path / 'test3'
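
For context, ``copy_asset`` walks a source tree and calls ``excluded`` with
each candidate path as a string; returning ``True`` skips that entry. A rough
usage sketch (the directory names are hypothetical):

    from pathlib import Path

    from sphinx.util.fileutil import copy_asset


    def excluded(path: str) -> bool:
        # skip templated sidebars and stylesheets
        return 'sidebar.html' in path or 'basic.css' in path


    copy_asset(Path('source/_assets'), Path('build/_assets'), excluded=excluded)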
From 6d151533bc8f1f50c2bc58226ebff3290c9d90ac Mon Sep 17 00:00:00 2001
From: Shengyu Zhang
Date: Wed, 21 May 2025 09:32:21 +0800
Subject: [PATCH 084/466] Add tilde prefix support for the ``:py:deco:`` role
(#13545)
Co-authored-by: Adam Turner <9087854+aa-turner@users.noreply.github.com>
---
CHANGES.rst | 2 ++
sphinx/domains/python/__init__.py | 26 ++++++++++----------------
tests/test_domains/test_domain_py.py | 15 +++++++++++++++
3 files changed, 27 insertions(+), 16 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 9b86d2df25e..57e8e0efdf9 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -31,6 +31,8 @@ Bugs fixed
* #13369: Correctly parse and cross-reference unpacked type annotations.
Patch by Alicia Garcia-Raboso.
+* #13528: Add tilde ``~`` prefix support for :rst:role:`py:deco`.
+ Patch by Shengyu Zhang and Adam Turner.
Testing
-------
diff --git a/sphinx/domains/python/__init__.py b/sphinx/domains/python/__init__.py
index af923cae70e..fb030991464 100644
--- a/sphinx/domains/python/__init__.py
+++ b/sphinx/domains/python/__init__.py
@@ -29,7 +29,7 @@
from collections.abc import Iterable, Iterator, Sequence, Set
from typing import Any, ClassVar
- from docutils.nodes import Element, Node, TextElement
+ from docutils.nodes import Element, Node
from sphinx.addnodes import desc_signature, pending_xref
from sphinx.application import Sphinx
@@ -594,23 +594,17 @@ def process_link(
class _PyDecoXRefRole(PyXRefRole):
- def __init__(
+ def process_link(
self,
- fix_parens: bool = False,
- lowercase: bool = False,
- nodeclass: type[Element] | None = None,
- innernodeclass: type[TextElement] | None = None,
- warn_dangling: bool = False,
- ) -> None:
- super().__init__(
- fix_parens=True,
- lowercase=lowercase,
- nodeclass=nodeclass,
- innernodeclass=innernodeclass,
- warn_dangling=warn_dangling,
+ env: BuildEnvironment,
+ refnode: Element,
+ has_explicit_title: bool,
+ title: str,
+ target: str,
+ ) -> tuple[str, str]:
+ title, target = super().process_link(
+ env, refnode, has_explicit_title, title, target
)
-
- def update_title_and_target(self, title: str, target: str) -> tuple[str, str]:
return f'@{title}', target
diff --git a/tests/test_domains/test_domain_py.py b/tests/test_domains/test_domain_py.py
index 262773af35a..14346b0563a 100644
--- a/tests/test_domains/test_domain_py.py
+++ b/tests/test_domains/test_domain_py.py
@@ -1791,3 +1791,18 @@ def test_pep_695_and_pep_696_whitespaces_in_default(app, tp_list, tptext):
text = f'.. py:function:: f{tp_list}() -> Annotated[T, Qux[int]()]'
doctree = restructuredtext.parse(app, text)
assert doctree.astext() == f'\n\nf{tptext}() -> Annotated[T, Qux[int]()]\n\n'
+
+
+def test_deco_role(app):
+ text = """\
+.. py:decorator:: foo.bar
+ :no-contents-entry:
+ :no-index-entry:
+ :no-typesetting:
+"""
+
+ doctree = restructuredtext.parse(app, text + '\n:py:deco:`foo.bar`')
+ assert doctree.astext() == '\n\n\n\n@foo.bar'
+
+ doctree = restructuredtext.parse(app, text + '\n:py:deco:`~foo.bar`')
+ assert doctree.astext() == '\n\n\n\n@bar'
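
For documentation authors, the upshot mirrors the new test:
``:py:deco:`foo.bar``` renders its link text as ``@foo.bar``, while the tilde
form ``:py:deco:`~foo.bar``` shortens it to ``@bar``, matching the behaviour
of the tilde prefix in the other Python cross-reference roles.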
From ad360fd634c9c3378e1cc1693e6acd4cba29cf72 Mon Sep 17 00:00:00 2001
From: Victor Wheeler
Date: Tue, 20 May 2025 19:34:39 -0600
Subject: [PATCH 085/466] Fix parameter lists for two event callback functions
(#13516)
---
doc/extdev/event_callbacks.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/extdev/event_callbacks.rst b/doc/extdev/event_callbacks.rst
index 04eae51be1d..aec9a47e848 100644
--- a/doc/extdev/event_callbacks.rst
+++ b/doc/extdev/event_callbacks.rst
@@ -70,8 +70,8 @@ Below is an overview of the core event that happens during a build.
14. apply post-transforms (by priority): docutils.document -> docutils.document
15. event.doctree-resolved(app, doctree, docname)
- In the event that any reference nodes fail to resolve, the following may emit:
- - event.missing-reference(env, node, contnode)
- - event.warn-missing-reference(domain, node)
+ - event.missing-reference(app, env, node, contnode)
+ - event.warn-missing-reference(app, domain, node)
16. Generate output files
17. event.build-finished(app, exception)
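
The corrected signatures matter to extension authors, since both events pass
the application object first. A minimal sketch of handlers wired up in an
extension's ``setup()`` (handler bodies and returned metadata are
placeholders):

    from sphinx.application import Sphinx


    def on_missing_reference(app, env, node, contnode):
        # return a replacement node, or None to let other handlers try
        return None


    def on_warn_missing_reference(app, domain, node):
        # return True to mark the missing reference as handled
        return None


    def setup(app: Sphinx) -> dict:
        app.connect('missing-reference', on_missing_reference)
        app.connect('warn-missing-reference', on_warn_missing_reference)
        return {'parallel_read_safe': True}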
From d742ddc123978adc96a28cdaf366b33cbc562cf7 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Thu, 22 May 2025 05:51:04 +0100
Subject: [PATCH 086/466] Remove mypy overrides for
``tests/test_pycode/test_pycode.py`` (#13585)
---
pyproject.toml | 1 -
tests/test_pycode/test_pycode.py | 2 +-
2 files changed, 1 insertion(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 2a58ec2abe4..cc26c4e7782 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -333,7 +333,6 @@ module = [
# tests/test_intl
"tests.test_intl.test_intl",
# tests/test_pycode
- "tests.test_pycode.test_pycode",
"tests.test_pycode.test_pycode_ast",
# tests/test_transforms
"tests.test_transforms.test_transforms_post_transforms",
diff --git a/tests/test_pycode/test_pycode.py b/tests/test_pycode/test_pycode.py
index 51b525f7b5b..4caf5019b94 100644
--- a/tests/test_pycode/test_pycode.py
+++ b/tests/test_pycode/test_pycode.py
@@ -41,7 +41,7 @@ def test_ModuleAnalyzer_for_file() -> None:
assert analyzer.srcname == str(SPHINX_MODULE_PATH)
-def test_ModuleAnalyzer_for_module(rootdir):
+def test_ModuleAnalyzer_for_module(rootdir: Path) -> None:
analyzer = ModuleAnalyzer.for_module('sphinx')
assert analyzer.modname == 'sphinx'
assert analyzer.srcname == str(SPHINX_MODULE_PATH)
From 4451a0a18a5448f1a0e2acc68d5b76505fe39267 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Thu, 22 May 2025 05:54:49 +0100
Subject: [PATCH 087/466] Remove mypy overrides for
``tests/test_util/test_util.py`` (#13584)
---
pyproject.toml | 1 -
tests/test_util/test_util.py | 7 ++++++-
2 files changed, 6 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index cc26c4e7782..31c1345c9da 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -286,7 +286,6 @@ module = [
"tests.test_transforms.test_transforms_post_transforms_images",
"tests.test_transforms.test_transforms_reorder_nodes",
# tests/test_util
- "tests.test_util.test_util",
"tests.test_util.test_util_display",
"tests.test_util.test_util_docutils",
"tests.test_util.test_util_inventory",
diff --git a/tests/test_util/test_util.py b/tests/test_util/test_util.py
index e4881764680..ce403afd0ed 100644
--- a/tests/test_util/test_util.py
+++ b/tests/test_util/test_util.py
@@ -2,6 +2,8 @@
from __future__ import annotations
+from typing import TYPE_CHECKING
+
import pytest
import sphinx.util
@@ -29,8 +31,11 @@
relative_uri,
)
+if TYPE_CHECKING:
+ from pathlib import Path
+
-def test_ensuredir(tmp_path):
+def test_ensuredir(tmp_path: Path) -> None:
# Does not raise an exception for an existing directory.
ensuredir(tmp_path)
From a94fcd9e6602e2f444f53b8aa59575f9a596d362 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Thu, 22 May 2025 05:55:52 +0100
Subject: [PATCH 088/466] Remove mypy overrides for
``tests/test_util/test_util_inventory.py`` (#13582)
---
pyproject.toml | 1 -
tests/test_util/test_util_inventory.py | 2 +-
2 files changed, 1 insertion(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 31c1345c9da..0e7ba8a19b5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -288,7 +288,6 @@ module = [
# tests/test_util
"tests.test_util.test_util_display",
"tests.test_util.test_util_docutils",
- "tests.test_util.test_util_inventory",
# tests/test_writers
"tests.test_writers.test_docutilsconf",
]
diff --git a/tests/test_util/test_util_inventory.py b/tests/test_util/test_util_inventory.py
index 0cab37d7904..5432d8cc5ca 100644
--- a/tests/test_util/test_util_inventory.py
+++ b/tests/test_util/test_util_inventory.py
@@ -107,7 +107,7 @@ def _build_inventory(srcdir: Path) -> Path:
return app.outdir / 'objects.inv'
-def test_inventory_localization(tmp_path):
+def test_inventory_localization(tmp_path: Path) -> None:
# Build an app using Estonian (EE) locale
srcdir_et = _write_appconfig(tmp_path, 'et')
inventory_et = _build_inventory(srcdir_et)
From 2992f31115e52e6a59b48471dbf26a3761b2dfea Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Thu, 22 May 2025 11:19:45 +0100
Subject: [PATCH 089/466] Remove mypy overrides for
``tests/test_pycode/test_pycode_ast.py`` (#13586)
---
pyproject.toml | 2 --
tests/test_pycode/test_pycode_ast.py | 7 ++++---
2 files changed, 4 insertions(+), 5 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 0e7ba8a19b5..1185e9ac977 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -330,8 +330,6 @@ module = [
"tests.test_extensions.test_ext_napoleon_docstring",
# tests/test_intl
"tests.test_intl.test_intl",
- # tests/test_pycode
- "tests.test_pycode.test_pycode_ast",
# tests/test_transforms
"tests.test_transforms.test_transforms_post_transforms",
# tests/test_util
diff --git a/tests/test_pycode/test_pycode_ast.py b/tests/test_pycode/test_pycode_ast.py
index 409e5806d1b..9dd8c8f5d17 100644
--- a/tests/test_pycode/test_pycode_ast.py
+++ b/tests/test_pycode/test_pycode_ast.py
@@ -65,9 +65,10 @@
('*tuple[str, int]', '*tuple[str, int]'), # Starred
],
) # fmt: skip
-def test_unparse(source, expected):
- module = ast.parse(source)
- assert ast_unparse(module.body[0].value, source) == expected
+def test_unparse(source: str, expected: str) -> None:
+ expr = ast.parse(source).body[0]
+ assert isinstance(expr, ast.Expr)
+ assert ast_unparse(expr.value, source) == expected
def test_unparse_None() -> None:
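
The added ``isinstance`` check is not just cosmetic: ``ast.parse(...).body[0]``
is typed as ``ast.stmt``, which has no ``.value`` attribute, so narrowing to
``ast.Expr`` is what lets the attribute access type-check. A standalone
illustration:

    import ast

    stmt = ast.parse('1 + 2').body[0]
    assert isinstance(stmt, ast.Expr)  # narrows ast.stmt to ast.Expr
    print(ast.unparse(stmt.value))  # prints: 1 + 2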
From 1a69059295e297f987b58918bd40a6c93771f1d8 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Thu, 22 May 2025 11:20:37 +0100
Subject: [PATCH 090/466] Remove mypy overrides for
``tests/test_util/test_util_display.py`` (#13583)
---
pyproject.toml | 1 -
tests/test_util/test_util_display.py | 10 +++++++---
2 files changed, 7 insertions(+), 4 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 1185e9ac977..c16f5f48f83 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -286,7 +286,6 @@ module = [
"tests.test_transforms.test_transforms_post_transforms_images",
"tests.test_transforms.test_transforms_reorder_nodes",
# tests/test_util
- "tests.test_util.test_util_display",
"tests.test_util.test_util_docutils",
# tests/test_writers
"tests.test_writers.test_docutilsconf",
diff --git a/tests/test_util/test_util_display.py b/tests/test_util/test_util_display.py
index a3dda71b999..f4fa9c997ca 100644
--- a/tests/test_util/test_util_display.py
+++ b/tests/test_util/test_util_display.py
@@ -41,7 +41,9 @@ def test_status_iterator_length_0(app: SphinxTestApp) -> None:
@pytest.mark.sphinx('dummy', testroot='root')
-def test_status_iterator_verbosity_0(app, monkeypatch):
+def test_status_iterator_verbosity_0(
+ app: SphinxTestApp, monkeypatch: pytest.MonkeyPatch
+) -> None:
monkeypatch.setenv('FORCE_COLOR', '1')
logging.setup(app, app.status, app.warning)
@@ -59,7 +61,9 @@ def test_status_iterator_verbosity_0(app, monkeypatch):
@pytest.mark.sphinx('dummy', testroot='root')
-def test_status_iterator_verbosity_1(app, monkeypatch):
+def test_status_iterator_verbosity_1(
+ app: SphinxTestApp, monkeypatch: pytest.MonkeyPatch
+) -> None:
monkeypatch.setenv('FORCE_COLOR', '1')
logging.setup(app, app.status, app.warning)
@@ -107,7 +111,7 @@ def test_progress_message(app: SphinxTestApp) -> None:
# decorator
@progress_message('testing')
- def func():
+ def func() -> None:
logger.info('in func ', nonl=True)
func()
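
For readers unfamiliar with it, ``progress_message`` wraps a block of work in
a status line and is usable both as a decorator (as above) and as a context
manager. A small sketch, with an illustrative message:

    from sphinx.util.display import progress_message

    with progress_message('loading data'):
        ...  # on success the status line reads 'loading data... done'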
From 2c2159fb9ef3520c75152532cad5c44c21986562 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sat, 24 May 2025 02:15:04 +0100
Subject: [PATCH 091/466] Adopt Prettier for JavaScript formatting (#13581)
---
.github/workflows/lint.yml | 18 ++
.prettierrc.toml | 2 +
sphinx/search/__init__.py | 8 +-
sphinx/themes/basic/static/doctools.js | 11 +-
sphinx/themes/basic/static/searchtools.js | 154 ++++++++++------
.../themes/basic/static/sphinx_highlight.js | 59 +++---
sphinx/themes/scrolls/static/theme_extras.js | 22 +--
tests/js/jasmine-browser.mjs | 22 +--
tests/js/language_data.js | 12 +-
tests/js/searchtools.spec.js | 171 ++++++++++--------
tests/js/sphinx_highlight.spec.js | 50 ++---
tox.ini | 6 +
12 files changed, 314 insertions(+), 221 deletions(-)
create mode 100644 .prettierrc.toml
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index d051e626886..010a6781aef 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -123,3 +123,21 @@ jobs:
run: |
python -m build .
twine check dist/*
+
+ prettier:
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ cache: "npm"
+ - run: >
+ npx prettier@3.5
+ --check
+ "sphinx/themes/**/*.js"
+ "!sphinx/themes/bizstyle/static/css3-mediaqueries*.js"
+ "tests/js/**/*.{js,mjs}"
+ "!tests/js/fixtures/**"
diff --git a/.prettierrc.toml b/.prettierrc.toml
new file mode 100644
index 00000000000..1799612bfdd
--- /dev/null
+++ b/.prettierrc.toml
@@ -0,0 +1,2 @@
+# https://prettier.io/docs/options
+experimentalOperatorPosition = "start"
diff --git a/sphinx/search/__init__.py b/sphinx/search/__init__.py
index cc997bf6456..187e6a2f37f 100644
--- a/sphinx/search/__init__.py
+++ b/sphinx/search/__init__.py
@@ -81,11 +81,11 @@ class SearchLanguage:
/**
* Dummy stemmer for languages without stemming rules.
*/
-var Stemmer = function() {
- this.stemWord = function(w) {
+var Stemmer = function () {
+ this.stemWord = function (w) {
return w;
- }
-}
+ };
+};
"""
_word_re = re.compile(r'\w+')
diff --git a/sphinx/themes/basic/static/doctools.js b/sphinx/themes/basic/static/doctools.js
index 0398ebb9f03..807cdb176c0 100644
--- a/sphinx/themes/basic/static/doctools.js
+++ b/sphinx/themes/basic/static/doctools.js
@@ -59,7 +59,7 @@ const Documentation = {
Object.assign(Documentation.TRANSLATIONS, catalog.messages);
Documentation.PLURAL_EXPR = new Function(
"n",
- `return (${catalog.plural_expr})`
+ `return (${catalog.plural_expr})`,
);
Documentation.LOCALE = catalog.locale;
},
@@ -89,7 +89,7 @@ const Documentation = {
const togglerElements = document.querySelectorAll("img.toggler");
togglerElements.forEach((el) =>
- el.addEventListener("click", (event) => toggler(event.currentTarget))
+ el.addEventListener("click", (event) => toggler(event.currentTarget)),
);
togglerElements.forEach((el) => (el.style.display = ""));
if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler);
@@ -98,14 +98,15 @@ const Documentation = {
initOnKeyListeners: () => {
// only install a listener if it is really needed
if (
- !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS &&
- !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS
+ !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS
+ && !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS
)
return;
document.addEventListener("keydown", (event) => {
// bail for input elements
- if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return;
+ if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName))
+ return;
// bail with special keys
if (event.altKey || event.ctrlKey || event.metaKey) return;
diff --git a/sphinx/themes/basic/static/searchtools.js b/sphinx/themes/basic/static/searchtools.js
index ba5e67aa75e..5a7628a18a2 100644
--- a/sphinx/themes/basic/static/searchtools.js
+++ b/sphinx/themes/basic/static/searchtools.js
@@ -41,11 +41,12 @@ if (typeof Scorer === "undefined") {
}
// Global search result kind enum, used by themes to style search results.
+// prettier-ignore
class SearchResultKind {
- static get index() { return "index"; }
- static get object() { return "object"; }
- static get text() { return "text"; }
- static get title() { return "title"; }
+ static get index() { return "index"; }
+ static get object() { return "object"; }
+ static get text() { return "text"; }
+ static get title() { return "title"; }
}
const _removeChildren = (element) => {
@@ -95,20 +96,25 @@ const _displayItem = (item, searchTerms, highlightTerms) => {
listItem.appendChild(document.createElement("span")).innerHTML =
" (" + descr + ")";
// highlight search terms in the description
- if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js
- highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted"));
- }
- else if (showSearchSummary)
+ if (SPHINX_HIGHLIGHT_ENABLED)
+ // SPHINX_HIGHLIGHT_ENABLED is set in sphinx_highlight.js
+ highlightTerms.forEach((term) =>
+ _highlightText(listItem, term, "highlighted"),
+ );
+ } else if (showSearchSummary)
fetch(requestUrl)
.then((responseData) => responseData.text())
.then((data) => {
if (data)
listItem.appendChild(
- Search.makeSearchSummary(data, searchTerms, anchor)
+ Search.makeSearchSummary(data, searchTerms, anchor),
);
// highlight search terms in the summary
- if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js
- highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted"));
+ if (SPHINX_HIGHLIGHT_ENABLED)
+ // SPHINX_HIGHLIGHT_ENABLED is set in sphinx_highlight.js
+ highlightTerms.forEach((term) =>
+ _highlightText(listItem, term, "highlighted"),
+ );
});
Search.output.appendChild(listItem);
};
@@ -117,14 +123,14 @@ const _finishSearch = (resultCount) => {
Search.title.innerText = _("Search Results");
if (!resultCount)
Search.status.innerText = Documentation.gettext(
- "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories."
+ "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories.",
);
else
Search.status.innerText = Documentation.ngettext(
"Search finished, found one page matching the search query.",
"Search finished, found ${resultCount} pages matching the search query.",
resultCount,
- ).replace('${resultCount}', resultCount);
+ ).replace("${resultCount}", resultCount);
};
const _displayNextItem = (
results,
@@ -138,7 +144,7 @@ const _displayNextItem = (
_displayItem(results.pop(), searchTerms, highlightTerms);
setTimeout(
() => _displayNextItem(results, resultCount, searchTerms, highlightTerms),
- 5
+ 5,
);
}
// search finished, update title and status message
@@ -170,9 +176,10 @@ const _orderResultsByScoreThenName = (a, b) => {
* This is the same as ``\W+`` in Python, preserving the surrogate pair area.
*/
if (typeof splitQuery === "undefined") {
- var splitQuery = (query) => query
+ var splitQuery = (query) =>
+ query
.split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu)
- .filter(term => term) // remove remaining empty strings
+ .filter((term) => term); // remove remaining empty strings
}
/**
@@ -184,16 +191,23 @@ const Search = {
_pulse_status: -1,
htmlToText: (htmlString, anchor) => {
- const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html');
+ const htmlElement = new DOMParser().parseFromString(
+ htmlString,
+ "text/html",
+ );
for (const removalQuery of [".headerlink", "script", "style"]) {
- htmlElement.querySelectorAll(removalQuery).forEach((el) => { el.remove() });
+ htmlElement.querySelectorAll(removalQuery).forEach((el) => {
+ el.remove();
+ });
}
if (anchor) {
- const anchorContent = htmlElement.querySelector(`[role="main"] ${anchor}`);
+ const anchorContent = htmlElement.querySelector(
+ `[role="main"] ${anchor}`,
+ );
if (anchorContent) return anchorContent.textContent;
console.warn(
- `Anchored content block not found. Sphinx search tries to obtain it via DOM query '[role=main] ${anchor}'. Check your theme or template.`
+ `Anchored content block not found. Sphinx search tries to obtain it via DOM query '[role=main] ${anchor}'. Check your theme or template.`,
);
}
@@ -202,7 +216,7 @@ const Search = {
if (docContent) return docContent.textContent;
console.warn(
- "Content block not found. Sphinx search tries to obtain it via DOM query '[role=main]'. Check your theme or template."
+ "Content block not found. Sphinx search tries to obtain it via DOM query '[role=main]'. Check your theme or template.",
);
return "";
},
@@ -288,8 +302,7 @@ const Search = {
// maybe skip this "word"
// stopwords set is from language_data.js
- if (stopwords.has(queryTermLower) || queryTerm.match(/^\d+$/))
- return;
+ if (stopwords.has(queryTermLower) || queryTerm.match(/^\d+$/)) return;
// stem the word
let word = stemmer.stemWord(queryTermLower);
@@ -301,8 +314,12 @@ const Search = {
}
});
- if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js
- localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" "))
+ if (SPHINX_HIGHLIGHT_ENABLED) {
+ // SPHINX_HIGHLIGHT_ENABLED is set in sphinx_highlight.js
+ localStorage.setItem(
+ "sphinx_highlight_terms",
+ [...highlightTerms].join(" "),
+ );
}
// console.debug("SEARCH: searching for:");
@@ -315,7 +332,13 @@ const Search = {
/**
* execute search (requires search index to be loaded)
*/
- _performSearch: (query, searchTerms, excludedTerms, highlightTerms, objectTerms) => {
+ _performSearch: (
+ query,
+ searchTerms,
+ excludedTerms,
+ highlightTerms,
+ objectTerms,
+ ) => {
const filenames = Search._index.filenames;
const docNames = Search._index.docnames;
const titles = Search._index.titles;
@@ -331,10 +354,15 @@ const Search = {
const queryLower = query.toLowerCase().trim();
for (const [title, foundTitles] of Object.entries(allTitles)) {
- if (title.toLowerCase().trim().includes(queryLower) && (queryLower.length >= title.length/2)) {
+ if (
+ title.toLowerCase().trim().includes(queryLower)
+ && queryLower.length >= title.length / 2
+ ) {
for (const [file, id] of foundTitles) {
- const score = Math.round(Scorer.title * queryLower.length / title.length);
- const boost = titles[file] === title ? 1 : 0; // add a boost for document titles
+ const score = Math.round(
+ (Scorer.title * queryLower.length) / title.length,
+ );
+ const boost = titles[file] === title ? 1 : 0; // add a boost for document titles
normalResults.push([
docNames[file],
titles[file] !== title ? `${titles[file]} > ${title}` : title,
@@ -350,9 +378,9 @@ const Search = {
// search for explicit entries in index directives
for (const [entry, foundEntries] of Object.entries(indexEntries)) {
- if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) {
+ if (entry.includes(queryLower) && queryLower.length >= entry.length / 2) {
for (const [file, id, isMain] of foundEntries) {
- const score = Math.round(100 * queryLower.length / entry.length);
+ const score = Math.round((100 * queryLower.length) / entry.length);
const result = [
docNames[file],
titles[file],
@@ -373,11 +401,13 @@ const Search = {
// lookup as object
objectTerms.forEach((term) =>
- normalResults.push(...Search.performObjectSearch(term, objectTerms))
+ normalResults.push(...Search.performObjectSearch(term, objectTerms)),
);
// lookup as search terms in fulltext
- normalResults.push(...Search.performTermsSearch(searchTerms, excludedTerms));
+ normalResults.push(
+ ...Search.performTermsSearch(searchTerms, excludedTerms),
+ );
// let the scorer override scores with a custom scoring function
if (Scorer.score) {
@@ -398,7 +428,11 @@ const Search = {
// note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept
let seen = new Set();
results = results.reverse().reduce((acc, result) => {
- let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(',');
+ let resultStr = result
+ .slice(0, 4)
+ .concat([result[5]])
+ .map((v) => String(v))
+ .join(",");
if (!seen.has(resultStr)) {
acc.push(result);
seen.add(resultStr);
@@ -410,8 +444,20 @@ const Search = {
},
query: (query) => {
- const [searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms] = Search._parseQuery(query);
- const results = Search._performSearch(searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms);
+ const [
+ searchQuery,
+ searchTerms,
+ excludedTerms,
+ highlightTerms,
+ objectTerms,
+ ] = Search._parseQuery(query);
+ const results = Search._performSearch(
+ searchQuery,
+ searchTerms,
+ excludedTerms,
+ highlightTerms,
+ objectTerms,
+ );
// for debugging
//Search.lastresults = results.slice(); // a copy
@@ -434,7 +480,7 @@ const Search = {
const results = [];
const objectSearchCallback = (prefix, match) => {
- const name = match[4]
+ const name = match[4];
const fullname = (prefix ? prefix + "." : "") + name;
const fullnameLower = fullname.toLowerCase();
if (fullnameLower.indexOf(object) < 0) return;
@@ -486,9 +532,7 @@ const Search = {
]);
};
Object.keys(objects).forEach((prefix) =>
- objects[prefix].forEach((array) =>
- objectSearchCallback(prefix, array)
- )
+ objects[prefix].forEach((array) => objectSearchCallback(prefix, array)),
);
return results;
},
@@ -513,8 +557,14 @@ const Search = {
// find documents, if any, containing the query word in their text/title term indices
// use Object.hasOwnProperty to avoid mismatching against prototype properties
const arr = [
- { files: terms.hasOwnProperty(word) ? terms[word] : undefined, score: Scorer.term },
- { files: titleTerms.hasOwnProperty(word) ? titleTerms[word] : undefined, score: Scorer.title },
+ {
+ files: terms.hasOwnProperty(word) ? terms[word] : undefined,
+ score: Scorer.term,
+ },
+ {
+ files: titleTerms.hasOwnProperty(word) ? titleTerms[word] : undefined,
+ score: Scorer.title,
+ },
];
// add support for partial matches
if (word.length > 2) {
@@ -555,7 +605,8 @@ const Search = {
// create the mapping
files.forEach((file) => {
if (!fileMap.has(file)) fileMap.set(file, [word]);
- else if (fileMap.get(file).indexOf(word) === -1) fileMap.get(file).push(word);
+ else if (fileMap.get(file).indexOf(word) === -1)
+ fileMap.get(file).push(word);
});
});
@@ -566,11 +617,11 @@ const Search = {
// as search terms with length < 3 are discarded
const filteredTermCount = [...searchTerms].filter(
- (term) => term.length > 2
+ (term) => term.length > 2,
).length;
if (
- wordList.length !== searchTerms.size &&
- wordList.length !== filteredTermCount
+ wordList.length !== searchTerms.size
+ && wordList.length !== filteredTermCount
)
continue;
@@ -578,10 +629,10 @@ const Search = {
if (
[...excludedTerms].some(
(term) =>
- terms[term] === file ||
- titleTerms[term] === file ||
- (terms[term] || []).includes(file) ||
- (titleTerms[term] || []).includes(file)
+ terms[term] === file
+ || titleTerms[term] === file
+ || (terms[term] || []).includes(file)
+ || (titleTerms[term] || []).includes(file),
)
)
break;
@@ -623,7 +674,8 @@ const Search = {
let summary = document.createElement("p");
summary.classList.add("context");
- summary.textContent = top + text.substr(startWithContext, 240).trim() + tail;
+ summary.textContent =
+ top + text.substr(startWithContext, 240).trim() + tail;
return summary;
},
diff --git a/sphinx/themes/basic/static/sphinx_highlight.js b/sphinx/themes/basic/static/sphinx_highlight.js
index 8a96c69a194..ce735d52ee4 100644
--- a/sphinx/themes/basic/static/sphinx_highlight.js
+++ b/sphinx/themes/basic/static/sphinx_highlight.js
@@ -1,7 +1,7 @@
/* Highlighting utilities for Sphinx HTML documentation. */
"use strict";
-const SPHINX_HIGHLIGHT_ENABLED = true
+const SPHINX_HIGHLIGHT_ENABLED = true;
/**
* highlight a given string on a node by wrapping it in
@@ -13,9 +13,9 @@ const _highlight = (node, addItems, text, className) => {
const parent = node.parentNode;
const pos = val.toLowerCase().indexOf(text);
if (
- pos >= 0 &&
- !parent.classList.contains(className) &&
- !parent.classList.contains("nohighlight")
+ pos >= 0
+ && !parent.classList.contains(className)
+ && !parent.classList.contains("nohighlight")
) {
let span;
@@ -30,13 +30,7 @@ const _highlight = (node, addItems, text, className) => {
span.appendChild(document.createTextNode(val.substr(pos, text.length)));
const rest = document.createTextNode(val.substr(pos + text.length));
- parent.insertBefore(
- span,
- parent.insertBefore(
- rest,
- node.nextSibling
- )
- );
+ parent.insertBefore(span, parent.insertBefore(rest, node.nextSibling));
node.nodeValue = val.substr(0, pos);
/* There may be more occurrences of search term in this node. So call this
* function recursively on the remaining fragment.
@@ -46,7 +40,7 @@ const _highlight = (node, addItems, text, className) => {
if (isInSVG) {
const rect = document.createElementNS(
"http://www.w3.org/2000/svg",
- "rect"
+ "rect",
);
const bbox = parent.getBBox();
rect.x.baseVal.value = bbox.x;
@@ -65,7 +59,7 @@ const _highlightText = (thisNode, text, className) => {
let addItems = [];
_highlight(thisNode, addItems, text, className);
addItems.forEach((obj) =>
- obj.parent.insertAdjacentElement("beforebegin", obj.target)
+ obj.parent.insertAdjacentElement("beforebegin", obj.target),
);
};
@@ -73,25 +67,27 @@ const _highlightText = (thisNode, text, className) => {
* Small JavaScript module for the documentation.
*/
const SphinxHighlight = {
-
/**
* highlight the search words provided in localstorage in the text
*/
highlightSearchWords: () => {
- if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight
+ if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight
// get and clear terms from localstorage
const url = new URL(window.location);
const highlight =
- localStorage.getItem("sphinx_highlight_terms")
- || url.searchParams.get("highlight")
- || "";
- localStorage.removeItem("sphinx_highlight_terms")
+ localStorage.getItem("sphinx_highlight_terms")
+ || url.searchParams.get("highlight")
+ || "";
+ localStorage.removeItem("sphinx_highlight_terms");
url.searchParams.delete("highlight");
window.history.replaceState({}, "", url);
// get individual terms from highlight string
- const terms = highlight.toLowerCase().split(/\s+/).filter(x => x);
+ const terms = highlight
+ .toLowerCase()
+ .split(/\s+/)
+ .filter((x) => x);
if (terms.length === 0) return; // nothing to do
// There should never be more than one element matching "div.body"
@@ -107,11 +103,11 @@ const SphinxHighlight = {
document
.createRange()
.createContextualFragment(
- '' +
- '' +
- _("Hide Search Matches") +
- "
"
- )
+ ''
+ + ''
+ + _("Hide Search Matches")
+ + "
",
+ ),
);
},
@@ -125,7 +121,7 @@ const SphinxHighlight = {
document
.querySelectorAll("span.highlighted")
.forEach((el) => el.classList.remove("highlighted"));
- localStorage.removeItem("sphinx_highlight_terms")
+ localStorage.removeItem("sphinx_highlight_terms");
},
initEscapeListener: () => {
@@ -134,10 +130,15 @@ const SphinxHighlight = {
document.addEventListener("keydown", (event) => {
// bail for input elements
- if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return;
+ if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName))
+ return;
// bail with special keys
- if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return;
- if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) {
+ if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey)
+ return;
+ if (
+ DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS
+ && event.key === "Escape"
+ ) {
SphinxHighlight.hideSearchWords();
event.preventDefault();
}
diff --git a/sphinx/themes/scrolls/static/theme_extras.js b/sphinx/themes/scrolls/static/theme_extras.js
index df2be407339..84cc1509808 100644
--- a/sphinx/themes/scrolls/static/theme_extras.js
+++ b/sphinx/themes/scrolls/static/theme_extras.js
@@ -1,12 +1,12 @@
const initialiseThemeExtras = () => {
- const toc = document.getElementById("toc")
- toc.style.display = ""
- const items = toc.getElementsByTagName("ul")[0]
- items.style.display = "none"
- toc.getElementsByTagName("h3").addEventListener("click", () => {
- if (items.style.display !== "none") toc.classList.remove("expandedtoc")
- else toc.classList.add("expandedtoc");
- })
-}
-if (document.readyState !== "loading") initialiseThemeExtras()
-else document.addEventListener("DOMContentLoaded", initialiseThemeExtras)
+ const toc = document.getElementById("toc");
+ toc.style.display = "";
+ const items = toc.getElementsByTagName("ul")[0];
+ items.style.display = "none";
+ toc.getElementsByTagName("h3").addEventListener("click", () => {
+ if (items.style.display !== "none") toc.classList.remove("expandedtoc");
+ else toc.classList.add("expandedtoc");
+ });
+};
+if (document.readyState !== "loading") initialiseThemeExtras();
+else document.addEventListener("DOMContentLoaded", initialiseThemeExtras);
diff --git a/tests/js/jasmine-browser.mjs b/tests/js/jasmine-browser.mjs
index b84217fd8c5..f11c04b95b7 100644
--- a/tests/js/jasmine-browser.mjs
+++ b/tests/js/jasmine-browser.mjs
@@ -1,28 +1,26 @@
export default {
srcDir: ".",
srcFiles: [
- 'sphinx/themes/basic/static/doctools.js',
- 'sphinx/themes/basic/static/searchtools.js',
- 'sphinx/themes/basic/static/sphinx_highlight.js',
- 'tests/js/fixtures/**/*.js',
- 'tests/js/documentation_options.js',
- 'tests/js/language_data.js',
+ "sphinx/themes/basic/static/doctools.js",
+ "sphinx/themes/basic/static/searchtools.js",
+ "sphinx/themes/basic/static/sphinx_highlight.js",
+ "tests/js/fixtures/**/*.js",
+ "tests/js/documentation_options.js",
+ "tests/js/language_data.js",
],
specDir: "tests/js",
- specFiles: [
- '**/*.spec.js',
- ],
+ specFiles: ["**/*.spec.js"],
helpers: [],
env: {
stopSpecOnExpectationFailure: false,
stopOnSpecFailure: false,
- random: true
+ random: true,
},
listenAddress: "127.0.0.1",
hostname: "127.0.0.1",
browser: {
- name: "headlessFirefox"
- }
+ name: "headlessFirefox",
+ },
};
diff --git a/tests/js/language_data.js b/tests/js/language_data.js
index 47c81f4a2a5..c98e91ff6e2 100644
--- a/tests/js/language_data.js
+++ b/tests/js/language_data.js
@@ -4,17 +4,15 @@
*/
const stopwords = new Set([]);
-window.stopwords = stopwords; // Export to global scope
-
+window.stopwords = stopwords; // Export to global scope
/* Non-minified versions are copied as separate JavaScript files, if available */
/**
* Dummy stemmer for languages without stemming rules.
*/
-var Stemmer = function() {
- this.stemWord = function(w) {
+var Stemmer = function () {
+ this.stemWord = function (w) {
return w;
- }
-}
-
+ };
+};
diff --git a/tests/js/searchtools.spec.js b/tests/js/searchtools.spec.js
index 809fd19d0f4..d00689c907c 100644
--- a/tests/js/searchtools.spec.js
+++ b/tests/js/searchtools.spec.js
@@ -1,10 +1,9 @@
-describe('Basic html theme search', function() {
-
+describe("Basic html theme search", function () {
function loadFixture(name) {
- req = new XMLHttpRequest();
- req.open("GET", `__src__/tests/js/fixtures/${name}`, false);
- req.send(null);
- return req.responseText;
+ req = new XMLHttpRequest();
+ req.open("GET", `__src__/tests/js/fixtures/${name}`, false);
+ req.send(null);
+ return req.responseText;
}
function checkRanking(expectedRanking, results) {
@@ -16,7 +15,11 @@ describe('Basic html theme search', function() {
let [expectedPage, expectedTitle, expectedTarget] = nextExpected;
let [page, title, target] = result;
- if (page == expectedPage && title == expectedTitle && target == expectedTarget) {
+ if (
+ page == expectedPage
+ && title == expectedTitle
+ && target == expectedTarget
+ ) {
[nextExpected, ...remainingItems] = remainingItems;
}
}
@@ -25,13 +28,14 @@ describe('Basic html theme search', function() {
expect(nextExpected).toEqual(undefined);
}
- describe('terms search', function() {
-
- it('should find "C++" when in index', function() {
+ describe("terms search", function () {
+ it('should find "C++" when in index', function () {
eval(loadFixture("cpp/searchindex.js"));
- [_searchQuery, searchterms, excluded, ..._remainingItems] = Search._parseQuery('C++');
+ [_searchQuery, searchterms, excluded, ..._remainingItems] =
+ Search._parseQuery("C++");
+ // prettier-ignore
hits = [[
"index",
"<no title>",
@@ -44,10 +48,12 @@ describe('Basic html theme search', function() {
expect(Search.performTermsSearch(searchterms, excluded)).toEqual(hits);
});
- it('should be able to search for multiple terms', function() {
+ it("should be able to search for multiple terms", function () {
eval(loadFixture("multiterm/searchindex.js"));
- [_searchQuery, searchterms, excluded, ..._remainingItems] = Search._parseQuery('main page');
+ [_searchQuery, searchterms, excluded, ..._remainingItems] =
+ Search._parseQuery("main page");
+ // prettier-ignore
hits = [[
'index',
'Main Page',
@@ -60,11 +66,13 @@ describe('Basic html theme search', function() {
expect(Search.performTermsSearch(searchterms, excluded)).toEqual(hits);
});
- it('should partially-match "sphinx" when in title index', function() {
+ it('should partially-match "sphinx" when in title index', function () {
eval(loadFixture("partial/searchindex.js"));
- [_searchQuery, searchterms, excluded, ..._remainingItems] = Search._parseQuery('sphinx');
+ [_searchQuery, searchterms, excluded, ..._remainingItems] =
+ Search._parseQuery("sphinx");
+ // prettier-ignore
hits = [[
"index",
"sphinx_utils module",
@@ -77,13 +85,15 @@ describe('Basic html theme search', function() {
expect(Search.performTermsSearch(searchterms, excluded)).toEqual(hits);
});
- it('should partially-match within "possible" when in term index', function() {
+ it('should partially-match within "possible" when in term index', function () {
eval(loadFixture("partial/searchindex.js"));
- [_searchQuery, searchterms, excluded, ..._remainingItems] = Search._parseQuery('ossibl');
+ [_searchQuery, searchterms, excluded, ..._remainingItems] =
+ Search._parseQuery("ossibl");
terms = Search._index.terms;
titleterms = Search._index.titleterms;
+ // prettier-ignore
hits = [[
"index",
"sphinx_utils module",
@@ -93,18 +103,19 @@ describe('Basic html theme search', function() {
"index.rst",
"text"
]];
- expect(Search.performTermsSearch(searchterms, excluded, terms, titleterms)).toEqual(hits);
+ expect(
+ Search.performTermsSearch(searchterms, excluded, terms, titleterms),
+ ).toEqual(hits);
});
-
});
- describe('aggregation of search results', function() {
-
- it('should combine document title and document term matches', function() {
+ describe("aggregation of search results", function () {
+ it("should combine document title and document term matches", function () {
eval(loadFixture("multiterm/searchindex.js"));
- searchParameters = Search._parseQuery('main page');
+ searchParameters = Search._parseQuery("main page");
+ // prettier-ignore
hits = [
[
'index',
@@ -118,11 +129,9 @@ describe('Basic html theme search', function() {
];
expect(Search._performSearch(...searchParameters)).toEqual(hits);
});
-
});
- describe('search result ranking', function() {
-
+ describe("search result ranking", function () {
/*
* These tests should not proscribe precise expected ordering of search
* results; instead each test case should describe a single relevance rule
@@ -137,95 +146,96 @@ describe('Basic html theme search', function() {
* [1] - https://github.com/sphinx-doc/sphinx.git/
*/
- it('should score a code module match above a page-title match', function() {
+ it("should score a code module match above a page-title match", function () {
eval(loadFixture("titles/searchindex.js"));
+ // prettier-ignore
expectedRanking = [
['index', 'relevance', '#module-relevance'], /* py:module documentation */
['relevance', 'Relevance', ''], /* main title */
];
- searchParameters = Search._parseQuery('relevance');
+ searchParameters = Search._parseQuery("relevance");
results = Search._performSearch(...searchParameters);
checkRanking(expectedRanking, results);
});
- it('should score a main-title match above an object member match', function() {
+ it("should score a main-title match above an object member match", function () {
eval(loadFixture("titles/searchindex.js"));
+ // prettier-ignore
expectedRanking = [
['relevance', 'Relevance', ''], /* main title */
['index', 'relevance.Example.relevance', '#relevance.Example.relevance'], /* py:class attribute */
];
- searchParameters = Search._parseQuery('relevance');
+ searchParameters = Search._parseQuery("relevance");
results = Search._performSearch(...searchParameters);
checkRanking(expectedRanking, results);
});
- it('should score a title match above a standard index entry match', function() {
+ it("should score a title match above a standard index entry match", function () {
eval(loadFixture("titles/searchindex.js"));
+ // prettier-ignore
expectedRanking = [
['relevance', 'Relevance', ''], /* title */
['index', 'Main Page', '#index-1'], /* index entry */
];
- searchParameters = Search._parseQuery('relevance');
+ searchParameters = Search._parseQuery("relevance");
results = Search._performSearch(...searchParameters);
checkRanking(expectedRanking, results);
});
- it('should score a priority index entry match above a title match', function() {
+ it("should score a priority index entry match above a title match", function () {
eval(loadFixture("titles/searchindex.js"));
+ // prettier-ignore
expectedRanking = [
['index', 'Main Page', '#index-0'], /* index entry */
['index', 'Main Page > Result Scoring', '#result-scoring'], /* title */
];
- searchParameters = Search._parseQuery('scoring');
+ searchParameters = Search._parseQuery("scoring");
results = Search._performSearch(...searchParameters);
checkRanking(expectedRanking, results);
});
- it('should score a main-title match above a subheading-title match', function() {
+ it("should score a main-title match above a subheading-title match", function () {
eval(loadFixture("titles/searchindex.js"));
+ // prettier-ignore
expectedRanking = [
['relevance', 'Relevance', ''], /* main title */
['index', 'Main Page > Relevance', '#relevance'], /* subsection heading title */
];
- searchParameters = Search._parseQuery('relevance');
+ searchParameters = Search._parseQuery("relevance");
results = Search._performSearch(...searchParameters);
checkRanking(expectedRanking, results);
});
-
});
- describe('can handle edge-case search queries', function() {
-
- it('does not find the javascript prototype property in unrelated documents', function() {
+ describe("can handle edge-case search queries", function () {
+ it("does not find the javascript prototype property in unrelated documents", function () {
eval(loadFixture("partial/searchindex.js"));
- searchParameters = Search._parseQuery('__proto__');
+ searchParameters = Search._parseQuery("__proto__");
+ // prettier-ignore
hits = [];
expect(Search._performSearch(...searchParameters)).toEqual(hits);
});
-
});
-
});
-describe("htmlToText", function() {
-
+describe("htmlToText", function () {
const testHTML = `
@@ -257,44 +267,47 @@ describe("htmlToText", function() {
`;
it("basic case", () => {
- expect(Search.htmlToText(testHTML).trim().split(/\s+/)).toEqual([
- 'Getting', 'Started', 'Some', 'text',
- 'Other', 'Section', 'Other', 'text',
- 'Yet', 'Another', 'Section', 'More', 'text'
- ]);
+ expect(Search.htmlToText(testHTML).trim().split(/\s+/)).toEqual(
+ /* prettier-ignore */ [
+ "Getting", "Started", "Some", "text",
+ "Other", "Section", "Other", "text",
+ "Yet", "Another", "Section", "More", "text"
+ ],
+ );
});
it("will start reading from the anchor", () => {
- expect(Search.htmlToText(testHTML, '#other-section').trim().split(/\s+/)).toEqual(['Other', 'Section', 'Other', 'text']);
+ expect(
+ Search.htmlToText(testHTML, "#other-section").trim().split(/\s+/),
+ ).toEqual(["Other", "Section", "Other", "text"]);
});
});
// Regression test for https://github.com/sphinx-doc/sphinx/issues/3150
-describe('splitQuery regression tests', () => {
-
- it('can split English words', () => {
- const parts = splitQuery(' Hello World ')
- expect(parts).toEqual(['Hello', 'World'])
- })
-
- it('can split special characters', () => {
- const parts = splitQuery('Pin-Code')
- expect(parts).toEqual(['Pin', 'Code'])
- })
-
- it('can split Chinese characters', () => {
- const parts = splitQuery('Hello from 中国 上海')
- expect(parts).toEqual(['Hello', 'from', '中国', '上海'])
- })
-
- it('can split Emoji (surrogate pair) characters. It should keep emojis.', () => {
- const parts = splitQuery('😁😁')
- expect(parts).toEqual(['😁😁'])
- })
-
- it('can split umlauts. It should keep umlauts.', () => {
- const parts = splitQuery('Löschen Prüfung Abändern ærlig spørsmål')
- expect(parts).toEqual(['Löschen', 'Prüfung', 'Abändern', 'ærlig', 'spørsmål'])
- })
-
-})
+describe("splitQuery regression tests", () => {
+ it("can split English words", () => {
+ const parts = splitQuery(" Hello World ");
+ expect(parts).toEqual(["Hello", "World"]);
+ });
+
+ it("can split special characters", () => {
+ const parts = splitQuery("Pin-Code");
+ expect(parts).toEqual(["Pin", "Code"]);
+ });
+
+ it("can split Chinese characters", () => {
+ const parts = splitQuery("Hello from 中国 上海");
+ expect(parts).toEqual(["Hello", "from", "中国", "上海"]);
+ });
+
+ it("can split Emoji (surrogate pair) characters. It should keep emojis.", () => {
+ const parts = splitQuery("😁😁");
+ expect(parts).toEqual(["😁😁"]);
+ });
+
+ it("can split umlauts. It should keep umlauts.", () => {
+ const parts = splitQuery("Löschen Prüfung Abändern ærlig spørsmål");
+ // prettier-ignore
+ expect(parts).toEqual(["Löschen", "Prüfung", "Abändern", "ærlig", "spørsmål"])
+ });
+});
diff --git a/tests/js/sphinx_highlight.spec.js b/tests/js/sphinx_highlight.spec.js
index 1f52eabb96f..4d57d867745 100644
--- a/tests/js/sphinx_highlight.spec.js
+++ b/tests/js/sphinx_highlight.spec.js
@@ -1,30 +1,33 @@
-describe('highlightText', function() {
+describe("highlightText", function () {
+ const cyrillicTerm = "шеллы";
+ const umlautTerm = "gänsefüßchen";
- const cyrillicTerm = 'шеллы';
- const umlautTerm = 'gänsefüßchen';
-
- it('should highlight text incl. special characters correctly in HTML', function() {
+ it("should highlight text incl. special characters correctly in HTML", function () {
const highlightTestSpan = new DOMParser().parseFromString(
- 'This is the шеллы and Gänsefüßchen test!', 'text/html').body.firstChild
- _highlightText(highlightTestSpan, cyrillicTerm, 'highlighted');
- _highlightText(highlightTestSpan, umlautTerm, 'highlighted');
+ "This is the шеллы and Gänsefüßchen test!",
+ "text/html",
+ ).body.firstChild;
+ _highlightText(highlightTestSpan, cyrillicTerm, "highlighted");
+ _highlightText(highlightTestSpan, umlautTerm, "highlighted");
const expectedHtmlString =
- 'This is the шеллы and ' +
- 'Gänsefüßchen test!';
+ 'This is the шеллы and '
+ + 'Gänsefüßchen test!';
expect(highlightTestSpan.innerHTML).toEqual(expectedHtmlString);
});
- it('should highlight text incl. special characters correctly in SVG', function() {
+ it("should highlight text incl. special characters correctly in SVG", function () {
const highlightTestSvg = new DOMParser().parseFromString(
- '' +
- '' +
- '', 'text/html').body.firstChild
- _highlightText(highlightTestSvg, cyrillicTerm, 'highlighted');
- _highlightText(highlightTestSvg, umlautTerm, 'highlighted');
+ ''
+ + '"
+ + "",
+ "text/html",
+ ).body.firstChild;
+ _highlightText(highlightTestSvg, cyrillicTerm, "highlighted");
+ _highlightText(highlightTestSvg, umlautTerm, "highlighted");
/* Note wild cards and ``toMatch``; allowing for some variability
seems to be necessary, even between different FF versions */
const expectedSvgString =
@@ -32,8 +35,9 @@ describe('highlightText', function() {
+ ''
+ ''
+ 'This is the шеллы and '
- + 'Gänsefüßchen test!';
- expect(new XMLSerializer().serializeToString(highlightTestSvg.firstChild)).toMatch(new RegExp(expectedSvgString));
+ + "Gänsefüßchen test!";
+ expect(
+ new XMLSerializer().serializeToString(highlightTestSvg.firstChild),
+ ).toMatch(new RegExp(expectedSvgString));
});
-
});
diff --git a/tox.ini b/tox.ini
index 23b239c7ffc..87b9d1b6316 100644
--- a/tox.ini
+++ b/tox.ini
@@ -84,3 +84,9 @@ dependency_groups =
types
commands =
mypy {posargs}
+
+[testenv:prettier]
+description =
+ Run the Prettier JavaScript formatter.
+commands =
+ npx prettier@3.5 --write "sphinx/themes/**/*.js" "!sphinx/themes/bizstyle/static/css3-mediaqueries*.js" "tests/js/**/*.{js,mjs}" "!tests/js/fixtures/**"
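
With the new environment in place, the formatter can be run locally the same
way CI runs it, e.g. via ``tox -e prettier``, or with the
``npx prettier@3.5 --check ...`` invocation from the workflow for a
check-only pass.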
From cffaf3d103ebfb455fd50df0a4ac8b850f95242f Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Sat, 24 May 2025 08:44:36 +0100
Subject: [PATCH 092/466] Remove unused fixture from
``test_config_pickle_protocol`` (#13590)
---
tests/test_config/test_config.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/test_config/test_config.py b/tests/test_config/test_config.py
index 5e68b4a9657..fc1ba4c7321 100644
--- a/tests/test_config/test_config.py
+++ b/tests/test_config/test_config.py
@@ -143,7 +143,7 @@ def test_config_not_found(tmp_path):
@pytest.mark.parametrize('protocol', list(range(pickle.HIGHEST_PROTOCOL)))
-def test_config_pickle_protocol(tmp_path, protocol: int):
+def test_config_pickle_protocol(protocol: int):
config = Config()
pickled_config = pickle.loads(pickle.dumps(config, protocol))
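
The fixture was removable because the test never touches the filesystem; it
only round-trips an in-memory ``Config``. The essence of the check, as a
standalone sketch:

    import pickle

    from sphinx.config import Config

    config = Config()
    for protocol in range(pickle.HIGHEST_PROTOCOL):
        pickle.loads(pickle.dumps(config, protocol))  # must not raise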
From 7957429f26587da8e3432e62cb12414771143c98 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sat, 24 May 2025 08:45:05 +0100
Subject: [PATCH 093/466] Add initial Pyrefly configuration file (#13579)
---
pyproject.toml | 1 +
pyrefly.toml | 27 +++++++++++++++++++++++++++
2 files changed, 28 insertions(+)
create mode 100644 pyrefly.toml
diff --git a/pyproject.toml b/pyproject.toml
index c16f5f48f83..16f3aaf7eb5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -158,6 +158,7 @@ translations = [
]
types = [
"mypy==1.15.0",
+ "pyrefly",
"pyright==1.1.400",
{ include-group = "type-stubs" },
]
diff --git a/pyrefly.toml b/pyrefly.toml
new file mode 100644
index 00000000000..88ccae4d84c
--- /dev/null
+++ b/pyrefly.toml
@@ -0,0 +1,27 @@
+# Configuration file for Pyrefly_.
+# n.b. Pyrefly is early in development.
+# Sphinx's current primary/reference type-checker is mypy.
+#
+# .. _Pyrefly: https://pyrefly.org/en/docs/configuration/
+
+project_includes = [
+ "doc/conf.py",
+ "doc/development/tutorials/examples/autodoc_intenum.py",
+ "doc/development/tutorials/examples/helloworld.py",
+ "sphinx",
+ "tests",
+ "utils",
+]
+project_excludes = [
+ "**/tests/roots*",
+]
+python_version = "3.11"
+replace_imports_with_any = [
+ "imagesize",
+ "pyximport",
+ "snowballstemmer",
+]
+
+# https://pyrefly.org/en/docs/error-kinds/
+[errors]
+implicitly-defined-attribute = false # many false positives
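
Assuming Pyrefly's documented command-line entry point, a local run would
look like ``pyrefly check`` from the repository root, with the settings above
picked up automatically from ``pyrefly.toml``.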
From a1b944488c70125b286dc1fc959f9a5c630d5c4f Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sat, 24 May 2025 09:32:59 +0100
Subject: [PATCH 094/466] Bump Ruff to 0.11.11 (#13589)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 16f3aaf7eb5..1e3b7158579 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -92,7 +92,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.10",
+ "ruff==0.11.11",
"mypy==1.15.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
@@ -135,7 +135,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.10",
+ "ruff==0.11.11",
"sphinx-lint>=0.9",
]
package = [
From 1f2821e20492cc4eefbe7157ddb82dae0b9db2a1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B?=
<2589111+jfbu@users.noreply.github.com>
Date: Thu, 24 Apr 2025 11:54:42 +0200
Subject: [PATCH 095/466] LaTeX: add support for fontawesome6 package
---
CHANGES.rst | 2 +
doc/latex.rst | 48 ++++---
sphinx/texinputs/sphinx.sty | 148 ++++++++++++--------
sphinx/texinputs/sphinxlatexadmonitions.sty | 5 +-
4 files changed, 116 insertions(+), 87 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 57e8e0efdf9..48d9e689f35 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -20,6 +20,8 @@ Features added
``linkcheck_allowed_redirects = {}``.
Patch by Adam Turner.
* #13497: Support C domain objects in the table of contents.
+* #13500: LaTeX: add support for ``fontawesome6`` package.
+ Patch by Jean-François B.
* #13535: html search: Update to the latest version of Snowball (v3.0.1).
Patch by Adam Turner.
* #13704: autodoc: Detect :py:func:`typing_extensions.overload `
diff --git a/doc/latex.rst b/doc/latex.rst
index 80762b1c2c1..bfc4de73938 100644
--- a/doc/latex.rst
+++ b/doc/latex.rst
@@ -1006,18 +1006,20 @@ The color used in the above example is available from having passed the
``iconpackage``
- The name of the LaTeX package used for icons in the admonition titles. It
- defaults to ``fontawesome5`` or to fall-back ``fontawesome``. In case
- neither one is available the option value will automatically default to
- ``none``, which means that no attempt at loading a package is done.
- Independently of this setting, arbitrary LaTeX code can be associated to
- each admonition type via ``div._icon-title`` keys which are
- described in the :ref:`additionalcss` section. If these keys are not
- used, Sphinx will either apply its default choices of icons (if
- ``fontawesome{5,}`` is available) or not draw the icon at all. Notice that
- if fall-back ``fontawesome`` is used the common icon for :dudir:`caution`
- and :dudir:`danger` will default to "bolt" not "radiation", which is only
- found in ``fontawesome5``.
+ The name of the LaTeX package used for rendering icons in the admonition
+ titles. Its default is set dynamically to ``fontawesome6``,
+ ``fontawesome5``, ``fontawesome``, or ``none``, depending on which of
+ these packages exist in the LaTeX installation in use. The generated
+ LaTeX code uses the ``\faIcon`` command with ``fontawesome6`` or
+ ``fontawesome5``, and ``\faicon`` with ``fontawesome``. In the latter case
+ the icon used for both :dudir:`caution` and :dudir:`danger` defaults to
+ "bolt" rather than "radiation", which is only available in
+ ``fontawesome6`` and ``fontawesome5``. If no "Font Awesome" related
+ package is found (or if the option is forcibly set to ``none``), the
+ icons are silently dropped. Users may set this option to some other
+ package of their choice, and must then configure ``div.note_title-icon``
+ and similar keys to use that package's interface (see the
+ :ref:`additionalcss` section for these extra ``'sphinxsetup'`` keys).
.. versionadded:: 7.4.0
@@ -1410,17 +1412,17 @@ The next keys, for admonitions, :dudir:`topic`, contents_, and
(it applies only to the icon, not to the title of the admonition).
- ``div._title-icon``: the LaTeX code responsible for producing the
- icon. For example, the default for :dudir:`note` is
- ``div.note_title-icon=\faIcon{info-circle}``. This uses a command from the
- LaTeX ``fontawesome5`` package, which is loaded automatically if available.
-
- If neither ``fontawesome5`` nor fall-back ``fontawesome`` (for which the
- associated command is :code-tex:`\\faicon`, not :code-tex:`\\faIcon`) are
- found, or if the ``iconpackage`` key of :ref:`'sphinxsetup'
- ` is set to load some other user-chosen package, or no
- package at all, all the ``title-icons`` default to empty LaTeX code. It is
- up to user to employ this interface to inject the icon (or anything else)
- into the PDF output.
+ icon. To modify the icons used by Sphinx, employ in these keys the
+ ``\faIcon`` LaTeX command (assuming either the ``fontawesome6`` or
+ ``fontawesome5`` LaTeX package is available on your system). For example,
+ the default for :dudir:`note` is
+ ``div.note_title-icon=\faIcon{info-circle}`` with ``fontawesome5`` and
+ ``div.note_title-icon=\faIcon{circle-info}`` with ``fontawesome6`` (which
+ is used automatically if available). If your system only provides the
+ ``fontawesome`` package (also detected automatically), use its ``\faicon``
+ command instead to modify the choice of icons. The ``iconpackage`` key can
+ point to some other icon-providing package; then use the commands suitable
+ to that package as values of the ``div._title-icon`` keys.
.. note::
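
To make the new knobs concrete: a project could pin the icon package and
restate an icon choice from ``conf.py``. A minimal sketch, forcing
``fontawesome5`` and spelling out its default icon for the note admonition
(both values are taken from the text above):

    latex_elements = {
        'sphinxsetup': r'iconpackage=fontawesome5, div.note_title-icon=\faIcon{info-circle}',
    }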
diff --git a/sphinx/texinputs/sphinx.sty b/sphinx/texinputs/sphinx.sty
index 8837485c5f7..7e06eff7de8 100644
--- a/sphinx/texinputs/sphinx.sty
+++ b/sphinx/texinputs/sphinx.sty
@@ -9,7 +9,7 @@
% by the Sphinx LaTeX writer.
\NeedsTeXFormat{LaTeX2e}[1995/12/01]
-\ProvidesPackage{sphinx}[2024/11/23 v8.2.0 Sphinx LaTeX package (sphinx-doc)]
+\ProvidesPackage{sphinx}[2025/04/24 v8.3.0 Sphinx LaTeX package (sphinx-doc)]
% provides \ltx@ifundefined
% (many packages load ltxcmds: graphicx does for pdftex and lualatex but
@@ -67,7 +67,7 @@
Footnote rendering may have had problems, due to extra package or
document class; check latex log for instructions}%
\@namedef{sphinx_buildwarning_badiconpackage}{%
- You have set iconpackage=\spx@opt@iconpackage, but this LaTeX package
+ You have set iconpackage=\spx@usr@iconpackage, but this LaTeX package
is not found}%
%% OPTION HANDLING
@@ -672,7 +672,7 @@
% defaults for them remain not to have specific colour.
%
% 7.4.0 adds keys for admonition titles: for background and foreground colors,
-% and for icons (whose defaults are picked from Free Fontawesome 5).
+% and for icons.
\def\spx@tempa#1{%
\expandafter\spx@tempb
\csname if#1withshadowcolor\expandafter\endcsname
@@ -869,80 +869,106 @@
}
% 7.4.0 Support for icons in admonition titles
-% We try to
-% - get Sphinx PDF builds to process fine in absence of fontawesome5
-% - use fontawesome5 if present, but not if user prefers another package
-% - provide an interface for using other LaTeX code for icons
-% - provide an interface for using some other package than fontawesome5
-% Indeed we can't load fontawesome5 unconditionally even if available,
-% as it proves incompatible with fontawesome package.
-% We thus must delay its loading.
-\IfFileExists{fontawesome5.sty}{%
- \DeclareStringOption[fontawesome5]{iconpackage}%
+%
+% We let Sphinx use, in order of priority: some user-specified package,
+% fontawesome6 (since 8.3.0), fontawesome5, fontawesome, or nothing (and then
+% not draw any icon). To allow a user-specified package, an extra interface
+% is provided for specifying the icon-drawing LaTeX code.
+%
+% We can't load fontawesome6 (or 5) unconditionally even if available, as it
+% is incompatible with the fontawesome package, which the user may prefer.
+% We thus must delay loading the package until \AtBeginDocument, and for now
+% can only set the default value of the iconpackage key.
+\IfFileExists{fontawesome6.sty}{%
+ \DeclareStringOption[fontawesome6]{iconpackage}%
}%
{%
+ \IfFileExists{fontawesome5.sty}{%
+ \DeclareStringOption[fontawesome5]{iconpackage}%
+ }%
+ {%
\IfFileExists{fontawesome.sty}
{\DeclareStringOption[fontawesome]{iconpackage}}
{\DeclareStringOption[none]{iconpackage}}%
+ }%
}%
-\newcommand\spx@faIcon[2][]{}%
-% The above \spx@faIcon which gobbles one mandatory and one optional
-% argument is put into use only if both fontawesome5 and fontawesome
-% LaTeX packages are not available, as part of the defaults for the
-% div.*_title-icon keys (these keys can be redefined via the sphinxsetup
-% interface).
-%
-\def\spxstring@fontawesome{fontawesome}
-\def\spxstring@fontawesomev{fontawesome5}
+% Unfortunately icon names differ between the fontawesome, fontawesome5, and
+% fontawesome6 LaTeX packages. At 8.3.0 we refactor the icon support code
+% into something easier to maintain in the future, in case a fontawesome7
+% appears, etc.
+%
+% TODO: Handle spaces possibly caused by bad user usage of the iconpackage
+% key? This would need checking how LaTeX handles spaces in package
+% names in the \RequirePackage command. Things in this area may have
+% changed recently (2025/04).
\AtBeginDocument{%
\ifx\spx@opt@iconpackage\spxstring@none
\else
\IfFileExists{\spx@opt@iconpackage.sty}
- {\RequirePackage{\spx@opt@iconpackage}%
- \ifx\spx@opt@iconpackage\spxstring@fontawesomev
- \renewcommand\spx@faIcon{\faIcon}%
- \else
- \ifx\spx@opt@iconpackage\spxstring@fontawesome
- \renewcommand\spx@faIcon[2][]{\faicon{##2}}%
- % The \ifdefined's are a bit silly because we know that
- % fontawesome.sty does not provide it, but perhaps
- % there can be some new release of that package?
- \ifdefined\faicon@lightbulb\else
- \let\faicon@lightbulb\faLightbulbO
- \fi
- \ifdefined\faicon@radiation\else
- \let\faicon@radiation\faBolt
- \fi
- \ifdefined\faicon@pen\else
- \let\faicon@pen\faPencil
- \fi
- % if neither has been required, \spx@faIcon will simply swallow
- % its argument and it is up to user
- % to set the various div.*_title-icon keys appropriately.
- \fi\fi %
- }%
+ {\RequirePackage{\spx@opt@iconpackage}}%
{%
- \sphinxbuildwarning{badiconpackage}%
- \PackageWarningNoLine{sphinx}{%
- You have set iconpackage=\spx@opt@iconpackage\MessageBreak
- But \spx@opt@iconpackage.sty is not found by LaTeX}
+ \let\spx@usr@iconpackage\spx@opt@iconpackage
+ \sphinxbuildwarning{badiconpackage}%
+ \PackageWarningNoLine{sphinx}{%
+ You have set iconpackage=\spx@usr@iconpackage\MessageBreak
+ But \spx@usr@iconpackage.sty is not found by LaTeX}
+ \let\spx@opt@iconpackage\spxstring@none
}%
\fi
}
+% Icon defaults depending on package used.
+% Attention! no extra spaces for alignment when using \@namedef!
+\@namedef{spx@fontawesome6@note}{\faIcon{circle-info}}
+\@namedef{spx@fontawesome6@hint}{\faIcon[regular]{lightbulb}}
+\@namedef{spx@fontawesome6@tip}{\faIcon[regular]{lightbulb}}
+\@namedef{spx@fontawesome6@seealso}{\faIcon{share}}
+\@namedef{spx@fontawesome6@todo}{\faIcon{pen}}
+\@namedef{spx@fontawesome6@important}{\faIcon{circle-pause}}
+\@namedef{spx@fontawesome6@caution}{\faIcon{radiation}}
+\@namedef{spx@fontawesome6@warning}{\faIcon{triangle-exclamation}}
+\@namedef{spx@fontawesome6@attention}{\faIcon{triangle-exclamation}}
+\@namedef{spx@fontawesome6@danger}{\faIcon{radiation}}
+\@namedef{spx@fontawesome6@error}{\faIcon{circle-xmark}}
+
+\@namedef{spx@fontawesome5@note}{\faIcon{info-circle}}
+\@namedef{spx@fontawesome5@hint}{\faIcon[regular]{lightbulb}}
+\@namedef{spx@fontawesome5@tip}{\faIcon[regular]{lightbulb}}
+\@namedef{spx@fontawesome5@seealso}{\faIcon{share}}
+\@namedef{spx@fontawesome5@todo}{\faIcon{pen}}
+\@namedef{spx@fontawesome5@important}{\faIcon{pause-circle}}
+\@namedef{spx@fontawesome5@caution}{\faIcon{radiation}}
+\@namedef{spx@fontawesome5@warning}{\faIcon{exclamation-triangle}}
+\@namedef{spx@fontawesome5@attention}{\faIcon{exclamation-triangle}}
+\@namedef{spx@fontawesome5@danger}{\faIcon{radiation}}
+\@namedef{spx@fontawesome5@error}{\faIcon{times-circle}}
+
+\def\spx@fontawesome@note {\faicon{info-circle}}
+\def\spx@fontawesome@hint {\faicon{lightbulb-o}}
+\def\spx@fontawesome@tip {\faicon{lightbulb-o}}
+\def\spx@fontawesome@seealso {\faicon{share}}
+\def\spx@fontawesome@todo {\faicon{pencil}}
+\def\spx@fontawesome@important{\faicon{pause-circle}}
+\def\spx@fontawesome@caution {\faicon{bolt}}
+\def\spx@fontawesome@warning {\faicon{exclamation-triangle}}
+\def\spx@fontawesome@attention{\faicon{exclamation-triangle}}
+\def\spx@fontawesome@danger {\faicon{bolt}}
+\def\spx@fontawesome@error {\faicon{times-circle}}
+
+% \spx@none@{note,hint,...} are left undefined, so the \@nameuse is \relax
+\def\spx@titleicon@default#1{\@nameuse{spx@\spx@opt@iconpackage @#1}}
\setkeys{sphinx}{
-% Icon defaults.
- div.note_title-icon = \spx@faIcon{info-circle},
- div.hint_title-icon = \spx@faIcon[regular]{lightbulb},
- div.tip_title-icon = \spx@faIcon[regular]{lightbulb},
- div.seealso_title-icon = \spx@faIcon{share},
- div.todo_title-icon = \spx@faIcon{pen},
- div.important_title-icon = \spx@faIcon{pause-circle},
- div.caution_title-icon = \spx@faIcon{radiation},
- div.warning_title-icon = \spx@faIcon{exclamation-triangle},
- div.attention_title-icon = \spx@faIcon{exclamation-triangle},
- div.danger_title-icon = \spx@faIcon{radiation},
- div.error_title-icon = \spx@faIcon{times-circle},
+ div.note_title-icon = \spx@titleicon@default{note},
+ div.hint_title-icon = \spx@titleicon@default{hint},
+ div.tip_title-icon = \spx@titleicon@default{tip},
+ div.seealso_title-icon = \spx@titleicon@default{seealso},
+ div.todo_title-icon = \spx@titleicon@default{todo},
+ div.important_title-icon = \spx@titleicon@default{important},
+ div.caution_title-icon = \spx@titleicon@default{caution},
+ div.warning_title-icon = \spx@titleicon@default{warning},
+ div.attention_title-icon = \spx@titleicon@default{attention},
+ div.danger_title-icon = \spx@titleicon@default{danger},
+ div.error_title-icon = \spx@titleicon@default{error},
% MEMO: the new at 8.1.0 defaults for contents/topic/sidebar directives
% use no icons, they use \sphinxdotitlerow which detects automatically
% whether title-icon key has been set or not.
diff --git a/sphinx/texinputs/sphinxlatexadmonitions.sty b/sphinx/texinputs/sphinxlatexadmonitions.sty
index 0519903591b..627ee0d49ad 100644
--- a/sphinx/texinputs/sphinxlatexadmonitions.sty
+++ b/sphinx/texinputs/sphinxlatexadmonitions.sty
@@ -1,7 +1,7 @@
%% NOTICES AND ADMONITIONS
%
% change this info string if making any custom modification
-\ProvidesPackage{sphinxlatexadmonitions}[2024/10/11 v8.1.1 admonitions]
+\ProvidesPackage{sphinxlatexadmonitions}[2025/04/24 v8.3.0 admonitions]
% Provides support for this output mark-up from Sphinx latex writer:
%
@@ -342,8 +342,7 @@
\textcolor{sphinx#1TtlFgColor}{%
\@nameuse{sphinx#1TtlIcon}%
% The next macro is located here for legacy reasons of earlier
- % functioning of \spx@faIcon. When fontawesome{5,}.sty both
- % are unavailable, it (formerly) gobbled this next macro.
+ % functioning of the \spx@faIcon macro, now removed from sphinx.sty.
% We leave it here now although it could be moved to after
% the closing brace.
\sphinxtitlerowaftericonspacecmd
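
The documentation hunks above describe the new ``iconpackage`` priority
order and the ``div.*_title-icon`` keys. A minimal sketch of how a project
could exercise this interface from conf.py; the package choice and icon
names below are illustrative assumptions, not part of this patch:

    # conf.py -- hedged sketch: assumes the fontawesome5 LaTeX package is
    # installed; icon names are from the free Font Awesome 5 set.
    latex_elements = {
        'sphinxsetup': (
            'iconpackage=fontawesome5,'
            r'div.note_title-icon=\faIcon{info-circle},'
            r'div.warning_title-icon=\faIcon{exclamation-triangle}'
        ),
    }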
From 0d7ba3b2f4ebd4bb673e0c4c8b477f731f3f9131 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Sat, 24 May 2025 15:57:19 +0200
Subject: [PATCH 096/466] Add .auto/ to .gitignore (Emacs AUCTeX)
---
.gitignore | 1 +
1 file changed, 1 insertion(+)
diff --git a/.gitignore b/.gitignore
index 35fd23178f5..5a50535097e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,6 +3,7 @@
*.so
*.swp
+.auto/
.dir-locals.el
.cache/
.idea
From 036db81dcdf0a172ece16b013d640cc7763f8faa Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Sat, 24 May 2025 15:25:23 +0200
Subject: [PATCH 097/466] CI/LaTeX: run pdflatex twice when building documents
---
tests/test_builders/test_build_latex.py | 11 +++++++++++
1 file changed, 11 insertions(+)
diff --git a/tests/test_builders/test_build_latex.py b/tests/test_builders/test_build_latex.py
index f1c19a5ab7f..0d1c607462d 100644
--- a/tests/test_builders/test_build_latex.py
+++ b/tests/test_builders/test_build_latex.py
@@ -72,6 +72,17 @@ def compile_latex_document(app, filename='projectnamenotset.tex', docclass='manu
filename,
]
subprocess.run(args, capture_output=True, check=True)
+ # Run a second time (if the engine is pdflatex), to have a chance to
+ # detect problems arising on the second LaTeX pass (for example, a
+ # second pass is required for the TOC to show up in the PDF and for
+ # internal hyperlinks to actually work). Of course, this increases the
+ # duration of the test, but also its usefulness.
+ # TODO: in theory the correct way is to run latexmk with the options
+ # configured in the Makefile, in the presence of latexmkrc or
+ # latexmkjarc, and also sphinx.xdy and other xindy support. And two
+ # passes are not enough except for the simplest documents.
+ if app.config.latex_engine == 'pdflatex':
+     subprocess.run(args, capture_output=True, check=True)
except OSError as exc: # most likely the latex executable was not found
raise pytest.skip.Exception from exc
except CalledProcessError as exc:
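
The TODO above names latexmk as the more faithful approach. A hedged sketch
of that alternative, assuming latexmk is on PATH (the options shown are
illustrative, not the ones from the project Makefile):

    # latexmk reruns pdflatex until cross-references stabilise, so no
    # manual second pass is needed; 'args' mirrors the list built above.
    args = ['latexmk', '-pdf', '-interaction=nonstopmode', filename]
    subprocess.run(args, capture_output=True, check=True)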
From 546170754f3f2f96c0d12176b2d2fb5688ca75ab Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B?=
<2589111+jfbu@users.noreply.github.com>
Date: Sun, 25 Aug 2024 12:09:42 +0200
Subject: [PATCH 098/466] LaTeX: render in PDF hyperlinks located inside titles
Fix #12821
---
CHANGES.rst | 2 ++
sphinx/texinputs/sphinxlatexstyletext.sty | 8 ++++++--
sphinx/writers/latex.py | 2 +-
tests/roots/test-root/markup.txt | 6 ++++++
4 files changed, 15 insertions(+), 3 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 48d9e689f35..f575efab7c7 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -31,6 +31,8 @@ Features added
Bugs fixed
----------
+* #12821: LaTeX: URLs/links in section titles should render in PDF.
+ Patch by Jean-François B.
* #13369: Correctly parse and cross-reference unpacked type annotations.
Patch by Alicia Garcia-Raboso.
* #13528: Add tilde ``~`` prefix support for :rst:role:`py:deco`.
diff --git a/sphinx/texinputs/sphinxlatexstyletext.sty b/sphinx/texinputs/sphinxlatexstyletext.sty
index d083cd96a83..6c80ce64b43 100644
--- a/sphinx/texinputs/sphinxlatexstyletext.sty
+++ b/sphinx/texinputs/sphinxlatexstyletext.sty
@@ -1,7 +1,7 @@
%% TEXT STYLING
%
% change this info string if making any custom modification
-\ProvidesPackage{sphinxlatexstyletext}[2024/07/28 v8.1.0 text styling]
+\ProvidesPackage{sphinxlatexstyletext}[2025/05/24 v8.3.0 text styling]
% 7.4.0 has moved all that is related to admonitions to sphinxlatexadmonitions.sty
% 8.1.0 has moved topic/contents/sidebar to sphinxlatexshadowbox.sty
@@ -57,7 +57,11 @@
% reduce hyperref "Token not allowed in a PDF string" warnings on PDF builds
\AtBeginDocument{\pdfstringdefDisableCommands{%
% all "protected" macros possibly ending up in section titles should be here
-% TODO: examine if \sphinxhref, \sphinxurl, \sphinnolinkurl should be handled
+% the next four were added so that URLs and internal links in titles are allowed
+ \let\sphinxurl \@firstofone
+ \let\sphinxnolinkurl\@firstofone
+ \let\sphinxhref \@secondoftwo
+ \def\hyperref[#1]#2{#2}% for PDF bookmark to ignore #1
\let\sphinxstyleemphasis \@firstofone
\let\sphinxstyleliteralemphasis \@firstofone
\let\sphinxstylestrong \@firstofone
diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py
index a2a17855c18..5d9bb9bef9c 100644
--- a/sphinx/writers/latex.py
+++ b/sphinx/writers/latex.py
@@ -1962,7 +1962,7 @@ def visit_reference(self, node: Element) -> None:
uri = node.get('refuri', '')
if not uri and node.get('refid'):
uri = '%' + self.curfilestack[-1] + '#' + node['refid']
- if self.in_title or not uri:
+ if not uri:
self.context.append('')
elif uri.startswith('#'):
# references to labels in the same document
diff --git a/tests/roots/test-root/markup.txt b/tests/roots/test-root/markup.txt
index 91f41946620..2e45ba33680 100644
--- a/tests/roots/test-root/markup.txt
+++ b/tests/roots/test-root/markup.txt
@@ -469,3 +469,9 @@ Smart quotes
.. [#] Like footnotes.
+
+Link in a title: `Field lists <https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#field-lists>`_
+---------------------------------------------------------------------------------------------------------------------
+
+Again: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#field-lists
+------------------------------------------------------------------------------------------
From 31e63d786aefcb54ba08fa6406a3579ec5ecdc8f Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 2 Jun 2025 16:49:24 +0100
Subject: [PATCH 099/466] Bump types-requests to 2.32.0.20250602 (#13605)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 1e3b7158579..c676e699212 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -100,7 +100,7 @@ lint = [
"types-docutils==0.21.0.20250514",
"types-Pillow==10.2.0.20240822",
"types-Pygments==2.19.0.20250516",
- "types-requests==2.32.0.20250515", # align with requests
+ "types-requests==2.32.0.20250602", # align with requests
"types-urllib3==1.26.25.14",
"pyright==1.1.400",
"pytest>=8.0",
@@ -169,7 +169,7 @@ type-stubs = [
"types-docutils==0.21.0.20250514",
"types-Pillow==10.2.0.20240822",
"types-Pygments==2.19.0.20250516",
- "types-requests==2.32.0.20250515",
+ "types-requests==2.32.0.20250602",
"types-urllib3==1.26.25.14",
]
From 55092e794f756452d230320a06d231ad3fa60beb Mon Sep 17 00:00:00 2001
From: James Addison <55152140+jayaddison@users.noreply.github.com>
Date: Mon, 2 Jun 2025 21:17:35 +0000
Subject: [PATCH 100/466] Tests: Undo patch for Python 3.14.0 alpha 7; no
longer required for 3.14.0 beta 2 (#13606)
---
tests/test_extensions/test_ext_autodoc_configs.py | 13 ++-----------
1 file changed, 2 insertions(+), 11 deletions(-)
diff --git a/tests/test_extensions/test_ext_autodoc_configs.py b/tests/test_extensions/test_ext_autodoc_configs.py
index eb351442673..ab7539190e0 100644
--- a/tests/test_extensions/test_ext_autodoc_configs.py
+++ b/tests/test_extensions/test_ext_autodoc_configs.py
@@ -1348,11 +1348,6 @@ def test_autodoc_type_aliases(app: SphinxTestApp) -> None:
# default
options = {'members': None}
actual = do_autodoc(app, 'module', 'target.autodoc_type_aliases', options)
- attr2_typeinfo: tuple[str, ...]
- if sys.version_info >= (3, 14, 0, 'alpha', 7):
- attr2_typeinfo = ()
- else:
- attr2_typeinfo = (' :type: int',)
assert list(actual) == [
'',
'.. py:module:: target.autodoc_type_aliases',
@@ -1373,7 +1368,7 @@ def test_autodoc_type_aliases(app: SphinxTestApp) -> None:
'',
' .. py:attribute:: Foo.attr2',
' :module: target.autodoc_type_aliases',
- *attr2_typeinfo,
+ ' :type: int',
'',
' docstring',
'',
@@ -1426,10 +1421,6 @@ def test_autodoc_type_aliases(app: SphinxTestApp) -> None:
'io.StringIO': 'my.module.StringIO',
}
actual = do_autodoc(app, 'module', 'target.autodoc_type_aliases', options)
- if sys.version_info >= (3, 14, 0, 'alpha', 7):
- attr2_typeinfo = ()
- else:
- attr2_typeinfo = (' :type: myint',)
assert list(actual) == [
'',
'.. py:module:: target.autodoc_type_aliases',
@@ -1450,7 +1441,7 @@ def test_autodoc_type_aliases(app: SphinxTestApp) -> None:
'',
' .. py:attribute:: Foo.attr2',
' :module: target.autodoc_type_aliases',
- *attr2_typeinfo,
+ ' :type: myint',
'',
' docstring',
'',
From 68d56109ff50dd81dd31d4a01e3dccbd006c50ee Mon Sep 17 00:00:00 2001
From: James Addison <55152140+jayaddison@users.noreply.github.com>
Date: Mon, 2 Jun 2025 22:02:48 +0000
Subject: [PATCH 101/466] Tests: update LaTeX label test expectations from
Docutils r10151 (#13610)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
---
tests/test_builders/test_build_latex.py | 11 +++++++++--
1 file changed, 9 insertions(+), 2 deletions(-)
diff --git a/tests/test_builders/test_build_latex.py b/tests/test_builders/test_build_latex.py
index 0d1c607462d..37e708a021e 100644
--- a/tests/test_builders/test_build_latex.py
+++ b/tests/test_builders/test_build_latex.py
@@ -12,6 +12,7 @@
from subprocess import CalledProcessError
from typing import TYPE_CHECKING
+import docutils
import pygments
import pytest
@@ -1959,10 +1960,16 @@ def test_latex_labels(app: SphinxTestApp) -> None:
result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
+ # ref: docutils r10151
+ if docutils.__version_info__[:2] < (0, 22):
+ figure_id, table_id = 'id1', 'id2'
+ else:
+ figure_id, table_id = 'id2', 'id3'
+
# figures
assert (
r'\caption{labeled figure}'
- r'\label{\detokenize{index:id1}}'
+ r'\label{\detokenize{index:' + figure_id + '}}'
r'\label{\detokenize{index:figure2}}'
r'\label{\detokenize{index:figure1}}'
r'\end{figure}'
@@ -1988,7 +1995,7 @@ def test_latex_labels(app: SphinxTestApp) -> None:
# tables
assert (
r'\sphinxcaption{table caption}'
- r'\label{\detokenize{index:id2}}'
+ r'\label{\detokenize{index:' + table_id + '}}'
r'\label{\detokenize{index:table2}}'
r'\label{\detokenize{index:table1}}'
) in result
From 03c2373c755281e5c3eab08f8a5e6e10e87abf0a Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Mon, 2 Jun 2025 23:23:57 +0100
Subject: [PATCH 102/466] Extract ``_is_typing()`` to module level
---
sphinx/domains/python/__init__.py | 15 +++++++--------
1 file changed, 7 insertions(+), 8 deletions(-)
diff --git a/sphinx/domains/python/__init__.py b/sphinx/domains/python/__init__.py
index fb030991464..914155cd31e 100644
--- a/sphinx/domains/python/__init__.py
+++ b/sphinx/domains/python/__init__.py
@@ -52,6 +52,8 @@
py_sig_re,
)
+_TYPING_ALL = frozenset(typing.__all__)
+
logger = logging.getLogger(__name__)
pairindextypes = {
@@ -1076,13 +1078,6 @@ def builtin_resolver(
app: Sphinx, env: BuildEnvironment, node: pending_xref, contnode: Element
) -> Element | None:
"""Do not emit nitpicky warnings for built-in types."""
-
- def istyping(s: str) -> bool:
- if s.startswith('typing.'):
- s = s.split('.', 1)[1]
-
- return s in typing.__all__
-
if node.get('refdomain') != 'py':
return None
elif node.get('reftype') in {'class', 'obj'} and node.get('reftarget') == 'None':
@@ -1092,13 +1087,17 @@ def istyping(s: str) -> bool:
if inspect.isclass(getattr(builtins, reftarget, None)):
# built-in class
return contnode
- if istyping(reftarget):
+ if _is_typing(reftarget):
# typing class
return contnode
return None
+def _is_typing(s: str, /) -> bool:
+ return s.removeprefix('typing.') in _TYPING_ALL
+
+
def setup(app: Sphinx) -> ExtensionMetadata:
app.setup_extension('sphinx.directives')
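
For clarity, the behaviour of the extracted helper (a quick illustrative
check, not part of the patch):

    # Assumes the module-level _is_typing and _TYPING_ALL defined above.
    assert _is_typing('typing.Sequence')   # 'Sequence' is in typing.__all__
    assert _is_typing('Sequence')          # the 'typing.' prefix is optional
    assert not _is_typing('collections.abc.Sequence')  # only 'typing.' is stripped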
From 987ccb2a9706a344b4ebd347f841a172055e640b Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Tue, 3 Jun 2025 00:37:45 +0100
Subject: [PATCH 103/466] Prefer ``str.partition`` over ``str.split(...,
maxsize=1)``
---
sphinx/_cli/__init__.py | 2 +-
sphinx/builders/latex/__init__.py | 6 +++---
sphinx/cmd/build.py | 4 ++--
sphinx/config.py | 2 +-
sphinx/directives/__init__.py | 2 +-
sphinx/domains/c/__init__.py | 2 +-
sphinx/domains/cpp/__init__.py | 2 +-
sphinx/domains/javascript.py | 2 +-
sphinx/domains/python/__init__.py | 2 +-
sphinx/domains/python/_object.py | 2 +-
sphinx/domains/std/__init__.py | 4 ++--
sphinx/ext/autosummary/__init__.py | 2 +-
sphinx/ext/doctest.py | 2 +-
sphinx/ext/intersphinx/_resolve.py | 2 +-
sphinx/ext/napoleon/docstring.py | 2 +-
sphinx/roles.py | 26 ++++++++++++-------------
sphinx/search/__init__.py | 2 +-
sphinx/util/docfields.py | 2 +-
sphinx/util/docstrings.py | 2 +-
sphinx/util/i18n.py | 2 +-
sphinx/util/images.py | 7 ++++++-
sphinx/util/index_entries.py | 2 +-
sphinx/util/inventory.py | 2 +-
sphinx/util/osutil.py | 4 ++--
sphinx/util/requests.py | 2 +-
sphinx/writers/texinfo.py | 4 ++--
tests/test_builders/test_build_latex.py | 6 +++---
tests/test_util/test_util_images.py | 5 +----
28 files changed, 53 insertions(+), 51 deletions(-)
diff --git a/sphinx/_cli/__init__.py b/sphinx/_cli/__init__.py
index 87128b0a5a0..8c305442de3 100644
--- a/sphinx/_cli/__init__.py
+++ b/sphinx/_cli/__init__.py
@@ -64,7 +64,7 @@ def _load_subcommand_descriptions() -> Iterator[tuple[str, str]]:
# log an error here, but don't fail the full enumeration
print(f'Failed to load the description for {command}', file=sys.stderr)
else:
- yield command, description.split('\n\n', 1)[0]
+ yield command, description.partition('\n\n')[0]
class _RootArgumentParser(argparse.ArgumentParser):
diff --git a/sphinx/builders/latex/__init__.py b/sphinx/builders/latex/__init__.py
index 5aeafca8bfd..985620f2023 100644
--- a/sphinx/builders/latex/__init__.py
+++ b/sphinx/builders/latex/__init__.py
@@ -513,9 +513,9 @@ def write_message_catalog(self) -> None:
formats = self.config.numfig_format
context = {
'addtocaptions': r'\@iden',
- 'figurename': formats.get('figure', '').split('%s', 1),
- 'tablename': formats.get('table', '').split('%s', 1),
- 'literalblockname': formats.get('code-block', '').split('%s', 1),
+ 'figurename': formats.get('figure', '').split('%s', maxsplit=1),
+ 'tablename': formats.get('table', '').split('%s', maxsplit=1),
+ 'literalblockname': formats.get('code-block', '').split('%s', maxsplit=1),
}
if self.context['babel'] or self.context['polyglossia']:
diff --git a/sphinx/cmd/build.py b/sphinx/cmd/build.py
index 11a70df0c6c..58f3ad26746 100644
--- a/sphinx/cmd/build.py
+++ b/sphinx/cmd/build.py
@@ -371,14 +371,14 @@ def _parse_confoverrides(
val: Any
for val in define:
try:
- key, val = val.split('=', 1)
+ key, _, val = val.partition('=')
except ValueError:
parser.error(__('-D option argument must be in the form name=value'))
confoverrides[key] = val
for val in htmldefine:
try:
- key, val = val.split('=')
+ key, _, val = val.partition('=')
except ValueError:
parser.error(__('-A option argument must be in the form name=value'))
with contextlib.suppress(ValueError):
diff --git a/sphinx/config.py b/sphinx/config.py
index bedc69f2337..2498ada6c56 100644
--- a/sphinx/config.py
+++ b/sphinx/config.py
@@ -320,7 +320,7 @@ def __init__(
for name in list(self._overrides.keys()):
if '.' in name:
- real_name, key = name.split('.', 1)
+ real_name, _, key = name.partition('.')
raw_config.setdefault(real_name, {})[key] = self._overrides.pop(name)
self.setup: _ExtensionSetupFunc | None = raw_config.get('setup')
diff --git a/sphinx/directives/__init__.py b/sphinx/directives/__init__.py
index b4fb7f76006..c442ea8e6c8 100644
--- a/sphinx/directives/__init__.py
+++ b/sphinx/directives/__init__.py
@@ -201,7 +201,7 @@ def run(self) -> list[Node]:
* parse the content and handle doc fields in it
"""
if ':' in self.name:
- self.domain, self.objtype = self.name.split(':', 1)
+ self.domain, _, self.objtype = self.name.partition(':')
else:
self.domain, self.objtype = '', self.name
self.indexnode = addnodes.index(entries=[])
diff --git a/sphinx/domains/c/__init__.py b/sphinx/domains/c/__init__.py
index 7fa1822e4ac..56ce0d170f6 100644
--- a/sphinx/domains/c/__init__.py
+++ b/sphinx/domains/c/__init__.py
@@ -668,7 +668,7 @@ def run(self) -> list[Node]:
The code is therefore based on the ObjectDescription version.
"""
if ':' in self.name:
- self.domain, self.objtype = self.name.split(':', 1)
+ self.domain, _, self.objtype = self.name.partition(':')
else:
self.domain, self.objtype = '', self.name
diff --git a/sphinx/domains/cpp/__init__.py b/sphinx/domains/cpp/__init__.py
index fc72e208791..554f4ebb17e 100644
--- a/sphinx/domains/cpp/__init__.py
+++ b/sphinx/domains/cpp/__init__.py
@@ -812,7 +812,7 @@ def run(self) -> list[Node]:
The code is therefore based on the ObjectDescription version.
"""
if ':' in self.name:
- self.domain, self.objtype = self.name.split(':', 1)
+ self.domain, _, self.objtype = self.name.partition(':')
else:
self.domain, self.objtype = '', self.name
diff --git a/sphinx/domains/javascript.py b/sphinx/domains/javascript.py
index 51a93bcf802..eaa69094c78 100644
--- a/sphinx/domains/javascript.py
+++ b/sphinx/domains/javascript.py
@@ -70,7 +70,7 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str]
"""
sig = sig.strip()
if '(' in sig and sig[-1:] == ')':
- member, arglist = sig.split('(', 1)
+ member, _, arglist = sig.partition('(')
member = member.strip()
arglist = arglist[:-1].strip()
else:
diff --git a/sphinx/domains/python/__init__.py b/sphinx/domains/python/__init__.py
index 914155cd31e..d70c232e725 100644
--- a/sphinx/domains/python/__init__.py
+++ b/sphinx/domains/python/__init__.py
@@ -671,7 +671,7 @@ def generate(
entries = content.setdefault(modname[0].lower(), [])
- package = modname.split('.', maxsplit=1)[0]
+ package = modname.partition('.')[0]
if package != modname:
# it's a submodule
if prev_modname == package:
diff --git a/sphinx/domains/python/_object.py b/sphinx/domains/python/_object.py
index fd4e62bbbe0..6cd19245aae 100644
--- a/sphinx/domains/python/_object.py
+++ b/sphinx/domains/python/_object.py
@@ -93,7 +93,7 @@ def make_xref(
children = result.children
result.clear()
- shortname = target.split('.')[-1]
+ shortname = target.rpartition('.')[-1]
textnode = innernode('', shortname) # type: ignore[call-arg]
contnodes = [
pending_xref_condition('', '', textnode, condition='resolved'),
diff --git a/sphinx/domains/std/__init__.py b/sphinx/domains/std/__init__.py
index e123ce85786..9397733c814 100644
--- a/sphinx/domains/std/__init__.py
+++ b/sphinx/domains/std/__init__.py
@@ -218,7 +218,7 @@ def run(self) -> list[Node]:
ret.insert(0, inode)
name = self.name
if ':' in self.name:
- _, name = self.name.split(':', 1)
+ name = self.name.partition(':')[-1]
std = self.env.domains.standard_domain
std.note_object(name, fullname, node_id, location=node)
@@ -1235,7 +1235,7 @@ def _resolve_option_xref(
if not docname:
commands = []
while ws_re.search(target):
- subcommand, target = ws_re.split(target, 1)
+ subcommand, target = ws_re.split(target, maxsplit=1)
commands.append(subcommand)
progname = '-'.join(commands)
diff --git a/sphinx/ext/autosummary/__init__.py b/sphinx/ext/autosummary/__init__.py
index 733c936d8f0..fe7092c0a74 100644
--- a/sphinx/ext/autosummary/__init__.py
+++ b/sphinx/ext/autosummary/__init__.py
@@ -511,7 +511,7 @@ def append_row(*column_texts: str) -> None:
def strip_arg_typehint(s: str) -> str:
"""Strip a type hint from argument definition."""
- return s.split(':')[0].strip()
+ return s.partition(':')[0].strip()
def _cleanup_signature(s: str) -> str:
diff --git a/sphinx/ext/doctest.py b/sphinx/ext/doctest.py
index 343534f10ce..65b9933785c 100644
--- a/sphinx/ext/doctest.py
+++ b/sphinx/ext/doctest.py
@@ -392,7 +392,7 @@ def get_filename_for_node(self, node: Node, docname: str) -> str:
"""
try:
filename = relpath(node.source, self.env.srcdir) # type: ignore[arg-type]
- return filename.rsplit(':docstring of ', maxsplit=1)[0]
+ return filename.partition(':docstring of ')[0]
except Exception:
return str(self.env.doc2path(docname, False))
diff --git a/sphinx/ext/intersphinx/_resolve.py b/sphinx/ext/intersphinx/_resolve.py
index 102c5d3ab07..2029a0ea971 100644
--- a/sphinx/ext/intersphinx/_resolve.py
+++ b/sphinx/ext/intersphinx/_resolve.py
@@ -493,7 +493,7 @@ def get_inventory_and_name_suffix(self, name: str) -> tuple[str | None, str]:
assert name.startswith('external'), name
suffix = name[9:]
if name[8] == '+':
- inv_name, suffix = suffix.split(':', 1)
+ inv_name, _, suffix = suffix.partition(':')
return inv_name, suffix
elif name[8] == ':':
return None, suffix
diff --git a/sphinx/ext/napoleon/docstring.py b/sphinx/ext/napoleon/docstring.py
index ea991f72301..26c7741ea60 100644
--- a/sphinx/ext/napoleon/docstring.py
+++ b/sphinx/ext/napoleon/docstring.py
@@ -1387,7 +1387,7 @@ def translate(
if m and line[m.end() :].strip().startswith(':'):
push_item(current_func, rest)
current_func, line = line[: m.end()], line[m.end() :]
- rest = [line.split(':', 1)[1].strip()]
+ rest = [line.partition(':')[-1].strip()]
if not rest[0]:
rest = []
elif not line.startswith(' '):
diff --git a/sphinx/roles.py b/sphinx/roles.py
index 04469f45488..79ec70e90a3 100644
--- a/sphinx/roles.py
+++ b/sphinx/roles.py
@@ -103,7 +103,7 @@ def run(self) -> tuple[list[Node], list[system_message]]:
self.refdomain, self.reftype = '', self.name
self.classes = ['xref', self.reftype]
else:
- self.refdomain, self.reftype = self.name.split(':', 1)
+ self.refdomain, _, self.reftype = self.name.partition(':')
self.classes = ['xref', self.refdomain, f'{self.refdomain}-{self.reftype}']
if self.disabled:
@@ -234,9 +234,9 @@ def run(self) -> tuple[list[Node], list[system_message]]:
return [index, target, reference], []
def build_uri(self) -> str:
- ret = self.target.split('#', 1)
- if len(ret) == 2:
- return f'{CVE._BASE_URL}{ret[0]}#{ret[1]}'
+ ret = self.target.partition('#')
+ if ret[1]:
+ return f'{CVE._BASE_URL}{ret[0]}#{ret[2]}'
return f'{CVE._BASE_URL}{ret[0]}'
@@ -279,9 +279,9 @@ def run(self) -> tuple[list[Node], list[system_message]]:
return [index, target, reference], []
def build_uri(self) -> str:
- ret = self.target.split('#', 1)
- if len(ret) == 2:
- return f'{CWE._BASE_URL}{int(ret[0])}.html#{ret[1]}'
+ ret = self.target.partition('#')
+ if ret[1]:
+ return f'{CWE._BASE_URL}{int(ret[0])}.html#{ret[2]}'
return f'{CWE._BASE_URL}{int(ret[0])}.html'
@@ -323,9 +323,9 @@ def run(self) -> tuple[list[Node], list[system_message]]:
def build_uri(self) -> str:
base_url = self.inliner.document.settings.pep_base_url
- ret = self.target.split('#', 1)
- if len(ret) == 2:
- return base_url + 'pep-%04d/#%s' % (int(ret[0]), ret[1])
+ ret = self.target.partition('#')
+ if ret[1]:
+ return base_url + 'pep-%04d/#%s' % (int(ret[0]), ret[2])
else:
return base_url + 'pep-%04d/' % int(ret[0])
@@ -361,9 +361,9 @@ def run(self) -> tuple[list[Node], list[system_message]]:
def build_uri(self) -> str:
base_url = self.inliner.document.settings.rfc_base_url
- ret = self.target.split('#', 1)
- if len(ret) == 2:
- return base_url + self.inliner.rfc_url % int(ret[0]) + '#' + ret[1]
+ ret = self.target.partition('#')
+ if ret[1]:
+ return base_url + self.inliner.rfc_url % int(ret[0]) + '#' + ret[2]
else:
return base_url + self.inliner.rfc_url % int(ret[0])
diff --git a/sphinx/search/__init__.py b/sphinx/search/__init__.py
index 187e6a2f37f..b835b7b36db 100644
--- a/sphinx/search/__init__.py
+++ b/sphinx/search/__init__.py
@@ -300,7 +300,7 @@ def __init__(
# fallback; try again with language-code
if lang_class is None and '_' in lang:
- lang_class = languages.get(lang.split('_')[0])
+ lang_class = languages.get(lang.partition('_')[0])
if lang_class is None:
self.lang: SearchLanguage = SearchEnglish(options)
diff --git a/sphinx/util/docfields.py b/sphinx/util/docfields.py
index dcc24753862..ed04ec6ebd1 100644
--- a/sphinx/util/docfields.py
+++ b/sphinx/util/docfields.py
@@ -386,7 +386,7 @@ def _transform_step_1(
field_body = cast('nodes.field_body', field[1])
try:
# split into field type and argument
- fieldtype_name, fieldarg = field_name.astext().split(None, 1)
+ fieldtype_name, fieldarg = field_name.astext().split(None, maxsplit=1)
except ValueError:
# maybe an argument-less field type?
fieldtype_name, fieldarg = field_name.astext(), ''
diff --git a/sphinx/util/docstrings.py b/sphinx/util/docstrings.py
index 53e7620edc2..6f23096d92b 100644
--- a/sphinx/util/docstrings.py
+++ b/sphinx/util/docstrings.py
@@ -26,7 +26,7 @@ def separate_metadata(s: str | None) -> tuple[str | None, dict[str, str]]:
else:
matched = field_list_item_re.match(line)
if matched and not in_other_element:
- field_name = matched.group()[1:].split(':', 1)[0]
+ field_name = matched.group()[1:].partition(':')[0]
if field_name.startswith('meta '):
name = field_name[5:].strip()
metadata[name] = line[matched.end() :].strip()
diff --git a/sphinx/util/i18n.py b/sphinx/util/i18n.py
index 05542876fc3..dd1616a8f31 100644
--- a/sphinx/util/i18n.py
+++ b/sphinx/util/i18n.py
@@ -168,7 +168,7 @@ def docname_to_domain(docname: str, compaction: bool | str) -> str:
if isinstance(compaction, str):
return compaction
if compaction:
- return docname.split(SEP, 1)[0]
+ return docname.partition(SEP)[0]
else:
return docname
diff --git a/sphinx/util/images.py b/sphinx/util/images.py
index b43a0705d36..ca6cb66764b 100644
--- a/sphinx/util/images.py
+++ b/sphinx/util/images.py
@@ -90,12 +90,17 @@ def get_image_extension(mimetype: str) -> str | None:
def parse_data_uri(uri: str) -> DataURI | None:
if not uri.startswith('data:'):
return None
+ uri = uri[5:]
+
+ if ',' not in uri:
+ msg = 'malformed data URI'
+ raise ValueError(msg)
# data:[<MIME-type>][;charset=<encoding>][;base64],<data>
mimetype = 'text/plain'
charset = 'US-ASCII'
- properties, data = uri[5:].split(',', 1)
+ properties, _, data = uri.partition(',')
for prop in properties.split(';'):
if prop == 'base64':
pass # skip
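
With the new early check, parse_data_uri now fails loudly on truncated input
instead of surfacing an unpacking error; illustrative calls matching the
updated test expectation below:

    from sphinx.util.images import parse_data_uri

    parse_data_uri('https://example.org/x.png')  # None: not a data URI
    parse_data_uri('data:text/plain;base64')     # ValueError: malformed data URI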
diff --git a/sphinx/util/index_entries.py b/sphinx/util/index_entries.py
index 10046842976..36dba388146 100644
--- a/sphinx/util/index_entries.py
+++ b/sphinx/util/index_entries.py
@@ -20,7 +20,7 @@ def split_index_msg(entry_type: str, value: str) -> list[str]:
def _split_into(n: int, type: str, value: str) -> list[str]:
"""Split an index entry into a given number of parts at semicolons."""
- parts = [x.strip() for x in value.split(';', n - 1)]
+ parts = [x.strip() for x in value.split(';', maxsplit=n - 1)]
if len(list(filter(None, parts))) < n:
msg = f'invalid {type} index entry {value!r}'
raise ValueError(msg)
diff --git a/sphinx/util/inventory.py b/sphinx/util/inventory.py
index d37398a5562..6e4c648bd3f 100644
--- a/sphinx/util/inventory.py
+++ b/sphinx/util/inventory.py
@@ -75,7 +75,7 @@ def _loads_v1(cls, lines: Sequence[str], *, uri: str) -> _Inventory:
projname = lines[0].rstrip()[11:] # Project name
version = lines[1].rstrip()[11:] # Project version
for line in lines[2:]:
- name, item_type, location = line.rstrip().split(None, 2)
+ name, item_type, location = line.rstrip().split(None, maxsplit=2)
location = posixpath.join(uri, location)
# version 1 did not add anchors to the location
if item_type == 'mod':
diff --git a/sphinx/util/osutil.py b/sphinx/util/osutil.py
index 807db899af9..374c59ae904 100644
--- a/sphinx/util/osutil.py
+++ b/sphinx/util/osutil.py
@@ -47,8 +47,8 @@ def relative_uri(base: str, to: str) -> str:
"""Return a relative URL from ``base`` to ``to``."""
if to.startswith(SEP):
return to
- b2 = base.split('#')[0].split(SEP)
- t2 = to.split('#')[0].split(SEP)
+ b2 = base.partition('#')[0].split(SEP)
+ t2 = to.partition('#')[0].split(SEP)
# remove common segments (except the last segment)
for x, y in zip(b2[:-1], t2[:-1], strict=False):
if x != y:
diff --git a/sphinx/util/requests.py b/sphinx/util/requests.py
index b439ce437e8..f7d4ffdf4e9 100644
--- a/sphinx/util/requests.py
+++ b/sphinx/util/requests.py
@@ -40,7 +40,7 @@ def _get_tls_cacert(url: str, certs: str | dict[str, str] | None) -> str | bool:
else:
hostname = urlsplit(url).netloc
if '@' in hostname:
- _, hostname = hostname.split('@', 1)
+ hostname = hostname.partition('@')[-1]
return certs.get(hostname, True)
diff --git a/sphinx/writers/texinfo.py b/sphinx/writers/texinfo.py
index eaa7dbdb6e7..c36b5d12f07 100644
--- a/sphinx/writers/texinfo.py
+++ b/sphinx/writers/texinfo.py
@@ -416,7 +416,7 @@ def add_menu_entries(
name = self.node_names[entry]
# special formatting for entries that are divided by an em-dash
try:
- parts = reg.split(name, 1)
+ parts = reg.split(name, maxsplit=1)
except TypeError:
# could be a gettext proxy
parts = [name]
@@ -748,7 +748,7 @@ def visit_reference(self, node: Element) -> None:
uri = self.escape_arg(uri)
id = 'Top'
if '#' in uri:
- uri, id = uri.split('#', 1)
+ uri, _, id = uri.partition('#')
id = self.escape_id(id)
name = self.escape_menu(name)
if name == id:
diff --git a/tests/test_builders/test_build_latex.py b/tests/test_builders/test_build_latex.py
index 37e708a021e..16f3437c154 100644
--- a/tests/test_builders/test_build_latex.py
+++ b/tests/test_builders/test_build_latex.py
@@ -1566,7 +1566,7 @@ def test_latex_table_tabulars(app: SphinxTestApp) -> None:
result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
tables = {}
for chap in re.split(r'\\(?:section|chapter){', result)[1:]:
- sectname, content = chap.split('}', 1)
+ sectname, _, content = chap.partition('}')
content = re.sub(r'\\sphinxstepscope', '', content) # filter a separator
tables[sectname] = content.strip()
@@ -1644,7 +1644,7 @@ def test_latex_table_longtable(app: SphinxTestApp) -> None:
result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
tables = {}
for chap in re.split(r'\\(?:section|chapter){', result)[1:]:
- sectname, content = chap.split('}', 1)
+ sectname, _, content = chap.partition('}')
content = re.sub(r'\\sphinxstepscope', '', content) # filter a separator
tables[sectname] = content.strip()
@@ -1712,7 +1712,7 @@ def test_latex_table_complex_tables(app: SphinxTestApp) -> None:
result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
tables = {}
for chap in re.split(r'\\(?:section|renewcommand){', result)[1:]:
- sectname, content = chap.split('}', 1)
+ sectname, _, content = chap.partition('}')
tables[sectname] = content.strip()
def get_expected(name):
diff --git a/tests/test_util/test_util_images.py b/tests/test_util/test_util_images.py
index 875fc0d98f4..d0b4f918afc 100644
--- a/tests/test_util/test_util_images.py
+++ b/tests/test_util/test_util_images.py
@@ -84,8 +84,5 @@ def test_parse_data_uri() -> None:
'data:iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4'
'//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg=='
)
- with pytest.raises(
- ValueError,
- match=r'not enough values to unpack \(expected 2, got 1\)',
- ):
+ with pytest.raises(ValueError, match=r'malformed data URI'):
parse_data_uri(uri)
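
The common thread of this refactor: str.partition always returns a 3-tuple,
so call sites need no exception handling or length checks when the separator
is absent. A small illustrative comparison, not taken from the patch:

    'name=value'.partition('=')    # ('name', '=', 'value')
    'noseparator'.partition('=')   # ('noseparator', '', '') -- never raises
    'name=value'.split('=', maxsplit=1)   # ['name', 'value']
    'noseparator'.split('=', maxsplit=1)  # ['noseparator']; unpacking this
                                          # into two names raises ValueError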
From 2b7e3adf27c158305acca9b5e4d0d93d3e4c6f09 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Tue, 3 Jun 2025 00:56:19 +0100
Subject: [PATCH 104/466] Bump Ruff to 0.11.12
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index c676e699212..17cb0463ae5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -92,7 +92,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.11",
+ "ruff==0.11.12",
"mypy==1.15.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
@@ -135,7 +135,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.11",
+ "ruff==0.11.12",
"sphinx-lint>=0.9",
]
package = [
From 6a860c8c2e9c586b6634fb597503867dd3e053d5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Fri, 6 Jun 2025 16:11:34 +0200
Subject: [PATCH 105/466] Fix #13619 about possibly duplicated footnotes from
signatures in PDF (#13623)
---
CHANGES.rst | 3 +++
sphinx/texinputs/sphinxlatexobjects.sty | 30 +++++++++++++------------
2 files changed, 19 insertions(+), 14 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index f575efab7c7..40d6984ca55 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -37,6 +37,9 @@ Bugs fixed
Patch by Alicia Garcia-Raboso.
* #13528: Add tilde ``~`` prefix support for :rst:role:`py:deco`.
Patch by Shengyu Zhang and Adam Turner.
+* #13619: LaTeX: possible duplicated footnotes in PDF from object signatures
+ (typically if :confval:`latex_show_urls` ``= 'footnote'``).
+ Patch by Jean-François B.
Testing
-------
diff --git a/sphinx/texinputs/sphinxlatexobjects.sty b/sphinx/texinputs/sphinxlatexobjects.sty
index 1147a016227..2a05dd6de8c 100644
--- a/sphinx/texinputs/sphinxlatexobjects.sty
+++ b/sphinx/texinputs/sphinxlatexobjects.sty
@@ -1,7 +1,7 @@
%% MODULE RELEASE DATA AND OBJECT DESCRIPTIONS
%
% change this info string if making any custom modification
-\ProvidesPackage{sphinxlatexobjects}[2025/02/11 documentation environments]
+\ProvidesPackage{sphinxlatexobjects}[2025/06/06 documentation environments]
% Provides support for this output mark-up from Sphinx latex writer:
%
@@ -155,20 +155,23 @@
\pysigadjustitemsep
}
\newcommand{\pysiglinewithargsret}[3]{%
- % as #1 may contain a footnote using \label we need to make \label
- % a no-op here to avoid LaTeX complaining about duplicates
-\let\spx@label\label\let\label\@gobble
- \settowidth{\py@argswidth}{#1\pysigarglistopen}%
-\let\label\spx@label
+ % #1 may contain a footnote (especially with latex_show_urls='footnote'
+ % and some intersphinx-added hyperlinking). Here we want to measure a
+ % width but not typeset such a footnote (else #13619).
+ % Miraculously, a sphinxpackagefootnote.sty tabulary compatibility
+ % layer employing the amsmath \ifmeasuring@ can be used here to let
+ % a footnote influence the measured width up to the opening brace but
+ % not actually get typeset at this stage...
+ % MEMO: "argswidth" is misleading here; this code measures the name,
+ % not the arguments.
+ \settowidth{\py@argswidth}{\measuring@true#1\pysigarglistopen}%
\py@argswidth=\dimexpr\linewidth+\labelwidth-\py@argswidth\relax\relax
\item[{#1\pysigarglistopen\py@sigparams{#2}{#3}\strut}]
\pysigadjustitemsep
}
-\newcommand{\pysiglinewithargsretwithtypelist}[4]{
-% #1 = name, #2 = typelist, #3 = arglist, #4 = retann
-\let\spx@label\label\let\label\@gobble
- \settowidth{\py@argswidth}{#1\pysigtypelistopen}%
-\let\label\spx@label
+\newcommand{\pysiglinewithargsretwithtypelist}[4]{%
+ % same comment as in \pysiglinewithargsret
+ \settowidth{\py@argswidth}{\measuring@true#1\pysigtypelistopen}%
\py@argswidth=\dimexpr\linewidth+\labelwidth-\py@argswidth\relax\relax
\item[{#1\pysigtypelistopen\py@sigparamswithtypelist{#2}{#3}{#4}\strut}]
\pysigadjustitemsep
@@ -244,9 +247,8 @@
\newcommand{\pysigwithonelineperargwithtypelist}[4]{
% #1 = name, #2 = typelist, #3 = arglist, #4 = retann
% render the type parameters list on one line, but each argument is rendered on its own line
-\let\spx@label\label\let\label\@gobble
- \settowidth{\py@argswidth}{#1\pysigtypelistopen}%
-\let\label\spx@label
+ % for \measuring@true see comment in \pysiglinewithargsret
+ \settowidth{\py@argswidth}{\measuring@true#1\pysigtypelistopen}%
\py@argswidth=\dimexpr\linewidth+\labelwidth-\py@argswidth\relax\relax
\item[{#1\pysigtypelistopen\parbox[t]{\py@argswidth}{%
\raggedright #2\pysigtypelistclose\pysigarglistopen\strut}\strut}]
From 43c298b8e075dff7fa7e7ff72d7a845bfd4aac86 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Fri, 6 Jun 2025 20:40:05 +0100
Subject: [PATCH 106/466] Add ``_write_docname()`` (#13624)
---
sphinx/builders/__init__.py | 23 +++++++++++++----------
sphinx/environment/__init__.py | 3 ++-
sphinx/environment/adapters/toctree.py | 4 +---
3 files changed, 16 insertions(+), 14 deletions(-)
diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py
index 21a1eb8b5c4..076fe218434 100644
--- a/sphinx/builders/__init__.py
+++ b/sphinx/builders/__init__.py
@@ -761,11 +761,7 @@ def _write_serial(self, docnames: Sequence[str]) -> None:
len(docnames),
self.app.verbosity,
):
- self.app.phase = BuildPhase.RESOLVING
- doctree = self.env.get_and_resolve_doctree(docname, self)
- self.app.phase = BuildPhase.WRITING
- self.write_doc_serialized(docname, doctree)
- self.write_doc(docname, doctree)
+ _write_docname(docname, app=self.app, env=self.env, builder=self)
def _write_parallel(self, docnames: Sequence[str], nproc: int) -> None:
def write_process(docs: list[tuple[str, nodes.document]]) -> None:
@@ -775,11 +771,7 @@ def write_process(docs: list[tuple[str, nodes.document]]) -> None:
# warm up caches/compile templates using the first document
firstname, docnames = docnames[0], docnames[1:]
- self.app.phase = BuildPhase.RESOLVING
- doctree = self.env.get_and_resolve_doctree(firstname, self)
- self.app.phase = BuildPhase.WRITING
- self.write_doc_serialized(firstname, doctree)
- self.write_doc(firstname, doctree)
+ _write_docname(firstname, app=self.app, env=self.env, builder=self)
tasks = ParallelTasks(nproc)
chunks = make_chunks(docnames, nproc)
@@ -867,6 +859,17 @@ def get_builder_config(self, option: str, default: str) -> Any:
return getattr(self.config, optname)
+def _write_docname(
+ docname: str, /, *, app: Sphinx, env: BuildEnvironment, builder: Builder
+) -> None:
+ """Write a single document."""
+ app.phase = BuildPhase.RESOLVING
+ doctree = env.get_and_resolve_doctree(docname, builder=builder)
+ app.phase = BuildPhase.WRITING
+ builder.write_doc_serialized(docname, doctree)
+ builder.write_doc(docname, doctree)
+
+
class _UnicodeDecodeErrorHandler:
"""Custom error handler for open() that warns and replaces."""
diff --git a/sphinx/environment/__init__.py b/sphinx/environment/__init__.py
index 79fa6278549..2f3e25ac477 100644
--- a/sphinx/environment/__init__.py
+++ b/sphinx/environment/__init__.py
@@ -701,6 +701,7 @@ def get_and_resolve_doctree(
self.apply_post_transforms(doctree, docname)
# now, resolve all toctree nodes
+ tags = builder.tags
for toctreenode in doctree.findall(addnodes.toctree):
result = toctree_adapters._resolve_toctree(
self,
@@ -709,7 +710,7 @@ def get_and_resolve_doctree(
toctreenode,
prune=prune_toctrees,
includehidden=includehidden,
- tags=builder.tags,
+ tags=tags,
)
if result is None:
toctreenode.parent.replace(toctreenode, [])
diff --git a/sphinx/environment/adapters/toctree.py b/sphinx/environment/adapters/toctree.py
index bedeca2f299..90344f185d7 100644
--- a/sphinx/environment/adapters/toctree.py
+++ b/sphinx/environment/adapters/toctree.py
@@ -191,9 +191,7 @@ def _resolve_toctree(
# prune the tree to maxdepth, also set toc depth and current classes
_toctree_add_classes(newnode, 1, docname)
- newnode = _toctree_copy(
- newnode, 1, maxdepth if prune else 0, collapse, builder.tags
- )
+ newnode = _toctree_copy(newnode, 1, maxdepth if prune else 0, collapse, tags)
if (
isinstance(newnode[-1], nodes.Element) and len(newnode[-1]) == 0
From 77a0d6658b8e86b8e730e78d8f7bb839babe7567 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Fri, 6 Jun 2025 22:27:18 +0100
Subject: [PATCH 107/466] Extract nested function definitions (#13625)
---
doc/conf.py | 10 +-
sphinx/builders/_epub_base.py | 115 ++++---
sphinx/domains/c/_parser.py | 15 +-
sphinx/domains/c/_symbol.py | 76 +++--
sphinx/domains/cpp/__init__.py | 33 +-
sphinx/domains/cpp/_parser.py | 13 +-
sphinx/domains/cpp/_symbol.py | 322 ++++++++++--------
sphinx/domains/python/_annotations.py | 26 +-
sphinx/domains/std/__init__.py | 10 +-
sphinx/ext/doctest.py | 27 +-
sphinx/transforms/post_transforms/__init__.py | 41 +--
sphinx/writers/latex.py | 11 +-
12 files changed, 365 insertions(+), 334 deletions(-)
diff --git a/doc/conf.py b/doc/conf.py
index 9cf2f9b4856..28dbdb58c46 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -297,14 +297,12 @@ def linkify_issues_in_changelog(
) -> None:
"""Linkify issue references like #123 in changelog to GitHub."""
if docname == 'changes':
+ linkified_changelog = re.sub(r'(?:PR)?#([0-9]+)\b', _linkify, source[0])
+ source[0] = linkified_changelog
- def linkify(match: re.Match[str]) -> str:
- url = 'https://github.com/sphinx-doc/sphinx/issues/' + match[1]
- return f'`{match[0]} <{url}>`_'
-
- linkified_changelog = re.sub(r'(?:PR)?#([0-9]+)\b', linkify, source[0])
- source[0] = linkified_changelog
+def _linkify(match: re.Match[str], /) -> str:
+    url = 'https://github.com/sphinx-doc/sphinx/issues/' + match[1]
+    return f'`{match[0]} <{url}>`__'
REDIRECT_TEMPLATE = """
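
A quick illustrative check of the extracted _linkify helper against the
regular expression used above (hypothetical input, not part of the patch):

    import re
    text = 'See PR#13625 and #13619.'
    print(re.sub(r'(?:PR)?#([0-9]+)\b', _linkify, text))
    # See `PR#13625 <https://github.com/sphinx-doc/sphinx/issues/13625>`__
    # and `#13619 <https://github.com/sphinx-doc/sphinx/issues/13619>`__.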
diff --git a/sphinx/builders/_epub_base.py b/sphinx/builders/_epub_base.py
index a9527c3c0e3..10ae0820c5b 100644
--- a/sphinx/builders/_epub_base.py
+++ b/sphinx/builders/_epub_base.py
@@ -279,16 +279,6 @@ def fix_ids(self, tree: nodes.document) -> None:
Some readers crash because they interpret the part as a
transport protocol specification.
"""
-
- def update_node_id(node: Element) -> None:
- """Update IDs of given *node*."""
- new_ids: list[str] = []
- for node_id in node['ids']:
- new_id = self.fix_fragment('', node_id)
- if new_id not in new_ids:
- new_ids.append(new_id)
- node['ids'] = new_ids
-
for reference in tree.findall(nodes.reference):
if 'refuri' in reference:
m = self.refuri_re.match(reference['refuri'])
@@ -298,66 +288,75 @@ def update_node_id(node: Element) -> None:
reference['refid'] = self.fix_fragment('', reference['refid'])
for target in tree.findall(nodes.target):
- update_node_id(target)
+ self._update_node_id(target)
next_node: Node = target.next_node(ascend=True)
if isinstance(next_node, nodes.Element):
- update_node_id(next_node)
+ self._update_node_id(next_node)
for desc_signature in tree.findall(addnodes.desc_signature):
- update_node_id(desc_signature)
+ self._update_node_id(desc_signature)
+
+ def _update_node_id(self, node: Element, /) -> None:
+ """Update IDs of given *node*."""
+ new_ids: list[str] = []
+ for node_id in node['ids']:
+ new_id = self.fix_fragment('', node_id)
+ if new_id not in new_ids:
+ new_ids.append(new_id)
+ node['ids'] = new_ids
+
+ @staticmethod
+ def _make_footnote_ref(doc: nodes.document, label: str) -> nodes.footnote_reference:
+ """Create a footnote_reference node with children"""
+ footnote_ref = nodes.footnote_reference('[#]_')
+ footnote_ref.append(nodes.Text(label))
+ doc.note_autofootnote_ref(footnote_ref)
+ return footnote_ref
+
+ @staticmethod
+ def _make_footnote(doc: nodes.document, label: str, uri: str) -> nodes.footnote:
+ """Create a footnote node with children"""
+ footnote = nodes.footnote(uri)
+ para = nodes.paragraph()
+ para.append(nodes.Text(uri))
+ footnote.append(para)
+ footnote.insert(0, nodes.label('', label))
+ doc.note_autofootnote(footnote)
+ return footnote
+
+ @staticmethod
+ def _footnote_spot(tree: nodes.document) -> tuple[Element, int]:
+ """Find or create a spot to place footnotes.
+
+ The function returns the tuple (parent, index).
+ """
+ # The code uses the following heuristic:
+ # a) place them after the last existing footnote
+ # b) place them after an (empty) Footnotes rubric
+ # c) create an empty Footnotes rubric at the end of the document
+ fns = list(tree.findall(nodes.footnote))
+ if fns:
+ fn = fns[-1]
+ return fn.parent, fn.parent.index(fn) + 1
+ for node in tree.findall(nodes.rubric):
+ if len(node) == 1 and node.astext() == FOOTNOTES_RUBRIC_NAME:
+ return node.parent, node.parent.index(node) + 1
+ doc = next(tree.findall(nodes.document))
+ rub = nodes.rubric()
+ rub.append(nodes.Text(FOOTNOTES_RUBRIC_NAME))
+ doc.append(rub)
+ return doc, doc.index(rub) + 1
def add_visible_links(
self, tree: nodes.document, show_urls: str = 'inline'
) -> None:
"""Add visible link targets for external links"""
-
- def make_footnote_ref(
- doc: nodes.document, label: str
- ) -> nodes.footnote_reference:
- """Create a footnote_reference node with children"""
- footnote_ref = nodes.footnote_reference('[#]_')
- footnote_ref.append(nodes.Text(label))
- doc.note_autofootnote_ref(footnote_ref)
- return footnote_ref
-
- def make_footnote(doc: nodes.document, label: str, uri: str) -> nodes.footnote:
- """Create a footnote node with children"""
- footnote = nodes.footnote(uri)
- para = nodes.paragraph()
- para.append(nodes.Text(uri))
- footnote.append(para)
- footnote.insert(0, nodes.label('', label))
- doc.note_autofootnote(footnote)
- return footnote
-
- def footnote_spot(tree: nodes.document) -> tuple[Element, int]:
- """Find or create a spot to place footnotes.
-
- The function returns the tuple (parent, index).
- """
- # The code uses the following heuristic:
- # a) place them after the last existing footnote
- # b) place them after an (empty) Footnotes rubric
- # c) create an empty Footnotes rubric at the end of the document
- fns = list(tree.findall(nodes.footnote))
- if fns:
- fn = fns[-1]
- return fn.parent, fn.parent.index(fn) + 1
- for node in tree.findall(nodes.rubric):
- if len(node) == 1 and node.astext() == FOOTNOTES_RUBRIC_NAME:
- return node.parent, node.parent.index(node) + 1
- doc = next(tree.findall(nodes.document))
- rub = nodes.rubric()
- rub.append(nodes.Text(FOOTNOTES_RUBRIC_NAME))
- doc.append(rub)
- return doc, doc.index(rub) + 1
-
if show_urls == 'no':
return
if show_urls == 'footnote':
doc = next(tree.findall(nodes.document))
- fn_spot, fn_idx = footnote_spot(tree)
+ fn_spot, fn_idx = self._footnote_spot(tree)
nr = 1
for node in list(tree.findall(nodes.reference)):
uri = node.get('refuri', '')
@@ -371,9 +370,9 @@ def footnote_spot(tree: nodes.document) -> tuple[Element, int]:
elif show_urls == 'footnote':
label = FOOTNOTE_LABEL_TEMPLATE % nr
nr += 1
- footnote_ref = make_footnote_ref(doc, label)
+ footnote_ref = self._make_footnote_ref(doc, label)
node.parent.insert(idx, footnote_ref)
- footnote = make_footnote(doc, label, uri)
+ footnote = self._make_footnote(doc, label, uri)
fn_spot.insert(fn_idx, footnote)
footnote_ref['refid'] = footnote['ids'][0]
footnote.add_backref(footnote_ref['ids'][0])
diff --git a/sphinx/domains/c/_parser.py b/sphinx/domains/c/_parser.py
index bd7ddbe2326..c59352b6ee2 100644
--- a/sphinx/domains/c/_parser.py
+++ b/sphinx/domains/c/_parser.py
@@ -369,10 +369,7 @@ def _parse_logical_or_expression(self) -> ASTExpression:
# pm = cast .*, ->*
def _parse_bin_op_expr(self: DefinitionParser, op_id: int) -> ASTExpression:
if op_id + 1 == len(_expression_bin_ops):
-
- def parser() -> ASTExpression:
- return self._parse_cast_expression()
-
+ parser = self._parse_cast_expression
else:
def parser() -> ASTExpression:
@@ -760,10 +757,7 @@ def _parse_declarator_name_suffix(
if self.skip_string(']'):
size = None
else:
-
- def parser() -> ASTExpression:
- return self._parse_expression()
-
+ parser = self._parse_expression
size = self._parse_expression_fallback([']'], parser)
self.skip_ws()
if not self.skip_string(']'):
@@ -1025,10 +1019,7 @@ def _parse_enumerator(self) -> ASTEnumerator:
init = None
if self.skip_string('='):
self.skip_ws()
-
- def parser() -> ASTExpression:
- return self._parse_constant_expression()
-
+ parser = self._parse_constant_expression
init_val = self._parse_expression_fallback([], parser)
init = ASTInitializer(init_val)
return ASTEnumerator(name, init, attrs)
diff --git a/sphinx/domains/c/_symbol.py b/sphinx/domains/c/_symbol.py
index cb43910e7ab..7ac555415ac 100644
--- a/sphinx/domains/c/_symbol.py
+++ b/sphinx/domains/c/_symbol.py
@@ -445,43 +445,19 @@ def on_missing_qualified_symbol(
# First check if one of those with a declaration matches.
# If it's a function, we need to compare IDs,
# otherwise there should be only one symbol with a declaration.
- def make_cand_symbol() -> Symbol:
- if Symbol.debug_lookup:
- Symbol.debug_print('begin: creating candidate symbol')
- symbol = Symbol(
- parent=lookup_result.parent_symbol,
- ident=lookup_result.ident,
- declaration=declaration,
- docname=docname,
- line=line,
- )
- if Symbol.debug_lookup:
- Symbol.debug_print('end: creating candidate symbol')
- return symbol
if len(with_decl) == 0:
cand_symbol = None
else:
- cand_symbol = make_cand_symbol()
-
- def handle_duplicate_declaration(
- symbol: Symbol, cand_symbol: Symbol
- ) -> None:
- if Symbol.debug_lookup:
- Symbol.debug_indent += 1
- Symbol.debug_print('redeclaration')
- Symbol.debug_indent -= 1
- Symbol.debug_indent -= 2
- # Redeclaration of the same symbol.
- # Let the new one be there, but raise an error to the client
- # so it can use the real symbol as subscope.
- # This will probably result in a duplicate id warning.
- cand_symbol.isRedeclaration = True
- raise _DuplicateSymbolError(symbol, declaration)
+ cand_symbol = self._make_cand_symbol(
+ lookup_result, declaration, docname, line
+ )
if declaration.objectType != 'function':
assert len(with_decl) <= 1
- handle_duplicate_declaration(with_decl[0], cand_symbol)
+ self._handle_duplicate_declaration(
+ with_decl[0], cand_symbol, declaration
+ )
# (not reachable)
# a function, so compare IDs
@@ -493,7 +469,7 @@ def handle_duplicate_declaration(
if Symbol.debug_lookup:
Symbol.debug_print('old_id: ', old_id)
if cand_id == old_id:
- handle_duplicate_declaration(symbol, cand_symbol)
+ self._handle_duplicate_declaration(symbol, cand_symbol, declaration)
# (not reachable)
# no candidate symbol found with matching ID
# if there is an empty symbol, fill that one
@@ -507,7 +483,7 @@ def handle_duplicate_declaration(
if cand_symbol is not None:
return cand_symbol
else:
- return make_cand_symbol()
+ return self._make_cand_symbol(lookup_result, declaration, docname, line)
else:
if Symbol.debug_lookup:
Symbol.debug_print(
@@ -529,6 +505,42 @@ def handle_duplicate_declaration(
symbol._fill_empty(declaration, docname, line)
return symbol
+ @staticmethod
+ def _make_cand_symbol(
+ lookup_result: SymbolLookupResult,
+ declaration: ASTDeclaration | None,
+ docname: str | None,
+ line: int | None,
+ ) -> Symbol:
+ if Symbol.debug_lookup:
+ Symbol.debug_print('begin: creating candidate symbol')
+ symbol = Symbol(
+ parent=lookup_result.parent_symbol,
+ ident=lookup_result.ident,
+ declaration=declaration,
+ docname=docname,
+ line=line,
+ )
+ if Symbol.debug_lookup:
+ Symbol.debug_print('end: creating candidate symbol')
+ return symbol
+
+ @staticmethod
+ def _handle_duplicate_declaration(
+ symbol: Symbol, cand_symbol: Symbol, declaration: ASTDeclaration
+ ) -> None:
+ if Symbol.debug_lookup:
+ Symbol.debug_indent += 1
+ Symbol.debug_print('redeclaration')
+ Symbol.debug_indent -= 1
+ Symbol.debug_indent -= 2
+ # Redeclaration of the same symbol.
+ # Let the new one be there, but raise an error to the client
+ # so it can use the real symbol as subscope.
+ # This will probably result in a duplicate id warning.
+ cand_symbol.isRedeclaration = True
+ raise _DuplicateSymbolError(symbol, declaration)
+
def merge_with(
self, other: Symbol, docnames: list[str], env: BuildEnvironment
) -> None:
diff --git a/sphinx/domains/cpp/__init__.py b/sphinx/domains/cpp/__init__.py
index 554f4ebb17e..ef486897bc4 100644
--- a/sphinx/domains/cpp/__init__.py
+++ b/sphinx/domains/cpp/__init__.py
@@ -1056,6 +1056,15 @@ def merge_domaindata(self, docnames: Set[str], otherdata: dict[str, Any]) -> Non
logger.debug('\tresult end')
logger.debug('merge_domaindata end')
+ def _check_type(self, typ: str, decl_typ: str) -> bool:
+ if typ == 'any':
+ return True
+ objtypes = self.objtypes_for_role(typ)
+ if objtypes:
+ return decl_typ in objtypes
+ logger.debug(f'Type is {typ}, declaration type is {decl_typ}') # NoQA: G004
+ raise AssertionError
+
def _resolve_xref_inner(
self,
env: BuildEnvironment,
@@ -1150,16 +1159,7 @@ def _resolve_xref_inner(
typ = typ.removeprefix('cpp:')
decl_typ = s.declaration.objectType
- def check_type() -> bool:
- if typ == 'any':
- return True
- objtypes = self.objtypes_for_role(typ)
- if objtypes:
- return decl_typ in objtypes
- logger.debug(f'Type is {typ}, declaration type is {decl_typ}') # NoQA: G004
- raise AssertionError
-
- if not check_type():
+ if not self._check_type(typ, decl_typ):
logger.warning(
'cpp:%s targets a %s (%s).',
typ,
@@ -1299,6 +1299,12 @@ def get_full_qualified_name(self, node: Element) -> str | None:
return f'{parent_name}::{target}'
+def _init_stuff(app: Sphinx) -> None:
+ Symbol.debug_lookup = app.config.cpp_debug_lookup
+ Symbol.debug_show_tree = app.config.cpp_debug_show_tree
+ app.config.cpp_index_common_prefix.sort(reverse=True)
+
+
def setup(app: Sphinx) -> ExtensionMetadata:
app.add_domain(CPPDomain)
app.add_config_value('cpp_index_common_prefix', [], 'env', types=frozenset({list}))
@@ -1318,12 +1324,7 @@ def setup(app: Sphinx) -> ExtensionMetadata:
app.add_config_value('cpp_debug_lookup', False, '', types=frozenset({bool}))
app.add_config_value('cpp_debug_show_tree', False, '', types=frozenset({bool}))
- def init_stuff(app: Sphinx) -> None:
- Symbol.debug_lookup = app.config.cpp_debug_lookup
- Symbol.debug_show_tree = app.config.cpp_debug_show_tree
- app.config.cpp_index_common_prefix.sort(reverse=True)
-
- app.connect('builder-inited', init_stuff)
+ app.connect('builder-inited', _init_stuff)
return {
'version': 'builtin',
diff --git a/sphinx/domains/cpp/_parser.py b/sphinx/domains/cpp/_parser.py
index aa941260da9..2055a942c68 100644
--- a/sphinx/domains/cpp/_parser.py
+++ b/sphinx/domains/cpp/_parser.py
@@ -438,9 +438,7 @@ def _parse_postfix_expression(self) -> ASTPostfixExpr:
if not self.skip_string('('):
self.fail("Expected '(' in '%s'." % cast)
- def parser() -> ASTExpression:
- return self._parse_expression()
-
+ parser = self._parse_expression
expr = self._parse_expression_fallback([')'], parser)
self.skip_ws()
if not self.skip_string(')'):
@@ -459,10 +457,7 @@ def parser() -> ASTExpression:
except DefinitionError as e_type:
self.pos = pos
try:
-
- def parser() -> ASTExpression:
- return self._parse_expression()
-
+ parser = self._parse_expression
expr = self._parse_expression_fallback([')'], parser)
prefix = ASTTypeId(expr, isType=False)
if not self.skip_string(')'):
@@ -1423,9 +1418,7 @@ def _parse_declarator_name_suffix(
array_ops.append(ASTArray(None))
continue
- def parser() -> ASTExpression:
- return self._parse_expression()
-
+ parser = self._parse_expression
value = self._parse_expression_fallback([']'], parser)
if not self.skip_string(']'):
self.fail("Expected ']' in end of array operator.")
diff --git a/sphinx/domains/cpp/_symbol.py b/sphinx/domains/cpp/_symbol.py
index 36b965e52ae..7449e616a03 100644
--- a/sphinx/domains/cpp/_symbol.py
+++ b/sphinx/domains/cpp/_symbol.py
@@ -38,6 +38,10 @@ def __str__(self) -> str:
return 'Internal C++ duplicate symbol error:\n%s' % self.symbol.dump(0)
+class _QualifiedSymbolIsTemplateParam(Exception):
+ pass
+
+
class SymbolLookupResult:
__slots__ = (
'symbols',
@@ -419,53 +423,19 @@ def _find_named_symbols(
if not _is_specialization(template_params, template_args):
template_args = None
- def matches(s: Symbol) -> bool:
- if s.identOrOp != ident_or_op:
- return False
- if (s.templateParams is None) != (template_params is None):
- if template_params is not None:
- # we query with params, they must match params
- return False
- if not template_shorthand:
- # we don't query with params, and we do care about them
- return False
- if template_params:
- # TODO: do better comparison
- if str(s.templateParams) != str(template_params):
- return False
- if (s.templateArgs is None) != (template_args is None):
- return False
- if s.templateArgs:
- # TODO: do better comparison
- if str(s.templateArgs) != str(template_args):
- return False
- return True
-
- def candidates() -> Iterator[Symbol]:
- s = self
- if Symbol.debug_lookup:
- Symbol.debug_print('searching in self:')
- logger.debug(s.to_string(Symbol.debug_indent + 1), end='')
- while True:
- if match_self:
- yield s
- if recurse_in_anon:
- yield from s.children_recurse_anon
- else:
- yield from s._children
-
- if s.siblingAbove is None:
- break
- s = s.siblingAbove
- if Symbol.debug_lookup:
- Symbol.debug_print('searching in sibling:')
- logger.debug(s.to_string(Symbol.debug_indent + 1), end='')
-
- for s in candidates():
+ for s in self._candidates(
+ match_self=match_self, recurse_in_anon=recurse_in_anon
+ ):
if Symbol.debug_lookup:
Symbol.debug_print('candidate:')
logger.debug(s.to_string(Symbol.debug_indent + 1), end='')
- if matches(s):
+ if self._matches(
+ s,
+ ident_or_op=ident_or_op,
+ template_params=template_params,
+ template_args=template_args,
+ template_shorthand=template_shorthand,
+ ):
if Symbol.debug_lookup:
Symbol.debug_indent += 1
Symbol.debug_print('matches')
@@ -476,6 +446,59 @@ def candidates() -> Iterator[Symbol]:
if Symbol.debug_lookup:
Symbol.debug_indent -= 2
+ @staticmethod
+ def _matches(
+ s: Symbol,
+ /,
+ *,
+ ident_or_op: ASTIdentifier | ASTOperator,
+ template_params: ASTTemplateParams | ASTTemplateIntroduction,
+ template_args: ASTTemplateArgs,
+ template_shorthand: bool,
+ ) -> bool:
+ if s.identOrOp != ident_or_op:
+ return False
+ if (s.templateParams is None) != (template_params is None):
+ if template_params is not None:
+ # we query with params, they must match params
+ return False
+ if not template_shorthand:
+ # we don't query with params, and we do care about them
+ return False
+ if template_params:
+ # TODO: do better comparison
+ if str(s.templateParams) != str(template_params):
+ return False
+ if (s.templateArgs is None) != (template_args is None):
+ return False
+ if s.templateArgs:
+ # TODO: do better comparison
+ if str(s.templateArgs) != str(template_args):
+ return False
+ return True
+
+ def _candidates(
+ self, *, match_self: bool, recurse_in_anon: bool
+ ) -> Iterator[Symbol]:
+ s = self
+ if Symbol.debug_lookup:
+ Symbol.debug_print('searching in self:')
+ logger.debug(s.to_string(Symbol.debug_indent + 1), end='')
+ while True:
+ if match_self:
+ yield s
+ if recurse_in_anon:
+ yield from s.children_recurse_anon
+ else:
+ yield from s._children
+
+ if s.siblingAbove is None:
+ break
+ s = s.siblingAbove
+ if Symbol.debug_lookup:
+ Symbol.debug_print('searching in sibling:')
+ logger.debug(s.to_string(Symbol.debug_indent + 1), end='')
+
def _symbol_lookup(
self,
nested_name: ASTNestedName,
@@ -661,34 +684,10 @@ def _add_symbols(
Symbol.debug_print('decl: ', declaration)
Symbol.debug_print(f'location: {docname}:{line}')
- def on_missing_qualified_symbol(
- parent_symbol: Symbol,
- ident_or_op: ASTIdentifier | ASTOperator,
- template_params: Any,
- template_args: ASTTemplateArgs,
- ) -> Symbol | None:
- if Symbol.debug_lookup:
- Symbol.debug_indent += 1
- Symbol.debug_print('_add_symbols, on_missing_qualified_symbol:')
- Symbol.debug_indent += 1
- Symbol.debug_print('template_params:', template_params)
- Symbol.debug_print('ident_or_op: ', ident_or_op)
- Symbol.debug_print('template_args: ', template_args)
- Symbol.debug_indent -= 2
- return Symbol(
- parent=parent_symbol,
- identOrOp=ident_or_op,
- templateParams=template_params,
- templateArgs=template_args,
- declaration=None,
- docname=None,
- line=None,
- )
-
lookup_result = self._symbol_lookup(
nested_name,
template_decls,
- on_missing_qualified_symbol,
+ _on_missing_qualified_symbol_fresh,
strict_template_param_arg_lists=True,
ancestor_lookup_type=None,
template_shorthand=False,
@@ -759,45 +758,18 @@ def on_missing_qualified_symbol(
# First check if one of those with a declaration matches.
# If it's a function, we need to compare IDs,
# otherwise there should be only one symbol with a declaration.
- def make_cand_symbol() -> Symbol:
- if Symbol.debug_lookup:
- Symbol.debug_print('begin: creating candidate symbol')
- symbol = Symbol(
- parent=lookup_result.parent_symbol,
- identOrOp=lookup_result.ident_or_op,
- templateParams=lookup_result.template_params,
- templateArgs=lookup_result.template_args,
- declaration=declaration,
- docname=docname,
- line=line,
- )
- if Symbol.debug_lookup:
- Symbol.debug_print('end: creating candidate symbol')
- return symbol
-
if len(with_decl) == 0:
cand_symbol = None
else:
- cand_symbol = make_cand_symbol()
-
- def handle_duplicate_declaration(
- symbol: Symbol, cand_symbol: Symbol
- ) -> None:
- if Symbol.debug_lookup:
- Symbol.debug_indent += 1
- Symbol.debug_print('redeclaration')
- Symbol.debug_indent -= 1
- Symbol.debug_indent -= 2
- # Redeclaration of the same symbol.
- # Let the new one be there, but raise an error to the client
- # so it can use the real symbol as subscope.
- # This will probably result in a duplicate id warning.
- cand_symbol.isRedeclaration = True
- raise _DuplicateSymbolError(symbol, declaration)
+ cand_symbol = self._make_cand_symbol(
+ lookup_result, declaration, docname, line
+ )
if declaration.objectType != 'function':
assert len(with_decl) <= 1
- handle_duplicate_declaration(with_decl[0], cand_symbol)
+ self._handle_duplicate_declaration(
+ with_decl[0], cand_symbol, declaration
+ )
# (not reachable)
# a function, so compare IDs
@@ -808,13 +780,13 @@ def handle_duplicate_declaration(
# but all existing must be functions as well,
# otherwise we declare it to be a duplicate
if symbol.declaration.objectType != 'function':
- handle_duplicate_declaration(symbol, cand_symbol)
+ self._handle_duplicate_declaration(symbol, cand_symbol, declaration)
# (not reachable)
old_id = symbol.declaration.get_newest_id()
if Symbol.debug_lookup:
Symbol.debug_print('old_id: ', old_id)
if cand_id == old_id:
- handle_duplicate_declaration(symbol, cand_symbol)
+ self._handle_duplicate_declaration(symbol, cand_symbol, declaration)
# (not reachable)
# no candidate symbol found with matching ID
# if there is an empty symbol, fill that one
@@ -824,12 +796,12 @@ def handle_duplicate_declaration(
if cand_symbol is not None:
Symbol.debug_print('result is already created cand_symbol')
else:
- Symbol.debug_print('result is make_cand_symbol()')
+ Symbol.debug_print('result is self._make_cand_symbol()')
Symbol.debug_indent -= 2
if cand_symbol is not None:
return cand_symbol
else:
- return make_cand_symbol()
+ return self._make_cand_symbol(lookup_result, declaration, docname, line)
else:
if Symbol.debug_lookup:
Symbol.debug_print(
@@ -851,6 +823,44 @@ def handle_duplicate_declaration(
symbol._fill_empty(declaration, docname, line)
return symbol
+ @staticmethod
+ def _make_cand_symbol(
+ lookup_result: SymbolLookupResult,
+ declaration: ASTDeclaration | None,
+ docname: str | None,
+ line: int | None,
+ ) -> Symbol:
+ if Symbol.debug_lookup:
+ Symbol.debug_print('begin: creating candidate symbol')
+ symbol = Symbol(
+ parent=lookup_result.parent_symbol,
+ identOrOp=lookup_result.ident_or_op,
+ templateParams=lookup_result.template_params,
+ templateArgs=lookup_result.template_args,
+ declaration=declaration,
+ docname=docname,
+ line=line,
+ )
+ if Symbol.debug_lookup:
+ Symbol.debug_print('end: creating candidate symbol')
+ return symbol
+
+ @staticmethod
+ def _handle_duplicate_declaration(
+ symbol: Symbol, cand_symbol: Symbol, declaration: ASTDeclaration
+ ) -> None:
+ if Symbol.debug_lookup:
+ Symbol.debug_indent += 1
+ Symbol.debug_print('redeclaration')
+ Symbol.debug_indent -= 1
+ Symbol.debug_indent -= 2
+ # Redeclaration of the same symbol.
+ # Let the new one be there, but raise an error to the client
+ # so it can use the real symbol as subscope.
+ # This will probably result in a duplicate id warning.
+ cand_symbol.isRedeclaration = True
+ raise _DuplicateSymbolError(symbol, declaration)
+
def merge_with(
self, other: Symbol, docnames: list[str], env: BuildEnvironment
) -> None:
@@ -859,12 +869,6 @@ def merge_with(
Symbol.debug_print('merge_with:')
assert other is not None
- def unconditional_add(self: Symbol, other_child: Symbol) -> None:
- # TODO: hmm, should we prune by docnames?
- self._children.append(other_child)
- other_child.parent = self
- other_child._assert_invariants()
-
if Symbol.debug_lookup:
Symbol.debug_indent += 1
for other_child in other._children:
@@ -874,7 +878,7 @@ def unconditional_add(self: Symbol, other_child: Symbol) -> None:
)
Symbol.debug_indent += 1
if other_child.isRedeclaration:
- unconditional_add(self, other_child)
+ self._unconditional_add(other_child)
if Symbol.debug_lookup:
Symbol.debug_print('is_redeclaration')
Symbol.debug_indent -= 1
@@ -898,7 +902,7 @@ def unconditional_add(self: Symbol, other_child: Symbol) -> None:
Symbol.debug_print('non-duplicate candidate symbols:', len(symbols))
if len(symbols) == 0:
- unconditional_add(self, other_child)
+ self._unconditional_add(other_child)
if Symbol.debug_lookup:
Symbol.debug_indent -= 1
continue
@@ -929,7 +933,7 @@ def unconditional_add(self: Symbol, other_child: Symbol) -> None:
if Symbol.debug_lookup:
Symbol.debug_indent -= 1
if our_child is None:
- unconditional_add(self, other_child)
+ self._unconditional_add(other_child)
continue
if other_child.declaration and other_child.docname in docnames:
if not our_child.declaration:
@@ -978,6 +982,12 @@ def unconditional_add(self: Symbol, other_child: Symbol) -> None:
if Symbol.debug_lookup:
Symbol.debug_indent -= 2
+ def _unconditional_add(self, other_child: Symbol) -> None:
+ # TODO: hmm, should we prune by docnames?
+ self._children.append(other_child)
+ other_child.parent = self
+ other_child._assert_invariants()
+
def add_name(
self,
nestedName: ASTNestedName,
@@ -1125,29 +1135,11 @@ def find_name(
Symbol.debug_print('recurseInAnon: ', recurseInAnon)
Symbol.debug_print('searchInSiblings: ', searchInSiblings)
- class QualifiedSymbolIsTemplateParam(Exception):
- pass
-
- def on_missing_qualified_symbol(
- parent_symbol: Symbol,
- ident_or_op: ASTIdentifier | ASTOperator,
- template_params: Any,
- template_args: ASTTemplateArgs,
- ) -> Symbol | None:
- # TODO: Maybe search without template args?
- # Though, the correct_primary_template_args does
- # that for primary templates.
- # Is there another case where it would be good?
- if parent_symbol.declaration is not None:
- if parent_symbol.declaration.objectType == 'templateParam':
- raise QualifiedSymbolIsTemplateParam
- return None
-
try:
lookup_result = self._symbol_lookup(
nestedName,
templateDecls,
- on_missing_qualified_symbol,
+ _on_missing_qualified_symbol_raise,
strict_template_param_arg_lists=False,
ancestor_lookup_type=typ,
template_shorthand=templateShorthand,
@@ -1156,7 +1148,7 @@ def on_missing_qualified_symbol(
correct_primary_template_args=False,
search_in_siblings=searchInSiblings,
)
- except QualifiedSymbolIsTemplateParam:
+ except _QualifiedSymbolIsTemplateParam:
return None, 'templateParamInQualified'
if lookup_result is None:
@@ -1210,18 +1202,10 @@ def find_declaration(
else:
template_decls = []
- def on_missing_qualified_symbol(
- parent_symbol: Symbol,
- ident_or_op: ASTIdentifier | ASTOperator,
- template_params: Any,
- template_args: ASTTemplateArgs,
- ) -> Symbol | None:
- return None
-
lookup_result = self._symbol_lookup(
nested_name,
template_decls,
- on_missing_qualified_symbol,
+ _on_missing_qualified_symbol_none,
strict_template_param_arg_lists=False,
ancestor_lookup_type=typ,
template_shorthand=templateShorthand,
@@ -1296,3 +1280,53 @@ def dump(self, indent: int) -> str:
self.to_string(indent),
*(c.dump(indent + 1) for c in self._children),
])
+
+
+def _on_missing_qualified_symbol_fresh(
+ parent_symbol: Symbol,
+ ident_or_op: ASTIdentifier | ASTOperator,
+ template_params: Any,
+ template_args: ASTTemplateArgs,
+) -> Symbol | None:
+ if Symbol.debug_lookup:
+ Symbol.debug_indent += 1
+ Symbol.debug_print('_add_symbols, on_missing_qualified_symbol:')
+ Symbol.debug_indent += 1
+ Symbol.debug_print('template_params:', template_params)
+ Symbol.debug_print('ident_or_op: ', ident_or_op)
+ Symbol.debug_print('template_args: ', template_args)
+ Symbol.debug_indent -= 2
+ return Symbol(
+ parent=parent_symbol,
+ identOrOp=ident_or_op,
+ templateParams=template_params,
+ templateArgs=template_args,
+ declaration=None,
+ docname=None,
+ line=None,
+ )
+
+
+def _on_missing_qualified_symbol_raise(
+ parent_symbol: Symbol,
+ ident_or_op: ASTIdentifier | ASTOperator,
+ template_params: Any,
+ template_args: ASTTemplateArgs,
+) -> Symbol | None:
+ # TODO: Maybe search without template args?
+ # Though, the correct_primary_template_args does
+ # that for primary templates.
+ # Is there another case where it would be good?
+ if parent_symbol.declaration is not None:
+ if parent_symbol.declaration.objectType == 'templateParam':
+ raise _QualifiedSymbolIsTemplateParam
+ return None
+
+
+def _on_missing_qualified_symbol_none(
+ parent_symbol: Symbol,
+ ident_or_op: ASTIdentifier | ASTOperator,
+ template_params: Any,
+ template_args: ASTTemplateArgs,
+) -> Symbol | None:
+ return None
diff --git a/sphinx/domains/python/_annotations.py b/sphinx/domains/python/_annotations.py
index 60def00a533..f476ff22fd4 100644
--- a/sphinx/domains/python/_annotations.py
+++ b/sphinx/domains/python/_annotations.py
@@ -6,6 +6,7 @@
import token
from collections import deque
from inspect import Parameter
+from itertools import chain, islice
from typing import TYPE_CHECKING
from docutils import nodes
@@ -316,18 +317,6 @@ def parse(self) -> None:
self.type_params.append(type_param)
def _build_identifier(self, tokens: list[Token]) -> str:
- from itertools import chain, islice
-
- def triplewise(iterable: Iterable[Token]) -> Iterator[tuple[Token, ...]]:
- # sliding_window('ABCDEFG', 4) --> ABCD BCDE CDEF DEFG
- it = iter(iterable)
- window = deque(islice(it, 3), maxlen=3)
- if len(window) == 3:
- yield tuple(window)
- for x in it:
- window.append(x)
- yield tuple(window)
-
idents: list[str] = []
tokens: Iterable[Token] = iter(tokens) # type: ignore[no-redef]
# do not format opening brackets
@@ -342,7 +331,7 @@ def triplewise(iterable: Iterable[Token]) -> Iterator[tuple[Token, ...]]:
# check the remaining tokens
stop = Token(token.ENDMARKER, '', (-1, -1), (-1, -1), '')
is_unpack_operator = False
- for tok, op, after in triplewise(chain(tokens, [stop, stop])):
+ for tok, op, after in _triplewise(chain(tokens, [stop, stop])):
ident = self._pformat_token(tok, native=is_unpack_operator)
idents.append(ident)
# determine if the next token is an unpack operator depending
@@ -628,3 +617,14 @@ def _pseudo_parse_arglist(
signode += paramlist
else:
signode += paramlist
+
+
+def _triplewise(iterable: Iterable[Token]) -> Iterator[tuple[Token, ...]]:
+    # triplewise('ABCDEFG') --> ABC BCD CDE DEF EFG
+ it = iter(iterable)
+ window = deque(islice(it, 3), maxlen=3)
+ if len(window) == 3:
+ yield tuple(window)
+ for x in it:
+ window.append(x)
+ yield tuple(window)
diff --git a/sphinx/domains/std/__init__.py b/sphinx/domains/std/__init__.py
index 9397733c814..f8f3f5513e3 100644
--- a/sphinx/domains/std/__init__.py
+++ b/sphinx/domains/std/__init__.py
@@ -1378,16 +1378,12 @@ def get_numfig_title(self, node: Node) -> str | None:
def get_enumerable_node_type(self, node: Node) -> str | None:
"""Get type of enumerable nodes."""
-
- def has_child(node: Element, cls: type) -> bool:
- return any(isinstance(child, cls) for child in node)
-
if isinstance(node, nodes.section):
return 'section'
elif (
isinstance(node, nodes.container)
and 'literal_block' in node
- and has_child(node, nodes.literal_block)
+ and _has_child(node, nodes.literal_block)
):
# given node is a code-block having caption
return 'code-block'
@@ -1440,6 +1436,10 @@ def get_full_qualified_name(self, node: Element) -> str | None:
return None
+def _has_child(node: Element, cls: type) -> bool:
+ return any(isinstance(child, cls) for child in node)
+
+
def warn_missing_reference(
app: Sphinx,
domain: Domain,
diff --git a/sphinx/ext/doctest.py b/sphinx/ext/doctest.py
index 65b9933785c..9610e24d58d 100644
--- a/sphinx/ext/doctest.py
+++ b/sphinx/ext/doctest.py
@@ -436,21 +436,9 @@ def test_doc(self, docname: str, doctree: Node) -> bool:
self.cleanup_runner._fakeout = self.setup_runner._fakeout # type: ignore[attr-defined]
if self.config.doctest_test_doctest_blocks:
-
- def condition(node: Node) -> bool:
- return (
- isinstance(node, nodes.literal_block | nodes.comment)
- and 'testnodetype' in node
- ) or isinstance(node, nodes.doctest_block)
-
+ condition = _condition_with_doctest
else:
-
- def condition(node: Node) -> bool:
- return (
- isinstance(node, nodes.literal_block | nodes.comment)
- and 'testnodetype' in node
- )
-
+ condition = _condition_default
for node in doctree.findall(condition):
if self.skipped(node): # type: ignore[arg-type]
continue
@@ -663,3 +651,14 @@ def setup(app: Sphinx) -> ExtensionMetadata:
'version': sphinx.__display_version__,
'parallel_read_safe': True,
}
+
+
+def _condition_default(node: Node) -> bool:
+ return (
+        isinstance(node, nodes.literal_block | nodes.comment)
+ and 'testnodetype' in node
+ )
+
+
+def _condition_with_doctest(node: Node) -> bool:
+ return _condition_default(node) or isinstance(node, nodes.doctest_block)
diff --git a/sphinx/transforms/post_transforms/__init__.py b/sphinx/transforms/post_transforms/__init__.py
index d9c5031b31c..1a40c3d791a 100644
--- a/sphinx/transforms/post_transforms/__init__.py
+++ b/sphinx/transforms/post_transforms/__init__.py
@@ -227,12 +227,7 @@ def _resolve_pending_any_xref(
if not results:
return None
if len(results) > 1:
-
- def stringify(name: str, node: Element) -> str:
- reftitle = node.get('reftitle', node.astext())
- return f':{name}:`{reftitle}`'
-
- candidates = ' or '.join(starmap(stringify, results))
+ candidates = ' or '.join(starmap(self._stringify, results))
msg = __(
"more than one target found for 'any' cross-reference %r: could be %s"
)
@@ -251,6 +246,11 @@ def stringify(name: str, node: Element) -> str:
new_node[0]['classes'].extend((res_domain, res_role.replace(':', '-')))
return new_node
+ @staticmethod
+ def _stringify(name: str, node: Element) -> str:
+ reftitle = node.get('reftitle', node.astext())
+ return f':{name}:`{reftitle}`'
+
def warn_missing_reference(
self,
refdoc: str,
@@ -273,21 +273,12 @@ def warn_missing_reference(
): # fmt: skip
warn = False
if self.config.nitpick_ignore_regex:
-
- def matches_ignore(entry_type: str, entry_target: str) -> bool:
- return any(
- (
- re.fullmatch(ignore_type, entry_type)
- and re.fullmatch(ignore_target, entry_target)
- )
- for ignore_type, ignore_target in self.config.nitpick_ignore_regex
- )
-
- if matches_ignore(dtype, target):
+ if _matches_ignore(self.config.nitpick_ignore_regex, dtype, target):
warn = False
# for "std" types also try without domain name
- if (not domain or domain.name == 'std') and matches_ignore(typ, target):
- warn = False
+ if not domain or domain.name == 'std':
+ if _matches_ignore(self.config.nitpick_ignore_regex, typ, target):
+ warn = False
if not warn:
return
@@ -317,6 +308,18 @@ def find_pending_xref_condition(
return None
+def _matches_ignore(
+ ignore_patterns: Sequence[tuple[str, str]], entry_type: str, entry_target: str
+) -> bool:
+ return any(
+ (
+ re.fullmatch(ignore_type, entry_type)
+ and re.fullmatch(ignore_target, entry_target)
+ )
+ for ignore_type, ignore_target in ignore_patterns
+ )
+
+
class OnlyNodeTransform(SphinxPostTransform):
default_priority = 50
diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py
index 5d9bb9bef9c..823db1d875b 100644
--- a/sphinx/writers/latex.py
+++ b/sphinx/writers/latex.py
@@ -1852,13 +1852,10 @@ def add_target(id: str) -> None:
if node.get('ismod', False):
# Detect if the previous nodes are label targets. If so, remove
# the refid thereof from node['ids'] to avoid duplicated ids.
- def has_dup_label(sib: Node | None) -> bool:
- return isinstance(sib, nodes.target) and sib.get('refid') in node['ids']
-
prev = get_prev_node(node)
- if has_dup_label(prev):
+ if self._has_dup_label(prev, node):
ids = node['ids'][:] # copy to avoid side-effects
- while has_dup_label(prev):
+ while self._has_dup_label(prev, node):
ids.remove(prev['refid']) # type: ignore[index]
prev = get_prev_node(prev) # type: ignore[arg-type]
else:
@@ -1872,6 +1869,10 @@ def has_dup_label(sib: Node | None) -> bool:
def depart_target(self, node: Element) -> None:
pass
+ @staticmethod
+ def _has_dup_label(sib: Node | None, node: Element) -> bool:
+ return isinstance(sib, nodes.target) and sib.get('refid') in node['ids']
+
def visit_attribution(self, node: Element) -> None:
self.body.append(CR + r'\begin{flushright}' + CR)
self.body.append('---')
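--
The hunks above all apply the same refactoring: a closure that was
re-created on every call becomes a bound method, a staticmethod, or a
module-level helper, defined once instead of per invocation. A minimal
sketch of the pattern, using illustrative names rather than ones from the
patch:

    # before: a fresh closure object is allocated on each call
    def parse_size(self):
        def parser():
            return self._parse_expression()
        return self._parse_expression_fallback([']'], parser)

    # after: the already-bound method is reused directly
    def parse_size(self):
        parser = self._parse_expression
        return self._parse_expression_fallback([']'], parser)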
From 50590b19ad0af43357a36289b6ffa4089782c691 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Fri, 6 Jun 2025 23:20:03 +0100
Subject: [PATCH 108/466] Mark class attributes as ``ClassVar`` (#13626)
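Annotating class-level defaults with ``ClassVar`` marks them as belonging
to the class rather than to instances, so type checkers flag accidental
instance assignments. A minimal sketch of the pattern (the attribute shown
is one of those annotated below):

    from typing import ClassVar

    class Builder:
        #: The list of MIME types of image formats supported by the builder.
        supported_image_types: ClassVar[list[str]] = []

    b = Builder()
    b.supported_image_types = ['image/png']  # now rejected by type checkers

Where an instance deliberately rebinds such an attribute, as in
``Domain.__init__``, the assignment gains a ``# type: ignore[misc]``.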
---
sphinx/builders/__init__.py | 24 +++---
sphinx/builders/manpage.py | 2 +-
sphinx/domains/__init__.py | 30 +++----
sphinx/domains/_index.py | 7 +-
sphinx/domains/c/__init__.py | 2 +-
sphinx/domains/changeset.py | 2 +-
sphinx/domains/javascript.py | 2 +-
sphinx/domains/math.py | 4 +-
sphinx/domains/python/__init__.py | 4 +-
sphinx/domains/rst.py | 2 +-
sphinx/domains/std/__init__.py | 19 ++---
sphinx/environment/adapters/indexentries.py | 1 -
sphinx/ext/autodoc/__init__.py | 14 ++--
sphinx/ext/imgmath.py | 93 +++++++++------------
sphinx/registry.py | 8 +-
15 files changed, 99 insertions(+), 115 deletions(-)
diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py
index 076fe218434..88b321868ab 100644
--- a/sphinx/builders/__init__.py
+++ b/sphinx/builders/__init__.py
@@ -48,7 +48,7 @@
if TYPE_CHECKING:
from collections.abc import Iterable, Sequence, Set
from gettext import NullTranslations
- from typing import Any, Literal
+ from typing import Any, ClassVar, Literal
from docutils.nodes import Node
@@ -70,37 +70,37 @@ class Builder:
#: The builder's name.
#: This is the value used to select builders on the command line.
- name: str = ''
+ name: ClassVar[str] = ''
#: The builder's output format, or '' if no document output is produced.
#: This is commonly the file extension, e.g. "html",
#: though any string value is accepted.
#: The builder's format string can be used by various components
#: such as :class:`.SphinxPostTransform` or extensions to determine
#: their compatibility with the builder.
- format: str = ''
+ format: ClassVar[str] = ''
#: The message emitted upon successful build completion.
#: This can be a printf-style template string
#: with the following keys: ``outdir``, ``project``
- epilog: str = ''
+ epilog: ClassVar[str] = ''
#: default translator class for the builder. This can be overridden by
#: :py:meth:`~sphinx.application.Sphinx.set_translator`.
- default_translator_class: type[nodes.NodeVisitor]
+ default_translator_class: ClassVar[type[nodes.NodeVisitor]]
# doctree versioning method
- versioning_method = 'none'
- versioning_compare = False
+ versioning_method: ClassVar[str] = 'none'
+ versioning_compare: ClassVar[bool] = False
#: Whether it is safe to make parallel :meth:`~.Builder.write_doc` calls.
- allow_parallel: bool = False
+ allow_parallel: ClassVar[bool] = False
# support translation
- use_message_catalog = True
+ use_message_catalog: ClassVar[bool] = True
#: The list of MIME types of image formats supported by the builder.
#: Image files are searched in the order in which they appear here.
- supported_image_types: list[str] = []
+ supported_image_types: ClassVar[list[str]] = []
#: The builder can produce output documents that may fetch external images when opened.
- supported_remote_images: bool = False
+ supported_remote_images: ClassVar[bool] = False
#: The file format produced by the builder allows images to be embedded using data-URIs.
- supported_data_uri_images: bool = False
+ supported_data_uri_images: ClassVar[bool] = False
srcdir = _StrPathProperty()
confdir = _StrPathProperty()
diff --git a/sphinx/builders/manpage.py b/sphinx/builders/manpage.py
index 7b62b7dca5a..feeb35c1877 100644
--- a/sphinx/builders/manpage.py
+++ b/sphinx/builders/manpage.py
@@ -37,7 +37,7 @@ class ManualPageBuilder(Builder):
epilog = __('The manual pages are in %(outdir)s.')
default_translator_class = ManualPageTranslator
- supported_image_types: list[str] = []
+ supported_image_types = []
def init(self) -> None:
if not self.config.man_pages:
diff --git a/sphinx/domains/__init__.py b/sphinx/domains/__init__.py
index 61be6049579..17aa7bdc453 100644
--- a/sphinx/domains/__init__.py
+++ b/sphinx/domains/__init__.py
@@ -14,7 +14,7 @@
if TYPE_CHECKING:
from collections.abc import Iterable, Sequence, Set
- from typing import Any
+ from typing import Any, ClassVar
from docutils import nodes
from docutils.nodes import Element, Node
@@ -82,27 +82,27 @@ class Domain:
"""
#: domain name: should be short, but unique
- name = ''
+ name: ClassVar[str] = ''
#: domain label: longer, more descriptive (used in messages)
- label = ''
+ label: ClassVar[str] = ''
#: type (usually directive) name -> ObjType instance
- object_types: dict[str, ObjType] = {}
+ object_types: ClassVar[dict[str, ObjType]] = {}
#: directive name -> directive class
- directives: dict[str, type[Directive]] = {}
+ directives: ClassVar[dict[str, type[Directive]]] = {}
#: role name -> role callable
- roles: dict[str, RoleFunction | XRefRole] = {}
+ roles: ClassVar[dict[str, RoleFunction | XRefRole]] = {}
#: a list of Index subclasses
- indices: list[type[Index]] = []
+ indices: ClassVar[list[type[Index]]] = []
#: role name -> a warning message if reference is missing
- dangling_warnings: dict[str, str] = {}
+ dangling_warnings: ClassVar[dict[str, str]] = {}
#: node_class -> (enum_node_type, title_getter)
- enumerable_nodes: dict[type[Node], tuple[str, TitleGetter | None]] = {}
+ enumerable_nodes: ClassVar[dict[type[Node], tuple[str, TitleGetter | None]]] = {}
#: data value for a fresh environment
- initial_data: dict[str, Any] = {}
+ initial_data: ClassVar[dict[str, Any]] = {}
#: data value
data: dict[str, Any]
#: data version, bump this when the format of `self.data` changes
- data_version = 0
+ data_version: ClassVar[int] = 0
def __init__(self, env: BuildEnvironment) -> None:
domain_data: dict[str, dict[str, Any]] = env.domaindata
@@ -113,10 +113,10 @@ def __init__(self, env: BuildEnvironment) -> None:
self._type2role: dict[str, str] = {}
# convert class variables to instance one (to enhance through API)
- self.object_types = dict(self.object_types)
- self.directives = dict(self.directives)
- self.roles = dict(self.roles)
- self.indices = list(self.indices)
+ self.object_types = dict(self.object_types) # type: ignore[misc]
+ self.directives = dict(self.directives) # type: ignore[misc]
+ self.roles = dict(self.roles) # type: ignore[misc]
+ self.indices = list(self.indices) # type: ignore[misc]
if self.name not in domain_data:
assert isinstance(self.initial_data, dict)
diff --git a/sphinx/domains/_index.py b/sphinx/domains/_index.py
index afb5be4007b..3845a97ba7b 100644
--- a/sphinx/domains/_index.py
+++ b/sphinx/domains/_index.py
@@ -9,6 +9,7 @@
if TYPE_CHECKING:
from collections.abc import Iterable
+ from typing import ClassVar
from sphinx.domains import Domain
@@ -73,9 +74,9 @@ class Index(ABC):
:rst:role:`ref` role.
"""
- name: str
- localname: str
- shortname: str | None = None
+ name: ClassVar[str]
+ localname: ClassVar[str]
+ shortname: ClassVar[str | None] = None
def __init__(self, domain: Domain) -> None:
if not self.name or self.localname is None:
diff --git a/sphinx/domains/c/__init__.py b/sphinx/domains/c/__init__.py
index 56ce0d170f6..80d24c1abe2 100644
--- a/sphinx/domains/c/__init__.py
+++ b/sphinx/domains/c/__init__.py
@@ -818,7 +818,7 @@ class CDomain(Domain):
'expr': CExprRole(asCode=True),
'texpr': CExprRole(asCode=False),
}
- initial_data: dict[str, Symbol | dict[str, tuple[str, str, str]]] = {
+ initial_data: ClassVar[dict[str, Symbol | dict[str, tuple[str, str, str]]]] = {
'root_symbol': Symbol(None, None, None, None, None),
'objects': {}, # fullname -> docname, node_id, objtype
}
diff --git a/sphinx/domains/changeset.py b/sphinx/domains/changeset.py
index 2d520e6ff64..d2492dcccb2 100644
--- a/sphinx/domains/changeset.py
+++ b/sphinx/domains/changeset.py
@@ -121,7 +121,7 @@ class ChangeSetDomain(Domain):
name = 'changeset'
label = 'changeset'
- initial_data: dict[str, dict[str, list[ChangeSet]]] = {
+ initial_data: ClassVar[dict[str, dict[str, list[ChangeSet]]]] = {
'changes': {}, # version -> list of ChangeSet
}
diff --git a/sphinx/domains/javascript.py b/sphinx/domains/javascript.py
index eaa69094c78..22673489d23 100644
--- a/sphinx/domains/javascript.py
+++ b/sphinx/domains/javascript.py
@@ -436,7 +436,7 @@ class JavaScriptDomain(Domain):
'attr': JSXRefRole(),
'mod': JSXRefRole(),
}
- initial_data: dict[str, dict[str, tuple[str, str]]] = {
+ initial_data: ClassVar[dict[str, dict[str, tuple[str, str]]]] = {
'objects': {}, # fullname -> docname, node_id, objtype
'modules': {}, # modname -> docname, node_id
}
diff --git a/sphinx/domains/math.py b/sphinx/domains/math.py
index 56e543917ad..d4f2606531f 100644
--- a/sphinx/domains/math.py
+++ b/sphinx/domains/math.py
@@ -15,7 +15,7 @@
if TYPE_CHECKING:
from collections.abc import Iterable, Set
- from typing import Any
+ from typing import Any, ClassVar
from docutils.nodes import Element, Node, system_message
@@ -47,7 +47,7 @@ class MathDomain(Domain):
name = 'math'
label = 'mathematics'
- initial_data: dict[str, Any] = {
+ initial_data: ClassVar[dict[str, Any]] = {
'objects': {}, # labelid -> (docname, eqno)
# backwards compatibility
'has_equations': {}, # https://github.com/sphinx-doc/sphinx/issues/13346
diff --git a/sphinx/domains/python/__init__.py b/sphinx/domains/python/__init__.py
index d70c232e725..1281b14ad58 100644
--- a/sphinx/domains/python/__init__.py
+++ b/sphinx/domains/python/__init__.py
@@ -732,7 +732,7 @@ class PythonDomain(Domain):
name = 'py'
label = 'Python'
- object_types: dict[str, ObjType] = {
+ object_types = {
'function': ObjType(_('function'), 'func', 'obj'),
'data': ObjType(_('data'), 'data', 'obj'),
'class': ObjType(_('class'), 'class', 'exc', 'obj'),
@@ -775,7 +775,7 @@ class PythonDomain(Domain):
'mod': PyXRefRole(),
'obj': PyXRefRole(),
}
- initial_data: dict[str, dict[str, tuple[Any]]] = {
+ initial_data: ClassVar[dict[str, dict[str, tuple[Any]]]] = {
'objects': {}, # fullname -> docname, objtype
'modules': {}, # modname -> docname, synopsis, platform, deprecated
}
diff --git a/sphinx/domains/rst.py b/sphinx/domains/rst.py
index 55aa3103d8a..2b486ea85ed 100644
--- a/sphinx/domains/rst.py
+++ b/sphinx/domains/rst.py
@@ -244,7 +244,7 @@ class ReSTDomain(Domain):
'dir': XRefRole(),
'role': XRefRole(),
}
- initial_data: dict[str, dict[tuple[str, str], str]] = {
+ initial_data: ClassVar[dict[str, dict[tuple[str, str], str]]] = {
'objects': {}, # fullname -> docname, objtype
}
diff --git a/sphinx/domains/std/__init__.py b/sphinx/domains/std/__init__.py
index f8f3f5513e3..04161736675 100644
--- a/sphinx/domains/std/__init__.py
+++ b/sphinx/domains/std/__init__.py
@@ -27,7 +27,6 @@
from typing import Any, ClassVar, Final
from docutils.nodes import Element, Node, system_message
- from docutils.parsers.rst import Directive
from sphinx.addnodes import desc_signature
from sphinx.application import Sphinx
@@ -36,8 +35,6 @@
from sphinx.util.typing import (
ExtensionMetadata,
OptionSpec,
- RoleFunction,
- TitleGetter,
)
logger = logging.getLogger(__name__)
@@ -725,7 +722,7 @@ class StandardDomain(Domain):
name = 'std'
label = 'Default'
- object_types: dict[str, ObjType] = {
+ object_types = {
'term': ObjType(_('glossary term'), 'term', searchprio=-1),
'token': ObjType(_('grammar token'), 'token', searchprio=-1),
'label': ObjType(_('reference label'), 'ref', 'keyword', searchprio=-1),
@@ -735,7 +732,7 @@ class StandardDomain(Domain):
'doc': ObjType(_('document'), 'doc', searchprio=-1),
}
- directives: dict[str, type[Directive]] = {
+ directives = {
'program': Program,
'cmdoption': Cmdoption, # old name for backwards compatibility
'option': Cmdoption,
@@ -744,7 +741,7 @@ class StandardDomain(Domain):
'glossary': Glossary,
'productionlist': ProductionList,
}
- roles: dict[str, RoleFunction | XRefRole] = {
+ roles = {
'option': OptionXRefRole(warn_dangling=True),
'confval': XRefRole(warn_dangling=True),
'envvar': EnvVarXRefRole(),
@@ -780,7 +777,7 @@ class StandardDomain(Domain):
}
# labelname -> docname, sectionname
- _virtual_doc_names: dict[str, tuple[str, str]] = {
+ _virtual_doc_names: Final = {
'genindex': ('genindex', _('Index')),
'modindex': ('py-modindex', _('Module Index')),
'search': ('search', _('Search Page')),
@@ -795,7 +792,7 @@ class StandardDomain(Domain):
}
# node_class -> (figtype, title_getter)
- enumerable_nodes: dict[type[Node], tuple[str, TitleGetter | None]] = {
+ enumerable_nodes = {
nodes.figure: ('figure', None),
nodes.table: ('table', None),
nodes.container: ('code-block', None),
@@ -805,9 +802,9 @@ def __init__(self, env: BuildEnvironment) -> None:
super().__init__(env)
# set up enumerable nodes
- self.enumerable_nodes = copy(
- self.enumerable_nodes
- ) # create a copy for this instance
+
+ # create a copy for this instance
+ self.enumerable_nodes = copy(self.enumerable_nodes) # type: ignore[misc]
for node, settings in env._registry.enumerable_nodes.items():
self.enumerable_nodes[node] = settings
diff --git a/sphinx/environment/adapters/indexentries.py b/sphinx/environment/adapters/indexentries.py
index e9e6e408b6c..0428e488308 100644
--- a/sphinx/environment/adapters/indexentries.py
+++ b/sphinx/environment/adapters/indexentries.py
@@ -50,7 +50,6 @@
class IndexEntries:
def __init__(self, env: BuildEnvironment) -> None:
self.env = env
- self.builder: Builder
def create_index(
self,
diff --git a/sphinx/ext/autodoc/__init__.py b/sphinx/ext/autodoc/__init__.py
index 560b6905208..6d20e4007b2 100644
--- a/sphinx/ext/autodoc/__init__.py
+++ b/sphinx/ext/autodoc/__init__.py
@@ -363,15 +363,15 @@ class Documenter:
#: name by which the directive is called (auto...) and the default
#: generated directive name
- objtype = 'object'
+ objtype: ClassVar = 'object'
#: indentation by which to indent the directive content
- content_indent = ' '
+ content_indent: ClassVar = ' '
#: priority if multiple documenters return True from can_document_member
- priority = 0
+ priority: ClassVar = 0
#: order if autodoc_member_order is set to 'groupwise'
- member_order = 0
+ member_order: ClassVar = 0
#: true if the generated content may contain titles
- titles_allowed = True
+ titles_allowed: ClassVar = True
option_spec: ClassVar[OptionSpec] = {
'no-index': bool_option,
@@ -2407,11 +2407,11 @@ def import_object(self, raiseerror: bool = False) -> bool:
obj = self.parent.__dict__.get(self.object_name, self.object)
if inspect.isstaticmethod(obj, cls=self.parent, name=self.object_name):
# document static members before regular methods
- self.member_order -= 1
+ self.member_order -= 1 # type: ignore[misc]
elif inspect.isclassmethod(obj):
# document class methods before static methods as
# they usually behave as alternative constructors
- self.member_order -= 2
+ self.member_order -= 2 # type: ignore[misc]
return ret
def format_args(self, **kwargs: Any) -> str:
diff --git a/sphinx/ext/imgmath.py b/sphinx/ext/imgmath.py
index 5b58db7b084..a8f88c62a1c 100644
--- a/sphinx/ext/imgmath.py
+++ b/sphinx/ext/imgmath.py
@@ -32,7 +32,6 @@
from docutils.nodes import Element
from sphinx.application import Sphinx
- from sphinx.builders import Builder
from sphinx.config import Config
from sphinx.util._pathlib import _StrPath
from sphinx.util.typing import ExtensionMetadata
@@ -116,36 +115,23 @@ def generate_latex_macro(
return LaTeXRenderer([templates_path]).render(template_name + '.jinja', variables)
-def ensure_tempdir(builder: Builder) -> Path:
- """Create temporary directory.
-
- use only one tempdir per build -- the use of a directory is cleaner
- than using temporary files, since we can clean up everything at once
- just removing the whole directory (see cleanup_tempdir)
- """
- if not hasattr(builder, '_imgmath_tempdir'):
- builder._imgmath_tempdir = Path(tempfile.mkdtemp()) # type: ignore[attr-defined]
-
- return builder._imgmath_tempdir # type: ignore[attr-defined]
-
-
-def compile_math(latex: str, builder: Builder) -> Path:
+def compile_math(latex: str, *, config: Config) -> Path:
"""Compile LaTeX macros for math to DVI."""
- tempdir = ensure_tempdir(builder)
+ tempdir = Path(tempfile.mkdtemp(suffix='-sphinx-imgmath'))
filename = tempdir / 'math.tex'
with open(filename, 'w', encoding='utf-8') as f:
f.write(latex)
- imgmath_latex_name = os.path.basename(builder.config.imgmath_latex)
+ imgmath_latex_name = os.path.basename(config.imgmath_latex)
# build latex command; old versions of latex don't have the
# --output-directory option, so we have to manually chdir to the
# temp dir to run it.
- command = [builder.config.imgmath_latex]
+ command = [config.imgmath_latex]
if imgmath_latex_name != 'tectonic':
command.append('--interaction=nonstopmode')
# add custom args from the config file
- command.extend(builder.config.imgmath_latex_args)
+ command.extend(config.imgmath_latex_args)
command.append('math.tex')
try:
@@ -162,7 +148,7 @@ def compile_math(latex: str, builder: Builder) -> Path:
'LaTeX command %r cannot be run (needed for math '
'display), check the imgmath_latex setting'
),
- builder.config.imgmath_latex,
+ config.imgmath_latex,
)
raise InvokeError from exc
except CalledProcessError as exc:
@@ -191,19 +177,19 @@ def convert_dvi_to_image(command: list[str], name: str) -> tuple[str, str]:
raise MathExtError(msg, exc.stderr, exc.stdout) from exc
-def convert_dvi_to_png(dvipath: Path, builder: Builder, out_path: Path) -> int | None:
+def convert_dvi_to_png(dvipath: Path, out_path: Path, *, config: Config) -> int | None:
"""Convert DVI file to PNG image."""
name = 'dvipng'
- command = [builder.config.imgmath_dvipng, '-o', out_path, '-T', 'tight', '-z9']
- command.extend(builder.config.imgmath_dvipng_args)
- if builder.config.imgmath_use_preview:
+ command = [config.imgmath_dvipng, '-o', out_path, '-T', 'tight', '-z9']
+ command.extend(config.imgmath_dvipng_args)
+ if config.imgmath_use_preview:
command.append('--depth')
command.append(dvipath)
stdout, _stderr = convert_dvi_to_image(command, name)
depth = None
- if builder.config.imgmath_use_preview:
+ if config.imgmath_use_preview:
for line in stdout.splitlines():
matched = depth_re.match(line)
if matched:
@@ -214,17 +200,17 @@ def convert_dvi_to_png(dvipath: Path, builder: Builder, out_path: Path) -> int |
return depth
-def convert_dvi_to_svg(dvipath: Path, builder: Builder, out_path: Path) -> int | None:
+def convert_dvi_to_svg(dvipath: Path, out_path: Path, *, config: Config) -> int | None:
"""Convert DVI file to SVG image."""
name = 'dvisvgm'
- command = [builder.config.imgmath_dvisvgm, '-o', out_path]
- command.extend(builder.config.imgmath_dvisvgm_args)
+ command = [config.imgmath_dvisvgm, '-o', out_path]
+ command.extend(config.imgmath_dvisvgm_args)
command.append(dvipath)
_stdout, stderr = convert_dvi_to_image(command, name)
depth = None
- if builder.config.imgmath_use_preview:
+ if config.imgmath_use_preview:
for line in stderr.splitlines(): # not stdout !
matched = depthsvg_re.match(line)
if matched:
@@ -236,8 +222,7 @@ def convert_dvi_to_svg(dvipath: Path, builder: Builder, out_path: Path) -> int |
def render_math(
- self: HTML5Translator,
- math: str,
+ self: HTML5Translator, math: str, *, config: Config
) -> tuple[_StrPath | None, int | None]:
"""Render the LaTeX math expression *math* using latex and dvipng or
dvisvgm.
@@ -252,14 +237,12 @@ def render_math(
docs successfully). If the programs are there, however, they may not fail
since that indicates a problem in the math source.
"""
- image_format = self.builder.config.imgmath_image_format.lower()
+ image_format = config.imgmath_image_format.lower()
if image_format not in SUPPORT_FORMAT:
unsupported_format_msg = 'imgmath_image_format must be either "png" or "svg"'
raise MathExtError(unsupported_format_msg)
- latex = generate_latex_macro(
- image_format, math, self.builder.config, self.builder.confdir
- )
+ latex = generate_latex_macro(image_format, math, config, self.builder.confdir)
filename = (
f'{sha1(latex.encode(), usedforsecurity=False).hexdigest()}.{image_format}'
@@ -281,7 +264,7 @@ def render_math(
# .tex -> .dvi
try:
- dvipath = compile_math(latex, self.builder)
+ dvipath = compile_math(latex, config=config)
except InvokeError:
self.builder._imgmath_warned_latex = True # type: ignore[attr-defined]
return None, None
@@ -289,9 +272,9 @@ def render_math(
# .dvi -> .png/.svg
try:
if image_format == 'png':
- depth = convert_dvi_to_png(dvipath, self.builder, generated_path)
+ depth = convert_dvi_to_png(dvipath, generated_path, config=config)
elif image_format == 'svg':
- depth = convert_dvi_to_svg(dvipath, self.builder, generated_path)
+ depth = convert_dvi_to_svg(dvipath, generated_path, config=config)
except InvokeError:
self.builder._imgmath_warned_image_translator = True # type: ignore[attr-defined]
return None, None
@@ -315,26 +298,25 @@ def clean_up_files(app: Sphinx, exc: Exception) -> None:
if exc:
return
- if hasattr(app.builder, '_imgmath_tempdir'):
- with contextlib.suppress(Exception):
- shutil.rmtree(app.builder._imgmath_tempdir)
-
- if app.builder.config.imgmath_embed:
+ if app.config.imgmath_embed:
# in embed mode, the images are still generated in the math output dir
# to be shared across workers, but are not useful to the final document
with contextlib.suppress(Exception):
shutil.rmtree(app.builder.outdir / app.builder.imagedir / 'math')
-def get_tooltip(self: HTML5Translator, node: Element) -> str:
- if self.builder.config.imgmath_add_tooltips:
+def get_tooltip(self: HTML5Translator, node: Element, *, config: Config) -> str:
+ if config.imgmath_add_tooltips:
return f' alt="{self.encode(node.astext()).strip()}"'
return ''
def html_visit_math(self: HTML5Translator, node: nodes.math) -> None:
+ config = self.builder.config
try:
- rendered_path, depth = render_math(self, '$' + node.astext() + '$')
+ rendered_path, depth = render_math(
+ self, '$' + node.astext() + '$', config=config
+ )
except MathExtError as exc:
msg = str(exc)
sm = nodes.system_message(
@@ -350,27 +332,27 @@ def html_visit_math(self: HTML5Translator, node: nodes.math) -> None:
             f'<span class="math">{self.encode(node.astext()).strip()}</span>'
         )
else:
- if self.builder.config.imgmath_embed:
- image_format = self.builder.config.imgmath_image_format.lower()
+ if config.imgmath_embed:
+ image_format = config.imgmath_image_format.lower()
img_src = render_maths_to_base64(image_format, rendered_path)
else:
bname = os.path.basename(rendered_path)
relative_path = Path(self.builder.imgpath, 'math', bname)
img_src = relative_path.as_posix()
align = f' style="vertical-align: {-depth:d}px"' if depth is not None else ''
-        self.body.append(
-            f'<img class="math" src="{img_src}"{get_tooltip(self, node)}{align}/>'
-        )
+        tooltip = get_tooltip(self, node, config=config)
+        self.body.append(f'<img class="math" src="{img_src}"{tooltip}{align}/>')
raise nodes.SkipNode
def html_visit_displaymath(self: HTML5Translator, node: nodes.math_block) -> None:
+ config = self.builder.config
if node.get('no-wrap', node.get('nowrap', False)):
latex = node.astext()
else:
latex = wrap_displaymath(node.astext(), None, False)
try:
- rendered_path, _depth = render_math(self, latex)
+ rendered_path, _depth = render_math(self, latex, config=config)
except MathExtError as exc:
msg = str(exc)
sm = nodes.system_message(
@@ -393,14 +375,15 @@ def html_visit_displaymath(self: HTML5Translator, node: nodes.math_block) -> Non
             f'<span class="math">{self.encode(node.astext()).strip()}</span></p>\n'
)
else:
- if self.builder.config.imgmath_embed:
- image_format = self.builder.config.imgmath_image_format.lower()
+ if config.imgmath_embed:
+ image_format = config.imgmath_image_format.lower()
img_src = render_maths_to_base64(image_format, rendered_path)
else:
bname = os.path.basename(rendered_path)
relative_path = Path(self.builder.imgpath, 'math', bname)
img_src = relative_path.as_posix()
-        self.body.append(f'<img src="{img_src}"{get_tooltip(self, node)}/></p>\n')
+        tooltip = get_tooltip(self, node, config=config)
+        self.body.append(f'<img src="{img_src}"{tooltip}/></p>\n')
raise nodes.SkipNode
diff --git a/sphinx/registry.py b/sphinx/registry.py
index ce52a03b323..973aa6dfed4 100644
--- a/sphinx/registry.py
+++ b/sphinx/registry.py
@@ -410,7 +410,9 @@ def add_translation_handlers(
% (builder_name, handlers),
) from exc
- def get_translator_class(self, builder: Builder) -> type[nodes.NodeVisitor]:
+ def get_translator_class(
+ self, builder: type[Builder] | Builder
+ ) -> type[nodes.NodeVisitor]:
try:
return self.translators[builder.name]
except KeyError:
@@ -420,7 +422,9 @@ def get_translator_class(self, builder: Builder) -> type[nodes.NodeVisitor]:
msg = f'translator not found for {builder.name}'
raise AttributeError(msg) from err
- def create_translator(self, builder: Builder, *args: Any) -> nodes.NodeVisitor:
+ def create_translator(
+ self, builder: type[Builder] | Builder, *args: Any
+ ) -> nodes.NodeVisitor:
translator_class = self.get_translator_class(builder)
translator = translator_class(*args)
From 0b0c039c02091300a616b8151299265f67aaf81c Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sat, 7 Jun 2025 01:34:48 +0100
Subject: [PATCH 109/466] Deprecate remaining public ``app`` attributes
(#13627)
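The attributes are kept behind read-only properties that emit a
deprecation warning on access and forward to the private ``_app``
reference; ``Builder``, for example:

    @property
    def app(self) -> Sphinx:
        cls_name = self.__class__.__qualname__
        _deprecation_warning(__name__, f'{cls_name}.app', remove=(10, 0))
        return self._app

Internal call sites switch to ``self._app`` so Sphinx itself no longer
triggers the warning.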
---
CHANGES.rst | 5 +++
doc/extdev/deprecated.rst | 25 +++++++++++++
sphinx/builders/__init__.py | 65 ++++++++++++++++++++--------------
sphinx/environment/__init__.py | 26 +++++++++++---
sphinx/events.py | 16 ++++++---
sphinx/transforms/__init__.py | 3 ++
sphinx/util/logging.py | 20 +++++------
tests/test_events.py | 2 +-
tests/test_versioning.py | 2 +-
9 files changed, 116 insertions(+), 48 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 40d6984ca55..71ffa3c1b0d 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -10,6 +10,11 @@ Incompatible changes
Deprecated
----------
+* #13627: Deprecate remaining public :py:attr:`!.app` attributes,
+  including ``builder.app``, ``env.app``, ``events.app``,
+  and ``SphinxTransform.app``.
+  Patch by Adam Turner.
+
Features added
--------------
diff --git a/doc/extdev/deprecated.rst b/doc/extdev/deprecated.rst
index ad05b054d99..898ec49c8fc 100644
--- a/doc/extdev/deprecated.rst
+++ b/doc/extdev/deprecated.rst
@@ -22,6 +22,31 @@ The following is a list of deprecated interfaces.
- Removed
- Alternatives
+ * - ``sphinx.builders.Builder.app``
+ - 8.3
+ - 10.0
+ - N/A
+
+ * - ``sphinx.environment.BuildEnvironment.app``
+ - 8.3
+ - 10.0
+ - N/A
+
+ * - ``sphinx.transforms.Transform.app``
+ - 8.3
+ - 10.0
+ - N/A
+
+ * - ``sphinx.transforms.post_transforms.SphinxPostTransform.app``
+ - 8.3
+ - 10.0
+ - N/A
+
+ * - ``sphinx.events.EventManager.app``
+ - 8.3
+ - 10.0
+ - N/A
+
* - ``sphinx.builders.singlehtml.SingleFileHTMLBuilder.fix_refuris``
- 8.2
- 10.0
diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py
index 88b321868ab..fb8bd757864 100644
--- a/sphinx/builders/__init__.py
+++ b/sphinx/builders/__init__.py
@@ -14,6 +14,7 @@
from docutils.utils import DependencyList
from sphinx._cli.util.colour import bold
+from sphinx.deprecation import _deprecation_warning
from sphinx.environment import (
CONFIG_CHANGED_REASON,
CONFIG_OK,
@@ -114,7 +115,7 @@ def __init__(self, app: Sphinx, env: BuildEnvironment) -> None:
self.doctreedir = app.doctreedir
ensuredir(self.doctreedir)
- self.app: Sphinx = app
+ self._app: Sphinx = app
self.env: BuildEnvironment = env
self.env.set_versioning_method(self.versioning_method, self.versioning_compare)
self.events: EventManager = app.events
@@ -136,9 +137,15 @@ def __init__(self, app: Sphinx, env: BuildEnvironment) -> None:
self.parallel_ok = False
self.finish_tasks: Any = None
+ @property
+ def app(self) -> Sphinx:
+ cls_name = self.__class__.__qualname__
+ _deprecation_warning(__name__, f'{cls_name}.app', remove=(10, 0))
+ return self._app
+
@property
def _translator(self) -> NullTranslations | None:
- return self.app.translator
+ return self._app.translator
def get_translator_class(self, *args: Any) -> type[nodes.NodeVisitor]:
"""Return a class of translator."""
@@ -258,7 +265,7 @@ def cat2relpath(cat: CatalogInfo, srcdir: Path = self.srcdir) -> str:
__('writing output... '),
'darkgreen',
len(catalogs),
- self.app.verbosity,
+ self._app.verbosity,
stringify_func=cat2relpath,
):
catalog.write_mo(
@@ -397,14 +404,14 @@ def build(
# while reading, collect all warnings from docutils
with (
nullcontext()
- if self.app._exception_on_warning
+ if self._app._exception_on_warning
else logging.pending_warnings()
):
updated_docnames = set(self.read())
doccount = len(updated_docnames)
logger.info(bold(__('looking for now-outdated files... ')), nonl=True)
- updated_docnames.update(self.env.check_dependents(self.app, updated_docnames))
+ updated_docnames.update(self.env.check_dependents(self._app, updated_docnames))
outdated = len(updated_docnames) - doccount
if outdated:
logger.info(__('%d found'), outdated)
@@ -422,14 +429,14 @@ def build(
pickle.dump(self.env, f, pickle.HIGHEST_PROTOCOL)
# global actions
- self.app.phase = BuildPhase.CONSISTENCY_CHECK
+ self._app.phase = BuildPhase.CONSISTENCY_CHECK
with progress_message(__('checking consistency')):
self.env.check_consistency()
else:
if method == 'update' and not docnames:
logger.info(bold(__('no targets are out of date.')))
- self.app.phase = BuildPhase.RESOLVING
+ self._app.phase = BuildPhase.RESOLVING
# filter "docnames" (list of outdated files) by the updated
# found_docs of the environment; this will remove docs that
@@ -438,14 +445,14 @@ def build(
docnames = set(docnames) & self.env.found_docs
# determine if we can write in parallel
- if parallel_available and self.app.parallel > 1 and self.allow_parallel:
- self.parallel_ok = self.app.is_parallel_allowed('write')
+ if parallel_available and self._app.parallel > 1 and self.allow_parallel:
+ self.parallel_ok = self._app.is_parallel_allowed('write')
else:
self.parallel_ok = False
# create a task executor to use for misc. "finish-up" tasks
# if self.parallel_ok:
- # self.finish_tasks = ParallelTasks(self.app.parallel)
+ # self.finish_tasks = ParallelTasks(self._app.parallel)
# else:
# for now, just execute them serially
self.finish_tasks = SerialTasks()
@@ -508,13 +515,13 @@ def read(self) -> list[str]:
self.events.emit('env-before-read-docs', self.env, docnames)
# check if we should do parallel or serial read
- if parallel_available and self.app.parallel > 1:
- par_ok = self.app.is_parallel_allowed('read')
+ if parallel_available and self._app.parallel > 1:
+ par_ok = self._app.is_parallel_allowed('read')
else:
par_ok = False
if par_ok:
- self._read_parallel(docnames, nproc=self.app.parallel)
+ self._read_parallel(docnames, nproc=self._app.parallel)
else:
self._read_serial(docnames)
@@ -576,7 +583,7 @@ def _read_serial(self, docnames: list[str]) -> None:
__('reading sources... '),
'purple',
len(docnames),
- self.app.verbosity,
+ self._app.verbosity,
):
# remove all inventory entries for that file
self.events.emit('env-purge-doc', self.env, docname)
@@ -589,7 +596,11 @@ def _read_parallel(self, docnames: list[str], nproc: int) -> None:
# create a status_iterator to step progressbar after reading a document
# (see: ``merge()`` function)
progress = status_iterator(
- chunks, __('reading sources... '), 'purple', len(chunks), self.app.verbosity
+ chunks,
+ __('reading sources... '),
+ 'purple',
+ len(chunks),
+ self._app.verbosity,
)
# clear all outdated docs at once
@@ -598,7 +609,7 @@ def _read_parallel(self, docnames: list[str], nproc: int) -> None:
self.env.clear_doc(docname)
def read_process(docs: list[str]) -> bytes:
- self.env.app = self.app
+ self.env._app = self._app
for docname in docs:
self.read_doc(docname, _cache=False)
# allow pickling self to send it back
@@ -606,7 +617,7 @@ def read_process(docs: list[str]) -> bytes:
def merge(docs: list[str], otherenv: bytes) -> None:
env = pickle.loads(otherenv)
- self.env.merge_info_from(docs, env, self.app)
+ self.env.merge_info_from(docs, env, self._app)
next(progress)
@@ -630,8 +641,8 @@ def read_doc(self, docname: str, *, _cache: bool = True) -> None:
env.note_dependency(docutils_conf)
filename = str(env.doc2path(docname))
- filetype = get_filetype(self.app.config.source_suffix, filename)
- publisher = self.env._registry.get_publisher(self.app, filetype)
+ filetype = get_filetype(self._app.config.source_suffix, filename)
+ publisher = self.env._registry.get_publisher(self._app, filetype)
self.env.current_document._parser = publisher.parser
# record_dependencies is mutable even though it is in settings,
# explicitly re-initialise for each document
@@ -744,14 +755,14 @@ def write_documents(self, docnames: Set[str]) -> None:
if self.parallel_ok:
# number of subprocesses is parallel-1 because the main process
# is busy loading doctrees and doing write_doc_serialized()
- self._write_parallel(sorted_docnames, nproc=self.app.parallel - 1)
+ self._write_parallel(sorted_docnames, nproc=self._app.parallel - 1)
else:
self._write_serial(sorted_docnames)
def _write_serial(self, docnames: Sequence[str]) -> None:
with (
nullcontext()
- if self.app._exception_on_warning
+ if self._app._exception_on_warning
else logging.pending_warnings()
):
for docname in status_iterator(
@@ -759,19 +770,19 @@ def _write_serial(self, docnames: Sequence[str]) -> None:
__('writing output... '),
'darkgreen',
len(docnames),
- self.app.verbosity,
+ self._app.verbosity,
):
- _write_docname(docname, app=self.app, env=self.env, builder=self)
+ _write_docname(docname, app=self._app, env=self.env, builder=self)
def _write_parallel(self, docnames: Sequence[str], nproc: int) -> None:
def write_process(docs: list[tuple[str, nodes.document]]) -> None:
- self.app.phase = BuildPhase.WRITING
+ self._app.phase = BuildPhase.WRITING
for docname, doctree in docs:
self.write_doc(docname, doctree)
# warm up caches/compile templates using the first document
firstname, docnames = docnames[0], docnames[1:]
- _write_docname(firstname, app=self.app, env=self.env, builder=self)
+ _write_docname(firstname, app=self._app, env=self.env, builder=self)
tasks = ParallelTasks(nproc)
chunks = make_chunks(docnames, nproc)
@@ -783,13 +794,13 @@ def write_process(docs: list[tuple[str, nodes.document]]) -> None:
__('writing output... '),
'darkgreen',
len(chunks),
- self.app.verbosity,
+ self._app.verbosity,
)
def on_chunk_done(args: list[tuple[str, nodes.document]], result: None) -> None:
next(progress)
- self.app.phase = BuildPhase.RESOLVING
+ self._app.phase = BuildPhase.RESOLVING
for chunk in chunks:
arg = []
for docname in chunk:
diff --git a/sphinx/environment/__init__.py b/sphinx/environment/__init__.py
index 2f3e25ac477..7ad7298c22b 100644
--- a/sphinx/environment/__init__.py
+++ b/sphinx/environment/__init__.py
@@ -11,6 +11,7 @@
from typing import TYPE_CHECKING
from sphinx import addnodes
+from sphinx.deprecation import _deprecation_warning
from sphinx.domains._domains_container import _DomainsContainer
from sphinx.environment.adapters import toctree as toctree_adapters
from sphinx.errors import (
@@ -107,7 +108,7 @@ class BuildEnvironment:
doctreedir = _StrPathProperty()
def __init__(self, app: Sphinx) -> None:
- self.app: Sphinx = app
+ self._app: Sphinx = app
self.doctreedir = app.doctreedir
self.srcdir = app.srcdir
self.config: Config = None # type: ignore[assignment]
@@ -237,7 +238,7 @@ def __getstate__(self) -> dict[str, Any]:
"""Obtains serializable data for pickling."""
__dict__ = self.__dict__.copy()
# clear unpickleable attributes
- __dict__.update(app=None, domains=None, events=None)
+ __dict__.update(_app=None, domains=None, events=None)
# clear in-memory doctree caches, to reduce memory consumption and
# ensure that, upon restoring the state, the most recent pickled files
# on the disk are used instead of those from a possibly outdated state
@@ -257,7 +258,7 @@ def setup(self, app: Sphinx) -> None:
if self.project:
app.project.restore(self.project)
- self.app = app
+ self._app = app
self.doctreedir = app.doctreedir
self.events = app.events
self.srcdir = app.srcdir
@@ -284,13 +285,28 @@ def setup(self, app: Sphinx) -> None:
# initialize settings
self._update_settings(app.config)
+ @property
+ def app(self) -> Sphinx:
+ _deprecation_warning(__name__, 'BuildEnvironment.app', remove=(10, 0))
+ return self._app
+
+ @app.setter
+ def app(self, app: Sphinx) -> None:
+ _deprecation_warning(__name__, 'BuildEnvironment.app', remove=(10, 0))
+ self._app = app
+
+ @app.deleter
+ def app(self) -> None:
+ _deprecation_warning(__name__, 'BuildEnvironment.app', remove=(10, 0))
+ del self._app
+
@property
def _registry(self) -> SphinxComponentRegistry:
- return self.app.registry
+ return self._app.registry
@property
def _tags(self) -> Tags:
- return self.app.tags
+ return self._app.tags
@staticmethod
def _config_status(
diff --git a/sphinx/events.py b/sphinx/events.py
index 571ad143269..e408d80b796 100644
--- a/sphinx/events.py
+++ b/sphinx/events.py
@@ -9,6 +9,7 @@
from operator import attrgetter
from typing import TYPE_CHECKING, NamedTuple, overload
+from sphinx.deprecation import _deprecation_warning
from sphinx.errors import ExtensionError, SphinxError
from sphinx.locale import __
from sphinx.util import logging
@@ -66,17 +67,25 @@ class EventManager:
"""Event manager for Sphinx."""
def __init__(self, app: Sphinx) -> None:
- self.app = app
+ self._app = app
self.events = core_events.copy()
self.listeners: dict[str, list[EventListener]] = defaultdict(list)
self.next_listener_id = 0
+ # pass through errors for debugging.
+ self._reraise_errors: bool = app.pdb
+
def add(self, name: str) -> None:
"""Register a custom Sphinx event."""
if name in self.events:
raise ExtensionError(__('Event %r already present') % name)
self.events[name] = ''
+ @property
+ def app(self) -> Sphinx:
+ _deprecation_warning(__name__, 'EventManager.app', remove=(10, 0))
+ return self._app
+
# ---- Core events -------------------------------------------------------
@overload
@@ -401,15 +410,14 @@ def emit(
listeners = sorted(self.listeners[name], key=attrgetter('priority'))
for listener in listeners:
try:
- results.append(listener.handler(self.app, *args))
+ results.append(listener.handler(self._app, *args))
except allowed_exceptions:
# pass through the errors specified as *allowed_exceptions*
raise
except SphinxError:
raise
except Exception as exc:
- if self.app.pdb:
- # Just pass through the error, so that it can be debugged.
+ if self._reraise_errors:
raise
modname = safe_getattr(listener.handler, '__module__', None)
raise ExtensionError(
diff --git a/sphinx/transforms/__init__.py b/sphinx/transforms/__init__.py
index c6620078e36..e76c9e98331 100644
--- a/sphinx/transforms/__init__.py
+++ b/sphinx/transforms/__init__.py
@@ -15,6 +15,7 @@
from docutils.utils.smartquotes import smartchars
from sphinx import addnodes
+from sphinx.deprecation import _deprecation_warning
from sphinx.locale import _, __
from sphinx.util import logging
from sphinx.util.docutils import new_document
@@ -62,6 +63,8 @@ class SphinxTransform(Transform):
@property
def app(self) -> Sphinx:
"""Reference to the :class:`.Sphinx` object."""
+ cls_name = self.__class__.__qualname__
+ _deprecation_warning(__name__, f'{cls_name}.app', remove=(10, 0))
return self.env.app
@property
diff --git a/sphinx/util/logging.py b/sphinx/util/logging.py
index fab8acc3b90..d5392936334 100644
--- a/sphinx/util/logging.py
+++ b/sphinx/util/logging.py
@@ -430,7 +430,7 @@ class WarningSuppressor(logging.Filter):
"""Filter logs by `suppress_warnings`."""
def __init__(self, app: Sphinx) -> None:
- self.app = app
+ self._app = app
super().__init__()
def filter(self, record: logging.LogRecord) -> bool:
@@ -438,7 +438,7 @@ def filter(self, record: logging.LogRecord) -> bool:
subtype = getattr(record, 'subtype', '')
try:
- suppress_warnings = self.app.config.suppress_warnings
+ suppress_warnings = self._app.config.suppress_warnings
except AttributeError:
# config is not initialized yet (ex. in conf.py)
suppress_warnings = ()
@@ -446,7 +446,7 @@ def filter(self, record: logging.LogRecord) -> bool:
if is_suppressed_warning(type, subtype, suppress_warnings):
return False
else:
- self.app._warncount += 1
+ self._app._warncount += 1
return True
@@ -496,7 +496,7 @@ class SphinxLogRecordTranslator(logging.Filter):
LogRecordClass: type[logging.LogRecord]
def __init__(self, app: Sphinx) -> None:
- self.app = app
+ self._app = app
super().__init__()
def filter(self, record: SphinxWarningLogRecord) -> bool: # type: ignore[override]
@@ -509,15 +509,15 @@ def filter(self, record: SphinxWarningLogRecord) -> bool: # type: ignore[overri
docname, lineno = location
if docname:
if lineno:
- record.location = f'{self.app.env.doc2path(docname)}:{lineno}'
+ record.location = f'{self._app.env.doc2path(docname)}:{lineno}'
else:
- record.location = f'{self.app.env.doc2path(docname)}'
+ record.location = f'{self._app.env.doc2path(docname)}'
else:
record.location = None
elif isinstance(location, nodes.Node):
record.location = get_node_location(location)
elif location and ':' not in location:
- record.location = f'{self.app.env.doc2path(location)}'
+ record.location = f'{self._app.env.doc2path(location)}'
return True
@@ -537,7 +537,7 @@ def filter(self, record: SphinxWarningLogRecord) -> bool: # type: ignore[overri
ret = super().filter(record)
try:
- show_warning_types = self.app.config.show_warning_types
+ show_warning_types = self._app.config.show_warning_types
except AttributeError:
# config is not initialized yet (ex. in conf.py)
show_warning_types = False
@@ -602,10 +602,10 @@ class LastMessagesWriter:
"""Stream writer storing last 10 messages in memory to save trackback"""
def __init__(self, app: Sphinx, stream: IO[str]) -> None:
- self.app = app
+ self._app = app
def write(self, data: str) -> None:
- self.app.messagelog.append(data)
+ self._app.messagelog.append(data)
def setup(app: Sphinx, status: IO[str], warning: IO[str]) -> None:
diff --git a/tests/test_events.py b/tests/test_events.py
index 412116c9f4b..50b7bb5fd76 100644
--- a/tests/test_events.py
+++ b/tests/test_events.py
@@ -18,7 +18,7 @@
def test_event_priority() -> None:
result = []
- app = object() # pass a dummy object as an app
+ app = SimpleNamespace(pdb=False) # pass a dummy object as an app
events = EventManager(app) # type: ignore[arg-type]
events.connect('builder-inited', lambda app: result.append(1), priority=500)
events.connect('builder-inited', lambda app: result.append(2), priority=500)
diff --git a/tests/test_versioning.py b/tests/test_versioning.py
index 58e3b224c58..7b27106b98e 100644
--- a/tests/test_versioning.py
+++ b/tests/test_versioning.py
@@ -20,7 +20,7 @@ def _setup_module(rootdir, sphinx_test_tempdir):
if not srcdir.exists():
shutil.copytree(rootdir / 'test-versioning', srcdir)
app = SphinxTestApp(srcdir=srcdir)
- app.builder.env.app = app
+ app.builder.env._app = app
app.connect('doctree-resolved', on_doctree_resolved)
app.build()
original = doctrees['original']
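The hunks above all apply one pattern: the application object moves to a private ``_app`` attribute, and the old public ``app`` name becomes a property that emits a deprecation warning before delegating. A minimal, self-contained sketch of that pattern (``SomeBuilder`` and the simplified ``_deprecation_warning`` helper below are illustrative stand-ins, not Sphinx code)::

    import warnings


    class RemovedInSphinx10Warning(DeprecationWarning):
        """Illustrative stand-in for Sphinx's removal-warning class."""


    def _deprecation_warning(attr_name: str) -> None:
        # simplified stand-in for sphinx.deprecation._deprecation_warning
        msg = f'{attr_name} is deprecated and slated for removal in Sphinx 10'
        warnings.warn(msg, RemovedInSphinx10Warning, stacklevel=3)


    class SomeBuilder:
        """Invented class demonstrating the rename-plus-property pattern."""

        def __init__(self, app: object) -> None:
            self._app = app  # internal code now uses the private name

        @property
        def app(self) -> object:
            # the old public attribute still resolves, but warns on access
            _deprecation_warning(f'{self.__class__.__qualname__}.app')
            return self._app

Internal call sites switch to ``self._app`` so that Sphinx itself never triggers the warning; only external users of the public attribute see it.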
From 8f18b573d6cb6bbd1f39970a112b9d4c2ece292e Mon Sep 17 00:00:00 2001
From: Jean-François B. <2589111+jfbu@users.noreply.github.com>
Date: Sat, 7 Jun 2025 16:29:41 +0200
Subject: [PATCH 110/466] Close #13597 (LaTeX table in merged cell of parent
table) (#13629)
---
CHANGES.rst | 3 +++
sphinx/writers/latex.py | 12 ++++++------
tests/roots/test-root/markup.txt | 6 ++++++
3 files changed, 15 insertions(+), 6 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 71ffa3c1b0d..9bd8abece0e 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -29,6 +29,9 @@ Features added
Patch by Jean-François B.
* #13535: html search: Update to the latest version of Snowball (v3.0.1).
Patch by Adam Turner.
+* #13597: LaTeX: table nested in a merged cell leads to invalid LaTeX mark-up
+ and PDF cannot be built.
+ Patch by Jean-François B.
+* #13704: autodoc: Detect :py:func:`typing_extensions.overload`
and :py:func:`~typing.final` decorators.
Patch by Spencer Brown.
diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py
index 823db1d875b..0aa550a3b7e 100644
--- a/sphinx/writers/latex.py
+++ b/sphinx/writers/latex.py
@@ -134,6 +134,7 @@ def __init__(self, node: Element) -> None:
self.has_problematic = False
self.has_oldproblematic = False
self.has_verbatim = False
+ self.entry_needs_linetrimming = 0
self.caption: list[str] = []
self.stubs: list[int] = []
@@ -327,7 +328,6 @@ def __init__(
self.in_footnote = 0
self.in_caption = 0
self.in_term = 0
- self.needs_linetrimming = 0
self.in_minipage = 0
# only used by figure inside an admonition
self.no_latex_floats = 0
@@ -1331,7 +1331,7 @@ def visit_entry(self, node: Element) -> None:
r'\par' + CR + r'\vskip-\baselineskip'
r'\vbox{\hbox{\strut}}\end{varwidth}%' + CR + context
)
- self.needs_linetrimming = 1
+ self.table.entry_needs_linetrimming = 1
if len(list(node.findall(nodes.paragraph))) >= 2:
self.table.has_oldproblematic = True
if (
@@ -1346,13 +1346,14 @@ def visit_entry(self, node: Element) -> None:
pass
else:
self.body.append(r'\sphinxstyletheadfamily ')
- if self.needs_linetrimming:
+ if self.table.entry_needs_linetrimming:
self.pushbody([])
self.context.append(context)
def depart_entry(self, node: Element) -> None:
- if self.needs_linetrimming:
- self.needs_linetrimming = 0
+ assert self.table is not None
+ if self.table.entry_needs_linetrimming:
+ self.table.entry_needs_linetrimming = 0
body = self.popbody()
# Remove empty lines from top of merged cell
@@ -1362,7 +1363,6 @@ def depart_entry(self, node: Element) -> None:
self.body.append(self.context.pop())
- assert self.table is not None
cell = self.table.cell()
assert cell is not None
self.table.col += cell.width
diff --git a/tests/roots/test-root/markup.txt b/tests/roots/test-root/markup.txt
index 2e45ba33680..a9d9132ed98 100644
--- a/tests/roots/test-root/markup.txt
+++ b/tests/roots/test-root/markup.txt
@@ -223,6 +223,12 @@ Tables with multirow and multicol:
| |
+----+
+ +---+---+
+ | +---+ |
+ | | h | |
+ | +---+ |
+ +---+---+
+
.. list-table::
:header-rows: 0
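The key to the fix is that the line-trimming flag now lives on the per-table state object instead of on the translator, so a table nested inside a merged cell gets its own flag and cannot clobber the parent table's. A toy illustration of the difference (the ``Table`` class below is a hypothetical reduction of the writer's table state, not the real one)::

    class Table:
        # one state object per (possibly nested) table, as in sphinx.writers.latex
        def __init__(self) -> None:
            self.entry_needs_linetrimming = 0


    tables: list[Table] = []   # the LaTeX writer keeps a stack like this
    tables.append(Table())     # outer table
    tables[-1].entry_needs_linetrimming = 1  # set while in the merged cell

    tables.append(Table())     # table nested inside that merged cell
    # the inner table starts with its own clean flag ...
    assert tables[-1].entry_needs_linetrimming == 0
    tables.pop()
    # ... and the outer table's flag is still intact on the way out
    assert tables[0].entry_needs_linetrimming == 1

With a single translator-level flag, entering the nested table's cells would have reset the flag the parent's merged cell still relied on.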
From 21c8513e49d4827aef5602964c3d892bbdc06d0d Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sat, 7 Jun 2025 16:12:47 +0100
Subject: [PATCH 111/466] Avoid ``self.app`` in transforms (#13628)
---
sphinx/application.py | 2 +
sphinx/builders/linkcheck.py | 4 +-
sphinx/environment/__init__.py | 3 +
sphinx/ext/extlinks.py | 2 +-
sphinx/ext/viewcode.py | 12 ++--
sphinx/io.py | 35 +---------
sphinx/transforms/__init__.py | 7 +-
sphinx/transforms/i18n.py | 66 +++++++++++++++----
sphinx/transforms/post_transforms/__init__.py | 17 ++---
sphinx/transforms/post_transforms/images.py | 18 ++---
.../test_transforms_post_transforms_images.py | 2 +-
11 files changed, 91 insertions(+), 77 deletions(-)
diff --git a/sphinx/application.py b/sphinx/application.py
index fe0e8bdf195..d5192eef0b6 100644
--- a/sphinx/application.py
+++ b/sphinx/application.py
@@ -399,6 +399,8 @@ def _post_init_env(self) -> None:
if self._fresh_env_used:
self.env.find_files(self.config, self.builder)
+ self.env._builder_cls = self.builder.__class__
+
def preload_builder(self, name: str) -> None:
self.registry.preload_builder(self, name)
diff --git a/sphinx/builders/linkcheck.py b/sphinx/builders/linkcheck.py
index ff6878f2acb..de102873036 100644
--- a/sphinx/builders/linkcheck.py
+++ b/sphinx/builders/linkcheck.py
@@ -259,11 +259,11 @@ def _add_uri(self, uri: str, node: nodes.Element) -> None:
:param uri: URI to add
:param node: A node class where the URI was found
"""
- builder = cast('CheckExternalLinksBuilder', self.app.builder)
+ builder = cast('CheckExternalLinksBuilder', self.env._app.builder)
hyperlinks = builder.hyperlinks
docname = self.env.docname
- if newuri := self.app.events.emit_firstresult('linkcheck-process-uri', uri):
+ if newuri := self.env.events.emit_firstresult('linkcheck-process-uri', uri):
uri = newuri
try:
diff --git a/sphinx/environment/__init__.py b/sphinx/environment/__init__.py
index 7ad7298c22b..846a1bbde98 100644
--- a/sphinx/environment/__init__.py
+++ b/sphinx/environment/__init__.py
@@ -107,6 +107,9 @@ class BuildEnvironment:
srcdir = _StrPathProperty()
doctreedir = _StrPathProperty()
+ # builder is created after the environment.
+ _builder_cls: type[Builder]
+
def __init__(self, app: Sphinx) -> None:
self._app: Sphinx = app
self.doctreedir = app.doctreedir
diff --git a/sphinx/ext/extlinks.py b/sphinx/ext/extlinks.py
index a5e213ac9f9..82a323bd4b8 100644
--- a/sphinx/ext/extlinks.py
+++ b/sphinx/ext/extlinks.py
@@ -68,7 +68,7 @@ def check_uri(self, refnode: nodes.reference) -> None:
uri = refnode['refuri']
title = refnode.astext()
- for alias, (base_uri, _caption) in self.app.config.extlinks.items():
+ for alias, (base_uri, _caption) in self.config.extlinks.items():
         uri_pattern = re.compile(re.escape(base_uri).replace('%s', '(?P<value>.+)'))
match = uri_pattern.match(uri)
diff --git a/sphinx/ext/viewcode.py b/sphinx/ext/viewcode.py
index 39e4cf420b7..195ed95f961 100644
--- a/sphinx/ext/viewcode.py
+++ b/sphinx/ext/viewcode.py
@@ -103,11 +103,11 @@ def _get_full_modname(modname: str, attribute: str) -> str | None:
return None
-def is_supported_builder(builder: Builder) -> bool:
+def is_supported_builder(builder: type[Builder], viewcode_enable_epub: bool) -> bool:
return (
builder.format == 'html'
and builder.name != 'singlehtml'
- and (not builder.name.startswith('epub') or builder.config.viewcode_enable_epub)
+ and (not builder.name.startswith('epub') or viewcode_enable_epub)
)
@@ -220,7 +220,9 @@ class ViewcodeAnchorTransform(SphinxPostTransform):
default_priority = 100
def run(self, **kwargs: Any) -> None:
- if is_supported_builder(self.app.builder):
+ if is_supported_builder(
+ self.env._builder_cls, self.config.viewcode_enable_epub
+ ):
self.convert_viewcode_anchors()
else:
self.remove_viewcode_anchors()
@@ -229,7 +231,7 @@ def convert_viewcode_anchors(self) -> None:
for node in self.document.findall(viewcode_anchor):
anchor = nodes.inline('', _('[source]'), classes=['viewcode-link'])
refnode = make_refnode(
- self.app.builder,
+ self.env._app.builder,
node['refdoc'],
node['reftarget'],
node['refid'],
@@ -281,7 +283,7 @@ def collect_pages(app: Sphinx) -> Iterator[tuple[str, dict[str, Any], str]]:
env = app.env
if not hasattr(env, '_viewcode_modules'):
return
- if not is_supported_builder(app.builder):
+ if not is_supported_builder(env._builder_cls, env.config.viewcode_enable_epub):
return
highlighter = app.builder.highlighter # type: ignore[attr-defined]
urito = app.builder.get_relative_uri
diff --git a/sphinx/io.py b/sphinx/io.py
index 009cd38bf68..26c8b756fab 100644
--- a/sphinx/io.py
+++ b/sphinx/io.py
@@ -10,16 +10,9 @@
from docutils.transforms.references import DanglingReferences
from docutils.writers import UnfilteredWriter
-from sphinx.transforms import AutoIndexUpgrader, DoctreeReadEvent, SphinxTransformer
-from sphinx.transforms.i18n import (
- Locale,
- PreserveTranslatableMessages,
- RemoveTranslatableInline,
-)
-from sphinx.transforms.references import SphinxDomains
+from sphinx.transforms import SphinxTransformer
from sphinx.util import logging
from sphinx.util.docutils import LoggingReporter
-from sphinx.versioning import UIDTransform
if TYPE_CHECKING:
from typing import Any
@@ -113,32 +106,6 @@ def read_source(self, env: BuildEnvironment) -> str:
return arg[0]
-class SphinxI18nReader(SphinxBaseReader):
- """A document reader for i18n.
-
- This returns the source line number of original text as current source line number
- to let users know where the error happened.
- Because the translated texts are partial and they don't have correct line numbers.
- """
-
- def setup(self, app: Sphinx) -> None:
- super().setup(app)
-
- self.transforms = self.transforms + app.registry.get_transforms()
- unused = [
- PreserveTranslatableMessages,
- Locale,
- RemoveTranslatableInline,
- AutoIndexUpgrader,
- SphinxDomains,
- DoctreeReadEvent,
- UIDTransform,
- ]
- for transform in unused:
- if transform in self.transforms:
- self.transforms.remove(transform)
-
-
class SphinxDummyWriter(UnfilteredWriter): # type: ignore[type-arg]
"""Dummy writer module used for generating doctree."""
diff --git a/sphinx/transforms/__init__.py b/sphinx/transforms/__init__.py
index e76c9e98331..6857e05fe58 100644
--- a/sphinx/transforms/__init__.py
+++ b/sphinx/transforms/__init__.py
@@ -63,8 +63,9 @@ class SphinxTransform(Transform):
@property
def app(self) -> Sphinx:
"""Reference to the :class:`.Sphinx` object."""
+ cls_module = self.__class__.__module__
cls_name = self.__class__.__qualname__
- _deprecation_warning(__name__, f'{cls_name}.app', remove=(10, 0))
+ _deprecation_warning(cls_module, f'{cls_name}.app', remove=(10, 0))
return self.env.app
@property
@@ -382,7 +383,7 @@ def is_available(self) -> bool:
if self.config.smartquotes is False:
# disabled by confval smartquotes
return False
- if self.app.builder.name in builders:
+ if self.env._builder_cls.name in builders:
# disabled by confval smartquotes_excludes['builders']
return False
if self.config.language in languages:
@@ -412,7 +413,7 @@ class DoctreeReadEvent(SphinxTransform):
default_priority = 880
def apply(self, **kwargs: Any) -> None:
- self.app.events.emit('doctree-read', self.document)
+ self.env.events.emit('doctree-read', self.document)
class GlossarySorter(SphinxTransform):
diff --git a/sphinx/transforms/i18n.py b/sphinx/transforms/i18n.py
index 815ca606bce..bfacfcf1a96 100644
--- a/sphinx/transforms/i18n.py
+++ b/sphinx/transforms/i18n.py
@@ -5,6 +5,7 @@
import contextlib
from re import DOTALL, match
from textwrap import indent
+from types import SimpleNamespace
from typing import TYPE_CHECKING, Any, TypeVar
from docutils import nodes
@@ -13,9 +14,11 @@
from sphinx import addnodes
from sphinx.domains.std import make_glossary_term, split_term_classifiers
from sphinx.errors import ConfigError
+from sphinx.io import SphinxBaseReader
from sphinx.locale import __
from sphinx.locale import init as init_locale
-from sphinx.transforms import SphinxTransform
+from sphinx.transforms import AutoIndexUpgrader, DoctreeReadEvent, SphinxTransform
+from sphinx.transforms.references import SphinxDomains
from sphinx.util import get_filetype, logging
from sphinx.util.i18n import docname_to_domain
from sphinx.util.index_entries import split_index_msg
@@ -26,12 +29,15 @@
extract_messages,
traverse_translatable_index,
)
+from sphinx.versioning import UIDTransform
if TYPE_CHECKING:
from collections.abc import Sequence
from sphinx.application import Sphinx
from sphinx.config import Config
+ from sphinx.environment import BuildEnvironment
+ from sphinx.registry import SphinxComponentRegistry
from sphinx.util.typing import ExtensionMetadata
@@ -47,17 +53,46 @@
N = TypeVar('N', bound=nodes.Node)
+class _SphinxI18nReader(SphinxBaseReader):
+ """A document reader for internationalisation (i18n).
+
+ This returns the source line number of the original text
+ as the current source line number to let users know where
+ the error happened, because the translated texts are
+ partial and they don't have correct line numbers.
+ """
+
+ def __init__(
+ self, *args: Any, registry: SphinxComponentRegistry, **kwargs: Any
+ ) -> None:
+ super().__init__(*args, **kwargs)
+ unused = frozenset({
+ PreserveTranslatableMessages,
+ Locale,
+ RemoveTranslatableInline,
+ AutoIndexUpgrader,
+ SphinxDomains,
+ DoctreeReadEvent,
+ UIDTransform,
+ })
+ transforms = self.transforms + registry.get_transforms()
+ self.transforms = [
+ transform for transform in transforms if transform not in unused
+ ]
+
+
def publish_msgstr(
- app: Sphinx,
source: str,
source_path: str,
source_line: int,
config: Config,
settings: Any,
+ *,
+ env: BuildEnvironment,
+ registry: SphinxComponentRegistry,
) -> nodes.Element:
"""Publish msgstr (single line) into docutils document
- :param sphinx.application.Sphinx app: sphinx application
:param str source: source text
:param str source_path: source path for warning indication
:param source_line: source line for warning indication
@@ -65,18 +100,18 @@ def publish_msgstr(
:param docutils.frontend.Values settings: docutils settings
:return: document
:rtype: docutils.nodes.document
+ :param sphinx.environment.BuildEnvironment env: sphinx environment
+ :param sphinx.registry.SphinxComponentRegistry registry: sphinx registry
"""
try:
# clear rst_prolog temporarily
rst_prolog = config.rst_prolog
config.rst_prolog = None
- from sphinx.io import SphinxI18nReader
-
- reader = SphinxI18nReader()
- reader.setup(app)
+ reader = _SphinxI18nReader(registry=registry)
+ app = SimpleNamespace(config=config, env=env, registry=registry)
filetype = get_filetype(config.source_suffix, source_path)
- parser = app.registry.create_source_parser(app, filetype)
+ parser = registry.create_source_parser(app, filetype) # type: ignore[arg-type]
doc = reader.read(
source=StringInput(
source=source, source_path=f'{source_path}:{source_line}:'
@@ -436,12 +471,13 @@ def apply(self, **kwargs: Any) -> None:
msgstr = '::\n\n' + indent(msgstr, ' ' * 3)
patch = publish_msgstr(
- self.app,
msgstr,
source,
node.line, # type: ignore[arg-type]
self.config,
settings,
+ env=self.env,
+ registry=self.env._registry,
)
# FIXME: no warnings about inconsistent references in this part
# XXX doctest and other block markup
@@ -456,12 +492,13 @@ def apply(self, **kwargs: Any) -> None:
for _id in node['ids']:
term, first_classifier = split_term_classifiers(msgstr)
patch = publish_msgstr(
- self.app,
term or '',
source,
node.line, # type: ignore[arg-type]
self.config,
settings,
+ env=self.env,
+ registry=self.env._registry,
)
updater.patch = make_glossary_term(
self.env,
@@ -533,12 +570,13 @@ def apply(self, **kwargs: Any) -> None:
msgstr = msgstr + '\n' + '=' * len(msgstr) * 2
patch = publish_msgstr(
- self.app,
msgstr,
source,
node.line, # type: ignore[arg-type]
self.config,
settings,
+ env=self.env,
+ registry=self.env._registry,
)
# Structural Subelements phase2
if isinstance(node, nodes.title):
@@ -612,7 +650,7 @@ class TranslationProgressTotaliser(SphinxTransform):
def apply(self, **kwargs: Any) -> None:
from sphinx.builders.gettext import MessageCatalogBuilder
- if isinstance(self.app.builder, MessageCatalogBuilder):
+ if issubclass(self.env._builder_cls, MessageCatalogBuilder):
return
total = translated = 0
@@ -635,7 +673,7 @@ class AddTranslationClasses(SphinxTransform):
def apply(self, **kwargs: Any) -> None:
from sphinx.builders.gettext import MessageCatalogBuilder
- if isinstance(self.app.builder, MessageCatalogBuilder):
+ if issubclass(self.env._builder_cls, MessageCatalogBuilder):
return
if not self.config.translation_progress_classes:
@@ -673,7 +711,7 @@ class RemoveTranslatableInline(SphinxTransform):
def apply(self, **kwargs: Any) -> None:
from sphinx.builders.gettext import MessageCatalogBuilder
- if isinstance(self.app.builder, MessageCatalogBuilder):
+ if issubclass(self.env._builder_cls, MessageCatalogBuilder):
return
matcher = NodeMatcher(nodes.inline, translatable=Any)
diff --git a/sphinx/transforms/post_transforms/__init__.py b/sphinx/transforms/post_transforms/__init__.py
index 1a40c3d791a..ae70ce195d9 100644
--- a/sphinx/transforms/post_transforms/__init__.py
+++ b/sphinx/transforms/post_transforms/__init__.py
@@ -47,9 +47,9 @@ def apply(self, **kwargs: Any) -> None:
def is_supported(self) -> bool:
"""Check this transform working for current builder."""
- if self.builders and self.app.builder.name not in self.builders:
+ if self.builders and self.env._builder_cls.name not in self.builders:
return False
- return not self.formats or self.app.builder.format in self.formats
+ return not self.formats or self.env._builder_cls.format in self.formats
def run(self, **kwargs: Any) -> None:
"""Main method of post transforms.
@@ -125,7 +125,7 @@ def _resolve_pending_xref(
try:
# no new node found? try the missing-reference event
- new_node = self.app.events.emit_firstresult(
+ new_node = self.env.events.emit_firstresult(
'missing-reference',
self.env,
node,
@@ -169,10 +169,11 @@ def _resolve_pending_xref_in_domain(
typ: str,
target: str,
) -> nodes.reference | None:
+ builder = self.env._app.builder
# let the domain try to resolve the reference
if domain is not None:
return domain.resolve_xref(
- self.env, ref_doc, self.app.builder, typ, target, node, contnode
+ self.env, ref_doc, builder, typ, target, node, contnode
)
# really hardwired reference types
@@ -193,7 +194,7 @@ def _resolve_pending_any_xref(
) -> nodes.reference | None:
"""Resolve reference generated by the "any" role."""
env = self.env
- builder = self.app.builder
+ builder = self.env._app.builder
domains = env.domains
results: list[tuple[str, nodes.reference]] = []
@@ -282,7 +283,7 @@ def warn_missing_reference(
if not warn:
return
- if self.app.events.emit_firstresult('warn-missing-reference', domain, node):
+ if self.env.events.emit_firstresult('warn-missing-reference', domain, node):
return
elif domain and typ in domain.dangling_warnings:
msg = domain.dangling_warnings[typ] % {'target': target}
@@ -328,7 +329,7 @@ def run(self, **kwargs: Any) -> None:
# result in a "Losing ids" exception if there is a target node before
# the only node, so we make sure docutils can transfer the id to
# something, even if it's just a comment and will lose the id anyway...
- process_only_nodes(self.document, self.app.tags)
+ process_only_nodes(self.document, self.env._tags)
class SigElementFallbackTransform(SphinxPostTransform):
@@ -343,7 +344,7 @@ def has_visitor(
return hasattr(translator, 'visit_%s' % node.__name__)
try:
- translator = self.app.builder.get_translator_class()
+ translator = self.env._registry.get_translator_class(self.env._builder_cls)
except AttributeError:
# do nothing if no translator class is specified (e.g., on a dummy builder)
return
diff --git a/sphinx/transforms/post_transforms/images.py b/sphinx/transforms/post_transforms/images.py
index d4c6262e529..97b585d9cf6 100644
--- a/sphinx/transforms/post_transforms/images.py
+++ b/sphinx/transforms/post_transforms/images.py
@@ -45,16 +45,16 @@ def handle(self, node: nodes.image) -> None:
@property
def imagedir(self) -> _StrPath:
- return self.app.doctreedir / 'images'
+ return self.env.doctreedir / 'images'
class ImageDownloader(BaseImageConverter):
default_priority = 100
def match(self, node: nodes.image) -> bool:
- if not self.app.builder.supported_image_types:
+ if not self.env._builder_cls.supported_image_types:
return False
- if self.app.builder.supported_remote_images:
+ if self.env._builder_cls.supported_remote_images:
return False
return '://' in node['uri']
@@ -130,7 +130,7 @@ class DataURIExtractor(BaseImageConverter):
default_priority = 150
def match(self, node: nodes.image) -> bool:
- if self.app.builder.supported_data_uri_images is True:
+ if self.env._builder_cls.supported_data_uri_images is True:
return False # do not transform the image; data URIs are valid in the build output
return node['uri'].startswith('data:')
@@ -208,12 +208,12 @@ class ImageConverter(BaseImageConverter):
conversion_rules: list[tuple[str, str]] = []
def match(self, node: nodes.image) -> bool:
- if not self.app.builder.supported_image_types:
+ if not self.env._builder_cls.supported_image_types:
return False
if '?' in node['candidates']:
return False
node_mime_types = set(self.guess_mimetypes(node))
- supported_image_types = set(self.app.builder.supported_image_types)
+ supported_image_types = set(self.env._builder_cls.supported_image_types)
if node_mime_types & supported_image_types:
# builder supports the image; no need to convert
return False
@@ -233,7 +233,7 @@ def match(self, node: nodes.image) -> bool:
def get_conversion_rule(self, node: nodes.image) -> tuple[str, str]:
for candidate in self.guess_mimetypes(node):
- for supported in self.app.builder.supported_image_types:
+ for supported in self.env._builder_cls.supported_image_types:
rule = (candidate, supported)
if rule in self.conversion_rules:
return rule
@@ -250,7 +250,7 @@ def guess_mimetypes(self, node: nodes.image) -> list[str]:
if '?' in node['candidates']:
return []
elif '*' in node['candidates']:
- path = self.app.srcdir / node['uri']
+ path = self.env.srcdir / node['uri']
guessed = guess_mimetype(path)
return [guessed] if guessed is not None else []
else:
@@ -269,7 +269,7 @@ def handle(self, node: nodes.image) -> None:
ensuredir(self.imagedir)
destpath = self.imagedir / filename
- abs_srcpath = self.app.srcdir / srcpath
+ abs_srcpath = self.env.srcdir / srcpath
if self.convert(abs_srcpath, destpath):
if '*' in node['candidates']:
node['candidates']['*'] = str(destpath)
diff --git a/tests/test_transforms/test_transforms_post_transforms_images.py b/tests/test_transforms/test_transforms_post_transforms_images.py
index 800fb3b986b..c6c80f59c62 100644
--- a/tests/test_transforms/test_transforms_post_transforms_images.py
+++ b/tests/test_transforms/test_transforms_post_transforms_images.py
@@ -35,7 +35,7 @@
def test_guess_mimetype_webp(tmp_path):
document = new_document('')
- document.settings.env = SimpleNamespace(app=SimpleNamespace(srcdir=tmp_path))
+ document.settings.env = SimpleNamespace(srcdir=tmp_path)
converter = ImageConverter(document)
file_webp = 'webp-image.webp'
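The common thread in this commit is that transforms now reach shared state through ``self.env`` (its events, registry, and recorded builder class) rather than through the deprecated ``self.app``. A hedged sketch of what a third-party post-transform looks like in this style (``MyPostTransform`` and the ``my-custom-event`` name are invented; the event is assumed to have been registered with ``app.add_event``)::

    from typing import Any

    from sphinx.transforms.post_transforms import SphinxPostTransform


    class MyPostTransform(SphinxPostTransform):
        """Invented example of the post-#13628 style: no self.app access."""

        default_priority = 700

        def run(self, **kwargs: Any) -> None:
            # the builder *class* is recorded on the environment, so class
            # attributes such as ``format`` need no application reference
            if self.env._builder_cls.format != 'html':
                return
            # events are reachable from the environment as well (assumes the
            # extension registered 'my-custom-event' via app.add_event)
            self.env.events.emit('my-custom-event', self.document)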
From 0521d835149063085aa5579eff746ffac2ae1766 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sat, 7 Jun 2025 16:16:14 +0100
Subject: [PATCH 112/466] Bump Ruff to 0.11.13 (#13622)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 17cb0463ae5..17ad29b7626 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -92,7 +92,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.12",
+ "ruff==0.11.13",
"mypy==1.15.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
@@ -135,7 +135,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.12",
+ "ruff==0.11.13",
"sphinx-lint>=0.9",
]
package = [
From 3601161f0e95f0bb7e22682cdd5fe93f4ed11ed8 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sat, 7 Jun 2025 16:36:42 +0100
Subject: [PATCH 113/466] Bump types-docutils to 0.21.0.20250525 (#13630)
Co-authored-by: Adam Dangoor
---
pyproject.toml | 4 +-
sphinx/builders/_epub_base.py | 4 +-
sphinx/util/rst.py | 4 +-
sphinx/writers/html5.py | 42 +++++++-------
sphinx/writers/manpage.py | 58 +++++++++----------
.../test_util_docutils_sphinx_directive.py | 2 +-
6 files changed, 57 insertions(+), 57 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 17ad29b7626..fd0cdce21bc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -97,7 +97,7 @@ lint = [
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
"types-defusedxml==0.7.0.20250516",
- "types-docutils==0.21.0.20250514",
+ "types-docutils==0.21.0.20250525",
"types-Pillow==10.2.0.20240822",
"types-Pygments==2.19.0.20250516",
"types-requests==2.32.0.20250602", # align with requests
@@ -166,7 +166,7 @@ type-stubs = [
# align with versions used elsewhere
"types-colorama==0.4.15.20240311",
"types-defusedxml==0.7.0.20250516",
- "types-docutils==0.21.0.20250514",
+ "types-docutils==0.21.0.20250525",
"types-Pillow==10.2.0.20240822",
"types-Pygments==2.19.0.20250516",
"types-requests==2.32.0.20250602",
diff --git a/sphinx/builders/_epub_base.py b/sphinx/builders/_epub_base.py
index 10ae0820c5b..28a7553da51 100644
--- a/sphinx/builders/_epub_base.py
+++ b/sphinx/builders/_epub_base.py
@@ -114,8 +114,8 @@ class NavPoint(NamedTuple):
def sphinx_smarty_pants(t: str, language: str = 'en') -> str:
     t = t.replace('&quot;', '"')
- t = smartquotes.educateDashesOldSchool(t) # type: ignore[no-untyped-call]
- t = smartquotes.educateQuotes(t, language) # type: ignore[no-untyped-call]
+ t = smartquotes.educateDashesOldSchool(t)
+ t = smartquotes.educateQuotes(t, language)
     t = t.replace('"', '&quot;')
return t
diff --git a/sphinx/util/rst.py b/sphinx/util/rst.py
index c848a9b3657..7e6853a81ef 100644
--- a/sphinx/util/rst.py
+++ b/sphinx/util/rst.py
@@ -9,7 +9,7 @@
from unicodedata import east_asian_width
from docutils.parsers.rst import roles
-from docutils.parsers.rst.languages import en as english # type: ignore[attr-defined]
+from docutils.parsers.rst.languages import en as english
from docutils.parsers.rst.states import Body
from docutils.utils import Reporter
from jinja2 import pass_environment
@@ -66,7 +66,7 @@ def heading(env: Environment, text: str, level: int = 1) -> str:
def default_role(docname: str, name: str) -> Iterator[None]:
if name:
dummy_reporter = Reporter('', 4, 4)
- role_fn, _ = roles.role(name, english, 0, dummy_reporter)
+ role_fn, _ = roles.role(name, english, 0, dummy_reporter) # type: ignore[arg-type]
if role_fn:
docutils.register_role('', role_fn) # type: ignore[arg-type]
else:
diff --git a/sphinx/writers/html5.py b/sphinx/writers/html5.py
index b39b463d6db..39d7ecea680 100644
--- a/sphinx/writers/html5.py
+++ b/sphinx/writers/html5.py
@@ -357,7 +357,7 @@ def visit_reference(self, node: Element) -> None:
def visit_number_reference(self, node: Element) -> None:
self.visit_reference(node)
- def depart_number_reference(self, node: Element) -> None:
+ def depart_number_reference(self, node: nodes.reference) -> None:
self.depart_reference(node)
# overwritten -- we don't want source comments to show up in the HTML
@@ -451,7 +451,7 @@ def add_permalink_ref(self, node: Element, title: str) -> None:
)
# overwritten
- def visit_bullet_list(self, node: Element) -> None:
+ def visit_bullet_list(self, node: nodes.bullet_list) -> None:
if len(node) == 1 and isinstance(node[0], addnodes.toctree):
             # avoid emitting empty <ul></ul>
raise nodes.SkipNode
@@ -498,7 +498,7 @@ def depart_term(self, node: Element) -> None:
         self.body.append('</dt>')
# overwritten
- def visit_title(self, node: Element) -> None:
+ def visit_title(self, node: nodes.title) -> None:
if (
isinstance(node.parent, addnodes.compact_paragraph)
and node.parent.get('toctree')
@@ -535,7 +535,7 @@ def visit_title(self, node: Element) -> None:
self.body.pop()
self.context[-1] = '\n'
- def depart_title(self, node: Element) -> None:
+ def depart_title(self, node: nodes.title) -> None:
close_tag = self.context[-1]
if (
self.config.html_permalinks
@@ -586,7 +586,7 @@ def depart_rubric(self, node: nodes.rubric) -> None:
super().depart_rubric(node)
# overwritten
- def visit_literal_block(self, node: Element) -> None:
+ def visit_literal_block(self, node: nodes.literal_block) -> None:
if node.rawsource != node.astext():
# most probably a parsed-literal block -- don't highlight
return super().visit_literal_block(node)
@@ -614,7 +614,7 @@ def visit_literal_block(self, node: Element) -> None:
         self.body.append(starttag + highlighted + '</div>\n')
raise nodes.SkipNode
- def visit_caption(self, node: Element) -> None:
+ def visit_caption(self, node: nodes.caption) -> None:
if (
isinstance(node.parent, nodes.container)
and node.parent.get('literal_block')
@@ -625,7 +625,7 @@ def visit_caption(self, node: Element) -> None:
self.add_fignumber(node.parent)
self.body.append(self.starttag(node, 'span', '', CLASS='caption-text'))
- def depart_caption(self, node: Element) -> None:
+ def depart_caption(self, node: nodes.caption) -> None:
         self.body.append('</span>')
# append permalink if available
@@ -648,7 +648,7 @@ def depart_caption(self, node: Element) -> None:
super().depart_caption(node)
def visit_doctest_block(self, node: Element) -> None:
- self.visit_literal_block(node)
+ self.visit_literal_block(node) # type: ignore[arg-type]
     # overwritten to add the <div> (for XHTML compliance)
def visit_block_quote(self, node: Element) -> None:
@@ -740,14 +740,14 @@ def depart_download_reference(self, node: Element) -> None:
self.body.append(self.context.pop())
# overwritten
- def visit_figure(self, node: Element) -> None:
+ def visit_figure(self, node: nodes.figure) -> None:
# set align=default if align not specified to give a default style
node.setdefault('align', 'default')
return super().visit_figure(node)
# overwritten
- def visit_image(self, node: Element) -> None:
+ def visit_image(self, node: nodes.image) -> None:
olduri = node['uri']
# rewrite the URI if the environment knows about it
if olduri in self.builder.images:
@@ -775,7 +775,7 @@ def visit_image(self, node: Element) -> None:
super().visit_image(node)
# overwritten
- def depart_image(self, node: Element) -> None:
+ def depart_image(self, node: nodes.image) -> None:
if node['uri'].lower().endswith(('svg', 'svgz')):
pass
else:
@@ -892,16 +892,16 @@ def visit_tip(self, node: Element) -> None:
def depart_tip(self, node: Element) -> None:
self.depart_admonition(node)
- def visit_literal_emphasis(self, node: Element) -> None:
+ def visit_literal_emphasis(self, node: nodes.emphasis) -> None:
return self.visit_emphasis(node)
- def depart_literal_emphasis(self, node: Element) -> None:
+ def depart_literal_emphasis(self, node: nodes.emphasis) -> None:
return self.depart_emphasis(node)
- def visit_literal_strong(self, node: Element) -> None:
+ def visit_literal_strong(self, node: nodes.strong) -> None:
return self.visit_strong(node)
- def depart_literal_strong(self, node: Element) -> None:
+ def depart_literal_strong(self, node: nodes.strong) -> None:
return self.depart_strong(node)
def visit_abbreviation(self, node: Element) -> None:
@@ -913,15 +913,15 @@ def visit_abbreviation(self, node: Element) -> None:
def depart_abbreviation(self, node: Element) -> None:
         self.body.append('</abbr>')
- def visit_manpage(self, node: Element) -> None:
+ def visit_manpage(self, node: nodes.emphasis) -> None:
self.visit_literal_emphasis(node)
- def depart_manpage(self, node: Element) -> None:
+ def depart_manpage(self, node: nodes.emphasis) -> None:
self.depart_literal_emphasis(node)
# overwritten to add even/odd classes
- def visit_table(self, node: Element) -> None:
+ def visit_table(self, node: nodes.table) -> None:
self._table_row_indices.append(0)
atts = {}
@@ -936,7 +936,7 @@ def visit_table(self, node: Element) -> None:
tag = self.starttag(node, 'table', CLASS=' '.join(classes), **atts)
self.body.append(tag)
- def depart_table(self, node: Element) -> None:
+ def depart_table(self, node: nodes.table) -> None:
self._table_row_indices.pop()
super().depart_table(node)
@@ -949,11 +949,11 @@ def visit_row(self, node: Element) -> None:
self.body.append(self.starttag(node, 'tr', ''))
node.column = 0 # type: ignore[attr-defined]
- def visit_field_list(self, node: Element) -> None:
+ def visit_field_list(self, node: nodes.field_list) -> None:
self._fieldlist_row_indices.append(0)
return super().visit_field_list(node)
- def depart_field_list(self, node: Element) -> None:
+ def depart_field_list(self, node: nodes.field_list) -> None:
self._fieldlist_row_indices.pop()
return super().depart_field_list(node)
diff --git a/sphinx/writers/manpage.py b/sphinx/writers/manpage.py
index 171761fa2b0..45ab340c4e3 100644
--- a/sphinx/writers/manpage.py
+++ b/sphinx/writers/manpage.py
@@ -71,7 +71,7 @@ def apply(self, **kwargs: Any) -> None:
node.parent.remove(node)
-class ManualPageTranslator(SphinxTranslator, BaseTranslator): # type: ignore[misc]
+class ManualPageTranslator(SphinxTranslator, BaseTranslator):
"""Custom man page translator."""
_docinfo: dict[str, Any] = {}
@@ -130,17 +130,17 @@ def depart_start_of_file(self, node: Element) -> None:
# Top-level nodes for descriptions
##################################
- def visit_desc(self, node: Element) -> None:
+ def visit_desc(self, node: nodes.definition_list) -> None:
self.visit_definition_list(node)
- def depart_desc(self, node: Element) -> None:
+ def depart_desc(self, node: nodes.definition_list) -> None:
self.depart_definition_list(node)
- def visit_desc_signature(self, node: Element) -> None:
- self.visit_definition_list_item(node)
+ def visit_desc_signature(self, node: nodes.term) -> None:
+ self.visit_definition_list_item(node) # type: ignore[arg-type]
self.visit_term(node)
- def depart_desc_signature(self, node: Element) -> None:
+ def depart_desc_signature(self, node: nodes.term) -> None:
self.depart_term(node)
def visit_desc_signature_line(self, node: Element) -> None:
@@ -149,10 +149,10 @@ def visit_desc_signature_line(self, node: Element) -> None:
def depart_desc_signature_line(self, node: Element) -> None:
self.body.append(' ')
- def visit_desc_content(self, node: Element) -> None:
+ def visit_desc_content(self, node: nodes.definition) -> None:
self.visit_definition(node)
- def depart_desc_content(self, node: Element) -> None:
+ def depart_desc_content(self, node: nodes.definition) -> None:
self.depart_definition(node)
def visit_desc_inline(self, node: Element) -> None:
@@ -231,25 +231,25 @@ def depart_desc_annotation(self, node: Element) -> None:
##############################################
- def visit_versionmodified(self, node: Element) -> None:
+ def visit_versionmodified(self, node: nodes.paragraph) -> None:
self.visit_paragraph(node)
- def depart_versionmodified(self, node: Element) -> None:
+ def depart_versionmodified(self, node: nodes.paragraph) -> None:
self.depart_paragraph(node)
# overwritten -- don't make whole of term bold if it includes strong node
- def visit_term(self, node: Element) -> None:
+ def visit_term(self, node: nodes.term) -> None:
if any(node.findall(nodes.strong)):
self.body.append('\n')
else:
super().visit_term(node)
# overwritten -- we don't want source comments to show up
- def visit_comment(self, node: Element) -> None:
+ def visit_comment(self, node: Element) -> None: # type: ignore[override]
raise nodes.SkipNode
# overwritten -- added ensure_eol()
- def visit_footnote(self, node: Element) -> None:
+ def visit_footnote(self, node: nodes.footnote) -> None:
self.ensure_eol()
super().visit_footnote(node)
@@ -264,10 +264,10 @@ def visit_rubric(self, node: Element) -> None:
def depart_rubric(self, node: Element) -> None:
self.body.append('\n')
- def visit_seealso(self, node: Element) -> None:
+ def visit_seealso(self, node: nodes.admonition) -> None:
self.visit_admonition(node, 'seealso')
- def depart_seealso(self, node: Element) -> None:
+ def depart_seealso(self, node: nodes.admonition) -> None:
self.depart_admonition(node)
def visit_productionlist(self, node: Element) -> None:
@@ -291,7 +291,7 @@ def visit_image(self, node: Element) -> None:
raise nodes.SkipNode
# overwritten -- don't visit inner marked up nodes
- def visit_reference(self, node: Element) -> None:
+ def visit_reference(self, node: nodes.reference) -> None:
uri = node.get('refuri', '')
is_safe_to_click = uri.startswith(('mailto:', 'http:', 'https:', 'ftp:'))
if is_safe_to_click:
@@ -301,7 +301,7 @@ def visit_reference(self, node: Element) -> None:
self.body.append(self.defs['reference'][0])
# avoid repeating escaping code... fine since
# visit_Text calls astext() and only works on that afterwards
- self.visit_Text(node)
+ self.visit_Text(node) # type: ignore[arg-type]
self.body.append(self.defs['reference'][1])
if uri and not uri.startswith('#'):
@@ -369,10 +369,10 @@ def visit_acks(self, node: Element) -> None:
self.body.append('\n')
raise nodes.SkipNode
- def visit_hlist(self, node: Element) -> None:
+ def visit_hlist(self, node: nodes.bullet_list) -> None:
self.visit_bullet_list(node)
- def depart_hlist(self, node: Element) -> None:
+ def depart_hlist(self, node: nodes.bullet_list) -> None:
self.depart_bullet_list(node)
def visit_hlistcol(self, node: Element) -> None:
@@ -381,16 +381,16 @@ def visit_hlistcol(self, node: Element) -> None:
def depart_hlistcol(self, node: Element) -> None:
pass
- def visit_literal_emphasis(self, node: Element) -> None:
+ def visit_literal_emphasis(self, node: nodes.emphasis) -> None:
return self.visit_emphasis(node)
- def depart_literal_emphasis(self, node: Element) -> None:
+ def depart_literal_emphasis(self, node: nodes.emphasis) -> None:
return self.depart_emphasis(node)
- def visit_literal_strong(self, node: Element) -> None:
+ def visit_literal_strong(self, node: nodes.strong) -> None:
return self.visit_strong(node)
- def depart_literal_strong(self, node: Element) -> None:
+ def depart_literal_strong(self, node: nodes.strong) -> None:
return self.depart_strong(node)
def visit_abbreviation(self, node: Element) -> None:
@@ -399,14 +399,14 @@ def visit_abbreviation(self, node: Element) -> None:
def depart_abbreviation(self, node: Element) -> None:
pass
- def visit_manpage(self, node: Element) -> None:
+ def visit_manpage(self, node: nodes.strong) -> None:
return self.visit_strong(node)
- def depart_manpage(self, node: Element) -> None:
+ def depart_manpage(self, node: nodes.strong) -> None:
return self.depart_strong(node)
# overwritten: handle section titles better than in 0.6 release
- def visit_caption(self, node: Element) -> None:
+ def visit_caption(self, node: nodes.caption) -> None:
if (
isinstance(node.parent, nodes.container)
and node.parent.get('literal_block')
@@ -415,7 +415,7 @@ def visit_caption(self, node: Element) -> None:
else:
super().visit_caption(node)
- def depart_caption(self, node: Element) -> None:
+ def depart_caption(self, node: nodes.caption) -> None:
if (
isinstance(node.parent, nodes.container)
and node.parent.get('literal_block')
@@ -425,7 +425,7 @@ def depart_caption(self, node: Element) -> None:
super().depart_caption(node)
# overwritten: handle section titles better than in 0.6 release
- def visit_title(self, node: Element) -> None:
+ def visit_title(self, node: nodes.title) -> None:
if isinstance(node.parent, addnodes.seealso):
self.body.append('.IP "')
return None
@@ -438,7 +438,7 @@ def visit_title(self, node: Element) -> None:
raise nodes.SkipNode
return super().visit_title(node)
- def depart_title(self, node: Element) -> None:
+ def depart_title(self, node: nodes.title) -> None:
if isinstance(node.parent, addnodes.seealso):
self.body.append('"\n')
return None
diff --git a/tests/test_util/test_util_docutils_sphinx_directive.py b/tests/test_util/test_util_docutils_sphinx_directive.py
index 8c24a3c4a83..eb1e4aea16a 100644
--- a/tests/test_util/test_util_docutils_sphinx_directive.py
+++ b/tests/test_util/test_util_docutils_sphinx_directive.py
@@ -3,7 +3,7 @@
from types import SimpleNamespace
from docutils import nodes
-from docutils.parsers.rst.languages import en as english # type: ignore[attr-defined]
+from docutils.parsers.rst.languages import en as english
from docutils.parsers.rst.states import (
Inliner,
RSTState,
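The visitor churn above is mechanical: with the newer ``types-docutils`` stubs, each ``visit_*``/``depart_*`` override names the concrete node class it actually receives instead of the loose ``Element``. A small sketch of the idiom (``MyTranslator`` is a hypothetical subclass for illustration)::

    from docutils import nodes

    from sphinx.writers.html5 import HTML5Translator


    class MyTranslator(HTML5Translator):
        """Hypothetical subclass showing the narrowed signatures."""

        def visit_table(self, node: nodes.table) -> None:
            # the parameter type now matches what docutils dispatches here
            super().visit_table(node)

        def depart_table(self, node: nodes.table) -> None:
            super().depart_table(node)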
From dc33f988f39e870d7195ab4714b5b55fc9ec2044 Mon Sep 17 00:00:00 2001
From: Marie Sacksick <79304610+MarieSacksick@users.noreply.github.com>
Date: Sat, 7 Jun 2025 17:38:30 +0200
Subject: [PATCH 114/466] autosummary: add 'vs.' to the well-known
abbreviations (#13591)
---
sphinx/ext/autosummary/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/sphinx/ext/autosummary/__init__.py b/sphinx/ext/autosummary/__init__.py
index fe7092c0a74..62c9427ecdb 100644
--- a/sphinx/ext/autosummary/__init__.py
+++ b/sphinx/ext/autosummary/__init__.py
@@ -107,7 +107,7 @@
periods_re = re.compile(r'\.(?:\s+)')
literal_re = re.compile(r'::\s*$')
-WELL_KNOWN_ABBREVIATIONS = ('et al.', 'e.g.', 'i.e.')
+WELL_KNOWN_ABBREVIATIONS = ('et al.', 'e.g.', 'i.e.', 'vs.')
# -- autosummary_toc node ------------------------------------------------------
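For context, autosummary trims a docstring to its first sentence by splitting on periods, and ``WELL_KNOWN_ABBREVIATIONS`` lists the full stops that must not count as sentence ends. A rough sketch of that behaviour (simplified from, and not identical to, autosummary's real ``extract_summary``)::

    import re

    periods_re = re.compile(r'\.(?:\s+)')
    WELL_KNOWN_ABBREVIATIONS = ('et al.', 'e.g.', 'i.e.', 'vs.')


    def first_sentence(text: str) -> str:
        # cut at the first period that does not end a known abbreviation
        for match in periods_re.finditer(text):
            end = match.start() + 1
            if not text[:end].endswith(WELL_KNOWN_ABBREVIATIONS):
                return text[:end]
        return text


    print(first_sentence('Fast mode vs. slow mode. Details follow.'))
    # -> 'Fast mode vs. slow mode.'

Without the new entry, the summary would have been cut short at ``'Fast mode vs.'``.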
From 74627b25b01d7adc5e90b3838e637336f31586a3 Mon Sep 17 00:00:00 2001
From: Tim Hoffmann <2836374+timhoffm@users.noreply.github.com>
Date: Sat, 7 Jun 2025 18:07:52 +0200
Subject: [PATCH 115/466] Use anonymous references for links with embedded URLs
(#13615)
Co-authored-by: Adam Turner <9087854+aa-turner@users.noreply.github.com>
---
doc/usage/restructuredtext/basics.rst | 36 ++++++++++++++++++++++++---
1 file changed, 32 insertions(+), 4 deletions(-)
diff --git a/doc/usage/restructuredtext/basics.rst b/doc/usage/restructuredtext/basics.rst
index 5d60ea81de4..ea61b80fc85 100644
--- a/doc/usage/restructuredtext/basics.rst
+++ b/doc/usage/restructuredtext/basics.rst
@@ -208,11 +208,39 @@ Hyperlinks
External links
~~~~~~~~~~~~~~
-Use ```Link text <https://domain.invalid/>`_`` for inline web links. If the
-link text should be the web address, you don't need special markup at all, the
-parser finds links and mail addresses in ordinary text.
+URLs and email addresses in text are automatically linked and do not need
+explicit markup at all.
+For example, https://domain.invalid/ is written with no special markup
+in the source of this document, and is recognised as an external hyperlink.
-.. important:: There must be a space between the link text and the opening \< for the URL.
+To create text with a link, the best approach is generally to put the URL
+below the paragraph as follows (:duref:`ref
`)::
+
+ This is a paragraph that contains `a link`_.
+
+ .. _a link: https://domain.invalid/
+
+This keeps the paragraph more readable in source code.
+
+Alternatively, you can embed the URL within the prose for an 'inline link'.
+This can lead to longer lines, but has the benefit of keeping the link text
+and the URL pointed to in the same place.
+This uses the following syntax: ```Link text <https://domain.invalid/>`__``
+(:duref:`ref <embedded-uris-and-aliases>`).
+
+.. important::
+
+ There must be a space between the link text
+ and the opening angle bracket ('``<``') for the URL.
+
+.. tip::
+
+ Use two trailing underscores when embedding the URL.
+ Technically, a single underscore works as well,
+ but that would create a named reference instead of an anonymous one.
+ Named references typically do not have a benefit when the URL is embedded.
+ Moreover, they have the disadvantage that you must make sure that you
+ do not use the same "Link text" for another link in your document.
 You can also separate the link and the target definition (:duref:`ref <hyperlink-targets>`), like this::
From 25ab3d73901c75231880733ee2a3acf966ddcc11 Mon Sep 17 00:00:00 2001
From: Melissa Weber Mendonça
Date: Sat, 7 Jun 2025 13:24:56 -0300
Subject: [PATCH 116/466] Document the ``autolink`` role for autosummary
(#13596)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
---
doc/usage/extensions/autosummary.rst | 24 ++++++++++++++++++++++++
1 file changed, 24 insertions(+)
diff --git a/doc/usage/extensions/autosummary.rst b/doc/usage/extensions/autosummary.rst
index 456faee1830..c84dcb60eff 100644
--- a/doc/usage/extensions/autosummary.rst
+++ b/doc/usage/extensions/autosummary.rst
@@ -412,3 +412,27 @@ the title of a page.
Stub pages are generated also based on these directives.
.. _`escape filter`: https://jinja.palletsprojects.com/en/3.0.x/templates/#jinja-filters.escape
+
+Autolink role
+-------------
+
+.. rst:role:: autolink
+
+ The ``:autolink:`` role functions as ``:py:obj:`` when the referenced *name*
+ can be resolved to a Python object, and otherwise it becomes simple emphasis.
+
+ There are some known design flaws.
+ For example, in the case of multiple objects having the same name,
+ :rst:role:`!autolink` could resolve to the wrong object.
+ It will fail silently if the referenced object is not found,
+ for example due to a spelling mistake or renaming.
+ This is sometimes unwanted behaviour.
+
+ Some users choose to configure their :confval:`default_role` to ``autolink``
+ for 'smart' referencing using the default interpreted text role (```content```).
+
+ .. seealso::
+
+ :rst:role:`any`
+
+ :rst:role:`py:obj`
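For example, opting in site-wide is a one-line ``conf.py`` change (a minimal sketch; it assumes ``sphinx.ext.autosummary`` is already enabled)::

    # conf.py -- minimal sketch
    extensions = ['sphinx.ext.autosummary']

    # make bare `name` references resolve like :autolink:`name`
    default_role = 'autolink'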
From acdf4a86980efdb37146169fc09d435e54a4a47d Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sat, 7 Jun 2025 17:25:11 +0100
Subject: [PATCH 117/466] Use an explicit tags parameter for
``global_toctree_for_doc()`` (#13631)
---
sphinx/builders/__init__.py | 22 ++++++++++++++-----
sphinx/builders/_epub_base.py | 12 ++++++++--
sphinx/builders/epub3.py | 6 ++++-
sphinx/builders/html/__init__.py | 2 +-
sphinx/builders/singlehtml.py | 4 ++--
sphinx/environment/__init__.py | 4 +++-
sphinx/environment/adapters/toctree.py | 17 ++++++++++++--
.../test_environment_toctree.py | 17 ++++++++++----
.../test_ext_inheritance_diagram.py | 2 +-
tests/test_extensions/test_ext_math.py | 2 +-
10 files changed, 68 insertions(+), 20 deletions(-)
diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py
index fb8bd757864..70602273747 100644
--- a/sphinx/builders/__init__.py
+++ b/sphinx/builders/__init__.py
@@ -772,7 +772,9 @@ def _write_serial(self, docnames: Sequence[str]) -> None:
len(docnames),
self._app.verbosity,
):
- _write_docname(docname, app=self._app, env=self.env, builder=self)
+ _write_docname(
+ docname, app=self._app, env=self.env, builder=self, tags=self.tags
+ )
def _write_parallel(self, docnames: Sequence[str], nproc: int) -> None:
def write_process(docs: list[tuple[str, nodes.document]]) -> None:
@@ -782,7 +784,9 @@ def write_process(docs: list[tuple[str, nodes.document]]) -> None:
# warm up caches/compile templates using the first document
firstname, docnames = docnames[0], docnames[1:]
- _write_docname(firstname, app=self._app, env=self.env, builder=self)
+ _write_docname(
+ firstname, app=self._app, env=self.env, builder=self, tags=self.tags
+ )
tasks = ParallelTasks(nproc)
chunks = make_chunks(docnames, nproc)
@@ -804,7 +808,9 @@ def on_chunk_done(args: list[tuple[str, nodes.document]], result: None) -> None:
for chunk in chunks:
arg = []
for docname in chunk:
- doctree = self.env.get_and_resolve_doctree(docname, self)
+ doctree = self.env.get_and_resolve_doctree(
+ docname, self, tags=self.tags
+ )
self.write_doc_serialized(docname, doctree)
arg.append((docname, doctree))
tasks.add_task(write_process, arg, on_chunk_done)
@@ -871,11 +877,17 @@ def get_builder_config(self, option: str, default: str) -> Any:
def _write_docname(
- docname: str, /, *, app: Sphinx, env: BuildEnvironment, builder: Builder
+ docname: str,
+ /,
+ *,
+ app: Sphinx,
+ env: BuildEnvironment,
+ builder: Builder,
+ tags: Tags,
) -> None:
"""Write a single document."""
app.phase = BuildPhase.RESOLVING
- doctree = env.get_and_resolve_doctree(docname, builder=builder)
+ doctree = env.get_and_resolve_doctree(docname, builder=builder, tags=tags)
app.phase = BuildPhase.WRITING
builder.write_doc_serialized(docname, doctree)
builder.write_doc(docname, doctree)
diff --git a/sphinx/builders/_epub_base.py b/sphinx/builders/_epub_base.py
index 28a7553da51..1bd4846bf02 100644
--- a/sphinx/builders/_epub_base.py
+++ b/sphinx/builders/_epub_base.py
@@ -233,7 +233,11 @@ def get_toc(self) -> None:
and pre and post files not managed by Sphinx.
"""
doctree = self.env.get_and_resolve_doctree(
- self.config.master_doc, self, prune_toctrees=False, includehidden=True
+ self.config.master_doc,
+ self,
+ tags=self.tags,
+ prune_toctrees=False,
+ includehidden=True,
)
self.refnodes = self.get_refnodes(doctree, [])
master_dir = Path(self.config.master_doc).parent
@@ -765,7 +769,11 @@ def build_toc(self) -> None:
if self.config.epub_tocscope == 'default':
doctree = self.env.get_and_resolve_doctree(
- self.config.root_doc, self, prune_toctrees=False, includehidden=False
+ self.config.root_doc,
+ self,
+ tags=self.tags,
+ prune_toctrees=False,
+ includehidden=False,
)
refnodes = self.get_refnodes(doctree, [])
self.toc_add_files(refnodes)
diff --git a/sphinx/builders/epub3.py b/sphinx/builders/epub3.py
index 2ea66c34b8b..c17be8bd4e9 100644
--- a/sphinx/builders/epub3.py
+++ b/sphinx/builders/epub3.py
@@ -190,7 +190,11 @@ def build_navigation_doc(self) -> None:
if self.config.epub_tocscope == 'default':
doctree = self.env.get_and_resolve_doctree(
- self.config.root_doc, self, prune_toctrees=False, includehidden=False
+ self.config.root_doc,
+ self,
+ tags=self.tags,
+ prune_toctrees=False,
+ includehidden=False,
)
refnodes = self.get_refnodes(doctree, [])
self.toc_add_files(refnodes)
diff --git a/sphinx/builders/html/__init__.py b/sphinx/builders/html/__init__.py
index 1ba026a61d0..a5f725e2922 100644
--- a/sphinx/builders/html/__init__.py
+++ b/sphinx/builders/html/__init__.py
@@ -1029,7 +1029,7 @@ def _get_local_toctree(
if kwargs.get('maxdepth') == '': # NoQA: PLC1901
kwargs.pop('maxdepth')
toctree = global_toctree_for_doc(
- self.env, docname, self, collapse=collapse, **kwargs
+ self.env, docname, self, tags=self.tags, collapse=collapse, **kwargs
)
return self.render_partial(toctree)['fragment']
diff --git a/sphinx/builders/singlehtml.py b/sphinx/builders/singlehtml.py
index c95603927ce..1888f6679d1 100644
--- a/sphinx/builders/singlehtml.py
+++ b/sphinx/builders/singlehtml.py
@@ -84,7 +84,7 @@ def _get_local_toctree(
if kwargs.get('maxdepth') == '': # NoQA: PLC1901
kwargs.pop('maxdepth')
toctree = global_toctree_for_doc(
- self.env, docname, self, collapse=collapse, **kwargs
+ self.env, docname, self, tags=self.tags, collapse=collapse, **kwargs
)
return self.render_partial(toctree)['fragment']
@@ -141,7 +141,7 @@ def assemble_toc_fignumbers(
def get_doc_context(self, docname: str, body: str, metatags: str) -> dict[str, Any]:
# no relation links...
toctree = global_toctree_for_doc(
- self.env, self.config.root_doc, self, collapse=False
+ self.env, self.config.root_doc, self, tags=self.tags, collapse=False
)
# if there is no toctree, toc is None
if toctree:
diff --git a/sphinx/environment/__init__.py b/sphinx/environment/__init__.py
index 846a1bbde98..fd611639e9c 100644
--- a/sphinx/environment/__init__.py
+++ b/sphinx/environment/__init__.py
@@ -701,6 +701,8 @@ def get_and_resolve_doctree(
self,
docname: str,
builder: Builder,
+ *,
+ tags: Tags,
doctree: nodes.document | None = None,
prune_toctrees: bool = True,
includehidden: bool = False,
@@ -770,7 +772,7 @@ def resolve_toctree(
titles_only=titles_only,
collapse=collapse,
includehidden=includehidden,
- tags=builder.tags,
+ tags=self._tags,
)
def resolve_references(
diff --git a/sphinx/environment/adapters/toctree.py b/sphinx/environment/adapters/toctree.py
index 90344f185d7..4708383d64b 100644
--- a/sphinx/environment/adapters/toctree.py
+++ b/sphinx/environment/adapters/toctree.py
@@ -2,12 +2,14 @@
from __future__ import annotations
+import warnings
from typing import TYPE_CHECKING, TypeVar
from docutils import nodes
from docutils.nodes import Element
from sphinx import addnodes
+from sphinx.deprecation import RemovedInSphinx10Warning
from sphinx.locale import __
from sphinx.util import logging, url_re
from sphinx.util.matching import Matcher
@@ -69,6 +71,8 @@ def global_toctree_for_doc(
env: BuildEnvironment,
docname: str,
builder: Builder,
+ *,
+ tags: Tags = ..., # type: ignore[assignment]
collapse: bool = False,
includehidden: bool = True,
maxdepth: int = 0,
@@ -78,6 +82,15 @@ def global_toctree_for_doc(
This gives the global ToC, with all ancestors and their siblings.
"""
+ if tags is ...:
+ warnings.warn(
+ "'tags' will become a required keyword argument "
+ 'for global_toctree_for_doc() in Sphinx 10.0.',
+ RemovedInSphinx10Warning,
+ stacklevel=2,
+ )
+ tags = builder.tags
+
resolved = (
_resolve_toctree(
env,
@@ -89,7 +102,7 @@ def global_toctree_for_doc(
titles_only=titles_only,
collapse=collapse,
includehidden=includehidden,
- tags=builder.tags,
+ tags=tags,
)
for toctree_node in env.master_doctree.findall(addnodes.toctree)
)
@@ -582,5 +595,5 @@ def get_toctree_for(
**kwargs: Any,
) -> Element | None:
return global_toctree_for_doc(
- self.env, docname, builder, collapse=collapse, **kwargs
+ self.env, docname, builder, tags=builder.tags, collapse=collapse, **kwargs
)
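The shim above uses a sentinel default so that an omitted ``tags`` argument can
be detected and warned about. A minimal self-contained sketch of the same
pattern (``global_toctree_for_doc_sketch`` and ``DemoBuilder`` are illustrative
names, not Sphinx API):

    import warnings

    def global_toctree_for_doc_sketch(builder, *, tags=...):
        # ``...`` (Ellipsis) marks "argument not supplied"; ``None`` is not
        # used because a caller could plausibly pass it on purpose.
        if tags is ...:
            warnings.warn(
                "'tags' will become a required keyword argument",
                DeprecationWarning,
                stacklevel=2,  # attribute the warning to the caller
            )
            tags = builder.tags  # old behaviour: fall back to builder's tags
        return tags

    class DemoBuilder:
        tags = frozenset({'html'})

    assert global_toctree_for_doc_sketch(DemoBuilder()) == frozenset({'html'})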
diff --git a/tests/test_environment/test_environment_toctree.py b/tests/test_environment/test_environment_toctree.py
index f6b849c5bec..22474daef55 100644
--- a/tests/test_environment/test_environment_toctree.py
+++ b/tests/test_environment/test_environment_toctree.py
@@ -614,7 +614,9 @@ def test_document_toc_tocdepth(app):
@pytest.mark.test_params(shared_result='test_environment_toctree_basic')
def test_global_toctree_for_doc(app):
app.build()
- toctree = global_toctree_for_doc(app.env, 'index', app.builder, collapse=False)
+ toctree = global_toctree_for_doc(
+ app.env, 'index', app.builder, tags=app.tags, collapse=False
+ )
assert_node(
toctree,
[
@@ -676,7 +678,9 @@ def test_global_toctree_for_doc(app):
@pytest.mark.test_params(shared_result='test_environment_toctree_basic')
def test_global_toctree_for_doc_collapse(app):
app.build()
- toctree = global_toctree_for_doc(app.env, 'index', app.builder, collapse=True)
+ toctree = global_toctree_for_doc(
+ app.env, 'index', app.builder, tags=app.tags, collapse=True
+ )
assert_node(
toctree,
[
@@ -723,7 +727,7 @@ def test_global_toctree_for_doc_collapse(app):
def test_global_toctree_for_doc_maxdepth(app):
app.build()
toctree = global_toctree_for_doc(
- app.env, 'index', app.builder, collapse=False, maxdepth=3
+ app.env, 'index', app.builder, tags=app.tags, collapse=False, maxdepth=3
)
assert_node(
toctree,
@@ -814,7 +818,12 @@ def test_global_toctree_for_doc_maxdepth(app):
def test_global_toctree_for_doc_includehidden(app):
app.build()
toctree = global_toctree_for_doc(
- app.env, 'index', app.builder, collapse=False, includehidden=False
+ app.env,
+ 'index',
+ app.builder,
+ tags=app.tags,
+ collapse=False,
+ includehidden=False,
)
assert_node(
toctree,
diff --git a/tests/test_extensions/test_ext_inheritance_diagram.py b/tests/test_extensions/test_ext_inheritance_diagram.py
index 4153113d12e..f98d424eda3 100644
--- a/tests/test_extensions/test_ext_inheritance_diagram.py
+++ b/tests/test_extensions/test_ext_inheritance_diagram.py
@@ -293,7 +293,7 @@ def test_inheritance_diagram_latex_alias(app):
app.config.inheritance_alias = {'test.Foo': 'alias.Foo'}
app.build(force_all=True)
- doc = app.env.get_and_resolve_doctree('index', app)
+ doc = app.env.get_and_resolve_doctree('index', app.builder, tags=app.tags)
aliased_graph = doc.children[0].children[3]['graph'].class_info
assert len(aliased_graph) == 4
assert (
diff --git a/tests/test_extensions/test_ext_math.py b/tests/test_extensions/test_ext_math.py
index 02d215bef01..2dbc93629fd 100644
--- a/tests/test_extensions/test_ext_math.py
+++ b/tests/test_extensions/test_ext_math.py
@@ -323,7 +323,7 @@ def test_imgmath_numfig_html(app: SphinxTestApp) -> None:
def test_math_compat(app):
with warnings.catch_warnings(record=True):
app.build(force_all=True)
- doctree = app.env.get_and_resolve_doctree('index', app.builder)
+ doctree = app.env.get_and_resolve_doctree('index', app.builder, tags=app.tags)
assert_node(
doctree,
From 3044d67531f2a66f285912aa92929ccb3c9e3f8e Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sat, 7 Jun 2025 18:58:06 +0100
Subject: [PATCH 118/466] Avoid self.app in builder (#13632)
---
sphinx/application.py | 1 +
sphinx/builders/__init__.py | 3 ++-
sphinx/builders/_epub_base.py | 2 +-
sphinx/builders/changes.py | 7 +++++-
sphinx/builders/gettext.py | 14 +++++++-----
sphinx/builders/html/__init__.py | 25 ++++++++++++++--------
sphinx/builders/latex/__init__.py | 4 ++--
sphinx/builders/latex/theming.py | 9 ++++----
sphinx/builders/linkcheck.py | 6 +++---
sphinx/builders/texinfo.py | 2 +-
sphinx/config.py | 6 ++++++
sphinx/ext/coverage.py | 8 +++----
sphinx/ext/doctest.py | 4 ++--
sphinx/theming.py | 20 ++++++++++++-----
sphinx/transforms/__init__.py | 2 +-
tests/test_extensions/test_ext_coverage.py | 2 +-
tests/test_theming/test_theming.py | 8 +++----
17 files changed, 78 insertions(+), 45 deletions(-)
diff --git a/sphinx/application.py b/sphinx/application.py
index d5192eef0b6..3874a6afa52 100644
--- a/sphinx/application.py
+++ b/sphinx/application.py
@@ -264,6 +264,7 @@ def __init__(
else:
self.confdir = _StrPath(confdir).resolve()
self.config = Config.read(self.confdir, confoverrides or {}, self.tags)
+ self.config._verbosity = -1 if self.quiet else self.verbosity
# set up translation infrastructure
self._init_i18n()
diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py
index 70602273747..4e116732e7a 100644
--- a/sphinx/builders/__init__.py
+++ b/sphinx/builders/__init__.py
@@ -139,8 +139,9 @@ def __init__(self, app: Sphinx, env: BuildEnvironment) -> None:
@property
def app(self) -> Sphinx:
+ cls_module = self.__class__.__module__
cls_name = self.__class__.__qualname__
- _deprecation_warning(__name__, f'{cls_name}.app', remove=(10, 0))
+ _deprecation_warning(cls_module, f'{cls_name}.app', remove=(10, 0))
return self._app
@property
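The one-line change above makes the deprecation message name the concrete
subclass's module rather than ``sphinx.builders``, which matters for
third-party builders. A reduced sketch of the pattern (illustrative names; not
the real ``_deprecation_warning`` signature):

    import warnings

    class BuilderSketch:
        def __init__(self, app):
            self._app = app

        @property
        def app(self):
            # Report the subclass's own module path in the warning.
            cls_module = self.__class__.__module__
            cls_name = self.__class__.__qualname__
            warnings.warn(
                f'{cls_module}.{cls_name}.app is deprecated',
                DeprecationWarning,
                stacklevel=2,
            )
            return self._app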
diff --git a/sphinx/builders/_epub_base.py b/sphinx/builders/_epub_base.py
index 1bd4846bf02..3c7c93dfd1f 100644
--- a/sphinx/builders/_epub_base.py
+++ b/sphinx/builders/_epub_base.py
@@ -425,7 +425,7 @@ def copy_image_files_pil(self) -> None:
__('copying images... '),
'brown',
len(self.images),
- self.app.verbosity,
+ self.config.verbosity,
):
dest = self.images[src]
try:
diff --git a/sphinx/builders/changes.py b/sphinx/builders/changes.py
index aa926e0809c..059a7d1b055 100644
--- a/sphinx/builders/changes.py
+++ b/sphinx/builders/changes.py
@@ -30,7 +30,12 @@ class ChangesBuilder(Builder):
def init(self) -> None:
self.create_template_bridge()
- theme_factory = HTMLThemeFactory(self.app)
+ theme_factory = HTMLThemeFactory(
+ confdir=self.confdir,
+ app=self._app,
+ config=self.config,
+ registry=self.env._registry,
+ )
self.theme = theme_factory.create('default')
self.templates.init(self, self.theme)
diff --git a/sphinx/builders/gettext.py b/sphinx/builders/gettext.py
index f5f26ffcc88..659bf218983 100644
--- a/sphinx/builders/gettext.py
+++ b/sphinx/builders/gettext.py
@@ -165,7 +165,7 @@ class I18nBuilder(Builder):
def init(self) -> None:
super().init()
self.env.set_versioning_method(self.versioning_method, self.config.gettext_uuid)
- self.tags = self.app.tags = I18nTags()
+ self.tags = self._app.tags = I18nTags()
self.catalogs: defaultdict[str, Catalog] = defaultdict(Catalog)
def get_target_uri(self, docname: str, typ: str | None = None) -> str:
@@ -251,7 +251,7 @@ def init(self) -> None:
def _collect_templates(self) -> set[str]:
template_files = set()
for template_path in self.config.templates_path:
- tmpl_abs_path = self.app.srcdir / template_path
+ tmpl_abs_path = self.srcdir / template_path
for dirpath, _dirs, files in walk(tmpl_abs_path):
for fn in files:
if fn.endswith('.html'):
@@ -268,7 +268,11 @@ def _extract_from_template(self) -> None:
extract_translations = self.templates.environment.extract_translations
for template in status_iterator(
- files, __('reading templates... '), 'purple', len(files), self.app.verbosity
+ files,
+ __('reading templates... '),
+ 'purple',
+ len(files),
+ self.config.verbosity,
):
try:
with codecs.open(template, encoding='utf-8') as f:
@@ -307,7 +311,7 @@ def finish(self) -> None:
__('writing message catalogs... '),
'darkgreen',
len(self.catalogs),
- self.app.verbosity,
+ self.config.verbosity,
operator.itemgetter(0),
):
# noop if config.gettext_compact is set
@@ -315,7 +319,7 @@ def finish(self) -> None:
context['messages'] = list(catalog)
template_path = [
- self.app.srcdir / rel_path for rel_path in self.config.templates_path
+ self.srcdir / rel_path for rel_path in self.config.templates_path
]
renderer = GettextRenderer(template_path, outdir=self.outdir)
content = renderer.render('message.pot.jinja', context)
diff --git a/sphinx/builders/html/__init__.py b/sphinx/builders/html/__init__.py
index a5f725e2922..1195d08beb6 100644
--- a/sphinx/builders/html/__init__.py
+++ b/sphinx/builders/html/__init__.py
@@ -228,7 +228,12 @@ def get_theme_config(self) -> tuple[str, dict[str, str | int | bool]]:
return self.config.html_theme, self.config.html_theme_options
def init_templates(self) -> None:
- theme_factory = HTMLThemeFactory(self.app)
+ theme_factory = HTMLThemeFactory(
+ confdir=self.confdir,
+ app=self._app,
+ config=self.config,
+ registry=self.env._registry,
+ )
theme_name, theme_options = self.get_theme_config()
self.theme = theme_factory.create(theme_name)
self.theme_options = theme_options
@@ -255,11 +260,6 @@ def init_highlighter(self) -> None:
self.dark_highlighter: PygmentsBridge | None
if dark_style is not None:
self.dark_highlighter = PygmentsBridge('html', dark_style)
- self.app.add_css_file(
- 'pygments_dark.css',
- media='(prefers-color-scheme: dark)',
- id='pygments_dark_css',
- )
else:
self.dark_highlighter = None
@@ -273,6 +273,13 @@ def css_files(self) -> list[_CascadingStyleSheet]:
def init_css_files(self) -> None:
self._css_files = []
self.add_css_file('pygments.css', priority=200)
+ if self.dark_highlighter is not None:
+ self.add_css_file(
+ 'pygments_dark.css',
+ priority=200,
+ media='(prefers-color-scheme: dark)',
+ id='pygments_dark_css',
+ )
for filename in self._get_style_filenames():
self.add_css_file(filename, priority=200)
@@ -780,7 +787,7 @@ def copy_image_files(self) -> None:
__('copying images... '),
'brown',
len(self.images),
- self.app.verbosity,
+ self.config.verbosity,
stringify_func=stringify_func,
):
dest = self.images[src]
@@ -807,7 +814,7 @@ def to_relpath(f: str) -> str:
__('copying downloadable files... '),
'brown',
len(self.env.dlfiles),
- self.app.verbosity,
+ self.config.verbosity,
stringify_func=to_relpath,
):
try:
@@ -1128,7 +1135,7 @@ def hasdoc(name: str) -> bool:
# 'blah.html' should have content_root = './' not ''.
ctx['content_root'] = (f'..{SEP}' * default_baseuri.count(SEP)) or f'.{SEP}'
- outdir = self.app.outdir
+ outdir = self.outdir
def css_tag(css: _CascadingStyleSheet) -> str:
attrs = [
diff --git a/sphinx/builders/latex/__init__.py b/sphinx/builders/latex/__init__.py
index 985620f2023..d5e4a779aa1 100644
--- a/sphinx/builders/latex/__init__.py
+++ b/sphinx/builders/latex/__init__.py
@@ -132,7 +132,7 @@ def init(self) -> None:
self.context: dict[str, Any] = {}
self.docnames: Iterable[str] = {}
self.document_data: list[tuple[str, str, str, str, str, bool]] = []
- self.themes = ThemeFactory(self.app)
+ self.themes = ThemeFactory(srcdir=self.srcdir, config=self.config)
texescape.init()
self.init_context()
@@ -481,7 +481,7 @@ def copy_image_files(self) -> None:
__('copying images... '),
'brown',
len(self.images),
- self.app.verbosity,
+ self.config.verbosity,
stringify_func=stringify_func,
):
dest = self.images[src]
diff --git a/sphinx/builders/latex/theming.py b/sphinx/builders/latex/theming.py
index f55c077c9ca..df8eb48ec4f 100644
--- a/sphinx/builders/latex/theming.py
+++ b/sphinx/builders/latex/theming.py
@@ -12,7 +12,6 @@
if TYPE_CHECKING:
from pathlib import Path
- from sphinx.application import Sphinx
from sphinx.config import Config
logger = logging.getLogger(__name__)
@@ -102,11 +101,11 @@ def __init__(self, name: str, filename: Path) -> None:
class ThemeFactory:
"""A factory class for LaTeX Themes."""
- def __init__(self, app: Sphinx) -> None:
+ def __init__(self, *, srcdir: Path, config: Config) -> None:
self.themes: dict[str, Theme] = {}
- self.theme_paths = [app.srcdir / p for p in app.config.latex_theme_path]
- self.config = app.config
- self.load_builtin_themes(app.config)
+ self.theme_paths = [srcdir / p for p in config.latex_theme_path]
+ self.config = config
+ self.load_builtin_themes(config)
def load_builtin_themes(self, config: Config) -> None:
"""Load built-in themes."""
diff --git a/sphinx/builders/linkcheck.py b/sphinx/builders/linkcheck.py
index de102873036..c1b199c5493 100644
--- a/sphinx/builders/linkcheck.py
+++ b/sphinx/builders/linkcheck.py
@@ -98,7 +98,7 @@ def finish(self) -> None:
self.process_result(result)
if self.broken_hyperlinks or self.timed_out_hyperlinks:
- self.app.statuscode = 1
+ self._app.statuscode = 1
def process_result(self, result: CheckResult) -> None:
filename = self.env.doc2path(result.docname, False)
@@ -130,7 +130,7 @@ def process_result(self, result: CheckResult) -> None:
case _Status.WORKING:
logger.info(darkgreen('ok ') + f'{res_uri}{result.message}') # NoQA: G003
case _Status.TIMEOUT:
- if self.app.quiet:
+ if self.config.verbosity < 0:
msg = 'timeout ' + f'{res_uri}{result.message}'
logger.warning(msg, location=(result.docname, result.lineno))
else:
@@ -145,7 +145,7 @@ def process_result(self, result: CheckResult) -> None:
)
self.timed_out_hyperlinks += 1
case _Status.BROKEN:
- if self.app.quiet:
+ if self.config.verbosity < 0:
logger.warning(
__('broken link: %s (%s)'),
res_uri,
diff --git a/sphinx/builders/texinfo.py b/sphinx/builders/texinfo.py
index 79afafab84d..6611be05465 100644
--- a/sphinx/builders/texinfo.py
+++ b/sphinx/builders/texinfo.py
@@ -198,7 +198,7 @@ def copy_image_files(self, targetname: str) -> None:
__('copying images... '),
'brown',
len(self.images),
- self.app.verbosity,
+ self.config.verbosity,
stringify_func=stringify_func,
):
dest = self.images[src]
diff --git a/sphinx/config.py b/sphinx/config.py
index 2498ada6c56..3e16c151ebd 100644
--- a/sphinx/config.py
+++ b/sphinx/config.py
@@ -333,6 +333,8 @@ def __init__(
raw_config['extensions'] = extensions
self.extensions: list[str] = raw_config.get('extensions', [])
+ self._verbosity: int = 0 # updated in Sphinx.__init__()
+
@property
def values(self) -> dict[str, _Opt]:
return self._options
@@ -341,6 +343,10 @@ def values(self) -> dict[str, _Opt]:
def overrides(self) -> dict[str, Any]:
return self._overrides
+ @property
+ def verbosity(self) -> int:
+ return self._verbosity
+
@classmethod
def read(
cls: type[Config],
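A self-contained sketch of the pattern introduced here (``DemoConfig`` is
illustrative; the real ``Config`` carries far more state):

    class DemoConfig:
        def __init__(self) -> None:
            self._verbosity: int = 0  # set once by the application

        @property
        def verbosity(self) -> int:
            # Read-only view for builders and extensions.
            return self._verbosity

    config = DemoConfig()
    config._verbosity = -1  # what Sphinx.__init__() does in quiet mode
    assert config.verbosity < 0  # replaces the old ``self.app.quiet`` checks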
diff --git a/sphinx/ext/coverage.py b/sphinx/ext/coverage.py
index b2d08603f38..5c5a8d51ab3 100644
--- a/sphinx/ext/coverage.py
+++ b/sphinx/ext/coverage.py
@@ -255,7 +255,7 @@ def write_c_coverage(self) -> None:
for typ, name in sorted(undoc):
op.write(f' * {name:<50} [{typ:>9}]\n')
if self.config.coverage_show_missing_items:
- if self.app.quiet:
+ if self.config.verbosity < 0:
logger.warning(
__('undocumented c api: %s [%s] in file %s'),
name,
@@ -446,7 +446,7 @@ def write_py_coverage(self) -> None:
op.write('Functions:\n')
op.writelines(f' * {x}\n' for x in undoc['funcs'])
if self.config.coverage_show_missing_items:
- if self.app.quiet:
+ if self.config.verbosity < 0:
for func in undoc['funcs']:
logger.warning(
__('undocumented python function: %s :: %s'),
@@ -468,7 +468,7 @@ def write_py_coverage(self) -> None:
if not methods:
op.write(f' * {class_name}\n')
if self.config.coverage_show_missing_items:
- if self.app.quiet:
+ if self.config.verbosity < 0:
logger.warning(
__('undocumented python class: %s :: %s'),
name,
@@ -485,7 +485,7 @@ def write_py_coverage(self) -> None:
op.write(f' * {class_name} -- missing methods:\n\n')
op.writelines(f' - {x}\n' for x in methods)
if self.config.coverage_show_missing_items:
- if self.app.quiet:
+ if self.config.verbosity < 0:
for meth in methods:
logger.warning(
__(
diff --git a/sphinx/ext/doctest.py b/sphinx/ext/doctest.py
index 9610e24d58d..da40a63e781 100644
--- a/sphinx/ext/doctest.py
+++ b/sphinx/ext/doctest.py
@@ -341,7 +341,7 @@ def _out(self, text: str) -> None:
self.outfile.write(text)
def _warn_out(self, text: str) -> None:
- if self.app.quiet:
+ if self.config.verbosity < 0:
logger.warning(text)
else:
logger.info(text, nonl=True)
@@ -360,7 +360,7 @@ def s(v: int) -> str:
header = 'Doctest summary'
if self.total_failures or self.setup_failures or self.cleanup_failures:
- self.app.statuscode = 1
+ self._app.statuscode = 1
if self.config.doctest_fail_fast:
header = f'{header} (exiting after first failed test)'
underline = '=' * len(header)
diff --git a/sphinx/theming.py b/sphinx/theming.py
index a27dbfe0973..9e06faaeffc 100644
--- a/sphinx/theming.py
+++ b/sphinx/theming.py
@@ -28,6 +28,8 @@
from typing import Any, Required, TypedDict
from sphinx.application import Sphinx
+ from sphinx.config import Config
+ from sphinx.registry import SphinxComponentRegistry
class _ThemeToml(TypedDict, total=False):
theme: Required[_ThemeTomlTheme]
@@ -148,13 +150,21 @@ def _cleanup(self) -> None:
class HTMLThemeFactory:
"""A factory class for HTML Themes."""
- def __init__(self, app: Sphinx) -> None:
+ def __init__(
+ self,
+ *,
+ confdir: Path,
+ app: Sphinx,
+ config: Config,
+ registry: SphinxComponentRegistry,
+ ) -> None:
self._app = app
- self._themes = app.registry.html_themes
+ self._confdir = confdir
+ self._themes = registry.html_themes
self._entry_point_themes: dict[str, Callable[[], None]] = {}
self._load_builtin_themes()
- if getattr(app.config, 'html_theme_path', None):
- self._load_additional_themes(app.config.html_theme_path)
+ if html_theme_path := getattr(config, 'html_theme_path', None):
+ self._load_additional_themes(html_theme_path)
self._load_entry_point_themes()
def _load_builtin_themes(self) -> None:
@@ -166,7 +176,7 @@ def _load_builtin_themes(self) -> None:
def _load_additional_themes(self, theme_paths: list[str]) -> None:
"""Load additional themes placed at specified directories."""
for theme_path in theme_paths:
- abs_theme_path = (self._app.confdir / theme_path).resolve()
+ abs_theme_path = (self._confdir / theme_path).resolve()
themes = self._find_themes(abs_theme_path)
for name, theme in themes.items():
self._themes[name] = _StrPath(theme)
diff --git a/sphinx/transforms/__init__.py b/sphinx/transforms/__init__.py
index 6857e05fe58..7ba50aaa240 100644
--- a/sphinx/transforms/__init__.py
+++ b/sphinx/transforms/__init__.py
@@ -66,7 +66,7 @@ def app(self) -> Sphinx:
cls_module = self.__class__.__module__
cls_name = self.__class__.__qualname__
_deprecation_warning(cls_module, f'{cls_name}.app', remove=(10, 0))
- return self.env.app
+ return self.env._app
@property
def env(self) -> BuildEnvironment:
diff --git a/tests/test_extensions/test_ext_coverage.py b/tests/test_extensions/test_ext_coverage.py
index 7422cd3560f..32fc2dba2d7 100644
--- a/tests/test_extensions/test_ext_coverage.py
+++ b/tests/test_extensions/test_ext_coverage.py
@@ -117,7 +117,7 @@ def test_show_missing_items(app: SphinxTestApp) -> None:
'coverage', testroot='root', confoverrides={'coverage_show_missing_items': True}
)
def test_show_missing_items_quiet(app: SphinxTestApp) -> None:
- app.quiet = True
+ app.config._verbosity = -1 # mimics status=None / app.quiet = True
app.build(force_all=True)
assert (
diff --git a/tests/test_theming/test_theming.py b/tests/test_theming/test_theming.py
index 173e0c9c64b..8ff3919c967 100644
--- a/tests/test_theming/test_theming.py
+++ b/tests/test_theming/test_theming.py
@@ -159,10 +159,10 @@ def test_dark_style(app, monkeypatch):
app.build()
assert (app.outdir / '_static' / 'pygments_dark.css').exists()
- css_file, properties = app.registry.css_files[0]
- assert css_file == 'pygments_dark.css'
- assert 'media' in properties
- assert properties['media'] == '(prefers-color-scheme: dark)'
+ css_file = app.builder._css_files[1]
+ assert css_file.filename == '_static/pygments_dark.css'
+ assert 'media' in css_file.attributes
+ assert css_file.attributes['media'] == '(prefers-color-scheme: dark)'
assert sorted(f.filename for f in app.builder._css_files) == [
'_static/classic.css',
From 5392f0f2ed4879949c7f8dc02e3ac43acadea8b3 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sun, 8 Jun 2025 19:52:24 +0100
Subject: [PATCH 119/466] Extract ``_read_conf_py()`` from ``Config.read()``
(#13633)
---
sphinx/application.py | 5 ++--
sphinx/config.py | 49 ++++++++++++++++----------------
tests/test_config/test_config.py | 17 +++++------
3 files changed, 37 insertions(+), 34 deletions(-)
diff --git a/sphinx/application.py b/sphinx/application.py
index 3874a6afa52..8117eecf340 100644
--- a/sphinx/application.py
+++ b/sphinx/application.py
@@ -255,15 +255,16 @@ def __init__(
self.statuscode = 0
# read config
+ overrides = confoverrides or {}
self.tags = Tags(tags)
if confdir is None:
# set confdir to srcdir if -C given (!= no confdir); a few pieces
# of code expect a confdir to be set
self.confdir = self.srcdir
- self.config = Config({}, confoverrides or {})
+ self.config = Config({}, overrides)
else:
self.confdir = _StrPath(confdir).resolve()
- self.config = Config.read(self.confdir, confoverrides or {}, self.tags)
+ self.config = Config.read(self.confdir, overrides=overrides, tags=self.tags)
self.config._verbosity = -1 if self.quiet else self.verbosity
# set up translation infrastructure
diff --git a/sphinx/config.py b/sphinx/config.py
index 3e16c151ebd..a43b6cc82d0 100644
--- a/sphinx/config.py
+++ b/sphinx/config.py
@@ -351,8 +351,9 @@ def verbosity(self) -> int:
def read(
cls: type[Config],
confdir: str | os.PathLike[str],
- overrides: dict[str, Any] | None = None,
- tags: Tags | None = None,
+ *,
+ overrides: dict[str, Any],
+ tags: Tags,
) -> Config:
"""Create a Config object from configuration file."""
filename = Path(confdir, CONFIG_FILENAME)
@@ -360,23 +361,7 @@ def read(
raise ConfigError(
__("config directory doesn't contain a conf.py file (%s)") % confdir
)
- namespace = eval_config_file(filename, tags)
-
- # Note: Old sphinx projects have been configured as "language = None" because
- # sphinx-quickstart previously generated this by default.
- # To keep compatibility, they should be fallback to 'en' for a while
- # (This conversion should not be removed before 2025-01-01).
- if namespace.get('language', ...) is None:
- logger.warning(
- __(
- "Invalid configuration value found: 'language = None'. "
- 'Update your configuration to a valid language code. '
- "Falling back to 'en' (English)."
- )
- )
- namespace['language'] = 'en'
-
- return cls(namespace, overrides)
+ return _read_conf_py(filename, overrides=overrides, tags=tags)
def convert_overrides(self, name: str, value: str) -> Any:
opt = self._options[name]
@@ -589,12 +574,28 @@ def __setstate__(self, state: dict[str, Any]) -> None:
self.__dict__.update(state)
-def eval_config_file(
- filename: str | os.PathLike[str], tags: Tags | None
-) -> dict[str, Any]:
- """Evaluate a config file."""
- filename = Path(filename)
+def _read_conf_py(conf_path: Path, *, overrides: dict[str, Any], tags: Tags) -> Config:
+ """Create a Config object from a conf.py file."""
+ namespace = eval_config_file(conf_path, tags)
+ # Note: Old sphinx projects have been configured as "language = None" because
+ # sphinx-quickstart previously generated this by default.
+ # To keep compatibility, they should fall back to 'en' for a while
+ # (This conversion should not be removed before 2025-01-01).
+ if namespace.get('language', ...) is None:
+ logger.warning(
+ __(
+ "Invalid configuration value found: 'language = None'. "
+ 'Update your configuration to a valid language code. '
+ "Falling back to 'en' (English)."
+ )
+ )
+ namespace['language'] = 'en'
+ return Config(namespace, overrides)
+
+
+def eval_config_file(filename: Path, tags: Tags) -> dict[str, Any]:
+ """Evaluate a config file."""
namespace: dict[str, Any] = {
'__file__': str(filename),
'tags': tags,
diff --git a/tests/test_config/test_config.py b/tests/test_config/test_config.py
index fc1ba4c7321..b3392e654b2 100644
--- a/tests/test_config/test_config.py
+++ b/tests/test_config/test_config.py
@@ -19,6 +19,7 @@
)
from sphinx.deprecation import RemovedInSphinx90Warning
from sphinx.errors import ConfigError, ExtensionError, VersionRequirementError
+from sphinx.util.tags import Tags
if TYPE_CHECKING:
from collections.abc import Iterable
@@ -139,7 +140,7 @@ def test_core_config(app: SphinxTestApp) -> None:
def test_config_not_found(tmp_path):
with pytest.raises(ConfigError):
- Config.read(tmp_path)
+ Config.read(tmp_path, overrides={}, tags=Tags())
@pytest.mark.parametrize('protocol', list(range(pickle.HIGHEST_PROTOCOL)))
@@ -394,12 +395,12 @@ def test_errors_warnings(logger, tmp_path):
# test the error for syntax errors in the config file
(tmp_path / 'conf.py').write_text('project = \n', encoding='ascii')
with pytest.raises(ConfigError) as excinfo:
- Config.read(tmp_path, {}, None)
+ Config.read(tmp_path, overrides={}, tags=Tags())
assert 'conf.py' in str(excinfo.value)
# test the automatic conversion of 2.x only code in configs
(tmp_path / 'conf.py').write_text('project = u"Jägermeister"\n', encoding='utf8')
- cfg = Config.read(tmp_path, {}, None)
+ cfg = Config.read(tmp_path, overrides={}, tags=Tags())
assert cfg.project == 'Jägermeister'
assert logger.called is False
@@ -440,7 +441,7 @@ def test_config_eol(logger, tmp_path):
configfile = tmp_path / 'conf.py'
for eol in (b'\n', b'\r\n'):
configfile.write_bytes(b'project = "spam"' + eol)
- cfg = Config.read(tmp_path, {}, None)
+ cfg = Config.read(tmp_path, overrides={}, tags=Tags())
assert cfg.project == 'spam'
assert logger.called is False
@@ -678,7 +679,7 @@ def test_conf_py_language_none(tmp_path):
(tmp_path / 'conf.py').write_text('language = None', encoding='utf-8')
# When we load conf.py into a Config object
- cfg = Config.read(tmp_path, {}, None)
+ cfg = Config.read(tmp_path, overrides={}, tags=Tags())
# Then the language is coerced to English
assert cfg.language == 'en'
@@ -691,7 +692,7 @@ def test_conf_py_language_none_warning(logger, tmp_path):
(tmp_path / 'conf.py').write_text('language = None', encoding='utf-8')
# When we load conf.py into a Config object
- Config.read(tmp_path, {}, None)
+ Config.read(tmp_path, overrides={}, tags=Tags())
# Then a warning is raised
assert logger.warning.called
@@ -708,7 +709,7 @@ def test_conf_py_no_language(tmp_path):
(tmp_path / 'conf.py').touch()
# When we load conf.py into a Config object
- cfg = Config.read(tmp_path, {}, None)
+ cfg = Config.read(tmp_path, overrides={}, tags=Tags())
# Then the language is coerced to English
assert cfg.language == 'en'
@@ -720,7 +721,7 @@ def test_conf_py_nitpick_ignore_list(tmp_path):
(tmp_path / 'conf.py').touch()
# When we load conf.py into a Config object
- cfg = Config.read(tmp_path, {}, None)
+ cfg = Config.read(tmp_path, overrides={}, tags=Tags())
# Then the default nitpick_ignore[_regex] is an empty list
assert cfg.nitpick_ignore == []
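For callers outside the test suite, the migration is mechanical; a sketch,
assuming Sphinx at this commit and a hypothetical ``docs/`` directory
containing a ``conf.py``:

    from sphinx.config import Config
    from sphinx.util.tags import Tags

    # Before: Config.read('docs', {}, None)
    # After: both arguments are keyword-only and required.
    cfg = Config.read('docs', overrides={'language': 'en'}, tags=Tags())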
From 39c81254de1708426b929611e418c76fb15b39aa Mon Sep 17 00:00:00 2001
From: Victor Wheeler
Date: Sun, 8 Jun 2025 13:35:36 -0600
Subject: [PATCH 120/466] Update ``extending_build.rst`` to match
``tutorials/example/todo.py`` (#13515)
Co-authored-by: Adam Turner <9087854+aa-turner@users.noreply.github.com>
---
doc/development/tutorials/extending_build.rst | 49 ++++++++++---------
1 file changed, 25 insertions(+), 24 deletions(-)
diff --git a/doc/development/tutorials/extending_build.rst b/doc/development/tutorials/extending_build.rst
index 4d3606a0a33..9894d656fed 100644
--- a/doc/development/tutorials/extending_build.rst
+++ b/doc/development/tutorials/extending_build.rst
@@ -143,7 +143,7 @@ Looking first at the ``TodolistDirective`` directive:
.. literalinclude:: examples/todo.py
:language: python
:linenos:
- :lines: 24-27
+ :pyobject: TodolistDirective
It's very simple, creating and returning an instance of our ``todolist`` node
class. The ``TodolistDirective`` directive itself has neither content nor
@@ -153,7 +153,7 @@ directive:
.. literalinclude:: examples/todo.py
:language: python
:linenos:
- :lines: 30-53
+ :pyobject: TodoDirective
Several important things are covered here. First, as you can see, we're now
subclassing the :class:`~sphinx.util.docutils.SphinxDirective` helper class
@@ -168,16 +168,16 @@ new unique integer on each call and therefore leads to unique target names. The
target node is instantiated without any text (the first two arguments).
On creating the admonition node, the content body of the directive is parsed using
-``self.state.nested_parse``. The first argument gives the content body, and
-the second one gives content offset. The third argument gives the parent node
-of parsed result, in our case the ``todo`` node. Following this, the ``todo``
-node is added to the environment. This is needed to be able to create a list of
-all todo entries throughout the documentation, in the place where the author
-puts a ``todolist`` directive. For this case, the environment attribute
-``todo_all_todos`` is used (again, the name should be unique, so it is prefixed
-by the extension name). It does not exist when a new environment is created, so
-the directive must check and create it if necessary. Various information about
-the todo entry's location are stored along with a copy of the node.
+``self.parse_content_to_nodes()``.
+Following this, the ``todo`` node is added to the environment.
+This is needed to be able to create a list of all todo entries throughout
+the documentation, in the place where the author puts a ``todolist`` directive.
+For this case, the environment attribute ``todo_all_todos`` is used
+(again, the name should be unique, so it is prefixed by the extension name).
+It does not exist when a new environment is created, so the directive must
+check and create it if necessary.
+Various information about the todo entry's location is stored along with
+a copy of the node.
In the last line, the nodes that should be put into the doctree are returned:
the target node and the admonition node.
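A minimal sketch of the API the rewritten paragraph describes
(``TodoSketchDirective`` is illustrative and omits the target node and
environment bookkeeping covered by the tutorial; assumes a Sphinx version that
provides ``parse_content_to_nodes()``):

    from docutils import nodes
    from sphinx.util.docutils import SphinxDirective

    class TodoSketchDirective(SphinxDirective):
        has_content = True

        def run(self):
            node = nodes.admonition()
            node += nodes.title(text='Todo')
            # Parses the directive body, replacing state.nested_parse()
            node += self.parse_content_to_nodes()
            return [node]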
@@ -211,7 +211,7 @@ the :event:`env-purge-doc` event:
.. literalinclude:: examples/todo.py
:language: python
:linenos:
- :lines: 56-61
+ :pyobject: purge_todos
Since we store information from source files in the environment, which is
persistent, it may become out of date when the source file changes. Therefore,
@@ -229,7 +229,7 @@ to be merged:
.. literalinclude:: examples/todo.py
:language: python
:linenos:
- :lines: 64-68
+ :pyobject: merge_todos
The other handler belongs to the :event:`doctree-resolved` event:
@@ -237,12 +237,13 @@ The other handler belongs to the :event:`doctree-resolved` event:
.. literalinclude:: examples/todo.py
:language: python
:linenos:
- :lines: 71-113
+ :pyobject: process_todo_nodes
-The :event:`doctree-resolved` event is emitted at the end of :ref:`phase 3
-(resolving) <build-phases>` and allows custom resolving to be done. The handler
-we have written for this event is a bit more involved. If the
-``todo_include_todos`` config value (which we'll describe shortly) is false,
+The :event:`doctree-resolved` event is emitted for each document that is
+about to be written at the end of :ref:`phase 3 (resolving) <build-phases>`
+and allows custom resolving to be done on that document.
+The handler we have written for this event is a bit more involved.
+If the ``todo_include_todos`` config value (which we'll describe shortly) is false,
all ``todo`` and ``todolist`` nodes are removed from the documents. If not,
``todo`` nodes just stay where and how they are. ``todolist`` nodes are
replaced by a list of todo entries, complete with backlinks to the location
@@ -266,17 +267,17 @@ the other parts of our extension. Let's look at our ``setup`` function:
.. literalinclude:: examples/todo.py
:language: python
:linenos:
- :lines: 116-
+ :pyobject: setup
The calls in this function refer to the classes and functions we added earlier.
What the individual calls do is the following:
* :meth:`~Sphinx.add_config_value` lets Sphinx know that it should recognize the
- new *config value* ``todo_include_todos``, whose default value should be
- ``False`` (this also tells Sphinx that it is a boolean value).
+ new *config value* ``todo_include_todos``, whose default value is ``False``
+ (which also tells Sphinx that it is a boolean value).
- If the third argument was ``'html'``, HTML documents would be full rebuild if the
- config value changed its value. This is needed for config values that
+ If the third argument was ``'html'``, HTML documents would be fully rebuilt
+ if the config value changed its value. This is needed for config values that
influence reading (build :ref:`phase 1 (reading) <build-phases>`).
* :meth:`~Sphinx.add_node` adds a new *node class* to the build system. It also
From 1580f5f7fda2f741c8052fca7239948612bf4463 Mon Sep 17 00:00:00 2001
From: Victor Wheeler
Date: Sun, 8 Jun 2025 13:41:45 -0600
Subject: [PATCH 121/466] Note that the ``:doc:`` role is case-sensitive
(#13587)
---
doc/usage/referencing.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/usage/referencing.rst b/doc/usage/referencing.rst
index 2597c9ce597..571d3c798bc 100644
--- a/doc/usage/referencing.rst
+++ b/doc/usage/referencing.rst
@@ -136,8 +136,8 @@ There is also a way to directly link to documents:
.. rst:role:: doc
- Link to the specified document; the document name can be specified in
- absolute or relative fashion. For example, if the reference
+ Link to the specified document; the document name can be a relative or absolute
+ path and is always case-sensitive, even on Windows. For example, if the reference
``:doc:`parrot``` occurs in the document ``sketches/index``, then the link
refers to ``sketches/parrot``. If the reference is ``:doc:`/people``` or
``:doc:`../people```, the link refers to ``people``.
From e4accf42fcaf2bb4ea4b3055719076d1000e03ba Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Sun, 8 Jun 2025 23:09:43 +0100
Subject: [PATCH 122/466] Add private reference to the registry for builders
---
sphinx/builders/__init__.py | 7 ++++---
sphinx/builders/changes.py | 2 +-
sphinx/builders/html/__init__.py | 12 ++++++------
sphinx/builders/latex/__init__.py | 2 +-
4 files changed, 12 insertions(+), 11 deletions(-)
diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py
index 4e116732e7a..b855168f817 100644
--- a/sphinx/builders/__init__.py
+++ b/sphinx/builders/__init__.py
@@ -125,6 +125,7 @@ def __init__(self, app: Sphinx, env: BuildEnvironment) -> None:
self.tags.add(self.name)
self.tags.add(f'format_{self.format}')
self.tags.add(f'builder_{self.name}')
+ self._registry = app.registry
# images that need to be copied over (source -> dest)
self.images: dict[str, str] = {}
@@ -150,7 +151,7 @@ def _translator(self) -> NullTranslations | None:
def get_translator_class(self, *args: Any) -> type[nodes.NodeVisitor]:
"""Return a class of translator."""
- return self.env._registry.get_translator_class(self)
+ return self._registry.get_translator_class(self)
def create_translator(self, *args: Any) -> nodes.NodeVisitor:
"""Return an instance of translator.
@@ -158,7 +159,7 @@ def create_translator(self, *args: Any) -> nodes.NodeVisitor:
This method returns an instance of ``default_translator_class`` by default.
Users can replace the translator class with ``app.set_translator()`` API.
"""
- return self.env._registry.create_translator(self, *args)
+ return self._registry.create_translator(self, *args)
# helper methods
def init(self) -> None:
@@ -643,7 +644,7 @@ def read_doc(self, docname: str, *, _cache: bool = True) -> None:
filename = str(env.doc2path(docname))
filetype = get_filetype(self._app.config.source_suffix, filename)
- publisher = self.env._registry.get_publisher(self._app, filetype)
+ publisher = self._registry.get_publisher(self._app, filetype)
self.env.current_document._parser = publisher.parser
# record_dependencies is mutable even though it is in settings,
# explicitly re-initialise for each document
diff --git a/sphinx/builders/changes.py b/sphinx/builders/changes.py
index 059a7d1b055..99d46fa0486 100644
--- a/sphinx/builders/changes.py
+++ b/sphinx/builders/changes.py
@@ -34,7 +34,7 @@ def init(self) -> None:
confdir=self.confdir,
app=self._app,
config=self.config,
- registry=self.env._registry,
+ registry=self._registry,
)
self.theme = theme_factory.create('default')
self.templates.init(self, self.theme)
diff --git a/sphinx/builders/html/__init__.py b/sphinx/builders/html/__init__.py
index 1195d08beb6..de49f89bbb4 100644
--- a/sphinx/builders/html/__init__.py
+++ b/sphinx/builders/html/__init__.py
@@ -232,7 +232,7 @@ def init_templates(self) -> None:
confdir=self.confdir,
app=self._app,
config=self.config,
- registry=self.env._registry,
+ registry=self._registry,
)
theme_name, theme_options = self.get_theme_config()
self.theme = theme_factory.create(theme_name)
@@ -284,7 +284,7 @@ def init_css_files(self) -> None:
for filename in self._get_style_filenames():
self.add_css_file(filename, priority=200)
- for filename, attrs in self.env._registry.css_files:
+ for filename, attrs in self._registry.css_files:
self.add_css_file(filename, **attrs)
for filename, attrs in self.get_builder_config('css_files', 'html'):
@@ -311,7 +311,7 @@ def init_js_files(self) -> None:
self.add_js_file('doctools.js', priority=200)
self.add_js_file('sphinx_highlight.js', priority=200)
- for filename, attrs in self.env._registry.js_files:
+ for filename, attrs in self._registry.js_files:
self.add_js_file(filename or '', **attrs)
for filename, attrs in self.get_builder_config('js_files', 'html'):
@@ -336,7 +336,7 @@ def math_renderer_name(self) -> str | None:
return name
else:
# not given: choose a math_renderer from registered ones as possible
- renderers = list(self.env._registry.html_inline_math_renderers)
+ renderers = list(self._registry.html_inline_math_renderers)
if len(renderers) == 1:
# only default math_renderer (mathjax) is registered
return renderers[0]
@@ -524,9 +524,9 @@ def prepare_writing(self, docnames: Set[str]) -> None:
))
# add assets registered after ``Builder.init()``.
- for css_filename, attrs in self.env._registry.css_files:
+ for css_filename, attrs in self._registry.css_files:
self.add_css_file(css_filename, **attrs)
- for js_filename, attrs in self.env._registry.js_files:
+ for js_filename, attrs in self._registry.js_files:
self.add_js_file(js_filename or '', **attrs)
# back up _css_files and _js_files to allow adding CSS/JS files to a specific page.
diff --git a/sphinx/builders/latex/__init__.py b/sphinx/builders/latex/__init__.py
index d5e4a779aa1..d22c959b276 100644
--- a/sphinx/builders/latex/__init__.py
+++ b/sphinx/builders/latex/__init__.py
@@ -211,7 +211,7 @@ def init_context(self) -> None:
def update_context(self) -> None:
"""Update template variables for .tex file just before writing."""
# Apply extension settings to context
- registry = self.env._registry
+ registry = self._registry
self.context['packages'] = registry.latex_packages
self.context['packages_after_hyperref'] = registry.latex_packages_after_hyperref
From 4d860475d77f9cd6580df22356c3d94e4328d06d Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Mon, 9 Jun 2025 21:39:39 +0100
Subject: [PATCH 123/466] Deprecate ``Parser.set_application()`` (#13637)
---
CHANGES.rst | 4 ++++
sphinx/parsers.py | 5 ++++-
sphinx/registry.py | 3 ++-
sphinx/testing/restructuredtext.py | 3 ++-
tests/test_markup/test_parser.py | 3 ++-
5 files changed, 14 insertions(+), 4 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 9bd8abece0e..9b6861dd14a 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -14,6 +14,10 @@ Deprecated
including ``builder.app``, ``env.app``, ``events.app``,
and ``SphinxTransform.app``.
Patch by Adam Turner.
+* #13637: Deprecate the :py:meth:`!set_application` method
+ of :py:class:`~sphinx.parsers.Parser` objects.
+ Sphinx now directly sets the :py:attr:`!config` and :py:attr:`!env` attributes.
+ Patch by Adam Turner.
Features added
--------------
diff --git a/sphinx/parsers.py b/sphinx/parsers.py
index 70ff3eaae62..698cd12e76d 100644
--- a/sphinx/parsers.py
+++ b/sphinx/parsers.py
@@ -10,6 +10,7 @@
from docutils.statemachine import StringList
from docutils.transforms.universal import SmartQuotes
+from sphinx.deprecation import _deprecation_warning
from sphinx.util.rst import append_epilog, prepend_prolog
if TYPE_CHECKING:
@@ -44,7 +45,9 @@ def set_application(self, app: Sphinx) -> None:
:param sphinx.application.Sphinx app: Sphinx application object
"""
- self._app = app
+ cls_module = self.__class__.__module__
+ cls_name = self.__class__.__qualname__
+ _deprecation_warning(cls_module, f'{cls_name}.set_application', remove=(10, 0))
self.config = app.config
self.env = app.env
diff --git a/sphinx/registry.py b/sphinx/registry.py
index 973aa6dfed4..529036c8f8e 100644
--- a/sphinx/registry.py
+++ b/sphinx/registry.py
@@ -379,7 +379,8 @@ def create_source_parser(self, app: Sphinx, filename: str) -> Parser:
parser_class = self.get_source_parser(filename)
parser = parser_class()
if isinstance(parser, SphinxParser):
- parser.set_application(app)
+ parser.config = app.config
+ parser.env = app.env
return parser
def add_translator(
diff --git a/sphinx/testing/restructuredtext.py b/sphinx/testing/restructuredtext.py
index b04b61a4021..b2ebcf23002 100644
--- a/sphinx/testing/restructuredtext.py
+++ b/sphinx/testing/restructuredtext.py
@@ -22,7 +22,8 @@ def parse(app: Sphinx, text: str, docname: str = 'index') -> nodes.document:
reader = SphinxStandaloneReader()
reader.setup(app)
parser = RSTParser()
- parser.set_application(app)
+ parser.config = app.config
+ parser.env = app.env
with sphinx_domains(env):
return publish_doctree(
text,
diff --git a/tests/test_markup/test_parser.py b/tests/test_markup/test_parser.py
index eb8ccf24f1d..dbaa5e8cb4e 100644
--- a/tests/test_markup/test_parser.py
+++ b/tests/test_markup/test_parser.py
@@ -16,7 +16,8 @@ def test_RSTParser_prolog_epilog(RSTStateMachine, app):
document = new_document('dummy.rst')
document.settings = Mock(tab_width=8, language_code='')
parser = RSTParser()
- parser.set_application(app)
+ parser.config = app.config
+ parser.env = app.env
# normal case
text = 'hello Sphinx world\nSphinx is a document generator'
From b544cfca21ea2cba854d963beb774a6848edfba0 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Mon, 9 Jun 2025 21:56:20 +0100
Subject: [PATCH 124/466] Rename ``SphinxBaseReader.setup()`` to
``_setup_transforms()`` (#13638)
---
sphinx/io.py | 22 ++++------------------
sphinx/testing/restructuredtext.py | 2 +-
2 files changed, 5 insertions(+), 19 deletions(-)
diff --git a/sphinx/io.py b/sphinx/io.py
index 26c8b756fab..f61c9d16378 100644
--- a/sphinx/io.py
+++ b/sphinx/io.py
@@ -25,6 +25,7 @@
from sphinx.application import Sphinx
from sphinx.environment import BuildEnvironment
+ from sphinx.registry import SphinxComponentRegistry
logger = logging.getLogger(__name__)
@@ -38,20 +39,6 @@ class SphinxBaseReader(standalone.Reader): # type: ignore[misc]
transforms: list[type[Transform]] = []
- def __init__(self, *args: Any, **kwargs: Any) -> None:
- from sphinx.application import Sphinx
-
- if len(args) > 0 and isinstance(args[0], Sphinx):
- self._app = args[0]
- self._env = self._app.env
- args = args[1:]
-
- super().__init__(*args, **kwargs)
-
- def setup(self, app: Sphinx) -> None:
- self._app = app # hold application object only for compatibility
- self._env = app.env
-
def get_transforms(self) -> list[type[Transform]]:
transforms = super().get_transforms() + self.transforms
@@ -83,9 +70,8 @@ def new_document(self) -> nodes.document:
class SphinxStandaloneReader(SphinxBaseReader):
"""A basic document reader for Sphinx."""
- def setup(self, app: Sphinx) -> None:
- self.transforms = self.transforms + app.registry.get_transforms()
- super().setup(app)
+ def _setup_transforms(self, *, registry: SphinxComponentRegistry) -> None:
+ self.transforms = self.transforms + registry.get_transforms()
def read(self, source: Input, parser: Parser, settings: Values) -> nodes.document: # type: ignore[type-arg]
self.source = source
@@ -130,7 +116,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
def create_publisher(app: Sphinx, filetype: str) -> Publisher:
reader = SphinxStandaloneReader()
- reader.setup(app)
+ reader._setup_transforms(registry=app.registry)
parser = app.registry.create_source_parser(app, filetype)
if parser.__class__.__name__ == 'CommonMarkParser' and parser.settings_spec == ():
diff --git a/sphinx/testing/restructuredtext.py b/sphinx/testing/restructuredtext.py
index b2ebcf23002..4439b128cd5 100644
--- a/sphinx/testing/restructuredtext.py
+++ b/sphinx/testing/restructuredtext.py
@@ -20,7 +20,7 @@ def parse(app: Sphinx, text: str, docname: str = 'index') -> nodes.document:
try:
app.env.current_document.docname = docname
reader = SphinxStandaloneReader()
- reader.setup(app)
+ reader._setup_transforms(registry=app.registry)
parser = RSTParser()
parser.config = app.config
parser.env = app.env
From 8eaa0ab60f044cd15435b8f54d1e079adddaa899 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Mon, 9 Jun 2025 22:27:03 +0100
Subject: [PATCH 125/466] Stop taking ``app`` in ``create_source_parser()``
(#13639)
---
CHANGES.rst | 4 ++++
sphinx/io.py | 2 +-
sphinx/registry.py | 8 +++++---
sphinx/transforms/i18n.py | 4 +---
4 files changed, 11 insertions(+), 7 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 9b6861dd14a..28c9daaf09d 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -7,6 +7,10 @@ Dependencies
Incompatible changes
--------------------
+* #13639: :py:meth:`!SphinxComponentRegistry.create_source_parser` no longer
+ has an *app* parameter, instead taking *config* and *env*.
+ Patch by Adam Turner.
+
Deprecated
----------
diff --git a/sphinx/io.py b/sphinx/io.py
index f61c9d16378..8124f3ddf38 100644
--- a/sphinx/io.py
+++ b/sphinx/io.py
@@ -118,7 +118,7 @@ def create_publisher(app: Sphinx, filetype: str) -> Publisher:
reader = SphinxStandaloneReader()
reader._setup_transforms(registry=app.registry)
- parser = app.registry.create_source_parser(app, filetype)
+ parser = app.registry.create_source_parser(filetype, config=app.config, env=app.env)
if parser.__class__.__name__ == 'CommonMarkParser' and parser.settings_spec == ():
# a workaround for recommonmark
# If recommonmark.AutoStrictify is enabled, the parser invokes reST parser
diff --git a/sphinx/registry.py b/sphinx/registry.py
index 529036c8f8e..1f5fef1821a 100644
--- a/sphinx/registry.py
+++ b/sphinx/registry.py
@@ -375,12 +375,14 @@ def get_source_parser(self, filetype: str) -> type[Parser]:
def get_source_parsers(self) -> dict[str, type[Parser]]:
return self.source_parsers
- def create_source_parser(self, app: Sphinx, filename: str) -> Parser:
+ def create_source_parser(
+ self, filename: str, *, config: Config, env: BuildEnvironment
+ ) -> Parser:
parser_class = self.get_source_parser(filename)
parser = parser_class()
if isinstance(parser, SphinxParser):
- parser.config = app.config
- parser.env = app.env
+ parser.config = config
+ parser.env = env
return parser
def add_translator(
diff --git a/sphinx/transforms/i18n.py b/sphinx/transforms/i18n.py
index bfacfcf1a96..27db99c542f 100644
--- a/sphinx/transforms/i18n.py
+++ b/sphinx/transforms/i18n.py
@@ -5,7 +5,6 @@
import contextlib
from re import DOTALL, match
from textwrap import indent
-from types import SimpleNamespace
from typing import TYPE_CHECKING, Any, TypeVar
from docutils import nodes
@@ -109,9 +108,8 @@ def publish_msgstr(
config.rst_prolog = None
reader = _SphinxI18nReader(registry=registry)
- app = SimpleNamespace(config=config, env=env, registry=registry)
filetype = get_filetype(config.source_suffix, source_path)
- parser = registry.create_source_parser(app, filetype) # type: ignore[arg-type]
+ parser = registry.create_source_parser(filetype, config=config, env=env)
doc = reader.read(
source=StringInput(
source=source, source_path=f'{source_path}:{source_line}:'
From 82736e0cee14cf5503a7510b37e03291cba23a13 Mon Sep 17 00:00:00 2001
From: Jean-François B. <2589111+jfbu@users.noreply.github.com>
Date: Mon, 9 Jun 2025 20:17:42 +0200
Subject: [PATCH 126/466] LaTeX: fix #13635 (cell containing table turns row
colors off)
---
sphinx/texinputs/sphinxlatextables.sty | 91 ++++++++++++++++++++++----
1 file changed, 77 insertions(+), 14 deletions(-)
diff --git a/sphinx/texinputs/sphinxlatextables.sty b/sphinx/texinputs/sphinxlatextables.sty
index 4114955e071..dda2711f2e3 100644
--- a/sphinx/texinputs/sphinxlatextables.sty
+++ b/sphinx/texinputs/sphinxlatextables.sty
@@ -1,7 +1,7 @@
%% TABLES (WITH SUPPORT FOR MERGED CELLS OF GENERAL CONTENTS)
%
% change this info string if making any custom modification
-\ProvidesPackage{sphinxlatextables}[2024/07/01 v7.4.0 tables]%
+\ProvidesPackage{sphinxlatextables}[2025/06/09 v8.3.0 tables]%
% Provides support for this output mark-up from Sphinx latex writer
% and table templates:
@@ -42,6 +42,11 @@
% - \sphinxthistablewithnocolorrowsstyle
% - \sphinxthistablewithvlinesstyle
% - \sphinxthistablewithnovlinesstyle
+% These conditionals were added at 8.3.0 so that nested tables do not break
+% row colors (#13635). Nested tables are only partially supported by Sphinx
+% LaTeX. The method used here requires no changes to either writer or templates.
+\newif\ifspx@intable
+\newif\ifspx@thistableisnested
%
% Also provides user command (see docs)
% - \sphinxcolorblend
@@ -62,8 +67,8 @@
\RequirePackage{tabulary}
% tabulary has a bug with its re-definition of \multicolumn in its first pass
% which is not \long. But now Sphinx does not use LaTeX's \multicolumn but its
-% own macro. Hence we don't even need to patch tabulary. See
-% sphinxpackagemulticell.sty
+% own macro. Hence we don't even need to patch tabulary.
+%
% X or S (Sphinx) may have meanings if some table package is loaded hence
% \X was chosen to avoid possibility of conflict
\newcolumntype{\X}[2]{p{\dimexpr
@@ -109,7 +114,8 @@
\LTpre\z@skip\LTpost\z@skip % set to zero longtable's own skips
\edef\sphinxbaselineskip{\dimexpr\the\dimexpr\baselineskip\relax\relax}%
\spx@inframedtrue % message to sphinxheavybox
- }%
+ \spx@table@setnestedflags
+}
% Compatibility with caption package
\def\sphinxthelongtablecaptionisattop{%
\spx@ifcaptionpackage{\noalign{\vskip-\belowcaptionskip}}{}%
@@ -124,7 +130,18 @@
% B. Table with tabular or tabulary
\def\sphinxattablestart{\par\vskip\dimexpr\sphinxtablepre\relax
\spx@inframedtrue % message to sphinxheavybox
+ \spx@table@setnestedflags
}%
+% MEMO: this happens inside a savenotes environment, hence the flags
+% are reset on exit from it.
+\def\spx@table@setnestedflags{% Issue #13635
+ \ifspx@intable
+ \let\spx@table@resetcolortbl\spx@nestedtable@resetcolortbl
+ \spx@thistableisnestedtrue
+ \else
+ \spx@intabletrue
+ \fi
+ }%
\let\sphinxattableend\sphinxatlongtableend
% This is used by tabular and tabulary templates
\newcommand*\sphinxcapstartof[1]{%
@@ -270,6 +287,9 @@
% cells (the code does inserts & tokens, see TN1b). It was decided to keep it
% simple with \sphinxstartmulticolumn...\sphinxstopmulticolumn.
%
+% **** ATTENTION: Sphinx does generate at least some nested tables in LaTeX
+% **** TODO: clarify whether the next paragraph means we must raise an error
+% **** if the LaTeX writer detects a merged cell inside a nested table.
% MEMO about nesting: if sphinxmulticolumn is encountered in a nested tabular
% inside a tabulary it will think to be at top level in the tabulary. But
% Sphinx generates no nested tables, and if some LaTeX macro uses internally a
@@ -857,7 +877,32 @@
}%
\the\everycr
}%
- \global\rownum\@ne % is done from inside table so ok with tabulary two passes
+ \ifspx@thistableisnested
+ % Note that tabulary does two passes, so we need to push the
+ % initial rownum and, after the first pass, we must reset it!
+ % Fortunately Sphinx LaTeX writer makes parent table tabular or
+ % longtable if a nested table is a tabulary. So we don't need to
+ % worry about distinguishing if this or parent is tabulary.
+ \ifx\TY@final\@undefined % tabular
+ \spx@gpush@rownum
+ \else
+ \ifx\equation$% tabulary, first pass
+ \spx@gpush@rownum
+ \else % tabulary, second pass
+ \spx@gpop@rownum % reset \rownum
+ \spx@gpush@rownum% and push it again.
+ \fi
+ \fi
+ % To make nested tables stand out in a color row, we toggle the parity.
+ % TODO: Double-check if compatible with method for color of header
+ % row.
+ % TODO: Perhaps better to use specific colors for nested tables?
+ % This would mean though adding new sphinxsetup parameters
+ % and extending the documentation...
+ \ifodd\rownum\global\rownum\z@\else\global\rownum\@ne\fi
+ \else
+ \global\rownum\@ne
+ \fi
\sphinxSwitchCaseRowColor\rownum % set up color for the first body row
\sphinxrowcolorON % has been done from \sphinxtoprule location but let's do
% it again in case \sphinxtabletoprulehook has been used
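Roughly, the parity toggle above does the following in Python terms (an illustrative sketch only; the real logic lives in the TeX conditionals):

    # A nested table starts its row counter on the opposite parity from
    # the enclosing row, so its first body row takes the other stripe
    # colour and stays visible inside the coloured parent row.
    def initial_nested_rownum(parent_rownum: int) -> int:
        # \ifodd\rownum -> reset to 0, otherwise to 1
        return 0 if parent_rownum % 2 == 1 else 1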
@@ -883,20 +928,34 @@
\let\sphinxtabledecrementrownum\@empty
% \sphinxtableafterendhook will be modified by colorrows class to execute
-% this after the table
+% this after the table.
\def\spx@table@resetcolortbl{%
\sphinxrowcolorOFF
- \spx@table@reset@CTeverycr
+ \spx@table@reset@CT@everycr
% this last bit is done in order for the \sphinxbottomrule from the "foot"
-% longtable template to be able to use same code as the \sphinxbottomrule
-% at end of table body; see \sphinxbooktabsspecialbottomrule code
+% part of the longtable template to be able to use same code as the
+% \sphinxbottomrule at end of table body; see \sphinxbooktabsspecialbottomrule.
\global\rownum\z@
+ \global\let\spx@rownum@stack\@empty
+}
+% Most of \spx@table@resetcolortbl must be avoided if the table is nested.
+% Besides, the sphinxTableRowColor must be reset because it has been
+% redefined by the cells of the nested table. So this is the alternative
+% macro which is executed on exit of a nested table.
+\def\spx@nestedtable@resetcolortbl{%
+ \spx@gpop@rownum
+ \sphinxSwitchCaseRowColor\rownum
}
-\def\spx@table@reset@CTeverycr{%
+\def\spx@table@reset@CT@everycr{%
% we should probably be more cautious and not hard-code here the colortbl
-% set-up; so the macro is defined without @ to fac
+% set-up.
\global\CT@everycr{\noalign{\global\let\CT@row@color\relax}\the\everycr}%
}
+\let\spx@rownum@stack\@empty
+\def\spx@gpush@rownum{\xdef\spx@rownum@stack{\the\rownum.\spx@rownum@stack}}%
+\def\spx@gpop@rownum{\afterassignment\spx@gpop@rownum@i
+ \global\rownum=\spx@rownum@stack\relax}
+\def\spx@gpop@rownum@i.#1\relax{\gdef\spx@rownum@stack{#1}}
% At last the style macros \sphinxthistablewithstandardstyle etc...
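The three macros above implement a small stack stored in a single macro as a dot-terminated string, e.g. "3.1." for 3 on top of 1; roughly, in Python (an illustrative sketch, not part of the patch):

    # Push prepends "<rownum>."; pop reads the digits before the first
    # dot back into \rownum and keeps the remainder as the new stack.
    def gpush(stack: str, rownum: int) -> str:
        return f'{rownum}.{stack}'

    def gpop(stack: str) -> tuple[int, str]:
        head, _, rest = stack.partition('.')
        return int(head), rest

    stack = gpush(gpush('', 1), 3)  # '3.1.'
    rownum, stack = gpop(stack)     # rownum == 3, stack == '1.'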
@@ -1047,10 +1106,13 @@ local use of booktabs table style}%
%
% this one is not set to \@empty by nocolorrows, because it looks harmless
% to execute it always, as it simply resets to standard colortbl state after
-% the table; so we don't need an @@ version for this one
+% the table [^1]; so we don't need an @@ version for this one.
+% .. [1]: which is bad if nested in another table. This is taken care of
+% at the level of \sphinxattablestart and \sphinxatlongtablestart.
\spx@prepend\spx@table@resetcolortbl\to\sphinxtableafterendhook
}
\def\spx@prepend#1\to#2{% attention about using this only with #2 "storage macro"
+% MEMO: #1 is prepended with no expansion, i.e. "as is".
\toks@{#1}%
\toks@\expandafter\expandafter\expandafter{\expandafter\the\expandafter\toks@#2}%
\edef#2{\the\toks@}%
@@ -1064,9 +1126,10 @@ local use of booktabs table style}%
\let\spx@table@startbodycolorrows\@empty
\let\sphinxtabledecrementrownum \@empty
% we don't worry about \sphinxtableafterendhook as the \spx@table@resetcolortbl
-% done at end can not do harm; and we could also have not bothered with the
+% done at end cannot do harm [^1]; and we could also have not bothered with the
% \sphinxtabledecrementrownum as its \rownum decrement, if active, is harmless
-% in non-colorrows context
+% in non-colorrows context.
+% .. [1]: if nested in another table it is modified to do no harm.
}
% (not so easy) implementation of the booktabscolorgaps option. This option
From dfa7254cf4cfbbcc0a199b3f3665be63101d22a5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Mon, 9 Jun 2025 23:43:48 +0200
Subject: [PATCH 127/466] Update CHANGES for fix of issues #13597 and #13635
---
CHANGES.rst | 9 ++++++---
1 file changed, 6 insertions(+), 3 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 28c9daaf09d..fefcd768f0f 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -37,9 +37,6 @@ Features added
Patch by Jean-François B.
* #13535: html search: Update to the latest version of Snowball (v3.0.1).
Patch by Adam Turner.
-* #13597: LaTeX: table nested in a merged cell leads to invalid LaTeX mark-up
- and PDF cannot be built.
- Patch by Jean-François B.
* #13704: autodoc: Detect :py:func:`typing_extensions.overload `
and :py:func:`~typing.final` decorators.
Patch by Spencer Brown.
@@ -53,9 +50,15 @@ Bugs fixed
Patch by Alicia Garcia-Raboso.
* #13528: Add tilde ``~`` prefix support for :rst:role:`py:deco`.
Patch by Shengyu Zhang and Adam Turner.
+* #13597: LaTeX: table nested in a merged cell leads to invalid LaTeX mark-up
+ and PDF cannot be built.
+ Patch by Jean-François B.
* #13619: LaTeX: possible duplicated footnotes in PDF from object signatures
(typically if :confval:`latex_show_urls` ``= 'footnote'``).
Patch by Jean-François B.
+* #13635: LaTeX: if a cell contains a table, row coloring is turned off for
+ the next table cells.
+ Patch by Jean-François B.
Testing
-------
From 9f5e80375e62723456e868362b5a218b0212a666 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Tue, 10 Jun 2025 00:46:48 +0200
Subject: [PATCH 128/466] LaTeX: protect the fix of #13635 against a particular
configuration (#13641)
---
sphinx/texinputs/sphinxlatextables.sty | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/sphinx/texinputs/sphinxlatextables.sty b/sphinx/texinputs/sphinxlatextables.sty
index dda2711f2e3..08efac559c0 100644
--- a/sphinx/texinputs/sphinxlatextables.sty
+++ b/sphinx/texinputs/sphinxlatextables.sty
@@ -943,7 +943,12 @@
% redefined by the cells of the nested table. So this is the alternative
% macro which is executed on exit of a nested table.
\def\spx@nestedtable@resetcolortbl{%
- \spx@gpop@rownum
+ \ifx\spx@rownum@stack\@empty\else
+% The stack can be empty if this is executed on exit of a nested table
+% whose parent table has received the "nocolorrows" class while colorrows
+% is activated globally (the default). So we protect against that case.
+ \spx@gpop@rownum
+ \fi
\sphinxSwitchCaseRowColor\rownum
}
\def\spx@table@reset@CT@everycr{%
From 97f946a598dfde8e9f84a15396e9805fef779f9a Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Tue, 10 Jun 2025 00:56:29 +0100
Subject: [PATCH 129/466] Refactor and simplify
``sphinx.io._create_publisher()`` (#13642)
---
sphinx/io.py | 26 +++++--------------
sphinx/registry.py | 6 +++--
sphinx/testing/restructuredtext.py | 2 +-
.../test_directive_object_description.py | 9 +++++--
4 files changed, 19 insertions(+), 24 deletions(-)
diff --git a/sphinx/io.py b/sphinx/io.py
index 8124f3ddf38..e2d299f8ae2 100644
--- a/sphinx/io.py
+++ b/sphinx/io.py
@@ -23,9 +23,7 @@
from docutils.parsers import Parser
from docutils.transforms import Transform
- from sphinx.application import Sphinx
from sphinx.environment import BuildEnvironment
- from sphinx.registry import SphinxComponentRegistry
logger = logging.getLogger(__name__)
@@ -70,8 +68,8 @@ def new_document(self) -> nodes.document:
class SphinxStandaloneReader(SphinxBaseReader):
"""A basic document reader for Sphinx."""
- def _setup_transforms(self, *, registry: SphinxComponentRegistry) -> None:
- self.transforms = self.transforms + registry.get_transforms()
+ def _setup_transforms(self, transforms: list[type[Transform]], /) -> None:
+ self.transforms = self.transforms + transforms
def read(self, source: Input, parser: Parser, settings: Values) -> nodes.document: # type: ignore[type-arg]
self.source = source
@@ -114,21 +112,11 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
-def create_publisher(app: Sphinx, filetype: str) -> Publisher:
+def _create_publisher(
+ *, env: BuildEnvironment, parser: Parser, transforms: list[type[Transform]]
+) -> Publisher:
reader = SphinxStandaloneReader()
- reader._setup_transforms(registry=app.registry)
-
- parser = app.registry.create_source_parser(filetype, config=app.config, env=app.env)
- if parser.__class__.__name__ == 'CommonMarkParser' and parser.settings_spec == ():
- # a workaround for recommonmark
- # If recommonmark.AutoStrictify is enabled, the parser invokes reST parser
- # internally. But recommonmark-0.4.0 does not provide settings_spec for reST
- # parser. As a workaround, this copies settings_spec for RSTParser to the
- # CommonMarkParser.
- from docutils.parsers.rst import Parser as RSTParser
-
- parser.settings_spec = RSTParser.settings_spec # type: ignore[misc]
-
+ reader._setup_transforms(transforms)
pub = Publisher(
reader=reader,
parser=parser,
@@ -137,7 +125,7 @@ def create_publisher(app: Sphinx, filetype: str) -> Publisher:
destination=NullOutput(),
)
# Propagate exceptions by default when used programmatically:
- defaults = {'traceback': True, **app.env.settings}
+ defaults = {'traceback': True, **env.settings}
# Set default settings
pub.get_settings(**defaults)
return pub
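A sketch of the resulting call sequence, mirroring the updated call sites later in this patch:

    # The registry builds the parser; sphinx.io only assembles the
    # Publisher from explicitly passed pieces, so no Sphinx application
    # object is needed here.
    parser = registry.create_source_parser(filetype, config=config, env=env)
    publisher = _create_publisher(
        env=env,
        parser=parser,
        transforms=registry.get_transforms(),
    )
    publisher.set_source(source_path=str(env.doc2path(docname)))
    publisher.publish()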
diff --git a/sphinx/registry.py b/sphinx/registry.py
index 1f5fef1821a..f8247296eb5 100644
--- a/sphinx/registry.py
+++ b/sphinx/registry.py
@@ -12,7 +12,7 @@
from sphinx.domains.std import GenericObject, Target
from sphinx.errors import ExtensionError, SphinxError, VersionRequirementError
from sphinx.extension import Extension
-from sphinx.io import create_publisher
+from sphinx.io import _create_publisher
from sphinx.locale import __
from sphinx.parsers import Parser as SphinxParser
from sphinx.roles import XRefRole
@@ -601,7 +601,9 @@ def get_publisher(self, app: Sphinx, filetype: str) -> Publisher:
return self.publishers[filetype]
except KeyError:
pass
- publisher = create_publisher(app, filetype)
+ parser = self.create_source_parser(filetype, config=app.config, env=app.env)
+ transforms = self.get_transforms()
+ publisher = _create_publisher(env=app.env, parser=parser, transforms=transforms)
self.publishers[filetype] = publisher
return publisher
diff --git a/sphinx/testing/restructuredtext.py b/sphinx/testing/restructuredtext.py
index 4439b128cd5..b17fd387946 100644
--- a/sphinx/testing/restructuredtext.py
+++ b/sphinx/testing/restructuredtext.py
@@ -20,7 +20,7 @@ def parse(app: Sphinx, text: str, docname: str = 'index') -> nodes.document:
try:
app.env.current_document.docname = docname
reader = SphinxStandaloneReader()
- reader._setup_transforms(registry=app.registry)
+ reader._setup_transforms(app.registry.get_transforms())
parser = RSTParser()
parser.config = app.config
parser.env = app.env
diff --git a/tests/test_directives/test_directive_object_description.py b/tests/test_directives/test_directive_object_description.py
index 210b9aac381..6b85c34d326 100644
--- a/tests/test_directives/test_directive_object_description.py
+++ b/tests/test_directives/test_directive_object_description.py
@@ -9,7 +9,7 @@
from docutils import nodes
from sphinx import addnodes
-from sphinx.io import create_publisher
+from sphinx.io import _create_publisher
from sphinx.testing import restructuredtext
from sphinx.util.docutils import sphinx_domains
@@ -22,8 +22,13 @@
def _doctree_for_test(
app: Sphinx, env: BuildEnvironment, docname: str
) -> nodes.document:
+ config = app.config
+ registry = app.registry
env.prepare_settings(docname)
- publisher = create_publisher(app, 'restructuredtext')
+ parser = registry.create_source_parser('restructuredtext', config=config, env=env)
+ publisher = _create_publisher(
+ env=env, parser=parser, transforms=registry.get_transforms()
+ )
with sphinx_domains(env):
publisher.set_source(source_path=str(env.doc2path(docname)))
publisher.publish()
From 58ebe2d3e9c3f335db23e945f0a7dd6fcc0d6877 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Tue, 10 Jun 2025 01:22:22 +0100
Subject: [PATCH 130/466] Avoid passing ``app`` to
``SphinxComponentRegistry._get_publisher()`` (#13643)
---
sphinx/builders/__init__.py | 4 +++-
sphinx/registry.py | 8 +++++---
2 files changed, 8 insertions(+), 4 deletions(-)
diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py
index b855168f817..18c2596b383 100644
--- a/sphinx/builders/__init__.py
+++ b/sphinx/builders/__init__.py
@@ -644,7 +644,9 @@ def read_doc(self, docname: str, *, _cache: bool = True) -> None:
filename = str(env.doc2path(docname))
filetype = get_filetype(self._app.config.source_suffix, filename)
- publisher = self._registry.get_publisher(self._app, filetype)
+ publisher = self._registry._get_publisher(
+ filetype, config=self.config, env=self.env
+ )
self.env.current_document._parser = publisher.parser
# record_dependencies is mutable even though it is in settings,
# explicitly re-initialise for each document
diff --git a/sphinx/registry.py b/sphinx/registry.py
index f8247296eb5..0d4151ca67b 100644
--- a/sphinx/registry.py
+++ b/sphinx/registry.py
@@ -596,14 +596,16 @@ def get_envversion(self, app: Sphinx) -> Mapping[str, int]:
return _get_env_version(app.extensions)
- def get_publisher(self, app: Sphinx, filetype: str) -> Publisher:
+ def _get_publisher(
+ self, filetype: str, *, config: Config, env: BuildEnvironment
+ ) -> Publisher:
try:
return self.publishers[filetype]
except KeyError:
pass
- parser = self.create_source_parser(filetype, config=app.config, env=app.env)
+ parser = self.create_source_parser(filetype, config=config, env=env)
transforms = self.get_transforms()
- publisher = _create_publisher(env=app.env, parser=parser, transforms=transforms)
+ publisher = _create_publisher(env=env, parser=parser, transforms=transforms)
self.publishers[filetype] = publisher
return publisher
From bb5e5454bcef531af75526b3b669e99bd3af411a Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Tue, 10 Jun 2025 01:50:48 +0100
Subject: [PATCH 131/466] Deprecate ``Parser.{config,env}`` (#13644)
---
CHANGES.rst | 3 ++-
sphinx/parsers.py | 38 +++++++++++++++++++-----------
sphinx/registry.py | 4 ++--
sphinx/testing/restructuredtext.py | 4 ++--
tests/test_markup/test_parser.py | 4 ++--
5 files changed, 32 insertions(+), 21 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index fefcd768f0f..7098714e670 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -20,7 +20,8 @@ Deprecated
Patch by Adam Turner.
* #13637: Deprecate the :py:meth:`!set_application` method
of :py:class:`~sphinx.parsers.Parser` objects.
- Sphinx now directly sets the :py:attr:`!config` and :py:attr:`!env` attributes.
+ Patch by Adam Turner.
+* #13644: Deprecate the :py:attr:`!Parser.config` and :py:attr:`!Parser.env` attributes.
Patch by Adam Turner.
Features added
diff --git a/sphinx/parsers.py b/sphinx/parsers.py
index 698cd12e76d..eb5e77d9387 100644
--- a/sphinx/parsers.py
+++ b/sphinx/parsers.py
@@ -24,21 +24,31 @@
class Parser(docutils.parsers.Parser):
- """A base class of source parsers.
+ """A base class for source parsers.
- The additional parsers should inherit this class
- instead of ``docutils.parsers.Parser``.
- Compared with ``docutils.parsers.Parser``,
- this class improves accessibility to Sphinx APIs.
-
- The subclasses can access sphinx core runtime objects (app, config and env).
+ Additional parsers should inherit from this class instead of
+ ``docutils.parsers.Parser``.
+ This class provides access to the core Sphinx objects *config* and *env*.
"""
- #: The config object
- config: Config
+ _config: Config
+ _env: BuildEnvironment
+
+ @property
+ def config(self) -> Config:
+ """The config object."""
+ cls_module = self.__class__.__module__
+ cls_name = self.__class__.__qualname__
+ _deprecation_warning(cls_module, f'{cls_name}.config', remove=(9, 0))
+ return self._config
- #: The environment object
- env: BuildEnvironment
+ @property
+ def env(self) -> BuildEnvironment:
+ """The environment object."""
+ cls_module = self.__class__.__module__
+ cls_name = self.__class__.__qualname__
+ _deprecation_warning(cls_module, f'{cls_name}.env', remove=(9, 0))
+ return self._env
def set_application(self, app: Sphinx) -> None:
"""set_application will be called from Sphinx to set app and other instance variables
@@ -47,9 +57,9 @@ def set_application(self, app: Sphinx) -> None:
"""
cls_module = self.__class__.__module__
cls_name = self.__class__.__qualname__
- _deprecation_warning(cls_module, f'{cls_name}.set_application', remove=(10, 0))
- self.config = app.config
- self.env = app.env
+ _deprecation_warning(cls_module, f'{cls_name}.set_application', remove=(9, 0))
+ self._config = app.config
+ self._env = app.env
class RSTParser(docutils.parsers.rst.Parser, Parser):
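For third-party parsers the migration is mechanical; a hedged before/after sketch (the private attributes are set by ``create_source_parser()``):

    # Before: public attribute access now emits a deprecation warning,
    # with removal scheduled for Sphinx 9.0 (per remove=(9, 0) above).
    prolog = self.config.rst_prolog
    # After: read the private attribute that Sphinx now sets directly.
    prolog = self._config.rst_prolog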
diff --git a/sphinx/registry.py b/sphinx/registry.py
index 0d4151ca67b..6f7d7c477fe 100644
--- a/sphinx/registry.py
+++ b/sphinx/registry.py
@@ -381,8 +381,8 @@ def create_source_parser(
parser_class = self.get_source_parser(filename)
parser = parser_class()
if isinstance(parser, SphinxParser):
- parser.config = config
- parser.env = env
+ parser._config = config
+ parser._env = env
return parser
def add_translator(
diff --git a/sphinx/testing/restructuredtext.py b/sphinx/testing/restructuredtext.py
index b17fd387946..68c78199606 100644
--- a/sphinx/testing/restructuredtext.py
+++ b/sphinx/testing/restructuredtext.py
@@ -22,8 +22,8 @@ def parse(app: Sphinx, text: str, docname: str = 'index') -> nodes.document:
reader = SphinxStandaloneReader()
reader._setup_transforms(app.registry.get_transforms())
parser = RSTParser()
- parser.config = app.config
- parser.env = app.env
+ parser._config = app.config
+ parser._env = app.env
with sphinx_domains(env):
return publish_doctree(
text,
diff --git a/tests/test_markup/test_parser.py b/tests/test_markup/test_parser.py
index dbaa5e8cb4e..6a71fed9e49 100644
--- a/tests/test_markup/test_parser.py
+++ b/tests/test_markup/test_parser.py
@@ -16,8 +16,8 @@ def test_RSTParser_prolog_epilog(RSTStateMachine, app):
document = new_document('dummy.rst')
document.settings = Mock(tab_width=8, language_code='')
parser = RSTParser()
- parser.config = app.config
- parser.env = app.env
+ parser._config = app.config
+ parser._env = app.env
# normal case
text = 'hello Sphinx world\nSphinx is a document generator'
From a5366394ae527712c4edfeb07a5fbeecd4ca72e1 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Tue, 10 Jun 2025 02:23:51 +0100
Subject: [PATCH 132/466] Move build phase to the builder (#13645)
---
sphinx/application.py | 9 +++++++--
sphinx/builders/__init__.py | 23 ++++++++++-------------
2 files changed, 17 insertions(+), 15 deletions(-)
diff --git a/sphinx/application.py b/sphinx/application.py
index 8117eecf340..8e53770545f 100644
--- a/sphinx/application.py
+++ b/sphinx/application.py
@@ -196,7 +196,6 @@ def __init__(
:param pdb: If true, enable the Python debugger on an exception.
:param exception_on_warning: If true, raise an exception on warnings.
"""
- self.phase = BuildPhase.INITIALIZATION
self.verbosity = verbosity
self._fresh_env_used: bool | None = None
self.extensions: dict[str, Extension] = {}
@@ -340,6 +339,12 @@ def fresh_env_used(self) -> bool | None:
"""
return self._fresh_env_used
+ @property
+ def phase(self) -> BuildPhase:
+ if not hasattr(self, 'builder'):
+ return BuildPhase.INITIALIZATION
+ return self.builder.phase
+
def _init_i18n(self) -> None:
"""Load translated strings from the configured localedirs if enabled in
the configuration.
@@ -420,7 +425,7 @@ def _init_builder(self) -> None:
# ---- main "build" method -------------------------------------------------
def build(self, force_all: bool = False, filenames: Sequence[Path] = ()) -> None:
- self.phase = BuildPhase.READING
+ self.builder.phase = BuildPhase.READING
try:
if force_all:
self.builder.build_all()
diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py
index 18c2596b383..73426c6dc81 100644
--- a/sphinx/builders/__init__.py
+++ b/sphinx/builders/__init__.py
@@ -103,6 +103,8 @@ class Builder:
#: The file format produced by the builder allows images to be embedded using data-URIs.
supported_data_uri_images: ClassVar[bool] = False
+ phase: BuildPhase = BuildPhase.INITIALIZATION
+
srcdir = _StrPathProperty()
confdir = _StrPathProperty()
outdir = _StrPathProperty()
@@ -431,14 +433,14 @@ def build(
pickle.dump(self.env, f, pickle.HIGHEST_PROTOCOL)
# global actions
- self._app.phase = BuildPhase.CONSISTENCY_CHECK
+ self.phase = BuildPhase.CONSISTENCY_CHECK
with progress_message(__('checking consistency')):
self.env.check_consistency()
else:
if method == 'update' and not docnames:
logger.info(bold(__('no targets are out of date.')))
- self._app.phase = BuildPhase.RESOLVING
+ self.phase = BuildPhase.RESOLVING
# filter "docnames" (list of outdated files) by the updated
# found_docs of the environment; this will remove docs that
@@ -776,21 +778,17 @@ def _write_serial(self, docnames: Sequence[str]) -> None:
len(docnames),
self._app.verbosity,
):
- _write_docname(
- docname, app=self._app, env=self.env, builder=self, tags=self.tags
- )
+ _write_docname(docname, env=self.env, builder=self, tags=self.tags)
def _write_parallel(self, docnames: Sequence[str], nproc: int) -> None:
def write_process(docs: list[tuple[str, nodes.document]]) -> None:
- self._app.phase = BuildPhase.WRITING
+ self.phase = BuildPhase.WRITING
for docname, doctree in docs:
self.write_doc(docname, doctree)
# warm up caches/compile templates using the first document
firstname, docnames = docnames[0], docnames[1:]
- _write_docname(
- firstname, app=self._app, env=self.env, builder=self, tags=self.tags
- )
+ _write_docname(firstname, env=self.env, builder=self, tags=self.tags)
tasks = ParallelTasks(nproc)
chunks = make_chunks(docnames, nproc)
@@ -808,7 +806,7 @@ def write_process(docs: list[tuple[str, nodes.document]]) -> None:
def on_chunk_done(args: list[tuple[str, nodes.document]], result: None) -> None:
next(progress)
- self._app.phase = BuildPhase.RESOLVING
+ self.phase = BuildPhase.RESOLVING
for chunk in chunks:
arg = []
for docname in chunk:
@@ -884,15 +882,14 @@ def _write_docname(
docname: str,
/,
*,
- app: Sphinx,
env: BuildEnvironment,
builder: Builder,
tags: Tags,
) -> None:
"""Write a single document."""
- app.phase = BuildPhase.RESOLVING
+ builder.phase = BuildPhase.RESOLVING
doctree = env.get_and_resolve_doctree(docname, builder=builder, tags=tags)
- app.phase = BuildPhase.WRITING
+ builder.phase = BuildPhase.WRITING
builder.write_doc_serialized(docname, doctree)
builder.write_doc(docname, doctree)
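The application keeps a compatible read-only view; a simplified sketch of the new flow:

    # The builder owns the phase; Sphinx.phase merely delegates to it,
    # falling back to INITIALIZATION before a builder exists.
    builder.phase = BuildPhase.READING
    assert app.phase is BuildPhase.READING  # via the new property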
From f1316bb1698d5f217cb273c84272c76cc7528979 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Tue, 10 Jun 2025 13:36:27 +0200
Subject: [PATCH 133/466] LaTeX: allow more cases of table nesting, fix #13646
Tables using longtable can now contain nested tables, including those
rendered by tabulary, up to the suppression of the latter's horizontal
lines due to an upstream LaTeX bug. A longtable can never itself be
nested, and will fall back to tabular.
Formerly longtable would (in principle) raise an error if it contained
any sort of nested table, but the detection of being a longtable was
faulty if it was not specified as a class option.
Relates #6838.
---
doc/usage/restructuredtext/directives.rst | 61 ++++++++++--------
sphinx/templates/latex/tabulary.tex.jinja | 3 +
sphinx/texinputs/sphinxlatextables.sty | 23 ++++++-
sphinx/writers/latex.py | 64 ++++++++++++++-----
.../expects/tabularcolumn.tex | 4 +-
tests/roots/test-root/markup.txt | 8 +++
6 files changed, 116 insertions(+), 47 deletions(-)
diff --git a/doc/usage/restructuredtext/directives.rst b/doc/usage/restructuredtext/directives.rst
index 33269b522a6..94526304cdf 100644
--- a/doc/usage/restructuredtext/directives.rst
+++ b/doc/usage/restructuredtext/directives.rst
@@ -1472,6 +1472,15 @@ Check the :confval:`latex_table_style`.
complex contents such as multiple paragraphs, blockquotes, lists, literal
blocks, will render correctly to LaTeX output.
+.. versionchanged:: 8.3.0
+
+ The partial support for nesting a table in another has been extended.
+ Formerly Sphinx would raise an error if the ``longtable`` class was specified
+ for a table containing a nested table, and some cases would not raise an
+ error at Sphinx level but fail at LaTeX level during PDF build. This is a
+ complex topic in LaTeX rendering and the output can sometimes be improved
+ via the :rst:dir:`tabularcolumns` directive.
+
.. rst:directive:: .. tabularcolumns:: column spec
This directive influences only the LaTeX output for the next table in
@@ -1489,40 +1498,38 @@ Check the :confval:`latex_table_style`.
:rst:dir:`tabularcolumns` conflicts with ``:widths:`` option of table
directives. If both are specified, ``:widths:`` option will be ignored.
- Sphinx will render tables with more than 30 rows with ``longtable``.
- Besides the ``l``, ``r``, ``c`` and ``p{width}`` column specifiers, one can
- also use ``\X{a}{b}`` (new in version 1.5) which configures the column
- width to be a fraction ``a/b`` of the total line width and ``\Y{f}`` (new
- in version 1.6) where ``f`` is a decimal: for example ``\Y{0.2}`` means that
- the column will occupy ``0.2`` times the line width.
+ Sphinx renders tables with at most 30 rows using ``tabulary``, and those
+ with more rows with ``longtable``.
- When this directive is used for a table with at most 30 rows, Sphinx will
- render it with ``tabulary``. One can then use specific column types ``L``
- (left), ``R`` (right), ``C`` (centered) and ``J`` (justified). They have
- the effect of a ``p{width}`` (i.e. each cell is a LaTeX ``\parbox``) with
- the specified internal text alignment and an automatically computed
- ``width``.
-
- .. warning::
+ ``tabulary`` tries to compute suitable column widths automatically
+ (internally to LaTeX). However, cells are then not allowed to contain
+ "problematic" elements such as lists, object descriptions, or
+ blockquotes. Sphinx will fall back to using ``tabular`` if such a cell
+ (or a nested ``tabulary``) is encountered. In that case the table
+ tends to fill the whole available line width.
- - Cells that contain list-like elements such as object descriptions,
- blockquotes or any kind of lists are not compatible with the ``LRCJ``
- column types. The column type must then be some ``p{width}`` with an
- explicit ``width`` (or ``\X{a}{b}`` or ``\Y{f}``).
+ :rst:dir:`tabularcolumns` can help coerce the use of ``tabulary`` if one
+ is careful not to employ the ``tabulary`` column types (``L``, ``R``,
+ ``C`` or ``J``) for columns with at least one "problematic" cell, but
+ only LaTeX's ``p{}`` or Sphinx's ``\X`` and ``\Y`` (described next).
- - Literal blocks do not work with ``tabulary`` at all. Sphinx will
- fall back to ``tabular`` or ``longtable`` environments and generate a
- suitable column specification.
+ Literal blocks do not work at all with ``tabulary``. Sphinx will fall
+ back to ``tabular`` or ``longtable`` environments depending on the number
+ of rows. It will employ the :rst:dir:`tabularcolumns` specification only
+ if it contains no use of the ``tabulary``-specific column types.
-In absence of the :rst:dir:`tabularcolumns` directive, and for a table with at
-most 30 rows and no problematic cells as described in the above warning,
-Sphinx uses ``tabulary`` and the ``J`` column-type for every column.
+ Besides the LaTeX ``l``, ``r``, ``c`` and ``p{width}`` column specifiers,
+ one can also use ``\X{a}{b}`` which configures the column width to be a
+ fraction ``a/b`` of the total line width and ``\Y{f}`` where ``f`` is a
+ decimal: for example ``\Y{0.2}`` means that the column will occupy ``0.2``
+ times the line width.
.. versionchanged:: 1.6
- Formerly, the ``L`` column-type was used (text is flushed-left). To revert
- to this, include ``\newcolumntype{T}{L}`` in the LaTeX preamble, as in fact
- Sphinx uses ``T`` and sets it by default to be an alias of ``J``.
+ Use ``J`` (justified) by default with ``tabulary``, not ``L``
+ (flushed-left). To revert, include ``\newcolumntype{T}{L}`` in the LaTeX
+ preamble, as in fact Sphinx uses ``T`` and sets it by default to be an
+ alias of ``J``.
.. hint::
diff --git a/sphinx/templates/latex/tabulary.tex.jinja b/sphinx/templates/latex/tabulary.tex.jinja
index 6ebcec6d264..7ba065ed1a7 100644
--- a/sphinx/templates/latex/tabulary.tex.jinja
+++ b/sphinx/templates/latex/tabulary.tex.jinja
@@ -21,6 +21,9 @@
<% if 'nocolorrows' in table.styles -%>
\sphinxthistablewithnocolorrowsstyle
<% endif -%>
+<% if table.is_nested -%>
+\sphinxthistabularywithnohlinesifinlongtable
+<% endif -%>
<% if table.align -%>
<%- if table.align in ('center', 'default') -%>
\centering
diff --git a/sphinx/texinputs/sphinxlatextables.sty b/sphinx/texinputs/sphinxlatextables.sty
index 08efac559c0..96ce2d0c80f 100644
--- a/sphinx/texinputs/sphinxlatextables.sty
+++ b/sphinx/texinputs/sphinxlatextables.sty
@@ -47,6 +47,8 @@
% The method used here requires no changes to either writer or templates.
\newif\ifspx@intable
\newif\ifspx@thistableisnested
+% Try to allow nested tables in a longtable. But tabulary causes problems.
+\newif\ifspx@longtable
%
% Also provides user command (see docs)
% - \sphixncolorblend
@@ -115,6 +117,7 @@
\edef\sphinxbaselineskip{\dimexpr\the\dimexpr\baselineskip\relax\relax}%
\spx@inframedtrue % message to sphinxheavybox
\spx@table@setnestedflags
+ \spx@longtabletrue
}
% Compatibility with caption package
\def\sphinxthelongtablecaptionisattop{%
@@ -128,7 +131,10 @@
\def\sphinxatlongtableend{\@nobreakfalse % latex3/latex2e#173
\prevdepth\z@\vskip\sphinxtablepost\relax}%
% B. Table with tabular or tabulary
-\def\sphinxattablestart{\par\vskip\dimexpr\sphinxtablepre\relax
+\def\sphinxattablestart{\par
+ \ifvmode % guard against being nested in a table cell
+ \vskip\dimexpr\sphinxtablepre\relax
+ \fi
\spx@inframedtrue % message to sphinxheavybox
\spx@table@setnestedflags
}%
@@ -142,7 +148,12 @@
\spx@intabletrue
\fi
}%
-\let\sphinxattableend\sphinxatlongtableend
+\def\sphinxattableend{%
+ \@nobreakfalse % <- probably unneeded as this is not a longtable
+ \ifvmode % guard against being nested in a table cell
+ \prevdepth\z@\vskip\sphinxtablepost\relax
+ \fi
+}%
% This is used by tabular and tabulary templates
\newcommand*\sphinxcapstartof[1]{%
\vskip\parskip
@@ -1083,6 +1094,10 @@ local use of booktabs table style}%
% borderless style
\def\sphinxthistablewithborderlessstyle{%
+ \sphinxthistablewithnohlines
+ \def\spx@arrayrulewidth{\z@}%
+}%
+\def\sphinxthistablewithnohlines{%
\let\sphinxhline \@empty
\let\sphinxcline \@gobble
\let\sphinxvlinecrossing\@gobble
@@ -1090,7 +1105,9 @@ local use of booktabs table style}%
\let\spx@toprule \@empty
\let\sphinxmidrule \@empty
\let\sphinxbottomrule \@empty
- \def\spx@arrayrulewidth{\z@}%
+}%
+\def\sphinxthistabularywithnohlinesifinlongtable{%
+ \ifspx@longtable\sphinxthistablewithnohlines\fi
}%
% colorrows style
diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py
index 0aa550a3b7e..53fa2564f37 100644
--- a/sphinx/writers/latex.py
+++ b/sphinx/writers/latex.py
@@ -134,6 +134,8 @@ def __init__(self, node: Element) -> None:
self.has_problematic = False
self.has_oldproblematic = False
self.has_verbatim = False
+ # cf https://github.com/sphinx-doc/sphinx/issues/13646#issuecomment-2958309632
+ self.is_nested = False
self.entry_needs_linetrimming = 0
self.caption: list[str] = []
self.stubs: list[int] = []
@@ -147,29 +149,47 @@ def __init__(self, node: Element) -> None:
self.cell_id = 0 # last assigned cell_id
def is_longtable(self) -> bool:
- """True if and only if table uses longtable environment."""
+ """True if and only if table uses longtable environment.
+
+ In absence of longtable class can only be used trustfully on departing
+ the table, as the number of rows is not known until then.
+ """
return self.row > 30 or 'longtable' in self.classes
def get_table_type(self) -> str:
"""Returns the LaTeX environment name for the table.
+ It is used at the time of ``depart_table()`` and again via ``get_colspec()``.
The class currently supports:
* longtable
* tabular
* tabulary
"""
- if self.is_longtable():
+ if self.is_longtable() and not self.is_nested:
return 'longtable'
elif self.has_verbatim:
return 'tabular'
elif self.colspec:
- return 'tabulary'
+ if any(c in 'LRCJT' for c in self.colspec):
+ # tabulary would complain "no suitable columns" if none of its
+ # column types were used, so we ensure at least one matches.
+ # It is the responsibility of the user not to use tabulary
+ # column types for a column containing a problematic cell.
+ return 'tabulary'
+ else:
+ return 'tabular'
elif self.has_problematic or (
self.colwidths and 'colwidths-given' in self.classes
):
return 'tabular'
else:
+ # A nested tabulary in a longtable cannot use any \hline's,
+ # i.e. it cannot use the "booktabs" or "standard" styles (due to
+ # an upstream LaTeX bug we do not try to solve). But we cannot
+ # know here whether it will end up in a tabular or a longtable,
+ # so the problem is solved via LaTeX macros inserted by the
+ # tabulary template.
return 'tabulary'
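Condensed, the selection logic at this point in the series reads as follows (a paraphrase of ``get_table_type()``, not the exact code):

    def table_env(table) -> str:
        if table.is_longtable() and not table.is_nested:
            return 'longtable'
        if table.has_verbatim:
            return 'tabular'
        if table.colspec:
            # tabulary needs at least one of its own column letters
            if any(c in 'LRCJT' for c in table.colspec):
                return 'tabulary'
            return 'tabular'
        if table.has_problematic or (
            table.colwidths and 'colwidths-given' in table.classes
        ):
            return 'tabular'
        return 'tabulary'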
def get_colspec(self) -> str:
@@ -179,6 +199,7 @@ def get_colspec(self) -> str:
.. note::
+ This is used by the template renderer at time of depart_table().
The ``\\X`` and ``T`` column type specifiers are defined in
``sphinxlatextables.sty``.
"""
@@ -1146,23 +1167,17 @@ def visit_tabular_col_spec(self, node: Element) -> None:
raise nodes.SkipNode
def visit_table(self, node: Element) -> None:
- if len(self.tables) == 1:
- assert self.table is not None
- if self.table.get_table_type() == 'longtable':
- raise UnsupportedError(
- '%s:%s: longtable does not support nesting a table.'
- % (self.curfilestack[-1], node.line or '')
- )
- # change type of parent table to tabular
- # see https://groups.google.com/d/msg/sphinx-users/7m3NeOBixeo/9LKP2B4WBQAJ
- self.table.has_problematic = True
- elif len(self.tables) > 2:
+ table = Table(node)
+ assert table is not None
+ if len(self.tables) >= 1:
+ table.is_nested = True
+ # TODO: do we want > 2, > 1, or actually nothing here?
+ if len(self.tables) > 2:
raise UnsupportedError(
'%s:%s: deeply nested tables are not implemented.'
% (self.curfilestack[-1], node.line or '')
)
- table = Table(node)
self.tables.append(table)
if table.colsep is None:
table.colsep = '|' * (
@@ -1191,6 +1206,25 @@ def depart_table(self, node: Element) -> None:
assert self.table is not None
labels = self.hypertarget_to(node)
table_type = self.table.get_table_type()
+ if table_type == 'tabulary':
+ if len(self.tables) > 1:
+ # tell parents to not be tabulary
+ for _ in self.tables[:-1]:
+ _.has_problematic = True
+ else:
+ if self.table.colspec:
+ if any(c in self.table.colspec for c in 'LRJCT'):
+ logger.warning(
+ __(
+ 'colspec %s was given which uses '
+ 'tabulary syntax. But this table cannot be '
+ 'rendered as a tabulary; colspec will be ignored.'
+ ),
+ self.table.colspec[:-1],
+ type='latex',
+ location=node,
+ )
+ self.table.colspec = ''
table = self.render(
table_type + '.tex.jinja', {'table': self.table, 'labels': labels}
)
diff --git a/tests/roots/test-latex-table/expects/tabularcolumn.tex b/tests/roots/test-latex-table/expects/tabularcolumn.tex
index fcb01be3f50..c1f88421f1d 100644
--- a/tests/roots/test-latex-table/expects/tabularcolumn.tex
+++ b/tests/roots/test-latex-table/expects/tabularcolumn.tex
@@ -4,7 +4,7 @@
\sphinxthistablewithglobalstyle
\sphinxthistablewithnovlinesstyle
\centering
-\begin{tabulary}{\linewidth}[t]{cc}
+\begin{tabular}[t]{cc}
\sphinxtoprule
\sphinxstyletheadfamily
\sphinxAtStartPar
@@ -36,6 +36,6 @@
cell3\sphinxhyphen{}2
\\
\sphinxbottomrule
-\end{tabulary}
+\end{tabular}
\sphinxtableafterendhook\par
\sphinxattableend\end{savenotes}
diff --git a/tests/roots/test-root/markup.txt b/tests/roots/test-root/markup.txt
index a9d9132ed98..ca32ce9ddc5 100644
--- a/tests/roots/test-root/markup.txt
+++ b/tests/roots/test-root/markup.txt
@@ -229,6 +229,14 @@ Tables with multirow and multicol:
| +---+ |
+---+---+
+ .. rst-class:: longtable
+
+ +---+---+
+ | +---+ |
+ | | h | |
+ | +---+ |
+ +---+---+
+
.. list-table::
:header-rows: 0
From d20d348fdb8ca0782c8ecd14d34ad8149c564b7f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Tue, 10 Jun 2025 17:56:58 +0200
Subject: [PATCH 134/466] Fix and update CHANGES.rst
---
CHANGES.rst | 8 +++++---
1 file changed, 5 insertions(+), 3 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 7098714e670..4920b4c7736 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -36,11 +36,13 @@ Features added
* #13497: Support C domain objects in the table of contents.
* #13500: LaTeX: add support for ``fontawesome6`` package.
Patch by Jean-François B.
-* #13535: html search: Update to the latest version of Snowball (v3.0.1).
- Patch by Adam Turner.
-* #13704: autodoc: Detect :py:func:`typing_extensions.overload `
+* #13509: autodoc: Detect :py:func:`typing_extensions.overload `
and :py:func:`~typing.final` decorators.
Patch by Spencer Brown.
+* #13535: html search: Update to the latest version of Snowball (v3.0.1).
+ Patch by Adam Turner.
+* #13647: LaTeX: allow more cases of table nesting.
+ Patch by Jean-François B.
Bugs fixed
----------
From 62198d83c33ec43ec8c83c7d5d67c2a4f91dc8ff Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Tue, 10 Jun 2025 22:41:55 +0200
Subject: [PATCH 135/466] LaTeX: test better if tabularcolumns colspec is
tabulary-specific (#13648)
---
sphinx/writers/latex.py | 11 +++++++++--
1 file changed, 9 insertions(+), 2 deletions(-)
diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py
index 53fa2564f37..95eb35dda36 100644
--- a/sphinx/writers/latex.py
+++ b/sphinx/writers/latex.py
@@ -171,7 +171,9 @@ def get_table_type(self) -> str:
elif self.has_verbatim:
return 'tabular'
elif self.colspec:
- if any(c in 'LRCJT' for c in self.colspec):
+ assert len(self.colspec) > 2
+ _colspec = re.sub(r'\{.*?\}', '', self.colspec[1:-2])
+ if any(c in 'LRCJT' for c in _colspec):
# tabulary would complain "no suitable columns" if none of its
# column types were used, so we ensure at least one matches.
# It is the responsibility of the user not to use tabulary
@@ -1212,8 +1214,13 @@ def depart_table(self, node: Element) -> None:
for _ in self.tables[:-1]:
_.has_problematic = True
else:
+ # We try to catch a tabularcolumns using L, R, J, C, or T.
+ # We cannot simply test for the presence in the colspec of
+ # one of those letters due to syntax such as >{\RaggedRight}.
if self.table.colspec:
- if any(c in self.table.colspec for c in 'LRJCT'):
+ assert len(self.table.colspec) > 2
+ _colspec = re.sub(r'\{.*?\}', '', self.table.colspec[1:-2])
+ if any(c in _colspec for c in 'LRJCT'):
logger.warning(
__(
'colspec %s was given which uses '
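The brace-stripping step can be checked in isolation; for example (illustrative colspec value):

    import re

    colspec = '{>{\\RaggedRight}p{5cm}|L|}\n'  # stored as '{%s}\n' by visit_table()
    inner = colspec[1:-2]                       # drop the outer braces and newline
    stripped = re.sub(r'\{.*?\}', '', inner)    # remove brace groups: '>p|L|'
    assert any(c in stripped for c in 'LRJCT')  # 'L' survives -> tabulary syntax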
From 4564486af459d9f862aeea79c9ebbb7405a54618 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Wed, 11 Jun 2025 10:02:50 +0200
Subject: [PATCH 136/466] LaTeX: clarify that latest docs addition refers to
LaTeX
---
doc/usage/restructuredtext/directives.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/usage/restructuredtext/directives.rst b/doc/usage/restructuredtext/directives.rst
index 94526304cdf..f882f33ba3e 100644
--- a/doc/usage/restructuredtext/directives.rst
+++ b/doc/usage/restructuredtext/directives.rst
@@ -1473,8 +1473,8 @@ Check the :confval:`latex_table_style`.
blocks, will render correctly to LaTeX output.
.. versionchanged:: 8.3.0
-
- The partial support for nesting a table in another has been extended.
+ The LaTeX builder's partial support for nesting a table in another
+ has been extended.
Formerly Sphinx would raise an error if the ``longtable`` class was specified
for a table containing a nested table, and some cases would not raise an
error at Sphinx level but fail at LaTeX level during PDF build. This is a
From 118f4a13a78f861983a68dd9b249ecd34b399d3e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Wed, 11 Jun 2025 11:44:09 +0200
Subject: [PATCH 137/466] LaTeX: make sure tabulary is used if colspec requires
it (#13653)
---
sphinx/writers/latex.py | 29 +++++++++++++++++------------
tests/roots/test-root/markup.txt | 2 +-
2 files changed, 18 insertions(+), 13 deletions(-)
diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py
index 95eb35dda36..39aef55ddfe 100644
--- a/sphinx/writers/latex.py
+++ b/sphinx/writers/latex.py
@@ -171,13 +171,13 @@ def get_table_type(self) -> str:
elif self.has_verbatim:
return 'tabular'
elif self.colspec:
- assert len(self.colspec) > 2
- _colspec = re.sub(r'\{.*?\}', '', self.colspec[1:-2])
- if any(c in 'LRCJT' for c in _colspec):
- # tabulary would complain "no suitable columns" if none of its
- # column types were used, so we ensure at least one matches.
- # It is the responsibility of the user not to use tabulary
- # column types for a column containing a problematic cell.
+ # tabulary complains (with only a LaTeX warning) if none of its
+ # column types is used. The next test can produce false positives
+ # from syntax such as >{\RaggedRight}, but it catches *{3}{J}, which
+ # does require tabulary and would crash tabular.
+ # It is the user's responsibility not to use a tabulary column type
+ # for a column having a problematic cell.
+ if any(c in 'LRCJT' for c in self.colspec):
return 'tabulary'
else:
return 'tabular'
@@ -1217,17 +1217,22 @@ def depart_table(self, node: Element) -> None:
# We try to catch a tabularcolumns using L, R, J, C, or T.
# We cannot simply test for the presence in the colspec of
# one of those letters due to syntax such as >{\RaggedRight}.
+ # The test will not catch *{3}{J} syntax, but it would be overkill
+ # to try to implement the LaTeX preamble mini-language here.
if self.table.colspec:
assert len(self.table.colspec) > 2
- _colspec = re.sub(r'\{.*?\}', '', self.table.colspec[1:-2])
- if any(c in _colspec for c in 'LRJCT'):
+ # cf how self.table.colspec got set in visit_table().
+ _colspec_as_given = self.table.colspec[1:-2]
+ _colspec_stripped = re.sub(r'\{.*?\}', '', _colspec_as_given)
+ if any(c in _colspec_stripped for c in 'LRJCT'):
logger.warning(
__(
- 'colspec %s was given which uses '
+ 'colspec %s was given which appears to use '
'tabulary syntax. But this table cannot be '
- 'rendered as a tabulary; colspec will be ignored.'
+ 'rendered as a tabulary; the given colspec will '
+ 'be ignored.'
),
- self.table.colspec[:-1],
+ _colspec_as_given,
type='latex',
location=node,
)
diff --git a/tests/roots/test-root/markup.txt b/tests/roots/test-root/markup.txt
index ca32ce9ddc5..0a7b6cb2c92 100644
--- a/tests/roots/test-root/markup.txt
+++ b/tests/roots/test-root/markup.txt
@@ -182,7 +182,7 @@ With
Tables
------
-.. tabularcolumns:: |L|p{5cm}|R|
+.. tabularcolumns:: |*{1}{L|}p{5cm}|*{1}{R}|
.. _my-table:
From fa6f8c87b79ab9bc13f97503bc9f92ca79af955f Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 11 Jun 2025 15:58:40 +0100
Subject: [PATCH 138/466] Bump types-requests to 2.32.4.20250611 (#13651)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index fd0cdce21bc..70962dd7adf 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -100,7 +100,7 @@ lint = [
"types-docutils==0.21.0.20250525",
"types-Pillow==10.2.0.20240822",
"types-Pygments==2.19.0.20250516",
- "types-requests==2.32.0.20250602", # align with requests
+ "types-requests==2.32.4.20250611", # align with requests
"types-urllib3==1.26.25.14",
"pyright==1.1.400",
"pytest>=8.0",
@@ -169,7 +169,7 @@ type-stubs = [
"types-docutils==0.21.0.20250525",
"types-Pillow==10.2.0.20240822",
"types-Pygments==2.19.0.20250516",
- "types-requests==2.32.0.20250602",
+ "types-requests==2.32.4.20250611",
"types-urllib3==1.26.25.14",
]
From 40b0f8048940865325063b68f087a16860a66c72 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Wed, 11 Jun 2025 15:58:53 +0100
Subject: [PATCH 139/466] Use ``config.verbosity`` (#13650)
---
sphinx/application.py | 2 +-
sphinx/builders/__init__.py | 10 +++++-----
sphinx/environment/__init__.py | 4 +++-
sphinx/ext/viewcode.py | 2 +-
sphinx/util/logging.py | 10 +++++++---
tests/test_util/test_util_logging.py | 17 ++++++-----------
6 files changed, 23 insertions(+), 22 deletions(-)
diff --git a/sphinx/application.py b/sphinx/application.py
index 8e53770545f..e8da1e4d058 100644
--- a/sphinx/application.py
+++ b/sphinx/application.py
@@ -239,7 +239,7 @@ def __init__(
self._fail_on_warnings = bool(warningiserror)
self.pdb = pdb
self._exception_on_warning = exception_on_warning
- logging.setup(self, self._status, self._warning)
+ logging.setup(self, self._status, self._warning, verbosity=verbosity)
self.events = EventManager(self)
diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py
index 73426c6dc81..184a27f2cd2 100644
--- a/sphinx/builders/__init__.py
+++ b/sphinx/builders/__init__.py
@@ -269,7 +269,7 @@ def cat2relpath(cat: CatalogInfo, srcdir: Path = self.srcdir) -> str:
__('writing output... '),
'darkgreen',
len(catalogs),
- self._app.verbosity,
+ self.config.verbosity,
stringify_func=cat2relpath,
):
catalog.write_mo(
@@ -587,7 +587,7 @@ def _read_serial(self, docnames: list[str]) -> None:
__('reading sources... '),
'purple',
len(docnames),
- self._app.verbosity,
+ self.config.verbosity,
):
# remove all inventory entries for that file
self.events.emit('env-purge-doc', self.env, docname)
@@ -604,7 +604,7 @@ def _read_parallel(self, docnames: list[str], nproc: int) -> None:
__('reading sources... '),
'purple',
len(chunks),
- self._app.verbosity,
+ self.config.verbosity,
)
# clear all outdated docs at once
@@ -776,7 +776,7 @@ def _write_serial(self, docnames: Sequence[str]) -> None:
__('writing output... '),
'darkgreen',
len(docnames),
- self._app.verbosity,
+ self.config.verbosity,
):
_write_docname(docname, env=self.env, builder=self, tags=self.tags)
@@ -800,7 +800,7 @@ def write_process(docs: list[tuple[str, nodes.document]]) -> None:
__('writing output... '),
'darkgreen',
len(chunks),
- self._app.verbosity,
+ self.config.verbosity,
)
def on_chunk_done(args: list[tuple[str, nodes.document]], result: None) -> None:
diff --git a/sphinx/environment/__init__.py b/sphinx/environment/__init__.py
index fd611639e9c..a09978ba279 100644
--- a/sphinx/environment/__init__.py
+++ b/sphinx/environment/__init__.py
@@ -281,7 +281,9 @@ def setup(self, app: Sphinx) -> None:
# The old config is self.config, restored from the pickled environment.
# The new config is app.config, always recreated from ``conf.py``
self.config_status, self.config_status_extra = self._config_status(
- old_config=self.config, new_config=app.config, verbosity=app.verbosity
+ old_config=self.config,
+ new_config=app.config,
+ verbosity=app.config.verbosity,
)
self.config = app.config
diff --git a/sphinx/ext/viewcode.py b/sphinx/ext/viewcode.py
index 195ed95f961..2b9b479e0a1 100644
--- a/sphinx/ext/viewcode.py
+++ b/sphinx/ext/viewcode.py
@@ -295,7 +295,7 @@ def collect_pages(app: Sphinx) -> Iterator[tuple[str, dict[str, Any], str]]:
__('highlighting module code... '),
'blue',
len(env._viewcode_modules),
- app.verbosity,
+ app.config.verbosity,
operator.itemgetter(0),
):
if not entry:
diff --git a/sphinx/util/logging.py b/sphinx/util/logging.py
index d5392936334..9ad035c49af 100644
--- a/sphinx/util/logging.py
+++ b/sphinx/util/logging.py
@@ -608,8 +608,12 @@ def write(self, data: str) -> None:
self._app.messagelog.append(data)
-def setup(app: Sphinx, status: IO[str], warning: IO[str]) -> None:
+def setup(
+ app: Sphinx, status: IO[str], warning: IO[str], *, verbosity: int = 0
+) -> None:
"""Setup root logger for Sphinx"""
+ log_level = VERBOSITY_MAP[max(verbosity, 0)]
+
logger = logging.getLogger(NAMESPACE)
logger.setLevel(logging.DEBUG)
logger.propagate = False
@@ -621,7 +625,7 @@ def setup(app: Sphinx, status: IO[str], warning: IO[str]) -> None:
info_handler = NewLineStreamHandler(SafeEncodingWriter(status))
info_handler.addFilter(InfoFilter())
info_handler.addFilter(InfoLogRecordTranslator(app))
- info_handler.setLevel(VERBOSITY_MAP[app.verbosity])
+ info_handler.setLevel(log_level)
info_handler.setFormatter(ColorizeFormatter())
warning_handler = WarningStreamHandler(SafeEncodingWriter(warning))
@@ -635,7 +639,7 @@ def setup(app: Sphinx, status: IO[str], warning: IO[str]) -> None:
messagelog_handler = logging.StreamHandler(LastMessagesWriter(app, status))
messagelog_handler.addFilter(InfoFilter())
- messagelog_handler.setLevel(VERBOSITY_MAP[app.verbosity])
+ messagelog_handler.setLevel(log_level)
logger.addHandler(info_handler)
logger.addHandler(warning_handler)
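A sketch of the level selection (``VERBOSITY_MAP`` already lives in ``sphinx.util.logging``; the clamp is the new part):

    # Negative verbosity values are clamped before the lookup; the -v
    # count maps 0 -> INFO, 1 -> VERBOSE, 2 -> DEBUG.
    log_level = VERBOSITY_MAP[max(verbosity, 0)]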
diff --git a/tests/test_util/test_util_logging.py b/tests/test_util/test_util_logging.py
index a9ef7f6c4c7..c21434a8414 100644
--- a/tests/test_util/test_util_logging.py
+++ b/tests/test_util/test_util_logging.py
@@ -26,8 +26,7 @@
@pytest.mark.sphinx('html', testroot='root')
def test_info_and_warning(app: SphinxTestApp) -> None:
- app.verbosity = 2
- logging.setup(app, app.status, app.warning)
+ logging.setup(app, app.status, app.warning, verbosity=2)
logger = logging.getLogger(__name__)
logger.debug('message1')
@@ -61,8 +60,7 @@ def test_Exception(app: SphinxTestApp) -> None:
@pytest.mark.sphinx('html', testroot='root')
def test_verbosity_filter(app: SphinxTestApp) -> None:
# verbosity = 0: INFO
- app.verbosity = 0
- logging.setup(app, app.status, app.warning)
+ logging.setup(app, app.status, app.warning, verbosity=0)
logger = logging.getLogger(__name__)
logger.info('message1')
@@ -75,8 +73,7 @@ def test_verbosity_filter(app: SphinxTestApp) -> None:
assert 'message4' not in app.status.getvalue()
# verbosity = 1: VERBOSE
- app.verbosity = 1
- logging.setup(app, app.status, app.warning)
+ logging.setup(app, app.status, app.warning, verbosity=1)
logger = logging.getLogger(__name__)
logger.info('message1')
@@ -89,8 +86,7 @@ def test_verbosity_filter(app: SphinxTestApp) -> None:
assert 'message4' not in app.status.getvalue()
# verbosity = 2: DEBUG
- app.verbosity = 2
- logging.setup(app, app.status, app.warning)
+ logging.setup(app, app.status, app.warning, verbosity=2)
logger = logging.getLogger(__name__)
logger.info('message1')
@@ -312,8 +308,7 @@ def test_log_no_ansi_colors(tmp_path):
@pytest.mark.sphinx('html', testroot='root')
def test_colored_logs(app: SphinxTestApp) -> None:
- app.verbosity = 2
- logging.setup(app, app.status, app.warning)
+ logging.setup(app, app.status, app.warning, verbosity=2)
logger = logging.getLogger(__name__)
# default colors
@@ -363,7 +358,7 @@ class StreamWriter(codecs.StreamWriter):
def write(self, object):
self.stream.write(object.encode('cp1252').decode('cp1252'))
- logging.setup(app, StreamWriter(app.status), app.warning)
+ logging.setup(app, StreamWriter(app.status), app.warning, verbosity=0)
logger = logging.getLogger(__name__)
# info with UnicodeEncodeError
From a2f7b41806bffea6f9528435effa700b3df35ed9 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Wed, 11 Jun 2025 16:16:10 +0100
Subject: [PATCH 140/466] Test with Python 3.15 alpha releases (#13654)
---
.github/workflows/main.yml | 2 ++
pyproject.toml | 1 +
tox.ini | 4 ++--
3 files changed, 5 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index cf40554e6f4..ca71534d3a6 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -89,6 +89,7 @@ jobs:
matrix:
python:
- "3.14"
+ - "3.15"
docutils:
- "0.20"
- "0.21"
@@ -125,6 +126,7 @@ jobs:
matrix:
python:
- "3.14"
+ - "3.15"
steps:
- uses: actions/checkout@v4
diff --git a/pyproject.toml b/pyproject.toml
index 70962dd7adf..3d0e01deb0a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -39,6 +39,7 @@ classifiers = [
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
"Programming Language :: Python :: 3.14",
+ "Programming Language :: Python :: 3.15",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Framework :: Sphinx",
diff --git a/tox.ini b/tox.ini
index 87b9d1b6316..58c2b140351 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,6 +1,6 @@
[tox]
minversion = 4.2.0
-envlist = py{311,312,313,314}
+envlist = py{311,312,313,314,315}
[testenv]
usedevelop = True
@@ -19,7 +19,7 @@ passenv =
BUILDER
READTHEDOCS
description =
- py{311,312,313,314}: Run unit tests against {envname}.
+ py{311,312,313,314,315}: Run unit tests against {envname}.
dependency_groups =
test
setenv =
From 6b136f9b9162acdfc9eb0331fe05a8d464c78992 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Wed, 11 Jun 2025 19:01:08 +0100
Subject: [PATCH 141/466] Replace ``codecs.open()`` with ``open()`` (#13655)
---
sphinx/builders/gettext.py | 7 +++----
1 file changed, 3 insertions(+), 4 deletions(-)
diff --git a/sphinx/builders/gettext.py b/sphinx/builders/gettext.py
index 659bf218983..fc659d744d5 100644
--- a/sphinx/builders/gettext.py
+++ b/sphinx/builders/gettext.py
@@ -2,7 +2,6 @@
from __future__ import annotations
-import codecs
import operator
import os
import os.path
@@ -212,7 +211,7 @@ def should_write(filepath: Path, new_content: str) -> bool:
if not filepath.exists():
return True
try:
- with codecs.open(str(filepath), encoding='utf-8') as oldpot:
+ with open(filepath, encoding='utf-8') as oldpot:
old_content = oldpot.read()
old_header_index = old_content.index('"POT-Creation-Date:')
new_header_index = new_content.index('"POT-Creation-Date:')
@@ -275,7 +274,7 @@ def _extract_from_template(self) -> None:
self.config.verbosity,
):
try:
- with codecs.open(template, encoding='utf-8') as f:
+ with open(template, encoding='utf-8') as f:
context = f.read()
for line, _meth, msg in extract_translations(context):
origin = MsgOrigin(source=template, line=line)
@@ -326,7 +325,7 @@ def finish(self) -> None:
pofn = self.outdir / f'{textdomain}.pot'
if should_write(pofn, content):
- with codecs.open(str(pofn), 'w', encoding='utf-8') as pofile:
+ with open(pofn, 'w', encoding='utf-8') as pofile:
pofile.write(content)
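Built-in ``open()`` accepts an ``encoding`` argument and path objects directly, so the ``codecs`` wrapper (and the ``str()`` conversions) add nothing; for example:

    from pathlib import Path

    p = Path('messages.pot')
    p.write_text('x\n', encoding='utf-8')
    # open() takes the Path as-is; no str() conversion is needed.
    with open(p, encoding='utf-8') as f:
        assert f.read() == 'x\n'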
From e1bd9cb3863cd1dfeaec9729dc6c842ef0f7a1f7 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Wed, 11 Jun 2025 19:43:47 +0100
Subject: [PATCH 142/466] Make ``_prepend_prologue()`` and
``_append_epilogue()`` private (#13658)
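Both helpers operate on ``docutils.statemachine.StringList``, which records a ``(source, offset)`` pair for every line; the updated tests assert on those pairs via ``xitems()``. A rough usage sketch (hypothetical prologue/epilogue values, not part of this patch)::

    from docutils.statemachine import StringList

    from sphinx.util.rst import _append_epilogue, _prepend_prologue

    content = StringList(['hello Sphinx world'], 'dummy.rst')
    _prepend_prologue(content, 'this is rst_prolog')
    _append_epilogue(content, 'this is rst_epilog')
    # each line keeps its originating (source, offset) pair
    print(list(content.xitems()))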
---
sphinx/parsers.py | 8 ++--
sphinx/util/rst.py | 68 +++++++++++++++----------------
tests/test_markup/test_parser.py | 8 ++--
tests/test_util/test_util_rst.py | 70 +++++++++++++++++---------------
4 files changed, 80 insertions(+), 74 deletions(-)
diff --git a/sphinx/parsers.py b/sphinx/parsers.py
index eb5e77d9387..26437654cc5 100644
--- a/sphinx/parsers.py
+++ b/sphinx/parsers.py
@@ -11,7 +11,7 @@
from docutils.transforms.universal import SmartQuotes
from sphinx.deprecation import _deprecation_warning
-from sphinx.util.rst import append_epilog, prepend_prolog
+from sphinx.util.rst import _append_epilogue, _prepend_prologue
if TYPE_CHECKING:
from docutils import nodes
@@ -100,9 +100,9 @@ def parse(self, inputstring: str | StringList, document: nodes.document) -> None
self.finish_parse()
def decorate(self, content: StringList) -> None:
- """Preprocess reST content before parsing."""
- prepend_prolog(content, self.config.rst_prolog)
- append_epilog(content, self.config.rst_epilog)
+ """Preprocess reStructuredText content before parsing."""
+ _prepend_prologue(content, self._config.rst_prolog)
+ _append_epilogue(content, self._config.rst_epilog)
def setup(app: Sphinx) -> ExtensionMetadata:
diff --git a/sphinx/util/rst.py b/sphinx/util/rst.py
index 7e6853a81ef..485f369766e 100644
--- a/sphinx/util/rst.py
+++ b/sphinx/util/rst.py
@@ -1,11 +1,11 @@
-"""reST helper functions."""
+"""reStructuredText helper functions."""
from __future__ import annotations
import re
from collections import defaultdict
from contextlib import contextmanager
-from typing import TYPE_CHECKING, cast
+from typing import TYPE_CHECKING
from unicodedata import east_asian_width
from docutils.parsers.rst import roles
@@ -25,7 +25,7 @@
logger = logging.getLogger(__name__)
-FIELD_NAME_RE = re.compile(Body.patterns['field_marker'])
+_FIELD_NAME_RE = re.compile(Body.patterns['field_marker'])
symbols_re = re.compile(r'([!-\-/:-@\[-`{-~])') # symbols without dot(0x2e)
SECTIONING_CHARS = ['=', '-', '~']
@@ -77,39 +77,39 @@ def default_role(docname: str, name: str) -> Iterator[None]:
docutils.unregister_role('')
-def prepend_prolog(content: StringList, prolog: str) -> None:
- """Prepend a string to content body as prolog."""
- if prolog:
- pos = 0
- for line in content:
- if FIELD_NAME_RE.match(line):
- pos += 1
- else:
- break
-
- if pos > 0:
- # insert a blank line after docinfo
- content.insert(pos, '', '<generated>', 0)
+def _prepend_prologue(content: StringList, prologue: str) -> None:
+ """Prepend a string to content body as a prologue."""
+ if not prologue:
+ return
+ pos = 0
+ for line in content:
+ if _FIELD_NAME_RE.match(line):
pos += 1
+ else:
+ break
- # insert prolog (after docinfo if exists)
- lineno = 0
- for lineno, line in enumerate(prolog.splitlines()):
- content.insert(pos + lineno, line, '<rst_prolog>', lineno)
+ if pos > 0:
+ # insert a blank line after docinfo
+ content.insert(pos, '', '<generated>', 0)
+ pos += 1
- content.insert(pos + lineno + 1, '', '<generated>', 0)
+ # insert prologue (after docinfo if exists)
+ lineno = 0
+ for lineno, line in enumerate(prologue.splitlines()):
+ content.insert(pos + lineno, line, '<rst_prologue>', lineno)
+ content.insert(pos + lineno + 1, '', '<generated>', 0)
-def append_epilog(content: StringList, epilog: str) -> None:
- """Append a string to content body as epilog."""
- if epilog:
- if len(content) > 0:
- source, lineno = content.info(-1)
- # lineno will never be None, since len(content) > 0
- lineno = cast('int', lineno)
- else:
- source = '<generated>'
- lineno = 0
- content.append('', source, lineno + 1)
- for lineno, line in enumerate(epilog.splitlines()):
- content.append(line, '<rst_epilog>', lineno)
+
+def _append_epilogue(content: StringList, epilogue: str) -> None:
+ """Append a string to content body as an epilogue."""
+ if not epilogue:
+ return
+ if len(content) > 0:
+ source, lineno = content.items[-1]
+ else:
+ source = '<generated>'
+ lineno = 0
+ content.append('', source, lineno + 1)
+ for lineno, line in enumerate(epilogue.splitlines()):
+ content.append(line, '<rst_epilogue>', lineno)
diff --git a/tests/test_markup/test_parser.py b/tests/test_markup/test_parser.py
index 6a71fed9e49..9a79373e6b7 100644
--- a/tests/test_markup/test_parser.py
+++ b/tests/test_markup/test_parser.py
@@ -34,8 +34,8 @@ def test_RSTParser_prolog_epilog(RSTStateMachine, app):
parser.parse(text, document)
(content, _), _ = RSTStateMachine().run.call_args
assert list(content.xitems()) == [
- ('<rst_prolog>', 0, 'this is rst_prolog'),
- ('<rst_prolog>', 1, 'hello reST!'),
+ ('<rst_prologue>', 0, 'this is rst_prolog'),
+ ('<rst_prologue>', 1, 'hello reST!'),
('<generated>', 0, ''),
('dummy.rst', 0, 'hello Sphinx world'),
('dummy.rst', 1, 'Sphinx is a document generator'),
@@ -50,8 +50,8 @@ def test_RSTParser_prolog_epilog(RSTStateMachine, app):
('dummy.rst', 0, 'hello Sphinx world'),
('dummy.rst', 1, 'Sphinx is a document generator'),
('dummy.rst', 2, ''),
- ('<rst_epilog>', 0, 'this is rst_epilog'),
- ('<rst_epilog>', 1, 'good-bye reST!'),
+ ('<rst_epilogue>', 0, 'this is rst_epilog'),
+ ('<rst_epilogue>', 1, 'good-bye reST!'),
]
# expandtabs / convert whitespaces
diff --git a/tests/test_util/test_util_rst.py b/tests/test_util/test_util_rst.py
index d8fcf8d12d0..41854c2aecc 100644
--- a/tests/test_util/test_util_rst.py
+++ b/tests/test_util/test_util_rst.py
@@ -5,7 +5,13 @@
from docutils.statemachine import StringList
from jinja2 import Environment
-from sphinx.util.rst import append_epilog, escape, heading, prepend_prolog, textwidth
+from sphinx.util.rst import (
+ _append_epilogue,
+ _prepend_prologue,
+ escape,
+ heading,
+ textwidth,
+)
def test_escape() -> None:
@@ -15,25 +21,25 @@ def test_escape() -> None:
assert escape('.. toctree::') == r'\.. toctree\:\:'
-def test_append_epilog() -> None:
+def test_append_epilogue() -> None:
epilog = 'this is rst_epilog\ngood-bye reST!'
content = StringList(
['hello Sphinx world', 'Sphinx is a document generator'],
'dummy.rst',
)
- append_epilog(content, epilog)
+ _append_epilogue(content, epilog)
assert list(content.xitems()) == [
('dummy.rst', 0, 'hello Sphinx world'),
('dummy.rst', 1, 'Sphinx is a document generator'),
('dummy.rst', 2, ''),
- ('<rst_epilog>', 0, 'this is rst_epilog'),
- ('<rst_epilog>', 1, 'good-bye reST!'),
+ ('<rst_epilogue>', 0, 'this is rst_epilog'),
+ ('<rst_epilogue>', 1, 'good-bye reST!'),
]
-def test_prepend_prolog() -> None:
- prolog = 'this is rst_prolog\nhello reST!'
+def test_prepend_prologue() -> None:
+ prologue = 'this is rst_prolog\nhello reST!'
content = StringList(
[
':title: test of SphinxFileInput',
@@ -44,14 +50,14 @@ def test_prepend_prolog() -> None:
],
'dummy.rst',
)
- prepend_prolog(content, prolog)
+ _prepend_prologue(content, prologue)
assert list(content.xitems()) == [
('dummy.rst', 0, ':title: test of SphinxFileInput'),
('dummy.rst', 1, ':author: Sphinx team'),
('<generated>', 0, ''),
- ('<rst_prolog>', 0, 'this is rst_prolog'),
- ('<rst_prolog>', 1, 'hello reST!'),
+ ('<rst_prologue>', 0, 'this is rst_prolog'),
+ ('<rst_prologue>', 1, 'hello reST!'),
('<generated>', 0, ''),
('dummy.rst', 2, ''),
('dummy.rst', 3, 'hello Sphinx world'),
@@ -60,17 +66,17 @@ def test_prepend_prolog() -> None:
def test_prepend_prolog_with_CR() -> None:
- # prolog having CR at tail
- prolog = 'this is rst_prolog\nhello reST!\n'
+ # prologue having CR at tail
+ prologue = 'this is rst_prolog\nhello reST!\n'
content = StringList(
['hello Sphinx world', 'Sphinx is a document generator'],
'dummy.rst',
)
- prepend_prolog(content, prolog)
+ _prepend_prologue(content, prologue)
assert list(content.xitems()) == [
- ('<rst_prolog>', 0, 'this is rst_prolog'),
- ('<rst_prolog>', 1, 'hello reST!'),
+ ('<rst_prologue>', 0, 'this is rst_prolog'),
+ ('<rst_prologue>', 1, 'hello reST!'),
('<generated>', 0, ''),
('dummy.rst', 0, 'hello Sphinx world'),
('dummy.rst', 1, 'Sphinx is a document generator'),
@@ -78,17 +84,17 @@ def test_prepend_prolog_with_CR() -> None:
def test_prepend_prolog_without_CR() -> None:
- # prolog not having CR at tail
- prolog = 'this is rst_prolog\nhello reST!'
+ # prologue not having CR at tail
+ prologue = 'this is rst_prolog\nhello reST!'
content = StringList(
['hello Sphinx world', 'Sphinx is a document generator'],
'dummy.rst',
)
- prepend_prolog(content, prolog)
+ _prepend_prologue(content, prologue)
assert list(content.xitems()) == [
- ('<rst_prolog>', 0, 'this is rst_prolog'),
- ('<rst_prolog>', 1, 'hello reST!'),
+ ('<rst_prologue>', 0, 'this is rst_prolog'),
+ ('<rst_prologue>', 1, 'hello reST!'),
('<generated>', 0, ''),
('dummy.rst', 0, 'hello Sphinx world'),
('dummy.rst', 1, 'Sphinx is a document generator'),
@@ -96,7 +102,7 @@ def test_prepend_prolog_without_CR() -> None:
def test_prepend_prolog_with_roles_in_sections() -> None:
- prolog = 'this is rst_prolog\nhello reST!'
+ prologue = 'this is rst_prolog\nhello reST!'
content = StringList(
[
':title: test of SphinxFileInput',
@@ -109,14 +115,14 @@ def test_prepend_prolog_with_roles_in_sections() -> None:
],
'dummy.rst',
)
- prepend_prolog(content, prolog)
+ _prepend_prologue(content, prologue)
assert list(content.xitems()) == [
('dummy.rst', 0, ':title: test of SphinxFileInput'),
('dummy.rst', 1, ':author: Sphinx team'),
('<generated>', 0, ''),
- ('<rst_prolog>', 0, 'this is rst_prolog'),
- ('<rst_prolog>', 1, 'hello reST!'),
+ ('<rst_prologue>', 0, 'this is rst_prolog'),
+ ('<rst_prologue>', 1, 'hello reST!'),
('<generated>', 0, ''),
('dummy.rst', 2, ''),
('dummy.rst', 3, ':mod:`foo`'),
@@ -128,13 +134,13 @@ def test_prepend_prolog_with_roles_in_sections() -> None:
def test_prepend_prolog_with_roles_in_sections_with_newline() -> None:
# prologue with trailing line break
- prolog = 'this is rst_prolog\nhello reST!\n'
+ prologue = 'this is rst_prolog\nhello reST!\n'
content = StringList([':mod:`foo`', '-' * 10, '', 'hello'], 'dummy.rst')
- prepend_prolog(content, prolog)
+ _prepend_prologue(content, prologue)
assert list(content.xitems()) == [
- ('<rst_prolog>', 0, 'this is rst_prolog'),
- ('<rst_prolog>', 1, 'hello reST!'),
+ ('<rst_prologue>', 0, 'this is rst_prolog'),
+ ('<rst_prologue>', 1, 'hello reST!'),
('<generated>', 0, ''),
('dummy.rst', 0, ':mod:`foo`'),
('dummy.rst', 1, '----------'),
@@ -145,13 +151,13 @@ def test_prepend_prolog_with_roles_in_sections_with_newline() -> None:
def test_prepend_prolog_with_roles_in_sections_without_newline() -> None:
# prologue with no trailing line break
- prolog = 'this is rst_prolog\nhello reST!'
+ prologue = 'this is rst_prolog\nhello reST!'
content = StringList([':mod:`foo`', '-' * 10, '', 'hello'], 'dummy.rst')
- prepend_prolog(content, prolog)
+ _prepend_prologue(content, prologue)
assert list(content.xitems()) == [
- ('<rst_prolog>', 0, 'this is rst_prolog'),
- ('<rst_prolog>', 1, 'hello reST!'),
+ ('<rst_prologue>', 0, 'this is rst_prolog'),
+ ('<rst_prologue>', 1, 'hello reST!'),
('<generated>', 0, ''),
('dummy.rst', 0, ':mod:`foo`'),
('dummy.rst', 1, '----------'),
From a1639a7cc5bdd6a05f1bbdc54528cd49ef0d06cc Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sun, 15 Jun 2025 03:43:37 +0100
Subject: [PATCH 143/466] Refactor ``render_partial()`` to do less work
(#13664)
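Previously every fragment went through a full ``Publisher.publish()`` round-trip; the new code builds a throwaway document, applies the component transforms, and walks it with the HTML translator directly. The underlying docutils pattern looks roughly like this (a standalone sketch using plain docutils components, not the builder's exact code)::

    import docutils.frontend
    import docutils.utils
    from docutils import nodes
    from docutils.writers.html5_polyglot import Writer

    writer = Writer()
    settings = docutils.frontend.get_default_settings(writer)
    doc = docutils.utils.new_document('<partial node>', settings)
    doc.append(nodes.paragraph(text='hello'))
    doc.transformer.apply_transforms()  # a no-op here; Sphinx adds real transforms
    visitor = writer.translator_class(doc)
    doc.walkabout(visitor)
    print(''.join(visitor.body))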
---
sphinx/builders/html/__init__.py | 17 ++++++++++++-----
1 file changed, 12 insertions(+), 5 deletions(-)
diff --git a/sphinx/builders/html/__init__.py b/sphinx/builders/html/__init__.py
index de49f89bbb4..b6a67f5f453 100644
--- a/sphinx/builders/html/__init__.py
+++ b/sphinx/builders/html/__init__.py
@@ -17,6 +17,7 @@
from urllib.parse import quote
import docutils.readers.doctree
+import docutils.utils
import jinja2.exceptions
from docutils import nodes
from docutils.core import Publisher
@@ -429,12 +430,18 @@ def render_partial(self, node: Node | None) -> dict[str, str]:
"""Utility: Render a lone doctree node."""
if node is None:
return {'fragment': ''}
-
- doc = new_document('<partial node>')
+ pub = self._publisher
+ doc = docutils.utils.new_document('<partial node>', pub.settings)
doc.append(node)
- self._publisher.set_source(doc)
- self._publisher.publish()
- return self._publisher.writer.parts
+ doc.transformer.populate_from_components((pub.reader, pub.parser, pub.writer))
+ doc.transformer.apply_transforms()
+ visitor: HTML5Translator = self.create_translator(doc, self) # type: ignore[assignment]
+ doc.walkabout(visitor)
+ parts = {
+ 'fragment': ''.join(visitor.fragment),
+ 'title': ''.join(visitor.title),
+ }
+ return parts
def prepare_writing(self, docnames: Set[str]) -> None:
# create the search indexer
From 076774224a978e5b7a9e679f30fe84816e50e8f9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Sun, 15 Jun 2025 21:25:21 +0200
Subject: [PATCH 144/466] Docs: clarify wording regarding default style for
LaTeX tables (#13667)
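For reference, the style discussed here is selected via ``latex_table_style`` in ``conf.py``; for example (an illustrative configuration, not part of this patch)::

    # conf.py
    latex_table_style = ['booktabs', 'colorrows']  # the default since Sphinx 6.0.0
    # latex_table_style = ['standard']  # restore the fully ruled pre-6.0 look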
---
doc/usage/configuration.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/usage/configuration.rst b/doc/usage/configuration.rst
index 7cdf462c4ba..3ec5818f49b 100644
--- a/doc/usage/configuration.rst
+++ b/doc/usage/configuration.rst
@@ -3083,7 +3083,7 @@ These options influence LaTeX output.
the :code-tex:`\\rowcolors` LaTeX command becomes a no-op
(this command has limitations and has never correctly
supported all types of tables Sphinx produces in LaTeX).
- Please update your project to use the
+ Please use the
:ref:`latex table color configuration <latextablecolors>` keys instead.
To customise the styles for a table,
@@ -3096,7 +3096,7 @@ These options influence LaTeX output.
The latter two can be combined with any of the first three.
The ``standard`` class produces tables with
both horizontal and vertical lines
- (as has been the default so far with Sphinx).
+ (as had been the default prior to Sphinx 6.0.0).
A single-row multi-column merged cell will obey the row colour,
if it is set.
From 551b8428980cab65d7e5c1b759e5aa95ce0cd5a7 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Mon, 16 Jun 2025 20:32:31 +0100
Subject: [PATCH 145/466] Deprecate support for source encodings other than
UTF-8 (#13666)
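Projects that still set a legacy encoding now receive a warning at ``config-inited`` time; only ``'utf-8'``, ``'utf-8-sig'``, and ``'utf8'`` (compared case-insensitively) are exempt. For example (illustrative)::

    # conf.py -- warns from Sphinx 8.3; scheduled to stop working in Sphinx 10
    source_encoding = 'latin-1'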
---
CHANGES.rst | 3 +++
doc/usage/configuration.rst | 3 +++
doc/usage/restructuredtext/basics.rst | 9 +++++----
doc/usage/restructuredtext/directives.rst | 2 +-
sphinx/config.py | 14 ++++++++++++++
tests/test_config/test_config.py | 15 +++++++++++++--
6 files changed, 39 insertions(+), 7 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 4920b4c7736..47eb18fc8a6 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -23,6 +23,9 @@ Deprecated
Patch by Adam Turner.
* #13644: Deprecate the :py:attr:`!Parser.config` and :py:attr:`!env` attributes.
Patch by Adam Turner.
+* #13665: Deprecate support for non-UTF-8 source encodings,
+ scheduled for removal in Sphinx 10.
+ Patch by Adam Turner.
Features added
--------------
diff --git a/doc/usage/configuration.rst b/doc/usage/configuration.rst
index 3ec5818f49b..5babd7e2915 100644
--- a/doc/usage/configuration.rst
+++ b/doc/usage/configuration.rst
@@ -1157,6 +1157,9 @@ Options for source files
The recommended encoding is ``'utf-8-sig'``.
.. versionadded:: 0.5
+ .. deprecated:: 8.3
+ Support for source encodings other than UTF-8 is deprecated.
+ Sphinx 10 will only support UTF-8 files.
.. confval:: source_suffix
:type: :code-py:`dict[str, str] | Sequence[str] | str`
diff --git a/doc/usage/restructuredtext/basics.rst b/doc/usage/restructuredtext/basics.rst
index ea61b80fc85..8f408f45e38 100644
--- a/doc/usage/restructuredtext/basics.rst
+++ b/doc/usage/restructuredtext/basics.rst
@@ -646,10 +646,11 @@ configurations:
Source encoding
---------------
-Since the easiest way to include special characters like em dashes or copyright
-signs in reStructuredText is to directly write them as Unicode characters, one has to
-specify an encoding. Sphinx assumes source files to be encoded in UTF-8 by
-default; you can change this with the :confval:`source_encoding` config value.
+Sphinx supports source files that are encoded in UTF-8.
+This means that the full range of Unicode__ characters may be used
+directly in reStructuredText.
+
+__ https://www.unicode.org/
Gotchas
diff --git a/doc/usage/restructuredtext/directives.rst b/doc/usage/restructuredtext/directives.rst
index f882f33ba3e..5845a6ab717 100644
--- a/doc/usage/restructuredtext/directives.rst
+++ b/doc/usage/restructuredtext/directives.rst
@@ -971,7 +971,7 @@ __ https://pygments.org/docs/lexers
:type: text
Explicitly specify the encoding of the file.
- This overwrites the default encoding (:confval:`source_encoding`).
+ This overwrites the default encoding (UTF-8).
For example:
.. code-block:: rst
diff --git a/sphinx/config.py b/sphinx/config.py
index a43b6cc82d0..e878cd7a834 100644
--- a/sphinx/config.py
+++ b/sphinx/config.py
@@ -895,7 +895,21 @@ def check_master_doc(
return changed
+def deprecate_source_encoding(_app: Sphinx, config: Config) -> None:
+ """Warn on non-UTF 8 source_encoding."""
+ # RemovedInSphinx10Warning
+ if config.source_encoding.lower() not in {'utf-8', 'utf-8-sig', 'utf8'}:
+ msg = _(
+ 'Support for source encodings other than UTF-8 '
+ 'is deprecated and will be removed in Sphinx 10. '
+ 'Please comment at https://github.com/sphinx-doc/sphinx/issues/13665 '
+ 'if this causes a problem.'
+ )
+ logger.warning(msg)
+
+
def setup(app: Sphinx) -> ExtensionMetadata:
+ app.connect('config-inited', deprecate_source_encoding, priority=790)
app.connect('config-inited', convert_source_suffix, priority=800)
app.connect('config-inited', convert_highlight_options, priority=800)
app.connect('config-inited', init_numfig_format, priority=800)
diff --git a/tests/test_config/test_config.py b/tests/test_config/test_config.py
index b3392e654b2..857e1c28e34 100644
--- a/tests/test_config/test_config.py
+++ b/tests/test_config/test_config.py
@@ -19,14 +19,14 @@
)
from sphinx.deprecation import RemovedInSphinx90Warning
from sphinx.errors import ConfigError, ExtensionError, VersionRequirementError
+from sphinx.testing.util import SphinxTestApp
from sphinx.util.tags import Tags
if TYPE_CHECKING:
from collections.abc import Iterable
+ from pathlib import Path
from typing import TypeAlias
- from sphinx.testing.util import SphinxTestApp
-
CircularList: TypeAlias = list[int | 'CircularList']
CircularDict: TypeAlias = dict[str, int | 'CircularDict']
@@ -811,3 +811,14 @@ def test_root_doc_and_master_doc_are_synchronized() -> None:
c.root_doc = '1234'
assert c.master_doc == '1234'
assert c.root_doc == c.master_doc
+
+
+def test_source_encoding_deprecation(tmp_path: Path) -> None:
+ (tmp_path / 'conf.py').touch()
+ app = SphinxTestApp(
+ buildername='dummy',
+ srcdir=tmp_path,
+ confoverrides={'source_encoding': 'latin-1'},
+ )
+ expected = 'Support for source encodings other than UTF-8 is deprecated and will be removed'
+ assert expected in app.warning.getvalue()
From be6593b0de9c273eba311a58c15a7fbdc972c5bf Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Mon, 16 Jun 2025 20:37:08 +0100
Subject: [PATCH 146/466] Prefer ``current_document.docname`` (#13669)
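``env.docname`` is a thin convenience property over the per-document state, so the rewrite is mechanical; spelling out ``env.current_document.docname`` makes the dependency on the current-document scratchpad explicit at each call site. The substitution, sketched for any ``BuildEnvironment`` instance ``env``::

    docname = env.docname                    # before: implicit
    docname = env.current_document.docname   # after: explicit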
---
doc/development/tutorials/examples/recipe.py | 2 +-
doc/development/tutorials/examples/todo.py | 2 +-
sphinx/builders/latex/transforms.py | 2 +-
sphinx/builders/linkcheck.py | 2 +-
sphinx/directives/other.py | 10 +++++-----
sphinx/directives/patches.py | 12 +++++++-----
sphinx/domains/c/__init__.py | 6 ++++--
sphinx/domains/changeset.py | 2 +-
sphinx/domains/citation.py | 4 ++--
sphinx/domains/cpp/__init__.py | 6 ++++--
sphinx/domains/index.py | 2 +-
sphinx/domains/javascript.py | 9 ++++++---
sphinx/domains/python/__init__.py | 6 ++++--
sphinx/domains/rst.py | 2 +-
sphinx/domains/std/__init__.py | 9 ++++++---
sphinx/environment/__init__.py | 2 +-
sphinx/environment/collectors/asset.py | 8 +++++---
sphinx/environment/collectors/metadata.py | 2 +-
sphinx/environment/collectors/title.py | 4 ++--
sphinx/environment/collectors/toctree.py | 2 +-
sphinx/ext/autodoc/directive.py | 2 +-
sphinx/ext/autosectionlabel.py | 2 +-
sphinx/ext/autosummary/__init__.py | 2 +-
sphinx/ext/duration.py | 2 +-
sphinx/ext/graphviz.py | 4 ++--
sphinx/ext/intersphinx/_resolve.py | 2 +-
sphinx/ext/todo.py | 2 +-
sphinx/ext/viewcode.py | 4 ++--
sphinx/io.py | 2 +-
sphinx/roles.py | 2 +-
sphinx/transforms/i18n.py | 4 +++-
sphinx/transforms/post_transforms/__init__.py | 2 +-
sphinx/transforms/post_transforms/images.py | 6 +++---
sphinx/transforms/references.py | 4 +++-
sphinx/util/i18n.py | 2 +-
sphinx/versioning.py | 2 +-
36 files changed, 79 insertions(+), 59 deletions(-)
diff --git a/doc/development/tutorials/examples/recipe.py b/doc/development/tutorials/examples/recipe.py
index 9848629216a..da52fa2df67 100644
--- a/doc/development/tutorials/examples/recipe.py
+++ b/doc/development/tutorials/examples/recipe.py
@@ -165,7 +165,7 @@ def add_recipe(self, signature, ingredients):
name,
signature,
'Recipe',
- self.env.docname,
+ self.env.current_document.docname,
anchor,
0,
))
diff --git a/doc/development/tutorials/examples/todo.py b/doc/development/tutorials/examples/todo.py
index a8aa1ec4a1d..c9993eda198 100644
--- a/doc/development/tutorials/examples/todo.py
+++ b/doc/development/tutorials/examples/todo.py
@@ -44,7 +44,7 @@ def run(self):
self.env.todo_all_todos = []
self.env.todo_all_todos.append({
- 'docname': self.env.docname,
+ 'docname': self.env.current_document.docname,
'lineno': self.lineno,
'todo': todo_node.deepcopy(),
'target': targetnode,
diff --git a/sphinx/builders/latex/transforms.py b/sphinx/builders/latex/transforms.py
index 9fa180a7dd9..759a084cd00 100644
--- a/sphinx/builders/latex/transforms.py
+++ b/sphinx/builders/latex/transforms.py
@@ -40,7 +40,7 @@ class FootnoteDocnameUpdater(SphinxTransform):
def apply(self, **kwargs: Any) -> None:
matcher = NodeMatcher(*self.TARGET_NODES)
for node in matcher.findall(self.document):
- node['docname'] = self.env.docname
+ node['docname'] = self.env.current_document.docname
class SubstitutionDefinitionsRemover(SphinxPostTransform):
diff --git a/sphinx/builders/linkcheck.py b/sphinx/builders/linkcheck.py
index c1b199c5493..b80c9d515b2 100644
--- a/sphinx/builders/linkcheck.py
+++ b/sphinx/builders/linkcheck.py
@@ -261,7 +261,7 @@ def _add_uri(self, uri: str, node: nodes.Element) -> None:
"""
builder = cast('CheckExternalLinksBuilder', self.env._app.builder)
hyperlinks = builder.hyperlinks
- docname = self.env.docname
+ docname = self.env.current_document.docname
if newuri := self.env.events.emit_firstresult('linkcheck-process-uri', uri):
uri = newuri
diff --git a/sphinx/directives/other.py b/sphinx/directives/other.py
index 8c66ed383b5..090e58a4cf0 100644
--- a/sphinx/directives/other.py
+++ b/sphinx/directives/other.py
@@ -63,7 +63,7 @@ class TocTree(SphinxDirective):
def run(self) -> list[Node]:
subnode = addnodes.toctree()
- subnode['parent'] = self.env.docname
+ subnode['parent'] = self.env.current_document.docname
# (title, ref) pairs, where ref may be a document, or an external link,
# and title may be None if the document's title is to be used
@@ -90,7 +90,7 @@ def parse_content(self, toctree: addnodes.toctree) -> None:
"""Populate ``toctree['entries']`` and ``toctree['includefiles']`` from content."""
generated_docnames = frozenset(StandardDomain._virtual_doc_names)
suffixes = self.config.source_suffix
- current_docname = self.env.docname
+ current_docname = self.env.current_document.docname
glob = toctree['glob']
# glob target documents
@@ -267,7 +267,7 @@ def run(self) -> list[Node]:
if len(children) != 1 or not isinstance(children[0], nodes.bullet_list):
logger.warning(
__('.. acks content is not a list'),
- location=(self.env.docname, self.lineno),
+ location=(self.env.current_document.docname, self.lineno),
)
return []
return [addnodes.acks('', *children)]
@@ -290,7 +290,7 @@ def run(self) -> list[Node]:
if len(children) != 1 or not isinstance(children[0], nodes.bullet_list):
logger.warning(
__('.. hlist content is not a list'),
- location=(self.env.docname, self.lineno),
+ location=(self.env.current_document.docname, self.lineno),
)
return []
fulllist = children[0]
@@ -388,7 +388,7 @@ def _insert_input(include_lines: list[str], source: str) -> None:
text = '\n'.join(include_lines[:-2])
path = Path(relpath(Path(source).resolve(), start=self.env.srcdir))
- docname = self.env.docname
+ docname = self.env.current_document.docname
# Emit the "include-read" event
arg = [text]
diff --git a/sphinx/directives/patches.py b/sphinx/directives/patches.py
index 94184de502c..0a7419ed563 100644
--- a/sphinx/directives/patches.py
+++ b/sphinx/directives/patches.py
@@ -72,11 +72,11 @@ def run(self) -> list[Node]:
'an absolute path as a relative path from source directory. '
'Please update your document.'
),
- location=(env.docname, self.lineno),
+ location=(env.current_document.docname, self.lineno),
)
else:
abspath = env.srcdir / self.options['file'][1:]
- doc_dir = env.doc2path(env.docname).parent
+ doc_dir = env.doc2path(env.current_document.docname).parent
self.options['file'] = relpath(abspath, doc_dir)
return super().run()
@@ -162,7 +162,7 @@ def run(self) -> list[Node]:
latex,
latex,
classes=self.options.get('class', []),
- docname=self.env.docname,
+ docname=self.env.current_document.docname,
number=None,
label=label,
)
@@ -180,7 +180,7 @@ def add_target(self, ret: list[Node]) -> None:
# assign label automatically if math_number_all enabled
if node['label'] == '' or (self.config.math_number_all and not node['label']): # NoQA: PLC1901
seq = self.env.new_serialno('sphinx.ext.math#equations')
- node['label'] = f'{self.env.docname}:{seq}'
+ node['label'] = f'{self.env.current_document.docname}:{seq}'
# no targets and numbers are needed
if not node['label']:
@@ -188,7 +188,9 @@ def add_target(self, ret: list[Node]) -> None:
# register label to domain
domain = self.env.domains.math_domain
- domain.note_equation(self.env.docname, node['label'], location=node)
+ domain.note_equation(
+ self.env.current_document.docname, node['label'], location=node
+ )
node['number'] = domain.get_equation_number_for(node['label'])
# add target node
diff --git a/sphinx/domains/c/__init__.py b/sphinx/domains/c/__init__.py
index 80d24c1abe2..194916122cd 100644
--- a/sphinx/domains/c/__init__.py
+++ b/sphinx/domains/c/__init__.py
@@ -156,7 +156,7 @@ def _add_enumerator_to_parent(self, ast: ASTDeclaration) -> None:
parent=target_symbol,
ident=symbol.ident,
declaration=decl_clone,
- docname=self.env.docname,
+ docname=self.env.current_document.docname,
line=self.get_source_info()[1],
)
@@ -259,7 +259,9 @@ def handle_signature(self, sig: str, signode: TextElement) -> ASTDeclaration:
try:
symbol = parent_symbol.add_declaration(
- ast, docname=self.env.docname, line=self.get_source_info()[1]
+ ast,
+ docname=self.env.current_document.docname,
+ line=self.get_source_info()[1],
)
# append the new declaration to the sibling list
assert symbol.siblingAbove is None
diff --git a/sphinx/domains/changeset.py b/sphinx/domains/changeset.py
index d2492dcccb2..e2657ad63ed 100644
--- a/sphinx/domains/changeset.py
+++ b/sphinx/domains/changeset.py
@@ -135,7 +135,7 @@ def note_changeset(self, node: addnodes.versionmodified) -> None:
objname = self.env.current_document.obj_desc_name
changeset = ChangeSet(
node['type'],
- self.env.docname,
+ self.env.current_document.docname,
node.line, # type: ignore[arg-type]
module,
objname,
diff --git a/sphinx/domains/citation.py b/sphinx/domains/citation.py
index 348888c2d50..da7fc6a3fdd 100644
--- a/sphinx/domains/citation.py
+++ b/sphinx/domains/citation.py
@@ -83,7 +83,7 @@ def note_citation(self, node: nodes.citation) -> None:
def note_citation_reference(self, node: pending_xref) -> None:
docnames = self.citation_refs.setdefault(node['reftarget'], set())
- docnames.add(self.env.docname)
+ docnames.add(self.env.current_document.docname)
def check_consistency(self) -> None:
for name, (docname, _labelid, lineno) in self.citations.items():
@@ -139,7 +139,7 @@ def apply(self, **kwargs: Any) -> None:
domain = self.env.domains.citation_domain
for node in self.document.findall(nodes.citation):
# register citation node to domain
- node['docname'] = self.env.docname
+ node['docname'] = self.env.current_document.docname
domain.note_citation(node)
# mark citation labels as not smartquoted
diff --git a/sphinx/domains/cpp/__init__.py b/sphinx/domains/cpp/__init__.py
index ef486897bc4..0ccdc106c44 100644
--- a/sphinx/domains/cpp/__init__.py
+++ b/sphinx/domains/cpp/__init__.py
@@ -219,7 +219,7 @@ def _add_enumerator_to_parent(self, ast: ASTDeclaration) -> None:
templateParams=None,
templateArgs=None,
declaration=decl_clone,
- docname=self.env.docname,
+ docname=self.env.current_document.docname,
line=self.get_source_info()[1],
)
@@ -374,7 +374,9 @@ def handle_signature(self, sig: str, signode: desc_signature) -> ASTDeclaration:
try:
symbol = parent_symbol.add_declaration(
- ast, docname=self.env.docname, line=self.get_source_info()[1]
+ ast,
+ docname=self.env.current_document.docname,
+ line=self.get_source_info()[1],
)
# append the new declaration to the sibling list
assert symbol.siblingAbove is None
diff --git a/sphinx/domains/index.py b/sphinx/domains/index.py
index 09a18d0180e..cefa64a8d5f 100644
--- a/sphinx/domains/index.py
+++ b/sphinx/domains/index.py
@@ -47,7 +47,7 @@ def merge_domaindata(self, docnames: Set[str], otherdata: dict[str, Any]) -> Non
def process_doc(self, env: BuildEnvironment, docname: str, document: Node) -> None:
"""Process a document after it is read by the environment."""
- entries = self.entries.setdefault(env.docname, [])
+ entries = self.entries.setdefault(env.current_document.docname, [])
for node in list(document.findall(addnodes.index)):
node_entries = node['entries']
try:
diff --git a/sphinx/domains/javascript.py b/sphinx/domains/javascript.py
index 22673489d23..e620e04b401 100644
--- a/sphinx/domains/javascript.py
+++ b/sphinx/domains/javascript.py
@@ -363,7 +363,10 @@ def run(self) -> list[Node]:
# Make a duplicate entry in 'objects' to facilitate searching for
# the module in JavaScriptDomain.find_obj()
domain.note_object(
- mod_name, 'module', node_id, location=(self.env.docname, self.lineno)
+ mod_name,
+ 'module',
+ node_id,
+ location=(self.env.current_document.docname, self.lineno),
)
# The node order is: index node first, then target node
@@ -459,14 +462,14 @@ def note_object(
docname,
location=location,
)
- self.objects[fullname] = (self.env.docname, node_id, objtype)
+ self.objects[fullname] = (self.env.current_document.docname, node_id, objtype)
@property
def modules(self) -> dict[str, tuple[str, str]]:
return self.data.setdefault('modules', {}) # modname -> docname, node_id
def note_module(self, modname: str, node_id: str) -> None:
- self.modules[modname] = (self.env.docname, node_id)
+ self.modules[modname] = (self.env.current_document.docname, node_id)
def clear_doc(self, docname: str) -> None:
for fullname, (pkg_docname, _node_id, _l) in list(self.objects.items()):
diff --git a/sphinx/domains/python/__init__.py b/sphinx/domains/python/__init__.py
index 1281b14ad58..a0a0571f069 100644
--- a/sphinx/domains/python/__init__.py
+++ b/sphinx/domains/python/__init__.py
@@ -818,7 +818,9 @@ def note_object(
other.docname,
location=location,
)
- self.objects[name] = ObjectEntry(self.env.docname, node_id, objtype, aliased)
+ self.objects[name] = ObjectEntry(
+ self.env.current_document.docname, node_id, objtype, aliased
+ )
@property
def modules(self) -> dict[str, ModuleEntry]:
@@ -832,7 +834,7 @@ def note_module(
.. versionadded:: 2.1
"""
self.modules[name] = ModuleEntry(
- docname=self.env.docname,
+ docname=self.env.current_document.docname,
node_id=node_id,
synopsis=synopsis,
platform=platform,
diff --git a/sphinx/domains/rst.py b/sphinx/domains/rst.py
index 2b486ea85ed..64aff25a015 100644
--- a/sphinx/domains/rst.py
+++ b/sphinx/domains/rst.py
@@ -266,7 +266,7 @@ def note_object(
location=location,
)
- self.objects[objtype, name] = (self.env.docname, node_id)
+ self.objects[objtype, name] = (self.env.current_document.docname, node_id)
def clear_doc(self, docname: str) -> None:
for (typ, name), (doc, _node_id) in list(self.objects.items()):
diff --git a/sphinx/domains/std/__init__.py b/sphinx/domains/std/__init__.py
index 04161736675..556cb5c5d40 100644
--- a/sphinx/domains/std/__init__.py
+++ b/sphinx/domains/std/__init__.py
@@ -308,7 +308,10 @@ def add_target_and_index(
domain = self.env.domains.standard_domain
for optname in signode.get('allnames', ()):
domain.add_program_option(
- currprogram, optname, self.env.docname, signode['ids'][0]
+ currprogram,
+ optname,
+ self.env.current_document.docname,
+ signode['ids'][0],
)
# create an index entry
@@ -857,7 +860,7 @@ def note_object(
docname,
location=location,
)
- self.objects[objtype, name] = (self.env.docname, labelid)
+ self.objects[objtype, name] = (self.env.current_document.docname, labelid)
@property
def _terms(self) -> dict[str, tuple[str, str]]:
@@ -871,7 +874,7 @@ def _note_term(self, term: str, labelid: str, location: Any = None) -> None:
"""
self.note_object('term', term, labelid, location)
- self._terms[term.lower()] = (self.env.docname, labelid)
+ self._terms[term.lower()] = (self.env.current_document.docname, labelid)
@property
def progoptions(self) -> dict[tuple[str | None, str], tuple[str, str]]:
diff --git a/sphinx/environment/__init__.py b/sphinx/environment/__init__.py
index a09978ba279..36b364f5c3d 100644
--- a/sphinx/environment/__init__.py
+++ b/sphinx/environment/__init__.py
@@ -788,7 +788,7 @@ def apply_post_transforms(self, doctree: nodes.document, docname: str) -> None:
new = deepcopy(backup)
new.docname = docname
try:
- # set env.docname during applying post-transforms
+ # set env.current_document.docname during applying post-transforms
self.current_document = new
transformer = SphinxTransformer(doctree)
diff --git a/sphinx/environment/collectors/asset.py b/sphinx/environment/collectors/asset.py
index e199fc90124..a1af7c33474 100644
--- a/sphinx/environment/collectors/asset.py
+++ b/sphinx/environment/collectors/asset.py
@@ -47,7 +47,7 @@ def merge_other(
def process_doc(self, app: Sphinx, doctree: nodes.document) -> None:
"""Process and rewrite image URIs."""
- docname = app.env.docname
+ docname = app.env.current_document.docname
for node in doctree.findall(nodes.image):
# Map the mimetype to the corresponding image. The writer may
@@ -156,7 +156,9 @@ def process_doc(self, app: Sphinx, doctree: nodes.document) -> None:
if '://' in targetname:
node['refuri'] = targetname
else:
- rel_filename, filename = app.env.relfn2path(targetname, app.env.docname)
+ rel_filename, filename = app.env.relfn2path(
+ targetname, app.env.current_document.docname
+ )
app.env.note_dependency(rel_filename)
if not os.access(filename, os.R_OK):
logger.warning(
@@ -168,7 +170,7 @@ def process_doc(self, app: Sphinx, doctree: nodes.document) -> None:
)
continue
node['filename'] = app.env.dlfiles.add_file(
- app.env.docname, rel_filename
+ app.env.current_document.docname, rel_filename
).as_posix()
diff --git a/sphinx/environment/collectors/metadata.py b/sphinx/environment/collectors/metadata.py
index 2cda65beec2..8936341a919 100644
--- a/sphinx/environment/collectors/metadata.py
+++ b/sphinx/environment/collectors/metadata.py
@@ -41,7 +41,7 @@ def process_doc(self, app: Sphinx, doctree: nodes.document) -> None:
if index is None:
return
elif isinstance(doctree[index], nodes.docinfo):
- md = app.env.metadata[app.env.docname]
+ md = app.env.metadata[app.env.current_document.docname]
for node in doctree[index]: # type: ignore[attr-defined]
# nodes are multiply inherited...
if isinstance(node, nodes.authors):
diff --git a/sphinx/environment/collectors/title.py b/sphinx/environment/collectors/title.py
index 4bd3ed7e146..50dfa2bdc54 100644
--- a/sphinx/environment/collectors/title.py
+++ b/sphinx/environment/collectors/title.py
@@ -55,8 +55,8 @@ def process_doc(self, app: Sphinx, doctree: nodes.document) -> None:
else:
# document has no title
titlenode += nodes.Text(doctree.get('title', '<no title>'))
- app.env.titles[app.env.docname] = titlenode
- app.env.longtitles[app.env.docname] = longtitlenode
+ app.env.titles[app.env.current_document.docname] = titlenode
+ app.env.longtitles[app.env.current_document.docname] = longtitlenode
def setup(app: Sphinx) -> ExtensionMetadata:
diff --git a/sphinx/environment/collectors/toctree.py b/sphinx/environment/collectors/toctree.py
index fddd269e1b7..5c3d5c97f8c 100644
--- a/sphinx/environment/collectors/toctree.py
+++ b/sphinx/environment/collectors/toctree.py
@@ -65,7 +65,7 @@ def merge_other(
def process_doc(self, app: Sphinx, doctree: nodes.document) -> None:
"""Build a TOC from the doctree and store it in the inventory."""
- docname = app.env.docname
+ docname = app.env.current_document.docname
numentries = [0] # nonlocal again...
def build_toc(
diff --git a/sphinx/ext/autodoc/directive.py b/sphinx/ext/autodoc/directive.py
index 03d6383e0e1..fd0553047a9 100644
--- a/sphinx/ext/autodoc/directive.py
+++ b/sphinx/ext/autodoc/directive.py
@@ -163,7 +163,7 @@ def run(self) -> list[Node]:
'An option to %s is either unknown or has an invalid value: %s',
self.name,
exc,
- location=(self.env.docname, lineno),
+ location=(self.env.current_document.docname, lineno),
)
return []
diff --git a/sphinx/ext/autosectionlabel.py b/sphinx/ext/autosectionlabel.py
index b1eaa0ceac9..7c5304ad83d 100644
--- a/sphinx/ext/autosectionlabel.py
+++ b/sphinx/ext/autosectionlabel.py
@@ -39,7 +39,7 @@ def register_sections_as_label(app: Sphinx, document: Node) -> None:
):
continue
labelid = node['ids'][0]
- docname = app.env.docname
+ docname = app.env.current_document.docname
title = cast('nodes.title', node[0])
ref_name = getattr(title, 'rawsource', title.astext())
if app.config.autosectionlabel_prefix_document:
diff --git a/sphinx/ext/autosummary/__init__.py b/sphinx/ext/autosummary/__init__.py
index 62c9427ecdb..a0ae7af16b1 100644
--- a/sphinx/ext/autosummary/__init__.py
+++ b/sphinx/ext/autosummary/__init__.py
@@ -268,7 +268,7 @@ def run(self) -> list[Node]:
nodes = self.get_table(items)
if 'toctree' in self.options:
- dirname = posixpath.dirname(self.env.docname)
+ dirname = posixpath.dirname(self.env.current_document.docname)
tree_prefix = self.options['toctree'].strip()
docnames = []
diff --git a/sphinx/ext/duration.py b/sphinx/ext/duration.py
index 1cf3f7b58d4..3f7f64c2875 100644
--- a/sphinx/ext/duration.py
+++ b/sphinx/ext/duration.py
@@ -37,7 +37,7 @@ def reading_durations(self) -> dict[str, float]:
return self.data.setdefault('reading_durations', {})
def note_reading_duration(self, duration: float) -> None:
- self.reading_durations[self.env.docname] = duration
+ self.reading_durations[self.env.current_document.docname] = duration
def clear(self) -> None:
self.reading_durations.clear()
diff --git a/sphinx/ext/graphviz.py b/sphinx/ext/graphviz.py
index b973c1f5870..8ba99cc24ad 100644
--- a/sphinx/ext/graphviz.py
+++ b/sphinx/ext/graphviz.py
@@ -167,7 +167,7 @@ def run(self) -> list[Node]:
]
node = graphviz()
node['code'] = dotcode
- node['options'] = {'docname': self.env.docname}
+ node['options'] = {'docname': self.env.current_document.docname}
if 'graphviz_dot' in self.options:
node['options']['graphviz_dot'] = self.options['graphviz_dot']
@@ -212,7 +212,7 @@ def run(self) -> list[Node]:
node = graphviz()
dot_code = '\n'.join(self.content)
node['code'] = f'{self.name} {self.arguments[0]} {{\n{dot_code}\n}}\n'
- node['options'] = {'docname': self.env.docname}
+ node['options'] = {'docname': self.env.current_document.docname}
if 'graphviz_dot' in self.options:
node['options']['graphviz_dot'] = self.options['graphviz_dot']
if 'layout' in self.options:
diff --git a/sphinx/ext/intersphinx/_resolve.py b/sphinx/ext/intersphinx/_resolve.py
index 2029a0ea971..b68222645bb 100644
--- a/sphinx/ext/intersphinx/_resolve.py
+++ b/sphinx/ext/intersphinx/_resolve.py
@@ -522,7 +522,7 @@ def _emit_warning(self, msg: str, /, *args: Any) -> None:
*args,
type='intersphinx',
subtype='external',
- location=(self.env.docname, self.lineno),
+ location=(self.env.current_document.docname, self.lineno),
)
def _concat_strings(self, strings: Iterable[str]) -> str:
diff --git a/sphinx/ext/todo.py b/sphinx/ext/todo.py
index 53c4d57b4f1..4b2e32bc9c4 100644
--- a/sphinx/ext/todo.py
+++ b/sphinx/ext/todo.py
@@ -59,7 +59,7 @@ def run(self) -> list[Node]:
return [todo]
todo.insert(0, nodes.title(text=_('Todo')))
- todo['docname'] = self.env.docname
+ todo['docname'] = self.env.current_document.docname
self.add_name(todo)
self.set_source_info(todo)
self.state.document.note_explicit_target(todo)
diff --git a/sphinx/ext/viewcode.py b/sphinx/ext/viewcode.py
index 2b9b479e0a1..af352eaaab6 100644
--- a/sphinx/ext/viewcode.py
+++ b/sphinx/ext/viewcode.py
@@ -166,7 +166,7 @@ def has_tag(modname: str, fullname: str, docname: str, refname: str) -> bool:
if not modname:
continue
fullname = signode.get('fullname')
- if not has_tag(modname, fullname, env.docname, refname):
+ if not has_tag(modname, fullname, env.current_document.docname, refname):
continue
if fullname in names:
# only one link per name, please
@@ -174,7 +174,7 @@ def has_tag(modname: str, fullname: str, docname: str, refname: str) -> bool:
names.add(fullname)
pagename = posixpath.join(OUTPUT_DIRNAME, modname.replace('.', '/'))
signode += viewcode_anchor(
- reftarget=pagename, refid=fullname, refdoc=env.docname
+ reftarget=pagename, refid=fullname, refdoc=env.current_document.docname
)
diff --git a/sphinx/io.py b/sphinx/io.py
index e2d299f8ae2..2d7b41beda5 100644
--- a/sphinx/io.py
+++ b/sphinx/io.py
@@ -86,7 +86,7 @@ def read_source(self, env: BuildEnvironment) -> str:
# emit "source-read" event
arg = [content]
- env.events.emit('source-read', env.docname, arg)
+ env.events.emit('source-read', env.current_document.docname, arg)
return arg[0]
diff --git a/sphinx/roles.py b/sphinx/roles.py
index 79ec70e90a3..cadfb5a027b 100644
--- a/sphinx/roles.py
+++ b/sphinx/roles.py
@@ -130,7 +130,7 @@ def create_xref_node(self) -> tuple[list[Node], list[system_message]]:
# create the reference node
options = {
- 'refdoc': self.env.docname,
+ 'refdoc': self.env.current_document.docname,
'refdomain': self.refdomain,
'reftype': self.reftype,
'refexplicit': self.has_explicit_title,
diff --git a/sphinx/transforms/i18n.py b/sphinx/transforms/i18n.py
index 27db99c542f..81182f01718 100644
--- a/sphinx/transforms/i18n.py
+++ b/sphinx/transforms/i18n.py
@@ -419,7 +419,9 @@ def apply(self, **kwargs: Any) -> None:
settings, source = self.document.settings, self.document['source']
msgstr = ''
- textdomain = docname_to_domain(self.env.docname, self.config.gettext_compact)
+ textdomain = docname_to_domain(
+ self.env.current_document.docname, self.config.gettext_compact
+ )
# fetch translations
srcdir = self.env.srcdir
diff --git a/sphinx/transforms/post_transforms/__init__.py b/sphinx/transforms/post_transforms/__init__.py
index ae70ce195d9..ac95f56102a 100644
--- a/sphinx/transforms/post_transforms/__init__.py
+++ b/sphinx/transforms/post_transforms/__init__.py
@@ -98,7 +98,7 @@ def _resolve_pending_xref(
new_node: nodes.reference | None
typ = node['reftype']
target = node['reftarget']
- ref_doc = node.setdefault('refdoc', self.env.docname)
+ ref_doc = node.setdefault('refdoc', self.env.current_document.docname)
ref_domain = node.get('refdomain', '')
domain: Domain | None
if ref_domain:
diff --git a/sphinx/transforms/post_transforms/images.py b/sphinx/transforms/post_transforms/images.py
index 97b585d9cf6..6e6e9becb20 100644
--- a/sphinx/transforms/post_transforms/images.py
+++ b/sphinx/transforms/post_transforms/images.py
@@ -123,7 +123,7 @@ def _process_image(self, node: nodes.image, path: Path) -> None:
node['candidates'].pop('?')
node['candidates'][mimetype] = path_str
node['uri'] = path_str
- self.env.images.add_file(self.env.docname, path_str)
+ self.env.images.add_file(self.env.current_document.docname, path_str)
class DataURIExtractor(BaseImageConverter):
@@ -156,7 +156,7 @@ def handle(self, node: nodes.image) -> None:
node['candidates'].pop('?')
node['candidates'][image.mimetype] = path_str
node['uri'] = path_str
- self.env.images.add_file(self.env.docname, path_str)
+ self.env.images.add_file(self.env.current_document.docname, path_str)
def get_filename_for(filename: str, mimetype: str) -> str:
@@ -278,7 +278,7 @@ def handle(self, node: nodes.image) -> None:
node['uri'] = str(destpath)
self.env.original_image_uri[destpath] = srcpath
- self.env.images.add_file(self.env.docname, destpath)
+ self.env.images.add_file(self.env.current_document.docname, destpath)
def convert(
self, _from: str | os.PathLike[str], _to: str | os.PathLike[str]
diff --git a/sphinx/transforms/references.py b/sphinx/transforms/references.py
index 447e9ded568..17380777997 100644
--- a/sphinx/transforms/references.py
+++ b/sphinx/transforms/references.py
@@ -36,7 +36,9 @@ class SphinxDomains(SphinxTransform):
default_priority = 850
def apply(self, **kwargs: Any) -> None:
- self.env.domains._process_doc(self.env, self.env.docname, self.document)
+ self.env.domains._process_doc(
+ self.env, self.env.current_document.docname, self.document
+ )
def setup(app: Sphinx) -> ExtensionMetadata:
diff --git a/sphinx/util/i18n.py b/sphinx/util/i18n.py
index dd1616a8f31..7553119334b 100644
--- a/sphinx/util/i18n.py
+++ b/sphinx/util/i18n.py
@@ -319,7 +319,7 @@ def get_image_filename_for_language(
) -> str:
root, ext = os.path.splitext(filename)
dirname = os.path.dirname(root)
- docpath = os.path.dirname(env.docname)
+ docpath = os.path.dirname(env.current_document.docname)
try:
return env.config.figure_language_filename.format(
root=root,
diff --git a/sphinx/versioning.py b/sphinx/versioning.py
index 3de5a17ec9c..02bc6edd055 100644
--- a/sphinx/versioning.py
+++ b/sphinx/versioning.py
@@ -160,7 +160,7 @@ def apply(self, **kwargs: Any) -> None:
if env.versioning_compare:
# get old doctree
- filename = env.doctreedir / f'{env.docname}.doctree'
+ filename = env.doctreedir / f'{env.current_document.docname}.doctree'
try:
with open(filename, 'rb') as f:
old_doctree = pickle.load(f)
From 616faf830e748d91ba2de4122b0fa04e633f6fba Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Mon, 16 Jun 2025 21:32:22 +0100
Subject: [PATCH 147/466] Simplify ``_publish_msgstr()`` (#13670)
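The bespoke ``_SphinxI18nReader``/``StringInput`` round-trip collapses into a single ``parser.parse()`` call on a freshly created document. The bare-docutils shape of that is roughly (an illustrative sketch, not the helper itself)::

    import docutils.frontend
    import docutils.utils
    from docutils.parsers.rst import Parser

    parser = Parser()
    settings = docutils.frontend.get_default_settings(parser)
    # the synthetic source name carries the original line number for warnings
    doc = docutils.utils.new_document('dummy.rst:42:', settings)
    parser.parse('translated *text*', doc)
    doc.current_source = doc.current_line = None
    print(doc[0].astext())  # -> 'translated text'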
---
sphinx/transforms/i18n.py | 101 ++++++++++++++------------------------
1 file changed, 36 insertions(+), 65 deletions(-)
diff --git a/sphinx/transforms/i18n.py b/sphinx/transforms/i18n.py
index 81182f01718..570154185e9 100644
--- a/sphinx/transforms/i18n.py
+++ b/sphinx/transforms/i18n.py
@@ -2,23 +2,21 @@
from __future__ import annotations
-import contextlib
from re import DOTALL, match
from textwrap import indent
from typing import TYPE_CHECKING, Any, TypeVar
+import docutils.utils
from docutils import nodes
-from docutils.io import StringInput
from sphinx import addnodes
from sphinx.domains.std import make_glossary_term, split_term_classifiers
from sphinx.errors import ConfigError
-from sphinx.io import SphinxBaseReader
from sphinx.locale import __
from sphinx.locale import init as init_locale
-from sphinx.transforms import AutoIndexUpgrader, DoctreeReadEvent, SphinxTransform
-from sphinx.transforms.references import SphinxDomains
+from sphinx.transforms import SphinxTransform
from sphinx.util import get_filetype, logging
+from sphinx.util.docutils import LoggingReporter
from sphinx.util.i18n import docname_to_domain
from sphinx.util.index_entries import split_index_msg
from sphinx.util.nodes import (
@@ -28,11 +26,12 @@
extract_messages,
traverse_translatable_index,
)
-from sphinx.versioning import UIDTransform
if TYPE_CHECKING:
from collections.abc import Sequence
+ from docutils.frontend import Values
+
from sphinx.application import Sphinx
from sphinx.config import Config
from sphinx.environment import BuildEnvironment
@@ -52,77 +51,49 @@
N = TypeVar('N', bound=nodes.Node)
-class _SphinxI18nReader(SphinxBaseReader):
- """A document reader for internationalisation (i18n).
-
- This returns the source line number of the original text
- as the current source line number to let users know where
- the error happened, because the translated texts are
- partial and they don't have correct line numbers.
- """
-
- def __init__(
- self, *args: Any, registry: SphinxComponentRegistry, **kwargs: Any
- ) -> None:
- super().__init__(*args, **kwargs)
- unused = frozenset({
- PreserveTranslatableMessages,
- Locale,
- RemoveTranslatableInline,
- AutoIndexUpgrader,
- SphinxDomains,
- DoctreeReadEvent,
- UIDTransform,
- })
- transforms = self.transforms + registry.get_transforms()
- self.transforms = [
- transform for transform in transforms if transform not in unused
- ]
-
-
-def publish_msgstr(
+def _publish_msgstr(
source: str,
source_path: str,
source_line: int,
- config: Config,
- settings: Any,
*,
+ config: Config,
env: BuildEnvironment,
registry: SphinxComponentRegistry,
+ settings: Values,
) -> nodes.Element:
"""Publish msgstr (single line) into docutils document
:param str source: source text
:param str source_path: source path for warning indication
:param source_line: source line for warning indication
- :param sphinx.config.Config config: sphinx config
:param docutils.frontend.Values settings: docutils settings
- :return: document
- :rtype: docutils.nodes.document
+ :param sphinx.config.Config config: sphinx config
:param sphinx.environment.BuildEnvironment env: sphinx environment
:param sphinx.registry.SphinxComponentRegistry registry: sphinx registry
+ :return: document
+ :rtype: docutils.nodes.document
"""
+ filetype = get_filetype(config.source_suffix, source_path)
+ doc = docutils.utils.new_document(
+ f'{source_path}:{source_line}:', settings
+ )
+ doc.reporter = LoggingReporter.from_reporter(doc.reporter)
+
+ # clear rst_prolog temporarily
+ rst_prolog = config.rst_prolog
+ config.rst_prolog = None
try:
- # clear rst_prolog temporarily
- rst_prolog = config.rst_prolog
- config.rst_prolog = None
-
- reader = _SphinxI18nReader(registry=registry)
- filetype = get_filetype(config.source_suffix, source_path)
parser = registry.create_source_parser(filetype, config=config, env=env)
- doc = reader.read(
- source=StringInput(
- source=source, source_path=f'{source_path}:{source_line}:'
- ),
- parser=parser,
- settings=settings,
- )
- with contextlib.suppress(IndexError): # empty node
- return doc[0]
- return doc
+ parser.parse(source, doc)
+ doc.current_source = doc.current_line = None
finally:
config.rst_prolog = rst_prolog
+ try:
+ return doc[0] # type: ignore[return-value]
+ except IndexError: # empty node
+ return doc
+
def parse_noqa(source: str) -> tuple[str, bool]:
m = match(r'(.*)(?<!\\)#\s*noqa\s*$', source, DOTALL)
@@ ... @@ def apply(self, **kwargs: Any) -> None:
if isinstance(node, LITERAL_TYPE_NODES):
msgstr = '::\n\n' + indent(msgstr, ' ' * 3)
- patch = publish_msgstr(
+ patch = _publish_msgstr(
msgstr,
source,
node.line, # type: ignore[arg-type]
- self.config,
- settings,
+ config=self.config,
env=self.env,
registry=self.env._registry,
+ settings=settings,
)
# FIXME: no warnings about inconsistent references in this part
# XXX doctest and other block markup
@@ -491,14 +462,14 @@ def apply(self, **kwargs: Any) -> None:
if isinstance(node, nodes.term):
for _id in node['ids']:
term, first_classifier = split_term_classifiers(msgstr)
- patch = publish_msgstr(
+ patch = _publish_msgstr(
term or '',
source,
node.line, # type: ignore[arg-type]
- self.config,
- settings,
+ config=self.config,
env=self.env,
registry=self.env._registry,
+ settings=settings,
)
updater.patch = make_glossary_term(
self.env,
@@ -569,14 +540,14 @@ def apply(self, **kwargs: Any) -> None:
# This generates:
msgstr = msgstr + '\n' + '=' * len(msgstr) * 2
- patch = publish_msgstr(
+ patch = _publish_msgstr(
msgstr,
source,
node.line, # type: ignore[arg-type]
- self.config,
- settings,
+ config=self.config,
env=self.env,
registry=self.env._registry,
+ settings=settings,
)
# Structural Subelements phase2
if isinstance(node, nodes.title):
From 2b8f6dab31d5fef15f034b84be8e7e946b9fe961 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Mon, 16 Jun 2025 23:09:54 +0100
Subject: [PATCH 148/466] Make parsing more explicit in
``sphinx.testing.restructuredtext`` (#13671)
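The helper's public surface is unchanged; only the internals now assemble the settings, parser, reporter, and transformer by hand instead of going through ``publish_doctree()``. Typical test usage stays as-is (sketch; assumes a ``SphinxTestApp``-style ``app`` fixture)::

    from sphinx.testing.restructuredtext import parse

    def test_parse(app) -> None:
        document = parse(app, 'hello *world*', docname='index')
        assert document.astext() == 'hello world'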
---
sphinx/testing/restructuredtext.py | 83 +++++++++++++++++++++---------
1 file changed, 59 insertions(+), 24 deletions(-)
diff --git a/sphinx/testing/restructuredtext.py b/sphinx/testing/restructuredtext.py
index 68c78199606..c8fcd597aa5 100644
--- a/sphinx/testing/restructuredtext.py
+++ b/sphinx/testing/restructuredtext.py
@@ -1,42 +1,77 @@
from __future__ import annotations
+import warnings
from typing import TYPE_CHECKING
-from docutils.core import publish_doctree
+from docutils import nodes
+from docutils.frontend import OptionParser
-from sphinx.io import SphinxStandaloneReader
+from sphinx.io import SphinxBaseReader
from sphinx.parsers import RSTParser
-from sphinx.util.docutils import sphinx_domains
+from sphinx.transforms import SphinxTransformer
+from sphinx.util.docutils import LoggingReporter, sphinx_domains
if TYPE_CHECKING:
- from docutils import nodes
+ from docutils.frontend import Values
from sphinx.application import Sphinx
def parse(app: Sphinx, text: str, docname: str = 'index') -> nodes.document:
- """Parse a string as reStructuredText with Sphinx application."""
+ """Parse a string as reStructuredText with Sphinx."""
+ config = app.config
env = app.env
+ registry = app.registry
+ srcdir = app.srcdir
+
+ source_path = str(srcdir / f'{docname}.rst')
+
+ # Get settings
+ settings_overrides = {
+ 'gettext_compact': True,
+ 'input_encoding': 'utf-8',
+ 'output_encoding': 'unicode',
+ 'traceback': True,
+ }
+ with warnings.catch_warnings():
+ warnings.filterwarnings('ignore', category=DeprecationWarning)
+ option_parser = OptionParser(
+ components=(RSTParser, SphinxBaseReader), defaults=settings_overrides
+ )
+ settings: Values = option_parser.get_default_values() # type: ignore[assignment]
+ settings._source = source_path
+ settings.env = env
+
+ # Create parser
+ parser = RSTParser()
+ parser._config = config
+ parser._env = env
+
+ # Create root document node
+ reporter = LoggingReporter(
+ source_path,
+ settings.report_level,
+ settings.halt_level,
+ settings.debug,
+ settings.error_encoding_error_handler,
+ )
+ document = nodes.document(settings, reporter, source=source_path)
+ document.note_source(source_path, -1)
+
+ # substitute transformer
+ document.transformer = transformer = SphinxTransformer(document)
+ transformer.add_transforms(SphinxBaseReader().get_transforms())
+ transformer.add_transforms(registry.get_transforms())
+ transformer.add_transforms(parser.get_transforms())
+
+ env.current_document.docname = docname
try:
- app.env.current_document.docname = docname
- reader = SphinxStandaloneReader()
- reader._setup_transforms(app.registry.get_transforms())
- parser = RSTParser()
- parser._config = app.config
- parser._env = app.env
with sphinx_domains(env):
- return publish_doctree(
- text,
- str(app.srcdir / f'{docname}.rst'),
- reader=reader,
- parser=parser,
- settings_overrides={
- 'env': env,
- 'gettext_compact': True,
- 'input_encoding': 'utf-8',
- 'output_encoding': 'unicode',
- 'traceback': True,
- },
- )
+ parser.parse(text, document)
+ document.current_source = document.current_line = None
+
+ transformer.apply_transforms()
finally:
env.current_document.docname = ''
+
+ return document
From 97f2fb2e13cb78f3d1788bee8f0bc24b21986e5c Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Tue, 17 Jun 2025 01:27:14 +0100
Subject: [PATCH 149/466] Add ``_get_settings()`` helper function (#13672)
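Each of these call sites previously repeated the same boilerplate: suppress the docutils ``OptionParser`` deprecation warning, build a parser from the writer components, and take its default values. The repeated shape, roughly (a sketch of the pattern being factored out; the helper's exact signature may differ)::

    import warnings

    from docutils.frontend import OptionParser

    def get_settings(*components, defaults, read_config_files=False):
        # hypothetical stand-in for the new private helper
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore', category=DeprecationWarning)
            parser = OptionParser(
                components=components,
                defaults=defaults,
                read_config_files=read_config_files,
            )
        return parser.get_default_values()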
---
sphinx/__init__.py | 10 ----------
sphinx/builders/html/__init__.py | 16 ++++------------
sphinx/builders/latex/__init__.py | 17 ++++-------------
sphinx/builders/manpage.py | 16 ++++------------
sphinx/builders/texinfo.py | 17 ++++-------------
sphinx/testing/restructuredtext.py | 13 ++-----------
sphinx/util/docutils.py | 23 ++++++++++++++++++++++-
tests/test_markup/test_markup.py | 17 +++++------------
tests/test_search.py | 8 +-------
tests/test_util/test_util_nodes.py | 7 +------
10 files changed, 47 insertions(+), 97 deletions(-)
diff --git a/sphinx/__init__.py b/sphinx/__init__.py
index 6ddfdba271f..79df3e09df3 100644
--- a/sphinx/__init__.py
+++ b/sphinx/__init__.py
@@ -5,22 +5,12 @@
from __future__ import annotations
-import warnings
-
from sphinx.util._pathlib import _StrPath
TYPE_CHECKING = False
if TYPE_CHECKING:
from typing import Final
-warnings.filterwarnings(
- 'ignore',
- 'The frontend.Option class .*',
- DeprecationWarning,
- module='docutils.frontend',
-)
-del warnings
-
__version__: Final = '8.3.0'
__display_version__: Final = __version__ # used for command line version
diff --git a/sphinx/builders/html/__init__.py b/sphinx/builders/html/__init__.py
index b6a67f5f453..e72dffc2b33 100644
--- a/sphinx/builders/html/__init__.py
+++ b/sphinx/builders/html/__init__.py
@@ -10,7 +10,6 @@
import re
import shutil
import sys
-import warnings
from pathlib import Path
from types import NoneType
from typing import TYPE_CHECKING
@@ -21,7 +20,6 @@
import jinja2.exceptions
from docutils import nodes
from docutils.core import Publisher
-from docutils.frontend import OptionParser
from docutils.io import DocTreeInput, StringOutput
from sphinx import __display_version__, package_dir
@@ -50,7 +48,7 @@
from sphinx.util._timestamps import _format_rfc3339_microseconds
from sphinx.util._uri import is_url
from sphinx.util.display import progress_message, status_iterator
-from sphinx.util.docutils import new_document
+from sphinx.util.docutils import _get_settings, new_document
from sphinx.util.fileutil import copy_asset
from sphinx.util.i18n import format_date
from sphinx.util.inventory import InventoryFile
@@ -459,15 +457,9 @@ def prepare_writing(self, docnames: Set[str]) -> None:
self.load_indexer(docnames)
self.docwriter = HTMLWriter(self)
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', category=DeprecationWarning)
- # DeprecationWarning: The frontend.OptionParser class will be replaced
- # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later.
- self.docsettings: Any = OptionParser(
- defaults=self.env.settings,
- components=(self.docwriter,),
- read_config_files=True,
- ).get_default_values()
+ self.docsettings = _get_settings(
+ HTMLWriter, defaults=self.env.settings, read_config_files=True
+ )
self.docsettings.compact_lists = bool(self.config.html_compact_lists)
# determine the additional indices to include
diff --git a/sphinx/builders/latex/__init__.py b/sphinx/builders/latex/__init__.py
index d22c959b276..feaa8e021cb 100644
--- a/sphinx/builders/latex/__init__.py
+++ b/sphinx/builders/latex/__init__.py
@@ -4,12 +4,9 @@
import os
import os.path
-import warnings
from pathlib import Path
from typing import TYPE_CHECKING
-from docutils.frontend import OptionParser
-
import sphinx.builders.latex.nodes # NoQA: F401 # Workaround: import this before writer to avoid ImportError
from sphinx import addnodes, highlighting, package_dir
from sphinx._cli.util.colour import darkgreen
@@ -27,7 +24,7 @@
from sphinx.locale import _, __
from sphinx.util import logging, texescape
from sphinx.util.display import progress_message, status_iterator
-from sphinx.util.docutils import SphinxFileOutput, new_document
+from sphinx.util.docutils import SphinxFileOutput, _get_settings, new_document
from sphinx.util.fileutil import copy_asset_file
from sphinx.util.i18n import format_date
from sphinx.util.nodes import inline_all_toctrees
@@ -301,15 +298,9 @@ def copy_assets(self) -> None:
def write_documents(self, _docnames: Set[str]) -> None:
docwriter = LaTeXWriter(self)
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', category=DeprecationWarning)
- # DeprecationWarning: The frontend.OptionParser class will be replaced
- # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later.
- docsettings: Any = OptionParser(
- defaults=self.env.settings,
- components=(docwriter,),
- read_config_files=True,
- ).get_default_values()
+ docsettings = _get_settings(
+ LaTeXWriter, defaults=self.env.settings, read_config_files=True
+ )
for entry in self.document_data:
docname, targetname, title, author, themename = entry[:5]
diff --git a/sphinx/builders/manpage.py b/sphinx/builders/manpage.py
index feeb35c1877..799f2a64f54 100644
--- a/sphinx/builders/manpage.py
+++ b/sphinx/builders/manpage.py
@@ -2,10 +2,8 @@
from __future__ import annotations
-import warnings
from typing import TYPE_CHECKING
-from docutils.frontend import OptionParser
from docutils.io import FileOutput
from sphinx import addnodes
@@ -14,13 +12,13 @@
from sphinx.locale import __
from sphinx.util import logging
from sphinx.util.display import progress_message
+from sphinx.util.docutils import _get_settings
from sphinx.util.nodes import inline_all_toctrees
from sphinx.util.osutil import ensuredir, make_filename_from_project
from sphinx.writers.manpage import ManualPageTranslator, ManualPageWriter
if TYPE_CHECKING:
from collections.abc import Set
- from typing import Any
from sphinx.application import Sphinx
from sphinx.config import Config
@@ -54,15 +52,9 @@ def get_target_uri(self, docname: str, typ: str | None = None) -> str:
@progress_message(__('writing'))
def write_documents(self, _docnames: Set[str]) -> None:
docwriter = ManualPageWriter(self)
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', category=DeprecationWarning)
- # DeprecationWarning: The frontend.OptionParser class will be replaced
- # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later.
- docsettings: Any = OptionParser(
- defaults=self.env.settings,
- components=(docwriter,),
- read_config_files=True,
- ).get_default_values()
+ docsettings = _get_settings(
+ ManualPageWriter, defaults=self.env.settings, read_config_files=True
+ )
for info in self.config.man_pages:
docname, name, description, authors, section = info
diff --git a/sphinx/builders/texinfo.py b/sphinx/builders/texinfo.py
index 6611be05465..a0a8a9f8dea 100644
--- a/sphinx/builders/texinfo.py
+++ b/sphinx/builders/texinfo.py
@@ -3,11 +3,9 @@
from __future__ import annotations
import os.path
-import warnings
from typing import TYPE_CHECKING
from docutils import nodes
-from docutils.frontend import OptionParser
from docutils.io import FileOutput
from sphinx import addnodes, package_dir
@@ -18,14 +16,13 @@
from sphinx.locale import _, __
from sphinx.util import logging
from sphinx.util.display import progress_message, status_iterator
-from sphinx.util.docutils import new_document
+from sphinx.util.docutils import _get_settings, new_document
from sphinx.util.nodes import inline_all_toctrees
from sphinx.util.osutil import SEP, copyfile, ensuredir, make_filename_from_project
from sphinx.writers.texinfo import TexinfoTranslator, TexinfoWriter
if TYPE_CHECKING:
from collections.abc import Iterable, Set
- from typing import Any
from docutils.nodes import Node
@@ -119,15 +116,9 @@ def write_documents(self, _docnames: Set[str]) -> None:
with progress_message(__('writing')):
self.post_process_images(doctree)
docwriter = TexinfoWriter(self)
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', category=DeprecationWarning)
- # DeprecationWarning: The frontend.OptionParser class will be replaced
- # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later.
- settings: Any = OptionParser(
- defaults=self.env.settings,
- components=(docwriter,),
- read_config_files=True,
- ).get_default_values()
+ settings = _get_settings(
+ TexinfoWriter, defaults=self.env.settings, read_config_files=True
+ )
settings.author = author
settings.title = title
settings.texinfo_filename = targetname[:-5] + '.info'
diff --git a/sphinx/testing/restructuredtext.py b/sphinx/testing/restructuredtext.py
index c8fcd597aa5..e5f32cf695d 100644
--- a/sphinx/testing/restructuredtext.py
+++ b/sphinx/testing/restructuredtext.py
@@ -1,19 +1,15 @@
from __future__ import annotations
-import warnings
from typing import TYPE_CHECKING
from docutils import nodes
-from docutils.frontend import OptionParser
from sphinx.io import SphinxBaseReader
from sphinx.parsers import RSTParser
from sphinx.transforms import SphinxTransformer
-from sphinx.util.docutils import LoggingReporter, sphinx_domains
+from sphinx.util.docutils import LoggingReporter, _get_settings, sphinx_domains
if TYPE_CHECKING:
- from docutils.frontend import Values
-
from sphinx.application import Sphinx
@@ -33,12 +29,7 @@ def parse(app: Sphinx, text: str, docname: str = 'index') -> nodes.document:
'output_encoding': 'unicode',
'traceback': True,
}
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', category=DeprecationWarning)
- option_parser = OptionParser(
- components=(RSTParser, SphinxBaseReader), defaults=settings_overrides
- )
- settings: Values = option_parser.get_default_values() # type: ignore[assignment]
+ settings = _get_settings(SphinxBaseReader, RSTParser, defaults=settings_overrides)
settings._source = source_path
settings.env = env
diff --git a/sphinx/util/docutils.py b/sphinx/util/docutils.py
index b53774aa26f..f75acb1cb5b 100644
--- a/sphinx/util/docutils.py
+++ b/sphinx/util/docutils.py
@@ -4,6 +4,7 @@
import os
import re
+import warnings
from contextlib import contextmanager
from copy import copy
from pathlib import Path
@@ -11,6 +12,7 @@
import docutils
from docutils import nodes
+from docutils.frontend import OptionParser
from docutils.io import FileOutput
from docutils.parsers.rst import Directive, directives, roles
from docutils.statemachine import StateMachine
@@ -27,10 +29,11 @@
)
if TYPE_CHECKING:
- from collections.abc import Iterator, Sequence
+ from collections.abc import Iterator, Mapping, Sequence
from types import ModuleType, TracebackType
from typing import Any, Protocol
+ from docutils import Component
from docutils.frontend import Values
from docutils.nodes import Element, Node, system_message
from docutils.parsers.rst.states import Inliner
@@ -816,3 +819,21 @@ def new_document(source_path: str, settings: Any = None) -> nodes.document:
document = nodes.document(settings, reporter, source=source_path)
document.note_source(source_path, -1)
return document
+
+
+def _get_settings(
+ *components: Component | type[Component],
+ defaults: Mapping[str, Any],
+ read_config_files: bool = False,
+) -> Values:
+ with warnings.catch_warnings(action='ignore', category=DeprecationWarning):
+ # DeprecationWarning: The frontend.OptionParser class will be replaced
+ # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later.
+ # DeprecationWarning: The frontend.Option class will be removed
+ # in Docutils 0.21 or later.
+ option_parser = OptionParser(
+ components=components,
+ defaults=defaults,
+ read_config_files=read_config_files,
+ )
+ return option_parser.get_default_values() # type: ignore[return-value]
diff --git a/tests/test_markup/test_markup.py b/tests/test_markup/test_markup.py
index 3a370ee46ad..f9da6038c7f 100644
--- a/tests/test_markup/test_markup.py
+++ b/tests/test_markup/test_markup.py
@@ -3,11 +3,10 @@
from __future__ import annotations
import re
-import warnings
from types import SimpleNamespace
import pytest
-from docutils import frontend, nodes, utils
+from docutils import nodes, utils
from docutils.parsers.rst import Parser as RstParser
from sphinx import addnodes
@@ -17,7 +16,7 @@
from sphinx.testing.util import assert_node
from sphinx.transforms import SphinxSmartQuotes
from sphinx.util import texescape
-from sphinx.util.docutils import sphinx_domains
+from sphinx.util.docutils import _get_settings, sphinx_domains
from sphinx.writers.html import HTMLWriter
from sphinx.writers.html5 import HTML5Translator
from sphinx.writers.latex import LaTeXTranslator, LaTeXWriter
@@ -27,15 +26,9 @@
def settings(app):
env = app.env
texescape.init() # otherwise done by the latex builder
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', category=DeprecationWarning)
- # DeprecationWarning: The frontend.OptionParser class will be replaced
- # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later.
- optparser = frontend.OptionParser(
- components=(RstParser, HTMLWriter, LaTeXWriter),
- defaults=default_settings,
- )
- settings = optparser.get_default_values()
+ settings = _get_settings(
+ RstParser, HTMLWriter, LaTeXWriter, defaults=default_settings
+ )
settings.smart_quotes = True
settings.env = env
settings.env.current_document.docname = 'dummy'
diff --git a/tests/test_search.py b/tests/test_search.py
index a8ad186a533..5ed753a2ea1 100644
--- a/tests/test_search.py
+++ b/tests/test_search.py
@@ -3,7 +3,6 @@
from __future__ import annotations
import json
-import warnings
from io import BytesIO
from typing import TYPE_CHECKING
@@ -169,12 +168,7 @@ def test_term_in_raw_directive(app: SphinxTestApp) -> None:
def test_IndexBuilder():
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', category=DeprecationWarning)
- # DeprecationWarning: The frontend.OptionParser class will be replaced
- # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later.
- optparser = frontend.OptionParser(components=(rst.Parser,))
- settings = optparser.get_default_values()
+ settings = frontend.get_default_settings(rst.Parser)
parser = rst.Parser()
domain1 = DummyDomain(
diff --git a/tests/test_util/test_util_nodes.py b/tests/test_util/test_util_nodes.py
index 61342efdb1b..39c43d6e88a 100644
--- a/tests/test_util/test_util_nodes.py
+++ b/tests/test_util/test_util_nodes.py
@@ -2,7 +2,6 @@
from __future__ import annotations
-import warnings
from textwrap import dedent
from typing import TYPE_CHECKING, Any
@@ -30,11 +29,7 @@ def _transform(doctree) -> None:
def create_new_document() -> document:
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', category=DeprecationWarning)
- # DeprecationWarning: The frontend.OptionParser class will be replaced
- # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later.
- settings = frontend.OptionParser(components=(rst.Parser,)).get_default_values()
+ settings = frontend.get_default_settings(rst.Parser)
settings.id_prefix = 'id'
document = new_document('dummy.txt', settings)
return document
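
For reference, a sketch of the shape of change this patch applies at each call
site; the writer class and defaults below are illustrative:

    from sphinx.util.docutils import _get_settings
    from sphinx.writers.manpage import ManualPageWriter

    # Each caller previously wrapped docutils.frontend.OptionParser in its
    # own warnings.catch_warnings() block; _get_settings() centralises the
    # suppression and returns the same Values object:
    settings = _get_settings(
        ManualPageWriter,
        defaults={'traceback': True},
        read_config_files=True,
    )
    assert settings.traceback  # the returned Values object is mutable, as before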
From 88f7fa95fee90382bdc65ccf80b7add980372c63 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Wed, 18 Jun 2025 03:33:07 +0100
Subject: [PATCH 150/466] Add ``_parse_str_to_doctree()`` helper method
(#13673)
---
sphinx/testing/restructuredtext.py | 46 ++++---------
sphinx/transforms/__init__.py | 3 +-
sphinx/util/docutils.py | 65 ++++++++++++++++++-
.../test_directive_object_description.py | 20 +++---
4 files changed, 89 insertions(+), 45 deletions(-)
diff --git a/sphinx/testing/restructuredtext.py b/sphinx/testing/restructuredtext.py
index e5f32cf695d..548ef5f27b4 100644
--- a/sphinx/testing/restructuredtext.py
+++ b/sphinx/testing/restructuredtext.py
@@ -2,14 +2,12 @@
from typing import TYPE_CHECKING
-from docutils import nodes
-
-from sphinx.io import SphinxBaseReader
from sphinx.parsers import RSTParser
-from sphinx.transforms import SphinxTransformer
-from sphinx.util.docutils import LoggingReporter, _get_settings, sphinx_domains
+from sphinx.util.docutils import _parse_str_to_doctree
if TYPE_CHECKING:
+ from docutils import nodes
+
from sphinx.application import Sphinx
@@ -20,49 +18,29 @@ def parse(app: Sphinx, text: str, docname: str = 'index') -> nodes.document:
registry = app.registry
srcdir = app.srcdir
- source_path = str(srcdir / f'{docname}.rst')
-
# Get settings
settings_overrides = {
+ 'env': env,
'gettext_compact': True,
'input_encoding': 'utf-8',
'output_encoding': 'unicode',
'traceback': True,
}
- settings = _get_settings(SphinxBaseReader, RSTParser, defaults=settings_overrides)
- settings._source = source_path
- settings.env = env
# Create parser
parser = RSTParser()
parser._config = config
parser._env = env
- # Create root document node
- reporter = LoggingReporter(
- source_path,
- settings.report_level,
- settings.halt_level,
- settings.debug,
- settings.error_encoding_error_handler,
- )
- document = nodes.document(settings, reporter, source=source_path)
- document.note_source(source_path, -1)
-
- # substitute transformer
- document.transformer = transformer = SphinxTransformer(document)
- transformer.add_transforms(SphinxBaseReader().get_transforms())
- transformer.add_transforms(registry.get_transforms())
- transformer.add_transforms(parser.get_transforms())
-
env.current_document.docname = docname
try:
- with sphinx_domains(env):
- parser.parse(text, document)
- document.current_source = document.current_line = None
-
- transformer.apply_transforms()
+ return _parse_str_to_doctree(
+ text,
+ filename=srcdir / f'{docname}.rst',
+ default_settings=settings_overrides,
+ env=env,
+ parser=parser,
+ transforms=registry.get_transforms(),
+ )
finally:
env.current_document.docname = ''
-
- return document
diff --git a/sphinx/transforms/__init__.py b/sphinx/transforms/__init__.py
index 7ba50aaa240..760a5e6a67d 100644
--- a/sphinx/transforms/__init__.py
+++ b/sphinx/transforms/__init__.py
@@ -18,7 +18,6 @@
from sphinx.deprecation import _deprecation_warning
from sphinx.locale import _, __
from sphinx.util import logging
-from sphinx.util.docutils import new_document
from sphinx.util.i18n import format_date
from sphinx.util.nodes import apply_source_workaround, is_smartquotable
@@ -97,6 +96,8 @@ def apply_transforms(self) -> None:
else:
# wrap the target node by document node during transforming
try:
+ from sphinx.util.docutils import new_document
+
document = new_document('')
if self.env:
document.settings.env = self.env
diff --git a/sphinx/util/docutils.py b/sphinx/util/docutils.py
index f75acb1cb5b..5d709ff0434 100644
--- a/sphinx/util/docutils.py
+++ b/sphinx/util/docutils.py
@@ -5,7 +5,7 @@
import os
import re
import warnings
-from contextlib import contextmanager
+from contextlib import contextmanager, nullcontext
from copy import copy
from pathlib import Path
from typing import TYPE_CHECKING
@@ -15,12 +15,15 @@
from docutils.frontend import OptionParser
from docutils.io import FileOutput
from docutils.parsers.rst import Directive, directives, roles
+from docutils.readers import standalone
from docutils.statemachine import StateMachine
+from docutils.transforms.references import DanglingReferences
from docutils.utils import Reporter, unescape
from sphinx.errors import SphinxError
from sphinx.locale import __
-from sphinx.util import logging
+from sphinx.transforms import SphinxTransformer
+from sphinx.util import logging, rst
from sphinx.util.parsing import nested_parse_to_nodes
logger = logging.getLogger(__name__)
@@ -36,8 +39,10 @@
from docutils import Component
from docutils.frontend import Values
from docutils.nodes import Element, Node, system_message
+ from docutils.parsers import Parser
from docutils.parsers.rst.states import Inliner
from docutils.statemachine import State, StringList
+ from docutils.transforms import Transform
from sphinx.builders import Builder
from sphinx.config import Config
@@ -69,6 +74,13 @@ def __call__(
) -> tuple[RoleFunction | None, list[system_message]]: ...
+_READER_TRANSFORMS = [
+ transform
+ for transform in standalone.Reader().get_transforms()
+ if transform is not DanglingReferences
+]
+
+
additional_nodes: set[type[Element]] = set()
@@ -821,6 +833,55 @@ def new_document(source_path: str, settings: Any = None) -> nodes.document:
return document
+def _parse_str_to_doctree(
+ content: str,
+ *,
+ filename: Path,
+ default_role: str = '',
+ default_settings: Mapping[str, Any],
+ env: BuildEnvironment,
+ parser: Parser,
+ transforms: Sequence[type[Transform]] = (),
+) -> nodes.document:
+ env.current_document._parser = parser
+
+ # Propagate exceptions by default when used programmatically:
+ defaults = {'traceback': True, **default_settings}
+ settings = _get_settings(standalone.Reader, parser, defaults=defaults)
+ settings._source = str(filename)
+
+ # Create root document node
+ reporter = LoggingReporter(
+ source=str(filename),
+ report_level=settings.report_level,
+ halt_level=settings.halt_level,
+ debug=settings.debug,
+ error_handler=settings.error_encoding_error_handler,
+ )
+ document = nodes.document(settings, reporter, source=str(filename))
+ document.note_source(str(filename), -1)
+
+ # substitute transformer
+ document.transformer = transformer = SphinxTransformer(document)
+ transformer.add_transforms(_READER_TRANSFORMS)
+ transformer.add_transforms(transforms)
+ transformer.add_transforms(parser.get_transforms())
+
+ if default_role:
+ default_role_cm = rst.default_role(env.current_document.docname, default_role)
+ else:
+ default_role_cm = nullcontext() # type: ignore[assignment]
+ with sphinx_domains(env), default_role_cm:
+ # parse content to abstract syntax tree
+ parser.parse(content, document)
+ document.current_source = document.current_line = None
+
+ # run transforms
+ transformer.apply_transforms()
+
+ return document
+
+
def _get_settings(
*components: Component | type[Component],
defaults: Mapping[str, Any],
diff --git a/tests/test_directives/test_directive_object_description.py b/tests/test_directives/test_directive_object_description.py
index 6b85c34d326..6759271bbf0 100644
--- a/tests/test_directives/test_directive_object_description.py
+++ b/tests/test_directives/test_directive_object_description.py
@@ -9,9 +9,8 @@
from docutils import nodes
from sphinx import addnodes
-from sphinx.io import _create_publisher
from sphinx.testing import restructuredtext
-from sphinx.util.docutils import sphinx_domains
+from sphinx.util.docutils import _parse_str_to_doctree
if TYPE_CHECKING:
from sphinx.application import Sphinx
@@ -24,15 +23,20 @@ def _doctree_for_test(
) -> nodes.document:
config = app.config
registry = app.registry
+
+ filename = env.doc2path(docname)
+ content = filename.read_text(encoding='utf-8')
+
env.prepare_settings(docname)
parser = registry.create_source_parser('restructuredtext', config=config, env=env)
- publisher = _create_publisher(
- env=env, parser=parser, transforms=registry.get_transforms()
+ return _parse_str_to_doctree(
+ content,
+ filename=env.doc2path(docname),
+ default_settings={'env': env},
+ env=env,
+ parser=parser,
+ transforms=registry.get_transforms(),
)
- with sphinx_domains(env):
- publisher.set_source(source_path=str(env.doc2path(docname)))
- publisher.publish()
- return publisher.document
@pytest.mark.sphinx('text', testroot='object-description-sections')
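
A hypothetical standalone sketch of the new helper's keyword contract,
mirroring the two call sites above (the `app` object providing env, config,
and registry is assumed):

    from sphinx.parsers import RSTParser
    from sphinx.util.docutils import _parse_str_to_doctree

    def doctree_from_string(app, text: str, docname: str = 'index'):
        env = app.env
        parser = RSTParser()
        parser._config = app.config
        parser._env = env
        # settings defaults, environment, parser, and transforms are passed
        # explicitly; no Publisher or Reader instance is needed
        return _parse_str_to_doctree(
            text,
            filename=app.srcdir / f'{docname}.rst',
            default_settings={'env': env},
            env=env,
            parser=parser,
            transforms=app.registry.get_transforms(),
        )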
From c188e3f24374c2277345e75b342c2e9f09445df3 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Wed, 18 Jun 2025 03:33:34 +0100
Subject: [PATCH 151/466] Restore support for nested ``only`` nodes in toctrees
(#13663)
---
sphinx/environment/adapters/toctree.py | 118 +++++++++++-------
sphinx/util/tags.py | 4 +-
tests/roots/test-toctree-only/conf.py | 0
tests/roots/test-toctree-only/index.rst | 26 ++++
.../test_environment_toctree.py | 92 +++++++++++++-
5 files changed, 192 insertions(+), 48 deletions(-)
create mode 100644 tests/roots/test-toctree-only/conf.py
create mode 100644 tests/roots/test-toctree-only/index.rst
diff --git a/sphinx/environment/adapters/toctree.py b/sphinx/environment/adapters/toctree.py
index 4708383d64b..670ac786629 100644
--- a/sphinx/environment/adapters/toctree.py
+++ b/sphinx/environment/adapters/toctree.py
@@ -482,56 +482,84 @@ def _toctree_add_classes(node: Element, depth: int, docname: str) -> None:
subnode = subnode.parent
-ET = TypeVar('ET', bound=Element)
+_ET = TypeVar('_ET', bound=Element)
def _toctree_copy(
- node: ET, depth: int, maxdepth: int, collapse: bool, tags: Tags
-) -> ET:
+ node: _ET, depth: int, maxdepth: int, collapse: bool, tags: Tags
+) -> _ET:
"""Utility: Cut and deep-copy a TOC at a specified depth."""
- keep_bullet_list_sub_nodes = depth <= 1 or (
- (depth <= maxdepth or maxdepth <= 0) and (not collapse or 'iscurrent' in node)
- )
+ assert not isinstance(node, addnodes.only)
+ depth = max(depth - 1, 1)
+ copied = _toctree_copy_seq(node, depth, maxdepth, collapse, tags, initial_call=True)
+ assert len(copied) == 1
+ return copied[0] # type: ignore[return-value]
- copy = node.copy()
- for subnode in node.children:
- if isinstance(subnode, addnodes.compact_paragraph | nodes.list_item):
- # for <p> and <li>, just recurse
- copy.append(_toctree_copy(subnode, depth, maxdepth, collapse, tags))
- elif isinstance(subnode, nodes.bullet_list):
- # for <ul>, copy if the entry is top-level
- # or, copy if the depth is within bounds and;
- # collapsing is disabled or the sub-entry's parent is 'current'.
- # The boolean is constant so is calculated outwith the loop.
- if keep_bullet_list_sub_nodes:
- copy.append(_toctree_copy(subnode, depth + 1, maxdepth, collapse, tags))
- elif isinstance(subnode, addnodes.toctree):
- # copy sub toctree nodes for later processing
- copy.append(subnode.copy())
- elif isinstance(subnode, addnodes.only):
- # only keep children if the only node matches the tags
- if _only_node_keep_children(subnode, tags):
- for child in subnode.children:
- copy.append(
- _toctree_copy(
- child,
- depth,
- maxdepth,
- collapse,
- tags, # type: ignore[type-var]
- )
- )
- elif isinstance(subnode, nodes.reference | nodes.title):
- # deep copy references and captions
- sub_node_copy = subnode.copy()
- sub_node_copy.children = [child.deepcopy() for child in subnode.children]
- for child in sub_node_copy.children:
- child.parent = sub_node_copy
- copy.append(sub_node_copy)
- else:
- msg = f'Unexpected node type {subnode.__class__.__name__!r}!'
- raise ValueError(msg) # NoQA: TRY004
- return copy
+
+def _toctree_copy_seq(
+ node: Node,
+ depth: int,
+ maxdepth: int,
+ collapse: bool,
+ tags: Tags,
+ *,
+ initial_call: bool = False,
+ is_current: bool = False,
+) -> list[Element]:
+ copy: Element
+ if isinstance(node, addnodes.compact_paragraph | nodes.list_item):
+ # for <p> and <li>, just recurse
+ copy = node.copy()
+ for subnode in node.children:
+ copy += _toctree_copy_seq( # type: ignore[assignment,operator]
+ subnode, depth, maxdepth, collapse, tags, is_current='iscurrent' in node
+ )
+ return [copy]
+
+ if isinstance(node, nodes.bullet_list):
+ # for <ul>, copy if the entry is top-level
+ # or, copy if the depth is within bounds and;
+ # collapsing is disabled or the sub-entry's parent is 'current'.
+ # The boolean is constant so is calculated outwith the loop.
+ keep_bullet_list_sub_nodes = depth <= 1 or (
+ (depth <= maxdepth or maxdepth <= 0)
+ and (not collapse or is_current or 'iscurrent' in node)
+ )
+ if not keep_bullet_list_sub_nodes and not initial_call:
+ return []
+ depth += 1
+ copy = node.copy()
+ for subnode in node.children:
+ copy += _toctree_copy_seq(
+ subnode, depth, maxdepth, collapse, tags, is_current='iscurrent' in node
+ )
+ return [copy]
+
+ if isinstance(node, addnodes.toctree):
+ # copy sub toctree nodes for later processing
+ return [node.copy()]
+
+ if isinstance(node, addnodes.only):
+ # only keep children if the only node matches the tags
+ if not _only_node_keep_children(node, tags):
+ return []
+ copied: list[Element] = []
+ for subnode in node.children:
+ copied += _toctree_copy_seq(
+ subnode, depth, maxdepth, collapse, tags, is_current='iscurrent' in node
+ )
+ return copied
+
+ if isinstance(node, nodes.reference | nodes.title):
+ # deep copy references and captions
+ sub_node_copy = node.copy()
+ sub_node_copy.children = [child.deepcopy() for child in node.children]
+ for child in sub_node_copy.children:
+ child.parent = sub_node_copy
+ return [sub_node_copy]
+
+ msg = f'Unexpected node type {node.__class__.__name__!r}!'
+ raise ValueError(msg)
def _get_toctree_ancestors(
diff --git a/sphinx/util/tags.py b/sphinx/util/tags.py
index 4467534a945..ded965c31fa 100644
--- a/sphinx/util/tags.py
+++ b/sphinx/util/tags.py
@@ -10,7 +10,7 @@
from sphinx.deprecation import RemovedInSphinx90Warning
if TYPE_CHECKING:
- from collections.abc import Iterator, Sequence
+ from collections.abc import Collection, Iterator
from typing import Literal
_ENV = jinja2.environment.Environment()
@@ -42,7 +42,7 @@ def parse_compare(self) -> jinja2.nodes.Expr:
class Tags:
- def __init__(self, tags: Sequence[str] = ()) -> None:
+ def __init__(self, tags: Collection[str] = ()) -> None:
self._tags = set(tags or ())
self._condition_cache: dict[str, bool] = {}
diff --git a/tests/roots/test-toctree-only/conf.py b/tests/roots/test-toctree-only/conf.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/roots/test-toctree-only/index.rst b/tests/roots/test-toctree-only/index.rst
new file mode 100644
index 00000000000..cbfb903a588
--- /dev/null
+++ b/tests/roots/test-toctree-only/index.rst
@@ -0,0 +1,26 @@
+test-toctree-only
+=================
+
+.. only:: not nonexistent
+
+ hello world
+
+ .. only:: text or not text
+
+ .. js:data:: test_toctree_only1
+
+ lorem ipsum dolor sit amet...
+
+ .. only:: not lorem
+
+ .. only:: not ipsum
+
+ .. js:data:: test_toctree_only2
+
+ lorem ipsum dolor sit amet...
+
+ after ``only:: not ipsum``
+
+ .. js:data:: test_toctree_only2
+
+we're just normal men; we're just innocent men
diff --git a/tests/test_environment/test_environment_toctree.py b/tests/test_environment/test_environment_toctree.py
index 22474daef55..dcf5f8954da 100644
--- a/tests/test_environment/test_environment_toctree.py
+++ b/tests/test_environment/test_environment_toctree.py
@@ -4,6 +4,7 @@
from typing import TYPE_CHECKING
+import docutils
import pytest
from docutils import nodes
from docutils.nodes import bullet_list, list_item, literal, reference, title
@@ -11,8 +12,13 @@
from sphinx import addnodes
from sphinx.addnodes import compact_paragraph, only
from sphinx.builders.html import StandaloneHTMLBuilder
-from sphinx.environment.adapters.toctree import document_toc, global_toctree_for_doc
+from sphinx.environment.adapters.toctree import (
+ _toctree_copy,
+ document_toc,
+ global_toctree_for_doc,
+)
from sphinx.testing.util import assert_node
+from sphinx.util.tags import Tags
if TYPE_CHECKING:
from sphinx.testing.util import SphinxTestApp
@@ -916,3 +922,87 @@ def test_toctree_index(app):
numbered=0,
entries=[(None, 'genindex'), (None, 'modindex'), (None, 'search')],
)
+
+
+@pytest.mark.sphinx('dummy', testroot='toctree-only')
+def test_toctree_only(app):
+ # regression test for https://github.com/sphinx-doc/sphinx/issues/13022
+ # we mainly care that this doesn't fail
+
+ if docutils.__version_info__[:2] >= (0, 22):
+ true = '1'
+ else:
+ true = 'True'
+ expected_pformat = f"""\
+<bullet_list>
+    <list_item>
+        <compact_paragraph>
+            <reference anchorname="" internal="{true}" refuri="index">
+                test-toctree-only
+        <bullet_list>
+            <list_item>
+                <compact_paragraph skip_section_number="{true}">
+                    <reference anchorname="#test_toctree_only1" internal="{true}" refuri="index">
+                        <literal>
+                            test_toctree_only1
+            <list_item>
+                <compact_paragraph skip_section_number="{true}">
+                    <reference anchorname="#test_toctree_only2" internal="{true}" refuri="index">
+                        <literal>
+                            test_toctree_only2
+            <list_item>
+                <compact_paragraph skip_section_number="{true}">
+                    <reference anchorname="#id0" internal="{true}" refuri="index">
+                        <literal>
+                            test_toctree_only2
+"""
+ app.build()
+ toc = document_toc(app.env, 'index', app.tags)
+ assert toc.pformat(' ') == expected_pformat
+
+
+def test_toctree_copy_only():
+ # regression test for https://github.com/sphinx-doc/sphinx/issues/13022
+ # ensure ``_toctree_copy()`` properly filters out ``only`` nodes,
+ # including nested nodes.
+ node = nodes.literal('lobster!', 'lobster!')
+ node = nodes.reference('', '', node, anchorname='', internal=True, refuri='index')
+ node = addnodes.only('', node, expr='lobster')
+ node = addnodes.compact_paragraph('', '', node, skip_section_number=True)
+ node = nodes.list_item('', node)
+ node = addnodes.only('', node, expr='not spam')
+ node = addnodes.only('', node, expr='lobster')
+ node = addnodes.only('', node, expr='not ham')
+ node = nodes.bullet_list('', node)
+ # this is a tree of the shape:
+ # <bullet_list>
+ #     <only expr="not ham">
+ #         <only expr="lobster">
+ #             <only expr="not spam">
+ #                 <list_item>
+ #                     <compact_paragraph skip_section_number="True">
+ #                         <only expr="lobster">
+ #                             <reference anchorname="" internal="True" refuri="index">
+ #                                 lobster!
+
+ tags = Tags({'lobster'})
+ toc = _toctree_copy(node, 2, 0, False, tags)
+ # the filtered ToC should look like:
+ # <bullet_list>
+ #     <list_item>
+ #         <compact_paragraph skip_section_number="True">
+ #             <reference anchorname="" internal="True" refuri="index">
+ #                 lobster!
+
+ # no only nodes should remain
+ assert list(toc.findall(addnodes.only)) == []
+
+ # the tree is preserved
+ assert isinstance(toc, nodes.bullet_list)
+ assert isinstance(toc[0], nodes.list_item)
+ assert isinstance(toc[0][0], addnodes.compact_paragraph)
+ assert isinstance(toc[0][0][0], nodes.reference)
+ assert isinstance(toc[0][0][0][0], nodes.literal)
+ assert toc[0][0][0][0][0] == nodes.Text('lobster!')
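
The ``expr`` attributes above are evaluated against the active tags; a short
sketch of that evaluation with the same tag set as the test:

    from sphinx.util.tags import Tags

    tags = Tags({'lobster'})
    assert tags.eval_condition('lobster')          # tag set: children kept
    assert tags.eval_condition('not spam')         # unset tag, negated: kept
    assert tags.eval_condition('not ham')          # likewise kept
    assert not tags.eval_condition('spam or ham')  # neither set: filtered out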
From ea53cd47db079e6d0f133a25d18558119974eda8 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Wed, 18 Jun 2025 05:02:48 +0100
Subject: [PATCH 152/466] Bump Ruff to 0.12.0
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 3d0e01deb0a..c492e346309 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -93,7 +93,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.13",
+ "ruff==0.12.0",
"mypy==1.15.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
@@ -136,7 +136,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.11.13",
+ "ruff==0.12.0",
"sphinx-lint>=0.9",
]
package = [
From 114093cff01562a6a50f8c88b59c7b2fed52a39a Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Thu, 19 Jun 2025 02:25:59 +0100
Subject: [PATCH 153/466] Use ``_parse_str_to_doctree()`` in
``Builder.read_doc()`` (#13676)
---
sphinx/builders/__init__.py | 52 ++++++++++---------
sphinx/util/docutils.py | 16 +++++-
.../test_directive_object_description.py | 2 +-
3 files changed, 43 insertions(+), 27 deletions(-)
diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py
index 184a27f2cd2..149891bc592 100644
--- a/sphinx/builders/__init__.py
+++ b/sphinx/builders/__init__.py
@@ -11,7 +11,6 @@
from typing import TYPE_CHECKING, final
from docutils import nodes
-from docutils.utils import DependencyList
from sphinx._cli.util.colour import bold
from sphinx.deprecation import _deprecation_warning
@@ -23,16 +22,12 @@
from sphinx.environment.adapters.asset import ImageAdapter
from sphinx.errors import SphinxError
from sphinx.locale import __
-from sphinx.util import (
- get_filetype,
- logging,
- rst,
-)
+from sphinx.util import get_filetype, logging
from sphinx.util._importer import import_object
from sphinx.util._pathlib import _StrPathProperty
from sphinx.util.build_phase import BuildPhase
from sphinx.util.display import progress_message, status_iterator
-from sphinx.util.docutils import sphinx_domains
+from sphinx.util.docutils import _parse_str_to_doctree
from sphinx.util.i18n import CatalogRepository, docname_to_domain
from sphinx.util.osutil import ensuredir, relative_uri, relpath
from sphinx.util.parallel import (
@@ -644,26 +639,33 @@ def read_doc(self, docname: str, *, _cache: bool = True) -> None:
if docutils_conf.is_file():
env.note_dependency(docutils_conf)
- filename = str(env.doc2path(docname))
- filetype = get_filetype(self._app.config.source_suffix, filename)
- publisher = self._registry._get_publisher(
- filetype, config=self.config, env=self.env
+ filename = env.doc2path(docname)
+
+ # set up error_handler for the target document
+ error_handler = _UnicodeDecodeErrorHandler(docname)
+ codecs.register_error('sphinx', error_handler) # type: ignore[arg-type]
+
+ # read the source file
+ content = filename.read_text(
+ encoding=env.settings['input_encoding'], errors='sphinx'
)
- self.env.current_document._parser = publisher.parser
- # record_dependencies is mutable even though it is in settings,
- # explicitly re-initialise for each document
- publisher.settings.record_dependencies = DependencyList()
- with (
- sphinx_domains(env),
- rst.default_role(docname, self.config.default_role),
- ):
- # set up error_handler for the target document
- error_handler = _UnicodeDecodeErrorHandler(docname)
- codecs.register_error('sphinx', error_handler) # type: ignore[arg-type]
- publisher.set_source(source_path=filename)
- publisher.publish()
- doctree = publisher.document
+ # TODO: move the "source-read" event to here.
+
+ filetype = get_filetype(self.config.source_suffix, filename)
+ parser = self._registry.create_source_parser(
+ filetype, config=self.config, env=env
+ )
+ doctree = _parse_str_to_doctree(
+ content,
+ filename=filename,
+ default_role=self.config.default_role,
+ default_settings=env.settings,
+ env=env,
+ events=self.events,
+ parser=parser,
+ transforms=self._registry.get_transforms(),
+ )
# store time of reading, for outdated files detection
env.all_docs[docname] = time.time_ns() // 1_000
diff --git a/sphinx/util/docutils.py b/sphinx/util/docutils.py
index 5d709ff0434..2673a3fc77f 100644
--- a/sphinx/util/docutils.py
+++ b/sphinx/util/docutils.py
@@ -47,6 +47,7 @@
from sphinx.builders import Builder
from sphinx.config import Config
from sphinx.environment import BuildEnvironment
+ from sphinx.events import EventManager
from sphinx.util.typing import RoleFunction
class _LanguageModule(Protocol):
@@ -840,6 +841,7 @@ def _parse_str_to_doctree(
default_role: str = '',
default_settings: Mapping[str, Any],
env: BuildEnvironment,
+ events: EventManager | None = None,
parser: Parser,
transforms: Sequence[type[Transform]] = (),
) -> nodes.document:
@@ -847,7 +849,9 @@ def _parse_str_to_doctree(
# Propagate exceptions by default when used programmatically:
defaults = {'traceback': True, **default_settings}
- settings = _get_settings(standalone.Reader, parser, defaults=defaults)
+ settings = _get_settings(
+ standalone.Reader, parser, defaults=defaults, read_config_files=True
+ )
settings._source = str(filename)
# Create root document node
@@ -872,6 +876,16 @@ def _parse_str_to_doctree(
else:
default_role_cm = nullcontext() # type: ignore[assignment]
with sphinx_domains(env), default_role_cm:
+ # TODO: Move the stanza below to Builder.read_doc(), within
+ # a sphinx_domains() context manager.
+ # This will require changes to IntersphinxDispatcher and/or
+ # CustomReSTDispatcher.
+ if events is not None:
+ # emit "source-read" event
+ arg = [content]
+ events.emit('source-read', env.current_document.docname, arg)
+ content = arg[0]
+
# parse content to abstract syntax tree
parser.parse(content, document)
document.current_source = document.current_line = None
diff --git a/tests/test_directives/test_directive_object_description.py b/tests/test_directives/test_directive_object_description.py
index 6759271bbf0..4cbe3e26697 100644
--- a/tests/test_directives/test_directive_object_description.py
+++ b/tests/test_directives/test_directive_object_description.py
@@ -31,7 +31,7 @@ def _doctree_for_test(
parser = registry.create_source_parser('restructuredtext', config=config, env=env)
return _parse_str_to_doctree(
content,
- filename=env.doc2path(docname),
+ filename=filename,
default_settings={'env': env},
env=env,
parser=parser,
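
The new ``events`` parameter is what delivers the long-standing "source-read"
event during parsing; a minimal handler sketch, with a hypothetical
placeholder token:

    def replace_placeholders(app, docname, source):
        # source is a one-element list so handlers can modify it in place
        source[0] = source[0].replace('{{PROJECT}}', app.config.project)

    def setup(app):
        app.connect('source-read', replace_placeholders)
        return {'version': '1', 'parallel_read_safe': True}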
From 5cf62e55c96824545c157372d741b595d8c98c7f Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Thu, 19 Jun 2025 09:21:36 +0100
Subject: [PATCH 154/466] Note latin-1 encoded files in .gitattributes
---
.gitattributes | 2 ++
1 file changed, 2 insertions(+)
diff --git a/.gitattributes b/.gitattributes
index c10128857f4..c8acd10815a 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -51,6 +51,8 @@ tests/roots/test-pycode/cp_1251_coded.py dos
# Non UTF-8 encodings
tests/roots/test-pycode/cp_1251_coded.py working-tree-encoding=windows-1251
+tests/roots/test-root/wrongenc.inc working-tree-encoding=latin-1
+tests/roots/test-warnings/wrongenc.inc working-tree-encoding=latin-1
# Generated files
# https://github.com/github/linguist/blob/master/docs/overrides.md
From 9c4902f0908d6b896f7b07ec75fd23e3b53f6ff0 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Thu, 19 Jun 2025 09:33:38 +0100
Subject: [PATCH 155/466] Deprecate replacing non-decodable source bytes
(#13679)
---
CHANGES.rst | 3 +++
sphinx/builders/__init__.py | 24 ++++++++++++----------
tests/test_builders/test_build_warnings.py | 2 +-
3 files changed, 17 insertions(+), 12 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 47eb18fc8a6..3b0a591b8e6 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -26,6 +26,9 @@ Deprecated
* #13665: Deprecate support for non-UTF 8 source encodings,
scheduled for removal in Sphinx 10.
Patch by Adam Turner.
+* #13679: Non-decodable characters in source files will raise an error in Sphinx 9.
+ Currently, such bytes are replaced with '?' along with logging a warning.
+ Patch by Adam Turner.
Features added
--------------
diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py
index 149891bc592..2dd972ecfe0 100644
--- a/sphinx/builders/__init__.py
+++ b/sphinx/builders/__init__.py
@@ -642,6 +642,7 @@ def read_doc(self, docname: str, *, _cache: bool = True) -> None:
filename = env.doc2path(docname)
# set up error_handler for the target document
+ # xref RemovedInSphinx90Warning
error_handler = _UnicodeDecodeErrorHandler(docname)
codecs.register_error('sphinx', error_handler) # type: ignore[arg-type]
@@ -903,20 +904,21 @@ def __init__(self, docname: str, /) -> None:
self.docname = docname
def __call__(self, error: UnicodeDecodeError) -> tuple[str, int]:
- line_start = error.object.rfind(b'\n', 0, error.start)
- line_end = error.object.find(b'\n', error.start)
+ obj = error.object
+ line_start = obj.rfind(b'\n', 0, error.start)
+ line_end = obj.find(b'\n', error.start)
if line_end == -1:
- line_end = len(error.object)
- line_num = error.object.count(b'\n', 0, error.start) + 1
+ line_end = len(obj)
+ line_num = obj.count(b'\n', 0, error.start) + 1
logger.warning(
- __('undecodable source characters, replacing with "?": %r'),
- (
- error.object[line_start + 1 : error.start]
- + b'>>>'
- + error.object[error.start : error.end]
- + b'<<<'
- + error.object[error.end : line_end]
+ __(
+ "undecodable source characters, replacing with '?': '%s>>>%s<<<%s'. "
+ 'This will become an error in Sphinx 9.0.'
+ # xref RemovedInSphinx90Warning
),
+ obj[line_start + 1 : error.start].decode(errors='backslashreplace'),
+ obj[error.start : error.end].decode(errors='backslashreplace'),
+ obj[error.end : line_end].decode(errors='backslashreplace'),
location=(self.docname, line_num),
)
return '?', error.end
diff --git a/tests/test_builders/test_build_warnings.py b/tests/test_builders/test_build_warnings.py
index 65e359ad666..e4ab763f9f4 100644
--- a/tests/test_builders/test_build_warnings.py
+++ b/tests/test_builders/test_build_warnings.py
@@ -23,7 +23,7 @@
{root}/index.rst:\\d+: WARNING: image file not readable: foo.png \\[image.not_readable\\]
{root}/index.rst:\\d+: WARNING: download file not readable: {root}/nonexisting.png \\[download.not_readable\\]
{root}/undecodable.rst:\\d+: WARNING: undecodable source characters, replacing \
-with "\\?": b?'here: >>>(\\\\|/)xbb<<<((\\\\|/)r)?'
+with '\\?': 'here: >>>(\\\\|/)xbb<<<'\\. This will become an error in Sphinx 9\\.0\\.
"""
HTML_WARNINGS = (
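
The handler follows the standard codecs error-handler protocol: a callable
that receives the UnicodeDecodeError and returns the replacement string plus
the position at which decoding resumes. A self-contained sketch under a
hypothetical handler name:

    import codecs

    def replace_with_question_mark(error: UnicodeDecodeError) -> tuple[str, int]:
        # substitute '?' for the undecodable byte and continue after it
        return '?', error.end

    codecs.register_error('sketch-replace', replace_with_question_mark)
    assert b'here: \xbb!'.decode('utf-8', errors='sketch-replace') == 'here: ?!'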
From 711eb2b34ae7e293eba7d14609e1c5b583888b80 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Thu, 19 Jun 2025 10:07:32 +0100
Subject: [PATCH 156/466] Remove unused publisher creation functions (#13680)
---
sphinx/io.py | 22 +---------------------
sphinx/registry.py | 18 ------------------
2 files changed, 1 insertion(+), 39 deletions(-)
diff --git a/sphinx/io.py b/sphinx/io.py
index 2d7b41beda5..1746a3a139d 100644
--- a/sphinx/io.py
+++ b/sphinx/io.py
@@ -4,8 +4,7 @@
from typing import TYPE_CHECKING
-from docutils.core import Publisher
-from docutils.io import FileInput, NullOutput
+from docutils.io import FileInput
from docutils.readers import standalone
from docutils.transforms.references import DanglingReferences
from docutils.writers import UnfilteredWriter
@@ -110,22 +109,3 @@ class SphinxFileInput(FileInput):
def __init__(self, *args: Any, **kwargs: Any) -> None:
kwargs['error_handler'] = 'sphinx'
super().__init__(*args, **kwargs)
-
-
-def _create_publisher(
- *, env: BuildEnvironment, parser: Parser, transforms: list[type[Transform]]
-) -> Publisher:
- reader = SphinxStandaloneReader()
- reader._setup_transforms(transforms)
- pub = Publisher(
- reader=reader,
- parser=parser,
- writer=SphinxDummyWriter(),
- source_class=SphinxFileInput,
- destination=NullOutput(),
- )
- # Propagate exceptions by default when used programmatically:
- defaults = {'traceback': True, **env.settings}
- # Set default settings
- pub.get_settings(**defaults)
- return pub
diff --git a/sphinx/registry.py b/sphinx/registry.py
index 6f7d7c477fe..1cf917d31c7 100644
--- a/sphinx/registry.py
+++ b/sphinx/registry.py
@@ -12,7 +12,6 @@
from sphinx.domains.std import GenericObject, Target
from sphinx.errors import ExtensionError, SphinxError, VersionRequirementError
from sphinx.extension import Extension
-from sphinx.io import _create_publisher
from sphinx.locale import __
from sphinx.parsers import Parser as SphinxParser
from sphinx.roles import XRefRole
@@ -26,7 +25,6 @@
from typing import Any, TypeAlias
from docutils import nodes
- from docutils.core import Publisher
from docutils.nodes import Element, Node, TextElement
from docutils.parsers import Parser
from docutils.parsers.rst import Directive
@@ -153,9 +151,6 @@ def __init__(self) -> None:
#: additional transforms; list of transforms
self.transforms: list[type[Transform]] = []
- # private cache of Docutils Publishers (file type -> publisher object)
- self.publishers: dict[str, Publisher] = {}
-
@property
def autodoc_attrgettrs(self) -> dict[type, Callable[[Any, str, Any], Any]]:
return self.autodoc_attrgetters
@@ -596,19 +591,6 @@ def get_envversion(self, app: Sphinx) -> Mapping[str, int]:
return _get_env_version(app.extensions)
- def _get_publisher(
- self, filetype: str, *, config: Config, env: BuildEnvironment
- ) -> Publisher:
- try:
- return self.publishers[filetype]
- except KeyError:
- pass
- parser = self.create_source_parser(filetype, config=config, env=env)
- transforms = self.get_transforms()
- publisher = _create_publisher(env=env, parser=parser, transforms=transforms)
- self.publishers[filetype] = publisher
- return publisher
-
def merge_source_suffix(app: Sphinx, config: Config) -> None:
"""Merge any user-specified source_suffix with any added by extensions."""
From f2bf37dad05f078a4881b7a02d8c709b63fd08b2 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Thu, 19 Jun 2025 11:25:26 +0100
Subject: [PATCH 157/466] Extract change detection into ``_has_doc_changed()``
function (#13681)
---
sphinx/environment/__init__.py | 156 +++++++++++-------
tests/js/fixtures/cpp/searchindex.js | 2 +-
tests/js/fixtures/multiterm/searchindex.js | 2 +-
tests/js/fixtures/partial/searchindex.js | 2 +-
tests/js/fixtures/titles/searchindex.js | 2 +-
.../test_environment_record_dependencies.py | 4 +-
6 files changed, 97 insertions(+), 71 deletions(-)
diff --git a/sphinx/environment/__init__.py b/sphinx/environment/__init__.py
index 36b364f5c3d..fa7d17d7800 100644
--- a/sphinx/environment/__init__.py
+++ b/sphinx/environment/__init__.py
@@ -24,7 +24,7 @@
from sphinx.transforms import SphinxTransformer
from sphinx.util import logging
from sphinx.util._files import DownloadFiles, FilenameUniqDict
-from sphinx.util._pathlib import _StrPath, _StrPathProperty
+from sphinx.util._pathlib import _StrPathProperty
from sphinx.util._serialise import stable_str
from sphinx.util._timestamps import _format_rfc3339_microseconds
from sphinx.util.docutils import LoggingReporter
@@ -33,7 +33,7 @@
from sphinx.util.osutil import _last_modified_time, _relative_path
if TYPE_CHECKING:
- from collections.abc import Callable, Iterable, Iterator, Mapping
+ from collections.abc import Callable, Iterable, Iterator, Mapping, Set
from typing import Any, Final, Literal
from docutils import nodes
@@ -50,6 +50,7 @@
from sphinx.extension import Extension
from sphinx.project import Project
from sphinx.registry import SphinxComponentRegistry
+ from sphinx.util._pathlib import _StrPath
from sphinx.util.tags import Tags
logger = logging.getLogger(__name__)
@@ -74,7 +75,7 @@
# This is increased every time an environment attribute is added
# or changed to properly invalidate pickle files.
-ENV_VERSION = 65
+ENV_VERSION = 66
# config status
CONFIG_UNSET = -1
@@ -519,7 +520,7 @@ def get_outdated_files(
) -> tuple[set[str], set[str], set[str]]:
"""Return (added, changed, removed) sets."""
# clear all files no longer present
- removed = set(self.all_docs) - self.found_docs
+ removed = self.all_docs.keys() - self.found_docs
added: set[str] = set()
changed: set[str] = set()
@@ -527,65 +528,25 @@ def get_outdated_files(
if config_changed:
# config values affect e.g. substitutions
added = self.found_docs
- else:
- for docname in self.found_docs:
- if docname not in self.all_docs:
- logger.debug('[build target] added %r', docname)
- added.add(docname)
- continue
- # if the doctree file is not there, rebuild
- filename = self.doctreedir / f'{docname}.doctree'
- if not filename.is_file():
- logger.debug('[build target] changed %r', docname)
- changed.add(docname)
- continue
- # check the "reread always" list
- if docname in self.reread_always:
- logger.debug('[build target] changed %r', docname)
- changed.add(docname)
- continue
- # check the mtime of the document
- mtime = self.all_docs[docname]
- newmtime = _last_modified_time(self.doc2path(docname))
- if newmtime > mtime:
- logger.debug(
- '[build target] outdated %r: %s -> %s',
- docname,
- _format_rfc3339_microseconds(mtime),
- _format_rfc3339_microseconds(newmtime),
- )
- changed.add(docname)
- continue
- # finally, check the mtime of dependencies
- if docname not in self.dependencies:
- continue
- for dep in self.dependencies[docname]:
- try:
- # this will do the right thing when dep is absolute too
- dep_path = self.srcdir / dep
- if not dep_path.is_file():
- logger.debug(
- '[build target] changed %r missing dependency %r',
- docname,
- dep_path,
- )
- changed.add(docname)
- break
- depmtime = _last_modified_time(dep_path)
- if depmtime > mtime:
- logger.debug(
- '[build target] outdated %r from dependency %r: %s -> %s',
- docname,
- dep_path,
- _format_rfc3339_microseconds(mtime),
- _format_rfc3339_microseconds(depmtime),
- )
- changed.add(docname)
- break
- except OSError:
- # give it another chance
- changed.add(docname)
- break
+ return added, changed, removed
+
+ for docname in self.found_docs:
+ if docname not in self.all_docs:
+ logger.debug('[build target] added %r', docname)
+ added.add(docname)
+ continue
+
+ # if the document has changed, rebuild
+ if _has_doc_changed(
+ docname,
+ filename=self.doc2path(docname),
+ reread_always=self.reread_always,
+ doctreedir=self.doctreedir,
+ all_docs=self.all_docs,
+ dependencies=self.dependencies,
+ ):
+ changed.add(docname)
+ continue
return added, changed, removed
@@ -649,7 +610,9 @@ def note_dependency(
"""
if docname is None:
docname = self.docname
- self.dependencies.setdefault(docname, set()).add(_StrPath(filename))
+ # this will do the right thing when *filename* is absolute too
+ filename = self.srcdir / filename
+ self.dependencies.setdefault(docname, set()).add(filename)
def note_included(self, filename: str | os.PathLike[str]) -> None:
"""Add *filename* as a included from other document.
@@ -872,6 +835,71 @@ def _differing_config_keys(old: Config, new: Config) -> frozenset[str]:
return frozenset(not_in_both | different_values)
+def _has_doc_changed(
+ docname: str,
+ *,
+ filename: Path,
+ reread_always: Set[str],
+ doctreedir: Path,
+ all_docs: Mapping[str, int],
+ dependencies: Mapping[str, Set[Path]],
+) -> bool:
+ # check the "reread always" list
+ if docname in reread_always:
+ logger.debug('[build target] changed %r: re-read forced', docname)
+ return True
+
+ # if the doctree file is not there, rebuild
+ doctree_path = doctreedir / f'{docname}.doctree'
+ if not doctree_path.is_file():
+ logger.debug('[build target] changed %r: doctree file does not exist', docname)
+ return True
+
+ # check the mtime of the document
+ mtime = all_docs[docname]
+ new_mtime = _last_modified_time(filename)
+ if new_mtime > mtime:
+ logger.debug(
+ '[build target] changed: %r is outdated (%s -> %s)',
+ docname,
+ _format_rfc3339_microseconds(mtime),
+ _format_rfc3339_microseconds(new_mtime),
+ )
+ return True
+
+ # finally, check the mtime of dependencies
+ if docname not in dependencies:
+ return False
+ for dep_path in dependencies[docname]:
+ try:
+ dep_path_is_file = dep_path.is_file()
+ except OSError:
+ return True # give it another chance
+ if not dep_path_is_file:
+ logger.debug(
+ '[build target] changed: %r is missing dependency %r',
+ docname,
+ dep_path,
+ )
+ return True
+
+ try:
+ dep_mtime = _last_modified_time(dep_path)
+ except OSError:
+ return True # give it another chance
+ if dep_mtime > mtime:
+ logger.debug(
+ '[build target] changed: %r is outdated due to dependency %r (%s -> %s)',
+ docname,
+ dep_path,
+ _format_rfc3339_microseconds(mtime),
+ _format_rfc3339_microseconds(dep_mtime),
+ )
+ return True
+
+ return False
+
+
def _traverse_toctree(
traversed: set[str],
parent: str | None,
diff --git a/tests/js/fixtures/cpp/searchindex.js b/tests/js/fixtures/cpp/searchindex.js
index 6c50cc9d99d..10ed84d7ff0 100644
--- a/tests/js/fixtures/cpp/searchindex.js
+++ b/tests/js/fixtures/cpp/searchindex.js
@@ -1 +1 @@
-Search.setIndex({"alltitles":{},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{"sphinx (c++ class)":[[0,"_CPPv46Sphinx",false]]},"objects":{"":[[0,0,1,"_CPPv46Sphinx","Sphinx"]]},"objnames":{"0":["cpp","class","C++ class"]},"objtypes":{"0":"cpp:class"},"terms":{"The":0,"This":0,"becaus":0,"c":0,"can":0,"cardin":0,"challeng":0,"charact":0,"class":0,"descript":0,"drop":0,"engin":0,"fixtur":0,"frequent":0,"generat":0,"index":0,"inflat":0,"mathemat":0,"occur":0,"often":0,"project":0,"punctuat":0,"queri":0,"relat":0,"sampl":0,"search":0,"size":0,"sphinx":0,"term":0,"token":0,"use":0,"web":0},"titles":["<no title>"],"titleterms":{}})
\ No newline at end of file
+Search.setIndex({"alltitles":{},"docnames":["index"],"envversion":{"sphinx":66,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{"sphinx (c++ class)":[[0,"_CPPv46Sphinx",false]]},"objects":{"":[[0,0,1,"_CPPv46Sphinx","Sphinx"]]},"objnames":{"0":["cpp","class","C++ class"]},"objtypes":{"0":"cpp:class"},"terms":{"The":0,"This":0,"becaus":0,"c":0,"can":0,"cardin":0,"challeng":0,"charact":0,"class":0,"descript":0,"drop":0,"engin":0,"fixtur":0,"frequent":0,"generat":0,"index":0,"inflat":0,"mathemat":0,"occur":0,"often":0,"project":0,"punctuat":0,"queri":0,"relat":0,"sampl":0,"search":0,"size":0,"sphinx":0,"term":0,"token":0,"use":0,"web":0},"titles":["<no title>"],"titleterms":{}})
\ No newline at end of file
diff --git a/tests/js/fixtures/multiterm/searchindex.js b/tests/js/fixtures/multiterm/searchindex.js
index a3a52b8cf14..bd732522b3d 100644
--- a/tests/js/fixtures/multiterm/searchindex.js
+++ b/tests/js/fixtures/multiterm/searchindex.js
@@ -1 +1 @@
-Search.setIndex({"alltitles":{"Main Page":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"At":0,"This":0,"adjac":0,"appear":0,"applic":0,"built":0,"can":0,"check":0,"contain":0,"document":0,"doesn":0,"fixtur":0,"format":0,"function":0,"futur":0,"html":0,"includ":0,"match":0,"messag":0,"multipl":0,"multiterm":0,"order":0,"output":0,"perform":0,"perhap":0,"phrase":0,"project":0,"queri":0,"requir":0,"search":0,"success":0,"support":0,"t":0,"term":0,"test":0,"time":0,"use":0,"will":0,"write":0},"titles":["Main Page"],"titleterms":{"main":0,"page":0}})
\ No newline at end of file
+Search.setIndex({"alltitles":{"Main Page":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":66,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"At":0,"This":0,"adjac":0,"appear":0,"applic":0,"built":0,"can":0,"check":0,"contain":0,"document":0,"doesn":0,"fixtur":0,"format":0,"function":0,"futur":0,"html":0,"includ":0,"match":0,"messag":0,"multipl":0,"multiterm":0,"order":0,"output":0,"perform":0,"perhap":0,"phrase":0,"project":0,"queri":0,"requir":0,"search":0,"success":0,"support":0,"t":0,"term":0,"test":0,"time":0,"use":0,"will":0,"write":0},"titles":["Main Page"],"titleterms":{"main":0,"page":0}})
\ No newline at end of file
diff --git a/tests/js/fixtures/partial/searchindex.js b/tests/js/fixtures/partial/searchindex.js
index 02863d73d83..8a65718e2ea 100644
--- a/tests/js/fixtures/partial/searchindex.js
+++ b/tests/js/fixtures/partial/searchindex.js
@@ -1 +1 @@
-Search.setIndex({"alltitles":{"sphinx_utils module":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"This":0,"built":0,"confirm":0,"document":0,"function":0,"html":0,"includ":0,"input":0,"javascript":0,"match":0,"partial":0,"possibl":0,"project":0,"provid":0,"restructuredtext":0,"sampl":0,"search":0,"term":0,"titl":0,"use":0},"titles":["sphinx_utils module"],"titleterms":{"modul":0,"sphinx_util":0}})
\ No newline at end of file
+Search.setIndex({"alltitles":{"sphinx_utils module":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":66,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"This":0,"built":0,"confirm":0,"document":0,"function":0,"html":0,"includ":0,"input":0,"javascript":0,"match":0,"partial":0,"possibl":0,"project":0,"provid":0,"restructuredtext":0,"sampl":0,"search":0,"term":0,"titl":0,"use":0},"titles":["sphinx_utils module"],"titleterms":{"modul":0,"sphinx_util":0}})
\ No newline at end of file
diff --git a/tests/js/fixtures/titles/searchindex.js b/tests/js/fixtures/titles/searchindex.js
index 9faeadf76c6..fe325c7742d 100644
--- a/tests/js/fixtures/titles/searchindex.js
+++ b/tests/js/fixtures/titles/searchindex.js
@@ -1 +1 @@
-Search.setIndex({"alltitles":{"Main Page":[[0,null]],"Relevance":[[0,"relevance"],[1,null]],"Result Scoring":[[0,"result-scoring"]]},"docnames":["index","relevance"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst","relevance.rst"],"indexentries":{"example (class in relevance)":[[0,"relevance.Example",false]],"module":[[0,"module-relevance",false]],"relevance":[[0,"index-1",false],[0,"module-relevance",false]],"relevance (relevance.example attribute)":[[0,"relevance.Example.relevance",false]],"scoring":[[0,"index-0",true]]},"objects":{"":[[0,0,0,"-","relevance"]],"relevance":[[0,1,1,"","Example"]],"relevance.Example":[[0,2,1,"","relevance"]]},"objnames":{"0":["py","module","Python module"],"1":["py","class","Python class"],"2":["py","attribute","Python attribute"]},"objtypes":{"0":"py:module","1":"py:class","2":"py:attribute"},"terms":{"A":1,"By":0,"For":[0,1],"In":[0,1],"This":0,"align":0,"also":1,"answer":0,"appear":1,"area":0,"ask":0,"assign":0,"attempt":0,"attribut":0,"built":1,"can":[0,1],"class":0,"code":[0,1],"collect":0,"consid":1,"contain":0,"context":0,"corpus":1,"demonstr":0,"describ":1,"detail":1,"determin":[0,1],"docstr":0,"document":[0,1],"domain":1,"dure":0,"engin":0,"evalu":0,"exampl":[0,1],"extract":0,"feedback":0,"find":0,"found":0,"function":1,"handl":0,"happen":1,"head":0,"help":0,"high":[0,1],"improv":0,"inform":0,"intend":0,"issu":[0,1],"knowledg":0,"languag":1,"less":1,"like":[0,1],"mani":0,"match":0,"mention":1,"name":[0,1],"numer":0,"object":0,"often":0,"one":[0,1],"onli":[0,1],"order":0,"page":1,"part":1,"particular":0,"present":0,"printf":1,"program":1,"project":0,"queri":[0,1],"question":0,"re":0,"relat":0,"research":0,"result":1,"retriev":0,"s":[0,1],"say":0,"search":[0,1],"seem":0,"softwar":1,"sphinx":0,"straightforward":1,"subject":0,"subsect":0,"term":[0,1],"test":0,"text":0,"time":0,"titl":0,"two":0,"typic":0,"use":0,"user":[0,1],"whether":1,"will":0,"within":0,"word":0},"titles":["Main Page","Relevance"],"titleterms":{"main":0,"page":0,"relev":[0,1],"result":0,"score":0}})
\ No newline at end of file
+Search.setIndex({"alltitles":{"Main Page":[[0,null]],"Relevance":[[0,"relevance"],[1,null]],"Result Scoring":[[0,"result-scoring"]]},"docnames":["index","relevance"],"envversion":{"sphinx":66,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst","relevance.rst"],"indexentries":{"example (class in relevance)":[[0,"relevance.Example",false]],"module":[[0,"module-relevance",false]],"relevance":[[0,"index-1",false],[0,"module-relevance",false]],"relevance (relevance.example attribute)":[[0,"relevance.Example.relevance",false]],"scoring":[[0,"index-0",true]]},"objects":{"":[[0,0,0,"-","relevance"]],"relevance":[[0,1,1,"","Example"]],"relevance.Example":[[0,2,1,"","relevance"]]},"objnames":{"0":["py","module","Python module"],"1":["py","class","Python class"],"2":["py","attribute","Python attribute"]},"objtypes":{"0":"py:module","1":"py:class","2":"py:attribute"},"terms":{"A":1,"By":0,"For":[0,1],"In":[0,1],"This":0,"align":0,"also":1,"answer":0,"appear":1,"area":0,"ask":0,"assign":0,"attempt":0,"attribut":0,"built":1,"can":[0,1],"class":0,"code":[0,1],"collect":0,"consid":1,"contain":0,"context":0,"corpus":1,"demonstr":0,"describ":1,"detail":1,"determin":[0,1],"docstr":0,"document":[0,1],"domain":1,"dure":0,"engin":0,"evalu":0,"exampl":[0,1],"extract":0,"feedback":0,"find":0,"found":0,"function":1,"handl":0,"happen":1,"head":0,"help":0,"high":[0,1],"improv":0,"inform":0,"intend":0,"issu":[0,1],"knowledg":0,"languag":1,"less":1,"like":[0,1],"mani":0,"match":0,"mention":1,"name":[0,1],"numer":0,"object":0,"often":0,"one":[0,1],"onli":[0,1],"order":0,"page":1,"part":1,"particular":0,"present":0,"printf":1,"program":1,"project":0,"queri":[0,1],"question":0,"re":0,"relat":0,"research":0,"result":1,"retriev":0,"s":[0,1],"say":0,"search":[0,1],"seem":0,"softwar":1,"sphinx":0,"straightforward":1,"subject":0,"subsect":0,"term":[0,1],"test":0,"text":0,"time":0,"titl":0,"two":0,"typic":0,"use":0,"user":[0,1],"whether":1,"will":0,"within":0,"word":0},"titles":["Main Page","Relevance"],"titleterms":{"main":0,"page":0,"relev":[0,1],"result":0,"score":0}})
\ No newline at end of file
diff --git a/tests/test_environment/test_environment_record_dependencies.py b/tests/test_environment/test_environment_record_dependencies.py
index c3690e410fb..b70242bdb45 100644
--- a/tests/test_environment/test_environment_record_dependencies.py
+++ b/tests/test_environment/test_environment_record_dependencies.py
@@ -6,8 +6,6 @@
import pytest
-from sphinx.util._pathlib import _StrPath
-
if TYPE_CHECKING:
from sphinx.testing.util import SphinxTestApp
@@ -16,4 +14,4 @@
def test_record_dependencies_cleared(app: SphinxTestApp) -> None:
app.builder.read()
assert 'index' not in app.env.dependencies
- assert app.env.dependencies['api'] == {_StrPath('example_module.py')}
+ assert app.env.dependencies['api'] == {app.srcdir / 'example_module.py'}
From f1edefee452c943cd5feee6158dfc88eaaf86b71 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Thu, 19 Jun 2025 11:53:44 +0100
Subject: [PATCH 158/466] Deprecate the ``sphinx.io`` module (#13682)
---
CHANGES.rst | 5 +++++
doc/extdev/deprecated.rst | 5 +++++
sphinx/io.py | 38 ++++++++++++++++++++++++++++++++++++++
3 files changed, 48 insertions(+)
diff --git a/CHANGES.rst b/CHANGES.rst
index 3b0a591b8e6..476b5da0178 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -29,6 +29,11 @@ Deprecated
* #13679: Non-decodable characters in source files will raise an error in Sphinx 9.
Currently, such bytes are replaced with '?' and a warning is logged.
Patch by Adam Turner.
+* #13682: Deprecate :py:mod:`!sphinx.io`.
+ Sphinx no longer uses the :py:mod:`!sphinx.io` classes,
+ having replaced them with standard Python I/O.
+ The entire :py:mod:`!sphinx.io` module will be removed in Sphinx 10.
+ Patch by Adam Turner.
Features added
--------------
diff --git a/doc/extdev/deprecated.rst b/doc/extdev/deprecated.rst
index 898ec49c8fc..484f52cb7e7 100644
--- a/doc/extdev/deprecated.rst
+++ b/doc/extdev/deprecated.rst
@@ -22,6 +22,11 @@ The following is a list of deprecated interfaces.
- Removed
- Alternatives
+ * - ``sphinx.io`` (entire module)
+ - 8.3
+ - 10.0
+ - ``docutils.io`` or standard Python I/O
+
* - ``sphinx.builders.Builder.app``
- 8.3
- 10.0
diff --git a/sphinx/io.py b/sphinx/io.py
index 1746a3a139d..1df5ac454ce 100644
--- a/sphinx/io.py
+++ b/sphinx/io.py
@@ -2,6 +2,7 @@
from __future__ import annotations
+import warnings
from typing import TYPE_CHECKING
from docutils.io import FileInput
@@ -9,6 +10,7 @@
from docutils.transforms.references import DanglingReferences
from docutils.writers import UnfilteredWriter
+from sphinx.deprecation import RemovedInSphinx10Warning
from sphinx.transforms import SphinxTransformer
from sphinx.util import logging
from sphinx.util.docutils import LoggingReporter
@@ -27,6 +29,8 @@
logger = logging.getLogger(__name__)
+warnings.warn('sphinx.io is deprecated', RemovedInSphinx10Warning, stacklevel=2)
+
class SphinxBaseReader(standalone.Reader): # type: ignore[misc]
"""A base class of readers for Sphinx.
@@ -34,6 +38,14 @@ class SphinxBaseReader(standalone.Reader): # type: ignore[misc]
This replaces the reporter with Sphinx's while generating the document.
"""
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ warnings.warn(
+ 'sphinx.io.SphinxBaseReader is deprecated',
+ RemovedInSphinx10Warning,
+ stacklevel=2,
+ )
+
transforms: list[type[Transform]] = []
def get_transforms(self) -> list[type[Transform]]:
@@ -67,6 +79,14 @@ def new_document(self) -> nodes.document:
class SphinxStandaloneReader(SphinxBaseReader):
"""A basic document reader for Sphinx."""
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ warnings.warn(
+ 'sphinx.io.SphinxStandaloneReader is deprecated',
+ RemovedInSphinx10Warning,
+ stacklevel=2,
+ )
+
def _setup_transforms(self, transforms: list[type[Transform]], /) -> None:
self.transforms = self.transforms + transforms
@@ -92,6 +112,14 @@ def read_source(self, env: BuildEnvironment) -> str:
class SphinxDummyWriter(UnfilteredWriter): # type: ignore[type-arg]
"""Dummy writer module used for generating doctree."""
+ def __init__(self) -> None:
+ super().__init__()
+ warnings.warn(
+ 'sphinx.io.SphinxDummyWriter is deprecated',
+ RemovedInSphinx10Warning,
+ stacklevel=2,
+ )
+
supported = ('html',) # needed to keep "meta" nodes
def translate(self) -> None:
@@ -100,6 +128,11 @@ def translate(self) -> None:
def SphinxDummySourceClass(source: Any, *args: Any, **kwargs: Any) -> Any:
"""Bypass source object as is to cheat Publisher."""
+ warnings.warn(
+ 'sphinx.io.SphinxDummySourceClass is deprecated',
+ RemovedInSphinx10Warning,
+ stacklevel=2,
+ )
return source
@@ -109,3 +142,8 @@ class SphinxFileInput(FileInput):
def __init__(self, *args: Any, **kwargs: Any) -> None:
kwargs['error_handler'] = 'sphinx'
super().__init__(*args, **kwargs)
+ warnings.warn(
+ 'sphinx.io.SphinxFileInput is deprecated',
+ RemovedInSphinx10Warning,
+ stacklevel=2,
+ )
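A note on the pattern above: the module emits one warning at import time, and each deprecated class or function warns again when used, so both `import sphinx.io` and indirect construction paths surface the deprecation. A minimal standalone sketch of the same pattern (illustrative names only, not Sphinx's actual code):

import warnings


class RemovedInV10Warning(DeprecationWarning):
    """Hypothetical stand-in for sphinx.deprecation.RemovedInSphinx10Warning."""


# Module-level warning: fires as soon as the deprecated module is imported.
warnings.warn('mypkg.io is deprecated', RemovedInV10Warning, stacklevel=2)


class LegacyReader:
    """Deprecated class: warns again on instantiation."""

    def __init__(self, *args, **kwargs):
        warnings.warn(
            'mypkg.io.LegacyReader is deprecated',
            RemovedInV10Warning,
            stacklevel=2,
        )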
From c384ab96c328c3abc91b39c9e3e527863df30af1 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Fri, 20 Jun 2025 18:08:02 +0100
Subject: [PATCH 159/466] Use Docutils translators directly in the writing
phase (#13683)
---
sphinx/builders/html/__init__.py | 47 +++++++++----------
sphinx/builders/latex/__init__.py | 20 ++++----
sphinx/builders/manpage.py | 19 ++++----
sphinx/builders/texinfo.py | 11 ++---
sphinx/builders/text.py | 17 +++----
sphinx/builders/xml.py | 29 +++++++-----
sphinx/parsers.py | 2 +-
sphinx/writers/html.py | 4 +-
sphinx/writers/manpage.py | 7 ++-
sphinx/writers/xml.py | 6 +--
.../roots/test-build-html-translator/conf.py | 16 -------
.../test-build-html-translator/index.rst | 24 ----------
tests/test_builders/test_build_html.py | 42 +++++++++++++++--
13 files changed, 115 insertions(+), 129 deletions(-)
delete mode 100644 tests/roots/test-build-html-translator/conf.py
delete mode 100644 tests/roots/test-build-html-translator/index.rst
diff --git a/sphinx/builders/html/__init__.py b/sphinx/builders/html/__init__.py
index e72dffc2b33..8cf0340100f 100644
--- a/sphinx/builders/html/__init__.py
+++ b/sphinx/builders/html/__init__.py
@@ -15,12 +15,11 @@
from typing import TYPE_CHECKING
from urllib.parse import quote
+import docutils.parsers.rst
import docutils.readers.doctree
import docutils.utils
import jinja2.exceptions
from docutils import nodes
-from docutils.core import Publisher
-from docutils.io import DocTreeInput, StringOutput
from sphinx import __display_version__, package_dir
from sphinx import version_info as sphinx_version
@@ -69,7 +68,6 @@
from typing import Any, TypeAlias
from docutils.nodes import Node
- from docutils.readers import Reader
from sphinx.application import Sphinx
from sphinx.config import Config
@@ -93,6 +91,10 @@
bool,
]
+_READER_TRANSFORMS = docutils.readers.doctree.Reader().get_transforms()
+_PARSER_TRANSFORMS = docutils.parsers.rst.Parser().get_transforms()
+_WRITER_TRANSFORMS = HTMLWriter(None).get_transforms() # type: ignore[arg-type]
+
def convert_locale_to_language_tag(locale: str | None) -> str | None:
"""Convert a locale string to a language tag (ex. en_US -> en-US).
@@ -150,19 +152,13 @@ def __init__(self, app: Sphinx, env: BuildEnvironment) -> None:
# JS files
self._js_files: list[_JavaScript] = []
- # Cached Publisher for writing doctrees to HTML
- reader: Reader[DocTreeInput] = docutils.readers.doctree.Reader(
- parser_name='restructuredtext'
- )
- pub = Publisher(
- reader=reader,
- parser=reader.parser,
- writer=HTMLWriter(self),
- source_class=DocTreeInput,
- destination=StringOutput(encoding='unicode'),
+ # Cached settings for render_partial()
+ self._settings = _get_settings(
+ docutils.readers.doctree.Reader,
+ docutils.parsers.rst.Parser,
+ HTMLWriter,
+ defaults={'output_encoding': 'unicode', 'traceback': True},
)
- pub.get_settings(output_encoding='unicode', traceback=True)
- self._publisher = pub
def init(self) -> None:
self.build_info = self.create_build_info()
@@ -428,10 +424,11 @@ def render_partial(self, node: Node | None) -> dict[str, str]:
"""Utility: Render a lone doctree node."""
if node is None:
return {'fragment': ''}
- pub = self._publisher
- doc = docutils.utils.new_document('', pub.settings)
+ doc = docutils.utils.new_document('', self._settings)
doc.append(node)
- doc.transformer.populate_from_components((pub.reader, pub.parser, pub.writer))
+ doc.transformer.add_transforms(_READER_TRANSFORMS)
+ doc.transformer.add_transforms(_PARSER_TRANSFORMS)
+ doc.transformer.add_transforms(_WRITER_TRANSFORMS)
doc.transformer.apply_transforms()
visitor: HTML5Translator = self.create_translator(doc, self) # type: ignore[assignment]
doc.walkabout(visitor)
@@ -456,7 +453,6 @@ def prepare_writing(self, docnames: Set[str]) -> None:
)
self.load_indexer(docnames)
- self.docwriter = HTMLWriter(self)
self.docsettings = _get_settings(
HTMLWriter, defaults=self.env.settings, read_config_files=True
)
@@ -666,7 +662,6 @@ def copy_assets(self) -> None:
self.finish_tasks.join()
def write_doc(self, docname: str, doctree: nodes.document) -> None:
- destination = StringOutput(encoding='utf-8')
doctree.settings = self.docsettings
self.secnumbers = self.env.toc_secnumbers.get(docname, {})
@@ -674,13 +669,13 @@ def write_doc(self, docname: str, doctree: nodes.document) -> None:
self.imgpath = relative_uri(self.get_target_uri(docname), '_images')
self.dlpath = relative_uri(self.get_target_uri(docname), '_downloads')
self.current_docname = docname
- self.docwriter.write(doctree, destination)
- self.docwriter.assemble_parts()
- body = self.docwriter.parts['fragment']
- metatags = self.docwriter.clean_meta
+ visitor: HTML5Translator = self.create_translator(doctree, self) # type: ignore[assignment]
+ doctree.walkabout(visitor)
+ body = ''.join(visitor.fragment)
+ clean_meta = ''.join(visitor.meta[2:])
- ctx = self.get_doc_context(docname, body, metatags)
- ctx['has_maths_elements'] = self.docwriter._has_maths_elements
+ ctx = self.get_doc_context(docname, body, clean_meta)
+ ctx['has_maths_elements'] = getattr(visitor, '_has_maths_elements', False)
self.handle_page(docname, ctx, event_arg=doctree)
def write_doc_serialized(self, docname: str, doctree: nodes.document) -> None:
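The recurring change in this patch is the one above: instead of routing the doctree through a docutils Writer and a cached Publisher with a StringOutput destination, each builder now creates its translator (a docutils NodeVisitor) and walks the tree directly. A minimal sketch of the pattern using only public docutils APIs (settings built with the stock get_default_settings rather than Sphinx's _get_settings helper):

from docutils import nodes
from docutils.frontend import get_default_settings
from docutils.utils import new_document
from docutils.writers.html5_polyglot import HTMLTranslator, Writer

settings = get_default_settings(Writer)
document = new_document('<sketch>', settings)
document.append(nodes.paragraph(text='Hello, visitor pattern.'))

visitor = HTMLTranslator(document)
document.walkabout(visitor)   # drives visit_*/depart_* for every node
print(''.join(visitor.body))  # the rendered fragment, no Writer involved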
diff --git a/sphinx/builders/latex/__init__.py b/sphinx/builders/latex/__init__.py
index feaa8e021cb..69c11d515b8 100644
--- a/sphinx/builders/latex/__init__.py
+++ b/sphinx/builders/latex/__init__.py
@@ -24,7 +24,7 @@
from sphinx.locale import _, __
from sphinx.util import logging, texescape
from sphinx.util.display import progress_message, status_iterator
-from sphinx.util.docutils import SphinxFileOutput, _get_settings, new_document
+from sphinx.util.docutils import _get_settings, new_document
from sphinx.util.fileutil import copy_asset_file
from sphinx.util.i18n import format_date
from sphinx.util.nodes import inline_all_toctrees
@@ -297,7 +297,6 @@ def copy_assets(self) -> None:
self.copy_latex_additional_files()
def write_documents(self, _docnames: Set[str]) -> None:
- docwriter = LaTeXWriter(self)
docsettings = _get_settings(
LaTeXWriter, defaults=self.env.settings, read_config_files=True
)
@@ -308,11 +307,6 @@ def write_documents(self, _docnames: Set[str]) -> None:
toctree_only = False
if len(entry) > 5:
toctree_only = entry[5]
- destination = SphinxFileOutput(
- destination_path=self.outdir / targetname,
- encoding='utf-8',
- overwrite_if_changed=True,
- )
with progress_message(__('processing %s') % targetname, nonl=False):
doctree = self.env.get_doctree(docname)
toctree = next(doctree.findall(addnodes.toctree), None)
@@ -343,8 +337,16 @@ def write_documents(self, _docnames: Set[str]) -> None:
docsettings._docclass = theme.name
doctree.settings = docsettings
- docwriter.theme = theme
- docwriter.write(doctree, destination)
+ visitor: LaTeXTranslator = self.create_translator(doctree, self, theme) # type: ignore[assignment]
+ doctree.walkabout(visitor)
+ output = visitor.astext()
+ destination_path = self.outdir / targetname
+ # https://github.com/sphinx-doc/sphinx/issues/4362
+ if (
+ not destination_path.is_file()
+ or destination_path.read_bytes() != output.encode()
+ ):
+ destination_path.write_text(output, encoding='utf-8')
def get_contentsname(self, indexfile: str) -> str:
tree = self.env.get_doctree(indexfile)
diff --git a/sphinx/builders/manpage.py b/sphinx/builders/manpage.py
index 799f2a64f54..d30e697d292 100644
--- a/sphinx/builders/manpage.py
+++ b/sphinx/builders/manpage.py
@@ -4,8 +4,6 @@
from typing import TYPE_CHECKING
-from docutils.io import FileOutput
-
from sphinx import addnodes
from sphinx._cli.util.colour import darkgreen
from sphinx.builders import Builder
@@ -15,7 +13,11 @@
from sphinx.util.docutils import _get_settings
from sphinx.util.nodes import inline_all_toctrees
from sphinx.util.osutil import ensuredir, make_filename_from_project
-from sphinx.writers.manpage import ManualPageTranslator, ManualPageWriter
+from sphinx.writers.manpage import (
+ ManualPageTranslator,
+ ManualPageWriter,
+ NestedInlineTransform,
+)
if TYPE_CHECKING:
from collections.abc import Set
@@ -51,7 +53,6 @@ def get_target_uri(self, docname: str, typ: str | None = None) -> str:
@progress_message(__('writing'))
def write_documents(self, _docnames: Set[str]) -> None:
- docwriter = ManualPageWriter(self)
docsettings = _get_settings(
ManualPageWriter, defaults=self.env.settings, read_config_files=True
)
@@ -83,10 +84,6 @@ def write_documents(self, _docnames: Set[str]) -> None:
targetname = f'{name}.{section}'
logger.info('%s { ', darkgreen(targetname))
- destination = FileOutput(
- destination_path=self.outdir / targetname,
- encoding='utf-8',
- )
tree = self.env.get_doctree(docname)
docnames: set[str] = set()
@@ -100,7 +97,11 @@ def write_documents(self, _docnames: Set[str]) -> None:
for pendingnode in largetree.findall(addnodes.pending_xref):
pendingnode.replace_self(pendingnode.children)
- docwriter.write(largetree, destination)
+ transform = NestedInlineTransform(largetree)
+ transform.apply()
+ visitor: ManualPageTranslator = self.create_translator(largetree, self) # type: ignore[assignment]
+ largetree.walkabout(visitor)
+ (self.outdir / targetname).write_text(visitor.astext(), encoding='utf-8')
def finish(self) -> None:
pass
diff --git a/sphinx/builders/texinfo.py b/sphinx/builders/texinfo.py
index a0a8a9f8dea..ba3cd0c0d10 100644
--- a/sphinx/builders/texinfo.py
+++ b/sphinx/builders/texinfo.py
@@ -6,7 +6,6 @@
from typing import TYPE_CHECKING
from docutils import nodes
-from docutils.io import FileOutput
from sphinx import addnodes, package_dir
from sphinx._cli.util.colour import darkgreen
@@ -103,10 +102,6 @@ def write_documents(self, _docnames: Set[str]) -> None:
toctree_only = False
if len(entry) > 7:
toctree_only = entry[7]
- destination = FileOutput(
- destination_path=self.outdir / targetname,
- encoding='utf-8',
- )
with progress_message(__('processing %s') % targetname, nonl=False):
appendices = self.config.texinfo_appendices or []
doctree = self.assemble_doctree(
@@ -115,7 +110,6 @@ def write_documents(self, _docnames: Set[str]) -> None:
with progress_message(__('writing')):
self.post_process_images(doctree)
- docwriter = TexinfoWriter(self)
settings = _get_settings(
TexinfoWriter, defaults=self.env.settings, read_config_files=True
)
@@ -128,7 +122,10 @@ def write_documents(self, _docnames: Set[str]) -> None:
settings.texinfo_dir_description = description or ''
settings.docname = docname
doctree.settings = settings
- docwriter.write(doctree, destination)
+ visitor: TexinfoTranslator = self.create_translator(doctree, self) # type: ignore[assignment]
+ doctree.walkabout(visitor)
+ visitor.finish()
+ (self.outdir / targetname).write_text(visitor.output, encoding='utf-8')
self.copy_image_files(targetname[:-5])
def assemble_doctree(
diff --git a/sphinx/builders/text.py b/sphinx/builders/text.py
index bd7731fdb49..186e71e79da 100644
--- a/sphinx/builders/text.py
+++ b/sphinx/builders/text.py
@@ -4,16 +4,14 @@
from typing import TYPE_CHECKING
-from docutils.io import StringOutput
-
from sphinx.builders import Builder
from sphinx.locale import __
from sphinx.util import logging
from sphinx.util.osutil import _last_modified_time
-from sphinx.writers.text import TextTranslator, TextWriter
+from sphinx.writers.text import TextTranslator
if TYPE_CHECKING:
- from collections.abc import Iterator, Set
+ from collections.abc import Iterator
from docutils import nodes
@@ -59,19 +57,16 @@ def get_outdated_docs(self) -> Iterator[str]:
def get_target_uri(self, docname: str, typ: str | None = None) -> str:
return ''
- def prepare_writing(self, docnames: Set[str]) -> None:
- self.writer = TextWriter(self)
-
def write_doc(self, docname: str, doctree: nodes.document) -> None:
self.current_docname = docname
self.secnumbers = self.env.toc_secnumbers.get(docname, {})
- destination = StringOutput(encoding='utf-8')
- self.writer.write(doctree, destination)
+ visitor: TextTranslator = self.create_translator(doctree, self) # type: ignore[assignment]
+ doctree.walkabout(visitor)
+ output = visitor.body
out_file_name = self.outdir / (docname + self.out_suffix)
out_file_name.parent.mkdir(parents=True, exist_ok=True)
try:
- with open(out_file_name, 'w', encoding='utf-8') as f:
- f.write(self.writer.output)
+ out_file_name.write_text(output, encoding='utf-8')
except OSError as err:
logger.warning(__('error writing file %s: %s'), out_file_name, err)
diff --git a/sphinx/builders/xml.py b/sphinx/builders/xml.py
index fab0f7cb5c4..cf86ea5afef 100644
--- a/sphinx/builders/xml.py
+++ b/sphinx/builders/xml.py
@@ -5,17 +5,15 @@
from typing import TYPE_CHECKING
from docutils import nodes
-from docutils.io import StringOutput
from docutils.writers.docutils_xml import XMLTranslator
from sphinx.builders import Builder
from sphinx.locale import __
from sphinx.util import logging
from sphinx.util.osutil import _last_modified_time
-from sphinx.writers.xml import PseudoXMLWriter, XMLWriter
if TYPE_CHECKING:
- from collections.abc import Iterator, Set
+ from collections.abc import Iterator
from sphinx.application import Sphinx
from sphinx.util.typing import ExtensionMetadata
@@ -33,8 +31,6 @@ class XMLBuilder(Builder):
out_suffix = '.xml'
allow_parallel = True
- _writer_class: type[XMLWriter | PseudoXMLWriter] = XMLWriter
- writer: XMLWriter | PseudoXMLWriter
default_translator_class = XMLTranslator
def init(self) -> None:
@@ -61,9 +57,6 @@ def get_outdated_docs(self) -> Iterator[str]:
def get_target_uri(self, docname: str, typ: str | None = None) -> str:
return docname
- def prepare_writing(self, docnames: Set[str]) -> None:
- self.writer = self._writer_class(self)
-
def write_doc(self, docname: str, doctree: nodes.document) -> None:
# work around multiple string % tuple issues in docutils;
# replace tuples in attribute values with lists
@@ -79,16 +72,25 @@ def write_doc(self, docname: str, doctree: nodes.document) -> None:
for i, val in enumerate(value):
if isinstance(val, tuple):
value[i] = list(val)
- destination = StringOutput(encoding='utf-8')
- self.writer.write(doctree, destination)
+ output = self._translate(doctree)
out_file_name = self.outdir / (docname + self.out_suffix)
out_file_name.parent.mkdir(parents=True, exist_ok=True)
try:
- with open(out_file_name, 'w', encoding='utf-8') as f:
- f.write(self.writer.output)
+ out_file_name.write_text(output, encoding='utf-8')
except OSError as err:
logger.warning(__('error writing file %s: %s'), out_file_name, err)
+ def _translate(self, doctree: nodes.document) -> str:
+ doctree.settings.newlines = doctree.settings.indents = self.config.xml_pretty
+ doctree.settings.xml_declaration = True
+ doctree.settings.doctype_declaration = True
+
+ # copied from docutils.writers.docutils_xml.Writer.translate()
+ # so that we can override the translator class
+ visitor: XMLTranslator = self.create_translator(doctree)
+ doctree.walkabout(visitor)
+ return ''.join(visitor.output)
+
def finish(self) -> None:
pass
@@ -102,7 +104,8 @@ class PseudoXMLBuilder(XMLBuilder):
out_suffix = '.pseudoxml'
- _writer_class = PseudoXMLWriter
+ def _translate(self, doctree: nodes.document) -> str:
+ return doctree.pformat()
def setup(app: Sphinx) -> ExtensionMetadata:
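PseudoXMLBuilder above sidesteps translation entirely: pformat() is docutils' built-in indented pseudo-XML dump of a doctree, so no writer or translator is needed. A quick standalone illustration (output abbreviated):

from docutils.frontend import get_default_settings
from docutils.parsers.rst import Parser
from docutils.utils import new_document

settings = get_default_settings(Parser)
document = new_document('<sketch>', settings)
Parser().parse('Hello *world*', document)
print(document.pformat())
# Prints an indented tree along these lines:
#   <document source="<sketch>">
#       <paragraph>
#           Hello
#           <emphasis>
#               world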
diff --git a/sphinx/parsers.py b/sphinx/parsers.py
index 26437654cc5..97333f86773 100644
--- a/sphinx/parsers.py
+++ b/sphinx/parsers.py
@@ -70,7 +70,7 @@ def get_transforms(self) -> list[type[Transform]]:
refs: sphinx.io.SphinxStandaloneReader
"""
- transforms = super().get_transforms()
+ transforms = super(RSTParser, RSTParser()).get_transforms()
transforms.remove(SmartQuotes)
return transforms
diff --git a/sphinx/writers/html.py b/sphinx/writers/html.py
index e2c04ca32db..04f9af122a4 100644
--- a/sphinx/writers/html.py
+++ b/sphinx/writers/html.py
@@ -4,7 +4,7 @@
from typing import TYPE_CHECKING, cast
-from docutils.writers.html4css1 import Writer
+from docutils.writers import html4css1
from sphinx.util import logging
from sphinx.writers.html5 import HTML5Translator
@@ -20,7 +20,7 @@
# https://www.arnebrodowski.de/blog/write-your-own-restructuredtext-writer.html
-class HTMLWriter(Writer): # type: ignore[misc]
+class HTMLWriter(html4css1.Writer): # type: ignore[misc]
# override embed-stylesheet default value to False.
settings_default_overrides = {'embed_stylesheet': False}
diff --git a/sphinx/writers/manpage.py b/sphinx/writers/manpage.py
index 45ab340c4e3..282cd0ed14c 100644
--- a/sphinx/writers/manpage.py
+++ b/sphinx/writers/manpage.py
@@ -5,8 +5,7 @@
from typing import TYPE_CHECKING, cast
from docutils import nodes
-from docutils.writers.manpage import Translator as BaseTranslator
-from docutils.writers.manpage import Writer
+from docutils.writers import manpage
from sphinx import addnodes
from sphinx.locale import _, admonitionlabels
@@ -26,7 +25,7 @@
logger = logging.getLogger(__name__)
-class ManualPageWriter(Writer): # type: ignore[misc]
+class ManualPageWriter(manpage.Writer): # type: ignore[misc]
def __init__(self, builder: Builder) -> None:
super().__init__()
self.builder = builder
@@ -71,7 +70,7 @@ def apply(self, **kwargs: Any) -> None:
node.parent.remove(node)
-class ManualPageTranslator(SphinxTranslator, BaseTranslator):
+class ManualPageTranslator(SphinxTranslator, manpage.Translator):
"""Custom man page translator."""
_docinfo: dict[str, Any] = {}
diff --git a/sphinx/writers/xml.py b/sphinx/writers/xml.py
index 51f77ee2f01..e9877825de6 100644
--- a/sphinx/writers/xml.py
+++ b/sphinx/writers/xml.py
@@ -4,7 +4,7 @@
from typing import TYPE_CHECKING
-from docutils.writers.docutils_xml import Writer as BaseXMLWriter
+from docutils.writers import docutils_xml
if TYPE_CHECKING:
from typing import Any
@@ -12,7 +12,7 @@
from sphinx.builders import Builder
-class XMLWriter(BaseXMLWriter): # type: ignore[misc]
+class XMLWriter(docutils_xml.Writer): # type: ignore[misc]
output: str
def __init__(self, builder: Builder) -> None:
@@ -34,7 +34,7 @@ def translate(self, *args: Any, **kwargs: Any) -> None:
self.output = ''.join(visitor.output) # type: ignore[attr-defined]
-class PseudoXMLWriter(BaseXMLWriter): # type: ignore[misc]
+class PseudoXMLWriter(docutils_xml.Writer): # type: ignore[misc]
supported = ('pprint', 'pformat', 'pseudoxml')
"""Formats this writer supports."""
diff --git a/tests/roots/test-build-html-translator/conf.py b/tests/roots/test-build-html-translator/conf.py
deleted file mode 100644
index 89448d45741..00000000000
--- a/tests/roots/test-build-html-translator/conf.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from sphinx.writers.html import HTML5Translator
-
-project = 'test'
-
-
-class ConfHTMLTranslator(HTML5Translator):
- depart_with_node = 0
-
- def depart_admonition(self, node=None):
- if node is not None:
- self.depart_with_node += 1
- HTML5Translator.depart_admonition(self, node)
-
-
-def setup(app):
- app.set_translator('html', ConfHTMLTranslator)
diff --git a/tests/roots/test-build-html-translator/index.rst b/tests/roots/test-build-html-translator/index.rst
deleted file mode 100644
index 1610d2b4501..00000000000
--- a/tests/roots/test-build-html-translator/index.rst
+++ /dev/null
@@ -1,24 +0,0 @@
-=======================
-Test HTML admonitions
-=======================
-
-.. seealso:: test
-
-.. note:: test
-
-.. warning:: test
-
-.. attention:: test
-
-.. caution:: test
-
-.. danger:: test
-
-.. error:: test
-
-.. hint:: test
-
-.. important:: test
-
-.. tip:: test
-
diff --git a/tests/test_builders/test_build_html.py b/tests/test_builders/test_build_html.py
index d374ff93177..3ba9c929cc9 100644
--- a/tests/test_builders/test_build_html.py
+++ b/tests/test_builders/test_build_html.py
@@ -8,7 +8,12 @@
from typing import TYPE_CHECKING
import pytest
+from docutils import nodes
+from docutils.parsers import rst
+from docutils.readers import standalone
+from docutils.writers import html5_polyglot
+from sphinx import addnodes
from sphinx._cli.util.errors import strip_escape_sequences
from sphinx.builders.html import (
StandaloneHTMLBuilder,
@@ -17,7 +22,9 @@
)
from sphinx.errors import ConfigError
from sphinx.testing.util import etree_parse
+from sphinx.util.docutils import _get_settings, new_document
from sphinx.util.inventory import InventoryFile, _InventoryItem
+from sphinx.writers.html5 import HTML5Translator
from tests.test_builders.xpath_data import FIGURE_CAPTION
from tests.test_builders.xpath_util import check_xpath
@@ -146,11 +153,38 @@ def test_html_parallel(app: SphinxTestApp) -> None:
app.build()
-@pytest.mark.sphinx('html', testroot='build-html-translator')
+class ConfHTMLTranslator(HTML5Translator):
+ depart_with_node = 0
+
+ def depart_admonition(self, node: nodes.Element | None = None) -> None:
+ if node is not None:
+ self.depart_with_node += 1
+ super().depart_admonition(node)
+
+
+@pytest.mark.sphinx('html', testroot='_blank')
def test_html_translator(app: SphinxTestApp) -> None:
- app.build()
- assert isinstance(app.builder, StandaloneHTMLBuilder) # type-checking
- assert app.builder.docwriter.visitor.depart_with_node == 10
+ settings = _get_settings(
+ standalone.Reader, rst.Parser, html5_polyglot.Writer, defaults={}
+ )
+ doctree = new_document(__file__, settings)
+ doctree.append(addnodes.seealso('test', nodes.Text('test')))
+ doctree.append(nodes.note('test', nodes.Text('test')))
+ doctree.append(nodes.warning('test', nodes.Text('test')))
+ doctree.append(nodes.attention('test', nodes.Text('test')))
+ doctree.append(nodes.caution('test', nodes.Text('test')))
+ doctree.append(nodes.danger('test', nodes.Text('test')))
+ doctree.append(nodes.error('test', nodes.Text('test')))
+ doctree.append(nodes.hint('test', nodes.Text('test')))
+ doctree.append(nodes.important('test', nodes.Text('test')))
+ doctree.append(nodes.tip('test', nodes.Text('test')))
+
+ visitor = ConfHTMLTranslator(doctree, app.builder)
+ assert isinstance(visitor, ConfHTMLTranslator)
+ assert isinstance(visitor, HTML5Translator)
+ doctree.walkabout(visitor)
+
+ assert visitor.depart_with_node == 10
@pytest.mark.parametrize(
From b9641ae82d968d4348bfee06b42c69764c1024bc Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sun, 22 Jun 2025 02:08:53 +0100
Subject: [PATCH 160/466] Add a file-based cache for remote intersphinx
inventories (#13684)
---
CHANGES.rst | 4 ++
sphinx/ext/intersphinx/_cli.py | 10 ++-
sphinx/ext/intersphinx/_load.py | 57 +++++++++++++---
tests/test_extensions/test_ext_intersphinx.py | 65 ++++++++++++++-----
.../test_ext_intersphinx_cache.py | 3 +
5 files changed, 110 insertions(+), 29 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 476b5da0178..1b9343435db 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -54,6 +54,10 @@ Features added
Patch by Adam Turner.
* #13647: LaTeX: allow more cases of table nesting.
Patch by Jean-François B.
+* #13684: intersphinx: Add a file-based cache for remote inventories.
+ The location of the cache directory must not be relied upon externally,
+ as it may change without notice or warning in future releases.
+ Patch by Adam Turner.
Bugs fixed
----------
diff --git a/sphinx/ext/intersphinx/_cli.py b/sphinx/ext/intersphinx/_cli.py
index 720f080ebde..bf3a333eb95 100644
--- a/sphinx/ext/intersphinx/_cli.py
+++ b/sphinx/ext/intersphinx/_cli.py
@@ -5,7 +5,11 @@
import sys
from pathlib import Path
-from sphinx.ext.intersphinx._load import _fetch_inventory, _InvConfig
+from sphinx.ext.intersphinx._load import (
+ _fetch_inventory_data,
+ _InvConfig,
+ _load_inventory,
+)
def inspect_main(argv: list[str], /) -> int:
@@ -28,12 +32,14 @@ def inspect_main(argv: list[str], /) -> int:
)
try:
- inv = _fetch_inventory(
+ raw_data, _ = _fetch_inventory_data(
target_uri='',
inv_location=filename,
config=config,
srcdir=Path(),
+ cache_path=None,
)
+ inv = _load_inventory(raw_data, target_uri='')
for key in sorted(inv.data):
print(key)
inv_entries = sorted(inv.data[key].items())
diff --git a/sphinx/ext/intersphinx/_load.py b/sphinx/ext/intersphinx/_load.py
index 6a07dbbc3f0..724566df091 100644
--- a/sphinx/ext/intersphinx/_load.py
+++ b/sphinx/ext/intersphinx/_load.py
@@ -181,6 +181,9 @@ def load_mappings(app: Sphinx) -> None:
now=now,
config=inv_config,
srcdir=app.srcdir,
+ # the location of this cache directory must not be relied upon
+ # externally; it may change without notice or warning.
+ cache_dir=app.doctreedir / '__intersphinx_cache__',
)
for project in projects
]
@@ -230,6 +233,7 @@ def _fetch_inventory_group(
now: int,
config: _InvConfig,
srcdir: Path,
+ cache_dir: Path | None,
) -> bool:
if config.intersphinx_cache_limit >= 0:
# Positive value: cache is expired if its timestamp is below
@@ -250,6 +254,25 @@ def _fetch_inventory_group(
else:
inv_location = location
+ if cache_dir is not None:
+ cache_path = cache_dir / f'{project.name}_{INVENTORY_FILENAME}'
+ else:
+ cache_path = None
+
+ if (
+ cache_path is not None
+ and '://' in inv_location
+ and project.target_uri not in cache
+ and cache_path.is_file()
+ # the saved 'objects.inv' is not older than the cache expiry time
+ and cache_path.stat().st_mtime >= cache_time
+ ):
+ raw_data = cache_path.read_bytes()
+ inv = _load_inventory(raw_data, target_uri=project.target_uri)
+ cache_path_mtime = int(cache_path.stat().st_mtime)
+ cache[project.target_uri] = project.name, cache_path_mtime, inv.data
+ break
+
# decide whether the inventory must be read: always read local
# files; remote ones only if the cache time is expired
if (
@@ -264,17 +287,18 @@ def _fetch_inventory_group(
)
try:
- inv = _fetch_inventory(
+ raw_data, target_uri = _fetch_inventory_data(
target_uri=project.target_uri,
inv_location=inv_location,
config=config,
srcdir=srcdir,
+ cache_path=cache_path,
)
+ inv = _load_inventory(raw_data, target_uri=target_uri)
except Exception as err:
failures.append(err.args)
continue
-
- if inv:
+ else:
cache[project.target_uri] = project.name, now, inv.data
updated = True
break
@@ -302,18 +326,25 @@ def _fetch_inventory_group(
def fetch_inventory(app: Sphinx, uri: InventoryURI, inv: str) -> Inventory:
"""Fetch, parse and return an intersphinx inventory file."""
- return _fetch_inventory(
+ raw_data, uri = _fetch_inventory_data(
target_uri=uri,
inv_location=inv,
config=_InvConfig.from_config(app.config),
srcdir=app.srcdir,
- ).data
+ cache_path=None,
+ )
+ return _load_inventory(raw_data, target_uri=uri).data
-def _fetch_inventory(
- *, target_uri: InventoryURI, inv_location: str, config: _InvConfig, srcdir: Path
-) -> _Inventory:
- """Fetch, parse and return an intersphinx inventory file."""
+def _fetch_inventory_data(
+ *,
+ target_uri: InventoryURI,
+ inv_location: str,
+ config: _InvConfig,
+ srcdir: Path,
+ cache_path: Path | None,
+) -> tuple[bytes, str]:
+ """Fetch inventory data from a local or remote source."""
# both *target_uri* (base URI of the links to generate)
# and *inv_location* (actual location of the inventory file)
# can be local or remote URIs
@@ -324,9 +355,17 @@ def _fetch_inventory(
raw_data, target_uri = _fetch_inventory_url(
target_uri=target_uri, inv_location=inv_location, config=config
)
+ if cache_path is not None:
+ cache_path.parent.mkdir(parents=True, exist_ok=True)
+ cache_path.write_bytes(raw_data)
else:
raw_data = _fetch_inventory_file(inv_location=inv_location, srcdir=srcdir)
+ return raw_data, target_uri
+
+def _load_inventory(raw_data: bytes, /, *, target_uri: InventoryURI) -> _Inventory:
+ """Parse and return an intersphinx inventory file."""
+ # *target_uri* (base URI of the links to generate) can be a local or remote URI
try:
inv = InventoryFile.loads(raw_data, uri=target_uri)
except ValueError as exc:
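The new cache is easiest to see end to end: _fetch_inventory_data returns raw bytes and writes them to cache_path when the source was remote, _load_inventory parses them, and _fetch_inventory_group short-circuits to the on-disk copy when its mtime beats the expiry cutoff. A condensed sketch of that strategy with invented names (fetch stands in for the HTTP request):

import time
from pathlib import Path


def fetch_with_file_cache(
    url: str, cache_path: Path, fetch, cache_limit_days: int
) -> bytes:
    cutoff = time.time() - cache_limit_days * 86400
    if cache_path.is_file() and cache_path.stat().st_mtime >= cutoff:
        return cache_path.read_bytes()  # fresh enough: reuse, no request
    raw_data = fetch(url)  # expired or absent: fetch anew ...
    cache_path.parent.mkdir(parents=True, exist_ok=True)
    cache_path.write_bytes(raw_data)  # ... and refresh the on-disk copy
    return raw_data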
diff --git a/tests/test_extensions/test_ext_intersphinx.py b/tests/test_extensions/test_ext_intersphinx.py
index c2b17d36a78..240d7e12ecb 100644
--- a/tests/test_extensions/test_ext_intersphinx.py
+++ b/tests/test_extensions/test_ext_intersphinx.py
@@ -18,10 +18,11 @@
from sphinx.ext.intersphinx import setup as intersphinx_setup
from sphinx.ext.intersphinx._cli import inspect_main
from sphinx.ext.intersphinx._load import (
- _fetch_inventory,
+ _fetch_inventory_data,
_fetch_inventory_group,
_get_safe_url,
_InvConfig,
+ _load_inventory,
_strip_basic_auth,
load_mappings,
validate_intersphinx_mapping,
@@ -85,12 +86,15 @@ def test_fetch_inventory_redirection(get_request, InventoryFile, app):
# same uri and inv, not redirected
mocked_get.url = 'https://hostname/' + INVENTORY_FILENAME
- _fetch_inventory(
- target_uri='https://hostname/',
+ target_uri = 'https://hostname/'
+ raw_data, target_uri = _fetch_inventory_data(
+ target_uri=target_uri,
inv_location='https://hostname/' + INVENTORY_FILENAME,
config=_InvConfig.from_config(app.config),
srcdir=app.srcdir,
+ cache_path=None,
)
+ _load_inventory(raw_data, target_uri=target_uri)
assert 'intersphinx inventory has moved' not in app.status.getvalue()
assert InventoryFile.loads.call_args[1]['uri'] == 'https://hostname/'
@@ -99,12 +103,15 @@ def test_fetch_inventory_redirection(get_request, InventoryFile, app):
app.status.truncate(0)
mocked_get.url = 'https://hostname/new/' + INVENTORY_FILENAME
- _fetch_inventory(
- target_uri='https://hostname/',
+ target_uri = 'https://hostname/'
+ raw_data, target_uri = _fetch_inventory_data(
+ target_uri=target_uri,
inv_location='https://hostname/' + INVENTORY_FILENAME,
config=_InvConfig.from_config(app.config),
srcdir=app.srcdir,
+ cache_path=None,
)
+ _load_inventory(raw_data, target_uri=target_uri)
assert app.status.getvalue() == (
'intersphinx inventory has moved: '
'https://hostname/%s -> https://hostname/new/%s\n'
@@ -117,12 +124,15 @@ def test_fetch_inventory_redirection(get_request, InventoryFile, app):
app.status.truncate(0)
mocked_get.url = 'https://hostname/new/' + INVENTORY_FILENAME
- _fetch_inventory(
- target_uri='https://hostname/',
+ target_uri = 'https://hostname/'
+ raw_data, target_uri = _fetch_inventory_data(
+ target_uri=target_uri,
inv_location='https://hostname/new/' + INVENTORY_FILENAME,
config=_InvConfig.from_config(app.config),
srcdir=app.srcdir,
+ cache_path=None,
)
+ _load_inventory(raw_data, target_uri=target_uri)
assert 'intersphinx inventory has moved' not in app.status.getvalue()
assert InventoryFile.loads.call_args[1]['uri'] == 'https://hostname/'
@@ -131,12 +141,15 @@ def test_fetch_inventory_redirection(get_request, InventoryFile, app):
app.status.truncate(0)
mocked_get.url = 'https://hostname/other/' + INVENTORY_FILENAME
- _fetch_inventory(
- target_uri='https://hostname/',
+ target_uri = 'https://hostname/'
+ raw_data, target_uri = _fetch_inventory_data(
+ target_uri=target_uri,
inv_location='https://hostname/new/' + INVENTORY_FILENAME,
config=_InvConfig.from_config(app.config),
srcdir=app.srcdir,
+ cache_path=None,
)
+ _load_inventory(raw_data, target_uri=target_uri)
assert app.status.getvalue() == (
'intersphinx inventory has moved: '
'https://hostname/new/%s -> https://hostname/other/%s\n'
@@ -774,13 +787,16 @@ def test_intersphinx_cache_limit(app, monkeypatch, cache_limit, expected_expired
now = 2 * 86400
monkeypatch.setattr('time.time', lambda: now)
- # `_fetch_inventory_group` calls `_fetch_inventory`.
+ # `_fetch_inventory_group` calls `_fetch_inventory_data`.
# We replace it with a mock to test whether it has been called.
# If it has been called, it means the cache had expired.
- mock_fake_inventory = _Inventory({}) # must be truthy
- mock_fetch_inventory = mock.Mock(return_value=mock_fake_inventory)
monkeypatch.setattr(
- 'sphinx.ext.intersphinx._load._fetch_inventory', mock_fetch_inventory
+ 'sphinx.ext.intersphinx._load._fetch_inventory_data',
+ mock.Mock(return_value=(b'', '')),
+ )
+ mock_fetch_inventory = mock.Mock(return_value=_Inventory({}))
+ monkeypatch.setattr(
+ 'sphinx.ext.intersphinx._load._load_inventory', mock_fetch_inventory
)
for name, (uri, locations) in app.config.intersphinx_mapping.values():
@@ -791,8 +807,9 @@ def test_intersphinx_cache_limit(app, monkeypatch, cache_limit, expected_expired
now=now,
config=_InvConfig.from_config(app.config),
srcdir=app.srcdir,
+ cache_dir=None,
)
- # If we hadn't mocked `_fetch_inventory`, it would've made
+ # If we hadn't mocked `_fetch_inventory_data`, it would've made
# a request to `https://example.org/` and found no inventory
# file. That would've been an error, and `updated` would've been
# False even if the cache had expired. The mock makes it behave
@@ -826,8 +843,14 @@ def log_message(*args, **kwargs):
}
now = int(time.time())
- # we can use 'srcdir=None' since we are raising in _fetch_inventory
- kwds = {'cache': {}, 'now': now, 'config': config, 'srcdir': None}
+ # we can use 'srcdir=None' since we are raising in _fetch_inventory_data
+ kwds = {
+ 'cache': {},
+ 'now': now,
+ 'config': config,
+ 'srcdir': None,
+ 'cache_dir': None,
+ }
# We need an exception with its 'args' attribute set (see error
# handling in sphinx.ext.intersphinx._load._fetch_inventory_group).
side_effect = ValueError('')
@@ -836,7 +859,8 @@ def log_message(*args, **kwargs):
name='1', target_uri=url1, locations=(url1, None)
)
with mock.patch(
- 'sphinx.ext.intersphinx._load._fetch_inventory', side_effect=side_effect
+ 'sphinx.ext.intersphinx._load._fetch_inventory_data',
+ side_effect=side_effect,
) as mockfn:
assert not _fetch_inventory_group(project=project1, **kwds)
mockfn.assert_any_call(
@@ -844,19 +868,22 @@ def log_message(*args, **kwargs):
inv_location=url1,
config=config,
srcdir=None,
+ cache_path=None,
)
mockfn.assert_any_call(
target_uri=url1,
inv_location=url1 + '/' + INVENTORY_FILENAME,
config=config,
srcdir=None,
+ cache_path=None,
)
project2 = _IntersphinxProject(
name='2', target_uri=url2, locations=(url2, None)
)
with mock.patch(
- 'sphinx.ext.intersphinx._load._fetch_inventory', side_effect=side_effect
+ 'sphinx.ext.intersphinx._load._fetch_inventory_data',
+ side_effect=side_effect,
) as mockfn:
assert not _fetch_inventory_group(project=project2, **kwds)
mockfn.assert_any_call(
@@ -864,10 +891,12 @@ def log_message(*args, **kwargs):
inv_location=url2,
config=config,
srcdir=None,
+ cache_path=None,
)
mockfn.assert_any_call(
target_uri=url2,
inv_location=url2 + INVENTORY_FILENAME,
config=config,
srcdir=None,
+ cache_path=None,
)
diff --git a/tests/test_extensions/test_ext_intersphinx_cache.py b/tests/test_extensions/test_ext_intersphinx_cache.py
index 3431f136874..4b45816419a 100644
--- a/tests/test_extensions/test_ext_intersphinx_cache.py
+++ b/tests/test_extensions/test_ext_intersphinx_cache.py
@@ -4,6 +4,7 @@
import posixpath
import re
+import shutil
import zlib
from http.server import BaseHTTPRequestHandler
from io import BytesIO
@@ -261,12 +262,14 @@ def test_load_mappings_cache_update(tmp_path):
app1 = SphinxTestApp('dummy', srcdir=tmp_path, confoverrides=confoverrides1)
app1.build()
app1.cleanup()
+ shutil.rmtree(app1.doctreedir / '__intersphinx_cache__', ignore_errors=True)
# switch to new url and assert that the old URL is no more stored
confoverrides2 = BASE_CONFIG | {'intersphinx_mapping': new_project.record}
app2 = SphinxTestApp('dummy', srcdir=tmp_path, confoverrides=confoverrides2)
app2.build()
app2.cleanup()
+ shutil.rmtree(app2.doctreedir / '__intersphinx_cache__', ignore_errors=True)
entry = new_project.make_entry()
item = dict((new_project.normalise(entry),))
From ca043c3ccdc88ae5d30e4527382fe0c8bd5e2a88 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?B=C3=A9n=C3=A9dikt=20Tran?=
<10796600+picnixz@users.noreply.github.com>
Date: Sun, 22 Jun 2025 09:58:01 +0200
Subject: [PATCH 161/466] `gettext`: Ignore trailing backslashes when
extracting messages (#13686)
Trailing backslashes are now ignored by the `gettext` builder,
together with any whitespace immediately before and after them.
---
CHANGES.rst | 2 ++
sphinx/util/nodes.py | 33 +++++++++++++++++++-
tests/roots/test-intl/backslashes.txt | 38 +++++++++++++++++++++++
tests/roots/test-intl/index.txt | 1 +
tests/test_builders/test_build_gettext.py | 26 ++++++++++++++++
5 files changed, 99 insertions(+), 1 deletion(-)
create mode 100644 tests/roots/test-intl/backslashes.txt
diff --git a/CHANGES.rst b/CHANGES.rst
index 1b9343435db..0f047d6da92 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -77,6 +77,8 @@ Bugs fixed
* #13635: LaTeX: if a cell contains a table, row coloring is turned off for
the next table cells.
Patch by Jean-François B.
+* #13685: gettext: Correctly ignore trailing backslashes.
+ Patch by Bénédikt Tran.
Testing
-------
diff --git a/sphinx/util/nodes.py b/sphinx/util/nodes.py
index e29dc376884..5b6bd429a8e 100644
--- a/sphinx/util/nodes.py
+++ b/sphinx/util/nodes.py
@@ -5,6 +5,7 @@
import contextlib
import re
import unicodedata
+from io import StringIO
from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast
from docutils import nodes
@@ -289,6 +290,35 @@ def is_translatable(node: Node) -> bool:
) # fmt: skip
+def _clean_extracted_message(text: str) -> str:
+ """Remove trailing backslashes from each line of *text*."""
+ if '\\' in text:
+ # TODO(picnixz): if possible, find a regex alternative
+ # that is not vulnerable to a ReDoS (the code below is
+ # equivalent to re.sub(r'[ \t]*\\[ \t]*$', '', text, flags=re.MULTILINE)).
+ buffer = StringIO()
+ for line in text.splitlines(keepends=True):
+ split = line.rsplit('\\', maxsplit=1)
+ if len(split) == 2:
+ prefix, suffix = split
+ if re.match(r'^[ \t]*\s$', suffix):
+ # The line ends with a newline character, preceded by
+ # zero or more whitespace characters (to be dropped), the
+ # backslash, and possibly more whitespace on its left.
+ buffer.write(prefix.rstrip(' \t'))
+ buffer.write(suffix.lstrip(' \t'))
+ elif not suffix:
+ # backslash is at the end of the LAST line
+ buffer.write(prefix.rstrip(' \t'))
+ else:
+ # backslash is in the middle of the line
+ buffer.write(line)
+ else:
+ buffer.write(line)
+ text = buffer.getvalue()
+ return text.replace('\n', ' ').strip()
+
+
def extract_messages(doctree: Element) -> Iterable[tuple[Element, str]]:
"""Extract translatable messages from a document tree."""
for node in doctree.findall(is_translatable):
@@ -311,7 +341,8 @@ def extract_messages(doctree: Element) -> Iterable[tuple[Element, str]]:
elif isinstance(node, nodes.meta):
msg = node['content']
else:
- msg = node.rawsource.replace('\n', ' ').strip() # type: ignore[attr-defined]
+ text = node.rawsource # type: ignore[attr-defined]
+ msg = _clean_extracted_message(text)
# XXX nodes rendering empty are likely a bug in sphinx.addnodes
if msg:
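To make the helper's behaviour concrete, a few representative inputs and the strings it should yield (a sketch; the expectations mirror the new test fixture below):

assert _clean_extracted_message('line 1\\\nline 2 \\\nand done') == (
    'line 1 line 2 and done'
)
# Whitespace around a trailing backslash is dropped on both sides:
assert _clean_extracted_message('a \\\nb\\\nc \\') == 'a b c'
# A backslash in the middle of a line is left untouched:
assert _clean_extracted_message('4a \\ and 4b') == '4a \\ and 4b'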
diff --git a/tests/roots/test-intl/backslashes.txt b/tests/roots/test-intl/backslashes.txt
new file mode 100644
index 00000000000..7a16b6f6b62
--- /dev/null
+++ b/tests/roots/test-intl/backslashes.txt
@@ -0,0 +1,38 @@
+:tocdepth: 2
+
+i18n with backslashes
+=====================
+
+line 1\
+line 2 \
+line 3 \
+line 4a \ and 4b \
+line with spaces after backslash \
+last line with spaces \
+and done 1
+
+.. gettext parses the following lines as "a b c",
+ while a C pre-processor would have produced "abc".
+
+a \
+b\
+c \
+
+last trailing \ \ \
+is ignored
+
+
+See [#]_
+
+.. [#] footnote with backslashes \
+ and done 2
+
+
+.. note:: directive with \
+ backslashes
+
+
+.. function:: foo(a, \
+ b, \
+ c, d, e, f)
+ the foo
diff --git a/tests/roots/test-intl/index.txt b/tests/roots/test-intl/index.txt
index ac68314f97d..52644e34be1 100644
--- a/tests/roots/test-intl/index.txt
+++ b/tests/roots/test-intl/index.txt
@@ -32,6 +32,7 @@ CONTENTS
translation_progress
topic
markup
+ backslashes
.. toctree::
:maxdepth: 2
diff --git a/tests/test_builders/test_build_gettext.py b/tests/test_builders/test_build_gettext.py
index 08c6e07d9ac..30798cc5070 100644
--- a/tests/test_builders/test_build_gettext.py
+++ b/tests/test_builders/test_build_gettext.py
@@ -323,3 +323,29 @@ def test_gettext_literalblock_additional(app: SphinxTestApp) -> None:
"stdout object\\n>>>\\n>>> if __name__ == '__main__': # if run this py "
'file as python script\\n... main() # call main',
]
+
+
+@pytest.mark.sphinx('gettext', testroot='intl', srcdir='gettext')
+def test_gettext_trailing_backslashes(app: SphinxTestApp) -> None:
+ app.build(force_all=True)
+
+ assert (app.outdir / 'backslashes.pot').is_file()
+ pot = (app.outdir / 'backslashes.pot').read_text(encoding='utf8')
+ msg_ids = get_msgids(pot)
+ assert msg_ids == [
+ 'i18n with backslashes',
+ (
+ 'line 1 line 2 line 3 '
+ # middle backslashes are escaped normally
+ 'line 4a \\\\ and 4b '
+ # whitespaces after backslashes are dropped
+ 'line with spaces after backslash '
+ 'last line with spaces '
+ 'and done 1'
+ ),
+ 'a b c',
+ 'last trailing \\\\ \\\\ is ignored',
+ 'See [#]_',
+ 'footnote with backslashes and done 2',
+ 'directive with backslashes',
+ ]
From bb9cf58d0c3b8984a6b0a40b305394399fcb6e05 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Mon, 23 Jun 2025 17:56:47 +0200
Subject: [PATCH 162/466] Update CHANGES.rst
From PR #13597:
Close #11498
Close #11515
But a complete resolution looks out of reach barring tremendous effort.
---
CHANGES.rst | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/CHANGES.rst b/CHANGES.rst
index 0f047d6da92..2626062b18f 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -62,6 +62,10 @@ Features added
Bugs fixed
----------
+* #11498: LaTeX: Table in cell fails to build if it has many rows.
+ Patch by Jean-François B.
+* #11515: LaTeX: longtable does not allow nested table.
+ Patch by Jean-François B.
* #12821: LaTeX: URLs/links in section titles should render in PDF.
Patch by Jean-François B.
* #13369: Correctly parse and cross-reference unpacked type annotations.
From 5221bfccd401e4459b52e0f86006ff592bcf556e Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Tue, 24 Jun 2025 01:20:48 +0100
Subject: [PATCH 163/466] Refresh the list of third-party domains (#13691)
---
doc/usage/domains/index.rst | 100 +++++++++++++++++++++++-------------
1 file changed, 63 insertions(+), 37 deletions(-)
diff --git a/doc/usage/domains/index.rst b/doc/usage/domains/index.rst
index cc3f272646c..fac8e94e8e8 100644
--- a/doc/usage/domains/index.rst
+++ b/doc/usage/domains/index.rst
@@ -35,6 +35,69 @@ easier to write.
This section describes what the domains that are included with Sphinx provide.
The domain API is documented as well, in the section :ref:`domain-api`.
+Built-in domains
+----------------
+
+The following domains are included within Sphinx:
+
+.. toctree::
+ :maxdepth: 1
+
+ standard
+ c
+ cpp
+ javascript
+ mathematics
+ python
+ restructuredtext
+
+
+Third-party domains
+-------------------
+
+Several third-party domains are available as extensions, including:
+
+* `Ada `__
+* `Antlr4 `__
+* `Bazel `__
* `BibTeX `__
+* `Chapel `__
+* `CMake `__
+* `Common Lisp `__
+* `Erlang `__
+* `Fortran `__
+* `GraphQL `__
+* `Go `__
+* `HTTP `__
+* `Hy `__
+* `Lua `__
+* `MATLAB `__
+* `PHP `__
+* `Ruby `__
+* `Rust `__
+* `Verilog `__
+* `VHDL `__
+* `Visual Basic `__
+
+Other domains may be found on the Python Package Index
+(via the `Framework :: Sphinx :: Domain`__ classifier),
+`GitHub `__, or
+`GitLab `__.
+
+__ https://pypi.org/search/?c=Framework+%3A%3A+Sphinx+%3A%3A+Domain
+
+.. NOTE: The following all seem unmaintained, last released 2018 or earlier.
+ The links are preserved in this comment for reference.
+
+ * `CoffeeScript `__
+ * `DotNET `__
+ * `dqn `__
+ * `Jinja `__
+ * `JSON `__
+ * `Lasso `__
+ * `Operation `__
+ * `Scala `__
+
.. _basic-domain-markup:
@@ -174,40 +237,3 @@ In short:
component of the target.
For example, ``:py:meth:`~queue.Queue.get``` will
refer to ``queue.Queue.get`` but only display ``get`` as the link text.
-
-Built-in domains
-----------------
-
-The following domains are included within Sphinx:
-
-.. toctree::
- :maxdepth: 1
-
- standard
- c
- cpp
- javascript
- mathematics
- python
- restructuredtext
-
-More domains
-------------
-
-There are several third-party domains available as extensions, including:
-
-* `Ada `__
-* `Chapel `__
-* `CoffeeScript `__
-* `Common Lisp `__
-* `dqn `__
-* `Erlang `__
-* `Go `__
-* `HTTP `__
-* `Jinja `__
-* `Lasso `__
-* `MATLAB `__
-* `Operation `__
-* `PHP `__
-* `Ruby `__
-* `Scala `__
From 7551ef5b817d3de936c3a708c9414573bb70dd0f Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Tue, 24 Jun 2025 02:12:57 +0100
Subject: [PATCH 164/466] Remove mypy overrides from
``tests.test_directives.test_directive_only`` (#13692)
---
pyproject.toml | 1 -
tests/test_directives/test_directive_only.py | 67 +++++++++++---------
2 files changed, 38 insertions(+), 30 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index c492e346309..98bf35c6e9b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -304,7 +304,6 @@ module = [
# tests/test_config
"tests.test_config.test_config",
# tests/test_directives
- "tests.test_directives.test_directive_only",
"tests.test_directives.test_directive_other",
"tests.test_directives.test_directive_patch",
# tests/test_domains
diff --git a/tests/test_directives/test_directive_only.py b/tests/test_directives/test_directive_only.py
index 9e62f4cb3eb..dbabffaa5d5 100644
--- a/tests/test_directives/test_directive_only.py
+++ b/tests/test_directives/test_directive_only.py
@@ -9,44 +9,53 @@
from docutils import nodes
if TYPE_CHECKING:
+ from typing import Any
+
from sphinx.testing.util import SphinxTestApp
@pytest.mark.sphinx('text', testroot='directive-only')
def test_sectioning(app: SphinxTestApp) -> None:
- def getsects(section):
- if not isinstance(section, nodes.section):
- return [getsects(n) for n in section.children]
- title = section.next_node(nodes.title).astext().strip()
- subsects = []
- children = section.children[:]
- while children:
- node = children.pop(0)
- if isinstance(node, nodes.section):
- subsects.append(node)
- continue
- children = list(node.children) + children
- return [title, [getsects(subsect) for subsect in subsects]]
-
- def testsects(prefix, sects, indent=0):
- title = sects[0]
- parent_num = title.split()[0]
- assert prefix == parent_num, f'Section out of place: {title!r}'
- for i, subsect in enumerate(sects[1]):
- num = subsect[0].split()[0]
- assert re.match('[0-9]+[.0-9]*[.]', num), (
- f'Unnumbered section: {subsect[0]!r}'
- )
- testsects(prefix + str(i + 1) + '.', subsect, indent + 4)
-
app.build(filenames=[app.srcdir / 'only.rst'])
doctree = app.env.get_doctree('only')
app.env.apply_post_transforms(doctree, 'only')
- parts = [getsects(n) for n in doctree.children if isinstance(n, nodes.section)]
- for i, s in enumerate(parts):
- testsects(str(i + 1) + '.', s, 4)
- actual_headings = '\n'.join(p[0] for p in parts)
+ parts = [_get_sections(n) for n in doctree.children if isinstance(n, nodes.section)]
+ for i, section in enumerate(parts):
+ _test_sections(f'{i + 1}.', section, 4)
+ actual_headings = '\n'.join(p[0] for p in parts) # type: ignore[misc]
assert len(parts) == 4, (
f'Expected 4 document level headings, got:\n{actual_headings}'
)
+
+
+def _get_sections(section: nodes.Node) -> list[str | list[Any]]:
+ if not isinstance(section, nodes.section):
+ return list(map(_get_sections, section.children))
+ title = section.next_node(nodes.title).astext().strip()
+ subsections = []
+ children = section.children.copy()
+ while children:
+ node = children.pop(0)
+ if isinstance(node, nodes.section):
+ subsections.append(node)
+ continue
+ children = list(node.children) + children
+ return [title, list(map(_get_sections, subsections))]
+
+
+def _test_sections(
+ prefix: str, sections: list[str | list[Any]], indent: int = 0
+) -> None:
+ title = sections[0]
+ assert isinstance(title, str)
+ parent_num = title.partition(' ')[0]
+ assert prefix == parent_num, f'Section out of place: {title!r}'
+ for i, subsection in enumerate(sections[1]):
+ subsection_title = subsection[0]
+ assert isinstance(subsection_title, str)
+ num = subsection_title.partition(' ')[0]
+ assert re.match('[0-9]+[.0-9]*[.]', num), (
+ f'Unnumbered section: {subsection[0]!r}'
+ )
+ _test_sections(f'{prefix}{i + 1}.', subsection, indent + 4)
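
To make the refactored helpers' contract concrete, here is a minimal
usage sketch (hypothetical, for illustration only; it builds a tiny
docutils tree by hand instead of running a Sphinx build, and assumes
``_get_sections`` and ``_test_sections`` above are in scope):

    from docutils import nodes

    def _make_section(title: str, *children: nodes.section) -> nodes.section:
        # A bare docutils section with a title and optional subsections.
        section = nodes.section()
        section += nodes.title(text=title)
        section.extend(children)
        return section

    tree = _make_section('1. Top', _make_section('1.1. Sub'))
    assert _get_sections(tree) == ['1. Top', [['1.1. Sub', []]]]
    _test_sections('1.', _get_sections(tree))  # numbering is consistent: passes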
From b19e7dcbe2d86c56a0270f58d3db318e8e2bc766 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Tue, 24 Jun 2025 11:03:15 +0100
Subject: [PATCH 165/466] Remove mypy overrides for
``tests/test_extensions/test_ext_imgconverter.py`` (#13693)
---
pyproject.toml | 1 -
tests/test_extensions/test_ext_imgconverter.py | 2 +-
2 files changed, 1 insertion(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 98bf35c6e9b..74dbde30354 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -269,7 +269,6 @@ module = [
# tests/test_extensions
"tests.test_extensions.test_ext_autodoc_autoclass",
"tests.test_extensions.test_ext_autosummary_imports",
- "tests.test_extensions.test_ext_imgconverter",
"tests.test_extensions.test_ext_intersphinx_cache",
"tests.test_extensions.test_ext_math",
"tests.test_extensions.test_ext_napoleon",
diff --git a/tests/test_extensions/test_ext_imgconverter.py b/tests/test_extensions/test_ext_imgconverter.py
index a2078f9fe11..c39f885e97e 100644
--- a/tests/test_extensions/test_ext_imgconverter.py
+++ b/tests/test_extensions/test_ext_imgconverter.py
@@ -12,7 +12,7 @@
@pytest.fixture
-def _if_converter_found(app):
+def _if_converter_found(app: SphinxTestApp) -> None:
image_converter = getattr(app.config, 'image_converter', '')
try:
if image_converter:
From 3f63a1420daf7542b3bab7bf10cdbc837dd16b74 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Tue, 24 Jun 2025 11:04:27 +0100
Subject: [PATCH 166/466] Remove mypy overrides for
``tests/test_transforms/test_transforms_reorder_nodes.py`` (#13694)
---
pyproject.toml | 1 -
tests/test_transforms/test_transforms_reorder_nodes.py | 4 ++--
2 files changed, 2 insertions(+), 3 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 74dbde30354..03ad3cfedfe 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -285,7 +285,6 @@ module = [
"tests.test_theming.test_theming",
# tests/test_transforms
"tests.test_transforms.test_transforms_post_transforms_images",
- "tests.test_transforms.test_transforms_reorder_nodes",
# tests/test_util
"tests.test_util.test_util_docutils",
# tests/test_writers
diff --git a/tests/test_transforms/test_transforms_reorder_nodes.py b/tests/test_transforms/test_transforms_reorder_nodes.py
index aa9bee43d04..b81a47f5b86 100644
--- a/tests/test_transforms/test_transforms_reorder_nodes.py
+++ b/tests/test_transforms/test_transforms_reorder_nodes.py
@@ -56,8 +56,8 @@ def test_transforms_reorder_consecutive_target_and_index_nodes_preserve_order(
@pytest.mark.sphinx('html', testroot='_blank')
def test_transforms_reorder_consecutive_target_and_index_nodes_no_merge_across_other_nodes(
- app,
-):
+ app: SphinxTestApp,
+) -> None:
text = (
'.. index:: abc\n'
'.. index:: def\n'
From a15c149a607a1dcbc07e0058108194726d382d9f Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 24 Jun 2025 11:07:02 +0100
Subject: [PATCH 167/466] Bump mypy to 1.16.1 (#13674)
Co-authored-by: Adam Dangoor
---
pyproject.toml | 4 ++--
sphinx/cmd/quickstart.py | 2 +-
sphinx/config.py | 2 +-
sphinx/domains/javascript.py | 2 +-
sphinx/domains/python/_object.py | 2 +-
sphinx/util/math.py | 2 +-
6 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 03ad3cfedfe..83d8de0d018 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -94,7 +94,7 @@ docs = [
]
lint = [
"ruff==0.12.0",
- "mypy==1.15.0",
+ "mypy==1.16.1",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
"types-defusedxml==0.7.0.20250516",
@@ -158,7 +158,7 @@ translations = [
"Jinja2>=3.1",
]
types = [
- "mypy==1.15.0",
+ "mypy==1.16.1",
"pyrefly",
"pyright==1.1.400",
{ include-group = "type-stubs" },
diff --git a/sphinx/cmd/quickstart.py b/sphinx/cmd/quickstart.py
index 0275343e847..73cdabfd97b 100644
--- a/sphinx/cmd/quickstart.py
+++ b/sphinx/cmd/quickstart.py
@@ -801,7 +801,7 @@ def main(argv: Sequence[str] = (), /) -> int:
print('[Interrupted.]')
return 130 # 128 + SIGINT
- for variable in d.get('variables', []):
+ for variable in d.get('variables', []): # type: ignore[union-attr]
try:
name, value = variable.split('=')
d[name] = value
diff --git a/sphinx/config.py b/sphinx/config.py
index e878cd7a834..ba6ec2bd619 100644
--- a/sphinx/config.py
+++ b/sphinx/config.py
@@ -635,7 +635,7 @@ def _validate_valid_types(
if isinstance(valid_types, type):
return frozenset((valid_types,))
if valid_types is Any:
- return frozenset({Any}) # type: ignore[arg-type]
+ return frozenset({Any})
if isinstance(valid_types, set):
return frozenset(valid_types)
try:
diff --git a/sphinx/domains/javascript.py b/sphinx/domains/javascript.py
index e620e04b401..6ebd1dec3fd 100644
--- a/sphinx/domains/javascript.py
+++ b/sphinx/domains/javascript.py
@@ -141,7 +141,7 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str]
trailing_comma=trailing_comma,
env=self.env,
)
- return fullname, prefix
+ return fullname, prefix # type: ignore[return-value]
def _object_hierarchy_parts(self, sig_node: desc_signature) -> tuple[str, ...]:
if 'fullname' not in sig_node:
diff --git a/sphinx/domains/python/_object.py b/sphinx/domains/python/_object.py
index 6cd19245aae..5dcb3143ce5 100644
--- a/sphinx/domains/python/_object.py
+++ b/sphinx/domains/python/_object.py
@@ -431,7 +431,7 @@ def add_target_and_index(
)
if 'no-index-entry' not in self.options:
- if index_text := self.get_index_text(mod_name, name_cls):
+ if index_text := self.get_index_text(mod_name, name_cls): # type: ignore[arg-type]
self.indexnode['entries'].append((
'single',
index_text,
diff --git a/sphinx/util/math.py b/sphinx/util/math.py
index f482e0c4dac..898aab2d617 100644
--- a/sphinx/util/math.py
+++ b/sphinx/util/math.py
@@ -14,7 +14,7 @@ def get_node_equation_number(writer: HTML5Translator, node: nodes.math_block) ->
if writer.builder.config.math_numfig and writer.builder.config.numfig:
figtype = 'displaymath'
if writer.builder.name == 'singlehtml':
- key = f'{writer.docnames[-1]}/{figtype}' # type: ignore[has-type]
+ key = f'{writer.docnames[-1]}/{figtype}'
else:
key = figtype
From 74c16867892ba222fc22c094bb707dc335c01cd2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Sun, 29 Jun 2025 10:06:30 +0200
Subject: [PATCH 168/466] LaTeX: improve the table column widths computed by
LaTeX itself
Fix #3447
Fix #8828
With this patch the legacy notion of a "problematic cell" is almost
entirely removed, and the LaTeX package tabulary is used more often, so
that column widths in tables fit their actual contents more closely.
This works well when a column contains cells with multiple paragraphs,
bullet lists, enumerated lists, or line blocks. For cells with object
descriptions or admonitions, the column may not fit its contents
tightly, but the other columns will probably still be improved.
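
For orientation, the environment selection this patch builds on
(documented in doc/usage/restructuredtext/directives.rst below) can be
summarised in a rough Python sketch. ``choose_table_env`` is a
hypothetical name for illustration, not the writer's actual API, and the
real logic also honours table classes and the tabularcolumns directive:

    def choose_table_env(n_rows: int, has_verbatim: bool, is_nested: bool) -> str:
        """Roughly how Sphinx picks the LaTeX environment for a table."""
        if n_rows > 30:
            return 'longtable'  # long tables may break across pages
        if has_verbatim or is_nested:
            return 'tabular'    # code-blocks and nested tables stay "problematic"
        return 'tabulary'       # LaTeX computes suitable column widths itself

    assert choose_table_env(3, False, False) == 'tabulary'
    assert choose_table_env(3, True, False) == 'tabular'
    assert choose_table_env(40, False, False) == 'longtable'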
---
CHANGES.rst | 6 +
doc/usage/restructuredtext/directives.rst | 76 ++++----
sphinx/texinputs/sphinxlatextables.sty | 40 +++-
sphinx/writers/latex.py | 81 +++------
tests/roots/test-latex-table/complex.rst | 4 +-
.../expects/complex_spanning_cell.tex | 28 +--
.../expects/{gridtable.tex => grid_table.tex} | 52 ++++--
...tex => grid_table_with_tabularcolumns.tex} | 54 ++++--
.../test-latex-table/expects/longtable.tex | 50 +++--
....tex => longtable_having_align_option.tex} | 50 +++--
.../expects/longtable_having_caption.tex | 50 +++--
.../longtable_having_formerly_problematic.tex | 97 ++++++++++
.../longtable_having_problematic_cell.tex | 76 --------
..._stub_columns_and_formerly_problematic.tex | 106 +++++++++++
...ving_stub_columns_and_problematic_cell.tex | 81 ---------
.../expects/longtable_having_verbatim.tex | 49 +++--
...having_widths_and_formerly_problematic.tex | 97 ++++++++++
...ble_having_widths_and_problematic_cell.tex | 76 --------
...tex => longtable_having_widths_option.tex} | 50 +++--
.../expects/longtable_with_tabularcolumn.tex | 70 -------
.../expects/longtable_with_tabularcolumns.tex | 90 +++++++++
.../test-latex-table/expects/simple_table.tex | 38 ++--
.../expects/table_having_caption.tex | 38 ++--
.../table_having_formerly_problematic.tex | 64 +++++++
.../expects/table_having_problematic_cell.tex | 47 -----
..._stub_columns_and_formerly_problematic.tex | 68 +++++++
...ving_stub_columns_and_problematic_cell.tex | 49 -----
...ng_three_paragraphs_cell_in_first_col.tex} | 14 +-
.../expects/table_having_verbatim.tex | 39 ++--
...having_widths_and_formerly_problematic.tex | 64 +++++++
...ble_having_widths_and_problematic_cell.tex | 47 -----
...ths.tex => table_having_widths_option.tex} | 38 ++--
.../expects/table_with_tabularcolumns.tex | 57 ++++++
.../expects/tabular_having_align_option.tex | 56 ++++++
.../expects/tabular_having_widths.tex | 40 ----
.../expects/tabularcolumn.tex | 41 -----
.../expects/tabulary_having_align_option.tex | 56 ++++++
.../expects/tabulary_having_widths.tex | 40 ----
tests/roots/test-latex-table/longtable.rst | 26 +--
tests/roots/test-latex-table/tabular.rst | 34 ++--
tests/test_builders/test_build_latex.py | 171 ++++++------------
41 files changed, 1324 insertions(+), 986 deletions(-)
rename tests/roots/test-latex-table/expects/{gridtable.tex => grid_table.tex} (55%)
rename tests/roots/test-latex-table/expects/{gridtable_with_tabularcolumn.tex => grid_table_with_tabularcolumns.tex} (53%)
rename tests/roots/test-latex-table/expects/{longtable_having_align.tex => longtable_having_align_option.tex} (50%)
create mode 100644 tests/roots/test-latex-table/expects/longtable_having_formerly_problematic.tex
delete mode 100644 tests/roots/test-latex-table/expects/longtable_having_problematic_cell.tex
create mode 100644 tests/roots/test-latex-table/expects/longtable_having_stub_columns_and_formerly_problematic.tex
delete mode 100644 tests/roots/test-latex-table/expects/longtable_having_stub_columns_and_problematic_cell.tex
create mode 100644 tests/roots/test-latex-table/expects/longtable_having_widths_and_formerly_problematic.tex
delete mode 100644 tests/roots/test-latex-table/expects/longtable_having_widths_and_problematic_cell.tex
rename tests/roots/test-latex-table/expects/{longtable_having_widths.tex => longtable_having_widths_option.tex} (58%)
delete mode 100644 tests/roots/test-latex-table/expects/longtable_with_tabularcolumn.tex
create mode 100644 tests/roots/test-latex-table/expects/longtable_with_tabularcolumns.tex
create mode 100644 tests/roots/test-latex-table/expects/table_having_formerly_problematic.tex
delete mode 100644 tests/roots/test-latex-table/expects/table_having_problematic_cell.tex
create mode 100644 tests/roots/test-latex-table/expects/table_having_stub_columns_and_formerly_problematic.tex
delete mode 100644 tests/roots/test-latex-table/expects/table_having_stub_columns_and_problematic_cell.tex
rename tests/roots/test-latex-table/expects/{table_having_threeparagraphs_cell_in_first_col.tex => table_having_three_paragraphs_cell_in_first_col.tex} (53%)
create mode 100644 tests/roots/test-latex-table/expects/table_having_widths_and_formerly_problematic.tex
delete mode 100644 tests/roots/test-latex-table/expects/table_having_widths_and_problematic_cell.tex
rename tests/roots/test-latex-table/expects/{table_having_widths.tex => table_having_widths_option.tex} (50%)
create mode 100644 tests/roots/test-latex-table/expects/table_with_tabularcolumns.tex
create mode 100644 tests/roots/test-latex-table/expects/tabular_having_align_option.tex
delete mode 100644 tests/roots/test-latex-table/expects/tabular_having_widths.tex
delete mode 100644 tests/roots/test-latex-table/expects/tabularcolumn.tex
create mode 100644 tests/roots/test-latex-table/expects/tabulary_having_align_option.tex
delete mode 100644 tests/roots/test-latex-table/expects/tabulary_having_widths.tex
diff --git a/CHANGES.rst b/CHANGES.rst
index 2626062b18f..06990919e1b 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -62,6 +62,12 @@ Features added
Bugs fixed
----------
+* #3447: LaTeX: when assigning the longtable class to a table for PDF output,
+ it may render "horizontally" and overflow into the right margin.
+ Patch by Jean-François B.
+* #8828: LaTeX: adding a footnote to a longtable cell causes the table to
+ occupy the full width.
+ Patch by Jean-François B.
* #11498: LaTeX: Table in cell fails to build if it has many rows.
Patch by Jean-François B.
* #11515: LaTeX: longtable does not allow nested table.
diff --git a/doc/usage/restructuredtext/directives.rst b/doc/usage/restructuredtext/directives.rst
index 5845a6ab717..25116edd4fc 100644
--- a/doc/usage/restructuredtext/directives.rst
+++ b/doc/usage/restructuredtext/directives.rst
@@ -1483,9 +1483,9 @@ Check the :confval:`latex_table_style`.
.. rst:directive:: .. tabularcolumns:: column spec
- This directive influences only the LaTeX output for the next table in
- source. The mandatory argument is a column specification (known as an
- "alignment preamble" in LaTeX idiom). Please refer to a LaTeX
+ This directive influences only the LaTeX output, and only for the next
+ table in source. The mandatory argument is a column specification (known
+ as an "alignment preamble" in LaTeX idiom). Please refer to a LaTeX
documentation, such as the `wiki page`_, for basics of such a column
specification.
@@ -1493,50 +1493,62 @@ Check the :confval:`latex_table_style`.
.. versionadded:: 0.3
- .. note::
+ Sphinx renders tables with at most 30 rows using ``tabulary`` (or
+ ``tabular`` if at least one cell contains either a code-block or a nested
+ table), and those with more rows with ``longtable``. The advantage of
+ using ``tabulary`` is that it tries to compute automatically (internally to
+ LaTeX) suitable column widths.
+
+ :rst:dir:`tabularcolumns` can serve to provide one's own "colspec" choice.
+ Here is an advanced example:
+
+ .. code-block:: latex
- :rst:dir:`tabularcolumns` conflicts with ``:widths:`` option of table
- directives. If both are specified, ``:widths:`` option will be ignored.
+ .. tabularcolumns:: >{\raggedright}\Y{.4}>{\centering}\Y{.1}>{\sphinxcolorblend{!95!red}\centering\noindent\bfseries\color{red}}\Y{.12}>{\raggedright\arraybackslash}\Y{.38}
- Sphinx renders tables with at most 30 rows using ``tabulary``, and those
- with more rows with ``longtable``.
+ This is used in Sphinx's own PDF docs at :ref:`dev-deprecated-apis`.
+ Regarding column widths, this "colspec" achieves the same as a
+ ``:widths:`` option set to ``40 10 12 38``, but it injects extra effects.
- ``tabulary`` tries to compute automatically (internally to LaTeX) suitable
- column widths. However, cells are then not allowed to contain
- "problematic" elements such as lists, object descriptions,
- blockquotes... Sphinx will fall back to using ``tabular`` if such a cell is
- encountered (or a nested ``tabulary``). In such a case the table will have
- a tendency to try to fill the whole available line width.
+ .. note::
- :rst:dir:`tabularcolumns` can help in coercing the usage of ``tabulary`` if
- one is careful to not employ the ``tabulary`` column types (``L``, ``R``,
- ``C`` or ``J``) for those columns with at least one "problematic" cell, but
- only LaTeX's ``p{}`` or Sphinx ``\X`` and ``\Y`` (described next).
+ If both :rst:dir:`tabularcolumns` and the ``:widths:`` option of a table
+ directive are used, the ``:widths:`` option will be ignored by the LaTeX
+ builder. It is, of course, obeyed by other builders.
- Literal blocks do not work at all with ``tabulary``. Sphinx will fall back
- to ``tabular`` or ``longtable`` environments depending on the number of
- rows. It will employ the :rst:dir:`tabularcolumns` specification only if it
- contains no usage of the ``tabulary`` specific types.
+ Literal blocks do not work at all with ``tabulary``, so Sphinx will then
+ fall back to the ``tabular`` LaTeX environment. In that case it will
+ employ the :rst:dir:`tabularcolumns` specification only if it contains no
+ usage of the ``tabulary`` specific column types (which are ``L``, ``R``,
+ ``C`` and ``J``).
Besides the LaTeX ``l``, ``r``, ``c`` and ``p{width}`` column specifiers,
- one can also use ``\X{a}{b}`` which configures the column width to be a
- fraction ``a/b`` of the total line width and ``\Y{f}`` where ``f`` is a
- decimal: for example ``\Y{0.2}`` means that the column will occupy ``0.2``
- times the line width.
+ and the ``tabulary`` specific ``L``, ``R``, ``C`` and ``J``, one can also
+ use (with all table types) ``\X{a}{b}`` which configures the column width
+ to be a fraction ``a/b`` of the total line width and ``\Y{f}`` where ``f``
+ is a decimal: for example ``\Y{0.2}`` means that the column will occupy
+ ``0.2`` times the line width.
.. versionchanged:: 1.6
- Use ``J`` (justified) by default with ``tabulary``, not ``L``
+ Sphinx uses ``J`` (justified) by default with ``tabulary``, not ``L``
(flushed-left). To revert, include ``\newcolumntype{T}{L}`` in the LaTeX
preamble, as in fact Sphinx uses ``T`` and sets it by default to be an
alias of ``J``.
-.. hint::
+.. versionchanged:: 8.3.0
- A frequent issue with ``tabulary`` is that columns with little contents
- appear to be "squeezed". One can add to the LaTeX preamble for example
- ``\setlength{\tymin}{40pt}`` to ensure a minimal column width of ``40pt``,
- the ``tabulary`` default of ``10pt`` being too small.
+ Formerly, Sphinx did not use ``tabulary`` if the table had at least one
+ cell containing "problematic" elements such as lists, object descriptions,
+ or blockquotes, because such contents are not compatible out-of-the-box
+ with ``tabulary``. At ``8.3.0`` a technique which was already in use for
+ merged cells was extended to such cases, and the sole remaining
+ "problematic" contents are code-blocks and nested tables. So tables
+ containing (only) cells with multiple paragraphs, bullet or enumerated
+ lists, or line blocks will now fit their contents better (if not
+ rendered by ``longtable``). Cells with object descriptions or admonitions
+ will still have a tendency to make the table fill the full text area
+ width, but columns in that table with no such contents will be tighter.
.. hint::
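
The correspondence between ``:widths:`` and ``\Y`` noted above is plain
arithmetic: each column's ``\Y`` fraction is its width divided by the sum
of all the widths. A small Python helper makes this concrete
(``widths_to_colspec`` is a hypothetical name, not a Sphinx API):

    def widths_to_colspec(widths: list[int]) -> str:
        # Translate a reST :widths: list into an equivalent \Y{f} column spec.
        total = sum(widths)
        return ''.join(f'\\Y{{{w / total:g}}}' for w in widths)

    # Matches the ":widths: 40 10 12 38" equivalence above, modulo the
    # extra styling commands used in that advanced colspec.
    assert widths_to_colspec([40, 10, 12, 38]) == r'\Y{0.4}\Y{0.1}\Y{0.12}\Y{0.38}'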
diff --git a/sphinx/texinputs/sphinxlatextables.sty b/sphinx/texinputs/sphinxlatextables.sty
index 96ce2d0c80f..00719015e41 100644
--- a/sphinx/texinputs/sphinxlatextables.sty
+++ b/sphinx/texinputs/sphinxlatextables.sty
@@ -1,7 +1,7 @@
%% TABLES (WITH SUPPORT FOR MERGED CELLS OF GENERAL CONTENTS)
%
% change this info string if making any custom modification
-\ProvidesPackage{sphinxlatextables}[2025/06/09 v8.3.0 tables]%
+\ProvidesPackage{sphinxlatextables}[2025/06/30 v8.3.0 tables]%
% Provides support for this output mark-up from Sphinx latex writer
% and table templates:
@@ -42,6 +42,13 @@
% - \sphinxthistablewithnocolorrowsstyle
% - \sphinxthistablewithvlinesstyle
% - \sphinxthistablewithnovlinesstyle
+% - \sphinxbeforeendvarwidth
+
+% At 8.3.0, ALL table cell contents are wrapped into a varwidth environment.
+% This helps solve issues such as #3447 and #8828, and allows tabulary to
+% be used in many more cases, hence better-looking tables.
+\def\sphinxbeforeendvarwidth{\par\vskip-\baselineskip\vbox{\hbox{\strut}}}
+
% These conditionals added at 8.3.0 for nested tables not to break row colors
% (#13635). Nested tables are only partially supported by Sphinx LaTeX.
% The method here is with no changes to neither writer nor templates.
@@ -251,7 +258,29 @@
%
% configuration of tabulary
\setlength{\tymin}{3\fontcharwd\font`0 }% minimal width of "squeezed" columns
-\setlength{\tymax}{10000pt}% allow enough room for paragraphs to "compete"
+\setlength{\tymax}{2\textwidth}% allow enough room for paragraphs to "compete"
+%
+% MEMO: tabulary initially renders cell contents "horizontally" to measure
+% them and compare their relative importance. Its goal is to choose the
+% column width so that, roughly, all columns will look about evenly
+% filled. "Horizontal" rendering is incompatible with many LaTeX
+% structures such as lists, so prior to Sphinx 8.3.0 cells with such
+% "problematic" contents caused Sphinx to use tabular not tabulary; the
+% tabular would then render each column in absence of :widths: option or
+% tabularcolumns directive the same width equal to available text width
+% divided by number of columns. At 8.3.0, "problematic" contents is
+% wrapped into a "varwidth" environment, as was already done formerly
+% for merged cells, and this avoids tabulary causing errors such as
+% "incompatible with LR mode"; \sphinxcolwidth is used which sets
+% the initial horizontal width for "varwidth". In the first tabulary
+% pass, \sphinxcolwidth is configured (by us) to use \tymax.
+%
+% During testing, it was determined that our former 10000pt setting for
+% \tymax could cause a "Dimension too large" TeX error if two or more
+% columns contained such cells. So we now use 2\textwidth, which is more
+% than 10 times smaller but proves large enough for the tabulary
+% algorithm to provide reasonable results.
+%
% we need access to tabulary's final computed width. \@tempdima is too volatile
% to hope it has kept tabulary's value when \sphinxcolwidth needs it.
\newdimen\sphinx@TY@tablewidth
@@ -302,9 +331,7 @@
% **** TODO: clarify if next paragraph means we must raise an
% **** if LaTeX writer detects a merged cell inside nested table.
% MEMO about nesting: if sphinxmulticolumn is encountered in a nested tabular
-% inside a tabulary it will think to be at top level in the tabulary. But
-% Sphinx generates no nested tables, and if some LaTeX macro uses internally a
-% tabular this will not have a \sphinxstartmulticolumn within it!
+% inside a tabulary, it will think it is at top level in the tabulary.
%
% 5.3.0 adds a check for multirow as single-row multi-column will allow a row
% colour but multi-row multi-column should not.
@@ -407,8 +434,7 @@
}%
\newcommand*\sphinxcolwidth[2]{%
% this dimension will always be used for varwidth, and serves as maximum
- % width when cells are merged either via multirow or multicolumn or both,
- % as always their contents is wrapped in varwidth environment.
+ % width for cells whose contents are wrapped in varwidth environment.
\ifnum#1>\@ne % multi-column (and possibly also multi-row)
% we wrote our own multicolumn code especially to handle that (and allow
% verbatim contents)
diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py
index 39aef55ddfe..005d0ef5e06 100644
--- a/sphinx/writers/latex.py
+++ b/sphinx/writers/latex.py
@@ -132,11 +132,9 @@ def __init__(self, node: Element) -> None:
self.colsep = None
self.colwidths: list[int] = []
self.has_problematic = False
- self.has_oldproblematic = False
self.has_verbatim = False
# cf https://github.com/sphinx-doc/sphinx/issues/13646#issuecomment-2958309632
self.is_nested = False
- self.entry_needs_linetrimming = 0
self.caption: list[str] = []
self.stubs: list[int] = []
@@ -223,12 +221,6 @@ def get_colspec(self) -> str:
elif self.get_table_type() == 'tabulary':
# sphinx.sty sets T to be J by default.
return '{' + _colsep + (('T' + _colsep) * self.colcount) + '}' + CR
- elif self.has_oldproblematic:
- return (
- r'{%s*{%d}{\X{1}{%d}%s}}'
- % (_colsep, self.colcount, self.colcount, _colsep)
- + CR
- )
else:
return '{' + _colsep + (('l' + _colsep) * self.colcount) + '}' + CR
@@ -804,8 +796,6 @@ def visit_desc(self, node: Element) -> None:
else:
self.body.append(BLANKLINE)
self.body.append(r'\begin{fulllineitems}' + CR)
- if self.table:
- self.table.has_problematic = True
def depart_desc(self, node: Element) -> None:
if self.in_desc_signature:
@@ -1107,8 +1097,6 @@ def visit_seealso(self, node: Element) -> None:
r'\begin{sphinxseealso}{%s:}' % admonitionlabels['seealso'] + CR
)
self.no_latex_floats += 1
- if self.table:
- self.table.has_problematic = True
def depart_seealso(self, node: Element) -> None:
self.body.append(BLANKLINE)
@@ -1367,19 +1355,25 @@ def visit_entry(self, node: Element) -> None:
r'\sphinxmultirow{%d}{%d}{%%' % (cell.height, cell.cell_id) + CR
)
context = '}%' + CR + context
- if cell.width > 1 or cell.height > 1:
- self.body.append(
- r'\begin{varwidth}[t]{\sphinxcolwidth{%d}{%d}}'
- % (cell.width, self.table.colcount)
- + CR
- )
- context = (
- r'\par' + CR + r'\vskip-\baselineskip'
- r'\vbox{\hbox{\strut}}\end{varwidth}%' + CR + context
- )
- self.table.entry_needs_linetrimming = 1
- if len(list(node.findall(nodes.paragraph))) >= 2:
- self.table.has_oldproblematic = True
+ # 8.3.0 wraps ALL cell contents in "varwidth". This fixes a
+ # number of issues and allows more usage of tabulary.
+ #
+ # "varwidth" usage allows a *tight fit* to multiple paragraphs,
+ # line blocks, bullet lists, enumerated lists; it is less
+ # successful at finding a tight fit for object descriptions or
+ # admonitions: the table will then probably occupy the full width, and
+ # columns containing such cells will auto-divide the total width
+ # equally.
+ #
+ # "\sphinxcolwidth" has an appropriate definition in
+ # sphinxlatextables.sty which in particular takes into account
+ # tabulary "two-pass" system.
+ self.body.append(
+ r'\begin{varwidth}[t]{\sphinxcolwidth{%d}{%d}}'
+ % (cell.width, self.table.colcount)
+ + CR
+ )
+ context = r'\sphinxbeforeendvarwidth' + CR + r'\end{varwidth}%' + CR + context
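+ # For a cell spanning one column, the emitted LaTeX thus has the
+ # shape below (N being the table's column count; see the files under
+ # tests/roots/test-latex-table/expects/ for complete examples):
+ #
+ #   \begin{varwidth}[t]{\sphinxcolwidth{1}{N}}
+ #   ...cell contents...
+ #   \sphinxbeforeendvarwidth
+ #   \end{varwidth}%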
if (
isinstance(node.parent.parent, nodes.thead)
or (cell.col in self.table.stubs)
@@ -1392,20 +1386,17 @@ def visit_entry(self, node: Element) -> None:
pass
else:
self.body.append(r'\sphinxstyletheadfamily ')
- if self.table.entry_needs_linetrimming:
- self.pushbody([])
+ self.pushbody([])
self.context.append(context)
def depart_entry(self, node: Element) -> None:
assert self.table is not None
- if self.table.entry_needs_linetrimming:
- self.table.entry_needs_linetrimming = 0
- body = self.popbody()
+ body = self.popbody()
- # Remove empty lines from top of merged cell
- while body and body[0] == CR:
- body.pop(0)
- self.body.extend(body)
+ # Remove empty lines from top of merged cell
+ while body and body[0] == CR:
+ body.pop(0)
+ self.body.extend(body)
self.body.append(self.context.pop())
@@ -1446,8 +1437,6 @@ def visit_acks(self, node: Element) -> None:
def visit_bullet_list(self, node: Element) -> None:
if not self.compact_list:
self.body.append(r'\begin{itemize}' + CR)
- if self.table:
- self.table.has_problematic = True
def depart_bullet_list(self, node: Element) -> None:
if not self.compact_list:
@@ -1485,8 +1474,6 @@ def get_nested_level(node: Element) -> int:
)
if 'start' in node:
self.body.append(r'\setcounter{%s}{%d}' % (enum, node['start'] - 1) + CR)
- if self.table:
- self.table.has_problematic = True
def depart_enumerated_list(self, node: Element) -> None:
self.body.append(r'\end{enumerate}' + CR)
@@ -1501,8 +1488,6 @@ def depart_list_item(self, node: Element) -> None:
def visit_definition_list(self, node: Element) -> None:
self.body.append(r'\begin{description}' + CR)
- if self.table:
- self.table.has_problematic = True
def depart_definition_list(self, node: Element) -> None:
self.body.append(r'\end{description}' + CR)
@@ -1542,8 +1527,6 @@ def depart_definition(self, node: Element) -> None:
def visit_field_list(self, node: Element) -> None:
self.body.append(r'\begin{quote}\begin{description}' + CR)
- if self.table:
- self.table.has_problematic = True
def depart_field_list(self, node: Element) -> None:
self.body.append(r'\end{description}\end{quote}' + CR)
@@ -1585,8 +1568,6 @@ def depart_paragraph(self, node: Element) -> None:
def visit_centered(self, node: Element) -> None:
self.body.append(CR + r'\begin{center}')
- if self.table:
- self.table.has_problematic = True
def depart_centered(self, node: Element) -> None:
self.body.append(CR + r'\end{center}')
@@ -1601,8 +1582,6 @@ def visit_hlist(self, node: Element) -> None:
r'\begin{itemize}\setlength{\itemsep}{0pt}'
r'\setlength{\parskip}{0pt}' + CR
)
- if self.table:
- self.table.has_problematic = True
def depart_hlist(self, node: Element) -> None:
self.compact_list -= 1
@@ -1798,8 +1777,6 @@ def depart_legend(self, node: Element) -> None:
def visit_admonition(self, node: Element) -> None:
self.body.append(CR + r'\begin{sphinxadmonition}{note}')
self.no_latex_floats += 1
- if self.table:
- self.table.has_problematic = True
def depart_admonition(self, node: Element) -> None:
self.body.append(r'\end{sphinxadmonition}' + CR)
@@ -1811,8 +1788,6 @@ def _visit_named_admonition(self, node: Element) -> None:
CR + r'\begin{sphinxadmonition}{%s}{%s:}' % (node.tagname, label)
)
self.no_latex_floats += 1
- if self.table:
- self.table.has_problematic = True
def _depart_named_admonition(self, node: Element) -> None:
self.body.append(r'\end{sphinxadmonition}' + CR)
@@ -2312,8 +2287,6 @@ def visit_line_block(self, node: Element) -> None:
self.body.append(r'\begin{DUlineblock}{\DUlineblockindent}' + CR)
else:
self.body.append(CR + r'\begin{DUlineblock}{0em}' + CR)
- if self.table:
- self.table.has_problematic = True
def depart_line_block(self, node: Element) -> None:
self.body.append(r'\end{DUlineblock}' + CR)
@@ -2329,8 +2302,6 @@ def visit_block_quote(self, node: Element) -> None:
done = 1
if not done:
self.body.append(r'\begin{quote}' + CR)
- if self.table:
- self.table.has_problematic = True
def depart_block_quote(self, node: Element) -> None:
done = 0
@@ -2370,8 +2341,6 @@ def depart_option_group(self, node: Element) -> None:
def visit_option_list(self, node: Element) -> None:
self.body.append(r'\begin{optionlist}{3cm}' + CR)
- if self.table:
- self.table.has_problematic = True
def depart_option_list(self, node: Element) -> None:
self.body.append(r'\end{optionlist}' + CR)
diff --git a/tests/roots/test-latex-table/complex.rst b/tests/roots/test-latex-table/complex.rst
index d648ff194c4..d35927a96e0 100644
--- a/tests/roots/test-latex-table/complex.rst
+++ b/tests/roots/test-latex-table/complex.rst
@@ -20,8 +20,8 @@ grid table
| cell5-1 |
+---------+---------+---------+
-grid table with tabularcolumns having no vline
-----------------------------------------------
+grid table with tabularcolumns
+------------------------------
.. tabularcolumns:: TTT
diff --git a/tests/roots/test-latex-table/expects/complex_spanning_cell.tex b/tests/roots/test-latex-table/expects/complex_spanning_cell.tex
index d2d61894251..645d84b30b9 100644
--- a/tests/roots/test-latex-table/expects/complex_spanning_cell.tex
+++ b/tests/roots/test-latex-table/expects/complex_spanning_cell.tex
@@ -23,45 +23,49 @@
\begin{varwidth}[t]{\sphinxcolwidth{1}{5}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
&\sphinxmultirow{3}{2}{%
\begin{varwidth}[t]{\sphinxcolwidth{1}{5}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
-&
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{5}}
\sphinxAtStartPar
cell1\sphinxhyphen{}3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
&\sphinxmultirow{3}{4}{%
\begin{varwidth}[t]{\sphinxcolwidth{1}{5}}
\sphinxAtStartPar
cell1\sphinxhyphen{}4
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
&\sphinxmultirow{2}{5}{%
\begin{varwidth}[t]{\sphinxcolwidth{1}{5}}
\sphinxAtStartPar
cell1\sphinxhyphen{}5
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
\\
\sphinxvlinecrossing{1}\sphinxcline{3-3}\sphinxvlinecrossing{4}\sphinxfixclines{5}\sphinxtablestrut{1}&\sphinxtablestrut{2}&\sphinxmultirow{2}{6}{%
\begin{varwidth}[t]{\sphinxcolwidth{1}{5}}
\sphinxAtStartPar
cell2\sphinxhyphen{}3
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
&\sphinxtablestrut{4}&\sphinxtablestrut{5}\\
-\sphinxvlinecrossing{1}\sphinxvlinecrossing{2}\sphinxvlinecrossing{3}\sphinxcline{5-5}\sphinxfixclines{5}\sphinxtablestrut{1}&\sphinxtablestrut{2}&\sphinxtablestrut{6}&\sphinxtablestrut{4}&
+\sphinxvlinecrossing{1}\sphinxvlinecrossing{2}\sphinxvlinecrossing{3}\sphinxcline{5-5}\sphinxfixclines{5}\sphinxtablestrut{1}&\sphinxtablestrut{2}&\sphinxtablestrut{6}&\sphinxtablestrut{4}&\begin{varwidth}[t]{\sphinxcolwidth{1}{5}}
\sphinxAtStartPar
cell3\sphinxhyphen{}5
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{tabulary}
diff --git a/tests/roots/test-latex-table/expects/gridtable.tex b/tests/roots/test-latex-table/expects/grid_table.tex
similarity index 55%
rename from tests/roots/test-latex-table/expects/gridtable.tex
rename to tests/roots/test-latex-table/expects/grid_table.tex
index 407abe7f2c8..e5b3a6bda31 100644
--- a/tests/roots/test-latex-table/expects/gridtable.tex
+++ b/tests/roots/test-latex-table/expects/grid_table.tex
@@ -6,41 +6,53 @@
\centering
\begin{tabulary}{\linewidth}[t]{|T|T|T|}
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
-\sphinxtableatstartofbodyhook
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
&\sphinxmultirow{2}{5}{%
\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
-&
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell1\sphinxhyphen{}3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxcline{1-1}\sphinxcline{3-3}\sphinxfixclines{3}\sphinxmultirow{2}{7}{%
\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
-&\sphinxtablestrut{5}&
+&\sphinxtablestrut{5}&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell2\sphinxhyphen{}3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxcline{2-3}\sphinxfixclines{3}\sphinxtablestrut{7}&\sphinxstartmulticolumn{2}%
\sphinxmultirow{2}{9}{%
@@ -50,24 +62,26 @@
\sphinxAtStartPar
cell3\sphinxhyphen{}2\sphinxhyphen{}par2
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
\sphinxstopmulticolumn
\\
-\sphinxcline{1-1}\sphinxfixclines{3}
+\sphinxcline{1-1}\sphinxfixclines{3}\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell4\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
&\multicolumn{2}{l|}{\sphinxtablestrut{9}}\\
\sphinxhline\sphinxstartmulticolumn{3}%
\begin{varwidth}[t]{\sphinxcolwidth{3}{3}}
\sphinxAtStartPar
cell5\sphinxhyphen{}1
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\sphinxstopmulticolumn
\\
\sphinxbottomrule
\end{tabulary}
\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/gridtable_with_tabularcolumn.tex b/tests/roots/test-latex-table/expects/grid_table_with_tabularcolumns.tex
similarity index 53%
rename from tests/roots/test-latex-table/expects/gridtable_with_tabularcolumn.tex
rename to tests/roots/test-latex-table/expects/grid_table_with_tabularcolumns.tex
index c77b99041ff..1fb5bf3f34e 100644
--- a/tests/roots/test-latex-table/expects/gridtable_with_tabularcolumn.tex
+++ b/tests/roots/test-latex-table/expects/grid_table_with_tabularcolumns.tex
@@ -1,4 +1,4 @@
-\label{\detokenize{complex:grid-table-with-tabularcolumns-having-no-vline}}
+\label{\detokenize{complex:grid-table-with-tabularcolumns}}
\begin{savenotes}\sphinxattablestart
\sphinxthistablewithglobalstyle
@@ -6,41 +6,53 @@
\centering
\begin{tabulary}{\linewidth}[t]{TTT}
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
-\sphinxtableatstartofbodyhook
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
&\sphinxmultirow{2}{5}{%
\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
-&
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell1\sphinxhyphen{}3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxcline{1-1}\sphinxcline{3-3}\sphinxfixclines{3}\sphinxmultirow{2}{7}{%
\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
-&\sphinxtablestrut{5}&
+&\sphinxtablestrut{5}&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell2\sphinxhyphen{}3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxcline{2-3}\sphinxfixclines{3}\sphinxtablestrut{7}&\sphinxstartmulticolumn{2}%
\sphinxmultirow{2}{9}{%
@@ -50,24 +62,26 @@
\sphinxAtStartPar
cell3\sphinxhyphen{}2\sphinxhyphen{}par2
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
\sphinxstopmulticolumn
\\
-\sphinxcline{1-1}\sphinxfixclines{3}
+\sphinxcline{1-1}\sphinxfixclines{3}\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell4\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
&\multicolumn{2}{l}{\sphinxtablestrut{9}}\\
\sphinxhline\sphinxstartmulticolumn{3}%
\begin{varwidth}[t]{\sphinxcolwidth{3}{3}}
\sphinxAtStartPar
cell5\sphinxhyphen{}1
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\sphinxstopmulticolumn
\\
\sphinxbottomrule
\end{tabulary}
\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/longtable.tex b/tests/roots/test-latex-table/expects/longtable.tex
index 1fe1022b7d5..518b2cc0196 100644
--- a/tests/roots/test-latex-table/expects/longtable.tex
+++ b/tests/roots/test-latex-table/expects/longtable.tex
@@ -10,12 +10,16 @@
\makeatother
\begin{longtable}{ll}
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
\endfirsthead
@@ -24,12 +28,16 @@
\makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
}\\
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
\endhead
@@ -42,29 +50,41 @@
\endlastfoot
\sphinxtableatstartofbodyhook
-
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{longtable}
\sphinxtableafterendhook
\sphinxatlongtableend
-\end{savenotes}
+\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/longtable_having_align.tex b/tests/roots/test-latex-table/expects/longtable_having_align_option.tex
similarity index 50%
rename from tests/roots/test-latex-table/expects/longtable_having_align.tex
rename to tests/roots/test-latex-table/expects/longtable_having_align_option.tex
index 4a4df1824e7..90f975bf21c 100644
--- a/tests/roots/test-latex-table/expects/longtable_having_align.tex
+++ b/tests/roots/test-latex-table/expects/longtable_having_align_option.tex
@@ -9,12 +9,16 @@
\makeatother
\begin{longtable}{|l|l|}
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
\endfirsthead
@@ -23,12 +27,16 @@
\makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
}\\
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
\endhead
@@ -41,29 +49,41 @@
\endlastfoot
\sphinxtableatstartofbodyhook
-
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{longtable}
\sphinxtableafterendhook
\sphinxatlongtableend
-\end{savenotes}
+\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/longtable_having_caption.tex b/tests/roots/test-latex-table/expects/longtable_having_caption.tex
index a1aa65d7a8b..17617ab5929 100644
--- a/tests/roots/test-latex-table/expects/longtable_having_caption.tex
+++ b/tests/roots/test-latex-table/expects/longtable_having_caption.tex
@@ -11,12 +11,16 @@
\sphinxthelongtablecaptionisattop
\caption{caption for longtable\strut}\label{\detokenize{longtable:id1}}\\*[\sphinxlongtablecapskipadjust]
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
\endfirsthead
@@ -25,12 +29,16 @@
\makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
}\\
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
\endhead
@@ -43,29 +51,41 @@
\endlastfoot
\sphinxtableatstartofbodyhook
-
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{longtable}
\sphinxtableafterendhook
\sphinxatlongtableend
-\end{savenotes}
+\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/longtable_having_formerly_problematic.tex b/tests/roots/test-latex-table/expects/longtable_having_formerly_problematic.tex
new file mode 100644
index 00000000000..2298fbc7a1d
--- /dev/null
+++ b/tests/roots/test-latex-table/expects/longtable_having_formerly_problematic.tex
@@ -0,0 +1,97 @@
+\label{\detokenize{longtable:longtable-having-formerly-problematic}}
+
+\begin{savenotes}
+\sphinxatlongtablestart
+\sphinxthistablewithglobalstyle
+\makeatletter
+ \LTleft \@totalleftmargin plus1fill
+ \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill
+\makeatother
+\begin{longtable}{|l|l|}
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\endfirsthead
+
+\multicolumn{2}{c}{\sphinxnorowcolor
+ \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
+}\\
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\endhead
+
+\sphinxbottomrule
+\multicolumn{2}{r}{\sphinxnorowcolor
+ \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}%
+}\\
+\endfoot
+
+\endlastfoot
+\sphinxtableatstartofbodyhook
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\begin{itemize}
+\item {}
+\sphinxAtStartPar
+item1
+
+\item {}
+\sphinxAtStartPar
+item2
+
+\end{itemize}
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxbottomrule
+\end{longtable}
+\sphinxtableafterendhook
+\sphinxatlongtableend
+\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/longtable_having_problematic_cell.tex b/tests/roots/test-latex-table/expects/longtable_having_problematic_cell.tex
deleted file mode 100644
index 240a7609384..00000000000
--- a/tests/roots/test-latex-table/expects/longtable_having_problematic_cell.tex
+++ /dev/null
@@ -1,76 +0,0 @@
-\label{\detokenize{longtable:longtable-having-problematic-cell}}
-
-\begin{savenotes}
-\sphinxatlongtablestart
-\sphinxthistablewithglobalstyle
-\makeatletter
- \LTleft \@totalleftmargin plus1fill
- \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill
-\makeatother
-\begin{longtable}{|*{2}{\X{1}{2}|}}
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\endfirsthead
-
-\multicolumn{2}{c}{\sphinxnorowcolor
- \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
-}\\
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\endhead
-
-\sphinxbottomrule
-\multicolumn{2}{r}{\sphinxnorowcolor
- \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}%
-}\\
-\endfoot
-
-\endlastfoot
-\sphinxtableatstartofbodyhook
-\begin{itemize}
-\item {}
-\sphinxAtStartPar
-item1
-
-\item {}
-\sphinxAtStartPar
-item2
-
-\end{itemize}
-&
-\sphinxAtStartPar
-cell1\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell2\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell2\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell3\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell3\sphinxhyphen{}2
-\\
-\sphinxbottomrule
-\end{longtable}
-\sphinxtableafterendhook
-\sphinxatlongtableend
-\end{savenotes}
diff --git a/tests/roots/test-latex-table/expects/longtable_having_stub_columns_and_formerly_problematic.tex b/tests/roots/test-latex-table/expects/longtable_having_stub_columns_and_formerly_problematic.tex
new file mode 100644
index 00000000000..8a95833e326
--- /dev/null
+++ b/tests/roots/test-latex-table/expects/longtable_having_stub_columns_and_formerly_problematic.tex
@@ -0,0 +1,106 @@
+\label{\detokenize{longtable:longtable-having-stub-columns-and-formerly-problematic}}
+
+\begin{savenotes}
+\sphinxatlongtablestart
+\sphinxthistablewithglobalstyle
+\makeatletter
+ \LTleft \@totalleftmargin plus1fill
+ \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill
+\makeatother
+\begin{longtable}{|l|l|l|}
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\endfirsthead
+
+\multicolumn{3}{c}{\sphinxnorowcolor
+ \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
+}\\
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\endhead
+
+\sphinxbottomrule
+\multicolumn{3}{r}{\sphinxnorowcolor
+ \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}%
+}\\
+\endfoot
+
+\endlastfoot
+\sphinxtableatstartofbodyhook
+\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \begin{itemize}
+\item {}
+\sphinxAtStartPar
+instub1\sphinxhyphen{}1a
+
+\item {}
+\sphinxAtStartPar
+instub1\sphinxhyphen{}1b
+
+\end{itemize}
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+instub1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxAtStartPar
+notinstub1\sphinxhyphen{}3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+cell2\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxbottomrule
+\end{longtable}
+\sphinxtableafterendhook
+\sphinxatlongtableend
+\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/longtable_having_stub_columns_and_problematic_cell.tex b/tests/roots/test-latex-table/expects/longtable_having_stub_columns_and_problematic_cell.tex
deleted file mode 100644
index 897830b1c02..00000000000
--- a/tests/roots/test-latex-table/expects/longtable_having_stub_columns_and_problematic_cell.tex
+++ /dev/null
@@ -1,81 +0,0 @@
-\label{\detokenize{longtable:longtable-having-both-stub-columns-and-problematic-cell}}
-
-\begin{savenotes}
-\sphinxatlongtablestart
-\sphinxthistablewithglobalstyle
-\makeatletter
- \LTleft \@totalleftmargin plus1fill
- \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill
-\makeatother
-\begin{longtable}{|*{3}{\X{1}{3}|}}
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header3
-\\
-\sphinxmidrule
-\endfirsthead
-
-\multicolumn{3}{c}{\sphinxnorowcolor
- \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
-}\\
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header3
-\\
-\sphinxmidrule
-\endhead
-
-\sphinxbottomrule
-\multicolumn{3}{r}{\sphinxnorowcolor
- \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}%
-}\\
-\endfoot
-
-\endlastfoot
-\sphinxtableatstartofbodyhook
-\sphinxstyletheadfamily \begin{itemize}
-\item {}
-\sphinxAtStartPar
-instub1\sphinxhyphen{}1a
-
-\item {}
-\sphinxAtStartPar
-instub1\sphinxhyphen{}1b
-
-\end{itemize}
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-instub1\sphinxhyphen{}2
-&
-\sphinxAtStartPar
-notinstub1\sphinxhyphen{}3
-\\
-\sphinxhline\sphinxstyletheadfamily
-\sphinxAtStartPar
-cell2\sphinxhyphen{}1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-cell2\sphinxhyphen{}2
-&
-\sphinxAtStartPar
-cell2\sphinxhyphen{}3
-\\
-\sphinxbottomrule
-\end{longtable}
-\sphinxtableafterendhook
-\sphinxatlongtableend
-\end{savenotes}
diff --git a/tests/roots/test-latex-table/expects/longtable_having_verbatim.tex b/tests/roots/test-latex-table/expects/longtable_having_verbatim.tex
index b9f75129c68..d7a86633d75 100644
--- a/tests/roots/test-latex-table/expects/longtable_having_verbatim.tex
+++ b/tests/roots/test-latex-table/expects/longtable_having_verbatim.tex
@@ -9,12 +9,16 @@
\makeatother
\begin{longtable}{|*{2}{\X{1}{2}|}}
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
\endfirsthead
@@ -23,12 +27,16 @@
\makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
}\\
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
\endhead
@@ -41,30 +49,43 @@
\endlastfoot
\sphinxtableatstartofbodyhook
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\begin{sphinxVerbatimintable}[commandchars=\\\{\}]
\PYG{n}{hello} \PYG{n}{world}
\end{sphinxVerbatimintable}
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{longtable}
\sphinxtableafterendhook
\sphinxatlongtableend
-\end{savenotes}
+\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/longtable_having_widths_and_formerly_problematic.tex b/tests/roots/test-latex-table/expects/longtable_having_widths_and_formerly_problematic.tex
new file mode 100644
index 00000000000..fb9f39f5f30
--- /dev/null
+++ b/tests/roots/test-latex-table/expects/longtable_having_widths_and_formerly_problematic.tex
@@ -0,0 +1,97 @@
+\label{\detokenize{longtable:longtable-having-widths-and-formerly-problematic}}
+
+\begin{savenotes}
+\sphinxatlongtablestart
+\sphinxthistablewithglobalstyle
+\makeatletter
+ \LTleft \@totalleftmargin plus1fill
+ \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill
+\makeatother
+\begin{longtable}{|\X{30}{100}|\X{70}{100}|}
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\endfirsthead
+
+\multicolumn{2}{c}{\sphinxnorowcolor
+ \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
+}\\
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\endhead
+
+\sphinxbottomrule
+\multicolumn{2}{r}{\sphinxnorowcolor
+ \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}%
+}\\
+\endfoot
+
+\endlastfoot
+\sphinxtableatstartofbodyhook
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\begin{itemize}
+\item {}
+\sphinxAtStartPar
+item1
+
+\item {}
+\sphinxAtStartPar
+item2
+
+\end{itemize}
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxbottomrule
+\end{longtable}
+\sphinxtableafterendhook
+\sphinxatlongtableend
+\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/longtable_having_widths_and_problematic_cell.tex b/tests/roots/test-latex-table/expects/longtable_having_widths_and_problematic_cell.tex
deleted file mode 100644
index b4758caa08e..00000000000
--- a/tests/roots/test-latex-table/expects/longtable_having_widths_and_problematic_cell.tex
+++ /dev/null
@@ -1,76 +0,0 @@
-\label{\detokenize{longtable:longtable-having-both-widths-and-problematic-cell}}
-
-\begin{savenotes}
-\sphinxatlongtablestart
-\sphinxthistablewithglobalstyle
-\makeatletter
- \LTleft \@totalleftmargin plus1fill
- \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill
-\makeatother
-\begin{longtable}{|\X{30}{100}|\X{70}{100}|}
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\endfirsthead
-
-\multicolumn{2}{c}{\sphinxnorowcolor
- \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
-}\\
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\endhead
-
-\sphinxbottomrule
-\multicolumn{2}{r}{\sphinxnorowcolor
- \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}%
-}\\
-\endfoot
-
-\endlastfoot
-\sphinxtableatstartofbodyhook
-\begin{itemize}
-\item {}
-\sphinxAtStartPar
-item1
-
-\item {}
-\sphinxAtStartPar
-item2
-
-\end{itemize}
-&
-\sphinxAtStartPar
-cell1\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell2\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell2\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell3\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell3\sphinxhyphen{}2
-\\
-\sphinxbottomrule
-\end{longtable}
-\sphinxtableafterendhook
-\sphinxatlongtableend
-\end{savenotes}
diff --git a/tests/roots/test-latex-table/expects/longtable_having_widths.tex b/tests/roots/test-latex-table/expects/longtable_having_widths_option.tex
similarity index 58%
rename from tests/roots/test-latex-table/expects/longtable_having_widths.tex
rename to tests/roots/test-latex-table/expects/longtable_having_widths_option.tex
index bcad23be4f0..d09b56d6900 100644
--- a/tests/roots/test-latex-table/expects/longtable_having_widths.tex
+++ b/tests/roots/test-latex-table/expects/longtable_having_widths_option.tex
@@ -10,12 +10,16 @@
\begin{longtable}{|\X{30}{100}|\X{70}{100}|}
\noalign{\phantomsection\label{\detokenize{longtable:namedlongtable}}\label{\detokenize{longtable:mylongtable}}}%
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
\endfirsthead
@@ -24,12 +28,16 @@
\makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
}\\
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
\endhead
@@ -42,26 +50,38 @@
\endlastfoot
\sphinxtableatstartofbodyhook
-
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{longtable}
@@ -70,4 +90,4 @@
\end{savenotes}
\sphinxAtStartPar
-See {\hyperref[\detokenize{longtable:mylongtable}]{\sphinxcrossref{mylongtable}}}, same as {\hyperref[\detokenize{longtable:namedlongtable}]{\sphinxcrossref{\DUrole{std}{\DUrole{std-ref}{this one}}}}}.
+See {\hyperref[\detokenize{longtable:mylongtable}]{\sphinxcrossref{mylongtable}}}, same as {\hyperref[\detokenize{longtable:namedlongtable}]{\sphinxcrossref{\DUrole{std}{\DUrole{std-ref}{this one}}}}}.
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/longtable_with_tabularcolumn.tex b/tests/roots/test-latex-table/expects/longtable_with_tabularcolumn.tex
deleted file mode 100644
index 4c380fed7a4..00000000000
--- a/tests/roots/test-latex-table/expects/longtable_with_tabularcolumn.tex
+++ /dev/null
@@ -1,70 +0,0 @@
-\label{\detokenize{longtable:longtable-with-tabularcolumn}}
-
-\begin{savenotes}
-\sphinxatlongtablestart
-\sphinxthistablewithglobalstyle
-\sphinxthistablewithvlinesstyle
-\makeatletter
- \LTleft \@totalleftmargin plus1fill
- \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill
-\makeatother
-\begin{longtable}{|c|c|}
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\endfirsthead
-
-\multicolumn{2}{c}{\sphinxnorowcolor
- \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
-}\\
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\endhead
-
-\sphinxbottomrule
-\multicolumn{2}{r}{\sphinxnorowcolor
- \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}%
-}\\
-\endfoot
-
-\endlastfoot
-\sphinxtableatstartofbodyhook
-
-\sphinxAtStartPar
-cell1\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell1\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell2\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell2\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell3\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell3\sphinxhyphen{}2
-\\
-\sphinxbottomrule
-\end{longtable}
-\sphinxtableafterendhook
-\sphinxatlongtableend
-\end{savenotes}
diff --git a/tests/roots/test-latex-table/expects/longtable_with_tabularcolumns.tex b/tests/roots/test-latex-table/expects/longtable_with_tabularcolumns.tex
new file mode 100644
index 00000000000..764dfd8ff9b
--- /dev/null
+++ b/tests/roots/test-latex-table/expects/longtable_with_tabularcolumns.tex
@@ -0,0 +1,90 @@
+\label{\detokenize{longtable:longtable-with-tabularcolumns}}
+
+\begin{savenotes}
+\sphinxatlongtablestart
+\sphinxthistablewithglobalstyle
+\sphinxthistablewithvlinesstyle
+\makeatletter
+ \LTleft \@totalleftmargin plus1fill
+ \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill
+\makeatother
+\begin{longtable}{|c|c|}
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\endfirsthead
+
+\multicolumn{2}{c}{\sphinxnorowcolor
+ \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
+}\\
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\endhead
+
+\sphinxbottomrule
+\multicolumn{2}{r}{\sphinxnorowcolor
+ \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}%
+}\\
+\endfoot
+
+\endlastfoot
+\sphinxtableatstartofbodyhook
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxbottomrule
+\end{longtable}
+\sphinxtableafterendhook
+\sphinxatlongtableend
+\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/simple_table.tex b/tests/roots/test-latex-table/expects/simple_table.tex
index 7bd85c737b2..8a17635fe64 100644
--- a/tests/roots/test-latex-table/expects/simple_table.tex
+++ b/tests/roots/test-latex-table/expects/simple_table.tex
@@ -5,36 +5,52 @@
\centering
\begin{tabulary}{\linewidth}[t]{|T|T|}
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
-\sphinxtableatstartofbodyhook
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{tabulary}
\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/table_having_caption.tex b/tests/roots/test-latex-table/expects/table_having_caption.tex
index f2ce5536021..450d370e1ba 100644
--- a/tests/roots/test-latex-table/expects/table_having_caption.tex
+++ b/tests/roots/test-latex-table/expects/table_having_caption.tex
@@ -9,36 +9,52 @@
\sphinxaftertopcaption
\begin{tabulary}{\linewidth}[t]{|T|T|}
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
-\sphinxtableatstartofbodyhook
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{tabulary}
\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/table_having_formerly_problematic.tex b/tests/roots/test-latex-table/expects/table_having_formerly_problematic.tex
new file mode 100644
index 00000000000..fb882fea57f
--- /dev/null
+++ b/tests/roots/test-latex-table/expects/table_having_formerly_problematic.tex
@@ -0,0 +1,64 @@
+\label{\detokenize{tabular:table-having-formerly-problematic}}
+
+\begin{savenotes}\sphinxattablestart
+\sphinxthistablewithglobalstyle
+\centering
+\begin{tabulary}{\linewidth}[t]{|T|T|}
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\begin{itemize}
+\item {}
+\sphinxAtStartPar
+item1
+
+\item {}
+\sphinxAtStartPar
+item2
+
+\end{itemize}
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxbottomrule
+\end{tabulary}
+\sphinxtableafterendhook\par
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/table_having_problematic_cell.tex b/tests/roots/test-latex-table/expects/table_having_problematic_cell.tex
deleted file mode 100644
index 7d7ad4b715b..00000000000
--- a/tests/roots/test-latex-table/expects/table_having_problematic_cell.tex
+++ /dev/null
@@ -1,47 +0,0 @@
-\label{\detokenize{tabular:table-having-problematic-cell}}
-
-\begin{savenotes}\sphinxattablestart
-\sphinxthistablewithglobalstyle
-\centering
-\begin{tabular}[t]{|*{2}{\X{1}{2}|}}
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\sphinxtableatstartofbodyhook\begin{itemize}
-\item {}
-\sphinxAtStartPar
-item1
-
-\item {}
-\sphinxAtStartPar
-item2
-
-\end{itemize}
-&
-\sphinxAtStartPar
-cell1\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell2\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell2\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell3\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell3\sphinxhyphen{}2
-\\
-\sphinxbottomrule
-\end{tabular}
-\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
diff --git a/tests/roots/test-latex-table/expects/table_having_stub_columns_and_formerly_problematic.tex b/tests/roots/test-latex-table/expects/table_having_stub_columns_and_formerly_problematic.tex
new file mode 100644
index 00000000000..548008a2379
--- /dev/null
+++ b/tests/roots/test-latex-table/expects/table_having_stub_columns_and_formerly_problematic.tex
@@ -0,0 +1,68 @@
+\label{\detokenize{tabular:table-having-stub-columns-and-formerly-problematic}}
+
+\begin{savenotes}\sphinxattablestart
+\sphinxthistablewithglobalstyle
+\centering
+\begin{tabulary}{\linewidth}[t]{|T|T|T|}
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \begin{itemize}
+\item {}
+\sphinxAtStartPar
+instub1\sphinxhyphen{}1a
+
+\item {}
+\sphinxAtStartPar
+instub1\sphinxhyphen{}1b
+
+\end{itemize}
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+instub1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxAtStartPar
+notinstub1\sphinxhyphen{}3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+cell2\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxbottomrule
+\end{tabulary}
+\sphinxtableafterendhook\par
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/table_having_stub_columns_and_problematic_cell.tex b/tests/roots/test-latex-table/expects/table_having_stub_columns_and_problematic_cell.tex
deleted file mode 100644
index fbd797a1bd3..00000000000
--- a/tests/roots/test-latex-table/expects/table_having_stub_columns_and_problematic_cell.tex
+++ /dev/null
@@ -1,49 +0,0 @@
-\label{\detokenize{tabular:table-having-both-stub-columns-and-problematic-cell}}
-
-\begin{savenotes}\sphinxattablestart
-\sphinxthistablewithglobalstyle
-\centering
-\begin{tabular}[t]{|*{3}{\X{1}{3}|}}
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header3
-\\
-\sphinxmidrule
-\sphinxtableatstartofbodyhook\sphinxstyletheadfamily \begin{itemize}
-\item {}
-\sphinxAtStartPar
-instub1\sphinxhyphen{}1a
-
-\item {}
-\sphinxAtStartPar
-instub1\sphinxhyphen{}1b
-
-\end{itemize}
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-instub1\sphinxhyphen{}2
-&
-\sphinxAtStartPar
-notinstub1\sphinxhyphen{}3
-\\
-\sphinxhline\sphinxstyletheadfamily
-\sphinxAtStartPar
-cell2\sphinxhyphen{}1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-cell2\sphinxhyphen{}2
-&
-\sphinxAtStartPar
-cell2\sphinxhyphen{}3
-\\
-\sphinxbottomrule
-\end{tabular}
-\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
diff --git a/tests/roots/test-latex-table/expects/table_having_threeparagraphs_cell_in_first_col.tex b/tests/roots/test-latex-table/expects/table_having_three_paragraphs_cell_in_first_col.tex
similarity index 53%
rename from tests/roots/test-latex-table/expects/table_having_threeparagraphs_cell_in_first_col.tex
rename to tests/roots/test-latex-table/expects/table_having_three_paragraphs_cell_in_first_col.tex
index 9acd9a86d46..ad1d9ee79b7 100644
--- a/tests/roots/test-latex-table/expects/table_having_threeparagraphs_cell_in_first_col.tex
+++ b/tests/roots/test-latex-table/expects/table_having_three_paragraphs_cell_in_first_col.tex
@@ -1,16 +1,18 @@
-\label{\detokenize{tabular:table-with-cell-in-first-column-having-three-paragraphs}}
+\label{\detokenize{tabular:table-having-three-paragraphs-cell-in-first-col}}
\begin{savenotes}\sphinxattablestart
\sphinxthistablewithglobalstyle
\centering
\begin{tabulary}{\linewidth}[t]{|T|}
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{1}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
-\sphinxtableatstartofbodyhook
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{1}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1\sphinxhyphen{}par1
@@ -19,8 +21,10 @@
\sphinxAtStartPar
cell1\sphinxhyphen{}1\sphinxhyphen{}par3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{tabulary}
\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/table_having_verbatim.tex b/tests/roots/test-latex-table/expects/table_having_verbatim.tex
index a002de58618..adc6baf6ceb 100644
--- a/tests/roots/test-latex-table/expects/table_having_verbatim.tex
+++ b/tests/roots/test-latex-table/expects/table_having_verbatim.tex
@@ -5,37 +5,54 @@
\centering
\begin{tabular}[t]{|*{2}{\X{1}{2}|}}
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
-\sphinxtableatstartofbodyhook
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+
\begin{sphinxVerbatimintable}[commandchars=\\\{\}]
\PYG{n}{hello} \PYG{n}{world}
\end{sphinxVerbatimintable}
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{tabular}
\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/table_having_widths_and_formerly_problematic.tex b/tests/roots/test-latex-table/expects/table_having_widths_and_formerly_problematic.tex
new file mode 100644
index 00000000000..c103ab0a8aa
--- /dev/null
+++ b/tests/roots/test-latex-table/expects/table_having_widths_and_formerly_problematic.tex
@@ -0,0 +1,64 @@
+\label{\detokenize{tabular:table-having-widths-and-formerly-problematic}}
+
+\begin{savenotes}\sphinxattablestart
+\sphinxthistablewithglobalstyle
+\centering
+\begin{tabular}[t]{|\X{30}{100}|\X{70}{100}|}
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\begin{itemize}
+\item {}
+\sphinxAtStartPar
+item1
+
+\item {}
+\sphinxAtStartPar
+item2
+
+\end{itemize}
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxbottomrule
+\end{tabular}
+\sphinxtableafterendhook\par
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/table_having_widths_and_problematic_cell.tex b/tests/roots/test-latex-table/expects/table_having_widths_and_problematic_cell.tex
deleted file mode 100644
index 1baf92c1ae6..00000000000
--- a/tests/roots/test-latex-table/expects/table_having_widths_and_problematic_cell.tex
+++ /dev/null
@@ -1,47 +0,0 @@
-\label{\detokenize{tabular:table-having-both-widths-and-problematic-cell}}
-
-\begin{savenotes}\sphinxattablestart
-\sphinxthistablewithglobalstyle
-\centering
-\begin{tabular}[t]{|\X{30}{100}|\X{70}{100}|}
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\sphinxtableatstartofbodyhook\begin{itemize}
-\item {}
-\sphinxAtStartPar
-item1
-
-\item {}
-\sphinxAtStartPar
-item2
-
-\end{itemize}
-&
-\sphinxAtStartPar
-cell1\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell2\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell2\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell3\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell3\sphinxhyphen{}2
-\\
-\sphinxbottomrule
-\end{tabular}
-\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
diff --git a/tests/roots/test-latex-table/expects/table_having_widths.tex b/tests/roots/test-latex-table/expects/table_having_widths_option.tex
similarity index 50%
rename from tests/roots/test-latex-table/expects/table_having_widths.tex
rename to tests/roots/test-latex-table/expects/table_having_widths_option.tex
index e9863d277f6..668f4c63206 100644
--- a/tests/roots/test-latex-table/expects/table_having_widths.tex
+++ b/tests/roots/test-latex-table/expects/table_having_widths_option.tex
@@ -8,34 +8,50 @@
\phantomsection\label{\detokenize{tabular:namedtabular}}\label{\detokenize{tabular:mytabular}}\nobreak
\begin{tabular}[t]{\X{30}{100}\X{70}{100}}
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
-\sphinxtableatstartofbodyhook
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{tabular}
@@ -43,4 +59,4 @@
\sphinxattableend\end{savenotes}
\sphinxAtStartPar
-See {\hyperref[\detokenize{tabular:mytabular}]{\sphinxcrossref{\DUrole{std}{\DUrole{std-ref}{this}}}}}, same as {\hyperref[\detokenize{tabular:namedtabular}]{\sphinxcrossref{namedtabular}}}.
+See {\hyperref[\detokenize{tabular:mytabular}]{\sphinxcrossref{\DUrole{std}{\DUrole{std-ref}{this}}}}}, same as {\hyperref[\detokenize{tabular:namedtabular}]{\sphinxcrossref{namedtabular}}}.
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/table_with_tabularcolumns.tex b/tests/roots/test-latex-table/expects/table_with_tabularcolumns.tex
new file mode 100644
index 00000000000..64edba98e89
--- /dev/null
+++ b/tests/roots/test-latex-table/expects/table_with_tabularcolumns.tex
@@ -0,0 +1,57 @@
+\label{\detokenize{tabular:table-with-tabularcolumns}}
+
+\begin{savenotes}\sphinxattablestart
+\sphinxthistablewithglobalstyle
+\sphinxthistablewithnovlinesstyle
+\centering
+\begin{tabular}[t]{cc}
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxbottomrule
+\end{tabular}
+\sphinxtableafterendhook\par
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/tabular_having_align_option.tex b/tests/roots/test-latex-table/expects/tabular_having_align_option.tex
new file mode 100644
index 00000000000..ba61c6bde2c
--- /dev/null
+++ b/tests/roots/test-latex-table/expects/tabular_having_align_option.tex
@@ -0,0 +1,56 @@
+\label{\detokenize{tabular:tabular-having-align-option}}
+
+\begin{savenotes}\sphinxattablestart
+\sphinxthistablewithglobalstyle
+\raggedright
+\begin{tabular}[t]{|\X{30}{100}|\X{70}{100}|}
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxbottomrule
+\end{tabular}
+\sphinxtableafterendhook\par
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/tabular_having_widths.tex b/tests/roots/test-latex-table/expects/tabular_having_widths.tex
deleted file mode 100644
index 15321d693cf..00000000000
--- a/tests/roots/test-latex-table/expects/tabular_having_widths.tex
+++ /dev/null
@@ -1,40 +0,0 @@
-\label{\detokenize{tabular:table-having-align-option-tabular}}
-
-\begin{savenotes}\sphinxattablestart
-\sphinxthistablewithglobalstyle
-\raggedright
-\begin{tabular}[t]{|\X{30}{100}|\X{70}{100}|}
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\sphinxtableatstartofbodyhook
-\sphinxAtStartPar
-cell1\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell1\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell2\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell2\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell3\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell3\sphinxhyphen{}2
-\\
-\sphinxbottomrule
-\end{tabular}
-\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
diff --git a/tests/roots/test-latex-table/expects/tabularcolumn.tex b/tests/roots/test-latex-table/expects/tabularcolumn.tex
deleted file mode 100644
index c1f88421f1d..00000000000
--- a/tests/roots/test-latex-table/expects/tabularcolumn.tex
+++ /dev/null
@@ -1,41 +0,0 @@
-\label{\detokenize{tabular:table-with-tabularcolumn}}
-
-\begin{savenotes}\sphinxattablestart
-\sphinxthistablewithglobalstyle
-\sphinxthistablewithnovlinesstyle
-\centering
-\begin{tabular}[t]{cc}
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\sphinxtableatstartofbodyhook
-\sphinxAtStartPar
-cell1\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell1\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell2\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell2\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell3\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell3\sphinxhyphen{}2
-\\
-\sphinxbottomrule
-\end{tabular}
-\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
diff --git a/tests/roots/test-latex-table/expects/tabulary_having_align_option.tex b/tests/roots/test-latex-table/expects/tabulary_having_align_option.tex
new file mode 100644
index 00000000000..2d62b0e3ff1
--- /dev/null
+++ b/tests/roots/test-latex-table/expects/tabulary_having_align_option.tex
@@ -0,0 +1,56 @@
+\label{\detokenize{tabular:tabulary-having-align-option}}
+
+\begin{savenotes}\sphinxattablestart
+\sphinxthistablewithglobalstyle
+\raggedleft
+\begin{tabulary}{\linewidth}[t]{|T|T|}
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxbottomrule
+\end{tabulary}
+\sphinxtableafterendhook\par
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/tabulary_having_widths.tex b/tests/roots/test-latex-table/expects/tabulary_having_widths.tex
deleted file mode 100644
index 24634163010..00000000000
--- a/tests/roots/test-latex-table/expects/tabulary_having_widths.tex
+++ /dev/null
@@ -1,40 +0,0 @@
-\label{\detokenize{tabular:table-having-align-option-tabulary}}
-
-\begin{savenotes}\sphinxattablestart
-\sphinxthistablewithglobalstyle
-\raggedleft
-\begin{tabulary}{\linewidth}[t]{|T|T|}
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\sphinxtableatstartofbodyhook
-\sphinxAtStartPar
-cell1\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell1\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell2\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell2\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell3\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell3\sphinxhyphen{}2
-\\
-\sphinxbottomrule
-\end{tabulary}
-\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
diff --git a/tests/roots/test-latex-table/longtable.rst b/tests/roots/test-latex-table/longtable.rst
index da6fa5c5cec..89a37b8ed8d 100644
--- a/tests/roots/test-latex-table/longtable.rst
+++ b/tests/roots/test-latex-table/longtable.rst
@@ -15,8 +15,8 @@ longtable
cell3-1 cell3-2
======= =======
-longtable having :widths: option
---------------------------------
+longtable having widths option
+------------------------------
.. _mylongtable:
@@ -35,8 +35,8 @@ longtable having :widths: option
See mylongtable_, same as :ref:`this one <namedlongtable>`.
-longtable having :align: option
--------------------------------
+longtable having align option
+-----------------------------
.. table::
:align: right
@@ -50,8 +50,8 @@ longtable having :align: option
cell3-1 cell3-2
======= =======
-longtable with tabularcolumn
-----------------------------
+longtable with tabularcolumns
+-----------------------------
.. tabularcolumns:: |c|c|
@@ -101,13 +101,12 @@ longtable having verbatim
* - cell3-1
- cell3-2
-longtable having both :widths: and problematic cell
----------------------------------------------------
+longtable having formerly problematic
+-------------------------------------
.. list-table::
:class: longtable
:header-rows: 1
- :widths: 30,70
* - header1
- header2
@@ -119,12 +118,13 @@ longtable having both :widths: and problematic cell
* - cell3-1
- cell3-2
-longtable having problematic cell
----------------------------------
+longtable having widths and formerly problematic
+------------------------------------------------
.. list-table::
:class: longtable
:header-rows: 1
+ :widths: 30,70
* - header1
- header2
@@ -136,8 +136,8 @@ longtable having problematic cell
* - cell3-1
- cell3-2
-longtable having both stub columns and problematic cell
--------------------------------------------------------
+longtable having stub columns and formerly problematic
+------------------------------------------------------
.. list-table::
:class: longtable
diff --git a/tests/roots/test-latex-table/tabular.rst b/tests/roots/test-latex-table/tabular.rst
index 15db823a05b..b5011539795 100644
--- a/tests/roots/test-latex-table/tabular.rst
+++ b/tests/roots/test-latex-table/tabular.rst
@@ -12,8 +12,8 @@ cell2-1 cell2-2
cell3-1 cell3-2
======= =======
-table having :widths: option
-----------------------------
+table having widths option
+--------------------------
.. _mytabular:
@@ -32,8 +32,8 @@ table having :widths: option
See :ref:`this <mytabular>`, same as namedtabular_.
-table having :align: option (tabulary)
---------------------------------------
+tabulary having align option
+----------------------------
.. table::
:align: right
@@ -46,8 +46,8 @@ table having :align: option (tabulary)
cell3-1 cell3-2
======= =======
-table having :align: option (tabular)
--------------------------------------
+tabular having align option
+---------------------------
.. table::
:align: left
@@ -61,8 +61,8 @@ table having :align: option (tabular)
cell3-1 cell3-2
======= =======
-table with tabularcolumn
-------------------------
+table with tabularcolumns
+-------------------------
.. tabularcolumns:: cc
@@ -74,8 +74,8 @@ cell2-1 cell2-2
cell3-1 cell3-2
======= =======
-table with cell in first column having three paragraphs
--------------------------------------------------------
+table having three paragraphs cell in first col
+-----------------------------------------------
+--------------+
| header1 |
@@ -121,12 +121,11 @@ table having verbatim
* - cell3-1
- cell3-2
-table having both :widths: and problematic cell
------------------------------------------------
+table having formerly problematic
+---------------------------------
.. list-table::
:header-rows: 1
- :widths: 30,70
* - header1
- header2
@@ -138,11 +137,12 @@ table having both :widths: and problematic cell
* - cell3-1
- cell3-2
-table having problematic cell
------------------------------
+table having widths and formerly problematic
+--------------------------------------------
.. list-table::
:header-rows: 1
+ :widths: 30,70
* - header1
- header2
@@ -154,8 +154,8 @@ table having problematic cell
* - cell3-1
- cell3-2
-table having both stub columns and problematic cell
----------------------------------------------------
+table having stub columns and formerly problematic
+--------------------------------------------------
.. list-table::
:header-rows: 1
diff --git a/tests/test_builders/test_build_latex.py b/tests/test_builders/test_build_latex.py
index 16f3437c154..007cddef3ef 100644
--- a/tests/test_builders/test_build_latex.py
+++ b/tests/test_builders/test_build_latex.py
@@ -956,13 +956,20 @@ def test_footnote(app: SphinxTestApp) -> None:
'footnote in table caption\n%\n\\end{footnotetext}\\ignorespaces %\n'
'\\begin{footnotetext}[5]\\sphinxAtStartFootnote\n'
'footnote in table header\n%\n\\end{footnotetext}\\ignorespaces '
+ '\\begin{varwidth}[t]{\\sphinxcolwidth{1}{2}}'
'\n\\sphinxAtStartPar\n'
- 'VIDIOC\\_CROPCAP\n&\n\\sphinxAtStartPar\n'
+ 'VIDIOC\\_CROPCAP\n'
+ '\\sphinxbeforeendvarwidth\n'
+ '\\end{varwidth}%\n'
) in result
assert (
+ '&\\begin{varwidth}[t]{\\sphinxcolwidth{1}{2}}\n'
+ '\\sphinxAtStartPar\n'
'Information about VIDIOC\\_CROPCAP %\n'
'\\begin{footnote}[6]\\sphinxAtStartFootnote\n'
- 'footnote in table not in header\n%\n\\end{footnote}\n\\\\\n'
+ 'footnote in table not in header\n%\n\\end{footnote}\n'
+ '\\sphinxbeforeendvarwidth\n'
+ '\\end{varwidth}%\n\\\\\n'
'\\sphinxbottomrule\n\\end{tabulary}\n'
'\\sphinxtableafterendhook\\par\n\\sphinxattableend\\end{savenotes}\n'
) in result
@@ -1020,11 +1027,15 @@ def test_reference_in_caption_and_codeblock_in_footnote(app: SphinxTestApp) -> N
'{I am in a footnote}}}}}'
) in result
assert (
- '&\n\\sphinxAtStartPar\nThis is one more footnote with some code in it %\n'
+ '&\\begin{varwidth}[t]{\\sphinxcolwidth{1}{2}}\n'
+ '\\sphinxAtStartPar\nThis is one more footnote with some code in it %\n'
'\\begin{footnote}[12]\\sphinxAtStartFootnote\n'
'Third footnote in longtable\n'
) in result
- assert '\\end{sphinxVerbatim}\n%\n\\end{footnote}.\n' in result
+ assert (
+ '\\end{sphinxVerbatim}\n%\n\\end{footnote}.\n'
+ '\\sphinxbeforeendvarwidth\n\\end{varwidth}%\n\\\\'
+ ) in result
assert '\\begin{sphinxVerbatim}[commandchars=\\\\\\{\\}]' in result
@@ -1577,60 +1588,22 @@ def get_expected(name):
.strip()
)
- # simple_table
- actual = tables['simple table']
- expected = get_expected('simple_table')
- assert actual == expected
-
- # table having :widths: option
- actual = tables['table having :widths: option']
- expected = get_expected('table_having_widths')
- assert actual == expected
-
- # table having :align: option (tabulary)
- actual = tables['table having :align: option (tabulary)']
- expected = get_expected('tabulary_having_widths')
- assert actual == expected
-
- # table having :align: option (tabular)
- actual = tables['table having :align: option (tabular)']
- expected = get_expected('tabular_having_widths')
- assert actual == expected
-
- # table with tabularcolumn
- actual = tables['table with tabularcolumn']
- expected = get_expected('tabularcolumn')
- assert actual == expected
-
- # table with cell in first column having three paragraphs
- actual = tables['table with cell in first column having three paragraphs']
- expected = get_expected('table_having_threeparagraphs_cell_in_first_col')
- assert actual == expected
-
- # table having caption
- actual = tables['table having caption']
- expected = get_expected('table_having_caption')
- assert actual == expected
-
- # table having verbatim
- actual = tables['table having verbatim']
- expected = get_expected('table_having_verbatim')
- assert actual == expected
-
- # table having problematic cell
- actual = tables['table having problematic cell']
- expected = get_expected('table_having_problematic_cell')
- assert actual == expected
-
- # table having both :widths: and problematic cell
- actual = tables['table having both :widths: and problematic cell']
- expected = get_expected('table_having_widths_and_problematic_cell')
- assert actual == expected
-
- # table having both stub columns and problematic cell
- actual = tables['table having both stub columns and problematic cell']
- expected = get_expected('table_having_stub_columns_and_problematic_cell')
- assert actual == expected
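+ # Each section title maps to an expected-output .tex file (spaces become underscores).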
+ for sectname in (
+ 'simple table',
+ 'table having widths option',
+ 'tabulary having align option',
+ 'tabular having align option',
+ 'table with tabularcolumns',
+ 'table having three paragraphs cell in first col',
+ 'table having caption',
+ 'table having verbatim',
+ 'table having formerly problematic',
+ 'table having widths and formerly problematic',
+ 'table having stub columns and formerly problematic',
+ ):
+ actual = tables[sectname]
+ expected = get_expected(sectname.replace(' ', '_'))
+ assert actual == expected
@pytest.mark.sphinx(
@@ -1655,50 +1628,20 @@ def get_expected(name):
.strip()
)
- # longtable
- actual = tables['longtable']
- expected = get_expected('longtable')
- assert actual == expected
-
- # longtable having :widths: option
- actual = tables['longtable having :widths: option']
- expected = get_expected('longtable_having_widths')
- assert actual == expected
-
- # longtable having :align: option
- actual = tables['longtable having :align: option']
- expected = get_expected('longtable_having_align')
- assert actual == expected
-
- # longtable with tabularcolumn
- actual = tables['longtable with tabularcolumn']
- expected = get_expected('longtable_with_tabularcolumn')
- assert actual == expected
-
- # longtable having caption
- actual = tables['longtable having caption']
- expected = get_expected('longtable_having_caption')
- assert actual == expected
-
- # longtable having verbatim
- actual = tables['longtable having verbatim']
- expected = get_expected('longtable_having_verbatim')
- assert actual == expected
-
- # longtable having problematic cell
- actual = tables['longtable having problematic cell']
- expected = get_expected('longtable_having_problematic_cell')
- assert actual == expected
-
- # longtable having both :widths: and problematic cell
- actual = tables['longtable having both :widths: and problematic cell']
- expected = get_expected('longtable_having_widths_and_problematic_cell')
- assert actual == expected
-
- # longtable having both stub columns and problematic cell
- actual = tables['longtable having both stub columns and problematic cell']
- expected = get_expected('longtable_having_stub_columns_and_problematic_cell')
- assert actual == expected
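+ # Each section title maps to an expected-output .tex file (spaces become underscores).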
+ for sectname in (
+ 'longtable',
+ 'longtable having widths option',
+ 'longtable having align option',
+ 'longtable with tabularcolumns',
+ 'longtable having caption',
+ 'longtable having verbatim',
+ 'longtable having formerly problematic',
+ 'longtable having widths and formerly problematic',
+ 'longtable having stub columns and formerly problematic',
+ ):
+ actual = tables[sectname]
+ expected = get_expected(sectname.replace(' ', '_'))
+ assert actual == expected
@pytest.mark.sphinx(
@@ -1722,22 +1665,14 @@ def get_expected(name):
.strip()
)
- # grid table
- actual = tables['grid table']
- expected = get_expected('gridtable')
- assert actual == expected
-
- # grid table with tabularcolumns
- # MEMO: filename should end with tabularcolumns but tabularcolumn has been
- # used in existing other cases
- actual = tables['grid table with tabularcolumns having no vline']
- expected = get_expected('gridtable_with_tabularcolumn')
- assert actual == expected
-
- # complex spanning cell
- actual = tables['complex spanning cell']
- expected = get_expected('complex_spanning_cell')
- assert actual == expected
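+ # Each section title maps to an expected-output .tex file (spaces become underscores).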
+ for sectname in (
+ 'grid table',
+ 'grid table with tabularcolumns',
+ 'complex spanning cell',
+ ):
+ actual = tables[sectname]
+ expected = get_expected(sectname.replace(' ', '_'))
+ assert actual == expected
@pytest.mark.sphinx('latex', testroot='latex-table')
From bd343b29457182e58d3e8b26133446dc2496dc85 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Mon, 30 Jun 2025 17:39:53 +0200
Subject: [PATCH 169/466] Update CHANGES to mention fix of #11973
Close #11973 (it was fixed via #13593).
---
CHANGES.rst | 2 ++
1 file changed, 2 insertions(+)
diff --git a/CHANGES.rst b/CHANGES.rst
index 06990919e1b..804c3a7628e 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -72,6 +72,8 @@ Bugs fixed
Patch by Jean-François B.
* #11515: LaTeX: longtable does not allow nested table.
Patch by Jean-François B.
+* #11973: LaTeX: links in table captions do not work in PDF.
+ Patch by Jean-François B.
* #12821: LaTeX: URLs/links in section titles should render in PDF.
Patch by Jean-François B.
* #13369: Correctly parse and cross-reference unpacked type annotations.
From 5fd1462390d831f10bd6b4795bf3ee9470943262 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Mon, 30 Jun 2025 17:57:48 +0200
Subject: [PATCH 170/466] Update changes/5.3.rst to mention fix of #6740
Close #6740.
---
doc/changes/5.3.rst | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/doc/changes/5.3.rst b/doc/changes/5.3.rst
index b2a2e5a78f1..171b0792bbe 100644
--- a/doc/changes/5.3.rst
+++ b/doc/changes/5.3.rst
@@ -8,7 +8,10 @@ Release 5.3.0 (released Oct 16, 2022)
* #10759: LaTeX: add :confval:`latex_table_style` and support the
``'booktabs'``, ``'borderless'``, and ``'colorrows'`` styles.
- (thanks to Stefan Wiehler for initial pull requests #6666, #6671)
+ (thanks to Stefan Wiehler for initial pull requests #6666, #6671).
+ Using the ``'booktabs'`` style solves #6740 (Removing LaTeX
+ column borders for automatic colspec).
+ Patch by Jean-François B.
* #10840: One can cross-reference including an option value like
``:option:`--module=foobar```, ``:option:`--module[=foobar]```,
or ``:option:`--module foobar```.
From 83146f12defe287fca82e167c015e25c81f4c276 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Mon, 30 Jun 2025 18:36:12 +0200
Subject: [PATCH 171/466] Update CHANGES to indicate fix of #1327
Close #1327. It was fixed as part of PR #13647.
---
CHANGES.rst | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/CHANGES.rst b/CHANGES.rst
index 804c3a7628e..82a21d2d3c9 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -62,6 +62,10 @@ Features added
Bugs fixed
----------
+* #1327: LaTeX: tables using longtable raise an error if
+ :rst:dir:`tabularcolumns` specifies automatic widths
+ (``L``, ``R``, ``C``, or ``J``).
+ Patch by Jean-François B.
* #3447: LaTeX: when assigning longtable class to table for PDF, it may render
"horizontally" and overflow in right margin.
Patch by Jean-François B.
From 52c4d722d236ecaf9b645bc7fe83d6b28db092ba Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Tue, 1 Jul 2025 12:43:21 +0200
Subject: [PATCH 172/466] Simplify a LaTeX macro (#13710)
---
sphinx/texinputs/sphinxlatextables.sty | 11 ++++++++++-
1 file changed, 10 insertions(+), 1 deletion(-)
diff --git a/sphinx/texinputs/sphinxlatextables.sty b/sphinx/texinputs/sphinxlatextables.sty
index 00719015e41..d5970d87281 100644
--- a/sphinx/texinputs/sphinxlatextables.sty
+++ b/sphinx/texinputs/sphinxlatextables.sty
@@ -47,7 +47,16 @@
% At 8.3.0, ALL table cell contents are wrapped into a varwidth environment.
% This helps solve issues such as #3447, #8828, and helps use tabulary
% in many more cases hence obtain better looking tables.
-\def\sphinxbeforeendvarwidth{\par\vskip-\baselineskip\vbox{\hbox{\strut}}}
+\def\sphinxbeforeendvarwidth{\par\vskip-\baselineskip\hbox{\strut}}
+% MEMO: Mark-up uses the above macro right before all \end{varwidth} so that
+% if the cell in a row extends lower than the others, its last line acquires
+% standard "depth". Otherwise it may lack any depth if it has no descenders
+% (such as the letters "p" or "q"), and the horizontal line or color panel
+% would look strange. This originates in PR #3435 from 2017, which solved
+% *many* table issues for merged cells (and introduced the varwidth technique
+% now, at 8.3.0, applied to all cells). The original used \vbox{\hbox{\strut}},
+% but that \vbox appears to do nothing, and it was decided after some testing
+% (July 2025) to remove it, the original rationale for it being now lost.
% These conditionals added at 8.3.0 for nested tables not to break row colors
% (#13635). Nested tables are only partially supported by Sphinx LaTeX.
From 7b4164a5f2b64781475e64daf2d05cce2a0261d8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Tue, 1 Jul 2025 20:29:08 +0200
Subject: [PATCH 173/466] LaTeX: better choice for \tymax parameter (#13711)
---
doc/usage/restructuredtext/directives.rst | 27 +++++++++++++++++++++--
sphinx/texinputs/sphinxlatextables.sty | 15 ++++++++-----
2 files changed, 34 insertions(+), 8 deletions(-)
diff --git a/doc/usage/restructuredtext/directives.rst b/doc/usage/restructuredtext/directives.rst
index 25116edd4fc..c75d04c7266 100644
--- a/doc/usage/restructuredtext/directives.rst
+++ b/doc/usage/restructuredtext/directives.rst
@@ -1499,8 +1499,31 @@ Check the :confval:`latex_table_style`.
using ``tabulary`` is that it tries to compute automatically (internally to
LaTeX) suitable column widths.
- :rst:dir:`tabularcolumns` can serve to provide one's own "colspec" choice.
- Here is an advanced example:
+ The ``tabulary`` algorithm often works well, but when a cell contains long
+ paragraphs, its column may be given a large width while other columns whose
+ cells contain only single words end up too narrow. The
+ :rst:dir:`tabularcolumns` directive can help solve this by providing LaTeX
+ with a custom "alignment preamble" (aka "colspec"). For example, ``lJJ`` is
+ suitable for a three-column table whose first column contains only single
+ words and whose other two columns have cells with long paragraphs.
+
+ .. note::
+
+ Of course, a fully automated solution would be better, and remains hoped
+ for, but this behavior is intrinsic to ``tabulary``, which Sphinx has
+ used since version ``0.3``. Solving the problem of squeezed columns
+ would likely require substantial changes to that LaTeX package, and no
+ good alternative appears to exist as of 2025.
+
+ .. hint::
+
+ A way to solve the issue for all tables at once is to inject into the
+ LaTeX preamble (see :confval:`latex_elements`) a command such as
+ ``\setlength{\tymin}{1cm}``, which causes all columns to be at least
+ ``1cm`` wide (not counting inter-column whitespace). By default, Sphinx
+ configures ``\tymin`` to allow room for at least three characters.
+
+ Here is a more sophisticated "colspec" for a four-column table:
.. code-block:: latex
diff --git a/sphinx/texinputs/sphinxlatextables.sty b/sphinx/texinputs/sphinxlatextables.sty
index d5970d87281..b80cc83be8d 100644
--- a/sphinx/texinputs/sphinxlatextables.sty
+++ b/sphinx/texinputs/sphinxlatextables.sty
@@ -267,7 +267,7 @@
%
% configuration of tabulary
\setlength{\tymin}{3\fontcharwd\font`0 }% minimal width of "squeezed" columns
-\setlength{\tymax}{2\textwidth}% allow enough room for paragraphs to "compete"
+\setlength{\tymax}{3000pt}% allow enough room for paragraphs to "compete"
%
% MEMO: tabulary initially renders cell contents "horizontally" to measure
% them and compare their relative importance. Its goal is to choose the
@@ -284,11 +284,14 @@
% the initial horizontal width for "varwidth". In the first tabulary
% pass, \sphinxcolwidth is configured (by us) to use \tymax.
%
-% During testing, it was determined that our former 10000pt setting for
-% \tymax could cause "Dimension too large" TeX error if two columns or
-% more contained such cells. So we use now 2\textwidth which is more
-% than 10 times smaller but proves large enough for the tabulary
-% algorithm to provide reasonable results.
+% During testing, it was determined that the former 10000pt setting for
+% \tymax would cause a "Dimension too large" TeX error if two or more
+% columns had cells containing admonitions (such contents do not allow
+% "varwidth" to reduce the width automatically). We now use 3000pt,
+% which allows up to 5 such columns while being large enough for the
+% tabulary algorithm to give good results for cells containing a few
+% dozen words. The tabulary default of 2\textwidth proves to be too
+% small for that.
%
% we need access to tabulary's final computed width. \@tempdima is too volatile
% to hope it has kept tabulary's value when \sphinxcolwidth needs it.
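The hint above translates directly to conf.py. A minimal sketch of the
preamble injection (``latex_elements`` and its ``'preamble'`` key are
standard Sphinx configuration; the ``1cm`` value is an arbitrary example):

    # conf.py -- sketch of the \tymin override described in the hint above
    latex_elements = {
        'preamble': r'\setlength{\tymin}{1cm}',
    }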
From 8949df498fdf70acd3412b85ce42f20d0b4a0cdc Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Szymon=20Karpi=C5=84ski?=
<34919255+szkarpinski@users.noreply.github.com>
Date: Sat, 19 Jul 2025 12:34:06 +0200
Subject: [PATCH 174/466] Fix intersphinx reftitle for non-numeric versions
(#13737)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Signed-off-by: Szymon Karpiński
---
AUTHORS.rst | 1 +
CHANGES.rst | 2 ++
sphinx/ext/intersphinx/_resolve.py | 7 +++++-
tests/test_extensions/test_ext_intersphinx.py | 23 +++++++++++++++++++
tests/test_util/intersphinx_data.py | 9 ++++++++
5 files changed, 41 insertions(+), 1 deletion(-)
diff --git a/AUTHORS.rst b/AUTHORS.rst
index 43a8da3469d..d08f44875b4 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -103,6 +103,7 @@ Contributors
* Stefan Seefeld -- toctree improvements
* Stefan van der Walt -- autosummary extension
* Steve Piercy -- documentation improvements
+* Szymon Karpinski -- intersphinx improvements
* \T. Powers -- HTML output improvements
* Taku Shimizu -- epub3 builder
* Thomas Lamb -- linkcheck builder
diff --git a/CHANGES.rst b/CHANGES.rst
index 82a21d2d3c9..791038e5e15 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -95,6 +95,8 @@ Bugs fixed
Patch by Jean-François B.
* #13685: gettext: Correctly ignore trailing backslashes.
Patch by Bénédikt Tran.
+* #13712: intersphinx: Don't add "v" prefix to non-numeric versions.
+ Patch by Szymon Karpinski.
Testing
-------
diff --git a/sphinx/ext/intersphinx/_resolve.py b/sphinx/ext/intersphinx/_resolve.py
index b68222645bb..52b672d783a 100644
--- a/sphinx/ext/intersphinx/_resolve.py
+++ b/sphinx/ext/intersphinx/_resolve.py
@@ -46,7 +46,12 @@ def _create_element_from_result(
# get correct path in case of subdirectories
uri = (_relative_path(Path(), Path(node['refdoc']).parent) / uri).as_posix()
if inv_item.project_version:
- reftitle = _('(in %s v%s)') % (inv_item.project_name, inv_item.project_version)
+ if not inv_item.project_version[0].isdigit():
+            # Do not add a 'v' prefix to non-numeric versions
+ version = inv_item.project_version
+ else:
+ version = f'v{inv_item.project_version}'
+ reftitle = _('(in %s %s)') % (inv_item.project_name, version)
else:
reftitle = _('(in %s)') % (inv_item.project_name,)
diff --git a/tests/test_extensions/test_ext_intersphinx.py b/tests/test_extensions/test_ext_intersphinx.py
index 240d7e12ecb..a4d7f4fe6fe 100644
--- a/tests/test_extensions/test_ext_intersphinx.py
+++ b/tests/test_extensions/test_ext_intersphinx.py
@@ -35,6 +35,7 @@
INVENTORY_V2,
INVENTORY_V2_AMBIGUOUS_TERMS,
INVENTORY_V2_NO_VERSION,
+ INVENTORY_V2_TEXT_VERSION,
)
from tests.utils import http_server
@@ -900,3 +901,25 @@ def log_message(*args, **kwargs):
srcdir=None,
cache_path=None,
)
+
+
+@pytest.mark.sphinx('html', testroot='root')
+def test_inventory_text_version(tmp_path, app):
+ inv_file = tmp_path / 'inventory'
+ inv_file.write_bytes(INVENTORY_V2_TEXT_VERSION)
+ set_config(
+ app,
+ {
+ 'python': ('https://docs.python.org/', str(inv_file)),
+ },
+ )
+
+ # load the inventory and check if non-numeric version is handled correctly
+ validate_intersphinx_mapping(app, app.config)
+ load_mappings(app)
+
+ rn = reference_check(app, 'py', 'mod', 'module1', 'foo')
+ assert isinstance(rn, nodes.reference)
+ assert rn['refuri'] == 'https://docs.python.org/foo.html#module-module1'
+ assert rn['reftitle'] == '(in foo stable)'
+ assert rn[0].astext() == 'Long Module desc'
diff --git a/tests/test_util/intersphinx_data.py b/tests/test_util/intersphinx_data.py
index 2b3489da465..24fcc4ae311 100644
--- a/tests/test_util/intersphinx_data.py
+++ b/tests/test_util/intersphinx_data.py
@@ -62,3 +62,12 @@
b term std:term -1 document.html#id5 -
B term std:term -1 document.html#B -
""")
+
+INVENTORY_V2_TEXT_VERSION: Final[bytes] = b"""\
+# Sphinx inventory version 2
+# Project: foo
+# Version: stable
+# The remainder of this file is compressed with zlib.
+""" + zlib.compress(b"""\
+module1 py:module 0 foo.html#module-module1 Long Module desc
+""")
From ca794cbe06122fbb523511f684a9434dff6baaac Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Wed, 23 Jul 2025 19:23:44 +0100
Subject: [PATCH 175/466] Remove mypy overrides for
``tests/test_builders/test_build_html_tocdepth.py`` (#13696)
---
pyproject.toml | 1 -
.../test_builders/test_build_html_tocdepth.py | 24 +++++++++++++++++--
2 files changed, 22 insertions(+), 3 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 83d8de0d018..b29fa9c90cc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -257,7 +257,6 @@ module = [
"tests.test_builders.test_build_html_assets",
"tests.test_builders.test_build_html_maths",
"tests.test_builders.test_build_html_numfig",
- "tests.test_builders.test_build_html_tocdepth",
"tests.test_builders.test_build_html_toctree",
"tests.test_builders.test_build_linkcheck",
"tests.test_builders.test_build_warnings",
diff --git a/tests/test_builders/test_build_html_tocdepth.py b/tests/test_builders/test_build_html_tocdepth.py
index 003ba02e5f0..0fe83e0ff34 100644
--- a/tests/test_builders/test_build_html_tocdepth.py
+++ b/tests/test_builders/test_build_html_tocdepth.py
@@ -2,11 +2,20 @@
from __future__ import annotations
+from typing import TYPE_CHECKING
+
import pytest
from tests.test_builders.xpath_html_util import _intradocument_hyperlink_check
from tests.test_builders.xpath_util import check_xpath
+if TYPE_CHECKING:
+ from collections.abc import Callable
+ from pathlib import Path
+ from xml.etree.ElementTree import ElementTree
+
+ from sphinx.testing.util import SphinxTestApp
+
@pytest.mark.parametrize(
('fname', 'path', 'check', 'be_found'),
@@ -68,7 +77,14 @@
)
@pytest.mark.sphinx('html', testroot='tocdepth')
@pytest.mark.test_params(shared_result='test_build_html_tocdepth')
-def test_tocdepth(app, cached_etree_parse, fname, path, check, be_found):
+def test_tocdepth(
+ app: SphinxTestApp,
+ cached_etree_parse: Callable[[Path], ElementTree],
+ fname: str,
+ path: str,
+ check: str,
+ be_found: bool,
+) -> None:
app.build()
# https://github.com/sphinx-doc/sphinx/issues/1251
check_xpath(cached_etree_parse(app.outdir / fname), fname, path, check, be_found)
@@ -111,6 +127,10 @@ def test_tocdepth(app, cached_etree_parse, fname, path, check, be_found):
)
@pytest.mark.sphinx('singlehtml', testroot='tocdepth')
@pytest.mark.test_params(shared_result='test_build_html_tocdepth')
-def test_tocdepth_singlehtml(app, cached_etree_parse, expect):
+def test_tocdepth_singlehtml(
+ app: SphinxTestApp,
+ cached_etree_parse: Callable[[Path], ElementTree],
+ expect: tuple[str, str, bool],
+) -> None:
app.build()
check_xpath(cached_etree_parse(app.outdir / 'index.html'), 'index.html', *expect)
From d73f02dedb660848b157ec5a93da8be1ab6a272d Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 23 Jul 2025 19:24:49 +0100
Subject: [PATCH 176/466] Bump types-defusedxml to 0.7.0.20250708 (#13719)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index b29fa9c90cc..f4e651bf6aa 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -97,7 +97,7 @@ lint = [
"mypy==1.16.1",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
- "types-defusedxml==0.7.0.20250516",
+ "types-defusedxml==0.7.0.20250708",
"types-docutils==0.21.0.20250525",
"types-Pillow==10.2.0.20240822",
"types-Pygments==2.19.0.20250516",
@@ -166,7 +166,7 @@ types = [
type-stubs = [
# align with versions used elsewhere
"types-colorama==0.4.15.20240311",
- "types-defusedxml==0.7.0.20250516",
+ "types-defusedxml==0.7.0.20250708",
"types-docutils==0.21.0.20250525",
"types-Pillow==10.2.0.20240822",
"types-Pygments==2.19.0.20250516",
From 8f28285b78a27d9735cbaa8f435c08189b773bf5 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Wed, 23 Jul 2025 19:31:34 +0100
Subject: [PATCH 177/466] Bump pypi-attestations to 0.0.27
---
pyproject.toml | 4 ++--
utils/convert_attestations.py | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index f4e651bf6aa..e165b29e372 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -105,7 +105,7 @@ lint = [
"types-urllib3==1.26.25.14",
"pyright==1.1.400",
"pytest>=8.0",
- "pypi-attestations==0.0.26",
+ "pypi-attestations==0.0.27",
"betterproto==2.0.0b6",
]
test = [
@@ -142,7 +142,7 @@ lint = [
package = [
"betterproto==2.0.0b6", # resolution fails without betterproto
"build",
- "pypi-attestations==0.0.26",
+ "pypi-attestations==0.0.27",
"twine>=6.1",
]
test = [
diff --git a/utils/convert_attestations.py b/utils/convert_attestations.py
index c62fd5a057c..e015da7d5c0 100644
--- a/utils/convert_attestations.py
+++ b/utils/convert_attestations.py
@@ -7,7 +7,7 @@
# /// script
# requires-python = ">=3.11"
# dependencies = [
-# "pypi-attestations==0.0.26",
+# "pypi-attestations==0.0.27",
# "betterproto==2.0.0b6",
# ]
# ///
From 56b4c5002679b05900e3a06662962b60ed92175c Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Wed, 23 Jul 2025 19:34:22 +0100
Subject: [PATCH 178/466] Bump Ruff to 0.12.4
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index e165b29e372..21261e85904 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -93,7 +93,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.12.0",
+ "ruff==0.12.4",
"mypy==1.16.1",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
@@ -136,7 +136,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.12.0",
+ "ruff==0.12.4",
"sphinx-lint>=0.9",
]
package = [
From 16ff1fedbbb9346805e7483d7307220a13d206d9 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Wed, 23 Jul 2025 23:20:47 +0100
Subject: [PATCH 179/466] Move event listeners to
``sphinx.ext.autodoc._event_listeners``
---
sphinx/application.py | 3 +-
sphinx/events.py | 2 +-
sphinx/ext/autodoc/__init__.py | 106 +----------------------
sphinx/ext/autodoc/_event_listeners.py | 111 +++++++++++++++++++++++++
4 files changed, 117 insertions(+), 105 deletions(-)
create mode 100644 sphinx/ext/autodoc/_event_listeners.py
diff --git a/sphinx/application.py b/sphinx/application.py
index e8da1e4d058..05cdaece0bf 100644
--- a/sphinx/application.py
+++ b/sphinx/application.py
@@ -52,7 +52,8 @@
from sphinx.config import ENUM, _ConfigRebuild
from sphinx.domains import Domain, Index
from sphinx.environment.collectors import EnvironmentCollector
- from sphinx.ext.autodoc import Documenter, _AutodocProcessDocstringListener
+ from sphinx.ext.autodoc import Documenter
+ from sphinx.ext.autodoc._event_listeners import _AutodocProcessDocstringListener
from sphinx.ext.todo import todo_node
from sphinx.extension import Extension
from sphinx.registry import (
diff --git a/sphinx/events.py b/sphinx/events.py
index e408d80b796..ffda8f09e8e 100644
--- a/sphinx/events.py
+++ b/sphinx/events.py
@@ -28,7 +28,7 @@
from sphinx.config import Config
from sphinx.domains import Domain
from sphinx.environment import BuildEnvironment
- from sphinx.ext.autodoc import _AutodocProcessDocstringListener
+ from sphinx.ext.autodoc._event_listeners import _AutodocProcessDocstringListener
from sphinx.ext.todo import todo_node
diff --git a/sphinx/ext/autodoc/__init__.py b/sphinx/ext/autodoc/__init__.py
index 6d20e4007b2..af76cb5915e 100644
--- a/sphinx/ext/autodoc/__init__.py
+++ b/sphinx/ext/autodoc/__init__.py
@@ -19,6 +19,8 @@
import sphinx
from sphinx.config import ENUM
from sphinx.errors import PycodeError
+from sphinx.ext.autodoc._event_listeners import between as between
+from sphinx.ext.autodoc._event_listeners import cut_lines as cut_lines
from sphinx.ext.autodoc.importer import get_class_members, import_module, import_object
from sphinx.ext.autodoc.mock import ismock, mock, undecorate
from sphinx.locale import _, __
@@ -37,7 +39,7 @@
if TYPE_CHECKING:
from collections.abc import Callable, Iterator, Sequence
from types import ModuleType
- from typing import ClassVar, Literal, TypeAlias
+ from typing import ClassVar, Literal
from sphinx.application import Sphinx
from sphinx.config import Config
@@ -47,13 +49,6 @@
from sphinx.registry import SphinxComponentRegistry
from sphinx.util.typing import ExtensionMetadata, OptionSpec, _RestifyMode
- _AutodocObjType = Literal[
- 'module', 'class', 'exception', 'function', 'method', 'attribute'
- ]
- _AutodocProcessDocstringListener: TypeAlias = Callable[
- [Sphinx, _AutodocObjType, str, Any, dict[str, bool], list[str]], None
- ]
-
logger = logging.getLogger(__name__)
@@ -193,101 +188,6 @@ def merge_members_option(options: dict[str, Any]) -> None:
members.append(member)
-# Some useful event listener factories for autodoc-process-docstring.
-
-
-def cut_lines(
- pre: int, post: int = 0, what: Sequence[str] | None = None
-) -> _AutodocProcessDocstringListener:
- """Return a listener that removes the first *pre* and last *post*
- lines of every docstring. If *what* is a sequence of strings,
- only docstrings of a type in *what* will be processed.
-
- Use like this (e.g. in the ``setup()`` function of :file:`conf.py`)::
-
- from sphinx.ext.autodoc import cut_lines
-
- app.connect('autodoc-process-docstring', cut_lines(4, what={'module'}))
-
- This can (and should) be used in place of :confval:`automodule_skip_lines`.
- """
- if not what:
- what_unique: frozenset[str] = frozenset()
- elif isinstance(what, str): # strongly discouraged
- what_unique = frozenset({what})
- else:
- what_unique = frozenset(what)
-
- def process(
- app: Sphinx,
- what_: _AutodocObjType,
- name: str,
- obj: Any,
- options: dict[str, bool],
- lines: list[str],
- ) -> None:
- if what_unique and what_ not in what_unique:
- return
- del lines[:pre]
- if post:
- # remove one trailing blank line.
- if lines and not lines[-1]:
- lines.pop(-1)
- del lines[-post:]
- # make sure there is a blank line at the end
- if lines and lines[-1]:
- lines.append('')
-
- return process
-
-
-def between(
- marker: str,
- what: Sequence[str] | None = None,
- keepempty: bool = False,
- exclude: bool = False,
-) -> _AutodocProcessDocstringListener:
- """Return a listener that either keeps, or if *exclude* is True excludes,
- lines between lines that match the *marker* regular expression. If no line
- matches, the resulting docstring would be empty, so no change will be made
- unless *keepempty* is true.
-
- If *what* is a sequence of strings, only docstrings of a type in *what* will
- be processed.
- """
- marker_re = re.compile(marker)
-
- def process(
- app: Sphinx,
- what_: _AutodocObjType,
- name: str,
- obj: Any,
- options: dict[str, bool],
- lines: list[str],
- ) -> None:
- if what and what_ not in what:
- return
- deleted = 0
- delete = not exclude
- orig_lines = lines.copy()
- for i, line in enumerate(orig_lines):
- if delete:
- lines.pop(i - deleted)
- deleted += 1
- if marker_re.match(line):
- delete = not delete
- if delete:
- lines.pop(i - deleted)
- deleted += 1
- if not lines and not keepempty:
- lines[:] = orig_lines
- # make sure there is a blank line at the end
- if lines and lines[-1]:
- lines.append('')
-
- return process
-
-
# This class is used only in ``sphinx.ext.autodoc.directive``,
# But we define this class here to keep compatibility
# See: https://github.com/sphinx-doc/sphinx/issues/4538
diff --git a/sphinx/ext/autodoc/_event_listeners.py b/sphinx/ext/autodoc/_event_listeners.py
new file mode 100644
index 00000000000..e328e54bf3c
--- /dev/null
+++ b/sphinx/ext/autodoc/_event_listeners.py
@@ -0,0 +1,111 @@
+"""Some useful event listener factories for autodoc-process-docstring."""
+
+from __future__ import annotations
+
+import re
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from collections.abc import Callable, Sequence
+ from typing import Any, Literal, TypeAlias
+
+ from sphinx.application import Sphinx
+
+ _AutodocObjType = Literal[
+ 'module', 'class', 'exception', 'function', 'method', 'attribute'
+ ]
+ _AutodocProcessDocstringListener: TypeAlias = Callable[
+ [Sphinx, _AutodocObjType, str, Any, dict[str, bool], list[str]], None
+ ]
+
+
+def cut_lines(
+ pre: int, post: int = 0, what: Sequence[str] | None = None
+) -> _AutodocProcessDocstringListener:
+ """Return a listener that removes the first *pre* and last *post*
+ lines of every docstring. If *what* is a sequence of strings,
+ only docstrings of a type in *what* will be processed.
+
+ Use like this (e.g. in the ``setup()`` function of :file:`conf.py`)::
+
+ from sphinx.ext.autodoc import cut_lines
+
+ app.connect('autodoc-process-docstring', cut_lines(4, what={'module'}))
+
+ This can (and should) be used in place of :confval:`automodule_skip_lines`.
+ """
+ if not what:
+ what_unique: frozenset[str] = frozenset()
+ elif isinstance(what, str): # strongly discouraged
+ what_unique = frozenset({what})
+ else:
+ what_unique = frozenset(what)
+
+ def process(
+ app: Sphinx,
+ what_: _AutodocObjType,
+ name: str,
+ obj: Any,
+ options: dict[str, bool],
+ lines: list[str],
+ ) -> None:
+ if what_unique and what_ not in what_unique:
+ return
+ del lines[:pre]
+ if post:
+ # remove one trailing blank line.
+ if lines and not lines[-1]:
+ lines.pop(-1)
+ del lines[-post:]
+ # make sure there is a blank line at the end
+ if lines and lines[-1]:
+ lines.append('')
+
+ return process
+
+
+def between(
+ marker: str,
+ what: Sequence[str] | None = None,
+ keepempty: bool = False,
+ exclude: bool = False,
+) -> _AutodocProcessDocstringListener:
+ """Return a listener that either keeps, or if *exclude* is True excludes,
+ lines between lines that match the *marker* regular expression. If no line
+ matches, the resulting docstring would be empty, so no change will be made
+ unless *keepempty* is true.
+
+ If *what* is a sequence of strings, only docstrings of a type in *what* will
+ be processed.
+ """
+ marker_re = re.compile(marker)
+
+ def process(
+ app: Sphinx,
+ what_: _AutodocObjType,
+ name: str,
+ obj: Any,
+ options: dict[str, bool],
+ lines: list[str],
+ ) -> None:
+ if what and what_ not in what:
+ return
+ deleted = 0
+ delete = not exclude
+ orig_lines = lines.copy()
+ for i, line in enumerate(orig_lines):
+ if delete:
+ lines.pop(i - deleted)
+ deleted += 1
+ if marker_re.match(line):
+ delete = not delete
+ if delete:
+ lines.pop(i - deleted)
+ deleted += 1
+ if not lines and not keepempty:
+ lines[:] = orig_lines
+ # make sure there is a blank line at the end
+ if lines and lines[-1]:
+ lines.append('')
+
+ return process
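As the docstrings note, both factories are meant to be connected from
``conf.py``. A minimal usage sketch (the ``---`` marker is an illustrative
choice, not a Sphinx convention):

    # conf.py -- hypothetical usage of the relocated factories
    from sphinx.ext.autodoc import between, cut_lines

    def setup(app):
        # Drop the first four lines of every module docstring.
        app.connect('autodoc-process-docstring', cut_lines(4, what={'module'}))
        # Keep only the docstring content between '---' marker lines.
        app.connect('autodoc-process-docstring', between(r'^---$'))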
From dd67c7ca88aa0099e6ae7b6f7f5e6bfdae692a6f Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Wed, 23 Jul 2025 23:25:14 +0100
Subject: [PATCH 180/466] Move sentinels to ``sphinx.ext.autodoc._sentinels``
---
sphinx/ext/autodoc/__init__.py | 39 ++++++++++----------------------
sphinx/ext/autodoc/_sentinels.py | 31 +++++++++++++++++++++++++
2 files changed, 43 insertions(+), 27 deletions(-)
create mode 100644 sphinx/ext/autodoc/_sentinels.py
diff --git a/sphinx/ext/autodoc/__init__.py b/sphinx/ext/autodoc/__init__.py
index af76cb5915e..403e7329fb7 100644
--- a/sphinx/ext/autodoc/__init__.py
+++ b/sphinx/ext/autodoc/__init__.py
@@ -21,6 +21,18 @@
from sphinx.errors import PycodeError
from sphinx.ext.autodoc._event_listeners import between as between
from sphinx.ext.autodoc._event_listeners import cut_lines as cut_lines
+from sphinx.ext.autodoc._sentinels import (
+ ALL,
+ EMPTY,
+ SUPPRESS,
+ UNINITIALIZED_ATTR,
+)
+from sphinx.ext.autodoc._sentinels import (
+ INSTANCE_ATTR as INSTANCEATTR,
+)
+from sphinx.ext.autodoc._sentinels import (
+ SLOTS_ATTR as SLOTSATTR,
+)
from sphinx.ext.autodoc.importer import get_class_members, import_module, import_object
from sphinx.ext.autodoc.mock import ismock, mock, undecorate
from sphinx.locale import _, __
@@ -84,30 +96,6 @@ def identity(x: Any) -> Any:
return x
-class _All:
- """A special value for :*-members: that matches to any member."""
-
- def __contains__(self, item: Any) -> bool:
- return True
-
- def append(self, item: Any) -> None:
- pass # nothing
-
-
-class _Empty:
- """A special value for :exclude-members: that never matches to any member."""
-
- def __contains__(self, item: Any) -> bool:
- return False
-
-
-ALL = _All()
-EMPTY = _Empty()
-UNINITIALIZED_ATTR = object()
-INSTANCEATTR = object()
-SLOTSATTR = object()
-
-
def members_option(arg: Any) -> object | list[str]:
"""Used to convert the :members: option to auto directives."""
if arg in {None, True}:
@@ -153,9 +141,6 @@ def class_doc_from_option(arg: Any) -> str | None:
raise ValueError(__('invalid value for class-doc-from option: %s') % arg)
-SUPPRESS = object()
-
-
def annotation_option(arg: Any) -> Any:
if arg in {None, True}:
# suppress showing the representation of the object
diff --git a/sphinx/ext/autodoc/_sentinels.py b/sphinx/ext/autodoc/_sentinels.py
new file mode 100644
index 00000000000..f1544b11165
--- /dev/null
+++ b/sphinx/ext/autodoc/_sentinels.py
@@ -0,0 +1,31 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from typing import Any
+
+
+class _All:
+ """A special value for :*-members: that matches to any member."""
+
+ def __contains__(self, item: Any) -> bool:
+ return True
+
+ def append(self, item: Any) -> None:
+ pass # nothing
+
+
+class _Empty:
+ """A special value for :exclude-members: that never matches to any member."""
+
+ def __contains__(self, item: Any) -> bool:
+ return False
+
+
+ALL = _All()
+EMPTY = _Empty()
+UNINITIALIZED_ATTR = object()
+INSTANCE_ATTR = object()
+SLOTS_ATTR = object()
+SUPPRESS = object()
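The container sentinels act as universal and empty member filters; a quick
sketch of the intended semantics (assuming the module layout above):

    from sphinx.ext.autodoc._sentinels import ALL, EMPTY

    # ALL matches any member name; EMPTY matches none.
    assert 'anything' in ALL
    assert 'anything' not in EMPTY
    ALL.append('ignored')  # append is deliberately a no-op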
From 193b12427b44d97ccd2cacf537b3ed78335ed334 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Wed, 23 Jul 2025 23:27:48 +0100
Subject: [PATCH 181/466] Delete unused MethodDescriptorType
---
sphinx/ext/autodoc/__init__.py | 5 -----
1 file changed, 5 deletions(-)
diff --git a/sphinx/ext/autodoc/__init__.py b/sphinx/ext/autodoc/__init__.py
index 403e7329fb7..2efbafddaa0 100644
--- a/sphinx/ext/autodoc/__init__.py
+++ b/sphinx/ext/autodoc/__init__.py
@@ -64,11 +64,6 @@
logger = logging.getLogger(__name__)
-# This type isn't exposed directly in any modules, but can be found
-# here in most Python versions
-MethodDescriptorType = type(type.__subclasses__)
-
-
#: extended signature RE: with explicit module name separated by ::
py_ext_sig_re = re.compile(
r"""^ ([\w.]+::)? # explicit module name
From 149fb443bd3de1f05e4bbf5bbfe0ba6e63904ed3 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Wed, 23 Jul 2025 23:37:19 +0100
Subject: [PATCH 182/466] Move options to
``sphinx.ext.autodoc._directive_options``
---
sphinx/ext/autodoc/__init__.py | 122 ++++-------------------
sphinx/ext/autodoc/_directive_options.py | 100 +++++++++++++++++++
sphinx/ext/autodoc/directive.py | 2 +-
sphinx/ext/autodoc/typehints.py | 2 +-
4 files changed, 124 insertions(+), 102 deletions(-)
create mode 100644 sphinx/ext/autodoc/_directive_options.py
diff --git a/sphinx/ext/autodoc/__init__.py b/sphinx/ext/autodoc/__init__.py
index 2efbafddaa0..a35bef1454e 100644
--- a/sphinx/ext/autodoc/__init__.py
+++ b/sphinx/ext/autodoc/__init__.py
@@ -12,21 +12,40 @@
import re
import sys
from inspect import Parameter, Signature
-from typing import TYPE_CHECKING, Any, NewType, TypeVar
+from typing import TYPE_CHECKING, NewType, TypeVar
from docutils.statemachine import StringList
import sphinx
from sphinx.config import ENUM
from sphinx.errors import PycodeError
+# This class is only used in ``sphinx.ext.autodoc.directive``,
+# but we define the class here for compatibility.
+# See: https://github.com/sphinx-doc/sphinx/issues/4538
+from sphinx.ext.autodoc._directive_options import (
+ Options as Options,
+)
+from sphinx.ext.autodoc._directive_options import (
+ annotation_option,
+ bool_option,
+ class_doc_from_option,
+ exclude_members_option,
+ identity,
+ inherited_members_option,
+ member_order_option,
+ members_option,
+ merge_members_option,
+)
from sphinx.ext.autodoc._event_listeners import between as between
from sphinx.ext.autodoc._event_listeners import cut_lines as cut_lines
from sphinx.ext.autodoc._sentinels import (
ALL,
- EMPTY,
SUPPRESS,
UNINITIALIZED_ATTR,
)
+from sphinx.ext.autodoc._sentinels import (
+ EMPTY as EMPTY,
+)
from sphinx.ext.autodoc._sentinels import (
INSTANCE_ATTR as INSTANCEATTR,
)
@@ -51,7 +70,7 @@
if TYPE_CHECKING:
from collections.abc import Callable, Iterator, Sequence
from types import ModuleType
- from typing import ClassVar, Literal
+ from typing import Any, ClassVar, Literal
from sphinx.application import Sphinx
from sphinx.config import Config
@@ -87,103 +106,6 @@ def _get_render_mode(
return 'fully-qualified-except-typing'
-def identity(x: Any) -> Any:
- return x
-
-
-def members_option(arg: Any) -> object | list[str]:
- """Used to convert the :members: option to auto directives."""
- if arg in {None, True}:
- return ALL
- elif arg is False:
- return None
- else:
- return [x.strip() for x in arg.split(',') if x.strip()]
-
-
-def exclude_members_option(arg: Any) -> object | set[str]:
- """Used to convert the :exclude-members: option."""
- if arg in {None, True}:
- return EMPTY
- return {x.strip() for x in arg.split(',') if x.strip()}
-
-
-def inherited_members_option(arg: Any) -> set[str]:
- """Used to convert the :inherited-members: option to auto directives."""
- if arg in {None, True}:
- return {'object'}
- elif arg:
- return {x.strip() for x in arg.split(',')}
- else:
- return set()
-
-
-def member_order_option(arg: Any) -> str | None:
- """Used to convert the :member-order: option to auto directives."""
- if arg in {None, True}:
- return None
- elif arg in {'alphabetical', 'bysource', 'groupwise'}:
- return arg
- else:
- raise ValueError(__('invalid value for member-order option: %s') % arg)
-
-
-def class_doc_from_option(arg: Any) -> str | None:
- """Used to convert the :class-doc-from: option to autoclass directives."""
- if arg in {'both', 'class', 'init'}:
- return arg
- else:
- raise ValueError(__('invalid value for class-doc-from option: %s') % arg)
-
-
-def annotation_option(arg: Any) -> Any:
- if arg in {None, True}:
- # suppress showing the representation of the object
- return SUPPRESS
- else:
- return arg
-
-
-def bool_option(arg: Any) -> bool:
- """Used to convert flag options to auto directives. (Instead of
- directives.flag(), which returns None).
- """
- return True
-
-
-def merge_members_option(options: dict[str, Any]) -> None:
- """Merge :private-members: and :special-members: options to the
- :members: option.
- """
- if options.get('members') is ALL:
- # merging is not needed when members: ALL
- return
-
- members = options.setdefault('members', [])
- for key in ('private-members', 'special-members'):
- other_members = options.get(key)
- if other_members is not None and other_members is not ALL:
- for member in other_members:
- if member not in members:
- members.append(member)
-
-
-# This class is used only in ``sphinx.ext.autodoc.directive``,
-# But we define this class here to keep compatibility
-# See: https://github.com/sphinx-doc/sphinx/issues/4538
-class Options(dict[str, Any]): # NoQA: FURB189
- """A dict/attribute hybrid that returns None on nonexisting keys."""
-
- def copy(self) -> Options:
- return Options(super().copy())
-
- def __getattr__(self, name: str) -> Any:
- try:
- return self[name.replace('_', '-')]
- except KeyError:
- return None
-
-
class ObjectMember:
"""A member of object.
diff --git a/sphinx/ext/autodoc/_directive_options.py b/sphinx/ext/autodoc/_directive_options.py
new file mode 100644
index 00000000000..28668cfe97f
--- /dev/null
+++ b/sphinx/ext/autodoc/_directive_options.py
@@ -0,0 +1,100 @@
+from __future__ import annotations
+
+from typing import Any
+
+from sphinx.ext.autodoc._sentinels import ALL, EMPTY, SUPPRESS
+from sphinx.locale import __
+
+
+def identity(x: Any) -> Any:
+ return x
+
+
+def members_option(arg: Any) -> object | list[str]:
+ """Used to convert the :members: option to auto directives."""
+ if arg in {None, True}:
+ return ALL
+ elif arg is False:
+ return None
+ else:
+ return [x.strip() for x in arg.split(',') if x.strip()]
+
+
+def exclude_members_option(arg: Any) -> object | set[str]:
+ """Used to convert the :exclude-members: option."""
+ if arg in {None, True}:
+ return EMPTY
+ return {x.strip() for x in arg.split(',') if x.strip()}
+
+
+def inherited_members_option(arg: Any) -> set[str]:
+ """Used to convert the :inherited-members: option to auto directives."""
+ if arg in {None, True}:
+ return {'object'}
+ elif arg:
+ return {x.strip() for x in arg.split(',')}
+ else:
+ return set()
+
+
+def member_order_option(arg: Any) -> str | None:
+ """Used to convert the :member-order: option to auto directives."""
+ if arg in {None, True}:
+ return None
+ elif arg in {'alphabetical', 'bysource', 'groupwise'}:
+ return arg
+ else:
+ raise ValueError(__('invalid value for member-order option: %s') % arg)
+
+
+def class_doc_from_option(arg: Any) -> str | None:
+ """Used to convert the :class-doc-from: option to autoclass directives."""
+ if arg in {'both', 'class', 'init'}:
+ return arg
+ else:
+ raise ValueError(__('invalid value for class-doc-from option: %s') % arg)
+
+
+def annotation_option(arg: Any) -> Any:
+ if arg in {None, True}:
+ # suppress showing the representation of the object
+ return SUPPRESS
+ else:
+ return arg
+
+
+def bool_option(arg: Any) -> bool:
+ """Used to convert flag options to auto directives. (Instead of
+ directives.flag(), which returns None).
+ """
+ return True
+
+
+def merge_members_option(options: dict[str, Any]) -> None:
+ """Merge :private-members: and :special-members: options to the
+ :members: option.
+ """
+ if options.get('members') is ALL:
+ # merging is not needed when members: ALL
+ return
+
+ members = options.setdefault('members', [])
+ for key in ('private-members', 'special-members'):
+ other_members = options.get(key)
+ if other_members is not None and other_members is not ALL:
+ for member in other_members:
+ if member not in members:
+ members.append(member)
+
+
+class Options(dict[str, Any]): # NoQA: FURB189
+ """A dict/attribute hybrid that returns None on nonexisting keys."""
+
+ def copy(self) -> Options:
+ return Options(super().copy())
+
+ def __getattr__(self, name: str) -> Any:
+ try:
+ return self[name.replace('_', '-')]
+ except KeyError:
+ return None
diff --git a/sphinx/ext/autodoc/directive.py b/sphinx/ext/autodoc/directive.py
index fd0553047a9..c2c5cb25afe 100644
--- a/sphinx/ext/autodoc/directive.py
+++ b/sphinx/ext/autodoc/directive.py
@@ -7,7 +7,7 @@
from docutils.statemachine import StringList
from docutils.utils import assemble_option_dict
-from sphinx.ext.autodoc import Options
+from sphinx.ext.autodoc._directive_options import Options
from sphinx.util import logging
from sphinx.util.docutils import SphinxDirective, switch_source_input
from sphinx.util.parsing import nested_parse_to_nodes
diff --git a/sphinx/ext/autodoc/typehints.py b/sphinx/ext/autodoc/typehints.py
index 63403772137..3d94d964613 100644
--- a/sphinx/ext/autodoc/typehints.py
+++ b/sphinx/ext/autodoc/typehints.py
@@ -19,7 +19,7 @@
from docutils.nodes import Element
from sphinx.application import Sphinx
- from sphinx.ext.autodoc import Options
+ from sphinx.ext.autodoc._directive_options import Options
from sphinx.util.typing import ExtensionMetadata, _StringifyMode
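The ``Options`` hybrid above maps attribute access onto hyphenated keys and
reads missing keys as None; a small sketch (assuming the module layout
above):

    from sphinx.ext.autodoc._directive_options import Options

    opts = Options({'no-index': True, 'member-order': 'bysource'})
    assert opts.no_index is True           # '_' in attribute names maps to '-'
    assert opts.member_order == 'bysource'
    assert opts.undoc_members is None      # missing keys read as None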
From 58293c230336e494d4f0f6ca329c4b22b36275a8 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+aa-turner@users.noreply.github.com>
Date: Wed, 23 Jul 2025 23:48:13 +0100
Subject: [PATCH 183/466] Move documenters to
``sphinx.ext.autodoc._documenters``
---
sphinx/application.py | 2 +-
sphinx/ext/autodoc/__init__.py | 3004 +---------------------------
sphinx/ext/autodoc/_documenters.py | 2929 +++++++++++++++++++++++++++
3 files changed, 3017 insertions(+), 2918 deletions(-)
create mode 100644 sphinx/ext/autodoc/_documenters.py
diff --git a/sphinx/application.py b/sphinx/application.py
index 05cdaece0bf..3864452886f 100644
--- a/sphinx/application.py
+++ b/sphinx/application.py
@@ -52,7 +52,7 @@
from sphinx.config import ENUM, _ConfigRebuild
from sphinx.domains import Domain, Index
from sphinx.environment.collectors import EnvironmentCollector
- from sphinx.ext.autodoc import Documenter
+ from sphinx.ext.autodoc._documenters import Documenter
from sphinx.ext.autodoc._event_listeners import _AutodocProcessDocstringListener
from sphinx.ext.todo import todo_node
from sphinx.extension import Extension
diff --git a/sphinx/ext/autodoc/__init__.py b/sphinx/ext/autodoc/__init__.py
index a35bef1454e..87ffa660586 100644
--- a/sphinx/ext/autodoc/__init__.py
+++ b/sphinx/ext/autodoc/__init__.py
@@ -7,25 +7,12 @@
from __future__ import annotations
-import functools
-import operator
-import re
-import sys
-from inspect import Parameter, Signature
-from typing import TYPE_CHECKING, NewType, TypeVar
-
-from docutils.statemachine import StringList
+from typing import TYPE_CHECKING
import sphinx
from sphinx.config import ENUM
-from sphinx.errors import PycodeError
-# This class is only used in ``sphinx.ext.autodoc.directive``,
-# but we define the class here for compatibility.
-# See: https://github.com/sphinx-doc/sphinx/issues/4538
-from sphinx.ext.autodoc._directive_options import (
- Options as Options,
-)
from sphinx.ext.autodoc._directive_options import (
+ Options,
annotation_option,
bool_option,
class_doc_from_option,
@@ -36,2919 +23,102 @@
members_option,
merge_members_option,
)
-from sphinx.ext.autodoc._event_listeners import between as between
-from sphinx.ext.autodoc._event_listeners import cut_lines as cut_lines
-from sphinx.ext.autodoc._sentinels import (
- ALL,
- SUPPRESS,
- UNINITIALIZED_ATTR,
-)
-from sphinx.ext.autodoc._sentinels import (
- EMPTY as EMPTY,
+from sphinx.ext.autodoc._documenters import (
+ AttributeDocumenter,
+ ClassDocumenter,
+ ClassLevelDocumenter,
+ DataDocumenter,
+ DataDocumenterMixinBase,
+ DecoratorDocumenter,
+ DocstringSignatureMixin,
+ DocstringStripSignatureMixin,
+ Documenter,
+ ExceptionDocumenter,
+ FunctionDocumenter,
+ GenericAliasMixin,
+ MethodDocumenter,
+ ModuleDocumenter,
+ ModuleLevelDocumenter,
+ NonDataDescriptorMixin,
+ ObjectMember,
+ PropertyDocumenter,
+ RuntimeInstanceAttributeMixin,
+ SlotsMixin,
+ UninitializedGlobalVariableMixin,
+ UninitializedInstanceAttributeMixin,
+ autodoc_attrgetter,
+ py_ext_sig_re,
+ special_member_re,
)
+from sphinx.ext.autodoc._event_listeners import between, cut_lines
+from sphinx.ext.autodoc._sentinels import ALL, EMPTY, SUPPRESS, UNINITIALIZED_ATTR
from sphinx.ext.autodoc._sentinels import (
INSTANCE_ATTR as INSTANCEATTR,
)
from sphinx.ext.autodoc._sentinels import (
SLOTS_ATTR as SLOTSATTR,
)
-from sphinx.ext.autodoc.importer import get_class_members, import_module, import_object
-from sphinx.ext.autodoc.mock import ismock, mock, undecorate
-from sphinx.locale import _, __
-from sphinx.pycode import ModuleAnalyzer
-from sphinx.util import inspect, logging
-from sphinx.util.docstrings import prepare_docstring, separate_metadata
-from sphinx.util.inspect import (
- evaluate_signature,
- getdoc,
- object_description,
- safe_getattr,
- stringify_signature,
-)
-from sphinx.util.typing import get_type_hints, restify, stringify_annotation
if TYPE_CHECKING:
- from collections.abc import Callable, Iterator, Sequence
- from types import ModuleType
- from typing import Any, ClassVar, Literal
-
from sphinx.application import Sphinx
- from sphinx.config import Config
- from sphinx.environment import BuildEnvironment, _CurrentDocument
- from sphinx.events import EventManager
- from sphinx.ext.autodoc.directive import DocumenterBridge
- from sphinx.registry import SphinxComponentRegistry
- from sphinx.util.typing import ExtensionMetadata, OptionSpec, _RestifyMode
-
-logger = logging.getLogger(__name__)
-
-
-#: extended signature RE: with explicit module name separated by ::
-py_ext_sig_re = re.compile(
- r"""^ ([\w.]+::)? # explicit module name
- ([\w.]+\.)? # module and/or class name(s)
- (\w+) \s* # thing name
- (?: \[\s*(.*?)\s*])? # optional: type parameters list
- (?: \((.*)\) # optional: arguments
- (?:\s* -> \s* (.*))? # return annotation
- )? $ # and nothing more
- """,
- re.VERBOSE,
+ from sphinx.util.typing import ExtensionMetadata
+
+__all__ = (
+ # Useful event listener factories for autodoc-process-docstring
+ 'cut_lines',
+ 'between',
+ # Documenters
+ 'AttributeDocumenter',
+ 'ClassDocumenter',
+ 'DataDocumenter',
+ 'DecoratorDocumenter',
+ 'ExceptionDocumenter',
+ 'FunctionDocumenter',
+ 'MethodDocumenter',
+ 'ModuleDocumenter',
+ 'PropertyDocumenter',
+ # This class is only used in ``sphinx.ext.autodoc.directive``,
+ # but we export it here for compatibility.
+ # See: https://github.com/sphinx-doc/sphinx/issues/4538
+ 'Options',
+ # Option spec functions.
+ # Exported for compatibility.
+ 'annotation_option',
+ 'bool_option',
+ 'class_doc_from_option',
+ 'exclude_members_option',
+ 'identity',
+ 'inherited_members_option',
+ 'member_order_option',
+ 'members_option',
+ 'merge_members_option',
+ # Sentinels.
+ # Exported for compatibility.
+ 'ALL',
+ 'EMPTY',
+ 'INSTANCEATTR',
+ 'SLOTSATTR',
+ 'SUPPRESS',
+ 'UNINITIALIZED_ATTR',
+ # Miscellaneous other names.
+ # Exported for compatibility.
+ 'ObjectMember',
+ 'py_ext_sig_re',
+ 'special_member_re',
+ 'ModuleLevelDocumenter',
+ 'ClassLevelDocumenter',
+ 'DocstringSignatureMixin',
+ 'DocstringStripSignatureMixin',
+ 'DataDocumenterMixinBase',
+ 'GenericAliasMixin',
+ 'UninitializedGlobalVariableMixin',
+ 'NonDataDescriptorMixin',
+ 'SlotsMixin',
+ 'RuntimeInstanceAttributeMixin',
+ 'UninitializedInstanceAttributeMixin',
+ 'autodoc_attrgetter',
+ 'Documenter',
)
-special_member_re = re.compile(r'^__\S+__$')
-
-
-def _get_render_mode(
- typehints_format: Literal['fully-qualified', 'short'],
-) -> _RestifyMode:
- if typehints_format == 'short':
- return 'smart'
- return 'fully-qualified-except-typing'
-
-
-class ObjectMember:
- """A member of object.
-
- This is used for the result of `Documenter.get_module_members()` to
- represent each member of the object.
- """
-
- __slots__ = '__name__', 'object', 'docstring', 'class_', 'skipped'
-
- __name__: str
- object: Any
- docstring: str | None
- class_: Any
- skipped: bool
-
- def __init__(
- self,
- name: str,
- obj: Any,
- *,
- docstring: str | None = None,
- class_: Any = None,
- skipped: bool = False,
- ) -> None:
- self.__name__ = name
- self.object = obj
- self.docstring = docstring
- self.class_ = class_
- self.skipped = skipped
-
- def __repr__(self) -> str:
- return (
- f'ObjectMember('
- f'name={self.__name__!r}, '
- f'obj={self.object!r}, '
- f'docstring={self.docstring!r}, '
- f'class_={self.class_!r}, '
- f'skipped={self.skipped!r}'
- f')'
- )
-
-
-class Documenter:
- """A Documenter knows how to autodocument a single object type. When
- registered with the AutoDirective, it will be used to document objects
- of that type when needed by autodoc.
-
- Its *objtype* attribute selects what auto directive it is assigned to
- (the directive name is 'auto' + objtype), and what directive it generates
- by default, though that can be overridden by an attribute called
- *directivetype*.
-
- A Documenter has an *option_spec* that works like a docutils directive's;
- in fact, it will be used to parse an auto directive's options that matches
- the Documenter.
- """
-
- #: name by which the directive is called (auto...) and the default
- #: generated directive name
- objtype: ClassVar = 'object'
- #: indentation by which to indent the directive content
- content_indent: ClassVar = ' '
- #: priority if multiple documenters return True from can_document_member
- priority: ClassVar = 0
- #: order if autodoc_member_order is set to 'groupwise'
- member_order: ClassVar = 0
- #: true if the generated content may contain titles
- titles_allowed: ClassVar = True
-
- option_spec: ClassVar[OptionSpec] = {
- 'no-index': bool_option,
- 'no-index-entry': bool_option,
- 'noindex': bool_option,
- }
-
- def get_attr(self, obj: Any, name: str, *defargs: Any) -> Any:
- """getattr() override for types such as Zope interfaces."""
- return autodoc_attrgetter(obj, name, *defargs, registry=self.env._registry)
-
- @classmethod
- def can_document_member(
- cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
- ) -> bool:
- """Called to see if a member can be documented by this Documenter."""
- msg = 'must be implemented in subclasses'
- raise NotImplementedError(msg)
-
- def __init__(
- self, directive: DocumenterBridge, name: str, indent: str = ''
- ) -> None:
- self.directive = directive
- self.config: Config = directive.env.config
- self.env: BuildEnvironment = directive.env
- self._current_document: _CurrentDocument = directive.env.current_document
- self._events: EventManager = directive.env.events
- self.options = directive.genopt
- self.name = name
- self.indent = indent
- # the module and object path within the module, and the fully
- # qualified name (all set after resolve_name succeeds)
- self.modname: str = ''
- self.module: ModuleType | None = None
- self.objpath: list[str] = []
- self.fullname = ''
- # extra signature items (arguments and return annotation,
- # also set after resolve_name succeeds)
- self.args: str | None = None
- self.retann: str = ''
- # the object to document (set after import_object succeeds)
- self.object: Any = None
- self.object_name = ''
- # the parent/owner of the object to document
- self.parent: Any = None
- # the module analyzer to get at attribute docs, or None
- self.analyzer: ModuleAnalyzer | None = None
-
- @property
- def documenters(self) -> dict[str, type[Documenter]]:
- """Returns registered Documenter classes"""
- return self.env._registry.documenters
-
- def add_line(self, line: str, source: str, *lineno: int) -> None:
- """Append one line of generated reST to the output."""
- if line.strip(): # not a blank line
- self.directive.result.append(self.indent + line, source, *lineno)
- else:
- self.directive.result.append('', source, *lineno)
-
- def resolve_name(
- self, modname: str | None, parents: Any, path: str, base: str
- ) -> tuple[str | None, list[str]]:
- """Resolve the module and name of the object to document given by the
- arguments and the current module/class.
-
- Must return a pair of the module name and a chain of attributes; for
- example, it would return ``('zipfile', ['ZipFile', 'open'])`` for the
- ``zipfile.ZipFile.open`` method.
- """
- msg = 'must be implemented in subclasses'
- raise NotImplementedError(msg)
-
- def parse_name(self) -> bool:
- """Determine what module to import and what attribute to document.
-
- Returns True and sets *self.modname*, *self.objpath*, *self.fullname*,
- *self.args* and *self.retann* if parsing and resolving was successful.
- """
- # first, parse the definition -- auto directives for classes and
- # functions can contain a signature which is then used instead of
- # an autogenerated one
- matched = py_ext_sig_re.match(self.name)
- if matched is None:
- logger.warning(
- __('invalid signature for auto%s (%r)'),
- self.objtype,
- self.name,
- type='autodoc',
- )
- return False
- explicit_modname, path, base, _tp_list, args, retann = matched.groups()
-
- # support explicit module and class name separation via ::
- if explicit_modname is not None:
- modname = explicit_modname[:-2]
- parents = path.rstrip('.').split('.') if path else []
- else:
- modname = None
- parents = []
-
- with mock(self.config.autodoc_mock_imports):
- modname, self.objpath = self.resolve_name(modname, parents, path, base)
-
- if not modname:
- return False
-
- self.modname = modname
- self.args = args
- self.retann = retann
- self.fullname = '.'.join((self.modname or '', *self.objpath))
- return True
-
- def import_object(self, raiseerror: bool = False) -> bool:
- """Import the object given by *self.modname* and *self.objpath* and set
- it as *self.object*.
-
- Returns True if successful, False if an error occurred.
- """
- with mock(self.config.autodoc_mock_imports):
- try:
- ret = import_object(
- self.modname, self.objpath, self.objtype, attrgetter=self.get_attr
- )
- self.module, self.parent, self.object_name, self.object = ret
- if ismock(self.object):
- self.object = undecorate(self.object)
- return True
- except ImportError as exc:
- if raiseerror:
- raise
- logger.warning(exc.args[0], type='autodoc', subtype='import_object')
- self.env.note_reread()
- return False
-
- def get_real_modname(self) -> str:
- """Get the real module name of an object to document.
-
- It can differ from the name of the module through which the object was
- imported.
- """
- return self.get_attr(self.object, '__module__', None) or self.modname
-
- def check_module(self) -> bool:
- """Check if *self.object* is really defined in the module given by
- *self.modname*.
- """
- if self.options.imported_members:
- return True
-
- subject = inspect.unpartial(self.object)
- modname = self.get_attr(subject, '__module__', None)
- return not modname or modname == self.modname
-
- def format_args(self, **kwargs: Any) -> str:
- """Format the argument signature of *self.object*.
-
- Should return None if the object does not have a signature.
- """
- return ''
-
- def format_name(self) -> str:
- """Format the name of *self.object*.
-
- This normally should be something that can be parsed by the generated
- directive, but doesn't need to be (Sphinx will display it unparsed
- then).
- """
- # normally the name doesn't contain the module (except for module
- # directives of course)
- return '.'.join(self.objpath) or self.modname
-
- def _call_format_args(self, **kwargs: Any) -> str:
- if kwargs:
- try:
- return self.format_args(**kwargs)
- except TypeError:
- # avoid chaining exceptions, by putting nothing here
- pass
-
- # retry without arguments for old documenters
- return self.format_args()
-
- def format_signature(self, **kwargs: Any) -> str:
- """Format the signature (arguments and return annotation) of the object.
-
- Let the user process it via the ``autodoc-process-signature`` event.
- """
- if self.args is not None:
- # signature given explicitly
- args = f'({self.args})'
- retann = self.retann
- else:
- # try to introspect the signature
- try:
- retann = None
- args = self._call_format_args(**kwargs)
- if args:
- matched = re.match(r'^(\(.*\))\s+->\s+(.*)$', args)
- if matched:
- args = matched.group(1)
- retann = matched.group(2)
- except Exception as exc:
- logger.warning(
- __('error while formatting arguments for %s: %s'),
- self.fullname,
- exc,
- type='autodoc',
- )
- args = None
-
- result = self._events.emit_firstresult(
- 'autodoc-process-signature',
- self.objtype,
- self.fullname,
- self.object,
- self.options,
- args,
- retann,
- )
- if result:
- args, retann = result
-
- if args is not None:
- return args + ((' -> %s' % retann) if retann else '')
- else:
- return ''
-
- def add_directive_header(self, sig: str) -> None:
- """Add the directive header and options to the generated content."""
- domain = getattr(self, 'domain', 'py')
- directive = getattr(self, 'directivetype', self.objtype)
- name = self.format_name()
- sourcename = self.get_sourcename()
-
- # one signature per line, indented by column
- prefix = f'.. {domain}:{directive}:: '
- for i, sig_line in enumerate(sig.split('\n')):
- self.add_line(f'{prefix}{name}{sig_line}', sourcename)
- if i == 0:
- prefix = ' ' * len(prefix)
-
- if self.options.no_index or self.options.noindex:
- self.add_line(' :no-index:', sourcename)
- if self.options.no_index_entry:
- self.add_line(' :no-index-entry:', sourcename)
- if self.objpath:
- # Be explicit about the module, this is necessary since .. class::
- # etc. don't support a prepended module name
- self.add_line(' :module: %s' % self.modname, sourcename)
-
- def get_doc(self) -> list[list[str]] | None:
- """Decode and return lines of the docstring(s) for the object.
-
- When it returns None, autodoc-process-docstring will not be called for this
- object.
- """
- docstring = getdoc(
- self.object,
- self.get_attr,
- self.config.autodoc_inherit_docstrings,
- self.parent,
- self.object_name,
- )
- if docstring:
- tab_width = self.directive.state.document.settings.tab_width
- return [prepare_docstring(docstring, tab_width)]
- return []
-
- def process_doc(self, docstrings: list[list[str]]) -> Iterator[str]:
- """Let the user process the docstrings before adding them."""
- for docstringlines in docstrings:
- if self._events is not None:
- # let extensions preprocess docstrings
- self._events.emit(
- 'autodoc-process-docstring',
- self.objtype,
- self.fullname,
- self.object,
- self.options,
- docstringlines,
- )
-
- if docstringlines and docstringlines[-1]:
- # append a blank line to the end of the docstring
- docstringlines.append('')
-
- yield from docstringlines
-
- def get_sourcename(self) -> str:
- obj_module = inspect.safe_getattr(self.object, '__module__', None)
- obj_qualname = inspect.safe_getattr(self.object, '__qualname__', None)
- if obj_module and obj_qualname:
- # Get the correct location of docstring from self.object
- # to support inherited methods
- fullname = f'{self.object.__module__}.{self.object.__qualname__}'
- else:
- fullname = self.fullname
-
- if self.analyzer:
- return f'{self.analyzer.srcname}:docstring of {fullname}'
- else:
- return 'docstring of %s' % fullname
-
- def add_content(self, more_content: StringList | None) -> None:
- """Add content from docstrings, attribute documentation and user."""
- docstring = True
-
- # set sourcename and add content from attribute documentation
- sourcename = self.get_sourcename()
- if self.analyzer:
- attr_docs = self.analyzer.find_attr_docs()
- if self.objpath:
- key = ('.'.join(self.objpath[:-1]), self.objpath[-1])
- if key in attr_docs:
- docstring = False
- # make a copy of docstring for attributes to avoid cache
- # the change of autodoc-process-docstring event.
- attribute_docstrings = [list(attr_docs[key])]
-
- for i, line in enumerate(self.process_doc(attribute_docstrings)):
- self.add_line(line, sourcename, i)
-
- # add content from docstrings
- if docstring:
- docstrings = self.get_doc()
- if docstrings is None:
- # Do not call autodoc-process-docstring on get_doc() returns None.
- pass
- else:
- if not docstrings:
- # append at least a dummy docstring, so that the event
- # autodoc-process-docstring is fired and can add some
- # content if desired
- docstrings.append([])
- for i, line in enumerate(self.process_doc(docstrings)):
- self.add_line(line, sourcename, i)
-
- # add additional content (e.g. from document), if present
- if more_content:
- for line, src in zip(more_content.data, more_content.items, strict=True):
- self.add_line(line, src[0], src[1])
-
- def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
- """Return `(members_check_module, members)` where `members` is a
- list of `(membername, member)` pairs of the members of *self.object*.
-
- If *want_all* is True, return all members. Else, only return those
- members given by *self.options.members* (which may also be None).
- """
- msg = 'must be implemented in subclasses'
- raise NotImplementedError(msg)
-
- def filter_members(
- self, members: list[ObjectMember], want_all: bool
- ) -> list[tuple[str, Any, bool]]:
- """Filter the given member list.
-
- Members are skipped if
-
- - they are private (except if given explicitly or the private-members
- option is set)
- - they are special methods (except if given explicitly or the
- special-members option is set)
- - they are undocumented (except if the undoc-members option is set)
-
- The user can override the skipping decision by connecting to the
- ``autodoc-skip-member`` event.
- """
-
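-        # Editorial note: an illustrative sketch (not from the original source)
-        # of overriding the skipping decision via the autodoc-skip-member event
-        # named in the docstring above; the handler name is an assumption:
-        #
-        #     def _keep_dunder_init(app, what, name, obj, skip, options):
-        #         if name == '__init__':
-        #             return False  # never skip __init__
-        #         return None  # defer to autodoc's default decision
-        #
-        #     def setup(app):
-        #         app.connect('autodoc-skip-member', _keep_dunder_init)
-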
- def is_filtered_inherited_member(name: str, obj: Any) -> bool:
- inherited_members = self.options.inherited_members or set()
- seen = set()
-
- if inspect.isclass(self.object):
- for cls in self.object.__mro__:
- if name in cls.__dict__:
- seen.add(cls)
- if (
- cls.__name__ in inherited_members
- and cls != self.object
- and any(
- issubclass(potential_child, cls) for potential_child in seen
- )
- ):
-                        # the given member is a member of the specified *superclass*
- return True
- if name in cls.__dict__:
- return False
- if name in self.get_attr(cls, '__annotations__', {}):
- return False
- if isinstance(obj, ObjectMember) and obj.class_ is cls:
- return False
-
- return False
-
- ret = []
-
- # search for members in source code too
- namespace = '.'.join(self.objpath) # will be empty for modules
-
- if self.analyzer:
- attr_docs = self.analyzer.find_attr_docs()
- else:
- attr_docs = {}
-
- # process members and determine which to skip
- for obj in members:
- membername = obj.__name__
- member = obj.object
-
- # if isattr is True, the member is documented as an attribute
- isattr = member is INSTANCEATTR or (namespace, membername) in attr_docs
-
- try:
- doc = getdoc(
- member,
- self.get_attr,
- self.config.autodoc_inherit_docstrings,
- self.object,
- membername,
- )
- if not isinstance(doc, str):
- # Ignore non-string __doc__
- doc = None
-
-                # if the member's __doc__ is the same as its class's __doc__,
-                # it is just inherited and therefore not the member's own doc
- cls = self.get_attr(member, '__class__', None)
- if cls:
- cls_doc = self.get_attr(cls, '__doc__', None)
- if cls_doc == doc:
- doc = None
-
- if isinstance(obj, ObjectMember) and obj.docstring:
- # hack for ClassDocumenter to inject docstring via ObjectMember
- doc = obj.docstring
-
- doc, metadata = separate_metadata(doc)
- has_doc = bool(doc)
-
- if 'private' in metadata:
- # consider a member private if docstring has "private" metadata
- isprivate = True
- elif 'public' in metadata:
- # consider a member public if docstring has "public" metadata
- isprivate = False
- else:
- isprivate = membername.startswith('_')
-
- keep = False
- if ismock(member) and (namespace, membername) not in attr_docs:
- # mocked module or object
- pass
- elif (
- self.options.exclude_members
- and membername in self.options.exclude_members
- ):
- # remove members given by exclude-members
- keep = False
- elif want_all and special_member_re.match(membername):
- # special __methods__
- if (
- self.options.special_members
- and membername in self.options.special_members
- ):
- if membername == '__doc__': # NoQA: SIM114
- keep = False
- elif is_filtered_inherited_member(membername, obj):
- keep = False
- else:
- keep = has_doc or self.options.undoc_members
- else:
- keep = False
- elif (namespace, membername) in attr_docs:
- if want_all and isprivate:
- if self.options.private_members is None:
- keep = False
- else:
- keep = membername in self.options.private_members
- else:
- # keep documented attributes
- keep = True
- elif want_all and isprivate:
- if has_doc or self.options.undoc_members:
- if self.options.private_members is None: # NoQA: SIM114
- keep = False
- elif is_filtered_inherited_member(membername, obj):
- keep = False
- else:
- keep = membername in self.options.private_members
- else:
- keep = False
- else:
- if self.options.members is ALL and is_filtered_inherited_member(
- membername, obj
- ):
- keep = False
- else:
- # ignore undocumented members if :undoc-members: is not given
- keep = has_doc or self.options.undoc_members
-
- if isinstance(obj, ObjectMember) and obj.skipped:
-                    # forcibly skipped member (e.g. a module attribute not defined in __all__)
- keep = False
-
- # give the user a chance to decide whether this member
- # should be skipped
- if self._events is not None:
-                    # let extensions decide whether this member should be skipped
- skip_user = self._events.emit_firstresult(
- 'autodoc-skip-member',
- self.objtype,
- membername,
- member,
- not keep,
- self.options,
- )
- if skip_user is not None:
- keep = not skip_user
- except Exception as exc:
- logger.warning(
- __(
- 'autodoc: failed to determine %s.%s (%r) to be documented, '
- 'the following exception was raised:\n%s'
- ),
- self.name,
- membername,
- member,
- exc,
- type='autodoc',
- )
- keep = False
-
- if keep:
- ret.append((membername, member, isattr))
-
- return ret
-
- def document_members(self, all_members: bool = False) -> None:
- """Generate reST for member documentation.
-
- If *all_members* is True, document all members, else those given by
- *self.options.members*.
- """
- # set current namespace for finding members
- self._current_document.autodoc_module = self.modname
- if self.objpath:
- self._current_document.autodoc_class = self.objpath[0]
-
- want_all = (
- all_members or self.options.inherited_members or self.options.members is ALL
- )
- # find out which members are documentable
- members_check_module, members = self.get_object_members(want_all)
-
- # document non-skipped members
- member_documenters: list[tuple[Documenter, bool]] = []
- for mname, member, isattr in self.filter_members(members, want_all):
- classes = [
- cls
- for cls in self.documenters.values()
- if cls.can_document_member(member, mname, isattr, self)
- ]
- if not classes:
- # don't know how to document this member
- continue
- # prefer the documenter with the highest priority
- classes.sort(key=lambda cls: cls.priority)
-            # give an explicitly separated module name, so that members
- # of inner classes can be documented
- full_mname = f'{self.modname}::' + '.'.join((*self.objpath, mname))
- documenter = classes[-1](self.directive, full_mname, self.indent)
- member_documenters.append((documenter, isattr))
-
- member_order = self.options.member_order or self.config.autodoc_member_order
- # We now try to import all objects before ordering them. This is to
- # avoid possible circular imports if we were to import objects after
- # their associated documenters have been sorted.
- member_documenters = [
- (documenter, isattr)
- for documenter, isattr in member_documenters
- if documenter.parse_name() and documenter.import_object()
- ]
- member_documenters = self.sort_members(member_documenters, member_order)
-
- for documenter, isattr in member_documenters:
- assert documenter.modname
- # We can directly call ._generate() since the documenters
- # already called parse_name() and import_object() before.
- #
- # Note that those two methods above do not emit events, so
- # whatever objects we deduced should not have changed.
- documenter._generate(
- all_members=True,
- real_modname=self.real_modname,
- check_module=members_check_module and not isattr,
- )
-
- # reset current objects
- self._current_document.autodoc_module = ''
- self._current_document.autodoc_class = ''
-
- def sort_members(
- self, documenters: list[tuple[Documenter, bool]], order: str
- ) -> list[tuple[Documenter, bool]]:
- """Sort the given member list."""
- if order == 'groupwise':
- # sort by group; alphabetically within groups
- documenters.sort(key=lambda e: (e[0].member_order, e[0].name))
- elif order == 'bysource':
- # By default, member discovery order matches source order,
- # as dicts are insertion-ordered from Python 3.7.
- if self.analyzer:
- # sort by source order, by virtue of the module analyzer
- tagorder = self.analyzer.tagorder
-
- def keyfunc(entry: tuple[Documenter, bool]) -> int:
- fullname = entry[0].name.split('::')[1]
- return tagorder.get(fullname, len(tagorder))
-
- documenters.sort(key=keyfunc)
- else: # alphabetical
- documenters.sort(key=lambda e: e[0].name)
-
- return documenters
-
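-    # Editorial note: for reference, the orderings handled above are selected
-    # with the autodoc_member_order setting (or the member-order directive
-    # option); a minimal conf.py sketch:
-    #
-    #     autodoc_member_order = 'groupwise'  # or 'alphabetical' / 'bysource'
-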
- def generate(
- self,
- more_content: StringList | None = None,
- real_modname: str | None = None,
- check_module: bool = False,
- all_members: bool = False,
- ) -> None:
- """Generate reST for the object given by *self.name*, and possibly for
- its members.
-
- If *more_content* is given, include that content. If *real_modname* is
- given, use that module name to find attribute docs. If *check_module* is
- True, only generate if the object is defined in the module name it is
- imported from. If *all_members* is True, document all members.
- """
- if not self.parse_name():
- # need a module to import
- logger.warning(
- __(
- "don't know which module to import for autodocumenting "
- '%r (try placing a "module" or "currentmodule" directive '
- 'in the document, or giving an explicit module name)'
- ),
- self.name,
- type='autodoc',
- )
- return
-
- # now, import the module and get object to document
- if not self.import_object():
- return
-
- self._generate(more_content, real_modname, check_module, all_members)
-
- def _generate(
- self,
- more_content: StringList | None = None,
- real_modname: str | None = None,
- check_module: bool = False,
- all_members: bool = False,
- ) -> None:
- # If there is no real module defined, figure out which to use.
- # The real module is used in the module analyzer to look up the module
-        # where the attribute documentation would actually be found.
- # This is used for situations where you have a module that collects the
- # functions and classes of internal submodules.
- guess_modname = self.get_real_modname()
- self.real_modname: str = real_modname or guess_modname
-
- # try to also get a source code analyzer for attribute docs
- try:
- self.analyzer = ModuleAnalyzer.for_module(self.real_modname)
- # parse right now, to get PycodeErrors on parsing (results will
- # be cached anyway)
- self.analyzer.find_attr_docs()
- except PycodeError as exc:
- logger.debug('[autodoc] module analyzer failed: %s', exc)
- # no source file -- e.g. for builtin and C modules
- self.analyzer = None
- # at least add the module.__file__ as a dependency
- if module___file__ := getattr(self.module, '__file__', ''):
- self.directive.record_dependencies.add(module___file__)
- else:
- self.directive.record_dependencies.add(self.analyzer.srcname)
-
- if self.real_modname != guess_modname:
- # Add module to dependency list if target object is defined in other module.
- try:
- analyzer = ModuleAnalyzer.for_module(guess_modname)
- self.directive.record_dependencies.add(analyzer.srcname)
- except PycodeError:
- pass
-
- docstrings: list[str] = functools.reduce(
- operator.iadd, self.get_doc() or [], []
- )
- if ismock(self.object) and not docstrings:
- logger.warning(
- __('A mocked object is detected: %r'),
- self.name,
- type='autodoc',
- subtype='mocked_object',
- )
-
- # check __module__ of object (for members not given explicitly)
- if check_module:
- if not self.check_module():
- return
-
- sourcename = self.get_sourcename()
-
- # make sure that the result starts with an empty line. This is
- # necessary for some situations where another directive preprocesses
- # reST and no starting newline is present
- self.add_line('', sourcename)
-
- # format the object's signature, if any
- try:
- sig = self.format_signature()
- except Exception as exc:
- logger.warning(
- __('error while formatting signature for %s: %s'),
- self.fullname,
- exc,
- type='autodoc',
- )
- return
-
- # generate the directive header and options, if applicable
- self.add_directive_header(sig)
- self.add_line('', sourcename)
-
- # e.g. the module directive doesn't have content
- self.indent += self.content_indent
-
- # add all content (from docstrings, attribute docs etc.)
- self.add_content(more_content)
-
- # document members, if possible
- self.document_members(all_members)
-
-
-class ModuleDocumenter(Documenter):
- """Specialized Documenter subclass for modules."""
-
- objtype = 'module'
- content_indent = ''
- _extra_indent = ' '
-
- option_spec: ClassVar[OptionSpec] = {
- 'members': members_option,
- 'undoc-members': bool_option,
- 'no-index': bool_option,
- 'no-index-entry': bool_option,
- 'inherited-members': inherited_members_option,
- 'show-inheritance': bool_option,
- 'synopsis': identity,
- 'platform': identity,
- 'deprecated': bool_option,
- 'member-order': member_order_option,
- 'exclude-members': exclude_members_option,
- 'private-members': members_option,
- 'special-members': members_option,
- 'imported-members': bool_option,
- 'ignore-module-all': bool_option,
- 'no-value': bool_option,
- 'noindex': bool_option,
- }
-
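-    # Editorial note: an illustrative reST usage sketch (not from the original
-    # source) exercising some of the options declared above; the module name
-    # mypkg.util is an assumption:
-    #
-    #     .. automodule:: mypkg.util
-    #        :members:
-    #        :undoc-members:
-    #        :exclude-members: _internal_helper
-    #        :show-inheritance:
-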
- def __init__(self, *args: Any) -> None:
- super().__init__(*args)
- merge_members_option(self.options)
- self.__all__: Sequence[str] | None = None
-
- def add_content(self, more_content: StringList | None) -> None:
- old_indent = self.indent
- self.indent += self._extra_indent
- super().add_content(None)
- self.indent = old_indent
- if more_content:
- for line, src in zip(more_content.data, more_content.items, strict=True):
- self.add_line(line, src[0], src[1])
-
- @classmethod
- def can_document_member(
- cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
- ) -> bool:
- # don't document submodules automatically
- return False
-
- def resolve_name(
- self, modname: str | None, parents: Any, path: str, base: str
- ) -> tuple[str | None, list[str]]:
- if modname is not None:
- logger.warning(
- __('"::" in automodule name doesn\'t make sense'), type='autodoc'
- )
- return (path or '') + base, []
-
- def parse_name(self) -> bool:
- ret = super().parse_name()
- if self.args or self.retann:
- logger.warning(
- __('signature arguments or return annotation given for automodule %s'),
- self.fullname,
- type='autodoc',
- )
- return ret
-
- def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror)
-
- try:
- if not self.options.ignore_module_all:
- self.__all__ = inspect.getall(self.object)
- except ValueError as exc:
- # invalid __all__ found.
- logger.warning(
- __(
- '__all__ should be a list of strings, not %r '
- '(in module %s) -- ignoring __all__'
- ),
- exc.args[0],
- self.fullname,
- type='autodoc',
- )
-
- return ret
-
- def add_directive_header(self, sig: str) -> None:
- Documenter.add_directive_header(self, sig)
-
- sourcename = self.get_sourcename()
-
- # add some module-specific options
- if self.options.synopsis:
- self.add_line(' :synopsis: ' + self.options.synopsis, sourcename)
- if self.options.platform:
- self.add_line(' :platform: ' + self.options.platform, sourcename)
- if self.options.deprecated:
- self.add_line(' :deprecated:', sourcename)
- if self.options.no_index_entry:
- self.add_line(' :no-index-entry:', sourcename)
-
- def get_module_members(self) -> dict[str, ObjectMember]:
- """Get members of target module."""
- if self.analyzer:
- attr_docs = self.analyzer.attr_docs
- else:
- attr_docs = {}
-
- members: dict[str, ObjectMember] = {}
- for name in dir(self.object):
- try:
- value = safe_getattr(self.object, name, None)
- if ismock(value):
- value = undecorate(value)
- docstring = attr_docs.get(('', name), [])
- members[name] = ObjectMember(
- name, value, docstring='\n'.join(docstring)
- )
- except AttributeError:
- continue
-
-        # annotation-only member (e.g. 'attr: int')
- for name in inspect.getannotations(self.object):
- if name not in members:
- docstring = attr_docs.get(('', name), [])
- members[name] = ObjectMember(
- name, INSTANCEATTR, docstring='\n'.join(docstring)
- )
-
- return members
-
- def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
- members = self.get_module_members()
- if want_all:
- if self.__all__ is None:
- # for implicit module members, check __module__ to avoid
- # documenting imported objects
- return True, list(members.values())
- else:
- for member in members.values():
- if member.__name__ not in self.__all__:
- member.skipped = True
-
- return False, list(members.values())
- else:
- memberlist = self.options.members or []
- ret = []
- for name in memberlist:
- if name in members:
- ret.append(members[name])
- else:
- logger.warning(
- __(
- 'missing attribute mentioned in :members: option: '
- 'module %s, attribute %s'
- ),
- safe_getattr(self.object, '__name__', '???'),
- name,
- type='autodoc',
- )
- return False, ret
-
- def sort_members(
- self, documenters: list[tuple[Documenter, bool]], order: str
- ) -> list[tuple[Documenter, bool]]:
- if order == 'bysource' and self.__all__:
- assert self.__all__ is not None
- module_all = self.__all__
- module_all_set = set(module_all)
- module_all_len = len(module_all)
-
-            # Sort alphabetically first (for members not listed in __all__)
- documenters.sort(key=lambda e: e[0].name)
-
- # Sort by __all__
- def keyfunc(entry: tuple[Documenter, bool]) -> int:
- name = entry[0].name.split('::')[1]
- if name in module_all_set:
- return module_all.index(name)
- else:
- return module_all_len
-
- documenters.sort(key=keyfunc)
-
- return documenters
- else:
- return super().sort_members(documenters, order)
-
-
-class ModuleLevelDocumenter(Documenter):
- """Specialized Documenter subclass for objects on module level (functions,
- classes, data/constants).
- """
-
- def resolve_name(
- self, modname: str | None, parents: Any, path: str, base: str
- ) -> tuple[str | None, list[str]]:
- if modname is not None:
- return modname, [*parents, base]
- if path:
- modname = path.rstrip('.')
- return modname, [*parents, base]
-
- # if documenting a toplevel object without explicit module,
- # it can be contained in another auto directive ...
- modname = self._current_document.autodoc_module
- # ... or in the scope of a module directive
- if not modname:
- modname = self.env.ref_context.get('py:module')
- # ... else, it stays None, which means invalid
- return modname, [*parents, base]
-
-
-class ClassLevelDocumenter(Documenter):
- """Specialized Documenter subclass for objects on class level (methods,
- attributes).
- """
-
- def resolve_name(
- self, modname: str | None, parents: Any, path: str, base: str
- ) -> tuple[str | None, list[str]]:
- if modname is not None:
- return modname, [*parents, base]
-
- if path:
- mod_cls = path.rstrip('.')
- else:
- # if documenting a class-level object without path,
- # there must be a current class, either from a parent
- # auto directive ...
- mod_cls = self._current_document.autodoc_class
- # ... or from a class directive
- if not mod_cls:
- mod_cls = self.env.ref_context.get('py:class', '')
- # ... if still falsy, there's no way to know
- if not mod_cls:
- return None, []
- modname, _sep, cls = mod_cls.rpartition('.')
- parents = [cls]
- # if the module name is still missing, get it like above
- if not modname:
- modname = self._current_document.autodoc_module
- if not modname:
- modname = self.env.ref_context.get('py:module')
- # ... else, it stays None, which means invalid
- return modname, [*parents, base]
-
-
-class DocstringSignatureMixin:
- """Mixin for FunctionDocumenter and MethodDocumenter to provide the
- feature of reading the signature from the docstring.
- """
-
- _new_docstrings: list[list[str]] | None = None
- _signatures: list[str] = []
-
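-    # Editorial note: a minimal sketch (not from the original source) of the
-    # docstring-signature convention this mixin parses; the function name is an
-    # assumption. With autodoc_docstring_signature enabled (the default), the
-    # leading line is consumed as the signature:
-    #
-    #     def clamp(*args, **kwargs):
-    #         """clamp(value, low, high) -> int
-    #
-    #         Constrain *value* to the closed interval [low, high].
-    #         """
-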
- def _find_signature(self) -> tuple[str | None, str | None] | None:
- # candidates of the object name
- valid_names = [self.objpath[-1]] # type: ignore[attr-defined]
- if isinstance(self, ClassDocumenter):
- valid_names.append('__init__')
- if hasattr(self.object, '__mro__'):
- valid_names.extend(cls.__name__ for cls in self.object.__mro__)
-
- docstrings = self.get_doc()
- if docstrings is None:
- return None, None
- self._new_docstrings = docstrings[:]
- self._signatures = []
- result = None
- for i, doclines in enumerate(docstrings):
- for j, line in enumerate(doclines):
- if not line:
-                    # an empty line ends the signature search
- break
-
- if line.endswith('\\'):
- line = line.rstrip('\\').rstrip()
-
- # match first line of docstring against signature RE
- match = py_ext_sig_re.match(line)
- if not match:
- break
- _exmod, _path, base, _tp_list, args, retann = match.groups()
-
- # the base name must match ours
- if base not in valid_names:
- break
-
- # re-prepare docstring to ignore more leading indentation
- directive = self.directive # type: ignore[attr-defined]
- tab_width = directive.state.document.settings.tab_width
- self._new_docstrings[i] = prepare_docstring(
- '\n'.join(doclines[j + 1 :]), tab_width
- )
-
- if result is None:
- # first signature
- result = args, retann
- else:
- # subsequent signatures
- self._signatures.append(f'({args}) -> {retann}')
-
- if result is not None:
- # finish the loop when signature found
- break
-
- return result
-
- def get_doc(self) -> list[list[str]] | None:
- if self._new_docstrings is not None:
- return self._new_docstrings
- return super().get_doc() # type: ignore[misc]
-
- def format_signature(self, **kwargs: Any) -> str:
- self.args: str | None
- if self.args is None and self.config.autodoc_docstring_signature: # type: ignore[attr-defined]
- # only act if a signature is not explicitly given already, and if
- # the feature is enabled
- result = self._find_signature()
- if result is not None:
- self.args, self.retann = result
- sig = super().format_signature(**kwargs) # type: ignore[misc]
- if self._signatures:
- return '\n'.join((sig, *self._signatures))
- else:
- return sig
-
-
-class DocstringStripSignatureMixin(DocstringSignatureMixin):
- """Mixin for AttributeDocumenter to provide the
- feature of stripping any function signature from the docstring.
- """
-
- def format_signature(self, **kwargs: Any) -> str:
- if self.args is None and self.config.autodoc_docstring_signature: # type: ignore[attr-defined]
- # only act if a signature is not explicitly given already, and if
- # the feature is enabled
- result = self._find_signature()
- if result is not None:
-                # Discarding _args is the only difference from
-                # DocstringSignatureMixin.format_signature.
-                # Documenter.format_signature uses the self.args value to format.
- _args, self.retann = result
- return super().format_signature(**kwargs)
-
-
-class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type: ignore[misc]
- """Specialized Documenter subclass for functions."""
-
- objtype = 'function'
- member_order = 30
-
- @classmethod
- def can_document_member(
- cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
- ) -> bool:
- # supports functions, builtins and bound methods exported at the module level
- return (
- inspect.isfunction(member)
- or inspect.isbuiltin(member)
- or (inspect.isroutine(member) and isinstance(parent, ModuleDocumenter))
- )
-
- def format_args(self, **kwargs: Any) -> str:
- if self.config.autodoc_typehints in {'none', 'description'}:
- kwargs.setdefault('show_annotation', False)
- if self.config.autodoc_typehints_format == 'short':
- kwargs.setdefault('unqualified_typehints', True)
- if self.config.python_display_short_literal_types:
- kwargs.setdefault('short_literals', True)
-
- try:
- self._events.emit('autodoc-before-process-signature', self.object, False)
- sig = inspect.signature(
- self.object, type_aliases=self.config.autodoc_type_aliases
- )
- args = stringify_signature(sig, **kwargs)
- except TypeError as exc:
- logger.warning(
- __('Failed to get a function signature for %s: %s'), self.fullname, exc
- )
- return ''
- except ValueError:
- args = ''
-
- if self.config.strip_signature_backslash:
- # escape backslashes for reST
- args = args.replace('\\', '\\\\')
- return args
-
- def document_members(self, all_members: bool = False) -> None:
- pass
-
- def add_directive_header(self, sig: str) -> None:
- sourcename = self.get_sourcename()
- super().add_directive_header(sig)
-
- is_coro = inspect.iscoroutinefunction(self.object)
- is_acoro = inspect.isasyncgenfunction(self.object)
- if is_coro or is_acoro:
- self.add_line(' :async:', sourcename)
-
- def format_signature(self, **kwargs: Any) -> str:
- if self.config.autodoc_typehints_format == 'short':
- kwargs.setdefault('unqualified_typehints', True)
- if self.config.python_display_short_literal_types:
- kwargs.setdefault('short_literals', True)
-
- sigs = []
- if (
- self.analyzer
- and '.'.join(self.objpath) in self.analyzer.overloads
- and self.config.autodoc_typehints != 'none'
- ):
- # Use signatures for overloaded functions instead of the implementation function.
- overloaded = True
- else:
- overloaded = False
- sig = super().format_signature(**kwargs)
- sigs.append(sig)
-
- if inspect.is_singledispatch_function(self.object):
- # append signature of singledispatch'ed functions
- for typ, func in self.object.registry.items():
- if typ is object:
- pass # default implementation. skipped.
- else:
- dispatchfunc = self.annotate_to_first_argument(func, typ)
- if dispatchfunc:
- documenter = FunctionDocumenter(self.directive, '')
- documenter.object = dispatchfunc
- documenter.objpath = ['']
- sigs.append(documenter.format_signature())
- if overloaded and self.analyzer is not None:
- actual = inspect.signature(
- self.object, type_aliases=self.config.autodoc_type_aliases
- )
- __globals__ = safe_getattr(self.object, '__globals__', {})
- for overload in self.analyzer.overloads['.'.join(self.objpath)]:
- overload = self.merge_default_value(actual, overload)
- overload = evaluate_signature(
- overload, __globals__, self.config.autodoc_type_aliases
- )
-
- sig = stringify_signature(overload, **kwargs)
- sigs.append(sig)
-
- return '\n'.join(sigs)
-
- def merge_default_value(self, actual: Signature, overload: Signature) -> Signature:
- """Merge default values of actual implementation to the overload variants."""
- parameters = list(overload.parameters.values())
- for i, param in enumerate(parameters):
- actual_param = actual.parameters.get(param.name)
- if actual_param and param.default == '...':
- parameters[i] = param.replace(default=actual_param.default)
-
- return overload.replace(parameters=parameters)
-
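-    # Editorial note: an illustrative input (not from the original source) for
-    # merge_default_value() above; overload stubs may write `...` as a default,
-    # and the real default is copied in from the implementation:
-    #
-    #     from typing import overload
-    #
-    #     @overload
-    #     def pad(text: str, width: int = ...) -> str: ...
-    #     @overload
-    #     def pad(text: bytes, width: int = ...) -> bytes: ...
-    #     def pad(text, width=8):
-    #         return text.ljust(width)
-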
- def annotate_to_first_argument(
- self, func: Callable[..., Any], typ: type
- ) -> Callable[..., Any] | None:
- """Annotate type hint to the first argument of function if needed."""
- try:
- sig = inspect.signature(func, type_aliases=self.config.autodoc_type_aliases)
- except TypeError as exc:
- logger.warning(
- __('Failed to get a function signature for %s: %s'), self.fullname, exc
- )
- return None
- except ValueError:
- return None
-
- if len(sig.parameters) == 0:
- return None
-
- def dummy(): # type: ignore[no-untyped-def] # NoQA: ANN202
- pass
-
- params = list(sig.parameters.values())
- if params[0].annotation is Parameter.empty:
- params[0] = params[0].replace(annotation=typ)
- try:
- dummy.__signature__ = sig.replace(parameters=params) # type: ignore[attr-defined]
- return dummy
- except (AttributeError, TypeError):
- # failed to update signature (ex. built-in or extension types)
- return None
-
- return func
-
-
-class DecoratorDocumenter(FunctionDocumenter):
- """Specialized Documenter subclass for decorator functions."""
-
- objtype = 'decorator'
-
- # must be lower than FunctionDocumenter
- priority = -1
-
- def format_args(self, **kwargs: Any) -> str:
- args = super().format_args(**kwargs)
- if ',' in args:
- return args
- else:
- return ''
-
-
-# Types with confusing metaclass signatures that are best not shown.
-# These are listed by name, rather than storing the objects themselves, to avoid
-# needing to import the modules.
-_METACLASS_CALL_BLACKLIST = frozenset({
- 'enum.EnumType.__call__',
-})
-
-
-# Types whose __new__ signature is a pass-through.
-_CLASS_NEW_BLACKLIST = frozenset({
- 'typing.Generic.__new__',
-})
-
-
-class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type: ignore[misc]
- """Specialized Documenter subclass for classes."""
-
- objtype = 'class'
- member_order = 20
- option_spec: ClassVar[OptionSpec] = {
- 'members': members_option,
- 'undoc-members': bool_option,
- 'no-index': bool_option,
- 'no-index-entry': bool_option,
- 'inherited-members': inherited_members_option,
- 'show-inheritance': bool_option,
- 'member-order': member_order_option,
- 'exclude-members': exclude_members_option,
- 'private-members': members_option,
- 'special-members': members_option,
- 'class-doc-from': class_doc_from_option,
- 'noindex': bool_option,
- }
-
- # Must be higher than FunctionDocumenter, ClassDocumenter, and
- # AttributeDocumenter as NewType can be an attribute and is a class
- # after Python 3.10.
- priority = 15
-
- _signature_class: Any = None
- _signature_method_name: str = ''
-
- def __init__(self, *args: Any) -> None:
- super().__init__(*args)
-
- if self.config.autodoc_class_signature == 'separated':
- self.options = self.options.copy()
-
- # show __init__() method
- if self.options.special_members is None:
- self.options['special-members'] = ['__new__', '__init__']
- else:
- self.options.special_members.append('__new__')
- self.options.special_members.append('__init__')
-
- merge_members_option(self.options)
-
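-    # Editorial note: the 'separated' behaviour handled in __init__ above is
-    # driven by a conf.py setting; a minimal sketch (not from the original
-    # source):
-    #
-    #     autodoc_class_signature = 'separated'  # default: 'mixed'
-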
- @classmethod
- def can_document_member(
- cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
- ) -> bool:
- return isinstance(member, type) or (
- isattr and isinstance(member, NewType | TypeVar)
- )
-
- def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror)
- # if the class is documented under another name, document it
- # as data/attribute
- if ret:
- if hasattr(self.object, '__name__'):
- self.doc_as_attr = self.objpath[-1] != self.object.__name__
- else:
- self.doc_as_attr = True
- if isinstance(self.object, NewType | TypeVar):
- modname = getattr(self.object, '__module__', self.modname)
- if modname != self.modname and self.modname.startswith(modname):
- bases = self.modname[len(modname) :].strip('.').split('.')
- self.objpath = bases + self.objpath
- self.modname = modname
- return ret
-
- def _get_signature(self) -> tuple[Any | None, str | None, Signature | None]:
- if isinstance(self.object, NewType | TypeVar):
- # Suppress signature
- return None, None, None
-
- def get_user_defined_function_or_method(obj: Any, attr: str) -> Any:
- """Get the `attr` function or method from `obj`, if it is user-defined."""
- if inspect.is_builtin_class_method(obj, attr):
- return None
- attr = self.get_attr(obj, attr, None)
- if not (inspect.ismethod(attr) or inspect.isfunction(attr)):
- return None
- return attr
-
- # This sequence is copied from inspect._signature_from_callable.
- # ValueError means that no signature could be found, so we keep going.
-
- # First, we check if obj has a __signature__ attribute
- if hasattr(self.object, '__signature__'):
- object_sig = self.object.__signature__
- if isinstance(object_sig, Signature):
- return None, None, object_sig
- if sys.version_info[:2] in {(3, 12), (3, 13)} and callable(object_sig):
-                # Support for enum.Enum.__signature__ in Python 3.12 and 3.13
- if isinstance(object_sig_str := object_sig(), str):
- return None, None, inspect.signature_from_str(object_sig_str)
-
- # Next, let's see if it has an overloaded __call__ defined
- # in its metaclass
- call = get_user_defined_function_or_method(type(self.object), '__call__')
-
- if call is not None:
- if f'{call.__module__}.{call.__qualname__}' in _METACLASS_CALL_BLACKLIST:
- call = None
-
- if call is not None:
- self._events.emit('autodoc-before-process-signature', call, True)
- try:
- sig = inspect.signature(
- call,
- bound_method=True,
- type_aliases=self.config.autodoc_type_aliases,
- )
- return type(self.object), '__call__', sig
- except ValueError:
- pass
-
- # Now we check if the 'obj' class has a '__new__' method
- new = get_user_defined_function_or_method(self.object, '__new__')
-
- if new is not None:
- if f'{new.__module__}.{new.__qualname__}' in _CLASS_NEW_BLACKLIST:
- new = None
-
- if new is not None:
- self._events.emit('autodoc-before-process-signature', new, True)
- try:
- sig = inspect.signature(
- new,
- bound_method=True,
- type_aliases=self.config.autodoc_type_aliases,
- )
- return self.object, '__new__', sig
- except ValueError:
- pass
-
- # Finally, we should have at least __init__ implemented
- init = get_user_defined_function_or_method(self.object, '__init__')
- if init is not None:
- self._events.emit('autodoc-before-process-signature', init, True)
- try:
- sig = inspect.signature(
- init,
- bound_method=True,
- type_aliases=self.config.autodoc_type_aliases,
- )
- return self.object, '__init__', sig
- except ValueError:
- pass
-
- # None of the attributes are user-defined, so fall back to let inspect
- # handle it.
- # We don't know the exact method that inspect.signature will read
- # the signature from, so just pass the object itself to our hook.
- self._events.emit('autodoc-before-process-signature', self.object, False)
- try:
- sig = inspect.signature(
- self.object,
- bound_method=False,
- type_aliases=self.config.autodoc_type_aliases,
- )
- return None, None, sig
- except ValueError:
- pass
-
- # Still no signature: happens e.g. for old-style classes
- # with __init__ in C and no `__text_signature__`.
- return None, None, None
-
- def format_args(self, **kwargs: Any) -> str:
- if self.config.autodoc_typehints in {'none', 'description'}:
- kwargs.setdefault('show_annotation', False)
- if self.config.autodoc_typehints_format == 'short':
- kwargs.setdefault('unqualified_typehints', True)
- if self.config.python_display_short_literal_types:
- kwargs.setdefault('short_literals', True)
-
- try:
- self._signature_class, _signature_method_name, sig = self._get_signature()
- except TypeError as exc:
- # __signature__ attribute contained junk
- logger.warning(
- __('Failed to get a constructor signature for %s: %s'),
- self.fullname,
- exc,
- )
- return ''
- self._signature_method_name = _signature_method_name or ''
-
- if sig is None:
- return ''
-
- return stringify_signature(sig, show_return_annotation=False, **kwargs)
-
- def _find_signature(self) -> tuple[str | None, str | None] | None:
- result = super()._find_signature()
- if result is not None:
- # Strip a return value from signature of constructor in docstring (first entry)
- result = (result[0], None)
-
- for i, sig in enumerate(self._signatures):
- if sig.endswith(' -> None'):
- # Strip a return value from signatures of constructor in docstring (subsequent
- # entries)
- self._signatures[i] = sig[:-8]
-
- return result
-
- def format_signature(self, **kwargs: Any) -> str:
- if self.doc_as_attr:
- return ''
- if self.config.autodoc_class_signature == 'separated':
- # do not show signatures
- return ''
-
- if self.config.autodoc_typehints_format == 'short':
- kwargs.setdefault('unqualified_typehints', True)
- if self.config.python_display_short_literal_types:
- kwargs.setdefault('short_literals', True)
-
- sig = super().format_signature()
- sigs = []
-
- overloads = self.get_overloaded_signatures()
- if overloads and self.config.autodoc_typehints != 'none':
- # Use signatures for overloaded methods instead of the implementation method.
- method = safe_getattr(
- self._signature_class, self._signature_method_name, None
- )
- __globals__ = safe_getattr(method, '__globals__', {})
- for overload in overloads:
- overload = evaluate_signature(
- overload, __globals__, self.config.autodoc_type_aliases
- )
-
- parameters = list(overload.parameters.values())
- overload = overload.replace(
- parameters=parameters[1:], return_annotation=Parameter.empty
- )
- sig = stringify_signature(overload, **kwargs)
- sigs.append(sig)
- else:
- sigs.append(sig)
-
- return '\n'.join(sigs)
-
- def get_overloaded_signatures(self) -> list[Signature]:
- if self._signature_class and self._signature_method_name:
- for cls in self._signature_class.__mro__:
- try:
- analyzer = ModuleAnalyzer.for_module(cls.__module__)
- analyzer.analyze()
- qualname = f'{cls.__qualname__}.{self._signature_method_name}'
- if qualname in analyzer.overloads:
- return analyzer.overloads.get(qualname, [])
- elif qualname in analyzer.tagorder:
- # the constructor is defined in the class, but not overridden.
- return []
- except PycodeError:
- pass
-
- return []
-
- def get_canonical_fullname(self) -> str | None:
- __modname__ = safe_getattr(self.object, '__module__', self.modname)
- __qualname__ = safe_getattr(self.object, '__qualname__', None)
- if __qualname__ is None:
- __qualname__ = safe_getattr(self.object, '__name__', None)
-        if __qualname__ and '<locals>' in __qualname__:
-            # No valid qualname can be found if the object is defined
-            # inside a function (its __qualname__ contains '<locals>')
-            __qualname__ = None
-
- if __modname__ and __qualname__:
- return f'{__modname__}.{__qualname__}'
- else:
- return None
-
- def add_directive_header(self, sig: str) -> None:
- sourcename = self.get_sourcename()
-
- if self.doc_as_attr:
- self.directivetype = 'attribute'
- super().add_directive_header(sig)
-
- if isinstance(self.object, NewType | TypeVar):
- return
-
- if self.analyzer and '.'.join(self.objpath) in self.analyzer.finals:
- self.add_line(' :final:', sourcename)
-
- canonical_fullname = self.get_canonical_fullname()
- if (
- not self.doc_as_attr
- and not isinstance(self.object, NewType)
- and canonical_fullname
- and self.fullname != canonical_fullname
- ):
- self.add_line(' :canonical: %s' % canonical_fullname, sourcename)
-
- # add inheritance info, if wanted
- if not self.doc_as_attr and self.options.show_inheritance:
- if inspect.getorigbases(self.object):
- # A subclass of generic types
- # refs: PEP-560
- bases = list(self.object.__orig_bases__)
- elif hasattr(self.object, '__bases__') and len(self.object.__bases__):
- # A normal class
- bases = list(self.object.__bases__)
- else:
- bases = []
-
- self._events.emit(
- 'autodoc-process-bases', self.fullname, self.object, self.options, bases
- )
-
- mode = _get_render_mode(self.config.autodoc_typehints_format)
- base_classes = [restify(cls, mode=mode) for cls in bases]
-
- sourcename = self.get_sourcename()
- self.add_line('', sourcename)
- self.add_line(' ' + _('Bases: %s') % ', '.join(base_classes), sourcename)
-
- def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
- members = get_class_members(
- self.object,
- self.objpath,
- self.get_attr,
- self.config.autodoc_inherit_docstrings,
- )
- if not want_all:
- if not self.options.members:
- return False, []
- # specific members given
- selected = []
- for name in self.options.members:
- if name in members:
- selected.append(members[name])
- else:
- logger.warning(
- __('missing attribute %s in object %s'),
- name,
- self.fullname,
- type='autodoc',
- )
- return False, selected
- elif self.options.inherited_members:
- return False, list(members.values())
- else:
- return False, [m for m in members.values() if m.class_ == self.object]
-
- def get_doc(self) -> list[list[str]] | None:
- if isinstance(self.object, TypeVar):
- if self.object.__doc__ == TypeVar.__doc__:
- return []
- if self.doc_as_attr:
- # Don't show the docstring of the class when it is an alias.
- if self.get_variable_comment():
- return []
- else:
- return None
-
- lines = getattr(self, '_new_docstrings', None)
- if lines is not None:
- return lines
-
- classdoc_from = self.options.get(
- 'class-doc-from', self.config.autoclass_content
- )
-
- docstrings = []
- attrdocstring = getdoc(self.object, self.get_attr)
- if attrdocstring:
- docstrings.append(attrdocstring)
-
- # for classes, what the "docstring" is can be controlled via a
- # config value; the default is only the class docstring
- if classdoc_from in {'both', 'init'}:
- __init__ = self.get_attr(self.object, '__init__', None)
- initdocstring = getdoc(
- __init__,
- self.get_attr,
- self.config.autodoc_inherit_docstrings,
- self.object,
- '__init__',
- )
- # for new-style classes, no __init__ means default __init__
- if initdocstring is not None and (
- initdocstring == object.__init__.__doc__ # for pypy
- or initdocstring.strip() == object.__init__.__doc__ # for !pypy
- ):
- initdocstring = None
- if not initdocstring:
- # try __new__
- __new__ = self.get_attr(self.object, '__new__', None)
- initdocstring = getdoc(
- __new__,
- self.get_attr,
- self.config.autodoc_inherit_docstrings,
- self.object,
- '__new__',
- )
- # for new-style classes, no __new__ means default __new__
- if initdocstring is not None and (
- initdocstring == object.__new__.__doc__ # for pypy
- or initdocstring.strip() == object.__new__.__doc__ # for !pypy
- ):
- initdocstring = None
- if initdocstring:
- if classdoc_from == 'init':
- docstrings = [initdocstring]
- else:
- docstrings.append(initdocstring)
-
- tab_width = self.directive.state.document.settings.tab_width
- return [prepare_docstring(docstring, tab_width) for docstring in docstrings]
-
- def get_variable_comment(self) -> list[str] | None:
- try:
- key = ('', '.'.join(self.objpath))
- if self.doc_as_attr:
- analyzer = ModuleAnalyzer.for_module(self.modname)
- else:
- analyzer = ModuleAnalyzer.for_module(self.get_real_modname())
- analyzer.analyze()
- return list(analyzer.attr_docs.get(key, []))
- except PycodeError:
- return None
-
- def add_content(self, more_content: StringList | None) -> None:
- mode = _get_render_mode(self.config.autodoc_typehints_format)
- short_literals = self.config.python_display_short_literal_types
-
- if isinstance(self.object, NewType):
- supertype = restify(self.object.__supertype__, mode=mode)
-
- more_content = StringList([_('alias of %s') % supertype, ''], source='')
- if isinstance(self.object, TypeVar):
- attrs = [repr(self.object.__name__)]
- attrs.extend(
- stringify_annotation(constraint, mode, short_literals=short_literals)
- for constraint in self.object.__constraints__
- )
- if self.object.__bound__:
- bound = restify(self.object.__bound__, mode=mode)
- attrs.append(r'bound=\ ' + bound)
- if self.object.__covariant__:
- attrs.append('covariant=True')
- if self.object.__contravariant__:
- attrs.append('contravariant=True')
-
- more_content = StringList(
- [_('alias of TypeVar(%s)') % ', '.join(attrs), ''], source=''
- )
- if self.doc_as_attr and self.modname != self.get_real_modname():
- try:
-                # override the analyzer to obtain the doc comment around its definition
- self.analyzer = ModuleAnalyzer.for_module(self.modname)
- self.analyzer.analyze()
- except PycodeError:
- pass
-
- if self.doc_as_attr and not self.get_variable_comment():
- try:
- alias = restify(self.object, mode=mode)
- more_content = StringList([_('alias of %s') % alias], source='')
- except AttributeError:
- pass # Invalid class object is passed.
-
- super().add_content(more_content)
-
- def document_members(self, all_members: bool = False) -> None:
- if self.doc_as_attr:
- return
- super().document_members(all_members)
-
- def generate(
- self,
- more_content: StringList | None = None,
- real_modname: str | None = None,
- check_module: bool = False,
- all_members: bool = False,
- ) -> None:
- # Do not pass real_modname and use the name from the __module__
- # attribute of the class.
- # If a class gets imported into the module real_modname
- # the analyzer won't find the source of the class, if
- # it looks in real_modname.
- return super().generate(
- more_content=more_content,
- check_module=check_module,
- all_members=all_members,
- )
-
-
-class ExceptionDocumenter(ClassDocumenter):
- """Specialized ClassDocumenter subclass for exceptions."""
-
- objtype = 'exception'
- member_order = 10
-
- # needs a higher priority than ClassDocumenter
- priority = ClassDocumenter.priority + 5
-
- @classmethod
- def can_document_member(
- cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
- ) -> bool:
- try:
- return isinstance(member, type) and issubclass(member, BaseException)
- except TypeError as exc:
- # It's possible for a member to be considered a type, but fail
- # issubclass checks due to not being a class. For example:
- # https://github.com/sphinx-doc/sphinx/issues/11654#issuecomment-1696790436
- msg = (
- f'{cls.__name__} failed to discern if member {member} with'
- f' membername {membername} is a BaseException subclass.'
- )
- raise ValueError(msg) from exc
-
-
-class DataDocumenterMixinBase:
- # define types of instance variables
- config: Config
- env: BuildEnvironment
- modname: str
- parent: Any
- object: Any
- objpath: list[str]
-
- def should_suppress_directive_header(self) -> bool:
- """Check directive header should be suppressed."""
- return False
-
- def should_suppress_value_header(self) -> bool:
- """Check :value: header should be suppressed."""
- return False
-
- def update_content(self, more_content: StringList) -> None:
- """Update docstring, for example with TypeVar variance."""
- pass
-
-
-class GenericAliasMixin(DataDocumenterMixinBase):
- """Mixin for DataDocumenter and AttributeDocumenter to provide the feature for
- supporting GenericAliases.
- """
-
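-    # Editorial note: an illustrative target (not from the original source)
-    # for this mixin; documenting a module-level alias such as the following
-    # renders "alias of list[int]" instead of a value:
-    #
-    #     IntVector = list[int]
-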
- def should_suppress_directive_header(self) -> bool:
- return (
- inspect.isgenericalias(self.object)
- or super().should_suppress_directive_header()
- )
-
- def update_content(self, more_content: StringList) -> None:
- if inspect.isgenericalias(self.object):
- mode = _get_render_mode(self.config.autodoc_typehints_format)
- alias = restify(self.object, mode=mode)
-
- more_content.append(_('alias of %s') % alias, '')
- more_content.append('', '')
-
- super().update_content(more_content)
-
-
-class UninitializedGlobalVariableMixin(DataDocumenterMixinBase):
- """Mixin for DataDocumenter to provide the feature for supporting uninitialized
- (type annotation only) global variables.
- """
-
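-    # Editorial note: an illustrative annotation-only global (not from the
-    # original source) that this mixin makes documentable even though the name
-    # is never assigned at runtime (PEP 526):
-    #
-    #     #: Maximum number of worker threads.
-    #     MAX_WORKERS: int
-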
- def import_object(self, raiseerror: bool = False) -> bool:
- try:
- return super().import_object(raiseerror=True) # type: ignore[misc]
- except ImportError as exc:
- # annotation only instance variable (PEP-526)
- try:
- with mock(self.config.autodoc_mock_imports):
- parent = import_module(self.modname)
- annotations = get_type_hints(
- parent,
- None,
- self.config.autodoc_type_aliases,
- include_extras=True,
- )
- if self.objpath[-1] in annotations:
- self.object = UNINITIALIZED_ATTR
- self.parent = parent
- return True
- except ImportError:
- pass
-
- if raiseerror:
- raise
- logger.warning(exc.args[0], type='autodoc', subtype='import_object')
- self.env.note_reread()
- return False
-
- def should_suppress_value_header(self) -> bool:
- return (
- self.object is UNINITIALIZED_ATTR or super().should_suppress_value_header()
- )
-
- def get_doc(self) -> list[list[str]] | None:
- if self.object is UNINITIALIZED_ATTR:
- return []
- else:
- return super().get_doc() # type: ignore[misc]
-
-
-class DataDocumenter(
- GenericAliasMixin, UninitializedGlobalVariableMixin, ModuleLevelDocumenter
-):
- """Specialized Documenter subclass for data items."""
-
- objtype = 'data'
- member_order = 40
- priority = -10
- option_spec: ClassVar[OptionSpec] = dict(ModuleLevelDocumenter.option_spec)
- option_spec['annotation'] = annotation_option
- option_spec['no-value'] = bool_option
-
- @classmethod
- def can_document_member(
- cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
- ) -> bool:
- return isinstance(parent, ModuleDocumenter) and isattr
-
- def update_annotations(self, parent: Any) -> None:
- """Update __annotations__ to support type_comment and so on."""
- annotations = dict(inspect.getannotations(parent))
- parent.__annotations__ = annotations
-
- try:
- analyzer = ModuleAnalyzer.for_module(self.modname)
- analyzer.analyze()
- for (classname, attrname), annotation in analyzer.annotations.items():
- if not classname and attrname not in annotations:
- annotations[attrname] = annotation
- except PycodeError:
- pass
-
- def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror)
- if self.parent:
- self.update_annotations(self.parent)
-
- return ret
-
- def should_suppress_value_header(self) -> bool:
- if super().should_suppress_value_header():
- return True
- else:
- doc = self.get_doc() or []
- _docstring, metadata = separate_metadata(
- '\n'.join(functools.reduce(operator.iadd, doc, []))
- )
- if 'hide-value' in metadata:
- return True
-
- return False
-
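-    # Editorial note: a minimal sketch (not from the original source) of the
-    # docstring metadata checked above; `:meta hide-value:` suppresses the
-    # :value: line for this datum:
-    #
-    #     #: Connection timeout in seconds.
-    #     #:
-    #     #: :meta hide-value:
-    #     TIMEOUT = 30.0
-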
- def add_directive_header(self, sig: str) -> None:
- super().add_directive_header(sig)
- sourcename = self.get_sourcename()
- if (
- self.options.annotation is SUPPRESS
- or self.should_suppress_directive_header()
- ):
- pass
- elif self.options.annotation:
- self.add_line(' :annotation: %s' % self.options.annotation, sourcename)
- else:
- if self.config.autodoc_typehints != 'none':
- # obtain annotation for this data
- annotations = get_type_hints(
- self.parent,
- None,
- self.config.autodoc_type_aliases,
- include_extras=True,
- )
- if self.objpath[-1] in annotations:
- mode = _get_render_mode(self.config.autodoc_typehints_format)
- short_literals = self.config.python_display_short_literal_types
- objrepr = stringify_annotation(
- annotations.get(self.objpath[-1]),
- mode,
- short_literals=short_literals,
- )
- self.add_line(' :type: ' + objrepr, sourcename)
-
- try:
- if (
- self.options.no_value
- or self.should_suppress_value_header()
- or ismock(self.object)
- ):
- pass
- else:
- objrepr = object_description(self.object)
- self.add_line(' :value: ' + objrepr, sourcename)
- except ValueError:
- pass
-
- def document_members(self, all_members: bool = False) -> None:
- pass
-
- def get_real_modname(self) -> str:
- real_modname = self.get_attr(self.parent or self.object, '__module__', None)
- return real_modname or self.modname
-
- def get_module_comment(self, attrname: str) -> list[str] | None:
- try:
- analyzer = ModuleAnalyzer.for_module(self.modname)
- analyzer.analyze()
- key = ('', attrname)
- if key in analyzer.attr_docs:
- return list(analyzer.attr_docs[key])
- except PycodeError:
- pass
-
- return None
-
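-    # Editorial note: an illustrative doc-comment (not from the original
-    # source) of the kind get_module_comment() above retrieves from the
-    # module analyzer:
-    #
-    #     #: Default encoding used for all I/O.
-    #     DEFAULT_ENCODING = 'utf-8'
-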
- def get_doc(self) -> list[list[str]] | None:
- # Check the variable has a docstring-comment
- comment = self.get_module_comment(self.objpath[-1])
- if comment:
- return [comment]
- else:
- return super().get_doc()
-
- def add_content(self, more_content: StringList | None) -> None:
-        # Disable analyzing the variable comment in Documenter.add_content(),
-        # so that it is controlled from DataDocumenter.add_content()
- self.analyzer = None
-
- if not more_content:
- more_content = StringList()
-
- self.update_content(more_content)
- super().add_content(more_content)
-
-
-class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type: ignore[misc]
- """Specialized Documenter subclass for methods (normal, static and class)."""
-
- objtype = 'method'
- directivetype = 'method'
- member_order = 50
- priority = 1 # must be more than FunctionDocumenter
-
- @classmethod
- def can_document_member(
- cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
- ) -> bool:
- return inspect.isroutine(member) and not isinstance(parent, ModuleDocumenter)
-
- def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror)
- if not ret:
- return ret
-
- # to distinguish classmethod/staticmethod
- obj = self.parent.__dict__.get(self.object_name, self.object)
- if inspect.isstaticmethod(obj, cls=self.parent, name=self.object_name):
- # document static members before regular methods
- self.member_order -= 1 # type: ignore[misc]
- elif inspect.isclassmethod(obj):
- # document class methods before static methods as
- # they usually behave as alternative constructors
- self.member_order -= 2 # type: ignore[misc]
- return ret
-
- def format_args(self, **kwargs: Any) -> str:
- if self.config.autodoc_typehints in {'none', 'description'}:
- kwargs.setdefault('show_annotation', False)
- if self.config.autodoc_typehints_format == 'short':
- kwargs.setdefault('unqualified_typehints', True)
- if self.config.python_display_short_literal_types:
- kwargs.setdefault('short_literals', True)
-
- try:
- if self.object == object.__init__ and self.parent != object: # NoQA: E721
-                # Classes without their own __init__() method are shown with no arguments.
-                #
-                # Note: The signature of object.__init__() is (self, /, *args, **kwargs),
-                # but showing it would confuse users.
- args = '()'
- else:
- if inspect.isstaticmethod(
- self.object, cls=self.parent, name=self.object_name
- ):
- self._events.emit(
- 'autodoc-before-process-signature', self.object, False
- )
- sig = inspect.signature(
- self.object,
- bound_method=False,
- type_aliases=self.config.autodoc_type_aliases,
- )
- else:
- self._events.emit(
- 'autodoc-before-process-signature', self.object, True
- )
- sig = inspect.signature(
- self.object,
- bound_method=True,
- type_aliases=self.config.autodoc_type_aliases,
- )
- args = stringify_signature(sig, **kwargs)
- except TypeError as exc:
- logger.warning(
- __('Failed to get a method signature for %s: %s'), self.fullname, exc
- )
- return ''
- except ValueError:
- args = ''
-
- if self.config.strip_signature_backslash:
- # escape backslashes for reST
- args = args.replace('\\', '\\\\')
- return args
-
- def add_directive_header(self, sig: str) -> None:
- super().add_directive_header(sig)
-
- sourcename = self.get_sourcename()
- obj = self.parent.__dict__.get(self.object_name, self.object)
- if inspect.isabstractmethod(obj):
- self.add_line(' :abstractmethod:', sourcename)
- if inspect.iscoroutinefunction(obj) or inspect.isasyncgenfunction(obj):
- self.add_line(' :async:', sourcename)
- if (
- inspect.is_classmethod_like(obj)
- or inspect.is_singledispatch_method(obj)
- and inspect.is_classmethod_like(obj.func)
- ):
- self.add_line(' :classmethod:', sourcename)
- if inspect.isstaticmethod(obj, cls=self.parent, name=self.object_name):
- self.add_line(' :staticmethod:', sourcename)
- if self.analyzer and '.'.join(self.objpath) in self.analyzer.finals:
- self.add_line(' :final:', sourcename)
-
- def document_members(self, all_members: bool = False) -> None:
- pass
-
- def format_signature(self, **kwargs: Any) -> str:
- if self.config.autodoc_typehints_format == 'short':
- kwargs.setdefault('unqualified_typehints', True)
- if self.config.python_display_short_literal_types:
- kwargs.setdefault('short_literals', True)
-
- sigs = []
- if (
- self.analyzer
- and '.'.join(self.objpath) in self.analyzer.overloads
- and self.config.autodoc_typehints != 'none'
- ):
- # Use signatures for overloaded methods instead of the implementation method.
- overloaded = True
- else:
- overloaded = False
- sig = super().format_signature(**kwargs)
- sigs.append(sig)
-
- meth = self.parent.__dict__.get(self.objpath[-1])
- if inspect.is_singledispatch_method(meth):
- # append signature of singledispatch'ed functions
- for typ, func in meth.dispatcher.registry.items():
- if typ is object:
- pass # default implementation. skipped.
- else:
- if inspect.isclassmethod(func):
- func = func.__func__
- dispatchmeth = self.annotate_to_first_argument(func, typ)
- if dispatchmeth:
- documenter = MethodDocumenter(self.directive, '')
- documenter.parent = self.parent
- documenter.object = dispatchmeth
- documenter.objpath = ['']
- sigs.append(documenter.format_signature())
- if overloaded and self.analyzer is not None:
- if inspect.isstaticmethod(
- self.object, cls=self.parent, name=self.object_name
- ):
- actual = inspect.signature(
- self.object,
- bound_method=False,
- type_aliases=self.config.autodoc_type_aliases,
- )
- else:
- actual = inspect.signature(
- self.object,
- bound_method=True,
- type_aliases=self.config.autodoc_type_aliases,
- )
-
- __globals__ = safe_getattr(self.object, '__globals__', {})
- for overload in self.analyzer.overloads['.'.join(self.objpath)]:
- overload = self.merge_default_value(actual, overload)
- overload = evaluate_signature(
- overload, __globals__, self.config.autodoc_type_aliases
- )
-
- if not inspect.isstaticmethod(
- self.object, cls=self.parent, name=self.object_name
- ):
- parameters = list(overload.parameters.values())
- overload = overload.replace(parameters=parameters[1:])
- sig = stringify_signature(overload, **kwargs)
- sigs.append(sig)
-
- return '\n'.join(sigs)
-
- def merge_default_value(self, actual: Signature, overload: Signature) -> Signature:
- """Merge default values of actual implementation to the overload variants."""
- parameters = list(overload.parameters.values())
- for i, param in enumerate(parameters):
- actual_param = actual.parameters.get(param.name)
- if actual_param and param.default == '...':
- parameters[i] = param.replace(default=actual_param.default)
-
- return overload.replace(parameters=parameters)
-
- def annotate_to_first_argument(
- self, func: Callable[..., Any], typ: type
- ) -> Callable[..., Any] | None:
- """Annotate type hint to the first argument of function if needed."""
- try:
- sig = inspect.signature(func, type_aliases=self.config.autodoc_type_aliases)
- except TypeError as exc:
- logger.warning(
- __('Failed to get a method signature for %s: %s'), self.fullname, exc
- )
- return None
- except ValueError:
- return None
-
- if len(sig.parameters) == 1:
- return None
-
- def dummy(): # type: ignore[no-untyped-def] # NoQA: ANN202
- pass
-
- params = list(sig.parameters.values())
- if params[1].annotation is Parameter.empty:
- params[1] = params[1].replace(annotation=typ)
- try:
- dummy.__signature__ = sig.replace( # type: ignore[attr-defined]
- parameters=params
- )
- return dummy
- except (AttributeError, TypeError):
-            # failed to update signature (e.g. built-in or extension types)
- return None
-
- return func
-
- def get_doc(self) -> list[list[str]] | None:
- if self._new_docstrings is not None:
- # docstring already returned previously, then modified by
- # `DocstringSignatureMixin`. Just return the previously-computed
- # result, so that we don't lose the processing done by
- # `DocstringSignatureMixin`.
- return self._new_docstrings
- if self.objpath[-1] == '__init__':
- docstring = getdoc(
- self.object,
- self.get_attr,
- self.config.autodoc_inherit_docstrings,
- self.parent,
- self.object_name,
- )
- if docstring is not None and (
- docstring == object.__init__.__doc__ # for pypy
- or docstring.strip() == object.__init__.__doc__ # for !pypy
- ):
- docstring = None
- if docstring:
- tab_width = self.directive.state.document.settings.tab_width
- return [prepare_docstring(docstring, tabsize=tab_width)]
- else:
- return []
- elif self.objpath[-1] == '__new__':
- docstring = getdoc(
- self.object,
- self.get_attr,
- self.config.autodoc_inherit_docstrings,
- self.parent,
- self.object_name,
- )
- if docstring is not None and (
- docstring == object.__new__.__doc__ # for pypy
- or docstring.strip() == object.__new__.__doc__ # for !pypy
- ):
- docstring = None
- if docstring:
- tab_width = self.directive.state.document.settings.tab_width
- return [prepare_docstring(docstring, tabsize=tab_width)]
- else:
- return []
- else:
- return super().get_doc()
-
-
-class NonDataDescriptorMixin(DataDocumenterMixinBase):
-    """Mixin for AttributeDocumenter to provide support for non-data
-    descriptors.
-
-    .. note:: This mix-in must be inherited after other mix-ins. Otherwise, the
-       docstring and the :value: header will be suppressed unexpectedly.
- """
-
- def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror) # type: ignore[misc]
- if ret and not inspect.isattributedescriptor(self.object):
- self.non_data_descriptor = True
- else:
- self.non_data_descriptor = False
-
- return ret
-
- def should_suppress_value_header(self) -> bool:
- return (
- not getattr(self, 'non_data_descriptor', False)
- or super().should_suppress_directive_header()
- )
-
- def get_doc(self) -> list[list[str]] | None:
- if getattr(self, 'non_data_descriptor', False):
-            # the docstring of a non-data descriptor is very probably the wrong
-            # thing to display
- return None
- else:
- return super().get_doc() # type: ignore[misc]
-
-
-class SlotsMixin(DataDocumenterMixinBase):
- """Mixin for AttributeDocumenter to provide the feature for supporting __slots__."""
-
- def isslotsattribute(self) -> bool:
-        """Check whether the subject is an attribute in __slots__."""
- try:
- if parent___slots__ := inspect.getslots(self.parent):
- return self.objpath[-1] in parent___slots__
- else:
- return False
- except (ValueError, TypeError):
- return False
-
- def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror) # type: ignore[misc]
- if self.isslotsattribute():
- self.object = SLOTSATTR
-
- return ret
-
- def should_suppress_value_header(self) -> bool:
- if self.object is SLOTSATTR:
- return True
- else:
- return super().should_suppress_value_header()
-
- def get_doc(self) -> list[list[str]] | None:
- if self.object is SLOTSATTR:
- try:
- parent___slots__ = inspect.getslots(self.parent)
- if parent___slots__ and (
- docstring := parent___slots__.get(self.objpath[-1])
- ):
- docstring = prepare_docstring(docstring)
- return [docstring]
- else:
- return []
- except ValueError as exc:
- logger.warning(
- __('Invalid __slots__ found on %s. Ignored.'),
- (self.parent.__qualname__, exc),
- type='autodoc',
- )
- return []
- else:
- return super().get_doc() # type: ignore[misc]
-
-
-class RuntimeInstanceAttributeMixin(DataDocumenterMixinBase):
-    """Mixin for AttributeDocumenter to provide support for runtime instance
-    attributes (attributes defined in __init__() methods with doc-comments).
-
- Example::
-
- class Foo:
- def __init__(self):
- self.attr = None #: This is a target of this mix-in.
- """
-
- RUNTIME_INSTANCE_ATTRIBUTE = object()
-
- def is_runtime_instance_attribute(self, parent: Any) -> bool:
-        """Check whether the subject is an attribute defined in __init__()."""
- # An instance variable defined in __init__().
- if self.get_attribute_comment(parent, self.objpath[-1]): # type: ignore[attr-defined]
- return True
- return self.is_runtime_instance_attribute_not_commented(parent)
-
- def is_runtime_instance_attribute_not_commented(self, parent: Any) -> bool:
-        """Check whether the subject is an attribute defined in __init__() without a comment."""
- for cls in inspect.getmro(parent):
- try:
- module = safe_getattr(cls, '__module__')
- qualname = safe_getattr(cls, '__qualname__')
-
- analyzer = ModuleAnalyzer.for_module(module)
- analyzer.analyze()
- if qualname and self.objpath:
- key = f'{qualname}.{self.objpath[-1]}'
- if key in analyzer.tagorder:
- return True
- except (AttributeError, PycodeError):
- pass
-
- return False
-
- def import_object(self, raiseerror: bool = False) -> bool:
-        """Check the existence of a runtime instance attribute after failing to
-        import the attribute.
- """
- try:
- return super().import_object(raiseerror=True) # type: ignore[misc]
- except ImportError as exc:
- try:
- with mock(self.config.autodoc_mock_imports):
- ret = import_object(
- self.modname,
- self.objpath[:-1],
- 'class',
- attrgetter=self.get_attr, # type: ignore[attr-defined]
- )
- parent = ret[3]
- if self.is_runtime_instance_attribute(parent):
- self.object = self.RUNTIME_INSTANCE_ATTRIBUTE
- self.parent = parent
- return True
- except ImportError:
- pass
-
- if raiseerror:
- raise
- logger.warning(exc.args[0], type='autodoc', subtype='import_object')
- self.env.note_reread()
- return False
-
- def should_suppress_value_header(self) -> bool:
- return (
- self.object is self.RUNTIME_INSTANCE_ATTRIBUTE
- or super().should_suppress_value_header()
- )
-
- def get_doc(self) -> list[list[str]] | None:
- if (
- self.object is self.RUNTIME_INSTANCE_ATTRIBUTE
- and self.is_runtime_instance_attribute_not_commented(self.parent)
- ):
- return None
- else:
- return super().get_doc() # type: ignore[misc]
-
-
-class UninitializedInstanceAttributeMixin(DataDocumenterMixinBase):
-    """Mixin for AttributeDocumenter to provide support for uninitialized
-    instance attributes (PEP 526-style, annotation-only attributes).
-
- Example::
-
- class Foo:
- attr: int #: This is a target of this mix-in.
- """
-
- def is_uninitialized_instance_attribute(self, parent: Any) -> bool:
-        """Check whether the subject is an annotation-only attribute."""
- annotations = get_type_hints(
- parent, None, self.config.autodoc_type_aliases, include_extras=True
- )
- return self.objpath[-1] in annotations
-
- def import_object(self, raiseerror: bool = False) -> bool:
-        """Check the existence of an uninitialized instance attribute after
-        failing to import the attribute.
- """
- try:
- return super().import_object(raiseerror=True) # type: ignore[misc]
- except ImportError as exc:
- try:
- ret = import_object(
- self.modname,
- self.objpath[:-1],
- 'class',
- attrgetter=self.get_attr, # type: ignore[attr-defined]
- )
- parent = ret[3]
- if self.is_uninitialized_instance_attribute(parent):
- self.object = UNINITIALIZED_ATTR
- self.parent = parent
- return True
- except ImportError:
- pass
-
- if raiseerror:
- raise
- logger.warning(exc.args[0], type='autodoc', subtype='import_object')
- self.env.note_reread()
- return False
-
- def should_suppress_value_header(self) -> bool:
- return (
- self.object is UNINITIALIZED_ATTR or super().should_suppress_value_header()
- )
-
- def get_doc(self) -> list[list[str]] | None:
- if self.object is UNINITIALIZED_ATTR:
- return None
- return super().get_doc() # type: ignore[misc]
-
-
-class AttributeDocumenter( # type: ignore[misc]
- GenericAliasMixin,
- SlotsMixin,
- RuntimeInstanceAttributeMixin,
- UninitializedInstanceAttributeMixin,
- NonDataDescriptorMixin,
- DocstringStripSignatureMixin,
- ClassLevelDocumenter,
-):
- """Specialized Documenter subclass for attributes."""
-
- objtype = 'attribute'
- member_order = 60
- option_spec: ClassVar[OptionSpec] = dict(ModuleLevelDocumenter.option_spec)
- option_spec['annotation'] = annotation_option
- option_spec['no-value'] = bool_option
-
- # must be higher than the MethodDocumenter, else it will recognize
- # some non-data descriptors as methods
- priority = 10
-
- @staticmethod
- def is_function_or_method(obj: Any) -> bool:
- return (
- inspect.isfunction(obj) or inspect.isbuiltin(obj) or inspect.ismethod(obj)
- )
-
- @classmethod
- def can_document_member(
- cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
- ) -> bool:
- if isinstance(parent, ModuleDocumenter):
- return False
- if inspect.isattributedescriptor(member):
- return True
- return not inspect.isroutine(member) and not isinstance(member, type)
-
- def document_members(self, all_members: bool = False) -> None:
- pass
-
- def update_annotations(self, parent: Any) -> None:
- """Update __annotations__ to support type_comment and so on."""
- try:
- annotations = dict(inspect.getannotations(parent))
- parent.__annotations__ = annotations
-
- for cls in inspect.getmro(parent):
- try:
- module = safe_getattr(cls, '__module__')
- qualname = safe_getattr(cls, '__qualname__')
-
- analyzer = ModuleAnalyzer.for_module(module)
- analyzer.analyze()
- anns = analyzer.annotations
- for (classname, attrname), annotation in anns.items():
- if classname == qualname and attrname not in annotations:
- annotations[attrname] = annotation
- except (AttributeError, PycodeError):
- pass
- except (AttributeError, TypeError):
- # Failed to set __annotations__ (built-in, extensions, etc.)
- pass
-
- def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror)
- if inspect.isenumattribute(self.object):
- self.object = self.object.value
- if self.parent:
- self.update_annotations(self.parent)
-
- return ret
-
- def get_real_modname(self) -> str:
- real_modname = self.get_attr(self.parent or self.object, '__module__', None)
- return real_modname or self.modname
-
- def should_suppress_value_header(self) -> bool:
- if super().should_suppress_value_header():
- return True
- else:
- doc = self.get_doc()
- if doc:
- _docstring, metadata = separate_metadata(
- '\n'.join(functools.reduce(operator.iadd, doc, []))
- )
- if 'hide-value' in metadata:
- return True
-
- return False
-
- def add_directive_header(self, sig: str) -> None:
- super().add_directive_header(sig)
- sourcename = self.get_sourcename()
- if (
- self.options.annotation is SUPPRESS
- or self.should_suppress_directive_header()
- ):
- pass
- elif self.options.annotation:
- self.add_line(' :annotation: %s' % self.options.annotation, sourcename)
- else:
- if self.config.autodoc_typehints != 'none':
- # obtain type annotation for this attribute
- annotations = get_type_hints(
- self.parent,
- None,
- self.config.autodoc_type_aliases,
- include_extras=True,
- )
- if self.objpath[-1] in annotations:
- mode = _get_render_mode(self.config.autodoc_typehints_format)
- short_literals = self.config.python_display_short_literal_types
- objrepr = stringify_annotation(
- annotations.get(self.objpath[-1]),
- mode,
- short_literals=short_literals,
- )
- self.add_line(' :type: ' + objrepr, sourcename)
-
- try:
- if (
- self.options.no_value
- or self.should_suppress_value_header()
- or ismock(self.object)
- ):
- pass
- else:
- objrepr = object_description(self.object)
- self.add_line(' :value: ' + objrepr, sourcename)
- except ValueError:
- pass
-
- def get_attribute_comment(self, parent: Any, attrname: str) -> list[str] | None:
- for cls in inspect.getmro(parent):
- try:
- module = safe_getattr(cls, '__module__')
- qualname = safe_getattr(cls, '__qualname__')
-
- analyzer = ModuleAnalyzer.for_module(module)
- analyzer.analyze()
- if qualname and self.objpath:
- key = (qualname, attrname)
- if key in analyzer.attr_docs:
- return list(analyzer.attr_docs[key])
- except (AttributeError, PycodeError):
- pass
-
- return None
-
- def get_doc(self) -> list[list[str]] | None:
-        # Check whether the attribute has a docstring-comment
- comment = self.get_attribute_comment(self.parent, self.objpath[-1])
- if comment:
- return [comment]
-
- try:
-            # Temporarily disable `autodoc_inherit_docstrings` to avoid unexpectedly
-            # obtaining a docstring from the value that the descriptor returns.
- # See: https://github.com/sphinx-doc/sphinx/issues/7805
- orig = self.config.autodoc_inherit_docstrings
- self.config.autodoc_inherit_docstrings = False
- return super().get_doc()
- finally:
- self.config.autodoc_inherit_docstrings = orig
-
- def add_content(self, more_content: StringList | None) -> None:
-        # Disable analyzing the attribute comment in Documenter.add_content(), so
-        # that it is handled by AttributeDocumenter.add_content() instead.
- self.analyzer = None
-
- if more_content is None:
- more_content = StringList()
- self.update_content(more_content)
- super().add_content(more_content)
-
-
-class PropertyDocumenter(DocstringStripSignatureMixin, ClassLevelDocumenter): # type: ignore[misc]
- """Specialized Documenter subclass for properties."""
-
- objtype = 'property'
- member_order = 60
-
- # before AttributeDocumenter
- priority = AttributeDocumenter.priority + 1
-
- @classmethod
- def can_document_member(
- cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
- ) -> bool:
- if isinstance(parent, ClassDocumenter):
- if inspect.isproperty(member):
- return True
- else:
- __dict__ = safe_getattr(parent.object, '__dict__', {})
- obj = __dict__.get(membername)
- return isinstance(obj, classmethod) and inspect.isproperty(obj.__func__)
- else:
- return False
-
- def import_object(self, raiseerror: bool = False) -> bool:
-        """Import the object, additionally detecting properties that are
-        defined via ``classmethod``.
-        """
- ret = super().import_object(raiseerror)
- if ret and not inspect.isproperty(self.object):
- __dict__ = safe_getattr(self.parent, '__dict__', {})
- obj = __dict__.get(self.objpath[-1])
- if isinstance(obj, classmethod) and inspect.isproperty(obj.__func__):
- self.object = obj.__func__
- self.isclassmethod: bool = True
- return True
- else:
- return False
-
- self.isclassmethod = False
- return ret
-
- def format_args(self, **kwargs: Any) -> str:
- func = self._get_property_getter()
- if func is None:
- return ''
-
- # update the annotations of the property getter
- self._events.emit('autodoc-before-process-signature', func, False)
- # correctly format the arguments for a property
- return super().format_args(**kwargs)
-
- def document_members(self, all_members: bool = False) -> None:
- pass
-
- def get_real_modname(self) -> str:
- real_modname = self.get_attr(self.parent or self.object, '__module__', None)
- return real_modname or self.modname
-
- def add_directive_header(self, sig: str) -> None:
- super().add_directive_header(sig)
- sourcename = self.get_sourcename()
- if inspect.isabstractmethod(self.object):
- self.add_line(' :abstractmethod:', sourcename)
- if self.isclassmethod:
- self.add_line(' :classmethod:', sourcename)
-
- func = self._get_property_getter()
- if func is None or self.config.autodoc_typehints == 'none':
- return
-
- try:
- signature = inspect.signature(
- func, type_aliases=self.config.autodoc_type_aliases
- )
- if signature.return_annotation is not Parameter.empty:
- mode = _get_render_mode(self.config.autodoc_typehints_format)
- short_literals = self.config.python_display_short_literal_types
- objrepr = stringify_annotation(
- signature.return_annotation, mode, short_literals=short_literals
- )
- self.add_line(' :type: ' + objrepr, sourcename)
- except TypeError as exc:
- logger.warning(
- __('Failed to get a function signature for %s: %s'), self.fullname, exc
- )
- except ValueError:
- pass
-
- def _get_property_getter(self) -> Callable[..., Any] | None:
- if safe_getattr(self.object, 'fget', None): # property
- return self.object.fget
- if safe_getattr(self.object, 'func', None): # cached_property
- return self.object.func
- return None
-
-
-def autodoc_attrgetter(
- obj: Any, name: str, *defargs: Any, registry: SphinxComponentRegistry
-) -> Any:
-    """Alternative getattr() for types."""
- for typ, func in registry.autodoc_attrgetters.items():
- if isinstance(obj, typ):
- return func(obj, name, *defargs)
-
- return safe_getattr(obj, name, *defargs)
def setup(app: Sphinx) -> ExtensionMetadata:
diff --git a/sphinx/ext/autodoc/_documenters.py b/sphinx/ext/autodoc/_documenters.py
new file mode 100644
index 00000000000..10047a7afb8
--- /dev/null
+++ b/sphinx/ext/autodoc/_documenters.py
@@ -0,0 +1,2929 @@
+from __future__ import annotations
+
+import functools
+import operator
+import re
+import sys
+from inspect import Parameter, Signature
+from typing import TYPE_CHECKING, NewType, TypeVar
+
+from docutils.statemachine import StringList
+
+from sphinx.errors import PycodeError
+from sphinx.ext.autodoc._directive_options import (
+ annotation_option,
+ bool_option,
+ class_doc_from_option,
+ exclude_members_option,
+ identity,
+ inherited_members_option,
+ member_order_option,
+ members_option,
+ merge_members_option,
+)
+from sphinx.ext.autodoc._sentinels import (
+ ALL,
+ SUPPRESS,
+ UNINITIALIZED_ATTR,
+)
+from sphinx.ext.autodoc._sentinels import (
+ INSTANCE_ATTR as INSTANCEATTR,
+)
+from sphinx.ext.autodoc._sentinels import (
+ SLOTS_ATTR as SLOTSATTR,
+)
+from sphinx.ext.autodoc.importer import get_class_members, import_module, import_object
+from sphinx.ext.autodoc.mock import ismock, mock, undecorate
+from sphinx.locale import _, __
+from sphinx.pycode import ModuleAnalyzer
+from sphinx.util import inspect, logging
+from sphinx.util.docstrings import prepare_docstring, separate_metadata
+from sphinx.util.inspect import (
+ evaluate_signature,
+ getdoc,
+ object_description,
+ safe_getattr,
+ stringify_signature,
+)
+from sphinx.util.typing import get_type_hints, restify, stringify_annotation
+
+if TYPE_CHECKING:
+ from collections.abc import Callable, Iterator, Sequence
+ from types import ModuleType
+ from typing import Any, ClassVar, Literal
+
+ from sphinx.config import Config
+ from sphinx.environment import BuildEnvironment, _CurrentDocument
+ from sphinx.events import EventManager
+ from sphinx.ext.autodoc.directive import DocumenterBridge
+ from sphinx.registry import SphinxComponentRegistry
+ from sphinx.util.typing import OptionSpec, _RestifyMode
+
+logger = logging.getLogger('sphinx.ext.autodoc')
+
+#: extended signature RE: with explicit module name separated by ::
+py_ext_sig_re = re.compile(
+ r"""^ ([\w.]+::)? # explicit module name
+ ([\w.]+\.)? # module and/or class name(s)
+ (\w+) \s* # thing name
+ (?: \[\s*(.*?)\s*])? # optional: type parameters list
+ (?: \((.*)\) # optional: arguments
+ (?:\s* -> \s* (.*))? # return annotation
+ )? $ # and nothing more
+ """,
+ re.VERBOSE,
+)
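+# Illustrative examples of what this RE matches (not from the original source):
+#   py_ext_sig_re.match('module.Class.meth(arg) -> bool').groups()
+#   -> (None, 'module.Class.', 'meth', None, 'arg', 'bool')
+#   py_ext_sig_re.match('zipfile::ZipFile.open(name, mode)').groups()
+#   -> ('zipfile::', 'ZipFile.', 'open', None, 'name, mode', None)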
+special_member_re = re.compile(r'^__\S+__$')
+
+
+def _get_render_mode(
+ typehints_format: Literal['fully-qualified', 'short'],
+) -> _RestifyMode:
+ if typehints_format == 'short':
+ return 'smart'
+ return 'fully-qualified-except-typing'
+
+
+class ObjectMember:
+    """A member of an object.
+
+ This is used for the result of `Documenter.get_module_members()` to
+ represent each member of the object.
+ """
+
+ __slots__ = '__name__', 'object', 'docstring', 'class_', 'skipped'
+
+ __name__: str
+ object: Any
+ docstring: str | None
+ class_: Any
+ skipped: bool
+
+ def __init__(
+ self,
+ name: str,
+ obj: Any,
+ *,
+ docstring: str | None = None,
+ class_: Any = None,
+ skipped: bool = False,
+ ) -> None:
+ self.__name__ = name
+ self.object = obj
+ self.docstring = docstring
+ self.class_ = class_
+ self.skipped = skipped
+
+ def __repr__(self) -> str:
+ return (
+ f'ObjectMember('
+ f'name={self.__name__!r}, '
+ f'obj={self.object!r}, '
+ f'docstring={self.docstring!r}, '
+ f'class_={self.class_!r}, '
+ f'skipped={self.skipped!r}'
+ f')'
+ )
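+# Illustrative usage (an assumption, not from the original source):
+#   member = ObjectMember('answer', 42, docstring='The answer.')
+#   member.__name__, member.object, member.skipped  # -> ('answer', 42, False)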
+
+
+class Documenter:
+ """A Documenter knows how to autodocument a single object type. When
+ registered with the AutoDirective, it will be used to document objects
+ of that type when needed by autodoc.
+
+ Its *objtype* attribute selects what auto directive it is assigned to
+ (the directive name is 'auto' + objtype), and what directive it generates
+ by default, though that can be overridden by an attribute called
+ *directivetype*.
+
+ A Documenter has an *option_spec* that works like a docutils directive's;
+    in fact, it is used to parse the options of the auto directive that
+    matches the Documenter.
+ """
+
+ #: name by which the directive is called (auto...) and the default
+ #: generated directive name
+ objtype: ClassVar = 'object'
+ #: indentation by which to indent the directive content
+ content_indent: ClassVar = ' '
+ #: priority if multiple documenters return True from can_document_member
+ priority: ClassVar = 0
+ #: order if autodoc_member_order is set to 'groupwise'
+ member_order: ClassVar = 0
+ #: true if the generated content may contain titles
+ titles_allowed: ClassVar = True
+
+ option_spec: ClassVar[OptionSpec] = {
+ 'no-index': bool_option,
+ 'no-index-entry': bool_option,
+ 'noindex': bool_option,
+ }
+
+ def get_attr(self, obj: Any, name: str, *defargs: Any) -> Any:
+ """getattr() override for types such as Zope interfaces."""
+ return autodoc_attrgetter(obj, name, *defargs, registry=self.env._registry)
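+    # Extensions can register custom getters consulted by this method; a minimal
+    # sketch (``MyProxy`` and ``my_getattr`` are hypothetical):
+    #   def setup(app):
+    #       app.add_autodoc_attrgetter(MyProxy, my_getattr)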
+
+ @classmethod
+ def can_document_member(
+ cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
+ ) -> bool:
+ """Called to see if a member can be documented by this Documenter."""
+ msg = 'must be implemented in subclasses'
+ raise NotImplementedError(msg)
+
+ def __init__(
+ self, directive: DocumenterBridge, name: str, indent: str = ''
+ ) -> None:
+ self.directive = directive
+ self.config: Config = directive.env.config
+ self.env: BuildEnvironment = directive.env
+ self._current_document: _CurrentDocument = directive.env.current_document
+ self._events: EventManager = directive.env.events
+ self.options = directive.genopt
+ self.name = name
+ self.indent = indent
+ # the module and object path within the module, and the fully
+ # qualified name (all set after resolve_name succeeds)
+ self.modname: str = ''
+ self.module: ModuleType | None = None
+ self.objpath: list[str] = []
+ self.fullname = ''
+ # extra signature items (arguments and return annotation,
+ # also set after resolve_name succeeds)
+ self.args: str | None = None
+ self.retann: str = ''
+ # the object to document (set after import_object succeeds)
+ self.object: Any = None
+ self.object_name = ''
+ # the parent/owner of the object to document
+ self.parent: Any = None
+ # the module analyzer to get at attribute docs, or None
+ self.analyzer: ModuleAnalyzer | None = None
+
+ @property
+ def documenters(self) -> dict[str, type[Documenter]]:
+ """Returns registered Documenter classes"""
+ return self.env._registry.documenters
+
+ def add_line(self, line: str, source: str, *lineno: int) -> None:
+ """Append one line of generated reST to the output."""
+ if line.strip(): # not a blank line
+ self.directive.result.append(self.indent + line, source, *lineno)
+ else:
+ self.directive.result.append('', source, *lineno)
+
+ def resolve_name(
+ self, modname: str | None, parents: Any, path: str, base: str
+ ) -> tuple[str | None, list[str]]:
+ """Resolve the module and name of the object to document given by the
+ arguments and the current module/class.
+
+ Must return a pair of the module name and a chain of attributes; for
+ example, it would return ``('zipfile', ['ZipFile', 'open'])`` for the
+ ``zipfile.ZipFile.open`` method.
+ """
+ msg = 'must be implemented in subclasses'
+ raise NotImplementedError(msg)
+
+ def parse_name(self) -> bool:
+ """Determine what module to import and what attribute to document.
+
+ Returns True and sets *self.modname*, *self.objpath*, *self.fullname*,
+ *self.args* and *self.retann* if parsing and resolving was successful.
+ """
+ # first, parse the definition -- auto directives for classes and
+ # functions can contain a signature which is then used instead of
+ # an autogenerated one
+ matched = py_ext_sig_re.match(self.name)
+ if matched is None:
+ logger.warning(
+ __('invalid signature for auto%s (%r)'),
+ self.objtype,
+ self.name,
+ type='autodoc',
+ )
+ return False
+ explicit_modname, path, base, _tp_list, args, retann = matched.groups()
+
+ # support explicit module and class name separation via ::
+ if explicit_modname is not None:
+ modname = explicit_modname[:-2]
+ parents = path.rstrip('.').split('.') if path else []
+ else:
+ modname = None
+ parents = []
+
+ with mock(self.config.autodoc_mock_imports):
+ modname, self.objpath = self.resolve_name(modname, parents, path, base)
+
+ if not modname:
+ return False
+
+ self.modname = modname
+ self.args = args
+ self.retann = retann
+ self.fullname = '.'.join((self.modname or '', *self.objpath))
+ return True
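+    # For example (illustrative): parsing the name 'zipfile::ZipFile.open' sets
+    # modname='zipfile', objpath=['ZipFile', 'open'] and
+    # fullname='zipfile.ZipFile.open'.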
+
+ def import_object(self, raiseerror: bool = False) -> bool:
+ """Import the object given by *self.modname* and *self.objpath* and set
+ it as *self.object*.
+
+ Returns True if successful, False if an error occurred.
+ """
+ with mock(self.config.autodoc_mock_imports):
+ try:
+ ret = import_object(
+ self.modname, self.objpath, self.objtype, attrgetter=self.get_attr
+ )
+ self.module, self.parent, self.object_name, self.object = ret
+ if ismock(self.object):
+ self.object = undecorate(self.object)
+ return True
+ except ImportError as exc:
+ if raiseerror:
+ raise
+ logger.warning(exc.args[0], type='autodoc', subtype='import_object')
+ self.env.note_reread()
+ return False
+
+ def get_real_modname(self) -> str:
+ """Get the real module name of an object to document.
+
+ It can differ from the name of the module through which the object was
+ imported.
+ """
+ return self.get_attr(self.object, '__module__', None) or self.modname
+
+ def check_module(self) -> bool:
+ """Check if *self.object* is really defined in the module given by
+ *self.modname*.
+ """
+ if self.options.imported_members:
+ return True
+
+ subject = inspect.unpartial(self.object)
+ modname = self.get_attr(subject, '__module__', None)
+ return not modname or modname == self.modname
+
+ def format_args(self, **kwargs: Any) -> str:
+ """Format the argument signature of *self.object*.
+
+        Should return an empty string if the object does not have a signature.
+ """
+ return ''
+
+ def format_name(self) -> str:
+ """Format the name of *self.object*.
+
+ This normally should be something that can be parsed by the generated
+ directive, but doesn't need to be (Sphinx will display it unparsed
+ then).
+ """
+ # normally the name doesn't contain the module (except for module
+ # directives of course)
+ return '.'.join(self.objpath) or self.modname
+
+ def _call_format_args(self, **kwargs: Any) -> str:
+ if kwargs:
+ try:
+ return self.format_args(**kwargs)
+ except TypeError:
+ # avoid chaining exceptions, by putting nothing here
+ pass
+
+ # retry without arguments for old documenters
+ return self.format_args()
+
+ def format_signature(self, **kwargs: Any) -> str:
+ """Format the signature (arguments and return annotation) of the object.
+
+ Let the user process it via the ``autodoc-process-signature`` event.
+ """
+ if self.args is not None:
+ # signature given explicitly
+ args = f'({self.args})'
+ retann = self.retann
+ else:
+ # try to introspect the signature
+ try:
+ retann = None
+ args = self._call_format_args(**kwargs)
+ if args:
+ matched = re.match(r'^(\(.*\))\s+->\s+(.*)$', args)
+ if matched:
+ args = matched.group(1)
+ retann = matched.group(2)
+ except Exception as exc:
+ logger.warning(
+ __('error while formatting arguments for %s: %s'),
+ self.fullname,
+ exc,
+ type='autodoc',
+ )
+ args = None
+
+ result = self._events.emit_firstresult(
+ 'autodoc-process-signature',
+ self.objtype,
+ self.fullname,
+ self.object,
+ self.options,
+ args,
+ retann,
+ )
+ if result:
+ args, retann = result
+
+ if args is not None:
+ return args + ((' -> %s' % retann) if retann else '')
+ else:
+ return ''
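+    # A minimal sketch of a conf.py handler for the event emitted above
+    # ('mymod.frob' is a hypothetical target):
+    #   def process_signature(app, what, name, obj, options, args, retann):
+    #       if name == 'mymod.frob':
+    #           return '(x, y)', 'int'  # replaces the introspected signature
+    #   def setup(app):
+    #       app.connect('autodoc-process-signature', process_signature)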
+
+ def add_directive_header(self, sig: str) -> None:
+ """Add the directive header and options to the generated content."""
+ domain = getattr(self, 'domain', 'py')
+ directive = getattr(self, 'directivetype', self.objtype)
+ name = self.format_name()
+ sourcename = self.get_sourcename()
+
+ # one signature per line, indented by column
+ prefix = f'.. {domain}:{directive}:: '
+ for i, sig_line in enumerate(sig.split('\n')):
+ self.add_line(f'{prefix}{name}{sig_line}', sourcename)
+ if i == 0:
+ prefix = ' ' * len(prefix)
+
+ if self.options.no_index or self.options.noindex:
+ self.add_line(' :no-index:', sourcename)
+ if self.options.no_index_entry:
+ self.add_line(' :no-index-entry:', sourcename)
+ if self.objpath:
+            # Be explicit about the module; this is necessary since .. class::
+ # etc. don't support a prepended module name
+ self.add_line(' :module: %s' % self.modname, sourcename)
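+    # For a method documented as 'zipfile.ZipFile.open' this emits reST along
+    # the lines of (illustrative):
+    #   .. py:method:: ZipFile.open(name, mode='r')
+    #      :module: zipfile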
+
+ def get_doc(self) -> list[list[str]] | None:
+ """Decode and return lines of the docstring(s) for the object.
+
+ When it returns None, autodoc-process-docstring will not be called for this
+ object.
+ """
+ docstring = getdoc(
+ self.object,
+ self.get_attr,
+ self.config.autodoc_inherit_docstrings,
+ self.parent,
+ self.object_name,
+ )
+ if docstring:
+ tab_width = self.directive.state.document.settings.tab_width
+ return [prepare_docstring(docstring, tab_width)]
+ return []
+
+ def process_doc(self, docstrings: list[list[str]]) -> Iterator[str]:
+ """Let the user process the docstrings before adding them."""
+ for docstringlines in docstrings:
+ if self._events is not None:
+ # let extensions preprocess docstrings
+ self._events.emit(
+ 'autodoc-process-docstring',
+ self.objtype,
+ self.fullname,
+ self.object,
+ self.options,
+ docstringlines,
+ )
+
+ if docstringlines and docstringlines[-1]:
+ # append a blank line to the end of the docstring
+ docstringlines.append('')
+
+ yield from docstringlines
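+    # A minimal sketch of a handler for the event emitted above; handlers
+    # mutate the docstring lines in place ('INTERNAL' is an assumed marker):
+    #   def scrub(app, what, name, obj, options, lines):
+    #       lines[:] = [line for line in lines if 'INTERNAL' not in line]
+    #   app.connect('autodoc-process-docstring', scrub)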
+
+ def get_sourcename(self) -> str:
+ obj_module = inspect.safe_getattr(self.object, '__module__', None)
+ obj_qualname = inspect.safe_getattr(self.object, '__qualname__', None)
+ if obj_module and obj_qualname:
+ # Get the correct location of docstring from self.object
+ # to support inherited methods
+ fullname = f'{self.object.__module__}.{self.object.__qualname__}'
+ else:
+ fullname = self.fullname
+
+ if self.analyzer:
+ return f'{self.analyzer.srcname}:docstring of {fullname}'
+ else:
+ return 'docstring of %s' % fullname
+
+ def add_content(self, more_content: StringList | None) -> None:
+ """Add content from docstrings, attribute documentation and user."""
+ docstring = True
+
+ # set sourcename and add content from attribute documentation
+ sourcename = self.get_sourcename()
+ if self.analyzer:
+ attr_docs = self.analyzer.find_attr_docs()
+ if self.objpath:
+ key = ('.'.join(self.objpath[:-1]), self.objpath[-1])
+ if key in attr_docs:
+ docstring = False
+                    # make a copy of the attribute docstring so that changes made
+                    # by the autodoc-process-docstring event are not cached.
+ attribute_docstrings = [list(attr_docs[key])]
+
+ for i, line in enumerate(self.process_doc(attribute_docstrings)):
+ self.add_line(line, sourcename, i)
+
+ # add content from docstrings
+ if docstring:
+ docstrings = self.get_doc()
+ if docstrings is None:
+                # Do not call autodoc-process-docstring when get_doc() returns None.
+ pass
+ else:
+ if not docstrings:
+ # append at least a dummy docstring, so that the event
+ # autodoc-process-docstring is fired and can add some
+ # content if desired
+ docstrings.append([])
+ for i, line in enumerate(self.process_doc(docstrings)):
+ self.add_line(line, sourcename, i)
+
+ # add additional content (e.g. from document), if present
+ if more_content:
+ for line, src in zip(more_content.data, more_content.items, strict=True):
+ self.add_line(line, src[0], src[1])
+
+ def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
+        """Return `(members_check_module, members)` where `members` is a
+        list of `ObjectMember` instances representing the members of *self.object*.
+
+ If *want_all* is True, return all members. Else, only return those
+ members given by *self.options.members* (which may also be None).
+ """
+ msg = 'must be implemented in subclasses'
+ raise NotImplementedError(msg)
+
+ def filter_members(
+ self, members: list[ObjectMember], want_all: bool
+ ) -> list[tuple[str, Any, bool]]:
+ """Filter the given member list.
+
+ Members are skipped if
+
+ - they are private (except if given explicitly or the private-members
+ option is set)
+ - they are special methods (except if given explicitly or the
+ special-members option is set)
+ - they are undocumented (except if the undoc-members option is set)
+
+ The user can override the skipping decision by connecting to the
+ ``autodoc-skip-member`` event.
+ """
+
+ def is_filtered_inherited_member(name: str, obj: Any) -> bool:
+ inherited_members = self.options.inherited_members or set()
+ seen = set()
+
+ if inspect.isclass(self.object):
+ for cls in self.object.__mro__:
+ if name in cls.__dict__:
+ seen.add(cls)
+ if (
+ cls.__name__ in inherited_members
+ and cls != self.object
+ and any(
+ issubclass(potential_child, cls) for potential_child in seen
+ )
+ ):
+                        # the given member is a member of the specified *superclass*
+ return True
+ if name in cls.__dict__:
+ return False
+ if name in self.get_attr(cls, '__annotations__', {}):
+ return False
+ if isinstance(obj, ObjectMember) and obj.class_ is cls:
+ return False
+
+ return False
+
+ ret = []
+
+ # search for members in source code too
+ namespace = '.'.join(self.objpath) # will be empty for modules
+
+ if self.analyzer:
+ attr_docs = self.analyzer.find_attr_docs()
+ else:
+ attr_docs = {}
+
+ # process members and determine which to skip
+ for obj in members:
+ membername = obj.__name__
+ member = obj.object
+
+ # if isattr is True, the member is documented as an attribute
+ isattr = member is INSTANCEATTR or (namespace, membername) in attr_docs
+
+ try:
+ doc = getdoc(
+ member,
+ self.get_attr,
+ self.config.autodoc_inherit_docstrings,
+ self.object,
+ membername,
+ )
+ if not isinstance(doc, str):
+ # Ignore non-string __doc__
+ doc = None
+
+ # if the member __doc__ is the same as self's __doc__, it's just
+ # inherited and therefore not the member's doc
+ cls = self.get_attr(member, '__class__', None)
+ if cls:
+ cls_doc = self.get_attr(cls, '__doc__', None)
+ if cls_doc == doc:
+ doc = None
+
+ if isinstance(obj, ObjectMember) and obj.docstring:
+ # hack for ClassDocumenter to inject docstring via ObjectMember
+ doc = obj.docstring
+
+ doc, metadata = separate_metadata(doc)
+ has_doc = bool(doc)
+
+ if 'private' in metadata:
+ # consider a member private if docstring has "private" metadata
+ isprivate = True
+ elif 'public' in metadata:
+ # consider a member public if docstring has "public" metadata
+ isprivate = False
+ else:
+ isprivate = membername.startswith('_')
+
+ keep = False
+ if ismock(member) and (namespace, membername) not in attr_docs:
+ # mocked module or object
+ pass
+ elif (
+ self.options.exclude_members
+ and membername in self.options.exclude_members
+ ):
+ # remove members given by exclude-members
+ keep = False
+ elif want_all and special_member_re.match(membername):
+ # special __methods__
+ if (
+ self.options.special_members
+ and membername in self.options.special_members
+ ):
+ if membername == '__doc__': # NoQA: SIM114
+ keep = False
+ elif is_filtered_inherited_member(membername, obj):
+ keep = False
+ else:
+ keep = has_doc or self.options.undoc_members
+ else:
+ keep = False
+ elif (namespace, membername) in attr_docs:
+ if want_all and isprivate:
+ if self.options.private_members is None:
+ keep = False
+ else:
+ keep = membername in self.options.private_members
+ else:
+ # keep documented attributes
+ keep = True
+ elif want_all and isprivate:
+ if has_doc or self.options.undoc_members:
+ if self.options.private_members is None: # NoQA: SIM114
+ keep = False
+ elif is_filtered_inherited_member(membername, obj):
+ keep = False
+ else:
+ keep = membername in self.options.private_members
+ else:
+ keep = False
+ else:
+ if self.options.members is ALL and is_filtered_inherited_member(
+ membername, obj
+ ):
+ keep = False
+ else:
+ # ignore undocumented members if :undoc-members: is not given
+ keep = has_doc or self.options.undoc_members
+
+ if isinstance(obj, ObjectMember) and obj.skipped:
+            # forcibly skipped member (e.g. a module attribute not listed in __all__)
+ keep = False
+
+ # give the user a chance to decide whether this member
+ # should be skipped
+ if self._events is not None:
+                # let extensions decide whether to skip the member
+ skip_user = self._events.emit_firstresult(
+ 'autodoc-skip-member',
+ self.objtype,
+ membername,
+ member,
+ not keep,
+ self.options,
+ )
+ if skip_user is not None:
+ keep = not skip_user
+ except Exception as exc:
+ logger.warning(
+ __(
+ 'autodoc: failed to determine %s.%s (%r) to be documented, '
+ 'the following exception was raised:\n%s'
+ ),
+ self.name,
+ membername,
+ member,
+ exc,
+ type='autodoc',
+ )
+ keep = False
+
+ if keep:
+ ret.append((membername, member, isattr))
+
+ return ret
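+    # The decision can be overridden from conf.py via the event emitted above;
+    # a minimal sketch:
+    #   def keep_init(app, what, name, obj, skip, options):
+    #       if name == '__init__':
+    #           return False  # force-document __init__
+    #       return None  # defer to the default decision
+    #   app.connect('autodoc-skip-member', keep_init)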
+
+ def document_members(self, all_members: bool = False) -> None:
+ """Generate reST for member documentation.
+
+ If *all_members* is True, document all members, else those given by
+ *self.options.members*.
+ """
+ # set current namespace for finding members
+ self._current_document.autodoc_module = self.modname
+ if self.objpath:
+ self._current_document.autodoc_class = self.objpath[0]
+
+ want_all = (
+ all_members or self.options.inherited_members or self.options.members is ALL
+ )
+ # find out which members are documentable
+ members_check_module, members = self.get_object_members(want_all)
+
+ # document non-skipped members
+ member_documenters: list[tuple[Documenter, bool]] = []
+ for mname, member, isattr in self.filter_members(members, want_all):
+ classes = [
+ cls
+ for cls in self.documenters.values()
+ if cls.can_document_member(member, mname, isattr, self)
+ ]
+ if not classes:
+ # don't know how to document this member
+ continue
+ # prefer the documenter with the highest priority
+ classes.sort(key=lambda cls: cls.priority)
+            # give the explicitly separated module name, so that members
+ # of inner classes can be documented
+ full_mname = f'{self.modname}::' + '.'.join((*self.objpath, mname))
+ documenter = classes[-1](self.directive, full_mname, self.indent)
+ member_documenters.append((documenter, isattr))
+
+ member_order = self.options.member_order or self.config.autodoc_member_order
+ # We now try to import all objects before ordering them. This is to
+ # avoid possible circular imports if we were to import objects after
+ # their associated documenters have been sorted.
+ member_documenters = [
+ (documenter, isattr)
+ for documenter, isattr in member_documenters
+ if documenter.parse_name() and documenter.import_object()
+ ]
+ member_documenters = self.sort_members(member_documenters, member_order)
+
+ for documenter, isattr in member_documenters:
+ assert documenter.modname
+ # We can directly call ._generate() since the documenters
+ # already called parse_name() and import_object() before.
+ #
+ # Note that those two methods above do not emit events, so
+ # whatever objects we deduced should not have changed.
+ documenter._generate(
+ all_members=True,
+ real_modname=self.real_modname,
+ check_module=members_check_module and not isattr,
+ )
+
+ # reset current objects
+ self._current_document.autodoc_module = ''
+ self._current_document.autodoc_class = ''
+
+ def sort_members(
+ self, documenters: list[tuple[Documenter, bool]], order: str
+ ) -> list[tuple[Documenter, bool]]:
+ """Sort the given member list."""
+ if order == 'groupwise':
+ # sort by group; alphabetically within groups
+ documenters.sort(key=lambda e: (e[0].member_order, e[0].name))
+ elif order == 'bysource':
+ # By default, member discovery order matches source order,
+ # as dicts are insertion-ordered from Python 3.7.
+ if self.analyzer:
+ # sort by source order, by virtue of the module analyzer
+ tagorder = self.analyzer.tagorder
+
+ def keyfunc(entry: tuple[Documenter, bool]) -> int:
+ fullname = entry[0].name.split('::')[1]
+ return tagorder.get(fullname, len(tagorder))
+
+ documenters.sort(key=keyfunc)
+ else: # alphabetical
+ documenters.sort(key=lambda e: e[0].name)
+
+ return documenters
+
+ def generate(
+ self,
+ more_content: StringList | None = None,
+ real_modname: str | None = None,
+ check_module: bool = False,
+ all_members: bool = False,
+ ) -> None:
+ """Generate reST for the object given by *self.name*, and possibly for
+ its members.
+
+ If *more_content* is given, include that content. If *real_modname* is
+ given, use that module name to find attribute docs. If *check_module* is
+ True, only generate if the object is defined in the module name it is
+ imported from. If *all_members* is True, document all members.
+ """
+ if not self.parse_name():
+ # need a module to import
+ logger.warning(
+ __(
+ "don't know which module to import for autodocumenting "
+ '%r (try placing a "module" or "currentmodule" directive '
+ 'in the document, or giving an explicit module name)'
+ ),
+ self.name,
+ type='autodoc',
+ )
+ return
+
+ # now, import the module and get object to document
+ if not self.import_object():
+ return
+
+ self._generate(more_content, real_modname, check_module, all_members)
+
+ def _generate(
+ self,
+ more_content: StringList | None = None,
+ real_modname: str | None = None,
+ check_module: bool = False,
+ all_members: bool = False,
+ ) -> None:
+ # If there is no real module defined, figure out which to use.
+ # The real module is used in the module analyzer to look up the module
+        # in which the attribute documentation would actually be found.
+ # This is used for situations where you have a module that collects the
+ # functions and classes of internal submodules.
+ guess_modname = self.get_real_modname()
+ self.real_modname: str = real_modname or guess_modname
+
+ # try to also get a source code analyzer for attribute docs
+ try:
+ self.analyzer = ModuleAnalyzer.for_module(self.real_modname)
+ # parse right now, to get PycodeErrors on parsing (results will
+ # be cached anyway)
+ self.analyzer.find_attr_docs()
+ except PycodeError as exc:
+ logger.debug('[autodoc] module analyzer failed: %s', exc)
+ # no source file -- e.g. for builtin and C modules
+ self.analyzer = None
+ # at least add the module.__file__ as a dependency
+ if module___file__ := getattr(self.module, '__file__', ''):
+ self.directive.record_dependencies.add(module___file__)
+ else:
+ self.directive.record_dependencies.add(self.analyzer.srcname)
+
+ if self.real_modname != guess_modname:
+            # Add module to dependency list if target object is defined in another module.
+ try:
+ analyzer = ModuleAnalyzer.for_module(guess_modname)
+ self.directive.record_dependencies.add(analyzer.srcname)
+ except PycodeError:
+ pass
+
+ docstrings: list[str] = functools.reduce(
+ operator.iadd, self.get_doc() or [], []
+ )
+ if ismock(self.object) and not docstrings:
+ logger.warning(
+ __('A mocked object is detected: %r'),
+ self.name,
+ type='autodoc',
+ subtype='mocked_object',
+ )
+
+ # check __module__ of object (for members not given explicitly)
+ if check_module:
+ if not self.check_module():
+ return
+
+ sourcename = self.get_sourcename()
+
+ # make sure that the result starts with an empty line. This is
+ # necessary for some situations where another directive preprocesses
+ # reST and no starting newline is present
+ self.add_line('', sourcename)
+
+ # format the object's signature, if any
+ try:
+ sig = self.format_signature()
+ except Exception as exc:
+ logger.warning(
+ __('error while formatting signature for %s: %s'),
+ self.fullname,
+ exc,
+ type='autodoc',
+ )
+ return
+
+ # generate the directive header and options, if applicable
+ self.add_directive_header(sig)
+ self.add_line('', sourcename)
+
+ # e.g. the module directive doesn't have content
+ self.indent += self.content_indent
+
+ # add all content (from docstrings, attribute docs etc.)
+ self.add_content(more_content)
+
+ # document members, if possible
+ self.document_members(all_members)
+
+
+class ModuleDocumenter(Documenter):
+ """Specialized Documenter subclass for modules."""
+
+ objtype = 'module'
+ content_indent = ''
+ _extra_indent = ' '
+
+ option_spec: ClassVar[OptionSpec] = {
+ 'members': members_option,
+ 'undoc-members': bool_option,
+ 'no-index': bool_option,
+ 'no-index-entry': bool_option,
+ 'inherited-members': inherited_members_option,
+ 'show-inheritance': bool_option,
+ 'synopsis': identity,
+ 'platform': identity,
+ 'deprecated': bool_option,
+ 'member-order': member_order_option,
+ 'exclude-members': exclude_members_option,
+ 'private-members': members_option,
+ 'special-members': members_option,
+ 'imported-members': bool_option,
+ 'ignore-module-all': bool_option,
+ 'no-value': bool_option,
+ 'noindex': bool_option,
+ }
+
+ def __init__(self, *args: Any) -> None:
+ super().__init__(*args)
+ merge_members_option(self.options)
+ self.__all__: Sequence[str] | None = None
+
+ def add_content(self, more_content: StringList | None) -> None:
+ old_indent = self.indent
+ self.indent += self._extra_indent
+ super().add_content(None)
+ self.indent = old_indent
+ if more_content:
+ for line, src in zip(more_content.data, more_content.items, strict=True):
+ self.add_line(line, src[0], src[1])
+
+ @classmethod
+ def can_document_member(
+ cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
+ ) -> bool:
+ # don't document submodules automatically
+ return False
+
+ def resolve_name(
+ self, modname: str | None, parents: Any, path: str, base: str
+ ) -> tuple[str | None, list[str]]:
+ if modname is not None:
+ logger.warning(
+ __('"::" in automodule name doesn\'t make sense'), type='autodoc'
+ )
+ return (path or '') + base, []
+
+ def parse_name(self) -> bool:
+ ret = super().parse_name()
+ if self.args or self.retann:
+ logger.warning(
+ __('signature arguments or return annotation given for automodule %s'),
+ self.fullname,
+ type='autodoc',
+ )
+ return ret
+
+ def import_object(self, raiseerror: bool = False) -> bool:
+ ret = super().import_object(raiseerror)
+
+ try:
+ if not self.options.ignore_module_all:
+ self.__all__ = inspect.getall(self.object)
+ except ValueError as exc:
+ # invalid __all__ found.
+ logger.warning(
+ __(
+ '__all__ should be a list of strings, not %r '
+ '(in module %s) -- ignoring __all__'
+ ),
+ exc.args[0],
+ self.fullname,
+ type='autodoc',
+ )
+
+ return ret
+
+ def add_directive_header(self, sig: str) -> None:
+ Documenter.add_directive_header(self, sig)
+
+ sourcename = self.get_sourcename()
+
+ # add some module-specific options
+ if self.options.synopsis:
+ self.add_line(' :synopsis: ' + self.options.synopsis, sourcename)
+ if self.options.platform:
+ self.add_line(' :platform: ' + self.options.platform, sourcename)
+ if self.options.deprecated:
+ self.add_line(' :deprecated:', sourcename)
+ if self.options.no_index_entry:
+ self.add_line(' :no-index-entry:', sourcename)
+
+ def get_module_members(self) -> dict[str, ObjectMember]:
+ """Get members of target module."""
+ if self.analyzer:
+ attr_docs = self.analyzer.attr_docs
+ else:
+ attr_docs = {}
+
+ members: dict[str, ObjectMember] = {}
+ for name in dir(self.object):
+ try:
+ value = safe_getattr(self.object, name, None)
+ if ismock(value):
+ value = undecorate(value)
+ docstring = attr_docs.get(('', name), [])
+ members[name] = ObjectMember(
+ name, value, docstring='\n'.join(docstring)
+ )
+ except AttributeError:
+ continue
+
+        # annotation-only member (e.g. attr: int)
+ for name in inspect.getannotations(self.object):
+ if name not in members:
+ docstring = attr_docs.get(('', name), [])
+ members[name] = ObjectMember(
+ name, INSTANCEATTR, docstring='\n'.join(docstring)
+ )
+
+ return members
+
+ def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
+ members = self.get_module_members()
+ if want_all:
+ if self.__all__ is None:
+ # for implicit module members, check __module__ to avoid
+ # documenting imported objects
+ return True, list(members.values())
+ else:
+ for member in members.values():
+ if member.__name__ not in self.__all__:
+ member.skipped = True
+
+ return False, list(members.values())
+ else:
+ memberlist = self.options.members or []
+ ret = []
+ for name in memberlist:
+ if name in members:
+ ret.append(members[name])
+ else:
+ logger.warning(
+ __(
+ 'missing attribute mentioned in :members: option: '
+ 'module %s, attribute %s'
+ ),
+ safe_getattr(self.object, '__name__', '???'),
+ name,
+ type='autodoc',
+ )
+ return False, ret
+
+ def sort_members(
+ self, documenters: list[tuple[Documenter, bool]], order: str
+ ) -> list[tuple[Documenter, bool]]:
+ if order == 'bysource' and self.__all__:
+ assert self.__all__ is not None
+ module_all = self.__all__
+ module_all_set = set(module_all)
+ module_all_len = len(module_all)
+
+            # Sort alphabetically first (for members not listed in __all__)
+ documenters.sort(key=lambda e: e[0].name)
+
+ # Sort by __all__
+ def keyfunc(entry: tuple[Documenter, bool]) -> int:
+ name = entry[0].name.split('::')[1]
+ if name in module_all_set:
+ return module_all.index(name)
+ else:
+ return module_all_len
+
+ documenters.sort(key=keyfunc)
+
+ return documenters
+ else:
+ return super().sort_members(documenters, order)
+
+
+class ModuleLevelDocumenter(Documenter):
+ """Specialized Documenter subclass for objects on module level (functions,
+ classes, data/constants).
+ """
+
+ def resolve_name(
+ self, modname: str | None, parents: Any, path: str, base: str
+ ) -> tuple[str | None, list[str]]:
+ if modname is not None:
+ return modname, [*parents, base]
+ if path:
+ modname = path.rstrip('.')
+ return modname, [*parents, base]
+
+ # if documenting a toplevel object without explicit module,
+ # it can be contained in another auto directive ...
+ modname = self._current_document.autodoc_module
+ # ... or in the scope of a module directive
+ if not modname:
+ modname = self.env.ref_context.get('py:module')
+ # ... else, it stays None, which means invalid
+ return modname, [*parents, base]
+
+
+class ClassLevelDocumenter(Documenter):
+ """Specialized Documenter subclass for objects on class level (methods,
+ attributes).
+ """
+
+ def resolve_name(
+ self, modname: str | None, parents: Any, path: str, base: str
+ ) -> tuple[str | None, list[str]]:
+ if modname is not None:
+ return modname, [*parents, base]
+
+ if path:
+ mod_cls = path.rstrip('.')
+ else:
+ # if documenting a class-level object without path,
+ # there must be a current class, either from a parent
+ # auto directive ...
+ mod_cls = self._current_document.autodoc_class
+ # ... or from a class directive
+ if not mod_cls:
+ mod_cls = self.env.ref_context.get('py:class', '')
+ # ... if still falsy, there's no way to know
+ if not mod_cls:
+ return None, []
+ modname, _sep, cls = mod_cls.rpartition('.')
+ parents = [cls]
+ # if the module name is still missing, get it like above
+ if not modname:
+ modname = self._current_document.autodoc_module
+ if not modname:
+ modname = self.env.ref_context.get('py:module')
+ # ... else, it stays None, which means invalid
+ return modname, [*parents, base]
+
+
+class DocstringSignatureMixin:
+ """Mixin for FunctionDocumenter and MethodDocumenter to provide the
+ feature of reading the signature from the docstring.
+ """
+
+ _new_docstrings: list[list[str]] | None = None
+ _signatures: list[str] = []
+
+ def _find_signature(self) -> tuple[str | None, str | None] | None:
+ # candidates of the object name
+ valid_names = [self.objpath[-1]] # type: ignore[attr-defined]
+ if isinstance(self, ClassDocumenter):
+ valid_names.append('__init__')
+ if hasattr(self.object, '__mro__'):
+ valid_names.extend(cls.__name__ for cls in self.object.__mro__)
+
+ docstrings = self.get_doc()
+ if docstrings is None:
+ return None, None
+ self._new_docstrings = docstrings[:]
+ self._signatures = []
+ result = None
+ for i, doclines in enumerate(docstrings):
+ for j, line in enumerate(doclines):
+ if not line:
+                    # an empty line means there is no signature (left) to find
+ break
+
+ if line.endswith('\\'):
+ line = line.rstrip('\\').rstrip()
+
+ # match first line of docstring against signature RE
+ match = py_ext_sig_re.match(line)
+ if not match:
+ break
+ _exmod, _path, base, _tp_list, args, retann = match.groups()
+
+ # the base name must match ours
+ if base not in valid_names:
+ break
+
+ # re-prepare docstring to ignore more leading indentation
+ directive = self.directive # type: ignore[attr-defined]
+ tab_width = directive.state.document.settings.tab_width
+ self._new_docstrings[i] = prepare_docstring(
+ '\n'.join(doclines[j + 1 :]), tab_width
+ )
+
+ if result is None:
+ # first signature
+ result = args, retann
+ else:
+ # subsequent signatures
+ self._signatures.append(f'({args}) -> {retann}')
+
+ if result is not None:
+                # finish the loop when a signature is found
+ break
+
+ return result
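+    # For example (illustrative): for a method named 'open', a docstring starting
+    #   open(name, mode) -> IO
+    #   Open the member *name*.
+    # yields ('name, mode', 'IO'), and the remaining lines become the new
+    # docstring.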
+
+ def get_doc(self) -> list[list[str]] | None:
+ if self._new_docstrings is not None:
+ return self._new_docstrings
+ return super().get_doc() # type: ignore[misc]
+
+ def format_signature(self, **kwargs: Any) -> str:
+ self.args: str | None
+ if self.args is None and self.config.autodoc_docstring_signature: # type: ignore[attr-defined]
+ # only act if a signature is not explicitly given already, and if
+ # the feature is enabled
+ result = self._find_signature()
+ if result is not None:
+ self.args, self.retann = result
+ sig = super().format_signature(**kwargs) # type: ignore[misc]
+ if self._signatures:
+ return '\n'.join((sig, *self._signatures))
+ else:
+ return sig
+
+
+class DocstringStripSignatureMixin(DocstringSignatureMixin):
+ """Mixin for AttributeDocumenter to provide the
+ feature of stripping any function signature from the docstring.
+ """
+
+ def format_signature(self, **kwargs: Any) -> str:
+ if self.args is None and self.config.autodoc_docstring_signature: # type: ignore[attr-defined]
+ # only act if a signature is not explicitly given already, and if
+ # the feature is enabled
+ result = self._find_signature()
+ if result is not None:
+                # Discarding _args is the only difference from
+                # DocstringSignatureMixin.format_signature.
+                # Documenter.format_signature uses the value of self.args to format.
+ _args, self.retann = result
+ return super().format_signature(**kwargs)
+
+
+class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type: ignore[misc]
+ """Specialized Documenter subclass for functions."""
+
+ objtype = 'function'
+ member_order = 30
+
+ @classmethod
+ def can_document_member(
+ cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
+ ) -> bool:
+ # supports functions, builtins and bound methods exported at the module level
+ return (
+ inspect.isfunction(member)
+ or inspect.isbuiltin(member)
+ or (inspect.isroutine(member) and isinstance(parent, ModuleDocumenter))
+ )
+
+ def format_args(self, **kwargs: Any) -> str:
+ if self.config.autodoc_typehints in {'none', 'description'}:
+ kwargs.setdefault('show_annotation', False)
+ if self.config.autodoc_typehints_format == 'short':
+ kwargs.setdefault('unqualified_typehints', True)
+ if self.config.python_display_short_literal_types:
+ kwargs.setdefault('short_literals', True)
+
+ try:
+ self._events.emit('autodoc-before-process-signature', self.object, False)
+ sig = inspect.signature(
+ self.object, type_aliases=self.config.autodoc_type_aliases
+ )
+ args = stringify_signature(sig, **kwargs)
+ except TypeError as exc:
+ logger.warning(
+ __('Failed to get a function signature for %s: %s'), self.fullname, exc
+ )
+ return ''
+ except ValueError:
+ args = ''
+
+ if self.config.strip_signature_backslash:
+ # escape backslashes for reST
+ args = args.replace('\\', '\\\\')
+ return args
+
+ def document_members(self, all_members: bool = False) -> None:
+ pass
+
+ def add_directive_header(self, sig: str) -> None:
+ sourcename = self.get_sourcename()
+ super().add_directive_header(sig)
+
+ is_coro = inspect.iscoroutinefunction(self.object)
+ is_acoro = inspect.isasyncgenfunction(self.object)
+ if is_coro or is_acoro:
+ self.add_line(' :async:', sourcename)
+
+ def format_signature(self, **kwargs: Any) -> str:
+ if self.config.autodoc_typehints_format == 'short':
+ kwargs.setdefault('unqualified_typehints', True)
+ if self.config.python_display_short_literal_types:
+ kwargs.setdefault('short_literals', True)
+
+ sigs = []
+ if (
+ self.analyzer
+ and '.'.join(self.objpath) in self.analyzer.overloads
+ and self.config.autodoc_typehints != 'none'
+ ):
+ # Use signatures for overloaded functions instead of the implementation function.
+ overloaded = True
+ else:
+ overloaded = False
+ sig = super().format_signature(**kwargs)
+ sigs.append(sig)
+
+ if inspect.is_singledispatch_function(self.object):
+ # append signature of singledispatch'ed functions
+ for typ, func in self.object.registry.items():
+ if typ is object:
+ pass # default implementation. skipped.
+ else:
+ dispatchfunc = self.annotate_to_first_argument(func, typ)
+ if dispatchfunc:
+ documenter = FunctionDocumenter(self.directive, '')
+ documenter.object = dispatchfunc
+ documenter.objpath = ['']
+ sigs.append(documenter.format_signature())
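+ # A sketch of the output (hypothetical functions): given
+ # @functools.singledispatch
+ # def func(arg, *args): ...
+ # @func.register
+ # def _(arg: int, *args): ...
+ # both "func(arg, *args)" and "func(arg: int, *args)" are emitted,
+ # one signature per line.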
+ if overloaded and self.analyzer is not None:
+ actual = inspect.signature(
+ self.object, type_aliases=self.config.autodoc_type_aliases
+ )
+ __globals__ = safe_getattr(self.object, '__globals__', {})
+ for overload in self.analyzer.overloads['.'.join(self.objpath)]:
+ overload = self.merge_default_value(actual, overload)
+ overload = evaluate_signature(
+ overload, __globals__, self.config.autodoc_type_aliases
+ )
+
+ sig = stringify_signature(overload, **kwargs)
+ sigs.append(sig)
+
+ return '\n'.join(sigs)
+
+ def merge_default_value(self, actual: Signature, overload: Signature) -> Signature:
+ """Merge default values of actual implementation to the overload variants."""
+ parameters = list(overload.parameters.values())
+ for i, param in enumerate(parameters):
+ actual_param = actual.parameters.get(param.name)
+ if actual_param and param.default == '...':
+ parameters[i] = param.replace(default=actual_param.default)
+
+ return overload.replace(parameters=parameters)
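+ # For example (hypothetical overload): with the implementation
+ # def f(x, y=0): ...
+ # an overload variant declared as
+ # def f(x: int, y: int = ...) -> int: ...
+ # is rendered with the real default, i.e. "f(x: int, y: int = 0) -> int".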
+
+ def annotate_to_first_argument(
+ self, func: Callable[..., Any], typ: type
+ ) -> Callable[..., Any] | None:
+ """Annotate type hint to the first argument of function if needed."""
+ try:
+ sig = inspect.signature(func, type_aliases=self.config.autodoc_type_aliases)
+ except TypeError as exc:
+ logger.warning(
+ __('Failed to get a function signature for %s: %s'), self.fullname, exc
+ )
+ return None
+ except ValueError:
+ return None
+
+ if len(sig.parameters) == 0:
+ return None
+
+ def dummy(): # type: ignore[no-untyped-def] # NoQA: ANN202
+ pass
+
+ params = list(sig.parameters.values())
+ if params[0].annotation is Parameter.empty:
+ params[0] = params[0].replace(annotation=typ)
+ try:
+ dummy.__signature__ = sig.replace(parameters=params) # type: ignore[attr-defined]
+ return dummy
+ except (AttributeError, TypeError):
+ # failed to update signature (ex. built-in or extension types)
+ return None
+
+ return func
+
+
+class DecoratorDocumenter(FunctionDocumenter):
+ """Specialized Documenter subclass for decorator functions."""
+
+ objtype = 'decorator'
+
+ # must be lower than FunctionDocumenter
+ priority = -1
+
+ def format_args(self, **kwargs: Any) -> str:
+ args = super().format_args(**kwargs)
+ if ',' in args:
+ return args
+ else:
+ return ''
+
+
+# Types which have confusing metaclass signatures it would be best not to show.
+# These are listed by name, rather than storing the objects themselves, to avoid
+# needing to import the modules.
+_METACLASS_CALL_BLACKLIST = frozenset({
+ 'enum.EnumType.__call__',
+})
+
+
+# Types whose __new__ signature is a pass-through.
+_CLASS_NEW_BLACKLIST = frozenset({
+ 'typing.Generic.__new__',
+})
+
+
+class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type: ignore[misc]
+ """Specialized Documenter subclass for classes."""
+
+ objtype = 'class'
+ member_order = 20
+ option_spec: ClassVar[OptionSpec] = {
+ 'members': members_option,
+ 'undoc-members': bool_option,
+ 'no-index': bool_option,
+ 'no-index-entry': bool_option,
+ 'inherited-members': inherited_members_option,
+ 'show-inheritance': bool_option,
+ 'member-order': member_order_option,
+ 'exclude-members': exclude_members_option,
+ 'private-members': members_option,
+ 'special-members': members_option,
+ 'class-doc-from': class_doc_from_option,
+ 'noindex': bool_option,
+ }
+
+ # Must be higher than FunctionDocumenter, ClassDocumenter, and
+ # AttributeDocumenter as NewType can be an attribute and is a class
+ # after Python 3.10.
+ priority = 15
+
+ _signature_class: Any = None
+ _signature_method_name: str = ''
+
+ def __init__(self, *args: Any) -> None:
+ super().__init__(*args)
+
+ if self.config.autodoc_class_signature == 'separated':
+ self.options = self.options.copy()
+
+ # show __init__() method
+ if self.options.special_members is None:
+ self.options['special-members'] = ['__new__', '__init__']
+ else:
+ self.options.special_members.append('__new__')
+ self.options.special_members.append('__init__')
+
+ merge_members_option(self.options)
+
+ @classmethod
+ def can_document_member(
+ cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
+ ) -> bool:
+ return isinstance(member, type) or (
+ isattr and isinstance(member, NewType | TypeVar)
+ )
+
+ def import_object(self, raiseerror: bool = False) -> bool:
+ ret = super().import_object(raiseerror)
+ # if the class is documented under another name, document it
+ # as data/attribute
+ if ret:
+ if hasattr(self.object, '__name__'):
+ self.doc_as_attr = self.objpath[-1] != self.object.__name__
+ else:
+ self.doc_as_attr = True
+ if isinstance(self.object, NewType | TypeVar):
+ modname = getattr(self.object, '__module__', self.modname)
+ if modname != self.modname and self.modname.startswith(modname):
+ bases = self.modname[len(modname) :].strip('.').split('.')
+ self.objpath = bases + self.objpath
+ self.modname = modname
+ return ret
+
+ def _get_signature(self) -> tuple[Any | None, str | None, Signature | None]:
+ if isinstance(self.object, NewType | TypeVar):
+ # Suppress signature
+ return None, None, None
+
+ def get_user_defined_function_or_method(obj: Any, attr: str) -> Any:
+ """Get the `attr` function or method from `obj`, if it is user-defined."""
+ if inspect.is_builtin_class_method(obj, attr):
+ return None
+ attr = self.get_attr(obj, attr, None)
+ if not (inspect.ismethod(attr) or inspect.isfunction(attr)):
+ return None
+ return attr
+
+ # This sequence is copied from inspect._signature_from_callable.
+ # ValueError means that no signature could be found, so we keep going.
+
+ # First, we check if obj has a __signature__ attribute
+ if hasattr(self.object, '__signature__'):
+ object_sig = self.object.__signature__
+ if isinstance(object_sig, Signature):
+ return None, None, object_sig
+ if sys.version_info[:2] in {(3, 12), (3, 13)} and callable(object_sig):
+ # Support for enum.Enum.__signature__ in Python 3.12 and 3.13
+ if isinstance(object_sig_str := object_sig(), str):
+ return None, None, inspect.signature_from_str(object_sig_str)
+
+ # Next, let's see if it has an overloaded __call__ defined
+ # in its metaclass
+ call = get_user_defined_function_or_method(type(self.object), '__call__')
+
+ if call is not None:
+ if f'{call.__module__}.{call.__qualname__}' in _METACLASS_CALL_BLACKLIST:
+ call = None
+
+ if call is not None:
+ self._events.emit('autodoc-before-process-signature', call, True)
+ try:
+ sig = inspect.signature(
+ call,
+ bound_method=True,
+ type_aliases=self.config.autodoc_type_aliases,
+ )
+ return type(self.object), '__call__', sig
+ except ValueError:
+ pass
+
+ # Now we check if the 'obj' class has a '__new__' method
+ new = get_user_defined_function_or_method(self.object, '__new__')
+
+ if new is not None:
+ if f'{new.__module__}.{new.__qualname__}' in _CLASS_NEW_BLACKLIST:
+ new = None
+
+ if new is not None:
+ self._events.emit('autodoc-before-process-signature', new, True)
+ try:
+ sig = inspect.signature(
+ new,
+ bound_method=True,
+ type_aliases=self.config.autodoc_type_aliases,
+ )
+ return self.object, '__new__', sig
+ except ValueError:
+ pass
+
+ # Finally, we should have at least __init__ implemented
+ init = get_user_defined_function_or_method(self.object, '__init__')
+ if init is not None:
+ self._events.emit('autodoc-before-process-signature', init, True)
+ try:
+ sig = inspect.signature(
+ init,
+ bound_method=True,
+ type_aliases=self.config.autodoc_type_aliases,
+ )
+ return self.object, '__init__', sig
+ except ValueError:
+ pass
+
+ # None of the attributes are user-defined, so fall back to let inspect
+ # handle it.
+ # We don't know the exact method that inspect.signature will read
+ # the signature from, so just pass the object itself to our hook.
+ self._events.emit('autodoc-before-process-signature', self.object, False)
+ try:
+ sig = inspect.signature(
+ self.object,
+ bound_method=False,
+ type_aliases=self.config.autodoc_type_aliases,
+ )
+ return None, None, sig
+ except ValueError:
+ pass
+
+ # Still no signature: happens e.g. for old-style classes
+ # with __init__ in C and no `__text_signature__`.
+ return None, None, None
+
+ def format_args(self, **kwargs: Any) -> str:
+ if self.config.autodoc_typehints in {'none', 'description'}:
+ kwargs.setdefault('show_annotation', False)
+ if self.config.autodoc_typehints_format == 'short':
+ kwargs.setdefault('unqualified_typehints', True)
+ if self.config.python_display_short_literal_types:
+ kwargs.setdefault('short_literals', True)
+
+ try:
+ self._signature_class, _signature_method_name, sig = self._get_signature()
+ except TypeError as exc:
+ # __signature__ attribute contained junk
+ logger.warning(
+ __('Failed to get a constructor signature for %s: %s'),
+ self.fullname,
+ exc,
+ )
+ return ''
+ self._signature_method_name = _signature_method_name or ''
+
+ if sig is None:
+ return ''
+
+ return stringify_signature(sig, show_return_annotation=False, **kwargs)
+
+ def _find_signature(self) -> tuple[str | None, str | None] | None:
+ result = super()._find_signature()
+ if result is not None:
+ # Strip the return annotation from the constructor signature in the docstring (first entry)
+ result = (result[0], None)
+
+ for i, sig in enumerate(self._signatures):
+ if sig.endswith(' -> None'):
+ # Strip the return annotation from constructor signatures in the docstring
+ # (subsequent entries)
+ self._signatures[i] = sig[:-8]
+
+ return result
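+ # For example (hypothetical docstring): a class docstring beginning with
+ # "Point(x, y) -> None" is rendered with the signature "Point(x, y)";
+ # the " -> None" return annotation is stripped above.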
+
+ def format_signature(self, **kwargs: Any) -> str:
+ if self.doc_as_attr:
+ return ''
+ if self.config.autodoc_class_signature == 'separated':
+ # do not show signatures
+ return ''
+
+ if self.config.autodoc_typehints_format == 'short':
+ kwargs.setdefault('unqualified_typehints', True)
+ if self.config.python_display_short_literal_types:
+ kwargs.setdefault('short_literals', True)
+
+ sig = super().format_signature()
+ sigs = []
+
+ overloads = self.get_overloaded_signatures()
+ if overloads and self.config.autodoc_typehints != 'none':
+ # Use signatures for overloaded methods instead of the implementation method.
+ method = safe_getattr(
+ self._signature_class, self._signature_method_name, None
+ )
+ __globals__ = safe_getattr(method, '__globals__', {})
+ for overload in overloads:
+ overload = evaluate_signature(
+ overload, __globals__, self.config.autodoc_type_aliases
+ )
+
+ parameters = list(overload.parameters.values())
+ overload = overload.replace(
+ parameters=parameters[1:], return_annotation=Parameter.empty
+ )
+ sig = stringify_signature(overload, **kwargs)
+ sigs.append(sig)
+ else:
+ sigs.append(sig)
+
+ return '\n'.join(sigs)
+
+ def get_overloaded_signatures(self) -> list[Signature]:
+ if self._signature_class and self._signature_method_name:
+ for cls in self._signature_class.__mro__:
+ try:
+ analyzer = ModuleAnalyzer.for_module(cls.__module__)
+ analyzer.analyze()
+ qualname = f'{cls.__qualname__}.{self._signature_method_name}'
+ if qualname in analyzer.overloads:
+ return analyzer.overloads.get(qualname, [])
+ elif qualname in analyzer.tagorder:
+ # the constructor is defined in the class, but not overridden.
+ return []
+ except PycodeError:
+ pass
+
+ return []
+
+ def get_canonical_fullname(self) -> str | None:
+ __modname__ = safe_getattr(self.object, '__module__', self.modname)
+ __qualname__ = safe_getattr(self.object, '__qualname__', None)
+ if __qualname__ is None:
+ __qualname__ = safe_getattr(self.object, '__name__', None)
+ if __qualname__ and '<locals>' in __qualname__:
+ # No valid qualname found if the object is defined inside a function's locals
+ __qualname__ = None
+
+ if __modname__ and __qualname__:
+ return f'{__modname__}.{__qualname__}'
+ else:
+ return None
+
+ def add_directive_header(self, sig: str) -> None:
+ sourcename = self.get_sourcename()
+
+ if self.doc_as_attr:
+ self.directivetype = 'attribute'
+ super().add_directive_header(sig)
+
+ if isinstance(self.object, NewType | TypeVar):
+ return
+
+ if self.analyzer and '.'.join(self.objpath) in self.analyzer.finals:
+ self.add_line(' :final:', sourcename)
+
+ canonical_fullname = self.get_canonical_fullname()
+ if (
+ not self.doc_as_attr
+ and not isinstance(self.object, NewType)
+ and canonical_fullname
+ and self.fullname != canonical_fullname
+ ):
+ self.add_line(' :canonical: %s' % canonical_fullname, sourcename)
+
+ # add inheritance info, if wanted
+ if not self.doc_as_attr and self.options.show_inheritance:
+ if inspect.getorigbases(self.object):
+ # A subclass of generic types
+ # refs: PEP-560
+ bases = list(self.object.__orig_bases__)
+ elif hasattr(self.object, '__bases__') and len(self.object.__bases__):
+ # A normal class
+ bases = list(self.object.__bases__)
+ else:
+ bases = []
+
+ self._events.emit(
+ 'autodoc-process-bases', self.fullname, self.object, self.options, bases
+ )
+
+ mode = _get_render_mode(self.config.autodoc_typehints_format)
+ base_classes = [restify(cls, mode=mode) for cls in bases]
+
+ sourcename = self.get_sourcename()
+ self.add_line('', sourcename)
+ self.add_line(' ' + _('Bases: %s') % ', '.join(base_classes), sourcename)
+
+ def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
+ members = get_class_members(
+ self.object,
+ self.objpath,
+ self.get_attr,
+ self.config.autodoc_inherit_docstrings,
+ )
+ if not want_all:
+ if not self.options.members:
+ return False, []
+ # specific members given
+ selected = []
+ for name in self.options.members:
+ if name in members:
+ selected.append(members[name])
+ else:
+ logger.warning(
+ __('missing attribute %s in object %s'),
+ name,
+ self.fullname,
+ type='autodoc',
+ )
+ return False, selected
+ elif self.options.inherited_members:
+ return False, list(members.values())
+ else:
+ return False, [m for m in members.values() if m.class_ == self.object]
+
+ def get_doc(self) -> list[list[str]] | None:
+ if isinstance(self.object, TypeVar):
+ if self.object.__doc__ == TypeVar.__doc__:
+ return []
+ if self.doc_as_attr:
+ # Don't show the docstring of the class when it is an alias.
+ if self.get_variable_comment():
+ return []
+ else:
+ return None
+
+ lines = getattr(self, '_new_docstrings', None)
+ if lines is not None:
+ return lines
+
+ classdoc_from = self.options.get(
+ 'class-doc-from', self.config.autoclass_content
+ )
+
+ docstrings = []
+ attrdocstring = getdoc(self.object, self.get_attr)
+ if attrdocstring:
+ docstrings.append(attrdocstring)
+
+ # for classes, what the "docstring" is can be controlled via a
+ # config value; the default is only the class docstring
+ if classdoc_from in {'both', 'init'}:
+ __init__ = self.get_attr(self.object, '__init__', None)
+ initdocstring = getdoc(
+ __init__,
+ self.get_attr,
+ self.config.autodoc_inherit_docstrings,
+ self.object,
+ '__init__',
+ )
+ # for new-style classes, no __init__ means default __init__
+ if initdocstring is not None and (
+ initdocstring == object.__init__.__doc__ # for pypy
+ or initdocstring.strip() == object.__init__.__doc__ # for !pypy
+ ):
+ initdocstring = None
+ if not initdocstring:
+ # try __new__
+ __new__ = self.get_attr(self.object, '__new__', None)
+ initdocstring = getdoc(
+ __new__,
+ self.get_attr,
+ self.config.autodoc_inherit_docstrings,
+ self.object,
+ '__new__',
+ )
+ # for new-style classes, no __new__ means default __new__
+ if initdocstring is not None and (
+ initdocstring == object.__new__.__doc__ # for pypy
+ or initdocstring.strip() == object.__new__.__doc__ # for !pypy
+ ):
+ initdocstring = None
+ if initdocstring:
+ if classdoc_from == 'init':
+ docstrings = [initdocstring]
+ else:
+ docstrings.append(initdocstring)
+
+ tab_width = self.directive.state.document.settings.tab_width
+ return [prepare_docstring(docstring, tab_width) for docstring in docstrings]
+
+ def get_variable_comment(self) -> list[str] | None:
+ try:
+ key = ('', '.'.join(self.objpath))
+ if self.doc_as_attr:
+ analyzer = ModuleAnalyzer.for_module(self.modname)
+ else:
+ analyzer = ModuleAnalyzer.for_module(self.get_real_modname())
+ analyzer.analyze()
+ return list(analyzer.attr_docs.get(key, []))
+ except PycodeError:
+ return None
+
+ def add_content(self, more_content: StringList | None) -> None:
+ mode = _get_render_mode(self.config.autodoc_typehints_format)
+ short_literals = self.config.python_display_short_literal_types
+
+ if isinstance(self.object, NewType):
+ supertype = restify(self.object.__supertype__, mode=mode)
+
+ more_content = StringList([_('alias of %s') % supertype, ''], source='')
+ if isinstance(self.object, TypeVar):
+ attrs = [repr(self.object.__name__)]
+ attrs.extend(
+ stringify_annotation(constraint, mode, short_literals=short_literals)
+ for constraint in self.object.__constraints__
+ )
+ if self.object.__bound__:
+ bound = restify(self.object.__bound__, mode=mode)
+ attrs.append(r'bound=\ ' + bound)
+ if self.object.__covariant__:
+ attrs.append('covariant=True')
+ if self.object.__contravariant__:
+ attrs.append('contravariant=True')
+
+ more_content = StringList(
+ [_('alias of TypeVar(%s)') % ', '.join(attrs), ''], source=''
+ )
+ if self.doc_as_attr and self.modname != self.get_real_modname():
+ try:
+ # override analyzer to obtain doccomment around its definition.
+ self.analyzer = ModuleAnalyzer.for_module(self.modname)
+ self.analyzer.analyze()
+ except PycodeError:
+ pass
+
+ if self.doc_as_attr and not self.get_variable_comment():
+ try:
+ alias = restify(self.object, mode=mode)
+ more_content = StringList([_('alias of %s') % alias], source='')
+ except AttributeError:
+ pass # Invalid class object is passed.
+
+ super().add_content(more_content)
+
+ def document_members(self, all_members: bool = False) -> None:
+ if self.doc_as_attr:
+ return
+ super().document_members(all_members)
+
+ def generate(
+ self,
+ more_content: StringList | None = None,
+ real_modname: str | None = None,
+ check_module: bool = False,
+ all_members: bool = False,
+ ) -> None:
+ # Do not pass real_modname and use the name from the __module__
+ # attribute of the class.
+ # If a class gets imported into the module real_modname
+ # the analyzer won't find the source of the class, if
+ # it looks in real_modname.
+ return super().generate(
+ more_content=more_content,
+ check_module=check_module,
+ all_members=all_members,
+ )
+
+
+class ExceptionDocumenter(ClassDocumenter):
+ """Specialized ClassDocumenter subclass for exceptions."""
+
+ objtype = 'exception'
+ member_order = 10
+
+ # needs a higher priority than ClassDocumenter
+ priority = ClassDocumenter.priority + 5
+
+ @classmethod
+ def can_document_member(
+ cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
+ ) -> bool:
+ try:
+ return isinstance(member, type) and issubclass(member, BaseException)
+ except TypeError as exc:
+ # It's possible for a member to be considered a type, but fail
+ # issubclass checks due to not being a class. For example:
+ # https://github.com/sphinx-doc/sphinx/issues/11654#issuecomment-1696790436
+ msg = (
+ f'{cls.__name__} failed to discern if member {member} with'
+ f' membername {membername} is a BaseException subclass.'
+ )
+ raise ValueError(msg) from exc
+
+
+class DataDocumenterMixinBase:
+ # define types of instance variables
+ config: Config
+ env: BuildEnvironment
+ modname: str
+ parent: Any
+ object: Any
+ objpath: list[str]
+
+ def should_suppress_directive_header(self) -> bool:
+ """Check directive header should be suppressed."""
+ return False
+
+ def should_suppress_value_header(self) -> bool:
+ """Check :value: header should be suppressed."""
+ return False
+
+ def update_content(self, more_content: StringList) -> None:
+ """Update docstring, for example with TypeVar variance."""
+ pass
+
+
+class GenericAliasMixin(DataDocumenterMixinBase):
+ """Mixin for DataDocumenter and AttributeDocumenter to provide the feature for
+ supporting GenericAliases.
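+
+ Example (a hypothetical alias; rendered with an "alias of" line)::
+
+ #: This is a target of this mix-in.
+ StrIntMap = dict[str, int]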
+ """
+
+ def should_suppress_directive_header(self) -> bool:
+ return (
+ inspect.isgenericalias(self.object)
+ or super().should_suppress_directive_header()
+ )
+
+ def update_content(self, more_content: StringList) -> None:
+ if inspect.isgenericalias(self.object):
+ mode = _get_render_mode(self.config.autodoc_typehints_format)
+ alias = restify(self.object, mode=mode)
+
+ more_content.append(_('alias of %s') % alias, '')
+ more_content.append('', '')
+
+ super().update_content(more_content)
+
+
+class UninitializedGlobalVariableMixin(DataDocumenterMixinBase):
+ """Mixin for DataDocumenter to provide the feature for supporting uninitialized
+ (type annotation only) global variables.
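+
+ Example::
+
+ #: This is a target of this mix-in.
+ attr: int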
+ """
+
+ def import_object(self, raiseerror: bool = False) -> bool:
+ try:
+ return super().import_object(raiseerror=True) # type: ignore[misc]
+ except ImportError as exc:
+ # annotation only instance variable (PEP-526)
+ try:
+ with mock(self.config.autodoc_mock_imports):
+ parent = import_module(self.modname)
+ annotations = get_type_hints(
+ parent,
+ None,
+ self.config.autodoc_type_aliases,
+ include_extras=True,
+ )
+ if self.objpath[-1] in annotations:
+ self.object = UNINITIALIZED_ATTR
+ self.parent = parent
+ return True
+ except ImportError:
+ pass
+
+ if raiseerror:
+ raise
+ logger.warning(exc.args[0], type='autodoc', subtype='import_object')
+ self.env.note_reread()
+ return False
+
+ def should_suppress_value_header(self) -> bool:
+ return (
+ self.object is UNINITIALIZED_ATTR or super().should_suppress_value_header()
+ )
+
+ def get_doc(self) -> list[list[str]] | None:
+ if self.object is UNINITIALIZED_ATTR:
+ return []
+ else:
+ return super().get_doc() # type: ignore[misc]
+
+
+class DataDocumenter(
+ GenericAliasMixin, UninitializedGlobalVariableMixin, ModuleLevelDocumenter
+):
+ """Specialized Documenter subclass for data items."""
+
+ objtype = 'data'
+ member_order = 40
+ priority = -10
+ option_spec: ClassVar[OptionSpec] = dict(ModuleLevelDocumenter.option_spec)
+ option_spec['annotation'] = annotation_option
+ option_spec['no-value'] = bool_option
+
+ @classmethod
+ def can_document_member(
+ cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
+ ) -> bool:
+ return isinstance(parent, ModuleDocumenter) and isattr
+
+ def update_annotations(self, parent: Any) -> None:
+ """Update __annotations__ to support type_comment and so on."""
+ annotations = dict(inspect.getannotations(parent))
+ parent.__annotations__ = annotations
+
+ try:
+ analyzer = ModuleAnalyzer.for_module(self.modname)
+ analyzer.analyze()
+ for (classname, attrname), annotation in analyzer.annotations.items():
+ if not classname and attrname not in annotations:
+ annotations[attrname] = annotation
+ except PycodeError:
+ pass
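+ # For example (hypothetical module source): the analyzer picks up the
+ # type comment in
+ # attr = None # type: int
+ # so "attr" gains an int annotation here even though the module's
+ # runtime __annotations__ does not contain it.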
+
+ def import_object(self, raiseerror: bool = False) -> bool:
+ ret = super().import_object(raiseerror)
+ if self.parent:
+ self.update_annotations(self.parent)
+
+ return ret
+
+ def should_suppress_value_header(self) -> bool:
+ if super().should_suppress_value_header():
+ return True
+ else:
+ doc = self.get_doc() or []
+ _docstring, metadata = separate_metadata(
+ '\n'.join(functools.reduce(operator.iadd, doc, []))
+ )
+ if 'hide-value' in metadata:
+ return True
+
+ return False
+
+ def add_directive_header(self, sig: str) -> None:
+ super().add_directive_header(sig)
+ sourcename = self.get_sourcename()
+ if (
+ self.options.annotation is SUPPRESS
+ or self.should_suppress_directive_header()
+ ):
+ pass
+ elif self.options.annotation:
+ self.add_line(' :annotation: %s' % self.options.annotation, sourcename)
+ else:
+ if self.config.autodoc_typehints != 'none':
+ # obtain annotation for this data
+ annotations = get_type_hints(
+ self.parent,
+ None,
+ self.config.autodoc_type_aliases,
+ include_extras=True,
+ )
+ if self.objpath[-1] in annotations:
+ mode = _get_render_mode(self.config.autodoc_typehints_format)
+ short_literals = self.config.python_display_short_literal_types
+ objrepr = stringify_annotation(
+ annotations.get(self.objpath[-1]),
+ mode,
+ short_literals=short_literals,
+ )
+ self.add_line(' :type: ' + objrepr, sourcename)
+
+ try:
+ if (
+ self.options.no_value
+ or self.should_suppress_value_header()
+ or ismock(self.object)
+ ):
+ pass
+ else:
+ objrepr = object_description(self.object)
+ self.add_line(' :value: ' + objrepr, sourcename)
+ except ValueError:
+ pass
+
+ def document_members(self, all_members: bool = False) -> None:
+ pass
+
+ def get_real_modname(self) -> str:
+ real_modname = self.get_attr(self.parent or self.object, '__module__', None)
+ return real_modname or self.modname
+
+ def get_module_comment(self, attrname: str) -> list[str] | None:
+ try:
+ analyzer = ModuleAnalyzer.for_module(self.modname)
+ analyzer.analyze()
+ key = ('', attrname)
+ if key in analyzer.attr_docs:
+ return list(analyzer.attr_docs[key])
+ except PycodeError:
+ pass
+
+ return None
+
+ def get_doc(self) -> list[list[str]] | None:
+ # Check the variable has a docstring-comment
+ comment = self.get_module_comment(self.objpath[-1])
+ if comment:
+ return [comment]
+ else:
+ return super().get_doc()
+
+ def add_content(self, more_content: StringList | None) -> None:
+ # Disable analyzing variable comment on Documenter.add_content() to control it on
+ # DataDocumenter.add_content()
+ self.analyzer = None
+
+ if not more_content:
+ more_content = StringList()
+
+ self.update_content(more_content)
+ super().add_content(more_content)
+
+
+class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type: ignore[misc]
+ """Specialized Documenter subclass for methods (normal, static and class)."""
+
+ objtype = 'method'
+ directivetype = 'method'
+ member_order = 50
+ priority = 1 # must be more than FunctionDocumenter
+
+ @classmethod
+ def can_document_member(
+ cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
+ ) -> bool:
+ return inspect.isroutine(member) and not isinstance(parent, ModuleDocumenter)
+
+ def import_object(self, raiseerror: bool = False) -> bool:
+ ret = super().import_object(raiseerror)
+ if not ret:
+ return ret
+
+ # to distinguish classmethod/staticmethod
+ obj = self.parent.__dict__.get(self.object_name, self.object)
+ if inspect.isstaticmethod(obj, cls=self.parent, name=self.object_name):
+ # document static members before regular methods
+ self.member_order -= 1 # type: ignore[misc]
+ elif inspect.isclassmethod(obj):
+ # document class methods before static methods as
+ # they usually behave as alternative constructors
+ self.member_order -= 2 # type: ignore[misc]
+ return ret
+
+ def format_args(self, **kwargs: Any) -> str:
+ if self.config.autodoc_typehints in {'none', 'description'}:
+ kwargs.setdefault('show_annotation', False)
+ if self.config.autodoc_typehints_format == 'short':
+ kwargs.setdefault('unqualified_typehints', True)
+ if self.config.python_display_short_literal_types:
+ kwargs.setdefault('short_literals', True)
+
+ try:
+ if self.object == object.__init__ and self.parent != object: # NoQA: E721
+ # Classes without their own __init__() method are shown with no arguments.
+ #
+ # Note: The signature of object.__init__() is (self, /, *args, **kwargs),
+ # but showing it would only confuse users.
+ args = '()'
+ else:
+ if inspect.isstaticmethod(
+ self.object, cls=self.parent, name=self.object_name
+ ):
+ self._events.emit(
+ 'autodoc-before-process-signature', self.object, False
+ )
+ sig = inspect.signature(
+ self.object,
+ bound_method=False,
+ type_aliases=self.config.autodoc_type_aliases,
+ )
+ else:
+ self._events.emit(
+ 'autodoc-before-process-signature', self.object, True
+ )
+ sig = inspect.signature(
+ self.object,
+ bound_method=True,
+ type_aliases=self.config.autodoc_type_aliases,
+ )
+ args = stringify_signature(sig, **kwargs)
+ except TypeError as exc:
+ logger.warning(
+ __('Failed to get a method signature for %s: %s'), self.fullname, exc
+ )
+ return ''
+ except ValueError:
+ args = ''
+
+ if self.config.strip_signature_backslash:
+ # escape backslashes for reST
+ args = args.replace('\\', '\\\\')
+ return args
+
+ def add_directive_header(self, sig: str) -> None:
+ super().add_directive_header(sig)
+
+ sourcename = self.get_sourcename()
+ obj = self.parent.__dict__.get(self.object_name, self.object)
+ if inspect.isabstractmethod(obj):
+ self.add_line(' :abstractmethod:', sourcename)
+ if inspect.iscoroutinefunction(obj) or inspect.isasyncgenfunction(obj):
+ self.add_line(' :async:', sourcename)
+ if inspect.is_classmethod_like(obj) or (
+ inspect.is_singledispatch_method(obj)
+ and inspect.is_classmethod_like(obj.func)
+ ):
+ self.add_line(' :classmethod:', sourcename)
+ if inspect.isstaticmethod(obj, cls=self.parent, name=self.object_name):
+ self.add_line(' :staticmethod:', sourcename)
+ if self.analyzer and '.'.join(self.objpath) in self.analyzer.finals:
+ self.add_line(' :final:', sourcename)
+
+ def document_members(self, all_members: bool = False) -> None:
+ pass
+
+ def format_signature(self, **kwargs: Any) -> str:
+ if self.config.autodoc_typehints_format == 'short':
+ kwargs.setdefault('unqualified_typehints', True)
+ if self.config.python_display_short_literal_types:
+ kwargs.setdefault('short_literals', True)
+
+ sigs = []
+ if (
+ self.analyzer
+ and '.'.join(self.objpath) in self.analyzer.overloads
+ and self.config.autodoc_typehints != 'none'
+ ):
+ # Use signatures for overloaded methods instead of the implementation method.
+ overloaded = True
+ else:
+ overloaded = False
+ sig = super().format_signature(**kwargs)
+ sigs.append(sig)
+
+ meth = self.parent.__dict__.get(self.objpath[-1])
+ if inspect.is_singledispatch_method(meth):
+ # append signature of singledispatch'ed functions
+ for typ, func in meth.dispatcher.registry.items():
+ if typ is object:
+ pass # default implementation. skipped.
+ else:
+ if inspect.isclassmethod(func):
+ func = func.__func__
+ dispatchmeth = self.annotate_to_first_argument(func, typ)
+ if dispatchmeth:
+ documenter = MethodDocumenter(self.directive, '')
+ documenter.parent = self.parent
+ documenter.object = dispatchmeth
+ documenter.objpath = ['']
+ sigs.append(documenter.format_signature())
+ if overloaded and self.analyzer is not None:
+ if inspect.isstaticmethod(
+ self.object, cls=self.parent, name=self.object_name
+ ):
+ actual = inspect.signature(
+ self.object,
+ bound_method=False,
+ type_aliases=self.config.autodoc_type_aliases,
+ )
+ else:
+ actual = inspect.signature(
+ self.object,
+ bound_method=True,
+ type_aliases=self.config.autodoc_type_aliases,
+ )
+
+ __globals__ = safe_getattr(self.object, '__globals__', {})
+ for overload in self.analyzer.overloads['.'.join(self.objpath)]:
+ overload = self.merge_default_value(actual, overload)
+ overload = evaluate_signature(
+ overload, __globals__, self.config.autodoc_type_aliases
+ )
+
+ if not inspect.isstaticmethod(
+ self.object, cls=self.parent, name=self.object_name
+ ):
+ parameters = list(overload.parameters.values())
+ overload = overload.replace(parameters=parameters[1:])
+ sig = stringify_signature(overload, **kwargs)
+ sigs.append(sig)
+
+ return '\n'.join(sigs)
+
+ def merge_default_value(self, actual: Signature, overload: Signature) -> Signature:
+ """Merge default values of actual implementation to the overload variants."""
+ parameters = list(overload.parameters.values())
+ for i, param in enumerate(parameters):
+ actual_param = actual.parameters.get(param.name)
+ if actual_param and param.default == '...':
+ parameters[i] = param.replace(default=actual_param.default)
+
+ return overload.replace(parameters=parameters)
+
+ def annotate_to_first_argument(
+ self, func: Callable[..., Any], typ: type
+ ) -> Callable[..., Any] | None:
+ """Annotate type hint to the first argument of function if needed."""
+ try:
+ sig = inspect.signature(func, type_aliases=self.config.autodoc_type_aliases)
+ except TypeError as exc:
+ logger.warning(
+ __('Failed to get a method signature for %s: %s'), self.fullname, exc
+ )
+ return None
+ except ValueError:
+ return None
+
+ if len(sig.parameters) == 1:
+ return None
+
+ def dummy(): # type: ignore[no-untyped-def] # NoQA: ANN202
+ pass
+
+ params = list(sig.parameters.values())
+ if params[1].annotation is Parameter.empty:
+ params[1] = params[1].replace(annotation=typ)
+ try:
+ dummy.__signature__ = sig.replace( # type: ignore[attr-defined]
+ parameters=params
+ )
+ return dummy
+ except (AttributeError, TypeError):
+ # failed to update signature (ex. built-in or extension types)
+ return None
+
+ return func
+
+ def get_doc(self) -> list[list[str]] | None:
+ if self._new_docstrings is not None:
+ # docstring already returned previously, then modified by
+ # `DocstringSignatureMixin`. Just return the previously-computed
+ # result, so that we don't lose the processing done by
+ # `DocstringSignatureMixin`.
+ return self._new_docstrings
+ if self.objpath[-1] == '__init__':
+ docstring = getdoc(
+ self.object,
+ self.get_attr,
+ self.config.autodoc_inherit_docstrings,
+ self.parent,
+ self.object_name,
+ )
+ if docstring is not None and (
+ docstring == object.__init__.__doc__ # for pypy
+ or docstring.strip() == object.__init__.__doc__ # for !pypy
+ ):
+ docstring = None
+ if docstring:
+ tab_width = self.directive.state.document.settings.tab_width
+ return [prepare_docstring(docstring, tabsize=tab_width)]
+ else:
+ return []
+ elif self.objpath[-1] == '__new__':
+ docstring = getdoc(
+ self.object,
+ self.get_attr,
+ self.config.autodoc_inherit_docstrings,
+ self.parent,
+ self.object_name,
+ )
+ if docstring is not None and (
+ docstring == object.__new__.__doc__ # for pypy
+ or docstring.strip() == object.__new__.__doc__ # for !pypy
+ ):
+ docstring = None
+ if docstring:
+ tab_width = self.directive.state.document.settings.tab_width
+ return [prepare_docstring(docstring, tabsize=tab_width)]
+ else:
+ return []
+ else:
+ return super().get_doc()
+
+
+class NonDataDescriptorMixin(DataDocumenterMixinBase):
+ """Mixin for AttributeDocumenter to provide the feature for supporting non
+ data-descriptors.
+
+ .. note:: This mix-in must be inherited after other mix-ins. Otherwise, docstring
+ and :value: header will be suppressed unexpectedly.
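+
+ Example (a hypothetical non-data descriptor, i.e. ``__get__`` without
+ ``__set__``)::
+
+ class Getter:
+ def __get__(self, obj, objtype=None):
+ return 42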
+ """
+
+ def import_object(self, raiseerror: bool = False) -> bool:
+ ret = super().import_object(raiseerror) # type: ignore[misc]
+ if ret and not inspect.isattributedescriptor(self.object):
+ self.non_data_descriptor = True
+ else:
+ self.non_data_descriptor = False
+
+ return ret
+
+ def should_suppress_value_header(self) -> bool:
+ return (
+ not getattr(self, 'non_data_descriptor', False)
+ or super().should_suppress_directive_header()
+ )
+
+ def get_doc(self) -> list[list[str]] | None:
+ if getattr(self, 'non_data_descriptor', False):
+ # the docstring of a non-data descriptor is very probably the wrong
+ # thing to display
+ return None
+ else:
+ return super().get_doc() # type: ignore[misc]
+
+
+class SlotsMixin(DataDocumenterMixinBase):
+ """Mixin for AttributeDocumenter to provide the feature for supporting __slots__."""
+
+ def isslotsattribute(self) -> bool:
+ """Check the subject is an attribute in __slots__."""
+ try:
+ if parent___slots__ := inspect.getslots(self.parent):
+ return self.objpath[-1] in parent___slots__
+ else:
+ return False
+ except (ValueError, TypeError):
+ return False
+
+ def import_object(self, raiseerror: bool = False) -> bool:
+ ret = super().import_object(raiseerror) # type: ignore[misc]
+ if self.isslotsattribute():
+ self.object = SLOTSATTR
+
+ return ret
+
+ def should_suppress_value_header(self) -> bool:
+ if self.object is SLOTSATTR:
+ return True
+ else:
+ return super().should_suppress_value_header()
+
+ def get_doc(self) -> list[list[str]] | None:
+ if self.object is SLOTSATTR:
+ try:
+ parent___slots__ = inspect.getslots(self.parent)
+ if parent___slots__ and (
+ docstring := parent___slots__.get(self.objpath[-1])
+ ):
+ docstring = prepare_docstring(docstring)
+ return [docstring]
+ else:
+ return []
+ except ValueError as exc:
+ logger.warning(
+ __('Invalid __slots__ found on %s: %s. Ignored.'),
+ self.parent.__qualname__,
+ exc,
+ type='autodoc',
+ )
+ return []
+ else:
+ return super().get_doc() # type: ignore[misc]
+
+
+class RuntimeInstanceAttributeMixin(DataDocumenterMixinBase):
+ """Mixin for AttributeDocumenter to provide the feature for supporting runtime
+ instance attributes (that are defined in __init__() methods with doc-comments).
+
+ Example::
+
+ class Foo:
+ def __init__(self):
+ self.attr = None #: This is a target of this mix-in.
+ """
+
+ RUNTIME_INSTANCE_ATTRIBUTE = object()
+
+ def is_runtime_instance_attribute(self, parent: Any) -> bool:
+ """Check the subject is an attribute defined in __init__()."""
+ # An instance variable defined in __init__().
+ if self.get_attribute_comment(parent, self.objpath[-1]): # type: ignore[attr-defined]
+ return True
+ return self.is_runtime_instance_attribute_not_commented(parent)
+
+ def is_runtime_instance_attribute_not_commented(self, parent: Any) -> bool:
+ """Check the subject is an attribute defined in __init__() without comment."""
+ for cls in inspect.getmro(parent):
+ try:
+ module = safe_getattr(cls, '__module__')
+ qualname = safe_getattr(cls, '__qualname__')
+
+ analyzer = ModuleAnalyzer.for_module(module)
+ analyzer.analyze()
+ if qualname and self.objpath:
+ key = f'{qualname}.{self.objpath[-1]}'
+ if key in analyzer.tagorder:
+ return True
+ except (AttributeError, PycodeError):
+ pass
+
+ return False
+
+ def import_object(self, raiseerror: bool = False) -> bool:
+ """Check the existence of runtime instance attribute after failing to import the
+ attribute.
+ """
+ try:
+ return super().import_object(raiseerror=True) # type: ignore[misc]
+ except ImportError as exc:
+ try:
+ with mock(self.config.autodoc_mock_imports):
+ ret = import_object(
+ self.modname,
+ self.objpath[:-1],
+ 'class',
+ attrgetter=self.get_attr, # type: ignore[attr-defined]
+ )
+ parent = ret[3]
+ if self.is_runtime_instance_attribute(parent):
+ self.object = self.RUNTIME_INSTANCE_ATTRIBUTE
+ self.parent = parent
+ return True
+ except ImportError:
+ pass
+
+ if raiseerror:
+ raise
+ logger.warning(exc.args[0], type='autodoc', subtype='import_object')
+ self.env.note_reread()
+ return False
+
+ def should_suppress_value_header(self) -> bool:
+ return (
+ self.object is self.RUNTIME_INSTANCE_ATTRIBUTE
+ or super().should_suppress_value_header()
+ )
+
+ def get_doc(self) -> list[list[str]] | None:
+ if (
+ self.object is self.RUNTIME_INSTANCE_ATTRIBUTE
+ and self.is_runtime_instance_attribute_not_commented(self.parent)
+ ):
+ return None
+ else:
+ return super().get_doc() # type: ignore[misc]
+
+
+class UninitializedInstanceAttributeMixin(DataDocumenterMixinBase):
+ """Mixin for AttributeDocumenter to provide the feature for supporting uninitialized
+ instance attributes (PEP-526 styled, annotation only attributes).
+
+ Example::
+
+ class Foo:
+ attr: int #: This is a target of this mix-in.
+ """
+
+ def is_uninitialized_instance_attribute(self, parent: Any) -> bool:
+ """Check the subject is an annotation only attribute."""
+ annotations = get_type_hints(
+ parent, None, self.config.autodoc_type_aliases, include_extras=True
+ )
+ return self.objpath[-1] in annotations
+
+ def import_object(self, raiseerror: bool = False) -> bool:
+ """Check the existence of uninitialized instance attribute when failed to import
+ the attribute.
+ """
+ try:
+ return super().import_object(raiseerror=True) # type: ignore[misc]
+ except ImportError as exc:
+ try:
+ ret = import_object(
+ self.modname,
+ self.objpath[:-1],
+ 'class',
+ attrgetter=self.get_attr, # type: ignore[attr-defined]
+ )
+ parent = ret[3]
+ if self.is_uninitialized_instance_attribute(parent):
+ self.object = UNINITIALIZED_ATTR
+ self.parent = parent
+ return True
+ except ImportError:
+ pass
+
+ if raiseerror:
+ raise
+ logger.warning(exc.args[0], type='autodoc', subtype='import_object')
+ self.env.note_reread()
+ return False
+
+ def should_suppress_value_header(self) -> bool:
+ return (
+ self.object is UNINITIALIZED_ATTR or super().should_suppress_value_header()
+ )
+
+ def get_doc(self) -> list[list[str]] | None:
+ if self.object is UNINITIALIZED_ATTR:
+ return None
+ return super().get_doc() # type: ignore[misc]
+
+
+class AttributeDocumenter( # type: ignore[misc]
+ GenericAliasMixin,
+ SlotsMixin,
+ RuntimeInstanceAttributeMixin,
+ UninitializedInstanceAttributeMixin,
+ NonDataDescriptorMixin,
+ DocstringStripSignatureMixin,
+ ClassLevelDocumenter,
+):
+ """Specialized Documenter subclass for attributes."""
+
+ objtype = 'attribute'
+ member_order = 60
+ option_spec: ClassVar[OptionSpec] = dict(ModuleLevelDocumenter.option_spec)
+ option_spec['annotation'] = annotation_option
+ option_spec['no-value'] = bool_option
+
+ # must be higher than MethodDocumenter, else it will recognize
+ # some non-data descriptors as methods
+ priority = 10
+
+ @staticmethod
+ def is_function_or_method(obj: Any) -> bool:
+ return (
+ inspect.isfunction(obj) or inspect.isbuiltin(obj) or inspect.ismethod(obj)
+ )
+
+ @classmethod
+ def can_document_member(
+ cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
+ ) -> bool:
+ if isinstance(parent, ModuleDocumenter):
+ return False
+ if inspect.isattributedescriptor(member):
+ return True
+ return not inspect.isroutine(member) and not isinstance(member, type)
+
+ def document_members(self, all_members: bool = False) -> None:
+ pass
+
+ def update_annotations(self, parent: Any) -> None:
+ """Update __annotations__ to support type_comment and so on."""
+ try:
+ annotations = dict(inspect.getannotations(parent))
+ parent.__annotations__ = annotations
+
+ for cls in inspect.getmro(parent):
+ try:
+ module = safe_getattr(cls, '__module__')
+ qualname = safe_getattr(cls, '__qualname__')
+
+ analyzer = ModuleAnalyzer.for_module(module)
+ analyzer.analyze()
+ anns = analyzer.annotations
+ for (classname, attrname), annotation in anns.items():
+ if classname == qualname and attrname not in annotations:
+ annotations[attrname] = annotation
+ except (AttributeError, PycodeError):
+ pass
+ except (AttributeError, TypeError):
+ # Failed to set __annotations__ (built-in, extensions, etc.)
+ pass
+
+ def import_object(self, raiseerror: bool = False) -> bool:
+ ret = super().import_object(raiseerror)
+ if inspect.isenumattribute(self.object):
+ self.object = self.object.value
+ if self.parent:
+ self.update_annotations(self.parent)
+
+ return ret
+
+ def get_real_modname(self) -> str:
+ real_modname = self.get_attr(self.parent or self.object, '__module__', None)
+ return real_modname or self.modname
+
+ def should_suppress_value_header(self) -> bool:
+ if super().should_suppress_value_header():
+ return True
+ else:
+ doc = self.get_doc()
+ if doc:
+ _docstring, metadata = separate_metadata(
+ '\n'.join(functools.reduce(operator.iadd, doc, []))
+ )
+ if 'hide-value' in metadata:
+ return True
+
+ return False
+
+ def add_directive_header(self, sig: str) -> None:
+ super().add_directive_header(sig)
+ sourcename = self.get_sourcename()
+ if (
+ self.options.annotation is SUPPRESS
+ or self.should_suppress_directive_header()
+ ):
+ pass
+ elif self.options.annotation:
+ self.add_line(' :annotation: %s' % self.options.annotation, sourcename)
+ else:
+ if self.config.autodoc_typehints != 'none':
+ # obtain type annotation for this attribute
+ annotations = get_type_hints(
+ self.parent,
+ None,
+ self.config.autodoc_type_aliases,
+ include_extras=True,
+ )
+ if self.objpath[-1] in annotations:
+ mode = _get_render_mode(self.config.autodoc_typehints_format)
+ short_literals = self.config.python_display_short_literal_types
+ objrepr = stringify_annotation(
+ annotations.get(self.objpath[-1]),
+ mode,
+ short_literals=short_literals,
+ )
+ self.add_line(' :type: ' + objrepr, sourcename)
+
+ try:
+ if (
+ self.options.no_value
+ or self.should_suppress_value_header()
+ or ismock(self.object)
+ ):
+ pass
+ else:
+ objrepr = object_description(self.object)
+ self.add_line(' :value: ' + objrepr, sourcename)
+ except ValueError:
+ pass
+
+ def get_attribute_comment(self, parent: Any, attrname: str) -> list[str] | None:
+ for cls in inspect.getmro(parent):
+ try:
+ module = safe_getattr(cls, '__module__')
+ qualname = safe_getattr(cls, '__qualname__')
+
+ analyzer = ModuleAnalyzer.for_module(module)
+ analyzer.analyze()
+ if qualname and self.objpath:
+ key = (qualname, attrname)
+ if key in analyzer.attr_docs:
+ return list(analyzer.attr_docs[key])
+ except (AttributeError, PycodeError):
+ pass
+
+ return None
+
+ def get_doc(self) -> list[list[str]] | None:
+ # Check the attribute has a docstring-comment
+ comment = self.get_attribute_comment(self.parent, self.objpath[-1])
+ if comment:
+ return [comment]
+
+ try:
+ # Disable `autodoc_inherit_docstrings` temporarily, to avoid obtaining
+ # a docstring from the value that the descriptor unexpectedly returns.
+ # See: https://github.com/sphinx-doc/sphinx/issues/7805
+ orig = self.config.autodoc_inherit_docstrings
+ self.config.autodoc_inherit_docstrings = False
+ return super().get_doc()
+ finally:
+ self.config.autodoc_inherit_docstrings = orig
+
+ def add_content(self, more_content: StringList | None) -> None:
+ # Disable analyzing attribute comment on Documenter.add_content() to control it on
+ # AttributeDocumenter.add_content()
+ self.analyzer = None
+
+ if more_content is None:
+ more_content = StringList()
+ self.update_content(more_content)
+ super().add_content(more_content)
+
+
+class PropertyDocumenter(DocstringStripSignatureMixin, ClassLevelDocumenter): # type: ignore[misc]
+ """Specialized Documenter subclass for properties."""
+
+ objtype = 'property'
+ member_order = 60
+
+ # before AttributeDocumenter
+ priority = AttributeDocumenter.priority + 1
+
+ @classmethod
+ def can_document_member(
+ cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
+ ) -> bool:
+ if isinstance(parent, ClassDocumenter):
+ if inspect.isproperty(member):
+ return True
+ else:
+ __dict__ = safe_getattr(parent.object, '__dict__', {})
+ obj = __dict__.get(membername)
+ return isinstance(obj, classmethod) and inspect.isproperty(obj.__func__)
+ else:
+ return False
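+ # For example (hypothetical; works on Python 3.9-3.12, where classmethod
+ # could still wrap property):
+ # class Foo:
+ # @classmethod
+ # @property
+ # def bar(cls) -> int: ...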
+
+ def import_object(self, raiseerror: bool = False) -> bool:
+ """Check the existence of uninitialized instance attribute when failed to import
+ the attribute.
+ """
+ ret = super().import_object(raiseerror)
+ if ret and not inspect.isproperty(self.object):
+ __dict__ = safe_getattr(self.parent, '__dict__', {})
+ obj = __dict__.get(self.objpath[-1])
+ if isinstance(obj, classmethod) and inspect.isproperty(obj.__func__):
+ self.object = obj.__func__
+ self.isclassmethod: bool = True
+ return True
+ else:
+ return False
+
+ self.isclassmethod = False
+ return ret
+
+ def format_args(self, **kwargs: Any) -> str:
+ func = self._get_property_getter()
+ if func is None:
+ return ''
+
+ # update the annotations of the property getter
+ self._events.emit('autodoc-before-process-signature', func, False)
+ # correctly format the arguments for a property
+ return super().format_args(**kwargs)
+
+ def document_members(self, all_members: bool = False) -> None:
+ pass
+
+ def get_real_modname(self) -> str:
+ real_modname = self.get_attr(self.parent or self.object, '__module__', None)
+ return real_modname or self.modname
+
+ def add_directive_header(self, sig: str) -> None:
+ super().add_directive_header(sig)
+ sourcename = self.get_sourcename()
+ if inspect.isabstractmethod(self.object):
+ self.add_line(' :abstractmethod:', sourcename)
+ if self.isclassmethod:
+ self.add_line(' :classmethod:', sourcename)
+
+ func = self._get_property_getter()
+ if func is None or self.config.autodoc_typehints == 'none':
+ return
+
+ try:
+ signature = inspect.signature(
+ func, type_aliases=self.config.autodoc_type_aliases
+ )
+ if signature.return_annotation is not Parameter.empty:
+ mode = _get_render_mode(self.config.autodoc_typehints_format)
+ short_literals = self.config.python_display_short_literal_types
+ objrepr = stringify_annotation(
+ signature.return_annotation, mode, short_literals=short_literals
+ )
+ self.add_line(' :type: ' + objrepr, sourcename)
+ except TypeError as exc:
+ logger.warning(
+ __('Failed to get a function signature for %s: %s'), self.fullname, exc
+ )
+ except ValueError:
+ pass
+
+ def _get_property_getter(self) -> Callable[..., Any] | None:
+ if safe_getattr(self.object, 'fget', None): # property
+ return self.object.fget
+ if safe_getattr(self.object, 'func', None): # cached_property
+ return self.object.func
+ return None
+
+
+def autodoc_attrgetter(
+ obj: Any, name: str, *defargs: Any, registry: SphinxComponentRegistry
+) -> Any:
+ """Alternative getattr() for types"""
+ for typ, func in registry.autodoc_attrgetters.items():
+ if isinstance(obj, typ):
+ return func(obj, name, *defargs)
+
+ return safe_getattr(obj, name, *defargs)
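+
+ # A usage sketch (hypothetical types and getter): an extension can register
+ # a custom getter with app.add_autodoc_attrgetter(SomeProxyType, my_getattr);
+ # the loop above will then use my_getattr for SomeProxyType instances.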
From 912e5024c93ac7cc642899893f512f67e20c127c Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 24 Jul 2025 10:15:06 +0100
Subject: [PATCH 184/466] Bump types-pygments to 2.19.0.20250715 (#13746)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 21261e85904..fac9361c9e3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -100,7 +100,7 @@ lint = [
"types-defusedxml==0.7.0.20250708",
"types-docutils==0.21.0.20250525",
"types-Pillow==10.2.0.20240822",
- "types-Pygments==2.19.0.20250516",
+ "types-Pygments==2.19.0.20250715",
"types-requests==2.32.4.20250611", # align with requests
"types-urllib3==1.26.25.14",
"pyright==1.1.400",
@@ -169,7 +169,7 @@ type-stubs = [
"types-defusedxml==0.7.0.20250708",
"types-docutils==0.21.0.20250525",
"types-Pillow==10.2.0.20240822",
- "types-Pygments==2.19.0.20250516",
+ "types-Pygments==2.19.0.20250715",
"types-requests==2.32.4.20250611",
"types-urllib3==1.26.25.14",
]
From 7522fb6123b2d0881625429053b9a432590ac051 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 24 Jul 2025 10:15:34 +0100
Subject: [PATCH 185/466] Bump mypy to 1.17.0 (#13745)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index fac9361c9e3..0a645b53e22 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -94,7 +94,7 @@ docs = [
]
lint = [
"ruff==0.12.4",
- "mypy==1.16.1",
+ "mypy==1.17.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
"types-defusedxml==0.7.0.20250708",
@@ -158,7 +158,7 @@ translations = [
"Jinja2>=3.1",
]
types = [
- "mypy==1.16.1",
+ "mypy==1.17.0",
"pyrefly",
"pyright==1.1.400",
{ include-group = "type-stubs" },
From fb2ec7b9d8ff1c81bc4ec4e6b6abd1f2f7cf3eff Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 25 Jul 2025 15:27:15 +0100
Subject: [PATCH 186/466] Bump Ruff to 0.12.5 (#13750)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 0a645b53e22..74c3f5a2deb 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -93,7 +93,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.12.4",
+ "ruff==0.12.5",
"mypy==1.17.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
@@ -136,7 +136,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.12.4",
+ "ruff==0.12.5",
"sphinx-lint>=0.9",
]
package = [
From 1adc61a7a21599663ece655724018968be80ac2c Mon Sep 17 00:00:00 2001
From: Stephen Finucane
Date: Fri, 25 Jul 2025 15:27:58 +0100
Subject: [PATCH 187/466] Indicate preference for follow-up commits over force
pushes (#13748)
---
doc/internals/contributing.rst | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/doc/internals/contributing.rst b/doc/internals/contributing.rst
index de4224d7bc3..83b23fb388e 100644
--- a/doc/internals/contributing.rst
+++ b/doc/internals/contributing.rst
@@ -138,6 +138,10 @@ These are the basic steps needed to start developing on Sphinx.
#. Wait for a core developer or contributor to review your changes.
+ You may be asked to address comments on the review. If so, please avoid
+ force pushing to the branch. Sphinx uses the *squash merge* strategy when
+ merging PRs, so follow-up commits will all be combined.
+
Coding style
~~~~~~~~~~~~
From b92a4ed1e6657bef7b1f3ec58a1fec697317bd25 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Fri, 25 Jul 2025 16:33:26 +0200
Subject: [PATCH 188/466] Merge DocstringStripSignatureMixin into
DocstringSignatureMixin (#13752)
---
sphinx/ext/autodoc/__init__.py | 2 --
sphinx/ext/autodoc/_documenters.py | 36 +++++++++++++-----------------
2 files changed, 15 insertions(+), 23 deletions(-)
diff --git a/sphinx/ext/autodoc/__init__.py b/sphinx/ext/autodoc/__init__.py
index 87ffa660586..ca1fcfd1dbe 100644
--- a/sphinx/ext/autodoc/__init__.py
+++ b/sphinx/ext/autodoc/__init__.py
@@ -31,7 +31,6 @@
DataDocumenterMixinBase,
DecoratorDocumenter,
DocstringSignatureMixin,
- DocstringStripSignatureMixin,
Documenter,
ExceptionDocumenter,
FunctionDocumenter,
@@ -108,7 +107,6 @@
'ModuleLevelDocumenter',
'ClassLevelDocumenter',
'DocstringSignatureMixin',
- 'DocstringStripSignatureMixin',
'DataDocumenterMixinBase',
'GenericAliasMixin',
'UninitializedGlobalVariableMixin',
diff --git a/sphinx/ext/autodoc/_documenters.py b/sphinx/ext/autodoc/_documenters.py
index 10047a7afb8..1ea1606c87a 100644
--- a/sphinx/ext/autodoc/_documenters.py
+++ b/sphinx/ext/autodoc/_documenters.py
@@ -1155,6 +1155,9 @@ class DocstringSignatureMixin:
feature of reading the signature from the docstring.
"""
+ __docstring_strip_signature__: ClassVar[bool] = False
+ """If True, strip any function signature from the docstring."""
+
_new_docstrings: list[list[str]] | None = None
_signatures: list[str] = []
@@ -1223,7 +1226,12 @@ def format_signature(self, **kwargs: Any) -> str:
# the feature is enabled
result = self._find_signature()
if result is not None:
- self.args, self.retann = result
+ if self.__docstring_strip_signature__:
+ # Discarding _args is the only difference.
+ # Documenter.format_signature uses self.args to format the signature.
+ _args, self.retann = result
+ else:
+ self.args, self.retann = result
sig = super().format_signature(**kwargs) # type: ignore[misc]
if self._signatures:
return '\n'.join((sig, *self._signatures))
@@ -1231,24 +1239,6 @@ def format_signature(self, **kwargs: Any) -> str:
return sig
-class DocstringStripSignatureMixin(DocstringSignatureMixin):
- """Mixin for AttributeDocumenter to provide the
- feature of stripping any function signature from the docstring.
- """
-
- def format_signature(self, **kwargs: Any) -> str:
- if self.args is None and self.config.autodoc_docstring_signature: # type: ignore[attr-defined]
- # only act if a signature is not explicitly given already, and if
- # the feature is enabled
- result = self._find_signature()
- if result is not None:
- # Discarding _args is the only difference from
- # DocstringSignatureMixin.format_signature.
- # Documenter.format_signature uses self.args to format the signature.
- _args, self.retann = result
- return super().format_signature(**kwargs)
-
-
class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type: ignore[misc]
"""Specialized Documenter subclass for functions."""
@@ -2651,11 +2641,13 @@ class AttributeDocumenter( # type: ignore[misc]
RuntimeInstanceAttributeMixin,
UninitializedInstanceAttributeMixin,
NonDataDescriptorMixin,
- DocstringStripSignatureMixin,
+ DocstringSignatureMixin,
ClassLevelDocumenter,
):
"""Specialized Documenter subclass for attributes."""
+ __docstring_strip_signature__ = True
+
objtype = 'attribute'
member_order = 60
option_spec: ClassVar[OptionSpec] = dict(ModuleLevelDocumenter.option_spec)
@@ -2821,9 +2813,11 @@ def add_content(self, more_content: StringList | None) -> None:
super().add_content(more_content)
-class PropertyDocumenter(DocstringStripSignatureMixin, ClassLevelDocumenter): # type: ignore[misc]
+class PropertyDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type: ignore[misc]
"""Specialized Documenter subclass for properties."""
+ __docstring_strip_signature__ = True
+
objtype = 'property'
member_order = 60
From 69915e14cce11c73d137710962d598900bb4e607 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Fri, 25 Jul 2025 16:43:27 +0200
Subject: [PATCH 189/466] Merge ``DecoratorDocumenter.format_args()`` into
FunctionDocumenter (#13753)
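
``DecoratorDocumenter`` now only lowers its priority; the signature special
case lives in ``FunctionDocumenter.format_args()``. A minimal sketch of the
case being handled (the decorator name is illustrative):

    def deco(func):
        """Decorator taking only the wrapped callable."""
        ...

    # ``.. autodecorator:: deco`` renders without an argument list, while a
    # decorator factory such as ``deco(arg, flag=True)`` keeps its signature.
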
---
sphinx/ext/autodoc/_documenters.py | 13 +++++--------
1 file changed, 5 insertions(+), 8 deletions(-)
diff --git a/sphinx/ext/autodoc/_documenters.py b/sphinx/ext/autodoc/_documenters.py
index 1ea1606c87a..238cae28104 100644
--- a/sphinx/ext/autodoc/_documenters.py
+++ b/sphinx/ext/autodoc/_documenters.py
@@ -1281,6 +1281,10 @@ def format_args(self, **kwargs: Any) -> str:
if self.config.strip_signature_backslash:
# escape backslashes for reST
args = args.replace('\\', '\\\\')
+
+ if self.objtype == 'decorator' and ',' not in args:
+            # Special case: omit the signature for single-argument decorators.
+ return ''
return args
def document_members(self, all_members: bool = False) -> None:
@@ -1391,14 +1395,7 @@ class DecoratorDocumenter(FunctionDocumenter):
objtype = 'decorator'
# must be lower than FunctionDocumenter
- priority = -1
-
- def format_args(self, **kwargs: Any) -> str:
- args = super().format_args(**kwargs)
- if ',' in args:
- return args
- else:
- return ''
+ priority = FunctionDocumenter.priority - 1
# Types which have confusing metaclass signatures it would be best not to show.
From ab832026ff34882633c6d659bb53e9fd2a4716ce Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Fri, 25 Jul 2025 16:57:35 +0200
Subject: [PATCH 190/466] Combine ``resolve_name()`` methods (#13754)
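
The per-class ``resolve_name()`` overrides are folded into a single
implementation on ``Documenter``, dispatching on ``isinstance``. Illustrative
outcomes, extending the ``zipfile`` example from the docstring:

    # automodule:   'zipfile'              -> ('zipfile', [])
    # autofunction: 'zipfile.is_zipfile'   -> ('zipfile', ['is_zipfile'])
    # automethod:   'zipfile.ZipFile.open' -> ('zipfile', ['ZipFile', 'open'])
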
---
sphinx/ext/autodoc/_documenters.py | 160 +++++++++++++++--------------
1 file changed, 82 insertions(+), 78 deletions(-)
diff --git a/sphinx/ext/autodoc/_documenters.py b/sphinx/ext/autodoc/_documenters.py
index 238cae28104..01c8d874ecf 100644
--- a/sphinx/ext/autodoc/_documenters.py
+++ b/sphinx/ext/autodoc/_documenters.py
@@ -222,6 +222,72 @@ def resolve_name(
example, it would return ``('zipfile', ['ZipFile', 'open'])`` for the
``zipfile.ZipFile.open`` method.
"""
+ if isinstance(self, ModuleDocumenter):
+ if modname is not None:
+ logger.warning(
+ __('"::" in automodule name doesn\'t make sense'), type='autodoc'
+ )
+ return (path or '') + base, []
+
+ if isinstance(
+ self,
+ (
+ ModuleLevelDocumenter,
+ FunctionDocumenter,
+ ClassDocumenter,
+ DataDocumenter,
+ ),
+ ):
+ if modname is not None:
+ return modname, [*parents, base]
+ if path:
+ modname = path.rstrip('.')
+ return modname, [*parents, base]
+
+ # if documenting a toplevel object without explicit module,
+ # it can be contained in another auto directive ...
+ modname = self._current_document.autodoc_module
+ # ... or in the scope of a module directive
+ if not modname:
+ modname = self.env.ref_context.get('py:module')
+ # ... else, it stays None, which means invalid
+ return modname, [*parents, base]
+
+ if isinstance(
+ self,
+ (
+ ClassLevelDocumenter,
+ MethodDocumenter,
+ AttributeDocumenter,
+ PropertyDocumenter,
+ ),
+ ):
+ if modname is not None:
+ return modname, [*parents, base]
+
+ if path:
+ mod_cls = path.rstrip('.')
+ else:
+ # if documenting a class-level object without path,
+ # there must be a current class, either from a parent
+ # auto directive ...
+ mod_cls = self._current_document.autodoc_class
+ # ... or from a class directive
+ if not mod_cls:
+ mod_cls = self.env.ref_context.get('py:class', '')
+ # ... if still falsy, there's no way to know
+ if not mod_cls:
+ return None, []
+ modname, _sep, cls = mod_cls.rpartition('.')
+ parents = [cls]
+ # if the module name is still missing, get it like above
+ if not modname:
+ modname = self._current_document.autodoc_module
+ if not modname:
+ modname = self.env.ref_context.get('py:module')
+ # ... else, it stays None, which means invalid
+ return modname, [*parents, base]
+
msg = 'must be implemented in subclasses'
raise NotImplementedError(msg)
@@ -949,15 +1015,6 @@ def can_document_member(
# don't document submodules automatically
return False
- def resolve_name(
- self, modname: str | None, parents: Any, path: str, base: str
- ) -> tuple[str | None, list[str]]:
- if modname is not None:
- logger.warning(
- __('"::" in automodule name doesn\'t make sense'), type='autodoc'
- )
- return (path or '') + base, []
-
def parse_name(self) -> bool:
ret = super().parse_name()
if self.args or self.retann:
@@ -1091,65 +1148,6 @@ def keyfunc(entry: tuple[Documenter, bool]) -> int:
return super().sort_members(documenters, order)
-class ModuleLevelDocumenter(Documenter):
- """Specialized Documenter subclass for objects on module level (functions,
- classes, data/constants).
- """
-
- def resolve_name(
- self, modname: str | None, parents: Any, path: str, base: str
- ) -> tuple[str | None, list[str]]:
- if modname is not None:
- return modname, [*parents, base]
- if path:
- modname = path.rstrip('.')
- return modname, [*parents, base]
-
- # if documenting a toplevel object without explicit module,
- # it can be contained in another auto directive ...
- modname = self._current_document.autodoc_module
- # ... or in the scope of a module directive
- if not modname:
- modname = self.env.ref_context.get('py:module')
- # ... else, it stays None, which means invalid
- return modname, [*parents, base]
-
-
-class ClassLevelDocumenter(Documenter):
- """Specialized Documenter subclass for objects on class level (methods,
- attributes).
- """
-
- def resolve_name(
- self, modname: str | None, parents: Any, path: str, base: str
- ) -> tuple[str | None, list[str]]:
- if modname is not None:
- return modname, [*parents, base]
-
- if path:
- mod_cls = path.rstrip('.')
- else:
- # if documenting a class-level object without path,
- # there must be a current class, either from a parent
- # auto directive ...
- mod_cls = self._current_document.autodoc_class
- # ... or from a class directive
- if not mod_cls:
- mod_cls = self.env.ref_context.get('py:class', '')
- # ... if still falsy, there's no way to know
- if not mod_cls:
- return None, []
- modname, _sep, cls = mod_cls.rpartition('.')
- parents = [cls]
- # if the module name is still missing, get it like above
- if not modname:
- modname = self._current_document.autodoc_module
- if not modname:
- modname = self.env.ref_context.get('py:module')
- # ... else, it stays None, which means invalid
- return modname, [*parents, base]
-
-
class DocstringSignatureMixin:
"""Mixin for FunctionDocumenter and MethodDocumenter to provide the
feature of reading the signature from the docstring.
@@ -1239,7 +1237,7 @@ def format_signature(self, **kwargs: Any) -> str:
return sig
-class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type: ignore[misc]
+class FunctionDocumenter(DocstringSignatureMixin, Documenter): # type: ignore[misc]
"""Specialized Documenter subclass for functions."""
objtype = 'function'
@@ -1412,7 +1410,7 @@ class DecoratorDocumenter(FunctionDocumenter):
})
-class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type: ignore[misc]
+class ClassDocumenter(DocstringSignatureMixin, Documenter): # type: ignore[misc]
"""Specialized Documenter subclass for classes."""
objtype = 'class'
@@ -2018,15 +2016,13 @@ def get_doc(self) -> list[list[str]] | None:
return super().get_doc() # type: ignore[misc]
-class DataDocumenter(
- GenericAliasMixin, UninitializedGlobalVariableMixin, ModuleLevelDocumenter
-):
+class DataDocumenter(GenericAliasMixin, UninitializedGlobalVariableMixin, Documenter):
"""Specialized Documenter subclass for data items."""
objtype = 'data'
member_order = 40
priority = -10
- option_spec: ClassVar[OptionSpec] = dict(ModuleLevelDocumenter.option_spec)
+ option_spec: ClassVar[OptionSpec] = dict(Documenter.option_spec)
option_spec['annotation'] = annotation_option
option_spec['no-value'] = bool_option
@@ -2151,7 +2147,7 @@ def add_content(self, more_content: StringList | None) -> None:
super().add_content(more_content)
-class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type: ignore[misc]
+class MethodDocumenter(DocstringSignatureMixin, Documenter): # type: ignore[misc]
"""Specialized Documenter subclass for methods (normal, static and class)."""
objtype = 'method'
@@ -2639,7 +2635,7 @@ class AttributeDocumenter( # type: ignore[misc]
UninitializedInstanceAttributeMixin,
NonDataDescriptorMixin,
DocstringSignatureMixin,
- ClassLevelDocumenter,
+ Documenter,
):
"""Specialized Documenter subclass for attributes."""
@@ -2647,7 +2643,7 @@ class AttributeDocumenter( # type: ignore[misc]
objtype = 'attribute'
member_order = 60
- option_spec: ClassVar[OptionSpec] = dict(ModuleLevelDocumenter.option_spec)
+ option_spec: ClassVar[OptionSpec] = dict(Documenter.option_spec)
option_spec['annotation'] = annotation_option
option_spec['no-value'] = bool_option
@@ -2810,7 +2806,7 @@ def add_content(self, more_content: StringList | None) -> None:
super().add_content(more_content)
-class PropertyDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type: ignore[misc]
+class PropertyDocumenter(DocstringSignatureMixin, Documenter): # type: ignore[misc]
"""Specialized Documenter subclass for properties."""
__docstring_strip_signature__ = True
@@ -2909,6 +2905,14 @@ def _get_property_getter(self) -> Callable[..., Any] | None:
return None
+class ModuleLevelDocumenter(Documenter):
+ """Retained for compatibility."""
+
+
+class ClassLevelDocumenter(Documenter):
+ """Retained for compatibility."""
+
+
def autodoc_attrgetter(
obj: Any, name: str, *defargs: Any, registry: SphinxComponentRegistry
) -> Any:
From 345f0686385a324738712fa2681d824b71aebb05 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Fri, 25 Jul 2025 17:29:59 +0200
Subject: [PATCH 191/466] Merge ``DocstringSignatureMixin`` into ``Documenter``
(#13755)
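
The mixin's behaviour is preserved behind the new ``__docstring_signature__``
class flag. For reference, the feature reads signatures embedded in
docstrings, e.g. (a sketch; the function is hypothetical):

    def compress(*args, **kwargs):
        """compress(data, level=9) -> bytes

        Compress *data* at the given *level*.
        """

With ``autodoc_docstring_signature`` enabled (the default), autodoc documents
``compress(data, level=9) -> bytes`` and strips the first line from the
rendered docstring.
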
---
sphinx/ext/autodoc/_documenters.py | 215 +++++++++++++++--------------
1 file changed, 113 insertions(+), 102 deletions(-)
diff --git a/sphinx/ext/autodoc/_documenters.py b/sphinx/ext/autodoc/_documenters.py
index 01c8d874ecf..42d6d867b7c 100644
--- a/sphinx/ext/autodoc/_documenters.py
+++ b/sphinx/ext/autodoc/_documenters.py
@@ -153,6 +153,15 @@ class Documenter:
#: true if the generated content may contain titles
titles_allowed: ClassVar = True
+ __docstring_signature__: ClassVar[bool] = False
+ """If True, attempt to read the signature from the docstring."""
+
+ __docstring_strip_signature__: ClassVar[bool] = False
+ """If True, strip any function signature from the docstring."""
+
+ _new_docstrings: list[list[str]] | None = None
+ _signatures: list[str] = []
+
option_spec: ClassVar[OptionSpec] = {
'no-index': bool_option,
'no-index-entry': bool_option,
@@ -191,7 +200,7 @@ def __init__(
# extra signature items (arguments and return annotation,
# also set after resolve_name succeeds)
self.args: str | None = None
- self.retann: str = ''
+ self.retann: str | None = ''
# the object to document (set after import_object succeeds)
self.object: Any = None
self.object_name = ''
@@ -401,11 +410,80 @@ def _call_format_args(self, **kwargs: Any) -> str:
# retry without arguments for old documenters
return self.format_args()
+ def _find_signature(self) -> tuple[str | None, str | None] | None:
+ # candidates of the object name
+ valid_names = [self.objpath[-1]]
+ if isinstance(self, ClassDocumenter):
+ valid_names.append('__init__')
+ if hasattr(self.object, '__mro__'):
+ valid_names.extend(cls.__name__ for cls in self.object.__mro__)
+
+ docstrings = self.get_doc()
+ if docstrings is None:
+ return None, None
+ self._new_docstrings = docstrings[:]
+ self._signatures = []
+ result = None
+ for i, doclines in enumerate(docstrings):
+ for j, line in enumerate(doclines):
+ if not line:
+ # no lines in docstring, no match
+ break
+
+ if line.endswith('\\'):
+ line = line.rstrip('\\').rstrip()
+
+ # match first line of docstring against signature RE
+ match = py_ext_sig_re.match(line)
+ if not match:
+ break
+ _exmod, _path, base, _tp_list, args, retann = match.groups()
+
+ # the base name must match ours
+ if base not in valid_names:
+ break
+
+ # re-prepare docstring to ignore more leading indentation
+ directive = self.directive
+ tab_width = directive.state.document.settings.tab_width
+ self._new_docstrings[i] = prepare_docstring(
+ '\n'.join(doclines[j + 1 :]), tab_width
+ )
+
+ if result is None:
+ # first signature
+ result = args, retann
+ else:
+ # subsequent signatures
+ self._signatures.append(f'({args}) -> {retann}')
+
+ if result is not None:
+ # finish the loop when signature found
+ break
+
+ return result
+
def format_signature(self, **kwargs: Any) -> str:
"""Format the signature (arguments and return annotation) of the object.
Let the user process it via the ``autodoc-process-signature`` event.
"""
+ if (
+ self.__docstring_signature__
+ and self.args is None
+ and self.config.autodoc_docstring_signature
+ ):
+ # only act if a signature is not explicitly given already, and if
+ # the feature is enabled
+ result = self._find_signature()
+ if result is not None:
+ if self.__docstring_strip_signature__:
+ # Discarding _args is the only difference.
+                    # Documenter.format_signature uses the self.args value for formatting.
+ _args, self.retann = result
+ else:
+ self.args, self.retann = result
+
if self.args is not None:
# signature given explicitly
args = f'({self.args})'
@@ -442,9 +520,16 @@ def format_signature(self, **kwargs: Any) -> str:
args, retann = result
if args is not None:
- return args + ((' -> %s' % retann) if retann else '')
+ if retann:
+ sig = f'{args} -> {retann}'
+ else:
+ sig = args
else:
- return ''
+ sig = ''
+
+ if self.__docstring_signature__ and self._signatures:
+ return '\n'.join((sig, *self._signatures))
+ return sig
def add_directive_header(self, sig: str) -> None:
"""Add the directive header and options to the generated content."""
@@ -475,6 +560,9 @@ def get_doc(self) -> list[list[str]] | None:
When it returns None, autodoc-process-docstring will not be called for this
object.
"""
+ if self.__docstring_signature__ and self._new_docstrings is not None:
+ return self._new_docstrings
+
docstring = getdoc(
self.object,
self.get_attr,
@@ -1148,98 +1236,11 @@ def keyfunc(entry: tuple[Documenter, bool]) -> int:
return super().sort_members(documenters, order)
-class DocstringSignatureMixin:
- """Mixin for FunctionDocumenter and MethodDocumenter to provide the
- feature of reading the signature from the docstring.
- """
-
- __docstring_strip_signature__: ClassVar[bool] = False
- """If True, strip any function signature from the docstring."""
-
- _new_docstrings: list[list[str]] | None = None
- _signatures: list[str] = []
-
- def _find_signature(self) -> tuple[str | None, str | None] | None:
- # candidates of the object name
- valid_names = [self.objpath[-1]] # type: ignore[attr-defined]
- if isinstance(self, ClassDocumenter):
- valid_names.append('__init__')
- if hasattr(self.object, '__mro__'):
- valid_names.extend(cls.__name__ for cls in self.object.__mro__)
-
- docstrings = self.get_doc()
- if docstrings is None:
- return None, None
- self._new_docstrings = docstrings[:]
- self._signatures = []
- result = None
- for i, doclines in enumerate(docstrings):
- for j, line in enumerate(doclines):
- if not line:
- # no lines in docstring, no match
- break
-
- if line.endswith('\\'):
- line = line.rstrip('\\').rstrip()
-
- # match first line of docstring against signature RE
- match = py_ext_sig_re.match(line)
- if not match:
- break
- _exmod, _path, base, _tp_list, args, retann = match.groups()
-
- # the base name must match ours
- if base not in valid_names:
- break
-
- # re-prepare docstring to ignore more leading indentation
- directive = self.directive # type: ignore[attr-defined]
- tab_width = directive.state.document.settings.tab_width
- self._new_docstrings[i] = prepare_docstring(
- '\n'.join(doclines[j + 1 :]), tab_width
- )
-
- if result is None:
- # first signature
- result = args, retann
- else:
- # subsequent signatures
- self._signatures.append(f'({args}) -> {retann}')
-
- if result is not None:
- # finish the loop when signature found
- break
-
- return result
-
- def get_doc(self) -> list[list[str]] | None:
- if self._new_docstrings is not None:
- return self._new_docstrings
- return super().get_doc() # type: ignore[misc]
-
- def format_signature(self, **kwargs: Any) -> str:
- self.args: str | None
- if self.args is None and self.config.autodoc_docstring_signature: # type: ignore[attr-defined]
- # only act if a signature is not explicitly given already, and if
- # the feature is enabled
- result = self._find_signature()
- if result is not None:
- if self.__docstring_strip_signature__:
- # Discarding _args is the only difference.
-                # Documenter.format_signature uses the self.args value for formatting.
- _args, self.retann = result
- else:
- self.args, self.retann = result
- sig = super().format_signature(**kwargs) # type: ignore[misc]
- if self._signatures:
- return '\n'.join((sig, *self._signatures))
- else:
- return sig
-
-
-class FunctionDocumenter(DocstringSignatureMixin, Documenter): # type: ignore[misc]
+class FunctionDocumenter(Documenter):
"""Specialized Documenter subclass for functions."""
+ __docstring_signature__ = True
+
objtype = 'function'
member_order = 30
@@ -1410,9 +1411,11 @@ class DecoratorDocumenter(FunctionDocumenter):
})
-class ClassDocumenter(DocstringSignatureMixin, Documenter): # type: ignore[misc]
+class ClassDocumenter(Documenter):
"""Specialized Documenter subclass for classes."""
+ __docstring_signature__ = True
+
objtype = 'class'
member_order = 20
option_spec: ClassVar[OptionSpec] = {
@@ -2147,9 +2150,11 @@ def add_content(self, more_content: StringList | None) -> None:
super().add_content(more_content)
-class MethodDocumenter(DocstringSignatureMixin, Documenter): # type: ignore[misc]
+class MethodDocumenter(Documenter):
"""Specialized Documenter subclass for methods (normal, static and class)."""
+ __docstring_signature__ = True
+
objtype = 'method'
directivetype = 'method'
member_order = 50
@@ -2364,10 +2369,9 @@ def dummy(): # type: ignore[no-untyped-def] # NoQA: ANN202
def get_doc(self) -> list[list[str]] | None:
if self._new_docstrings is not None:
- # docstring already returned previously, then modified by
- # `DocstringSignatureMixin`. Just return the previously-computed
- # result, so that we don't lose the processing done by
- # `DocstringSignatureMixin`.
+ # docstring already returned previously, then modified due to
+ # ``__docstring_signature__ = True``. Just return the
+            # previously-computed result, so that we don't lose the processing.
return self._new_docstrings
if self.objpath[-1] == '__init__':
docstring = getdoc(
@@ -2628,17 +2632,17 @@ def get_doc(self) -> list[list[str]] | None:
return super().get_doc() # type: ignore[misc]
-class AttributeDocumenter( # type: ignore[misc]
+class AttributeDocumenter(
GenericAliasMixin,
SlotsMixin,
RuntimeInstanceAttributeMixin,
UninitializedInstanceAttributeMixin,
NonDataDescriptorMixin,
- DocstringSignatureMixin,
Documenter,
):
"""Specialized Documenter subclass for attributes."""
+ __docstring_signature__ = True
__docstring_strip_signature__ = True
objtype = 'attribute'
@@ -2806,9 +2810,10 @@ def add_content(self, more_content: StringList | None) -> None:
super().add_content(more_content)
-class PropertyDocumenter(DocstringSignatureMixin, Documenter): # type: ignore[misc]
+class PropertyDocumenter(Documenter):
"""Specialized Documenter subclass for properties."""
+ __docstring_signature__ = True
__docstring_strip_signature__ = True
objtype = 'property'
@@ -2905,6 +2910,12 @@ def _get_property_getter(self) -> Callable[..., Any] | None:
return None
+class DocstringSignatureMixin:
+ """Retained for compatibility."""
+
+ __docstring_signature__ = True
+
+
class ModuleLevelDocumenter(Documenter):
"""Retained for compatibility."""
From aae9c4509fdc610111a50a3aa792567fa8688274 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Fri, 25 Jul 2025 17:53:15 +0200
Subject: [PATCH 192/466] Improve ``autodoc`` sentinels (#13756)
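
Sentinels now carry a name, a readable ``repr()``, and refuse pickling; the
``INSTANCEATTR``/``SLOTSATTR`` aliases give way to the canonical
``INSTANCE_ATTR``/``SLOTS_ATTR`` names. Expected behaviour, as a sketch:

    >>> from sphinx.ext.autodoc._sentinels import INSTANCE_ATTR
    >>> INSTANCE_ATTR
    <INSTANCE_ATTR>
    >>> import pickle
    >>> pickle.dumps(INSTANCE_ATTR)
    Traceback (most recent call last):
      ...
    TypeError: Cannot pickle '_Sentinel' object
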
---
sphinx/ext/autodoc/_documenters.py | 18 ++++-----
sphinx/ext/autodoc/_sentinels.py | 47 ++++++++++++++++-------
sphinx/ext/autodoc/importer.py | 21 +++++-----
sphinx/ext/autosummary/__init__.py | 5 ++-
tests/test_extensions/test_ext_autodoc.py | 4 +-
5 files changed, 57 insertions(+), 38 deletions(-)
diff --git a/sphinx/ext/autodoc/_documenters.py b/sphinx/ext/autodoc/_documenters.py
index 42d6d867b7c..430625902a5 100644
--- a/sphinx/ext/autodoc/_documenters.py
+++ b/sphinx/ext/autodoc/_documenters.py
@@ -23,15 +23,11 @@
)
from sphinx.ext.autodoc._sentinels import (
ALL,
+ INSTANCE_ATTR,
+ SLOTS_ATTR,
SUPPRESS,
UNINITIALIZED_ATTR,
)
-from sphinx.ext.autodoc._sentinels import (
- INSTANCE_ATTR as INSTANCEATTR,
-)
-from sphinx.ext.autodoc._sentinels import (
- SLOTS_ATTR as SLOTSATTR,
-)
from sphinx.ext.autodoc.importer import get_class_members, import_module, import_object
from sphinx.ext.autodoc.mock import ismock, mock, undecorate
from sphinx.locale import _, __
@@ -718,7 +714,7 @@ def is_filtered_inherited_member(name: str, obj: Any) -> bool:
member = obj.object
# if isattr is True, the member is documented as an attribute
- isattr = member is INSTANCEATTR or (namespace, membername) in attr_docs
+ isattr = member is INSTANCE_ATTR or (namespace, membername) in attr_docs
try:
doc = getdoc(
@@ -1173,7 +1169,7 @@ def get_module_members(self) -> dict[str, ObjectMember]:
if name not in members:
docstring = attr_docs.get(('', name), [])
members[name] = ObjectMember(
- name, INSTANCEATTR, docstring='\n'.join(docstring)
+ name, INSTANCE_ATTR, docstring='\n'.join(docstring)
)
return members
@@ -2461,18 +2457,18 @@ def isslotsattribute(self) -> bool:
def import_object(self, raiseerror: bool = False) -> bool:
ret = super().import_object(raiseerror) # type: ignore[misc]
if self.isslotsattribute():
- self.object = SLOTSATTR
+ self.object = SLOTS_ATTR
return ret
def should_suppress_value_header(self) -> bool:
- if self.object is SLOTSATTR:
+ if self.object is SLOTS_ATTR:
return True
else:
return super().should_suppress_value_header()
def get_doc(self) -> list[list[str]] | None:
- if self.object is SLOTSATTR:
+ if self.object is SLOTS_ATTR:
try:
parent___slots__ = inspect.getslots(self.parent)
if parent___slots__ and (
diff --git a/sphinx/ext/autodoc/_sentinels.py b/sphinx/ext/autodoc/_sentinels.py
index f1544b11165..aa663b5be86 100644
--- a/sphinx/ext/autodoc/_sentinels.py
+++ b/sphinx/ext/autodoc/_sentinels.py
@@ -1,31 +1,50 @@
from __future__ import annotations
-from typing import TYPE_CHECKING
-
+TYPE_CHECKING = False
if TYPE_CHECKING:
- from typing import Any
+ from typing import NoReturn
+
+
+class _Sentinel:
+ """Create a unique sentinel object."""
+
+ def __init__(self, name: str, /) -> None:
+ self._name = name
+
+ def __repr__(self) -> str:
+ return f'<{self._name}>'
+
+    def __or__(self, other: object) -> type[_Sentinel | object]:
+        return type(self) | other  # union via the class; ``self | other`` would recurse
+
+    def __ror__(self, other: object) -> type[object | _Sentinel]:
+        return other | type(self)
+
+ def __getstate__(self) -> NoReturn:
+ msg = f'Cannot pickle {type(self).__name__!r} object'
+ raise TypeError(msg)
-class _All:
+class _All(_Sentinel):
"""A special value for :*-members: that matches to any member."""
- def __contains__(self, item: Any) -> bool:
+ def __contains__(self, item: object) -> bool:
return True
- def append(self, item: Any) -> None:
+ def append(self, item: object) -> None:
pass # nothing
-class _Empty:
+class _Empty(_Sentinel):
"""A special value for :exclude-members: that never matches to any member."""
- def __contains__(self, item: Any) -> bool:
+ def __contains__(self, item: object) -> bool:
return False
-ALL = _All()
-EMPTY = _Empty()
-UNINITIALIZED_ATTR = object()
-INSTANCE_ATTR = object()
-SLOTS_ATTR = object()
-SUPPRESS = object()
+ALL = _All('ALL')
+EMPTY = _Empty('EMPTY')
+UNINITIALIZED_ATTR = _Sentinel('UNINITIALIZED_ATTR')
+INSTANCE_ATTR = _Sentinel('INSTANCE_ATTR')
+SLOTS_ATTR = _Sentinel('SLOTS_ATTR')
+SUPPRESS = _Sentinel('SUPPRESS')
diff --git a/sphinx/ext/autodoc/importer.py b/sphinx/ext/autodoc/importer.py
index ca9c7ca7778..e37df21a614 100644
--- a/sphinx/ext/autodoc/importer.py
+++ b/sphinx/ext/autodoc/importer.py
@@ -347,7 +347,7 @@ def get_object_members(
analyzer: ModuleAnalyzer | None = None,
) -> dict[str, Attribute]:
"""Get members and attributes of target object."""
- from sphinx.ext.autodoc import INSTANCEATTR
+ from sphinx.ext.autodoc._sentinels import INSTANCE_ATTR
# the members directly defined in the class
obj_dict = attrgetter(subject, '__dict__', {})
@@ -372,11 +372,11 @@ def get_object_members(
try:
subject___slots__ = getslots(subject)
if subject___slots__:
- from sphinx.ext.autodoc import SLOTSATTR
+ from sphinx.ext.autodoc._sentinels import SLOTS_ATTR
for name in subject___slots__:
members[name] = Attribute(
- name=name, directly_defined=True, value=SLOTSATTR
+ name=name, directly_defined=True, value=SLOTS_ATTR
)
except (TypeError, ValueError):
pass
@@ -400,7 +400,7 @@ def get_object_members(
unmangled = unmangle(cls, name)
if unmangled and unmangled not in members:
members[unmangled] = Attribute(
- name=unmangled, directly_defined=cls is subject, value=INSTANCEATTR
+ name=unmangled, directly_defined=cls is subject, value=INSTANCE_ATTR
)
if analyzer:
@@ -409,7 +409,7 @@ def get_object_members(
for ns, name in analyzer.find_attr_docs():
if namespace == ns and name not in members:
members[name] = Attribute(
- name=name, directly_defined=True, value=INSTANCEATTR
+ name=name, directly_defined=True, value=INSTANCE_ATTR
)
return members
@@ -419,7 +419,8 @@ def get_class_members(
subject: Any, objpath: Any, attrgetter: _AttrGetter, inherit_docstrings: bool = True
) -> dict[str, ObjectMember]:
"""Get members and attributes of target class."""
- from sphinx.ext.autodoc import INSTANCEATTR, ObjectMember
+ from sphinx.ext.autodoc._documenters import ObjectMember
+ from sphinx.ext.autodoc._sentinels import INSTANCE_ATTR
# the members directly defined in the class
obj_dict = attrgetter(subject, '__dict__', {})
@@ -442,11 +443,11 @@ def get_class_members(
try:
subject___slots__ = getslots(subject)
if subject___slots__:
- from sphinx.ext.autodoc import SLOTSATTR
+ from sphinx.ext.autodoc._sentinels import SLOTS_ATTR
for name, docstring in subject___slots__.items():
members[name] = ObjectMember(
- name, SLOTSATTR, class_=subject, docstring=docstring
+ name, SLOTS_ATTR, class_=subject, docstring=docstring
)
except (TypeError, ValueError):
pass
@@ -490,7 +491,7 @@ def get_class_members(
docstring = None
members[unmangled] = ObjectMember(
- unmangled, INSTANCEATTR, class_=cls, docstring=docstring
+ unmangled, INSTANCE_ATTR, class_=cls, docstring=docstring
)
# append or complete instance attributes (cf. self.attr1) if analyzer knows
@@ -500,7 +501,7 @@ def get_class_members(
# otherwise unknown instance attribute
members[name] = ObjectMember(
name,
- INSTANCEATTR,
+ INSTANCE_ATTR,
class_=cls,
docstring='\n'.join(docstring),
)
diff --git a/sphinx/ext/autosummary/__init__.py b/sphinx/ext/autosummary/__init__.py
index a0ae7af16b1..3fe4832a795 100644
--- a/sphinx/ext/autosummary/__init__.py
+++ b/sphinx/ext/autosummary/__init__.py
@@ -69,7 +69,8 @@
from sphinx.config import Config
from sphinx.environment import BuildEnvironment
from sphinx.errors import PycodeError
-from sphinx.ext.autodoc import INSTANCEATTR, Options
+from sphinx.ext.autodoc._directive_options import Options
+from sphinx.ext.autodoc._sentinels import INSTANCE_ATTR
from sphinx.ext.autodoc.directive import DocumenterBridge
from sphinx.ext.autodoc.importer import import_module
from sphinx.ext.autodoc.mock import mock
@@ -832,7 +833,7 @@ def import_ivar_by_name(
found_attrs |= {attr for (qualname, attr) in analyzer.attr_docs}
found_attrs |= {attr for (qualname, attr) in analyzer.annotations}
if attr in found_attrs:
- return f'{real_name}.{attr}', INSTANCEATTR, obj, modname
+ return f'{real_name}.{attr}', INSTANCE_ATTR, obj, modname
except (ImportError, ValueError, PycodeError) as exc:
raise ImportError from exc
except ImportExceptionGroup:
diff --git a/tests/test_extensions/test_ext_autodoc.py b/tests/test_extensions/test_ext_autodoc.py
index d7c41291e01..f47515572c8 100644
--- a/tests/test_extensions/test_ext_autodoc.py
+++ b/tests/test_extensions/test_ext_autodoc.py
@@ -17,7 +17,9 @@
import pytest
from sphinx import addnodes
-from sphinx.ext.autodoc import ALL, ModuleLevelDocumenter, Options
+from sphinx.ext.autodoc._directive_options import Options
+from sphinx.ext.autodoc._documenters import ModuleLevelDocumenter
+from sphinx.ext.autodoc._sentinels import ALL
# NEVER import these objects from sphinx.ext.autodoc directly
from sphinx.ext.autodoc.directive import DocumenterBridge
From f82de0351d8ec451708482bd54454523744c3604 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Fri, 25 Jul 2025 19:15:43 +0200
Subject: [PATCH 193/466] Inline ``DataDocumenter`` mixin classes (#13757)
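
``GenericAliasMixin`` becomes the ``_add_content_generic_alias_()`` helper and
``UninitializedGlobalVariableMixin`` becomes the
``__uninitialized_global_variable__`` flag on ``Documenter``. The latter
covers annotation-only module attributes such as (illustrative):

    # mod.py
    TIMEOUT: float  #: Request timeout in seconds; no value is assigned.

``.. autodata:: mod.TIMEOUT`` imports the module, finds ``TIMEOUT`` in its
type hints, and documents it with ``UNINITIALIZED_ATTR`` standing in for the
missing value.
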
---
sphinx/ext/autodoc/__init__.py | 6 --
sphinx/ext/autodoc/_documenters.py | 147 +++++++++++------------------
2 files changed, 56 insertions(+), 97 deletions(-)
diff --git a/sphinx/ext/autodoc/__init__.py b/sphinx/ext/autodoc/__init__.py
index ca1fcfd1dbe..db90b87bb3d 100644
--- a/sphinx/ext/autodoc/__init__.py
+++ b/sphinx/ext/autodoc/__init__.py
@@ -28,13 +28,11 @@
ClassDocumenter,
ClassLevelDocumenter,
DataDocumenter,
- DataDocumenterMixinBase,
DecoratorDocumenter,
DocstringSignatureMixin,
Documenter,
ExceptionDocumenter,
FunctionDocumenter,
- GenericAliasMixin,
MethodDocumenter,
ModuleDocumenter,
ModuleLevelDocumenter,
@@ -43,7 +41,6 @@
PropertyDocumenter,
RuntimeInstanceAttributeMixin,
SlotsMixin,
- UninitializedGlobalVariableMixin,
UninitializedInstanceAttributeMixin,
autodoc_attrgetter,
py_ext_sig_re,
@@ -107,9 +104,6 @@
'ModuleLevelDocumenter',
'ClassLevelDocumenter',
'DocstringSignatureMixin',
- 'DataDocumenterMixinBase',
- 'GenericAliasMixin',
- 'UninitializedGlobalVariableMixin',
'NonDataDescriptorMixin',
'SlotsMixin',
'RuntimeInstanceAttributeMixin',
diff --git a/sphinx/ext/autodoc/_documenters.py b/sphinx/ext/autodoc/_documenters.py
index 430625902a5..6d32653d653 100644
--- a/sphinx/ext/autodoc/_documenters.py
+++ b/sphinx/ext/autodoc/_documenters.py
@@ -155,6 +155,9 @@ class Documenter:
__docstring_strip_signature__: ClassVar[bool] = False
"""If True, strip any function signature from the docstring."""
+ __uninitialized_global_variable__: ClassVar[bool] = False
+ """If True, support uninitialized (type annotation only) global variables"""
+
_new_docstrings: list[list[str]] | None = None
_signatures: list[str] = []
@@ -352,6 +355,23 @@ def import_object(self, raiseerror: bool = False) -> bool:
self.object = undecorate(self.object)
return True
except ImportError as exc:
+ if self.__uninitialized_global_variable__:
+ # annotation only instance variable (PEP-526)
+ try:
+ parent = import_module(self.modname)
+ annotations = get_type_hints(
+ parent,
+ None,
+ self.config.autodoc_type_aliases,
+ include_extras=True,
+ )
+ if self.objpath[-1] in annotations:
+ self.object = UNINITIALIZED_ATTR
+ self.parent = parent
+ return True
+ except ImportError:
+ pass
+
if raiseerror:
raise
logger.warning(exc.args[0], type='autodoc', subtype='import_object')
@@ -556,6 +576,9 @@ def get_doc(self) -> list[list[str]] | None:
When it returns None, autodoc-process-docstring will not be called for this
object.
"""
+ if self.object is UNINITIALIZED_ATTR:
+ return []
+
if self.__docstring_signature__ and self._new_docstrings is not None:
return self._new_docstrings
@@ -1936,88 +1959,16 @@ class DataDocumenterMixinBase:
object: Any
objpath: list[str]
- def should_suppress_directive_header(self) -> bool:
- """Check directive header should be suppressed."""
- return False
-
def should_suppress_value_header(self) -> bool:
"""Check :value: header should be suppressed."""
return False
- def update_content(self, more_content: StringList) -> None:
- """Update docstring, for example with TypeVar variance."""
- pass
-
-
-class GenericAliasMixin(DataDocumenterMixinBase):
- """Mixin for DataDocumenter and AttributeDocumenter to provide the feature for
- supporting GenericAliases.
- """
-
- def should_suppress_directive_header(self) -> bool:
- return (
- inspect.isgenericalias(self.object)
- or super().should_suppress_directive_header()
- )
-
- def update_content(self, more_content: StringList) -> None:
- if inspect.isgenericalias(self.object):
- mode = _get_render_mode(self.config.autodoc_typehints_format)
- alias = restify(self.object, mode=mode)
-
- more_content.append(_('alias of %s') % alias, '')
- more_content.append('', '')
-
- super().update_content(more_content)
-
-
-class UninitializedGlobalVariableMixin(DataDocumenterMixinBase):
- """Mixin for DataDocumenter to provide the feature for supporting uninitialized
- (type annotation only) global variables.
- """
-
- def import_object(self, raiseerror: bool = False) -> bool:
- try:
- return super().import_object(raiseerror=True) # type: ignore[misc]
- except ImportError as exc:
- # annotation only instance variable (PEP-526)
- try:
- with mock(self.config.autodoc_mock_imports):
- parent = import_module(self.modname)
- annotations = get_type_hints(
- parent,
- None,
- self.config.autodoc_type_aliases,
- include_extras=True,
- )
- if self.objpath[-1] in annotations:
- self.object = UNINITIALIZED_ATTR
- self.parent = parent
- return True
- except ImportError:
- pass
-
- if raiseerror:
- raise
- logger.warning(exc.args[0], type='autodoc', subtype='import_object')
- self.env.note_reread()
- return False
-
- def should_suppress_value_header(self) -> bool:
- return (
- self.object is UNINITIALIZED_ATTR or super().should_suppress_value_header()
- )
-
- def get_doc(self) -> list[list[str]] | None:
- if self.object is UNINITIALIZED_ATTR:
- return []
- else:
- return super().get_doc() # type: ignore[misc]
-
-class DataDocumenter(GenericAliasMixin, UninitializedGlobalVariableMixin, Documenter):
+class DataDocumenter(Documenter):
"""Specialized Documenter subclass for data items."""
+ __uninitialized_global_variable__ = True
+
objtype = 'data'
member_order = 40
priority = -10
@@ -2053,7 +2004,7 @@ def import_object(self, raiseerror: bool = False) -> bool:
return ret
def should_suppress_value_header(self) -> bool:
- if super().should_suppress_value_header():
+ if self.object is UNINITIALIZED_ATTR:
return True
else:
doc = self.get_doc() or []
@@ -2068,10 +2019,7 @@ def should_suppress_value_header(self) -> bool:
def add_directive_header(self, sig: str) -> None:
super().add_directive_header(sig)
sourcename = self.get_sourcename()
- if (
- self.options.annotation is SUPPRESS
- or self.should_suppress_directive_header()
- ):
+ if self.options.annotation is SUPPRESS or inspect.isgenericalias(self.object):
pass
elif self.options.annotation:
self.add_line(' :annotation: %s' % self.options.annotation, sourcename)
@@ -2142,7 +2090,11 @@ def add_content(self, more_content: StringList | None) -> None:
if not more_content:
more_content = StringList()
- self.update_content(more_content)
+ _add_content_generic_alias_(
+ more_content,
+ self.object,
+ autodoc_typehints_format=self.config.autodoc_typehints_format,
+ )
super().add_content(more_content)
@@ -2417,6 +2369,8 @@ class NonDataDescriptorMixin(DataDocumenterMixinBase):
and :value: header will be suppressed unexpectedly.
"""
+ non_data_descriptor: bool = False
+
def import_object(self, raiseerror: bool = False) -> bool:
ret = super().import_object(raiseerror) # type: ignore[misc]
if ret and not inspect.isattributedescriptor(self.object):
@@ -2427,10 +2381,8 @@ def import_object(self, raiseerror: bool = False) -> bool:
return ret
def should_suppress_value_header(self) -> bool:
- return (
- not getattr(self, 'non_data_descriptor', False)
- or super().should_suppress_directive_header()
- )
+ non_data_descriptor = getattr(self, 'non_data_descriptor', False)
+ return not non_data_descriptor or inspect.isgenericalias(self.object)
def get_doc(self) -> list[list[str]] | None:
if getattr(self, 'non_data_descriptor', False):
@@ -2629,7 +2581,6 @@ def get_doc(self) -> list[list[str]] | None:
class AttributeDocumenter(
- GenericAliasMixin,
SlotsMixin,
RuntimeInstanceAttributeMixin,
UninitializedInstanceAttributeMixin,
@@ -2723,10 +2674,7 @@ def should_suppress_value_header(self) -> bool:
def add_directive_header(self, sig: str) -> None:
super().add_directive_header(sig)
sourcename = self.get_sourcename()
- if (
- self.options.annotation is SUPPRESS
- or self.should_suppress_directive_header()
- ):
+ if self.options.annotation is SUPPRESS or inspect.isgenericalias(self.object):
pass
elif self.options.annotation:
self.add_line(' :annotation: %s' % self.options.annotation, sourcename)
@@ -2802,7 +2750,11 @@ def add_content(self, more_content: StringList | None) -> None:
if more_content is None:
more_content = StringList()
- self.update_content(more_content)
+ _add_content_generic_alias_(
+ more_content,
+ self.object,
+ autodoc_typehints_format=self.config.autodoc_typehints_format,
+ )
super().add_content(more_content)
@@ -2929,3 +2881,16 @@ def autodoc_attrgetter(
return func(obj, name, *defargs)
return safe_getattr(obj, name, *defargs)
+
+
+def _add_content_generic_alias_(
+ more_content: StringList,
+ /,
+ obj: object,
+ autodoc_typehints_format: Literal['fully-qualified', 'short'],
+) -> None:
+ """Support for documenting GenericAliases."""
+ if inspect.isgenericalias(obj):
+ alias = restify(obj, mode=_get_render_mode(autodoc_typehints_format))
+ more_content.append(_('alias of %s') % alias, '')
+ more_content.append('', '')
From b98e1c12815bd99c0c92399bf899e69513307685 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Fri, 25 Jul 2025 22:48:56 +0200
Subject: [PATCH 194/466] Inline ``AttributeDocumenter`` mixin classes (#13758)
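
``SlotsMixin``, ``RuntimeInstanceAttributeMixin``,
``UninitializedInstanceAttributeMixin``, and ``NonDataDescriptorMixin`` are
folded into ``import_object()``/``get_doc()``, with runtime instance
attributes marked by the new ``RUNTIME_INSTANCE_ATTRIBUTE`` sentinel. The
``__slots__`` path continues to support dict-valued slots with per-attribute
docstrings, e.g. (illustrative):

    class Point:
        __slots__ = {'x': 'X coordinate.', 'y': 'Y coordinate.'}
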
---
sphinx/ext/autodoc/__init__.py | 8 -
sphinx/ext/autodoc/_documenters.py | 368 ++++++++++-------------------
sphinx/ext/autodoc/_sentinels.py | 1 +
3 files changed, 127 insertions(+), 250 deletions(-)
diff --git a/sphinx/ext/autodoc/__init__.py b/sphinx/ext/autodoc/__init__.py
index db90b87bb3d..86f0a927b21 100644
--- a/sphinx/ext/autodoc/__init__.py
+++ b/sphinx/ext/autodoc/__init__.py
@@ -36,12 +36,8 @@
MethodDocumenter,
ModuleDocumenter,
ModuleLevelDocumenter,
- NonDataDescriptorMixin,
ObjectMember,
PropertyDocumenter,
- RuntimeInstanceAttributeMixin,
- SlotsMixin,
- UninitializedInstanceAttributeMixin,
autodoc_attrgetter,
py_ext_sig_re,
special_member_re,
@@ -104,10 +100,6 @@
'ModuleLevelDocumenter',
'ClassLevelDocumenter',
'DocstringSignatureMixin',
- 'NonDataDescriptorMixin',
- 'SlotsMixin',
- 'RuntimeInstanceAttributeMixin',
- 'UninitializedInstanceAttributeMixin',
'autodoc_attrgetter',
'Documenter',
)
diff --git a/sphinx/ext/autodoc/_documenters.py b/sphinx/ext/autodoc/_documenters.py
index 6d32653d653..2a458df14cc 100644
--- a/sphinx/ext/autodoc/_documenters.py
+++ b/sphinx/ext/autodoc/_documenters.py
@@ -24,6 +24,7 @@
from sphinx.ext.autodoc._sentinels import (
ALL,
INSTANCE_ATTR,
+ RUNTIME_INSTANCE_ATTRIBUTE,
SLOTS_ATTR,
SUPPRESS,
UNINITIALIZED_ATTR,
@@ -372,12 +373,85 @@ def import_object(self, raiseerror: bool = False) -> bool:
except ImportError:
pass
+ if isinstance(self, AttributeDocumenter):
+ # Support runtime & uninitialized instance attributes.
+ #
+ # The former are defined in __init__() methods with doc-comments.
+            # The latter are PEP 526-style annotation-only attributes.
+ #
+ # class Foo:
+ # attr: int #: uninitialized attribute
+ #
+ # def __init__(self):
+ # self.attr = None #: runtime attribute
+ try:
+ ret = import_object(
+ self.modname,
+ self.objpath[:-1],
+ 'class',
+ attrgetter=self.get_attr,
+ )
+ parent = ret[3]
+ if self._is_runtime_instance_attribute(parent):
+ self.object = RUNTIME_INSTANCE_ATTRIBUTE
+ self.parent = parent
+ return True
+
+ if self._is_uninitialized_instance_attribute(parent):
+ self.object = UNINITIALIZED_ATTR
+ self.parent = parent
+ return True
+ except ImportError:
+ pass
+
if raiseerror:
raise
logger.warning(exc.args[0], type='autodoc', subtype='import_object')
self.env.note_reread()
return False
+ def _is_slots_attribute(self) -> bool:
+ """Check the subject is an attribute in __slots__."""
+ try:
+ if parent___slots__ := inspect.getslots(self.parent):
+ return self.objpath[-1] in parent___slots__
+ else:
+ return False
+ except (ValueError, TypeError):
+ return False
+
+ def _is_runtime_instance_attribute(self, parent: Any) -> bool:
+ """Check the subject is an attribute defined in __init__()."""
+ # An instance variable defined in __init__().
+ if self.get_attribute_comment(parent, self.objpath[-1]): # type: ignore[attr-defined]
+ return True
+ return self._is_runtime_instance_attribute_not_commented(parent)
+
+ def _is_runtime_instance_attribute_not_commented(self, parent: Any) -> bool:
+ """Check the subject is an attribute defined in __init__() without comment."""
+ for cls in inspect.getmro(parent):
+ try:
+ module = safe_getattr(cls, '__module__')
+ qualname = safe_getattr(cls, '__qualname__')
+
+ analyzer = ModuleAnalyzer.for_module(module)
+ analyzer.analyze()
+ if qualname and self.objpath:
+ key = f'{qualname}.{self.objpath[-1]}'
+ if key in analyzer.tagorder:
+ return True
+ except (AttributeError, PycodeError):
+ pass
+
+ return False
+
+ def _is_uninitialized_instance_attribute(self, parent: Any) -> bool:
+ """Check the subject is an annotation only attribute."""
+ annotations = get_type_hints(
+ parent, None, self.config.autodoc_type_aliases, include_extras=True
+ )
+ return self.objpath[-1] in annotations
+
def get_real_modname(self) -> str:
"""Get the real module name of an object to document.
@@ -1950,20 +2024,6 @@ def can_document_member(
raise ValueError(msg) from exc
-class DataDocumenterMixinBase:
- # define types of instance variables
- config: Config
- env: BuildEnvironment
- modname: str
- parent: Any
- object: Any
- objpath: list[str]
-
- def should_suppress_value_header(self) -> bool:
- """Check :value: header should be suppressed."""
- return False
-
-
class DataDocumenter(Documenter):
"""Specialized Documenter subclass for data items."""
@@ -2361,236 +2421,12 @@ def get_doc(self) -> list[list[str]] | None:
return super().get_doc()
-class NonDataDescriptorMixin(DataDocumenterMixinBase):
- """Mixin for AttributeDocumenter to provide the feature for supporting non
- data-descriptors.
-
- .. note:: This mix-in must be inherited after other mix-ins. Otherwise, docstring
- and :value: header will be suppressed unexpectedly.
- """
-
- non_data_descriptor: bool = False
-
- def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror) # type: ignore[misc]
- if ret and not inspect.isattributedescriptor(self.object):
- self.non_data_descriptor = True
- else:
- self.non_data_descriptor = False
-
- return ret
-
- def should_suppress_value_header(self) -> bool:
- non_data_descriptor = getattr(self, 'non_data_descriptor', False)
- return not non_data_descriptor or inspect.isgenericalias(self.object)
-
- def get_doc(self) -> list[list[str]] | None:
- if getattr(self, 'non_data_descriptor', False):
- # the docstring of non datadescriptor is very probably the wrong thing
- # to display
- return None
- else:
- return super().get_doc() # type: ignore[misc]
-
-
-class SlotsMixin(DataDocumenterMixinBase):
- """Mixin for AttributeDocumenter to provide the feature for supporting __slots__."""
-
- def isslotsattribute(self) -> bool:
- """Check the subject is an attribute in __slots__."""
- try:
- if parent___slots__ := inspect.getslots(self.parent):
- return self.objpath[-1] in parent___slots__
- else:
- return False
- except (ValueError, TypeError):
- return False
-
- def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror) # type: ignore[misc]
- if self.isslotsattribute():
- self.object = SLOTS_ATTR
-
- return ret
-
- def should_suppress_value_header(self) -> bool:
- if self.object is SLOTS_ATTR:
- return True
- else:
- return super().should_suppress_value_header()
-
- def get_doc(self) -> list[list[str]] | None:
- if self.object is SLOTS_ATTR:
- try:
- parent___slots__ = inspect.getslots(self.parent)
- if parent___slots__ and (
- docstring := parent___slots__.get(self.objpath[-1])
- ):
- docstring = prepare_docstring(docstring)
- return [docstring]
- else:
- return []
- except ValueError as exc:
- logger.warning(
- __('Invalid __slots__ found on %s. Ignored.'),
- (self.parent.__qualname__, exc),
- type='autodoc',
- )
- return []
- else:
- return super().get_doc() # type: ignore[misc]
-
-
-class RuntimeInstanceAttributeMixin(DataDocumenterMixinBase):
- """Mixin for AttributeDocumenter to provide the feature for supporting runtime
- instance attributes (that are defined in __init__() methods with doc-comments).
-
- Example::
-
- class Foo:
- def __init__(self):
- self.attr = None #: This is a target of this mix-in.
- """
-
- RUNTIME_INSTANCE_ATTRIBUTE = object()
-
- def is_runtime_instance_attribute(self, parent: Any) -> bool:
- """Check the subject is an attribute defined in __init__()."""
- # An instance variable defined in __init__().
- if self.get_attribute_comment(parent, self.objpath[-1]): # type: ignore[attr-defined]
- return True
- return self.is_runtime_instance_attribute_not_commented(parent)
-
- def is_runtime_instance_attribute_not_commented(self, parent: Any) -> bool:
- """Check the subject is an attribute defined in __init__() without comment."""
- for cls in inspect.getmro(parent):
- try:
- module = safe_getattr(cls, '__module__')
- qualname = safe_getattr(cls, '__qualname__')
-
- analyzer = ModuleAnalyzer.for_module(module)
- analyzer.analyze()
- if qualname and self.objpath:
- key = f'{qualname}.{self.objpath[-1]}'
- if key in analyzer.tagorder:
- return True
- except (AttributeError, PycodeError):
- pass
-
- return False
-
- def import_object(self, raiseerror: bool = False) -> bool:
- """Check the existence of runtime instance attribute after failing to import the
- attribute.
- """
- try:
- return super().import_object(raiseerror=True) # type: ignore[misc]
- except ImportError as exc:
- try:
- with mock(self.config.autodoc_mock_imports):
- ret = import_object(
- self.modname,
- self.objpath[:-1],
- 'class',
- attrgetter=self.get_attr, # type: ignore[attr-defined]
- )
- parent = ret[3]
- if self.is_runtime_instance_attribute(parent):
- self.object = self.RUNTIME_INSTANCE_ATTRIBUTE
- self.parent = parent
- return True
- except ImportError:
- pass
-
- if raiseerror:
- raise
- logger.warning(exc.args[0], type='autodoc', subtype='import_object')
- self.env.note_reread()
- return False
-
- def should_suppress_value_header(self) -> bool:
- return (
- self.object is self.RUNTIME_INSTANCE_ATTRIBUTE
- or super().should_suppress_value_header()
- )
-
- def get_doc(self) -> list[list[str]] | None:
- if (
- self.object is self.RUNTIME_INSTANCE_ATTRIBUTE
- and self.is_runtime_instance_attribute_not_commented(self.parent)
- ):
- return None
- else:
- return super().get_doc() # type: ignore[misc]
-
-
-class UninitializedInstanceAttributeMixin(DataDocumenterMixinBase):
- """Mixin for AttributeDocumenter to provide the feature for supporting uninitialized
- instance attributes (PEP-526 styled, annotation only attributes).
-
- Example::
-
- class Foo:
- attr: int #: This is a target of this mix-in.
- """
-
- def is_uninitialized_instance_attribute(self, parent: Any) -> bool:
- """Check the subject is an annotation only attribute."""
- annotations = get_type_hints(
- parent, None, self.config.autodoc_type_aliases, include_extras=True
- )
- return self.objpath[-1] in annotations
-
- def import_object(self, raiseerror: bool = False) -> bool:
- """Check the existence of uninitialized instance attribute when failed to import
- the attribute.
- """
- try:
- return super().import_object(raiseerror=True) # type: ignore[misc]
- except ImportError as exc:
- try:
- ret = import_object(
- self.modname,
- self.objpath[:-1],
- 'class',
- attrgetter=self.get_attr, # type: ignore[attr-defined]
- )
- parent = ret[3]
- if self.is_uninitialized_instance_attribute(parent):
- self.object = UNINITIALIZED_ATTR
- self.parent = parent
- return True
- except ImportError:
- pass
-
- if raiseerror:
- raise
- logger.warning(exc.args[0], type='autodoc', subtype='import_object')
- self.env.note_reread()
- return False
-
- def should_suppress_value_header(self) -> bool:
- return (
- self.object is UNINITIALIZED_ATTR or super().should_suppress_value_header()
- )
-
- def get_doc(self) -> list[list[str]] | None:
- if self.object is UNINITIALIZED_ATTR:
- return None
- return super().get_doc() # type: ignore[misc]
-
-
-class AttributeDocumenter(
- SlotsMixin,
- RuntimeInstanceAttributeMixin,
- UninitializedInstanceAttributeMixin,
- NonDataDescriptorMixin,
- Documenter,
-):
+class AttributeDocumenter(Documenter):
"""Specialized Documenter subclass for attributes."""
__docstring_signature__ = True
__docstring_strip_signature__ = True
+ _non_data_descriptor: bool = False
objtype = 'attribute'
member_order = 60
@@ -2646,11 +2482,18 @@ def update_annotations(self, parent: Any) -> None:
def import_object(self, raiseerror: bool = False) -> bool:
ret = super().import_object(raiseerror)
- if inspect.isenumattribute(self.object):
+ if self._is_slots_attribute():
+ self.object = SLOTS_ATTR
+ elif inspect.isenumattribute(self.object):
self.object = self.object.value
if self.parent:
self.update_annotations(self.parent)
+ if ret and not inspect.isattributedescriptor(self.object):
+ self._non_data_descriptor = True
+ else:
+ self._non_data_descriptor = False
+
return ret
def get_real_modname(self) -> str:
@@ -2658,7 +2501,14 @@ def get_real_modname(self) -> str:
return real_modname or self.modname
def should_suppress_value_header(self) -> bool:
- if super().should_suppress_value_header():
+ if self.object is SLOTS_ATTR:
+ return True
+ if self.object is RUNTIME_INSTANCE_ATTRIBUTE:
+ return True
+ if self.object is UNINITIALIZED_ATTR:
+ return True
+ _non_data_descriptor = getattr(self, '_non_data_descriptor', False)
+ if not _non_data_descriptor or inspect.isgenericalias(self.object):
return True
else:
doc = self.get_doc()
@@ -2739,6 +2589,40 @@ def get_doc(self) -> list[list[str]] | None:
# See: https://github.com/sphinx-doc/sphinx/issues/7805
orig = self.config.autodoc_inherit_docstrings
self.config.autodoc_inherit_docstrings = False
+
+ if self.object is SLOTS_ATTR:
+ # support for __slots__
+ try:
+ parent___slots__ = inspect.getslots(self.parent)
+ if parent___slots__ and (
+ docstring := parent___slots__.get(self.objpath[-1])
+ ):
+ docstring = prepare_docstring(docstring)
+ return [docstring]
+ else:
+ return []
+ except ValueError as exc:
+ logger.warning(
+ __('Invalid __slots__ found on %s. Ignored.'),
+ (self.parent.__qualname__, exc),
+ type='autodoc',
+ )
+ return []
+
+ if (
+ self.object is RUNTIME_INSTANCE_ATTRIBUTE
+ and self._is_runtime_instance_attribute_not_commented(self.parent)
+ ):
+ return None
+
+ if self.object is UNINITIALIZED_ATTR:
+ return None
+
+ if self._non_data_descriptor:
+ # the docstring of non-data descriptor is very probably
+ # the wrong thing to display
+ return None
+
return super().get_doc()
finally:
self.config.autodoc_inherit_docstrings = orig
diff --git a/sphinx/ext/autodoc/_sentinels.py b/sphinx/ext/autodoc/_sentinels.py
index aa663b5be86..d18aecc8d6a 100644
--- a/sphinx/ext/autodoc/_sentinels.py
+++ b/sphinx/ext/autodoc/_sentinels.py
@@ -48,3 +48,4 @@ def __contains__(self, item: object) -> bool:
INSTANCE_ATTR = _Sentinel('INSTANCE_ATTR')
SLOTS_ATTR = _Sentinel('SLOTS_ATTR')
SUPPRESS = _Sentinel('SUPPRESS')
+RUNTIME_INSTANCE_ATTRIBUTE = _Sentinel('RUNTIME_INSTANCE_ATTRIBUTE')
From a7bab7e9ec46194b435328945d2b2191dbbb827b Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sat, 26 Jul 2025 02:58:20 +0200
Subject: [PATCH 195/466] Move options processing to ``_directive_options``
(#13759)
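
The merging semantics are unchanged; as a sketch (option values are
illustrative):

    # conf.py
    autodoc_default_options = {'special-members': '__init__'}

    # document source:
    # .. autoclass:: Foo
    #    :special-members: +__call__
    #
    # merged value: '__init__,__call__', since a leading '+' extends the
    # configured default instead of replacing it.
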
---
sphinx/ext/autodoc/_directive_options.py | 69 +++++++++++++++++++++++-
sphinx/ext/autodoc/directive.py | 62 ++++-----------------
tests/test_extensions/autodoc_util.py | 10 +++-
3 files changed, 86 insertions(+), 55 deletions(-)
diff --git a/sphinx/ext/autodoc/_directive_options.py b/sphinx/ext/autodoc/_directive_options.py
index 28668cfe97f..1b2d2fff607 100644
--- a/sphinx/ext/autodoc/_directive_options.py
+++ b/sphinx/ext/autodoc/_directive_options.py
@@ -1,10 +1,43 @@
from __future__ import annotations
-from typing import Any
+from typing import TYPE_CHECKING
+
+from docutils.utils import assemble_option_dict
from sphinx.ext.autodoc._sentinels import ALL, EMPTY, SUPPRESS
from sphinx.locale import __
+if TYPE_CHECKING:
+ from typing import Any
+
+ from sphinx.ext.autodoc._documenters import Documenter
+
+
+# common option names for autodoc directives
+AUTODOC_DEFAULT_OPTIONS = (
+ 'members',
+ 'undoc-members',
+ 'no-index',
+ 'no-index-entry',
+ 'inherited-members',
+ 'show-inheritance',
+ 'private-members',
+ 'special-members',
+ 'ignore-module-all',
+ 'exclude-members',
+ 'member-order',
+ 'imported-members',
+ 'class-doc-from',
+ 'no-value',
+)
+
+AUTODOC_EXTENDABLE_OPTIONS = frozenset({
+ 'members',
+ 'private-members',
+ 'special-members',
+ 'exclude-members',
+})
+
def identity(x: Any) -> Any:
return x
@@ -87,9 +120,12 @@ def merge_members_option(options: dict[str, Any]) -> None:
members.append(member)
-class Options(dict[str, Any]): # NoQA: FURB189
+class Options(dict[str, object]): # NoQA: FURB189
"""A dict/attribute hybrid that returns None on nonexisting keys."""
+ def __repr__(self) -> str:
+ return f'Options({super().__repr__()})'
+
def copy(self) -> Options:
return Options(super().copy())
@@ -98,3 +134,32 @@ def __getattr__(self, name: str) -> Any:
return self[name.replace('_', '-')]
except KeyError:
return None
+
+
+def _process_documenter_options(
+ documenter: type[Documenter],
+ *,
+ default_options: dict[str, str | bool],
+ options: dict[str, str],
+) -> Options:
+ """Recognize options of Documenter from user input."""
+ for name in AUTODOC_DEFAULT_OPTIONS:
+ if name not in documenter.option_spec:
+ continue
+
+ negated = options.pop(f'no-{name}', True) is None
+ if name in default_options and not negated:
+ if name in options and isinstance(default_options[name], str):
+ # take value from options if present or extend it
+ # with autodoc_default_options if necessary
+ if name in AUTODOC_EXTENDABLE_OPTIONS:
+ if options[name] is not None and options[name].startswith('+'):
+ options[name] = f'{default_options[name]},{options[name][1:]}'
+ else:
+ options[name] = default_options[name] # type: ignore[assignment]
+ elif options.get(name) is not None:
+ # remove '+' from option argument if there's nothing to merge it with
+ options[name] = options[name].removeprefix('+')
+
+ opts = assemble_option_dict(options.items(), documenter.option_spec)
+ return Options(opts)
diff --git a/sphinx/ext/autodoc/directive.py b/sphinx/ext/autodoc/directive.py
index c2c5cb25afe..2b6747484c0 100644
--- a/sphinx/ext/autodoc/directive.py
+++ b/sphinx/ext/autodoc/directive.py
@@ -5,9 +5,8 @@
from docutils import nodes
from docutils.statemachine import StringList
-from docutils.utils import assemble_option_dict
-from sphinx.ext.autodoc._directive_options import Options
+from sphinx.ext.autodoc._directive_options import _process_documenter_options
from sphinx.util import logging
from sphinx.util.docutils import SphinxDirective, switch_source_input
from sphinx.util.parsing import nested_parse_to_nodes
@@ -22,36 +21,11 @@
from sphinx.config import Config
from sphinx.environment import BuildEnvironment
from sphinx.ext.autodoc import Documenter
+ from sphinx.ext.autodoc._directive_options import Options
logger = logging.getLogger(__name__)
-# common option names for autodoc directives
-AUTODOC_DEFAULT_OPTIONS = [
- 'members',
- 'undoc-members',
- 'no-index',
- 'no-index-entry',
- 'inherited-members',
- 'show-inheritance',
- 'private-members',
- 'special-members',
- 'ignore-module-all',
- 'exclude-members',
- 'member-order',
- 'imported-members',
- 'class-doc-from',
- 'no-value',
-]
-
-AUTODOC_EXTENDABLE_OPTIONS = frozenset({
- 'members',
- 'private-members',
- 'special-members',
- 'exclude-members',
-})
-
-
class DummyOptionSpec(dict[str, Callable[[str], str]]): # NoQA: FURB189
"""An option_spec allows any options."""
@@ -86,27 +60,11 @@ def __init__(
def process_documenter_options(
documenter: type[Documenter], config: Config, options: dict[str, str]
) -> Options:
- """Recognize options of Documenter from user input."""
- default_options = config.autodoc_default_options
- for name in AUTODOC_DEFAULT_OPTIONS:
- if name not in documenter.option_spec:
- continue
- negated = options.pop('no-' + name, True) is None
- if name in default_options and not negated:
- if name in options and isinstance(default_options[name], str):
- # take value from options if present or extend it
- # with autodoc_default_options if necessary
- if name in AUTODOC_EXTENDABLE_OPTIONS:
- if options[name] is not None and options[name].startswith('+'):
- options[name] = f'{default_options[name]},{options[name][1:]}'
- else:
- options[name] = default_options[name]
-
- elif options.get(name) is not None:
- # remove '+' from option argument if there's nothing to merge it with
- options[name] = options[name].lstrip('+')
-
- return Options(assemble_option_dict(options.items(), documenter.option_spec))
+ return _process_documenter_options(
+ documenter,
+ default_options=config.autodoc_default_options,
+ options=options,
+ )
def parse_generated_content(
@@ -154,8 +112,10 @@ def run(self) -> list[Node]:
# process the options with the selected documenter's option_spec
try:
- documenter_options = process_documenter_options(
- doccls, self.config, self.options
+ documenter_options = _process_documenter_options(
+ doccls,
+ default_options=self.config.autodoc_default_options,
+ options=self.options,
)
except (KeyError, ValueError, TypeError) as exc:
# an option is either unknown or has a wrong type
diff --git a/tests/test_extensions/autodoc_util.py b/tests/test_extensions/autodoc_util.py
index 3d08c739300..492b6339867 100644
--- a/tests/test_extensions/autodoc_util.py
+++ b/tests/test_extensions/autodoc_util.py
@@ -3,8 +3,10 @@
from typing import TYPE_CHECKING
from unittest.mock import Mock
+from sphinx.ext.autodoc._directive_options import _process_documenter_options
+
# NEVER import those objects from sphinx.ext.autodoc directly
-from sphinx.ext.autodoc.directive import DocumenterBridge, process_documenter_options
+from sphinx.ext.autodoc.directive import DocumenterBridge
from sphinx.util.docutils import LoggingReporter
if TYPE_CHECKING:
@@ -25,7 +27,11 @@ def do_autodoc(
if not app.env.current_document.docname:
app.env.current_document.docname = 'index' # set dummy docname
doccls = app.registry.documenters[objtype]
- docoptions = process_documenter_options(doccls, app.config, options)
+ docoptions = _process_documenter_options(
+ doccls,
+ default_options=app.config.autodoc_default_options,
+ options=options,
+ )
state = Mock()
state.document.settings.tab_width = 8
bridge = DocumenterBridge(app.env, LoggingReporter(''), docoptions, 1, state)
From 3d79d84405e329d3447da62457fc95f4c0b84f38 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Sat, 26 Jul 2025 15:41:01 +0100
Subject: [PATCH 196/466] Remove mypy overrides for ``tests.test_quickstart``
(#13749)
---
pyproject.toml | 1 -
sphinx/cmd/quickstart.py | 3 ++-
tests/test_quickstart.py | 24 ++++++++++++------------
3 files changed, 14 insertions(+), 14 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 74c3f5a2deb..a66d910a5cc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -294,7 +294,6 @@ disallow_untyped_defs = false
[[tool.mypy.overrides]]
module = [
# tests/
- "tests.test_quickstart",
"tests.test_search",
# tests/test_builders
"tests.test_builders.test_build_latex",
diff --git a/sphinx/cmd/quickstart.py b/sphinx/cmd/quickstart.py
index 73cdabfd97b..a11856e497a 100644
--- a/sphinx/cmd/quickstart.py
+++ b/sphinx/cmd/quickstart.py
@@ -89,7 +89,8 @@
# function to get input from terminal -- overridden by the test suite
-def term_input(prompt: str) -> str:
+# Arguments are positional-only to match ``input``.
+def term_input(prompt: str, /) -> str:
if sys.platform == 'win32':
# Important: On Windows, readline is not enabled by default. In this
# environment, escape sequences are broken. To avoid the
diff --git a/tests/test_quickstart.py b/tests/test_quickstart.py
index a66b7c58128..3688984d9c2 100644
--- a/tests/test_quickstart.py
+++ b/tests/test_quickstart.py
@@ -20,7 +20,7 @@
warnfile = StringIO()
-def setup_module():
+def setup_module() -> None:
disable_colour()
@@ -48,7 +48,7 @@ def input_(prompt: str) -> str:
real_input: Callable[[str], str] = input
-def teardown_module():
+def teardown_module() -> None:
qs.term_input = real_input
enable_colour()
@@ -61,7 +61,7 @@ def test_do_prompt() -> None:
'Q5': 'no',
'Q6': 'foo',
}
- qs.term_input = mock_input(answers) # type: ignore[assignment]
+ qs.term_input = mock_input(answers)
assert qs.do_prompt('Q1', default='v1') == 'v1'
assert qs.do_prompt('Q3', default='v3_default') == 'v3'
@@ -79,7 +79,7 @@ def test_do_prompt_inputstrip() -> None:
'Q3': 'N',
'Q4': 'N ',
}
- qs.term_input = mock_input(answers) # type: ignore[assignment]
+ qs.term_input = mock_input(answers)
assert qs.do_prompt('Q1') == 'Y'
assert qs.do_prompt('Q2') == 'Yes'
@@ -91,12 +91,12 @@ def test_do_prompt_with_nonascii() -> None:
answers = {
'Q1': '\u30c9\u30a4\u30c4',
}
- qs.term_input = mock_input(answers) # type: ignore[assignment]
+ qs.term_input = mock_input(answers)
result = qs.do_prompt('Q1', default='\u65e5\u672c')
assert result == '\u30c9\u30a4\u30c4'
-def test_quickstart_defaults(tmp_path):
+def test_quickstart_defaults(tmp_path: Path) -> None:
answers = {
'Root path': str(tmp_path),
'Project name': 'Sphinx Test',
@@ -127,7 +127,7 @@ def test_quickstart_defaults(tmp_path):
assert (tmp_path / 'make.bat').is_file()
-def test_quickstart_all_answers(tmp_path):
+def test_quickstart_all_answers(tmp_path: Path) -> None:
answers = {
'Root path': str(tmp_path),
'Separate source and build': 'y',
@@ -185,7 +185,7 @@ def test_quickstart_all_answers(tmp_path):
assert (tmp_path / 'source' / 'contents.txt').is_file()
-def test_generated_files_eol(tmp_path):
+def test_generated_files_eol(tmp_path: Path) -> None:
answers = {
'Root path': str(tmp_path),
'Project name': 'Sphinx Test',
@@ -205,7 +205,7 @@ def assert_eol(filename: Path, eol: str) -> None:
assert_eol(tmp_path / 'Makefile', '\n')
-def test_quickstart_and_build(tmp_path):
+def test_quickstart_and_build(tmp_path: Path) -> None:
answers = {
'Root path': str(tmp_path),
'Project name': 'Fullwidth characters: \u30c9\u30a4\u30c4',
@@ -224,7 +224,7 @@ def test_quickstart_and_build(tmp_path):
assert not warnings
-def test_default_filename(tmp_path):
+def test_default_filename(tmp_path: Path) -> None:
answers = {
'Root path': str(tmp_path),
'Project name': '\u30c9\u30a4\u30c4', # Fullwidth characters only
@@ -242,7 +242,7 @@ def test_default_filename(tmp_path):
exec(conffile.read_text(encoding='utf8'), ns) # NoQA: S102
-def test_extensions(tmp_path):
+def test_extensions(tmp_path: Path) -> None:
qs.main([
'-q',
'-p',
@@ -261,7 +261,7 @@ def test_extensions(tmp_path):
assert ns['extensions'] == ['foo', 'bar', 'baz']
-def test_exits_when_existing_confpy(monkeypatch):
+def test_exits_when_existing_confpy(monkeypatch: pytest.MonkeyPatch) -> None:
# The code detects existing conf.py with path.is_file()
# so we mock it as True with pytest's monkeypatch
monkeypatch.setattr('os.path.isfile', lambda path: True)
From 85a08700d5818e9bea9fdfd33df64c690347e1c0 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sat, 26 Jul 2025 16:59:29 +0200
Subject: [PATCH 197/466] Remove ``ComplainOnUnhighlighted`` (#13763)
---
tests/test_highlighting.py | 7 +------
1 file changed, 1 insertion(+), 6 deletions(-)
diff --git a/tests/test_highlighting.py b/tests/test_highlighting.py
index 5b7ee2c8def..60408c36fdb 100644
--- a/tests/test_highlighting.py
+++ b/tests/test_highlighting.py
@@ -39,11 +39,6 @@ def format(self, tokensource, outfile):
outfile.write(tok[1])
-class ComplainOnUnhighlighted(PygmentsBridge):
- def unhighlighted(self, source):
- raise AssertionError('should highlight %r' % source)
-
-
@pytest.mark.sphinx('html', testroot='root')
def test_add_lexer(app: SphinxTestApp) -> None:
app.add_lexer('test', MyLexer)
@@ -54,7 +49,7 @@ def test_add_lexer(app: SphinxTestApp) -> None:
def test_detect_interactive() -> None:
- bridge = ComplainOnUnhighlighted('html')
+ bridge = PygmentsBridge('html')
blocks = [
"""
>>> testing()
From 44f0d7da92c754e7e19430db6f8c3ddffefcc153 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Sun, 27 Jul 2025 14:41:23 +0100
Subject: [PATCH 198/466] Remove mypy overrides for ``test_highlighting``
(#13761)
---
pyproject.toml | 1 -
tests/test_highlighting.py | 16 ++++++++++++----
2 files changed, 12 insertions(+), 5 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index a66d910a5cc..7428df36c5f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -247,7 +247,6 @@ ignore_missing_imports = true
[[tool.mypy.overrides]]
module = [
# tests/
- "tests.test_highlighting",
"tests.test_project",
"tests.test_versioning",
# tests/test_builders
diff --git a/tests/test_highlighting.py b/tests/test_highlighting.py
index 60408c36fdb..41dd8395f90 100644
--- a/tests/test_highlighting.py
+++ b/tests/test_highlighting.py
@@ -14,6 +14,11 @@
from sphinx.highlighting import PygmentsBridge
if TYPE_CHECKING:
+ from collections.abc import Iterable
+ from typing import TextIO
+
+ from pygments.token import _TokenType
+
from sphinx.testing.util import SphinxTestApp
if tuple(map(int, pygments.__version__.split('.')[:2])) < (2, 18):
@@ -34,9 +39,12 @@ class MyLexer(RegexLexer):
class MyFormatter(HtmlFormatter[str]):
- def format(self, tokensource, outfile):
- for tok in tokensource:
- outfile.write(tok[1])
+ def format(
+ self,
+ tokensource: Iterable[tuple[_TokenType, str]],
+ outfile: TextIO,
+ ) -> None:
+ outfile.writelines(tok[1] for tok in tokensource)
@pytest.mark.sphinx('html', testroot='root')
@@ -78,7 +86,7 @@ def test_set_formatter() -> None:
@mock.patch('sphinx.highlighting.logger')
-def test_default_highlight(logger):
+def test_default_highlight(logger: mock.Mock) -> None:
bridge = PygmentsBridge('html')
# default: highlights as python3
From e094ae355d8a5f6419ae01b210056a1b05641219 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Sun, 27 Jul 2025 14:45:10 +0100
Subject: [PATCH 199/466] Remove mypy overrides for
``tests/test_directives/test_directive_code.py`` (#13765)
---
tests/test_directives/test_directive_code.py | 50 +++++++++++---------
1 file changed, 28 insertions(+), 22 deletions(-)
diff --git a/tests/test_directives/test_directive_code.py b/tests/test_directives/test_directive_code.py
index 16a25dda687..525071f9e45 100644
--- a/tests/test_directives/test_directive_code.py
+++ b/tests/test_directives/test_directive_code.py
@@ -20,17 +20,17 @@
@pytest.fixture(scope='module')
-def testroot(rootdir):
+def testroot(rootdir: Path) -> Path:
testroot_path = rootdir / 'test-directive-code'
return testroot_path
@pytest.fixture(scope='module')
-def literal_inc_path(testroot):
+def literal_inc_path(testroot: Path) -> Path:
return testroot / 'literal.inc'
-def test_LiteralIncludeReader(literal_inc_path):
+def test_LiteralIncludeReader(literal_inc_path: Path) -> None:
options = {'lineno-match': True}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
content, lines = reader.read()
@@ -39,7 +39,7 @@ def test_LiteralIncludeReader(literal_inc_path):
assert reader.lineno_start == 1
-def test_LiteralIncludeReader_lineno_start(literal_inc_path):
+def test_LiteralIncludeReader_lineno_start(literal_inc_path: Path) -> None:
options = {'lineno-start': 4}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
content, lines = reader.read()
@@ -48,7 +48,7 @@ def test_LiteralIncludeReader_lineno_start(literal_inc_path):
assert reader.lineno_start == 4
-def test_LiteralIncludeReader_pyobject1(literal_inc_path):
+def test_LiteralIncludeReader_pyobject1(literal_inc_path: Path) -> None:
options = {'lineno-match': True, 'pyobject': 'Foo'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
content, _lines = reader.read()
@@ -56,7 +56,7 @@ def test_LiteralIncludeReader_pyobject1(literal_inc_path):
assert reader.lineno_start == 5
-def test_LiteralIncludeReader_pyobject2(literal_inc_path):
+def test_LiteralIncludeReader_pyobject2(literal_inc_path: Path) -> None:
options = {'pyobject': 'Bar'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
content, _lines = reader.read()
@@ -64,21 +64,21 @@ def test_LiteralIncludeReader_pyobject2(literal_inc_path):
assert reader.lineno_start == 1 # no lineno-match
-def test_LiteralIncludeReader_pyobject3(literal_inc_path):
+def test_LiteralIncludeReader_pyobject3(literal_inc_path: Path) -> None:
options = {'pyobject': 'Bar.baz'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
content, _lines = reader.read()
assert content == ' def baz():\n pass\n'
-def test_LiteralIncludeReader_pyobject_and_lines(literal_inc_path):
+def test_LiteralIncludeReader_pyobject_and_lines(literal_inc_path: Path) -> None:
options = {'pyobject': 'Bar', 'lines': '2-'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
content, _lines = reader.read()
assert content == ' def baz():\n pass\n'
-def test_LiteralIncludeReader_lines1(literal_inc_path):
+def test_LiteralIncludeReader_lines1(literal_inc_path: Path) -> None:
options = {'lines': '1-3'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
content, _lines = reader.read()
@@ -89,7 +89,7 @@ def test_LiteralIncludeReader_lines1(literal_inc_path):
)
-def test_LiteralIncludeReader_lines2(literal_inc_path):
+def test_LiteralIncludeReader_lines2(literal_inc_path: Path) -> None:
options = {'lines': '1,3,5'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
content, _lines = reader.read()
@@ -100,7 +100,7 @@ def test_LiteralIncludeReader_lines2(literal_inc_path):
)
-def test_LiteralIncludeReader_lines_and_lineno_match1(literal_inc_path):
+def test_LiteralIncludeReader_lines_and_lineno_match1(literal_inc_path: Path) -> None:
options = {'lines': '3-5', 'lineno-match': True}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
content, _lines = reader.read()
@@ -109,7 +109,9 @@ def test_LiteralIncludeReader_lines_and_lineno_match1(literal_inc_path):
@pytest.mark.sphinx('html', testroot='root') # init locale for errors
-def test_LiteralIncludeReader_lines_and_lineno_match2(literal_inc_path, app):
+def test_LiteralIncludeReader_lines_and_lineno_match2(
+ literal_inc_path: Path, app: SphinxTestApp
+) -> None:
options = {'lines': '0,3,5', 'lineno-match': True}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
with pytest.raises(
@@ -120,7 +122,9 @@ def test_LiteralIncludeReader_lines_and_lineno_match2(literal_inc_path, app):
@pytest.mark.sphinx('html', testroot='root') # init locale for errors
-def test_LiteralIncludeReader_lines_and_lineno_match3(literal_inc_path, app):
+def test_LiteralIncludeReader_lines_and_lineno_match3(
+ literal_inc_path: Path, app: SphinxTestApp
+) -> None:
options = {'lines': '100-', 'lineno-match': True}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
with pytest.raises(
@@ -130,7 +134,7 @@ def test_LiteralIncludeReader_lines_and_lineno_match3(literal_inc_path, app):
reader.read()
-def test_LiteralIncludeReader_start_at(literal_inc_path):
+def test_LiteralIncludeReader_start_at(literal_inc_path: Path) -> None:
options = {'lineno-match': True, 'start-at': 'Foo', 'end-at': 'Bar'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
content, _lines = reader.read()
@@ -138,7 +142,7 @@ def test_LiteralIncludeReader_start_at(literal_inc_path):
assert reader.lineno_start == 5
-def test_LiteralIncludeReader_start_after(literal_inc_path):
+def test_LiteralIncludeReader_start_after(literal_inc_path: Path) -> None:
options = {'lineno-match': True, 'start-after': 'Foo', 'end-before': 'Bar'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
content, _lines = reader.read()
@@ -146,7 +150,7 @@ def test_LiteralIncludeReader_start_after(literal_inc_path):
assert reader.lineno_start == 6
-def test_LiteralIncludeReader_start_after_and_lines(literal_inc_path):
+def test_LiteralIncludeReader_start_after_and_lines(literal_inc_path: Path) -> None:
options = {
'lineno-match': True,
'lines': '6-',
@@ -159,7 +163,7 @@ def test_LiteralIncludeReader_start_after_and_lines(literal_inc_path):
assert reader.lineno_start == 7
-def test_LiteralIncludeReader_start_at_and_lines(literal_inc_path):
+def test_LiteralIncludeReader_start_at_and_lines(literal_inc_path: Path) -> None:
options = {'lines': '2, 3, 5', 'start-at': 'foo', 'end-before': '#'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
content, _lines = reader.read()
@@ -167,7 +171,7 @@ def test_LiteralIncludeReader_start_at_and_lines(literal_inc_path):
assert reader.lineno_start == 1
-def test_LiteralIncludeReader_missing_start_and_end(literal_inc_path):
+def test_LiteralIncludeReader_missing_start_and_end(literal_inc_path: Path) -> None:
options = {'start-at': 'NOTHING'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
with pytest.raises(ValueError, match='start-at pattern not found: NOTHING'):
@@ -189,14 +193,14 @@ def test_LiteralIncludeReader_missing_start_and_end(literal_inc_path):
reader.read()
-def test_LiteralIncludeReader_end_before(literal_inc_path):
+def test_LiteralIncludeReader_end_before(literal_inc_path: Path) -> None:
options = {'end-before': 'nclud'} # *nclud* matches first and third lines.
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
content, _lines = reader.read()
assert content == '# Literally included file using Python highlighting\n\n'
-def test_LiteralIncludeReader_prepend(literal_inc_path):
+def test_LiteralIncludeReader_prepend(literal_inc_path: Path) -> None:
options = {'lines': '1', 'prepend': 'Hello', 'append': 'Sphinx'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
content, _lines = reader.read()
@@ -205,7 +209,7 @@ def test_LiteralIncludeReader_prepend(literal_inc_path):
)
-def test_LiteralIncludeReader_dedent(literal_inc_path):
+def test_LiteralIncludeReader_dedent(literal_inc_path: Path) -> None:
# dedent: 2
options = {'lines': '9-11', 'dedent': 2}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
@@ -231,7 +235,9 @@ def test_LiteralIncludeReader_dedent(literal_inc_path):
assert content == 'def baz():\n pass\n\n'
-def test_LiteralIncludeReader_dedent_and_append_and_prepend(literal_inc_path):
+def test_LiteralIncludeReader_dedent_and_append_and_prepend(
+ literal_inc_path: Path,
+) -> None:
# dedent: 2
options = {
'lines': '9-11',
From f25cfdd2d58c59c009118fb15ca9418e455e4006 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Sun, 27 Jul 2025 15:10:56 +0100
Subject: [PATCH 200/466] Remove mypy overrides for
``tests/test_intl/test_locale.py`` (#13764)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
---
pyproject.toml | 1 -
tests/test_intl/test_locale.py | 16 ++++++++++------
2 files changed, 10 insertions(+), 7 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 7428df36c5f..9440a9d21ae 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -274,7 +274,6 @@ module = [
"tests.test_extensions.test_ext_viewcode",
# tests/test_intl
"tests.test_intl.test_catalogs",
- "tests.test_intl.test_locale",
# tests/test_markup
"tests.test_markup.test_markup",
"tests.test_markup.test_parser",
diff --git a/tests/test_intl/test_locale.py b/tests/test_intl/test_locale.py
index c85c9f7078e..2addbfbfa24 100644
--- a/tests/test_intl/test_locale.py
+++ b/tests/test_intl/test_locale.py
@@ -9,17 +9,19 @@
from sphinx import locale
if TYPE_CHECKING:
- from collections.abc import Callable
+ from collections.abc import Callable, Iterator
from pathlib import Path
+ from sphinx.testing.util import SphinxTestApp
+
@pytest.fixture(autouse=True)
-def _cleanup_translations():
+def _cleanup_translations() -> Iterator[None]:
yield
locale.translators.clear()
-def test_init(rootdir):
+def test_init(rootdir: Path) -> None:
# not initialized yet
_ = locale.get_translation('myext')
assert _('Hello world') == 'Hello world'
@@ -48,7 +50,7 @@ def test_init(rootdir):
assert _('Hello reST') == 'Hello reST'
-def test_init_with_unknown_language(rootdir):
+def test_init_with_unknown_language(rootdir: Path) -> None:
locale.init([rootdir / 'test-locale' / 'locale1'], 'unknown', 'myext')
_ = locale.get_translation('myext')
assert _('Hello world') == 'Hello world'
@@ -57,7 +59,7 @@ def test_init_with_unknown_language(rootdir):
@pytest.mark.sphinx('html', testroot='root')
-def test_add_message_catalog(app, rootdir):
+def test_add_message_catalog(app: SphinxTestApp, rootdir: Path) -> None:
app.config.language = 'en'
app.add_message_catalog('myext', rootdir / 'test-locale' / 'locale1')
_ = locale.get_translation('myext')
@@ -73,7 +75,9 @@ def _empty_language_translation(rootdir: Path) -> Callable[[str], str]:
return locale.get_translation(catalog)
-def test_init_environment_language(rootdir, monkeypatch):
+def test_init_environment_language(
+ rootdir: Path, monkeypatch: pytest.MonkeyPatch
+) -> None:
with monkeypatch.context() as m:
m.setenv('LANGUAGE', 'en_US:en')
_ = _empty_language_translation(rootdir)
From 8dd3d886ef8f56c8dd65e8dfc2c086208977807a Mon Sep 17 00:00:00 2001
From: Mark Ostroth
Date: Sun, 27 Jul 2025 09:28:05 -0500
Subject: [PATCH 201/466] Use ``<span>`` in ``desc_annotation`` for better
semantic HTML (#13689)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
---
AUTHORS.rst | 1 +
CHANGES.rst | 5 +++++
sphinx/writers/html5.py | 4 ++--
tests/test_domains/test_domain_py_pyobject.py | 6 +++---
4 files changed, 11 insertions(+), 5 deletions(-)
diff --git a/AUTHORS.rst b/AUTHORS.rst
index d08f44875b4..11e0206f7ca 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -83,6 +83,7 @@ Contributors
* Louis Maddox -- better docstrings
* Łukasz Langa -- partial support for autodoc
* Marco Buttu -- doctest extension (pyversion option)
+* Mark Ostroth -- semantic HTML contributions
* Martin Hans -- autodoc improvements
* Martin Larralde -- additional napoleon admonitions
* Martin Liška -- option directive and role improvements
diff --git a/CHANGES.rst b/CHANGES.rst
index 791038e5e15..7f405121bc3 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -97,6 +97,11 @@ Bugs fixed
Patch by Bénédikt Tran.
* #13712: intersphinx: Don't add "v" prefix to non-numeric versions.
Patch by Szymon Karpinski.
+* #13688: HTML builder: Replace ``<em class="property">`` with
+ ``<span class="property">`` for attribute type annotations
+ to improve `semantic HTML structure
+ `__.
+ Patch by Mark Ostroth.
Testing
-------
diff --git a/sphinx/writers/html5.py b/sphinx/writers/html5.py
index 39d7ecea680..bbcd247e33c 100644
--- a/sphinx/writers/html5.py
+++ b/sphinx/writers/html5.py
@@ -305,10 +305,10 @@ def depart_desc_optional(self, node: Element) -> None:
self.param_group_index += 1
def visit_desc_annotation(self, node: Element) -> None:
- self.body.append(self.starttag(node, 'em', '', CLASS='property'))
+ self.body.append(self.starttag(node, 'span', '', CLASS='property'))
def depart_desc_annotation(self, node: Element) -> None:
- self.body.append('</em>')
+ self.body.append('</span>')
##############################################
diff --git a/tests/test_domains/test_domain_py_pyobject.py b/tests/test_domains/test_domain_py_pyobject.py
index 12b22a04dcc..dec4d71546e 100644
--- a/tests/test_domains/test_domain_py_pyobject.py
+++ b/tests/test_domains/test_domain_py_pyobject.py
@@ -925,17 +925,17 @@ def test_domain_py_type_alias(app):
content = (app.outdir / 'type_alias.html').read_text(encoding='utf8')
assert (
- 'type '
+ 'type '
'module_one.'
'MyAlias'
- ' ='
+ ' ='
' list'
'['
'int '
'| '
''
'module_two.SomeClass'
- ']'
+ ']'
) in content
assert app.warning.getvalue() == ''
From cb3fbe2a23d5019d739de9733da8e49b3c320eaa Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Sun, 27 Jul 2025 20:55:40 +0100
Subject: [PATCH 202/466] Remove mypy overrides for
``tests/test_intl/test_catalogs.py`` (#13767)
---
pyproject.toml | 2 --
tests/test_intl/test_catalogs.py | 5 ++++-
2 files changed, 4 insertions(+), 3 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 9440a9d21ae..77ee042a739 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -272,8 +272,6 @@ module = [
"tests.test_extensions.test_ext_napoleon",
"tests.test_extensions.test_ext_todo",
"tests.test_extensions.test_ext_viewcode",
- # tests/test_intl
- "tests.test_intl.test_catalogs",
# tests/test_markup
"tests.test_markup.test_markup",
"tests.test_markup.test_parser",
diff --git a/tests/test_intl/test_catalogs.py b/tests/test_intl/test_catalogs.py
index 022d5c6a64f..200e8d95286 100644
--- a/tests/test_intl/test_catalogs.py
+++ b/tests/test_intl/test_catalogs.py
@@ -9,11 +9,14 @@
import pytest
if TYPE_CHECKING:
+ from collections.abc import Iterator
+
+ from sphinx.testing.fixtures import _app_params
from sphinx.testing.util import SphinxTestApp
@pytest.fixture
-def _setup_test(app_params):
+def _setup_test(app_params: _app_params) -> Iterator[None]:
assert isinstance(app_params.kwargs['srcdir'], Path)
srcdir = app_params.kwargs['srcdir']
src_locale_dir = srcdir / 'xx' / 'LC_MESSAGES'
From 087d77c823e2a08165dd3846f8ce62c9bdc3c430 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Sun, 27 Jul 2025 20:56:23 +0100
Subject: [PATCH 203/466] Fix type annotation for ``make_app`` pytest fixture
(#13768)
---
sphinx/testing/fixtures.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/sphinx/testing/fixtures.py b/sphinx/testing/fixtures.py
index ec143faccf4..255bc589dee 100644
--- a/sphinx/testing/fixtures.py
+++ b/sphinx/testing/fixtures.py
@@ -151,7 +151,7 @@ def test_params(request: Any) -> dict[str, Any]:
def app(
test_params: dict[str, Any],
app_params: _app_params,
- make_app: Callable[[], SphinxTestApp],
+ make_app: Callable[..., SphinxTestApp],
shared_result: SharedResult,
) -> Iterator[SphinxTestApp]:
"""Provides the 'sphinx.application.Sphinx' object"""
@@ -183,7 +183,7 @@ def warning(app: SphinxTestApp) -> StringIO:
@pytest.fixture
-def make_app(test_params: dict[str, Any]) -> Iterator[Callable[[], SphinxTestApp]]:
+def make_app(test_params: dict[str, Any]) -> Iterator[Callable[..., SphinxTestApp]]:
"""Provides make_app function to initialize SphinxTestApp instance.
If you want to initialize 'app' in your test function, please use this
instead of using the SphinxTestApp class directly.
From 8784a785c70481506c2b0f45a69d149d1d37a55a Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Sun, 27 Jul 2025 23:12:33 +0100
Subject: [PATCH 204/466] Remove mypy overrides for
``tests/test_util/test_util_docutils.py`` (#13770)
---
pyproject.toml | 2 --
tests/test_util/test_util_docutils.py | 12 +++++++-----
2 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 77ee042a739..9eb3d6a3826 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -280,8 +280,6 @@ module = [
"tests.test_theming.test_theming",
# tests/test_transforms
"tests.test_transforms.test_transforms_post_transforms_images",
- # tests/test_util
- "tests.test_util.test_util_docutils",
# tests/test_writers
"tests.test_writers.test_docutilsconf",
]
diff --git a/tests/test_util/test_util_docutils.py b/tests/test_util/test_util_docutils.py
index e44d508bfaa..ef281c1852a 100644
--- a/tests/test_util/test_util_docutils.py
+++ b/tests/test_util/test_util_docutils.py
@@ -17,6 +17,8 @@
)
if TYPE_CHECKING:
+ from pathlib import Path
+
from sphinx.builders import Builder
from sphinx.testing.util import SphinxTestApp
@@ -41,7 +43,7 @@ class custom_node(nodes.Element):
assert not hasattr(nodes.SparseNodeVisitor, 'depart_custom_node')
-def test_SphinxFileOutput(tmp_path):
+def test_SphinxFileOutput(tmp_path: Path) -> None:
content = 'Hello Sphinx World'
# write test.txt at first
@@ -79,16 +81,16 @@ def __init__(self, document: nodes.document, builder: Builder):
self.called: list[str] = []
super().__init__(document, builder)
- def visit_document(self, node):
+ def visit_document(self, node: nodes.document) -> None:
pass
- def depart_document(self, node):
+ def depart_document(self, node: nodes.document) -> None:
pass
- def visit_inline(self, node):
+ def visit_inline(self, node: nodes.inline) -> None:
self.called.append('visit_inline')
- def depart_inline(self, node):
+ def depart_inline(self, node: nodes.inline) -> None:
self.called.append('depart_inline')
document = new_document('')
From 641f32a101712dd17e5dbb5f7fd127fac403cebd Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Sun, 27 Jul 2025 23:25:55 +0100
Subject: [PATCH 205/466] Remove mypy overrides for
``tests/test_extensions/test_ext_todo.py`` (#13772)
---
pyproject.toml | 1 -
tests/test_extensions/test_ext_todo.py | 6 ++++--
2 files changed, 4 insertions(+), 3 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 9eb3d6a3826..1b23ab9cace 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -270,7 +270,6 @@ module = [
"tests.test_extensions.test_ext_intersphinx_cache",
"tests.test_extensions.test_ext_math",
"tests.test_extensions.test_ext_napoleon",
- "tests.test_extensions.test_ext_todo",
"tests.test_extensions.test_ext_viewcode",
# tests/test_markup
"tests.test_markup.test_markup",
diff --git a/tests/test_extensions/test_ext_todo.py b/tests/test_extensions/test_ext_todo.py
index a19c880b47b..38a6f81e16b 100644
--- a/tests/test_extensions/test_ext_todo.py
+++ b/tests/test_extensions/test_ext_todo.py
@@ -8,6 +8,8 @@
import pytest
if TYPE_CHECKING:
+ from sphinx.application import Sphinx
+ from sphinx.ext.todo import todo_node
from sphinx.testing.util import SphinxTestApp
@@ -20,7 +22,7 @@
def test_todo(app: SphinxTestApp) -> None:
todos = []
- def on_todo_defined(app, node):
+ def on_todo_defined(app: Sphinx, node: todo_node) -> None:
todos.append(node)
app.connect('todo-defined', on_todo_defined)
@@ -62,7 +64,7 @@ def on_todo_defined(app, node):
def test_todo_not_included(app: SphinxTestApp) -> None:
todos = []
- def on_todo_defined(app, node):
+ def on_todo_defined(app: Sphinx, node: todo_node) -> None:
todos.append(node)
app.connect('todo-defined', on_todo_defined)
From 9b8dce66cd681222f4edb5bb47ff3912093beac4 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Sun, 27 Jul 2025 23:54:23 +0100
Subject: [PATCH 206/466] Remove mypy overrides for
``tests/test_builders/test_build_warnings.py`` (#13771)
---
pyproject.toml | 1 -
tests/test_builders/test_build_warnings.py | 7 ++++++-
2 files changed, 6 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 1b23ab9cace..eed502ba153 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -258,7 +258,6 @@ module = [
"tests.test_builders.test_build_html_numfig",
"tests.test_builders.test_build_html_toctree",
"tests.test_builders.test_build_linkcheck",
- "tests.test_builders.test_build_warnings",
# tests/test_directives
"tests.test_directives.test_directive_code",
"tests.test_directives.test_directives_no_typesetting",
diff --git a/tests/test_builders/test_build_warnings.py b/tests/test_builders/test_build_warnings.py
index e4ab763f9f4..adf5647dbd4 100644
--- a/tests/test_builders/test_build_warnings.py
+++ b/tests/test_builders/test_build_warnings.py
@@ -12,6 +12,9 @@
from sphinx.errors import SphinxError
if TYPE_CHECKING:
+ from collections.abc import Callable
+ from pathlib import Path
+
from sphinx.testing.util import SphinxTestApp
ENV_WARNINGS = """\
@@ -117,7 +120,9 @@ def test_texinfo_warnings(app: SphinxTestApp) -> None:
_check_warnings(warnings_exp, app.warning.getvalue())
-def test_uncacheable_config_warning(make_app, tmp_path):
+def test_uncacheable_config_warning(
+ make_app: Callable[..., SphinxTestApp], tmp_path: Path
+) -> None:
"""Test that an unpickleable config value raises a warning."""
tmp_path.joinpath('conf.py').write_text(
"""\
From 00d5f48bebdb4bbea67dcd28c9435b7386955f93 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Mon, 28 Jul 2025 04:40:30 +0200
Subject: [PATCH 207/466] Extract autodoc importing to module-level functions
(#13773)
The import functions are now plain module-level functions that take a
module name and object path to import and return an object representing
the successful import. Previously, substantial amounts of state were
implicit or set directly onto the ``Documenter`` subclass. This should
help with testing in the future, as autodoc importing no longer requires
any of the core Sphinx classes. Finally, this means we can simplify the
import functions by splitting apart different object types, removing
single-purpose logic from the main ``_import_object()`` function.
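
For example, a minimal sketch of the new calling convention (the helper
and attribute names come from this patch; the target module and object
path are hypothetical):

    from sphinx.ext.autodoc.importer import _import_object

    # Import `example_mod.ExampleClass.attr` with no Documenter or
    # Sphinx application state; raises ImportError on failure.
    im = _import_object(
        module_name='example_mod',          # hypothetical module
        obj_path=['ExampleClass', 'attr'],  # hypothetical object path
        mock_imports=[],                    # autodoc_mock_imports value
    )
    print(im.module, im.parent, im.object_name, im.obj)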
---
sphinx/ext/autodoc/_documenters.py | 376 +++++++-------
sphinx/ext/autodoc/importer.py | 470 ++++++++++++++++--
tests/test_extensions/test_ext_autodoc.py | 8 +-
.../test_ext_autodoc_configs.py | 2 +-
4 files changed, 597 insertions(+), 259 deletions(-)
diff --git a/sphinx/ext/autodoc/_documenters.py b/sphinx/ext/autodoc/_documenters.py
index 2a458df14cc..557be042701 100644
--- a/sphinx/ext/autodoc/_documenters.py
+++ b/sphinx/ext/autodoc/_documenters.py
@@ -29,7 +29,17 @@
SUPPRESS,
UNINITIALIZED_ATTR,
)
-from sphinx.ext.autodoc.importer import get_class_members, import_module, import_object
+from sphinx.ext.autodoc.importer import (
+ _get_attribute_comment,
+ _import_assignment_attribute,
+ _import_assignment_data,
+ _import_class,
+ _import_method,
+ _import_object,
+ _import_property,
+ _is_runtime_instance_attribute_not_commented,
+ get_class_members,
+)
from sphinx.ext.autodoc.mock import ismock, mock, undecorate
from sphinx.locale import _, __
from sphinx.pycode import ModuleAnalyzer
@@ -346,111 +356,25 @@ def import_object(self, raiseerror: bool = False) -> bool:
Returns True if successful, False if an error occurred.
"""
- with mock(self.config.autodoc_mock_imports):
- try:
- ret = import_object(
- self.modname, self.objpath, self.objtype, attrgetter=self.get_attr
- )
- self.module, self.parent, self.object_name, self.object = ret
- if ismock(self.object):
- self.object = undecorate(self.object)
- return True
- except ImportError as exc:
- if self.__uninitialized_global_variable__:
- # annotation only instance variable (PEP-526)
- try:
- parent = import_module(self.modname)
- annotations = get_type_hints(
- parent,
- None,
- self.config.autodoc_type_aliases,
- include_extras=True,
- )
- if self.objpath[-1] in annotations:
- self.object = UNINITIALIZED_ATTR
- self.parent = parent
- return True
- except ImportError:
- pass
-
- if isinstance(self, AttributeDocumenter):
- # Support runtime & uninitialized instance attributes.
- #
- # The former are defined in __init__() methods with doc-comments.
- # The latter are PEP-526 style annotation only annotations.
- #
- # class Foo:
- # attr: int #: uninitialized attribute
- #
- # def __init__(self):
- # self.attr = None #: runtime attribute
- try:
- ret = import_object(
- self.modname,
- self.objpath[:-1],
- 'class',
- attrgetter=self.get_attr,
- )
- parent = ret[3]
- if self._is_runtime_instance_attribute(parent):
- self.object = RUNTIME_INSTANCE_ATTRIBUTE
- self.parent = parent
- return True
-
- if self._is_uninitialized_instance_attribute(parent):
- self.object = UNINITIALIZED_ATTR
- self.parent = parent
- return True
- except ImportError:
- pass
-
- if raiseerror:
- raise
- logger.warning(exc.args[0], type='autodoc', subtype='import_object')
- self.env.note_reread()
- return False
-
- def _is_slots_attribute(self) -> bool:
- """Check the subject is an attribute in __slots__."""
try:
- if parent___slots__ := inspect.getslots(self.parent):
- return self.objpath[-1] in parent___slots__
- else:
- return False
- except (ValueError, TypeError):
+ im = _import_object(
+ module_name=self.modname,
+ obj_path=self.objpath,
+ mock_imports=self.config.autodoc_mock_imports,
+ get_attr=self.get_attr,
+ )
+ except ImportError as exc:
+ if raiseerror:
+ raise
+ logger.warning(exc.args[0], type='autodoc', subtype='import_object')
+ self.env.note_reread()
return False
- def _is_runtime_instance_attribute(self, parent: Any) -> bool:
- """Check the subject is an attribute defined in __init__()."""
- # An instance variable defined in __init__().
- if self.get_attribute_comment(parent, self.objpath[-1]): # type: ignore[attr-defined]
- return True
- return self._is_runtime_instance_attribute_not_commented(parent)
-
- def _is_runtime_instance_attribute_not_commented(self, parent: Any) -> bool:
- """Check the subject is an attribute defined in __init__() without comment."""
- for cls in inspect.getmro(parent):
- try:
- module = safe_getattr(cls, '__module__')
- qualname = safe_getattr(cls, '__qualname__')
-
- analyzer = ModuleAnalyzer.for_module(module)
- analyzer.analyze()
- if qualname and self.objpath:
- key = f'{qualname}.{self.objpath[-1]}'
- if key in analyzer.tagorder:
- return True
- except (AttributeError, PycodeError):
- pass
-
- return False
-
- def _is_uninitialized_instance_attribute(self, parent: Any) -> bool:
- """Check the subject is an annotation only attribute."""
- annotations = get_type_hints(
- parent, None, self.config.autodoc_type_aliases, include_extras=True
- )
- return self.objpath[-1] in annotations
+ self.object = im.__dict__.pop('obj', None)
+ for k in 'module', 'parent', 'object_name':
+ if hasattr(im, k):
+ setattr(self, k, getattr(im, k))
+ return True
def get_real_modname(self) -> str:
"""Get the real module name of an object to document.
@@ -1206,25 +1130,24 @@ def parse_name(self) -> bool:
)
return ret
- def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror)
-
- try:
- if not self.options.ignore_module_all:
- self.__all__ = inspect.getall(self.object)
- except ValueError as exc:
- # invalid __all__ found.
- logger.warning(
- __(
- '__all__ should be a list of strings, not %r '
- '(in module %s) -- ignoring __all__'
- ),
- exc.args[0],
- self.fullname,
- type='autodoc',
- )
+ def _module_all(self) -> Sequence[str] | None:
+ if self.object is not None and self.__all__ is None:
+ try:
+ if not self.options.ignore_module_all:
+ self.__all__ = inspect.getall(self.object)
+ except ValueError as exc:
+ # invalid __all__ found.
+ logger.warning(
+ __(
+ '__all__ should be a list of strings, not %r '
+ '(in module %s) -- ignoring __all__'
+ ),
+ exc.args[0],
+ self.fullname,
+ type='autodoc',
+ )
- return ret
+ return self.__all__
def add_directive_header(self, sig: str) -> None:
Documenter.add_directive_header(self, sig)
@@ -1274,13 +1197,15 @@ def get_module_members(self) -> dict[str, ObjectMember]:
def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
members = self.get_module_members()
if want_all:
- if self.__all__ is None:
+ module_all = self._module_all()
+ if module_all is None:
# for implicit module members, check __module__ to avoid
# documenting imported objects
return True, list(members.values())
else:
+ module_all_set = frozenset(module_all)
for member in members.values():
- if member.__name__ not in self.__all__:
+ if member.__name__ not in module_all_set:
member.skipped = True
return False, list(members.values())
@@ -1305,10 +1230,10 @@ def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
def sort_members(
self, documenters: list[tuple[Documenter, bool]], order: str
) -> list[tuple[Documenter, bool]]:
- if order == 'bysource' and self.__all__:
- assert self.__all__ is not None
- module_all = self.__all__
- module_all_set = set(module_all)
+ module_all = self._module_all()
+ if order == 'bysource' and module_all:
+ assert module_all is not None
+ module_all_set = frozenset(module_all)
module_all_len = len(module_all)
# Sort alphabetically first (for members not listed on the __all__)
@@ -1534,6 +1459,8 @@ class ClassDocumenter(Documenter):
_signature_class: Any = None
_signature_method_name: str = ''
+ doc_as_attr: bool
+
def __init__(self, *args: Any) -> None:
super().__init__(*args)
@@ -1558,21 +1485,25 @@ def can_document_member(
)
def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror)
- # if the class is documented under another name, document it
- # as data/attribute
- if ret:
- if hasattr(self.object, '__name__'):
- self.doc_as_attr = self.objpath[-1] != self.object.__name__
- else:
- self.doc_as_attr = True
- if isinstance(self.object, NewType | TypeVar):
- modname = getattr(self.object, '__module__', self.modname)
- if modname != self.modname and self.modname.startswith(modname):
- bases = self.modname[len(modname) :].strip('.').split('.')
- self.objpath = bases + self.objpath
- self.modname = modname
- return ret
+ try:
+ im = _import_class(
+ module_name=self.modname,
+ obj_path=self.objpath,
+ mock_imports=self.config.autodoc_mock_imports,
+ get_attr=self.get_attr,
+ )
+ except ImportError as exc:
+ if raiseerror:
+ raise
+ logger.warning(exc.args[0], type='autodoc', subtype='import_object')
+ self.env.note_reread()
+ return False
+
+ self.object = im.__dict__.pop('obj', None)
+ for k in 'module', 'parent', 'object_name', 'doc_as_attr', 'objpath', 'modname':
+ if hasattr(im, k):
+ setattr(self, k, getattr(im, k))
+ return True
def _get_signature(self) -> tuple[Any | None, str | None, Signature | None]:
if isinstance(self.object, NewType | TypeVar):
@@ -2057,11 +1988,26 @@ def update_annotations(self, parent: Any) -> None:
pass
def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror)
- if self.parent:
- self.update_annotations(self.parent)
+ try:
+ im = _import_assignment_data(
+ module_name=self.modname,
+ obj_path=self.objpath,
+ mock_imports=self.config.autodoc_mock_imports,
+ type_aliases=self.config.autodoc_type_aliases,
+ get_attr=self.get_attr,
+ )
+ except ImportError as exc:
+ if raiseerror:
+ raise
+ logger.warning(exc.args[0], type='autodoc', subtype='import_object')
+ self.env.note_reread()
+ return False
- return ret
+ self.object = im.__dict__.pop('obj', None)
+ for k in 'module', 'parent', 'object_name':
+ if hasattr(im, k):
+ setattr(self, k, getattr(im, k))
+ return True
def should_suppress_value_header(self) -> bool:
if self.object is UNINITIALIZED_ATTR:
@@ -2175,20 +2121,26 @@ def can_document_member(
return inspect.isroutine(member) and not isinstance(parent, ModuleDocumenter)
def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror)
- if not ret:
- return ret
+ try:
+ im = _import_method(
+ module_name=self.modname,
+ obj_path=self.objpath,
+ member_order=self.member_order,
+ mock_imports=self.config.autodoc_mock_imports,
+ get_attr=self.get_attr,
+ )
+ except ImportError as exc:
+ if raiseerror:
+ raise
+ logger.warning(exc.args[0], type='autodoc', subtype='import_object')
+ self.env.note_reread()
+ return False
- # to distinguish classmethod/staticmethod
- obj = self.parent.__dict__.get(self.object_name, self.object)
- if inspect.isstaticmethod(obj, cls=self.parent, name=self.object_name):
- # document static members before regular methods
- self.member_order -= 1 # type: ignore[misc]
- elif inspect.isclassmethod(obj):
- # document class methods before static methods as
- # they usually behave as alternative constructors
- self.member_order -= 2 # type: ignore[misc]
- return ret
+ self.object = im.__dict__.pop('obj', None)
+ for k in 'module', 'parent', 'object_name', 'member_order':
+ if hasattr(im, k):
+ setattr(self, k, getattr(im, k))
+ return True
def format_args(self, **kwargs: Any) -> str:
if self.config.autodoc_typehints in {'none', 'description'}:
@@ -2426,7 +2378,6 @@ class AttributeDocumenter(Documenter):
__docstring_signature__ = True
__docstring_strip_signature__ = True
- _non_data_descriptor: bool = False
objtype = 'attribute'
member_order = 60
@@ -2481,20 +2432,30 @@ def update_annotations(self, parent: Any) -> None:
pass
def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror)
- if self._is_slots_attribute():
- self.object = SLOTS_ATTR
- elif inspect.isenumattribute(self.object):
- self.object = self.object.value
- if self.parent:
- self.update_annotations(self.parent)
-
- if ret and not inspect.isattributedescriptor(self.object):
- self._non_data_descriptor = True
- else:
- self._non_data_descriptor = False
+ try:
+ im = _import_assignment_attribute(
+ module_name=self.modname,
+ obj_path=self.objpath,
+ mock_imports=self.config.autodoc_mock_imports,
+ type_aliases=self.config.autodoc_type_aliases,
+ get_attr=self.get_attr,
+ )
+ except ImportError as exc:
+ if raiseerror:
+ raise
+ logger.warning(exc.args[0], type='autodoc', subtype='import_object')
+ self.env.note_reread()
+ return False
- return ret
+ self.object = im.__dict__.pop('obj', None)
+ for k in 'module', 'parent', 'object_name':
+ if hasattr(im, k):
+ setattr(self, k, getattr(im, k))
+ return True
+
+ @property
+ def _is_non_data_descriptor(self) -> bool:
+ return not inspect.isattributedescriptor(self.object)
def get_real_modname(self) -> str:
real_modname = self.get_attr(self.parent or self.object, '__module__', None)
@@ -2507,8 +2468,7 @@ def should_suppress_value_header(self) -> bool:
return True
if self.object is UNINITIALIZED_ATTR:
return True
- _non_data_descriptor = getattr(self, '_non_data_descriptor', False)
- if not _non_data_descriptor or inspect.isgenericalias(self.object):
+ if not self._is_non_data_descriptor or inspect.isgenericalias(self.object):
return True
else:
doc = self.get_doc()
@@ -2561,25 +2521,15 @@ def add_directive_header(self, sig: str) -> None:
pass
def get_attribute_comment(self, parent: Any, attrname: str) -> list[str] | None:
- for cls in inspect.getmro(parent):
- try:
- module = safe_getattr(cls, '__module__')
- qualname = safe_getattr(cls, '__qualname__')
-
- analyzer = ModuleAnalyzer.for_module(module)
- analyzer.analyze()
- if qualname and self.objpath:
- key = (qualname, attrname)
- if key in analyzer.attr_docs:
- return list(analyzer.attr_docs[key])
- except (AttributeError, PycodeError):
- pass
-
- return None
+ return _get_attribute_comment(
+ parent=parent, obj_path=self.objpath, attrname=attrname
+ )
def get_doc(self) -> list[list[str]] | None:
# Check the attribute has a docstring-comment
- comment = self.get_attribute_comment(self.parent, self.objpath[-1])
+ comment = _get_attribute_comment(
+ parent=self.parent, obj_path=self.objpath, attrname=self.objpath[-1]
+ )
if comment:
return [comment]
@@ -2611,14 +2561,16 @@ def get_doc(self) -> list[list[str]] | None:
if (
self.object is RUNTIME_INSTANCE_ATTRIBUTE
- and self._is_runtime_instance_attribute_not_commented(self.parent)
+ and _is_runtime_instance_attribute_not_commented(
+ parent=self.parent, obj_path=self.objpath
+ )
):
return None
if self.object is UNINITIALIZED_ATTR:
return None
- if self._non_data_descriptor:
+ if self._is_non_data_descriptor:
# the docstring of non-data descriptor is very probably
# the wrong thing to display
return None
@@ -2654,6 +2606,9 @@ class PropertyDocumenter(Documenter):
# before AttributeDocumenter
priority = AttributeDocumenter.priority + 1
+ # Support for class properties. Note: these only work on Python 3.9 to 3.12.
+ isclassmethod: bool = False
+
@classmethod
def can_document_member(
cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
@@ -2669,22 +2624,27 @@ def can_document_member(
return False
def import_object(self, raiseerror: bool = False) -> bool:
- """Check the existence of uninitialized instance attribute when failed to import
- the attribute.
- """
- ret = super().import_object(raiseerror)
- if ret and not inspect.isproperty(self.object):
- __dict__ = safe_getattr(self.parent, '__dict__', {})
- obj = __dict__.get(self.objpath[-1])
- if isinstance(obj, classmethod) and inspect.isproperty(obj.__func__):
- self.object = obj.__func__
- self.isclassmethod: bool = True
- return True
- else:
- return False
+ try:
+ im = _import_property(
+ module_name=self.modname,
+ obj_path=self.objpath,
+ mock_imports=self.config.autodoc_mock_imports,
+ get_attr=self.get_attr,
+ )
+ except ImportError as exc:
+ if raiseerror:
+ raise
+ logger.warning(exc.args[0], type='autodoc', subtype='import_object')
+ self.env.note_reread()
+ return False
+ if im is None:
+ return False
- self.isclassmethod = False
- return ret
+ self.object = im.__dict__.pop('obj', None)
+ for k in 'module', 'parent', 'object_name', 'isclassmethod':
+ if hasattr(im, k):
+ setattr(self, k, getattr(im, k))
+ return True
def format_args(self, **kwargs: Any) -> str:
func = self._get_property_getter()
diff --git a/sphinx/ext/autodoc/importer.py b/sphinx/ext/autodoc/importer.py
index e37df21a614..88cbc230023 100644
--- a/sphinx/ext/autodoc/importer.py
+++ b/sphinx/ext/autodoc/importer.py
@@ -13,12 +13,18 @@
from importlib.machinery import EXTENSION_SUFFIXES
from importlib.util import decode_source, find_spec, module_from_spec, spec_from_loader
from pathlib import Path
-from typing import TYPE_CHECKING, NamedTuple
+from typing import TYPE_CHECKING, NamedTuple, NewType, TypeVar
from sphinx.errors import PycodeError
-from sphinx.ext.autodoc.mock import ismock, undecorate
+from sphinx.ext.autodoc._sentinels import (
+ INSTANCE_ATTR,
+ RUNTIME_INSTANCE_ATTRIBUTE,
+ SLOTS_ATTR,
+ UNINITIALIZED_ATTR,
+)
+from sphinx.ext.autodoc.mock import ismock, mock, undecorate
from sphinx.pycode import ModuleAnalyzer
-from sphinx.util import logging
+from sphinx.util import inspect, logging
from sphinx.util.inspect import (
getannotations,
getmro,
@@ -28,9 +34,10 @@
safe_getattr,
unwrap_all,
)
+from sphinx.util.typing import get_type_hints
if TYPE_CHECKING:
- from collections.abc import Iterator, Mapping
+ from collections.abc import Iterator, Mapping, Sequence
from importlib.machinery import ModuleSpec
from types import ModuleType
from typing import Any, Protocol
@@ -45,6 +52,47 @@ def __call__(self, obj: Any, name: str, default: Any = ..., /) -> Any: ...
logger = logging.getLogger(__name__)
+class _ImportedObject:
+ #: module containing the object to document
+ module: ModuleType | None
+
+ #: parent/owner of the object to document
+ parent: Any
+
+ #: name of the object to document
+ object_name: str
+
+ #: object to document
+ obj: Any
+
+ # ClassDocumenter
+ doc_as_attr: bool
+ objpath: list[str]
+ modname: str
+
+ # MethodDocumenter
+ member_order: int
+
+ # PropertyDocumenter
+ isclassmethod: bool
+
+ def __init__(
+ self,
+ *,
+ module: ModuleType | None = None,
+ parent: Any,
+ object_name: str = '',
+ obj: Any,
+ ) -> None:
+ self.module = module
+ self.parent = parent
+ self.object_name = object_name
+ self.obj = obj
+
+ def __repr__(self) -> str:
+ return f'<{self.__class__.__name__} {self.__dict__}>'
+
+
def _filter_enum_dict(
enum_class: type[Enum],
attrgetter: _AttrGetter,
@@ -256,37 +304,51 @@ def import_object(
objtype: str = '',
attrgetter: _AttrGetter = safe_getattr,
) -> Any:
- if objpath:
- logger.debug('[autodoc] from %s import %s', modname, '.'.join(objpath))
+ ret = _import_from_module_and_path(
+ module_name=modname, obj_path=objpath, get_attr=attrgetter
+ )
+ if isinstance(ret, _ImportedObject):
+ return [ret.module, ret.parent, ret.object_name, ret.obj]
+ return None
+
+
+def _import_from_module_and_path(
+ *,
+ module_name: str,
+ obj_path: Sequence[str],
+ get_attr: _AttrGetter = safe_getattr,
+) -> _ImportedObject:
+ obj_path = list(obj_path)
+ if obj_path:
+ logger.debug('[autodoc] from %s import %s', module_name, '.'.join(obj_path))
else:
- logger.debug('[autodoc] import %s', modname)
+ logger.debug('[autodoc] import %s', module_name)
+ module = None
+ exc_on_importing = None
try:
- module = None
- exc_on_importing = None
- objpath = objpath.copy()
while module is None:
try:
- module = import_module(modname, try_reload=True)
- logger.debug('[autodoc] import %s => %r', modname, module)
+ module = import_module(module_name, try_reload=True)
+ logger.debug('[autodoc] import %s => %r', module_name, module)
except ImportError as exc:
- logger.debug('[autodoc] import %s => failed', modname)
+ logger.debug('[autodoc] import %s => failed', module_name)
exc_on_importing = exc
- if '.' in modname:
- # retry with parent module
- modname, name = modname.rsplit('.', 1)
- objpath.insert(0, name)
- else:
+ if '.' not in module_name:
raise
+ # retry with parent module
+ module_name, _, name = module_name.rpartition('.')
+ obj_path.insert(0, name)
+
obj = module
parent = None
- object_name = None
- for attrname in objpath:
+ object_name = ''
+ for attr_name in obj_path:
parent = obj
- logger.debug('[autodoc] getattr(_, %r)', attrname)
- mangled_name = mangle(obj, attrname)
- obj = attrgetter(obj, mangled_name)
+ logger.debug('[autodoc] getattr(_, %r)', attr_name)
+ mangled_name = mangle(obj, attr_name)
+ obj = get_attr(obj, mangled_name)
try:
logger.debug('[autodoc] => %r', obj)
@@ -295,21 +357,26 @@ def import_object(
# See: https://github.com/sphinx-doc/sphinx/issues/9095
logger.debug('[autodoc] => %r', (obj,))
- object_name = attrname
- return [module, parent, object_name, obj]
+ object_name = attr_name
+ return _ImportedObject(
+ module=module,
+ parent=parent,
+ object_name=object_name,
+ obj=obj,
+ )
except (AttributeError, ImportError) as exc:
if isinstance(exc, AttributeError) and exc_on_importing:
# restore ImportError
exc = exc_on_importing
- if objpath:
- errmsg = 'autodoc: failed to import %s %r from module %r' % (
- objtype,
- '.'.join(objpath),
- modname,
- )
+ if obj_path:
+ dotted_objpath = '.'.join(obj_path)
+ err_parts = [
+ f'autodoc: failed to import {dotted_objpath!r} '
+ f'from module {module_name!r}'
+ ]
else:
- errmsg = f'autodoc: failed to import {objtype} {modname!r}'
+ err_parts = [f'autodoc: failed to import {module_name!r}']
if isinstance(exc, ImportError):
# import_module() raises ImportError having real exception obj and
@@ -317,19 +384,24 @@ def import_object(
real_exc = exc.args[0]
traceback_msg = traceback.format_exception(exc)
if isinstance(real_exc, SystemExit):
- errmsg += (
- '; the module executes module level statement '
+ err_parts.append(
+ 'the module executes a module-level statement '
+ 'that might call sys.exit().'
)
elif isinstance(real_exc, ImportError) and real_exc.args:
- errmsg += '; the following exception was raised:\n%s' % real_exc.args[0]
+ err_parts.append(
+ f'the following exception was raised:\n{real_exc.args[0]}'
+ )
else:
- errmsg += '; the following exception was raised:\n%s' % traceback_msg
+ err_parts.append(
+ f'the following exception was raised:\n{traceback_msg}'
+ )
else:
- errmsg += (
- '; the following exception was raised:\n%s' % traceback.format_exc()
+ err_parts.append(
+ f'the following exception was raised:\n{traceback.format_exc()}'
)
+ errmsg = '; '.join(err_parts)
logger.debug(errmsg)
raise ImportError(errmsg) from exc
@@ -347,8 +419,6 @@ def get_object_members(
analyzer: ModuleAnalyzer | None = None,
) -> dict[str, Attribute]:
"""Get members and attributes of target object."""
- from sphinx.ext.autodoc._sentinels import INSTANCE_ATTR
-
# the members directly defined in the class
obj_dict = attrgetter(subject, '__dict__', {})
@@ -372,8 +442,6 @@ def get_object_members(
try:
subject___slots__ = getslots(subject)
if subject___slots__:
- from sphinx.ext.autodoc._sentinels import SLOTS_ATTR
-
for name in subject___slots__:
members[name] = Attribute(
name=name, directly_defined=True, value=SLOTS_ATTR
@@ -420,7 +488,6 @@ def get_class_members(
) -> dict[str, ObjectMember]:
"""Get members and attributes of target class."""
from sphinx.ext.autodoc._documenters import ObjectMember
- from sphinx.ext.autodoc._sentinels import INSTANCE_ATTR
# the members directly defined in the class
obj_dict = attrgetter(subject, '__dict__', {})
@@ -443,8 +510,6 @@ def get_class_members(
try:
subject___slots__ = getslots(subject)
if subject___slots__:
- from sphinx.ext.autodoc._sentinels import SLOTS_ATTR
-
for name, docstring in subject___slots__.items():
members[name] = ObjectMember(
name, SLOTS_ATTR, class_=subject, docstring=docstring
@@ -523,3 +588,318 @@ def get_class_members(
pass
return members
+
+
+def _import_object(
+ *,
+ module_name: str,
+ obj_path: Sequence[str],
+ mock_imports: list[str],
+ get_attr: _AttrGetter = safe_getattr,
+) -> _ImportedObject:
+ """Import the object given by *module_name* and *obj_path*.
+
+ Returns an ``_ImportedObject`` on success; raises ``ImportError``
+ if the import fails.
+ """
+ try:
+ with mock(mock_imports):
+ im = _import_from_module_and_path(
+ module_name=module_name, obj_path=obj_path, get_attr=get_attr
+ )
+ if ismock(im.obj):
+ im.obj = undecorate(im.obj)
+ return im
+ except ImportError: # NoQA: TRY203
+ raise
+
+
+def _import_class(
+ *,
+ module_name: str,
+ obj_path: Sequence[str],
+ mock_imports: list[str],
+ get_attr: _AttrGetter = safe_getattr,
+) -> _ImportedObject:
+ im = _import_object(
+ module_name=module_name,
+ obj_path=obj_path,
+ mock_imports=mock_imports,
+ get_attr=get_attr,
+ )
+
+ # if the class is documented under another name, document it
+ # as data/attribute
+ if hasattr(im.obj, '__name__'):
+ im.doc_as_attr = obj_path[-1] != im.obj.__name__
+ else:
+ im.doc_as_attr = True
+
+ if isinstance(im.obj, NewType | TypeVar):
+ obj_module_name = getattr(im.obj, '__module__', module_name)
+ if obj_module_name != module_name and module_name.startswith(obj_module_name):
+ bases = module_name[len(obj_module_name) :].strip('.').split('.')
+ im.objpath = bases + list(obj_path)
+ im.modname = obj_module_name
+ return im
+
+
+def _import_method(
+ *,
+ module_name: str,
+ obj_path: Sequence[str],
+ member_order: int,
+ mock_imports: list[str],
+ get_attr: _AttrGetter = safe_getattr,
+) -> _ImportedObject:
+ im = _import_object(
+ module_name=module_name,
+ obj_path=obj_path,
+ mock_imports=mock_imports,
+ get_attr=get_attr,
+ )
+
+ # to distinguish classmethod/staticmethod
+ obj = im.parent.__dict__.get(im.object_name, im.obj)
+ if inspect.isstaticmethod(obj, cls=im.parent, name=im.object_name):
+ # document static members before regular methods
+ im.member_order = member_order - 1
+ elif inspect.isclassmethod(obj):
+ # document class methods before static methods as
+ # they usually behave as alternative constructors
+ im.member_order = member_order - 2
+ return im
+
+
+def _import_property(
+ *,
+ module_name: str,
+ obj_path: Sequence[str],
+ mock_imports: list[str],
+ get_attr: _AttrGetter = safe_getattr,
+) -> _ImportedObject | None:
+ im = _import_object(
+ module_name=module_name,
+ obj_path=obj_path,
+ mock_imports=mock_imports,
+ get_attr=get_attr,
+ )
+
+ if not inspect.isproperty(im.obj):
+        # Support for class properties. Note: chaining classmethod and
+        # property only works on Python 3.9 to 3.12 (removed in 3.13).
+ __dict__ = safe_getattr(im.parent, '__dict__', {})
+ obj = __dict__.get(obj_path[-1])
+ if isinstance(obj, classmethod) and inspect.isproperty(obj.__func__):
+ im.obj = obj.__func__
+ im.isclassmethod = True
+ return im
+ else:
+ return None
+
+ return im
+
+
+def _import_assignment_data(
+ *,
+ module_name: str,
+ obj_path: Sequence[str],
+ mock_imports: list[str],
+ type_aliases: dict[str, Any] | None,
+ get_attr: _AttrGetter = safe_getattr,
+) -> _ImportedObject:
+ import_failed = True
+ try:
+ with mock(mock_imports):
+ im = _import_from_module_and_path(
+ module_name=module_name, obj_path=obj_path, get_attr=get_attr
+ )
+ if ismock(im.obj):
+ im.obj = undecorate(im.obj)
+ import_failed = False
+    except ImportError:
+        # annotation-only module attribute (PEP 526)
+ try:
+ with mock(mock_imports):
+ parent = import_module(module_name)
+ annotations = get_type_hints(
+ parent, None, type_aliases, include_extras=True
+ )
+ if obj_path[-1] in annotations:
+ im = _ImportedObject(
+ parent=parent,
+ obj=UNINITIALIZED_ATTR,
+ )
+ import_failed = False
+ except ImportError:
+ pass
+
+        if import_failed:
+            # The fallback also failed; re-raise the original ImportError.
+            raise
+
+ # Update __annotations__ to support type_comment and so on
+ annotations = dict(inspect.getannotations(im.parent))
+ im.parent.__annotations__ = annotations
+
+ try:
+ analyzer = ModuleAnalyzer.for_module(module_name)
+ analyzer.analyze()
+ for (classname, attrname), annotation in analyzer.annotations.items():
+ if not classname and attrname not in annotations:
+ annotations[attrname] = annotation
+ except PycodeError:
+ pass
+ return im
+
+
+def _import_assignment_attribute(
+ *,
+ module_name: str,
+ obj_path: Sequence[str],
+ mock_imports: list[str],
+ type_aliases: dict[str, Any] | None,
+ get_attr: _AttrGetter = safe_getattr,
+) -> _ImportedObject:
+ import_failed = True
+ try:
+ with mock(mock_imports):
+ im = _import_from_module_and_path(
+ module_name=module_name, obj_path=obj_path, get_attr=get_attr
+ )
+ if ismock(im.obj):
+ im.obj = undecorate(im.obj)
+ import_failed = False
+    except ImportError:
+        # Support runtime & uninitialized instance attributes.
+        #
+        # The former are defined in __init__() methods with doc-comments.
+        # The latter are PEP 526-style annotation-only attributes.
+ #
+ # class Foo:
+ # attr: int #: uninitialized attribute
+ #
+ # def __init__(self):
+ # self.attr = None #: runtime attribute
+ try:
+ with mock(mock_imports):
+ ret = _import_from_module_and_path(
+ module_name=module_name, obj_path=obj_path[:-1], get_attr=get_attr
+ )
+ parent = ret.obj
+ if _is_runtime_instance_attribute(parent=parent, obj_path=obj_path):
+ im = _ImportedObject(
+ parent=parent,
+ obj=RUNTIME_INSTANCE_ATTRIBUTE,
+ )
+ import_failed = False
+ elif _is_uninitialized_instance_attribute(
+ parent=parent, obj_path=obj_path, type_aliases=type_aliases
+ ):
+ im = _ImportedObject(
+ parent=parent,
+ obj=UNINITIALIZED_ATTR,
+ )
+ import_failed = False
+ except ImportError:
+ pass
+
+        if import_failed:
+            # The fallback also failed; re-raise the original ImportError.
+            raise
+
+ if _is_slots_attribute(parent=im.parent, obj_path=obj_path):
+ im.obj = SLOTS_ATTR
+ elif inspect.isenumattribute(im.obj):
+ im.obj = im.obj.value
+ if im.parent:
+ # Update __annotations__ to support type_comment and so on.
+ try:
+ annotations = dict(inspect.getannotations(im.parent))
+ im.parent.__annotations__ = annotations
+
+ for cls in inspect.getmro(im.parent):
+ try:
+ module = safe_getattr(cls, '__module__')
+ qualname = safe_getattr(cls, '__qualname__')
+
+ analyzer = ModuleAnalyzer.for_module(module)
+ analyzer.analyze()
+ anns = analyzer.annotations
+ for (classname, attrname), annotation in anns.items():
+ if classname == qualname and attrname not in annotations:
+ annotations[attrname] = annotation
+ except (AttributeError, PycodeError):
+ pass
+ except (AttributeError, TypeError):
+ # Failed to set __annotations__ (built-in, extensions, etc.)
+ pass
+
+ return im
+
+
+def _is_runtime_instance_attribute(*, parent: Any, obj_path: Sequence[str]) -> bool:
+ """Check the subject is an attribute defined in __init__()."""
+ # An instance variable defined in __init__().
+ if _get_attribute_comment(parent=parent, obj_path=obj_path, attrname=obj_path[-1]):
+ return True
+ return _is_runtime_instance_attribute_not_commented(
+ parent=parent, obj_path=obj_path
+ )
+
+
+def _is_runtime_instance_attribute_not_commented(
+ *, parent: Any, obj_path: Sequence[str]
+) -> bool:
+ """Check the subject is an attribute defined in __init__() without comment."""
+ for cls in inspect.getmro(parent):
+ try:
+ module = safe_getattr(cls, '__module__')
+ qualname = safe_getattr(cls, '__qualname__')
+
+ analyzer = ModuleAnalyzer.for_module(module)
+ analyzer.analyze()
+ if qualname and obj_path:
+ key = f'{qualname}.{obj_path[-1]}'
+ if key in analyzer.tagorder:
+ return True
+ except (AttributeError, PycodeError):
+ pass
+
+ return False
+
+
+def _get_attribute_comment(
+ parent: Any, obj_path: Sequence[str], attrname: str
+) -> list[str] | None:
+ for cls in inspect.getmro(parent):
+ try:
+ module = safe_getattr(cls, '__module__')
+ qualname = safe_getattr(cls, '__qualname__')
+
+ analyzer = ModuleAnalyzer.for_module(module)
+ analyzer.analyze()
+ if qualname and obj_path:
+ key = (qualname, attrname)
+ if key in analyzer.attr_docs:
+ return list(analyzer.attr_docs[key])
+ except (AttributeError, PycodeError):
+ pass
+
+ return None
+
+
+def _is_uninitialized_instance_attribute(
+ *, parent: Any, obj_path: Sequence[str], type_aliases: dict[str, Any] | None
+) -> bool:
+ """Check the subject is an annotation only attribute."""
+ annotations = get_type_hints(parent, None, type_aliases, include_extras=True)
+ return obj_path[-1] in annotations
+
+
+def _is_slots_attribute(*, parent: Any, obj_path: Sequence[str]) -> bool:
+ """Check the subject is an attribute in __slots__."""
+ try:
+ if parent___slots__ := inspect.getslots(parent):
+ return obj_path[-1] in parent___slots__
+ else:
+ return False
+ except (ValueError, TypeError):
+ return False
diff --git a/tests/test_extensions/test_ext_autodoc.py b/tests/test_extensions/test_ext_autodoc.py
index f47515572c8..618f426fc53 100644
--- a/tests/test_extensions/test_ext_autodoc.py
+++ b/tests/test_extensions/test_ext_autodoc.py
@@ -555,21 +555,19 @@ def test_autodoc_warnings(app):
# can't import module
do_autodoc(app, 'module', 'unknown')
- assert "failed to import module 'unknown'" in app.warning.getvalue()
+ assert "failed to import 'unknown'" in app.warning.getvalue()
# missing function
do_autodoc(app, 'function', 'unknown')
assert "import for autodocumenting 'unknown'" in app.warning.getvalue()
do_autodoc(app, 'function', 'target.unknown')
- assert (
- "failed to import function 'unknown' from module 'target'"
- ) in app.warning.getvalue()
+ assert "failed to import 'unknown' from module 'target'" in app.warning.getvalue()
# missing method
do_autodoc(app, 'method', 'target.Class.unknown')
assert (
- "failed to import method 'Class.unknown' from module 'target'"
+ "failed to import 'Class.unknown' from module 'target'"
) in app.warning.getvalue()
diff --git a/tests/test_extensions/test_ext_autodoc_configs.py b/tests/test_extensions/test_ext_autodoc_configs.py
index ab7539190e0..bde2d0234ca 100644
--- a/tests/test_extensions/test_ext_autodoc_configs.py
+++ b/tests/test_extensions/test_ext_autodoc_configs.py
@@ -647,7 +647,7 @@ def test_mocked_module_imports(app: SphinxTestApp) -> None:
options = {'members': 'TestAutodoc,decorated_function,func,Alias'}
actual = do_autodoc(app, 'module', 'target.need_mocks', options)
assert list(actual) == []
- assert "autodoc: failed to import module 'need_mocks'" in app.warning.getvalue()
+ assert "autodoc: failed to import 'need_mocks'" in app.warning.getvalue()
# with autodoc_mock_imports
app.config.autodoc_mock_imports = [
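
A note on the detection logic added in this patch: annotation-only attributes
(PEP 526) are visible to ``typing.get_type_hints()`` even though no value
exists at runtime, while attributes assigned only inside ``__init__()`` are
not, which is why the importer falls back to source analysis for the latter.
A minimal standalone sketch, using only the standard library (the class here
is illustrative, not a Sphinx API)::

    from typing import get_type_hints

    class Foo:
        attr: int  # annotation-only attribute (PEP 526): no runtime value

        def __init__(self) -> None:
            self.runtime = 1  # runtime instance attribute

    hints = get_type_hints(Foo, include_extras=True)
    print('attr' in hints)       # True: found from the annotation alone
    print('runtime' in hints)    # False: only discoverable via source analysis
    print(hasattr(Foo, 'attr'))  # False: the class holds no value for it
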
From 813bd3bdf70a6e48883ac870059cf0e6e38f45fd Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Mon, 28 Jul 2025 12:47:26 +0100
Subject: [PATCH 208/466] Remove mypy overrides for
``tests/test_extensions/test_ext_viewcode.py`` (#13774)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
---
pyproject.toml | 1 -
tests/test_extensions/test_ext_viewcode.py | 9 +++++----
2 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index eed502ba153..4e4b0db33a9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -269,7 +269,6 @@ module = [
"tests.test_extensions.test_ext_intersphinx_cache",
"tests.test_extensions.test_ext_math",
"tests.test_extensions.test_ext_napoleon",
- "tests.test_extensions.test_ext_viewcode",
# tests/test_markup
"tests.test_markup.test_markup",
"tests.test_markup.test_parser",
diff --git a/tests/test_extensions/test_ext_viewcode.py b/tests/test_extensions/test_ext_viewcode.py
index eb8b5b141d5..03493dda4ff 100644
--- a/tests/test_extensions/test_ext_viewcode.py
+++ b/tests/test_extensions/test_ext_viewcode.py
@@ -9,9 +9,8 @@
import pygments
import pytest
-from sphinx.testing.util import SphinxTestApp
-
if TYPE_CHECKING:
+ from sphinx.application import Sphinx
from sphinx.testing.util import SphinxTestApp
@@ -130,7 +129,9 @@ def test_linkcode(app: SphinxTestApp) -> None:
@pytest.mark.sphinx('html', testroot='ext-viewcode-find', freshenv=True)
def test_local_source_files(app: SphinxTestApp) -> None:
- def find_source(app, modname):
+ def find_source(
+ app: Sphinx, modname: str
+ ) -> tuple[str, dict[str, tuple[str, int, int]]]:
if modname == 'not_a_package':
source = app.srcdir / 'not_a_package/__init__.py'
tags = {
@@ -174,7 +175,7 @@ def find_source(app, modname):
@pytest.mark.sphinx('html', testroot='ext-viewcode-find-package', freshenv=True)
-def test_find_local_package_import_path(app, status, warning):
+def test_find_local_package_import_path(app: Sphinx) -> None:
app.build(force_all=True)
result = (app.outdir / 'index.html').read_text(encoding='utf8')
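
The viewcode change above uses the ``if TYPE_CHECKING:`` idiom that recurs
throughout this series: imports needed only for annotations are deferred to
type-checking time, avoiding runtime import cost and import cycles. A minimal
sketch, assuming ``from __future__ import annotations`` so the names need not
exist at runtime::

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Seen by the type checker only; never imported at runtime.
        from sphinx.application import Sphinx

    def find_source(
        app: Sphinx, modname: str
    ) -> tuple[str, dict[str, tuple[str, int, int]]]: ...
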
From 1883c0dfc4169639d3f5952080fb87edf0ce39da Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Mon, 28 Jul 2025 12:49:07 +0100
Subject: [PATCH 209/466] Remove mypy overrides for
``tests.test_builders.test_build_html_numfig`` (#13775)
---
pyproject.toml | 1 -
tests/test_builders/test_build_html_numfig.py | 55 ++++++++++++++++---
2 files changed, 46 insertions(+), 10 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 4e4b0db33a9..75ae0f71a9d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -255,7 +255,6 @@ module = [
"tests.test_builders.test_build_html_5_output",
"tests.test_builders.test_build_html_assets",
"tests.test_builders.test_build_html_maths",
- "tests.test_builders.test_build_html_numfig",
"tests.test_builders.test_build_html_toctree",
"tests.test_builders.test_build_linkcheck",
# tests/test_directives
diff --git a/tests/test_builders/test_build_html_numfig.py b/tests/test_builders/test_build_html_numfig.py
index e338c5b92e5..144d9958d0d 100644
--- a/tests/test_builders/test_build_html_numfig.py
+++ b/tests/test_builders/test_build_html_numfig.py
@@ -11,6 +11,10 @@
from tests.test_builders.xpath_util import check_xpath
if TYPE_CHECKING:
+ from collections.abc import Callable
+ from pathlib import Path
+ from xml.etree.ElementTree import ElementTree
+
from sphinx.testing.util import SphinxTestApp
@@ -73,7 +77,14 @@ def test_numfig_disabled_warn(app: SphinxTestApp) -> None:
)
@pytest.mark.sphinx('html', testroot='numfig')
@pytest.mark.test_params(shared_result='test_build_html_numfig')
-def test_numfig_disabled(app, cached_etree_parse, fname, path, check, be_found):
+def test_numfig_disabled(
+ app: SphinxTestApp,
+ cached_etree_parse: Callable[[Path], ElementTree],
+ fname: str,
+ path: str,
+ check: str | None,
+ be_found: bool,
+) -> None:
app.build()
check_xpath(cached_etree_parse(app.outdir / fname), fname, path, check, be_found)
@@ -305,8 +316,13 @@ def test_numfig_without_numbered_toctree_warn(app: SphinxTestApp) -> None:
confoverrides={'numfig': True},
)
def test_numfig_without_numbered_toctree(
- app, cached_etree_parse, fname, path, check, be_found
-):
+ app: SphinxTestApp,
+ cached_etree_parse: Callable[[Path], ElementTree],
+ fname: str,
+ path: str,
+ check: str | None,
+ be_found: bool,
+) -> None:
# remove :numbered: option
index = (app.srcdir / 'index.rst').read_text(encoding='utf8')
index = re.sub(':numbered:.*', '', index)
@@ -538,8 +554,13 @@ def test_numfig_with_numbered_toctree_warn(app: SphinxTestApp) -> None:
)
@pytest.mark.test_params(shared_result='test_build_html_numfig_on')
def test_numfig_with_numbered_toctree(
- app, cached_etree_parse, fname, path, check, be_found
-):
+ app: SphinxTestApp,
+ cached_etree_parse: Callable[[Path], ElementTree],
+ fname: str,
+ path: str,
+ check: str | None,
+ be_found: bool,
+) -> None:
app.build()
check_xpath(cached_etree_parse(app.outdir / fname), fname, path, check, be_found)
@@ -780,7 +801,14 @@ def test_numfig_with_prefix_warn(app: SphinxTestApp) -> None:
},
)
@pytest.mark.test_params(shared_result='test_build_html_numfig_format_warn')
-def test_numfig_with_prefix(app, cached_etree_parse, fname, path, check, be_found):
+def test_numfig_with_prefix(
+ app: SphinxTestApp,
+ cached_etree_parse: Callable[[Path], ElementTree],
+ fname: str,
+ path: str,
+ check: str | None,
+ be_found: bool,
+) -> None:
app.build()
check_xpath(cached_etree_parse(app.outdir / fname), fname, path, check, be_found)
@@ -1006,8 +1034,13 @@ def test_numfig_with_secnum_depth_warn(app: SphinxTestApp) -> None:
)
@pytest.mark.test_params(shared_result='test_build_html_numfig_depth_2')
def test_numfig_with_secnum_depth(
- app, cached_etree_parse, fname, path, check, be_found
-):
+ app: SphinxTestApp,
+ cached_etree_parse: Callable[[Path], ElementTree],
+ fname: str,
+ path: str,
+ check: str | None,
+ be_found: bool,
+) -> None:
app.build()
check_xpath(cached_etree_parse(app.outdir / fname), fname, path, check, be_found)
@@ -1103,6 +1136,10 @@ def test_numfig_with_secnum_depth(
confoverrides={'numfig': True},
)
@pytest.mark.test_params(shared_result='test_build_html_numfig_on')
-def test_numfig_with_singlehtml(app, cached_etree_parse, expect):
+def test_numfig_with_singlehtml(
+ app: SphinxTestApp,
+ cached_etree_parse: Callable[[Path], ElementTree],
+ expect: tuple[str, str, bool],
+) -> None:
app.build()
check_xpath(cached_etree_parse(app.outdir / 'index.html'), 'index.html', *expect)
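
These numfig tests assert XPath queries against the built HTML. Roughly the
kind of check involved, using only the standard library (a simplified
stand-in; the real ``check_xpath`` helper does more)::

    from xml.etree.ElementTree import fromstring

    tree = fromstring('<html><body><p id="x">Fig. 1.1</p></body></html>')
    nodes = tree.findall(".//p[@id='x']")
    assert nodes and nodes[0].text == 'Fig. 1.1'
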
From dfe9c2c98c490cc26277bb0f5aa414a0ca85bd01 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Tue, 29 Jul 2025 01:03:31 +0200
Subject: [PATCH 210/466] Make ``_Sentinel`` immutable (#13782)
---
sphinx/ext/autodoc/_sentinels.py | 58 +++++++++++++++++++++++---------
1 file changed, 42 insertions(+), 16 deletions(-)
diff --git a/sphinx/ext/autodoc/_sentinels.py b/sphinx/ext/autodoc/_sentinels.py
index d18aecc8d6a..983fc48d7cd 100644
--- a/sphinx/ext/autodoc/_sentinels.py
+++ b/sphinx/ext/autodoc/_sentinels.py
@@ -2,26 +2,40 @@
TYPE_CHECKING = False
if TYPE_CHECKING:
- from typing import NoReturn
+ from typing import NoReturn, Self, _SpecialForm
class _Sentinel:
"""Create a unique sentinel object."""
- def __init__(self, name: str, /) -> None:
- self._name = name
+ __slots__ = ('_name',)
+
+ _name: str
+
+ def __new__(cls, name: str, /) -> Self:
+ sentinel = super().__new__(cls)
+ object.__setattr__(sentinel, '_name', str(name))
+ return sentinel
def __repr__(self) -> str:
- return f'<{self._name}>'
+ return self._name
+
+ def __setattr__(self, key: str, value: object) -> NoReturn:
+ msg = f'{self._name} is immutable'
+ raise AttributeError(msg)
- def __or__(self, other: object) -> type[_Sentinel | object]:
- return self | other
+ def __or__(self, other: object) -> _SpecialForm:
+ from typing import Union
- def __ror__(self, other: object) -> type[object | _Sentinel]:
- return other | self
+ return Union[self, other] # NoQA: UP007
+
+ def __ror__(self, other: object) -> _SpecialForm:
+ from typing import Union
+
+ return Union[other, self] # NoQA: UP007
def __getstate__(self) -> NoReturn:
- msg = f'Cannot pickle {type(self).__name__!r} object'
+ msg = f'Cannot pickle {self._name}'
raise TypeError(msg)
@@ -42,10 +56,22 @@ def __contains__(self, item: object) -> bool:
return False
-ALL = _All('ALL')
-EMPTY = _Empty('EMPTY')
-UNINITIALIZED_ATTR = _Sentinel('UNINITIALIZED_ATTR')
-INSTANCE_ATTR = _Sentinel('INSTANCE_ATTR')
-SLOTS_ATTR = _Sentinel('SLOTS_ATTR')
-SUPPRESS = _Sentinel('SUPPRESS')
-RUNTIME_INSTANCE_ATTRIBUTE = _Sentinel('RUNTIME_INSTANCE_ATTRIBUTE')
+if TYPE_CHECKING:
+ # For the sole purpose of satisfying the type checker.
+ # fmt: off
+ class ALL: ...
+ class EMPTY: ...
+ class INSTANCE_ATTR: ...
+ class RUNTIME_INSTANCE_ATTRIBUTE: ...
+ class SLOTS_ATTR: ...
+ class SUPPRESS: ...
+ class UNINITIALIZED_ATTR: ...
+ # fmt: on
+else:
+ ALL = _All('ALL')
+ EMPTY = _Empty('EMPTY')
+ INSTANCE_ATTR = _Sentinel('INSTANCE_ATTR')
+ RUNTIME_INSTANCE_ATTRIBUTE = _Sentinel('RUNTIME_INSTANCE_ATTRIBUTE')
+ SLOTS_ATTR = _Sentinel('SLOTS_ATTR')
+ SUPPRESS = _Sentinel('SUPPRESS')
+ UNINITIALIZED_ATTR = _Sentinel('UNINITIALIZED_ATTR')
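
The sentinel pattern above, in isolation: ``__slots__`` plus an overriding
``__setattr__`` makes each instance immutable, and ``object.__setattr__`` is
used exactly once in ``__new__`` to bypass the override. A minimal sketch
(not the Sphinx class itself)::

    class _Sentinel:
        __slots__ = ('_name',)

        def __new__(cls, name: str) -> '_Sentinel':
            sentinel = super().__new__(cls)
            # Bypass the overridden __setattr__ for the one permitted write.
            object.__setattr__(sentinel, '_name', str(name))
            return sentinel

        def __setattr__(self, key: str, value: object) -> None:
            msg = f'{self._name} is immutable'
            raise AttributeError(msg)

        def __repr__(self) -> str:
            return self._name

    MISSING = _Sentinel('MISSING')
    print(MISSING)  # MISSING
    try:
        MISSING._name = 'other'
    except AttributeError as err:
        print(err)  # MISSING is immutable
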
From ead64dfda8598a48c90f03f8dd80f68a16b9dfa7 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Tue, 29 Jul 2025 04:33:05 +0200
Subject: [PATCH 211/466] Introduce a new type for documenter options (#13783)
---
sphinx/ext/autodoc/_directive_options.py | 141 +++++++++++++-----
sphinx/ext/autodoc/_documenters.py | 25 ++--
sphinx/ext/autodoc/_sentinels.py | 49 ++++--
sphinx/ext/autodoc/directive.py | 21 ++-
sphinx/ext/autodoc/typehints.py | 4 +-
sphinx/ext/autosummary/__init__.py | 8 +-
tests/test_extensions/autodoc_util.py | 10 +-
tests/test_extensions/test_ext_autodoc.py | 32 ++--
.../test_ext_autodoc_autoclass.py | 2 +-
9 files changed, 201 insertions(+), 91 deletions(-)
diff --git a/sphinx/ext/autodoc/_directive_options.py b/sphinx/ext/autodoc/_directive_options.py
index 1b2d2fff607..bfbf498bf71 100644
--- a/sphinx/ext/autodoc/_directive_options.py
+++ b/sphinx/ext/autodoc/_directive_options.py
@@ -8,9 +8,11 @@
from sphinx.locale import __
if TYPE_CHECKING:
- from typing import Any
+ from collections.abc import Mapping, Set
+ from typing import Any, Literal, Self
- from sphinx.ext.autodoc._documenters import Documenter
+ from sphinx.ext.autodoc._sentinels import ALL_T, EMPTY_T, SUPPRESS_T
+ from sphinx.util.typing import OptionSpec
# common option names for autodoc directives
@@ -39,64 +41,123 @@
})
+class _AutoDocumenterOptions:
+ # TODO: make immutable.
+
+ no_index: Literal[True] | None = None
+ no_index_entry: Literal[True] | None = None
+
+ # module-like options
+ members: ALL_T | list[str] | None = None
+ undoc_members: Literal[True] | None = None
+ inherited_members: Set[str] | None = None
+ show_inheritance: Literal[True] | None = None
+ synopsis: str | None = None
+ platform: str | None = None
+ deprecated: Literal[True] | None = None
+ member_order: Literal['alphabetical', 'bysource', 'groupwise'] | None = None
+ exclude_members: EMPTY_T | set[str] | None = None
+ private_members: ALL_T | list[str] | None = None
+ special_members: ALL_T | list[str] | None = None
+ imported_members: Literal[True] | None = None
+ ignore_module_all: Literal[True] | None = None
+ no_value: Literal[True] | None = None
+
+ # class-like options (class, exception)
+ class_doc_from: Literal['both', 'class', 'init'] | None = None
+
+ # assignment-like (data, attribute)
+ annotation: SUPPRESS_T | str | None = None
+
+ noindex: Literal[True] | None = None
+
+ def __init__(self, **kwargs: Any) -> None:
+ vars(self).update(kwargs)
+
+ def __repr__(self) -> str:
+ args = ', '.join(f'{k}={v!r}' for k, v in vars(self).items())
+ return f'_AutoDocumenterOptions({args})'
+
+ def __getattr__(self, name: str) -> object:
+ return None # return None for missing attributes
+
+ def copy(self) -> Self:
+ return self.__class__(**vars(self))
+
+ @classmethod
+ def from_directive_options(cls, opts: Mapping[str, Any], /) -> Self:
+ return cls(**{k.replace('-', '_'): v for k, v in opts.items() if v is not None})
+
+ def merge_member_options(self) -> Self:
+ """Merge :private-members: and :special-members: into :members:"""
+ if self.members is ALL:
+ # merging is not needed when members: ALL
+ return self
+
+ members = self.members or []
+ for others in self.private_members, self.special_members:
+ if others is not None and others is not ALL:
+ members.extend(others)
+ new = self.copy()
+ new.members = list(dict.fromkeys(members)) # deduplicate; preserve order
+ return new
+
+
def identity(x: Any) -> Any:
return x
-def members_option(arg: Any) -> object | list[str]:
+def members_option(arg: str | None) -> ALL_T | list[str] | None:
"""Used to convert the :members: option to auto directives."""
- if arg in {None, True}:
+ if arg is None or arg is True:
return ALL
- elif arg is False:
+ if arg is False:
return None
- else:
- return [x.strip() for x in arg.split(',') if x.strip()]
+ return [stripped for x in arg.split(',') if (stripped := x.strip())]
-def exclude_members_option(arg: Any) -> object | set[str]:
+def exclude_members_option(arg: str | None) -> EMPTY_T | set[str]:
"""Used to convert the :exclude-members: option."""
- if arg in {None, True}:
+ if arg is None or arg is True:
return EMPTY
- return {x.strip() for x in arg.split(',') if x.strip()}
+ return {stripped for x in arg.split(',') if (stripped := x.strip())}
-def inherited_members_option(arg: Any) -> set[str]:
+def inherited_members_option(arg: str | None) -> set[str]:
"""Used to convert the :inherited-members: option to auto directives."""
- if arg in {None, True}:
+ if arg is None or arg is True:
return {'object'}
- elif arg:
+ if arg:
return {x.strip() for x in arg.split(',')}
- else:
- return set()
+ return set()
-def member_order_option(arg: Any) -> str | None:
+def member_order_option(
+ arg: str | None,
+) -> Literal['alphabetical', 'bysource', 'groupwise'] | None:
"""Used to convert the :member-order: option to auto directives."""
- if arg in {None, True}:
+ if arg is None or arg is True:
return None
- elif arg in {'alphabetical', 'bysource', 'groupwise'}:
- return arg
- else:
- raise ValueError(__('invalid value for member-order option: %s') % arg)
+ if arg in {'alphabetical', 'bysource', 'groupwise'}:
+ return arg # type: ignore[return-value]
+ raise ValueError(__('invalid value for member-order option: %s') % arg)
-def class_doc_from_option(arg: Any) -> str | None:
+def class_doc_from_option(arg: str | None) -> Literal['both', 'class', 'init']:
"""Used to convert the :class-doc-from: option to autoclass directives."""
if arg in {'both', 'class', 'init'}:
- return arg
- else:
- raise ValueError(__('invalid value for class-doc-from option: %s') % arg)
+ return arg # type: ignore[return-value]
+ raise ValueError(__('invalid value for class-doc-from option: %s') % arg)
-def annotation_option(arg: Any) -> Any:
- if arg in {None, True}:
+def annotation_option(arg: str | None) -> SUPPRESS_T | str | Literal[False]:
+ if arg is None or arg is True:
# suppress showing the representation of the object
return SUPPRESS
- else:
- return arg
+ return arg
-def bool_option(arg: Any) -> bool:
+def bool_option(arg: str | None) -> bool:
"""Used to convert flag options to auto directives. (Instead of
directives.flag(), which returns None).
"""
@@ -137,14 +198,14 @@ def __getattr__(self, name: str) -> Any:
def _process_documenter_options(
- documenter: type[Documenter],
*,
+ option_spec: OptionSpec,
default_options: dict[str, str | bool],
- options: dict[str, str],
-) -> Options:
+ options: dict[str, str | None],
+) -> dict[str, object]:
"""Recognize options of Documenter from user input."""
for name in AUTODOC_DEFAULT_OPTIONS:
- if name not in documenter.option_spec:
+ if name not in option_spec:
continue
negated = options.pop(f'no-{name}', True) is None
@@ -153,13 +214,13 @@ def _process_documenter_options(
# take value from options if present or extend it
# with autodoc_default_options if necessary
if name in AUTODOC_EXTENDABLE_OPTIONS:
- if options[name] is not None and options[name].startswith('+'):
- options[name] = f'{default_options[name]},{options[name][1:]}'
+ opt_value = options[name]
+ if opt_value is not None and opt_value.startswith('+'):
+ options[name] = f'{default_options[name]},{opt_value[1:]}'
else:
options[name] = default_options[name] # type: ignore[assignment]
- elif options.get(name) is not None:
+ elif (opt_value := options.get(name)) is not None:
# remove '+' from option argument if there's nothing to merge it with
- options[name] = options[name].removeprefix('+')
+ options[name] = opt_value.removeprefix('+')
- opts = assemble_option_dict(options.items(), documenter.option_spec)
- return Options(opts)
+ return assemble_option_dict(options.items(), option_spec) # type: ignore[arg-type]
diff --git a/sphinx/ext/autodoc/_documenters.py b/sphinx/ext/autodoc/_documenters.py
index 557be042701..ca35448bc33 100644
--- a/sphinx/ext/autodoc/_documenters.py
+++ b/sphinx/ext/autodoc/_documenters.py
@@ -19,7 +19,6 @@
inherited_members_option,
member_order_option,
members_option,
- merge_members_option,
)
from sphinx.ext.autodoc._sentinels import (
ALL,
@@ -62,6 +61,7 @@
from sphinx.config import Config
from sphinx.environment import BuildEnvironment, _CurrentDocument
from sphinx.events import EventManager
+ from sphinx.ext.autodoc._directive_options import _AutoDocumenterOptions
from sphinx.ext.autodoc.directive import DocumenterBridge
from sphinx.registry import SphinxComponentRegistry
from sphinx.util.typing import OptionSpec, _RestifyMode
@@ -198,7 +198,7 @@ def __init__(
self.env: BuildEnvironment = directive.env
self._current_document: _CurrentDocument = directive.env.current_document
self._events: EventManager = directive.env.events
- self.options = directive.genopt
+ self.options: _AutoDocumenterOptions = directive.genopt
self.name = name
self.indent = indent
# the module and object path within the module, and the fully
@@ -794,7 +794,7 @@ def is_filtered_inherited_member(name: str, obj: Any) -> bool:
elif is_filtered_inherited_member(membername, obj):
keep = False
else:
- keep = has_doc or self.options.undoc_members
+ keep = has_doc or self.options.undoc_members # type: ignore[assignment]
else:
keep = False
elif (namespace, membername) in attr_docs:
@@ -823,7 +823,7 @@ def is_filtered_inherited_member(name: str, obj: Any) -> bool:
keep = False
else:
# ignore undocumented members if :undoc-members: is not given
- keep = has_doc or self.options.undoc_members
+ keep = has_doc or self.options.undoc_members # type: ignore[assignment]
if isinstance(obj, ObjectMember) and obj.skipped:
# forcedly skipped member (ex. a module attribute not defined in __all__)
@@ -873,7 +873,7 @@ def document_members(self, all_members: bool = False) -> None:
if self.objpath:
self._current_document.autodoc_class = self.objpath[0]
- want_all = (
+ want_all = bool(
all_members or self.options.inherited_members or self.options.members is ALL
)
# find out which members are documentable
@@ -1101,7 +1101,7 @@ class ModuleDocumenter(Documenter):
def __init__(self, *args: Any) -> None:
super().__init__(*args)
- merge_members_option(self.options)
+ self.options = self.options.merge_member_options()
self.__all__: Sequence[str] | None = None
def add_content(self, more_content: StringList | None) -> None:
@@ -1210,6 +1210,7 @@ def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
return False, list(members.values())
else:
+ assert self.options.members is not ALL
memberlist = self.options.members or []
ret = []
for name in memberlist:
@@ -1469,12 +1470,12 @@ def __init__(self, *args: Any) -> None:
# show __init__() method
if self.options.special_members is None:
- self.options['special-members'] = ['__new__', '__init__']
+ self.options.special_members = ['__new__', '__init__']
else:
self.options.special_members.append('__new__')
self.options.special_members.append('__init__')
- merge_members_option(self.options)
+ self.options = self.options.merge_member_options()
@classmethod
def can_document_member(
@@ -1769,6 +1770,7 @@ def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
return False, []
# specific members given
selected = []
+ assert self.options.members is not ALL
for name in self.options.members:
if name in members:
selected.append(members[name])
@@ -1800,9 +1802,10 @@ def get_doc(self) -> list[list[str]] | None:
if lines is not None:
return lines
- classdoc_from = self.options.get(
- 'class-doc-from', self.config.autoclass_content
- )
+ if self.options.class_doc_from is not None:
+ classdoc_from = self.options.class_doc_from
+ else:
+ classdoc_from = self.config.autoclass_content
docstrings = []
attrdocstring = getdoc(self.object, self.get_attr)
diff --git a/sphinx/ext/autodoc/_sentinels.py b/sphinx/ext/autodoc/_sentinels.py
index 983fc48d7cd..e1a0fb4a089 100644
--- a/sphinx/ext/autodoc/_sentinels.py
+++ b/sphinx/ext/autodoc/_sentinels.py
@@ -2,7 +2,7 @@
TYPE_CHECKING = False
if TYPE_CHECKING:
- from typing import NoReturn, Self, _SpecialForm
+ from typing import Final, Literal, NoReturn, Self, TypeAlias, _SpecialForm
class _Sentinel:
@@ -42,7 +42,7 @@ def __getstate__(self) -> NoReturn:
class _All(_Sentinel):
"""A special value for :*-members: that matches to any member."""
- def __contains__(self, item: object) -> bool:
+ def __contains__(self, item: object) -> Literal[True]:
return True
def append(self, item: object) -> None:
@@ -52,20 +52,49 @@ def append(self, item: object) -> None:
class _Empty(_Sentinel):
"""A special value for :exclude-members: that never matches to any member."""
- def __contains__(self, item: object) -> bool:
+ def __contains__(self, item: object) -> Literal[False]:
return False
if TYPE_CHECKING:
# For the sole purpose of satisfying the type checker.
# fmt: off
- class ALL: ...
- class EMPTY: ...
- class INSTANCE_ATTR: ...
- class RUNTIME_INSTANCE_ATTRIBUTE: ...
- class SLOTS_ATTR: ...
- class SUPPRESS: ...
- class UNINITIALIZED_ATTR: ...
+ import enum
+ class _AllTC(enum.Enum):
+ ALL = enum.auto()
+
+ def __contains__(self, item: object) -> Literal[True]: return True
+ def append(self, item: object) -> None: pass
+ ALL_T: TypeAlias = Literal[_AllTC.ALL]
+ ALL: Final[ALL_T] = _AllTC.ALL
+
+ class _EmptyTC(enum.Enum):
+ EMPTY = enum.auto()
+
+ def __contains__(self, item: object) -> Literal[False]: return False
+ EMPTY_T: TypeAlias = Literal[_EmptyTC.EMPTY]
+ EMPTY: Final[EMPTY_T] = _EmptyTC.EMPTY
+
+ class _SentinelTC(enum.Enum):
+ INSTANCE_ATTR = enum.auto()
+ RUNTIME_INSTANCE_ATTRIBUTE = enum.auto()
+ SLOTS_ATTR = enum.auto()
+ SUPPRESS = enum.auto()
+ UNINITIALIZED_ATTR = enum.auto()
+ INSTANCE_ATTR_T: TypeAlias = Literal[_SentinelTC.INSTANCE_ATTR]
+ RUNTIME_INSTANCE_ATTRIBUTE_T: TypeAlias = Literal[
+ _SentinelTC.RUNTIME_INSTANCE_ATTRIBUTE
+ ]
+ SLOTS_ATTR_T: TypeAlias = Literal[_SentinelTC.SLOTS_ATTR]
+ SUPPRESS_T: TypeAlias = Literal[_SentinelTC.SUPPRESS]
+ UNINITIALIZED_ATTR_T: TypeAlias = Literal[_SentinelTC.UNINITIALIZED_ATTR]
+ INSTANCE_ATTR: Final[INSTANCE_ATTR_T] = _SentinelTC.INSTANCE_ATTR
+ RUNTIME_INSTANCE_ATTRIBUTE: Final[RUNTIME_INSTANCE_ATTRIBUTE_T] = (
+ _SentinelTC.RUNTIME_INSTANCE_ATTRIBUTE
+ )
+ SLOTS_ATTR: Final[SLOTS_ATTR_T] = _SentinelTC.SLOTS_ATTR
+ SUPPRESS: Final[SUPPRESS_T] = _SentinelTC.SUPPRESS
+ UNINITIALIZED_ATTR: Final[UNINITIALIZED_ATTR_T] = _SentinelTC.UNINITIALIZED_ATTR
# fmt: on
else:
ALL = _All('ALL')
diff --git a/sphinx/ext/autodoc/directive.py b/sphinx/ext/autodoc/directive.py
index 2b6747484c0..0f27fd7df3a 100644
--- a/sphinx/ext/autodoc/directive.py
+++ b/sphinx/ext/autodoc/directive.py
@@ -6,7 +6,10 @@
from docutils import nodes
from docutils.statemachine import StringList
-from sphinx.ext.autodoc._directive_options import _process_documenter_options
+from sphinx.ext.autodoc._directive_options import (
+ _AutoDocumenterOptions,
+ _process_documenter_options,
+)
from sphinx.util import logging
from sphinx.util.docutils import SphinxDirective, switch_source_input
from sphinx.util.parsing import nested_parse_to_nodes
@@ -44,7 +47,7 @@ def __init__(
self,
env: BuildEnvironment,
reporter: Reporter | None,
- options: Options,
+ options: _AutoDocumenterOptions,
lineno: int,
state: Any,
) -> None:
@@ -58,13 +61,16 @@ def __init__(
def process_documenter_options(
- documenter: type[Documenter], config: Config, options: dict[str, str]
+ documenter: type[Documenter], config: Config, options: dict[str, str | None]
) -> Options:
- return _process_documenter_options(
- documenter,
+ from sphinx.ext.autodoc._directive_options import Options
+
+ opts = _process_documenter_options(
+ option_spec=documenter.option_spec,
default_options=config.autodoc_default_options,
options=options,
)
+ return Options(opts)
def parse_generated_content(
@@ -112,11 +118,12 @@ def run(self) -> list[Node]:
# process the options with the selected documenter's option_spec
try:
- documenter_options = _process_documenter_options(
- doccls,
+ opts = _process_documenter_options(
+ option_spec=doccls.option_spec,
default_options=self.config.autodoc_default_options,
options=self.options,
)
+ documenter_options = _AutoDocumenterOptions.from_directive_options(opts)
except (KeyError, ValueError, TypeError) as exc:
# an option is either unknown or has a wrong type
logger.error( # NoQA: TRY400
diff --git a/sphinx/ext/autodoc/typehints.py b/sphinx/ext/autodoc/typehints.py
index 3d94d964613..86d7ad6ba1c 100644
--- a/sphinx/ext/autodoc/typehints.py
+++ b/sphinx/ext/autodoc/typehints.py
@@ -19,7 +19,7 @@
from docutils.nodes import Element
from sphinx.application import Sphinx
- from sphinx.ext.autodoc._directive_options import Options
+ from sphinx.ext.autodoc._directive_options import _AutoDocumenterOptions
from sphinx.util.typing import ExtensionMetadata, _StringifyMode
@@ -28,7 +28,7 @@ def record_typehints(
objtype: str,
name: str,
obj: Any,
- options: Options,
+ options: _AutoDocumenterOptions,
args: str,
retann: str,
) -> None:
diff --git a/sphinx/ext/autosummary/__init__.py b/sphinx/ext/autosummary/__init__.py
index 3fe4832a795..430f8484459 100644
--- a/sphinx/ext/autosummary/__init__.py
+++ b/sphinx/ext/autosummary/__init__.py
@@ -69,7 +69,7 @@
from sphinx.config import Config
from sphinx.environment import BuildEnvironment
from sphinx.errors import PycodeError
-from sphinx.ext.autodoc._directive_options import Options
+from sphinx.ext.autodoc._directive_options import _AutoDocumenterOptions
from sphinx.ext.autodoc._sentinels import INSTANCE_ATTR
from sphinx.ext.autodoc.directive import DocumenterBridge
from sphinx.ext.autodoc.importer import import_module
@@ -177,8 +177,9 @@ def __init__(self) -> None:
app = FakeApplication()
app.config.add('autodoc_class_signature', 'mixed', 'env', ())
env = BuildEnvironment(app) # type: ignore[arg-type]
+ opts = _AutoDocumenterOptions()
state = Struct(document=document)
- super().__init__(env, None, Options(), 0, state)
+ super().__init__(env, None, opts, 0, state)
def get_documenter(app: Sphinx, obj: Any, parent: Any) -> type[Documenter]:
@@ -256,8 +257,9 @@ class Autosummary(SphinxDirective):
}
def run(self) -> list[Node]:
+ opts = _AutoDocumenterOptions()
self.bridge = DocumenterBridge(
- self.env, self.state.document.reporter, Options(), self.lineno, self.state
+ self.env, self.state.document.reporter, opts, self.lineno, self.state
)
names = [
diff --git a/tests/test_extensions/autodoc_util.py b/tests/test_extensions/autodoc_util.py
index 492b6339867..0831a709e08 100644
--- a/tests/test_extensions/autodoc_util.py
+++ b/tests/test_extensions/autodoc_util.py
@@ -3,7 +3,10 @@
from typing import TYPE_CHECKING
from unittest.mock import Mock
-from sphinx.ext.autodoc._directive_options import _process_documenter_options
+from sphinx.ext.autodoc._directive_options import (
+ _AutoDocumenterOptions,
+ _process_documenter_options,
+)
# NEVER import those objects from sphinx.ext.autodoc directly
from sphinx.ext.autodoc.directive import DocumenterBridge
@@ -27,11 +30,12 @@ def do_autodoc(
if not app.env.current_document.docname:
app.env.current_document.docname = 'index' # set dummy docname
doccls = app.registry.documenters[objtype]
- docoptions = _process_documenter_options(
- doccls,
+ opts = _process_documenter_options(
+ option_spec=doccls.option_spec,
default_options=app.config.autodoc_default_options,
options=options,
)
+ docoptions = _AutoDocumenterOptions.from_directive_options(opts)
state = Mock()
state.document.settings.tab_width = 8
bridge = DocumenterBridge(app.env, LoggingReporter(''), docoptions, 1, state)
diff --git a/tests/test_extensions/test_ext_autodoc.py b/tests/test_extensions/test_ext_autodoc.py
index 618f426fc53..4761c5560e0 100644
--- a/tests/test_extensions/test_ext_autodoc.py
+++ b/tests/test_extensions/test_ext_autodoc.py
@@ -17,7 +17,10 @@
import pytest
from sphinx import addnodes
-from sphinx.ext.autodoc._directive_options import Options
+from sphinx.ext.autodoc._directive_options import (
+ _AutoDocumenterOptions,
+ inherited_members_option,
+)
from sphinx.ext.autodoc._documenters import ModuleLevelDocumenter
from sphinx.ext.autodoc._sentinels import ALL
@@ -41,22 +44,22 @@
def make_directive_bridge(env: BuildEnvironment) -> DocumenterBridge:
- options = Options(
- inherited_members=False,
- undoc_members=False,
- private_members=False,
- special_members=False,
- imported_members=False,
- show_inheritance=False,
- no_index=False,
+ options = _AutoDocumenterOptions(
+ inherited_members=None,
+ undoc_members=None,
+ private_members=None,
+ special_members=None,
+ imported_members=None,
+ show_inheritance=None,
+ no_index=None,
annotation=None,
synopsis='',
platform='',
- deprecated=False,
+ deprecated=None,
members=[],
member_order='alphabetical',
exclude_members=set(),
- ignore_module_all=False,
+ ignore_module_all=None,
)
directive = DocumenterBridge(
@@ -451,13 +454,14 @@ def document_members(self, all_members=False):
@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_attrgetter_using(app):
directive = make_directive_bridge(app.env)
- directive.genopt['members'] = ALL
+ options = directive.genopt
+ options.members = ALL
- directive.genopt['inherited_members'] = False
+ options.inherited_members = inherited_members_option(False)
with catch_warnings(record=True):
_assert_getter_works(app, directive, 'class', 'target.Class', ['meth'])
- directive.genopt['inherited_members'] = True
+ options.inherited_members = inherited_members_option(True)
with catch_warnings(record=True):
_assert_getter_works(
app, directive, 'class', 'target.inheritance.Derived', ['inheritedmeth']
diff --git a/tests/test_extensions/test_ext_autodoc_autoclass.py b/tests/test_extensions/test_ext_autodoc_autoclass.py
index 04499efd183..3cb062432a5 100644
--- a/tests/test_extensions/test_ext_autodoc_autoclass.py
+++ b/tests/test_extensions/test_ext_autodoc_autoclass.py
@@ -315,7 +315,7 @@ def autodoc_process_bases(app, name, obj, options, bases):
assert name == 'target.classes.Quux'
assert obj.__module__ == 'target.classes'
assert obj.__name__ == 'Quux'
- assert options == {'show-inheritance': True, 'members': []}
+ assert vars(options) == {'show_inheritance': True, 'members': []}
assert bases == [typing.List[typing.Union[int, float]]] # NoQA: UP006, UP007
bases.pop()
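
The new options type replaces dict-style access (``options['members']``) with
plain attributes, and its ``__getattr__`` makes every unset option read as
``None``. A rough standalone sketch of the idea (simplified; the real
``_AutoDocumenterOptions`` carries typed fields and more behaviour)::

    from typing import Any

    class Options:
        def __init__(self, **kwargs: Any) -> None:
            vars(self).update(kwargs)

        def __getattr__(self, name: str) -> Any:
            return None  # unset options read as None

        @classmethod
        def from_directive_options(cls, opts: dict[str, Any]) -> 'Options':
            # Directive options use hyphens; attribute names use underscores.
            return cls(**{
                k.replace('-', '_'): v for k, v in opts.items() if v is not None
            })

    opts = Options.from_directive_options({'undoc-members': True, 'members': None})
    print(opts.undoc_members)  # True
    print(opts.members)        # None (the raw value was None, so it was dropped)
    print(opts.synopsis)       # None (never set)
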
From e00e771b65c5960e57b9dd67599302f78127d0f9 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Tue, 29 Jul 2025 19:30:16 +0200
Subject: [PATCH 212/466] Test with Docutils 0.22 (#13784)
---
.github/workflows/main.yml | 2 ++
1 file changed, 2 insertions(+)
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index ca71534d3a6..834c18809d6 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -41,6 +41,7 @@ jobs:
docutils:
- "0.20"
- "0.21"
+ - "0.22"
# include:
# # test every supported Docutils version for the latest supported Python
# - python: "3.13"
@@ -93,6 +94,7 @@ jobs:
docutils:
- "0.20"
- "0.21"
+ - "0.22"
steps:
- uses: actions/checkout@v4
From 5d3bb2e3b7c47e4ecd540c657018f16b961c821b Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Tue, 29 Jul 2025 19:44:31 +0200
Subject: [PATCH 213/466] Support Docutils 0.22 (#13786)
---
CHANGES.rst | 4 ++++
doc/changes/7.3.rst | 2 +-
pyproject.toml | 2 +-
sphinx/transforms/__init__.py | 2 +-
sphinx/transforms/references.py | 2 +-
tests/test_markup/test_markup.py | 2 +-
6 files changed, 9 insertions(+), 5 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 7f405121bc3..b58b6722b3a 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -4,6 +4,10 @@ Release 8.3.0 (in development)
Dependencies
------------
+* #13786: Support `Docutils 0.22`_. Patch by Adam Turner.
+
+ .. _Docutils 0.22: https://docutils.sourceforge.io/RELEASE-NOTES.html#release-0-22-2025-07-29
+
Incompatible changes
--------------------
diff --git a/doc/changes/7.3.rst b/doc/changes/7.3.rst
index b544a722041..c9395c18c4a 100644
--- a/doc/changes/7.3.rst
+++ b/doc/changes/7.3.rst
@@ -86,7 +86,7 @@ Dependencies
* #11858: Increase the minimum supported version of Alabaster to 0.7.14.
Patch by Adam Turner.
-* #11411: Support `Docutils 0.21`_. Patch by Adam Turner.
+* #12267: Support `Docutils 0.21`_. Patch by Adam Turner.
.. _Docutils 0.21: https://docutils.sourceforge.io/RELEASE-NOTES.html#release-0-21-2024-04-09
* #12012: Use ``types-docutils`` instead of ``docutils-stubs``.
diff --git a/pyproject.toml b/pyproject.toml
index 75ae0f71a9d..d915f293f3b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -76,7 +76,7 @@ dependencies = [
"sphinxcontrib-serializinghtml>=1.1.9",
"Jinja2>=3.1",
"Pygments>=2.17",
- "docutils>=0.20,<0.22",
+ "docutils>=0.20,<0.23",
"snowballstemmer>=2.2",
"babel>=2.13",
"alabaster>=0.7.14",
diff --git a/sphinx/transforms/__init__.py b/sphinx/transforms/__init__.py
index 760a5e6a67d..3728093f093 100644
--- a/sphinx/transforms/__init__.py
+++ b/sphinx/transforms/__init__.py
@@ -372,7 +372,7 @@ def apply(self, **kwargs: Any) -> None:
# override default settings with :confval:`smartquotes_action`
self.smartquotes_action = self.config.smartquotes_action
- super().apply() # type: ignore[no-untyped-call]
+ super().apply()
def is_available(self) -> bool:
builders = self.config.smartquotes_excludes.get('builders', [])
diff --git a/sphinx/transforms/references.py b/sphinx/transforms/references.py
index 17380777997..8655dbfa5f3 100644
--- a/sphinx/transforms/references.py
+++ b/sphinx/transforms/references.py
@@ -25,7 +25,7 @@ def apply(self, **kwargs: Any) -> None:
# suppress INFO level messages for a while
reporter.report_level = max(reporter.WARNING_LEVEL, reporter.report_level)
- super().apply() # type: ignore[no-untyped-call]
+ super().apply()
finally:
reporter.report_level = report_level
diff --git a/tests/test_markup/test_markup.py b/tests/test_markup/test_markup.py
index f9da6038c7f..fb4df4c400b 100644
--- a/tests/test_markup/test_markup.py
+++ b/tests/test_markup/test_markup.py
@@ -62,7 +62,7 @@ def parse_(rst):
document = new_document()
parser = RstParser()
parser.parse(rst, document)
- SphinxSmartQuotes(document, startnode=None).apply() # type: ignore[no-untyped-call]
+ SphinxSmartQuotes(document, startnode=None).apply()
for msg in list(document.findall(nodes.system_message)):
if msg['level'] == 1:
msg.replace_self([])
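
With the dependency range widened to ``docutils>=0.20,<0.23``,
version-dependent behaviour is typically gated at runtime. A hedged sketch of
the usual pattern (``docutils.__version_info__`` exists in modern Docutils;
the branch bodies are placeholders)::

    import docutils

    if docutils.__version_info__[:2] >= (0, 22):
        ...  # behaviour for Docutils 0.22 and later
    else:
        ...  # fallback for Docutils 0.20 / 0.21
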
From 3fd98d868a5dfaba11117fb13ae4872b5b0194b5 Mon Sep 17 00:00:00 2001
From: Tim Hoffmann <2836374+timhoffm@users.noreply.github.com>
Date: Thu, 31 Jul 2025 10:59:13 +0200
Subject: [PATCH 214/466] Rename ``sphinx.ext.jsmath`` to
``sphinxcontrib.jsmath`` (#13788)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
---
doc/usage/extensions/math.rst | 23 +++++++++++++++++------
1 file changed, 17 insertions(+), 6 deletions(-)
diff --git a/doc/usage/extensions/math.rst b/doc/usage/extensions/math.rst
index 6fa8ab851f8..fb41d66d8fb 100644
--- a/doc/usage/extensions/math.rst
+++ b/doc/usage/extensions/math.rst
@@ -318,14 +318,25 @@ Sphinx but is set to automatically include it from a third-party site.
This has been renamed to :confval:`mathjax2_config`.
:confval:`mathjax_config` is still supported for backwards compatibility.
-:mod:`sphinx.ext.jsmath` -- Render math via JavaScript
-------------------------------------------------------
+:mod:`sphinxcontrib.jsmath` -- Render math via JavaScript
+---------------------------------------------------------
-.. module:: sphinx.ext.jsmath
+.. module:: sphinxcontrib.jsmath
:synopsis: Render math using JavaScript via JSMath.
This extension works just as the MathJax extension does, but uses the older
-package jsMath_. It provides this config value:
+package jsMath_. jsMath is no longer actively developed, but it has the
+advantage that its JavaScript package is much smaller than that of
+MathJax.
+
+.. versionadded:: 0.5
+ The :mod:`!sphinx.ext.jsmath` extension.
+.. versionchanged:: 2.0
+ :mod:`!sphinx.ext.jsmath` was moved to :mod:`sphinxcontrib.jsmath`.
+.. versionremoved:: 4.0
+ The alias from :mod:`!sphinx.ext.jsmath` to :mod:`sphinxcontrib.jsmath`.
+
+Config value:
.. confval:: jsmath_path
:type: :code-py:`str`
@@ -337,7 +348,7 @@ package jsMath_. It provides this config value:
The path can be absolute or relative; if it is relative, it is relative to
the ``_static`` directory of the built docs.
- For example, if you put JSMath into the static path of the Sphinx docs, this
+ For example, if you put jsMath into the static path of the Sphinx docs, this
value would be ``jsMath/easy/load.js``. If you host more than one
Sphinx documentation set on one server, it is advisable to install jsMath in
a shared location.
@@ -347,5 +358,5 @@ package jsMath_. It provides this config value:
.. _dvisvgm: https://dvisvgm.de/
.. _dvisvgm FAQ: https://dvisvgm.de/FAQ
.. _MathJax: https://www.mathjax.org/
-.. _jsMath: https://www.math.union.edu/~dpvc/jsmath/
+.. _jsMath: https://www.math.union.edu/~dpvc/jsMath/
.. _LaTeX preview package: https://www.gnu.org/software/auctex/preview-latex.html
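
For reference, enabling the extension documented above takes roughly this
``conf.py`` fragment (assuming the ``sphinxcontrib-jsmath`` package is
installed and jsMath is copied into the static path)::

    # conf.py
    extensions = ['sphinxcontrib.jsmath']

    # Relative to the _static directory of the built docs.
    jsmath_path = 'jsMath/easy/load.js'
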
From 99d3756afbd4f90f924eae40d3dea14b4c9fea3d Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 31 Jul 2025 10:08:56 +0100
Subject: [PATCH 215/466] Bump Ruff to 0.12.7 (#13787)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index d915f293f3b..7b2e61f7741 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -93,7 +93,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.12.5",
+ "ruff==0.12.7",
"mypy==1.17.0",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
@@ -136,7 +136,7 @@ docs = [
"sphinxcontrib-websupport",
]
lint = [
- "ruff==0.12.5",
+ "ruff==0.12.7",
"sphinx-lint>=0.9",
]
package = [
From c780dc467d5f7f27d5d380746a639eec07dd7ed2 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Thu, 31 Jul 2025 16:15:04 +0100
Subject: [PATCH 216/466] Remove mypy overrides for
``tests/test_builders/test_build_html_*.py`` (#13777)
---
pyproject.toml | 3 ---
tests/test_builders/test_build_html_assets.py | 2 +-
tests/test_builders/test_build_html_maths.py | 11 +++++++++--
tests/test_builders/test_build_html_toctree.py | 14 ++++++++++++--
4 files changed, 22 insertions(+), 8 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 7b2e61f7741..287776daa94 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -253,9 +253,6 @@ module = [
"tests.test_builders.test_build",
"tests.test_builders.test_build_html",
"tests.test_builders.test_build_html_5_output",
- "tests.test_builders.test_build_html_assets",
- "tests.test_builders.test_build_html_maths",
- "tests.test_builders.test_build_html_toctree",
"tests.test_builders.test_build_linkcheck",
# tests/test_directives
"tests.test_directives.test_directive_code",
diff --git a/tests/test_builders/test_build_html_assets.py b/tests/test_builders/test_build_html_assets.py
index 7478f41829d..9059b423224 100644
--- a/tests/test_builders/test_build_html_assets.py
+++ b/tests/test_builders/test_build_html_assets.py
@@ -68,7 +68,7 @@ def test_html_assets(app: SphinxTestApp) -> None:
@pytest.mark.sphinx('html', testroot='html_assets')
-def test_assets_order(app, monkeypatch):
+def test_assets_order(app: SphinxTestApp, monkeypatch: pytest.MonkeyPatch) -> None:
monkeypatch.setattr(sphinx.builders.html, '_file_checksum', lambda o, f: '')
app.add_css_file('normal.css')
diff --git a/tests/test_builders/test_build_html_maths.py b/tests/test_builders/test_build_html_maths.py
index cc21142b355..8654ca99604 100644
--- a/tests/test_builders/test_build_html_maths.py
+++ b/tests/test_builders/test_build_html_maths.py
@@ -8,6 +8,9 @@
from sphinx.errors import ConfigError
if TYPE_CHECKING:
+ from collections.abc import Callable
+
+ from sphinx.testing.fixtures import _app_params
from sphinx.testing.util import SphinxTestApp
@@ -42,7 +45,9 @@ def test_html_math_renderer_is_imgmath(app: SphinxTestApp) -> None:
testroot='basic',
confoverrides={'extensions': ['sphinxcontrib.jsmath', 'sphinx.ext.imgmath']},
)
-def test_html_math_renderer_is_duplicated(make_app, app_params):
+def test_html_math_renderer_is_duplicated(
+ make_app: Callable[..., SphinxTestApp], app_params: _app_params
+) -> None:
args, kwargs = app_params
with pytest.raises(
ConfigError,
@@ -83,7 +88,9 @@ def test_html_math_renderer_is_chosen(app: SphinxTestApp) -> None:
'html_math_renderer': 'imgmath',
},
)
-def test_html_math_renderer_is_mismatched(make_app, app_params):
+def test_html_math_renderer_is_mismatched(
+ make_app: Callable[..., SphinxTestApp], app_params: _app_params
+) -> None:
args, kwargs = app_params
with pytest.raises(
ConfigError,
diff --git a/tests/test_builders/test_build_html_toctree.py b/tests/test_builders/test_build_html_toctree.py
index 255a2001960..ae0dc04fc08 100644
--- a/tests/test_builders/test_build_html_toctree.py
+++ b/tests/test_builders/test_build_html_toctree.py
@@ -14,6 +14,10 @@
from tests.test_builders.xpath_util import check_xpath
if TYPE_CHECKING:
+ from collections.abc import Callable, Sequence
+ from pathlib import Path
+ from xml.etree.ElementTree import Element, ElementTree
+
from sphinx.testing.util import SphinxTestApp
@@ -71,7 +75,11 @@ def test_numbered_toctree(app: SphinxTestApp) -> None:
],
)
@pytest.mark.sphinx('singlehtml', testroot='toctree')
-def test_singlehtml_hyperlinks(app, cached_etree_parse, expect):
+def test_singlehtml_hyperlinks(
+ app: SphinxTestApp,
+ cached_etree_parse: Callable[[Path], ElementTree],
+ expect: tuple[str, str | Callable[[Sequence[Element]], None]],
+) -> None:
app.build()
check_xpath(cached_etree_parse(app.outdir / 'index.html'), 'index.html', *expect)
@@ -81,7 +89,9 @@ def test_singlehtml_hyperlinks(app, cached_etree_parse, expect):
testroot='toctree-multiple-parents',
confoverrides={'html_theme': 'alabaster'},
)
-def test_toctree_multiple_parents(app, cached_etree_parse):
+def test_toctree_multiple_parents(
+ app: SphinxTestApp, cached_etree_parse: Callable[[Path], ElementTree]
+) -> None:
# The lexicographically greatest parent of the document in global toctree
# should be chosen, regardless of the order in which files are read
with patch.object(app.builder, '_read_serial') as m:
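
The final hunk above patches ``_read_serial`` on the builder. In isolation,
``unittest.mock.patch.object`` swaps in a mock for the duration of the
``with`` block and restores the original afterwards; a minimal sketch with
illustrative names::

    from unittest.mock import patch

    class Builder:
        def _read_serial(self, docnames: list[str]) -> None:
            raise RuntimeError('should not run under the mock')

    builder = Builder()
    with patch.object(builder, '_read_serial') as m:
        builder._read_serial(['index'])  # intercepted by the mock
    m.assert_called_once_with(['index'])
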
From 36b057190146b1878c4d8906aa894b5e5dff228c Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Thu, 31 Jul 2025 18:58:26 +0100
Subject: [PATCH 217/466] Enable ``disallow_incomplete_defs`` for mypy (#13793)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
---
pyproject.toml | 1 -
tests/test_config/test_config.py | 2 +-
tests/test_domains/test_domain_c.py | 4 ++--
tests/test_domains/test_domain_cpp.py | 8 +++++---
tests/test_extensions/test_ext_apidoc.py | 2 +-
tests/test_extensions/test_ext_autodoc.py | 2 +-
tests/test_search.py | 2 +-
tests/test_util/test_util_nodes.py | 15 +++++++++++----
8 files changed, 22 insertions(+), 14 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 287776daa94..87a4a7d031b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -326,7 +326,6 @@ check_untyped_defs = false
disable_error_code = [
"annotation-unchecked",
]
-disallow_incomplete_defs = false
disallow_untyped_calls = false
disallow_untyped_defs = false
diff --git a/tests/test_config/test_config.py b/tests/test_config/test_config.py
index 857e1c28e34..3a7e58c146c 100644
--- a/tests/test_config/test_config.py
+++ b/tests/test_config/test_config.py
@@ -144,7 +144,7 @@ def test_config_not_found(tmp_path):
@pytest.mark.parametrize('protocol', list(range(pickle.HIGHEST_PROTOCOL)))
-def test_config_pickle_protocol(protocol: int):
+def test_config_pickle_protocol(protocol: int) -> None:
config = Config()
pickled_config = pickle.loads(pickle.dumps(config, protocol))
diff --git a/tests/test_domains/test_domain_c.py b/tests/test_domains/test_domain_c.py
index 23ee25ffa83..d83693c09dd 100644
--- a/tests/test_domains/test_domain_c.py
+++ b/tests/test_domains/test_domain_c.py
@@ -700,14 +700,14 @@ def test_extra_keywords() -> None:
# raise DefinitionError
-def split_warnings(warning: StringIO):
+def split_warnings(warning: StringIO) -> list[str]:
ws = warning.getvalue().split('\n')
assert len(ws) >= 1
assert ws[-1] == ''
return ws[:-1]
-def filter_warnings(warning: StringIO, file):
+def filter_warnings(warning: StringIO, file: str) -> list[str]:
lines = split_warnings(warning)
res = [
l
diff --git a/tests/test_domains/test_domain_cpp.py b/tests/test_domains/test_domain_cpp.py
index 88505a4aa91..2e88625a9fe 100644
--- a/tests/test_domains/test_domain_cpp.py
+++ b/tests/test_domains/test_domain_cpp.py
@@ -35,6 +35,8 @@
if TYPE_CHECKING:
from io import StringIO
+ from sphinx.domains.cpp._ast import ASTTemplateParamType
+
def parse(name, string):
class Config:
@@ -1515,8 +1517,8 @@ def test_domain_cpp_ast_xref_parsing() -> None:
('template class...', True),
],
)
-def test_domain_cpp_template_parameters_is_pack(param: str, is_pack: bool):
- def parse_template_parameter(param: str):
+def test_domain_cpp_template_parameters_is_pack(param: str, is_pack: bool) -> None:
+ def parse_template_parameter(param: str) -> ASTTemplateParamType:
ast = parse('type', 'template<' + param + '> X')
return ast.templatePrefix.templates[0].params[0]
@@ -1531,7 +1533,7 @@ def parse_template_parameter(param: str):
# raise DefinitionError
-def filter_warnings(warning: StringIO, file):
+def filter_warnings(warning: StringIO, file: str) -> list[str]:
lines = warning.getvalue().split('\n')
res = [
l
diff --git a/tests/test_extensions/test_ext_apidoc.py b/tests/test_extensions/test_ext_apidoc.py
index a9742337013..0052a4740bb 100644
--- a/tests/test_extensions/test_ext_apidoc.py
+++ b/tests/test_extensions/test_ext_apidoc.py
@@ -752,7 +752,7 @@ def test_no_duplicates(rootdir, tmp_path):
sphinx.ext.apidoc._generate.PY_SUFFIXES = original_suffixes
-def test_remove_old_files(tmp_path: Path):
+def test_remove_old_files(tmp_path: Path) -> None:
"""Test that old files are removed when using the -r option.
Also ensure that pre-existing files are not re-written, if unchanged.
diff --git a/tests/test_extensions/test_ext_autodoc.py b/tests/test_extensions/test_ext_autodoc.py
index 4761c5560e0..64f1d85f247 100644
--- a/tests/test_extensions/test_ext_autodoc.py
+++ b/tests/test_extensions/test_ext_autodoc.py
@@ -1629,7 +1629,7 @@ def _preamble(
return self._node('class', self.name, doc, args=args, indent=indent, **options)
@staticmethod
- def _preamble_args(functional_constructor: bool = False):
+ def _preamble_args(functional_constructor: bool = False) -> str:
"""EnumType.__call__() is a dual-purpose method:
* Look up an enum member (valid only if the enum has members)
diff --git a/tests/test_search.py b/tests/test_search.py
index 5ed753a2ea1..0dd4d643ef1 100644
--- a/tests/test_search.py
+++ b/tests/test_search.py
@@ -43,7 +43,7 @@ def __init__(self, version: str, domains: DummyDomainsContainer) -> None:
self.version = version
self.domains = domains
- def __getattr__(self, name: str):
+ def __getattr__(self, name: str) -> Any:
if name.startswith('_search_index_'):
setattr(self, name, {})
return getattr(self, name, {})
diff --git a/tests/test_util/test_util_nodes.py b/tests/test_util/test_util_nodes.py
index 39c43d6e88a..406e08c815c 100644
--- a/tests/test_util/test_util_nodes.py
+++ b/tests/test_util/test_util_nodes.py
@@ -21,10 +21,12 @@
)
if TYPE_CHECKING:
+ from collections.abc import Iterable
+
from docutils.nodes import document
-def _transform(doctree) -> None:
+def _transform(doctree: nodes.document) -> None:
ApplySourceWorkaround(doctree).apply()
@@ -42,7 +44,11 @@ def _get_doctree(text):
return document
-def assert_node_count(messages, node_type, expect_count) -> None:
+def assert_node_count(
+ messages: Iterable[tuple[nodes.Element, str]],
+ node_type: type[nodes.Node],
+ expect_count: int,
+) -> None:
count = 0
node_list = [node for node, msg in messages]
for node in node_list:
@@ -150,7 +156,7 @@ def test_NodeMatcher():
),
],
)
-def test_extract_messages(rst, node_cls, count):
+def test_extract_messages(rst: str, node_cls: type[nodes.Element], count: int) -> None:
msg = extract_messages(_get_doctree(dedent(rst)))
assert_node_count(msg, node_cls, count)
@@ -177,7 +183,8 @@ def test_extract_messages_without_rawsource() -> None:
assert next(m for n, m in extract_messages(document)), 'text sentence'
-def test_clean_astext():
+def test_clean_astext() -> None:
+ node: nodes.Element
node = nodes.paragraph(text='hello world')
assert clean_astext(node) == 'hello world'
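The annotation pattern applied above is mechanical but worth spelling out: test functions gain `-> None` so type checkers verify their bodies, helpers gain real return types, and annotation-only imports move under TYPE_CHECKING to avoid runtime cost. A minimal self-contained sketch of the same pattern (the `count_matching` helper and its test are hypothetical, not part of the patch):

from __future__ import annotations

from io import StringIO

TYPE_CHECKING = False
if TYPE_CHECKING:
    from collections.abc import Iterable  # annotation-only import, zero runtime cost


def split_warnings(warning: StringIO) -> list[str]:
    """Split captured warning output into lines, dropping the trailing blank."""
    ws = warning.getvalue().split('\n')
    assert ws[-1] == ''
    return ws[:-1]


def count_matching(lines: Iterable[str], needle: str) -> int:
    """Count lines containing *needle*; 'Iterable' resolves lazily (PEP 563)."""
    return sum(needle in line for line in lines)


def test_count_matching() -> None:  # '-> None' lets mypy check the body strictly
    buf = StringIO('WARNING: first\nWARNING: second\n')
    assert count_matching(split_warnings(buf), 'WARNING') == 2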
From 81507004599e4b7db80ed10aa4a25de68d73aef0 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 1 Aug 2025 14:59:09 +0100
Subject: [PATCH 218/466] Bump mypy to 1.17.1 (#13795)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 87a4a7d031b..fee4bb05aa6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -94,7 +94,7 @@ docs = [
]
lint = [
"ruff==0.12.7",
- "mypy==1.17.0",
+ "mypy==1.17.1",
"sphinx-lint>=0.9",
"types-colorama==0.4.15.20240311",
"types-defusedxml==0.7.0.20250708",
@@ -158,7 +158,7 @@ translations = [
"Jinja2>=3.1",
]
types = [
- "mypy==1.17.0",
+ "mypy==1.17.1",
"pyrefly",
"pyright==1.1.400",
{ include-group = "type-stubs" },
From 25eb929be2fec7edd497d97ef8699291adc8e32d Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 1 Aug 2025 14:59:23 +0100
Subject: [PATCH 219/466] Bump types-colorama to 0.4.15.20250801 (#13794)
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index fee4bb05aa6..37a89b74585 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -96,7 +96,7 @@ lint = [
"ruff==0.12.7",
"mypy==1.17.1",
"sphinx-lint>=0.9",
- "types-colorama==0.4.15.20240311",
+ "types-colorama==0.4.15.20250801",
"types-defusedxml==0.7.0.20250708",
"types-docutils==0.21.0.20250525",
"types-Pillow==10.2.0.20240822",
@@ -165,7 +165,7 @@ types = [
]
type-stubs = [
# align with versions used elsewhere
- "types-colorama==0.4.15.20240311",
+ "types-colorama==0.4.15.20250801",
"types-defusedxml==0.7.0.20250708",
"types-docutils==0.21.0.20250525",
"types-Pillow==10.2.0.20240822",
From 556a5f1095135657360cd45d76ec72db4b4611bf Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Fri, 1 Aug 2025 16:07:48 +0200
Subject: [PATCH 220/466] Hold properties for the documented objects in new
types (#13796)
---
sphinx/ext/autodoc/_documenters.py | 176 +++++++++++++++++++---
sphinx/ext/autodoc/_property_types.py | 113 ++++++++++++++
sphinx/ext/autodoc/importer.py | 8 -
tests/test_extensions/test_ext_autodoc.py | 11 ++
4 files changed, 278 insertions(+), 30 deletions(-)
create mode 100644 sphinx/ext/autodoc/_property_types.py
diff --git a/sphinx/ext/autodoc/_documenters.py b/sphinx/ext/autodoc/_documenters.py
index ca35448bc33..7a22ab65de5 100644
--- a/sphinx/ext/autodoc/_documenters.py
+++ b/sphinx/ext/autodoc/_documenters.py
@@ -5,6 +5,7 @@
import re
import sys
from inspect import Parameter, Signature
+from pathlib import Path
from typing import TYPE_CHECKING, NewType, TypeVar
from docutils.statemachine import StringList
@@ -20,6 +21,13 @@
member_order_option,
members_option,
)
+from sphinx.ext.autodoc._property_types import (
+ _AssignStatementProperties,
+ _ClassDefProperties,
+ _FunctionDefProperties,
+ _ItemProperties,
+ _ModuleProperties,
+)
from sphinx.ext.autodoc._sentinels import (
ALL,
INSTANCE_ATTR,
@@ -62,6 +70,7 @@
from sphinx.environment import BuildEnvironment, _CurrentDocument
from sphinx.events import EventManager
from sphinx.ext.autodoc._directive_options import _AutoDocumenterOptions
+ from sphinx.ext.autodoc._property_types import _AutodocFuncProperty
from sphinx.ext.autodoc.directive import DocumenterBridge
from sphinx.registry import SphinxComponentRegistry
from sphinx.util.typing import OptionSpec, _RestifyMode
@@ -148,6 +157,8 @@ class Documenter:
the Documenter.
"""
+ props: _ItemProperties
+
#: name by which the directive is called (auto...) and the default
#: generated directive name
objtype: ClassVar = 'object'
@@ -370,10 +381,51 @@ def import_object(self, raiseerror: bool = False) -> bool:
self.env.note_reread()
return False
- self.object = im.__dict__.pop('obj', None)
+ self.object = obj = im.__dict__.pop('obj', None)
for k in 'module', 'parent', 'object_name':
if hasattr(im, k):
setattr(self, k, getattr(im, k))
+
+ file_path = getattr(im.module, '__file__', None)
+ try:
+ mod_all = inspect.getall(im.module)
+ except ValueError:
+ mod_all = None
+ if self.objtype == 'module':
+ self.props = _ModuleProperties(
+ obj_type=self.objtype,
+ name=im.object_name,
+ module_name=getattr(im, 'modname', self.modname),
+ docstring_lines=(),
+ file_path=Path(file_path) if file_path is not None else None,
+ all=tuple(mod_all) if mod_all is not None else None,
+ _obj=obj,
+ )
+ elif self.objtype in {'function', 'decorator'}:
+ obj_properties: set[_AutodocFuncProperty] = set()
+ if inspect.isstaticmethod(obj, cls=im.parent, name=im.object_name):
+ obj_properties.add('staticmethod')
+ if inspect.isclassmethod(obj):
+ obj_properties.add('classmethod')
+ self.props = _FunctionDefProperties(
+ obj_type=self.objtype,
+ name=im.object_name,
+ module_name=self.modname,
+ parts=tuple(self.objpath),
+ docstring_lines=(),
+ properties=frozenset(obj_properties),
+ _obj=obj,
+ )
+ else:
+ self.props = _ItemProperties(
+ obj_type=self.objtype,
+ name=im.object_name,
+ module_name=self.modname,
+ parts=tuple(self.objpath),
+ docstring_lines=(),
+ _obj=obj,
+ )
+
return True
def get_real_modname(self) -> str:
@@ -1075,6 +1127,8 @@ def _generate(
class ModuleDocumenter(Documenter):
"""Specialized Documenter subclass for modules."""
+ props: _ModuleProperties
+
objtype = 'module'
content_indent = ''
_extra_indent = ' '
@@ -1258,6 +1312,8 @@ def keyfunc(entry: tuple[Documenter, bool]) -> int:
class FunctionDocumenter(Documenter):
"""Specialized Documenter subclass for functions."""
+ props: _FunctionDefProperties
+
__docstring_signature__ = True
objtype = 'function'
@@ -1410,6 +1466,8 @@ def dummy(): # type: ignore[no-untyped-def] # NoQA: ANN202
class DecoratorDocumenter(FunctionDocumenter):
"""Specialized Documenter subclass for decorator functions."""
+ props: _FunctionDefProperties
+
objtype = 'decorator'
# must be lower than FunctionDocumenter
@@ -1433,6 +1491,8 @@ class DecoratorDocumenter(FunctionDocumenter):
class ClassDocumenter(Documenter):
"""Specialized Documenter subclass for classes."""
+ props: _ClassDefProperties
+
__docstring_signature__ = True
objtype = 'class'
@@ -1460,8 +1520,6 @@ class ClassDocumenter(Documenter):
_signature_class: Any = None
_signature_method_name: str = ''
- doc_as_attr: bool
-
def __init__(self, *args: Any) -> None:
super().__init__(*args)
@@ -1500,10 +1558,22 @@ def import_object(self, raiseerror: bool = False) -> bool:
self.env.note_reread()
return False
- self.object = im.__dict__.pop('obj', None)
- for k in 'module', 'parent', 'object_name', 'doc_as_attr', 'objpath', 'modname':
+ self.object = obj = im.__dict__.pop('obj', None)
+ for k in 'module', 'parent', 'object_name', 'objpath', 'modname':
if hasattr(im, k):
setattr(self, k, getattr(im, k))
+
+ self.props = _ClassDefProperties(
+ obj_type=self.objtype, # type: ignore[arg-type]
+ name=im.object_name,
+ module_name=getattr(im, 'modname', self.modname),
+ parts=tuple(getattr(im, 'objpath', self.objpath)),
+ docstring_lines=(),
+ bases=getattr(obj, '__bases__', None),
+ _obj=obj,
+ _obj___name__=getattr(obj, '__name__', None),
+ )
+
return True
def _get_signature(self) -> tuple[Any | None, str | None, Signature | None]:
@@ -1645,7 +1715,7 @@ def _find_signature(self) -> tuple[str | None, str | None] | None:
return result
def format_signature(self, **kwargs: Any) -> str:
- if self.doc_as_attr:
+ if self.props.doc_as_attr:
return ''
if self.config.autodoc_class_signature == 'separated':
# do not show signatures
@@ -1716,7 +1786,7 @@ def get_canonical_fullname(self) -> str | None:
def add_directive_header(self, sig: str) -> None:
sourcename = self.get_sourcename()
- if self.doc_as_attr:
+ if self.props.doc_as_attr:
self.directivetype = 'attribute'
super().add_directive_header(sig)
@@ -1728,7 +1798,7 @@ def add_directive_header(self, sig: str) -> None:
canonical_fullname = self.get_canonical_fullname()
if (
- not self.doc_as_attr
+ not self.props.doc_as_attr
and not isinstance(self.object, NewType)
and canonical_fullname
and self.fullname != canonical_fullname
@@ -1736,7 +1806,7 @@ def add_directive_header(self, sig: str) -> None:
self.add_line(' :canonical: %s' % canonical_fullname, sourcename)
# add inheritance info, if wanted
- if not self.doc_as_attr and self.options.show_inheritance:
+ if not self.props.doc_as_attr and self.options.show_inheritance:
if inspect.getorigbases(self.object):
# A subclass of generic types
# refs: PEP-560
@@ -1791,7 +1861,7 @@ def get_doc(self) -> list[list[str]] | None:
if isinstance(self.object, TypeVar):
if self.object.__doc__ == TypeVar.__doc__:
return []
- if self.doc_as_attr:
+ if self.props.doc_as_attr:
# Don't show the docstring of the class when it is an alias.
if self.get_variable_comment():
return []
@@ -1857,7 +1927,7 @@ def get_doc(self) -> list[list[str]] | None:
def get_variable_comment(self) -> list[str] | None:
try:
key = ('', '.'.join(self.objpath))
- if self.doc_as_attr:
+ if self.props.doc_as_attr:
analyzer = ModuleAnalyzer.for_module(self.modname)
else:
analyzer = ModuleAnalyzer.for_module(self.get_real_modname())
@@ -1891,7 +1961,7 @@ def add_content(self, more_content: StringList | None) -> None:
more_content = StringList(
[_('alias of TypeVar(%s)') % ', '.join(attrs), ''], source=''
)
- if self.doc_as_attr and self.modname != self.get_real_modname():
+ if self.props.doc_as_attr and self.modname != self.get_real_modname():
try:
# override analyzer to obtain doccomment around its definition.
self.analyzer = ModuleAnalyzer.for_module(self.modname)
@@ -1899,7 +1969,7 @@ def add_content(self, more_content: StringList | None) -> None:
except PycodeError:
pass
- if self.doc_as_attr and not self.get_variable_comment():
+ if self.props.doc_as_attr and not self.get_variable_comment():
try:
alias = restify(self.object, mode=mode)
more_content = StringList([_('alias of %s') % alias], source='')
@@ -1909,7 +1979,7 @@ def add_content(self, more_content: StringList | None) -> None:
super().add_content(more_content)
def document_members(self, all_members: bool = False) -> None:
- if self.doc_as_attr:
+ if self.props.doc_as_attr:
return
super().document_members(all_members)
@@ -1935,6 +2005,8 @@ def generate(
class ExceptionDocumenter(ClassDocumenter):
"""Specialized ClassDocumenter subclass for exceptions."""
+ props: _ClassDefProperties
+
objtype = 'exception'
member_order = 10
@@ -1961,6 +2033,8 @@ def can_document_member(
class DataDocumenter(Documenter):
"""Specialized Documenter subclass for data items."""
+ props: _AssignStatementProperties
+
__uninitialized_global_variable__ = True
objtype = 'data'
@@ -2006,10 +2080,23 @@ def import_object(self, raiseerror: bool = False) -> bool:
self.env.note_reread()
return False
- self.object = im.__dict__.pop('obj', None)
+ self.object = obj = im.__dict__.pop('obj', None)
for k in 'module', 'parent', 'object_name':
if hasattr(im, k):
setattr(self, k, getattr(im, k))
+
+ self.props = _AssignStatementProperties(
+ obj_type=self.objtype, # type: ignore[arg-type]
+ name=im.object_name,
+ module_name=self.modname,
+ parts=tuple(self.objpath),
+ docstring_lines=(),
+ value=...,
+ annotation='',
+ class_var=False,
+ instance_var=False,
+ _obj=obj,
+ )
return True
def should_suppress_value_header(self) -> bool:
@@ -2110,6 +2197,8 @@ def add_content(self, more_content: StringList | None) -> None:
class MethodDocumenter(Documenter):
"""Specialized Documenter subclass for methods (normal, static and class)."""
+ props: _FunctionDefProperties
+
__docstring_signature__ = True
objtype = 'method'
@@ -2139,10 +2228,25 @@ def import_object(self, raiseerror: bool = False) -> bool:
self.env.note_reread()
return False
- self.object = im.__dict__.pop('obj', None)
+ self.object = obj = im.__dict__.pop('obj', None)
for k in 'module', 'parent', 'object_name', 'member_order':
if hasattr(im, k):
setattr(self, k, getattr(im, k))
+
+ obj_properties: set[_AutodocFuncProperty] = set()
+ if inspect.isstaticmethod(obj, cls=im.parent, name=im.object_name):
+ obj_properties.add('staticmethod')
+ if inspect.isclassmethod(obj):
+ obj_properties.add('classmethod')
+ self.props = _FunctionDefProperties(
+ obj_type=self.objtype, # type: ignore[arg-type]
+ name=im.object_name,
+ module_name=self.modname,
+ parts=tuple(self.objpath),
+ docstring_lines=(),
+ properties=frozenset(obj_properties),
+ _obj=obj,
+ )
return True
def format_args(self, **kwargs: Any) -> str:
@@ -2379,6 +2483,8 @@ def get_doc(self) -> list[list[str]] | None:
class AttributeDocumenter(Documenter):
"""Specialized Documenter subclass for attributes."""
+ props: _AssignStatementProperties
+
__docstring_signature__ = True
__docstring_strip_signature__ = True
@@ -2450,10 +2556,23 @@ def import_object(self, raiseerror: bool = False) -> bool:
self.env.note_reread()
return False
- self.object = im.__dict__.pop('obj', None)
+ self.object = obj = im.__dict__.pop('obj', None)
for k in 'module', 'parent', 'object_name':
if hasattr(im, k):
setattr(self, k, getattr(im, k))
+
+ self.props = _AssignStatementProperties(
+ obj_type=self.objtype, # type: ignore[arg-type]
+ name=im.object_name,
+ module_name=self.modname,
+ parts=tuple(self.objpath),
+ docstring_lines=(),
+ value=...,
+ annotation='',
+ class_var=False,
+ instance_var=False,
+ _obj=obj,
+ )
return True
@property
@@ -2600,6 +2719,8 @@ def add_content(self, more_content: StringList | None) -> None:
class PropertyDocumenter(Documenter):
"""Specialized Documenter subclass for properties."""
+ props: _FunctionDefProperties
+
__docstring_signature__ = True
__docstring_strip_signature__ = True
@@ -2609,9 +2730,6 @@ class PropertyDocumenter(Documenter):
# before AttributeDocumenter
priority = AttributeDocumenter.priority + 1
- # Support for class properties. Note: these only work on Python 3.9.
- isclassmethod: bool = False
-
@classmethod
def can_document_member(
cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
@@ -2643,10 +2761,23 @@ def import_object(self, raiseerror: bool = False) -> bool:
if im is None:
return False
- self.object = im.__dict__.pop('obj', None)
+ self.object = obj = im.__dict__.pop('obj', None)
for k in 'module', 'parent', 'object_name', 'isclassmethod':
if hasattr(im, k):
setattr(self, k, getattr(im, k))
+
+ obj_properties: set[_AutodocFuncProperty] = set()
+ if getattr(im, 'isclassmethod', False):
+ obj_properties.add('classmethod')
+ self.props = _FunctionDefProperties(
+ obj_type=self.objtype, # type: ignore[arg-type]
+ name=im.object_name,
+ module_name=self.modname,
+ parts=tuple(self.objpath),
+ docstring_lines=(),
+ properties=frozenset(obj_properties),
+ _obj=obj,
+ )
return True
def format_args(self, **kwargs: Any) -> str:
@@ -2671,7 +2802,8 @@ def add_directive_header(self, sig: str) -> None:
sourcename = self.get_sourcename()
if inspect.isabstractmethod(self.object):
self.add_line(' :abstractmethod:', sourcename)
- if self.isclassmethod:
+ # Support for class properties. Note: these only work on Python 3.9.
+ if self.props.is_classmethod:
self.add_line(' :classmethod:', sourcename)
func = self._get_property_getter()
diff --git a/sphinx/ext/autodoc/_property_types.py b/sphinx/ext/autodoc/_property_types.py
new file mode 100644
index 00000000000..57608f541bc
--- /dev/null
+++ b/sphinx/ext/autodoc/_property_types.py
@@ -0,0 +1,113 @@
+from __future__ import annotations
+
+import dataclasses
+
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+ from collections.abc import Sequence
+ from pathlib import Path
+ from typing import Any, Literal, TypeAlias
+
+ _AutodocObjType: TypeAlias = Literal[
+ 'module',
+ 'class',
+ 'exception',
+ 'function',
+ 'decorator',
+ 'method',
+ 'property',
+ 'attribute',
+ 'data',
+ ]
+ _AutodocFuncProperty: TypeAlias = Literal[
+ 'abstractmethod',
+ 'async',
+ 'classmethod',
+ 'final',
+ 'singledispatch',
+ 'staticmethod',
+ ]
+
+
+@dataclasses.dataclass(frozen=False, kw_only=True, slots=True)
+class _ItemProperties:
+ #: The kind of object being documented
+ obj_type: _AutodocObjType
+ #: The name of the item
+ name: str
+ #: The dotted module name
+ module_name: str
+ #: The fully-qualified name within the module
+ parts: tuple[str, ...]
+ #: This item's docstring, as a sequence of lines
+ docstring_lines: tuple[str, ...]
+
+ _obj: Any
+
+ # @property
+ # def name(self) -> str:
+ # return self.module_name.rpartition('.')[2]
+
+ @property
+ def full_name(self) -> str:
+ return '.'.join((self.module_name, *self.parts))
+
+ @property
+ def parent_names(self) -> tuple[str, ...]:
+ return self.parts[:-1]
+
+
+@dataclasses.dataclass(frozen=False, kw_only=True, slots=True)
+class _ModuleProperties(_ItemProperties):
+ obj_type: Literal['module'] = 'module'
+ parts: tuple[()] = () # modules have no parts
+
+ file_path: Path | None
+ all: Sequence[str] | None
+
+ # @property
+ # def name(self) -> str:
+ # return self.module_name.rpartition('.')[2]
+
+ @property
+ def parent_names(self) -> tuple[str, ...]:
+ return tuple(self.module_name.split('.')[:-1])
+
+
+@dataclasses.dataclass(frozen=False, kw_only=True, slots=True)
+class _ClassDefProperties(_ItemProperties):
+ obj_type: Literal['class', 'exception']
+
+ bases: Sequence[tuple[str, ...]] | None
+
+ _obj___name__: str | None
+
+ @property
+ def doc_as_attr(self) -> bool:
+ # if the class is documented under another name, document it
+ # as data/attribute
+ if self._obj___name__ is None:
+ return True
+ return self.parts[-1] != self._obj___name__
+
+
+@dataclasses.dataclass(frozen=False, kw_only=True, slots=True)
+class _FunctionDefProperties(_ItemProperties):
+ obj_type: Literal['function', 'method', 'property', 'decorator']
+
+ properties: frozenset[_AutodocFuncProperty]
+
+ @property
+ def is_classmethod(self) -> bool:
+ return 'classmethod' in self.properties
+
+
+@dataclasses.dataclass(frozen=False, kw_only=True, slots=True)
+class _AssignStatementProperties(_ItemProperties):
+ obj_type: Literal['attribute', 'data']
+
+ value: object
+ annotation: str
+
+ class_var: bool
+ instance_var: bool
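For reference, the new dataclasses above are keyword-only with slots; a minimal sketch of constructing one by hand (the 'pkg.Client.create' names are hypothetical, and _obj would normally hold the real imported object):

from sphinx.ext.autodoc._property_types import _FunctionDefProperties

props = _FunctionDefProperties(
    obj_type='method',
    name='create',
    module_name='pkg',
    parts=('Client', 'create'),
    docstring_lines=(),
    properties=frozenset({'classmethod'}),
    _obj=None,  # normally the imported callable
)
assert props.full_name == 'pkg.Client.create'
assert props.parent_names == ('Client',)
assert props.is_classmethod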
diff --git a/sphinx/ext/autodoc/importer.py b/sphinx/ext/autodoc/importer.py
index 88cbc230023..03c791b6f28 100644
--- a/sphinx/ext/autodoc/importer.py
+++ b/sphinx/ext/autodoc/importer.py
@@ -66,7 +66,6 @@ class _ImportedObject:
obj: Any
# ClassDocumenter
- doc_as_attr: bool
objpath: list[str]
modname: str
@@ -628,13 +627,6 @@ def _import_class(
get_attr=get_attr,
)
- # if the class is documented under another name, document it
- # as data/attribute
- if hasattr(im.obj, '__name__'):
- im.doc_as_attr = obj_path[-1] != im.obj.__name__
- else:
- im.doc_as_attr = True
-
if isinstance(im.obj, NewType | TypeVar):
obj_module_name = getattr(im.obj, '__module__', module_name)
if obj_module_name != module_name and module_name.startswith(obj_module_name):
diff --git a/tests/test_extensions/test_ext_autodoc.py b/tests/test_extensions/test_ext_autodoc.py
index 64f1d85f247..233fea3dc4d 100644
--- a/tests/test_extensions/test_ext_autodoc.py
+++ b/tests/test_extensions/test_ext_autodoc.py
@@ -22,6 +22,7 @@
inherited_members_option,
)
from sphinx.ext.autodoc._documenters import ModuleLevelDocumenter
+from sphinx.ext.autodoc._property_types import _ClassDefProperties
from sphinx.ext.autodoc._sentinels import ALL
# NEVER import these objects from sphinx.ext.autodoc directly
@@ -164,6 +165,16 @@ def formatsig(objtype, name, obj, args, retann):
inst.objpath = [name]
inst.args = args
inst.retann = retann
+ inst.props = _ClassDefProperties(
+ obj_type=inst.objtype,
+ name=name,
+ module_name=inst.modname,
+ parts=(name,),
+ docstring_lines=(),
+ bases=getattr(obj, '__bases__', None),
+ _obj=obj,
+ _obj___name__=name,
+ )
res = inst.format_signature()
print(res)
return res
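The net effect of this patch is that doc_as_attr is now derived from the stored __name__ rather than computed at import time. A sketch (hypothetical module and alias names) of the aliased-class case the property encodes:

from sphinx.ext.autodoc._property_types import _ClassDefProperties


class Foo:  # hypothetical: the class's __name__ is 'Foo'
    pass


Bar = Foo  # documented under the alias 'Bar'

props = _ClassDefProperties(
    obj_type='class',
    name='Bar',
    module_name='example',
    parts=('Bar',),
    docstring_lines=(),
    bases=Foo.__bases__,
    _obj=Bar,
    _obj___name__=Bar.__name__,  # 'Foo'
)
assert props.doc_as_attr  # 'Bar' != 'Foo': render as data/attribute, not class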
From 4bed65e18f14c98ce96d576bfd695e17cf7f18e4 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sat, 2 Aug 2025 02:01:21 +0200
Subject: [PATCH 221/466] Combine name resolution and importing (#13797)
---
sphinx/ext/autodoc/_documenters.py | 443 +++------------
sphinx/ext/autodoc/importer.py | 641 +++++++++++++++-------
tests/test_extensions/test_ext_autodoc.py | 117 ++--
3 files changed, 566 insertions(+), 635 deletions(-)
diff --git a/sphinx/ext/autodoc/_documenters.py b/sphinx/ext/autodoc/_documenters.py
index 7a22ab65de5..5b9a8fc849a 100644
--- a/sphinx/ext/autodoc/_documenters.py
+++ b/sphinx/ext/autodoc/_documenters.py
@@ -5,7 +5,6 @@
import re
import sys
from inspect import Parameter, Signature
-from pathlib import Path
from typing import TYPE_CHECKING, NewType, TypeVar
from docutils.statemachine import StringList
@@ -21,13 +20,6 @@
member_order_option,
members_option,
)
-from sphinx.ext.autodoc._property_types import (
- _AssignStatementProperties,
- _ClassDefProperties,
- _FunctionDefProperties,
- _ItemProperties,
- _ModuleProperties,
-)
from sphinx.ext.autodoc._sentinels import (
ALL,
INSTANCE_ATTR,
@@ -38,16 +30,12 @@
)
from sphinx.ext.autodoc.importer import (
_get_attribute_comment,
- _import_assignment_attribute,
- _import_assignment_data,
- _import_class,
- _import_method,
- _import_object,
- _import_property,
_is_runtime_instance_attribute_not_commented,
+ _load_object_by_name,
+ _resolve_name,
get_class_members,
)
-from sphinx.ext.autodoc.mock import ismock, mock, undecorate
+from sphinx.ext.autodoc.mock import ismock, undecorate
from sphinx.locale import _, __
from sphinx.pycode import ModuleAnalyzer
from sphinx.util import inspect, logging
@@ -70,7 +58,13 @@
from sphinx.environment import BuildEnvironment, _CurrentDocument
from sphinx.events import EventManager
from sphinx.ext.autodoc._directive_options import _AutoDocumenterOptions
- from sphinx.ext.autodoc._property_types import _AutodocFuncProperty
+ from sphinx.ext.autodoc._property_types import (
+ _AssignStatementProperties,
+ _ClassDefProperties,
+ _FunctionDefProperties,
+ _ItemProperties,
+ _ModuleProperties,
+ )
from sphinx.ext.autodoc.directive import DocumenterBridge
from sphinx.registry import SphinxComponentRegistry
from sphinx.util.typing import OptionSpec, _RestifyMode
@@ -230,6 +224,8 @@ def __init__(
# the module analyzer to get at attribute docs, or None
self.analyzer: ModuleAnalyzer | None = None
+ self._load_object_has_been_called = False
+
@property
def documenters(self) -> dict[str, type[Documenter]]:
"""Returns registered Documenter classes"""
@@ -242,6 +238,44 @@ def add_line(self, line: str, source: str, *lineno: int) -> None:
else:
self.directive.result.append('', source, *lineno)
+ def _load_object_by_name(self) -> Literal[True] | None:
+ if self._load_object_has_been_called:
+ return True
+
+ ret = _load_object_by_name(
+ name=self.name,
+ objtype=self.objtype,
+ mock_imports=self.config.autodoc_mock_imports,
+ type_aliases=self.config.autodoc_type_aliases,
+ current_document=self._current_document,
+ env=self.env,
+ get_attr=self.get_attr,
+ )
+ if ret is None:
+ return None
+ props, args, retann, module, parent = ret
+
+ self.props = props
+ self.args = args
+ self.retann = retann
+ self.modname = props.module_name
+ self.objpath = list(props.parts)
+ self.fullname = props.full_name
+ self.module = module
+ self.parent = parent
+ self.object_name = props.name
+ self.object = props._obj
+ if self.objtype == 'method':
+ if 'staticmethod' in props.properties: # type: ignore[attr-defined]
+ # document static members before regular methods
+ self.member_order -= 1 # type: ignore[misc]
+ elif 'classmethod' in props.properties: # type: ignore[attr-defined]
+ # document class methods before static methods as
+ # they usually behave as alternative constructors
+ self.member_order -= 2 # type: ignore[misc]
+ self._load_object_has_been_called = True
+ return True
+
def resolve_name(
self, modname: str | None, parents: Any, path: str, base: str
) -> tuple[str | None, list[str]]:
@@ -252,71 +286,19 @@ def resolve_name(
example, it would return ``('zipfile', ['ZipFile', 'open'])`` for the
``zipfile.ZipFile.open`` method.
"""
- if isinstance(self, ModuleDocumenter):
- if modname is not None:
- logger.warning(
- __('"::" in automodule name doesn\'t make sense'), type='autodoc'
- )
- return (path or '') + base, []
-
- if isinstance(
- self,
- (
- ModuleLevelDocumenter,
- FunctionDocumenter,
- ClassDocumenter,
- DataDocumenter,
- ),
- ):
- if modname is not None:
- return modname, [*parents, base]
- if path:
- modname = path.rstrip('.')
- return modname, [*parents, base]
-
- # if documenting a toplevel object without explicit module,
- # it can be contained in another auto directive ...
- modname = self._current_document.autodoc_module
- # ... or in the scope of a module directive
- if not modname:
- modname = self.env.ref_context.get('py:module')
- # ... else, it stays None, which means invalid
- return modname, [*parents, base]
-
- if isinstance(
- self,
- (
- ClassLevelDocumenter,
- MethodDocumenter,
- AttributeDocumenter,
- PropertyDocumenter,
- ),
- ):
- if modname is not None:
- return modname, [*parents, base]
-
- if path:
- mod_cls = path.rstrip('.')
- else:
- # if documenting a class-level object without path,
- # there must be a current class, either from a parent
- # auto directive ...
- mod_cls = self._current_document.autodoc_class
- # ... or from a class directive
- if not mod_cls:
- mod_cls = self.env.ref_context.get('py:class', '')
- # ... if still falsy, there's no way to know
- if not mod_cls:
- return None, []
- modname, _sep, cls = mod_cls.rpartition('.')
- parents = [cls]
- # if the module name is still missing, get it like above
- if not modname:
- modname = self._current_document.autodoc_module
- if not modname:
- modname = self.env.ref_context.get('py:module')
- # ... else, it stays None, which means invalid
- return modname, [*parents, base]
+ ret = _resolve_name(
+ objtype=self.objtype,
+ module_name=modname,
+ path=path,
+ base=base,
+ parents=parents,
+ current_document=self._current_document,
+ ref_context_py_module=self.env.ref_context.get('py:module'),
+ ref_context_py_class=self.env.ref_context.get('py:class', ''),
+ )
+ if ret is not None:
+ module_name, parts = ret
+ return module_name, list(parts)
msg = 'must be implemented in subclasses'
raise NotImplementedError(msg)
@@ -327,39 +309,7 @@ def parse_name(self) -> bool:
Returns True and sets *self.modname*, *self.objpath*, *self.fullname*,
*self.args* and *self.retann* if parsing and resolving was successful.
"""
- # first, parse the definition -- auto directives for classes and
- # functions can contain a signature which is then used instead of
- # an autogenerated one
- matched = py_ext_sig_re.match(self.name)
- if matched is None:
- logger.warning(
- __('invalid signature for auto%s (%r)'),
- self.objtype,
- self.name,
- type='autodoc',
- )
- return False
- explicit_modname, path, base, _tp_list, args, retann = matched.groups()
-
- # support explicit module and class name separation via ::
- if explicit_modname is not None:
- modname = explicit_modname[:-2]
- parents = path.rstrip('.').split('.') if path else []
- else:
- modname = None
- parents = []
-
- with mock(self.config.autodoc_mock_imports):
- modname, self.objpath = self.resolve_name(modname, parents, path, base)
-
- if not modname:
- return False
-
- self.modname = modname
- self.args = args
- self.retann = retann
- self.fullname = '.'.join((self.modname or '', *self.objpath))
- return True
+ return self._load_object_by_name() is not None
def import_object(self, raiseerror: bool = False) -> bool:
"""Import the object given by *self.modname* and *self.objpath* and set
@@ -367,66 +317,7 @@ def import_object(self, raiseerror: bool = False) -> bool:
Returns True if successful, False if an error occurred.
"""
- try:
- im = _import_object(
- module_name=self.modname,
- obj_path=self.objpath,
- mock_imports=self.config.autodoc_mock_imports,
- get_attr=self.get_attr,
- )
- except ImportError as exc:
- if raiseerror:
- raise
- logger.warning(exc.args[0], type='autodoc', subtype='import_object')
- self.env.note_reread()
- return False
-
- self.object = obj = im.__dict__.pop('obj', None)
- for k in 'module', 'parent', 'object_name':
- if hasattr(im, k):
- setattr(self, k, getattr(im, k))
-
- file_path = getattr(im.module, '__file__', None)
- try:
- mod_all = inspect.getall(im.module)
- except ValueError:
- mod_all = None
- if self.objtype == 'module':
- self.props = _ModuleProperties(
- obj_type=self.objtype,
- name=im.object_name,
- module_name=getattr(im, 'modname', self.modname),
- docstring_lines=(),
- file_path=Path(file_path) if file_path is not None else None,
- all=tuple(mod_all) if mod_all is not None else None,
- _obj=obj,
- )
- elif self.objtype in {'function', 'decorator'}:
- obj_properties: set[_AutodocFuncProperty] = set()
- if inspect.isstaticmethod(obj, cls=im.parent, name=im.object_name):
- obj_properties.add('staticmethod')
- if inspect.isclassmethod(obj):
- obj_properties.add('classmethod')
- self.props = _FunctionDefProperties(
- obj_type=self.objtype,
- name=im.object_name,
- module_name=self.modname,
- parts=tuple(self.objpath),
- docstring_lines=(),
- properties=frozenset(obj_properties),
- _obj=obj,
- )
- else:
- self.props = _ItemProperties(
- obj_type=self.objtype,
- name=im.object_name,
- module_name=self.modname,
- parts=tuple(self.objpath),
- docstring_lines=(),
- _obj=obj,
- )
-
- return True
+ return self._load_object_by_name() is not None
def get_real_modname(self) -> str:
"""Get the real module name of an object to document.
@@ -957,7 +848,7 @@ def document_members(self, all_members: bool = False) -> None:
member_documenters = [
(documenter, isattr)
for documenter, isattr in member_documenters
- if documenter.parse_name() and documenter.import_object()
+ if documenter._load_object_by_name() is not None
]
member_documenters = self.sort_members(member_documenters, member_order)
@@ -1017,21 +908,7 @@ def generate(
True, only generate if the object is defined in the module name it is
imported from. If *all_members* is True, document all members.
"""
- if not self.parse_name():
- # need a module to import
- logger.warning(
- __(
- "don't know which module to import for autodocumenting "
- '%r (try placing a "module" or "currentmodule" directive '
- 'in the document, or giving an explicit module name)'
- ),
- self.name,
- type='autodoc',
- )
- return
-
- # now, import the module and get object to document
- if not self.import_object():
+ if self._load_object_by_name() is None:
return
self._generate(more_content, real_modname, check_module, all_members)
@@ -1174,16 +1051,6 @@ def can_document_member(
# don't document submodules automatically
return False
- def parse_name(self) -> bool:
- ret = super().parse_name()
- if self.args or self.retann:
- logger.warning(
- __('signature arguments or return annotation given for automodule %s'),
- self.fullname,
- type='autodoc',
- )
- return ret
-
def _module_all(self) -> Sequence[str] | None:
if self.object is not None and self.__all__ is None:
try:
@@ -1543,39 +1410,6 @@ def can_document_member(
isattr and isinstance(member, NewType | TypeVar)
)
- def import_object(self, raiseerror: bool = False) -> bool:
- try:
- im = _import_class(
- module_name=self.modname,
- obj_path=self.objpath,
- mock_imports=self.config.autodoc_mock_imports,
- get_attr=self.get_attr,
- )
- except ImportError as exc:
- if raiseerror:
- raise
- logger.warning(exc.args[0], type='autodoc', subtype='import_object')
- self.env.note_reread()
- return False
-
- self.object = obj = im.__dict__.pop('obj', None)
- for k in 'module', 'parent', 'object_name', 'objpath', 'modname':
- if hasattr(im, k):
- setattr(self, k, getattr(im, k))
-
- self.props = _ClassDefProperties(
- obj_type=self.objtype, # type: ignore[arg-type]
- name=im.object_name,
- module_name=getattr(im, 'modname', self.modname),
- parts=tuple(getattr(im, 'objpath', self.objpath)),
- docstring_lines=(),
- bases=getattr(obj, '__bases__', None),
- _obj=obj,
- _obj___name__=getattr(obj, '__name__', None),
- )
-
- return True
-
def _get_signature(self) -> tuple[Any | None, str | None, Signature | None]:
if isinstance(self.object, NewType | TypeVar):
# Suppress signature
@@ -2064,41 +1898,6 @@ def update_annotations(self, parent: Any) -> None:
except PycodeError:
pass
- def import_object(self, raiseerror: bool = False) -> bool:
- try:
- im = _import_assignment_data(
- module_name=self.modname,
- obj_path=self.objpath,
- mock_imports=self.config.autodoc_mock_imports,
- type_aliases=self.config.autodoc_type_aliases,
- get_attr=self.get_attr,
- )
- except ImportError as exc:
- if raiseerror:
- raise
- logger.warning(exc.args[0], type='autodoc', subtype='import_object')
- self.env.note_reread()
- return False
-
- self.object = obj = im.__dict__.pop('obj', None)
- for k in 'module', 'parent', 'object_name':
- if hasattr(im, k):
- setattr(self, k, getattr(im, k))
-
- self.props = _AssignStatementProperties(
- obj_type=self.objtype, # type: ignore[arg-type]
- name=im.object_name,
- module_name=self.modname,
- parts=tuple(self.objpath),
- docstring_lines=(),
- value=...,
- annotation='',
- class_var=False,
- instance_var=False,
- _obj=obj,
- )
- return True
-
def should_suppress_value_header(self) -> bool:
if self.object is UNINITIALIZED_ATTR:
return True
@@ -2212,43 +2011,6 @@ def can_document_member(
) -> bool:
return inspect.isroutine(member) and not isinstance(parent, ModuleDocumenter)
- def import_object(self, raiseerror: bool = False) -> bool:
- try:
- im = _import_method(
- module_name=self.modname,
- obj_path=self.objpath,
- member_order=self.member_order,
- mock_imports=self.config.autodoc_mock_imports,
- get_attr=self.get_attr,
- )
- except ImportError as exc:
- if raiseerror:
- raise
- logger.warning(exc.args[0], type='autodoc', subtype='import_object')
- self.env.note_reread()
- return False
-
- self.object = obj = im.__dict__.pop('obj', None)
- for k in 'module', 'parent', 'object_name', 'member_order':
- if hasattr(im, k):
- setattr(self, k, getattr(im, k))
-
- obj_properties: set[_AutodocFuncProperty] = set()
- if inspect.isstaticmethod(obj, cls=im.parent, name=im.object_name):
- obj_properties.add('staticmethod')
- if inspect.isclassmethod(obj):
- obj_properties.add('classmethod')
- self.props = _FunctionDefProperties(
- obj_type=self.objtype, # type: ignore[arg-type]
- name=im.object_name,
- module_name=self.modname,
- parts=tuple(self.objpath),
- docstring_lines=(),
- properties=frozenset(obj_properties),
- _obj=obj,
- )
- return True
-
def format_args(self, **kwargs: Any) -> str:
if self.config.autodoc_typehints in {'none', 'description'}:
kwargs.setdefault('show_annotation', False)
@@ -2540,41 +2302,6 @@ def update_annotations(self, parent: Any) -> None:
# Failed to set __annotations__ (built-in, extensions, etc.)
pass
- def import_object(self, raiseerror: bool = False) -> bool:
- try:
- im = _import_assignment_attribute(
- module_name=self.modname,
- obj_path=self.objpath,
- mock_imports=self.config.autodoc_mock_imports,
- type_aliases=self.config.autodoc_type_aliases,
- get_attr=self.get_attr,
- )
- except ImportError as exc:
- if raiseerror:
- raise
- logger.warning(exc.args[0], type='autodoc', subtype='import_object')
- self.env.note_reread()
- return False
-
- self.object = obj = im.__dict__.pop('obj', None)
- for k in 'module', 'parent', 'object_name':
- if hasattr(im, k):
- setattr(self, k, getattr(im, k))
-
- self.props = _AssignStatementProperties(
- obj_type=self.objtype, # type: ignore[arg-type]
- name=im.object_name,
- module_name=self.modname,
- parts=tuple(self.objpath),
- docstring_lines=(),
- value=...,
- annotation='',
- class_var=False,
- instance_var=False,
- _obj=obj,
- )
- return True
-
@property
def _is_non_data_descriptor(self) -> bool:
return not inspect.isattributedescriptor(self.object)
@@ -2744,42 +2471,6 @@ def can_document_member(
else:
return False
- def import_object(self, raiseerror: bool = False) -> bool:
- try:
- im = _import_property(
- module_name=self.modname,
- obj_path=self.objpath,
- mock_imports=self.config.autodoc_mock_imports,
- get_attr=self.get_attr,
- )
- except ImportError as exc:
- if raiseerror:
- raise
- logger.warning(exc.args[0], type='autodoc', subtype='import_object')
- self.env.note_reread()
- return False
- if im is None:
- return False
-
- self.object = obj = im.__dict__.pop('obj', None)
- for k in 'module', 'parent', 'object_name', 'isclassmethod':
- if hasattr(im, k):
- setattr(self, k, getattr(im, k))
-
- obj_properties: set[_AutodocFuncProperty] = set()
- if getattr(im, 'isclassmethod', False):
- obj_properties.add('classmethod')
- self.props = _FunctionDefProperties(
- obj_type=self.objtype, # type: ignore[arg-type]
- name=im.object_name,
- module_name=self.modname,
- parts=tuple(self.objpath),
- docstring_lines=(),
- properties=frozenset(obj_properties),
- _obj=obj,
- )
- return True
-
def format_args(self, **kwargs: Any) -> str:
func = self._get_property_getter()
if func is None:
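After this change, parse_name() and import_object() are thin wrappers over the memoised _load_object_by_name(); only success is cached, so a failed load can be retried. A standalone sketch of that call-once shape (hypothetical names, not the real Documenter):

class _OnceLoader:
    def __init__(self) -> None:
        self._loaded = False

    def load(self) -> bool:
        if self._loaded:
            return True  # parse + import already succeeded; reuse the result
        if not self._do_load():
            return False  # failures are not cached, so callers may retry
        self._loaded = True
        return True

    def _do_load(self) -> bool:
        # stand-in for _parse_name() followed by _import_object()
        return True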
diff --git a/sphinx/ext/autodoc/importer.py b/sphinx/ext/autodoc/importer.py
index 03c791b6f28..0386a1c9f5b 100644
--- a/sphinx/ext/autodoc/importer.py
+++ b/sphinx/ext/autodoc/importer.py
@@ -16,6 +16,13 @@
from typing import TYPE_CHECKING, NamedTuple, NewType, TypeVar
from sphinx.errors import PycodeError
+from sphinx.ext.autodoc._property_types import (
+ _AssignStatementProperties,
+ _ClassDefProperties,
+ _FunctionDefProperties,
+ _ItemProperties,
+ _ModuleProperties,
+)
from sphinx.ext.autodoc._sentinels import (
INSTANCE_ATTR,
RUNTIME_INSTANCE_ATTRIBUTE,
@@ -23,6 +30,7 @@
UNINITIALIZED_ATTR,
)
from sphinx.ext.autodoc.mock import ismock, mock, undecorate
+from sphinx.locale import __
from sphinx.pycode import ModuleAnalyzer
from sphinx.util import inspect, logging
from sphinx.util.inspect import (
@@ -42,7 +50,9 @@
from types import ModuleType
from typing import Any, Protocol
+ from sphinx.environment import BuildEnvironment, _CurrentDocument
from sphinx.ext.autodoc import ObjectMember
+ from sphinx.ext.autodoc._property_types import _AutodocFuncProperty, _AutodocObjType
class _AttrGetter(Protocol):
def __call__(self, obj: Any, name: str, default: Any = ..., /) -> Any: ...
@@ -65,16 +75,6 @@ class _ImportedObject:
#: object to document
obj: Any
- # ClassDocumenter
- objpath: list[str]
- modname: str
-
- # MethodDocumenter
- member_order: int
-
- # PropertyDocumenter
- isclassmethod: bool
-
def __init__(
self,
*,
@@ -613,218 +613,70 @@ def _import_object(
raise
-def _import_class(
- *,
- module_name: str,
- obj_path: Sequence[str],
- mock_imports: list[str],
- get_attr: _AttrGetter = safe_getattr,
-) -> _ImportedObject:
- im = _import_object(
- module_name=module_name,
- obj_path=obj_path,
- mock_imports=mock_imports,
- get_attr=get_attr,
- )
-
- if isinstance(im.obj, NewType | TypeVar):
- obj_module_name = getattr(im.obj, '__module__', module_name)
- if obj_module_name != module_name and module_name.startswith(obj_module_name):
- bases = module_name[len(obj_module_name) :].strip('.').split('.')
- im.objpath = bases + list(obj_path)
- im.modname = obj_module_name
- return im
-
-
-def _import_method(
- *,
- module_name: str,
- obj_path: Sequence[str],
- member_order: int,
- mock_imports: list[str],
- get_attr: _AttrGetter = safe_getattr,
-) -> _ImportedObject:
- im = _import_object(
- module_name=module_name,
- obj_path=obj_path,
- mock_imports=mock_imports,
- get_attr=get_attr,
- )
-
- # to distinguish classmethod/staticmethod
- obj = im.parent.__dict__.get(im.object_name, im.obj)
- if inspect.isstaticmethod(obj, cls=im.parent, name=im.object_name):
- # document static members before regular methods
- im.member_order = member_order - 1
- elif inspect.isclassmethod(obj):
- # document class methods before static methods as
- # they usually behave as alternative constructors
- im.member_order = member_order - 2
- return im
-
-
-def _import_property(
- *,
- module_name: str,
- obj_path: Sequence[str],
- mock_imports: list[str],
- get_attr: _AttrGetter = safe_getattr,
-) -> _ImportedObject | None:
- im = _import_object(
- module_name=module_name,
- obj_path=obj_path,
- mock_imports=mock_imports,
- get_attr=get_attr,
- )
-
- if not inspect.isproperty(im.obj):
- # Support for class properties. Note: these only work on Python 3.9.
- __dict__ = safe_getattr(im.parent, '__dict__', {})
- obj = __dict__.get(obj_path[-1])
- if isinstance(obj, classmethod) and inspect.isproperty(obj.__func__):
- im.obj = obj.__func__
- im.isclassmethod = True
- return im
- else:
- return None
-
- return im
-
-
-def _import_assignment_data(
+def _import_data_declaration(
*,
module_name: str,
obj_path: Sequence[str],
mock_imports: list[str],
type_aliases: dict[str, Any] | None,
- get_attr: _AttrGetter = safe_getattr,
-) -> _ImportedObject:
- import_failed = True
+) -> _ImportedObject | None:
+ # annotation-only instance variable (PEP 526)
try:
with mock(mock_imports):
- im = _import_from_module_and_path(
- module_name=module_name, obj_path=obj_path, get_attr=get_attr
+ parent = import_module(module_name)
+ annotations = get_type_hints(parent, None, type_aliases, include_extras=True)
+ if obj_path[-1] in annotations:
+ im = _ImportedObject(
+ parent=parent,
+ obj=UNINITIALIZED_ATTR,
)
- if ismock(im.obj):
- im.obj = undecorate(im.obj)
- import_failed = False
- except ImportError as exc:
- # annotation only instance variable (PEP-526)
- try:
- with mock(mock_imports):
- parent = import_module(module_name)
- annotations = get_type_hints(
- parent, None, type_aliases, include_extras=True
- )
- if obj_path[-1] in annotations:
- im = _ImportedObject(
- parent=parent,
- obj=UNINITIALIZED_ATTR,
- )
- import_failed = False
- except ImportError:
- pass
-
- if import_failed:
- raise
-
- # Update __annotations__ to support type_comment and so on
- annotations = dict(inspect.getannotations(im.parent))
- im.parent.__annotations__ = annotations
-
- try:
- analyzer = ModuleAnalyzer.for_module(module_name)
- analyzer.analyze()
- for (classname, attrname), annotation in analyzer.annotations.items():
- if not classname and attrname not in annotations:
- annotations[attrname] = annotation
- except PycodeError:
+ return im
+ except ImportError:
pass
- return im
+ return None
-def _import_assignment_attribute(
+def _import_attribute_declaration(
*,
module_name: str,
obj_path: Sequence[str],
mock_imports: list[str],
type_aliases: dict[str, Any] | None,
get_attr: _AttrGetter = safe_getattr,
-) -> _ImportedObject:
- import_failed = True
+) -> _ImportedObject | None:
+ # Support runtime & uninitialized instance attributes.
+ #
+ # The former are defined in __init__() methods with doc-comments.
+ # The latter are PEP 526-style annotation-only attributes.
+ #
+ # class Foo:
+ # attr: int #: uninitialized attribute
+ #
+ # def __init__(self):
+ # self.attr = None #: runtime attribute
try:
with mock(mock_imports):
- im = _import_from_module_and_path(
- module_name=module_name, obj_path=obj_path, get_attr=get_attr
+ ret = _import_from_module_and_path(
+ module_name=module_name, obj_path=obj_path[:-1], get_attr=get_attr
)
- if ismock(im.obj):
- im.obj = undecorate(im.obj)
- import_failed = False
- except ImportError as exc:
- # Support runtime & uninitialized instance attributes.
- #
- # The former are defined in __init__() methods with doc-comments.
- # The latter are PEP-526 style annotation only annotations.
- #
- # class Foo:
- # attr: int #: uninitialized attribute
- #
- # def __init__(self):
- # self.attr = None #: runtime attribute
- try:
- with mock(mock_imports):
- ret = _import_from_module_and_path(
- module_name=module_name, obj_path=obj_path[:-1], get_attr=get_attr
- )
- parent = ret.obj
- if _is_runtime_instance_attribute(parent=parent, obj_path=obj_path):
- im = _ImportedObject(
- parent=parent,
- obj=RUNTIME_INSTANCE_ATTRIBUTE,
- )
- import_failed = False
- elif _is_uninitialized_instance_attribute(
- parent=parent, obj_path=obj_path, type_aliases=type_aliases
- ):
- im = _ImportedObject(
- parent=parent,
- obj=UNINITIALIZED_ATTR,
- )
- import_failed = False
- except ImportError:
- pass
-
- if import_failed:
- raise
-
- if _is_slots_attribute(parent=im.parent, obj_path=obj_path):
- im.obj = SLOTS_ATTR
- elif inspect.isenumattribute(im.obj):
- im.obj = im.obj.value
- if im.parent:
- # Update __annotations__ to support type_comment and so on.
- try:
- annotations = dict(inspect.getannotations(im.parent))
- im.parent.__annotations__ = annotations
-
- for cls in inspect.getmro(im.parent):
- try:
- module = safe_getattr(cls, '__module__')
- qualname = safe_getattr(cls, '__qualname__')
-
- analyzer = ModuleAnalyzer.for_module(module)
- analyzer.analyze()
- anns = analyzer.annotations
- for (classname, attrname), annotation in anns.items():
- if classname == qualname and attrname not in annotations:
- annotations[attrname] = annotation
- except (AttributeError, PycodeError):
- pass
- except (AttributeError, TypeError):
- # Failed to set __annotations__ (built-in, extensions, etc.)
- pass
-
- return im
+ parent = ret.obj
+ if _is_runtime_instance_attribute(parent=parent, obj_path=obj_path):
+ im = _ImportedObject(
+ parent=parent,
+ obj=RUNTIME_INSTANCE_ATTRIBUTE,
+ )
+ return im
+ elif _is_uninitialized_instance_attribute(
+ parent=parent, obj_path=obj_path, type_aliases=type_aliases
+ ):
+ im = _ImportedObject(
+ parent=parent,
+ obj=UNINITIALIZED_ATTR,
+ )
+ return im
+ except ImportError:
+ pass
+ return None
def _is_runtime_instance_attribute(*, parent: Any, obj_path: Sequence[str]) -> bool:
@@ -895,3 +747,388 @@ def _is_slots_attribute(*, parent: Any, obj_path: Sequence[str]) -> bool:
return False
except (ValueError, TypeError):
return False
+
+
+def _load_object_by_name(
+ *,
+ name: str,
+ objtype: _AutodocObjType,
+ mock_imports: list[str],
+ type_aliases: dict[str, Any] | None,
+ current_document: _CurrentDocument,
+ env: BuildEnvironment,
+ get_attr: _AttrGetter,
+) -> tuple[_ItemProperties, str | None, str | None, ModuleType | None, Any] | None:
+ """Import and load the object given by *name*."""
+ parsed = _parse_name(
+ name=name,
+ objtype=objtype,
+ current_document=current_document,
+ env=env,
+ )
+ if parsed is None:
+ return None
+ module_name, parts, args, retann = parsed
+
+ # Import the module and get the object to document
+ try:
+ im = _import_object(
+ module_name=module_name,
+ obj_path=parts,
+ mock_imports=mock_imports,
+ get_attr=get_attr,
+ )
+ except ImportError as exc:
+ if objtype == 'data':
+ im_ = _import_data_declaration(
+ module_name=module_name,
+ obj_path=parts,
+ mock_imports=mock_imports,
+ type_aliases=type_aliases,
+ )
+ elif objtype == 'attribute':
+ im_ = _import_attribute_declaration(
+ module_name=module_name,
+ obj_path=parts,
+ mock_imports=mock_imports,
+ type_aliases=type_aliases,
+ get_attr=get_attr,
+ )
+ else:
+ im_ = None
+ if im_ is None:
+ logger.warning(exc.args[0], type='autodoc', subtype='import_object')
+ env.note_reread()
+ return None
+ else:
+ im = im_
+
+ # Assemble object properties from the imported object.
+ props: _ItemProperties
+ module = im.module
+ parent = im.parent
+ object_name = im.object_name
+ obj = im.obj
+ obj_properties: set[_AutodocFuncProperty] = set()
+ if objtype == 'module':
+ file_path = getattr(module, '__file__', None)
+ try:
+ mod_all = inspect.getall(module)
+ except ValueError:
+ mod_all = None
+
+ props = _ModuleProperties(
+ obj_type=objtype,
+ name=object_name,
+ module_name=module_name,
+ docstring_lines=(),
+ file_path=Path(file_path) if file_path is not None else None,
+ all=tuple(mod_all) if mod_all is not None else None,
+ _obj=obj,
+ )
+ elif objtype in {'class', 'exception'}:
+ if isinstance(obj, NewType | TypeVar):
+ obj_module_name = getattr(obj, '__module__', module_name)
+ if obj_module_name != module_name and module_name.startswith(
+ obj_module_name
+ ):
+ bases = module_name[len(obj_module_name) :].strip('.').split('.')
+ parts = tuple(bases) + parts
+ module_name = obj_module_name
+
+ props = _ClassDefProperties(
+ obj_type=objtype, # type: ignore[arg-type]
+ name=object_name,
+ module_name=module_name,
+ parts=parts,
+ docstring_lines=(),
+ bases=getattr(obj, '__bases__', None),
+ _obj=obj,
+ _obj___name__=getattr(obj, '__name__', None),
+ )
+ elif objtype in {'function', 'decorator'}:
+ if inspect.isstaticmethod(obj, cls=parent, name=object_name):
+ obj_properties.add('staticmethod')
+ if inspect.isclassmethod(obj):
+ obj_properties.add('classmethod')
+
+ props = _FunctionDefProperties(
+ obj_type=objtype, # type: ignore[arg-type]
+ name=object_name,
+ module_name=module_name,
+ parts=parts,
+ docstring_lines=(),
+ properties=frozenset(obj_properties),
+ _obj=obj,
+ )
+ elif objtype == 'method':
+ # to distinguish classmethod/staticmethod
+ obj_ = parent.__dict__.get(object_name, obj)
+ if inspect.isstaticmethod(obj_, cls=parent, name=object_name):
+ obj_properties.add('staticmethod')
+ elif inspect.isclassmethod(obj_):
+ obj_properties.add('classmethod')
+
+ props = _FunctionDefProperties(
+ obj_type=objtype,
+ name=object_name,
+ module_name=module_name,
+ parts=parts,
+ docstring_lines=(),
+ properties=frozenset(obj_properties),
+ _obj=obj,
+ )
+ elif objtype == 'property':
+ if not inspect.isproperty(obj):
+ # Support for class properties. Note: these only work on Python 3.9.
+ __dict__ = safe_getattr(parent, '__dict__', {})
+ obj = __dict__.get(parts[-1])
+ if isinstance(obj, classmethod) and inspect.isproperty(obj.__func__):
+ obj = obj.__func__
+ obj_properties.add('classmethod')
+ else:
+ return None
+
+ props = _FunctionDefProperties(
+ obj_type=objtype,
+ name=object_name,
+ module_name=module_name,
+ parts=parts,
+ docstring_lines=(),
+ properties=frozenset(obj_properties),
+ _obj=obj,
+ )
+ elif objtype == 'data':
+ # Update __annotations__ to support type_comment and so on
+ annotations = dict(inspect.getannotations(parent))
+ parent.__annotations__ = annotations
+
+ try:
+ analyzer = ModuleAnalyzer.for_module(module_name)
+ analyzer.analyze()
+ for (
+ classname,
+ attrname,
+ ), annotation in analyzer.annotations.items():
+ if not classname and attrname not in annotations:
+ annotations[attrname] = annotation
+ except PycodeError:
+ pass
+
+ props = _AssignStatementProperties(
+ obj_type=objtype,
+ name=object_name,
+ module_name=module_name,
+ parts=parts,
+ docstring_lines=(),
+ value=...,
+ annotation='',
+ class_var=False,
+ instance_var=False,
+ _obj=obj,
+ )
+ elif objtype == 'attribute':
+ if _is_slots_attribute(parent=parent, obj_path=parts):
+ obj = SLOTS_ATTR
+ elif inspect.isenumattribute(obj):
+ obj = obj.value
+ if parent:
+ # Update __annotations__ to support type_comment and so on.
+ try:
+ annotations = dict(inspect.getannotations(parent))
+ parent.__annotations__ = annotations
+
+ for cls in inspect.getmro(parent):
+ try:
+ module = safe_getattr(cls, '__module__')
+ qualname = safe_getattr(cls, '__qualname__')
+
+ analyzer = ModuleAnalyzer.for_module(module)
+ analyzer.analyze()
+ anns = analyzer.annotations
+ for (classname, attrname), annotation in anns.items():
+ if classname == qualname and attrname not in annotations:
+ annotations[attrname] = annotation
+ except (AttributeError, PycodeError):
+ pass
+ except (AttributeError, TypeError):
+ # Failed to set __annotations__ (built-in, extensions, etc.)
+ pass
+
+ props = _AssignStatementProperties(
+ obj_type=objtype,
+ name=object_name,
+ module_name=module_name,
+ parts=parts,
+ docstring_lines=(),
+ value=...,
+ annotation='',
+ class_var=False,
+ instance_var=False,
+ _obj=obj,
+ )
+ else:
+ props = _ItemProperties(
+ obj_type=objtype,
+ name=object_name,
+ module_name=module_name,
+ parts=parts,
+ docstring_lines=(),
+ _obj=obj,
+ )
+
+ return props, args, retann, module, parent
+
+
+def _parse_name(
+ *,
+ name: str,
+ objtype: _AutodocObjType,
+ current_document: _CurrentDocument,
+ env: BuildEnvironment,
+) -> tuple[str, tuple[str, ...], str | None, str | None] | None:
+ """Parse *name* into module name, path, arguments, and return annotation."""
+ from sphinx.ext.autodoc._documenters import py_ext_sig_re
+
+ # Parse the definition in *name*.
+ # autodoc directives for classes and functions can contain a signature,
+ # which overrides the autogenerated one.
+ matched = py_ext_sig_re.match(name)
+ if matched is None:
+ logger.warning(
+ __('invalid signature for auto%s (%r)'),
+ objtype,
+ name,
+ type='autodoc',
+ )
+ # need a module to import
+ logger.warning(
+ __(
+ "don't know which module to import for autodocumenting "
+ '%r (try placing a "module" or "currentmodule" directive '
+ 'in the document, or giving an explicit module name)'
+ ),
+ name,
+ type='autodoc',
+ )
+ return None
+
+ explicit_modname, path, base, _tp_list, args, retann = matched.groups()
+
+ # Support explicit module and class name separation via ``::``
+ if explicit_modname is not None:
+ module_name = explicit_modname.removesuffix('::')
+ parents = path.rstrip('.').split('.') if path else ()
+ else:
+ module_name = None
+ parents = ()
+
+ resolved = _resolve_name(
+ objtype=objtype,
+ module_name=module_name,
+ path=path,
+ base=base,
+ parents=parents,
+ current_document=current_document,
+ ref_context_py_module=env.ref_context.get('py:module'),
+ ref_context_py_class=env.ref_context.get('py:class', ''),
+ )
+ if resolved is None:
+ msg = 'must be implemented in subclasses'
+ raise NotImplementedError(msg)
+ module_name, parts = resolved
+
+ if objtype == 'module' and args:
+ msg = __("signature arguments given for automodule: '%s'")
+ logger.warning(msg, name, type='autodoc')
+ return None
+ if objtype == 'module' and retann:
+ msg = __("return annotation given for automodule: '%s'")
+ logger.warning(msg, name, type='autodoc')
+ return None
+
+ if not module_name:
+ # Could not resolve a module to import
+ logger.warning(
+ __(
+ "don't know which module to import for autodocumenting "
+ '%r (try placing a "module" or "currentmodule" directive '
+ 'in the document, or giving an explicit module name)'
+ ),
+ name,
+ type='autodoc',
+ )
+ return None
+
+ return module_name, parts, args, retann
+
+
+def _resolve_name(
+ *,
+ objtype: str,
+ module_name: str | None,
+ path: str | None,
+ base: str,
+ parents: Sequence[str],
+ current_document: _CurrentDocument,
+ ref_context_py_module: str | None,
+ ref_context_py_class: str,
+) -> tuple[str | None, tuple[str, ...]] | None:
+ """Resolve the module and name of the object to document given by the
+ arguments and the current module/class.
+
+ Must return a pair of the module name and a chain of attributes; for
+ example, it would return ``('zipfile', ('ZipFile', 'open'))`` for the
+ ``zipfile.ZipFile.open`` method.
+ """
+ if objtype == 'module':
+ if module_name is not None:
+ logger.warning(
+ __('"::" in automodule name doesn\'t make sense'), type='autodoc'
+ )
+ return (path or '') + base, ()
+
+ if objtype in {'class', 'exception', 'function', 'decorator', 'data'}:
+ if module_name is not None:
+ return module_name, (*parents, base)
+ if path:
+ module_name = path.rstrip('.')
+ return module_name, (*parents, base)
+
+ # if documenting a toplevel object without explicit module,
+ # it can be contained in another auto directive ...
+ module_name = current_document.autodoc_module
+ # ... or in the scope of a module directive
+ if not module_name:
+ module_name = ref_context_py_module
+ # ... else, it stays None, which means invalid
+ return module_name, (*parents, base)
+
+ if objtype in {'method', 'property', 'attribute'}:
+ if module_name is not None:
+ return module_name, (*parents, base)
+
+ if path:
+ mod_cls = path.rstrip('.')
+ else:
+ # if documenting a class-level object without path,
+ # there must be a current class, either from a parent
+ # auto directive ...
+ mod_cls = current_document.autodoc_class
+ # ... or from a class directive
+ if not mod_cls:
+ mod_cls = ref_context_py_class
+ # ... if still falsy, there's no way to know
+ if not mod_cls:
+ return None, ()
+ module_name, _sep, cls = mod_cls.rpartition('.')
+ parents = [cls]
+ # if the module name is still missing, get it like above
+ if not module_name:
+ module_name = current_document.autodoc_module
+ if not module_name:
+ module_name = ref_context_py_module
+ # ... else, it stays None, which means invalid
+ return module_name, (*parents, base)
+
+ return None
diff --git a/tests/test_extensions/test_ext_autodoc.py b/tests/test_extensions/test_ext_autodoc.py
index 233fea3dc4d..79cd8affc07 100644
--- a/tests/test_extensions/test_ext_autodoc.py
+++ b/tests/test_extensions/test_ext_autodoc.py
@@ -27,6 +27,7 @@
# NEVER import these objects from sphinx.ext.autodoc directly
from sphinx.ext.autodoc.directive import DocumenterBridge
+from sphinx.ext.autodoc.importer import _parse_name
from tests.test_extensions.autodoc_util import do_autodoc
@@ -80,58 +81,54 @@ def make_directive_bridge(env: BuildEnvironment) -> DocumenterBridge:
@pytest.mark.sphinx('html', testroot='root')
def test_parse_name(app):
- def verify(objtype, name, result):
- inst = app.registry.documenters[objtype](directive, name)
- assert inst.parse_name()
- assert (inst.modname, inst.objpath, inst.args, inst.retann) == result
+ env = app.env
+ current_document = env.current_document
- directive = make_directive_bridge(app.env)
+ def parse(objtype, name):
+ parsed = _parse_name(
+ name=name, objtype=objtype, current_document=current_document, env=env
+ )
+ if parsed is None:
+ return None
+ module_name, parts, args, retann = parsed
+ return module_name, list(parts), args, retann
# for modules
- verify('module', 'test_ext_autodoc', ('test_ext_autodoc', [], None, None))
- verify('module', 'test.test_ext_autodoc', ('test.test_ext_autodoc', [], None, None))
- verify('module', 'test(arg)', ('test', [], 'arg', None))
+ parsed = parse('module', 'test_ext_autodoc')
+ assert parsed == ('test_ext_autodoc', [], None, None)
+ parsed = parse('module', 'test.test_ext_autodoc')
+ assert parsed == ('test.test_ext_autodoc', [], None, None)
+ parsed = parse('module', 'test(arg)')
+ assert parsed is None
assert 'signature arguments' in app.warning.getvalue()
# for functions/classes
- verify(
- 'function',
- 'test_ext_autodoc.raises',
- ('test_ext_autodoc', ['raises'], None, None),
- )
- verify(
- 'function',
- 'test_ext_autodoc.raises(exc) -> None',
- ('test_ext_autodoc', ['raises'], 'exc', 'None'),
- )
- directive.env.current_document.autodoc_module = 'test_ext_autodoc'
- verify('function', 'raises', ('test_ext_autodoc', ['raises'], None, None))
- directive.env.current_document.autodoc_module = ''
-
- directive.env.ref_context['py:module'] = 'test_ext_autodoc'
- verify('function', 'raises', ('test_ext_autodoc', ['raises'], None, None))
- verify('class', 'Base', ('test_ext_autodoc', ['Base'], None, None))
+ parsed = parse('function', 'test_ext_autodoc.raises')
+ assert parsed == ('test_ext_autodoc', ['raises'], None, None)
+ parsed = parse('function', 'test_ext_autodoc.raises(exc) -> None')
+ assert parsed == ('test_ext_autodoc', ['raises'], 'exc', 'None')
+ current_document.autodoc_module = 'test_ext_autodoc'
+ parsed = parse('function', 'raises')
+ assert parsed == ('test_ext_autodoc', ['raises'], None, None)
+ current_document.autodoc_module = ''
+
+ env.ref_context['py:module'] = 'test_ext_autodoc'
+ parsed = parse('function', 'raises')
+ assert parsed == ('test_ext_autodoc', ['raises'], None, None)
+ parsed = parse('class', 'Base')
+ assert parsed == ('test_ext_autodoc', ['Base'], None, None)
# for members
- directive.env.ref_context['py:module'] = 'sphinx.testing.util'
- verify(
- 'method',
- 'SphinxTestApp.cleanup',
- ('sphinx.testing.util', ['SphinxTestApp', 'cleanup'], None, None),
- )
- directive.env.ref_context['py:module'] = 'sphinx.testing.util'
- directive.env.ref_context['py:class'] = 'Foo'
- directive.env.current_document.autodoc_class = 'SphinxTestApp'
- verify(
- 'method',
- 'cleanup',
- ('sphinx.testing.util', ['SphinxTestApp', 'cleanup'], None, None),
- )
- verify(
- 'method',
- 'SphinxTestApp.cleanup',
- ('sphinx.testing.util', ['SphinxTestApp', 'cleanup'], None, None),
- )
+ env.ref_context['py:module'] = 'sphinx.testing.util'
+ parsed = parse('method', 'SphinxTestApp.cleanup')
+ assert parsed == ('sphinx.testing.util', ['SphinxTestApp', 'cleanup'], None, None)
+ env.ref_context['py:module'] = 'sphinx.testing.util'
+ env.ref_context['py:class'] = 'Foo'
+ current_document.autodoc_class = 'SphinxTestApp'
+ parsed = parse('method', 'cleanup')
+ assert parsed == ('sphinx.testing.util', ['SphinxTestApp', 'cleanup'], None, None)
+ parsed = parse('method', 'SphinxTestApp.cleanup')
+ assert parsed == ('sphinx.testing.util', ['SphinxTestApp', 'cleanup'], None, None)
@pytest.mark.sphinx('html', testroot='root')
@@ -431,24 +428,30 @@ def foo(self):
assert getdocl('function', J().foo) == ['Method docstring']
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_new_documenter(app):
- class MyDocumenter(ModuleLevelDocumenter):
- objtype = 'integer'
- directivetype = 'integer'
- priority = 100
+class _MyDocumenter(ModuleLevelDocumenter):
+ objtype = 'integer'
+ directivetype = 'integer'
+ priority = 100
- @classmethod
- def can_document_member(cls, member, membername, isattr, parent):
- return isinstance(member, int)
+ @classmethod
+ def can_document_member(cls, member, membername, isattr, parent):
+ return isinstance(member, int)
- def document_members(self, all_members=False):
- return
+ def document_members(self, all_members=False):
+ return
- app.add_autodocumenter(MyDocumenter)
+
+@pytest.mark.sphinx('html', testroot='ext-autodoc')
+def test_new_documenter(app):
+ app.add_autodocumenter(_MyDocumenter)
options = {'members': 'integer'}
- actual = do_autodoc(app, 'module', 'target', options)
+ with pytest.raises(
+ NotImplementedError, match=r'^must be implemented in subclasses$'
+ ):
+ # TODO: Fix! Perhaps add a way to signal module/class-level?
+ actual = do_autodoc(app, 'module', 'target', options)
+ return
assert list(actual) == [
'',
'.. py:module:: target',
From a1b80c795ba2ca9f2f2d5d763f8d1700a46273f7 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sat, 2 Aug 2025 02:22:49 +0200
Subject: [PATCH 222/466] Add ``object_name`` to ``_ItemProperties`` (#13798)
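The stored ``name`` field is dropped in favour of derived properties, and
``object_name`` is added so callers no longer reach for the documenter
attribute of the same name. A condensed sketch of the pattern (the real
dataclasses carry more fields)::

    import dataclasses

    _UNINITIALIZED = object()  # stand-in for the autodoc sentinels

    @dataclasses.dataclass
    class _Props:
        parts: tuple[str, ...]
        _obj: object

        @property
        def name(self) -> str:
            # the last dotted component, e.g. 'open' for ('ZipFile', 'open')
            return self.parts[-1]

        @property
        def object_name(self) -> str:
            # sentinel values have no importable runtime name
            return '' if self._obj is _UNINITIALIZED else self.name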
---
sphinx/ext/autodoc/_documenters.py | 2 +-
sphinx/ext/autodoc/_property_types.py | 31 +++++++++++++++++------
sphinx/ext/autodoc/importer.py | 8 ------
tests/test_extensions/test_ext_autodoc.py | 3 +--
4 files changed, 25 insertions(+), 19 deletions(-)
diff --git a/sphinx/ext/autodoc/_documenters.py b/sphinx/ext/autodoc/_documenters.py
index 5b9a8fc849a..768dd79f4fd 100644
--- a/sphinx/ext/autodoc/_documenters.py
+++ b/sphinx/ext/autodoc/_documenters.py
@@ -263,7 +263,7 @@ def _load_object_by_name(self) -> Literal[True] | None:
self.fullname = props.full_name
self.module = module
self.parent = parent
- self.object_name = props.name
+ self.object_name = props.object_name
self.object = props._obj
if self.objtype == 'method':
if 'staticmethod' in props.properties: # type: ignore[attr-defined]
diff --git a/sphinx/ext/autodoc/_property_types.py b/sphinx/ext/autodoc/_property_types.py
index 57608f541bc..2f349732353 100644
--- a/sphinx/ext/autodoc/_property_types.py
+++ b/sphinx/ext/autodoc/_property_types.py
@@ -2,6 +2,8 @@
import dataclasses
+from sphinx.ext.autodoc._sentinels import RUNTIME_INSTANCE_ATTRIBUTE, UNINITIALIZED_ATTR
+
TYPE_CHECKING = False
if TYPE_CHECKING:
from collections.abc import Sequence
@@ -33,8 +35,6 @@
class _ItemProperties:
#: The kind of object being documented
obj_type: _AutodocObjType
- #: The name of the item
- name: str
#: The dotted module name
module_name: str
#: The fully-qualified name within the module
@@ -44,9 +44,16 @@ class _ItemProperties:
_obj: Any
- # @property
- # def name(self) -> str:
- # return self.module_name.rpartition('.')[2]
+ @property
+ def name(self) -> str:
+ """The name of the item"""
+ return self.parts[-1]
+
+ @property
+ def object_name(self) -> str:
+ if self._obj is RUNTIME_INSTANCE_ATTRIBUTE or self._obj is UNINITIALIZED_ATTR:
+ return ''
+ return self.name
@property
def full_name(self) -> str:
@@ -65,9 +72,17 @@ class _ModuleProperties(_ItemProperties):
file_path: Path | None
all: Sequence[str] | None
- # @property
- # def name(self) -> str:
- # return self.module_name.rpartition('.')[2]
+ @property
+ def name(self) -> str:
+ return self.module_name.rpartition('.')[2]
+
+ @property
+ def object_name(self) -> str:
+ return ''
+
+ @property
+ def full_name(self) -> str:
+ return self.module_name
@property
def parent_names(self) -> tuple[str, ...]:
diff --git a/sphinx/ext/autodoc/importer.py b/sphinx/ext/autodoc/importer.py
index 0386a1c9f5b..63f2b34ef12 100644
--- a/sphinx/ext/autodoc/importer.py
+++ b/sphinx/ext/autodoc/importer.py
@@ -819,7 +819,6 @@ def _load_object_by_name(
props = _ModuleProperties(
obj_type=objtype,
- name=object_name,
module_name=module_name,
docstring_lines=(),
file_path=Path(file_path) if file_path is not None else None,
@@ -838,7 +837,6 @@ def _load_object_by_name(
props = _ClassDefProperties(
obj_type=objtype, # type: ignore[arg-type]
- name=object_name,
module_name=module_name,
parts=parts,
docstring_lines=(),
@@ -854,7 +852,6 @@ def _load_object_by_name(
props = _FunctionDefProperties(
obj_type=objtype, # type: ignore[arg-type]
- name=object_name,
module_name=module_name,
parts=parts,
docstring_lines=(),
@@ -871,7 +868,6 @@ def _load_object_by_name(
props = _FunctionDefProperties(
obj_type=objtype,
- name=object_name,
module_name=module_name,
parts=parts,
docstring_lines=(),
@@ -891,7 +887,6 @@ def _load_object_by_name(
props = _FunctionDefProperties(
obj_type=objtype,
- name=object_name,
module_name=module_name,
parts=parts,
docstring_lines=(),
@@ -917,7 +912,6 @@ def _load_object_by_name(
props = _AssignStatementProperties(
obj_type=objtype,
- name=object_name,
module_name=module_name,
parts=parts,
docstring_lines=(),
@@ -957,7 +951,6 @@ def _load_object_by_name(
props = _AssignStatementProperties(
obj_type=objtype,
- name=object_name,
module_name=module_name,
parts=parts,
docstring_lines=(),
@@ -970,7 +963,6 @@ def _load_object_by_name(
else:
props = _ItemProperties(
obj_type=objtype,
- name=object_name,
module_name=module_name,
parts=parts,
docstring_lines=(),
diff --git a/tests/test_extensions/test_ext_autodoc.py b/tests/test_extensions/test_ext_autodoc.py
index 79cd8affc07..5a018d63d5a 100644
--- a/tests/test_extensions/test_ext_autodoc.py
+++ b/tests/test_extensions/test_ext_autodoc.py
@@ -163,8 +163,7 @@ def formatsig(objtype, name, obj, args, retann):
inst.args = args
inst.retann = retann
inst.props = _ClassDefProperties(
- obj_type=inst.objtype,
- name=name,
+ obj_type=objtype,
module_name=inst.modname,
parts=(name,),
docstring_lines=(),
From 9a17e80fe0c880ed320e6cb005d6923f9a0b60d1 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Sat, 2 Aug 2025 02:23:41 +0100
Subject: [PATCH 223/466] Remove mypy overrides for
``tests/test_markup/test_parser.py`` (#13780)
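With the test module fully annotated, mypy checks it without a per-module
override. The pattern, shared with the sibling patches in this series, is
roughly (names illustrative)::

    from __future__ import annotations

    from typing import TYPE_CHECKING
    from unittest.mock import Mock

    if TYPE_CHECKING:  # imported for annotations only, not at runtime
        from sphinx.testing.util import SphinxTestApp

    def test_something(mocked: Mock, app: SphinxTestApp) -> None: ...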
---
pyproject.toml | 1 -
tests/test_markup/test_parser.py | 6 +++++-
2 files changed, 5 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 37a89b74585..fe07196cebe 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -267,7 +267,6 @@ module = [
"tests.test_extensions.test_ext_napoleon",
# tests/test_markup
"tests.test_markup.test_markup",
- "tests.test_markup.test_parser",
# tests/test_theming
"tests.test_theming.test_templating",
"tests.test_theming.test_theming",
diff --git a/tests/test_markup/test_parser.py b/tests/test_markup/test_parser.py
index 9a79373e6b7..215fdf04dd6 100644
--- a/tests/test_markup/test_parser.py
+++ b/tests/test_markup/test_parser.py
@@ -2,6 +2,7 @@
from __future__ import annotations
+from typing import TYPE_CHECKING
from unittest.mock import Mock, patch
import pytest
@@ -9,10 +10,13 @@
from sphinx.parsers import RSTParser
from sphinx.util.docutils import new_document
+if TYPE_CHECKING:
+ from sphinx.testing.util import SphinxTestApp
+
@pytest.mark.sphinx('html', testroot='basic')
@patch('docutils.parsers.rst.states.RSTStateMachine')
-def test_RSTParser_prolog_epilog(RSTStateMachine, app):
+def test_RSTParser_prolog_epilog(RSTStateMachine: Mock, app: SphinxTestApp) -> None:
document = new_document('dummy.rst')
document.settings = Mock(tab_width=8, language_code='')
parser = RSTParser()
From d74e54415b6f8239444cb5bde53586cb1cd5633e Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Sat, 2 Aug 2025 02:24:03 +0100
Subject: [PATCH 224/466] Remove mypy overrides for ``tests/test_project.py``
(#13791)
---
pyproject.toml | 1 -
tests/test_project.py | 12 ++++++------
2 files changed, 6 insertions(+), 7 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index fe07196cebe..ae33824604c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -247,7 +247,6 @@ ignore_missing_imports = true
[[tool.mypy.overrides]]
module = [
# tests/
- "tests.test_project",
"tests.test_versioning",
# tests/test_builders
"tests.test_builders.test_build",
diff --git a/tests/test_project.py b/tests/test_project.py
index 5c4fb5cbafb..6d638265ff9 100644
--- a/tests/test_project.py
+++ b/tests/test_project.py
@@ -32,13 +32,13 @@
SUBDIR_DOCNAMES = {'subdir/excluded', 'subdir/images', 'subdir/includes'}
-def test_project_discover_basic(rootdir):
+def test_project_discover_basic(rootdir: Path) -> None:
# basic case
project = Project(rootdir / 'test-root', ['.txt'])
assert project.discover() == DOCNAMES
-def test_project_discover_exclude_patterns(rootdir):
+def test_project_discover_exclude_patterns(rootdir: Path) -> None:
project = Project(rootdir / 'test-root', ['.txt'])
# exclude_paths option
@@ -46,19 +46,19 @@ def test_project_discover_exclude_patterns(rootdir):
assert project.discover(['.txt', 'subdir/*']) == DOCNAMES - SUBDIR_DOCNAMES
-def test_project_discover_multiple_suffixes(rootdir):
+def test_project_discover_multiple_suffixes(rootdir: Path) -> None:
# multiple source_suffixes
project = Project(rootdir / 'test-root', ['.txt', '.foo'])
assert project.discover() == DOCNAMES | {'otherext'}
-def test_project_discover_complicated_suffix(rootdir):
+def test_project_discover_complicated_suffix(rootdir: Path) -> None:
# complicated source_suffix
project = Project(rootdir / 'test-root', ['.foo.png'])
assert project.discover() == {'img'}
-def test_project_discover_templates_path(rootdir):
+def test_project_discover_templates_path(rootdir: Path) -> None:
# templates_path
project = Project(rootdir / 'test-root', ['.html'])
assert project.discover() == {
@@ -70,7 +70,7 @@ def test_project_discover_templates_path(rootdir):
assert project.discover(['_templates']) == set()
-def test_project_path2doc(rootdir):
+def test_project_path2doc(rootdir: Path) -> None:
project = Project(rootdir / 'test-basic', {'.rst': 'restructuredtext'})
assert project.path2doc('index.rst') == 'index'
assert project.path2doc('index.foo') is None # unknown extension
From ab5d43c0ba7ac426d7fa6de9dd9bd56adfeef196 Mon Sep 17 00:00:00 2001
From: Adam Dangoor
Date: Sat, 2 Aug 2025 02:24:32 +0100
Subject: [PATCH 225/466] Remove mypy overrides for
``tests/test_environment/test_environment.py`` (#13792)
---
pyproject.toml | 2 --
tests/test_environment/test_environment.py | 7 ++++++-
2 files changed, 6 insertions(+), 3 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index ae33824604c..7174e562255 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -256,8 +256,6 @@ module = [
# tests/test_directives
"tests.test_directives.test_directive_code",
"tests.test_directives.test_directives_no_typesetting",
- # tests/test_environment
- "tests.test_environment.test_environment",
# tests/test_extensions
"tests.test_extensions.test_ext_autodoc_autoclass",
"tests.test_extensions.test_ext_autosummary_imports",
diff --git a/tests/test_environment/test_environment.py b/tests/test_environment/test_environment.py
index 872d0b857ce..08e0abdd61b 100644
--- a/tests/test_environment/test_environment.py
+++ b/tests/test_environment/test_environment.py
@@ -21,11 +21,16 @@
)
if TYPE_CHECKING:
+ from collections.abc import Callable
+
+ from sphinx.testing.fixtures import _app_params
from sphinx.testing.util import SphinxTestApp
@pytest.mark.sphinx('dummy', testroot='basic', copy_test_root=True)
-def test_config_status(make_app, app_params):
+def test_config_status(
+ make_app: Callable[..., SphinxTestApp], app_params: _app_params
+) -> None:
args, kwargs = app_params
# clean build
From 7715d6f50e6d9559787a1b10553fbc5b95aa03cf Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sat, 2 Aug 2025 03:25:02 +0200
Subject: [PATCH 226/466] Store object ``__module__`` for
``get_real_modname()`` (#13799)
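Each Documenter previously re-derived the defining module on demand via
``self.get_attr(self.object, '__module__', None) or self.modname``; the
value is now captured once at import time and stored on the properties
object as ``_obj___module__``. The fallback logic, as a small sketch::

    def _real_modname(obj: object, module_name: str) -> str:
        # prefer the module where obj was defined; fall back to the
        # module it was imported through
        return getattr(obj, '__module__', None) or module_name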
---
sphinx/ext/autodoc/_documenters.py | 24 ++++++++---------------
sphinx/ext/autodoc/_property_types.py | 1 +
sphinx/ext/autodoc/importer.py | 8 ++++++++
tests/test_extensions/test_ext_autodoc.py | 1 +
4 files changed, 18 insertions(+), 16 deletions(-)
diff --git a/sphinx/ext/autodoc/_documenters.py b/sphinx/ext/autodoc/_documenters.py
index 768dd79f4fd..476bf3e35bc 100644
--- a/sphinx/ext/autodoc/_documenters.py
+++ b/sphinx/ext/autodoc/_documenters.py
@@ -325,7 +325,7 @@ def get_real_modname(self) -> str:
It can differ from the name of the module through which the object was
imported.
"""
- return self.get_attr(self.object, '__module__', None) or self.modname
+ return self.props._obj___module__ or self.props.module_name
def check_module(self) -> bool:
"""Check if *self.object* is really defined in the module given by
@@ -925,7 +925,7 @@ def _generate(
# where the attribute documentation would actually be found.
# This is used for situations where you have a module that collects the
# functions and classes of internal submodules.
- guess_modname = self.get_real_modname()
+ guess_modname = self.props._obj___module__ or self.props.module_name
self.real_modname: str = real_modname or guess_modname
# try to also get a source code analyzer for attribute docs
@@ -1764,7 +1764,9 @@ def get_variable_comment(self) -> list[str] | None:
if self.props.doc_as_attr:
analyzer = ModuleAnalyzer.for_module(self.modname)
else:
- analyzer = ModuleAnalyzer.for_module(self.get_real_modname())
+ analyzer = ModuleAnalyzer.for_module(
+ self.props._obj___module__ or self.props.module_name
+ )
analyzer.analyze()
return list(analyzer.attr_docs.get(key, []))
except PycodeError:
@@ -1795,7 +1797,9 @@ def add_content(self, more_content: StringList | None) -> None:
more_content = StringList(
[_('alias of TypeVar(%s)') % ', '.join(attrs), ''], source=''
)
- if self.props.doc_as_attr and self.modname != self.get_real_modname():
+ if self.props.doc_as_attr and self.modname != (
+ self.props._obj___module__ or self.props.module_name
+ ):
try:
# override analyzer to obtain doccomment around its definition.
self.analyzer = ModuleAnalyzer.for_module(self.modname)
@@ -1953,10 +1957,6 @@ def add_directive_header(self, sig: str) -> None:
def document_members(self, all_members: bool = False) -> None:
pass
- def get_real_modname(self) -> str:
- real_modname = self.get_attr(self.parent or self.object, '__module__', None)
- return real_modname or self.modname
-
def get_module_comment(self, attrname: str) -> list[str] | None:
try:
analyzer = ModuleAnalyzer.for_module(self.modname)
@@ -2306,10 +2306,6 @@ def update_annotations(self, parent: Any) -> None:
def _is_non_data_descriptor(self) -> bool:
return not inspect.isattributedescriptor(self.object)
- def get_real_modname(self) -> str:
- real_modname = self.get_attr(self.parent or self.object, '__module__', None)
- return real_modname or self.modname
-
def should_suppress_value_header(self) -> bool:
if self.object is SLOTS_ATTR:
return True
@@ -2484,10 +2480,6 @@ def format_args(self, **kwargs: Any) -> str:
def document_members(self, all_members: bool = False) -> None:
pass
- def get_real_modname(self) -> str:
- real_modname = self.get_attr(self.parent or self.object, '__module__', None)
- return real_modname or self.modname
-
def add_directive_header(self, sig: str) -> None:
super().add_directive_header(sig)
sourcename = self.get_sourcename()
diff --git a/sphinx/ext/autodoc/_property_types.py b/sphinx/ext/autodoc/_property_types.py
index 2f349732353..47fabc1358c 100644
--- a/sphinx/ext/autodoc/_property_types.py
+++ b/sphinx/ext/autodoc/_property_types.py
@@ -43,6 +43,7 @@ class _ItemProperties:
docstring_lines: tuple[str, ...]
_obj: Any
+ _obj___module__: str | None
@property
def name(self) -> str:
diff --git a/sphinx/ext/autodoc/importer.py b/sphinx/ext/autodoc/importer.py
index 63f2b34ef12..697bba5bb24 100644
--- a/sphinx/ext/autodoc/importer.py
+++ b/sphinx/ext/autodoc/importer.py
@@ -824,6 +824,7 @@ def _load_object_by_name(
file_path=Path(file_path) if file_path is not None else None,
all=tuple(mod_all) if mod_all is not None else None,
_obj=obj,
+ _obj___module__=obj.__name__,
)
elif objtype in {'class', 'exception'}:
if isinstance(obj, NewType | TypeVar):
@@ -842,6 +843,7 @@ def _load_object_by_name(
docstring_lines=(),
bases=getattr(obj, '__bases__', None),
_obj=obj,
+ _obj___module__=get_attr(obj, '__module__', None),
_obj___name__=getattr(obj, '__name__', None),
)
elif objtype in {'function', 'decorator'}:
@@ -857,6 +859,7 @@ def _load_object_by_name(
docstring_lines=(),
properties=frozenset(obj_properties),
_obj=obj,
+ _obj___module__=get_attr(obj, '__module__', None),
)
elif objtype == 'method':
# to distinguish classmethod/staticmethod
@@ -873,6 +876,7 @@ def _load_object_by_name(
docstring_lines=(),
properties=frozenset(obj_properties),
_obj=obj,
+ _obj___module__=get_attr(obj, '__module__', None),
)
elif objtype == 'property':
if not inspect.isproperty(obj):
@@ -892,6 +896,7 @@ def _load_object_by_name(
docstring_lines=(),
properties=frozenset(obj_properties),
_obj=obj,
+ _obj___module__=get_attr(parent or obj, '__module__', None) or module_name,
)
elif objtype == 'data':
# Update __annotations__ to support type_comment and so on
@@ -920,6 +925,7 @@ def _load_object_by_name(
class_var=False,
instance_var=False,
_obj=obj,
+ _obj___module__=get_attr(parent or obj, '__module__', None) or module_name,
)
elif objtype == 'attribute':
if _is_slots_attribute(parent=parent, obj_path=parts):
@@ -959,6 +965,7 @@ def _load_object_by_name(
class_var=False,
instance_var=False,
_obj=obj,
+ _obj___module__=get_attr(obj, '__module__', None),
)
else:
props = _ItemProperties(
@@ -967,6 +974,7 @@ def _load_object_by_name(
parts=parts,
docstring_lines=(),
_obj=obj,
+ _obj___module__=get_attr(obj, '__module__', None),
)
return props, args, retann, module, parent
diff --git a/tests/test_extensions/test_ext_autodoc.py b/tests/test_extensions/test_ext_autodoc.py
index 5a018d63d5a..d23ec9926df 100644
--- a/tests/test_extensions/test_ext_autodoc.py
+++ b/tests/test_extensions/test_ext_autodoc.py
@@ -169,6 +169,7 @@ def formatsig(objtype, name, obj, args, retann):
docstring_lines=(),
bases=getattr(obj, '__bases__', None),
_obj=obj,
+ _obj___module__=getattr(obj, '__module__', None),
_obj___name__=name,
)
res = inst.format_signature()
From 3dddb1c53aa86b72febf1ab9db7c683beab3aa07 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sat, 2 Aug 2025 15:37:55 +0200
Subject: [PATCH 227/466] Remove project extras (optional dependencies)
(#13445)
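The ``docs``, ``lint``, and ``test`` extras are removed from
``[project.optional-dependencies]``; the equivalent requirements live in
the ``[dependency-groups]`` table (PEP 735), so the documented
installation commands change from ``pip install .[test]`` to
``pip install . --group test`` (which requires a pip release with
``--group`` support).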
---
doc/internals/contributing.rst | 2 +-
doc/usage/installation.rst | 2 +-
pyproject.toml | 29 -----------------------------
3 files changed, 2 insertions(+), 31 deletions(-)
diff --git a/doc/internals/contributing.rst b/doc/internals/contributing.rst
index 83b23fb388e..90d7600866d 100644
--- a/doc/internals/contributing.rst
+++ b/doc/internals/contributing.rst
@@ -205,7 +205,7 @@ You can also test by installing dependencies in your local environment:
.. code-block:: shell
- pip install .[test]
+ pip install . --group test
To run JavaScript tests, use :program:`npm`:
diff --git a/doc/usage/installation.rst b/doc/usage/installation.rst
index 8b0aca1cab3..27adf3ab676 100644
--- a/doc/usage/installation.rst
+++ b/doc/usage/installation.rst
@@ -56,7 +56,7 @@ Run the following command::
Or, if writing documentation for a Python package,
place the dependencies in the `pyproject.toml file`__::
- $ pip install .[docs]
+ $ pip install . --group docs
__ https://pip.pypa.io/en/stable/reference/requirements-file-format/
__ https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#dependencies-optional-dependencies
diff --git a/pyproject.toml b/pyproject.toml
index 7174e562255..ef6b6804857 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -88,35 +88,6 @@ dependencies = [
]
dynamic = ["version"]
-[project.optional-dependencies]
-docs = [
- "sphinxcontrib-websupport",
-]
-lint = [
- "ruff==0.12.7",
- "mypy==1.17.1",
- "sphinx-lint>=0.9",
- "types-colorama==0.4.15.20250801",
- "types-defusedxml==0.7.0.20250708",
- "types-docutils==0.21.0.20250525",
- "types-Pillow==10.2.0.20240822",
- "types-Pygments==2.19.0.20250715",
- "types-requests==2.32.4.20250611", # align with requests
- "types-urllib3==1.26.25.14",
- "pyright==1.1.400",
- "pytest>=8.0",
- "pypi-attestations==0.0.27",
- "betterproto==2.0.0b6",
-]
-test = [
- "pytest>=8.0",
- "pytest-xdist[psutil]>=3.4",
- "defusedxml>=0.7.1", # for secure XML/HTML parsing
- "cython>=3.0",
- "setuptools>=70.0", # for Cython compilation
- "typing_extensions>=4.9", # for typing_extensions.Unpack
-]
-
[[project.authors]]
name = "Adam Turner"
email = "aa-turner@users.noreply.github.com"
From 3fc158804ccca39bbc159abc5e46729b088f26f0 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sun, 3 Aug 2025 18:11:20 +0200
Subject: [PATCH 228/466] Prefer using attributes on ``self.props`` in
Documenter classes (#13801)
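Reads of the mirrored ``self.modname``, ``self.objpath``, and
``self.fullname`` attributes are routed through ``self.props``, with a
new ``dotted_parts`` helper for the common join. A sketch of the helper
and its edge cases::

    import dataclasses

    @dataclasses.dataclass
    class _Props:
        parts: tuple[str, ...]

        @property
        def dotted_parts(self) -> str:
            return '.'.join(self.parts)

    assert _Props(parts=('Cls', 'meth')).dotted_parts == 'Cls.meth'
    assert _Props(parts=()).dotted_parts == ''  # modules have no parts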
---
sphinx/ext/autodoc/_documenters.py | 199 +++++++++++-----------
sphinx/ext/autodoc/_property_types.py | 4 +
tests/test_extensions/test_ext_autodoc.py | 31 +++-
3 files changed, 128 insertions(+), 106 deletions(-)
diff --git a/sphinx/ext/autodoc/_documenters.py b/sphinx/ext/autodoc/_documenters.py
index 476bf3e35bc..fed158ba44c 100644
--- a/sphinx/ext/autodoc/_documenters.py
+++ b/sphinx/ext/autodoc/_documenters.py
@@ -336,7 +336,7 @@ def check_module(self) -> bool:
subject = inspect.unpartial(self.object)
modname = self.get_attr(subject, '__module__', None)
- return not modname or modname == self.modname
+ return not modname or modname == self.props.module_name
def format_args(self, **kwargs: Any) -> str:
"""Format the argument signature of *self.object*.
@@ -354,7 +354,7 @@ def format_name(self) -> str:
"""
# normally the name doesn't contain the module (except for module
# directives of course)
- return '.'.join(self.objpath) or self.modname
+ return self.props.dotted_parts or self.props.module_name
def _call_format_args(self, **kwargs: Any) -> str:
if kwargs:
@@ -369,7 +369,7 @@ def _call_format_args(self, **kwargs: Any) -> str:
def _find_signature(self) -> tuple[str | None, str | None] | None:
# candidates of the object name
- valid_names = [self.objpath[-1]]
+ valid_names = [self.props.parts[-1]]
if isinstance(self, ClassDocumenter):
valid_names.append('__init__')
if hasattr(self.object, '__mro__'):
@@ -456,18 +456,14 @@ def format_signature(self, **kwargs: Any) -> str:
args = matched.group(1)
retann = matched.group(2)
except Exception as exc:
- logger.warning(
- __('error while formatting arguments for %s: %s'),
- self.fullname,
- exc,
- type='autodoc',
- )
+ msg = __('error while formatting arguments for %s: %s')
+ logger.warning(msg, self.props.full_name, exc, type='autodoc')
args = None
result = self._events.emit_firstresult(
'autodoc-process-signature',
self.objtype,
- self.fullname,
+ self.props.full_name,
self.object,
self.options,
args,
@@ -506,10 +502,10 @@ def add_directive_header(self, sig: str) -> None:
self.add_line(' :no-index:', sourcename)
if self.options.no_index_entry:
self.add_line(' :no-index-entry:', sourcename)
- if self.objpath:
+ if self.props.parts:
# Be explicit about the module, this is necessary since .. class::
# etc. don't support a prepended module name
- self.add_line(' :module: %s' % self.modname, sourcename)
+ self.add_line(' :module: %s' % self.props.module_name, sourcename)
def get_doc(self) -> list[list[str]] | None:
"""Decode and return lines of the docstring(s) for the object.
@@ -528,7 +524,7 @@ def get_doc(self) -> list[list[str]] | None:
self.get_attr,
self.config.autodoc_inherit_docstrings,
self.parent,
- self.object_name,
+ self.props.object_name,
)
if docstring:
tab_width = self.directive.state.document.settings.tab_width
@@ -543,7 +539,7 @@ def process_doc(self, docstrings: list[list[str]]) -> Iterator[str]:
self._events.emit(
'autodoc-process-docstring',
self.objtype,
- self.fullname,
+ self.props.full_name,
self.object,
self.options,
docstringlines,
@@ -563,7 +559,7 @@ def get_sourcename(self) -> str:
# to support inherited methods
fullname = f'{self.object.__module__}.{self.object.__qualname__}'
else:
- fullname = self.fullname
+ fullname = self.props.full_name
if self.analyzer:
return f'{self.analyzer.srcname}:docstring of {fullname}'
@@ -578,8 +574,8 @@ def add_content(self, more_content: StringList | None) -> None:
sourcename = self.get_sourcename()
if self.analyzer:
attr_docs = self.analyzer.find_attr_docs()
- if self.objpath:
- key = ('.'.join(self.objpath[:-1]), self.objpath[-1])
+ if self.props.parts:
+ key = ('.'.join(self.props.parts[:-1]), self.props.parts[-1])
if key in attr_docs:
docstring = False
# make a copy of docstring for attributes to avoid cache
@@ -665,7 +661,7 @@ def is_filtered_inherited_member(name: str, obj: Any) -> bool:
ret = []
# search for members in source code too
- namespace = '.'.join(self.objpath) # will be empty for modules
+ namespace = self.props.dotted_parts # will be empty for modules
if self.analyzer:
attr_docs = self.analyzer.find_attr_docs()
@@ -812,9 +808,9 @@ def document_members(self, all_members: bool = False) -> None:
*self.options.members*.
"""
# set current namespace for finding members
- self._current_document.autodoc_module = self.modname
- if self.objpath:
- self._current_document.autodoc_class = self.objpath[0]
+ self._current_document.autodoc_module = self.props.module_name
+ if self.props.parts:
+ self._current_document.autodoc_class = self.props.parts[0]
want_all = bool(
all_members or self.options.inherited_members or self.options.members is ALL
@@ -837,7 +833,8 @@ def document_members(self, all_members: bool = False) -> None:
classes.sort(key=lambda cls: cls.priority)
# give explicitly separated module name, so that members
# of inner classes can be documented
- full_mname = f'{self.modname}::' + '.'.join((*self.objpath, mname))
+ module_prefix = f'{self.props.module_name}::'
+ full_mname = module_prefix + '.'.join((*self.props.parts, mname))
documenter = classes[-1](self.directive, full_mname, self.indent)
member_documenters.append((documenter, isattr))
@@ -979,12 +976,8 @@ def _generate(
try:
sig = self.format_signature()
except Exception as exc:
- logger.warning(
- __('error while formatting signature for %s: %s'),
- self.fullname,
- exc,
- type='autodoc',
- )
+ msg = __('error while formatting signature for %s: %s')
+ logger.warning(msg, self.props.full_name, exc, type='autodoc')
return
# generate the directive header and options, if applicable
@@ -1058,15 +1051,11 @@ def _module_all(self) -> Sequence[str] | None:
self.__all__ = inspect.getall(self.object)
except ValueError as exc:
# invalid __all__ found.
- logger.warning(
- __(
- '__all__ should be a list of strings, not %r '
- '(in module %s) -- ignoring __all__'
- ),
- exc.args[0],
- self.fullname,
- type='autodoc',
+ msg = __(
+ '__all__ should be a list of strings, not %r '
+ '(in module %s) -- ignoring __all__'
)
+ logger.warning(msg, exc.args[0], self.props.full_name, type='autodoc')
return self.__all__
@@ -1212,9 +1201,8 @@ def format_args(self, **kwargs: Any) -> str:
)
args = stringify_signature(sig, **kwargs)
except TypeError as exc:
- logger.warning(
- __('Failed to get a function signature for %s: %s'), self.fullname, exc
- )
+ msg = __('Failed to get a function signature for %s: %s')
+ logger.warning(msg, self.props.full_name, exc)
return ''
except ValueError:
args = ''
@@ -1249,7 +1237,7 @@ def format_signature(self, **kwargs: Any) -> str:
sigs = []
if (
self.analyzer
- and '.'.join(self.objpath) in self.analyzer.overloads
+ and self.props.dotted_parts in self.analyzer.overloads
and self.config.autodoc_typehints != 'none'
):
# Use signatures for overloaded functions instead of the implementation function.
@@ -1260,6 +1248,8 @@ def format_signature(self, **kwargs: Any) -> str:
sigs.append(sig)
if inspect.is_singledispatch_function(self.object):
+ from sphinx.ext.autodoc._property_types import _FunctionDefProperties
+
# append signature of singledispatch'ed functions
for typ, func in self.object.registry.items():
if typ is object:
@@ -1268,15 +1258,23 @@ def format_signature(self, **kwargs: Any) -> str:
dispatchfunc = self.annotate_to_first_argument(func, typ)
if dispatchfunc:
documenter = FunctionDocumenter(self.directive, '')
+ documenter.props = _FunctionDefProperties(
+ obj_type='function',
+ module_name='',
+ parts=('',),
+ docstring_lines=(),
+ _obj=dispatchfunc,
+ _obj___module__=None,
+ properties=frozenset(),
+ )
documenter.object = dispatchfunc
- documenter.objpath = ['']
sigs.append(documenter.format_signature())
if overloaded and self.analyzer is not None:
actual = inspect.signature(
self.object, type_aliases=self.config.autodoc_type_aliases
)
__globals__ = safe_getattr(self.object, '__globals__', {})
- for overload in self.analyzer.overloads['.'.join(self.objpath)]:
+ for overload in self.analyzer.overloads[self.props.dotted_parts]:
overload = self.merge_default_value(actual, overload)
overload = evaluate_signature(
overload, __globals__, self.config.autodoc_type_aliases
@@ -1304,9 +1302,8 @@ def annotate_to_first_argument(
try:
sig = inspect.signature(func, type_aliases=self.config.autodoc_type_aliases)
except TypeError as exc:
- logger.warning(
- __('Failed to get a function signature for %s: %s'), self.fullname, exc
- )
+ msg = __('Failed to get a function signature for %s: %s')
+ logger.warning(msg, self.props.full_name, exc)
return None
except ValueError:
return None
@@ -1521,11 +1518,8 @@ def format_args(self, **kwargs: Any) -> str:
self._signature_class, _signature_method_name, sig = self._get_signature()
except TypeError as exc:
# __signature__ attribute contained junk
- logger.warning(
- __('Failed to get a constructor signature for %s: %s'),
- self.fullname,
- exc,
- )
+ msg = __('Failed to get a constructor signature for %s: %s')
+ logger.warning(msg, self.props.full_name, exc)
return ''
self._signature_method_name = _signature_method_name or ''
@@ -1604,7 +1598,7 @@ def get_overloaded_signatures(self) -> list[Signature]:
return []
def get_canonical_fullname(self) -> str | None:
- __modname__ = safe_getattr(self.object, '__module__', self.modname)
+ __modname__ = safe_getattr(self.object, '__module__', self.props.module_name)
__qualname__ = safe_getattr(self.object, '__qualname__', None)
if __qualname__ is None:
__qualname__ = safe_getattr(self.object, '__name__', None)
@@ -1627,7 +1621,7 @@ def add_directive_header(self, sig: str) -> None:
if isinstance(self.object, NewType | TypeVar):
return
- if self.analyzer and '.'.join(self.objpath) in self.analyzer.finals:
+ if self.analyzer and self.props.dotted_parts in self.analyzer.finals:
self.add_line(' :final:', sourcename)
canonical_fullname = self.get_canonical_fullname()
@@ -1635,7 +1629,7 @@ def add_directive_header(self, sig: str) -> None:
not self.props.doc_as_attr
and not isinstance(self.object, NewType)
and canonical_fullname
- and self.fullname != canonical_fullname
+ and self.props.full_name != canonical_fullname
):
self.add_line(' :canonical: %s' % canonical_fullname, sourcename)
@@ -1652,7 +1646,11 @@ def add_directive_header(self, sig: str) -> None:
bases = []
self._events.emit(
- 'autodoc-process-bases', self.fullname, self.object, self.options, bases
+ 'autodoc-process-bases',
+ self.props.full_name,
+ self.object,
+ self.options,
+ bases,
)
mode = _get_render_mode(self.config.autodoc_typehints_format)
@@ -1665,7 +1663,7 @@ def add_directive_header(self, sig: str) -> None:
def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
members = get_class_members(
self.object,
- self.objpath,
+ self.props.parts,
self.get_attr,
self.config.autodoc_inherit_docstrings,
)
@@ -1679,12 +1677,8 @@ def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
if name in members:
selected.append(members[name])
else:
- logger.warning(
- __('missing attribute %s in object %s'),
- name,
- self.fullname,
- type='autodoc',
- )
+ msg = __('missing attribute %s in object %s')
+ logger.warning(msg, name, self.props.full_name, type='autodoc')
return False, selected
elif self.options.inherited_members:
return False, list(members.values())
@@ -1760,9 +1754,9 @@ def get_doc(self) -> list[list[str]] | None:
def get_variable_comment(self) -> list[str] | None:
try:
- key = ('', '.'.join(self.objpath))
+ key = ('', self.props.dotted_parts)
if self.props.doc_as_attr:
- analyzer = ModuleAnalyzer.for_module(self.modname)
+ analyzer = ModuleAnalyzer.for_module(self.props.module_name)
else:
analyzer = ModuleAnalyzer.for_module(
self.props._obj___module__ or self.props.module_name
@@ -1797,12 +1791,12 @@ def add_content(self, more_content: StringList | None) -> None:
more_content = StringList(
[_('alias of TypeVar(%s)') % ', '.join(attrs), ''], source=''
)
- if self.props.doc_as_attr and self.modname != (
+ if self.props.doc_as_attr and self.props.module_name != (
self.props._obj___module__ or self.props.module_name
):
try:
# override analyzer to obtain doccomment around its definition.
- self.analyzer = ModuleAnalyzer.for_module(self.modname)
+ self.analyzer = ModuleAnalyzer.for_module(self.props.module_name)
self.analyzer.analyze()
except PycodeError:
pass
@@ -1894,7 +1888,7 @@ def update_annotations(self, parent: Any) -> None:
parent.__annotations__ = annotations
try:
- analyzer = ModuleAnalyzer.for_module(self.modname)
+ analyzer = ModuleAnalyzer.for_module(self.props.module_name)
analyzer.analyze()
for (classname, attrname), annotation in analyzer.annotations.items():
if not classname and attrname not in annotations:
@@ -1931,11 +1925,11 @@ def add_directive_header(self, sig: str) -> None:
self.config.autodoc_type_aliases,
include_extras=True,
)
- if self.objpath[-1] in annotations:
+ if self.props.name in annotations:
mode = _get_render_mode(self.config.autodoc_typehints_format)
short_literals = self.config.python_display_short_literal_types
objrepr = stringify_annotation(
- annotations.get(self.objpath[-1]),
+ annotations.get(self.props.name),
mode,
short_literals=short_literals,
)
@@ -1959,7 +1953,7 @@ def document_members(self, all_members: bool = False) -> None:
def get_module_comment(self, attrname: str) -> list[str] | None:
try:
- analyzer = ModuleAnalyzer.for_module(self.modname)
+ analyzer = ModuleAnalyzer.for_module(self.props.module_name)
analyzer.analyze()
key = ('', attrname)
if key in analyzer.attr_docs:
@@ -1971,7 +1965,7 @@ def get_module_comment(self, attrname: str) -> list[str] | None:
def get_doc(self) -> list[list[str]] | None:
# Check the variable has a docstring-comment
- comment = self.get_module_comment(self.objpath[-1])
+ comment = self.get_module_comment(self.props.name)
if comment:
return [comment]
else:
@@ -2028,7 +2022,7 @@ def format_args(self, **kwargs: Any) -> str:
args = '()'
else:
if inspect.isstaticmethod(
- self.object, cls=self.parent, name=self.object_name
+ self.object, cls=self.parent, name=self.props.object_name
):
self._events.emit(
'autodoc-before-process-signature', self.object, False
@@ -2049,9 +2043,8 @@ def format_args(self, **kwargs: Any) -> str:
)
args = stringify_signature(sig, **kwargs)
except TypeError as exc:
- logger.warning(
- __('Failed to get a method signature for %s: %s'), self.fullname, exc
- )
+ msg = __('Failed to get a method signature for %s: %s')
+ logger.warning(msg, self.props.full_name, exc)
return ''
except ValueError:
args = ''
@@ -2065,7 +2058,7 @@ def add_directive_header(self, sig: str) -> None:
super().add_directive_header(sig)
sourcename = self.get_sourcename()
- obj = self.parent.__dict__.get(self.object_name, self.object)
+ obj = self.parent.__dict__.get(self.props.object_name, self.object)
if inspect.isabstractmethod(obj):
self.add_line(' :abstractmethod:', sourcename)
if inspect.iscoroutinefunction(obj) or inspect.isasyncgenfunction(obj):
@@ -2076,9 +2069,9 @@ def add_directive_header(self, sig: str) -> None:
and inspect.is_classmethod_like(obj.func)
):
self.add_line(' :classmethod:', sourcename)
- if inspect.isstaticmethod(obj, cls=self.parent, name=self.object_name):
+ if inspect.isstaticmethod(obj, cls=self.parent, name=self.props.object_name):
self.add_line(' :staticmethod:', sourcename)
- if self.analyzer and '.'.join(self.objpath) in self.analyzer.finals:
+ if self.analyzer and self.props.dotted_parts in self.analyzer.finals:
self.add_line(' :final:', sourcename)
def document_members(self, all_members: bool = False) -> None:
@@ -2093,7 +2086,7 @@ def format_signature(self, **kwargs: Any) -> str:
sigs = []
if (
self.analyzer
- and '.'.join(self.objpath) in self.analyzer.overloads
+ and self.props.dotted_parts in self.analyzer.overloads
and self.config.autodoc_typehints != 'none'
):
# Use signatures for overloaded methods instead of the implementation method.
@@ -2103,8 +2096,10 @@ def format_signature(self, **kwargs: Any) -> str:
sig = super().format_signature(**kwargs)
sigs.append(sig)
- meth = self.parent.__dict__.get(self.objpath[-1])
+ meth = self.parent.__dict__.get(self.props.name)
if inspect.is_singledispatch_method(meth):
+ from sphinx.ext.autodoc._property_types import _FunctionDefProperties
+
# append signature of singledispatch'ed functions
for typ, func in meth.dispatcher.registry.items():
if typ is object:
@@ -2115,13 +2110,21 @@ def format_signature(self, **kwargs: Any) -> str:
dispatchmeth = self.annotate_to_first_argument(func, typ)
if dispatchmeth:
documenter = MethodDocumenter(self.directive, '')
+ documenter.props = _FunctionDefProperties(
+ obj_type='method',
+ module_name='',
+ parts=('',),
+ docstring_lines=(),
+ _obj=dispatchmeth,
+ _obj___module__=None,
+ properties=frozenset(),
+ )
documenter.parent = self.parent
documenter.object = dispatchmeth
- documenter.objpath = ['']
sigs.append(documenter.format_signature())
if overloaded and self.analyzer is not None:
if inspect.isstaticmethod(
- self.object, cls=self.parent, name=self.object_name
+ self.object, cls=self.parent, name=self.props.object_name
):
actual = inspect.signature(
self.object,
@@ -2136,14 +2139,14 @@ def format_signature(self, **kwargs: Any) -> str:
)
__globals__ = safe_getattr(self.object, '__globals__', {})
- for overload in self.analyzer.overloads['.'.join(self.objpath)]:
+ for overload in self.analyzer.overloads[self.props.dotted_parts]:
overload = self.merge_default_value(actual, overload)
overload = evaluate_signature(
overload, __globals__, self.config.autodoc_type_aliases
)
if not inspect.isstaticmethod(
- self.object, cls=self.parent, name=self.object_name
+ self.object, cls=self.parent, name=self.props.object_name
):
parameters = list(overload.parameters.values())
overload = overload.replace(parameters=parameters[1:])
@@ -2169,9 +2172,8 @@ def annotate_to_first_argument(
try:
sig = inspect.signature(func, type_aliases=self.config.autodoc_type_aliases)
except TypeError as exc:
- logger.warning(
- __('Failed to get a method signature for %s: %s'), self.fullname, exc
- )
+ msg = __('Failed to get a method signature for %s: %s')
+ logger.warning(msg, self.props.full_name, exc)
return None
except ValueError:
return None
@@ -2202,13 +2204,13 @@ def get_doc(self) -> list[list[str]] | None:
# ``__docstring_signature__ = True``. Just return the
# previously-computed result, so that we don't lose the processing.
return self._new_docstrings
- if self.objpath[-1] == '__init__':
+ if self.props.name == '__init__':
docstring = getdoc(
self.object,
self.get_attr,
self.config.autodoc_inherit_docstrings,
self.parent,
- self.object_name,
+ self.props.object_name,
)
if docstring is not None and (
docstring == object.__init__.__doc__ # for pypy
@@ -2220,13 +2222,13 @@ def get_doc(self) -> list[list[str]] | None:
return [prepare_docstring(docstring, tabsize=tab_width)]
else:
return []
- elif self.objpath[-1] == '__new__':
+ elif self.props.name == '__new__':
docstring = getdoc(
self.object,
self.get_attr,
self.config.autodoc_inherit_docstrings,
self.parent,
- self.object_name,
+ self.props.object_name,
)
if docstring is not None and (
docstring == object.__new__.__doc__ # for pypy
@@ -2342,11 +2344,11 @@ def add_directive_header(self, sig: str) -> None:
self.config.autodoc_type_aliases,
include_extras=True,
)
- if self.objpath[-1] in annotations:
+ if self.props.name in annotations:
mode = _get_render_mode(self.config.autodoc_typehints_format)
short_literals = self.config.python_display_short_literal_types
objrepr = stringify_annotation(
- annotations.get(self.objpath[-1]),
+ annotations.get(self.props.name),
mode,
short_literals=short_literals,
)
@@ -2367,13 +2369,13 @@ def add_directive_header(self, sig: str) -> None:
def get_attribute_comment(self, parent: Any, attrname: str) -> list[str] | None:
return _get_attribute_comment(
- parent=parent, obj_path=self.objpath, attrname=attrname
+ parent=parent, obj_path=self.props.parts, attrname=attrname
)
def get_doc(self) -> list[list[str]] | None:
# Check the attribute has a docstring-comment
comment = _get_attribute_comment(
- parent=self.parent, obj_path=self.objpath, attrname=self.objpath[-1]
+ parent=self.parent, obj_path=self.props.parts, attrname=self.props.parts[-1]
)
if comment:
return [comment]
@@ -2390,7 +2392,7 @@ def get_doc(self) -> list[list[str]] | None:
try:
parent___slots__ = inspect.getslots(self.parent)
if parent___slots__ and (
- docstring := parent___slots__.get(self.objpath[-1])
+ docstring := parent___slots__.get(self.props.name)
):
docstring = prepare_docstring(docstring)
return [docstring]
@@ -2407,7 +2409,7 @@ def get_doc(self) -> list[list[str]] | None:
if (
self.object is RUNTIME_INSTANCE_ATTRIBUTE
and _is_runtime_instance_attribute_not_commented(
- parent=self.parent, obj_path=self.objpath
+ parent=self.parent, obj_path=self.props.parts
)
):
return None
@@ -2505,9 +2507,8 @@ def add_directive_header(self, sig: str) -> None:
)
self.add_line(' :type: ' + objrepr, sourcename)
except TypeError as exc:
- logger.warning(
- __('Failed to get a function signature for %s: %s'), self.fullname, exc
- )
+ msg = __('Failed to get a function signature for %s: %s')
+ logger.warning(msg, self.props.full_name, exc)
pass
except ValueError:
pass
diff --git a/sphinx/ext/autodoc/_property_types.py b/sphinx/ext/autodoc/_property_types.py
index 47fabc1358c..09c98bf870d 100644
--- a/sphinx/ext/autodoc/_property_types.py
+++ b/sphinx/ext/autodoc/_property_types.py
@@ -64,6 +64,10 @@ def full_name(self) -> str:
def parent_names(self) -> tuple[str, ...]:
return self.parts[:-1]
+ @property
+ def dotted_parts(self) -> str:
+ return '.'.join(self.parts)
+
@dataclasses.dataclass(frozen=False, kw_only=True, slots=True)
class _ModuleProperties(_ItemProperties):
diff --git a/tests/test_extensions/test_ext_autodoc.py b/tests/test_extensions/test_ext_autodoc.py
index d23ec9926df..38bad211d74 100644
--- a/tests/test_extensions/test_ext_autodoc.py
+++ b/tests/test_extensions/test_ext_autodoc.py
@@ -22,7 +22,11 @@
inherited_members_option,
)
from sphinx.ext.autodoc._documenters import ModuleLevelDocumenter
-from sphinx.ext.autodoc._property_types import _ClassDefProperties
+from sphinx.ext.autodoc._property_types import (
+ _ClassDefProperties,
+ _FunctionDefProperties,
+ _ItemProperties,
+)
from sphinx.ext.autodoc._sentinels import ALL
# NEVER import these objects from sphinx.ext.autodoc directly
@@ -135,7 +139,7 @@ def parse(objtype, name):
def test_format_signature(app):
def process_signature(app, what, name, obj, options, args, retann):
processed_signatures.append((what, name))
- if name == 'bar':
+ if name == '.bar':
return '42', None
return None
@@ -155,7 +159,6 @@ def skip_member(app, what, name, obj, skip, options):
def formatsig(objtype, name, obj, args, retann):
inst = app.registry.documenters[objtype](directive, name)
- inst.fullname = name
inst.doc_as_attr = False # for class objtype
inst.parent = object # dummy
inst.object = obj
@@ -360,12 +363,19 @@ def func(x: int, y: int) -> int: # type: ignore[empty-body]
directive = make_directive_bridge(app.env)
inst = app.registry.documenters['function'](directive, 'func')
- inst.fullname = 'func'
+ inst.props = _FunctionDefProperties(
+ obj_type='function',
+ module_name='',
+ parts=('func',),
+ docstring_lines=(),
+ _obj=func,
+ _obj___module__=None,
+ properties=frozenset(),
+ )
inst.object = func
- inst.objpath = ['func']
inst.format_signature()
assert captured == [
- (app, 'function', 'func', func, directive.genopt, '(x: int, y: int)', 'int')
+ (app, 'function', '.func', func, directive.genopt, '(x: int, y: int)', 'int')
]
@@ -375,9 +385,16 @@ def test_get_doc(app):
def getdocl(objtype, obj):
inst = app.registry.documenters[objtype](directive, 'tmp')
+ inst.props = _ItemProperties(
+ obj_type=objtype,
+ module_name='',
+ parts=(obj.__name__,),
+ docstring_lines=(),
+ _obj=obj,
+ _obj___module__=getattr(obj, '__module__', None),
+ )
inst.parent = object # dummy
inst.object = obj
- inst.objpath = [obj.__name__]
inst.doc_as_attr = False
inst.format_signature() # handle docstring signatures!
ds = inst.get_doc()
From df5e28474a9a4d4b2673695e070643ef4b9a313e Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sun, 3 Aug 2025 18:24:36 +0200
Subject: [PATCH 229/466] Warn on invalid module ``__all__`` during initial
load (#13802)
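The ``__all__`` check moves from ``inspect.getall()`` at
member-enumeration time to the initial load, so a malformed value is
reported once, with the module name. A module like this (hypothetical)
now triggers the warning and is treated as having no ``__all__``::

    # mymod.py -- __all__ must be a sequence of strings
    __all__ = [min, max]  # objects rather than names: invalid, ignored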
---
sphinx/ext/autodoc/_documenters.py | 26 ++++++++++----------------
sphinx/ext/autodoc/importer.py | 14 ++++++++++----
2 files changed, 20 insertions(+), 20 deletions(-)
diff --git a/sphinx/ext/autodoc/_documenters.py b/sphinx/ext/autodoc/_documenters.py
index fed158ba44c..c94ab60628e 100644
--- a/sphinx/ext/autodoc/_documenters.py
+++ b/sphinx/ext/autodoc/_documenters.py
@@ -1045,18 +1045,8 @@ def can_document_member(
return False
def _module_all(self) -> Sequence[str] | None:
- if self.object is not None and self.__all__ is None:
- try:
- if not self.options.ignore_module_all:
- self.__all__ = inspect.getall(self.object)
- except ValueError as exc:
- # invalid __all__ found.
- msg = __(
- '__all__ should be a list of strings, not %r '
- '(in module %s) -- ignoring __all__'
- )
- logger.warning(msg, exc.args[0], self.props.full_name, type='autodoc')
-
+ if self.__all__ is None and not self.options.ignore_module_all:
+ self.__all__ = self.props.all
return self.__all__
def add_directive_header(self, sig: str) -> None:
@@ -1107,8 +1097,8 @@ def get_module_members(self) -> dict[str, ObjectMember]:
def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
members = self.get_module_members()
if want_all:
- module_all = self._module_all()
- if module_all is None:
+ module_all = self.props.all
+ if self.options.ignore_module_all or module_all is None:
# for implicit module members, check __module__ to avoid
# documenting imported objects
return True, list(members.values())
@@ -1141,8 +1131,12 @@ def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
def sort_members(
self, documenters: list[tuple[Documenter, bool]], order: str
) -> list[tuple[Documenter, bool]]:
- module_all = self._module_all()
- if order == 'bysource' and module_all:
+ module_all = self.props.all
+ if (
+ order == 'bysource'
+ and not self.options.ignore_module_all
+ and module_all is not None
+ ):
assert module_all is not None
module_all_set = frozenset(module_all)
module_all_len = len(module_all)
diff --git a/sphinx/ext/autodoc/importer.py b/sphinx/ext/autodoc/importer.py
index 697bba5bb24..0d37f2d95bb 100644
--- a/sphinx/ext/autodoc/importer.py
+++ b/sphinx/ext/autodoc/importer.py
@@ -812,9 +812,15 @@ def _load_object_by_name(
obj_properties: set[_AutodocFuncProperty] = set()
if objtype == 'module':
file_path = getattr(module, '__file__', None)
- try:
- mod_all = inspect.getall(module)
- except ValueError:
+ mod_all = safe_getattr(obj, '__all__', None)
+ if isinstance(mod_all, (list, tuple)) and all(
+ isinstance(e, str) for e in mod_all
+ ):
+ mod_all = tuple(mod_all)
+ elif mod_all is not None:
+ # Invalid __all__ found.
+ msg = __('Ignoring invalid __all__ in module %s: %r')
+ logger.warning(msg, module_name, mod_all, type='autodoc')
mod_all = None
props = _ModuleProperties(
@@ -822,7 +828,7 @@ def _load_object_by_name(
module_name=module_name,
docstring_lines=(),
file_path=Path(file_path) if file_path is not None else None,
- all=tuple(mod_all) if mod_all is not None else None,
+ all=mod_all,
_obj=obj,
_obj___module__=obj.__name__,
)
From 667925d2f3f81e34b4570be7b24b4bb95e8604a2 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sun, 3 Aug 2025 18:26:18 +0200
Subject: [PATCH 230/466] Prefer using attributes on ``documenter.props``
(followup) (#13803)
---
sphinx/ext/autodoc/_documenters.py | 2 +-
tests/test_extensions/test_ext_autodoc.py | 3 +--
2 files changed, 2 insertions(+), 3 deletions(-)
diff --git a/sphinx/ext/autodoc/_documenters.py b/sphinx/ext/autodoc/_documenters.py
index c94ab60628e..02a0e6098f5 100644
--- a/sphinx/ext/autodoc/_documenters.py
+++ b/sphinx/ext/autodoc/_documenters.py
@@ -850,7 +850,7 @@ def document_members(self, all_members: bool = False) -> None:
member_documenters = self.sort_members(member_documenters, member_order)
for documenter, isattr in member_documenters:
- assert documenter.modname
+ assert documenter.props.module_name
# We can directly call ._generate() since the documenters
# already called parse_name() and import_object() before.
#
diff --git a/tests/test_extensions/test_ext_autodoc.py b/tests/test_extensions/test_ext_autodoc.py
index 38bad211d74..9cfa49cb203 100644
--- a/tests/test_extensions/test_ext_autodoc.py
+++ b/tests/test_extensions/test_ext_autodoc.py
@@ -162,12 +162,11 @@ def formatsig(objtype, name, obj, args, retann):
inst.doc_as_attr = False # for class objtype
inst.parent = object # dummy
inst.object = obj
- inst.objpath = [name]
inst.args = args
inst.retann = retann
inst.props = _ClassDefProperties(
obj_type=objtype,
- module_name=inst.modname,
+ module_name='',
parts=(name,),
docstring_lines=(),
bases=getattr(obj, '__bases__', None),
From 67047523bf3ac8b18ced45034fce801bedce4799 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sun, 3 Aug 2025 19:18:56 +0200
Subject: [PATCH 231/466] Prefer ``props._obj`` to ``documenter.object``
(#13804)
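Call sites that read ``self.object`` now go through ``self.props._obj``,
the object captured at import time, continuing the migration of state
from Documenter attributes onto the properties object; the
``autodoc_intenum`` tutorial example is updated to match.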
---
.../tutorials/examples/autodoc_intenum.py | 2 +-
sphinx/ext/autodoc/_documenters.py | 223 +++++++++---------
tests/test_extensions/test_ext_autodoc.py | 3 -
3 files changed, 118 insertions(+), 110 deletions(-)
diff --git a/doc/development/tutorials/examples/autodoc_intenum.py b/doc/development/tutorials/examples/autodoc_intenum.py
index 2dd8d6324e6..1a641905e2e 100644
--- a/doc/development/tutorials/examples/autodoc_intenum.py
+++ b/doc/development/tutorials/examples/autodoc_intenum.py
@@ -41,7 +41,7 @@ def add_content(
super().add_content(more_content)
source_name = self.get_sourcename()
- enum_object: IntEnum = self.object
+ enum_object: IntEnum = self.props._obj
use_hex = self.options.hex
self.add_line('', source_name)
diff --git a/sphinx/ext/autodoc/_documenters.py b/sphinx/ext/autodoc/_documenters.py
index 02a0e6098f5..16e25b64424 100644
--- a/sphinx/ext/autodoc/_documenters.py
+++ b/sphinx/ext/autodoc/_documenters.py
@@ -334,7 +334,7 @@ def check_module(self) -> bool:
if self.options.imported_members:
return True
- subject = inspect.unpartial(self.object)
+ subject = inspect.unpartial(self.props._obj)
modname = self.get_attr(subject, '__module__', None)
return not modname or modname == self.props.module_name
@@ -372,8 +372,8 @@ def _find_signature(self) -> tuple[str | None, str | None] | None:
valid_names = [self.props.parts[-1]]
if isinstance(self, ClassDocumenter):
valid_names.append('__init__')
- if hasattr(self.object, '__mro__'):
- valid_names.extend(cls.__name__ for cls in self.object.__mro__)
+ if hasattr(self.props._obj, '__mro__'):
+ valid_names.extend(cls.__name__ for cls in self.props._obj.__mro__)
docstrings = self.get_doc()
if docstrings is None:
@@ -464,7 +464,7 @@ def format_signature(self, **kwargs: Any) -> str:
'autodoc-process-signature',
self.objtype,
self.props.full_name,
- self.object,
+ self.props._obj,
self.options,
args,
retann,
@@ -513,14 +513,14 @@ def get_doc(self) -> list[list[str]] | None:
When it returns None, autodoc-process-docstring will not be called for this
object.
"""
- if self.object is UNINITIALIZED_ATTR:
+ if self.props._obj is UNINITIALIZED_ATTR:
return []
if self.__docstring_signature__ and self._new_docstrings is not None:
return self._new_docstrings
docstring = getdoc(
- self.object,
+ self.props._obj,
self.get_attr,
self.config.autodoc_inherit_docstrings,
self.parent,
@@ -540,7 +540,7 @@ def process_doc(self, docstrings: list[list[str]]) -> Iterator[str]:
'autodoc-process-docstring',
self.objtype,
self.props.full_name,
- self.object,
+ self.props._obj,
self.options,
docstringlines,
)
@@ -552,12 +552,12 @@ def process_doc(self, docstrings: list[list[str]]) -> Iterator[str]:
yield from docstringlines
def get_sourcename(self) -> str:
- obj_module = inspect.safe_getattr(self.object, '__module__', None)
- obj_qualname = inspect.safe_getattr(self.object, '__qualname__', None)
+ obj_module = inspect.safe_getattr(self.props._obj, '__module__', None)
+ obj_qualname = inspect.safe_getattr(self.props._obj, '__qualname__', None)
if obj_module and obj_qualname:
# Get the correct location of docstring from self.object
# to support inherited methods
- fullname = f'{self.object.__module__}.{self.object.__qualname__}'
+ fullname = f'{self.props._obj.__module__}.{self.props._obj.__qualname__}'
else:
fullname = self.props.full_name
@@ -636,13 +636,13 @@ def is_filtered_inherited_member(name: str, obj: Any) -> bool:
inherited_members = self.options.inherited_members or set()
seen = set()
- if inspect.isclass(self.object):
- for cls in self.object.__mro__:
+ if inspect.isclass(self.props._obj):
+ for cls in self.props._obj.__mro__:
if name in cls.__dict__:
seen.add(cls)
if (
cls.__name__ in inherited_members
- and cls != self.object
+ and cls != self.props._obj
and any(
issubclass(potential_child, cls) for potential_child in seen
)
@@ -681,7 +681,7 @@ def is_filtered_inherited_member(name: str, obj: Any) -> bool:
member,
self.get_attr,
self.config.autodoc_inherit_docstrings,
- self.object,
+ self.props._obj,
membername,
)
if not isinstance(doc, str):
@@ -952,7 +952,7 @@ def _generate(
docstrings: list[str] = functools.reduce(
operator.iadd, self.get_doc() or [], []
)
- if ismock(self.object) and not docstrings:
+ if ismock(self.props._obj) and not docstrings:
logger.warning(
__('A mocked object is detected: %r'),
self.name,
@@ -1072,9 +1072,9 @@ def get_module_members(self) -> dict[str, ObjectMember]:
attr_docs = {}
members: dict[str, ObjectMember] = {}
- for name in dir(self.object):
+ for name in dir(self.props._obj):
try:
- value = safe_getattr(self.object, name, None)
+ value = safe_getattr(self.props._obj, name, None)
if ismock(value):
value = undecorate(value)
docstring = attr_docs.get(('', name), [])
@@ -1085,7 +1085,7 @@ def get_module_members(self) -> dict[str, ObjectMember]:
continue
# annotation only member (ex. attr: int)
- for name in inspect.getannotations(self.object):
+ for name in inspect.getannotations(self.props._obj):
if name not in members:
docstring = attr_docs.get(('', name), [])
members[name] = ObjectMember(
@@ -1122,7 +1122,7 @@ def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
'missing attribute mentioned in :members: option: '
'module %s, attribute %s'
),
- safe_getattr(self.object, '__name__', '???'),
+ safe_getattr(self.props._obj, '__name__', '???'),
name,
type='autodoc',
)
@@ -1189,9 +1189,11 @@ def format_args(self, **kwargs: Any) -> str:
kwargs.setdefault('short_literals', True)
try:
- self._events.emit('autodoc-before-process-signature', self.object, False)
+ self._events.emit(
+ 'autodoc-before-process-signature', self.props._obj, False
+ )
sig = inspect.signature(
- self.object, type_aliases=self.config.autodoc_type_aliases
+ self.props._obj, type_aliases=self.config.autodoc_type_aliases
)
args = stringify_signature(sig, **kwargs)
except TypeError as exc:
@@ -1217,8 +1219,8 @@ def add_directive_header(self, sig: str) -> None:
sourcename = self.get_sourcename()
super().add_directive_header(sig)
- is_coro = inspect.iscoroutinefunction(self.object)
- is_acoro = inspect.isasyncgenfunction(self.object)
+ is_coro = inspect.iscoroutinefunction(self.props._obj)
+ is_acoro = inspect.isasyncgenfunction(self.props._obj)
if is_coro or is_acoro:
self.add_line(' :async:', sourcename)
@@ -1241,11 +1243,11 @@ def format_signature(self, **kwargs: Any) -> str:
sig = super().format_signature(**kwargs)
sigs.append(sig)
- if inspect.is_singledispatch_function(self.object):
+ if inspect.is_singledispatch_function(self.props._obj):
from sphinx.ext.autodoc._property_types import _FunctionDefProperties
# append signature of singledispatch'ed functions
- for typ, func in self.object.registry.items():
+ for typ, func in self.props._obj.registry.items():
if typ is object:
pass # default implementation. skipped.
else:
@@ -1261,13 +1263,12 @@ def format_signature(self, **kwargs: Any) -> str:
_obj___module__=None,
properties=frozenset(),
)
- documenter.object = dispatchfunc
sigs.append(documenter.format_signature())
if overloaded and self.analyzer is not None:
actual = inspect.signature(
- self.object, type_aliases=self.config.autodoc_type_aliases
+ self.props._obj, type_aliases=self.config.autodoc_type_aliases
)
- __globals__ = safe_getattr(self.object, '__globals__', {})
+ __globals__ = safe_getattr(self.props._obj, '__globals__', {})
for overload in self.analyzer.overloads[self.props.dotted_parts]:
overload = self.merge_default_value(actual, overload)
overload = evaluate_signature(
@@ -1402,7 +1403,7 @@ def can_document_member(
)
def _get_signature(self) -> tuple[Any | None, str | None, Signature | None]:
- if isinstance(self.object, NewType | TypeVar):
+ if isinstance(self.props._obj, NewType | TypeVar):
# Suppress signature
return None, None, None
@@ -1419,8 +1420,8 @@ def get_user_defined_function_or_method(obj: Any, attr: str) -> Any:
# ValueError means that no signature could be found, so we keep going.
# First, we check if obj has a __signature__ attribute
- if hasattr(self.object, '__signature__'):
- object_sig = self.object.__signature__
+ if hasattr(self.props._obj, '__signature__'):
+ object_sig = self.props._obj.__signature__
if isinstance(object_sig, Signature):
return None, None, object_sig
if sys.version_info[:2] in {(3, 12), (3, 13)} and callable(object_sig):
@@ -1430,7 +1431,7 @@ def get_user_defined_function_or_method(obj: Any, attr: str) -> Any:
# Next, let's see if it has an overloaded __call__ defined
# in its metaclass
- call = get_user_defined_function_or_method(type(self.object), '__call__')
+ call = get_user_defined_function_or_method(type(self.props._obj), '__call__')
if call is not None:
if f'{call.__module__}.{call.__qualname__}' in _METACLASS_CALL_BLACKLIST:
@@ -1444,12 +1445,12 @@ def get_user_defined_function_or_method(obj: Any, attr: str) -> Any:
bound_method=True,
type_aliases=self.config.autodoc_type_aliases,
)
- return type(self.object), '__call__', sig
+ return type(self.props._obj), '__call__', sig
except ValueError:
pass
# Now we check if the 'obj' class has a '__new__' method
- new = get_user_defined_function_or_method(self.object, '__new__')
+ new = get_user_defined_function_or_method(self.props._obj, '__new__')
if new is not None:
if f'{new.__module__}.{new.__qualname__}' in _CLASS_NEW_BLACKLIST:
@@ -1463,12 +1464,12 @@ def get_user_defined_function_or_method(obj: Any, attr: str) -> Any:
bound_method=True,
type_aliases=self.config.autodoc_type_aliases,
)
- return self.object, '__new__', sig
+ return self.props._obj, '__new__', sig
except ValueError:
pass
# Finally, we should have at least __init__ implemented
- init = get_user_defined_function_or_method(self.object, '__init__')
+ init = get_user_defined_function_or_method(self.props._obj, '__init__')
if init is not None:
self._events.emit('autodoc-before-process-signature', init, True)
try:
@@ -1477,7 +1478,7 @@ def get_user_defined_function_or_method(obj: Any, attr: str) -> Any:
bound_method=True,
type_aliases=self.config.autodoc_type_aliases,
)
- return self.object, '__init__', sig
+ return self.props._obj, '__init__', sig
except ValueError:
pass
@@ -1485,10 +1486,10 @@ def get_user_defined_function_or_method(obj: Any, attr: str) -> Any:
# handle it.
# We don't know the exact method that inspect.signature will read
# the signature from, so just pass the object itself to our hook.
- self._events.emit('autodoc-before-process-signature', self.object, False)
+ self._events.emit('autodoc-before-process-signature', self.props._obj, False)
try:
sig = inspect.signature(
- self.object,
+ self.props._obj,
bound_method=False,
type_aliases=self.config.autodoc_type_aliases,
)
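Across these hunks the class-signature lookup keeps its existing cascade, now
reading from ``props._obj`` throughout: an explicit ``__signature__`` wins,
then an overloaded ``__call__`` on the metaclass, then ``__new__``, then
``__init__``, and finally the object itself. A schematic sketch of that order
(bound-method stripping, type aliases, and the blacklists are omitted;
``find_class_signature`` and ``_user_defined`` are illustrative names, not
Sphinx API):

    import inspect
    import types


    def _user_defined(owner: type, attr: str):
        # Slot wrappers inherited from type/object carry no useful signature,
        # so only plain functions/methods count as "user defined" here.
        method = getattr(owner, attr, None)
        if isinstance(method, (types.FunctionType, types.MethodType)):
            return method
        return None


    def find_class_signature(cls: type):
        explicit = getattr(cls, '__signature__', None)
        if isinstance(explicit, inspect.Signature):
            return None, None, explicit
        for owner, attr in (
            (type(cls), '__call__'),  # metaclass __call__
            (cls, '__new__'),
            (cls, '__init__'),
        ):
            method = _user_defined(owner, attr)
            if method is None:
                continue
            try:
                return owner, attr, inspect.signature(method)
            except ValueError:
                continue  # no signature found; keep going down the chain
        try:
            # Last resort: let inspect look at the class object itself.
            return None, None, inspect.signature(cls)
        except ValueError:
            return None, None, None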
@@ -1592,10 +1593,12 @@ def get_overloaded_signatures(self) -> list[Signature]:
return []
def get_canonical_fullname(self) -> str | None:
- __modname__ = safe_getattr(self.object, '__module__', self.props.module_name)
- __qualname__ = safe_getattr(self.object, '__qualname__', None)
+ __modname__ = safe_getattr(
+ self.props._obj, '__module__', self.props.module_name
+ )
+ __qualname__ = safe_getattr(self.props._obj, '__qualname__', None)
if __qualname__ is None:
- __qualname__ = safe_getattr(self.object, '__name__', None)
+ __qualname__ = safe_getattr(self.props._obj, '__name__', None)
        if __qualname__ and '<locals>' in __qualname__:
# No valid qualname found if the object is defined as locals
__qualname__ = None
@@ -1612,7 +1615,7 @@ def add_directive_header(self, sig: str) -> None:
self.directivetype = 'attribute'
super().add_directive_header(sig)
- if isinstance(self.object, NewType | TypeVar):
+ if isinstance(self.props._obj, NewType | TypeVar):
return
if self.analyzer and self.props.dotted_parts in self.analyzer.finals:
@@ -1621,7 +1624,7 @@ def add_directive_header(self, sig: str) -> None:
canonical_fullname = self.get_canonical_fullname()
if (
not self.props.doc_as_attr
- and not isinstance(self.object, NewType)
+ and not isinstance(self.props._obj, NewType)
and canonical_fullname
and self.props.full_name != canonical_fullname
):
@@ -1629,20 +1632,22 @@ def add_directive_header(self, sig: str) -> None:
# add inheritance info, if wanted
if not self.props.doc_as_attr and self.options.show_inheritance:
- if inspect.getorigbases(self.object):
+ if inspect.getorigbases(self.props._obj):
# A subclass of generic types
# refs: PEP-560
- bases = list(self.object.__orig_bases__)
- elif hasattr(self.object, '__bases__') and len(self.object.__bases__):
+ bases = list(self.props._obj.__orig_bases__)
+ elif hasattr(self.props._obj, '__bases__') and len(
+ self.props._obj.__bases__
+ ):
# A normal class
- bases = list(self.object.__bases__)
+ bases = list(self.props._obj.__bases__)
else:
bases = []
self._events.emit(
'autodoc-process-bases',
self.props.full_name,
- self.object,
+ self.props._obj,
self.options,
bases,
)
@@ -1656,7 +1661,7 @@ def add_directive_header(self, sig: str) -> None:
def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
members = get_class_members(
- self.object,
+ self.props._obj,
self.props.parts,
self.get_attr,
self.config.autodoc_inherit_docstrings,
@@ -1677,11 +1682,11 @@ def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
elif self.options.inherited_members:
return False, list(members.values())
else:
- return False, [m for m in members.values() if m.class_ == self.object]
+ return False, [m for m in members.values() if m.class_ == self.props._obj]
def get_doc(self) -> list[list[str]] | None:
- if isinstance(self.object, TypeVar):
- if self.object.__doc__ == TypeVar.__doc__:
+ if isinstance(self.props._obj, TypeVar):
+ if self.props._obj.__doc__ == TypeVar.__doc__:
return []
if self.props.doc_as_attr:
# Don't show the docstring of the class when it is an alias.
@@ -1700,19 +1705,19 @@ def get_doc(self) -> list[list[str]] | None:
classdoc_from = self.config.autoclass_content
docstrings = []
- attrdocstring = getdoc(self.object, self.get_attr)
+ attrdocstring = getdoc(self.props._obj, self.get_attr)
if attrdocstring:
docstrings.append(attrdocstring)
# for classes, what the "docstring" is can be controlled via a
# config value; the default is only the class docstring
if classdoc_from in {'both', 'init'}:
- __init__ = self.get_attr(self.object, '__init__', None)
+ __init__ = self.get_attr(self.props._obj, '__init__', None)
initdocstring = getdoc(
__init__,
self.get_attr,
self.config.autodoc_inherit_docstrings,
- self.object,
+ self.props._obj,
'__init__',
)
# for new-style classes, no __init__ means default __init__
@@ -1723,12 +1728,12 @@ def get_doc(self) -> list[list[str]] | None:
initdocstring = None
if not initdocstring:
# try __new__
- __new__ = self.get_attr(self.object, '__new__', None)
+ __new__ = self.get_attr(self.props._obj, '__new__', None)
initdocstring = getdoc(
__new__,
self.get_attr,
self.config.autodoc_inherit_docstrings,
- self.object,
+ self.props._obj,
'__new__',
)
# for new-style classes, no __new__ means default __new__
@@ -1764,22 +1769,22 @@ def add_content(self, more_content: StringList | None) -> None:
mode = _get_render_mode(self.config.autodoc_typehints_format)
short_literals = self.config.python_display_short_literal_types
- if isinstance(self.object, NewType):
- supertype = restify(self.object.__supertype__, mode=mode)
+ if isinstance(self.props._obj, NewType):
+ supertype = restify(self.props._obj.__supertype__, mode=mode)
more_content = StringList([_('alias of %s') % supertype, ''], source='')
- if isinstance(self.object, TypeVar):
- attrs = [repr(self.object.__name__)]
+ if isinstance(self.props._obj, TypeVar):
+ attrs = [repr(self.props._obj.__name__)]
attrs.extend(
stringify_annotation(constraint, mode, short_literals=short_literals)
- for constraint in self.object.__constraints__
+ for constraint in self.props._obj.__constraints__
)
- if self.object.__bound__:
- bound = restify(self.object.__bound__, mode=mode)
+ if self.props._obj.__bound__:
+ bound = restify(self.props._obj.__bound__, mode=mode)
attrs.append(r'bound=\ ' + bound)
- if self.object.__covariant__:
+ if self.props._obj.__covariant__:
attrs.append('covariant=True')
- if self.object.__contravariant__:
+ if self.props._obj.__contravariant__:
attrs.append('contravariant=True')
more_content = StringList(
@@ -1797,7 +1802,7 @@ def add_content(self, more_content: StringList | None) -> None:
if self.props.doc_as_attr and not self.get_variable_comment():
try:
- alias = restify(self.object, mode=mode)
+ alias = restify(self.props._obj, mode=mode)
more_content = StringList([_('alias of %s') % alias], source='')
except AttributeError:
pass # Invalid class object is passed.
@@ -1891,7 +1896,7 @@ def update_annotations(self, parent: Any) -> None:
pass
def should_suppress_value_header(self) -> bool:
- if self.object is UNINITIALIZED_ATTR:
+ if self.props._obj is UNINITIALIZED_ATTR:
return True
else:
doc = self.get_doc() or []
@@ -1906,7 +1911,9 @@ def should_suppress_value_header(self) -> bool:
def add_directive_header(self, sig: str) -> None:
super().add_directive_header(sig)
sourcename = self.get_sourcename()
- if self.options.annotation is SUPPRESS or inspect.isgenericalias(self.object):
+ if self.options.annotation is SUPPRESS or inspect.isgenericalias(
+ self.props._obj
+ ):
pass
elif self.options.annotation:
self.add_line(' :annotation: %s' % self.options.annotation, sourcename)
@@ -1933,11 +1940,11 @@ def add_directive_header(self, sig: str) -> None:
if (
self.options.no_value
or self.should_suppress_value_header()
- or ismock(self.object)
+ or ismock(self.props._obj)
):
pass
else:
- objrepr = object_description(self.object)
+ objrepr = object_description(self.props._obj)
self.add_line(' :value: ' + objrepr, sourcename)
except ValueError:
pass
@@ -1975,7 +1982,7 @@ def add_content(self, more_content: StringList | None) -> None:
_add_content_generic_alias_(
more_content,
- self.object,
+ self.props._obj,
autodoc_typehints_format=self.config.autodoc_typehints_format,
)
super().add_content(more_content)
@@ -2008,7 +2015,7 @@ def format_args(self, **kwargs: Any) -> str:
kwargs.setdefault('short_literals', True)
try:
- if self.object == object.__init__ and self.parent != object: # NoQA: E721
+ if self.props._obj == object.__init__ and self.parent != object: # NoQA: E721
# Classes not having own __init__() method are shown as no arguments.
#
# Note: The signature of object.__init__() is (self, /, *args, **kwargs).
@@ -2016,22 +2023,22 @@ def format_args(self, **kwargs: Any) -> str:
args = '()'
else:
if inspect.isstaticmethod(
- self.object, cls=self.parent, name=self.props.object_name
+ self.props._obj, cls=self.parent, name=self.props.object_name
):
self._events.emit(
- 'autodoc-before-process-signature', self.object, False
+ 'autodoc-before-process-signature', self.props._obj, False
)
sig = inspect.signature(
- self.object,
+ self.props._obj,
bound_method=False,
type_aliases=self.config.autodoc_type_aliases,
)
else:
self._events.emit(
- 'autodoc-before-process-signature', self.object, True
+ 'autodoc-before-process-signature', self.props._obj, True
)
sig = inspect.signature(
- self.object,
+ self.props._obj,
bound_method=True,
type_aliases=self.config.autodoc_type_aliases,
)
@@ -2052,7 +2059,7 @@ def add_directive_header(self, sig: str) -> None:
super().add_directive_header(sig)
sourcename = self.get_sourcename()
- obj = self.parent.__dict__.get(self.props.object_name, self.object)
+ obj = self.parent.__dict__.get(self.props.object_name, self.props._obj)
if inspect.isabstractmethod(obj):
self.add_line(' :abstractmethod:', sourcename)
if inspect.iscoroutinefunction(obj) or inspect.isasyncgenfunction(obj):
@@ -2114,25 +2121,24 @@ def format_signature(self, **kwargs: Any) -> str:
properties=frozenset(),
)
documenter.parent = self.parent
- documenter.object = dispatchmeth
sigs.append(documenter.format_signature())
if overloaded and self.analyzer is not None:
if inspect.isstaticmethod(
- self.object, cls=self.parent, name=self.props.object_name
+ self.props._obj, cls=self.parent, name=self.props.object_name
):
actual = inspect.signature(
- self.object,
+ self.props._obj,
bound_method=False,
type_aliases=self.config.autodoc_type_aliases,
)
else:
actual = inspect.signature(
- self.object,
+ self.props._obj,
bound_method=True,
type_aliases=self.config.autodoc_type_aliases,
)
- __globals__ = safe_getattr(self.object, '__globals__', {})
+ __globals__ = safe_getattr(self.props._obj, '__globals__', {})
for overload in self.analyzer.overloads[self.props.dotted_parts]:
overload = self.merge_default_value(actual, overload)
overload = evaluate_signature(
@@ -2140,7 +2146,7 @@ def format_signature(self, **kwargs: Any) -> str:
)
if not inspect.isstaticmethod(
- self.object, cls=self.parent, name=self.props.object_name
+ self.props._obj, cls=self.parent, name=self.props.object_name
):
parameters = list(overload.parameters.values())
overload = overload.replace(parameters=parameters[1:])
@@ -2200,7 +2206,7 @@ def get_doc(self) -> list[list[str]] | None:
return self._new_docstrings
if self.props.name == '__init__':
docstring = getdoc(
- self.object,
+ self.props._obj,
self.get_attr,
self.config.autodoc_inherit_docstrings,
self.parent,
@@ -2218,7 +2224,7 @@ def get_doc(self) -> list[list[str]] | None:
return []
elif self.props.name == '__new__':
docstring = getdoc(
- self.object,
+ self.props._obj,
self.get_attr,
self.config.autodoc_inherit_docstrings,
self.parent,
@@ -2300,16 +2306,16 @@ def update_annotations(self, parent: Any) -> None:
@property
def _is_non_data_descriptor(self) -> bool:
- return not inspect.isattributedescriptor(self.object)
+ return not inspect.isattributedescriptor(self.props._obj)
def should_suppress_value_header(self) -> bool:
- if self.object is SLOTS_ATTR:
+ if self.props._obj is SLOTS_ATTR:
return True
- if self.object is RUNTIME_INSTANCE_ATTRIBUTE:
+ if self.props._obj is RUNTIME_INSTANCE_ATTRIBUTE:
return True
- if self.object is UNINITIALIZED_ATTR:
+ if self.props._obj is UNINITIALIZED_ATTR:
return True
- if not self._is_non_data_descriptor or inspect.isgenericalias(self.object):
+ if not self._is_non_data_descriptor or inspect.isgenericalias(self.props._obj):
return True
else:
doc = self.get_doc()
@@ -2325,7 +2331,9 @@ def should_suppress_value_header(self) -> bool:
def add_directive_header(self, sig: str) -> None:
super().add_directive_header(sig)
sourcename = self.get_sourcename()
- if self.options.annotation is SUPPRESS or inspect.isgenericalias(self.object):
+ if self.options.annotation is SUPPRESS or inspect.isgenericalias(
+ self.props._obj
+ ):
pass
elif self.options.annotation:
self.add_line(' :annotation: %s' % self.options.annotation, sourcename)
@@ -2352,11 +2360,11 @@ def add_directive_header(self, sig: str) -> None:
if (
self.options.no_value
or self.should_suppress_value_header()
- or ismock(self.object)
+ or ismock(self.props._obj)
):
pass
else:
- objrepr = object_description(self.object)
+ objrepr = object_description(self.props._obj)
self.add_line(' :value: ' + objrepr, sourcename)
except ValueError:
pass
@@ -2381,7 +2389,7 @@ def get_doc(self) -> list[list[str]] | None:
orig = self.config.autodoc_inherit_docstrings
self.config.autodoc_inherit_docstrings = False
- if self.object is SLOTS_ATTR:
+ if self.props._obj is SLOTS_ATTR:
# support for __slots__
try:
parent___slots__ = inspect.getslots(self.parent)
@@ -2401,14 +2409,14 @@ def get_doc(self) -> list[list[str]] | None:
return []
if (
- self.object is RUNTIME_INSTANCE_ATTRIBUTE
+ self.props._obj is RUNTIME_INSTANCE_ATTRIBUTE
and _is_runtime_instance_attribute_not_commented(
parent=self.parent, obj_path=self.props.parts
)
):
return None
- if self.object is UNINITIALIZED_ATTR:
+ if self.props._obj is UNINITIALIZED_ATTR:
return None
if self._is_non_data_descriptor:
@@ -2429,7 +2437,7 @@ def add_content(self, more_content: StringList | None) -> None:
more_content = StringList()
_add_content_generic_alias_(
more_content,
- self.object,
+ self.props._obj,
autodoc_typehints_format=self.config.autodoc_typehints_format,
)
super().add_content(more_content)
@@ -2457,7 +2465,10 @@ def can_document_member(
if inspect.isproperty(member):
return True
else:
- __dict__ = safe_getattr(parent.object, '__dict__', {})
+ # See FakeDirective &c in autosummary, parent might not be a
+ # 'proper' Documenter.
+ obj = parent.props._obj if hasattr(parent, 'props') else None
+ __dict__ = safe_getattr(obj, '__dict__', {})
obj = __dict__.get(membername)
return isinstance(obj, classmethod) and inspect.isproperty(obj.__func__)
else:
@@ -2479,7 +2490,7 @@ def document_members(self, all_members: bool = False) -> None:
def add_directive_header(self, sig: str) -> None:
super().add_directive_header(sig)
sourcename = self.get_sourcename()
- if inspect.isabstractmethod(self.object):
+ if inspect.isabstractmethod(self.props._obj):
self.add_line(' :abstractmethod:', sourcename)
# Support for class properties. Note: these only work on Python 3.9.
if self.props.is_classmethod:
@@ -2508,10 +2519,10 @@ def add_directive_header(self, sig: str) -> None:
pass
def _get_property_getter(self) -> Callable[..., Any] | None:
- if safe_getattr(self.object, 'fget', None): # property
- return self.object.fget
- if safe_getattr(self.object, 'func', None): # cached_property
- return self.object.func
+ if safe_getattr(self.props._obj, 'fget', None): # property
+ return self.props._obj.fget
+ if safe_getattr(self.props._obj, 'func', None): # cached_property
+ return self.props._obj.func
return None
diff --git a/tests/test_extensions/test_ext_autodoc.py b/tests/test_extensions/test_ext_autodoc.py
index 9cfa49cb203..48259039bdf 100644
--- a/tests/test_extensions/test_ext_autodoc.py
+++ b/tests/test_extensions/test_ext_autodoc.py
@@ -161,7 +161,6 @@ def formatsig(objtype, name, obj, args, retann):
inst = app.registry.documenters[objtype](directive, name)
inst.doc_as_attr = False # for class objtype
inst.parent = object # dummy
- inst.object = obj
inst.args = args
inst.retann = retann
inst.props = _ClassDefProperties(
@@ -371,7 +370,6 @@ def func(x: int, y: int) -> int: # type: ignore[empty-body]
_obj___module__=None,
properties=frozenset(),
)
- inst.object = func
inst.format_signature()
assert captured == [
(app, 'function', '.func', func, directive.genopt, '(x: int, y: int)', 'int')
@@ -393,7 +391,6 @@ def getdocl(objtype, obj):
_obj___module__=getattr(obj, '__module__', None),
)
inst.parent = object # dummy
- inst.object = obj
inst.doc_as_attr = False
inst.format_signature() # handle docstring signatures!
ds = inst.get_doc()
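Taken together, patches 230 and 231 are one refactor: documenter state that
previously lived in loose instance attributes (``documenter.object``,
``documenter.modname``, ``documenter.objpath``) now lives on an immutable
``documenter.props`` object, so every read goes through a single source of
truth. A minimal sketch of the shape this implies, with ``_ItemProperties``
as a hypothetical stand-in for the ``_ClassDefProperties`` and
``_FunctionDefProperties`` classes seen above (the exact field set is an
assumption inferred from the diffs):

    from __future__ import annotations

    from dataclasses import dataclass
    from typing import Any


    @dataclass(frozen=True)
    class _ItemProperties:
        # Illustrative subset of the fields the diffs read from ``props``.
        obj_type: str
        module_name: str
        parts: tuple[str, ...]  # object path within the module
        _obj: Any = None  # the imported object being documented

        @property
        def full_name(self) -> str:
            return '.'.join((self.module_name, *self.parts))


    class DocumenterSketch:
        def __init__(self, props: _ItemProperties) -> None:
            self.props = props

        def get_sourcename(self) -> str:
            # Mirrors the get_sourcename() hunk: prefer the object's own
            # __module__/__qualname__ so inherited docstrings resolve to the
            # defining class, falling back to props.full_name.
            obj = self.props._obj
            module = getattr(obj, '__module__', None)
            qualname = getattr(obj, '__qualname__', None)
            if module and qualname:
                return f'{module}.{qualname}'
            return self.props.full_name

Freezing the properties object is the point of the exercise: once
``parse_name()`` and ``import_object()`` have run, nothing can silently
overwrite the documenter's identity, which is why the singledispatch hunks
above can drop the ``documenter.object = ...`` assignments.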
From 5b3d92e7c69330bc598f025fc275d07d6294e883 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Mon, 4 Aug 2025 09:55:36 +0200
Subject: [PATCH 232/466] LaTeX: support for using the fontawesome7 LaTeX
package (#13805)
---
doc/latex.rst | 27 +++++++--------
sphinx/texinputs/sphinx.sty | 65 +++++++++++++++++++++++++------------
2 files changed, 58 insertions(+), 34 deletions(-)
diff --git a/doc/latex.rst b/doc/latex.rst
index bfc4de73938..30aa8c23f92 100644
--- a/doc/latex.rst
+++ b/doc/latex.rst
@@ -1007,13 +1007,14 @@ The color used in the above example is available from having passed the
``iconpackage``
The name of the LaTeX package used for rendering icons in the admonition
- titles. Its default is set dynamically to either ``fontawesome6``,
- ``fontawesome5``, or ``fontawesome``, or ``none``, depending on whether
+ titles. Its default is set dynamically to either ``fontawesome7``,
+ ``fontawesome6``,
+ ``fontawesome5``, ``fontawesome``, or ``none``, in decreasing order of
+ priority and depending on whether
packages with those names exist in the used LaTeX installation. The LaTeX
- code will use ``\faIcon`` command if with ``fontawesome6/fontawesome5``,
- and ``\faicon`` if with ``fontawesome``. In the latter case the icon used
+ code will use ``\faIcon`` command if with ``fontawesome5-7`` case the icon used
both for :dudir:`caution` and :dudir:`danger` will default to "bolt" not
- "radiation", which is only found in ``fontawesome6`` and ``fontawesome5``.
+ "radiation", which is only found in ``fontawesome5-7``.
If no "Font Awesome" related package is found (or if the option is set
forcefully to ``none``) the icons are silently dropped. User can set this
option to some specific package and must configure the
@@ -1413,16 +1414,16 @@ The next keys, for admonitions, :dudir:`topic`, contents_, and
- ``div.<type>_title-icon``: the LaTeX code responsible for producing the
icon. If you want to modify the icons used by Sphinx, employ in these keys
- the ``\faIcon`` LaTeX command (assuming either ``fontawesome6`` or
- ``fontawesome5`` LaTeX package is available on your system). For example
- the default for :dudir:`note` is
- ``div.note_title-icon=\faIcon{info-circle}`` with ``fontawesome5`` and
- ``div.note_title-icon=\faIcon{circle-info}`` with ``fontawesome6`` (which is
+ the ``\faIcon`` LaTeX command if one of ``fontawesome5``, ``6`` or ``7`` is
+ on your LaTeX installation. For example the default for :dudir:`note` is
+ ``div.note_title-icon=\faIcon{info-circle}`` with ``fontawesome5`` but
+ ``div.note_title-icon=\faIcon{circle-info}`` with ``fontawesome7`` (which is
used automatically if available). If your system only provides the
``fontawesome`` package (automatically detected) use its command ``\faicon``
- rather in order to modify the choice of icons. The ``iconpackage`` key can
- be used to use some other package providing icons, use then the commands
-   suitable to that package as values of the ``div.<type>_title-icon`` keys.
+ (not ``\faIcon``) in order to modify the choice of icons. The
+ ``iconpackage`` key can give the name of some other package providing icons.
+   You must then configure the ``div.<type>_title-icon`` keys to use the LaTeX
+ commands from that custom icon package.
.. note::
diff --git a/sphinx/texinputs/sphinx.sty b/sphinx/texinputs/sphinx.sty
index 7e06eff7de8..ded8666be04 100644
--- a/sphinx/texinputs/sphinx.sty
+++ b/sphinx/texinputs/sphinx.sty
@@ -9,7 +9,7 @@
% by the Sphinx LaTeX writer.
\NeedsTeXFormat{LaTeX2e}[1995/12/01]
-\ProvidesPackage{sphinx}[2025/04/24 v8.3.0 Sphinx LaTeX package (sphinx-doc)]
+\ProvidesPackage{sphinx}[2025/08/03 v8.3.0 Sphinx LaTeX package (sphinx-doc)]
% provides \ltx@ifundefined
% (many packages load ltxcmds: graphicx does for pdftex and lualatex but
@@ -870,32 +870,43 @@
% 7.4.0 Support for icons in admonition titles
%
-% We let Sphinx use in order of priority: some user-specifid package,
-% fontawesome6 (since 8.3.0), fontawesome5, fontawesome, or nothing (and then
-% not draw any icon). To allow a user-specified package, an extra interface
+% We let Sphinx use, listed in order of priority:
+% - a user-specified package via iconpackage key,
+% - fontawesome7 (supported since 8.3.0),
+% - fontawesome6 (supported since 8.3.0),
+% - fontawesome5,
+% - fontawesome,
+% - or nothing at all, and then icons are not drawn.
+% To allow a user-specified package, an extra interface
% is provided for specifying the icon-drawing LaTeX code.
%
-% We can't load fontawesome6 (or 5) unconditionally even if available, as it
+% We can't load fontawesome7 (or 6 or 5) unconditionally even if available, as it
% is incompatible with fontawesome package which may be preferred by user. We
% thus must delay loading the package to at begin document, and for now can
-% only set the default value of iconpackage key..
-\IfFileExists{fontawesome6.sty}{%
- \DeclareStringOption[fontawesome6]{iconpackage}%
+% only set the default value of iconpackage key.
+\IfFileExists{fontawesome7.sty}{%
+ \DeclareStringOption[fontawesome7]{iconpackage}%
}%
{%
- \IfFileExists{fontawesome5.sty}{%
- \DeclareStringOption[fontawesome5]{iconpackage}%
- }%
- {%
- \IfFileExists{fontawesome.sty}
- {\DeclareStringOption[fontawesome]{iconpackage}}
- {\DeclareStringOption[none]{iconpackage}}%
- }%
+ \IfFileExists{fontawesome6.sty}{%
+ \DeclareStringOption[fontawesome6]{iconpackage}%
+ }%
+ {%
+ \IfFileExists{fontawesome5.sty}{%
+ \DeclareStringOption[fontawesome5]{iconpackage}%
+ }%
+ {%
+ \IfFileExists{fontawesome.sty}
+ {\DeclareStringOption[fontawesome]{iconpackage}}
+ {\DeclareStringOption[none]{iconpackage}}%
+ }%
+ }%
}%
-% Unfortunately icon names differ between fontawesome, fontawesome5, and
-% fontawesome6 LaTeX packages. At 8.3.0 we refactor the icon support code
-% into something easier to maintain in future in case of a fontawesome7,
-% etc...
+% Unfortunately icon macro names are not the same across fontawesome,
+% fontawesome5, fontawesome6 and fontawesome7 LaTeX packages.
+% At 8.3.0 we refactor the
+% icon support code into something easier to maintain in future in case of a
+% fontawesome8, etc...
%
% TODO: Handle spaces possibly caused by bad user usage of iconpackage key?
% This would need to check how LaTeX handles spaces in package name
@@ -918,7 +929,12 @@
}
% Icon defaults depending on package used.
-% Attention! no extra spaces for alignment when using \@namedef!
+% Attention! no extra spaces (to align here) when using \@namedef!
+
+% MEMO: the [regular] style mark-up for {lightbulb} was inherited
+% from fontawesome5 and found experimentally to work also with
+% fontawesome6 (and fontawesome7) despite their respective PDF docs
+% not mentioning it...
\@namedef{spx@fontawesome6@note}{\faIcon{circle-info}}
\@namedef{spx@fontawesome6@hint}{\faIcon[regular]{lightbulb}}
\@namedef{spx@fontawesome6@tip}{\faIcon[regular]{lightbulb}}
@@ -931,6 +947,13 @@
\@namedef{spx@fontawesome6@danger}{\faIcon{radiation}}
\@namedef{spx@fontawesome6@error}{\faIcon{circle-xmark}}
+% Turns out that fontawesome7 commands for our target icons are
+% same as with fontawesome6. So make a loop to spare a few lines here.
+\@for\x:=note,hint,tip,seealso,todo,important,caution,warning,attention,%
+ danger,error%
+\do{\expandafter\let\csname spx@fontawesome7@\x\expandafter\endcsname
+ \csname spx@fontawesome6@\x\endcsname}
+
\@namedef{spx@fontawesome5@note}{\faIcon{info-circle}}
\@namedef{spx@fontawesome5@hint}{\faIcon[regular]{lightbulb}}
\@namedef{spx@fontawesome5@tip}{\faIcon[regular]{lightbulb}}
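The detection order above is a plain first-match-wins cascade over the TeX
installation. The same logic, sketched in Python for readers who do not speak
LaTeX conditionals (``kpsewhich`` is the standard TeX file-lookup tool; the
function itself is illustrative, not Sphinx code):

    import shutil
    import subprocess


    def default_iconpackage() -> str:
        # First match wins, mirroring the nested \IfFileExists tests:
        # fontawesome7 > fontawesome6 > fontawesome5 > fontawesome > none.
        if shutil.which('kpsewhich') is None:
            return 'none'
        for pkg in ('fontawesome7', 'fontawesome6', 'fontawesome5', 'fontawesome'):
            result = subprocess.run(
                ['kpsewhich', f'{pkg}.sty'],
                capture_output=True,
                text=True,
                check=False,
            )
            if result.stdout.strip():
                return pkg
        return 'none'  # no icon package found: icons are silently dropped

The cascade only picks a default; the actual package is loaded at begin
document, since fontawesome5/6/7 cannot coexist with the plain fontawesome
package a user may prefer.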
From 5f88652a5ec7f9d96455c06b59bd63416631d012 Mon Sep 17 00:00:00 2001
From: Harmen Stoppels
Date: Mon, 4 Aug 2025 15:25:47 +0200
Subject: [PATCH 233/466] autodoc: do not add :no-index-entry: twice (#13807)
Co-authored-by: Adam Turner <9087854+aa-turner@users.noreply.github.com>
---
sphinx/ext/autodoc/_documenters.py | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/sphinx/ext/autodoc/_documenters.py b/sphinx/ext/autodoc/_documenters.py
index 16e25b64424..1ab9f35a67a 100644
--- a/sphinx/ext/autodoc/_documenters.py
+++ b/sphinx/ext/autodoc/_documenters.py
@@ -1050,7 +1050,7 @@ def _module_all(self) -> Sequence[str] | None:
return self.__all__
def add_directive_header(self, sig: str) -> None:
- Documenter.add_directive_header(self, sig)
+ super().add_directive_header(sig)
sourcename = self.get_sourcename()
@@ -1061,8 +1061,6 @@ def add_directive_header(self, sig: str) -> None:
self.add_line(' :platform: ' + self.options.platform, sourcename)
if self.options.deprecated:
self.add_line(' :deprecated:', sourcename)
- if self.options.no_index_entry:
- self.add_line(' :no-index-entry:', sourcename)
def get_module_members(self) -> dict[str, ObjectMember]:
"""Get members of target module."""
From f92316a2f56ce66cc1b74a2939a5db723a1a2b71 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Mon, 4 Aug 2025 19:52:09 +0200
Subject: [PATCH 234/466] LaTeX: fix problems in our docs (in part inherited
from #13805) (#13809)
---
doc/latex.rst | 35 +++++++++++++++++++----------------
1 file changed, 19 insertions(+), 16 deletions(-)
diff --git a/doc/latex.rst b/doc/latex.rst
index 30aa8c23f92..edb2f0c18cb 100644
--- a/doc/latex.rst
+++ b/doc/latex.rst
@@ -1012,15 +1012,14 @@ The color used in the above example is available from having passed the
``fontawesome5``, ``fontawesome``, or ``none``, in decreasing order of
priority and depending on whether
packages with those names exist in the used LaTeX installation. The LaTeX
- code will use ``\faIcon`` command if with ``fontawesome5-7`` case the icon used
- both for :dudir:`caution` and :dudir:`danger` will default to "bolt" not
- "radiation", which is only found in ``fontawesome5-7``.
+ code for each admonition icon will use ``\faIcon`` command if with
+ ``fontawesome{5,6,7}`` and
+ ``\faicon`` if with ``fontawesome``.
If no "Font Awesome" related package is found (or if the option is set
forcefully to ``none``) the icons are silently dropped. User can set this
- option to some specific package and must configure the
+   option to some specific package and must then configure the
    ``div.note_title-icon`` and similar keys to use that LaTeX package
- interface (see the :ref:`additionalcss` section for these extra
- ``'sphinxsetup'`` keys).
+ interface (see the :ref:`additionalcss` section about this).
.. versionadded:: 7.4.0
@@ -1413,17 +1412,21 @@ The next keys, for admonitions, :dudir:`topic`, contents_, and
(it applies only to the icon, not to the title of the admonition).
-   ``div.<type>_title-icon``: the LaTeX code responsible for producing the
- icon. If you want to modify the icons used by Sphinx, employ in these keys
+   icon for the given ``<type>``.
+ For example the default for :dudir:`note` is
+ ``div.note_title-icon=\faIcon{info-circle}`` with ``fontawesome5``, but
+ ``div.note_title-icon=\faIcon{circle-info}`` with ``fontawesome6``
+ and ``fontawesome7``.
+ If you want to modify the icons used by Sphinx, employ in these keys
the ``\faIcon`` LaTeX command if one of ``fontawesome5``, ``6`` or ``7`` is
- on your LaTeX installation. For example the default for :dudir:`note` is
- ``div.note_title-icon=\faIcon{info-circle}`` with ``fontawesome5`` but
- ``div.note_title-icon=\faIcon{circle-info}`` with ``fontawesome7`` (which is
- used automatically if available). If your system only provides the
- ``fontawesome`` package (automatically detected) use its command ``\faicon``
- (not ``\faIcon``) in order to modify the choice of icons. The
- ``iconpackage`` key can give the name of some other package providing icons.
-   You must then configure the ``div.<type>_title-icon`` keys to use the LaTeX
- commands from that custom icon package.
+ on your LaTeX installation.
+ If your system only provides the
+ ``fontawesome`` package use its command ``\faicon`` (not ``\faIcon``)
+ in order to modify the choice of icons. The ``iconpackage`` key of
+ ``'sphinxsetup'`` can be used to force usage of one among
+ ``fontawesome{,5,6,7}`` or be the name of some other icon-providing package.
+   In that latter case you must configure the ``div.<type>_title-icon`` keys
+ to use the LaTeX commands appropriate to that custom icon package.
.. note::
From fa172eab80eb1238ac64390eb15c6273acfba5c7 Mon Sep 17 00:00:00 2001
From: Ian Hunt-Isaak
Date: Mon, 4 Aug 2025 16:42:03 -0400
Subject: [PATCH 235/466] Add ``py:class`` to ``py:data`` fallback for type
alias cross-references (#10785) (#13808)
---
AUTHORS.rst | 1 +
CHANGES.rst | 3 +
sphinx/domains/python/__init__.py | 8 ++
.../alias_module.py | 28 +++++++
.../test-ext-autodoc-type-alias-xref/conf.py | 11 +++
.../index.rst | 7 ++
.../test_ext_autodoc_type_alias_nitpicky.py | 80 +++++++++++++++++++
7 files changed, 138 insertions(+)
create mode 100644 tests/roots/test-ext-autodoc-type-alias-xref/alias_module.py
create mode 100644 tests/roots/test-ext-autodoc-type-alias-xref/conf.py
create mode 100644 tests/roots/test-ext-autodoc-type-alias-xref/index.rst
create mode 100644 tests/test_extensions/test_ext_autodoc_type_alias_nitpicky.py
diff --git a/AUTHORS.rst b/AUTHORS.rst
index 11e0206f7ca..5bcd74c943b 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -63,6 +63,7 @@ Contributors
* Hong Xu -- svg support in imgmath extension and various bug fixes
* Horst Gutmann -- internationalization support
* Hugo van Kemenade -- support FORCE_COLOR and NO_COLOR
+* Ian Hunt-Isaak -- typealias reference improvement
* Ian Lee -- quickstart improvements
* Jacob Mason -- websupport library (GSOC project)
* James Addison -- linkcheck and HTML search improvements
diff --git a/CHANGES.rst b/CHANGES.rst
index b58b6722b3a..0ad35448a14 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -106,6 +106,9 @@ Bugs fixed
to improve `semantic HTML structure
`__.
Patch by Mark Ostroth.
+* #10785: Autodoc: Allow type aliases defined in the project to be properly
+ cross-referenced when used as type annotations. This makes it possible
+ for objects documented as ``:py:data:`` to be hyperlinked in function signatures.
Testing
-------
diff --git a/sphinx/domains/python/__init__.py b/sphinx/domains/python/__init__.py
index a0a0571f069..f8402b4be79 100644
--- a/sphinx/domains/python/__init__.py
+++ b/sphinx/domains/python/__init__.py
@@ -952,6 +952,14 @@ def resolve_xref(
searchmode = 1 if node.hasattr('refspecific') else 0
matches = self.find_obj(env, modname, clsname, target, type, searchmode)
+ if not matches and type == 'class':
+ # fallback to data/attr (for type aliases)
+ # type aliases are documented as data/attr but referenced as class
+ matches = self.find_obj(env, modname, clsname, target, 'data', searchmode)
+ if not matches:
+ matches = self.find_obj(
+ env, modname, clsname, target, 'attr', searchmode
+ )
if not matches and type == 'attr':
# fallback to meth (for property; Sphinx 2.4.x)
# this ensures that `:attr:` role continues to refer to the old property entry
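Condensed, the resolution order this hunk establishes for an unresolved
``py:class`` reference is: class, then data, then attr. A hedged sketch of
the chain, with ``find_obj`` standing in for ``PythonDomain.find_obj``:

    def resolve_with_alias_fallback(
        find_obj, env, modname, clsname, target, typ, searchmode
    ):
        # Type aliases are *documented* as py:data or py:attr, but type
        # annotations cross-reference them as py:class, so retry with
        # those object types before giving up.
        matches = find_obj(env, modname, clsname, target, typ, searchmode)
        if not matches and typ == 'class':
            for fallback in ('data', 'attr'):
                matches = find_obj(env, modname, clsname, target, fallback, searchmode)
                if matches:
                    break
        return matches

Note how the test root below drives this: ``autodoc_type_aliases`` maps each
alias to its own name, keeping the annotation un-expanded so the rendered
signature contains something the fallback can actually resolve.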
diff --git a/tests/roots/test-ext-autodoc-type-alias-xref/alias_module.py b/tests/roots/test-ext-autodoc-type-alias-xref/alias_module.py
new file mode 100644
index 00000000000..b169e75fa00
--- /dev/null
+++ b/tests/roots/test-ext-autodoc-type-alias-xref/alias_module.py
@@ -0,0 +1,28 @@
+"""Module to test type alias cross-reference resolution."""
+
+from __future__ import annotations
+
+import pathlib
+
+#: Any type of path
+pathlike = str | pathlib.Path
+
+#: A generic type alias for error handlers
+Handler = type[Exception]
+
+
+def read_file(path: pathlike) -> bytes:
+ """Read a file and return its contents.
+
+ Tests Union type alias cross-reference resolution.
+ """
+ with open(path, 'rb') as f:
+ return f.read()
+
+
+def process_error(handler: Handler) -> str:
+ """Process an error with a custom handler type.
+
+ Tests generic type alias cross-reference resolution.
+ """
+ return f'Handled by {handler.__name__}'
diff --git a/tests/roots/test-ext-autodoc-type-alias-xref/conf.py b/tests/roots/test-ext-autodoc-type-alias-xref/conf.py
new file mode 100644
index 00000000000..ed3f9cbb41e
--- /dev/null
+++ b/tests/roots/test-ext-autodoc-type-alias-xref/conf.py
@@ -0,0 +1,11 @@
+import sys
+from pathlib import Path
+
+sys.path.insert(0, str(Path.cwd().resolve()))
+
+extensions = ['sphinx.ext.autodoc']
+nitpicky = True
+autodoc_type_aliases = {
+ 'pathlike': 'pathlike',
+ 'Handler': 'Handler',
+}
diff --git a/tests/roots/test-ext-autodoc-type-alias-xref/index.rst b/tests/roots/test-ext-autodoc-type-alias-xref/index.rst
new file mode 100644
index 00000000000..77f8ef9c51c
--- /dev/null
+++ b/tests/roots/test-ext-autodoc-type-alias-xref/index.rst
@@ -0,0 +1,7 @@
+Type Alias Cross-Reference Test
+===============================
+
+This tests that type aliases in function signatures can be cross-referenced properly.
+
+.. automodule:: alias_module
+ :members:
\ No newline at end of file
diff --git a/tests/test_extensions/test_ext_autodoc_type_alias_nitpicky.py b/tests/test_extensions/test_ext_autodoc_type_alias_nitpicky.py
new file mode 100644
index 00000000000..a23ae489246
--- /dev/null
+++ b/tests/test_extensions/test_ext_autodoc_type_alias_nitpicky.py
@@ -0,0 +1,80 @@
+"""Test type alias cross-reference resolution in nitpicky mode."""
+
+from __future__ import annotations
+
+import re
+from typing import TYPE_CHECKING
+
+import pytest
+
+if TYPE_CHECKING:
+ from sphinx.testing.util import SphinxTestApp
+
+
+@pytest.mark.sphinx('html', testroot='ext-autodoc-type-alias-xref')
+def test_type_alias_xref_resolution(app: SphinxTestApp) -> None:
+ """Test that type aliases in function signatures can be cross-referenced.
+
+ This tests the fix for issue https://github.com/sphinx-doc/sphinx/issues/10785
+ where type aliases documented as :py:data: but referenced as :py:class: in
+ function signatures would not resolve properly.
+
+ Tests both a Union type alias and a generic type alias to ensure our
+ domain fallback mechanism works for various type alias patterns.
+ """
+ app.build()
+
+ # In nitpicky mode, check that no warnings were generated for type alias cross-references
+ warnings_text = app.warning.getvalue()
+ assert 'py:class reference target not found: pathlike' not in warnings_text, (
+ f'Type alias cross-reference failed in nitpicky mode. Warnings: {warnings_text}'
+ )
+ assert 'py:class reference target not found: Handler' not in warnings_text, (
+ f'Type alias cross-reference failed for Handler. Warnings: {warnings_text}'
+ )
+
+ # Core functionality test: Verify type alias links are generated in function signatures
+ html_content = (app.outdir / 'index.html').read_text(encoding='utf8')
+
+ # Both type aliases should be documented and have anchors
+ assert 'id="alias_module.pathlike"' in html_content, (
+ 'pathlike type alias definition anchor not found in HTML'
+ )
+ assert 'id="alias_module.Handler"' in html_content, (
+ 'Handler type alias definition anchor not found in HTML'
+ )
+
+ # The critical test: type aliases in function signatures should be clickable links
+ # This tests the original issue - function signature type annotations should resolve
+    assert (
+        '<a class="reference internal" href="#alias_module.pathlike"' in html_content
+    ), 'pathlike type alias link not found in the page'
+    read_file_match = re.search(
+        r'<dt[^>]*id="alias_module.read_file".*?</dt>', html_content, re.DOTALL
+    )
+    assert read_file_match is not None, 'Could not find read_file function signature'
+    read_file_signature = read_file_match.group(0)
+    assert (
+        'href="#alias_module.pathlike"' in read_file_signature
+    ), 'pathlike is not a clickable link in the read_file signature'
+    process_error_match = re.search(
+        r'<dt[^>]*id="alias_module.process_error".*?</dt>', html_content, re.DOTALL
+    )
+    assert process_error_match is not None, (
+        'Could not find process_error function signature'
+    )
+    process_error_signature = process_error_match.group(0)
+    assert (
+        'href="#alias_module.Handler"' in process_error_signature
+    ), 'Handler is not a clickable link in the process_error signature'
Date: Tue, 5 Aug 2025 15:41:48 +0200
Subject: [PATCH 236/466] Add filename caption to tutorial code block (#13814)
---
doc/tutorial/first-steps.rst | 1 +
1 file changed, 1 insertion(+)
diff --git a/doc/tutorial/first-steps.rst b/doc/tutorial/first-steps.rst
index fd5c631353e..dccf1838de3 100644
--- a/doc/tutorial/first-steps.rst
+++ b/doc/tutorial/first-steps.rst
@@ -73,6 +73,7 @@ shown right after the corresponding link, in parentheses. You can change that
behavior by adding the following code at the end of your ``conf.py``:
.. code-block:: python
+ :caption: docs/source/conf.py
# EPUB options
epub_show_urls = 'footnote'
From 873ececf9b453d779baabadcefae122ab7b3e3c4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Tue, 5 Aug 2025 22:13:58 +0200
Subject: [PATCH 237/466] Update CHANGES.rst for PR #13805 (#13815)
---
CHANGES.rst | 2 ++
1 file changed, 2 insertions(+)
diff --git a/CHANGES.rst b/CHANGES.rst
index 0ad35448a14..2161ec1c5f7 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -62,6 +62,8 @@ Features added
The location of the cache directory must not be relied upon externally,
as it may change without notice or warning in future releases.
Patch by Adam Turner.
+* #13805: LaTeX: add support for ``fontawesome7`` package.
+ Patch by Jean-François B.
Bugs fixed
----------
From 90fd2a2648a77f0f9a4a8737fb38c87f75c69e73 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Wed, 6 Aug 2025 09:57:52 +0200
Subject: [PATCH 238/466] LaTeX: let long ``.. confval::`` argument wrap at
spaces in PDF (#13817)
---
CHANGES.rst | 3 +++
sphinx/texinputs/sphinxlatexliterals.sty | 12 +++++++++++-
2 files changed, 14 insertions(+), 1 deletion(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 2161ec1c5f7..8580cb9cf35 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -108,6 +108,9 @@ Bugs fixed
to improve `semantic HTML structure
`__.
Patch by Mark Ostroth.
+* #13812 (discussion): LaTeX: long :rst:dir:`confval` value does not wrap at
+ spaces in PDF.
+ Patch by Jean-François B.
* #10785: Autodoc: Allow type aliases defined in the project to be properly
cross-referenced when used as type annotations. This makes it possible
for objects documented as ``:py:data:`` to be hyperlinked in function signatures.
diff --git a/sphinx/texinputs/sphinxlatexliterals.sty b/sphinx/texinputs/sphinxlatexliterals.sty
index 11991d9c3e8..8b0036b464b 100644
--- a/sphinx/texinputs/sphinxlatexliterals.sty
+++ b/sphinx/texinputs/sphinxlatexliterals.sty
@@ -1,7 +1,7 @@
%% LITERAL BLOCKS
%
% change this info string if making any custom modification
-\ProvidesPackage{sphinxlatexliterals}[2024/07/01 v7.4.0 code-blocks and parsed literals]
+\ProvidesPackage{sphinxlatexliterals}[2025/08/06 v8.3.0 code-blocks and parsed literals]
% Provides support for this output mark-up from Sphinx latex writer:
%
@@ -241,6 +241,8 @@
% _, }, ^, &, >, -, ~, and \: stay at end of broken line.
% Use of \textquotesingle for straight quote.
% FIXME: convert this to package options ?
+% MEMO: "beforelist" and "afterlist" hold no relation with lists,
+% they are just "\do lists" in the inherited TeX sense.
\newcommand*\sphinxbreaksbeforelist {%
\do\PYGZob\{\do\PYGZlt\<\do\PYGZsh\#\do\PYGZpc\%% {, <, #, %,
\do\PYGZdl\$\do\PYGZdq\"% $, "
@@ -278,6 +280,8 @@
\catcode`##1\active}%
\sphinxbreaksafteractivelist
\lccode`\~`\~
+ % visit_desc_name will insert non TeX-escaped ~ in the source
+ \let~\spx@verbatim@space
}
% If the linebreak is at a space, the latter will be displayed as visible
@@ -962,7 +966,13 @@
\let\sphinxhyphen\sphinxhyphenininlineliteral
\ifspx@opt@inlineliteralwraps
% break at . , ; ? ! /
+      % and also at ~ which will be handled the way spaces are in verbatim
\sphinxbreaksviaactive
+ % but for this we need to set this box which is empty by default:
+ % MEMO: it looks suboptimal to redo this each time but this is
+ % to obey a \sphinxsetup via raw LaTeX to set verbatimvisiblespace,
+ % a possibility which however will be used by 0% of Sphinx users...
+ \sbox\sphinxvisiblespacebox {\spx@opt@verbatimvisiblespace}%
% break also at \
\setbox8=\hbox{\textbackslash}%
\def\sphinx@textbackslash{\copy8}%
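Pieced together from the comments above, the mechanism has two halves: the
LaTeX writer emits a bare ``~`` wherever the long argument has a space (the
``visit_desc_name`` change itself is not part of this hunk), and the style
file makes ``~`` active, bound to ``\spx@verbatim@space``, so the line may
break there. A rough Python illustration of the writer half, under those
assumptions (``emit_desc_name`` is hypothetical, not the actual writer code):

    def emit_desc_name(name: str) -> str:
        # Escape TeX specials first, then mark each space with a bare ``~``;
        # the active ``~`` set up above renders as a breakable space.
        escaped = name.replace('\\', r'\textbackslash{}')
        escaped = escaped.replace('~', r'\textasciitilde{}')
        return escaped.replace(' ', '~')


    assert emit_desc_name('my very long confval') == 'my~very~long~confval'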
From bda34f914dcc8bab2638598cddf56424d9b73881 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Wed, 6 Aug 2025 11:35:58 +0200
Subject: [PATCH 239/466] LaTeX: revisit and trim visit_target() (#13616)
---
sphinx/writers/latex.py | 25 ++-----------------------
tests/roots/test-latex-labels/index.rst | 2 +-
tests/test_builders/test_build_latex.py | 4 +++-
3 files changed, 6 insertions(+), 25 deletions(-)
diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py
index 005d0ef5e06..263399ea15b 100644
--- a/sphinx/writers/latex.py
+++ b/sphinx/writers/latex.py
@@ -1864,36 +1864,15 @@ def add_target(id: str) -> None:
and node['refid'] == prev_node['refid']
):
# a target for a hyperlink reference having alias
- pass
+ return
else:
add_target(node['refid'])
- # Temporary fix for https://github.com/sphinx-doc/sphinx/issues/11093
- # TODO: investigate if a more elegant solution exists
- # (see comments of https://github.com/sphinx-doc/sphinx/issues/11093)
- if node.get('ismod', False):
- # Detect if the previous nodes are label targets. If so, remove
- # the refid thereof from node['ids'] to avoid duplicated ids.
- prev = get_prev_node(node)
- if self._has_dup_label(prev, node):
- ids = node['ids'][:] # copy to avoid side-effects
- while self._has_dup_label(prev, node):
- ids.remove(prev['refid']) # type: ignore[index]
- prev = get_prev_node(prev) # type: ignore[arg-type]
- else:
- ids = iter(node['ids']) # read-only iterator
- else:
- ids = iter(node['ids']) # read-only iterator
-
- for id in ids:
+ for id in node['ids']:
add_target(id)
def depart_target(self, node: Element) -> None:
pass
- @staticmethod
- def _has_dup_label(sib: Node | None, node: Element) -> bool:
- return isinstance(sib, nodes.target) and sib.get('refid') in node['ids']
-
def visit_attribution(self, node: Element) -> None:
self.body.append(CR + r'\begin{flushright}' + CR)
self.body.append('---')
diff --git a/tests/roots/test-latex-labels/index.rst b/tests/roots/test-latex-labels/index.rst
index 0021d5d4215..4abf72e2e03 100644
--- a/tests/roots/test-latex-labels/index.rst
+++ b/tests/roots/test-latex-labels/index.rst
@@ -69,6 +69,6 @@ subsubsection
otherdoc
-* Embedded standalone hyperlink reference: `subsection <section1_>`_.
+* Named hyperlink reference with embedded alias reference: `subsection <section1_>`_.
.. See: https://github.com/sphinx-doc/sphinx/issues/5948
diff --git a/tests/test_builders/test_build_latex.py b/tests/test_builders/test_build_latex.py
index 007cddef3ef..2678c580492 100644
--- a/tests/test_builders/test_build_latex.py
+++ b/tests/test_builders/test_build_latex.py
@@ -1954,9 +1954,11 @@ def test_latex_labels(app: SphinxTestApp) -> None:
r'\label{\detokenize{otherdoc::doc}}'
) in result
- # Embedded standalone hyperlink reference
+ # Named hyperlink reference with embedded alias reference
# See: https://github.com/sphinx-doc/sphinx/issues/5948
assert result.count(r'\label{\detokenize{index:section1}}') == 1
+ # https://github.com/sphinx-doc/sphinx/issues/13609
+ assert r'\phantomsection\label{\detokenize{index:id' not in result
@pytest.mark.sphinx('latex', testroot='latex-figure-in-admonition')
From 6c9e3209c4ebc81c7c09f6a30eeaa4651f32f461 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?=
<2589111+jfbu@users.noreply.github.com>
Date: Wed, 6 Aug 2025 12:10:59 +0200
Subject: [PATCH 240/466] LaTeX: support CSS3 length units (#13657)
---
.github/workflows/main.yml | 2 +
CHANGES.rst | 2 +
sphinx/templates/latex/latex.tex.jinja | 1 +
sphinx/texinputs/sphinx.sty | 22 ++++++++-
sphinx/writers/latex.py | 10 ++++
.../test-latex-images-css3-lengths/conf.py | 0
.../test-latex-images-css3-lengths/img.png | Bin 0 -> 66247 bytes
.../test-latex-images-css3-lengths/index.rst | 25 ++++++++++
tests/test_builders/test_build_latex.py | 46 +++++++++++++-----
9 files changed, 96 insertions(+), 12 deletions(-)
create mode 100644 tests/roots/test-latex-images-css3-lengths/conf.py
create mode 100644 tests/roots/test-latex-images-css3-lengths/img.png
create mode 100644 tests/roots/test-latex-images-css3-lengths/index.rst
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 834c18809d6..eb8bb7c8400 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -312,6 +312,8 @@ jobs:
enable-cache: false
- name: Install dependencies
run: uv pip install . --group test
+ - name: Install Docutils' HEAD
+ run: uv pip install "docutils @ git+https://repo.or.cz/docutils.git#subdirectory=docutils"
- name: Test with pytest
run: python -m pytest -vv --durations 25
env:
diff --git a/CHANGES.rst b/CHANGES.rst
index 8580cb9cf35..c71edc9ea0b 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -58,6 +58,8 @@ Features added
Patch by Adam Turner.
* #13647: LaTeX: allow more cases of table nesting.
Patch by Jean-François B.
+* #13657: LaTeX: support CSS3 length units.
+ Patch by Jean-François B.
* #13684: intersphinx: Add a file-based cache for remote inventories.
The location of the cache directory must not be relied upon externally,
as it may change without notice or warning in future releases.
diff --git a/sphinx/templates/latex/latex.tex.jinja b/sphinx/templates/latex/latex.tex.jinja
index deb030504db..4ba2c46a793 100644
--- a/sphinx/templates/latex/latex.tex.jinja
+++ b/sphinx/templates/latex/latex.tex.jinja
@@ -16,6 +16,7 @@
\ifdefined\pdfimageresolution
\pdfimageresolution= \numexpr \dimexpr1in\relax/\sphinxpxdimen\relax
\fi
+\newdimen\sphinxremdimen\sphinxremdimen = <%= pointsize%>
%% let collapsible pdf bookmarks panel have high depth per default
\PassOptionsToPackage{bookmarksdepth=5}{hyperref}
<% if use_xindy -%>
diff --git a/sphinx/texinputs/sphinx.sty b/sphinx/texinputs/sphinx.sty
index ded8666be04..5dcf9810907 100644
--- a/sphinx/texinputs/sphinx.sty
+++ b/sphinx/texinputs/sphinx.sty
@@ -1241,5 +1241,25 @@
% FIXME: this line should be dropped, as "9" is default anyhow.
\ifdefined\pdfcompresslevel\pdfcompresslevel = 9 \fi
-
+%%% SUPPORT FOR CSS3 EXTRA LENGTH UNITS
+% cf rstdim_to_latexdim in latex.py
+%
+\def\sphinxchdimen{\dimexpr\fontcharwd\font`0\relax}
+% TODO: decide if we want rather \textwidth/\textheight.
+\newdimen\sphinxvwdimen
+ \sphinxvwdimen=\dimexpr0.01\paperwidth\relax
+\newdimen\sphinxvhdimen
+ \sphinxvhdimen=\dimexpr0.01\paperheight\relax
+\newdimen\sphinxvmindimen
+ \sphinxvmindimen=\dimexpr
+ \ifdim\paperwidth<\paperheight\sphinxvwdimen\else\sphinxvhdimen\fi
+ \relax
+\newdimen\sphinxvmaxdimen
+ \sphinxvmaxdimen=\dimexpr
+ \ifdim\paperwidth<\paperheight\sphinxvhdimen\else\sphinxvwdimen\fi
+ \relax
+\newdimen\sphinxQdimen
+ \sphinxQdimen=0.25mm
+% MEMO: \sphinxremdimen is defined in the template as it needs
+% the config variable pointsize.
\endinput
diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py
index 263399ea15b..553245e412a 100644
--- a/sphinx/writers/latex.py
+++ b/sphinx/writers/latex.py
@@ -297,6 +297,9 @@ def escape_abbr(text: str) -> str:
def rstdim_to_latexdim(width_str: str, scale: int = 100) -> str:
"""Convert `width_str` with rst length to LaTeX length."""
+ # MEMO: the percent unit is interpreted here as a percentage
+ # of \linewidth. Let's keep in mind though that \linewidth
+ # is dynamic in LaTeX, e.g. it is smaller in lists.
match = re.match(r'^(\d*\.?\d*)\s*(\S*)$', width_str)
if not match:
raise ValueError
@@ -310,6 +313,8 @@ def rstdim_to_latexdim(width_str: str, scale: int = 100) -> str:
res = '%sbp' % amount # convert to 'bp'
elif unit == '%':
res = r'%.3f\linewidth' % (float(amount) / 100.0)
+ elif unit in {'ch', 'rem', 'vw', 'vh', 'vmin', 'vmax', 'Q'}:
+ res = rf'{amount}\sphinx{unit}dimen'
else:
amount_float = float(amount) * scale / 100.0
if unit in {'', 'px'}:
@@ -318,8 +323,13 @@ def rstdim_to_latexdim(width_str: str, scale: int = 100) -> str:
res = '%.5fbp' % amount_float
elif unit == '%':
res = r'%.5f\linewidth' % (amount_float / 100.0)
+ elif unit in {'ch', 'rem', 'vw', 'vh', 'vmin', 'vmax', 'Q'}:
+ res = rf'{amount_float:.5f}\sphinx{unit}dimen'
else:
res = f'{amount_float:.5f}{unit}'
+ # Those further units are passed through and accepted "as is" by TeX:
+ # em and ex (both font dependent), bp, cm, mm, in, and pc.
+ # Non-CSS units (TeX only presumably) are cc, nc, dd, nd, and sp.
return res
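As a standalone sketch of the new branch: CSS3-only units cannot be computed
writer-side (they depend on the font, the root font size, or the paper
dimensions), so each is deferred to a LaTeX register named
``\sphinx<unit>dimen``, defined in sphinx.sty, or in the template for
``rem``. Illustrative only; the real conversion is ``rstdim_to_latexdim``
above:

    CSS3_DEFERRED_UNITS = frozenset({'ch', 'rem', 'vw', 'vh', 'vmin', 'vmax', 'Q'})


    def css3_length_to_latex(amount: float, unit: str, scale: int = 100) -> str:
        scaled = amount * scale / 100.0
        if unit in CSS3_DEFERRED_UNITS:
            # e.g. (30, 'vw') -> r'30.00000\sphinxvwdimen'; sphinx.sty sets
            # \sphinxvwdimen to 1% of \paperwidth, resolved by TeX itself.
            return rf'{scaled:.5f}\sphinx{unit}dimen'
        # em, ex, bp, cm, mm, in and pc are understood by TeX directly.
        return f'{scaled:.5f}{unit}'


    assert css3_length_to_latex(30, 'vw') == r'30.00000\sphinxvwdimen'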
diff --git a/tests/roots/test-latex-images-css3-lengths/conf.py b/tests/roots/test-latex-images-css3-lengths/conf.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/roots/test-latex-images-css3-lengths/img.png b/tests/roots/test-latex-images-css3-lengths/img.png
new file mode 100644
index 0000000000000000000000000000000000000000..a97e86d66af1ad396be5ece749bfe35b452aefec
GIT binary patch
literal 66247
[~66 KB of base85-encoded PNG image data omitted]