Skip to content

Commit

Permalink
chore: fix reference issue for docs
Browse files Browse the repository at this point in the history
Close  #14
  • Loading branch information
frostming committed Jul 19, 2022
1 parent c369b0e commit 00a181a
Show file tree
Hide file tree
Showing 8 changed files with 59 additions and 41 deletions.
2 changes: 0 additions & 2 deletions docs/api/link.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,5 @@ This API is not finalized, and may change in a patch version.

```{eval-rst}
.. automodule:: unearth.link
.. autoclass:: unearth.link.Link
:members:
```
14 changes: 14 additions & 0 deletions docs/api/session.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
```{caution}
This API is not finalized, and may change in a patch version.
```

# `unearth.session`

```{eval-rst}
.. automodule:: unearth.session
.. autoclass:: unearth.session.PyPISession
:members:
.. autoclass:: unearth.session.InsecureHTTPAdapter
```
14 changes: 11 additions & 3 deletions docs/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,10 @@
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import os
import sys

sys.path.insert(0, os.path.abspath("../src"))


# -- Project information -----------------------------------------------------
Expand All @@ -34,6 +35,7 @@
"sphinx.ext.autodoc",
"sphinx.ext.doctest",
"sphinx.ext.napoleon",
"sphinx.ext.intersphinx",
"sphinx.ext.todo",
"myst_parser",
"sphinx_copybutton",
Expand All @@ -48,6 +50,12 @@
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]

intersphinx_mapping = {
"python": ("https://docs.python.org/3", None),
"packaging": ("https://packaging.pypa.io/en/latest/", None),
"requests": ("https://requests.readthedocs.io/en/latest/", None),
}


# -- Options for HTML output -------------------------------------------------

Expand Down
2 changes: 1 addition & 1 deletion noxfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ def docs(session):
session.install("-r", "docs/requirements.txt")

# Generate documentation into `build/docs`
session.run("sphinx-build", "-W", "-b", "html", "docs/", "build/docs")
session.run("sphinx-build", "-n", "-W", "-b", "html", "docs/", "build/docs")


@nox.session(name="docs-live", python="3.10")
Expand Down
8 changes: 5 additions & 3 deletions src/unearth/evaluator.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from typing import Any, cast
from urllib.parse import urlencode

from packaging.requirements import Requirement
import packaging.requirements
from packaging.specifiers import SpecifierSet
from packaging.tags import Tag
from packaging.utils import (
Expand Down Expand Up @@ -248,13 +248,15 @@ def evaluate_link(self, link: Link) -> Package | None:


def evaluate_package(
package: Package, requirement: Requirement, allow_prereleases: bool | None = None
package: Package,
requirement: packaging.requirements.Requirement,
allow_prereleases: bool | None = None,
) -> bool:
"""Evaluate the package based on the requirement.
Args:
package (Package): The package to evaluate
requirement (Requirement): The requirement to evaluate against
requirement: The requirement to evaluate against
        allow_prereleases (bool|None): Whether to allow prereleases,
or None to infer from the specifier.
Returns:
Expand Down
36 changes: 18 additions & 18 deletions src/unearth/finder.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,12 @@
import atexit
import functools
import os
from pathlib import Path
import pathlib
from tempfile import TemporaryDirectory
from typing import Iterable, NamedTuple, cast
from urllib.parse import urljoin

from packaging.requirements import Requirement
import packaging.requirements
from packaging.utils import BuildTag, canonicalize_name, parse_wheel_filename
from packaging.version import parse as parse_version

Expand Down Expand Up @@ -150,7 +150,7 @@ def _evaluate_links(
def _evaluate_packages(
self,
packages: Iterable[Package],
requirement: Requirement,
requirement: packaging.requirements.Requirement,
allow_prereleases: bool | None = None,
) -> Iterable[Package]:
evaluator = functools.partial(
Expand Down Expand Up @@ -246,7 +246,7 @@ def find_all_packages(

def _find_packages_from_requirement(
self,
requirement: Requirement,
requirement: packaging.requirements.Requirement,
allow_yanked: bool | None = None,
hashes: dict[str, list[str]] | None = None,
) -> Iterable[Package]:
Expand All @@ -259,15 +259,15 @@ def _find_packages_from_requirement(

def find_matches(
self,
requirement: Requirement | str,
requirement: packaging.requirements.Requirement | str,
allow_yanked: bool | None = None,
allow_prereleases: bool | None = None,
hashes: dict[str, list[str]] | None = None,
) -> list[Package]:
"""Find all packages matching the given requirement, best match first.
Args:
requirement (Requirement|str): A packaging.requirements.Requirement
requirement: A packaging.requirements.Requirement
instance or a string to construct it.
allow_yanked (bool|None): Whether to allow yanked candidates,
or None to infer from the specifier.
Expand All @@ -279,7 +279,7 @@ def find_matches(
list[Package]: The packages list sorted by best match
"""
if isinstance(requirement, str):
requirement = Requirement(requirement)
requirement = packaging.requirements.Requirement(requirement)
return sorted(
self._evaluate_packages(
self._find_packages_from_requirement(requirement, allow_yanked, hashes),
Expand All @@ -292,15 +292,15 @@ def find_matches(

def find_best_match(
self,
requirement: Requirement | str,
requirement: packaging.requirements.Requirement | str,
allow_yanked: bool | None = None,
allow_prereleases: bool | None = None,
hashes: dict[str, list[str]] = None,
) -> BestMatch:
"""Find the best match for the given requirement.
Args:
requirement (Requirement|str): A packaging.requirements.Requirement
requirement: A packaging.requirements.Requirement
instance or a string to construct it.
allow_yanked (bool|None): Whether to allow yanked candidates,
or None to infer from the specifier.
Expand All @@ -312,7 +312,7 @@ def find_best_match(
BestMatch: The best match
"""
if isinstance(requirement, str):
requirement = Requirement(requirement)
requirement = packaging.requirements.Requirement(requirement)
candidates = list(
self._find_packages_from_requirement(requirement, allow_yanked, hashes)
)
Expand All @@ -325,10 +325,10 @@ def find_best_match(
def download_and_unpack(
self,
link: Link,
location: str | Path,
download_dir: str | Path | None = None,
location: str | pathlib.Path,
download_dir: str | pathlib.Path | None = None,
hashes: dict[str, list[str]] | None = None,
) -> Path:
) -> pathlib.Path:
"""Download and unpack the package at the given link.
If the link is a remote link, it will be downloaded to the ``download_dir``.
Expand All @@ -340,13 +340,13 @@ def download_and_unpack(
Args:
link (Link): The link to download
location (str|Path): The destination directory
download_dir (str|Path|None): The directory to download to, or None to use a
location: The destination directory
download_dir: The directory to download to, or None to use a
temporary directory created by unearth.
hashes (dict[str, list[str]]|None): The optional hash dict for validation.
Returns:
Path: The path to the installable file or directory.
The path to the installable file or directory.
"""
# Strip the rev part for VCS links
if hashes is None and link.hash_name:
Expand All @@ -356,8 +356,8 @@ def download_and_unpack(
file = unpack_link(
self.session,
link,
Path(download_dir),
Path(location),
pathlib.Path(download_dir),
pathlib.Path(location),
hashes,
verbosity=self.verbosity,
)
Expand Down
8 changes: 4 additions & 4 deletions src/unearth/link.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@

import dataclasses as dc
import os
import pathlib
import sys
from pathlib import Path
from typing import Any
from urllib.parse import ParseResult, unquote, urlparse

Expand Down Expand Up @@ -77,7 +77,7 @@ def __eq__(self, __o: object) -> bool:
return isinstance(__o, Link) and self._ident() == __o._ident()

@classmethod
def from_path(cls, file_path: str | Path) -> Link:
def from_path(cls, file_path: str | pathlib.Path) -> Link:
"""Create a link from a local file path"""
url = path_to_url(str(file_path))
return cls(url)
Expand All @@ -87,8 +87,8 @@ def is_file(self) -> bool:
return self.parsed.scheme == "file"

@property
def file_path(self) -> Path:
return Path(url_to_path(self.url_without_fragment))
def file_path(self) -> pathlib.Path:
return pathlib.Path(url_to_path(self.url_without_fragment))

@property
def is_vcs(self) -> bool:
Expand Down
16 changes: 6 additions & 10 deletions src/unearth/session.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,9 @@
import os
from typing import Any, Iterable, cast

import requests.adapters
import urllib3
from requests import Session
from requests.adapters import BaseAdapter, HTTPAdapter
from requests.models import PreparedRequest, Response

from unearth.auth import MultiDomainBasicAuth
Expand Down Expand Up @@ -39,11 +39,11 @@ def cert_verify(self, conn, url, verify, cert):
return super().cert_verify(conn, url, verify=False, cert=cert)


class InsecureHTTPAdapter(InsecureMixin, HTTPAdapter):
class InsecureHTTPAdapter(InsecureMixin, requests.adapters.HTTPAdapter):
pass


class LocalFSAdapter(BaseAdapter):
class LocalFSAdapter(requests.adapters.BaseAdapter):
def send(self, request: PreparedRequest, *args: Any, **kwargs: Any) -> Response:
link = Link(cast(str, request.url))
path = link.file_path
Expand Down Expand Up @@ -84,19 +84,15 @@ class PyPISession(Session):
"""
A session with caching enabled and specific hosts trusted.
Attributes:
secure_adapter_cls (type): The adapter class to use for secure
connections.
insecure_adapter_cls (type): The adapter class to use for insecure
connections.
Args:
index_urls: The PyPI index URLs to use.
retries: The number of retries to attempt.
trusted_hosts: The hosts to trust.
"""

secure_adapter_cls = HTTPAdapter
#: The adapter class to use for secure connections.
secure_adapter_cls = requests.adapters.HTTPAdapter
#: The adapter class to use for insecure connections.
insecure_adapter_cls = InsecureHTTPAdapter

def __init__(
Expand Down

0 comments on commit 00a181a

Please sign in to comment.