diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
index 80052720b..688975418 100644
--- a/.devcontainer/Dockerfile
+++ b/.devcontainer/Dockerfile
@@ -1,8 +1,8 @@
# syntax=docker/dockerfile:1.5
ARG CUDA=11.8
-ARG LLVM=15
-ARG RAPIDS=23.04
+ARG LLVM=16
+ARG RAPIDS=23.06
ARG DISTRO=ubuntu22.04
ARG REPO=rapidsai/devcontainers
@@ -19,12 +19,12 @@ ENV PYTHON_PACKAGE_MANAGER="${PYTHON_PACKAGE_MANAGER}"
USER coder
-COPY --chown=coder:coder opt/cuspatial /opt/cuspatial
-
-RUN /bin/bash -c 'mkdir -m 0755 -p ~/.{aws,cache,conda,config,local}'
+RUN /bin/bash -c 'mkdir -m 0755 -p ~/.{aws,cache,conda,config/pip,local}'
WORKDIR /home/coder/
+ENV PYTHONSAFEPATH="1"
+ENV PYTHONUNBUFFERED="1"
ENV PYTHONDONTWRITEBYTECODE="1"
ENV SCCACHE_REGION="us-east-2"
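[Editor's note] The three PYTHON* variables above change interpreter defaults for every process in the container. A minimal sanity check run inside the built devcontainer (illustrative only, not part of the PR; `sys.flags.safe_path` only exists on Python 3.11+):

```shell
# Confirm the interpreter picked up the Dockerfile ENVs.
python3 -c 'import sys; print("dont_write_bytecode:", sys.flags.dont_write_bytecode)'
python3 -c 'import sys; print("safe_path:", getattr(sys.flags, "safe_path", "n/a (needs 3.11+)"))'
python3 -c 'import os; print("PYTHONUNBUFFERED:", os.environ.get("PYTHONUNBUFFERED"))'
```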
diff --git a/.devcontainer/conda/devcontainer.json b/.devcontainer/conda/devcontainer.json
index 01eef184f..902276b71 100644
--- a/.devcontainer/conda/devcontainer.json
+++ b/.devcontainer/conda/devcontainer.json
@@ -1,5 +1,5 @@
{
- "shutdownAction": "none",
+ "shutdownAction": "stopContainer",
"build": {
"context": "${localWorkspaceFolder}/.devcontainer",
@@ -15,7 +15,7 @@
},
"features": {
- "ghcr.io/rapidsai/devcontainers/features/rapids-build-utils:latest": {}
+ "ghcr.io/rapidsai/devcontainers/features/rapids-build-utils:23.6": {}
},
"overrideFeatureInstallOrder": [
@@ -23,8 +23,11 @@
],
"initializeCommand": [
- "/bin/bash", "-c", "mkdir -m 0755 -p ${localWorkspaceFolder}/../.{aws,cache,config,conda/pkgs,conda/${localWorkspaceFolderBasename}-single-envs}"
+ "/bin/bash", "-c", "mkdir -m 0755 -p ${localWorkspaceFolder}/../.{aws,cache,config,conda/pkgs,conda/${localWorkspaceFolderBasename}/single}"
],
+ "updateContentCommand": ["rapids-make-vscode-workspace", "--update"],
+ "postCreateCommand": ["rapids-make-vscode-workspace", "--update"],
+ "postAttachCommand": ["rapids-make-conda-env"],
"containerEnv": {
"DEFAULT_CONDA_ENV": "rapids"
@@ -37,8 +40,7 @@
"source=${localWorkspaceFolder}/../.cache,target=/home/coder/.cache,type=bind,consistency=consistent",
"source=${localWorkspaceFolder}/../.config,target=/home/coder/.config,type=bind,consistency=consistent",
"source=${localWorkspaceFolder}/../.conda/pkgs,target=/home/coder/.conda/pkgs,type=bind,consistency=consistent",
- "source=${localWorkspaceFolder}/../.conda/${localWorkspaceFolderBasename}-single-envs,target=/home/coder/.conda/envs,type=bind,consistency=consistent",
- "source=${localWorkspaceFolder}/.devcontainer/opt/${localWorkspaceFolderBasename},target=/opt/${localWorkspaceFolderBasename},type=bind,consistency=consistent"
+ "source=${localWorkspaceFolder}/../.conda/${localWorkspaceFolderBasename}/single,target=/home/coder/.conda/envs,type=bind,consistency=consistent"
],
"customizations": {
diff --git a/.devcontainer/conda/isolated/.devcontainer/devcontainer.json b/.devcontainer/conda/isolated/.devcontainer/devcontainer.json
index a23085da9..aeebb0e7c 100644
--- a/.devcontainer/conda/isolated/.devcontainer/devcontainer.json
+++ b/.devcontainer/conda/isolated/.devcontainer/devcontainer.json
@@ -1,5 +1,5 @@
{
- "shutdownAction": "none",
+ "shutdownAction": "stopContainer",
"build": {
"context": "${localWorkspaceFolder}/.devcontainer",
@@ -15,7 +15,7 @@
},
"features": {
- "ghcr.io/rapidsai/devcontainers/features/rapids-build-utils:latest": {}
+ "ghcr.io/rapidsai/devcontainers/features/rapids-build-utils:23.6": {}
},
"overrideFeatureInstallOrder": [
@@ -27,8 +27,10 @@
],
"updateContentCommand": [
- "/bin/bash", "-c", "cp -ar /workspaces/${localWorkspaceFolderBasename} /home/coder/${localWorkspaceFolderBasename}"
+ "/bin/bash", "-c", "cp -ar /workspaces/${localWorkspaceFolderBasename} /home/coder/${localWorkspaceFolderBasename} && rapids-make-vscode-workspace --update"
],
+ "postCreateCommand": ["rapids-make-vscode-workspace", "--update"],
+ "postAttachCommand": ["rapids-make-conda-env"],
"containerEnv": {
"DEFAULT_CONDA_ENV": "rapids"
diff --git a/.devcontainer/conda/unified/.devcontainer/devcontainer.json b/.devcontainer/conda/unified/.devcontainer/devcontainer.json
index 5a6af88c9..68ca35f5f 100644
--- a/.devcontainer/conda/unified/.devcontainer/devcontainer.json
+++ b/.devcontainer/conda/unified/.devcontainer/devcontainer.json
@@ -1,5 +1,5 @@
{
- "shutdownAction": "none",
+ "shutdownAction": "stopContainer",
"build": {
"context": "${localWorkspaceFolder}/.devcontainer",
@@ -15,7 +15,7 @@
},
"features": {
- "ghcr.io/rapidsai/devcontainers/features/rapids-build-utils:latest": {}
+ "ghcr.io/rapidsai/devcontainers/features/rapids-build-utils:23.6": {}
},
"overrideFeatureInstallOrder": [
@@ -23,8 +23,11 @@
],
"initializeCommand": [
- "/bin/bash", "-c", "mkdir -m 0755 -p ${localWorkspaceFolder}/../.{aws,cache,config,conda/pkgs,conda/${localWorkspaceFolderBasename}-unified-envs}"
+ "/bin/bash", "-c", "mkdir -m 0755 -p ${localWorkspaceFolder}/../.{aws,cache,config,conda/pkgs,conda/${localWorkspaceFolderBasename}/unified}"
],
+ "updateContentCommand": ["rapids-make-vscode-workspace", "--update"],
+ "postCreateCommand": ["rapids-make-vscode-workspace", "--update"],
+ "postAttachCommand": ["rapids-make-conda-env"],
"containerEnv": {
"DEFAULT_CONDA_ENV": "rapids"
@@ -39,8 +42,7 @@
"source=${localWorkspaceFolder}/../.cache,target=/home/coder/.cache,type=bind,consistency=consistent",
"source=${localWorkspaceFolder}/../.config,target=/home/coder/.config,type=bind,consistency=consistent",
"source=${localWorkspaceFolder}/../.conda/pkgs,target=/home/coder/.conda/pkgs,type=bind,consistency=consistent",
- "source=${localWorkspaceFolder}/../.conda/${localWorkspaceFolderBasename}-unified-envs,target=/home/coder/.conda/envs,type=bind,consistency=consistent",
- "source=${localWorkspaceFolder}/.devcontainer/opt/${localWorkspaceFolderBasename},target=/opt/${localWorkspaceFolderBasename},type=bind,consistency=consistent"
+ "source=${localWorkspaceFolder}/../.conda/${localWorkspaceFolderBasename}/unified,target=/home/coder/.conda/envs,type=bind,consistency=consistent"
],
"customizations": {
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 01eef184f..902276b71 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -1,5 +1,5 @@
{
- "shutdownAction": "none",
+ "shutdownAction": "stopContainer",
"build": {
"context": "${localWorkspaceFolder}/.devcontainer",
@@ -15,7 +15,7 @@
},
"features": {
- "ghcr.io/rapidsai/devcontainers/features/rapids-build-utils:latest": {}
+ "ghcr.io/rapidsai/devcontainers/features/rapids-build-utils:23.6": {}
},
"overrideFeatureInstallOrder": [
@@ -23,8 +23,11 @@
],
"initializeCommand": [
- "/bin/bash", "-c", "mkdir -m 0755 -p ${localWorkspaceFolder}/../.{aws,cache,config,conda/pkgs,conda/${localWorkspaceFolderBasename}-single-envs}"
+ "/bin/bash", "-c", "mkdir -m 0755 -p ${localWorkspaceFolder}/../.{aws,cache,config,conda/pkgs,conda/${localWorkspaceFolderBasename}/single}"
],
+ "updateContentCommand": ["rapids-make-vscode-workspace", "--update"],
+ "postCreateCommand": ["rapids-make-vscode-workspace", "--update"],
+ "postAttachCommand": ["rapids-make-conda-env"],
"containerEnv": {
"DEFAULT_CONDA_ENV": "rapids"
@@ -37,8 +40,7 @@
"source=${localWorkspaceFolder}/../.cache,target=/home/coder/.cache,type=bind,consistency=consistent",
"source=${localWorkspaceFolder}/../.config,target=/home/coder/.config,type=bind,consistency=consistent",
"source=${localWorkspaceFolder}/../.conda/pkgs,target=/home/coder/.conda/pkgs,type=bind,consistency=consistent",
- "source=${localWorkspaceFolder}/../.conda/${localWorkspaceFolderBasename}-single-envs,target=/home/coder/.conda/envs,type=bind,consistency=consistent",
- "source=${localWorkspaceFolder}/.devcontainer/opt/${localWorkspaceFolderBasename},target=/opt/${localWorkspaceFolderBasename},type=bind,consistency=consistent"
+ "source=${localWorkspaceFolder}/../.conda/${localWorkspaceFolderBasename}/single,target=/home/coder/.conda/envs,type=bind,consistency=consistent"
],
"customizations": {
diff --git a/.devcontainer/opt/cuspatial/bin/post-attach-command.sh b/.devcontainer/opt/cuspatial/bin/post-attach-command.sh
deleted file mode 100755
index 637734566..000000000
--- a/.devcontainer/opt/cuspatial/bin/post-attach-command.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-#! /usr/bin/env bash
-
-# Source this call in case we're running in Codespaces.
-#
-# Codespaces runs the "postAttachCommand" in an interactive login shell.
-# Once "postAttachCommand" is finished, the terminal is relinquished to
-# the user. Sourcing here ensures the new conda env is already activated
-# in the shell for the user.
-source rapids-make-${PYTHON_PACKAGE_MANAGER}-env;
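[Editor's note] The deleted script's comment explains why the hook was sourced rather than executed: environment changes made in a child process do not survive into the attached terminal. A generic bash illustration of that behaviour (not cuSpatial-specific; `DEMO_VAR` and the temp file are made up for the example):

```shell
# Write a tiny env-setting script.
printf 'export DEMO_VAR=1\n' > /tmp/demo-env.sh

bash /tmp/demo-env.sh;   echo "after running:  ${DEMO_VAR:-unset}"   # unset
source /tmp/demo-env.sh; echo "after sourcing: ${DEMO_VAR:-unset}"   # 1
```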
diff --git a/.devcontainer/opt/cuspatial/bin/post-create-command.sh b/.devcontainer/opt/cuspatial/bin/post-create-command.sh
deleted file mode 100755
index 0b1796c8e..000000000
--- a/.devcontainer/opt/cuspatial/bin/post-create-command.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#! /usr/bin/env bash
-
-rapids-make-vscode-workspace --update;
diff --git a/.devcontainer/opt/cuspatial/bin/update-content-command.sh b/.devcontainer/opt/cuspatial/bin/update-content-command.sh
deleted file mode 100755
index e0529489b..000000000
--- a/.devcontainer/opt/cuspatial/bin/update-content-command.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#! /usr/bin/env bash
-
-mkdir -m 0755 -p ~/.{aws,cache,config/clangd,conda,local};
-
-cp /etc/skel/.config/clangd/config.yaml ~/.config/clangd/config.yaml;
-
-rapids-make-vscode-workspace --update;
diff --git a/.devcontainer/pip/devcontainer.json b/.devcontainer/pip/devcontainer.json
index a21faaa8d..9c51daab1 100644
--- a/.devcontainer/pip/devcontainer.json
+++ b/.devcontainer/pip/devcontainer.json
@@ -1,5 +1,5 @@
{
- "shutdownAction": "none",
+ "shutdownAction": "stopContainer",
"build": {
"context": "${localWorkspaceFolder}/.devcontainer",
@@ -16,7 +16,7 @@
"features": {
"ghcr.io/devcontainers/features/python:1": {},
- "ghcr.io/rapidsai/devcontainers/features/rapids-build-utils:latest": {}
+ "ghcr.io/rapidsai/devcontainers/features/rapids-build-utils:23.6": {}
},
"overrideFeatureInstallOrder": [
@@ -25,10 +25,15 @@
],
"initializeCommand": [
-    "/bin/bash", "-c", "mkdir -m 0755 -p ${localWorkspaceFolder}/../.{aws,cache,config,${localWorkspaceFolderBasename}-single-local}"
+ "/bin/bash", "-c", "mkdir -m 0755 -p ${localWorkspaceFolder}/../.{aws,cache,config/pip,local/${localWorkspaceFolderBasename}/single}"
],
+ "updateContentCommand": ["rapids-make-vscode-workspace", "--update"],
+ "postCreateCommand": ["rapids-make-vscode-workspace", "--update"],
+ "postAttachCommand": ["rapids-make-pip-env"],
"containerEnv": {
+ "PYTHONSAFEPATH": "true",
+ "PYTHONUNBUFFERED": "true",
"DEFAULT_VIRTUAL_ENV": "rapids"
},
@@ -38,8 +43,7 @@
"source=${localWorkspaceFolder}/../.aws,target=/home/coder/.aws,type=bind,consistency=consistent",
"source=${localWorkspaceFolder}/../.cache,target=/home/coder/.cache,type=bind,consistency=consistent",
"source=${localWorkspaceFolder}/../.config,target=/home/coder/.config,type=bind,consistency=consistent",
- "source=${localWorkspaceFolder}/../.${localWorkspaceFolderBasename}-single-local,target=/home/coder/.local,type=bind,consistency=consistent",
- "source=${localWorkspaceFolder}/.devcontainer/opt/${localWorkspaceFolderBasename},target=/opt/${localWorkspaceFolderBasename},type=bind,consistency=consistent"
+ "source=${localWorkspaceFolder}/../.local/${localWorkspaceFolderBasename}/single,target=/home/coder/.local,type=bind,consistency=consistent"
],
"customizations": {
diff --git a/.devcontainer/pip/isolated/.devcontainer/devcontainer.json b/.devcontainer/pip/isolated/.devcontainer/devcontainer.json
index a6a2f5bad..c398cab0e 100644
--- a/.devcontainer/pip/isolated/.devcontainer/devcontainer.json
+++ b/.devcontainer/pip/isolated/.devcontainer/devcontainer.json
@@ -1,5 +1,5 @@
{
- "shutdownAction": "none",
+ "shutdownAction": "stopContainer",
"build": {
"context": "${localWorkspaceFolder}/.devcontainer",
@@ -16,7 +16,7 @@
"features": {
"ghcr.io/devcontainers/features/python:1": {},
- "ghcr.io/rapidsai/devcontainers/features/rapids-build-utils:latest": {}
+ "ghcr.io/rapidsai/devcontainers/features/rapids-build-utils:23.6": {}
},
"overrideFeatureInstallOrder": [
@@ -25,12 +25,14 @@
],
"initializeCommand": [
- "/bin/bash", "-c", "mkdir -m 0755 -p ${localWorkspaceFolder}/../.{aws,cache,config,local}"
+ "/bin/bash", "-c", "mkdir -m 0755 -p ${localWorkspaceFolder}/../.{aws,cache,config/pip}"
],
"updateContentCommand": [
- "/bin/bash", "-c", "cp -ar /workspaces/${localWorkspaceFolderBasename} /home/coder/${localWorkspaceFolderBasename}"
+ "/bin/bash", "-c", "cp -ar /workspaces/${localWorkspaceFolderBasename} /home/coder/${localWorkspaceFolderBasename} && rapids-make-vscode-workspace --update"
],
+ "postCreateCommand": ["rapids-make-vscode-workspace", "--update"],
+ "postAttachCommand": ["rapids-make-pip-env"],
"containerEnv": {
"DEFAULT_VIRTUAL_ENV": "rapids"
@@ -41,8 +43,7 @@
"mounts": [
"source=${localWorkspaceFolder}/../.aws,target=/home/coder/.aws,type=bind,consistency=consistent",
"source=${localWorkspaceFolder}/../.cache,target=/home/coder/.cache,type=bind,consistency=consistent",
- "source=${localWorkspaceFolder}/../.config,target=/home/coder/.config,type=bind,consistency=consistent",
- "source=${localWorkspaceFolder}/../.local,target=/home/coder/.local,type=bind,consistency=consistent"
+ "source=${localWorkspaceFolder}/../.config,target=/home/coder/.config,type=bind,consistency=consistent"
],
"customizations": {
diff --git a/.devcontainer/pip/unified/.devcontainer/devcontainer.json b/.devcontainer/pip/unified/.devcontainer/devcontainer.json
index 39ecc7102..6a7acb47d 100644
--- a/.devcontainer/pip/unified/.devcontainer/devcontainer.json
+++ b/.devcontainer/pip/unified/.devcontainer/devcontainer.json
@@ -1,5 +1,5 @@
{
- "shutdownAction": "none",
+ "shutdownAction": "stopContainer",
"build": {
"context": "${localWorkspaceFolder}/.devcontainer",
@@ -16,7 +16,7 @@
"features": {
"ghcr.io/devcontainers/features/python:1": {},
- "ghcr.io/rapidsai/devcontainers/features/rapids-build-utils:latest": {}
+ "ghcr.io/rapidsai/devcontainers/features/rapids-build-utils:23.6": {}
},
"overrideFeatureInstallOrder": [
@@ -25,8 +25,11 @@
],
"initializeCommand": [
- "/bin/bash", "-c", "mkdir -m 0755 -p ${localWorkspaceFolder}/../.{aws,cache,config,${localWorkspaceFolderBasename}-unified-local}"
+ "/bin/bash", "-c", "mkdir -m 0755 -p ${localWorkspaceFolder}/../.{aws,cache,config/pip,local/${localWorkspaceFolderBasename}/unified}"
],
+ "updateContentCommand": ["rapids-make-vscode-workspace", "--update"],
+ "postCreateCommand": ["rapids-make-vscode-workspace", "--update"],
+ "postAttachCommand": ["rapids-make-pip-env"],
"containerEnv": {
"DEFAULT_VIRTUAL_ENV": "rapids"
@@ -40,8 +43,7 @@
"source=${localWorkspaceFolder}/../.aws,target=/home/coder/.aws,type=bind,consistency=consistent",
"source=${localWorkspaceFolder}/../.cache,target=/home/coder/.cache,type=bind,consistency=consistent",
"source=${localWorkspaceFolder}/../.config,target=/home/coder/.config,type=bind,consistency=consistent",
- "source=${localWorkspaceFolder}/../.${localWorkspaceFolderBasename}-unified-local,target=/home/coder/.local,type=bind,consistency=consistent",
- "source=${localWorkspaceFolder}/.devcontainer/opt/${localWorkspaceFolderBasename},target=/opt/${localWorkspaceFolderBasename},type=bind,consistency=consistent"
+ "source=${localWorkspaceFolder}/../.local/${localWorkspaceFolderBasename}/unified,target=/home/coder/.local,type=bind,consistency=consistent"
],
"customizations": {
diff --git a/setup.cfg b/.flake8
similarity index 95%
rename from setup.cfg
rename to .flake8
index 028c88b18..13d38d1cc 100644
--- a/setup.cfg
+++ b/.flake8
@@ -1,4 +1,4 @@
-# Copyright (c) 2019-2022, NVIDIA CORPORATION.
+# Copyright (c) 2019-2023, NVIDIA CORPORATION.
[flake8]
filename = *.py, *.pyx, *.pxd, *.pxi
diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml
index 0457613a5..0f3f10e22 100644
--- a/.github/workflows/build.yaml
+++ b/.github/workflows/build.yaml
@@ -66,3 +66,24 @@ jobs:
node_type: "gpu-v100-latest-1"
run_script: "ci/build_docs.sh"
sha: ${{ inputs.sha }}
+ wheel-build:
+ secrets: inherit
+ uses: rapidsai/shared-action-workflows/.github/workflows/wheels-manylinux-build.yml@branch-23.06
+ with:
+ build_type: ${{ inputs.build_type || 'branch' }}
+ branch: ${{ inputs.branch }}
+ sha: ${{ inputs.sha }}
+ date: ${{ inputs.date }}
+ package-name: cuspatial
+ package-dir: python/cuspatial
+ skbuild-configure-options: "-DCUSPATIAL_BUILD_WHEELS=ON"
+ wheel-publish:
+ needs: wheel-build
+ secrets: inherit
+ uses: rapidsai/shared-action-workflows/.github/workflows/wheels-manylinux-publish.yml@branch-23.06
+ with:
+ build_type: ${{ inputs.build_type || 'branch' }}
+ branch: ${{ inputs.branch }}
+ sha: ${{ inputs.sha }}
+ date: ${{ inputs.date }}
+ package-name: cuspatial
diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml
index 7e14bc978..69b057271 100644
--- a/.github/workflows/pr.yaml
+++ b/.github/workflows/pr.yaml
@@ -19,6 +19,8 @@ jobs:
- conda-python-tests
- conda-notebook-tests
- docs-build
+ - wheel-build
+ - wheel-tests
secrets: inherit
uses: rapidsai/shared-action-workflows/.github/workflows/pr-builder.yaml@branch-23.06
checks:
@@ -70,3 +72,23 @@ jobs:
arch: "amd64"
container_image: "rapidsai/ci:latest"
run_script: "ci/build_docs.sh"
+ wheel-build:
+ needs: checks
+ secrets: inherit
+ uses: rapidsai/shared-action-workflows/.github/workflows/wheels-manylinux-build.yml@branch-23.06
+ with:
+ build_type: pull-request
+ package-dir: python/cuspatial
+ package-name: cuspatial
+ skbuild-configure-options: "-DCUSPATIAL_BUILD_WHEELS=ON"
+ wheel-tests:
+ needs: wheel-build
+ secrets: inherit
+ uses: rapidsai/shared-action-workflows/.github/workflows/wheels-manylinux-test.yml@branch-23.06
+ with:
+ build_type: pull-request
+ package-name: cuspatial
+ test-smoketest: "python ./ci/wheel_smoke_test.py"
+ test-unittest: "python -m pytest -n 8 ./python/cuspatial/cuspatial/tests"
+ test-before-amd64: "apt update && DEBIAN_FRONTEND=noninteractive apt install -y --no-install-recommends libgdal-dev && python -m pip install --no-binary fiona 'fiona>=1.8.19,<1.9'"
+ test-before-arm64: "apt update && DEBIAN_FRONTEND=noninteractive apt install -y --no-install-recommends libgdal-dev && python -m pip install --no-binary fiona 'fiona>=1.8.19,<1.9'"
diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
index eb3cb4d94..09a472e2c 100644
--- a/.github/workflows/test.yaml
+++ b/.github/workflows/test.yaml
@@ -30,3 +30,15 @@ jobs:
branch: ${{ inputs.branch }}
date: ${{ inputs.date }}
sha: ${{ inputs.sha }}
+ wheel-tests:
+ secrets: inherit
+ uses: rapidsai/shared-action-workflows/.github/workflows/wheels-manylinux-test.yml@branch-23.06
+ with:
+ build_type: nightly
+ branch: ${{ inputs.branch }}
+ date: ${{ inputs.date }}
+ sha: ${{ inputs.sha }}
+ package-name: cuspatial
+ test-unittest: "python -m pytest -n 8 ./python/cuspatial/cuspatial/tests"
+ test-before-amd64: "apt update && DEBIAN_FRONTEND=noninteractive apt install -y --no-install-recommends libgdal-dev && python -m pip install --no-binary fiona 'fiona>=1.8.19,<1.9'"
+ test-before-arm64: "apt update && DEBIAN_FRONTEND=noninteractive apt install -y --no-install-recommends libgdal-dev && python -m pip install --no-binary fiona 'fiona>=1.8.19,<1.9'"
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 01566030b..5d2fb2b29 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,4 +1,4 @@
-# Copyright (c) 2019-2022, NVIDIA CORPORATION.
+# Copyright (c) 2019-2023, NVIDIA CORPORATION.
repos:
- repo: https://github.com/PyCQA/isort
@@ -21,7 +21,7 @@ repos:
rev: 5.0.4
hooks:
- id: flake8
- args: ["--config=setup.cfg"]
+ args: ["--config=.flake8"]
files: python/.*$
types: [file]
types_or: [python, cython]
diff --git a/README.md b/README.md
index 9639a7c05..1c1a8f6f9 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
#
cuSpatial - GPU-Accelerated Vector Geospatial Data Analysis
-> **Note**
->
+> **Note**
+>
> cuSpatial depends on [cuDF](https://github.com/rapidsai/cudf) and [RMM](https://github.com/rapidsai/rmm) from [RAPIDS](https://rapids.ai/).
## Resources
@@ -14,7 +14,7 @@
- [cuSpatial Roadmap](https://github.com/orgs/rapidsai/projects/41/views/5): Report issues or request features.
## Overview
-cuSpatial accelerates vector geospatial operations through GPU parallelization. As part of the RAPIDS libraries, cuSpatial is inherently connected to [cuDF](https://github.com/rapidsai/cudf), [cuML](https://github.com/rapidsai/cuml), and [cuGraph](https://github.com/rapidsai/cugraph), enabling GPU acceleration across entire workflows.
+cuSpatial accelerates vector geospatial operations through GPU parallelization. As part of the RAPIDS libraries, cuSpatial is inherently connected to [cuDF](https://github.com/rapidsai/cudf), [cuML](https://github.com/rapidsai/cuml), and [cuGraph](https://github.com/rapidsai/cugraph), enabling GPU acceleration across entire workflows.
cuSpatial represents data in [GeoArrow](https://github.com/geoarrow/geoarrow) format, which enables compatibility with the [Apache Arrow](https://arrow.apache.org) ecosystem.
@@ -118,10 +118,10 @@ docker run --gpus all --rm -it \
nvcr.io/nvidia/rapidsai/rapidsai-core:23.02-cuda11.8-runtime-ubuntu22.04-py3.10
```
-### Install from Conda
+### Install with Conda
To install via conda:
-> **Note** cuSpatial is supported only on Linux or [through WSL](https://rapids.ai/wsl2.html), and with Python versions 3.9 and later
+> **Note** cuSpatial is supported only on Linux or [through WSL](https://rapids.ai/wsl2.html), and with Python versions 3.9 and 3.10
cuSpatial can be installed with conda (miniconda, or the full Anaconda distribution) from the rapidsai channel:
@@ -133,7 +133,55 @@ We also provide nightly Conda packages built from the HEAD of our latest develop
See the [RAPIDS release selector](https://rapids.ai/start.html#get-rapids) for more OS and version info.
-### Install from Source
+### Install with pip
+
+To install via pip:
+> **Note** cuSpatial is supported only on Linux or [through WSL](https://rapids.ai/wsl2.html), and with Python versions 3.9 and 3.10
+
+The cuSpatial pip packages can be installed from NVIDIA's PyPI index:
+
+```shell
+# If using driver 525+, with support for CUDA Toolkit 12.0+
+pip install --extra-index-url=https://pypi.nvidia.com cuspatial-cu12
+
+# If using driver 450.80+, with support for CUDA Toolkit 11.2+
+pip install --extra-index-url=https://pypi.nvidia.com cuspatial-cu11
+
+# Or do this if you're unsure which CUDA Toolkit is supported by your driver:
+CUDA_MAJOR_VERSION="$(nvidia-smi | head -n3 | tail -n1 | tr -d '[:space:]' | cut -d':' -f3 | cut -d '.' -f1)"
+pip install --extra-index-url=https://pypi.nvidia.com cuspatial-cu${CUDA_MAJOR_VERSION}
+```
+
+#### Troubleshooting Fiona/GDAL versions
+
+cuSpatial depends on [`geopandas`](https://github.com/geopandas/geopandas), which uses [`fiona >= 1.8.19`](https://pypi.org/project/Fiona/) to read common GIS formats with GDAL.
+
+Fiona requires that GDAL already be present on your system, but the minimum GDAL version fiona requires may be newer than the version of GDAL in your OS's package manager.
+
+Fiona checks the GDAL version at install time and fails with an error like this if a compatible version of GDAL isn't installed:
+```
+ERROR: GDAL >= 3.2 is required for fiona. Please upgrade GDAL.
+```
+
+There are two ways to fix this:
+
+1. Install a version of GDAL that meets fiona's minimum required version
+ * Ubuntu users can install a newer GDAL with the [UbuntuGIS PPA](https://wiki.ubuntu.com/UbuntuGIS):
+ ```shell
+      sudo add-apt-repository -y ppa:ubuntugis/ppa
+ sudo apt install libgdal-dev
+ ```
+2. Pin fiona's version to a range that's compatible with your version of `libgdal-dev`
+ * For Ubuntu20.04 ([GDAL v3.0.4](https://packages.ubuntu.com/focal/libgdal-dev)):
+ ```shell
+ pip install --no-binary fiona --extra-index-url=https://pypi.nvidia.com cuspatial-cu12 'fiona>=1.8.19,<1.9'
+ ```
+ * For Ubuntu22.04 ([GDAL v3.4.1](https://packages.ubuntu.com/jammy/libgdal-dev)):
+ ```shell
+ pip install --no-binary fiona --extra-index-url=https://pypi.nvidia.com cuspatial-cu12 'fiona>=1.9'
+ ```
+
+### Build/Install from source
To build and install cuSpatial from source please see the [build documentation](https://docs.rapids.ai/api/cuspatial/stable/developer_guide/build.html).
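[Editor's note] Related to the Fiona/GDAL troubleshooting above, a quick way to see which GDAL your OS provides and which GDAL the installed fiona was built against (illustrative commands; `gdal-config` ships with `libgdal-dev`):

```shell
# System GDAL version (e.g. 3.0.4 on Ubuntu 20.04, 3.4.1 on Ubuntu 22.04)
gdal-config --version

# fiona version and the GDAL version it is linked against
python -c "import fiona; print(fiona.__version__, fiona.__gdal_version__)"
```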
diff --git a/ci/release/apply_wheel_modifications.sh b/ci/release/apply_wheel_modifications.sh
new file mode 100755
index 000000000..7e1d94ddc
--- /dev/null
+++ b/ci/release/apply_wheel_modifications.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+# Copyright (c) 2023, NVIDIA CORPORATION.
+#
+# Usage: bash apply_wheel_modifications.sh <new_version> <cuda_suffix>
+
+VERSION=${1}
+CUDA_SUFFIX=${2}
+
+sed -i "s/^version = .*/version = \"${VERSION}\"/g" python/cuspatial/pyproject.toml
+
+sed -i "s/^name = \"cuspatial\"/name = \"cuspatial${CUDA_SUFFIX}\"/g" python/cuspatial/pyproject.toml
+
+sed -i "s/rmm==/rmm${CUDA_SUFFIX}==/g" python/cuspatial/pyproject.toml
+sed -i "s/cudf==/cudf${CUDA_SUFFIX}==/g" python/cuspatial/pyproject.toml
diff --git a/ci/release/update-version.sh b/ci/release/update-version.sh
index cae29e22e..3c5b99c73 100755
--- a/ci/release/update-version.sh
+++ b/ci/release/update-version.sh
@@ -37,6 +37,9 @@ sed_runner 's/'"cuspatial_version .*)"'/'"cuspatial_version ${NEXT_FULL_TAG})"'/
sed_runner 's/version = .*/version = '"'${NEXT_SHORT_TAG}'"'/g' docs/source/conf.py
sed_runner 's/release = .*/release = '"'${NEXT_FULL_TAG}'"'/g' docs/source/conf.py
+# Python __init__.py updates
+sed_runner "s/__version__ = .*/__version__ = \"${NEXT_FULL_TAG}\"/g" python/cuspatial/cuspatial/__init__.py
+
# rapids-cmake version
sed_runner 's/'"branch-.*\/RAPIDS.cmake"'/'"branch-${NEXT_SHORT_TAG}\/RAPIDS.cmake"'/g' fetch_rapids.cmake
sed_runner 's/'"branch-.*\/RAPIDS.cmake"'/'"branch-${NEXT_SHORT_TAG}\/RAPIDS.cmake"'/g' python/cuspatial/CMakeLists.txt
@@ -67,10 +70,9 @@ done
# Dependency versions in dependencies.yaml
sed_runner "/-cu[0-9]\{2\}==/ s/==.*/==${NEXT_SHORT_TAG_PEP440}.*/g" dependencies.yaml
-# Dependency versions in setup.py
-sed_runner "s/rmm==.*\",/rmm==${NEXT_SHORT_TAG_PEP440}.*\",/g" python/cuspatial/setup.py
-sed_runner "s/cudf==.*\",/cudf==${NEXT_SHORT_TAG_PEP440}.*\",/g" python/cuspatial/setup.py
+# Python pyproject.toml updates
+sed_runner "s/^version = .*/version = \"${NEXT_FULL_TAG}\"/g" python/cuspatial/pyproject.toml
# Dependency versions in pyproject.toml
-sed_runner "s/rmm==.*\",/rmm==${NEXT_SHORT_TAG_PEP440}.*\",/g" python/cuspatial/pyproject.toml
sed_runner "s/cudf==.*\",/cudf==${NEXT_SHORT_TAG_PEP440}.*\",/g" python/cuspatial/pyproject.toml
+sed_runner "s/rmm==.*\",/rmm==${NEXT_SHORT_TAG_PEP440}.*\",/g" python/cuspatial/pyproject.py
diff --git a/ci/wheel_smoke_test.py b/ci/wheel_smoke_test.py
new file mode 100644
index 000000000..df2af9abd
--- /dev/null
+++ b/ci/wheel_smoke_test.py
@@ -0,0 +1,28 @@
+# Copyright (c) 2023, NVIDIA CORPORATION.
+
+import numpy as np
+import cudf
+import cuspatial
+import pyarrow as pa
+from shapely.geometry import Point
+
+if __name__ == '__main__':
+ order, quadtree = cuspatial.quadtree_on_points(
+ cuspatial.GeoSeries([Point(0.5, 0.5), Point(1.5, 1.5)]),
+ *(0, 2, 0, 2), # bbox
+ 1, # scale
+ 1, # max_depth
+ 1, # min_size
+ )
+ cudf.testing.assert_frame_equal(
+ quadtree,
+ cudf.DataFrame(
+ {
+ "key": cudf.Series(pa.array([0, 3], type=pa.uint32())),
+ "level": cudf.Series(pa.array([0, 0], type=pa.uint8())),
+ "is_internal_node": cudf.Series(pa.array([False, False], type=pa.bool_())),
+ "length": cudf.Series(pa.array([1, 1], type=pa.uint32())),
+ "offset": cudf.Series(pa.array([0, 1], type=pa.uint32())),
+ }
+ ),
+ )
diff --git a/cpp/CMakeLists.txt b/cpp/CMakeLists.txt
index 983123252..cb93425f0 100644
--- a/cpp/CMakeLists.txt
+++ b/cpp/CMakeLists.txt
@@ -51,6 +51,9 @@ option(CUDA_ENABLE_LINEINFO "Enable the -lineinfo option for nvcc (useful for cu
# cudart can be statically linked or dynamically linked. The python ecosystem wants dynamic linking
option(CUDA_STATIC_RUNTIME "Statically link the CUDA toolkit runtime and libraries" OFF)
+option(CUSPATIAL_USE_CUDF_STATIC "Build and statically link cuDF" OFF)
+option(CUSPATIAL_EXCLUDE_CUDF_FROM_ALL "Exclude cuDF targets from cuSpatial's 'all' target" OFF)
+
message(STATUS "CUSPATIAL: Build with NVTX support: ${USE_NVTX}")
message(STATUS "CUSPATIAL: Configure CMake to build tests: ${BUILD_TESTS}")
message(STATUS "CUSPATIAL: Configure CMake to build (google) benchmarks: ${BUILD_BENCHMARKS}")
@@ -174,14 +177,16 @@ target_include_directories(cuspatial
            PRIVATE "$<BUILD_INTERFACE:${CUSPATIAL_SOURCE_DIR}/src>"
            INTERFACE "$<INSTALL_INTERFACE:include>")
-# Add Conda library paths if specified
-if(CONDA_LINK_DIRS)
-  target_link_directories(cuspatial PUBLIC "$<BUILD_INTERFACE:${CONDA_LINK_DIRS}>")
+# Add Conda library, and include paths if specified
+if(TARGET conda_env)
+ target_link_libraries(cuspatial PRIVATE conda_env)
endif()
-# Add Conda include paths if specified
-if(CONDA_INCLUDE_DIRS)
-  target_include_directories(cuspatial PUBLIC "$<BUILD_INTERFACE:${CONDA_INCLUDE_DIRS}>")
+# Workaround until https://github.com/rapidsai/rapids-cmake/issues/176 is resolved
+if(NOT BUILD_SHARED_LIBS)
+ if(TARGET conda_env)
+ install(TARGETS conda_env EXPORT cuspatial-exports)
+ endif()
endif()
# Per-thread default stream
diff --git a/cpp/cmake/thirdparty/CUSPATIAL_GetCUDF.cmake b/cpp/cmake/thirdparty/CUSPATIAL_GetCUDF.cmake
index 6ab168bbc..49db92353 100644
--- a/cpp/cmake/thirdparty/CUSPATIAL_GetCUDF.cmake
+++ b/cpp/cmake/thirdparty/CUSPATIAL_GetCUDF.cmake
@@ -1,5 +1,5 @@
#=============================================================================
-# Copyright (c) 2021, NVIDIA CORPORATION.
+# Copyright (c) 2021-2023, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,39 +14,64 @@
# limitations under the License.
#=============================================================================
-function(find_and_configure_cudf VERSION)
+function(find_and_configure_cudf)
if(TARGET cudf::cudf)
- return()
+ return()
endif()
- if(${VERSION} MATCHES [=[([0-9]+)\.([0-9]+)\.([0-9]+)]=])
- set(MAJOR_AND_MINOR "${CMAKE_MATCH_1}.${CMAKE_MATCH_2}")
- else()
- set(MAJOR_AND_MINOR "${VERSION}")
- endif()
+ set(oneValueArgs VERSION GIT_REPO GIT_TAG USE_CUDF_STATIC EXCLUDE_FROM_ALL PER_THREAD_DEFAULT_STREAM)
+ cmake_parse_arguments(PKG "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
set(global_targets cudf::cudf)
- set(find_package_args "")
+ set(cudf_components "")
+
if(BUILD_TESTS)
list(APPEND global_targets cudf::cudftestutil)
- set(find_package_args "COMPONENTS testing")
+ set(cudf_components COMPONENTS testing)
+ endif()
+
+ set(BUILD_SHARED ON)
+ if(${PKG_USE_CUDF_STATIC})
+ set(BUILD_SHARED OFF)
endif()
- rapids_cpm_find(
- cudf ${VERSION}
- GLOBAL_TARGETS "${global_targets}"
+ rapids_cpm_find(cudf ${PKG_VERSION} ${cudf_components}
+ GLOBAL_TARGETS ${global_targets}
BUILD_EXPORT_SET cuspatial-exports
INSTALL_EXPORT_SET cuspatial-exports
CPM_ARGS
- GIT_REPOSITORY https://github.com/rapidsai/cudf.git
- GIT_TAG branch-${MAJOR_AND_MINOR}
- GIT_SHALLOW TRUE
- OPTIONS "BUILD_TESTS OFF" "BUILD_BENCHMARKS OFF"
- FIND_PACKAGE_ARGUMENTS "${find_package_args}"
+ GIT_REPOSITORY ${PKG_GIT_REPO}
+ GIT_TAG ${PKG_GIT_TAG}
+ GIT_SHALLOW TRUE
+ SOURCE_SUBDIR cpp
+ EXCLUDE_FROM_ALL ${PKG_EXCLUDE_FROM_ALL}
+ OPTIONS "BUILD_TESTS OFF"
+ "BUILD_BENCHMARKS OFF"
+ "BUILD_SHARED_LIBS ${BUILD_SHARED}"
+ "CUDF_BUILD_TESTUTIL ${BUILD_TESTS}"
+ "CUDF_BUILD_STREAMS_TEST_UTIL ${BUILD_TESTS}"
+ "CUDF_USE_PER_THREAD_DEFAULT_STREAM ${PKG_PER_THREAD_DEFAULT_STREAM}"
)
+
+ if(TARGET cudf)
+ set_property(TARGET cudf PROPERTY SYSTEM TRUE)
+ endif()
endfunction()
-set(CUSPATIAL_MIN_VERSION_cudf "${CUSPATIAL_VERSION_MAJOR}.${CUSPATIAL_VERSION_MINOR}.00")
+set(CUSPATIAL_MIN_VERSION_cudf "${CUSPATIAL_VERSION_MAJOR}.${CUSPATIAL_VERSION_MINOR}")
+
+if(NOT DEFINED CUSPATIAL_CUDF_GIT_REPO)
+ set(CUSPATIAL_CUDF_GIT_REPO https://github.com/rapidsai/cudf.git)
+endif()
+
+if(NOT DEFINED CUSPATIAL_CUDF_GIT_TAG)
+ set(CUSPATIAL_CUDF_GIT_TAG branch-${CUSPATIAL_MIN_VERSION_cudf})
+endif()
-find_and_configure_cudf(${CUSPATIAL_MIN_VERSION_cudf})
+find_and_configure_cudf(VERSION ${CUSPATIAL_MIN_VERSION_cudf}.00
+ GIT_REPO ${CUSPATIAL_CUDF_GIT_REPO}
+ GIT_TAG ${CUSPATIAL_CUDF_GIT_TAG}
+ USE_CUDF_STATIC ${CUSPATIAL_USE_CUDF_STATIC}
+ EXCLUDE_FROM_ALL ${CUSPATIAL_EXCLUDE_CUDF_FROM_ALL}
+ PER_THREAD_DEFAULT_STREAM ${PER_THREAD_DEFAULT_STREAM})
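[Editor's note] Because the GIT_REPO/GIT_TAG values are only set when not already defined, a local build can point libcuspatial at a cudf fork or branch from the CMake command line. A sketch of such a configure step (the fork URL and branch name are placeholders):

```shell
# Configure the C++ build against a custom cudf checkout (illustrative values).
cmake -S cpp -B cpp/build \
  -DCUSPATIAL_CUDF_GIT_REPO=https://github.com/my-fork/cudf.git \
  -DCUSPATIAL_CUDF_GIT_TAG=my-feature-branch \
  -DCUSPATIAL_USE_CUDF_STATIC=ON \
  -DCUSPATIAL_EXCLUDE_CUDF_FROM_ALL=ON
```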
diff --git a/dependencies.yaml b/dependencies.yaml
index ce7ed798a..d63e97c38 100644
--- a/dependencies.yaml
+++ b/dependencies.yaml
@@ -41,6 +41,30 @@ files:
- cudatoolkit
- docs
- py_version
+ py_build:
+ output: [pyproject]
+ pyproject_dir: python/cuspatial
+ extras:
+ table: build-system
+ includes:
+ - build_cpp
+ - build_python
+ - build_wheels
+ py_run:
+ output: [pyproject]
+ pyproject_dir: python/cuspatial
+ extras:
+ table: project
+ includes:
+ - run_python
+ py_test:
+ output: [pyproject]
+ pyproject_dir: python/cuspatial
+ extras:
+ table: project.optional-dependencies
+ key: test
+ includes:
+ - test_python
channels:
- rapidsai
- rapidsai-nightly
@@ -49,16 +73,18 @@ channels:
dependencies:
build_cpp:
common:
+ - output_types: [conda, requirements, pyproject]
+ packages:
+ - ninja
+ - cmake>=3.23.1,!=3.25.0
- output_types: conda
packages:
- - &cmake_ver cmake>=3.23.1,!=3.25.0
- c-compiler
- cxx-compiler
- gmock>=1.13.0
- gtest>=1.13.0
- libcudf==23.6.*
- librmm==23.6.*
- - ninja
specific:
- output_types: conda
matrices:
@@ -86,15 +112,20 @@ dependencies:
- nvcc_linux-aarch64=11.8
build_python:
common:
- - output_types: [conda, requirements]
+ - output_types: [conda, requirements, pyproject]
packages:
- - *cmake_ver
- cython>=0.29,<0.30
- scikit-build>=0.13.1
- setuptools
- output_types: conda
- packages:
+ packages: &build_python_packages_conda
- &cudf_conda cudf==23.6.*
+ - &rmm_conda rmm==23.6.*
+ - output_types: requirements
+ packages:
+ # pip recognizes the index as a global option for the requirements.txt file
+ # This index is needed for cudf and rmm.
+ - --extra-index-url=https://pypi.nvidia.com
specific:
- output_types: conda
matrices:
@@ -108,28 +139,22 @@ dependencies:
packages:
- *gcc_aarch64
- *sysroot_aarch64
- - output_types: requirements
+ - output_types: [requirements, pyproject]
matrices:
- - matrix:
- cuda: "11.8"
- packages:
- - "--extra-index-url=https://pypi.nvidia.com"
- - cudf-cu11==23.6.*
- - matrix:
- cuda: "11.5"
- packages:
- - "--extra-index-url=https://pypi.nvidia.com"
- - cudf-cu11==23.6.*
- - matrix:
- cuda: "11.4"
- packages:
- - "--extra-index-url=https://pypi.nvidia.com"
- - cudf-cu11==23.6.*
- - matrix:
- cuda: "11.2"
- packages:
- - "--extra-index-url=https://pypi.nvidia.com"
- - cudf-cu11==23.6.*
+ - matrix: {cuda: "11.8"}
+ packages: &build_python_packages_cu11
+ - &cudf_cu11 cudf-cu11==23.6.*
+ - &rmm_cu11 rmm-cu11==23.6.*
+ - {matrix: {cuda: "11.5"}, packages: *build_python_packages_cu11}
+ - {matrix: {cuda: "11.4"}, packages: *build_python_packages_cu11}
+ - {matrix: {cuda: "11.2"}, packages: *build_python_packages_cu11}
+ - {matrix: null, packages: [*cudf_conda, *rmm_conda] }
+ build_wheels:
+ common:
+ - output_types: [requirements, pyproject]
+ packages:
+ - wheel
+ - setuptools
cudatoolkit:
specific:
- output_types: conda
@@ -160,6 +185,8 @@ dependencies:
- output_types: [conda]
packages:
- doxygen
+ - output_types: [conda, requirements]
+ packages:
- ipython
- myst-parser
- nbsphinx
@@ -168,9 +195,8 @@ dependencies:
- sphinx<6
notebooks:
common:
- - output_types: [conda, requirements]
+ - output_types: [conda, requirements, pyproject]
packages:
- - cuml==23.6.*
- ipython
- ipywidgets
- notebook
@@ -178,6 +204,19 @@ dependencies:
- pydeck
- shapely
- scikit-image
+ - output_types: conda
+ packages:
+ - &cuml_conda cuml==23.6.*
+ specific:
+ - output_types: [requirements, pyproject]
+ matrices:
+ - {matrix: null, packages: [*cuml_conda]}
+ - matrix: {cuda: "11.8"}
+        packages: &notebooks_packages_cu11
+ - &cuml_cu11 cuml-cu11==23.6.*
+ - {matrix: {cuda: "11.5"}, packages: *notebooks_packages_cu11}
+ - {matrix: {cuda: "11.4"}, packages: *notebooks_packages_cu11}
+ - {matrix: {cuda: "11.2"}, packages: *notebooks_packages_cu11}
py_version:
specific:
- output_types: conda
@@ -195,39 +234,32 @@ dependencies:
- python>=3.9,<3.11
run_python:
common:
- - output_types: [conda, requirements]
+ - output_types: [conda, requirements, pyproject]
packages:
- geopandas>=0.11.0
- output_types: conda
packages:
- *cudf_conda
- - rmm==23.6.*
- specific:
+ - *rmm_conda
- output_types: requirements
+ packages:
+ # pip recognizes the index as a global option for the requirements.txt file
+ # This index is needed for cudf and rmm.
+ - --extra-index-url=https://pypi.nvidia.com
+ specific:
+ - output_types: [requirements, pyproject]
matrices:
- - matrix:
- cuda: "11.8"
- packages:
- - "--extra-index-url=https://pypi.nvidia.com"
- - rmm-cu11==23.6.*
- - matrix:
- cuda: "11.5"
- packages:
- - "--extra-index-url=https://pypi.nvidia.com"
- - rmm-cu11==23.6.*
- - matrix:
- cuda: "11.4"
- packages:
- - "--extra-index-url=https://pypi.nvidia.com"
- - rmm-cu11==23.6.*
- - matrix:
- cuda: "11.2"
- packages:
- - "--extra-index-url=https://pypi.nvidia.com"
+ - matrix: {cuda: "11.8"}
+ packages: &run_python_packages_cu11
+ - cudf-cu11==23.6.*
- rmm-cu11==23.6.*
+ - {matrix: {cuda: "11.5"}, packages: *run_python_packages_cu11}
+ - {matrix: {cuda: "11.4"}, packages: *run_python_packages_cu11}
+ - {matrix: {cuda: "11.2"}, packages: *run_python_packages_cu11}
+ - {matrix: null, packages: [*cudf_conda, *rmm_conda]}
test_python:
common:
- - output_types: [conda, requirements]
+ - output_types: [conda, requirements, pyproject]
packages:
- pytest
- pytest-cov
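[Editor's note] The conda environment files, requirements files, and the new pyproject tables are all generated from dependencies.yaml. A hedged reminder of the regeneration step (tool name per RAPIDS convention; exact flags may vary by version):

```shell
# Regenerate the outputs declared under `files:` in dependencies.yaml.
pip install rapids-dependency-file-generator
rapids-dependency-file-generator
```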
diff --git a/docs/source/user_guide/cuspatial_api_examples.ipynb b/docs/source/user_guide/cuspatial_api_examples.ipynb
index bc40c82b0..28c81abb8 100644
--- a/docs/source/user_guide/cuspatial_api_examples.ipynb
+++ b/docs/source/user_guide/cuspatial_api_examples.ipynb
@@ -25,7 +25,7 @@
"This guide provides a working example for all of the python API components of cuSpatial. \n",
"The following list links to each subsection.\n",
"\n",
- "* [Installing cuSpatial](#Installing-cuspatial)\n",
+ "* [Installing cuSpatial](#Installing-cuSpatial)\n",
"* [GPU accelerated memory layout](#GPU-accelerated-memory-layout)\n",
"* [Input / Output](#Input-/-Output)\n",
"* [Geopandas and cuDF integration](#Geopandas-and-cuDF-integration)\n",
diff --git a/python/cuspatial/.flake8.cython b/python/cuspatial/.flake8.cython
deleted file mode 100644
index 4c5cf4965..000000000
--- a/python/cuspatial/.flake8.cython
+++ /dev/null
@@ -1,29 +0,0 @@
-#
-# Copyright (c) 2018-2019, NVIDIA CORPORATION.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-[flake8]
-filename = *.pyx, *.pxd
-exclude = *.egg, build, docs, .git
-ignore = E999, E225, E226, E227, W503, W504, E211
-
-# Rules ignored:
-# E999: invalid syntax (works for Python, not Cython)
-# E211: whitespace before '(' (used in multi-line imports)
-# E225: Missing whitespace around operators (breaks cython casting syntax like <int>)
-# E226: Missing whitespace around arithmetic operators (breaks cython pointer syntax like int*)
-# E227: Missing whitespace around bitwise or shift operator (Can also break casting syntax)
-# W503: line break before binary operator (breaks lines that start with a pointer)
-# W504: line break after binary operator (breaks lines that end with a pointer)
diff --git a/python/cuspatial/CMakeLists.txt b/python/cuspatial/CMakeLists.txt
index 9f06c3ad9..bba6f318d 100644
--- a/python/cuspatial/CMakeLists.txt
+++ b/python/cuspatial/CMakeLists.txt
@@ -1,5 +1,5 @@
# =============================================================================
-# Copyright (c) 2022, NVIDIA CORPORATION.
+# Copyright (c) 2022-2023, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
@@ -32,6 +32,8 @@ project(
option(FIND_CUSPATIAL_CPP "Search for existing cuspatial C++ installations before defaulting to local files"
OFF)
+option(CUSPATIAL_BUILD_WHEELS "Whether this build is generating a Python wheel." OFF)
+
# If the user requested it we attempt to find cuspatial.
if(FIND_CUSPATIAL_CPP)
find_package(cuspatial ${cuspatial_version})
@@ -40,22 +42,49 @@ else()
endif()
if(NOT cuspatial_FOUND)
- # TODO: This will not be necessary once we upgrade to CMake 3.22, which will
- # pull in the required languages for the C++ project even if this project
- # does not require those languages.
- include(rapids-cuda)
- rapids_cuda_init_architectures(cuspatial)
- enable_language(CUDA)
- # Since cuspatial only enables CUDA optionally we need to manually include the file that
- # rapids_cuda_init_architectures relies on `project` including.
- include("${CMAKE_PROJECT_cuspatial_INCLUDE}")
-
- add_subdirectory(../../cpp cuspatial-cpp)
-
- install(TARGETS cuspatial DESTINATION cuspatial/_lib)
+ set(BUILD_TESTS OFF)
+ set(BUILD_BENCHMARKS OFF)
+ set(_exclude_from_all "")
+ if(CUSPATIAL_BUILD_WHEELS)
+
+ # Statically link cudart if building wheels
+ set(CUDA_STATIC_RUNTIME ON)
+ set(CUSPATIAL_USE_CUDF_STATIC ON)
+ set(CUSPATIAL_EXCLUDE_CUDF_FROM_ALL ON)
+
+ # Always build wheels against the pyarrow libarrow.
+ set(USE_LIBARROW_FROM_PYARROW ON)
+
+ # Need to set this so all the nvcomp targets are global, not only nvcomp::nvcomp
+ # https://cmake.org/cmake/help/latest/variable/CMAKE_FIND_PACKAGE_TARGETS_GLOBAL.html#variable:CMAKE_FIND_PACKAGE_TARGETS_GLOBAL
+ set(CMAKE_FIND_PACKAGE_TARGETS_GLOBAL ON)
+
+ # Don't install the cuSpatial C++ targets into wheels
+ set(_exclude_from_all EXCLUDE_FROM_ALL)
+ endif()
+
+ add_subdirectory(../../cpp cuspatial-cpp ${_exclude_from_all})
+
+ set(cython_lib_dir cuspatial)
+
+ if(CUSPATIAL_BUILD_WHEELS)
+ include(cmake/Modules/WheelHelpers.cmake)
+ get_target_property(_nvcomp_link_libs nvcomp::nvcomp INTERFACE_LINK_LIBRARIES)
+ # Ensure all the shared objects we need at runtime are in the wheel
+ add_target_libs_to_wheel(LIB_DIR ${cython_lib_dir} TARGETS arrow_shared nvcomp::nvcomp ${_nvcomp_link_libs})
+ endif()
+
+ # Since there are multiple subpackages of cuspatial._lib that require access to libcuspatial, we place the
+ # library in the cuspatial directory as a single source of truth and modify the other rpaths
+ # appropriately.
+ install(TARGETS cuspatial DESTINATION ${cython_lib_dir})
endif()
include(rapids-cython)
rapids_cython_init()
add_subdirectory(cuspatial/_lib)
+
+if(DEFINED cython_lib_dir)
+ rapids_cython_add_rpath_entries(TARGET cuspatial PATHS "${cython_lib_dir}")
+endif()
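[Editor's note] The comment above describes keeping a single copy of libcuspatial in the `cuspatial` package and fixing up rpaths. One way to verify the result after installing a wheel (illustrative; assumes the default `cuspatial/_lib` layout and a Linux toolchain with `readelf`):

```shell
# Locate an installed Cython extension and inspect its dynamic section;
# RUNPATH/RPATH should point back into the cuspatial package directory.
SO="$(python -c 'import cuspatial, glob, os; print(glob.glob(os.path.join(os.path.dirname(cuspatial.__file__), "_lib", "*.so"))[0])')"
readelf -d "${SO}" | grep -E 'RPATH|RUNPATH'
```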
diff --git a/python/cuspatial/README.md b/python/cuspatial/README.md
new file mode 120000
index 000000000..fe8400541
--- /dev/null
+++ b/python/cuspatial/README.md
@@ -0,0 +1 @@
+../../README.md
\ No newline at end of file
diff --git a/python/cuspatial/cmake/Modules/WheelHelpers.cmake b/python/cuspatial/cmake/Modules/WheelHelpers.cmake
new file mode 100644
index 000000000..41d720c52
--- /dev/null
+++ b/python/cuspatial/cmake/Modules/WheelHelpers.cmake
@@ -0,0 +1,71 @@
+# =============================================================================
+# Copyright (c) 2023, NVIDIA CORPORATION.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
+# in compliance with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License
+# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+# or implied. See the License for the specific language governing permissions and limitations under
+# the License.
+# =============================================================================
+include_guard(GLOBAL)
+
+# Making libraries available inside wheels by installing the associated targets.
+function(add_target_libs_to_wheel)
+ list(APPEND CMAKE_MESSAGE_CONTEXT "add_target_libs_to_wheel")
+
+ set(options "")
+ set(one_value "LIB_DIR")
+ set(multi_value "TARGETS")
+ cmake_parse_arguments(_ "${options}" "${one_value}" "${multi_value}" ${ARGN})
+
+ message(VERBOSE "Installing targets '${__TARGETS}' into lib_dir '${__LIB_DIR}'")
+
+ foreach(target IN LISTS __TARGETS)
+
+ if(NOT TARGET ${target})
+ message(VERBOSE "No target named ${target}")
+ continue()
+ endif()
+
+ get_target_property(alias_target ${target} ALIASED_TARGET)
+ if(alias_target)
+ set(target ${alias_target})
+ endif()
+
+ get_target_property(is_imported ${target} IMPORTED)
+ if(NOT is_imported)
+      # If the target isn't imported, install it into the wheel
+ install(TARGETS ${target} DESTINATION ${__LIB_DIR})
+ message(VERBOSE "install(TARGETS ${target} DESTINATION ${__LIB_DIR})")
+ else()
+ # If the target is imported, make sure it's global
+ get_target_property(already_global ${target} IMPORTED_GLOBAL)
+ if(NOT already_global)
+ set_target_properties(${target} PROPERTIES IMPORTED_GLOBAL TRUE)
+ endif()
+
+ # Find the imported target's library so we can copy it into the wheel
+ set(lib_loc)
+ foreach(prop IN ITEMS IMPORTED_LOCATION IMPORTED_LOCATION_RELEASE IMPORTED_LOCATION_DEBUG)
+ get_target_property(lib_loc ${target} ${prop})
+ if(lib_loc)
+ message(VERBOSE "Found ${prop} for ${target}: ${lib_loc}")
+ break()
+ endif()
+ message(VERBOSE "${target} has no value for property ${prop}")
+ endforeach()
+
+ if(NOT lib_loc)
+ message(FATAL_ERROR "Found no libs to install for target ${target}")
+ endif()
+
+ # Copy the imported library into the wheel
+ install(FILES ${lib_loc} DESTINATION ${__LIB_DIR})
+ message(VERBOSE "install(FILES ${lib_loc} DESTINATION ${__LIB_DIR})")
+ endif()
+ endforeach()
+endfunction()
diff --git a/python/cuspatial/cuspatial/__init__.py b/python/cuspatial/cuspatial/__init__.py
index c72dc00bb..92da6ee06 100644
--- a/python/cuspatial/cuspatial/__init__.py
+++ b/python/cuspatial/cuspatial/__init__.py
@@ -1,4 +1,5 @@
-from ._version import get_versions
+# Copyright (c) 2023, NVIDIA CORPORATION.
+
from .core.geodataframe import GeoDataFrame
from .core.geoseries import GeoSeries
from .core.spatial import (
@@ -28,5 +29,4 @@
)
from .io.geopandas import from_geopandas
-__version__ = get_versions()["version"]
-del get_versions
+__version__ = "23.06.00"
diff --git a/python/cuspatial/cuspatial/_lib/CMakeLists.txt b/python/cuspatial/cuspatial/_lib/CMakeLists.txt
index e124dfb86..d3730c940 100644
--- a/python/cuspatial/cuspatial/_lib/CMakeLists.txt
+++ b/python/cuspatial/cuspatial/_lib/CMakeLists.txt
@@ -31,5 +31,9 @@ set(cython_sources
)
set(linked_libraries cuspatial::cuspatial)
-rapids_cython_create_modules(SOURCE_FILES "${cython_sources}" LINKED_LIBRARIES "${linked_libraries}"
- CXX)
+rapids_cython_create_modules(
+ CXX
+ ASSOCIATED_TARGETS cuspatial
+ SOURCE_FILES "${cython_sources}"
+ LINKED_LIBRARIES "${linked_libraries}"
+)
diff --git a/python/cuspatial/cuspatial/_version.py b/python/cuspatial/cuspatial/_version.py
deleted file mode 100644
index 2db1c5542..000000000
--- a/python/cuspatial/cuspatial/_version.py
+++ /dev/null
@@ -1,566 +0,0 @@
-# This file helps to compute a version number in source trees obtained from
-# git-archive tarball (such as those provided by githubs download-from-tag
-# feature). Distribution tarballs (built by setup.py sdist) and build
-# directories (produced by setup.py build) will contain a much shorter file
-# that just contains the computed version number.
-
-# This file is released into the public domain. Generated by
-# versioneer-0.18 (https://github.com/warner/python-versioneer)
-
-"""Git implementation of _version.py."""
-
-import errno
-import os
-import re
-import subprocess
-import sys
-
-
-def get_keywords():
- """Get the keywords needed to look up the version information."""
- # these strings will be replaced by git during git-archive.
- # setup.py/versioneer.py will grep for the variable names, so they must
- # each be defined on a line of their own. _version.py will just call
- # get_keywords().
- git_refnames = "$Format:%d$"
- git_full = "$Format:%H$"
- git_date = "$Format:%ci$"
- keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
- return keywords
-
-
-class VersioneerConfig:
- """Container for Versioneer configuration parameters."""
-
-
-def get_config():
- """Create, populate and return the VersioneerConfig() object."""
- # these strings are filled in when 'setup.py versioneer' creates
- # _version.py
- cfg = VersioneerConfig()
- cfg.VCS = "git"
- cfg.style = "pep440"
- cfg.tag_prefix = "v"
- cfg.parentdir_prefix = "cudf-"
- cfg.versionfile_source = "cudf/_version.py"
- cfg.verbose = False
- return cfg
-
-
-class NotThisMethod(Exception):
- """Exception raised if a method is not valid for the current scenario."""
-
-
-LONG_VERSION_PY = {}
-HANDLERS = {}
-
-
-def register_vcs_handler(vcs, method): # decorator
- """Decorator to mark a method as the handler for a particular VCS."""
-
- def decorate(f):
- """Store f in HANDLERS[vcs][method]."""
- if vcs not in HANDLERS:
- HANDLERS[vcs] = {}
- HANDLERS[vcs][method] = f
- return f
-
- return decorate
-
-
-def run_command(
- commands, args, cwd=None, verbose=False, hide_stderr=False, env=None
-):
- """Call the given command(s)."""
- assert isinstance(commands, list)
- p = None
- for c in commands:
- try:
- dispcmd = str([c] + args)
- # remember shell=False, so use git.cmd on windows, not just git
- p = subprocess.Popen(
- [c] + args,
- cwd=cwd,
- env=env,
- stdout=subprocess.PIPE,
- stderr=(subprocess.PIPE if hide_stderr else None),
- )
- break
- except EnvironmentError:
- e = sys.exc_info()[1]
- if e.errno == errno.ENOENT:
- continue
- if verbose:
- print("unable to run %s" % dispcmd)
- print(e)
- return None, None
- else:
- if verbose:
- print("unable to find command, tried %s" % (commands,))
- return None, None
- stdout = p.communicate()[0].strip()
- if sys.version_info[0] >= 3:
- stdout = stdout.decode()
- if p.returncode != 0:
- if verbose:
- print("unable to run %s (error)" % dispcmd)
- print("stdout was %s" % stdout)
- return None, p.returncode
- return stdout, p.returncode
-
-
-def versions_from_parentdir(parentdir_prefix, root, verbose):
- """Try to determine the version from the parent directory name.
-
- Source tarballs conventionally unpack into a directory that includes both
- the project name and a version string. We will also support searching up
- two directory levels for an appropriately named parent directory
- """
- rootdirs = []
-
- for i in range(3):
- dirname = os.path.basename(root)
- if dirname.startswith(parentdir_prefix):
- return {
- "version": dirname[len(parentdir_prefix) :],
- "full-revisionid": None,
- "dirty": False,
- "error": None,
- "date": None,
- }
- else:
- rootdirs.append(root)
- root = os.path.dirname(root) # up a level
-
- if verbose:
- print(
- "Tried directories %s but none started with prefix %s"
- % (str(rootdirs), parentdir_prefix)
- )
- raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
-
-
-@register_vcs_handler("git", "get_keywords")
-def git_get_keywords(versionfile_abs):
- """Extract version information from the given file."""
- # the code embedded in _version.py can just fetch the value of these
- # keywords. When used from setup.py, we don't want to import _version.py,
- # so we do it with a regexp instead. This function is not used from
- # _version.py.
- keywords = {}
- try:
- f = open(versionfile_abs, "r")
- for line in f.readlines():
- if line.strip().startswith("git_refnames ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["refnames"] = mo.group(1)
- if line.strip().startswith("git_full ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["full"] = mo.group(1)
- if line.strip().startswith("git_date ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["date"] = mo.group(1)
- f.close()
- except EnvironmentError:
- pass
- return keywords
-
-
-@register_vcs_handler("git", "keywords")
-def git_versions_from_keywords(keywords, tag_prefix, verbose):
- """Get version information from git keywords."""
- if not keywords:
- raise NotThisMethod("no keywords at all, weird")
- date = keywords.get("date")
- if date is not None:
- # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
- # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
- # -like" string, which we must then edit to make compliant), because
- # it's been around since git-1.5.3, and it's too difficult to
- # discover which version we're using, or to work around using an
- # older one.
- date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
- refnames = keywords["refnames"].strip()
- if refnames.startswith("$Format"):
- if verbose:
- print("keywords are unexpanded, not using")
- raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
- refs = set([r.strip() for r in refnames.strip("()").split(",")])
- # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
- # just "foo-1.0". If we see a "tag: " prefix, prefer those.
- TAG = "tag: "
- tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)])
- if not tags:
- # Either we're using git < 1.8.3, or there really are no tags. We use
- # a heuristic: assume all version tags have a digit. The old git %d
- # expansion behaves like git log --decorate=short and strips out the
- # refs/heads/ and refs/tags/ prefixes that would let us distinguish
- # between branches and tags. By ignoring refnames without digits, we
- # filter out many common branch names like "release" and
- # "stabilization", as well as "HEAD" and "master".
- tags = set([r for r in refs if re.search(r"\d", r)])
- if verbose:
- print("discarding '%s', no digits" % ",".join(refs - tags))
- if verbose:
- print("likely tags: %s" % ",".join(sorted(tags)))
- for ref in sorted(tags):
- # sorting will prefer e.g. "2.0" over "2.0rc1"
- if ref.startswith(tag_prefix):
- r = ref[len(tag_prefix) :]
- if verbose:
- print("picking %s" % r)
- return {
- "version": r,
- "full-revisionid": keywords["full"].strip(),
- "dirty": False,
- "error": None,
- "date": date,
- }
- # no suitable tags, so version is "0+unknown", but full hex is still there
- if verbose:
- print("no suitable tags, using unknown + full revision id")
- return {
- "version": "0+unknown",
- "full-revisionid": keywords["full"].strip(),
- "dirty": False,
- "error": "no suitable tags",
- "date": None,
- }
-
-
-@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
- """Get version from 'git describe' in the root of the source tree.
-
- This only gets called if the git-archive 'subst' keywords were *not*
- expanded, and _version.py hasn't already been rewritten with a short
- version string, meaning we're inside a checked out source tree.
- """
- GITS = ["git"]
- if sys.platform == "win32":
- GITS = ["git.cmd", "git.exe"]
-
- out, rc = run_command(
- GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True
- )
- if rc != 0:
- if verbose:
- print("Directory %s not under git control" % root)
- raise NotThisMethod("'git rev-parse --git-dir' returned error")
-
- # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
- # if there isn't one, this yields HEX[-dirty] (no NUM)
- describe_out, rc = run_command(
- GITS,
- [
- "describe",
- "--tags",
- "--dirty",
- "--always",
- "--long",
- "--match",
- "%s*" % tag_prefix,
- ],
- cwd=root,
- )
- # --long was added in git-1.5.5
- if describe_out is None:
- raise NotThisMethod("'git describe' failed")
- describe_out = describe_out.strip()
- full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
- if full_out is None:
- raise NotThisMethod("'git rev-parse' failed")
- full_out = full_out.strip()
-
- pieces = {}
- pieces["long"] = full_out
- pieces["short"] = full_out[:7] # maybe improved later
- pieces["error"] = None
-
- # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
- # TAG might have hyphens.
- git_describe = describe_out
-
- # look for -dirty suffix
- dirty = git_describe.endswith("-dirty")
- pieces["dirty"] = dirty
- if dirty:
- git_describe = git_describe[: git_describe.rindex("-dirty")]
-
- # now we have TAG-NUM-gHEX or HEX
-
- if "-" in git_describe:
- # TAG-NUM-gHEX
- mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
- if not mo:
- # unparseable. Maybe git-describe is misbehaving?
- pieces["error"] = (
- "unable to parse git-describe output: '%s'" % describe_out
- )
- return pieces
-
- # tag
- full_tag = mo.group(1)
- if not full_tag.startswith(tag_prefix):
- if verbose:
- fmt = "tag '%s' doesn't start with prefix '%s'"
- print(fmt % (full_tag, tag_prefix))
- pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
- full_tag,
- tag_prefix,
- )
- return pieces
- pieces["closest-tag"] = full_tag[len(tag_prefix) :]
-
- # distance: number of commits since tag
- pieces["distance"] = int(mo.group(2))
-
- # commit: short hex revision ID
- pieces["short"] = mo.group(3)
-
- else:
- # HEX: no tags
- pieces["closest-tag"] = None
- count_out, rc = run_command(
- GITS, ["rev-list", "HEAD", "--count"], cwd=root
- )
- pieces["distance"] = int(count_out) # total number of commits
-
- # commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[
- 0
- ].strip()
- pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
-
- return pieces
-
-
-def plus_or_dot(pieces):
- """Return a + if we don't already have one, else return a ."""
- if "+" in pieces.get("closest-tag", ""):
- return "."
- return "+"
-
-
-def render_pep440(pieces):
- """Build up version string, with post-release "local version identifier".
-
- Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
- get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
-
- Exceptions:
- 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += plus_or_dot(pieces)
- rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- else:
- # exception #1
- rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- return rendered
-
-
-def render_pep440_pre(pieces):
- """TAG[.post.devDISTANCE] -- No -dirty.
-
- Exceptions:
- 1: no tags. 0.post.devDISTANCE
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"]:
- rendered += ".post.dev%d" % pieces["distance"]
- else:
- # exception #1
- rendered = "0.post.dev%d" % pieces["distance"]
- return rendered
-
-
-def render_pep440_post(pieces):
- """TAG[.postDISTANCE[.dev0]+gHEX] .
-
- The ".dev0" means dirty. Note that .dev0 sorts backwards
- (a dirty tree will appear "older" than the corresponding clean one),
- but you shouldn't be releasing software with -dirty anyways.
-
- Exceptions:
- 1: no tags. 0.postDISTANCE[.dev0]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- rendered += plus_or_dot(pieces)
- rendered += "g%s" % pieces["short"]
- else:
- # exception #1
- rendered = "0.post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- rendered += "+g%s" % pieces["short"]
- return rendered
-
-
-def render_pep440_old(pieces):
- """TAG[.postDISTANCE[.dev0]] .
-
- The ".dev0" means dirty.
-
-    Exceptions:
- 1: no tags. 0.postDISTANCE[.dev0]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- else:
- # exception #1
- rendered = "0.post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- return rendered
-
-
-def render_git_describe(pieces):
- """TAG[-DISTANCE-gHEX][-dirty].
-
- Like 'git describe --tags --dirty --always'.
-
- Exceptions:
- 1: no tags. HEX[-dirty] (note: no 'g' prefix)
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"]:
- rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
- else:
- # exception #1
- rendered = pieces["short"]
- if pieces["dirty"]:
- rendered += "-dirty"
- return rendered
-
-
-def render_git_describe_long(pieces):
- """TAG-DISTANCE-gHEX[-dirty].
-
- Like 'git describe --tags --dirty --always -long'.
- The distance/hash is unconditional.
-
- Exceptions:
- 1: no tags. HEX[-dirty] (note: no 'g' prefix)
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
- else:
- # exception #1
- rendered = pieces["short"]
- if pieces["dirty"]:
- rendered += "-dirty"
- return rendered
-
-
-def render(pieces, style):
- """Render the given version pieces into the requested style."""
- if pieces["error"]:
- return {
- "version": "unknown",
- "full-revisionid": pieces.get("long"),
- "dirty": None,
- "error": pieces["error"],
- "date": None,
- }
-
- if not style or style == "default":
- style = "pep440" # the default
-
- if style == "pep440":
- rendered = render_pep440(pieces)
- elif style == "pep440-pre":
- rendered = render_pep440_pre(pieces)
- elif style == "pep440-post":
- rendered = render_pep440_post(pieces)
- elif style == "pep440-old":
- rendered = render_pep440_old(pieces)
- elif style == "git-describe":
- rendered = render_git_describe(pieces)
- elif style == "git-describe-long":
- rendered = render_git_describe_long(pieces)
- else:
- raise ValueError("unknown style '%s'" % style)
-
- return {
- "version": rendered,
- "full-revisionid": pieces["long"],
- "dirty": pieces["dirty"],
- "error": None,
- "date": pieces.get("date"),
- }
-
-
-def get_versions():
- """Get version information or return default if unable to do so."""
- # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
- # __file__, we can work backwards from there to the root. Some
- # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
- # case we can only use expanded keywords.
-
- cfg = get_config()
- verbose = cfg.verbose
-
- try:
- return git_versions_from_keywords(
- get_keywords(), cfg.tag_prefix, verbose
- )
- except NotThisMethod:
- pass
-
- try:
- root = os.path.realpath(__file__)
- # versionfile_source is the relative path from the top of the source
- # tree (where the .git directory might live) to this file. Invert
- # this to find the root from __file__.
- for i in cfg.versionfile_source.split("/"):
- root = os.path.dirname(root)
- except NameError:
- return {
- "version": "0+unknown",
- "full-revisionid": None,
- "dirty": None,
- "error": "unable to find root of source tree",
- "date": None,
- }
-
- try:
- pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
- return render(pieces, cfg.style)
- except NotThisMethod:
- pass
-
- try:
- if cfg.parentdir_prefix:
- return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
- except NotThisMethod:
- pass
-
- return {
- "version": "0+unknown",
- "full-revisionid": None,
- "dirty": None,
- "error": "unable to compute version",
- "date": None,
- }
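
The _version.py removed above computed the version at import time from git metadata. For reference, a minimal sketch of the PEP 440 rendering it performed (function name and sample values below are illustrative only, not part of this change):

def render_pep440_sketch(pieces):
    # Condensed restatement of the removed render_pep440():
    # TAG[+DISTANCE.gHEX[.dirty]], or 0+untagged.DISTANCE.gHEX[.dirty] when no tag exists.
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            sep = "." if "+" in rendered else "+"
            rendered += "%s%d.g%s" % (sep, pieces["distance"], pieces["short"])
            if pieces["dirty"]:
                rendered += ".dirty"
    else:
        rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered

# Two commits past a v23.04.00 tag, with uncommitted local edits ("dirty" tree):
print(render_pep440_sketch(
    {"closest-tag": "23.04.00", "distance": 2, "short": "1076c97", "dirty": True}
))  # -> 23.04.00+2.g1076c97.dirty
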
diff --git a/python/cuspatial/cuspatial/utils/join_utils.py b/python/cuspatial/cuspatial/utils/join_utils.py
index a50d7c3f2..46bd476df 100644
--- a/python/cuspatial/cuspatial/utils/join_utils.py
+++ b/python/cuspatial/cuspatial/utils/join_utils.py
@@ -2,9 +2,9 @@
import operator
-import rmm
from numba import cuda
+import rmm
from cudf.core.buffer import acquire_spill_lock
diff --git a/python/cuspatial/pyproject.toml b/python/cuspatial/pyproject.toml
index 0261c0b09..8c701a27e 100644
--- a/python/cuspatial/pyproject.toml
+++ b/python/cuspatial/pyproject.toml
@@ -1,4 +1,4 @@
-# Copyright (c) 2022, NVIDIA CORPORATION.
+# Copyright (c) 2022-2023, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,12 +13,97 @@
# limitations under the License.
[build-system]
-
+build-backend = "setuptools.build_meta"
requires = [
- "wheel",
- "setuptools",
- "cython>=0.29,<0.30",
- "scikit-build>=0.13.1",
"cmake>=3.23.1,!=3.25.0",
+ "cudf==23.6.*",
+ "cython>=0.29,<0.30",
"ninja",
+ "rmm==23.6.*",
+ "scikit-build>=0.13.1",
+ "setuptools",
+ "wheel",
+] # This list was generated by `rapids-dependency-file-generator`. To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
+
+[project]
+name = "cuspatial"
+version = "23.6.0"
+description = "cuSpatial: GPU-Accelerated Spatial and Trajectory Data Management and Analytics Library"
+readme = { file = "README.md", content-type = "text/markdown" }
+authors = [
+ { name = "NVIDIA Corporation" },
+]
+license = { text = "Apache 2.0" }
+requires-python = ">=3.9"
+dependencies = [
+ "cudf==23.6.*",
+ "geopandas>=0.11.0",
+ "rmm==23.6.*",
+] # This list was generated by `rapids-dependency-file-generator`. To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
+classifiers = [
+ "Intended Audience :: Developers",
+ "Topic :: Database",
+ "Topic :: Scientific/Engineering",
+ "License :: OSI Approved :: Apache Software License",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+]
+
+[project.optional-dependencies]
+test = [
+ "pytest",
+ "pytest-cov",
+ "pytest-xdist",
+] # This list was generated by `rapids-dependency-file-generator`. To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
+
+[project.urls]
+Homepage = "https://github.com/rapidsai/cuspatial"
+Documentation = "https://docs.rapids.ai/api/cuspatial/stable/"
+
+[tool.setuptools]
+license-files = ["LICENSE"]
+
+[tool.isort]
+line_length = 79
+multi_line_output = 3
+include_trailing_comma = true
+force_grid_wrap = 0
+combine_as_imports = true
+order_by_type = true
+known_dask = [
+ "dask",
+ "distributed",
+ "dask_cuda",
+]
+known_rapids = [
+ "rmm",
+ "cudf",
+]
+known_first_party = [
+ "cuspatial",
+]
+default_section = "THIRDPARTY"
+sections = [
+ "FUTURE",
+ "STDLIB",
+ "THIRDPARTY",
+ "DASK",
+ "RAPIDS",
+ "FIRSTPARTY",
+ "LOCALFOLDER",
+]
+skip = [
+ "thirdparty",
+ ".eggs",
+ ".git",
+ ".hg",
+ ".mypy_cache",
+ ".tox",
+ ".venv",
+ "_build",
+ "buck-out",
+ "build",
+ "dist",
+ "__init__.py",
]
diff --git a/python/cuspatial/setup.cfg b/python/cuspatial/setup.cfg
deleted file mode 100644
index 8603312fa..000000000
--- a/python/cuspatial/setup.cfg
+++ /dev/null
@@ -1,57 +0,0 @@
-# Copyright (c) 2018, NVIDIA CORPORATION.
-
-# See the docstring in versioneer.py for instructions. Note that you must
-# re-run 'versioneer.py setup' after changing this section, and commit the
-# resulting files.
-
-[versioneer]
-VCS = git
-style = pep440
-versionfile_source = cuspatial/_version.py
-versionfile_build = cuspatial/_version.py
-tag_prefix = v
-parentdir_prefix = cuspatial-
-
-[flake8]
-exclude = __init__.py
-ignore =
- # line break before binary operator
- W503
- # whitespace before :
- E203
-
-[isort]
-line_length=79
-multi_line_output=3
-include_trailing_comma=True
-force_grid_wrap=0
-combine_as_imports=True
-order_by_type=True
-known_dask=
- dask
- distributed
- dask_cuda
-known_rapids=
- librmm_cffi
- nvtext
- cuml
- cugraph
- cudf
- dask_cudf
-known_first_party=
- cuspatial
-default_section=THIRDPARTY
-sections=FUTURE,STDLIB,THIRDPARTY,DASK,RAPIDS,FIRSTPARTY,LOCALFOLDER
-skip=
- thirdparty
- .eggs
- .git
- .hg
- .mypy_cache
- .tox
- .venv
- _build
- buck-out
- build
- dist
- __init__.py
diff --git a/python/cuspatial/setup.py b/python/cuspatial/setup.py
index c7dc01a4d..2ea444b56 100644
--- a/python/cuspatial/setup.py
+++ b/python/cuspatial/setup.py
@@ -1,31 +1,11 @@
-# Copyright (c) 2018-2022, NVIDIA CORPORATION.
-import versioneer
+# Copyright (c) 2018-2023, NVIDIA CORPORATION.
from setuptools import find_packages
from skbuild import setup
+packages = find_packages(include=["cuspatial*"])
+
setup(
- name="cuspatial",
- version=versioneer.get_version(),
- description=(
- "cuSpatial: GPU-Accelerated Spatial and Trajectory Data Management and"
- " Analytics Library"
- ),
- url="https://github.com/rapidsai/cuspatial",
- author="NVIDIA Corporation",
- license="Apache 2.0",
- classifiers=[
- "Intended Audience :: Developers",
- "Topic :: Database",
- "Topic :: Scientific/Engineering",
- "License :: OSI Approved :: Apache Software License",
- "Programming Language :: Python",
- "Programming Language :: Python :: 3.9",
- "Programming Language :: Python :: 3.9",
- "Programming Language :: Python :: 3.10",
- ],
- packages=find_packages(include=["cuspatial", "cuspatial.*"]),
- package_data={"cuspatial._lib": ["*.pxd"]},
- cmdclass=versioneer.get_cmdclass(),
- install_requires=["numba"],
+ packages=packages,
+ package_data={key: ["*.pxd", "*.hpp", "*.cuh"] for key in packages},
zip_safe=False,
)
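
The dict comprehension above attaches the same data-file globs to every discovered package (previously only cuspatial._lib shipped *.pxd files). An illustrative expansion, assuming find_packages() returned just two packages (the real list is computed at build time):

packages = ["cuspatial", "cuspatial._lib"]  # assumed subset, for illustration only
package_data = {key: ["*.pxd", "*.hpp", "*.cuh"] for key in packages}
print(package_data)
# {'cuspatial': ['*.pxd', '*.hpp', '*.cuh'],
#  'cuspatial._lib': ['*.pxd', '*.hpp', '*.cuh']}
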
diff --git a/python/cuspatial/versioneer.py b/python/cuspatial/versioneer.py
deleted file mode 100644
index 07ee33d5b..000000000
--- a/python/cuspatial/versioneer.py
+++ /dev/null
@@ -1,1904 +0,0 @@
-# Version: 0.18
-
-"""The Versioneer - like a rocketeer, but for versions.
-
-The Versioneer
-==============
-
-* like a rocketeer, but for versions!
-* https://github.com/warner/python-versioneer
-* Brian Warner
-* License: Public Domain
-* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy
-* [![Latest Version]
-(https://pypip.in/version/versioneer/badge.svg?style=flat)
-](https://pypi.python.org/pypi/versioneer/)
-* [![Build Status]
-(https://travis-ci.org/warner/python-versioneer.png?branch=master)
-](https://travis-ci.org/warner/python-versioneer)
-
-This is a tool for managing a recorded version number in distutils-based
-python projects. The goal is to remove the tedious and error-prone "update
-the embedded version string" step from your release process. Making a new
-release should be as easy as recording a new tag in your version-control
-system, and maybe making new tarballs.
-
-
-## Quick Install
-
-* `pip install versioneer` to somewhere in your $PATH
-* add a `[versioneer]` section to your setup.cfg (see below)
-* run `versioneer install` in your source tree, commit the results
-
-## Version Identifiers
-
-Source trees come from a variety of places:
-
-* a version-control system checkout (mostly used by developers)
-* a nightly tarball, produced by build automation
-* a snapshot tarball, produced by a web-based VCS browser, like github's
- "tarball from tag" feature
-* a release tarball, produced by "setup.py sdist", distributed through PyPI
-
-Within each source tree, the version identifier (either a string or a number,
-this tool is format-agnostic) can come from a variety of places:
-
-* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
- about recent "tags" and an absolute revision-id
-* the name of the directory into which the tarball was unpacked
-* an expanded VCS keyword ($Id$, etc)
-* a `_version.py` created by some earlier build step
-
-For released software, the version identifier is closely related to a VCS
-tag. Some projects use tag names that include more than just the version
-string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
-needs to strip the tag prefix to extract the version identifier. For
-unreleased software (between tags), the version identifier should provide
-enough information to help developers recreate the same tree, while also
-giving them an idea of roughly how old the tree is (after version 1.2, before
-version 1.3). Many VCS systems can report a description that captures this,
-for example `git describe --tags --dirty --always` reports things like
-"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
-0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
-uncommitted changes).
-
-The version identifier is used for multiple purposes:
-
-* to allow the module to self-identify its version: `myproject.__version__`
-* to choose a name and prefix for a 'setup.py sdist' tarball
-
-## Theory of Operation
-
-Versioneer works by adding a special `_version.py` file into your source
-tree, where your `__init__.py` can import it. This `_version.py` knows how to
-dynamically ask the VCS tool for version information at import time.
-
-`_version.py` also contains `$Revision$` markers, and the installation
-process marks `_version.py` to have this marker rewritten with a tag name
-during the `git archive` command. As a result, generated tarballs will
-contain enough information to get the proper version.
-
-To allow `setup.py` to compute a version too, a `versioneer.py` is added to
-the top level of your source tree, next to `setup.py` and the `setup.cfg`
-that configures it. This overrides several distutils/setuptools commands to
-compute the version when invoked, and changes `setup.py build` and `setup.py
-sdist` to replace `_version.py` with a small static file that contains just
-the generated version data.
-
-## Installation
-
-See [INSTALL.md](./INSTALL.md) for detailed installation instructions.
-
-## Version-String Flavors
-
-Code which uses Versioneer can learn about its version string at runtime by
-importing `_version` from your main `__init__.py` file and running the
-`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
-import the top-level `versioneer.py` and run `get_versions()`.
-
-Both functions return a dictionary with different flavors of version
-information:
-
-* `['version']`: A condensed version string, rendered using the selected
- style. This is the most commonly used value for the project's version
- string. The default "pep440" style yields strings like `0.11`,
- `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section
- below for alternative styles.
-
-* `['full-revisionid']`: detailed revision identifier. For Git, this is the
- full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac".
-
-* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the
- commit date in ISO 8601 format. This will be None if the date is not
- available.
-
-* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that
- this is only accurate if run in a VCS checkout, otherwise it is likely to
- be False or None
-
-* `['error']`: if the version string could not be computed, this will be set
- to a string describing the problem, otherwise it will be None. It may be
- useful to throw an exception in setup.py if this is set, to avoid e.g.
- creating tarballs with a version string of "unknown".
-
-Some variants are more useful than others. Including `full-revisionid` in a
-bug report should allow developers to reconstruct the exact code being tested
-(or indicate the presence of local changes that should be shared with the
-developers). `version` is suitable for display in an "about" box or a CLI
-`--version` output: it can be easily compared against release notes and lists
-of bugs fixed in various releases.
-
-The installer adds the following text to your `__init__.py` to place a basic
-version in `YOURPROJECT.__version__`:
-
- from cuspatial._version import get_versions
- __version__ = get_versions()['version']
- del get_versions
-
-## Styles
-
-The setup.cfg `style=` configuration controls how the VCS information is
-rendered into a version string.
-
-The default style, "pep440", produces a PEP440-compliant string, equal to the
-un-prefixed tag name for actual releases, and containing an additional "local
-version" section with more detail for in-between builds. For Git, this is
-TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags
---dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the
-tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and
-that this commit is two revisions ("+2") beyond the "0.11" tag. For released
-software (exactly equal to a known tag), the identifier will only contain the
-stripped tag, e.g. "0.11".
-
-Other styles are available. See [details.md](details.md) in the Versioneer
-source tree for descriptions.
-
-## Debugging
-
-Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
-to return a version of "0+unknown". To investigate the problem, run `setup.py
-version`, which will run the version-lookup code in a verbose mode, and will
-display the full contents of `get_versions()` (including the `error` string,
-which may help identify what went wrong).
-
-## Known Limitations
-
-Some situations are known to cause problems for Versioneer. This details the
-most significant ones. More can be found on Github
-[issues page](https://github.com/warner/python-versioneer/issues).
-
-### Subprojects
-
-Versioneer has limited support for source trees in which `setup.py` is not in
-the root directory (e.g. `setup.py` and `.git/` are *not* siblings). There are
-two common reasons why `setup.py` might not be in the root:
-
-* Source trees which contain multiple subprojects, such as
- [Buildbot](https://github.com/buildbot/buildbot), which contains both
- "master" and "slave" subprojects, each with their own `setup.py`,
- `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
- distributions (and upload multiple independently-installable tarballs).
-* Source trees whose main purpose is to contain a C library, but which also
- provide bindings to Python (and perhaps other languages) in subdirectories.
-
-Versioneer will look for `.git` in parent directories, and most operations
-should get the right version string. However `pip` and `setuptools` have bugs
-and implementation details which frequently cause `pip install .` from a
-subproject directory to fail to find a correct version string (so it usually
-defaults to `0+unknown`).
-
-`pip install --editable .` should work correctly. `setup.py install` might
-work too.
-
-Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
-some later version.
-
-[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking
-this issue. The discussion in
-[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the
-issue from the Versioneer side in more detail.
-[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
-[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
-pip to let Versioneer work correctly.
-
-Versioneer-0.16 and earlier only looked for a `.git` directory next to the
-`setup.cfg`, so subprojects were completely unsupported with those releases.
-
-### Editable installs with setuptools <= 18.5
-
-`setup.py develop` and `pip install --editable .` allow you to install a
-project into a virtualenv once, then continue editing the source code (and
-test) without re-installing after every change.
-
-"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a
-convenient way to specify executable scripts that should be installed along
-with the python package.
-
-These both work as expected when using modern setuptools. When using
-setuptools-18.5 or earlier, however, certain operations will cause
-`pkg_resources.DistributionNotFound` errors when running the entrypoint
-script, which must be resolved by re-installing the package. This happens
-when the install happens with one version, then the egg_info data is
-regenerated while a different version is checked out. Many setup.py commands
-cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
-a different virtualenv), so this can be surprising.
-
-[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes
-this one, but upgrading to a newer version of setuptools should probably
-resolve it.
-
-### Unicode version strings
-
-While Versioneer works (and is continually tested) with both Python 2 and
-Python 3, it is not entirely consistent with bytes-vs-unicode distinctions.
-Newer releases probably generate unicode version strings on py2. It's not
-clear that this is wrong, but it may be surprising for applications when they
-write these strings to a network connection or include them in bytes-oriented
-APIs like cryptographic checksums.
-
-[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates
-this question.
-
-
-## Updating Versioneer
-
-To upgrade your project to a new release of Versioneer, do the following:
-
-* install the new Versioneer (`pip install -U versioneer` or equivalent)
-* edit `setup.cfg`, if necessary, to include any new configuration settings
- indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details.
-* re-run `versioneer install` in your source tree, to replace
- `SRC/_version.py`
-* commit any changed files
-
-## Future Directions
-
-This tool is designed to make it easily extended to other version-control
-systems: all VCS-specific components are in separate directories like
-src/git/ . The top-level `versioneer.py` script is assembled from these
-components by running make-versioneer.py . In the future, make-versioneer.py
-will take a VCS name as an argument, and will construct a version of
-`versioneer.py` that is specific to the given VCS. It might also take the
-configuration arguments that are currently provided manually during
-installation by editing setup.py . Alternatively, it might go the other
-direction and include code from all supported VCS systems, reducing the
-number of intermediate scripts.
-
-
-## License
-
-To make Versioneer easier to embed, all its code is dedicated to the public
-domain. The `_version.py` that it creates is also in the public domain.
-Specifically, both are released under the Creative Commons "Public Domain
-Dedication" license (CC0-1.0), as described in
-https://creativecommons.org/publicdomain/zero/1.0/ .
-
-"""
-
-from __future__ import print_function
-
-import errno
-import json
-import os
-import re
-import subprocess
-import sys
-
-try:
- import configparser
-except ImportError:
- import ConfigParser as configparser
-
-
-class VersioneerConfig:
- """Container for Versioneer configuration parameters."""
-
-
-def get_root():
- """Get the project root directory.
-
- We require that all commands are run from the project root, i.e. the
- directory that contains setup.py, setup.cfg, and versioneer.py .
- """
- root = os.path.realpath(os.path.abspath(os.getcwd()))
- setup_py = os.path.join(root, "setup.py")
- versioneer_py = os.path.join(root, "versioneer.py")
- if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
- # allow 'python path/to/setup.py COMMAND'
- root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
- setup_py = os.path.join(root, "setup.py")
- versioneer_py = os.path.join(root, "versioneer.py")
- if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
- err = (
- "Versioneer was unable to run the project root directory. "
- "Versioneer requires setup.py to be executed from "
- "its immediate directory (like 'python setup.py COMMAND'), "
- "or in a way that lets it use sys.argv[0] to find the root "
- "(like 'python path/to/setup.py COMMAND')."
- )
- raise VersioneerBadRootError(err)
- try:
- # Certain runtime workflows (setup.py install/develop in a setuptools
- # tree) execute all dependencies in a single python process, so
- # "versioneer" may be imported multiple times, and python's shared
- # module-import table will cache the first one. So we can't use
- # os.path.dirname(__file__), as that will find whichever
- # versioneer.py was first imported, even in later projects.
- me = os.path.realpath(os.path.abspath(__file__))
- me_dir = os.path.normcase(os.path.splitext(me)[0])
- vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
- if me_dir != vsr_dir:
- print(
- "Warning: build in %s is using versioneer.py from %s"
- % (os.path.dirname(me), versioneer_py)
- )
- except NameError:
- pass
- return root
-
-
-def get_config_from_root(root):
- """Read the project setup.cfg file to determine Versioneer config."""
- # This might raise EnvironmentError (if setup.cfg is missing), or
- # configparser.NoSectionError (if it lacks a [versioneer] section), or
- # configparser.NoOptionError (if it lacks "VCS="). See the docstring at
- # the top of versioneer.py for instructions on writing your setup.cfg .
- setup_cfg = os.path.join(root, "setup.cfg")
- parser = configparser.SafeConfigParser()
- with open(setup_cfg, "r") as f:
- parser.readfp(f)
- VCS = parser.get("versioneer", "VCS") # mandatory
-
- def get(parser, name):
- if parser.has_option("versioneer", name):
- return parser.get("versioneer", name)
- return None
-
- cfg = VersioneerConfig()
- cfg.VCS = VCS
- cfg.style = get(parser, "style") or ""
- cfg.versionfile_source = get(parser, "versionfile_source")
- cfg.versionfile_build = get(parser, "versionfile_build")
- cfg.tag_prefix = get(parser, "tag_prefix")
- if cfg.tag_prefix in ("''", '""'):
- cfg.tag_prefix = ""
- cfg.parentdir_prefix = get(parser, "parentdir_prefix")
- cfg.verbose = get(parser, "verbose")
- return cfg
-
-
-class NotThisMethod(Exception):
- """Exception raised if a method is not valid for the current scenario."""
-
-
-# these dictionaries contain VCS-specific tools
-LONG_VERSION_PY = {}
-HANDLERS = {}
-
-
-def register_vcs_handler(vcs, method): # decorator
- """Decorator to mark a method as the handler for a particular VCS."""
-
- def decorate(f):
- """Store f in HANDLERS[vcs][method]."""
- if vcs not in HANDLERS:
- HANDLERS[vcs] = {}
- HANDLERS[vcs][method] = f
- return f
-
- return decorate
-
-
-def run_command(
- commands, args, cwd=None, verbose=False, hide_stderr=False, env=None
-):
- """Call the given command(s)."""
- assert isinstance(commands, list)
- p = None
- for c in commands:
- try:
- dispcmd = str([c] + args)
- # remember shell=False, so use git.cmd on windows, not just git
- p = subprocess.Popen(
- [c] + args,
- cwd=cwd,
- env=env,
- stdout=subprocess.PIPE,
- stderr=(subprocess.PIPE if hide_stderr else None),
- )
- break
- except EnvironmentError:
- e = sys.exc_info()[1]
- if e.errno == errno.ENOENT:
- continue
- if verbose:
- print("unable to run %s" % dispcmd)
- print(e)
- return None, None
- else:
- if verbose:
- print("unable to find command, tried %s" % (commands,))
- return None, None
- stdout = p.communicate()[0].strip()
- if sys.version_info[0] >= 3:
- stdout = stdout.decode()
- if p.returncode != 0:
- if verbose:
- print("unable to run %s (error)" % dispcmd)
- print("stdout was %s" % stdout)
- return None, p.returncode
- return stdout, p.returncode
-
-
-LONG_VERSION_PY[
- "git"
-] = r'''
-# This file helps to compute a version number in source trees obtained from
-# git-archive tarball (such as those provided by githubs download-from-tag
-# feature). Distribution tarballs (built by setup.py sdist) and build
-# directories (produced by setup.py build) will contain a much shorter file
-# that just contains the computed version number.
-
-# This file is released into the public domain. Generated by
-# versioneer-0.18 (https://github.com/warner/python-versioneer)
-
-"""Git implementation of _version.py."""
-
-import errno
-import os
-import re
-import subprocess
-import sys
-
-
-def get_keywords():
- """Get the keywords needed to look up the version information."""
- # these strings will be replaced by git during git-archive.
- # setup.py/versioneer.py will grep for the variable names, so they must
- # each be defined on a line of their own. _version.py will just call
- # get_keywords().
- git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
- git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
- git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s"
- keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
- return keywords
-
-
-class VersioneerConfig:
- """Container for Versioneer configuration parameters."""
-
-
-def get_config():
- """Create, populate and return the VersioneerConfig() object."""
- # these strings are filled in when 'setup.py versioneer' creates
- # _version.py
- cfg = VersioneerConfig()
- cfg.VCS = "git"
- cfg.style = "%(STYLE)s"
- cfg.tag_prefix = "%(TAG_PREFIX)s"
- cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
- cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
- cfg.verbose = False
- return cfg
-
-
-class NotThisMethod(Exception):
- """Exception raised if a method is not valid for the current scenario."""
-
-
-LONG_VERSION_PY = {}
-HANDLERS = {}
-
-
-def register_vcs_handler(vcs, method): # decorator
- """Decorator to mark a method as the handler for a particular VCS."""
- def decorate(f):
- """Store f in HANDLERS[vcs][method]."""
- if vcs not in HANDLERS:
- HANDLERS[vcs] = {}
- HANDLERS[vcs][method] = f
- return f
- return decorate
-
-
-def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
- env=None):
- """Call the given command(s)."""
- assert isinstance(commands, list)
- p = None
- for c in commands:
- try:
- dispcmd = str([c] + args)
- # remember shell=False, so use git.cmd on windows, not just git
- p = subprocess.Popen([c] + args, cwd=cwd, env=env,
- stdout=subprocess.PIPE,
- stderr=(subprocess.PIPE if hide_stderr
- else None))
- break
- except EnvironmentError:
- e = sys.exc_info()[1]
- if e.errno == errno.ENOENT:
- continue
- if verbose:
- print("unable to run %%s" %% dispcmd)
- print(e)
- return None, None
- else:
- if verbose:
- print("unable to find command, tried %%s" %% (commands,))
- return None, None
- stdout = p.communicate()[0].strip()
- if sys.version_info[0] >= 3:
- stdout = stdout.decode()
- if p.returncode != 0:
- if verbose:
- print("unable to run %%s (error)" %% dispcmd)
- print("stdout was %%s" %% stdout)
- return None, p.returncode
- return stdout, p.returncode
-
-
-def versions_from_parentdir(parentdir_prefix, root, verbose):
- """Try to determine the version from the parent directory name.
-
- Source tarballs conventionally unpack into a directory that includes both
- the project name and a version string. We will also support searching up
- two directory levels for an appropriately named parent directory
- """
- rootdirs = []
-
- for i in range(3):
- dirname = os.path.basename(root)
- if dirname.startswith(parentdir_prefix):
- return {"version": dirname[len(parentdir_prefix):],
- "full-revisionid": None,
- "dirty": False, "error": None, "date": None}
- else:
- rootdirs.append(root)
- root = os.path.dirname(root) # up a level
-
- if verbose:
- print("Tried directories %%s but none started with prefix %%s" %%
- (str(rootdirs), parentdir_prefix))
- raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
-
-
-@register_vcs_handler("git", "get_keywords")
-def git_get_keywords(versionfile_abs):
- """Extract version information from the given file."""
- # the code embedded in _version.py can just fetch the value of these
- # keywords. When used from setup.py, we don't want to import _version.py,
- # so we do it with a regexp instead. This function is not used from
- # _version.py.
- keywords = {}
- try:
- f = open(versionfile_abs, "r")
- for line in f.readlines():
- if line.strip().startswith("git_refnames ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["refnames"] = mo.group(1)
- if line.strip().startswith("git_full ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["full"] = mo.group(1)
- if line.strip().startswith("git_date ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["date"] = mo.group(1)
- f.close()
- except EnvironmentError:
- pass
- return keywords
-
-
-@register_vcs_handler("git", "keywords")
-def git_versions_from_keywords(keywords, tag_prefix, verbose):
- """Get version information from git keywords."""
- if not keywords:
- raise NotThisMethod("no keywords at all, weird")
- date = keywords.get("date")
- if date is not None:
- # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
- # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
- # -like" string, which we must then edit to make compliant), because
- # it's been around since git-1.5.3, and it's too difficult to
- # discover which version we're using, or to work around using an
- # older one.
- date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
- refnames = keywords["refnames"].strip()
- if refnames.startswith("$Format"):
- if verbose:
- print("keywords are unexpanded, not using")
- raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
- refs = set([r.strip() for r in refnames.strip("()").split(",")])
- # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
- # just "foo-1.0". If we see a "tag: " prefix, prefer those.
- TAG = "tag: "
- tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
- if not tags:
- # Either we're using git < 1.8.3, or there really are no tags. We use
- # a heuristic: assume all version tags have a digit. The old git %%d
- # expansion behaves like git log --decorate=short and strips out the
- # refs/heads/ and refs/tags/ prefixes that would let us distinguish
- # between branches and tags. By ignoring refnames without digits, we
- # filter out many common branch names like "release" and
- # "stabilization", as well as "HEAD" and "master".
- tags = set([r for r in refs if re.search(r'\d', r)])
- if verbose:
- print("discarding '%%s', no digits" %% ",".join(refs - tags))
- if verbose:
- print("likely tags: %%s" %% ",".join(sorted(tags)))
- for ref in sorted(tags):
- # sorting will prefer e.g. "2.0" over "2.0rc1"
- if ref.startswith(tag_prefix):
- r = ref[len(tag_prefix):]
- if verbose:
- print("picking %%s" %% r)
- return {"version": r,
- "full-revisionid": keywords["full"].strip(),
- "dirty": False, "error": None,
- "date": date}
- # no suitable tags, so version is "0+unknown", but full hex is still there
- if verbose:
- print("no suitable tags, using unknown + full revision id")
- return {"version": "0+unknown",
- "full-revisionid": keywords["full"].strip(),
- "dirty": False, "error": "no suitable tags", "date": None}
-
-
-@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
- """Get version from 'git describe' in the root of the source tree.
-
- This only gets called if the git-archive 'subst' keywords were *not*
- expanded, and _version.py hasn't already been rewritten with a short
- version string, meaning we're inside a checked out source tree.
- """
- GITS = ["git"]
- if sys.platform == "win32":
- GITS = ["git.cmd", "git.exe"]
-
- out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
- hide_stderr=True)
- if rc != 0:
- if verbose:
- print("Directory %%s not under git control" %% root)
- raise NotThisMethod("'git rev-parse --git-dir' returned error")
-
- # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
- # if there isn't one, this yields HEX[-dirty] (no NUM)
- describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
- "--always", "--long",
- "--match", "%%s*" %% tag_prefix],
- cwd=root)
- # --long was added in git-1.5.5
- if describe_out is None:
- raise NotThisMethod("'git describe' failed")
- describe_out = describe_out.strip()
- full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
- if full_out is None:
- raise NotThisMethod("'git rev-parse' failed")
- full_out = full_out.strip()
-
- pieces = {}
- pieces["long"] = full_out
- pieces["short"] = full_out[:7] # maybe improved later
- pieces["error"] = None
-
- # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
- # TAG might have hyphens.
- git_describe = describe_out
-
- # look for -dirty suffix
- dirty = git_describe.endswith("-dirty")
- pieces["dirty"] = dirty
- if dirty:
- git_describe = git_describe[:git_describe.rindex("-dirty")]
-
- # now we have TAG-NUM-gHEX or HEX
-
- if "-" in git_describe:
- # TAG-NUM-gHEX
- mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
- if not mo:
- # unparseable. Maybe git-describe is misbehaving?
- pieces["error"] = ("unable to parse git-describe output: '%%s'"
- %% describe_out)
- return pieces
-
- # tag
- full_tag = mo.group(1)
- if not full_tag.startswith(tag_prefix):
- if verbose:
- fmt = "tag '%%s' doesn't start with prefix '%%s'"
- print(fmt %% (full_tag, tag_prefix))
- pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
- %% (full_tag, tag_prefix))
- return pieces
- pieces["closest-tag"] = full_tag[len(tag_prefix):]
-
- # distance: number of commits since tag
- pieces["distance"] = int(mo.group(2))
-
- # commit: short hex revision ID
- pieces["short"] = mo.group(3)
-
- else:
- # HEX: no tags
- pieces["closest-tag"] = None
- count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
- cwd=root)
- pieces["distance"] = int(count_out) # total number of commits
-
- # commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"],
- cwd=root)[0].strip()
- pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
-
- return pieces
-
-
-def plus_or_dot(pieces):
- """Return a + if we don't already have one, else return a ."""
- if "+" in pieces.get("closest-tag", ""):
- return "."
- return "+"
-
-
-def render_pep440(pieces):
- """Build up version string, with post-release "local version identifier".
-
- Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
- get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
-
- Exceptions:
- 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += plus_or_dot(pieces)
- rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- else:
- # exception #1
- rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
- pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- return rendered
-
-
-def render_pep440_pre(pieces):
- """TAG[.post.devDISTANCE] -- No -dirty.
-
- Exceptions:
- 1: no tags. 0.post.devDISTANCE
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"]:
- rendered += ".post.dev%%d" %% pieces["distance"]
- else:
- # exception #1
- rendered = "0.post.dev%%d" %% pieces["distance"]
- return rendered
-
-
-def render_pep440_post(pieces):
- """TAG[.postDISTANCE[.dev0]+gHEX] .
-
- The ".dev0" means dirty. Note that .dev0 sorts backwards
- (a dirty tree will appear "older" than the corresponding clean one),
- but you shouldn't be releasing software with -dirty anyways.
-
- Exceptions:
- 1: no tags. 0.postDISTANCE[.dev0]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%%d" %% pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- rendered += plus_or_dot(pieces)
- rendered += "g%%s" %% pieces["short"]
- else:
- # exception #1
- rendered = "0.post%%d" %% pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- rendered += "+g%%s" %% pieces["short"]
- return rendered
-
-
-def render_pep440_old(pieces):
- """TAG[.postDISTANCE[.dev0]] .
-
- The ".dev0" means dirty.
-
-    Exceptions:
- 1: no tags. 0.postDISTANCE[.dev0]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%%d" %% pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- else:
- # exception #1
- rendered = "0.post%%d" %% pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- return rendered
-
-
-def render_git_describe(pieces):
- """TAG[-DISTANCE-gHEX][-dirty].
-
- Like 'git describe --tags --dirty --always'.
-
- Exceptions:
- 1: no tags. HEX[-dirty] (note: no 'g' prefix)
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"]:
- rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
- else:
- # exception #1
- rendered = pieces["short"]
- if pieces["dirty"]:
- rendered += "-dirty"
- return rendered
-
-
-def render_git_describe_long(pieces):
- """TAG-DISTANCE-gHEX[-dirty].
-
- Like 'git describe --tags --dirty --always -long'.
- The distance/hash is unconditional.
-
- Exceptions:
- 1: no tags. HEX[-dirty] (note: no 'g' prefix)
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
- else:
- # exception #1
- rendered = pieces["short"]
- if pieces["dirty"]:
- rendered += "-dirty"
- return rendered
-
-
-def render(pieces, style):
- """Render the given version pieces into the requested style."""
- if pieces["error"]:
- return {"version": "unknown",
- "full-revisionid": pieces.get("long"),
- "dirty": None,
- "error": pieces["error"],
- "date": None}
-
- if not style or style == "default":
- style = "pep440" # the default
-
- if style == "pep440":
- rendered = render_pep440(pieces)
- elif style == "pep440-pre":
- rendered = render_pep440_pre(pieces)
- elif style == "pep440-post":
- rendered = render_pep440_post(pieces)
- elif style == "pep440-old":
- rendered = render_pep440_old(pieces)
- elif style == "git-describe":
- rendered = render_git_describe(pieces)
- elif style == "git-describe-long":
- rendered = render_git_describe_long(pieces)
- else:
- raise ValueError("unknown style '%%s'" %% style)
-
- return {"version": rendered, "full-revisionid": pieces["long"],
- "dirty": pieces["dirty"], "error": None,
- "date": pieces.get("date")}
-
-
-def get_versions():
- """Get version information or return default if unable to do so."""
- # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
- # __file__, we can work backwards from there to the root. Some
- # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
- # case we can only use expanded keywords.
-
- cfg = get_config()
- verbose = cfg.verbose
-
- try:
- return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
- verbose)
- except NotThisMethod:
- pass
-
- try:
- root = os.path.realpath(__file__)
- # versionfile_source is the relative path from the top of the source
- # tree (where the .git directory might live) to this file. Invert
- # this to find the root from __file__.
- for i in cfg.versionfile_source.split('/'):
- root = os.path.dirname(root)
- except NameError:
- return {"version": "0+unknown", "full-revisionid": None,
- "dirty": None,
- "error": "unable to find root of source tree",
- "date": None}
-
- try:
- pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
- return render(pieces, cfg.style)
- except NotThisMethod:
- pass
-
- try:
- if cfg.parentdir_prefix:
- return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
- except NotThisMethod:
- pass
-
- return {"version": "0+unknown", "full-revisionid": None,
- "dirty": None,
- "error": "unable to compute version", "date": None}
-'''
-
-
-@register_vcs_handler("git", "get_keywords")
-def git_get_keywords(versionfile_abs):
- """Extract version information from the given file."""
- # the code embedded in _version.py can just fetch the value of these
- # keywords. When used from setup.py, we don't want to import _version.py,
- # so we do it with a regexp instead. This function is not used from
- # _version.py.
- keywords = {}
- try:
- f = open(versionfile_abs, "r")
- for line in f.readlines():
- if line.strip().startswith("git_refnames ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["refnames"] = mo.group(1)
- if line.strip().startswith("git_full ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["full"] = mo.group(1)
- if line.strip().startswith("git_date ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["date"] = mo.group(1)
- f.close()
- except EnvironmentError:
- pass
- return keywords
-
-
-@register_vcs_handler("git", "keywords")
-def git_versions_from_keywords(keywords, tag_prefix, verbose):
- """Get version information from git keywords."""
- if not keywords:
- raise NotThisMethod("no keywords at all, weird")
- date = keywords.get("date")
- if date is not None:
- # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
- # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
- # -like" string, which we must then edit to make compliant), because
- # it's been around since git-1.5.3, and it's too difficult to
- # discover which version we're using, or to work around using an
- # older one.
- date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
- refnames = keywords["refnames"].strip()
- if refnames.startswith("$Format"):
- if verbose:
- print("keywords are unexpanded, not using")
- raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
- refs = set([r.strip() for r in refnames.strip("()").split(",")])
- # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
- # just "foo-1.0". If we see a "tag: " prefix, prefer those.
- TAG = "tag: "
- tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)])
- if not tags:
- # Either we're using git < 1.8.3, or there really are no tags. We use
- # a heuristic: assume all version tags have a digit. The old git %d
- # expansion behaves like git log --decorate=short and strips out the
- # refs/heads/ and refs/tags/ prefixes that would let us distinguish
- # between branches and tags. By ignoring refnames without digits, we
- # filter out many common branch names like "release" and
- # "stabilization", as well as "HEAD" and "master".
- tags = set([r for r in refs if re.search(r"\d", r)])
- if verbose:
- print("discarding '%s', no digits" % ",".join(refs - tags))
- if verbose:
- print("likely tags: %s" % ",".join(sorted(tags)))
- for ref in sorted(tags):
- # sorting will prefer e.g. "2.0" over "2.0rc1"
- if ref.startswith(tag_prefix):
- r = ref[len(tag_prefix) :]
- if verbose:
- print("picking %s" % r)
- return {
- "version": r,
- "full-revisionid": keywords["full"].strip(),
- "dirty": False,
- "error": None,
- "date": date,
- }
- # no suitable tags, so version is "0+unknown", but full hex is still there
- if verbose:
- print("no suitable tags, using unknown + full revision id")
- return {
- "version": "0+unknown",
- "full-revisionid": keywords["full"].strip(),
- "dirty": False,
- "error": "no suitable tags",
- "date": None,
- }
-
-
-@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
- """Get version from 'git describe' in the root of the source tree.
-
- This only gets called if the git-archive 'subst' keywords were *not*
- expanded, and _version.py hasn't already been rewritten with a short
- version string, meaning we're inside a checked out source tree.
- """
- GITS = ["git"]
- if sys.platform == "win32":
- GITS = ["git.cmd", "git.exe"]
-
- out, rc = run_command(
- GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True
- )
- if rc != 0:
- if verbose:
- print("Directory %s not under git control" % root)
- raise NotThisMethod("'git rev-parse --git-dir' returned error")
-
- # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
- # if there isn't one, this yields HEX[-dirty] (no NUM)
- describe_out, rc = run_command(
- GITS,
- [
- "describe",
- "--tags",
- "--dirty",
- "--always",
- "--long",
- "--match",
- "%s*" % tag_prefix,
- ],
- cwd=root,
- )
- # --long was added in git-1.5.5
- if describe_out is None:
- raise NotThisMethod("'git describe' failed")
- describe_out = describe_out.strip()
- full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
- if full_out is None:
- raise NotThisMethod("'git rev-parse' failed")
- full_out = full_out.strip()
-
- pieces = {}
- pieces["long"] = full_out
- pieces["short"] = full_out[:7] # maybe improved later
- pieces["error"] = None
-
- # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
- # TAG might have hyphens.
- git_describe = describe_out
-
- # look for -dirty suffix
- dirty = git_describe.endswith("-dirty")
- pieces["dirty"] = dirty
- if dirty:
- git_describe = git_describe[: git_describe.rindex("-dirty")]
-
- # now we have TAG-NUM-gHEX or HEX
-
- if "-" in git_describe:
- # TAG-NUM-gHEX
- mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
- if not mo:
- # unparseable. Maybe git-describe is misbehaving?
- pieces["error"] = (
- "unable to parse git-describe output: '%s'" % describe_out
- )
- return pieces
-
- # tag
- full_tag = mo.group(1)
- if not full_tag.startswith(tag_prefix):
- if verbose:
- fmt = "tag '%s' doesn't start with prefix '%s'"
- print(fmt % (full_tag, tag_prefix))
- pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
- full_tag,
- tag_prefix,
- )
- return pieces
- pieces["closest-tag"] = full_tag[len(tag_prefix) :]
-
- # distance: number of commits since tag
- pieces["distance"] = int(mo.group(2))
-
- # commit: short hex revision ID
- pieces["short"] = mo.group(3)
-
- else:
- # HEX: no tags
- pieces["closest-tag"] = None
- count_out, rc = run_command(
- GITS, ["rev-list", "HEAD", "--count"], cwd=root
- )
- pieces["distance"] = int(count_out) # total number of commits
-
- # commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[
- 0
- ].strip()
- pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
-
- return pieces
-
-
-def do_vcs_install(manifest_in, versionfile_source, ipy):
- """Git-specific installation logic for Versioneer.
-
- For Git, this means creating/changing .gitattributes to mark _version.py
- for export-subst keyword substitution.
- """
- GITS = ["git"]
- if sys.platform == "win32":
- GITS = ["git.cmd", "git.exe"]
- files = [manifest_in, versionfile_source]
- if ipy:
- files.append(ipy)
- try:
- me = __file__
- if me.endswith(".pyc") or me.endswith(".pyo"):
- me = os.path.splitext(me)[0] + ".py"
- versioneer_file = os.path.relpath(me)
- except NameError:
- versioneer_file = "versioneer.py"
- files.append(versioneer_file)
- present = False
- try:
- f = open(".gitattributes", "r")
- for line in f.readlines():
- if line.strip().startswith(versionfile_source):
- if "export-subst" in line.strip().split()[1:]:
- present = True
- f.close()
- except EnvironmentError:
- pass
- if not present:
- f = open(".gitattributes", "a+")
- f.write("%s export-subst\n" % versionfile_source)
- f.close()
- files.append(".gitattributes")
- run_command(GITS, ["add", "--"] + files)
-
-
-def versions_from_parentdir(parentdir_prefix, root, verbose):
- """Try to determine the version from the parent directory name.
-
- Source tarballs conventionally unpack into a directory that includes both
- the project name and a version string. We will also support searching up
- two directory levels for an appropriately named parent directory
- """
- rootdirs = []
-
- for i in range(3):
- dirname = os.path.basename(root)
- if dirname.startswith(parentdir_prefix):
- return {
- "version": dirname[len(parentdir_prefix) :],
- "full-revisionid": None,
- "dirty": False,
- "error": None,
- "date": None,
- }
- else:
- rootdirs.append(root)
- root = os.path.dirname(root) # up a level
-
- if verbose:
- print(
- "Tried directories %s but none started with prefix %s"
- % (str(rootdirs), parentdir_prefix)
- )
- raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
-
-
-SHORT_VERSION_PY = """
-# This file was generated by 'versioneer.py' (0.18) from
-# revision-control system data, or from the parent directory name of an
-# unpacked source archive. Distribution tarballs contain a pre-generated copy
-# of this file.
-
-import json
-
-version_json = '''
-%s
-''' # END VERSION_JSON
-
-
-def get_versions():
- return json.loads(version_json)
-"""
-
-
-def versions_from_file(filename):
- """Try to determine the version from _version.py if present."""
- try:
- with open(filename) as f:
- contents = f.read()
- except EnvironmentError:
- raise NotThisMethod("unable to read _version.py")
- mo = re.search(
- r"version_json = '''\n(.*)''' # END VERSION_JSON",
- contents,
- re.M | re.S,
- )
- if not mo:
- mo = re.search(
- r"version_json = '''\r\n(.*)''' # END VERSION_JSON",
- contents,
- re.M | re.S,
- )
- if not mo:
- raise NotThisMethod("no version_json in _version.py")
- return json.loads(mo.group(1))
-
-
-def write_to_version_file(filename, versions):
- """Write the given version number to the given _version.py file."""
- os.unlink(filename)
- contents = json.dumps(
- versions, sort_keys=True, indent=1, separators=(",", ": ")
- )
- with open(filename, "w") as f:
- f.write(SHORT_VERSION_PY % contents)
-
- print("set %s to '%s'" % (filename, versions["version"]))
-
-
-def plus_or_dot(pieces):
- """Return a + if we don't already have one, else return a ."""
- if "+" in pieces.get("closest-tag", ""):
- return "."
- return "+"
-
-
-def render_pep440(pieces):
- """Build up version string, with post-release "local version identifier".
-
- Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
- get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
-
- Exceptions:
- 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += plus_or_dot(pieces)
- rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- else:
- # exception #1
- rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- return rendered
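-
-# Illustrative sketch (hypothetical "pieces" values, not produced by this
-# repository): a dirty checkout three commits past a 23.06.00 tag renders
-# as a PEP 440 version with a local-version suffix:
-#
-#     >>> render_pep440({"closest-tag": "23.06.00", "distance": 3,
-#     ...                "short": "abc1234", "dirty": True})
-#     '23.06.00+3.gabc1234.dirty'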
-
-
-def render_pep440_pre(pieces):
- """TAG[.post.devDISTANCE] -- No -dirty.
-
- Exceptions:
- 1: no tags. 0.post.devDISTANCE
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"]:
- rendered += ".post.dev%d" % pieces["distance"]
- else:
- # exception #1
- rendered = "0.post.dev%d" % pieces["distance"]
- return rendered
-
-
-def render_pep440_post(pieces):
- """TAG[.postDISTANCE[.dev0]+gHEX] .
-
- The ".dev0" means dirty. Note that .dev0 sorts backwards
- (a dirty tree will appear "older" than the corresponding clean one),
- but you shouldn't be releasing software with -dirty anyway.

-
- Exceptions:
- 1: no tags. 0.postDISTANCE[.dev0]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- rendered += plus_or_dot(pieces)
- rendered += "g%s" % pieces["short"]
- else:
- # exception #1
- rendered = "0.post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- rendered += "+g%s" % pieces["short"]
- return rendered
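-
-# Illustrative sketch (hypothetical values): the post-release style keeps
-# the distance in ".postN" and the abbreviated hash in the local segment:
-#
-#     >>> render_pep440_post({"closest-tag": "23.06.00", "distance": 3,
-#     ...                     "short": "abc1234", "dirty": False})
-#     '23.06.00.post3+gabc1234'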
-
-
-def render_pep440_old(pieces):
- """TAG[.postDISTANCE[.dev0]] .
-
- The ".dev0" means dirty.
-
- Exceptions:
- 1: no tags. 0.postDISTANCE[.dev0]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- else:
- # exception #1
- rendered = "0.post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- return rendered
-
-
-def render_git_describe(pieces):
- """TAG[-DISTANCE-gHEX][-dirty].
-
- Like 'git describe --tags --dirty --always'.
-
- Exceptions:
- 1: no tags. HEX[-dirty] (note: no 'g' prefix)
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"]:
- rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
- else:
- # exception #1
- rendered = pieces["short"]
- if pieces["dirty"]:
- rendered += "-dirty"
- return rendered
-
-
-def render_git_describe_long(pieces):
- """TAG-DISTANCE-gHEX[-dirty].
-
- Like 'git describe --tags --dirty --always --long'.
- The distance/hash is unconditional.
-
- Exceptions:
- 1: no tags. HEX[-dirty] (note: no 'g' prefix)
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
- else:
- # exception #1
- rendered = pieces["short"]
- if pieces["dirty"]:
- rendered += "-dirty"
- return rendered
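-
-# Illustrative sketch (hypothetical values): the long git-describe style
-# always includes the distance and hash, even on an exact tag:
-#
-#     >>> render_git_describe_long({"closest-tag": "23.06.00", "distance": 0,
-#     ...                           "short": "abc1234", "dirty": False})
-#     '23.06.00-0-gabc1234'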
-
-
-def render(pieces, style):
- """Render the given version pieces into the requested style."""
- if pieces["error"]:
- return {
- "version": "unknown",
- "full-revisionid": pieces.get("long"),
- "dirty": None,
- "error": pieces["error"],
- "date": None,
- }
-
- if not style or style == "default":
- style = "pep440" # the default
-
- if style == "pep440":
- rendered = render_pep440(pieces)
- elif style == "pep440-pre":
- rendered = render_pep440_pre(pieces)
- elif style == "pep440-post":
- rendered = render_pep440_post(pieces)
- elif style == "pep440-old":
- rendered = render_pep440_old(pieces)
- elif style == "git-describe":
- rendered = render_git_describe(pieces)
- elif style == "git-describe-long":
- rendered = render_git_describe_long(pieces)
- else:
- raise ValueError("unknown style '%s'" % style)
-
- return {
- "version": rendered,
- "full-revisionid": pieces["long"],
- "dirty": pieces["dirty"],
- "error": None,
- "date": pieces.get("date"),
- }
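-
-# Illustrative sketch (hypothetical values): render() dispatches on the
-# configured style and wraps the result in the versions dict that
-# get_versions() returns:
-#
-#     >>> render({"error": None, "closest-tag": "23.06.00", "distance": 0,
-#     ...         "short": "abc1234", "dirty": False, "long": "abc1234def",
-#     ...         "date": None}, "pep440")["version"]
-#     '23.06.00'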
-
-
-class VersioneerBadRootError(Exception):
- """The project root directory is unknown or missing key files."""
-
-
-def get_versions(verbose=False):
- """Get the project version from whatever source is available.
-
- Returns dict with two keys: 'version' and 'full'.
- """
- if "versioneer" in sys.modules:
- # see the discussion in cmdclass.py:get_cmdclass()
- del sys.modules["versioneer"]
-
- root = get_root()
- cfg = get_config_from_root(root)
-
- assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
- handlers = HANDLERS.get(cfg.VCS)
- assert handlers, "unrecognized VCS '%s'" % cfg.VCS
- verbose = verbose or cfg.verbose
- assert (
- cfg.versionfile_source is not None
- ), "please set versioneer.versionfile_source"
- assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"
-
- versionfile_abs = os.path.join(root, cfg.versionfile_source)
-
- # extract version from first of: _version.py, VCS command (e.g. 'git
- # describe'), parentdir. This is meant to work for developers using a
- # source checkout, for users of a tarball created by 'setup.py sdist',
- # and for users of a tarball/zipball created by 'git archive' or github's
- # download-from-tag feature or the equivalent in other VCSes.
-
- get_keywords_f = handlers.get("get_keywords")
- from_keywords_f = handlers.get("keywords")
- if get_keywords_f and from_keywords_f:
- try:
- keywords = get_keywords_f(versionfile_abs)
- ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
- if verbose:
- print("got version from expanded keyword %s" % ver)
- return ver
- except NotThisMethod:
- pass
-
- try:
- ver = versions_from_file(versionfile_abs)
- if verbose:
- print("got version from file %s %s" % (versionfile_abs, ver))
- return ver
- except NotThisMethod:
- pass
-
- from_vcs_f = handlers.get("pieces_from_vcs")
- if from_vcs_f:
- try:
- pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
- ver = render(pieces, cfg.style)
- if verbose:
- print("got version from VCS %s" % ver)
- return ver
- except NotThisMethod:
- pass
-
- try:
- if cfg.parentdir_prefix:
- ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
- if verbose:
- print("got version from parentdir %s" % ver)
- return ver
- except NotThisMethod:
- pass
-
- if verbose:
- print("unable to compute version")
-
- return {
- "version": "0+unknown",
- "full-revisionid": None,
- "dirty": None,
- "error": "unable to compute version",
- "date": None,
- }
-
-
-def get_version():
- """Get the short version string for this project."""
- return get_versions()["version"]
-
-
-def get_cmdclass():
- """Get the custom setuptools/distutils subclasses used by Versioneer."""
- if "versioneer" in sys.modules:
- del sys.modules["versioneer"]
- # this fixes the "python setup.py develop" case (also 'install' and
- # 'easy_install .'), in which subdependencies of the main project are
- # built (using setup.py bdist_egg) in the same python process. Assume
- # a main project A and a dependency B, which use different versions
- # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
- # sys.modules by the time B's setup.py is executed, causing B to run
- # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
- # sandbox that restores sys.modules to its pre-build state, so the
- # parent is protected against the child's "import versioneer". By
- # removing ourselves from sys.modules here, before the child build
- # happens, we protect the child from the parent's versioneer too.
- # Also see https://github.com/warner/python-versioneer/issues/52
-
- cmds = {}
-
- # we add "version" to both distutils and setuptools
- from distutils.core import Command
-
- class cmd_version(Command):
- description = "report generated version string"
- user_options = []
- boolean_options = []
-
- def initialize_options(self):
- pass
-
- def finalize_options(self):
- pass
-
- def run(self):
- vers = get_versions(verbose=True)
- print("Version: %s" % vers["version"])
- print(" full-revisionid: %s" % vers.get("full-revisionid"))
- print(" dirty: %s" % vers.get("dirty"))
- print(" date: %s" % vers.get("date"))
- if vers["error"]:
- print(" error: %s" % vers["error"])
-
- cmds["version"] = cmd_version
-
- # we override "build_py" in both distutils and setuptools
- #
- # most invocation pathways end up running build_py:
- # distutils/build -> build_py
- # distutils/install -> distutils/build ->..
- # setuptools/bdist_wheel -> distutils/install ->..
- # setuptools/bdist_egg -> distutils/install_lib -> build_py
- # setuptools/install -> bdist_egg ->..
- # setuptools/develop -> ?
- # pip install:
- # copies source tree to a tempdir before running egg_info/etc
- # if .git isn't copied too, 'git describe' will fail
- # then does setup.py bdist_wheel, or sometimes setup.py install
- # setup.py egg_info -> ?
-
- # we override different "build_py" commands for both environments
- if "setuptools" in sys.modules:
- from setuptools.command.build_py import build_py as _build_py
- else:
- from distutils.command.build_py import build_py as _build_py
-
- class cmd_build_py(_build_py):
- def run(self):
- root = get_root()
- cfg = get_config_from_root(root)
- versions = get_versions()
- _build_py.run(self)
- # now locate _version.py in the new build/ directory and replace
- # it with an updated value
- if cfg.versionfile_build:
- target_versionfile = os.path.join(
- self.build_lib, cfg.versionfile_build
- )
- print("UPDATING %s" % target_versionfile)
- write_to_version_file(target_versionfile, versions)
-
- cmds["build_py"] = cmd_build_py
-
- if "cx_Freeze" in sys.modules: # cx_freeze enabled?
- from cx_Freeze.dist import build_exe as _build_exe
-
- # nczeczulin reports that py2exe won't like the pep440-style string
- # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
- # setup(console=[{
- # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION
- # "product_version": versioneer.get_version(),
- # ...
-
- class cmd_build_exe(_build_exe):
- def run(self):
- root = get_root()
- cfg = get_config_from_root(root)
- versions = get_versions()
- target_versionfile = cfg.versionfile_source
- print("UPDATING %s" % target_versionfile)
- write_to_version_file(target_versionfile, versions)
-
- _build_exe.run(self)
- os.unlink(target_versionfile)
- with open(cfg.versionfile_source, "w") as f:
- LONG = LONG_VERSION_PY[cfg.VCS]
- f.write(
- LONG
- % {
- "DOLLAR": "$",
- "STYLE": cfg.style,
- "TAG_PREFIX": cfg.tag_prefix,
- "PARENTDIR_PREFIX": cfg.parentdir_prefix,
- "VERSIONFILE_SOURCE": cfg.versionfile_source,
- }
- )
-
- cmds["build_exe"] = cmd_build_exe
- del cmds["build_py"]
-
- if "py2exe" in sys.modules: # py2exe enabled?
- try:
- from py2exe.distutils_buildexe import py2exe as _py2exe # py3
- except ImportError:
- from py2exe.build_exe import py2exe as _py2exe # py2
-
- class cmd_py2exe(_py2exe):
- def run(self):
- root = get_root()
- cfg = get_config_from_root(root)
- versions = get_versions()
- target_versionfile = cfg.versionfile_source
- print("UPDATING %s" % target_versionfile)
- write_to_version_file(target_versionfile, versions)
-
- _py2exe.run(self)
- os.unlink(target_versionfile)
- with open(cfg.versionfile_source, "w") as f:
- LONG = LONG_VERSION_PY[cfg.VCS]
- f.write(
- LONG
- % {
- "DOLLAR": "$",
- "STYLE": cfg.style,
- "TAG_PREFIX": cfg.tag_prefix,
- "PARENTDIR_PREFIX": cfg.parentdir_prefix,
- "VERSIONFILE_SOURCE": cfg.versionfile_source,
- }
- )
-
- cmds["py2exe"] = cmd_py2exe
-
- # we override different "sdist" commands for both environments
- if "setuptools" in sys.modules:
- from setuptools.command.sdist import sdist as _sdist
- else:
- from distutils.command.sdist import sdist as _sdist
-
- class cmd_sdist(_sdist):
- def run(self):
- versions = get_versions()
- self._versioneer_generated_versions = versions
- # unless we update this, the command will keep using the old
- # version
- self.distribution.metadata.version = versions["version"]
- return _sdist.run(self)
-
- def make_release_tree(self, base_dir, files):
- root = get_root()
- cfg = get_config_from_root(root)
- _sdist.make_release_tree(self, base_dir, files)
- # now locate _version.py in the new base_dir directory
- # (remembering that it may be a hardlink) and replace it with an
- # updated value
- target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
- print("UPDATING %s" % target_versionfile)
- write_to_version_file(
- target_versionfile, self._versioneer_generated_versions
- )
-
- cmds["sdist"] = cmd_sdist
-
- return cmds
-
-
-CONFIG_ERROR = """
-setup.cfg is missing the necessary Versioneer configuration. You need
-a section like:
-
- [versioneer]
- VCS = git
- style = pep440
- versionfile_source = src/myproject/_version.py
- versionfile_build = myproject/_version.py
- tag_prefix =
- parentdir_prefix = myproject-
-
-You will also need to edit your setup.py to use the results:
-
- import versioneer
- setup(version=versioneer.get_version(),
- cmdclass=versioneer.get_cmdclass(), ...)
-
-Please read the docstring in ./versioneer.py for configuration instructions,
-edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
-"""
-
-SAMPLE_CONFIG = """
-# See the docstring in versioneer.py for instructions. Note that you must
-# re-run 'versioneer.py setup' after changing this section, and commit the
-# resulting files.
-
-[versioneer]
-#VCS = git
-#style = pep440
-#versionfile_source =
-#versionfile_build =
-#tag_prefix =
-#parentdir_prefix =
-
-"""
-
-INIT_PY_SNIPPET = """
-from cuspatial._version import get_versions
-__version__ = get_versions()['version']
-del get_versions
-"""
-
-
-def do_setup():
- """Main VCS-independent setup function for installing Versioneer."""
- root = get_root()
- try:
- cfg = get_config_from_root(root)
- except (
- EnvironmentError,
- configparser.NoSectionError,
- configparser.NoOptionError,
- ) as e:
- if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
- print(
- "Adding sample versioneer config to setup.cfg", file=sys.stderr
- )
- with open(os.path.join(root, "setup.cfg"), "a") as f:
- f.write(SAMPLE_CONFIG)
- print(CONFIG_ERROR, file=sys.stderr)
- return 1
-
- print(" creating %s" % cfg.versionfile_source)
- with open(cfg.versionfile_source, "w") as f:
- LONG = LONG_VERSION_PY[cfg.VCS]
- f.write(
- LONG
- % {
- "DOLLAR": "$",
- "STYLE": cfg.style,
- "TAG_PREFIX": cfg.tag_prefix,
- "PARENTDIR_PREFIX": cfg.parentdir_prefix,
- "VERSIONFILE_SOURCE": cfg.versionfile_source,
- }
- )
-
- ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py")
- if os.path.exists(ipy):
- try:
- with open(ipy, "r") as f:
- old = f.read()
- except EnvironmentError:
- old = ""
- if INIT_PY_SNIPPET not in old:
- print(" appending to %s" % ipy)
- with open(ipy, "a") as f:
- f.write(INIT_PY_SNIPPET)
- else:
- print(" %s unmodified" % ipy)
- else:
- print(" %s doesn't exist, ok" % ipy)
- ipy = None
-
- # Make sure both the top-level "versioneer.py" and versionfile_source
- # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
- # they'll be copied into source distributions. Pip won't be able to
- # install the package without this.
- manifest_in = os.path.join(root, "MANIFEST.in")
- simple_includes = set()
- try:
- with open(manifest_in, "r") as f:
- for line in f:
- if line.startswith("include "):
- for include in line.split()[1:]:
- simple_includes.add(include)
- except EnvironmentError:
- pass
- # That doesn't cover everything MANIFEST.in can do
- # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
- # it might give some false negatives. Appending redundant 'include'
- # lines is safe, though.
- if "versioneer.py" not in simple_includes:
- print(" appending 'versioneer.py' to MANIFEST.in")
- with open(manifest_in, "a") as f:
- f.write("include versioneer.py\n")
- else:
- print(" 'versioneer.py' already in MANIFEST.in")
- if cfg.versionfile_source not in simple_includes:
- print(
- " appending versionfile_source ('%s') to MANIFEST.in"
- % cfg.versionfile_source
- )
- with open(manifest_in, "a") as f:
- f.write("include %s\n" % cfg.versionfile_source)
- else:
- print(" versionfile_source already in MANIFEST.in")
-
- # Make VCS-specific changes. For git, this means creating/changing
- # .gitattributes to mark _version.py for export-subst keyword
- # substitution.
- do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
- return 0
-
-
-def scan_setup_py():
- """Validate the contents of setup.py against Versioneer's expectations."""
- found = set()
- setters = False
- errors = 0
- with open("setup.py", "r") as f:
- for line in f.readlines():
- if "import versioneer" in line:
- found.add("import")
- if "versioneer.get_cmdclass()" in line:
- found.add("cmdclass")
- if "versioneer.get_version()" in line:
- found.add("get_version")
- if "versioneer.VCS" in line:
- setters = True
- if "versioneer.versionfile_source" in line:
- setters = True
- if len(found) != 3:
- print("")
- print("Your setup.py appears to be missing some important items")
- print("(but I might be wrong). Please make sure it has something")
- print("roughly like the following:")
- print("")
- print(" import versioneer")
- print(" setup( version=versioneer.get_version(),")
- print(" cmdclass=versioneer.get_cmdclass(), ...)")
- print("")
- errors += 1
- if setters:
- print("You should remove lines like 'versioneer.VCS = ' and")
- print("'versioneer.versionfile_source = ' . This configuration")
- print("now lives in setup.cfg, and should be removed from setup.py")
- print("")
- errors += 1
- return errors
-
-
-if __name__ == "__main__":
- cmd = sys.argv[1]
- if cmd == "setup":
- errors = do_setup()
- errors += scan_setup_py()
- if errors:
- sys.exit(1)