diff --git a/Makefile b/Makefile
index 0b9b1a6..12db43f 100644
--- a/Makefile
+++ b/Makefile
@@ -14,10 +14,10 @@ daemon:
docker run --rm -itd --volume "$(shell pwd)/data:/app/data:ro" -p 5000:5000 opentopodata:$(VERSION)
test: build black-check
- docker run --rm -e DISABLE_MEMCACHE=1 --volume "$(shell pwd)/htmlcov:/app/htmlcov" opentopodata:$(VERSION) pytest --ignore=data --ignore=scripts --cov=opentopodata --cov-report html
+ docker run --rm -e DISABLE_MEMCACHE=1 --volume "$(shell pwd)/htmlcov:/app/htmlcov" opentopodata:$(VERSION) python -m pytest --ignore=data --ignore=scripts --cov=opentopodata --cov-report html --timeout=10
test-m1: build-m1 black-check
- docker run --rm -e DISABLE_MEMCACHE=1 --volume "$(shell pwd)/htmlcov:/app/htmlcov" opentopodata:$(VERSION) pytest --ignore=data --ignore=scripts --cov=opentopodata --cov-report html
+ docker run --rm -e DISABLE_MEMCACHE=1 --volume "$(shell pwd)/htmlcov:/app/htmlcov" opentopodata:$(VERSION) python -m pytest --ignore=data --ignore=scripts --cov=opentopodata --cov-report html --timeout=10
run-local:
FLASK_APP=opentopodata/api.py FLASK_DEBUG=1 flask run --port 5000
@@ -26,11 +26,11 @@ black:
black --target-version py39 tests opentopodata
black-check:
- docker run --rm opentopodata:$(VERSION) black --check --target-version py39 tests opentopodata
+ docker run --rm opentopodata:$(VERSION) python -m black --check --target-version py39 tests opentopodata
update-requirements: build
# pip-compile gets confused if there's already a requirements.txt file, and
# it can't be deleted without breaking the docker mount. So instead do the
# compiling in /tmp. Should run test suite afterwards.
- docker run --rm -v $(shell pwd)/requirements.txt:/app/requirements.txt -w /tmp opentopodata:$(VERSION) /bin/bash -c "cp /app/requirements.in .; pip-compile requirements.in; cp requirements.txt /app/requirements.txt"
+ docker run --rm -v $(shell pwd)/requirements.txt:/app/requirements.txt -w /tmp opentopodata:$(VERSION) /bin/bash -c "cp /app/requirements.in .; pip-compile requirements.in --resolver backtracking; cp requirements.txt /app/requirements.txt"
diff --git a/VERSION b/VERSION
index fe4e75f..abb1658 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-1.8.3
\ No newline at end of file
+1.9.0
\ No newline at end of file
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 28ac740..7d6d1b8 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -1,17 +1,17 @@
# Container for packages that need to be built from source but have massive dev dependencies.
-FROM python:3.9.16-slim-bullseye as builder
+FROM python:3.11.8-slim-bookworm as builder
RUN set -e && \
apt-get update && \
apt-get install -y --no-install-recommends \
gcc \
- python3.9-dev
+ python3.11-dev
RUN pip config set global.disable-pip-version-check true && \
- pip wheel --wheel-dir=/root/wheels uwsgi==2.0.21 && \
- pip wheel --wheel-dir=/root/wheels regex==2022.10.31
+ pip wheel --wheel-dir=/root/wheels uwsgi==2.0.24 && \
+ pip wheel --wheel-dir=/root/wheels regex==2023.12.25
# The actual container.
-FROM python:3.9.16-slim-bullseye
+FROM python:3.11.8-slim-bookworm
RUN set -e && \
apt-get update && \
apt-get install -y --no-install-recommends \
diff --git a/docker/apple-silicon.Dockerfile b/docker/apple-silicon.Dockerfile
index 9c21ac3..9f81059 100644
--- a/docker/apple-silicon.Dockerfile
+++ b/docker/apple-silicon.Dockerfile
@@ -5,7 +5,8 @@
# It works just the same as the main image, but is much larger and slower to
# build.
-FROM osgeo/gdal:ubuntu-full-3.5.2
+FROM ghcr.io/osgeo/gdal:ubuntu-full-3.6.4
+RUN python --version
RUN set -e && \
apt-get update && \
apt-get install -y --no-install-recommends \
@@ -16,7 +17,7 @@ RUN set -e && \
g++ \
supervisor \
libmemcached-dev \
- python3.8-dev && \
+ python3.10-dev && \
rm -rf /var/lib/apt/lists/*
COPY requirements.txt /app/requirements.txt
diff --git a/docs/api.md b/docs/api.md
index 5a68fcd..241fca8 100644
--- a/docs/api.md
+++ b/docs/api.md
@@ -24,6 +24,7 @@ Latitudes and longitudes should be in `EPSG:4326` (also known as WGS-84 format),
* The default option `null` makes NODATA indistinguishable from a location outside the dataset bounds.
* `NaN` (not a number) values aren't valid in json and will break some clients. The `nan` option was default before version 1.4 and is provided only for backwards compatibility.
* When querying multiple datasets, this NODATA replacement only applies to the last dataset in the stack.
+* `format`: Either `json` or `geojson`. Default: `json`.
@@ -47,14 +48,11 @@ Some notes about the elevation value:
* Unless the `nodata_value` parameter is set, a `null` elevation could either mean the location is outside the dataset bounds, or a NODATA within the raster bounds.
-
### Example
`GET` api.opentopodata.org/v1/srtm90m?locations=-43.5,172.5|27.6,1.98&interpolation=cubic
-
-
```json
{
"results": [
@@ -79,6 +77,58 @@ Some notes about the elevation value:
}
```
+
+### GeoJSON response
+
+If `format=geojson` is passed, the response is instead a `FeatureCollection` of `Point` features. Each feature carries its elevation as the `z` coordinate, plus a `dataset` property naming the source dataset (corresponding to `results[].dataset` in the regular json response).
+
+
+### GeoJSON example
+
+
+`GET` api.opentopodata.org/v1/srtm90m?locations=-43.5,172.5|27.6,1.98&interpolation=cubic&format=geojson
+
+```json
+{
+  "features": [
+    {
+      "geometry": {
+        "coordinates": [
+          172.5,
+          -43.5,
+          45
+        ],
+        "type": "Point"
+      },
+      "properties": {
+        "dataset": "srtm90m"
+      },
+      "type": "Feature"
+    },
+    {
+      "geometry": {
+        "coordinates": [
+          1.98,
+          27.6,
+          402
+        ],
+        "type": "Point"
+      },
+      "properties": {
+        "dataset": "srtm90m"
+      },
+      "type": "Feature"
+    }
+  ],
+  "type": "FeatureCollection"
+}
+```
+
---
diff --git a/docs/changelog.md b/docs/changelog.md
index 44902a7..a8ccdce 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -3,6 +3,14 @@
This is a list of changes to Open Topo Data between each release.
+## Version 1.9.0 (19 Feb 2024)
+* Dependency upgrades, including python to 3.11 and rasterio to 1.3.9
+* Add support for geojson responses ([#86](https://github.com/ajnisbet/opentopodata/pull/86), thanks [@arnesetzer](https://github.com/arnesetzer)!)
+* Fix handling of preflight requests ([#93](https://github.com/ajnisbet/opentopodata/issues/93), thanks [@MaRaSu](https://github.com/MaRaSu)!)
+* Fix error message bug ([#70](https://github.com/ajnisbet/opentopodata/pull/70), thanks [@khendrickx](https://github.com/khendrickx)!)
+
+
## Version 1.8.3 (7 Feb 2023)
* Fix memory leak ([#68](https://github.com/ajnisbet/opentopodata/issues/68))
diff --git a/docs/datasets/eudem.md b/docs/datasets/eudem.md
index 25be1c8..48c60d4 100644
--- a/docs/datasets/eudem.md
+++ b/docs/datasets/eudem.md
@@ -30,45 +30,25 @@ The advantage of the `NODATA` oceans is that you can use EU-DEM without clippin
## Adding EU-DEM to Open Topo Data
+As of Jan 2024, EU-DEM is no longer available to download via copernicus.eu.
-Make a new folder for the dataset:
+I have uploaded my version of the dataset at [https://files.gpxz.io/eudem_buffered.zip](https://files.gpxz.io/eudem_buffered.zip); see [EUDEM download](https://www.gpxz.io/blog/eudem) for more details.
-```bash
-mkdir ./data/eudem
-```
-
-Download the dataset from [Copernicus](https://land.copernicus.eu/imagery-in-situ/eu-dem/eu-dem-v1.1?tab=download). There are 27 files. Unzip them and move all the `.TIF` files into the data folder (you don't need the `.aux.xml`, `.ovr`, or `.TFw` files).
-
-Your data folder should now contain only 27 TIF files:
+Download the zip and unzip it into a new data folder:
```bash
-ls ./data/eudem
-
-# eu_dem_v11_E00N20.TIF
-# eu_dem_v11_E10N00.TIF
-# eu_dem_v11_E10N10.TIF
-# ...
-```
-
-
-If you have [gdal](https://gdal.org) installed, the easiest thing to do here is build a [VRT](https://gdal.org/drivers/raster/vrt.html) - a single raster file that links to the 27 tiles and which Open Topo Data can treat as a single-file dataset.
-
-```bash
-mkdir ./data/eudem-vrt
-cd ./data/eudem-vrt
-gdalbuildvrt -tr 25 25 -tap -te 0 0 8000000 6000000 eudem.vrt ../eudem/*.TIF
-cd ../../
+mkdir ./data/eudem
```
-
-The `tr`, `tap`, and `te` options in the above command ensure that slices from the VRT will use the exact values and grid of the source rasters.
-
+The unzipped dataset contains 27 `.TIF` tiles.
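+
+For example (a sketch; assumes `wget` and `unzip` are available, and that the `.TIF` files end up directly in `./data/eudem`):
+
+```bash
+wget https://files.gpxz.io/eudem_buffered.zip
+unzip eudem_buffered.zip -d ./data/eudem
+```
+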
Then create a `config.yaml` file:
```yaml
datasets:
- name: eudem25m
-  path: data/eudem-vrt/
+  path: data/eudem
+  filename_epsg: 3035
+  filename_tile_size: 1000000
```
Finally, rebuild to enable the new dataset at [localhost:5000/v1/eudem25m?locations=51.575,-3.220](http://localhost:5000/v1/eudem25m?locations=51.575,-3.220).
@@ -82,92 +62,6 @@ make build && make run
If you don't have gdal installed, you can use the tiles directly. There are instructions for this [here](https://github.com/ajnisbet/opentopodata/blob/f012ec136bebcd97e1dc05645e91a6d2487127dc/docs/datasets/eudem.md#adding-eu-dem-to-open-topo-data), but because the EU-DEM tiles don't come with an overlap you will get a `null` elevation at locations within 0.5 pixels of tile edges.
-### Buffering tiles (optional)
-
-The tiles provided by EU-DEM don't overlap and cover slightly less than a 1000km square. This means you'll get a `null` result for coordinates along the tile edges.
-
-The `.vrt` approach above solves the overlap issue, but for improved performance you can leave the tiles separate and add a buffer to each one. This is the code I used on the public API to do this:
-
-
-```python
-import os
-from glob import glob
-import subprocess
-
-import rasterio
-
-
-# Prepare paths.
-input_pattern = 'data/eudem/*.TIF'
-input_paths = sorted(glob(input_pattern))
-assert input_paths
-vrt_path = 'data/eudem-vrt/eudem.vrt'
-output_dir = 'data/eudem-buffered/'
-os.makedirs(output_dir, exist_ok=True)
-
-
-
-# EU-DEM specific options.
-tile_size = 1_000_000
-buffer_size = 50
-
-for input_path in input_paths:
-
-    # Get tile bounds.
-    with rasterio.open(input_path) as f:
-        bottom = int(f.bounds.bottom)
-        left = int(f.bounds.left)
-
-    # For EU-DEM only: round this partial tile down to the nearest tile_size.
-    if left == 943750:
-        left = 0
-
-    # New tile name in SRTM format.
-    output_name = 'N' + str(bottom).zfill(7) + 'E' + str(left).zfill(7) + '.TIF'
-    output_path = os.path.join(output_dir, output_name)
-
-    # New bounds.
-    xmin = left - buffer_size
-    xmax = left + tile_size + buffer_size
-    ymin = bottom - buffer_size
-    ymax = bottom + tile_size + buffer_size
-
-    # EU-DEM tiles don't cover negative locations.
-    xmin = max(0, xmin)
-    ymin = max(0, ymin)
-
-    # Do the transformation.
-    cmd = [
-        'gdal_translate',
-        '-a_srs', 'EPSG:3035',  # EU-DEM crs.
-        '-co', 'NUM_THREADS=ALL_CPUS',
-        '-co', 'COMPRESS=DEFLATE',
-        '-co', 'BIGTIFF=YES',
-        '--config', 'GDAL_CACHEMAX', '512',
-        '-projwin', str(xmin), str(ymax), str(xmax), str(ymin),
-        vrt_path, output_path,
-    ]
-    r = subprocess.run(cmd)
-    r.check_returncode()
-```
-
-These new files can be used in Open Topo Data with the following `config.yaml` file
-
-
-```yaml
-datasets:
-- name: eudem25m
-  path: data/eudem-buffered/
-  filename_epsg: 3035
-  filename_tile_size: 1000000
-```
-
-and rebuilding:
-
-```bash
-make build && make run
-```
-
## Public API
diff --git a/docs/notes/kubernetes.md b/docs/notes/kubernetes.md
index b853438..9ea7fee 100644
--- a/docs/notes/kubernetes.md
+++ b/docs/notes/kubernetes.md
@@ -82,4 +82,9 @@ spec:
- containerPort: 5000
restartPolicy: Always
-```
\ No newline at end of file
+```
+
+
+---
+
+Thanks to [@khintz](https://github.com/khintz) for contributing this documentation in [#57](https://github.com/ajnisbet/opentopodata/pull/57)!
\ No newline at end of file
diff --git a/docs/notes/performance-optimisation.md b/docs/notes/performance-optimisation.md
index 537915f..5c045de 100644
--- a/docs/notes/performance-optimisation.md
+++ b/docs/notes/performance-optimisation.md
@@ -19,6 +19,10 @@ Batch requests are faster (per point queried) than single-point requests, and lar
Batch queries are fastest if the points are located next to each other. Sorting the locations you are querying before batching will improve performance. Ideally sort by some block-level attribute like postal code or state/county/region, or by something like `round(lat, 1), round(lon, 1)` depending on your tile size.
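+
+For example, a rough pre-sort (a sketch, assuming a hypothetical `points` list of `(lat, lon)` tuples):
+
+```python
+points = [(-43.5, 172.5), (27.6, 1.98), (-43.4, 172.6)]
+
+# Group nearby points together before batching; tune the rounding to your tile size.
+points.sort(key=lambda p: (round(p[0], 1), round(p[1], 1)))
+```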
+If your requests are very large and the server has several CPU cores, try splitting each request into parts and sending the parts concurrently. The optimal number of concurrent requests is slightly higher than the number of CPU cores used by Open Topo Data. The core count is logged when Open Topo Data starts; if you missed the message you can find it with the following command:
+```bash
+docker logs {NAME_OF_CONTAINER} 2>&1 | grep "CPU cores"
+```
## Dataset format
diff --git a/docs/notes/running-without-docker.md b/docs/notes/running-without-docker.md
index c305beb..bc904ad 100644
--- a/docs/notes/running-without-docker.md
+++ b/docs/notes/running-without-docker.md
@@ -17,13 +17,13 @@ git clone https://github.com/ajnisbet/opentopodata.git
cd opentopodata
```
-Install system dependencies
+Install system dependencies (if you're not using Debian 10, install whatever python3.X-dev package matches your installed python):
```bash
apt install gcc python3.7-dev python3-pip
```
-Debian 10 comes with an old version of pip, it needs to be updated:
+Debian 10 comes with an old version of pip; it needs to be updated so we can install wheels:
```bash
pip3 install --upgrade pip
@@ -38,7 +38,7 @@ cat requirements.txt | grep pyproj
and install that pinned version
```bash
-pip3 install pyproj==3.0.0.post1
+pip3 install pyproj==3.4.1
```
then the remaining python packages can be installed:
@@ -133,4 +133,9 @@ Then manage Open Topo Data with
systemctl daemon-reload
systemctl enable opentopodata.service
systemctl start opentopodata.service
-```
\ No newline at end of file
+```
+
+!!! warning "Warning"
+    Open Topo Data caches `config.yaml` in two places: memcached and uwsgi.
+
+    If you update the config file (e.g. to add a new dataset) you'll need to restart memcached **first**, then Open Topo Data.
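+
+    A minimal sketch of that restart order (assuming memcached and Open Topo Data run as the systemd services set up in this guide; adjust unit names to your setup):
+
+    ```bash
+    # Clear the cached config first...
+    systemctl restart memcached
+    # ...then restart Open Topo Data so uwsgi re-reads config.yaml.
+    systemctl restart opentopodata.service
+    ```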
\ No newline at end of file
diff --git a/example-config.yaml b/example-config.yaml
index 4df114c..6e5014a 100644
--- a/example-config.yaml
+++ b/example-config.yaml
@@ -5,10 +5,12 @@
# 400 error will be thrown above this limit.
max_locations_per_request: 100
+
# CORS header. Should be null for no CORS, '*' for all domains, or a url with
# protocol, domain, and port ('https://api.example.com/'). Default is null.
access_control_allow_origin: "*"
+
datasets:
# A small testing dataset is included in the repo.
diff --git a/opentopodata/api.py b/opentopodata/api.py
index a7e1f5e..ea8d675 100644
--- a/opentopodata/api.py
+++ b/opentopodata/api.py
@@ -1,7 +1,7 @@
import logging
import os
-from flask import Flask, jsonify, request
+from flask import Flask, jsonify, request, Response
from flask_caching import Cache
import polyline
@@ -19,6 +19,7 @@
LON_MIN = -180
LON_MAX = 180
VERSION_PATH = "VERSION"
+DEFAULT_FORMAT_VALUE = "json"
# Memcache is used to store the latlon -> filename lookups, which can take a
@@ -64,6 +65,18 @@ def _load_config_memcache():
return config.load_config()
+@app.before_request
+def handle_preflight():
+    # If before_request returns a non-None value, the regular view isn't run.
+    # after_request() does still run though, so the CORS header and OTD version
+    # will be set correctly there.
+    if request.method == "OPTIONS":
+        response = Response(status=204)
+        response.headers["access-control-allow-methods"] = "GET,POST,OPTIONS,HEAD"
+        response.headers["access-control-allow-headers"] = "content-type,x-api-key"
+        return response
+
+
@app.after_request
def apply_cors(response):
"""Set CORs header.
@@ -84,6 +97,16 @@ def apply_cors(response):
return response
+@app.after_request
+def add_version(response):
+    if "version" not in _SIMPLE_CACHE:
+        with open(VERSION_PATH) as f:
+            version = f.read().strip()
+        _SIMPLE_CACHE["version"] = version
+    response.headers["x-opentopodata-version"] = _SIMPLE_CACHE["version"]
+    return response
+
+
class ClientError(ValueError):
"""Invalid input data.
@@ -128,6 +151,16 @@ def _find_request_argument(request, arg):
raise ClientError("Invalid JSON.")
+def _parse_format(format):
+    if not format:
+        format = DEFAULT_FORMAT_VALUE
+
+    if format not in {"json", "geojson"}:
+        raise ClientError("Format must be 'json' or 'geojson'.")
+
+    return format
+
+
def _parse_interpolation(method):
"""Check the interpolation method is supported.
@@ -436,8 +469,8 @@ def _get_datasets(name):
return datasets
-@app.route("/", methods=["GET", "POST", "OPTIONS", "HEAD"])
-@app.route("/v1/", methods=["GET", "POST", "OPTIONS", "HEAD"])
+@app.route("/", methods=["GET", "POST", "HEAD"])
+@app.route("/v1/", methods=["GET", "POST", "HEAD"])
def get_help_message():
msg = "No dataset name provided."
msg += " Try a url like '/v1/test-dataset?locations=-10,120' to get started,"
@@ -445,7 +478,7 @@ def get_help_message():
return jsonify({"status": "INVALID_REQUEST", "error": msg}), 404
-@app.route("/health", methods=["GET", "OPTIONS", "HEAD"])
+@app.route("/health", methods=["GET", "HEAD"])
def get_health_status():
"""Status endpoint for e.g., uptime check or load balancing."""
try:
@@ -458,7 +491,7 @@ def get_health_status():
return jsonify(data), 500
-@app.route("/datasets", methods=["GET", "OPTIONS", "HEAD"])
+@app.route("/datasets", methods=["GET", "HEAD"])
def get_datasets_info():
"""List of datasets on the server."""
try:
@@ -479,7 +512,7 @@ def get_datasets_info():
return jsonify(data), 500
-@app.route("/v1/", methods=["GET", "POST", "OPTIONS", "HEAD"])
+@app.route("/v1/", methods=["GET", "POST", "HEAD"])
def get_elevation(dataset_name):
"""Calculate the elevation for the given locations.
@@ -502,6 +535,7 @@ def get_elevation(dataset_name):
_find_request_argument(request, "locations"),
_load_config()["max_locations_per_request"],
)
+        format = _parse_format(_find_request_argument(request, "format"))
# Check if need to do sampling.
n_samples = _parse_n_samples(
@@ -519,15 +553,29 @@ def get_elevation(dataset_name):
# Build response.
results = []
-        for z, dataset_name, lat, lon in zip(elevations, dataset_names, lats, lons):
-            results.append(
-                {
-                    "elevation": z,
-                    "dataset": dataset_name,
-                    "location": {"lat": lat, "lng": lon},
-                }
-            )
-        data = {"status": "OK", "results": results}
+
+        # Convert to json or geojson format.
+        if format == "geojson":
+            for z, dataset_name, lat, lon in zip(elevations, dataset_names, lats, lons):
+                results.append(
+                    {
+                        "type": "Feature",
+                        "geometry": {"type": "Point", "coordinates": [lon, lat, z]},
+                        "properties": {"dataset": dataset_name},
+                    },
+                )
+            data = {"type": "FeatureCollection", "features": results}
+
+        else:
+            for z, dataset_name, lat, lon in zip(elevations, dataset_names, lats, lons):
+                results.append(
+                    {
+                        "elevation": z,
+                        "dataset": dataset_name,
+                        "location": {"lat": lat, "lng": lon},
+                    }
+                )
+            data = {"status": "OK", "results": results}
return jsonify(data)
except (ClientError, backend.InputError) as e:
@@ -543,13 +591,3 @@ def get_elevation(dataset_name):
app.logger.error(e)
msg = "Unhandled server error, see server logs for details."
return jsonify({"status": "SERVER_ERROR", "error": msg}), 500
-
-
-@app.after_request
-def add_version(response):
-    if "version" not in _SIMPLE_CACHE:
-        with open(VERSION_PATH) as f:
-            version = f.read().strip()
-        _SIMPLE_CACHE["version"] = version
-    response.headers["x-opentopodata-version"] = _SIMPLE_CACHE["version"]
-    return response
diff --git a/requirements.in b/requirements.in
index c0312fc..637a868 100644
--- a/requirements.in
+++ b/requirements.in
@@ -1,5 +1,5 @@
black
-Flask>=2.2.2
+Flask>=2.2.2 # Some flask 2.0 deprecations got real.
flask-caching
geographiclib
numpy
@@ -9,6 +9,7 @@ pylibmc
pyproj
pytest
pytest-cov
+pytest-timeout
PyYAML
-rasterio==1.2.10
+rasterio>=1.3.8 # Avoid memory leak https://github.com/ajnisbet/opentopodata/issues/68
requests
diff --git a/requirements.txt b/requirements.txt
index 7b614ae..b900d0a 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,29 +1,29 @@
#
-# This file is autogenerated by pip-compile with Python 3.9
+# This file is autogenerated by pip-compile with Python 3.11
# by the following command:
#
# pip-compile requirements.in
#
affine==2.4.0
# via rasterio
-attrs==22.2.0
- # via
- # pytest
- # rasterio
-black==23.1.0
+attrs==23.2.0
+ # via rasterio
+black==24.2.0
# via -r requirements.in
-build==0.10.0
+blinker==1.7.0
+ # via flask
+build==1.0.3
# via pip-tools
cachelib==0.9.0
# via flask-caching
-certifi==2022.12.7
+certifi==2024.2.2
# via
# pyproj
# rasterio
# requests
-charset-normalizer==3.0.1
+charset-normalizer==3.3.2
# via requests
-click==8.1.3
+click==8.1.7
# via
# black
# click-plugins
@@ -35,92 +35,83 @@ click-plugins==1.1.1
# via rasterio
cligj==0.7.2
# via rasterio
-coverage[toml]==7.1.0
+coverage[toml]==7.4.1
# via pytest-cov
-exceptiongroup==1.1.0
- # via pytest
-flask==2.2.2
+flask==3.0.2
# via
# -r requirements.in
# flask-caching
-flask-caching==2.0.2
+flask-caching==2.1.0
# via -r requirements.in
geographiclib==2.0
# via -r requirements.in
-idna==3.4
+idna==3.6
# via requests
-importlib-metadata==6.0.0
- # via flask
iniconfig==2.0.0
# via pytest
itsdangerous==2.1.2
# via flask
-jinja2==3.1.2
+jinja2==3.1.3
# via flask
-markupsafe==2.1.2
+markupsafe==2.1.5
# via
# jinja2
# werkzeug
mypy-extensions==1.0.0
# via black
-numpy==1.24.2
+numpy==1.26.4
# via
# -r requirements.in
# rasterio
# snuggs
-packaging==23.0
+packaging==23.2
# via
# black
# build
# pytest
-pathspec==0.11.0
+pathspec==0.12.1
# via black
-pip-tools==6.12.2
+pip-tools==7.4.0
# via -r requirements.in
-platformdirs==3.0.0
+platformdirs==4.2.0
# via black
-pluggy==1.0.0
+pluggy==1.4.0
# via pytest
-polyline==2.0.0
+polyline==2.0.2
# via -r requirements.in
pylibmc==1.6.3
# via -r requirements.in
-pyparsing==3.0.9
+pyparsing==3.1.1
# via snuggs
-pyproj==3.4.1
+pyproj==3.6.1
# via -r requirements.in
pyproject-hooks==1.0.0
- # via build
-pytest==7.2.1
+ # via
+ # build
+ # pip-tools
+pytest==8.0.1
# via
# -r requirements.in
# pytest-cov
-pytest-cov==4.0.0
+ # pytest-timeout
+pytest-cov==4.1.0
# via -r requirements.in
-pyyaml==6.0
+pytest-timeout==2.2.0
# via -r requirements.in
-rasterio==1.2.10
+pyyaml==6.0.1
# via -r requirements.in
-requests==2.28.2
+rasterio==1.3.9
+ # via -r requirements.in
+requests==2.31.0
# via -r requirements.in
snuggs==1.4.7
# via rasterio
-tomli==2.0.1
- # via
- # black
- # build
- # coverage
- # pytest
-typing-extensions==4.4.0
- # via black
-urllib3==1.26.14
+urllib3==2.2.1
# via requests
-werkzeug==2.2.2
+werkzeug==3.0.1
# via flask
-wheel==0.38.4
+wheel==0.42.0
# via pip-tools
-zipp==3.12.1
- # via importlib-metadata
# The following packages are considered to be unsafe in a requirements file:
# pip
diff --git a/tests/test_api.py b/tests/test_api.py
index d363504..eb98f5a 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -1,7 +1,6 @@
import math
import pytest
-from flask_caching import Cache
import rasterio
from unittest.mock import patch
import numpy as np
@@ -9,7 +8,6 @@
from opentopodata import api
from opentopodata import backend
-from opentopodata import config
GEOTIFF_PATH = "tests/data/datasets/test-etopo1-resampled-1deg/ETOPO1_Ice_g_geotiff.resampled-1deg.tif"
@@ -41,6 +39,15 @@ def test_no_cors(self, patch_config):
response = test_api.get(url)
assert response.headers.get("access-control-allow-origin") is None
+    def test_options(self):
+        test_api = api.app.test_client()
+        url = "/"
+        response = test_api.options(url)
+        assert response.status_code == 204
+        assert "x-opentopodata-version" in response.headers
+        assert "access-control-allow-methods" in response.headers
+        assert response.headers.get("access-control-allow-origin") == "*"
+
class TestFindRequestAgument:
def test_no_argument(self, patch_config):
@@ -282,6 +289,16 @@ def test_repeated_locations(self, patch_config):
assert len(rjson["results"]) == 2
assert rjson["results"][0] == rjson["results"][1]
+    def test_repeated_locations_geojson(self, patch_config):
+        url = (
+            "/v1/etopo1deg?locations=1.5,0.1|1.5,0.1&interpolation=cubic&format=geojson"
+        )
+        response = self.test_api.get(url)
+        rjson = response.json
+        assert response.status_code == 200
+        assert len(rjson["features"]) == 2
+        assert rjson["features"][0] == rjson["features"][1]
+
def test_polyline_latlon_equivalence(self, patch_config):
url_latlon = "/v1/etopo1deg?locations=-90,180|1.5,0.1"
url_polyline = "/v1/etopo1deg?locations=~bidP_gsia@_bnmP~u_ia@"