ppai/land-cover: simplify tests and pin python version (#8501)
* rebase

* remove constraints file

* pin python version

* add copyright

* add python version
davidcavazos authored Nov 16, 2022
1 parent c879b3a commit 089ee19
Showing 12 changed files with 90 additions and 102 deletions.
32 changes: 17 additions & 15 deletions people-and-planet-ai/conftest.py
@@ -65,7 +65,7 @@ def unique_name(test_name: str, unique_id: str) -> str:


@pytest.fixture(scope="session")
-def bucket_name(test_name: str, location: str, unique_id: str) -> str:
+def bucket_name(test_name: str, location: str, unique_id: str) -> Iterable[str]:
# Override for local testing.
if "GOOGLE_CLOUD_BUCKET" in os.environ:
bucket_name = os.environ["GOOGLE_CLOUD_BUCKET"]
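
The return-type change reflects that `bucket_name` is a generator-style fixture: it yields the bucket name and then runs cleanup after the session, so `Iterable[str]` is the annotation type checkers expect. A minimal sketch of the pattern (the fixture body below is a made-up stand-in, not the file's actual cleanup code):

```python
from collections.abc import Iterable

import pytest


@pytest.fixture(scope="session")
def bucket_name() -> Iterable[str]:
    # A function containing `yield` is a generator, so annotating it as
    # returning plain `str` is incorrect; `Iterable[str]` matches.
    name = "example-test-bucket"  # hypothetical bucket name
    yield name  # tests run while the fixture is suspended here
    print(f"cleaning up {name}")  # teardown runs after the session ends
```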
@@ -192,7 +192,6 @@ def aiplatform_cleanup(model_name: str, location: str, versions: list[str]) -> None:
)

-

def run_notebook(
ipynb_file: str,
prelude: str = "",
@@ -203,7 +202,8 @@
skip_shell_commands: bool = False,
until_end: bool = False,
) -> None:
-import nbclient
+from nbclient.client import NotebookClient
+from nbclient.exceptions import CellExecutionError
import nbformat

def notebook_filter_section(
@@ -283,10 +283,10 @@ def notebook_filter_section(

# Run the notebook.
error = ""
-client = nbclient.NotebookClient(nb)
+client = NotebookClient(nb)
try:
client.execute()
-except nbclient.exceptions.CellExecutionError as e:
+except CellExecutionError as e:
# Remove colors and other escape characters to make it easier to read in the logs.
# https://stackoverflow.com/a/33925425
error = re.sub(r"(\x9B|\x1B\[)[0-?]*[ -\/]*[@-~]", "", str(e))
@@ -299,22 +299,24 @@

def run_notebook_parallel(
ipynb_file: str,
-prelude: str,
-sections: list[str],
-variables: dict[str, dict] = {},
-replace: dict[str, dict[str, str]] = {},
-skip_shell_commands: list[str] = [],
+sections: dict[str, dict],
+prelude: str = "",
+variables: dict = {},
+replace: dict[str, str] = {},
+skip_shell_commands: bool = False,
) -> None:
args = [
{
"ipynb_file": ipynb_file,
"section": section,
"prelude": prelude,
"variables": variables.get(section, {}),
"replace": replace.get(section, {}),
"skip_shell_commands": section in skip_shell_commands,
"prelude": params.get("prelude", prelude),
"variables": {**variables, **params.get("variables", {})},
"replace": {**replace, **params.get("replace", {})},
"skip_shell_commands": params.get(
"skip_shell_commands", skip_shell_commands
),
}
-for section in sections
+for section, params in sections.items()
]
with multiprocessing.Pool(len(args)) as pool:
pool.map(_run_notebook_section, args)
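The refactor folds the old `sections` list plus the separate `variables`/`replace` maps into one `sections` dict keyed by section name, with per-section overrides merged over the shared defaults. A hedged sketch of a call under the new signature (notebook name, section titles, and values are illustrative only):

```python
run_notebook_parallel(
    "example.ipynb",  # hypothetical notebook
    sections={
        # Empty dict: run this section with only the shared defaults below.
        "# Setup": {},
        # Per-section entries are merged over the shared defaults.
        "# Train": {
            "variables": {"epochs": 1},
            "replace": {'--runner="DataflowRunner"': '--runner="DirectRunner"'},
            "skip_shell_commands": True,
        },
    },
    variables={"project": "my-project"},  # merged into every section
)
```
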
6 changes: 5 additions & 1 deletion people-and-planet-ai/land-cover-classification/README.md
@@ -1,6 +1,10 @@
# 🌍 Land cover classification -- _image segmentation_

-[![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/GoogleCloudPlatform/python-docs-samples/blob/main/people-and-planet-ai/land-cover-classification/README.ipynb)
+## [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/GoogleCloudPlatform/python-docs-samples/blob/main/people-and-planet-ai/land-cover-classification/README.ipynb) 🌍 TensorFlow with Earth Engine introduction
+
+## [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/GoogleCloudPlatform/python-docs-samples/blob/main/people-and-planet-ai/land-cover-classification/cloud-tensorflow.ipynb) ☁️ Scaling TensorFlow with Cloud
+
+## [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/GoogleCloudPlatform/python-docs-samples/blob/main/people-and-planet-ai/land-cover-classification/land-cover-change.ipynb) 🗺️ Visualizing land cover change

> [Watch the video in YouTube<br> ![thumbnail](http://img.youtube.com/vi/zImQf91ffFo/0.jpg)](https://youtu.be/zImQf91ffFo)
@@ -162,8 +162,7 @@
"source": [
"repo_url = \"https://raw.githubusercontent.com/GoogleCloudPlatform/python-docs-samples/main/people-and-planet-ai/land-cover-classification\"\n",
"\n",
"!wget --quiet -O requirements.txt {repo_url}/requirements.txt\n",
"!wget --quiet -O constraints.txt {repo_url}/constraints.txt"
"!wget --quiet {repo_url}/requirements.txt"
]
},
{
@@ -172,7 +171,7 @@
"id": "rPWUYDKCwibV"
},
"source": [
 💡 For more">
-"> 💡 For more information about the `requirements.txt` and `constraints.txt` files, see the [`pip` user guide](https://pip.pypa.io/en/stable/user_guide/)."
+"> 💡 For more information about the `requirements.txt` file, see the [`pip` user guide](https://pip.pypa.io/en/stable/user_guide/)."
]
},
{
@@ -195,7 +194,7 @@
"outputs": [],
"source": [
"# Install the dependencies.\n",
"!pip install --quiet -r requirements.txt -c constraints.txt\n",
"!pip install --quiet -r requirements.txt\n",
"\n",
"# Restart the runtime by ending the process.\n",
"exit()"
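For context on the deleted flag: a pip constraints file pins versions during resolution without adding packages to the install set, so `-r requirements.txt -c constraints.txt` installed only the requirements while forcing pinned versions on everything pip resolved. With exact pins (e.g. `apache-beam[gcp]==2.42.0`) now written directly into `requirements.txt`, the plain install is equivalent for these samples; a notebook-style cell for comparison:

```python
# Before: a separate constraints file capped versions chosen by the resolver.
# !pip install --quiet -r requirements.txt -c constraints.txt

# After: requirements.txt itself carries the exact pins.
!pip install --quiet -r requirements.txt
```
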
20 changes: 0 additions & 20 deletions people-and-planet-ai/land-cover-classification/constraints.txt

This file was deleted.

93 changes: 48 additions & 45 deletions people-and-planet-ai/land-cover-classification/e2e_test.py
@@ -14,6 +14,7 @@

from __future__ import annotations

+from collections.abc import Iterable
import tempfile
import textwrap

@@ -61,15 +62,15 @@ def model_path(bucket_name: str) -> str:


@pytest.fixture(scope="session")
-def cloud_run_service_name(unique_name: str, location: str) -> str:
+def cloud_run_service_name(unique_name: str, location: str) -> Iterable[str]:
# The notebook itself creates the service.
service_name = unique_name
yield service_name
conftest.cloud_run_cleanup(service_name, location)


@pytest.fixture(scope="session")
-def aiplatform_model_name(unique_name: str, location: str) -> str:
+def aiplatform_model_name(unique_name: str, location: str) -> Iterable[str]:
# The notebook itself creates the service.
model_name = unique_name.replace("-", "_")
yield model_name
@@ -164,60 +165,62 @@ def test_land_cover_tensorflow(
ee_init()
"""
),
-sections=[
-"# 📚 Understand the data",
-"# 🗄 Create the dataset",
-"# 🧠 Train the model",
-"## 💻 Local predictions",
-"## ☁️ Cloud Run predictions",
-"## 🧺 Dataflow batch prediction",
-"## 🌍 Earth Engine with AI Platform",
-],
-variables={
+sections={
+"# 📚 Understand the data": {},
"# 🗄 Create the dataset": {
-"points_per_class": 1,
-"--data-path": f"gs://{bucket_name}/dataflow-data",
+"variables": {
+"points_per_class": 1,
+"--data-path": f"gs://{bucket_name}/dataflow-data",
+},
+"replace": {
+'--runner="DataflowRunner"': " ".join(
+[
+'--runner="DataflowRunner"',
+f"--job_name={unique_name}-dataset",
+"--max-requests=1",
+]
+)
+},
},
"# 🧠 Train the model": {
-"display_name": unique_name,
-"data_path": data_path,
-"epochs": 1,
+"variables": {
+"display_name": unique_name,
+"data_path": data_path,
+"epochs": 1,
+},
},
"## 💻 Local predictions": {
-"model_path": model_path,
+"variables": {
+"model_path": model_path,
+},
},
"## ☁️ Cloud Run predictions": {
-"service_name": cloud_run_service_name,
-"model_path": model_path,
-"identity_token": identity_token,
+"variables": {
+"service_name": cloud_run_service_name,
+"model_path": model_path,
+"identity_token": identity_token,
+},
},
"## 🧺 Dataflow batch prediction": {
-"model_path": model_path,
+"variables": {
+"model_path": model_path,
+},
+"replace": {
+'--runner="DataflowRunner"': " ".join(
+[
+'--runner="DataflowRunner"',
+f"--job_name={unique_name}-predict",
+"--max-requests=1",
+f"--locations-file={locations_file.name}",
+]
+)
+},
},
"## 🌍 Earth Engine with AI Platform": {
-"model_name": aiplatform_model_name,
-"model_path": model_path,
-},
-},
-replace={
-"# 🗄 Create the dataset": {
-'--runner="DataflowRunner"': " ".join(
-[
-'--runner="DataflowRunner"',
-f"--job_name={unique_name}-dataset",
-"--max-requests=1",
-]
-)
-},
-"## 🧺 Dataflow batch prediction": {
-'--runner="DataflowRunner"': " ".join(
-[
-'--runner="DataflowRunner"',
-f"--job_name={unique_name}-predict",
-"--max-requests=1",
-f"--locations-file={locations_file.name}",
-]
-)
+"variables": {
+"model_name": aiplatform_model_name,
+"model_path": model_path,
+},
},
},
)
@@ -22,7 +22,8 @@

TEST_CONFIG_OVERRIDE = {
# You can opt out from the test for specific Python versions.
-# ℹ️ Testing only in Python 3.9 since it's the latest supported version for Dataflow.
+# ℹ️ Test only in Python 3.9 since that's what Dataflow currently supports:
+# https://cloud.google.com/dataflow/docs/support/beam-runtime-support
"ignored_versions": ["2.7", "3.6", "3.7", "3.8", "3.10"],
# Old samples are opted out of enforcing Python type hints
# All new samples should feature them
@@ -39,7 +40,5 @@
"pip_version_override": None,
# A dictionary you want to inject into your test. Don't put any
# secrets here. These values will override predefined values.
"envs": {
"PYTEST_ADDOPTS": "-n=16", # not CPU-bound, it's bound by Cloud requests.
},
"envs": {},
}
@@ -107,11 +107,11 @@ def run_tensorflow(
import tensorflow as tf

class LandCoverModel(ModelHandler[np.ndarray, np.ndarray, tf.keras.Model]):
-def load_model(self) -> tf.keras.Model: # noqa: ANN101
+def load_model(self) -> tf.keras.Model:
return tf.keras.models.load_model(model_path)

def run_inference(
-self, # noqa: ANN101
+self,
batch: Sequence[np.ndarray],
model: tf.keras.Model,
inference_args: Optional[dict] = None,
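The dropped `# noqa: ANN101` comments suppressed a missing-annotation-for-`self` lint rule that evidently no longer needs silencing. For context, this class is an Apache Beam `ModelHandler`, which `RunInference` uses to batch elements, load the model once per worker, and call `run_inference`. A hedged, self-contained sketch of the pattern with a toy handler (not this sample's actual pipeline):

```python
from typing import Optional, Sequence

import apache_beam as beam
from apache_beam.ml.inference.base import ModelHandler, RunInference
import numpy as np


class IdentityModel(ModelHandler[np.ndarray, np.ndarray, object]):
    """Toy stand-in for LandCoverModel: no real model, passes inputs through."""

    def load_model(self) -> object:
        return object()  # a real handler would load a tf.keras model here

    def run_inference(
        self,
        batch: Sequence[np.ndarray],
        model: object,
        inference_args: Optional[dict] = None,
    ) -> Sequence[np.ndarray]:
        return batch  # a real handler would run model.predict(...) on the batch


with beam.Pipeline() as pipeline:
    _ = (
        pipeline
        | beam.Create([np.zeros((8, 8, 3))])
        | RunInference(IdentityModel())  # batches inputs, calls run_inference
        | beam.Map(lambda x: print(x.shape))
    )
```
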
@@ -1,7 +1,7 @@
# Requirements to run tests.
-apache-beam[interactive]==2.41.0
+apache-beam[interactive]==2.42.0
importnb==2022.10.24
-ipykernel==6.16.0
+ipykernel==6.17.1
nbclient==0.7.0
-pytest-xdist==2.5.0
-pytest==7.1.2
+pytest-xdist==3.0.2
+pytest==7.2.0
@@ -1,8 +1,8 @@
# Requirements to run the notebooks.
-apache-beam[gcp]==2.41.0
-earthengine-api==0.1.325
+apache-beam[gcp]==2.42.0
+earthengine-api==0.1.331
folium==0.13.0
-google-cloud-aiplatform==1.17.1
-imageio==2.22.0
+google-cloud-aiplatform==1.18.3
+imageio==2.22.4
plotly==5.11.0
tensorflow==2.10.0
@@ -0,0 +1 @@
+3.10.x
@@ -1,5 +1,5 @@
# Requirements for the prediction web service.
Flask==2.2.2
-earthengine-api==0.1.325
+earthengine-api==0.1.331
gunicorn==20.1.0
tensorflow==2.10.0
4 changes: 2 additions & 2 deletions people-and-planet-ai/land-cover-classification/setup.py
@@ -20,8 +20,8 @@
url="https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/people-and-planet-ai/land-cover-classification",
packages=["serving"],
install_requires=[
"apache-beam[gcp]==2.41.0",
"earthengine-api==0.1.325",
"apache-beam[gcp]==2.42.0",
"earthengine-api==0.1.331",
"tensorflow==2.10.0",
],
)
