Skip to content

Rename tests #1297

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Feb 3, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions .github/workflows/build.wheel.sh
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,10 @@ run_test() {
CPYTHON_VERSION=$($entry -c 'import sys; print(str(sys.version_info[0])+str(sys.version_info[1]))')
(cd wheelhouse && $entry -m pip install tensorflow_io-*-cp${CPYTHON_VERSION}-*.whl)
$entry -m pip install -q pytest pytest-benchmark boto3 fastavro avro-python3 scikit-image pandas pyarrow==3.0.0 google-cloud-pubsub==2.1.0 google-cloud-bigtable==1.6.0 google-cloud-bigquery-storage==1.1.0 google-cloud-bigquery==2.3.1 google-cloud-storage==1.32.0
(cd tests && $entry -m pytest --benchmark-disable -v --import-mode=append $(find . -type f \( -iname "test_*.py" ! \( -iname "test_*_eager.py" \) \)))
(cd tests && $entry -m pytest --benchmark-disable -v --import-mode=append $(find . -type f \( -iname "test_*_eager.py" ! \( -iname "test_bigquery_eager.py" \) \)))
(cd tests && $entry -m pytest --benchmark-disable -v --import-mode=append $(find . -type f \( -iname "test_*_v1.py" \)))
(cd tests && $entry -m pytest --benchmark-disable -v --import-mode=append $(find . -type f \( -iname "test_*.py" ! \( -iname "test_*_v1.py" -o -iname "test_bigquery.py" \) \)))
# GRPC and test_bigquery tests have to be executed separately because of https://github.com/grpc/grpc/issues/20034
(cd tests && $entry -m pytest --benchmark-disable -v --import-mode=append $(find . -type f \( -iname "test_bigquery_eager.py" \)))
(cd tests && $entry -m pytest --benchmark-disable -v --import-mode=append $(find . -type f \( -iname "test_bigquery.py" \)))
}

PYTHON_VERSION=python
Expand Down
13 changes: 6 additions & 7 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -434,14 +434,13 @@ jobs:
python --version
python -m pip install -U pytest-benchmark
rm -rf tensorflow_io
(cd tests && python -m pytest -s -v test_lmdb_eager.py)
(python -m pytest -s -v test_image_eager.py -k "webp or ppm or bmp or bounding or exif or hdr or openexr or tiff or avif")
(python -m pytest -s -v test_serialization_eager.py)
(python -m pytest -s -v test_io_dataset_eager.py -k "numpy or hdf5 or audio or to_file")
(python -m pytest -s -v test_http_eager.py)
(cd tests && python -m pytest -s -v test_lmdb.py)
(python -m pytest -s -v test_image.py -k "webp or ppm or bmp or bounding or exif or hdr or openexr or tiff or avif")
(python -m pytest -s -v test_serialization.py)
(python -m pytest -s -v test_io_dataset.py -k "numpy or hdf5 or audio or to_file")
(python -m pytest -s -v test_http.py)
python -m pip install google-cloud-bigquery-storage==0.7.0 google-cloud-bigquery==1.22.0 fastavro
(python -m pytest -s -v test_bigquery_eager.py)
(python -m pytest -s -v test_dicom_eager.py)
(python -m pytest -s -v test_bigquery.py)
(python -m pytest -s -v test_dicom.py)

release:
Expand Down
12 changes: 6 additions & 6 deletions docs/development.md
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ bazel build -s --verbose_failures $BAZEL_OPTIMIZATION //tensorflow_io/...
# `bazel-bin/tensorflow_io/core/python/ops/` and it is possible
# to run tests with `pytest`, e.g.:
sudo python3 -m pip install pytest
TFIO_DATAPATH=bazel-bin python3 -m pytest -s -v tests/test_serialization_eager.py
TFIO_DATAPATH=bazel-bin python3 -m pytest -s -v tests/test_serialization.py
```

NOTE: When running pytest, `TFIO_DATAPATH=bazel-bin` has to be passed so that python can utilize the generated shared libraries after the build process.
Expand Down Expand Up @@ -147,7 +147,7 @@ bazel build -s --verbose_failures $BAZEL_OPTIMIZATION //tensorflow_io/...
# `bazel-bin/tensorflow_io/core/python/ops/` and it is possible
# to run tests with `pytest`, e.g.:
sudo python3 -m pip install pytest
TFIO_DATAPATH=bazel-bin python3 -m pytest -s -v tests/test_serialization_eager.py
TFIO_DATAPATH=bazel-bin python3 -m pytest -s -v tests/test_serialization.py
```

##### CentOS 8
Expand Down Expand Up @@ -207,7 +207,7 @@ scl enable rh-python36 devtoolset-9 \

TFIO_DATAPATH=bazel-bin \
scl enable rh-python36 devtoolset-9 \
'python3 -m pytest -s -v tests/test_serialization_eager.py'
'python3 -m pytest -s -v tests/test_serialization.py'
```

#### Python Wheels
Expand Down Expand Up @@ -295,7 +295,7 @@ use:
$ bash -x -e tests/test_kafka/kafka_test.sh

# Run the tests
$ TFIO_DATAPATH=bazel-bin pytest -s -vv tests/test_kafka_eager.py
$ TFIO_DATAPATH=bazel-bin pytest -s -vv tests/test_kafka.py
```

Testing `Datasets` associated with tools such as `Elasticsearch` or `MongoDB`
Expand All @@ -307,7 +307,7 @@ require docker to be available on the system. In such scenarios, use:
$ bash tests/test_elasticsearch/elasticsearch_test.sh start

# Run the tests
$ TFIO_DATAPATH=bazel-bin pytest -s -vv tests/test_elasticsearch_eager.py
$ TFIO_DATAPATH=bazel-bin pytest -s -vv tests/test_elasticsearch.py

# Stop and remove the container
$ bash tests/test_elasticsearch/elasticsearch_test.sh stop
Expand All @@ -319,7 +319,7 @@ For example, to run tests related to `parquet` dataset's, use:

```sh
# Just run the test
$ TFIO_DATAPATH=bazel-bin pytest -s -vv tests/test_parquet_eager.py
$ TFIO_DATAPATH=bazel-bin pytest -s -vv tests/test_parquet.py
```


Expand Down
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
135 changes: 107 additions & 28 deletions tests/test_dicom.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@


import os
import numpy as np
import pytest

import tensorflow as tf
Expand All @@ -35,8 +36,7 @@


def test_dicom_input():
"""test_dicom_input
"""
"""test_dicom_input"""
_ = tfio.image.decode_dicom_data
_ = tfio.image.decode_dicom_image
_ = tfio.image.dicom_tags
Expand Down Expand Up @@ -66,32 +66,26 @@ def test_dicom_input():
("MR-MONO2-12-shoulder.dcm", (1, 1024, 1024, 1)),
("OT-MONO2-8-a7.dcm", (1, 512, 512, 1)),
("US-PAL-8-10x-echo.dcm", (10, 430, 600, 3)),
("TOSHIBA_J2K_OpenJPEGv2Regression.dcm", (1, 512, 512, 1)),
],
)
def test_decode_dicom_image(fname, exp_shape):
"""test_decode_dicom_image
"""
"""test_decode_dicom_image"""

dcm_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "test_dicom", fname
)

g1 = tf.compat.v1.Graph()
file_contents = tf.io.read_file(filename=dcm_path)

with g1.as_default():
file_contents = tf.io.read_file(filename=dcm_path)
dcm_image = tfio.image.decode_dicom_image(
contents=file_contents,
dtype=tf.float32,
on_error="strict",
scale="auto",
color_dim=True,
)

sess = tf.compat.v1.Session(graph=g1)
dcm_image_np = sess.run(dcm_image)

assert dcm_image_np.shape == exp_shape
dcm_image = tfio.image.decode_dicom_image(
contents=file_contents,
dtype=tf.float32,
on_error="strict",
scale="auto",
color_dim=True,
)
assert dcm_image.numpy().shape == exp_shape


@pytest.mark.parametrize(
Expand Down Expand Up @@ -121,23 +115,108 @@ def test_decode_dicom_image(fname, exp_shape):
],
)
def test_decode_dicom_data(fname, tag, exp_value):
"""test_decode_dicom_data
"""
"""test_decode_dicom_data"""

dcm_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "test_dicom", fname
)

g1 = tf.compat.v1.Graph()
file_contents = tf.io.read_file(filename=dcm_path)

dcm_data = tfio.image.decode_dicom_data(contents=file_contents, tags=tag)

assert dcm_data.numpy() == exp_value


def test_dicom_image_shape():
"""test_decode_dicom_image"""

dcm_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"test_dicom",
"US-PAL-8-10x-echo.dcm",
)

dataset = tf.data.Dataset.from_tensor_slices([dcm_path])
dataset = dataset.map(tf.io.read_file)
dataset = dataset.map(lambda e: tfio.image.decode_dicom_image(e, dtype=tf.uint16))
dataset = dataset.map(lambda e: tf.image.resize(e, (224, 224)))


def test_dicom_image_concurrency():
"""test_decode_dicom_image_currency"""

with g1.as_default():
file_contents = tf.io.read_file(filename=dcm_path)
dcm_data = tfio.image.decode_dicom_data(contents=file_contents, tags=tag)
@tf.function
def preprocess(dcm_content):
tags = tfio.image.decode_dicom_data(
dcm_content, tags=[tfio.image.dicom_tags.PatientsName]
)
tf.print(tags)
image = tfio.image.decode_dicom_image(dcm_content, dtype=tf.float32)
return image

dcm_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"test_dicom",
"TOSHIBA_J2K_OpenJPEGv2Regression.dcm",
)

dataset = (
tf.data.Dataset.from_tensor_slices([dcm_path])
.repeat()
.map(tf.io.read_file)
.map(preprocess, num_parallel_calls=8)
.take(200)
)
for i, item in enumerate(dataset):
print(tf.shape(item), i)
assert np.array_equal(tf.shape(item), [1, 512, 512, 1])

dcm_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"test_dicom",
"US-PAL-8-10x-echo.dcm",
)

dataset = (
tf.data.Dataset.from_tensor_slices([dcm_path])
.repeat()
.map(tf.io.read_file)
.map(preprocess, num_parallel_calls=8)
.take(200)
)
for i, item in enumerate(dataset):
print(tf.shape(item), i)
assert np.array_equal(tf.shape(item), [10, 430, 600, 3])


def test_dicom_sequence():
"""test_decode_dicom_sequence"""

dcm_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"test_dicom",
"2.25.304589190180579357564631626197663875025.dcm",
)
dcm_content = tf.io.read_file(filename=dcm_path)

tags = tfio.image.decode_dicom_data(
dcm_content, tags=["[0x0008,0x1115][0][0x0008,0x1140][0][0x0008,0x1155]"]
)
assert np.array_equal(tags, [b"2.25.211904290918469145111906856660599393535"])

dcm_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"test_dicom",
"US-PAL-8-10x-echo.dcm",
)
dcm_content = tf.io.read_file(filename=dcm_path)

sess = tf.compat.v1.Session(graph=g1)
dcm_data_np = sess.run(dcm_data)
tags = tfio.image.decode_dicom_data(dcm_content, tags=["[0x0020,0x000E]"])
assert np.array_equal(tags, [b"999.999.94827453"])

assert dcm_data_np == exp_value
tags = tfio.image.decode_dicom_data(dcm_content, tags=["0x0020,0x000e"])
assert np.array_equal(tags, [b"999.999.94827453"])


if __name__ == "__main__":
Expand Down
Loading