chore: remove old deprecations #209

Merged: 5 commits, Feb 19, 2024
18 changes: 0 additions & 18 deletions src/nd2/_util.py
@@ -2,7 +2,6 @@
 
 import math
 import re
-import warnings
 from datetime import datetime, timezone
 from itertools import product
 from typing import TYPE_CHECKING, BinaryIO, NamedTuple, cast
@@ -11,7 +10,6 @@
 from os import PathLike
 from typing import Any, Callable, ClassVar, Final, Mapping, Sequence, Union
 
-from nd2.readers import ND2Reader
 from nd2.structures import ExpLoop
 
 StrOrPath = Union[str, PathLike]
@@ -75,19 +73,6 @@ def is_legacy(path: StrOrPath) -> bool:
         return fh.read(4) == OLD_HEADER_MAGIC
 
 
-def get_reader(
-    path: str, validate_frames: bool = False, search_window: int = 100
-) -> ND2Reader:  # pragma: no cover
-    warnings.warn(
-        "Deprecated, use nd2.readers.ND2Reader.create if you want to "
-        "directly instantiate a reader subclass.",
-        stacklevel=2,
-    )
-    from nd2.readers import ND2Reader
-
-    return ND2Reader.create(path, search_window * 1000 if validate_frames else None)
-
-
 def is_new_format(path: str) -> bool:
     # TODO: this is just for dealing with missing test data
     with open(path, "rb") as fh:
@@ -157,9 +142,6 @@ def parse_time(time_str: str) -> datetime:
     raise ValueError(f"Could not parse {time_str}")  # pragma: no cover
 
 
-# utils for converting records to dicts, in recorded_data method
-
-
def convert_records_to_dict_of_lists(
    records: ListOfDicts, null_val: Any = float("nan")
) -> DictOfLists:
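The removed `get_reader` helper pointed users at `nd2.readers.ND2Reader.create`, which takes the error-search radius in bytes (or `None` to skip frame validation). A minimal migration sketch, assuming a hypothetical `my_file.nd2`:

```python
from nd2.readers import ND2Reader

# before (removed): get_reader("my_file.nd2", validate_frames=True, search_window=100)
# after: same 100 KB window, expressed in bytes
reader = ND2Reader.create("my_file.nd2", 100 * 1000)

# or, matching the old validate_frames=False default: no validation
reader = ND2Reader.create("my_file.nd2", None)
```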
61 changes: 1 addition & 60 deletions src/nd2/nd2file.py
@@ -84,13 +84,6 @@ class ND2File:
     search_window : int
         When validate_frames is true, this is the search window (in KB) that will
         be used to try to find the actual chunk position. by default 100 KB
-    read_using_sdk : Optional[bool]
-        :warning: **DEPRECATED**. No longer does anything.
-
-        If `True`, use the SDK to read the file. If `False`, inspects the chunkmap
-        and reads from a `numpy.memmap`. If `None` (the default), uses the SDK if
-        the file is compressed, otherwise uses the memmap. Note: using
-        `read_using_sdk=False` on a compressed file will result in a ValueError.
     """
 
     def __init__(
@@ -99,15 +92,7 @@ def __init__(
         *,
         validate_frames: bool = False,
         search_window: int = 100,
-        read_using_sdk: bool | None = None,
     ) -> None:
-        if read_using_sdk is not None:
-            warnings.warn(
-                "The `read_using_sdk` argument is deprecated and will be removed in "
-                "a future version.",
-                FutureWarning,
-                stacklevel=2,
-            )
         self._error_radius: int | None = (
             search_window * 1000 if validate_frames else None
         )
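After this change the constructor accepts only the documented keywords. A minimal sketch, assuming a hypothetical `my_file.nd2`:

```python
import nd2

# search_window is in KB and is only consulted when validate_frames=True
with nd2.ND2File("my_file.nd2", validate_frames=True, search_window=100) as f:
    print(f.shape, f.dtype)
```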
@@ -405,7 +390,6 @@ def unstructured_metadata(
         strip_prefix: bool = True,
         include: set[str] | None = None,
         exclude: set[str] | None = None,
-        unnest: bool | None = None,
     ) -> dict[str, Any]:
         """Exposes, and attempts to decode, each metadata chunk in the file.
 
@@ -430,8 +414,6 @@
             all metadata sections found in the file are included.
         exclude : set[str] | None, optional
             If provided, exclude the specified keys from the output. by default `None`
-        unnest : bool, optional
-            :warning: **DEPRECATED**. No longer does anything.
 
         Returns
         -------
@@ -440,12 +422,6 @@
             metadata chunk (things like 'CustomData|RoiMetadata_v1' or
             'ImageMetadataLV'), and values that are associated metadata chunk.
         """
-        if unnest is not None:
-            warnings.warn(
-                "The unnest parameter is deprecated, and no longer has any effect.",
-                FutureWarning,
-                stacklevel=2,
-            )
         return self._rdr.unstructured_metadata(strip_prefix, include, exclude)
 
     @cached_property
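Callers that passed `unnest` can simply drop the argument; chunk filtering is still available via `include`/`exclude`. A sketch using the chunk name given in the docstring above (hypothetical file path):

```python
import nd2

with nd2.ND2File("my_file.nd2") as f:
    # decode only the ImageMetadataLV chunk
    meta = f.unstructured_metadata(include={"ImageMetadataLV"})
    print(list(meta))
```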
@@ -1156,27 +1132,6 @@ def __repr__(self) -> str:
             extra = ""
         return f"<ND2File at {hex(id(self))}{extra}>"
 
-    @property
-    def recorded_data(self) -> DictOfLists:
-        """Return tabular data recorded for each frame of the experiment.
-
-        !!! warning "Deprecated"
-
-            This method is deprecated and will be removed in a future version.
-            Please use the [`events`][nd2.ND2File.events] method instead. To get the
-            same dict-of-lists output that `recorded_data` returns, use
-            `ndfile.events(orient='list')`
-        """
-        warnings.warn(
-            "recorded_data is deprecated and will be removed in a future version."
-            "Please use the `events` method instead. To get the same dict-of-lists "
-            "output, use `events(orient='list')`",
-            FutureWarning,
-            stacklevel=2,
-        )
-
-        return self.events(orient="list")
-
     @cached_property
     def binary_data(self) -> BinaryLayers | None:
         """Return binary layers embedded in the file.
@@ -1234,7 +1189,6 @@ def imread(
     dask: Literal[False] = ...,
     xarray: Literal[False] = ...,
     validate_frames: bool = ...,
-    read_using_sdk: bool | None = None,
 ) -> np.ndarray: ...
 
 
@@ -1245,7 +1199,6 @@ def imread(
     dask: bool = ...,
     xarray: Literal[True],
     validate_frames: bool = ...,
-    read_using_sdk: bool | None = None,
 ) -> xr.DataArray: ...
 
 
@@ -1256,7 +1209,6 @@ def imread(
     dask: Literal[True],
     xarray: Literal[False] = ...,
     validate_frames: bool = ...,
-    read_using_sdk: bool | None = None,
 ) -> dask.array.core.Array: ...
 
 
@@ -1266,7 +1218,6 @@ def imread(
     dask: bool = False,
     xarray: bool = False,
     validate_frames: bool = False,
-    read_using_sdk: bool | None = None,
 ) -> np.ndarray | xr.DataArray | dask.array.core.Array:
     """Open `file`, return requested array type, and close `file`.
 
@@ -1289,23 +1240,13 @@
         shifted relative to the predicted offset (i.e. in a corrupted file).
         This comes at a slight performance penalty at file open, but may "rescue"
         some corrupt files. by default False.
-    read_using_sdk : Optional[bool]
-        :warning: **DEPRECATED**. No longer used.
-
-        If `True`, use the SDK to read the file. If `False`, inspects the chunkmap and
-        reads from a `numpy.memmap`. If `None` (the default), uses the SDK if the file
-        is compressed, otherwise uses the memmap.
-        Note: using `read_using_sdk=False` on a compressed file will result in a
-        ValueError.
 
     Returns
     -------
     Union[np.ndarray, dask.array.Array, xarray.DataArray]
         Array subclass, depending on arguments used.
     """
-    with ND2File(
-        file, validate_frames=validate_frames, read_using_sdk=read_using_sdk
-    ) as nd2:
+    with ND2File(file, validate_frames=validate_frames) as nd2:
         if xarray:
             return nd2.to_xarray(delayed=dask)
         elif dask:
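The overloads above encode how the flags select the return type. A quick sketch (hypothetical path):

```python
import nd2

arr = nd2.imread("my_file.nd2")                # np.ndarray, fully loaded
lazy = nd2.imread("my_file.nd2", dask=True)    # dask.array.core.Array, lazy
xarr = nd2.imread("my_file.nd2", xarray=True)  # xr.DataArray with labeled axes
```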
2 changes: 1 addition & 1 deletion src/nd2/readers/_legacy/legacy_reader.py
@@ -430,7 +430,7 @@ def header(self) -> dict:
 
     def events(self, orient: str, null_value: Any) -> list | Mapping:
         warnings.warn(
-            "`recorded_data` is not implemented for legacy ND2 files",
+            "`events` is not implemented for legacy ND2 files",
             UserWarning,
             stacklevel=2,
         )
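The warning text now matches the public method name. A hedged check, assuming a hypothetical legacy-format `legacy.nd2`:

```python
import pytest
import nd2

with nd2.ND2File("legacy.nd2") as f:
    # legacy readers warn (rather than raise) and return no event data
    with pytest.warns(UserWarning, match="not implemented for legacy"):
        f.events()
```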
14 changes: 0 additions & 14 deletions tests/test_deprecations.py

This file was deleted.

15 changes: 6 additions & 9 deletions tests/test_metadata.py
@@ -3,7 +3,7 @@
 import json
 import sys
 from pathlib import Path
-from typing import Literal
+from typing import Any, Literal
 
 import dask.array as da
 import pytest
@@ -41,7 +41,7 @@ def test_metadata_integrity(path: str) -> None:
         assert stats[key] == EXPECTED[name][key], f"{key} mismatch"
 
 
-def _clear_names(*exps):
+def _clear_names(*exps: Any) -> None:
     for exp in exps:
         for item in exp:
             if item["type"] == "XYPosLoop":
@@ -87,13 +87,11 @@ def test_metadata_extraction(new_nd2: Path) -> None:
 
         assert isinstance(nd.unstructured_metadata(), dict)
         assert isinstance(nd.events(), list)
-        with pytest.warns(FutureWarning):
-            assert isinstance(nd.recorded_data, dict)
 
     assert nd.closed
 
 
-def test_metadata_extraction_legacy(old_nd2):
+def test_metadata_extraction_legacy(old_nd2: Path) -> None:
     assert ND2File.is_supported_file(old_nd2)
     with ND2File(old_nd2) as nd:
         assert repr(nd)
@@ -118,12 +116,11 @@ def test_metadata_extraction_legacy(old_nd2):
     assert nd.closed
 
 
-def test_recorded_data() -> None:
+def test_events() -> None:
     # this method is smoke-tested for every file above...
     # but specific values are asserted here:
     with ND2File(DATA / "cluster.nd2") as f:
-        with pytest.warns(FutureWarning, match="deprecated"):
-            rd = f.recorded_data
+        rd = f.events(orient="list")
 
     headers = list(rd)
     row_0 = [rd[h][0] for h in headers]
@@ -176,7 +173,7 @@
 
 
 @pytest.mark.parametrize("orient", ["records", "dict", "list"])
-def test_events(new_nd2: Path, orient: Literal["records", "dict", "list"]) -> None:
+def test_events2(new_nd2: Path, orient: Literal["records", "dict", "list"]) -> None:
     with ND2File(new_nd2) as f:
         events = f.events(orient=orient)
 
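For reference, the two most common orientations relate like this; a sketch reusing the `DATA` path defined in this test module:

```python
from nd2 import ND2File

with ND2File(DATA / "cluster.nd2") as f:
    records = f.events(orient="records")  # list of per-frame dicts
    columns = f.events(orient="list")     # dict mapping column name -> list
    assert len(records) == len(next(iter(columns.values())))
```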