Skip to content

4855 lazy resampling impl -- Compose #5860

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 286 commits into from
Mar 23, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
286 commits
Select commit Hold shift + click to select a range
3083442
smaller copy
wyli Jan 25, 2023
1e15fd1
fixes tests
wyli Jan 25, 2023
88ab8ed
fixe tests
wyli Jan 25, 2023
79f368e
fixes push transform
wyli Jan 25, 2023
edcd20f
one of
wyli Jan 25, 2023
256267b
push transform tensor
wyli Jan 25, 2023
bd33d93
refactoring push transform
wyli Jan 25, 2023
2c040be
fixes tests
wyli Jan 25, 2023
7699634
default false lazy
wyli Jan 25, 2023
aa0389e
fixes tests
wyli Jan 25, 2023
0fbbd78
update one_of/random_order tests
wyli Jan 25, 2023
a2a60f1
evaluate cases
wyli Jan 26, 2023
fce044b
no inplace meta change
wyli Jan 26, 2023
d6d8e9b
update samples
wyli Jan 26, 2023
4b062ef
multisample cropping
wyli Jan 26, 2023
c00b2f6
fixes tests
wyli Jan 26, 2023
1f63bbc
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Jan 26, 2023
526f1f7
style fix
wyli Jan 26, 2023
a61d3ef
fixes style
wyli Jan 26, 2023
93784f4
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Jan 27, 2023
937744c
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Jan 28, 2023
7c5a5df
fixes tests
wyli Jan 28, 2023
328b581
remove update_meta
wyli Jan 28, 2023
60246f4
refactor samples
wyli Jan 29, 2023
362cd71
multi-sample lazy cropping
wyli Jan 29, 2023
b3e07e8
simplify stack
wyli Jan 29, 2023
46aa093
optional labels for cropping
wyli Jan 29, 2023
8c88f80
remove label dep
wyli Jan 29, 2023
3952675
update compose
wyli Jan 29, 2023
6f98664
fixes pending operations
wyli Jan 29, 2023
0a13b08
update functional return
wyli Jan 30, 2023
d4bf10b
remove deepcopy
wyli Jan 30, 2023
7b12cac
cache grid
wyli Jan 30, 2023
d639b83
revivse utilities
wyli Jan 30, 2023
c30dbc8
adding new traceable keys
wyli Jan 30, 2023
23ccf83
update apply
wyli Jan 30, 2023
17f4e53
update utilities
wyli Jan 30, 2023
4727d60
update tests
wyli Jan 30, 2023
696e411
backward compatible
wyli Jan 30, 2023
47684d7
fixes #5509
wyli Jan 30, 2023
c508d5a
update types
wyli Jan 30, 2023
d562e01
fixes docstrings
wyli Jan 30, 2023
857e852
Merge branch 'minor-fixes' into 4855-lazy-resampling-impl
wyli Jan 30, 2023
acaf227
update resample
wyli Jan 30, 2023
5824f78
fixes merging issues
wyli Jan 31, 2023
71cc0c8
Merge branch 'minor-fixes' into 4855-lazy-resampling-impl
wyli Jan 31, 2023
ec7ffae
check tests
wyli Jan 31, 2023
49eaa5f
default affine
wyli Jan 31, 2023
2c90e2d
default affine
wyli Jan 31, 2023
ab7c44c
update based on comments
wyli Jan 31, 2023
9eec6b0
update based on comments
wyli Jan 31, 2023
53bb08e
Merge remote-tracking branch 'origin/minor-fixes' into 4855-lazy-resa…
wyli Jan 31, 2023
92d3b9d
update dtypes
wyli Jan 31, 2023
0ad9279
fixes typing
wyli Jan 31, 2023
6f5f21f
Merge branch 'minor-fixes' into 4855-lazy-resampling-impl
wyli Jan 31, 2023
3d33946
update dtype
wyli Jan 31, 2023
a438408
optional convert
wyli Jan 31, 2023
1e039ad
update based on comments
wyli Jan 31, 2023
234e326
Merge remote-tracking branch 'upstream/dev' into minor-fixes
wyli Jan 31, 2023
9d87532
dtype converting
wyli Jan 31, 2023
ead92fd
Merge remote-tracking branch 'origin/minor-fixes' into 4855-lazy-resa…
wyli Jan 31, 2023
990e16e
update
wyli Jan 31, 2023
0eb6602
c order array
wyli Jan 31, 2023
050953a
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Feb 1, 2023
3396fd3
fixes merging
wyli Feb 1, 2023
2bf6937
Merge remote-tracking branch 'upstream' into 4855-lazy-resampling-impl
wyli Feb 1, 2023
ffee808
resize spatial param
wyli Feb 1, 2023
c1f3c73
Disallow incomplete defs in optimizers module (#5928)
Shadow-Devil Feb 1, 2023
7585254
affine/resample align_corners=False option
wyli Feb 3, 2023
d13af2c
update
wyli Feb 3, 2023
e0e4921
Merge remote-tracking branch 'upstream/dev' into 4854-lazy-resampling…
wyli Feb 3, 2023
527699f
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Feb 3, 2023
541195a
adds integration tests
wyli Feb 3, 2023
8e3e80b
fixes min tests
wyli Feb 3, 2023
f26d3b2
tests
wyli Feb 3, 2023
3bec083
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Feb 6, 2023
ac0c50b
update align_corners=False
wyli Feb 6, 2023
ee4b9eb
deterministic tests
wyli Feb 6, 2023
40cc2bb
compose condition
wyli Feb 6, 2023
bf994b5
update compose
wyli Feb 6, 2023
2edb955
update device option
wyli Feb 6, 2023
3e30f3f
Merge branch 'dev' into 4855-lazy-resampling-impl
wyli Feb 6, 2023
96cce64
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Feb 8, 2023
1339909
testing cachedataset
wyli Feb 8, 2023
00620d9
update
wyli Feb 8, 2023
9239e06
integration tests
wyli Feb 8, 2023
3e46d26
more integration tests
wyli Feb 8, 2023
d399d60
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Feb 8, 2023
3f96c5e
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Feb 9, 2023
3b422b7
remove unused changes
wyli Feb 9, 2023
1c4bd53
adds lazy/non-lazy testing
wyli Feb 9, 2023
c601e45
update to use np.linalg
wyli Feb 9, 2023
5ac5fa9
update
wyli Feb 9, 2023
91dc115
update integration results
wyli Feb 9, 2023
c72d537
update answers
wyli Feb 9, 2023
bf599bf
Merge branch 'numpy-inverse' into 4855-lazy-resampling-impl
wyli Feb 9, 2023
07169f0
merging np.linalg usage
wyli Feb 9, 2023
3381963
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Feb 10, 2023
bfa3bb7
more tests
wyli Feb 10, 2023
e024e3d
update
wyli Feb 10, 2023
6f404a9
fix the pixelshuffle upsample shape mismatch problem.
binliunls Feb 12, 2023
f578df7
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Feb 13, 2023
68f7586
Merge remote-tracking branch 'upstream/dev' into 5971-fix-pixelshuffl…
wyli Feb 13, 2023
3c6e752
fixes flake8 errors
wyli Feb 13, 2023
726a269
Merge branch '5971-fix-pixelshuffle-upsample' into 4855-lazy-resampli…
wyli Feb 13, 2023
bd2fd0d
fixes tests
wyli Feb 13, 2023
a01a71b
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Feb 15, 2023
731ce6a
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Feb 16, 2023
4a46fda
update based on comments
wyli Feb 16, 2023
59f42f2
fixes tests
wyli Feb 16, 2023
ab01232
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Feb 17, 2023
1957962
remove spatial_rank from scale_affine
wyli Feb 17, 2023
5bf7650
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Feb 17, 2023
0f5ff94
Merge branch 'dev' into 4855-lazy-resampling-impl
yiheng-wang-nv Feb 20, 2023
6432ce7
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Feb 21, 2023
721e44a
Merge remote-tracking branch 'origin/4855-lazy-resampling-impl' into …
wyli Feb 21, 2023
9eb0f0f
Merge commit 'b9e17e8186cb25c8781e2c82de3b4915081df48b' into 4855-laz…
wyli Feb 24, 2023
f337312
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Feb 24, 2023
98e3b70
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Feb 24, 2023
eb3b65d
Merge branch 'dev' into 4855-lazy-resampling-impl
wyli Feb 26, 2023
28da58d
enhance resampling
wyli Feb 27, 2023
1b9ce4d
adds docstring
wyli Feb 27, 2023
d1d9cdd
update inshape
wyli Feb 27, 2023
47f48e0
fixes unit tests
wyli Feb 27, 2023
2f630a6
fixes integration tests
wyli Feb 27, 2023
ca00a46
fixes unit tests
wyli Feb 27, 2023
8dfbfbb
Merge branch 'dev' into 4855-lazy-resampling-impl
KumoLiu Feb 28, 2023
294a929
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Feb 28, 2023
a6123ef
not supporting 0 output shape
wyli Feb 28, 2023
f14391b
remove unused tests
wyli Feb 28, 2023
8354e13
add spacing
yiheng-wang-nv Feb 28, 2023
e9c65a3
Merge branch 'dev' into 5991-add-spatial-transforms-for-lazy-resampling
yiheng-wang-nv Feb 28, 2023
95b5fab
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Feb 28, 2023
ab6e83e
Merge branch 'dev' into 4855-lazy-resampling-impl
wyli Feb 28, 2023
a05d823
resampletomatch lazy metadata
wyli Feb 28, 2023
4289d22
Merge branch 'dev' into 4855-lazy-resampling-impl
wyli Mar 1, 2023
41ffc5c
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Mar 1, 2023
3246d31
add spacing orientation tests
yiheng-wang-nv Mar 6, 2023
7fb0877
Merge branch 'dev' into 5991-add-spatial-transforms-for-lazy-resampling
yiheng-wang-nv Mar 6, 2023
0a35fc4
fix typo
yiheng-wang-nv Mar 6, 2023
8deedd6
Merge branch 'dev' into 4855-lazy-resampling-impl
yiheng-wang-nv Mar 6, 2023
120b51d
add flip
yiheng-wang-nv Mar 6, 2023
0ab2242
add resize
yiheng-wang-nv Mar 6, 2023
b527a44
Merge branch 'dev' into 5991-add-spatial-transforms-for-lazy-resampling
yiheng-wang-nv Mar 7, 2023
fde1d4c
add rotate
yiheng-wang-nv Mar 7, 2023
0d050db
Merge branch 'dev' into 4855-lazy-resampling-impl
yiheng-wang-nv Mar 7, 2023
24216ea
add rotate90
KumoLiu Mar 7, 2023
4532984
add `randrotate90`
KumoLiu Mar 7, 2023
eb026f1
Merge branch 'dev' into 4855-lazy-resampling-impl
wyli Mar 7, 2023
5f52bce
add affine resampler
yiheng-wang-nv Mar 8, 2023
39a6e2d
add rotate changes
yiheng-wang-nv Mar 8, 2023
ee3a170
Merge branch '5991-add-spatial-transforms-for-lazy-resampling' of git…
yiheng-wang-nv Mar 8, 2023
311af25
fix format
yiheng-wang-nv Mar 8, 2023
7bc6adc
remove wrong case
yiheng-wang-nv Mar 8, 2023
a43fe9f
fixes affine align_corners=False
wyli Mar 8, 2023
e97135c
sync affine changes
yiheng-wang-nv Mar 8, 2023
6a24b72
sync scale_affine
yiheng-wang-nv Mar 8, 2023
f389d87
Merge branch 'dev' into 5991-add-spatial-transforms-for-lazy-resampling
yiheng-wang-nv Mar 8, 2023
7568310
add `RandRotate`
KumoLiu Mar 8, 2023
17a71c7
Merge branch '5991-add-spatial-transforms-for-lazy-resampling' of htt…
KumoLiu Mar 8, 2023
4b873f2
add `RandFlip`
KumoLiu Mar 8, 2023
446c92c
add `RandAxisFlip`
KumoLiu Mar 8, 2023
42ed3c4
fixes integration tests
wyli Mar 8, 2023
f7e8829
simplify
wyli Mar 8, 2023
701d996
simplify normalize xform
wyli Mar 8, 2023
71b33ae
modify affine tests and remove align corner false cases
yiheng-wang-nv Mar 9, 2023
a4a3791
Merge branch '5991-add-spatial-transforms-for-lazy-resampling' of git…
yiheng-wang-nv Mar 9, 2023
b74c471
add `Zoom` and `RandZoom`
KumoLiu Mar 9, 2023
e606d8f
update
wyli Mar 9, 2023
a66dcf0
Merge branch 'dev' into 4855-lazy-resampling-impl
wyli Mar 9, 2023
dcca15c
update based on comments
wyli Mar 9, 2023
5d524e1
update tests
wyli Mar 9, 2023
fb3af37
fixes tests
wyli Mar 9, 2023
4fc05f7
consistency tests
wyli Mar 10, 2023
7c85332
revert test cases
wyli Mar 10, 2023
e96f890
adds consistency tests
wyli Mar 10, 2023
e36ffa9
fixes tests
wyli Mar 10, 2023
47eabb8
fixes tests
wyli Mar 10, 2023
5cb680f
update tests
wyli Mar 10, 2023
b1e407a
comment issue cases
yiheng-wang-nv Mar 10, 2023
334f41a
Merge branch '5991-add-spatial-transforms-for-lazy-resampling' of git…
yiheng-wang-nv Mar 10, 2023
97d66b7
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 10, 2023
717a6fb
update based on comments
wyli Mar 10, 2023
d9e796c
fix `Zoom` and `RandZoom`
KumoLiu Mar 10, 2023
0d8647e
backward comp
wyli Mar 10, 2023
7492249
sync with align corner updates
yiheng-wang-nv Mar 10, 2023
ac6fc5d
Merge branch '5991-add-spatial-transforms-for-lazy-resampling' of git…
yiheng-wang-nv Mar 10, 2023
cd3655c
fixes randaffined
wyli Mar 10, 2023
7857114
update based on comments
wyli Mar 10, 2023
b14ea84
fixes tests
wyli Mar 10, 2023
8dd7d7c
update zerocentred convention
wyli Mar 10, 2023
b0cb023
fix affine issues
yiheng-wang-nv Mar 13, 2023
fe41dd9
Merge branch 'dev' into 5991-add-spatial-transforms-for-lazy-resampling
yiheng-wang-nv Mar 13, 2023
45bc5c1
fix mypy and doc errors
yiheng-wang-nv Mar 13, 2023
309a2cd
Merge branch '5991-add-spatial-transforms-for-lazy-resampling' of git…
yiheng-wang-nv Mar 13, 2023
53351a7
add combine test
yiheng-wang-nv Mar 13, 2023
6307b20
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 13, 2023
43ed113
modify test name
yiheng-wang-nv Mar 13, 2023
c1a1508
Merge branch '5991-add-spatial-transforms-for-lazy-resampling' of git…
yiheng-wang-nv Mar 13, 2023
1068548
add crop
yiheng-wang-nv Mar 13, 2023
898d309
fix flake8
yiheng-wang-nv Mar 13, 2023
8014037
skip combine lazy in min test
yiheng-wang-nv Mar 13, 2023
a94ceb0
Merge branch 'dev' into 4855-lazy-resampling-impl
wyli Mar 13, 2023
cc89d7a
review shape[1:] usage
wyli Mar 13, 2023
f38dbd5
review shape[1:] usage
wyli Mar 13, 2023
c517c45
Merge remote-tracking branch 'upstream/dev' into 5991-add-spatial-tra…
wyli Mar 13, 2023
a793e86
update transforms combine
yiheng-wang-nv Mar 13, 2023
a2de379
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 13, 2023
c7453b3
unify mode padding for each case
yiheng-wang-nv Mar 13, 2023
da283a4
unify align corners
yiheng-wang-nv Mar 13, 2023
f9643c3
fixes test case
wyli Mar 13, 2023
13cd446
modify tests according to Wenqi's suggests
yiheng-wang-nv Mar 13, 2023
3174565
Merge branch '5991-add-spatial-transforms-for-lazy-resampling' into 4…
wyli Mar 13, 2023
3fb2d76
skip reflection mode distortion cuda
wyli Mar 13, 2023
22633fd
Merge branch '5991-add-spatial-transforms-for-lazy-resampling' into 4…
wyli Mar 13, 2023
5123c99
update tests
wyli Mar 13, 2023
4593856
update tests
wyli Mar 13, 2023
afe51ed
fixes tests
wyli Mar 13, 2023
b03f5d4
fixes tests
wyli Mar 13, 2023
f49aee6
integration tests
wyli Mar 13, 2023
62043bc
remove unused
wyli Mar 13, 2023
83fcbeb
update based on comments; update documentations
wyli Mar 14, 2023
a469c5f
resolves mode/padding mode
wyli Mar 14, 2023
fbead51
remove warning msg; update mode
wyli Mar 14, 2023
a2e93e5
update resampling
wyli Mar 14, 2023
005e0c9
optimize
wyli Mar 14, 2023
589af32
Merge remote-tracking branch 'upstream' into 5991-add-spatial-transfo…
wyli Mar 14, 2023
d504dea
fixes
wyli Mar 14, 2023
691232f
Merge remote-tracking branch 'upstream/dev' into 5991-add-spatial-tra…
wyli Mar 14, 2023
eac3186
Merge branch '5991-add-spatial-transforms-for-lazy-resampling' into 4…
wyli Mar 14, 2023
03abdad
Merge remote-tracking branch 'upstream/dev' into 4855-lazy-resampling…
wyli Mar 14, 2023
16dbe56
Merge branch 'dev' into 4855-lazy-resampling-impl
wyli Mar 15, 2023
ee30197
Merge branch 'dev' into 4855-lazy-resampling-impl
wyli Mar 15, 2023
bcf4c4e
Merge branch 'dev' into 4855-lazy-resampling-impl
wyli Mar 18, 2023
909d059
Merge branch 'dev' into 4855-lazy-resampling-impl
wyli Mar 21, 2023
1c02c81
Merge branch 'dev' into 4855-lazy-resampling-impl
wyli Mar 21, 2023
471886b
Merge branch 'dev' into 4855-lazy-resampling-impl
wyli Mar 21, 2023
bf13373
Merge branch 'dev' into 4855-lazy-resampling-impl
wyli Mar 22, 2023
d4b4f5b
Replacement of numerous new lazy parameters on Compose.__init__ with …
atbenmurray Mar 22, 2023
349dc09
Merge branch 'dev' into 4855-lazy-resampling-impl
wyli Mar 22, 2023
40932e0
Fix for non-trivial mappings between override_keys and _keys (#21)
atbenmurray Mar 22, 2023
17e4134
Merge branch 'dev' into 4855-lazy-resampling-impl
wyli Mar 23, 2023
12821a5
Merge branch 'dev' into 4855-lazy-resampling-impl
wyli Mar 23, 2023
9d1fe50
update integration tests
wyli Mar 23, 2023
cc73d6d
update device
wyli Mar 23, 2023
b8674bf
update
wyli Mar 23, 2023
91adea2
update
wyli Mar 23, 2023
732ecd0
update based on comments
wyli Mar 23, 2023
28b60b9
update based on comments
wyli Mar 23, 2023
c7ec452
update default flag
wyli Mar 23, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions docs/source/transforms.rst
Original file line number Diff line number Diff line change
Expand Up @@ -2206,3 +2206,9 @@ Utilities

.. automodule:: monai.transforms.utils_pytorch_numpy_unification
:members:

Lazy
----
.. automodule:: monai.transforms.lazy
:members:
:imported-members:
12 changes: 12 additions & 0 deletions monai/data/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -322,7 +322,9 @@ def _pre_transform(self, item_transformed):
break
# this is to be consistent with CacheDataset even though it's not in a multi-thread situation.
_xform = deepcopy(_transform) if isinstance(_transform, ThreadUnsafe) else _transform
item_transformed = self.transform.evaluate_with_overrides(item_transformed, _xform)
item_transformed = apply_transform(_xform, item_transformed)
item_transformed = self.transform.evaluate_with_overrides(item_transformed, None)
if self.reset_ops_id:
reset_ops_id(item_transformed)
return item_transformed
Expand All @@ -348,7 +350,9 @@ def _post_transform(self, item_transformed):
or not isinstance(_transform, Transform)
):
start_post_randomize_run = True
item_transformed = self.transform.evaluate_with_overrides(item_transformed, _transform)
item_transformed = apply_transform(_transform, item_transformed)
item_transformed = self.transform.evaluate_with_overrides(item_transformed, None)
return item_transformed

def _cachecheck(self, item_transformed):
Expand Down Expand Up @@ -496,7 +500,9 @@ def _pre_transform(self, item_transformed):
if i == self.cache_n_trans:
break
_xform = deepcopy(_transform) if isinstance(_transform, ThreadUnsafe) else _transform
item_transformed = self.transform.evaluate_with_overrides(item_transformed, _xform)
item_transformed = apply_transform(_xform, item_transformed)
item_transformed = self.transform.evaluate_with_overrides(item_transformed, None)
reset_ops_id(item_transformed)
return item_transformed

Expand All @@ -514,7 +520,9 @@ def _post_transform(self, item_transformed):
raise ValueError("transform must be an instance of monai.transforms.Compose.")
for i, _transform in enumerate(self.transform.transforms):
if i >= self.cache_n_trans:
item_transformed = self.transform.evaluate_with_overrides(item_transformed, item_transformed)
item_transformed = apply_transform(_transform, item_transformed)
item_transformed = self.transform.evaluate_with_overrides(item_transformed, None)
return item_transformed


Expand Down Expand Up @@ -884,7 +892,9 @@ def _load_cache_item(self, idx: int):
if isinstance(_transform, RandomizableTrait) or not isinstance(_transform, Transform):
break
_xform = deepcopy(_transform) if isinstance(_transform, ThreadUnsafe) else _transform
item = self.transform.evaluate_with_overrides(item, _xform)
item = apply_transform(_xform, item)
item = self.transform.evaluate_with_overrides(item, None)
if self.as_contiguous:
item = convert_to_contiguous(item, memory_format=torch.contiguous_format)
return item
Expand Down Expand Up @@ -921,7 +931,9 @@ def _transform(self, index: int):
start_run = True
if self.copy_cache:
data = deepcopy(data)
data = self.transform.evaluate_with_overrides(data, _transform)
data = apply_transform(_transform, data)
data = self.transform.evaluate_with_overrides(data, None)
return data


Expand Down
9 changes: 9 additions & 0 deletions monai/data/meta_obj.py
Original file line number Diff line number Diff line change
Expand Up @@ -214,6 +214,15 @@ def pending_operations(self) -> list[dict]:
return self._pending_operations
return MetaObj.get_default_applied_operations() # the same default as applied_ops

@property
def has_pending_operations(self) -> bool:
    """
    Whether this object currently holds lazily pending operations.

    Returns:
        True if there are pending operations; False if not
    """
    pending = self.pending_operations
    # ``pending_operations`` yields either ``None`` or a list; both the
    # ``None`` case and the empty-list case are falsy, so ``bool`` covers
    # the original ``is not None and len(...) > 0`` check exactly.
    return bool(pending)

def push_pending_operation(self, t: Any) -> None:
self._pending_operations.append(t)

Expand Down
2 changes: 1 addition & 1 deletion monai/data/meta_tensor.py
Original file line number Diff line number Diff line change
Expand Up @@ -492,7 +492,7 @@ def peek_pending_affine(self):
continue
res = convert_to_dst_type(res, next_matrix)[0]
next_matrix = monai.data.utils.to_affine_nd(r, next_matrix)
res = monai.transforms.lazy.utils.combine_transforms(res, next_matrix)
res = monai.transforms.lazy.utils.combine_transforms(res, next_matrix) # type: ignore
return res

def peek_pending_rank(self):
Expand Down
193 changes: 178 additions & 15 deletions monai/transforms/compose.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,20 +21,95 @@
import numpy as np

import monai
import monai.transforms as mt
from monai.apps.utils import get_logger
from monai.transforms.inverse import InvertibleTransform

# For backwards compatibility (so this still works: from monai.transforms.compose import MapTransform)
from monai.transforms.transform import ( # noqa: F401
LazyTransform,
MapTransform,
Randomizable,
RandomizableTransform,
Transform,
apply_transform,
)
from monai.utils import MAX_SEED, ensure_tuple, get_seed
from monai.utils.enums import TraceKeys
from monai.utils import MAX_SEED, TraceKeys, ensure_tuple, get_seed
from monai.utils.misc import to_tuple_of_dictionaries

__all__ = ["Compose", "OneOf", "RandomOrder"]
logger = get_logger(__name__)

__all__ = ["Compose", "OneOf", "RandomOrder", "evaluate_with_overrides"]


def evaluate_with_overrides(
    data,
    upcoming,
    lazy_evaluation: bool | None = False,
    overrides: dict | None = None,
    override_keys: Sequence[str] | None = None,
    verbose: bool = False,
):
    """
    Evaluate lazily pending operations on ``data`` when the pipeline state calls for it.

    Previously applied transforms may have been lazily applied to a ``MetaTensor``
    ``data``, leaving ``data.has_pending_operations`` True. Given the upcoming
    transform ``upcoming``, this function decides whether ``data.pending_operations``
    should be evaluated now; if so, it evaluates the lazily applied transforms.

    Currently, the pending operations are evaluated only when all of the following hold:

    - ``lazy_evaluation`` is ``True``, AND
    - the data is a ``MetaTensor`` and has pending operations, AND
    - the upcoming transform is an instance of ``Identity`` or ``IdentityD`` or ``None``.

    Dictionaries, lists and tuples are processed recursively, element by element.
    The returned ``data`` is then ready for the ``upcoming`` transform.

    Args:
        data: data to be evaluated.
        upcoming: the upcoming transform.
        lazy_evaluation: whether to evaluate the pending operations.
        overrides: keyword arguments to apply transforms.
        override_keys: to which the override arguments are used when apply transforms.
        verbose: whether to print debugging info when evaluate MetaTensor with pending operations.

    """
    if not lazy_evaluation:
        return data  # eager evaluation
    overrides = dict(overrides or {})  # shallow copy; leave the caller's dict untouched
    if isinstance(data, monai.data.MetaTensor):
        ready_to_apply = upcoming is None or isinstance(upcoming, (mt.Identityd, mt.Identity))
        if data.has_pending_operations and ready_to_apply:
            data, _ = mt.apply_transforms(data, None, overrides=overrides)
            if verbose:
                next_name = "final output" if upcoming is None else f"'{upcoming.__class__.__name__}'"
                logger.info(f"Evaluated - '{override_keys}' - up-to-date for - {next_name}")
        elif verbose:
            logger.info(
                f"Lazy - '{override_keys}' - upcoming: '{upcoming.__class__.__name__}'"
                f"- pending {len(data.pending_operations)}"
            )
        return data
    override_keys = ensure_tuple(override_keys)
    if isinstance(data, dict):
        if isinstance(upcoming, MapTransform):
            applied_keys = {k for k in data if k in upcoming.keys}
            if not applied_keys:
                return data  # the upcoming transform touches none of these keys
        else:
            applied_keys = set(data.keys())

        keys_to_override = applied_keys.intersection(override_keys)
        # one override dictionary per entry of ``override_keys``, in the same order
        dict_overrides = to_tuple_of_dictionaries(overrides, override_keys)
        for key in data:
            key_overrides = dict_overrides[override_keys.index(key)] if key in keys_to_override else None
            data[key] = evaluate_with_overrides(data[key], upcoming, lazy_evaluation, key_overrides, key, verbose)
        return data

    if isinstance(data, (list, tuple)):
        # NOTE(review): a tuple input is returned as a list here (as in the original);
        # callers appear to tolerate the coercion — confirm before "fixing".
        return [evaluate_with_overrides(v, upcoming, lazy_evaluation, overrides, override_keys, verbose) for v in data]
    return data


class Compose(Randomizable, InvertibleTransform):
Expand Down Expand Up @@ -114,7 +189,21 @@ class Compose(Randomizable, InvertibleTransform):
log_stats: whether to log the detailed information of data and applied transform when error happened,
for NumPy array and PyTorch Tensor, log the data shape and value range,
for other metadata, log the values directly. default to `False`.

lazy_evaluation: whether to enable lazy evaluation for lazy transforms. If False, transforms will be
carried out on a transform by transform basis. If True, all lazy transforms will
be executed by accumulating changes and resampling as few times as possible.
A `monai.transforms.Identity[D]` transform in the pipeline will trigger the evaluation of
the pending operations and make the primary data up-to-date.
overrides: this optional parameter allows you to specify a dictionary of parameters that should be overridden
when executing a pipeline. Each parameter in the dictionary that is compatible with a given transform is
applied to that transform before it is executed. Note that overrides are currently only applied when
lazy_evaluation is True; if lazy_evaluation is False they are ignored.
currently supported args are:
{``"mode"``, ``"padding_mode"``, ``"dtype"``, ``"align_corners"``, ``"resample_mode"``, ``device``},
please see also :py:func:`monai.transforms.lazy.apply_transforms` for more details.
override_keys: this optional parameter specifies the keys to which ``overrides`` are to be applied. If
``overrides`` is set, ``override_keys`` must also be set.
verbose: whether to print debugging info when lazy_evaluation=True.
"""

def __init__(
Expand All @@ -123,6 +212,10 @@ def __init__(
map_items: bool = True,
unpack_items: bool = False,
log_stats: bool = False,
lazy_evaluation: bool | None = None,
overrides: dict | None = None,
override_keys: Sequence[str] | None = None,
verbose: bool = False,
) -> None:
if transforms is None:
transforms = []
Expand All @@ -132,6 +225,16 @@ def __init__(
self.log_stats = log_stats
self.set_random_state(seed=get_seed())

self.lazy_evaluation = lazy_evaluation
self.overrides = overrides
self.override_keys = override_keys
self.verbose = verbose

if self.lazy_evaluation is not None:
for t in self.flatten().transforms: # TODO: test Compose of Compose/OneOf
if isinstance(t, LazyTransform):
t.lazy_evaluation = self.lazy_evaluation

def set_random_state(self, seed: int | None = None, state: np.random.RandomState | None = None) -> Compose:
super().set_random_state(seed=seed, state=state)
for _transform in self.transforms:
Expand Down Expand Up @@ -172,9 +275,26 @@ def __len__(self):
"""Return number of transformations."""
return len(self.flatten().transforms)

def evaluate_with_overrides(self, input_, upcoming_xform):
    """
    Evaluate pending lazy operations on ``input_`` using this instance's
    lazy-evaluation settings (``lazy_evaluation``, ``overrides``,
    ``override_keys``, ``verbose``).

    Args:
        input_: input data to be transformed.
        upcoming_xform: a transform used to determine whether to evaluate with override.
    """
    lazy_kwargs = dict(
        lazy_evaluation=self.lazy_evaluation,
        overrides=self.overrides,
        override_keys=self.override_keys,
        verbose=self.verbose,
    )
    return evaluate_with_overrides(input_, upcoming_xform, **lazy_kwargs)

def __call__(self, input_):
    """Apply each composed transform to ``input_`` in order, checking before
    every transform (and once after the last) whether pending lazy operations
    should be evaluated."""
    result = input_
    for xform in self.transforms:
        # possibly flush pending operations before the next transform runs
        result = self.evaluate_with_overrides(result, xform)
        result = apply_transform(xform, result, self.map_items, self.unpack_items, self.log_stats)
    # ``None`` signals the end of the pipeline: evaluate anything still pending
    return self.evaluate_with_overrides(result, None)

def inverse(self, data):
Expand Down Expand Up @@ -204,7 +324,21 @@ class OneOf(Compose):
log_stats: whether to log the detailed information of data and applied transform when error happened,
for NumPy array and PyTorch Tensor, log the data shape and value range,
for other metadata, log the values directly. default to `False`.

lazy_evaluation: whether to enable lazy evaluation for lazy transforms. If True, all lazy transforms will
be executed by accumulating changes and resampling as few times as possible. If False, transforms will be
carried out on a transform by transform basis.
A `monai.transforms.Identity[D]` transform in the pipeline will trigger the evaluation of
the pending operations and make the primary data up-to-date.
overrides: this optional parameter allows you to specify a dictionary of parameters that should be overridden
when executing a pipeline. Each parameter in the dictionary that is compatible with a given transform is
applied to that transform before it is executed. Note that overrides are currently only applied when
lazy_evaluation is True; if lazy_evaluation is False they are ignored.
currently supported args are:
{``"mode"``, ``"padding_mode"``, ``"dtype"``, ``"align_corners"``, ``"resample_mode"``, ``device``},
please see also :py:func:`monai.transforms.lazy.apply_transforms` for more details.
override_keys: this optional parameter specifies the keys to which ``overrides`` are to be applied. If
``overrides`` is set, ``override_keys`` must also be set.
verbose: whether to print debugging info when lazy_evaluation=True.
"""

def __init__(
Expand All @@ -214,8 +348,14 @@ def __init__(
map_items: bool = True,
unpack_items: bool = False,
log_stats: bool = False,
lazy_evaluation: bool | None = None,
overrides: dict | None = None,
override_keys: Sequence[str] | None = None,
verbose: bool = False,
) -> None:
super().__init__(transforms, map_items, unpack_items, log_stats)
super().__init__(
transforms, map_items, unpack_items, log_stats, lazy_evaluation, overrides, override_keys, verbose
)
if len(self.transforms) == 0:
weights = []
elif weights is None or isinstance(weights, float):
Expand Down Expand Up @@ -265,8 +405,8 @@ def __call__(self, data):
self.push_transform(data, extra_info={"index": index})
elif isinstance(data, Mapping):
for key in data: # dictionary not change size during iteration
if isinstance(data[key], monai.data.MetaTensor) or self.trace_key(key) in data:
self.push_transform(data, key, extra_info={"index": index})
if isinstance(data[key], monai.data.MetaTensor):
self.push_transform(data[key], extra_info={"index": index})
return data

def inverse(self, data):
Expand All @@ -278,7 +418,7 @@ def inverse(self, data):
index = self.pop_transform(data)[TraceKeys.EXTRA_INFO]["index"]
elif isinstance(data, Mapping):
for key in data:
if isinstance(data[key], monai.data.MetaTensor) or self.trace_key(key) in data:
if isinstance(data[key], monai.data.MetaTensor):
index = self.pop_transform(data, key)[TraceKeys.EXTRA_INFO]["index"]
else:
raise RuntimeError(
Expand Down Expand Up @@ -306,7 +446,21 @@ class RandomOrder(Compose):
log_stats: whether to log the detailed information of data and applied transform when error happened,
for NumPy array and PyTorch Tensor, log the data shape and value range,
for other metadata, log the values directly. default to `False`.

lazy_evaluation: whether to enable lazy evaluation for lazy transforms. If True, all lazy transforms will
be executed by accumulating changes and resampling as few times as possible. If False, transforms will be
carried out on a transform by transform basis.
A `monai.transforms.Identity[D]` transform in the pipeline will trigger the evaluation of
the pending operations and make the primary data up-to-date.
overrides: this optional parameter allows you to specify a dictionary of parameters that should be overridden
when executing a pipeline. Each parameter in the dictionary that is compatible with a given transform is
applied to that transform before it is executed. Note that overrides are currently only applied when
lazy_evaluation is True; if lazy_evaluation is False they are ignored.
currently supported args are:
{``"mode"``, ``"padding_mode"``, ``"dtype"``, ``"align_corners"``, ``"resample_mode"``, ``device``},
please see also :py:func:`monai.transforms.lazy.apply_transforms` for more details.
override_keys: this optional parameter specifies the keys to which ``overrides`` are to be applied. If
``overrides`` is set, ``override_keys`` must also be set.
verbose: whether to print debugging info when lazy_evaluation=True.
"""

def __init__(
Expand All @@ -315,8 +469,14 @@ def __init__(
map_items: bool = True,
unpack_items: bool = False,
log_stats: bool = False,
lazy_evaluation: bool | None = None,
overrides: dict | None = None,
override_keys: Sequence[str] | None = None,
verbose: bool = False,
) -> None:
super().__init__(transforms, map_items, unpack_items, log_stats)
super().__init__(
transforms, map_items, unpack_items, log_stats, lazy_evaluation, overrides, override_keys, verbose
)

def __call__(self, input_):
if len(self.transforms) == 0:
Expand All @@ -331,8 +491,8 @@ def __call__(self, input_):
self.push_transform(input_, extra_info={"applied_order": applied_order})
elif isinstance(input_, Mapping):
for key in input_: # dictionary not change size during iteration
if isinstance(input_[key], monai.data.MetaTensor) or self.trace_key(key) in input_:
self.push_transform(input_, key, extra_info={"applied_order": applied_order})
if isinstance(input_[key], monai.data.MetaTensor):
self.push_transform(input_[key], extra_info={"applied_order": applied_order})
return input_

def inverse(self, data):
Expand All @@ -344,7 +504,7 @@ def inverse(self, data):
applied_order = self.pop_transform(data)[TraceKeys.EXTRA_INFO]["applied_order"]
elif isinstance(data, Mapping):
for key in data:
if isinstance(data[key], monai.data.MetaTensor) or self.trace_key(key) in data:
if isinstance(data[key], monai.data.MetaTensor):
applied_order = self.pop_transform(data, key)[TraceKeys.EXTRA_INFO]["applied_order"]
else:
raise RuntimeError(
Expand All @@ -356,5 +516,8 @@ def inverse(self, data):

# loop backwards over transforms
for o in reversed(applied_order):
data = apply_transform(self.transforms[o].inverse, data, self.map_items, self.unpack_items, self.log_stats)
if isinstance(self.transforms[o], InvertibleTransform):
data = apply_transform(
self.transforms[o].inverse, data, self.map_items, self.unpack_items, self.log_stats
)
return data
Loading