Remove deprecated args and class for v1.3 #7019

Merged 19 commits on Sep 22, 2023; the diff below shows changes from 2 commits.
36 changes: 0 additions & 36 deletions docs/source/transforms.rst
@@ -996,24 +996,12 @@ Utility
:members:
:special-members: __call__

`AsChannelFirst`
""""""""""""""""
.. autoclass:: AsChannelFirst
:members:
:special-members: __call__

`AsChannelLast`
"""""""""""""""
.. autoclass:: AsChannelLast
:members:
:special-members: __call__

`AddChannel`
""""""""""""
.. autoclass:: AddChannel
:members:
:special-members: __call__

`EnsureChannelFirst`
""""""""""""""""""""
.. autoclass:: EnsureChannelFirst
@@ -1032,12 +1020,6 @@ Utility
:members:
:special-members: __call__

`SplitChannel`
""""""""""""""
.. autoclass:: SplitChannel
:members:
:special-members: __call__

`CastToType`
""""""""""""
.. autoclass:: CastToType
@@ -1974,24 +1956,12 @@ Utility (Dict)
:members:
:special-members: __call__

`AsChannelFirstd`
"""""""""""""""""
.. autoclass:: AsChannelFirstd
:members:
:special-members: __call__

`AsChannelLastd`
""""""""""""""""
.. autoclass:: AsChannelLastd
:members:
:special-members: __call__

`AddChanneld`
"""""""""""""
.. autoclass:: AddChanneld
:members:
:special-members: __call__

`EnsureChannelFirstd`
"""""""""""""""""""""
.. autoclass:: EnsureChannelFirstd
@@ -2010,12 +1980,6 @@ Utility (Dict)
:members:
:special-members: __call__

`SplitChanneld`
"""""""""""""""
.. autoclass:: SplitChanneld
:members:
:special-members: __call__

`CastToTyped`
"""""""""""""
.. autoclass:: CastToTyped
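
Migration note (not part of the diff): the deleted transforms all had replacements in place before v1.3. `EnsureChannelFirst`/`EnsureChannelFirstd` subsume `AddChannel`/`AsChannelFirst` and their dict variants, and `SplitDim`/`SplitDimd` subsume `SplitChannel`/`SplitChanneld`; I don't see a direct stand-in for `AsChannelLast`. A minimal sketch of the replacements, assuming plain tensors with an explicit `channel_dim` hint:

```python
import torch
from monai.transforms import EnsureChannelFirst, SplitDim

# EnsureChannelFirst replaces AddChannel/AsChannelFirst: given a channel_dim
# hint (or MetaTensor metadata) it creates or moves the channel axis.
img = torch.rand(64, 64, 32)                             # no channel axis
chw = EnsureChannelFirst(channel_dim="no_channel")(img)  # -> (1, 64, 64, 32)

last = torch.rand(64, 64, 3)                             # channel-last input
cfirst = EnsureChannelFirst(channel_dim=-1)(last)        # -> (3, 64, 64)

# SplitDim replaces SplitChannel: split along the channel (or any) dimension.
r, g, b = SplitDim(dim=0)(cfirst)                        # each (1, 64, 64)
```
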
7 changes: 0 additions & 7 deletions monai/apps/auto3dseg/ensemble_builder.py
@@ -332,13 +332,6 @@ class AlgoEnsembleBuilder:

"""

@deprecated_arg(
"data_src_cfg_filename",
since="1.2",
removed="1.3",
new_name="data_src_cfg_name",
msg_suffix="please use `data_src_cfg_name` instead.",
)
def __init__(self, history: Sequence[dict[str, Any]], data_src_cfg_name: str | None = None):
self.infer_algos: list[dict[AlgoKeys, Any]] = []
self.ensemble: AlgoEnsemble
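
With the shim removed, passing `data_src_cfg_filename` now raises a `TypeError` instead of a deprecation warning. A minimal migration sketch (the filename below is a placeholder):

```python
from monai.apps.auto3dseg import AlgoEnsembleBuilder

history: list[dict] = []  # AlgoKeys records produced by AutoRunner/BundleGen training

# Before (warned since 1.2, removed in 1.3):
#   AlgoEnsembleBuilder(history, data_src_cfg_filename="task.yaml")
# After:
builder = AlgoEnsembleBuilder(history, data_src_cfg_name="task.yaml")
```
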
3 changes: 1 addition & 2 deletions monai/bundle/scripts.py
@@ -704,7 +704,6 @@ def get_bundle_info(
return bundle_info[version] # type: ignore[no-any-return]


@deprecated_arg("runner_id", since="1.1", removed="1.3", new_name="run_id", msg_suffix="please use `run_id` instead.")
def run(
run_id: str | None = None,
init_id: str | None = None,
@@ -766,7 +765,7 @@ def run(
will patch the target config content with `tracking handlers` and the top-level items of `configs`.
for detailed usage examples, please check the tutorial:
https://github.com/Project-MONAI/tutorials/blob/main/experiment_management/bundle_integrate_mlflow.ipynb.
args_file: a JSON or YAML file to provide default values for `runner_id`, `meta_file`,
args_file: a JSON or YAML file to provide default values for `run_id`, `meta_file`,
`config_file`, `logging`, and override pairs. so that the command line inputs can be simplified.
override: id-value pairs to override or add the corresponding config content.
e.g. ``--net#input_chns 42``, ``--net %/data/other.json#net_arg``.
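
Keyword callers of `monai.bundle.run` must switch from `runner_id` to `run_id`; the decorator that silently remapped the old name is gone. A hedged sketch of the updated invocation, with placeholder paths:

```python
from monai.bundle import run

# Before (deprecated since 1.1): run(runner_id="training", ...)
# After:
run(
    run_id="training",                  # id of the expression to execute in the config
    config_file="configs/train.json",   # placeholder path
    meta_file="configs/metadata.json",  # placeholder path
)
```

The Fire-based CLI takes the same first parameter, e.g. `python -m monai.bundle run training --config_file configs/train.json`.
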
1 change: 0 additions & 1 deletion monai/engines/__init__.py
@@ -12,7 +12,6 @@
from __future__ import annotations

from .evaluator import EnsembleEvaluator, Evaluator, SupervisedEvaluator
from .multi_gpu_supervised_trainer import create_multigpu_supervised_evaluator, create_multigpu_supervised_trainer
from .trainer import GanTrainer, SupervisedTrainer, Trainer
from .utils import (
IterationEvents,
180 changes: 0 additions & 180 deletions monai/engines/multi_gpu_supervised_trainer.py

This file was deleted.

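`create_multigpu_supervised_trainer` and `create_multigpu_supervised_evaluator` are removed outright; nothing in this PR replaces them. The usual substitute is to wrap the network yourself and hand it to `SupervisedTrainer`. A self-contained sketch under that assumption (toy model and data purely for illustration; `DataParallel` for brevity, `DistributedDataParallel` slots in the same way):

```python
import torch
from torch.utils.data import DataLoader
from monai.engines import SupervisedTrainer

# toy dict-style dataset: the default prepare_batch expects "image"/"label" keys
data = [{"image": torch.rand(1, 16, 16), "label": torch.rand(1, 16, 16)} for _ in range(8)]
loader = DataLoader(data, batch_size=2)

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
net = torch.nn.Conv2d(1, 1, 3, padding=1).to(device)
if torch.cuda.device_count() > 1:
    net = torch.nn.DataParallel(net)  # the wrapping the deleted helper used to do

trainer = SupervisedTrainer(
    device=device,
    max_epochs=2,
    train_data_loader=loader,
    network=net,
    optimizer=torch.optim.Adam(net.parameters(), lr=1e-3),
    loss_function=torch.nn.MSELoss(),
)
trainer.run()
```
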
21 changes: 3 additions & 18 deletions monai/handlers/tensorboard_handlers.py
@@ -87,18 +87,14 @@ class TensorBoardStatsHandler(TensorBoardHandler):

"""

@deprecated_arg("epoch_interval", since="1.1", removed="1.3")
@deprecated_arg("iteration_interval", since="1.1", removed="1.3")
def __init__(
self,
summary_writer: SummaryWriter | SummaryWriterX | None = None,
log_dir: str = "./runs",
iteration_log: bool | Callable[[Engine, int], bool] = True,
epoch_log: bool | Callable[[Engine, int], bool] = True,
epoch_event_writer: Callable[[Engine, Any], Any] | None = None,
epoch_interval: int = 1,
iteration_event_writer: Callable[[Engine, Any], Any] | None = None,
iteration_interval: int = 1,
output_transform: Callable = lambda x: x[0],
global_epoch_transform: Callable = lambda x: x,
state_attributes: Sequence[str] | None = None,
@@ -118,12 +114,8 @@ def __init__(
See ``iteration_log`` argument for more details.
epoch_event_writer: customized callable TensorBoard writer for epoch level.
Must accept parameter "engine" and "summary_writer", use default event writer if None.
epoch_interval: the epoch interval at which the epoch_event_writer is called. Defaults to 1.
``epoch_interval`` must be 1 if ``epoch_log`` is callable.
iteration_event_writer: customized callable TensorBoard writer for iteration level.
Must accept parameter "engine" and "summary_writer", use default event writer if None.
iteration_interval: the iteration interval at which the iteration_event_writer is called. Defaults to 1.
``iteration_interval`` must be 1 if ``iteration_log`` is callable.
output_transform: a callable that is used to transform the
``ignite.engine.state.output`` into a scalar to plot, or a dictionary of {key: scalar}.
In the latter case, the output string will be formatted as key: value.
@@ -140,19 +132,12 @@ def __init__(
when epoch completed.
tag_name: when iteration output is a scalar, tag_name is used to plot, defaults to ``'Loss'``.
"""
if callable(iteration_log) and iteration_interval > 1:
raise ValueError("If iteration_log is callable, then iteration_interval should be 1")

if callable(epoch_log) and epoch_interval > 1:
raise ValueError("If epoch_log is callable, then epoch_interval should be 1")

super().__init__(summary_writer=summary_writer, log_dir=log_dir)
self.iteration_log = iteration_log
self.epoch_log = epoch_log
self.epoch_event_writer = epoch_event_writer
self.epoch_interval = epoch_interval
self.iteration_event_writer = iteration_event_writer
self.iteration_interval = iteration_interval
self.output_transform = output_transform
self.global_epoch_transform = global_epoch_transform
self.state_attributes = state_attributes
@@ -170,15 +155,15 @@ def attach(self, engine: Engine) -> None:
event = Events.ITERATION_COMPLETED
if callable(self.iteration_log): # substitute event with new one using filter callable
event = event(event_filter=self.iteration_log)
elif self.iteration_interval > 1:
event = event(every=self.iteration_interval)
elif self.iteration_log > 1:
event = event(every=self.iteration_log)
engine.add_event_handler(event, self.iteration_completed)
if self.epoch_log and not engine.has_event_handler(self.epoch_completed, Events.EPOCH_COMPLETED):
event = Events.EPOCH_COMPLETED
if callable(self.epoch_log): # substitute event with new one using filter callable
event = event(event_filter=self.epoch_log)
elif self.epoch_log > 1:
event = event(every=self.epoch_interval)
event = event(every=self.epoch_log)
engine.add_event_handler(event, self.epoch_completed)

def epoch_completed(self, engine: Engine) -> None:
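
Net effect of the handler change: `iteration_log` and `epoch_log` each absorb the role of the removed `*_interval` argument. `True` keeps the old default, an int greater than 1 behaves like the old interval (per the new `attach()` logic above), and a callable is installed as an ignite event filter. A hedged sketch of the two new spellings:

```python
from monai.handlers import TensorBoardStatsHandler

# int > 1 plays the old iteration_interval role (write every 10 iterations):
stats_every_10 = TensorBoardStatsHandler(log_dir="./runs", iteration_log=10)

# a callable (engine, event_number) -> bool acts as an ignite event filter,
# e.g. write only at iterations 1 and 100:
stats_filtered = TensorBoardStatsHandler(
    log_dir="./runs",
    iteration_log=lambda engine, it: it in (1, 100),
)
```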