[ao] Moving the sparsity/experimental to sparsity/_experimental (pytorch#81149)

The experimental code in the sparsity package does not have a user-facing API and should
reside under the private package. This involves the pruner and the base_sparsifier.
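
In practice the move only changes import paths; a minimal sketch of the old vs. new imports (illustrative, mirroring the updates to the test files in the diffs below):

# Old public paths, removed from torch/ao/sparsity/__init__.py by this commit:
#   from torch.ao.sparsity import BasePruner, BaseDataSparsifier, DataNormSparsifier, BaseDataScheduler

# New private paths after the move:
from torch.ao.sparsity._experimental.pruner import BasePruner
from torch.ao.sparsity._experimental.data_sparsifier import BaseDataSparsifier, DataNormSparsifier
from torch.ao.sparsity._experimental.data_scheduler import BaseDataScheduler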

Pull Request resolved: pytorch#81149
Approved by: https://github.com/macandro96
z-a-f authored and pytorchmergebot committed Jul 9, 2022
1 parent b98b9ea commit 68ec793
Showing 21 changed files with 30 additions and 25 deletions.
4 changes: 0 additions & 4 deletions docs/source/quantization.rst
@@ -1142,9 +1142,5 @@ Please take a look at `Limitations of Symbolic Tracing <https://docs-preview.pyt
 .. py:module:: torch.ao.quantization.fx
 .. py:module:: torch.ao.quantization.backend_config
 .. py:module:: torch.ao.sparsity
-.. py:module:: torch.ao.sparsity.experimental
-.. py:module:: torch.ao.sparsity.experimental.pruner
-.. py:module:: torch.ao.sparsity.experimental.data_sparsifier
-.. py:module:: torch.ao.sparsity.experimental.data_scheduler
 .. py:module:: torch.ao.sparsity.scheduler
 .. py:module:: torch.ao.sparsity.sparsifier
4 changes: 3 additions & 1 deletion test/ao/sparsity/test_data_scheduler.py
@@ -2,14 +2,16 @@
 # Owner(s): ["module: unknown"]
 
 import logging
-from torch.ao.sparsity import BaseDataScheduler, DataNormSparsifier
 import warnings
 from torch.testing._internal.common_utils import TestCase
 from torch import nn
 import torch
 from typing import Tuple
 import copy
 
+from torch.ao.sparsity._experimental.data_sparsifier import DataNormSparsifier
+from torch.ao.sparsity._experimental.data_scheduler import BaseDataScheduler
+
 logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
4 changes: 3 additions & 1 deletion test/ao/sparsity/test_data_sparsifier.py
@@ -6,13 +6,15 @@
 import torch
 from torch.nn.utils.parametrize import is_parametrized
 from torch.testing._internal.common_utils import TestCase
-from torch.ao.sparsity import BaseDataSparsifier, DataNormSparsifier
 
 from typing import Tuple
 from torch import nn
 import itertools
 import math
 import copy
+
+from torch.ao.sparsity._experimental.data_sparsifier import BaseDataSparsifier, DataNormSparsifier
+
 logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
2 changes: 1 addition & 1 deletion test/ao/sparsity/test_pruner.py
@@ -7,7 +7,7 @@
 
 import torch
 from torch import nn
-from torch.ao.sparsity import BasePruner, PruningParametrization, ZeroesParametrization
+from torch.ao.sparsity._experimental.pruner import BasePruner, PruningParametrization, ZeroesParametrization
 from torch.nn.utils import parametrize
 
 from torch.testing._internal.common_utils import TestCase, skipIfTorchDynamo
17 changes: 0 additions & 17 deletions torch/ao/sparsity/__init__.py
@@ -16,20 +16,3 @@
 from .sparsifier.utils import module_to_fqn
 from .sparsifier.utils import fqn_to_module
 from .sparsifier.utils import get_arg_info_from_tensor_fqn
-# === Experimental ===
-
-# Parametrizations
-from .experimental.pruner.parametrization import PruningParametrization
-from .experimental.pruner.parametrization import ZeroesParametrization
-from .experimental.pruner.parametrization import ActivationReconstruction
-from .experimental.pruner.parametrization import BiasHook
-
-# Pruner
-from .experimental.pruner.base_pruner import BasePruner
-
-# Data Sparsifier
-from .experimental.data_sparsifier.base_data_sparsifier import BaseDataSparsifier
-from .experimental.data_sparsifier.data_norm_sparsifier import DataNormSparsifier
-
-# Data Scheduler
-from .experimental.data_scheduler.base_data_scheduler import BaseDataScheduler
File renamed without changes.
5 changes: 5 additions & 0 deletions torch/ao/sparsity/_experimental/data_scheduler/__init__.py
@@ -0,0 +1,5 @@
+from .base_data_scheduler import BaseDataScheduler
+
+__all__ = [
+    "BaseDataScheduler",
+]
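
The new package __init__ files re-export the experimental classes and declare __all__, so a wildcard import exposes only the intended names; a minimal, illustrative sketch:

# Only the names listed in __all__ are bound by a wildcard import:
from torch.ao.sparsity._experimental.data_scheduler import *
scheduler_cls = BaseDataScheduler  # re-exported by the package; internal helpers are not pulled in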
torch/ao/sparsity/_experimental/data_scheduler/base_data_scheduler.py
@@ -1,9 +1,10 @@
-from torch.ao.sparsity import BaseDataSparsifier
 from functools import wraps
 import weakref
 import abc
 import warnings
 
+from ..data_sparsifier import BaseDataSparsifier
+
 __all__ = ['BaseDataScheduler']
7 changes: 7 additions & 0 deletions torch/ao/sparsity/_experimental/data_sparsifier/__init__.py
@@ -0,0 +1,7 @@
+from .base_data_sparsifier import BaseDataSparsifier
+from .data_norm_sparsifier import DataNormSparsifier
+
+__all__ = [
+    "BaseDataSparsifier",
+    "DataNormSparsifier",
+]
torch/ao/sparsity/_experimental/data_sparsifier/data_norm_sparsifier.py
@@ -2,6 +2,7 @@
 from torch.nn import functional as F
 from functools import reduce
 from typing import Tuple, Any, List
+
 from .base_data_sparsifier import BaseDataSparsifier
 
 __all__ = ['DataNormSparsifier']
torch/ao/sparsity/_experimental/pruner/__init__.py
@@ -5,3 +5,11 @@
     PruningParametrization,
     ZeroesParametrization,
 )
+
+__all__ = [
+    "ActivationReconstruction",
+    "BasePruner",
+    "BiasHook",
+    "PruningParametrization",
+    "ZeroesParametrization",
+]
Empty file.
Empty file.
