[ao] Move sparsity/experimental to sparsity/_experimental (#81149)

The experimental code under sparsity has no user-facing API and should therefore reside in a private package. This involves the pruner and base_sparsifier.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/81149
Approved by: https://github.com/macandro96
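For downstream code this is a pure import-path move; the classes themselves are unchanged. A before/after sketch of the migration, using only paths that appear in the diffs below:

    # Before this PR, the experimental classes were re-exported at the top level:
    #   from torch.ao.sparsity import BasePruner, BaseDataSparsifier
    #   from torch.ao.sparsity import DataNormSparsifier, BaseDataScheduler

    # After this PR, import them from the private subpackages directly:
    from torch.ao.sparsity._experimental.pruner import BasePruner
    from torch.ao.sparsity._experimental.data_sparsifier import BaseDataSparsifier, DataNormSparsifier
    from torch.ao.sparsity._experimental.data_scheduler import BaseDataScheduler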

@@ -1142,9 +1142,5 @@ Please take a look at `Limitations of Symbolic Tracing <https://docs-preview.pyt
 .. py:module:: torch.ao.quantization.fx
 .. py:module:: torch.ao.quantization.backend_config
 .. py:module:: torch.ao.sparsity
-.. py:module:: torch.ao.sparsity.experimental
-.. py:module:: torch.ao.sparsity.experimental.pruner
-.. py:module:: torch.ao.sparsity.experimental.data_sparsifier
-.. py:module:: torch.ao.sparsity.experimental.data_scheduler
 .. py:module:: torch.ao.sparsity.scheduler
 .. py:module:: torch.ao.sparsity.sparsifier

@@ -2,7 +2,6 @@
 # Owner(s): ["module: unknown"]
 
 import logging
-from torch.ao.sparsity import BaseDataScheduler, DataNormSparsifier
 import warnings
 from torch.testing._internal.common_utils import TestCase
 from torch import nn

@@ -10,6 +9,9 @@ import torch
 from typing import Tuple
 import copy
 
+from torch.ao.sparsity._experimental.data_sparsifier import DataNormSparsifier
+from torch.ao.sparsity._experimental.data_scheduler import BaseDataScheduler
+
 logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
 
 

@@ -6,13 +6,15 @@ import random
 import torch
 from torch.nn.utils.parametrize import is_parametrized
 from torch.testing._internal.common_utils import TestCase
-from torch.ao.sparsity import BaseDataSparsifier, DataNormSparsifier
 
 from typing import Tuple
 from torch import nn
 import itertools
 import math
 import copy
+
+from torch.ao.sparsity._experimental.data_sparsifier import BaseDataSparsifier, DataNormSparsifier
+
 logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
 
 

@@ -7,7 +7,7 @@ import logging
 
 import torch
 from torch import nn
-from torch.ao.sparsity import BasePruner, PruningParametrization, ZeroesParametrization
+from torch.ao.sparsity._experimental.pruner import BasePruner, PruningParametrization, ZeroesParametrization
 from torch.nn.utils import parametrize
 
 from torch.testing._internal.common_utils import TestCase, skipIfTorchDynamo

@@ -16,20 +16,3 @@ from .sparsifier.utils import FakeSparsity
 from .sparsifier.utils import module_to_fqn
 from .sparsifier.utils import fqn_to_module
 from .sparsifier.utils import get_arg_info_from_tensor_fqn
-# === Experimental ===
-
-# Parametrizations
-from .experimental.pruner.parametrization import PruningParametrization
-from .experimental.pruner.parametrization import ZeroesParametrization
-from .experimental.pruner.parametrization import ActivationReconstruction
-from .experimental.pruner.parametrization import BiasHook
-
-# Pruner
-from .experimental.pruner.base_pruner import BasePruner
-
-# Data Sparsifier
-from .experimental.data_sparsifier.base_data_sparsifier import BaseDataSparsifier
-from .experimental.data_sparsifier.data_norm_sparsifier import DataNormSparsifier
-
-# Data Scheduler
-from .experimental.data_scheduler.base_data_scheduler import BaseDataScheduler

@@ -0,0 +1,5 @@
+from .base_data_scheduler import BaseDataScheduler
+
+__all__ = [
+    "BaseDataScheduler",
+]

@@ -1,9 +1,10 @@
-from torch.ao.sparsity import BaseDataSparsifier
 from functools import wraps
 import weakref
 import abc
 import warnings
 
+from ..data_sparsifier import BaseDataSparsifier
+
 __all__ = ['BaseDataScheduler']
 
 
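The switch to a relative import in this hunk is forced by the move: `torch/ao/sparsity/__init__.py` no longer re-exports the experimental classes (see the hunk above), so the scheduler reaches its sibling package directly. A sketch of how the name resolves, assuming the `_experimental` layout shown in this PR:

    # In torch/ao/sparsity/_experimental/data_scheduler/base_data_scheduler.py,
    # ".." resolves to the parent package torch.ao.sparsity._experimental, so
    from ..data_sparsifier import BaseDataSparsifier
    # is equivalent to the absolute form:
    # from torch.ao.sparsity._experimental.data_sparsifier import BaseDataSparsifier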

@@ -0,0 +1,7 @@
+from .base_data_sparsifier import BaseDataSparsifier
+from .data_norm_sparsifier import DataNormSparsifier
+
+__all__ = [
+    "BaseDataSparsifier",
+    "DataNormSparsifier",
+]
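Because this new `__init__.py` re-exports the leaf modules' classes, callers can import from the package root; both forms below should resolve to the same object (a quick check, assuming the layout above):

    from torch.ao.sparsity._experimental.data_sparsifier import DataNormSparsifier
    from torch.ao.sparsity._experimental.data_sparsifier.data_norm_sparsifier import (
        DataNormSparsifier as DirectDataNormSparsifier,  # direct module path
    )
    assert DataNormSparsifier is DirectDataNormSparsifier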

@@ -2,6 +2,7 @@ import torch
 from torch.nn import functional as F
 from functools import reduce
 from typing import Tuple, Any, List
 
 from .base_data_sparsifier import BaseDataSparsifier
 
+__all__ = ['DataNormSparsifier']

@@ -5,3 +5,11 @@ from .parametrization import (
     PruningParametrization,
     ZeroesParametrization,
 )
+
+__all__ = [
+    "ActivationReconstruction",
+    "BasePruner",
+    "BiasHook",
+    "PruningParametrization",
+    "ZeroesParametrization",
+]
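The `__all__` lists added across these `__init__` files pin down each subpackage's public surface: they determine exactly what `from <pkg> import *` exposes. A minimal illustration (hypothetical usage, not part of the PR):

    # Star-importing the pruner package pulls in exactly the names in __all__:
    from torch.ao.sparsity._experimental.pruner import *  # noqa: F401,F403
    assert {"BasePruner", "PruningParametrization"} <= set(dir())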

[4 image files moved with the package, content unchanged: 167 KiB, 98 KiB, 86 KiB, 130 KiB]