Remove 1.x API (1/N) (#1323)
* Update all callers of the experimental APIs
---------
Signed-off-by: yiliu30 <yi4.liu@intel.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Signed-off-by: bmyrcha <bartosz.myrcha@intel.com>
yiliu30 authored and bmyrcha committed Oct 24, 2023
1 parent 7b20588 commit 83805b2
Showing 13 changed files with 429 additions and 563 deletions.
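The change is mechanical: each caller is repointed from the deprecated neural_compressor.experimental.* namespace to its consolidated 2.x counterpart. A minimal before/after sketch of the pattern, using import paths taken directly from the diffs below:

    # 1.x experimental import (removed by this commit):
    # from neural_compressor.experimental.common.criterion import TensorflowKnowledgeDistillationLoss

    # 2.x replacement, as used in neural_compressor/adaptor/tensorflow.py below:
    from neural_compressor.compression.distillation.criterions import TensorflowKnowledgeDistillationLoss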
4 changes: 2 additions & 2 deletions neural_compressor/adaptor/tensorflow.py
@@ -190,7 +190,7 @@ def train(self, model, dataloader, optimizer_tuple, criterion_tuple, hooks, post
callbacks = kwargs["kwargs"].get("callbacks", None)
execution_mode = kwargs["kwargs"].get("execution_mode", None)
distributed = getattr(dataloader, "distributed", False)
-         from neural_compressor.experimental.common.criterion import TensorflowKnowledgeDistillationLoss
+         from neural_compressor.compression.distillation.criterions import TensorflowKnowledgeDistillationLoss

if isinstance(criterion, TensorflowKnowledgeDistillationLoss):
input_model = model._model
@@ -1757,8 +1757,8 @@ def _get_mse_order(

def _partial_dataset_of(self, dataloader, confidence_batches):
"""Partial dataset."""
+         from neural_compressor.data.datasets.dummy_dataset import DummyDataset
          from neural_compressor.data.datasets.dummy_dataset import DummyDataset as DummyDataset_v2_x
-         from neural_compressor.experimental.data.datasets.dummy_dataset import DummyDataset

if isinstance(dataloader.dataset, DummyDataset) or isinstance(dataloader.dataset, DummyDataset_v2_x):
assert isinstance(confidence_batches, int)
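In the second hunk, both dummy-dataset names appear to resolve to the same 2.x class after the change, so the type guard keeps working. A hedged sketch of that guard (the function name takes_partial_dataset is illustrative, not from the repo):

    from neural_compressor.data.datasets.dummy_dataset import DummyDataset
    from neural_compressor.data.datasets.dummy_dataset import DummyDataset as DummyDataset_v2_x

    def takes_partial_dataset(dataloader, confidence_batches):
        # Mirrors the guard in _partial_dataset_of: only the dummy datasets
        # are sliced down to a fixed number of confidence batches.
        if isinstance(dataloader.dataset, (DummyDataset, DummyDataset_v2_x)):
            assert isinstance(confidence_batches, int)
            return True
        return False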
130 changes: 125 additions & 5 deletions neural_compressor/compression/distillation/criterions.py
@@ -17,6 +17,8 @@
"""Initialize critetion classes.
Classes includes:
TensorFlowCrossEntropyLoss, PyTorchCrossEntropyLoss,
TensorFlowSparseCategoricalCrossentropy,
TensorflowKnowledgeDistillationLoss, PyTorchKnowledgeDistillationLoss,
PyTorchIntermediateLayersKnowledgeDistillationLoss.
"""
@@ -91,7 +93,12 @@ def __getitem__(self, criterion_type):
Returns:
cls: criterion class.
"""
-         assert criterion_type in self.criterions.keys(), "only support criterions in {}".format(self.criterions.keys())
+         assert (
+             criterion_type in self.criterions.keys()
+         ), "only support criterions in {} \
+ , but got criterion type {}".format(
+             self.criterions.keys(), criterion_type
+         )

return self.criterions[criterion_type]

@@ -130,6 +137,119 @@ def decorator_criterion(cls):
return decorator_criterion


@criterion_registry("CrossEntropyLoss", "tensorflow")
class TensorFlowCrossEntropyLoss(object):
"""TensorFlow CrossEntropyLoss criterion."""

def __init__(self, param_dict):
"""Initialize the Datasets class.
Args:
param_dict (dict): The dict of parameters setting by user for CrossEntropyLoss criterion.
"""
assert isinstance(param_dict, dict), "This criterion constructor parameter must be a dict"
self._param_dict = param_dict

def _mapping(self):
_param_map = {"reduction": "reduction", "from_logits": "from_logits"}
_dict = {}
for key in self._param_dict:
if key in _param_map:
if key == "reduction":
assert self._param_dict[key] in [
"auto",
"none",
"sum",
"sum_over_batch_size",
], "Supported reduction value for tensorflow is auto, none, sum, sum_over_batch_size"
_dict.update({_param_map[key]: self._param_dict[key]})
return _dict

def __call__(self):
"""Call the TensorFlowCrossEntropyLoss.
Returns:
cls: criterion class.
param_dict(dict): param_dict
"""
return tf.keras.losses.CategoricalCrossentropy, self._mapping()


@criterion_registry("SparseCategoricalCrossentropy", "tensorflow")
class TensorFlowSparseCategoricalCrossentropy(object):
"""TensorFlow SparseCategoricalCrossentropyLoss criterion."""

def __init__(self, param_dict):
"""Initialize the Datasets class.
Args:
param_dict (string): param_dict.
"""
assert isinstance(param_dict, dict), "This criterion constructor parameter must be a dict"
self._param_dict = param_dict

def _mapping(self):
_param_map = {"reduction": "reduction", "from_logits": "from_logits"}
_dict = {}
for key in self._param_dict:
if key in _param_map:
if key == "reduction":
assert self._param_dict[key] in [
"auto",
"none",
"sum",
"sum_over_batch_size",
], "Supported reduction value for tensorflow is auto, none, sum, sum_over_batch_size"
_dict.update({_param_map[key]: self._param_dict[key]})
return _dict

def __call__(self):
"""Call the TensorFlowSparseCategoricalCrossentropy.
Returns:
cls: criterion class.
param_dict(dict): param_dict
"""
return tf.keras.losses.SparseCategoricalCrossentropy, self._mapping()


@criterion_registry("CrossEntropyLoss", "pytorch")
class PyTorchCrossEntropyLoss(object):
"""PyTorch CrossEntropyLoss criterion."""

def __init__(self, param_dict):
"""Initialize the PyTorchCrossEntropyLoss class.
Args:
param_dict (string): param_dict.
"""
assert isinstance(param_dict, dict), "This criterion constructor parameter must be a dict"
self._param_dict = param_dict

def _mapping(self):
_param_map = {"reduction": "reduction"}
_dict = {}
for key in self._param_dict:
if key in _param_map:
if key == "reduction":
assert self._param_dict[key] in [
"none",
"mean",
"sum",
], "Supported reduction value is none, mean, sum"
_dict.update({_param_map[key]: self._param_dict[key]})
return _dict

def __call__(self):
"""Call the PyTorchCrossEntropyLoss.
Returns:
cls: criterion class.
param_dict(dict): param_dict
"""
return torch.nn.CrossEntropyLoss, self._mapping()
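Each registered wrapper validates the user's param dict and returns a (loss_class, kwargs) pair rather than a loss instance. A hypothetical usage sketch, assuming the PyTorchCrossEntropyLoss class above is in scope:

    import torch

    wrapper = PyTorchCrossEntropyLoss({"reduction": "mean"})
    loss_cls, loss_kwargs = wrapper()  # (torch.nn.CrossEntropyLoss, {"reduction": "mean"})
    criterion = loss_cls(**loss_kwargs)

    logits = torch.randn(4, 10)
    labels = torch.randint(0, 10, (4,))
    print(criterion(logits, labels))  # scalar mean cross-entropy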


class KnowledgeDistillationFramework(object):
"""Knowledge Distillation Framework."""

@@ -916,7 +1036,7 @@ def register_hooks_for_models(self):
Raises:
AttributeError: AttributeError
"""
-         from neural_compressor.experimental.common import torch_utils
+         from neural_compressor.compression.distillation import utility

def register_model_forward_hook(model, path, output_process="", student=False):
module = model
@@ -927,7 +1047,7 @@ def register_model_forward_hook(model, path, output_process="", student=False):
module = module.__getattr__(node)
except:
raise AttributeError("There is no path {} in the model.".format(path))
-             return module.register_forward_hook(torch_utils.get_activation(path, output_process, student))
+             return module.register_forward_hook(utility.get_activation(path, output_process, student))

assert isinstance(self.student_model, torch.nn.Module) and isinstance(self.teacher_model, torch.nn.Module), (
"Expect student_model and teacher_model to be an torch.nn.Module object, "
student_output_process, teacher_output_process = self.layer_output_process[idx]
st_handle = register_model_forward_hook(self.student_model, student_layer, student_output_process, True)
te_handle = register_model_forward_hook(self.teacher_model, teacher_layer, teacher_output_process)
-         torch_utils.STUDENT_FEATURES = self.student_features
-         torch_utils.TEACHER_FEATURES = self.teacher_features
+         utility.STUDENT_FEATURES = self.student_features
+         utility.TEACHER_FEATURES = self.teacher_features
self.hook_handles.extend([st_handle, te_handle])

def remove_all_hooks(self):
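The student/teacher feature stores these hooks fill are module-level containers that simply move from the 1.x torch_utils module to the new utility module. A self-contained sketch of the forward-hook pattern involved, assuming get_activation behaves like a standard PyTorch feature collector (the helper below is illustrative, not the library's implementation):

    import torch

    FEATURES = {}  # stand-in for utility.STUDENT_FEATURES / utility.TEACHER_FEATURES

    def get_activation(path):
        # Illustrative: record each module output under its dotted path.
        def hook(module, inputs, output):
            FEATURES.setdefault(path, []).append(output.detach())
        return hook

    model = torch.nn.Sequential(torch.nn.Linear(8, 4), torch.nn.ReLU())
    handle = model[0].register_forward_hook(get_activation("0"))
    model(torch.randn(2, 8))
    print(FEATURES["0"][0].shape)  # torch.Size([2, 4])
    handle.remove()  # what remove_all_hooks() does for every stored handle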
