feat: Trainer state as a trainer controller metric (#150)
Signed-off-by: Padmanabha V Seshadri <seshapad@in.ibm.com>
Co-authored-by: Alex Brooks <alex.brooks@ibm.com>
seshapad and alex-jw-brooks authored May 9, 2024
1 parent e8e9f21 commit 521a463
Showing 5 changed files with 109 additions and 1 deletion.
3 changes: 3 additions & 0 deletions tests/data/trainercontroller/__init__.py
@@ -22,6 +22,9 @@
TRAINER_CONFIG_TEST_LOSS_ON_THRESHOLD_YAML = os.path.join(
_DATA_DIR, "loss_on_threshold.yaml"
)
TRAINER_CONFIG_TEST_LOSS_ON_THRESHOLD_WITH_TRAINER_STATE_YAML = os.path.join(
_DATA_DIR, "loss_on_threshold_with_trainer_state.yaml"
)
TRAINER_CONFIG_TEST_MALICIOUS_OS_RULE_YAML = os.path.join(
_DATA_DIR, "loss_with_malicious_os_rule.yaml"
)
12 changes: 12 additions & 0 deletions tests/data/trainercontroller/loss_on_threshold_with_trainer_state.yaml
@@ -0,0 +1,12 @@
controller-metrics:
- name: state
class: TrainingState
- name: loss
class: Loss
controllers:
- name: loss-controller
triggers:
- on_log
rule: loss < 2 and state["epoch"] >= 0.5
operations:
- hfcontrols.should_training_stop
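
The rule above combines two metrics by their configured names: loss (a scalar computed by the Loss handler) and state (the trainer-state dictionary exposed by the new TrainingState handler). A minimal sketch of the idea, assuming the controller evaluates the rule string against a namespace of computed metric values (plain eval() is used purely for illustration, not as the library's actual rule engine):

# Hypothetical sketch: each controller metric contributes a named value,
# and the rule string is evaluated in that namespace.
metrics = {
    "loss": 0.9,                                 # from the Loss handler
    "state": {"epoch": 0.6, "global_step": 30},  # from TrainingState
}
rule = 'loss < 2 and state["epoch"] >= 0.5'
should_stop = eval(rule, {"__builtins__": {}}, metrics)  # illustration only
print(should_stop)  # True, so the controller would fire should_training_stop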
19 changes: 18 additions & 1 deletion tests/trainercontroller/test_tuning_trainercontroller.py
@@ -67,7 +67,8 @@ def _setup_data() -> InputData:
{"loss": 2.1, "epoch": 0.25},
{"loss": 1.3, "epoch": 0.5},
{"loss": 0.9, "epoch": 0.6},
]
],
epoch=0.6,
),
)
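
Note that the fixture's TrainerState now also sets epoch=0.6, which is what lets the trainer-state rule in the new test below evaluate state["epoch"] >= 0.5 to True.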

@@ -88,6 +89,22 @@ def test_loss_on_threshold():
assert control.should_training_stop == True


def test_loss_on_threshold_with_trainer_state():
"""Tests the loss threshold with trainer state example in
`examples/trainer-controller-configs/loss_on_threshold_with_trainer_state.yaml`
"""
test_data = _setup_data()
tc_callback = tc.TrainerControllerCallback(
td.TRAINER_CONFIG_TEST_LOSS_ON_THRESHOLD_WITH_TRAINER_STATE_YAML
)
control = TrainerControl(should_training_stop=False)
# Trigger on_init_end to perform registration of handlers to events
tc_callback.on_init_end(args=test_data.args, state=test_data.state, control=control)
# Trigger rule and test the condition
tc_callback.on_log(args=test_data.args, state=test_data.state, control=control)
assert control.should_training_stop == True


def test_custom_metric_handler():
"""Tests the custom metric registration
`examples/trainer-controller-configs/loss_custom_metric.yaml`
2 changes: 2 additions & 0 deletions tuning/trainercontroller/controllermetrics/__init__.py
@@ -20,6 +20,7 @@

# Local
from .loss import Loss
from .trainingstate import TrainingState

# List of metric handlers
handlers = []
@@ -35,4 +36,5 @@ def register(cl: Type):


# Register the default metric handlers in this package here
register(TrainingState)
register(Loss)
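
The body of register is elided in the hunk above; presumably it just appends the class to the module-level handlers list so the controller can instantiate all default metric handlers. A minimal sketch under that assumption:

# Assumed implementation of the registry (body elided in the diff above).
from typing import List, Type

handlers: List[Type] = []

def register(cl: Type):
    # Append a metric-handler class to the default registry.
    handlers.append(cl)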
74 changes: 74 additions & 0 deletions tuning/trainercontroller/controllermetrics/trainingstate.py
@@ -0,0 +1,74 @@
# Copyright The IBM Tuning Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# SPDX-License-Identifier: Apache-2.0
# https://spdx.dev/learn/handling-license-info/

# Standard
from typing import Any
import dataclasses

# Third Party
from transformers import TrainerState

# Local
from tuning.trainercontroller.controllermetrics.metricshandler import MetricHandler


class TrainingState(MetricHandler):
"""Implements the controller metric which exposes the trainer state"""

def __init__(self, **kwargs):
"""Initializes the metric handler, by registering the event \
list and arguments with base handler.
Args:
kwargs: List of arguments (key, value)-pairs
"""
super().__init__(
events=[
"on_init_end",
"on_step_end",
"on_epoch_begin",
"on_epoch_end",
"on_prediction_step",
"on_predict",
"on_log",
"on_train_end",
"on_train_begin",
"on_evaluate",
],
**kwargs
)

def validate(self) -> bool:
"""Validate the training arguments (e.g logging_steps) are \
compatible with the computation of this metric.
Returns:
bool
"""
return True

def compute(self, state: TrainerState = None, **kwargs) -> Any:
"""Exposes the trainer state.
Args:
state: TrainerState object
kwargs: Remaining event arguments
Returns:
dict: Trainer state as a dictionary
"""
return dataclasses.asdict(state)
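
Since compute simply flattens the Hugging Face TrainerState dataclass with dataclasses.asdict, a rule can index any TrainerState field by name. A quick standalone illustration (the values here are made up):

# Demonstrates the dict shape that TrainingState.compute returns.
import dataclasses
from transformers import TrainerState

state = TrainerState(epoch=0.6, global_step=30)
state.log_history = [{"loss": 0.9, "epoch": 0.6}]
state_dict = dataclasses.asdict(state)
print(state_dict["epoch"])        # 0.6
print(state_dict["log_history"])  # [{'loss': 0.9, 'epoch': 0.6}]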
