diff --git a/opacus/optimizers/optimizer.py b/opacus/optimizers/optimizer.py
index bbd554a6..bb6c5fa9 100644
--- a/opacus/optimizers/optimizer.py
+++ b/opacus/optimizers/optimizer.py
@@ -15,6 +15,7 @@
 from __future__ import annotations
 
 import logging
+from collections import defaultdict
 from typing import Callable, List, Optional, Union
 
 import torch
@@ -376,6 +377,50 @@ def accumulated_iterations(self) -> int:
         )
         return vals[0]
 
+    @property
+    def param_groups(self) -> List[dict]:
+        """
+        Returns a list of dicts, one per parameter group managed by the optimizer.
+        """
+        return self.original_optimizer.param_groups
+
+    @param_groups.setter
+    def param_groups(self, param_groups: List[dict]):
+        """
+        Updates the param_groups of the optimizer, where param_groups is a list of dicts,
+        one per parameter group managed by the optimizer.
+        """
+        self.original_optimizer.param_groups = param_groups
+
+
+    @property
+    def state(self) -> defaultdict:
+        """
+        Returns a dictionary holding current optimization state.
+        """
+        return self.original_optimizer.state
+
+    @state.setter
+    def state(self, state: defaultdict):
+        """
+        Updates the state of the optimizer, where state is a dictionary holding current optimization state.
+        """
+        self.original_optimizer.state = state
+
+    @property
+    def defaults(self) -> dict:
+        """
+        Returns a dictionary containing default values for optimization.
+        """
+        return self.original_optimizer.defaults
+
+    @defaults.setter
+    def defaults(self, defaults: dict):
+        """
+        Updates the defaults of the optimizer, where defaults is a dictionary containing default values for optimization.
+        """
+        self.original_optimizer.defaults = defaults
+
     def attach_step_hook(self, fn: Callable[[DPOptimizer], None]):
         """
         Attaches a hook to be executed after gradient clipping/noising, but before the
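
Not part of the patch — a minimal usage sketch of what the forwarded attributes enable, assuming the standard PrivacyEngine.make_private() entry point; the toy model, data, and hyperparameters below are illustrative placeholders. Because param_groups now resolves to the wrapped original_optimizer's groups, a stock torch LR scheduler can be attached to the DPOptimizer returned by make_private(), and its updates are visible on both the wrapper and the underlying optimizer.

# Usage sketch (illustrative, not part of the diff): a torch LR scheduler reads and
# writes optimizer.param_groups, which the new properties forward to original_optimizer.
import torch
from torch import nn
from torch.utils.data import DataLoader, TensorDataset
from opacus import PrivacyEngine

model = nn.Linear(4, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
data_loader = DataLoader(
    TensorDataset(torch.randn(64, 4), torch.randint(0, 2, (64,))), batch_size=8
)

model, optimizer, data_loader = PrivacyEngine().make_private(
    module=model,
    optimizer=optimizer,  # returned as a DPOptimizer wrapping the SGD instance
    data_loader=data_loader,
    noise_multiplier=1.0,
    max_grad_norm=1.0,
)

# DPOptimizer subclasses torch.optim.Optimizer, and StepLR reads/writes its param_groups.
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1, gamma=0.5)

for x, y in data_loader:
    if len(x) == 0:  # Poisson sampling can yield empty batches; skip them in this sketch
        continue
    optimizer.zero_grad()
    nn.functional.cross_entropy(model(x), y).backward()
    optimizer.step()
scheduler.step()

# The scheduler mutated optimizer.param_groups; via the getter/setter pair this is the
# same list of dicts seen by the wrapped optimizer.
assert optimizer.param_groups is optimizer.original_optimizer.param_groups
print(optimizer.param_groups[0]["lr"])  # 0.05 after one StepLR step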