diff --git a/docs/source/_rst/_code.rst b/docs/source/_rst/_code.rst index 687e36f39..7b6e2504c 100644 --- a/docs/source/_rst/_code.rst +++ b/docs/source/_rst/_code.rst @@ -174,10 +174,10 @@ Optimizers and Schedulers .. toctree:: :titlesonly: - Optimizer - Scheduler - TorchOptimizer - TorchScheduler + Optimizer Interface + Scheduler Interface + Torch Optimizer + Torch Scheduler Adaptive Functions diff --git a/docs/source/_rst/optim/optimizer_interface.rst b/docs/source/_rst/optim/optimizer_interface.rst index afd62f6a0..23a933bae 100644 --- a/docs/source/_rst/optim/optimizer_interface.rst +++ b/docs/source/_rst/optim/optimizer_interface.rst @@ -1,7 +1,7 @@ -Optimizer -============ +Optimizer Interface +===================== .. currentmodule:: pina.optim.optimizer_interface -.. autoclass:: pina._src.optim.optimizer_interface.Optimizer +.. autoclass:: pina._src.optim.optimizer_interface.OptimizerInterface :members: :show-inheritance: \ No newline at end of file diff --git a/docs/source/_rst/optim/scheduler_interface.rst b/docs/source/_rst/optim/scheduler_interface.rst index 0795c34e3..03b3e83f7 100644 --- a/docs/source/_rst/optim/scheduler_interface.rst +++ b/docs/source/_rst/optim/scheduler_interface.rst @@ -1,7 +1,7 @@ -Scheduler -============= +Scheduler Interface +===================== .. currentmodule:: pina.optim.scheduler_interface -.. autoclass:: pina._src.optim.scheduler_interface.Scheduler +.. autoclass:: pina._src.optim.scheduler_interface.SchedulerInterface :members: :show-inheritance: \ No newline at end of file diff --git a/docs/source/_rst/optim/torch_optimizer.rst b/docs/source/_rst/optim/torch_optimizer.rst index 67ab59164..54bfe9a3a 100644 --- a/docs/source/_rst/optim/torch_optimizer.rst +++ b/docs/source/_rst/optim/torch_optimizer.rst @@ -1,4 +1,4 @@ -TorchOptimizer +Torch Optimizer =============== .. currentmodule:: pina.optim.torch_optimizer diff --git a/docs/source/_rst/optim/torch_scheduler.rst b/docs/source/_rst/optim/torch_scheduler.rst index 272ba631f..59260533e 100644 --- a/docs/source/_rst/optim/torch_scheduler.rst +++ b/docs/source/_rst/optim/torch_scheduler.rst @@ -1,4 +1,4 @@ -TorchScheduler +Torch Scheduler =============== .. currentmodule:: pina.optim.torch_scheduler diff --git a/pina/_src/optim/optimizer_interface.py b/pina/_src/optim/optimizer_interface.py index 5f2fbe66a..b60e23624 100644 --- a/pina/_src/optim/optimizer_interface.py +++ b/pina/_src/optim/optimizer_interface.py @@ -1,23 +1,30 @@ -"""Module for the PINA Optimizer.""" +"""Module for the Optimizer Interface.""" from abc import ABCMeta, abstractmethod -class Optimizer(metaclass=ABCMeta): +class OptimizerInterface(metaclass=ABCMeta): """ - Abstract base class for defining an optimizer. All specific optimizers - should inherit form this class and implement the required methods. + Abstract interface for all optimizers. """ - @property @abstractmethod - def instance(self): + def hook(self, parameters): """ - Abstract property to retrieve the optimizer instance. + Execute custom logic associated with the optimizer instance. + + This method is intended to encapsulate any additional behavior that + should be triggered during the optimization process. + + :param dict parameters: The parameters of the model to be optimized. """ + @property @abstractmethod - def hook(self): + def instance(self): """ - Abstract method to define the hook logic for the optimizer. + The underlying optimizer object. + + :return: The optimizer instance. 
+ :rtype: object """ diff --git a/pina/_src/optim/scheduler_interface.py b/pina/_src/optim/scheduler_interface.py index 5ae5d8b99..55951ee0e 100644 --- a/pina/_src/optim/scheduler_interface.py +++ b/pina/_src/optim/scheduler_interface.py @@ -1,23 +1,31 @@ -"""Module for the PINA Scheduler.""" +"""Module for the Scheduler Interface.""" from abc import ABCMeta, abstractmethod -class Scheduler(metaclass=ABCMeta): +class SchedulerInterface(metaclass=ABCMeta): """ - Abstract base class for defining a scheduler. All specific schedulers should - inherit form this class and implement the required methods. + Abstract interface for all schedulers. """ - @property @abstractmethod - def instance(self): + def hook(self, optimizer): """ - Abstract property to retrieve the scheduler instance. + Execute custom logic associated with the scheduler instance. + + This method is intended to encapsulate any additional behavior that + should be triggered during the optimization process. + + :param OptimizerInterface optimizer: The optimizer instance associated + with the scheduler. """ + @property @abstractmethod - def hook(self): + def instance(self): """ - Abstract method to define the hook logic for the scheduler. + The underlying scheduler object. + + :return: The scheduler instance. + :rtype: object """ diff --git a/pina/_src/optim/torch_optimizer.py b/pina/_src/optim/torch_optimizer.py index f01d3b3cb..a37bfbfec 100644 --- a/pina/_src/optim/torch_optimizer.py +++ b/pina/_src/optim/torch_optimizer.py @@ -1,35 +1,46 @@ -"""Module for the PINA Torch Optimizer""" +"""Module for wrapping PyTorch optimizers.""" import torch - from pina._src.core.utils import check_consistency -from pina._src.optim.optimizer_interface import Optimizer +from pina._src.optim.optimizer_interface import OptimizerInterface -class TorchOptimizer(Optimizer): +class TorchOptimizer(OptimizerInterface): """ - A wrapper class for using PyTorch optimizers. + The wrapper class for PyTorch optimizers. + + This class wraps a ``torch.optim.Optimizer`` class and defers its + instantiation until runtime. It enables a consistent interface across + different optimizer backends while leveraging PyTorch’s optimization + algorithms. """ def __init__(self, optimizer_class, **kwargs): """ Initialization of the :class:`TorchOptimizer` class. - :param torch.optim.Optimizer optimizer_class: A - :class:`torch.optim.Optimizer` class. - :param dict kwargs: Additional parameters passed to ``optimizer_class``, - see more + :param torch.optim.Optimizer optimizer_class: The subclass of + ``torch.optim.Optimizer`` to be instantiated. + :param dict kwargs: Additional keyword arguments forwarded to the + optimizer constructor. See more `here `_. + :raises ValueError: If ``optimizer_class`` is not a subclass of + ``torch.optim.Optimizer``. """ + # Check consistency check_consistency(optimizer_class, torch.optim.Optimizer, subclass=True) + # Initialize attributes self.optimizer_class = optimizer_class self.kwargs = kwargs self._optimizer_instance = None def hook(self, parameters): """ - Initialize the optimizer instance with the given parameters. + Execute custom logic associated with the optimizer instance. + + This method is intended to encapsulate any additional behavior that + should be triggered during the optimization process. :param dict parameters: The parameters of the model to be optimized. """ @@ -40,7 +51,7 @@ def hook(self, parameters): @property def instance(self): """ - Get the optimizer instance. + The underlying optimizer object. 
:return: The optimizer instance. :rtype: torch.optim.Optimizer diff --git a/pina/_src/optim/torch_scheduler.py b/pina/_src/optim/torch_scheduler.py index bf9927836..f33b6020f 100644 --- a/pina/_src/optim/torch_scheduler.py +++ b/pina/_src/optim/torch_scheduler.py @@ -1,34 +1,35 @@ -"""Module for the PINA Torch Optimizer""" - -try: - from torch.optim.lr_scheduler import LRScheduler # torch >= 2.0 -except ImportError: - from torch.optim.lr_scheduler import ( - _LRScheduler as LRScheduler, - ) # torch < 2.0 +"""Module for wrapping PyTorch schedulers.""" +from torch.optim.lr_scheduler import LRScheduler from pina._src.core.utils import check_consistency -from pina._src.optim.optimizer_interface import Optimizer -from pina._src.optim.scheduler_interface import Scheduler +from pina._src.optim.optimizer_interface import OptimizerInterface +from pina._src.optim.scheduler_interface import SchedulerInterface -class TorchScheduler(Scheduler): +class TorchScheduler(SchedulerInterface): """ - A wrapper class for using PyTorch schedulers. + The wrapper class for PyTorch schedulers. + + This class wraps a ``torch.optim.lr_scheduler.LRScheduler`` class and defers + its instantiation until runtime, once the optimizer instance is available. """ def __init__(self, scheduler_class, **kwargs): """ Initialization of the :class:`TorchScheduler` class. - :param torch.optim.LRScheduler scheduler_class: A - :class:`torch.optim.LRScheduler` class. - :param dict kwargs: Additional parameters passed to ``scheduler_class``, - see more - `here _`. + :param torch.optim.LRScheduler scheduler_class: The subclass of + ``torch.optim.lr_scheduler.LRScheduler`` to be instantiated. + :param dict kwargs: Additional keyword arguments forwarded to the + scheduler constructor. See more + `here `_. + :raises ValueError: If ``scheduler_class`` is not a subclass of + ``torch.optim.lr_scheduler.LRScheduler``. """ + # Check consistency check_consistency(scheduler_class, LRScheduler, subclass=True) + # Initialize attributes self.scheduler_class = scheduler_class self.kwargs = kwargs self._scheduler_instance = None @@ -37,9 +38,15 @@ def hook(self, optimizer): """ Initialize the scheduler instance with the given parameters. - :param dict parameters: The parameters of the optimizer. + :param OptimizerInterface optimizer: The optimizer instance associated + with the scheduler. + :raises ValueError: If ``optimizer`` is not an instance of + :class:`OptimizerInterface`. """ - check_consistency(optimizer, Optimizer) + # Check consistency + check_consistency(optimizer, OptimizerInterface) + + # Initialize the scheduler instance self._scheduler_instance = self.scheduler_class( optimizer.instance, **self.kwargs ) @@ -47,9 +54,9 @@ def hook(self, optimizer): @property def instance(self): """ - Get the scheduler instance. + The underlying scheduler object. - :return: The scheduelr instance. - :rtype: torch.optim.LRScheduler + :return: The scheduler instance. + :rtype: torch.optim.lr_scheduler.LRScheduler """ return self._scheduler_instance diff --git a/pina/_src/solver/autoregressive_solver/autoregressive_solver.py b/pina/_src/solver/autoregressive_solver/autoregressive_solver.py index 58bf8bdca..31133018a 100644 --- a/pina/_src/solver/autoregressive_solver/autoregressive_solver.py +++ b/pina/_src/solver/autoregressive_solver/autoregressive_solver.py @@ -53,10 +53,10 @@ def __init__( :param torch.nn.Module loss: The loss function to be minimized. If ``None``, the :class:`torch.nn.MSELoss` loss is used. Default is ``None``. 
- :param Optimizer optimizer: The optimizer to be used. + :param OptimizerInterface optimizer: The optimizer to be used. If ``None``, the :class:`torch.optim.Adam` optimizer is used. Default is ``None``. - :param Scheduler scheduler: Learning rate scheduler. + :param SchedulerInterface scheduler: Learning rate scheduler. If ``None``, the :class:`torch.optim.lr_scheduler.ConstantLR` scheduler is used. Default is ``None``. :param WeightingInterface weighting: The weighting schema to be used. diff --git a/pina/_src/solver/ensemble_solver/ensemble_pinn.py b/pina/_src/solver/ensemble_solver/ensemble_pinn.py index af117d702..743b3db09 100644 --- a/pina/_src/solver/ensemble_solver/ensemble_pinn.py +++ b/pina/_src/solver/ensemble_solver/ensemble_pinn.py @@ -92,10 +92,10 @@ def __init__( :param torch.nn.Module loss: The loss function to be minimized. If ``None``, the :class:`torch.nn.MSELoss` loss is used. Default is ``None``. - :param Optimizer optimizer: The optimizer to be used. + :param OptimizerInterface optimizers: The optimizers to be used. If ``None``, the :class:`torch.optim.Adam` optimizer is used. Default is ``None``. - :param Scheduler scheduler: Learning rate scheduler. + :param SchedulerInterface schedulers: Learning rate schedulers. If ``None``, the :class:`torch.optim.lr_scheduler.ConstantLR` scheduler is used. Default is ``None``. :param WeightingInterface weighting: The weighting schema to be used. diff --git a/pina/_src/solver/ensemble_solver/ensemble_solver_interface.py b/pina/_src/solver/ensemble_solver/ensemble_solver_interface.py index ed0fc2d29..0134e3a98 100644 --- a/pina/_src/solver/ensemble_solver/ensemble_solver_interface.py +++ b/pina/_src/solver/ensemble_solver/ensemble_solver_interface.py @@ -61,10 +61,10 @@ def __init__( :param BaseProblem problem: The problem to be solved. :param torch.nn.Module models: The neural network models to be used. - :param Optimizer optimizer: The optimizer to be used. + :param OptimizerInterface optimizers: The optimizers to be used. If ``None``, the :class:`torch.optim.Adam` optimizer is used. Default is ``None``. - :param Scheduler scheduler: Learning rate scheduler. + :param SchedulerInterface schedulers: Learning rate schedulers. If ``None``, the :class:`torch.optim.lr_scheduler.ConstantLR` scheduler is used. Default is ``None``. :param WeightingInterface weighting: The weighting schema to be used. diff --git a/pina/_src/solver/ensemble_solver/ensemble_supervised.py b/pina/_src/solver/ensemble_solver/ensemble_supervised.py index e98ab7ed1..f2e26a5f2 100644 --- a/pina/_src/solver/ensemble_solver/ensemble_supervised.py +++ b/pina/_src/solver/ensemble_solver/ensemble_supervised.py @@ -81,10 +81,10 @@ def __init__( :param torch.nn.Module loss: The loss function to be minimized. If ``None``, the :class:`torch.nn.MSELoss` loss is used. Default is ``None``. - :param Optimizer optimizer: The optimizer to be used. + :param OptimizerInterface optimizers: The optimizers to be used. If ``None``, the :class:`torch.optim.Adam` optimizer is used. Default is ``None``. - :param Scheduler scheduler: Learning rate scheduler. + :param SchedulerInterface schedulers: Learning rate schedulers. If ``None``, the :class:`torch.optim.lr_scheduler.ConstantLR` scheduler is used. Default is ``None``. :param WeightingInterface weighting: The weighting schema to be used. 
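The renamed wrappers keep the deferred-instantiation workflow: ``TorchOptimizer`` and ``TorchScheduler`` only build the underlying PyTorch objects when ``hook`` is called, and then expose them through ``instance``. A minimal usage sketch (the linear model is a stand-in; the calls mirror the API exercised by the new tests further down in this patch):

    import torch
    from pina.optim import TorchOptimizer, TorchScheduler

    # Wrap the classes; nothing is instantiated yet.
    optimizer = TorchOptimizer(torch.optim.Adam, lr=1e-3)
    scheduler = TorchScheduler(torch.optim.lr_scheduler.ConstantLR, factor=1.0)

    # Deferred instantiation: the optimizer needs the model parameters,
    # the scheduler needs the already-hooked optimizer.
    model = torch.nn.Linear(10, 10)
    optimizer.hook(model.parameters())
    scheduler.hook(optimizer)

    # The underlying PyTorch objects are now available.
    assert isinstance(optimizer.instance, torch.optim.Adam)
    assert isinstance(scheduler.instance, torch.optim.lr_scheduler.ConstantLR)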
diff --git a/pina/_src/solver/garom.py b/pina/_src/solver/garom.py index 29b1c67ac..d476c2d3b 100644 --- a/pina/_src/solver/garom.py +++ b/pina/_src/solver/garom.py @@ -48,18 +48,18 @@ def __init__( :param torch.nn.Module loss: The loss function to be minimized. If ``None``, :class:`~pina.loss.power_loss.PowerLoss` with ``p=1`` is used. Default is ``None``. - :param Optimizer optimizer_generator: The optimizer for the generator. - If ``None``, the :class:`torch.optim.Adam` optimizer is used. - Default is ``None``. - :param Optimizer optimizer_discriminator: The optimizer for the + :param OptimizerInterface optimizer_generator: The optimizer for the + generator. If ``None``, the :class:`torch.optim.Adam` optimizer is + used. Default is ``None``. + :param OptimizerInterface optimizer_discriminator: The optimizer for the discriminator. If ``None``, the :class:`torch.optim.Adam` optimizer is used. Default is ``None``. - :param Scheduler scheduler_generator: The learning rate scheduler for - the generator. + :param SchedulerInterface scheduler_generator: The learning rate + scheduler for the generator. If ``None``, the :class:`torch.optim.lr_scheduler.ConstantLR` scheduler is used. Default is ``None``. - :param Scheduler scheduler_discriminator: The learning rate scheduler - for the discriminator. + :param SchedulerInterface scheduler_discriminator: The learning rate + scheduler for the discriminator. If ``None``, the :class:`torch.optim.lr_scheduler.ConstantLR` scheduler is used. Default is ``None``. :param float gamma: Ratio of expected loss for generator and @@ -328,7 +328,7 @@ def optimizer_generator(self): The optimizer for the generator. :return: The optimizer for the generator. - :rtype: Optimizer + :rtype: OptimizerInterface """ return self.optimizers[0] @@ -338,7 +338,7 @@ def optimizer_discriminator(self): The optimizer for the discriminator. :return: The optimizer for the discriminator. - :rtype: Optimizer + :rtype: OptimizerInterface """ return self.optimizers[1] @@ -348,7 +348,7 @@ def scheduler_generator(self): The scheduler for the generator. :return: The scheduler for the generator. - :rtype: Scheduler + :rtype: SchedulerInterface """ return self.schedulers[0] @@ -358,6 +358,6 @@ def scheduler_discriminator(self): The scheduler for the discriminator. :return: The scheduler for the discriminator. - :rtype: Scheduler + :rtype: SchedulerInterface """ return self.schedulers[1] diff --git a/pina/_src/solver/physics_informed_solver/causal_pinn.py b/pina/_src/solver/physics_informed_solver/causal_pinn.py index cfcbbea20..c061b783f 100644 --- a/pina/_src/solver/physics_informed_solver/causal_pinn.py +++ b/pina/_src/solver/physics_informed_solver/causal_pinn.py @@ -82,10 +82,10 @@ def __init__( inherit from at least :class:`~pina.problem.time_dependent_problem.TimeDependentProblem`. :param torch.nn.Module model: The neural network model to be used. - :param Optimizer optimizer: The optimizer to be used. + :param OptimizerInterface optimizer: The optimizer to be used. If ``None``, the :class:`torch.optim.Adam` optimizer is used. Default is ``None``. - :param torch.optim.LRScheduler scheduler: Learning rate scheduler. + :param SchedulerInterface scheduler: Learning rate scheduler. If ``None``, the :class:`torch.optim.lr_scheduler.ConstantLR` scheduler is used. Default is ``None``. :param WeightingInterface weighting: The weighting schema to be used. 
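Since the solver docstrings now reference the interface types rather than the concrete wrappers, any object implementing ``OptimizerInterface`` (the abstract ``hook(parameters)`` method plus an ``instance`` property) can be passed where an optimizer is expected. A hypothetical hand-written implementation, shown only to illustrate the contract:

    import torch
    from pina.optim import OptimizerInterface

    class PlainSGD(OptimizerInterface):
        # Hypothetical wrapper implementing the interface by hand.
        def __init__(self, lr=1e-2):
            self.lr = lr
            self._instance = None

        def hook(self, parameters):
            # Build the underlying optimizer only once the parameters are known.
            self._instance = torch.optim.SGD(parameters, lr=self.lr)

        @property
        def instance(self):
            return self._instance

    optimizer = PlainSGD(lr=1e-2)
    optimizer.hook(torch.nn.Linear(4, 4).parameters())
    print(type(optimizer.instance))  # a torch.optim.SGD instance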
diff --git a/pina/_src/solver/physics_informed_solver/competitive_pinn.py b/pina/_src/solver/physics_informed_solver/competitive_pinn.py index 42096fa64..1b946e26f 100644 --- a/pina/_src/solver/physics_informed_solver/competitive_pinn.py +++ b/pina/_src/solver/physics_informed_solver/competitive_pinn.py @@ -73,18 +73,18 @@ def __init__( :param torch.nn.Module discriminator: The discriminator to be used. If ``None``, the discriminator is a deepcopy of the ``model``. Default is ``None``. - :param torch.optim.Optimizer optimizer_model: The optimizer of the + :param OptimizerInterface optimizer_model: The optimizer of the ``model``. If ``None``, the :class:`torch.optim.Adam` optimizer is used. Default is ``None``. - :param torch.optim.Optimizer optimizer_discriminator: The optimizer of + :param OptimizerInterface optimizer_discriminator: The optimizer of the ``discriminator``. If ``None``, the :class:`torch.optim.Adam` optimizer is used. Default is ``None``. - :param Scheduler scheduler_model: Learning rate scheduler for the - ``model``. + :param SchedulerInterface scheduler_model: Learning rate scheduler for + the ``model``. If ``None``, the :class:`torch.optim.lr_scheduler.ConstantLR` scheduler is used. Default is ``None``. - :param Scheduler scheduler_discriminator: Learning rate scheduler for - the ``discriminator``. + :param SchedulerInterface scheduler_discriminator: Learning rate + scheduler for the ``discriminator``. If ``None``, the :class:`torch.optim.lr_scheduler.ConstantLR` scheduler is used. Default is ``None``. :param WeightingInterface weighting: The weighting schema to be used. @@ -184,7 +184,7 @@ def configure_optimizers(self): Optimizer configuration. :return: The optimizers and the schedulers - :rtype: tuple[list[Optimizer], list[Scheduler]] + :rtype: tuple[list[OptimizerInterface], list[SchedulerInterface]] """ # If the problem is an InverseProblem, add the unknown parameters # to the parameters to be optimized @@ -238,7 +238,7 @@ def optimizer_model(self): The optimizer associated to the model. :return: The optimizer for the model. - :rtype: Optimizer + :rtype: OptimizerInterface """ return self.optimizers[0] @@ -248,7 +248,7 @@ def optimizer_discriminator(self): The optimizer associated to the discriminator. :return: The optimizer for the discriminator. - :rtype: Optimizer + :rtype: OptimizerInterface """ return self.optimizers[1] @@ -258,7 +258,7 @@ def scheduler_model(self): The scheduler associated to the model. :return: The scheduler for the model. - :rtype: Scheduler + :rtype: SchedulerInterface """ return self.schedulers[0] @@ -268,6 +268,6 @@ def scheduler_discriminator(self): The scheduler associated to the discriminator. :return: The scheduler for the discriminator. - :rtype: Scheduler + :rtype: SchedulerInterface """ return self.schedulers[1] diff --git a/pina/_src/solver/physics_informed_solver/gradient_pinn.py b/pina/_src/solver/physics_informed_solver/gradient_pinn.py index 4ee2b3089..72798b10a 100644 --- a/pina/_src/solver/physics_informed_solver/gradient_pinn.py +++ b/pina/_src/solver/physics_informed_solver/gradient_pinn.py @@ -74,10 +74,10 @@ def __init__( :class:`~pina.problem.spatial_problem.SpatialProblem` to compute the gradient of the loss. :param torch.nn.Module model: The neural network model to be used. - :param Optimizer optimizer: The optimizer to be used. + :param OptimizerInterface optimizer: The optimizer to be used. If ``None``, the :class:`torch.optim.Adam` optimizer is used. Default is ``None``. 
- :param Scheduler scheduler: Learning rate scheduler. + :param SchedulerInterface scheduler: Learning rate scheduler. If ``None``, the :class:`torch.optim.lr_scheduler.ConstantLR` scheduler is used. Default is ``None``. :param WeightingInterface weighting: The weighting schema to be used. diff --git a/pina/_src/solver/physics_informed_solver/pinn.py b/pina/_src/solver/physics_informed_solver/pinn.py index 59b61214e..47ffa6d6d 100644 --- a/pina/_src/solver/physics_informed_solver/pinn.py +++ b/pina/_src/solver/physics_informed_solver/pinn.py @@ -63,10 +63,10 @@ def __init__( :param BaseProblem problem: The problem to be solved. :param torch.nn.Module model: The neural network model to be used. - :param Optimizer optimizer: The optimizer to be used. + :param OptimizerInterface optimizer: The optimizer to be used. If ``None``, the :class:`torch.optim.Adam` optimizer is used. Default is ``None``. - :param Scheduler scheduler: Learning rate scheduler. + :param SchedulerInterface scheduler: Learning rate scheduler. If ``None``, the :class:`torch.optim.lr_scheduler.ConstantLR` scheduler is used. Default is ``None``. :param WeightingInterface weighting: The weighting schema to be used. @@ -117,7 +117,7 @@ def configure_optimizers(self): Optimizer configuration for the PINN solver. :return: The optimizers and the schedulers - :rtype: tuple[list[Optimizer], list[Scheduler]] + :rtype: tuple[list[OptimizerInterface], list[SchedulerInterface]] """ # If the problem is an InverseProblem, add the unknown parameters # to the parameters to be optimized. diff --git a/pina/_src/solver/physics_informed_solver/rba_pinn.py b/pina/_src/solver/physics_informed_solver/rba_pinn.py index 5c7821120..e1d754f88 100644 --- a/pina/_src/solver/physics_informed_solver/rba_pinn.py +++ b/pina/_src/solver/physics_informed_solver/rba_pinn.py @@ -81,10 +81,10 @@ def __init__( :param BaseProblem problem: The problem to be solved. :param torch.nn.Module model: The neural network model to be used. - :param Optimizer optimizer: The optimizer to be used. + :param OptimizerInterface optimizer: The optimizer to be used. If ``None``, the :class:`torch.optim.Adam` optimizer is used. Default is ``None``. - :param Scheduler scheduler: Learning rate scheduler. + :param SchedulerInterface scheduler: Learning rate scheduler. If ``None``, the :class:`torch.optim.lr_scheduler.ConstantLR` scheduler is used. Default is ``None``. :param WeightingInterface weighting: The weighting schema to be used. diff --git a/pina/_src/solver/physics_informed_solver/self_adaptive_pinn.py b/pina/_src/solver/physics_informed_solver/self_adaptive_pinn.py index 983eb2966..c8217a892 100644 --- a/pina/_src/solver/physics_informed_solver/self_adaptive_pinn.py +++ b/pina/_src/solver/physics_informed_solver/self_adaptive_pinn.py @@ -125,19 +125,19 @@ def __init__( :param torch.nn.Module model: The model to be used. :param torch.nn.Module weight_function: The Self-Adaptive mask model. Default is ``torch.nn.Sigmoid()``. - :param Optimizer optimizer_model: The optimizer of the ``model``. - If ``None``, the :class:`torch.optim.Adam` optimizer is used. - Default is ``None``. - :param Optimizer optimizer_weights: The optimizer of the + :param OptimizerInterface optimizer_model: The optimizer of the + ``model``. If ``None``, the :class:`torch.optim.Adam` optimizer is + used. Default is ``None``. + :param OptimizerInterface optimizer_weights: The optimizer of the ``weight_function``. If ``None``, the :class:`torch.optim.Adam` optimizer is used. Default is ``None``. 
- :param Scheduler scheduler_model: Learning rate scheduler for the - ``model``. + :param SchedulerInterface scheduler_model: Learning rate scheduler for + the ``model``. If ``None``, the :class:`torch.optim.lr_scheduler.ConstantLR` scheduler is used. Default is ``None``. - :param Scheduler scheduler_weights: Learning rate scheduler for the - ``weight_function``. + :param SchedulerInterface scheduler_weights: Learning rate scheduler for + the ``weight_function``. If ``None``, the :class:`torch.optim.lr_scheduler.ConstantLR` scheduler is used. Default is ``None``. :param WeightingInterface weighting: The weighting schema to be used. @@ -296,7 +296,7 @@ def configure_optimizers(self): Optimizer configuration. :return: The optimizers and the schedulers - :rtype: tuple[list[Optimizer], list[Scheduler]] + :rtype: tuple[list[OptimizerInterface], list[SchedulerInterface]] """ # Hook the optimizers to the models self.optimizer_model.hook(self.model.parameters()) @@ -421,7 +421,7 @@ def scheduler_model(self): The scheduler associated to the model. :return: The scheduler for the model. - :rtype: Scheduler + :rtype: SchedulerInterface """ return self.schedulers[0] @@ -431,7 +431,7 @@ def scheduler_weights(self): The scheduler associated to the mask model. :return: The scheduler for the mask model. - :rtype: Scheduler + :rtype: SchedulerInterface """ return self.schedulers[1] @@ -441,7 +441,7 @@ def optimizer_model(self): Returns the optimizer associated to the model. :return: The optimizer for the model. - :rtype: Optimizer + :rtype: OptimizerInterface """ return self.optimizers[0] @@ -451,6 +451,6 @@ def optimizer_weights(self): The optimizer associated to the mask model. :return: The optimizer for the mask model. - :rtype: Optimizer + :rtype: OptimizerInterface """ return self.optimizers[1] diff --git a/pina/_src/solver/solver.py b/pina/_src/solver/solver.py index 571892f05..3d1f8de36 100644 --- a/pina/_src/solver/solver.py +++ b/pina/_src/solver/solver.py @@ -7,8 +7,8 @@ from torch._dynamo import OptimizedModule from pina._src.problem.base_problem import BaseProblem from pina._src.problem.inverse_problem import InverseProblem -from pina._src.optim.optimizer_interface import Optimizer -from pina._src.optim.scheduler_interface import Scheduler +from pina._src.optim.optimizer_interface import OptimizerInterface +from pina._src.optim.scheduler_interface import SchedulerInterface from pina._src.optim.torch_optimizer import TorchOptimizer from pina._src.optim.torch_scheduler import TorchScheduler from pina._src.weighting.weighting_interface import WeightingInterface @@ -316,7 +316,7 @@ def default_torch_optimizer(): Set the default optimizer to :class:`torch.optim.Adam`. :return: The default optimizer. - :rtype: Optimizer + :rtype: OptimizerInterface """ return TorchOptimizer(torch.optim.Adam, lr=0.001) @@ -327,7 +327,7 @@ def default_torch_scheduler(): :class:`torch.optim.lr_scheduler.ConstantLR`. :return: The default scheduler. - :rtype: Scheduler + :rtype: SchedulerInterface """ return TorchScheduler(torch.optim.lr_scheduler.ConstantLR, factor=1.0) @@ -381,10 +381,10 @@ def __init__( :param BaseProblem problem: The problem to be solved. :param torch.nn.Module model: The neural network model to be used. - :param Optimizer optimizer: The optimizer to be used. + :param OptimizerInterface optimizer: The optimizer to be used. If ``None``, the :class:`torch.optim.Adam` optimizer is used. Default is ``None``. - :param Scheduler scheduler: The scheduler to be used. 
+ :param SchedulerInterface scheduler: The scheduler to be used. If ``None``, the :class:`torch.optim.lr_scheduler.ConstantLR` scheduler is used. Default is ``None``. :param WeightingInterface weighting: The weighting schema to be used. @@ -402,9 +402,9 @@ def __init__( # check consistency of models argument and encapsulate in list check_consistency(model, torch.nn.Module) # check scheduler consistency and encapsulate in list - check_consistency(scheduler, Scheduler) + check_consistency(scheduler, SchedulerInterface) # check optimizer consistency and encapsulate in list - check_consistency(optimizer, Optimizer) + check_consistency(optimizer, OptimizerInterface) # initialize the model (needed by Lightining to go to different devices) self._pina_models = torch.nn.ModuleList([model]) @@ -427,7 +427,7 @@ def configure_optimizers(self): Optimizer configuration for the solver. :return: The optimizer and the scheduler - :rtype: tuple[list[Optimizer], list[Scheduler]] + :rtype: tuple[list[OptimizerInterface], list[SchedulerInterface]] """ self.optimizer.hook(self.model.parameters()) if isinstance(self.problem, InverseProblem): @@ -458,7 +458,7 @@ def scheduler(self): The scheduler used for training. :return: The scheduler used for training. - :rtype: Scheduler + :rtype: SchedulerInterface """ return self._pina_schedulers[0] @@ -468,7 +468,7 @@ def optimizer(self): The optimizer used for training. :return: The optimizer used for training. - :rtype: Optimizer + :rtype: OptimizerInterface """ return self._pina_optimizers[0] @@ -493,10 +493,10 @@ def __init__( :param BaseProblem problem: The problem to be solved. :param models: The neural network models to be used. :type model: list[torch.nn.Module] | tuple[torch.nn.Module] - :param list[Optimizer] optimizers: The optimizers to be used. + :param list[OptimizerInterface] optimizers: The optimizers to be used. If ``None``, the :class:`torch.optim.Adam` optimizer is used for all models. Default is ``None``. - :param list[Scheduler] schedulers: The schedulers to be used. + :param list[SchedulerInterface] schedulers: The schedulers to be used. If ``None``, the :class:`torch.optim.lr_scheduler.ConstantLR` scheduler is used for all the models. Default is ``None``. :param WeightingInterface weighting: The weighting schema to be used. @@ -548,10 +548,10 @@ def __init__( check_consistency(models, torch.nn.Module) # check scheduler consistency and encapsulate in list - check_consistency(schedulers, Scheduler) + check_consistency(schedulers, SchedulerInterface) # check optimizer consistency and encapsulate in list - check_consistency(optimizers, Optimizer) + check_consistency(optimizers, OptimizerInterface) # check length consistency optimizers if len(models) != len(optimizers): @@ -598,7 +598,7 @@ def configure_optimizers(self): Optimizer configuration for the solver. :return: The optimizer and the scheduler - :rtype: tuple[list[Optimizer], list[Scheduler]] + :rtype: tuple[list[OptimizerInterface], list[SchedulerInterface]] """ for optimizer, scheduler, model in zip( self.optimizers, self.schedulers, self.models @@ -627,7 +627,7 @@ def optimizers(self): The optimizers used for training. :return: The optimizers used for training. - :rtype: list[Optimizer] + :rtype: list[OptimizerInterface] """ return self._pina_optimizers @@ -637,6 +637,6 @@ def schedulers(self): The schedulers used for training. :return: The schedulers used for training. 
- :rtype: list[Scheduler] + :rtype: list[SchedulerInterface] """ return self._pina_schedulers diff --git a/pina/_src/solver/supervised_solver/reduced_order_model.py b/pina/_src/solver/supervised_solver/reduced_order_model.py index 3687a3e2b..585d0ef90 100644 --- a/pina/_src/solver/supervised_solver/reduced_order_model.py +++ b/pina/_src/solver/supervised_solver/reduced_order_model.py @@ -106,10 +106,10 @@ def __init__( :param torch.nn.Module loss: The loss function to be minimized. If ``None``, the :class:`torch.nn.MSELoss` loss is used. Default is `None`. - :param Optimizer optimizer: The optimizer to be used. + :param OptimizerInterface optimizer: The optimizer to be used. If ``None``, the :class:`torch.optim.Adam` optimizer is used. Default is ``None``. - :param Scheduler scheduler: Learning rate scheduler. + :param SchedulerInterface scheduler: Learning rate scheduler. If ``None``, the :class:`torch.optim.lr_scheduler.ConstantLR` scheduler is used. Default is ``None``. :param WeightingInterface weighting: The weighting schema to be used. diff --git a/pina/_src/solver/supervised_solver/supervised.py b/pina/_src/solver/supervised_solver/supervised.py index cdbddffca..e7ee6d6e6 100644 --- a/pina/_src/solver/supervised_solver/supervised.py +++ b/pina/_src/solver/supervised_solver/supervised.py @@ -50,10 +50,10 @@ def __init__( :param torch.nn.Module loss: The loss function to be minimized. If ``None``, the :class:`torch.nn.MSELoss` loss is used. Default is `None`. - :param Optimizer optimizer: The optimizer to be used. + :param OptimizerInterface optimizer: The optimizer to be used. If ``None``, the :class:`torch.optim.Adam` optimizer is used. Default is ``None``. - :param Scheduler scheduler: Learning rate scheduler. + :param SchedulerInterface scheduler: Learning rate scheduler. If ``None``, the :class:`torch.optim.lr_scheduler.ConstantLR` scheduler is used. Default is ``None``. :param WeightingInterface weighting: The weighting schema to be used. 
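For the multi-model solvers touched above, the same hook pattern is applied per model: each optimizer is hooked to its model's parameters and each scheduler to its optimizer, as the single-model ``configure_optimizers`` does explicitly with ``self.optimizer.hook(self.model.parameters())``. A sketch with stand-in models (the zip ordering follows the loop shown in the multi-solver diff):

    import torch
    from pina.optim import TorchOptimizer, TorchScheduler

    # Stand-in models; real solvers receive these through the constructor.
    models = [torch.nn.Linear(8, 8), torch.nn.Linear(8, 1)]
    optimizers = [
        TorchOptimizer(torch.optim.Adam, lr=1e-3),
        TorchOptimizer(torch.optim.SGD, lr=1e-2),
    ]
    schedulers = [
        TorchScheduler(torch.optim.lr_scheduler.ConstantLR, factor=1.0),
        TorchScheduler(torch.optim.lr_scheduler.ConstantLR, factor=1.0),
    ]

    # Pairwise hooking: optimizers to model parameters, schedulers to optimizers.
    for optimizer, scheduler, model in zip(optimizers, schedulers, models):
        optimizer.hook(model.parameters())
        scheduler.hook(optimizer)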
diff --git a/pina/optim/__init__.py b/pina/optim/__init__.py index 682b6225e..f88b85e7a 100644 --- a/pina/optim/__init__.py +++ b/pina/optim/__init__.py @@ -1,13 +1,34 @@ """Module for the Optimizers and Schedulers.""" __all__ = [ - "Optimizer", + "OptimizerInterface", + "SchedulerInterface", "TorchOptimizer", - "Scheduler", "TorchScheduler", ] -from pina._src.optim.optimizer_interface import Optimizer +from pina._src.optim.optimizer_interface import OptimizerInterface +from pina._src.optim.scheduler_interface import SchedulerInterface from pina._src.optim.torch_optimizer import TorchOptimizer -from pina._src.optim.scheduler_interface import Scheduler from pina._src.optim.torch_scheduler import TorchScheduler + +# Back-compatibility with version 0.2, to be removed soon +import warnings + +_DEPRECATED_IMPORTS = { + "Optimizer": "OptimizerInterface", + "Scheduler": "SchedulerInterface", +} + + +def __getattr__(name): + if name in _DEPRECATED_IMPORTS: + + warnings.warn( + f"Importing '{name}' from 'pina.optim' is deprecated; use " + f"pina.optim.{_DEPRECATED_IMPORTS[name]} instead.", + DeprecationWarning, + stacklevel=2, + ) + + return globals()[_DEPRECATED_IMPORTS[name]] diff --git a/tests/test_optim/test_torch_optimizer.py b/tests/test_optim/test_torch_optimizer.py new file mode 100644 index 000000000..dffc04c67 --- /dev/null +++ b/tests/test_optim/test_torch_optimizer.py @@ -0,0 +1,27 @@ +import torch +import pytest +from pina.optim import TorchOptimizer + +opt_list = [torch.optim.Adam, torch.optim.AdamW, torch.optim.SGD] +kwargs_list = [{"lr": 1e-3}, {"lr": 1e-3, "weight_decay": 1e-4}] + + +@pytest.mark.parametrize("optimizer_class", opt_list) +@pytest.mark.parametrize("kwargs", kwargs_list) +def test_constructor(optimizer_class, kwargs): + TorchOptimizer(optimizer_class, **kwargs) + + # Should fail if the optimizer is not subclass of torch.optim.Optimizer + with pytest.raises(ValueError): + TorchOptimizer(object, **kwargs) + + +@pytest.mark.parametrize("optimizer_class", opt_list) +@pytest.mark.parametrize("kwargs", kwargs_list) +def test_hook(optimizer_class, kwargs): + + # Create the optimizer instance + optimizer = TorchOptimizer(optimizer_class, **kwargs) + + # Hook the optimizer with model parameters + optimizer.hook(torch.nn.Linear(10, 10).parameters()) diff --git a/tests/test_optim/test_torch_scheduler.py b/tests/test_optim/test_torch_scheduler.py new file mode 100644 index 000000000..bc7dd96c9 --- /dev/null +++ b/tests/test_optim/test_torch_scheduler.py @@ -0,0 +1,37 @@ +import torch +import pytest +from pina.optim import TorchOptimizer, TorchScheduler + +opt_list = [torch.optim.Adam, torch.optim.AdamW, torch.optim.SGD] +sch_list = [ + torch.optim.lr_scheduler.ConstantLR, + torch.optim.lr_scheduler.ReduceLROnPlateau, +] + + +@pytest.mark.parametrize("scheduler_class", sch_list) +def test_constructor(scheduler_class): + TorchScheduler(scheduler_class) + + # Should fail if the scheduler is not subclass of torch LRScheduler + with pytest.raises(ValueError): + TorchScheduler(object) + + +@pytest.mark.parametrize("optimizer_class", opt_list) +@pytest.mark.parametrize("scheduler_class", sch_list) +def test_hook(optimizer_class, scheduler_class): + + # Create the optimizer instance + optimizer = TorchOptimizer(optimizer_class) + optimizer.hook(torch.nn.Linear(10, 10).parameters()) + + # Create the scheduler instance + scheduler = TorchScheduler(scheduler_class) + + # Hook the scheduler with the optimizer instance + scheduler.hook(optimizer) + + # Should fail if the optimizer is not an 
instance of OptimizerInterface + with pytest.raises(ValueError): + scheduler.hook(object) diff --git a/tests/test_optimizer.py b/tests/test_optimizer.py deleted file mode 100644 index 037de9929..000000000 --- a/tests/test_optimizer.py +++ /dev/null @@ -1,21 +0,0 @@ -import torch -import pytest -from pina.optim import TorchOptimizer - -opt_list = [ - torch.optim.Adam, - torch.optim.AdamW, - torch.optim.SGD, - torch.optim.RMSprop, -] - - -@pytest.mark.parametrize("optimizer_class", opt_list) -def test_constructor(optimizer_class): - TorchOptimizer(optimizer_class, lr=1e-3) - - -@pytest.mark.parametrize("optimizer_class", opt_list) -def test_hook(optimizer_class): - opt = TorchOptimizer(optimizer_class, lr=1e-3) - opt.hook(torch.nn.Linear(10, 10).parameters()) diff --git a/tests/test_scheduler.py b/tests/test_scheduler.py deleted file mode 100644 index 157a818d2..000000000 --- a/tests/test_scheduler.py +++ /dev/null @@ -1,26 +0,0 @@ -import torch -import pytest -from pina.optim import TorchOptimizer, TorchScheduler - -opt_list = [ - torch.optim.Adam, - torch.optim.AdamW, - torch.optim.SGD, - torch.optim.RMSprop, -] - -sch_list = [torch.optim.lr_scheduler.ConstantLR] - - -@pytest.mark.parametrize("scheduler_class", sch_list) -def test_constructor(scheduler_class): - TorchScheduler(scheduler_class) - - -@pytest.mark.parametrize("optimizer_class", opt_list) -@pytest.mark.parametrize("scheduler_class", sch_list) -def test_hook(optimizer_class, scheduler_class): - opt = TorchOptimizer(optimizer_class, lr=1e-3) - opt.hook(torch.nn.Linear(10, 10).parameters()) - sch = TorchScheduler(scheduler_class) - sch.hook(opt)
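The back-compatibility shim added to ``pina/optim/__init__.py`` resolves the old names through a module-level ``__getattr__``, so existing imports keep working while emitting a ``DeprecationWarning``. A small sketch of the expected behaviour (warning filters are forced to ``"always"`` so the warning is actually recorded):

    import warnings
    from pina.optim import OptimizerInterface, SchedulerInterface

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        # Old names resolve to the renamed classes via the deprecation shim.
        from pina.optim import Optimizer, Scheduler

    assert Optimizer is OptimizerInterface
    assert Scheduler is SchedulerInterface
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)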