Merge pull request #1104 from pints-team/930-gradient-descent
Simple gradient descent optimiser
ben18785 authored Apr 17, 2020
2 parents af26f86 + 29d8afb commit 5791246
Showing 10 changed files with 879 additions and 6 deletions.
8 changes: 8 additions & 0 deletions docs/source/optimisers/gradient_descent.rst
@@ -0,0 +1,8 @@
**************************************
Gradient descent (fixed learning rate)
**************************************

.. currentmodule:: pints

.. autoclass:: GradientDescent

1 change: 1 addition & 0 deletions docs/source/optimisers/index.rst
@@ -18,6 +18,7 @@ or the :class:`OptimisationController` class.
convenience_methods
boundary_transformations
cmaes
gradient_descent
nelder_mead
pso
snes
1 change: 1 addition & 0 deletions examples/README.md
@@ -25,6 +25,7 @@ relevant code.

### Particle-based methods
- [CMA-ES](./optimisation/cmaes.ipynb)
- [Gradient descent](./optimisation/gradient-descent.ipynb)
- [PSO](./optimisation/pso.ipynb)
- [SNES](./optimisation/snes.ipynb)
- [XNES](./optimisation/xnes.ipynb)
559 changes: 559 additions & 0 deletions examples/optimisation/gradient-descent.ipynb

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions pints/__init__.py
@@ -169,6 +169,7 @@ def version(formatted=False):
TriangleWaveTransform,
)
from ._optimisers._cmaes import CMAES
from ._optimisers._gradient_descent import GradientDescent
from ._optimisers._nelder_mead import NelderMead
from ._optimisers._pso import PSO
from ._optimisers._snes import SNES
29 changes: 26 additions & 3 deletions pints/_optimisers/__init__.py
@@ -26,6 +26,10 @@ class Optimiser(pints.Loggable, pints.TunableMethod):
criteria etc. Users who don't need this functionality can use optimisers
via the :class:`OptimisationController` class instead.
All PINTS optimisers are _minimisers_. To maximise a function simply pass
in the negative of its evaluations to :meth:`tell()` (this is handled
automatically by the :class:`OptimisationController`).
All optimisers implement the :class:`pints.Loggable` and
:class:`pints.TunableMethod` interfaces.
@@ -153,6 +157,13 @@ def name(self):
"""
raise NotImplementedError

def needs_sensitivities(self):
"""
Returns ``True`` if this method needs sensitivities to be passed in to
``tell`` along with the evaluated error.
"""
return False

def running(self):
"""
Returns ``True`` if an optimisation is in progress.
@@ -171,6 +182,11 @@ def tell(self, fx):
"""
Performs an iteration of the optimiser algorithm, using the evaluations
``fx`` of the points ``x`` previously specified by ``ask``.
For methods that require sensitivities (see
:meth:`needs_sensitivities`), ``fx`` should be a tuple
``(objective, sensitivities)``, containing the values returned by
:meth:`pints.ErrorMeasure.evaluateS1()`.
"""
raise NotImplementedError

@@ -322,6 +338,9 @@ def __init__(
raise ValueError('Method must be subclass of pints.Optimiser.')
self._optimiser = method(x0, sigma0, boundaries)

# Check if sensitivities are required
self._needs_sensitivities = self._optimiser.needs_sensitivities()

# Logging
self._log_to_screen = True
self._log_filename = None
@@ -419,6 +438,11 @@ def run(self):
# information)
unchanged_iterations = 0

# Choose method to evaluate
f = self._function
if self._needs_sensitivities:
f = f.evaluateS1

# Create evaluator object
if self._parallel:
# Get number of workers
@@ -428,10 +452,9 @@
# particles!
if isinstance(self._optimiser, PopulationBasedOptimiser):
n_workers = min(n_workers, self._optimiser.population_size())
evaluator = pints.ParallelEvaluator(
self._function, n_workers=n_workers)
evaluator = pints.ParallelEvaluator(f, n_workers=n_workers)
else:
evaluator = pints.SequentialEvaluator(self._function)
evaluator = pints.SequentialEvaluator(f)

# Keep track of best position and score
fbest = float('inf')
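As a reading aid (not part of this commit), here is a minimal sketch of the ask-and-tell loop that the new ``needs_sensitivities()`` and ``tell()`` documentation above describes. It assumes ``pints.toy.ParabolicError.evaluateS1()`` returns an ``(error, gradient)`` pair, as :meth:`pints.ErrorMeasure.evaluateS1()` specifies; the starting point and loop length are arbitrary illustrative choices.

```python
import numpy as np
import pints
import pints.toy

# Illustrative only: drive a sensitivity-based optimiser by hand,
# rather than through the OptimisationController.
error = pints.toy.ParabolicError()
opt = pints.GradientDescent(np.array([0.1, 0.1]))

assert opt.needs_sensitivities()            # tell() expects (f, df) pairs

for _ in range(100):
    xs = opt.ask()                          # a list with a single proposal
    fs = [error.evaluateS1(x) for x in xs]  # evaluate error and gradient
    opt.tell(fs)                            # take one gradient-descent step

print(opt.xbest(), opt.fbest())
```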
126 changes: 126 additions & 0 deletions pints/_optimisers/_gradient_descent.py
@@ -0,0 +1,126 @@
#
# Fixed learning-rate gradient descent.
#
# This file is part of PINTS (https://github.com/pints-team/pints/) which is
# released under the BSD 3-clause license. See accompanying LICENSE.md for
# copyright notice and full license details.
#
from __future__ import absolute_import, division
from __future__ import print_function, unicode_literals

import pints


class GradientDescent(pints.Optimiser):
"""
Gradient-descent method with a fixed learning rate.
"""

def __init__(self, x0, sigma0=0.1, boundaries=None):
super(GradientDescent, self).__init__(x0, sigma0, boundaries)

# Set optimiser state
self._running = False
self._ready_for_tell = False

# Best solution found
self._xbest = self._x0
self._fbest = float('inf')

# Learning rate
self._eta = 0.01

# Current point, score, and gradient
self._current = self._x0
self._current_f = None
self._current_df = None

# Proposed next point (read-only, so can be passed to user)
self._proposed = self._x0
self._proposed.setflags(write=False)

def ask(self):
""" See :meth:`Optimiser.ask()`. """

# Running, and ready for tell now
self._ready_for_tell = True
self._running = True

# Return proposed points (just the one)
return [self._proposed]

def fbest(self):
""" See :meth:`Optimiser.fbest()`. """
return self._fbest

def learning_rate(self):
""" Returns this optimiser's learning rate. """
return self._eta

def name(self):
""" See :meth:`Optimiser.name()`. """
return 'Gradient descent'

def needs_sensitivities(self):
""" See :meth:`Optimiser.needs_sensitivities()`. """
return True

def n_hyper_parameters(self):
""" See :meth:`pints.TunableMethod.n_hyper_parameters()`. """
return 1

def running(self):
""" See :meth:`Optimiser.running()`. """
return self._running

def set_hyper_parameters(self, x):
"""
See :meth:`pints.TunableMethod.set_hyper_parameters()`.
The hyper-parameter vector is ``[learning_rate]``.
"""
self.set_learning_rate(x[0])

def set_learning_rate(self, eta):
"""
Sets the learning rate for this optimiser.
Parameters
----------
eta : float
The learning rate, as a float greater than zero.
"""
eta = float(eta)
if eta <= 0:
raise ValueError('Learning rate must be greater than zero.')
self._eta = eta

def tell(self, reply):
""" See :meth:`Optimiser.tell()`. """

# Check ask-tell pattern
if not self._ready_for_tell:
raise Exception('ask() not called before tell()')
self._ready_for_tell = False

# Unpack reply
fx, dfx = reply[0]

# Move to proposed point
self._current = self._proposed
self._current_f = fx
self._current_df = dfx

# Propose next point
self._proposed = self._current - self._eta * dfx
self._proposed.setflags(write=False)

# Update xbest and fbest
if self._fbest > fx:
self._fbest = fx
self._xbest = self._current

def xbest(self):
""" See :meth:`Optimiser.xbest()`. """
return self._xbest
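
For context, a short usage sketch (an editor's illustration, not code from this diff) showing the new optimiser run through the controller, using only calls that appear in the tests below; the learning rate of 0.05 is an arbitrary example value.

```python
import numpy as np
import pints
import pints.toy

# Illustrative usage of the new optimiser via the OptimisationController.
error = pints.toy.ParabolicError()
x0 = np.array([0.1, 0.1])

controller = pints.OptimisationController(
    error, x0, method=pints.GradientDescent)
controller.optimiser().set_learning_rate(0.05)  # default eta is 0.01
controller.set_threshold(1e-3)                  # stop once the error drops below 1e-3
controller.set_log_to_screen(False)

xbest, fbest = controller.run()
print(xbest, fbest)
```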

130 changes: 130 additions & 0 deletions pints/tests/test_opt_gradient_descent.py
@@ -0,0 +1,130 @@
#!/usr/bin/env python3
#
# Tests the basic methods of the Gradient Descent optimiser.
#
# This file is part of PINTS (https://github.com/pints-team/pints/) which is
# released under the BSD 3-clause license. See accompanying LICENSE.md for
# copyright notice and full license details.
#
import unittest
import numpy as np

import pints
import pints.toy

from shared import CircularBoundaries


debug = False
method = pints.GradientDescent

# Consistent unit testing in Python 2 and 3
try:
unittest.TestCase.assertRaisesRegex
except AttributeError:
unittest.TestCase.assertRaisesRegex = unittest.TestCase.assertRaisesRegexp


class TestGradientDescent(unittest.TestCase):
"""
Tests the basic methods of the gradient descent optimiser.
"""
def setUp(self):
""" Called before every test """
np.random.seed(1)

def problem(self):
""" Returns a test problem, starting point, sigma, and boundaries. """
r = pints.toy.ParabolicError()
x = [0.1, 0.1]
s = 0.1
b = pints.RectangularBoundaries([-1, -1], [1, 1])
return r, x, s, b

def test_unbounded(self):
# Runs an optimisation without boundaries.
r, x, s, b = self.problem()
opt = pints.OptimisationController(r, x, method=method)
opt.set_threshold(1e-3)
opt.set_log_to_screen(debug)
found_parameters, found_solution = opt.run()
self.assertTrue(found_solution < 1e-3)

def test_bounded(self):
# Runs an optimisation with boundaries.
r, x, s, b = self.problem()

# Rectangular boundaries
b = pints.RectangularBoundaries([-1, -1], [1, 1])
opt = pints.OptimisationController(r, x, boundaries=b, method=method)
opt.set_log_to_screen(debug)
found_parameters, found_solution = opt.run()
self.assertTrue(found_solution < 1e-3)

# Circular boundaries
# Start near edge, to increase chance of out-of-bounds occurring.
b = CircularBoundaries([0, 0], 1)
x = [0.99, 0]
opt = pints.OptimisationController(r, x, boundaries=b, method=method)
opt.set_log_to_screen(debug)
found_parameters, found_solution = opt.run()
self.assertTrue(found_solution < 1e-3)

def test_bounded_and_sigma(self):
# Runs an optimisation with boundaries and sigma.
r, x, s, b = self.problem()
opt = pints.OptimisationController(r, x, s, b, method)
opt.set_threshold(1e-3)
opt.set_log_to_screen(debug)
found_parameters, found_solution = opt.run()
self.assertTrue(found_solution < 1e-3)

def test_ask_tell(self):
# Tests ask-and-tell related error handling.
r, x, s, b = self.problem()
opt = method(x)

# Stop called when not running
self.assertFalse(opt.running())
self.assertFalse(opt.stop())

# Best position and score called before run
self.assertEqual(list(opt.xbest()), list(x))
self.assertEqual(opt.fbest(), float('inf'))

# Tell before ask
self.assertRaisesRegex(
Exception, r'ask\(\) not called before tell\(\)', opt.tell, 5)

# Ask
opt.ask()

# Now we should be running
self.assertTrue(opt.running())

def test_hyper_parameter_interface(self):
# Tests the hyper parameter interface for this optimiser.
r, x, s, b = self.problem()
opt = pints.OptimisationController(r, x, method=method)
m = opt.optimiser()
self.assertEqual(m.n_hyper_parameters(), 1)
eta = m.learning_rate() * 2
m.set_hyper_parameters([eta])
self.assertEqual(m.learning_rate(), eta)
self.assertRaisesRegex(
ValueError, 'greater than zero', m.set_hyper_parameters, [0])

def test_name(self):
# Test the name() method.
opt = method(np.array([0, 1.01]))
self.assertIn('radient descent', opt.name())


if __name__ == '__main__':
print('Add -v for more debug output')
import sys
if '-v' in sys.argv:
debug = True
import logging
logging.basicConfig(level=logging.DEBUG)
unittest.main()