diff --git a/README.rst b/README.rst
index 867bdb328..0af48dcad 100644
--- a/README.rst
+++ b/README.rst
@@ -1,4 +1,4 @@
-.. |iminuit| image:: doc/iminuit_logo.svg
+.. |iminuit| image:: doc/_static/iminuit_logo.svg
    :alt: iminuit
    :target: http://iminuit.readthedocs.io/en/latest
diff --git a/doc/bench.svg b/doc/_static/bench.svg
similarity index 100%
rename from doc/bench.svg
rename to doc/_static/bench.svg
diff --git a/doc/bench2d.svg b/doc/_static/bench2d.svg
similarity index 100%
rename from doc/bench2d.svg
rename to doc/_static/bench2d.svg
diff --git a/doc/iminuit_logo.svg b/doc/_static/iminuit_logo.svg
similarity index 100%
rename from doc/iminuit_logo.svg
rename to doc/_static/iminuit_logo.svg
diff --git a/doc/_static/mncontour.png b/doc/_static/mncontour.png
deleted file mode 100644
index ad8f107dc..000000000
Binary files a/doc/_static/mncontour.png and /dev/null differ
diff --git a/doc/_static/mnprofile.png b/doc/_static/mnprofile.png
deleted file mode 100644
index c1bffabb8..000000000
Binary files a/doc/_static/mnprofile.png and /dev/null differ
diff --git a/doc/benchmark.rst b/doc/benchmark.rst
index e7bd40cb8..1a00aa060 100644
--- a/doc/benchmark.rst
+++ b/doc/benchmark.rst
@@ -29,9 +29,9 @@ Results
 The results are shown in the following three plots. The best algorithms require the fewest function calls to achieve the highest accuracy.
 
-.. image:: bench.svg
+.. image:: _static/bench.svg
 
-.. image:: bench2d.svg
+.. image:: _static/bench2d.svg
 
 Shown in the first plot is the number of calls to the cost function divided by the number of parameters. Smaller is better. Note that the algorithms achieve varying levels of accuracy, therefore this plot alone cannot show which algorithm is best. Shown in the second plot is the accuracy of the solution when the minimizer is stopped. The stopping criteria vary from algorithm to algorithm.
@@ -55,4 +55,4 @@ Conclusion
 Minuit2 (and therefore iminuit) is a good allrounder. It is not outstanding in terms of convergence rate or accuracy, but not bad either. Using strategy 0 seem safe to use: it speeds up the convergence without reducing the accuracy of the result.
 
-When an application requires minimising the same cost function with different data over and over so that a fast convergence rate is critical, it can be useful to try other minimisers to in addition to iminuit.
\ No newline at end of file
+When an application requires minimising the same cost function with different data over and over so that a fast convergence rate is critical, it can be useful to try other minimisers in addition to iminuit.
diff --git a/doc/index.rst b/doc/index.rst
index 1ee3ca78f..fb06e7168 100644
--- a/doc/index.rst
+++ b/doc/index.rst
@@ -1,6 +1,6 @@
 .. include:: references.txt
 
-.. |iminuit| image:: iminuit_logo.svg
+.. |iminuit| image:: _static/iminuit_logo.svg
 
 |iminuit|
 =========
diff --git a/doc/bench.py b/doc/plots/bench.py
similarity index 100%
rename from doc/bench.py
rename to doc/plots/bench.py
diff --git a/doc/plots/loss.py b/doc/plots/loss.py
new file mode 100644
index 000000000..a4d91c4ca
--- /dev/null
+++ b/doc/plots/loss.py
@@ -0,0 +1,15 @@
+from matplotlib import pyplot as plt
+import numpy as np
+
+
+def soft_l1(z):
+    return 2 * ((1 + z) ** 0.5 - 1)
+
+
+x = np.linspace(-3, 3)
+z = x ** 2
+plt.plot(x, z, label="linear $\\rho(z) = z$")
+plt.plot(x, soft_l1(z), label="soft L1-norm $\\rho(z) = 2(\\sqrt{1+z} - 1)$")
+plt.xlabel("studentized residual")
+plt.ylabel("cost")
+plt.legend()
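Note on the new doc/plots/loss.py above: it is the figure referenced later in this diff by the loss parameter documentation in src/iminuit/cost.py. A minimal numeric sketch of what the plot shows, reusing the soft_l1 definition from the script (the residual values are made up for illustration): a residual of 5 standard deviations contributes z = 25 to the linear cost but only about 8.2 after the soft L1 transform, so single outliers pull the fit far less.

import numpy as np

def soft_l1(z):
    # rho(z) = 2 (sqrt(1 + z) - 1), same as in doc/plots/loss.py
    return 2 * ((1 + z) ** 0.5 - 1)

r = np.array([0.5, 1.0, 5.0])  # studentized residuals; the last is an outlier
z = r ** 2                     # squared residuals entering the cost
print(z)           # [ 0.25  1.   25.  ]      -> linear loss contributions
print(soft_l1(z))  # approx [0.24 0.83 8.2]   -> the outlier is strongly damped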
diff --git a/doc/plots/mncontour.py b/doc/plots/mncontour.py
new file mode 100644
index 000000000..cb99c3f77
--- /dev/null
+++ b/doc/plots/mncontour.py
@@ -0,0 +1,10 @@
+from iminuit import Minuit
+
+
+def cost(x, y, z):
+    return (x - 1) ** 2 + (y - x) ** 2 + (z - 2) ** 2
+
+
+m = Minuit(cost, print_level=0, pedantic=False)
+m.migrad()
+m.draw_mncontour("x", "y", nsigma=4)
diff --git a/doc/pyplots/draw_mnprofile.py b/doc/plots/mnprofile.py
similarity index 65%
rename from doc/pyplots/draw_mnprofile.py
rename to doc/plots/mnprofile.py
index bce7fb10f..3781f9964 100644
--- a/doc/pyplots/draw_mnprofile.py
+++ b/doc/plots/mnprofile.py
@@ -1,10 +1,10 @@
 from iminuit import Minuit
 
 
-def f(x, y, z):
+def cost(x, y, z):
     return (x - 1) ** 2 + (y - x) ** 2 + (z - 2) ** 2
 
 
-m = Minuit(f, print_level=0, pedantic=False)
+m = Minuit(cost, pedantic=False)
 m.migrad()
 m.draw_mnprofile("y")
diff --git a/doc/pyplots/draw_mncontour.py b/doc/pyplots/draw_mncontour.py
deleted file mode 100644
index a14d14237..000000000
--- a/doc/pyplots/draw_mncontour.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from iminuit import Minuit
-
-
-def f(x, y, z):
-    return (x - 1) ** 2 + (y - x) ** 2 + (z - 2) ** 2
-
-
-m = Minuit(f, print_level=0, pedantic=False)
-m.migrad()
-m.draw_mncontour("x", "y")
diff --git a/setup.py b/setup.py
index 8de2285f5..2af18699e 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,6 @@
 # Use CFLAGS="-g -Og -DDEBUG" python setup.py ... for debugging
 import os
-import sys
 import platform
 from os.path import dirname, join, exists
 from glob import glob
@@ -122,7 +121,7 @@ def lazy_compile(
         print("Minuit2 submodule is missing, attempting download...")
         subp.check_call(["git", "submodule", "update"])
-    except:
+    except subp.CalledProcessError:
         raise SystemExit(
             "Could not download Minuit2 submodule, run `git submodule update` manually"
         )
@@ -150,11 +149,11 @@
 
 # Getting the version number at this point is a bit tricky in Python:
-# https://packaging.python.org/en/latest/development.html#single-sourcing-the-version-across-setup-py-and-your-project
-# This is one of the recommended methods that works in Python 2 and 3:
+# https://packaging.python.org/guides/single-sourcing-package-version/?highlight=single%20sourcing
 with open(join(cwd, "src/iminuit/version.py")) as fp:
-    exec(fp.read())  # this loads __version__
-
+    version = {}
+    exec(fp.read(), version)  # this loads __version__
+    version = version["__version__"]
 with open(join(cwd, "README.rst")) as readme_rst:
     txt = readme_rst.read()
 
@@ -164,16 +163,18 @@
 setup(
     name="iminuit",
-    version=__version__,
+    version=version,
     description="Jupyter-friendly Python frontend for MINUIT2 in C++",
     long_description=long_description,
     long_description_content_type="text/x-rst",
     author="Piti Ongmongkolkul and the iminuit team",
     maintainer="Hans Dembinski",
     maintainer_email="hans.dembinski@gmail.com",
-    url="https://github.com/scikit-hep/iminuit",
-    download_url="http://pypi.python.org/packages/source/i/"
-    "scikit-hep/iminuit-%s.tar.gz" % __version__,
+    url="http://github.com/scikit-hep/iminuit",
+    project_urls={
+        "Documentation": "https://iminuit.readthedocs.io",
+        "Source Code": "http://github.com/scikit-hep/iminuit",
+    },
     packages=["iminuit", "iminuit.tests"],
     package_dir={"": "src"},
     ext_modules=extensions,
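Note on the version handling above: a bare exec(fp.read()) runs version.py inside setup.py's own global namespace, so __version__ appears out of nowhere and static analyzers cannot track it. Passing an explicit dict as the globals argument keeps the executed file contained, and only the one needed name is extracted afterwards. A standalone sketch of the same pattern (the version string is a made-up example value):

# Suppose src/iminuit/version.py contains a single line such as:
#     __version__ = "1.0.0"   # made-up example value
ns = {}
with open("src/iminuit/version.py") as fp:
    exec(fp.read(), ns)  # execute the file with `ns` as its globals
version = ns["__version__"]  # extract only the name we need
print(version)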
diff --git a/src/iminuit/_libiminuit.pyx b/src/iminuit/_libiminuit.pyx
index e1e7e2d80..0294f7930 100644
--- a/src/iminuit/_libiminuit.pyx
+++ b/src/iminuit/_libiminuit.pyx
@@ -1323,7 +1323,7 @@ cdef class Minuit:
             bins(center point), value, migrad results
 
-        .. plot:: pyplots/draw_mnprofile.py
+        .. plot:: plots/mnprofile.py
            :include-source:
         """
        x, y, s = self.mnprofile(vname, bins, bound, subtract_min)
@@ -1604,6 +1604,8 @@ cdef class Minuit:
 
             :meth:`mncontour`
 
+        .. plot:: plots/mncontour.py
+           :include-source:
         """
         return _minuit_methods.draw_mncontour(self, x, y, nsigma, numpoints)
diff --git a/src/iminuit/cost.py b/src/iminuit/cost.py
index 84523382c..3075a6dff 100644
--- a/src/iminuit/cost.py
+++ b/src/iminuit/cost.py
@@ -164,8 +164,8 @@ def __init__(self, n, xe, cdf, verbose=0):
             Bin edge locations, must be len(n) + 1.
 
         cdf: callable
-            Cumulative density function of the form f(x, par0, par1, ..., parN),
-            where `x` is the observation value and par0, ... parN are model parameters.
+            Cumulative density function of the form f(xe, par0, par1, ..., parN),
+            where `xe` is a bin edge and par0, ... parN are model parameters.
 
         verbose: int, optional
             Verbosity level
@@ -223,8 +223,8 @@ def __init__(self, n, xe, scaled_cdf, verbose=0):
             Bin edge locations, must be len(n) + 1.
 
         scaled_cdf: callable
-            Scaled Cumulative density function of the form f(x, par0, par1, ..., parN),
-            where `x` is the observation value and par0, ... parN are model parameters.
+            Scaled cumulative density function of the form f(xe, par0, par1, ..., parN),
+            where `xe` is a bin edge and par0, ... parN are model parameters.
 
         verbose: int, optional
             Verbosity level
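The two docstring corrections above change what the callable is documented to receive: the array of bin edges xe, not individual observation values. A minimal sketch of a conforming cdf for the binned cost class whose __init__(self, n, xe, cdf, verbose=0) appears in the first hunk (the class name is not visible in the hunk context; iminuit.cost.BinnedNLL is assumed here, and the Gaussian model, toy data, and starting values are made up for illustration):

import numpy as np
from scipy.stats import norm
from iminuit import Minuit
from iminuit.cost import BinnedNLL

# toy data: histogram of 1000 standard-normal draws
w = np.random.default_rng(1).normal(size=1000)
n, xe = np.histogram(w, bins=50, range=(-3, 3))

def cdf(xe, mu, sigma):
    # called with the array of bin edges, as the corrected docstring states
    return norm.cdf(xe, mu, sigma)

m = Minuit(BinnedNLL(n, xe, cdf), mu=0.5, sigma=1.5)
m.migrad()  # fitted mu and sigma should come out near 0 and 1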
@@ -267,6 +267,8 @@ class LeastSquares:
     mask = None
     verbose = False
     errordef = 1.0
+    _loss = None
+    _cost = None
 
     def __init__(self, x, y, yerror, model, loss="linear", verbose=0):
         """
@@ -294,6 +296,8 @@ def __init__(self, x, y, yerror, model, loss="linear", verbose=0):
             as this argument. It should be a monotonic, twice differentiable function,
             which accepts the squared residual and returns a modified squared residual.
 
+            .. plot:: plots/loss.py
+
         verbose: int, optional
             Verbosity level
@@ -317,16 +321,25 @@ def __init__(self, x, y, yerror, model, loss="linear", verbose=0):
         self.yerror = yerror
         self.model = model
+        self.loss = loss
+        self.verbose = verbose
+        self.func_code = make_func_code(describe(self.model)[1:])
+
+    @property
+    def loss(self):
+        return self._loss
+
+    @loss.setter
+    def loss(self, loss):
+        self._loss = loss
         if hasattr(loss, "__call__"):
-            self.cost = lambda y, ye, ym: np.sum(loss(_z_squared(y, ye, ym)))
+            self._cost = lambda y, ye, ym: np.sum(loss(_z_squared(y, ye, ym)))
         elif loss == "linear":
-            self.cost = _sum_z_squared
+            self._cost = _sum_z_squared
         elif loss == "soft_l1":
-            self.cost = _sum_z_squared_soft_l1
+            self._cost = _sum_z_squared_soft_l1
         else:
             raise ValueError("unknown loss type: " + loss)
-        self.verbose = verbose
-        self.func_code = make_func_code(describe(self.model)[1:])
 
     def __call__(self, *args):
         ma = self.mask
@@ -339,7 +352,7 @@ def __call__(self, *args):
             y = self.y[ma]
             yerror = self.yerror[ma]
         ym = self.model(x, *args)
-        r = self.cost(y, yerror, ym)
+        r = self._cost(y, yerror, ym)
         if self.verbose >= 1:
             print(args, "->", r)
         return r
diff --git a/src/iminuit/tests/test_cost.py b/src/iminuit/tests/test_cost.py
index a292c294b..53abe3245 100644
--- a/src/iminuit/tests/test_cost.py
+++ b/src/iminuit/tests/test_cost.py
@@ -123,6 +123,12 @@ def model(x, a, b):
     m = Minuit(cost, a=0, b=0)
     m.migrad()
     assert_allclose(m.args, (1, 2), rtol=0.03)
+    assert cost.loss == loss
+    if loss != "linear":
+        cost.loss = "linear"
+        assert cost.loss != loss
+        m.migrad()
+        assert_allclose(m.args, (1, 2), rtol=0.02)
 
     # add bad value and mask it out
     cost.y[1] = np.nan
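The net effect of the cost.py changes is that loss is now a property whose setter rebuilds the internal _cost function, so the loss can be swapped on an existing cost object between fits; the new test above exercises exactly this. A small usage sketch in the same spirit (the data, model, and starting values are made up):

import numpy as np
from iminuit import Minuit
from iminuit.cost import LeastSquares

def model(x, a, b):
    return a * x + b

# made-up data from y = 2 x + 1 with one outlier at index 3
x = np.linspace(0, 1, 20)
y = 2 * x + 1
y[3] = 10.0
yerr = 0.1 * np.ones_like(x)

cost = LeastSquares(x, y, yerr, model, loss="soft_l1")
m = Minuit(cost, a=0, b=0)
m.migrad()  # robust fit; the outlier is damped

cost.loss = "linear"  # the setter swaps the internal _cost in place
m.migrad()  # refit with the plain chi-square loss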