diff --git a/doc/conf.py b/doc/conf.py
index 27bb9bb55..8f32f6e5a 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -54,6 +54,7 @@
 autoclass_content = "both"
 autosummary_generate = True
 autodoc_member_order = "groupwise"
+autodoc_type_aliases = {"ArrayLike": "ArrayLike"}
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
diff --git a/doc/notebooks/cost_functions.ipynb b/doc/notebooks/cost_functions.ipynb
index af4b3363d..a82de5102 100644
--- a/doc/notebooks/cost_functions.ipynb
+++ b/doc/notebooks/cost_functions.ipynb
@@ -24,7 +24,7 @@
    },
    {
     "cell_type": "code",
-    "execution_count": null,
+    "execution_count": 12,
     "id": "lucky-canvas",
     "metadata": {},
     "outputs": [],
@@ -34,7 +34,9 @@
     "from numba_stats import truncnorm, truncexpon, norm, expon \n",
     "import numpy as np\n",
     "from matplotlib import pyplot as plt\n",
-    "from scipy.stats import multivariate_normal as mvnorm"
+    "from scipy.stats import multivariate_normal as mvnorm\n",
+    "# accurate numerical derivatives\n",
+    "from jacobi import jacobi"
    ]
   },
   {
@@ -47,10 +49,21 @@
    },
    {
     "cell_type": "code",
-    "execution_count": null,
+    "execution_count": 13,
     "id": "destroyed-fusion",
     "metadata": {},
-    "outputs": [],
+    "outputs": [
+     {
+      "data": {
+       "image/png": "⟨base64-encoded PNG of the rendered figure omitted⟩",
+       "text/plain": [
+        "⟨matplotlib Figure repr⟩"
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "xr = (0, 2) # xrange\n", "\n", @@ -79,10 +92,21 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 14, "id": "b62cbb46", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiUAAAGdCAYAAADNHANuAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8pXeV/AAAACXBIWXMAAA9hAAAPYQGoP6dpAABcw0lEQVR4nO3deXxU9bk/8M+ZzD5JJjsJEhZBiEIAAQMBBQQUwqJSxQUQsC69vXhvra1WX7Zar/oD635br1staAEVF7RVwQUELIsIiIIVJAiCmkBISCaZSSbJzPf3x8k5mTWZmWSSk+Tzfr3SIZMz55xhpDw83+d5vpIQQoCIiIiok+k6+waIiIiIAAYlREREpBEMSoiIiEgTGJQQERGRJjAoISIiIk1gUEJERESawKCEiIiINIFBCREREWmCvqMv6PV68dNPPyEpKQmSJHX05YmIiCgGQghUV1ejd+/e0Onik9Po8KDkp59+Qm5ubkdfloiIiNrBiRMn0KdPn7icu8ODkqSkJADAhZgJPQwdfXmink2K7l83adkpuGTxJHz00hZUlFZG9iLhjf6+iEjzGtGAf+F99e/xeOjwoERZstHDAL3EoISoQ0UZlDhOOvHmn94HgCj+vDIoIeqWmnbKi2fpBQtdiYiISBMYlBAREZEmMCghIiIiTWBQQkRERJrAoISIiIg0gUEJERERaQKDEiIiItIEBiVERESkCQxKiIiISBMYlBAREZEmMCghIiIiTWBQQkRERJrAoISIiIg0gUEJERERaQKDEiIiItIEBiVERESkCQxKiIiISBMYlBAREZEmMCghIiIiTWBQQkRERJrAoISIiIg0gUEJERERaQKDEiIiItIEBiVERESkCQxKiIiISBOiCkr69+8PSZKCvpYuXRqv+yMiIqIeQh/NwZ9//jk8Ho/6/YEDB3DJJZdg3rx57X5jRERE1LNEFZRkZmb6fb98+XIMHDgQkyZNatebIiIiop4nqqDEV319PVatWoXbb78dkiSFPc7tdsPtdqvfOxyOWC9JRERE3VjMha5vv/02KisrsWTJkhaPW7ZsGex2u/qVm5sb6yWJiIioG5OEECKWF06fPh1GoxH//Oc/WzwuVKYkNzcXk3E59JIhlksTUaykDmi4E974X4OIOlyjaMBmvIOqqiokJyfH5RoxLd98//33+Pjjj/HWW2+1eqzJZILJZIrlMkRERNSDxPTPphUrViArKwuzZs1q7/shIiKiHirqoMTr9WLFihVYvHgx9PqY62SJiIiI/EQdlHz88cc4fvw4fv7zn8fjfoiIiKiHijrVcemllyLG2lgiIiKisLj3DREREWkCgxIiIiLSBAYlREREpAkMSoiIiEgTGJQQERGRJjAoISIiIk1gUEJERESawKCEiIiINIFBCREREWkCgxIiIiLSBAYlREREpAkMSoiIiEgTGJQQERGRJjAoISIiIk1gUEJERESawKCEiIiINIFBCREREWkCgxIiIiLSBAYlREREpAkMSoiIiEgTGJQQERGRJjAoISIiIk1gUEJERESawKCEiIiINIFBCREREWkCgxKiHiY9JxUL/3AV0nNSO/tWiIj8MCgh6mGKbpqKwtmjUHTT1M6+FSIiP/rOvgEi6ljr/7rR75GISCsYlBD1MOUlZ7DqgTc6+zaIiIJw+YaIiIg0gUEJERERaQKDEiIiItIEBiVERESkCQxKiIiISBMYlBAREZEmsCWYqAeRdFLcryE8cb8EEXVTzJQQUZC0nFQs/P2VSOMoeiLqQFEHJT/++CMWLlyI9PR0WCwW5OfnY/fu3fG4NyLqJDNvnIJxs0dj5o1TOvtWiKgHiWr55syZM5gwYQIuvvhirF+/HpmZmTh8+DBSU/mvKaLu5P0XN/k9EhF1hKiCkocffhi5ublYsWKF+tyAAQPa/aaIqHNVlJzBqgff7OzbIKIeJqrlm3/84x8YM2YM5s2bh6ysLJx//vl44YUXWnyN2+2Gw+Hw+yIiIiIKFFVQ8t133+GZZ57BOeecgw8++AC//OUv8d///d946aWXwr5m2bJlsNvt6ldubm6bb5qIiIi6H0kIISI92Gg0YsyYMdi+fbv63H//93/j888/x44dO0K+xu12w+12q987HA7k5uZiMi6HXjK04daJKFpSQkLcryE87Akm6o4aRQM24x1UVVUhOTk5LteIKlOSk5OD8847z++5c889F8ePHw/7GpPJhOTkZL8vIiIiokBRBSUTJkzAoUOH/J779ttv0a9fv3a9KSIiIup5ogpKfv3rX2Pnzp34f//v/6G4uBhr1qzB888/j6VLl8br/oiIiKiHiCooueCCC7Bu3Tq88sorGDZsGB544AE8+eSTWLBgQbzuj4iIiHqIqPe+mT17NmbPnh2PeyEiIqIejHvfEBERkSYwKCEiIiJNYFBCREREmsCghIiIiDSBQQkRERFpAoMSIiIi0gQGJURERKQJDEqIiIhIExiUEFHcpeekYuG9VyE9J7Wzb4WINIxBCRHFXdHNU1E4ewyKbp7a2bdCRBoW9Zh5IqJorX9ho98jEVEoDEqIKO7KS85g1f+80dm3QUQax+UbIiIi0gQGJURERKQJDEqIiIhIExiUEBERkSYwKCEiIiJNYFBCREREmsCghIiIiDSBQQkRERFpAoMSIiIi0gQGJURERKQJDEqIiIhIExiUEBERkSYwKCEiIiJNYFBCREREmsCghIiIiDSBQQkRERFpAoMSIiIi0gQGJURERKQJDEqIiIhIExiUEBERkSYwKCEiIiJNYFBCREREmsCghIiIiDSBQQkRERFpAoMSoi4uPScVC++9Cuk5qZ19K0REbRJVUPLHP/4RkiT5feXl5cXr3ogoAkU3T0Xh7DEounlqZ98KEVGb6KN9wdChQ/Hxxx83n0Af9SmIqB2tf2Gj3yNFLj0nFUU3T8X6FzaivORMZ98OUY8XdUSh1+uRnZ0dj3shohiUl5zBqv95o7Nvo0tSskwA+HtIpAFRByWHDx9G7969YTabUVhYiGXLlqFv375hj3e73XC73er3DocjtjslImpnzDIRaYskhBCRHrx+/XrU1NRgyJAhKCkpwf33348ff/wRBw4cQFJSUsjX/PGPf8T9998f9PxkXA69ZIj9zokoalJCQtyvITyeuF+DiDpeo2jAZryDqqoqJCcnx+UaUQUlgSorK9GvXz88/vjjuPHGG0MeEypTkpuby6CEqBMwKCGiWHVEUNKmKtWUlBQMH
jwYxcXFYY8xmUwwmUxtuQwRERH1AG2aU1JTU4MjR44gJyenve6HiOIsLScVC39/JdI414SINCaqoOS3v/0ttmzZgmPHjmH79u2YO3cuEhIScN1118Xr/oionc28cQrGzR6NmTdO6exbISLyE9XyzQ8//IDrrrsO5eXlyMzMxIUXXoidO3ciMzMzXvdHRO3s/Rc3+T0SEWlFmwpdY+FwOGC321noStQJWOhKRLHqiEJX7n1DREREmsCghIiixmJZIooHBiVEFDUWyxJRPHA3PSKKGotliSgeGJQQUdQqSs5g1YNvdvZtEFE3w+UbIiIi0gQGJURERKQJDEqIiIhIExiUEBERkSYwKCEiIiJNYFBCREREmsCghIiIiDSBQQkRERFpAoenEWmFFP9/I+gslrhfA3HeidhbUxPX8wPc6ZioszBTQkRERJrAoISIiIg0gUEJERERaQKDEiIiItIEBiVEPVBadgoW3HU50rJTOvtWiIhUDEqIeqCiJZMwdsYIFC2Z1Nm3QkSkYkswUQ+0fuUWv0ciIi1gUELUA1WUVmL18nc6+zaIiPxw+YaIiIg0gUEJERERaQKDEiKKGLt2iCieGJQQUcTYtUNE8cRCVyKKGLt2iCieGJQQUcTYtUNE8cTlGyIiItIEBiVEBEC7Raxp2XYsuHMO0rLtnX0rRBRnDEqICIB2i1iLFk2U72vRxM6+FSKKM9aUEBEA7Raxrn95q98jEXVfkhBCdOQFHQ4H7HY7JuNy6CVDR16aSNuk+CcuMwf2RtGSSVi/cgsqSivjc5GEhPict4m3piau5wcA4fHE/RpEXU2jaMBmvIOqqiokJyfH5RpcviHqQbS6RENEBHD5hqhH0eoSDRERwKCEqEfhnBEi0jIu3xBRTLTaQkxEXVebgpLly5dDkiTcdttt7XQ7RNRVRFKfwhkjRBSNmJdvPv/8czz33HMYPnx4e94PEXURkdSnKDNGAGD1n/7ZIfdFRF1XTJmSmpoaLFiwAC+88AJSU1Pb+56IqAtQ6lNaai1e//JWfLbhS84YIaKIxBSULF26FLNmzcK0adPa+36IqIvzXbKpKK3C6j/9ExWlVZ19W0TUBUS9fPPqq69i7969+PzzzyM63u12w+12q987HI5oL0lEXQiXbIgoVlFlSk6cOIFf/epXWL16Ncxmc0SvWbZsGex2u/qVm5sb040SUdcQ6ZINi2CJKFBUY+bffvttzJ07Fwk+Y6Q9Hg8kSYJOp4Pb7fb7GRA6U5Kbm8sx80SBOmDMfILNGvdrRDpmfsGdczB2xgh8tuHLqDIq8Rozn5aTipk3TsH7L25C+Q+n43INoq6sI8bMR7V8M3XqVOzfv9/vuRtuuAF5eXn43e9+FxSQAIDJZILJZGrbXRJRt6O1jfZm3jgF42aPBgD8/f61nXw3RD1TVEFJUlIShg0b5veczWZDenp60PNERC1RimC14v0XN/k9ElHH40RXIuoy0rLtWPj7K5GW0/6jCCpKzmDVg2+iouRMu5+biCLT5r1vNm/e3A63QUTUuqJFEzH20nwAwKoH3+zkuyGi9sYN+Yioy1j/8laI+nousRB1UwxKiKjLqCitYoaEqBtjTQkRERFpAoMSIuqy0nJS41b4SkQdj0EJEXVZymyRmTdO6exbIaJ2wJoSIuqyOFuEqHthpoSIuqzA2SJcziHq2hiUEFG3weUcoq6NyzdE1G1wOYeoa2NQQkTdhrKcQ0RdE5dviIiISBMYlBAREZEmMCghIiIiTWBQQkRERJrAoISIiIg0gUEJEXWotGw7Ftw5B2nZ9s6+FSLSGAYlRNShihZNxIQ5o/C7529mYEJEfhiUEFGHWv/yVlSfcSIxxYaiRRNDHsNsClHPxKCEiDpURWkVHr7lBWx/dy/Wv7w15DFFiyZi7IwRYYMWIuqeONGViDpcRWkVVv/pn2F/rgQr4YIWIuqemCkh6ubSc1Kx8A9XIT3KnXPTslOw4K7LkZadEp8ba4EStFSUVnX4tYmo8zAoIermim6aisLZo1B009ToXrdkkryEsmRSnO6MiMgfl2+Iurn1f93o9xjx61ZuUR/TslNQtGQS1q/cgorSyva+RSIiAMyUEHVLvks25SVnsOqBN1Beciaqc1SUVmL18ndQUVrJrAkRdQhmSoi6IWXJBgBWPfBGm8/nmzUhIooXBiVE3VCsSzbhKFmTSKVl21G0aCLWv7yVxapEFDEGJUTdkLJk01mUOSMAWmz9JSLyxZoSIopYpG3C61/eis82fMk5I0QUFQYlRBSxSAteOWeEiGLB5RsiihgLXokonpgpIaKI+bYJd4a0bDsW/v5KpEU5nZaIugYGJUTUqs4cOe+raNFEjJs9GjNvnNKp90FE8cGghIha1dHD09Ky7Vhw5xykZdv9nl//8lbsfHcP3n9xU4fcBxF1LNaUEBEAtDhKvqNrScK1FFeUVmHVg292yD0QUcdjUEJEAJqzIQCCBqVFOzytrZRWYrYUE/UsDEqICED7ZkPaOtFVaSkmop6FQQkRAWjfbAgnuhJRLFjoSkTtrjMnuqblpLJtmKiLiipT8swzz+CZZ57BsWPHAABDhw7Fvffei6KionjcG5FmSHpD3K+RkJ0V92u4hvWO+zX0tR6cBLDyw38D2RnyVzsy7DvS4s9n/XIGxs4YAclojDlL462piel10RAeT9yvQdTVRJUp6dOnD5YvX449e/Zg9+7dmDJlCi6//HJ8/fXX8bo/ItKo9LRELJ4/HulpiZ19K3647w5R1xVVUDJnzhzMnDkT55xzDgYPHoyHHnoIiYmJ2LlzZ7zuj4g6QSQBx+wZwzF+7CDMnjG8/a6bnojrF12I9PTIAp1Q80y47w5R1xVzTYnH48Grr74Kp9OJwsLCsMe53W44HA6/LyLStkgCjnc3fIXtnxXj3Q1fRXTOSAKOmbNGonD8IMycNTKicyoFtUWLJkZ0PBFpW9TdN/v370dhYSHq6uqQmJiIdevW4bzzzgt7/LJly3D//fe36SaJqGMpgUZLAUd5RQ1eWrM94nMqAQcA/P3lf4U85v339vk9tobzTIi6F0kIIaJ5QX19PY4fP46qqiq88cYb+Otf/4otW7aEDUzcbjfcbrf6vcPhQG5uLibjcuil+BcPErWHrljomtYrGTOuK8SGV3ag4qScoeyoQtdQ0tMTMXPWSLz/3j6Ul9e0+nw4rRW6tgcWuhIFaxQN2Ix3UFVVheTk5LhcI+pMidFoxKBB8r92Ro8ejc8//xxPPfUUnnvuuZDHm0wmmEymtt0lEUVtxnWFGDttKABgzZMfdPLdAOXlNUEZkvT0RNx9z2VITjYDCJ9BIaKeoc3D07xer18mhIi0YcMrO/wetWjmrJFISjLD5WqA1WpEenpiRNkSIuqeogpK7r77bhQVFaFv376orq7GmjVrsHnzZnzwQef/K4yI/FWcdGgiQ9ISpXbEajVixMi+cLnq1WxJtMs6RNT1RRWUnDp1CosWLUJJSQnsdjuGDx+ODz74AJdcckm87o+I2ll6WiJmzxiOdzd8hfKKzv3LXlnSSU9PhMtV
71fgGklhLBF1L1EFJS+++GK87oOIIhSqgDUaSrsvgKi6Z+IpVL2JEqDs2H4Y1y+6kBkToh6Ae98QdSFpvZJx51PXY/yM4ZhxXfj5QGm9kjH/tulI6xVcIa/MF9m2s1iTE1kVSqBSOP6cqGaXEFHXxaCEqAuZcV0hklKsqKl0tVjAqnTehApclPkiE8YNaveJrPHw/nv7sGN7ccSzS4io62pz9w0RdRzfjpqWlm4i6byJZECaFoRa2iGi7inq4Wlt5XA4YLfbOTyNupSuODwtlM4cntZeODyNqHN0xPA0Lt8QkaZFu0kfEXVdXL4hIs1SJr4mJXHiK1FPwEwJEXW6cNmQq+YVoF+/dNTVNrDQlagHYFBCRDFLT0tsl7ZiZVCab9tvenoizj3vLADA11//yBklRD0AgxIiilhgEKIMYmutrTg9PRG3/GIKfvEfU0LWhoRq+505ayQsFgO+/74cb76xK+y507LtWHDnHKRl22N7U0SkGawpIaKIBU6DjbSteOaskZg67TwA8NvfRtHSRFdlkquyF86HpadRUVqlHle0aCLGzhgBAFj9p3/G/uaIqNMxKCHqRto6gr41vkFINHvovP/ePlgsRkgSQtaGhNp8LzBQUZZ4EhZN9As+1r+81e+RiLouBiVE3YgyyRVAXHYIVqbBAsDi+eMj3kOnvLwGzz+3KezPI9l8TwlmPgwIPipKq5ghIeomGJQQdSORTHJtL+05EdZ3qSYcJXNi8Fm6IaLuhYWuRN1EvJduAilZk9aWbiI6V1PA0R4dNix8Jeq6GJQQdRMtbcLXkyiFr0WLJnb2rRBRlLh8Q9RNdOTSTUfzLYRtLQfEwleirouZEqJuouKkA2ue/KBDlm7aW2v721x5VQFmzxmJK68qaPVcSuFrBWtPiLocBiVEFLNIJ7q2FnSEmujqS5L8H4moe+LyDRHFLHCYWjittfyG6r7xXbJ54/VdcLnquf8NUTfHoISoh0jrlYx588dHNOwsUpG2BbfW8htqomtgIKP83NCG+yUibWNQQtRDzLiuEBdEOOwsUr7D1Fo8LkTQ0ZpIZpcQUffCmhKiLiytVzLm3zYdab2SW3wOkLtytn9W3GJWo712/W0PvrNLfDf04/wRou6LQQlRFxZqNsncmyajaH4h5t402e/YipOOVoed+e7621KA0tHBi7Kh35Sp53H+CFE3xuUboi4s1GwSqel/I2lUCdxUz7dGpKUi1kgLXIHQm+1Fy3dDP84fIeq+GJQQdWHKbBJfb/11M1xOd0RD1HyDCyUQCRWgBIpm35tINtsDWg5efDf04943RN0XgxIiDYtlP5tQgUo4LWVGAotYA7MqkRbLRlqwGmnwQkTdF4MSIg1TakYARBxoRMM3uPANUNLTEnH13DEQAF5ftxvlFTVhsyoA/IKVoGtE2HnDbhsiYlBCpGEduZ+Nb4DyyxsnY07RCNTVNaC2th4vrdkeNqsCIGx9STT1JJEEL+npiZhz5xysf3krx8gTdUMMSog0LJqlmPYkANTVNeDUaQcsFiPS0xLDZlUUoepL2ntJZuaskRg7MhcAsPpP/2zz+YhIWxiUEJGf9LREWMwG/FRaiSNHy3D+8L5qtkQRWFPiV3vikx2JZUmmpezK++/tQ0JpBTtwiLopzikh6gGUgWqRzBWZPWM4JowbhOxedtTVNbQ6cC2Q7+Z6vgPQYnl9oPLyGu4ATNSNMVNC1AMoBbMNWUmtds28u+ErWC1GvyLXSCjdOTu2fAsg9oJVFrwS9VwMSoh6AKVQ9t29R1s9tryiBs+8uBlA8+TWSDbxU4pfdQ0i4vqRUEs1kXbrpGXbUbRoIoteiboRLt8Q9QBKwWy0uwP7jp1vzbsbvsL2z4qxY/thXL/oQqSnt75U1NJSTWuKFk3E2BkjNDt2Pi0nFQt/fyXSclI7+1aIugxmSoi6qVgGrwWKZnKrUvx6w7zCiDtu2rJUoxS7arXodeaNUzBu9mgAwKoH3+zkuyHqGhiUEHVT7TF4LdTk1sDJroGiCTQiXaoJpaK0StNtwe+/uMnvkYhax6CEqJtqz8FrvoFIa5vxtSXQ6E4qSs4wQ0IUpahqSpYtW4YLLrgASUlJyMrKwhVXXIFDhw7F696IqA2UOpJYl258+daWKLUj0bQJRyM9PTHimhQi6l6iCkq2bNmCpUuXYufOnfjoo4/Q0NCASy+9FE6nM173R0Qa4BuIKEs60RbNRmrmrJG4aOJg3H3PZZ0WmLBIlahzRBWUbNiwAUuWLMHQoUMxYsQIrFy5EsePH8eePXvidX9EpGFKy3AkQ9ki9f57++Bw1CEpyRxTV057UIpUZ944pVOuT9RTtakluKpKng2QlpYW9hi32w2Hw+H3RURAWk4KFv7+Z0jLSemU60cTUIRrDZ43dwzmFI3AvLlj2u2+ystrsOyhf+Bfn37baQPU3n9xE3a+u8evSJXZE6L4izko8Xq9uO222zBhwgQMGzYs7HHLli2D3W5Xv3Jzc2O9JFG3MvPGKRg3a1Sn/Wu8tRkkvkFLuDoSKeCxvcQynr69pGXbMfPGKXj/xU2oKDmjPs/sCVH8xdx9s3TpUhw4cAD/+lfLVfZ33303br/9dvV7h8PBwIQInd8yGmoGSUtdNqE6bdau2w1XbX2rRa8tbbKnNUWLJmLspfkA/OeLdPbnRdQTxBSU3HrrrXj33XexdetW9OnTp8VjTSYTTCZTTDdHpHVpOSk+/6qujPo1qx58K7432IJQM0h8AxEl0Ni2sxiL54/Htp3FmDBukN98klDn8KUEI1arESNG9gXQ+kC1zrb+5a0Q9fVBwQdbfIniL6rlGyEEbr31Vqxbtw6bNm3CgAED4nVfRF1CLEswnb1sowhVUxKqy2bCuEEYP3YQlt58ccQj5xXKGHkhgB3bi7vEJnsVpVVY9eCbfks3RNQxosqULF26FGvWrME777yDpKQklJaWAgDsdjssFktcbpBIy2JJ6XfkMkCoUfPKEo3VYsTI4XL24qU129Xnt+0s9pvY6psxUTIl4aSnJeKyqfnqMo3vdNdYlm260rIPEbWdJIQQER8shS5nW7FiBZYsWRLRORwOB+x2OybjcuglQ6SXJupUkj7+/60mZGe1+znn3zYdY6cNxWcff401T34A17DeWDx/PMaPHYQvvjqO2qZ6kPKKGvV5l8sNq9WE7Z8Vt7g0E8ri+eMx4YKB2LG9uE07BSuuX3QhCscPUs+Xnp6IOaP7+u0MHI/dgr018Q+AhMcT92sQtadG0YDNeAdVVVVITk6OyzWiypREEb8QUTuLZYO9UKPmfQtcfQegRZMRCefdDV9B1yCiWqZRlniA4HqTwH10Zs4aibEj5UJ5Zd8bZbdg3+eIqGuKKlPSHpgpoa5IC5mSwKxHLFzDeod8vrVN9qKhr5UzAJEuvUSzRMNMCVHn0VymhIg6T3tusBeotU32ohFNx020NSPl5TVB2RCt7xZMRJFr00RXIuo47bnBnkLpwNm2szjqTfaU1w46O8uviyeajhvl2M4aJ09E2sJMCVEHiGWeSUcIzJAogUZLyzjKUo/
FYsT5w/tiZH4urFYTLBYjamvrsWPLtwBCd9wEZkaUgGXH9sO4ftGFMXfZhFrCiceyDhHFFzMlRB0g3rNJ0nolY/5t05HWKznk9+EEjo9vbfS87zESgO2fFWP12p1yx47ZgPFjB6Fw/DlhR8QHZkaUcfKF489B4fhBuPKqAly/6MKodwdWil2LFk1s8Tki0jZmSog6QLxnk8y4rhBjpw2F1WaCy+mG1WZCfqGcAQlVFOtb2OpbQxJq9HygwGN+f8csJCdZ4KprwPbPirGhheWawG6awOetVmPYThwly/Jh6emgzMf6l7f6PQLAjvX7MPzCIdixPvz9EJG2sPuGKAJa6L5pidIubLGZMLxwEL7aUYxapzuofdgycbDf0su+r46re9fE0nWzeP54TBw/GI7qWjz4yHsor6hBL6sl5oFnkcws2fXWrogKWxfcOQdjZ4zAZxu+jLoQlt03RMHYfUNEEVGKYNN6JYcMRhTK0su+r45j+2fFsFiMEXfdhGobDjXzJHDuSDQdNspyTihKNuVDn2xISwKzJ6wxIdI+BiVE3YgSnIQTGESkpyWqU12BlueVKAGNUtDqO4be9zXvv7cPVqsRFotRDUjCLckoIglclIDFEGFAEdgqzCFrRNrHoISoGwucAhu4q2/g97NnDMfE8YMxMj9XXY5RKIGLNSC7EtjBU15eA5erHoXjB6G2tj5sHYkvJXCxWo1wueqDghPfoCXWhuhQdSdEpC3sviHqxpQC2BnXFarPBe4O7Pv9uxu+gqO6FslJlqAOHCWAWbtuN/Z9dVzOhDS9Zvtnxdi2s1g+T3oi3n9vH3ZsL8aO7YcjWrpRjhcCIeeWtMc8EyVzwqUbIu1ipoSoC2ttPxxl+uvOjw5g/m3TsW7v0aDMhvK91WKEq7YeT7/wSat73wwe1AvJSRbU1tbjpTXb8dKa7eqGfroGgb+//C/8/eV/qcWpQPilG6B5aWbQoF4YPCQbO7Yf9vt5JNkWIur6GJQQdWFKJgQI3fqr1Jgo++Y0ZCUFtfQqj5EWvc6eMRzJSRY4qmv9Ahfl174twa0FE4G1JIXjz2lqCz4HxcUn1eN8C2DZs0fUfTEoIeoCwmVEWtoPx/c1ys/f3Xs0qI5E8fEn/0Ztbb26DBOuTTjcLsPKeZUN+YCWu2mA4E4dZkSIejbWlBB1AaFqQ4CW98PxfY1yXKggQ1m+mTBuEF5asx2XFY3ANT+7AIsDrqVQgo9I55qkpycGTWlVntux/bDf/jhKEBPLqPlopWXbseDOOUjLtsf9WkQUGWZKqOuT4h9bJ2Smx/0ax5b0C/uzFzwnUHk6Ba97TqCsheNae81Di19Wf25OyET/5Lkocf4FXtskZF+4Dr8qLMOE3pfCbNJhwhQJnrxXonoP5oRMzEifDdSuBbyn5Cdtt0Iyp2DRVQJwrgh4rgZw3ohFRQEn0mUBlqv9z9Ok4J7/iOqewllw9UWYXpAHjOqNp9Z+6vezjHcOtss1WuI50wEFt8Ib/2sQtSNmSoi6gLJqJ/5v806UVTtjfk1mkg15qbfAnJAJAOifPBc51onIsU3CwTPPo85TBgD48vRylNftw5enl0d9n/2T50IyXywHFIratRDuXYAuA0j8nRxwuDdCeJ2Ae2PoE1mu9j+PLguw3So/thvJ54uItICZEqJOkNYrGTPmT8CGNdtCLr3Ew7zR+cixyqOhjznWQS/ZUFa7G8cc6wA0Z06OOdbhXyW3+D1X4tyCHNskHHOsU4MXX+aETAyyL0SCZJYDkNq1cgBhvUE+QLJAslwGiFoIIQdJks4GYZoKNH4TfLO1ayGaHgGoQYq8J0Zju/x+rP5wD1x19Vi3dX+7nI+I2o5BCVEnmDF/AsZeMgwAsOaJ9XG7TmaSDUsKRwES8M8vD2LqyGocc6xD/+S5yLSMRolrqxpkKJkTADh45nn1ud7WKeibeBkavTV+P1OYEzIxJushJBsHQAgA3mL5B5arIVlmA9BBiHpAuCEaj6uBhl/QEbhc4z0FOP/SfBG/IOVn7fJ7c7rSiRf+sbNdzkVE7YNBCVEn2LBmm99jvMwbnY9Zw/PkbwQAfA4AanZEeWzpuQzzGOgkA+q9Dr+fKVkUvWSDKSENHtEAd2MFzPoUiKYAQ0g2wDASki4ZwnMCqPqt/GLL1fLSjRKI+GZCfIMRhU+QkpFiw9yJ+Vi3dT9OV0a+nEVE2seghKgTVJx0xDVDonh9z37YjAa1dKK3bSoyzGOw+9Q9QRmPOk+Z33NK0LG//DHk2CahxLlFXd6p85SpmZWy2t1weyogRCNO1+1Gir6qOeNR8zCgy1KDFHhPNRW5XgxhLJCXcIDg5ZoWzJ2Yj4tGnA0AzHQQdTMMSog0KjPJhnmj8/H6nv1RFbj6Kqt24pEPP1XPN/v882HSpWJM1kPYfeqekPUhisDlnLzUW/y+V+pSAKiByzHHOgwyH2s+SaguGiUAcW+Ua0qUn/lmSFrovlFqQFgLQtT9sPuGSKPmjc7HxUPOxrzR+e1yvrJqJ3afugdu7xkYE+zonzy3xeNLnFvQ4HWhxLkFgLyUU+Laqi7h1HnK0CicyLSMCergUVlvgGS9DrD/b3PnjBKANH4jPwYEHQCCu298+NaC3HzZOGSk2KL8nSAirWKmhEijXt+z3++xvVS6vwEg/OpDQsmxTYJBZ0WObRKq6g/5Le/4duUAaPlckgWSPrd5CSdMBsRPBMs5XMaRpeekouimqVj/140oLznT2bdD1CYMSog0Spkz0l4yk2wYk/UQjAl2/OSU54Pkpd4Sts3Xt/DVbhyC/PTfYH/5Y6iqPxSyUyck14qmNl74F7RKNkA4wwcnLS3nNPFdxunJxa9FN01F4exRAIBVD7zRyXdD1DZcviHqpjKTbPjPyeOQmSQvb8wbnQ+jLhn1niq1LTjHOhGD7Av9hqoplMxInacM+em/QZo5H2N7PQZzQmbQ0o7CnJDpP+RMKXateVj+de1aiLpPACDs8kxIPss5GSk23HzZOAByhuR0pVPNmsyd2D5LXYq0bDsW3DFb06Po1/91I3a8uxfr/xpmEB1RF8KghKiLCAwyWhNYk/L6nv34ybVJLXBVakQAgRzrRPRPngtzQmbIAGV/+WOoayyHx1srBzM+Szu+BtkXQrJe0zw0TaFMZAXkDIhrhRycBC7PhJvcqgQztWtDBiDrtu7Hp19+p2ZPlMClpXqTSI4puv4ijJ0+HEXXXxT2mM5WXnIGqx54g0s31C1w+YaoC8hMsuGRK4tgt5gBIKJlncCalLJqJw6ead77RsmEmBMy0ShcavbEt21YWdapqj+ErT/doLYEmxLSkGEeE5QpQfNijT/L1ZDMl0AYC+RZJYHLM37HhZhXoksHjBMAXQa27DsCwL/7xrf4NSPFhgdvLkJqkhWjh/TB719YH7SkoxyTbJN/P8PVpKz/+6d+j0QUX8yUEGlAWq/kFrMgS8aPwtlZaXDV10dc+Brpfjm+yzTHHOtQ76mCUZcc1J3je1y4TElx1WoI12uAa4X/RWrXQnhrIekHBmdRAo/zzaAomZOk+yAZ8i
FZLsOkkQPVZZtQ5k7MR7LNDLNRj2SbOeSSjnKMw1kXsrVYWbYBgNWPvIuK0g7YPI+IGJRQz5Kek4qFf7gK6TmpnX0rfmbMn9By+68AvF6BfSdKYp5ZAiBoeSbw+zpPGXafugenandCL9nCHhfYHqz8HEBwm69SpNp4CEAru9YqGRTl9Uotiec4hLcMou7jVueTrNu6H5/sLcYd//dPfLK3OOTxyjGhsihA11i2IeqOuHxDPYpWOxU2rNmGylGpYbMgK3fshbO+oc3twYFdM6G6aOT5Iy7kWCeiUThDHue79JOXegv0khWZljHyReqf8L+osiTj3iVnUSKY2qpSWoMlGyRRAeE5DsCEmy8bF7bTxncp59vj8vJTYHdOa/vecNmGqHMwKKEeRelQ0FqnQsVJR4t1Ii21B0cz+TVwf5tQ+920dpzvbsLNo+b3oMS1FSXOLchLuVXe18Y0VX6UbPLOwa4V/hkU/blA0t1A9bLQOwUDzZkTXZa8u3DtWsyd+Keo55P4zjRZt3V/q+3DFaVVWP3IuxGdm4jaD4MS6lGUToWuINJgY97ofFxy7iAU9O+DO95c3+KxgfvbBH4fyeuGpf0auUkzYE7IQKPXhbLaPSiuWgUAKOj1J0jGsyCMF0LSGZr3t6n7JHgeSdLdkAzDIZLuBs4skp9TlnqUoMZ31+CmOSehCl19hZpZ4jvThEPXiLSLNSVEGhXpmPnX9+xHVW0d7FZzTCPpQ7UBN88wWRCiRVgAAkg2noNMyxg0Cqe6QV+ioQ8gWYDGQ3J2pPGo/BhYuKrLAqqXQTR8AzR+19wCbL1BbilOuheS+RJ5PH3i79RgRTJfjKJxeX73H9jaG6plWFmuOV3pDGofJiLtYKaESKMiHTNfVu3EHW+ux5LCUbCZDMhMsrWYLfEdEZ9jmwS9ZEOmZTSA5roSZblGL9mCWoSLq1ajUbjU1/su7Zh0GRhgGQjUvgqYpkIyFfhnSQJbfuu3yd97r/ZpAdYBkrGpWycX0J/VVFdihfA2wGIyYvSQXABypiMw89Hahn2t1ZMQUedhUEKkUdGMmS+rdsJZ34CLh5wNp7uhxdcpWZAM8xgYdFaU1e7266QBoLYHD7IvRKPXpbYI+xa5AvL8El9Jxv6QdFZ19191/xqfZRm/PW0C97hxrYAwDIWks0PU74Jo+EI9t2SeIr/Efdwv0xH4qGRDIhk735PH0xNpEYMSom5CyahsOngE/zl5nPp9YF2KEnz4ZjrqPGUwJ2RiWNqvAQgUV61G/+S5yLSMRlntHjQKZ1CRa+B+Of2T58KkS4WQrICuablHyX4k/g6SZba8503Nw80vChyi5j0FVP22efM+JcOiy5LPaxiG1CQrAJf6klCZj0jrRlhfQqQtUQclW7duxSOPPII9e/agpKQE69atwxVXXBGHWyPq/tJ6JWPG/AnYsGYbjrXxXEpm5T8nj8PFQ85Wn1d+rWRPAjMdduMQjMl6CNX1x9A7cRIgoE54BeAXgOSl3hJ2I75jjnXIMI9BsmQHzFMgvGWhp7aG47vpXuDrvKcAbxmkhCxMPj8Frrp6uOoawgYSrS3hRHtcWrYdRddfhPV//5SD1IjiKOqgxOl0YsSIEfj5z3+On/3sZ/G4J6IuwzeoqDjpiPp1FpsJw8efAwDY6zjUyqsi8/qe/bCZDLAZDfjnVwfV58KRN9sbBp1kwInqDQCEGoj4Bh7mhEzoJSvKavcEtRADzYPXZmTeAOiHyB00CtcKtaVXFdgSbL2hOZviWtE8+VVpJa5dCyHZsGH3GNS661sMJCKtG4n0OGWYGgC2ChPFUdTdN0VFRXjwwQcxd+7c1g8m6uZmzJ+AsZcMw4z5E2J6nSQBn310ABvWbGu3eyqrdsLpbkDBgFxMyRuI1/fsx7zR+UEj7JWum0Nn/oaKugP48vRyHKh4Ql26CdyUb5B9AXITiwCIoKUbRZ2nTM5o6KxywOG7W3DgpNemlmAk3R18IsvVkCyzIVlmN+8k3LTj8PK/b8RTaz8NuZ9NaxvsxWr93z/FZx981eIwta6wozCR1sW9psTtdsPtdqvfOxyR/2uSSOuUYCLaoML3dc0ZloyY7iHUPBPfzh2ltTiQ75TWf5Xc4ve80nGzv/wxnw4bCZKkQ4Z5NIal/RrFVatCByfujRCWuZB0Jgj7o4DzOcD2i+AhadXL5Bkl1cvk7wOyKUJqCi7CTIANLFJdcOkoTC/Ig9VswFNrPw15TCQyUmxYcMdsv6WaSIapMZtC1HZxn1OybNky2O129Ss3NzfelyTqMBUnHVjzxPqolm7a8rpQWptnsungETjd9fji+E9+M0dKnFvQ4HUF7fTruynfiIy7MCD5SgyyL0Rx1So46o/Cos9GbtKMoA37VKapkEQNhKSHpEsBkv8ndEak8Rt5x2DTVDmj4ptNacqKoObh4KFrTYLnkUg+X+GOad3cifkx7XsTSTaFiFoW90zJ3Xffjdtvv1393uFwMDAhCpDWKxkzmzpmlGxHpBNdQ80zCcyO2ExG/GJiAXKschB08Mzzfjv9+rb2KrUhcjdNOmz63lCWbHafugeD7AsASCHrSgA0t/k2fAFh+wXgeg7Cek1zRsRX4NySKAQWqa7+cA9cdf61JpEWsgae15p1OurggqPpidou7kGJyWSCyWSK92WIurQZ8yfg/IAuGd/AIpp9cTKTbLAZDdh17IRfoLLp4BE8dG21X1eNXrJBL1lhTsj0W4rx3XDP7S33C0CUzpxwdSVqxsN2qzxvxHqNnBEJlfFwb4QwFvgXxQJy5iSw0DVAYJFqqPkkLRWyhlvaOV3pxOqXGFwQdQaOmSfSgA1rtuGTQ9/5BRGv79kf9Fwk5o3OR8GAXDjdDSirdqpBy8HSMnX4GdBUlAqB3MSipuxHMGWImlL4qtShKN8Hj6D34d4IISVC0mU0F6sGMk2FpLP7F8UC6rh5yXKZHJwoo+l9hCpsjWa5JpalHSKKr6gzJTU1NSguLla/P3r0KPbt24e0tDT07du3XW+OqKcItUtwNBNdfYVazlGWgswJ69VBaf2T5yJBsjaVYEihTwb/gljfLIvyvF6yNh0pNW3M15R1ME2FJJwQ3sqwxaqoXdu0aV+GXBTrm1ERtRCNJ+Qzh1jiWXDp6KbCViOeWrsVGSk2WM1G7Dl0Ql2uaanQ1Xdph5NdibQh6qBk9+7duPjii9XvlXqRxYsXY+XKle12Y0TUsnA1J6GCGWUpSGeRR8UrAUVZ7R4cdbwZvj4E/oGIwpSQBr1kQ0Xd10gzj4BN3weQvPIP9ZXycoxkg3BvC7v8AqB5gqv9Uf/AJLATJ3DGifys+pWRYsODNxch2WbGJ3uL1cDCd2JrS0s7N182jpNdiTQg6qBk8uTJEELE416IKAqR1pwAzVmTgWf7BxjhakMCx8krQ9SUia7KvjkNXhf0OiucjT+gou4rAELOahgLIOls/pvxhaMGJv8LST8QwnqD3HWjZEUClm0Uqz/cC1ddgxpsJNvMcDjrwha6tjRSPpaCWCJqf9z7hqiLen3PftiMhqCdgX0zKACwZPwoQ
AArd+zFbf3kpRulg0YRuHOwSZeB3rZJ0EtWHKh4Uj0ucN+cUPvnDDI0ApIFQtSGXrbxHSevBCzeU0DDF4D+rOBj7Y9C0qUELd/4Zjp8g4rAotVQxwTizsFE2sCghKiLCrczcGA78Kz8PACAs74BgFwjkptYpMYkjcIpByGJk5BlKUSCZIBHNISsNfEtfA275GM4H5I+F6Lu4+bgA2j+teVqSOZL5a4bpYakKRsiat+Vl3sU1hvk7Enj8fB1KYgsqGDgQaR9DEqI2iDWvW/aS2BRa2aSDTaTAbuONrcD20wGQMjH3NZPaQW2Qg44BHKsE+UgRABV7sNwe0/7ZUAAwG4cgvz032B/+WPITZyJ3KQZ0Es2NAqn3wZ9/ZPnQtLnApIFMAyDlJClTmZV97Wp+0fTxNcEealGOAHJBslUEGa5xws07Gt9GYiIujy2BBO1Qax73wByQDP/10VI65Uc8/WVolZl6Wbe6HwU9M+Fs765HXjl9r1w1jcg3WZFXqo8Tv5AxZPqPjdltbvhqD+MEzXrcajyBRw88zyq6g+p7cPmhEyM7fUY0sz5yE//DQAh15c2bdxX4tqKY4516oZ9ou4jCNdrQMMBAHrAPAswjof6fzdqV045ALmzBoAckARmQ1wr5HMp2RNdVov728S6/008980hosgxU0LUBrHufQM0BzQAsOaJ9TFdP7ADJ1Tm5MmrZyE3PQUTBvZFjrUSANTC1TpPGRqFC5mmMShxbfUrelXqTPSSFR5vLeoay3HozN/QyzoOJ2o2oLhqdVARbKZlDNCwUa790GVBmC6ElJAN6JIgGvarwYUAgjtrlGUc263BSz5KlsRydYtdMkoxa7rdiv7ZaXj8tS349niYIW8hXqecNy3bjqLrL/Lb/4aI4o9BCVEbKHvYxKItAY1i3uh8FA0bjLkjz8N/vfpPHCwt8+vEmTc6H/3SU5FoMuJ4RSXMSc1ZDaUuJLDltzkYsSHLOg6NXhdO1m5HcdUq9E+ei0zLGFTUfY0xWQ9hf/lj6oh6ZVnIbrAB+nPlPW0c90LY/gtoPAQ4n24OLnxHyjfVmTTXm1wi15s0fC0v6Ug2eYmndi1QuxaffjkmbJeM8vy4of0wODcLt18zCf/xyBut/j4GFsFycz2izsHlG6Iw0nNSsfAPVyE9JzUu5w/clC8zyYb/nDwOmUmtLyEox246eARmvR4ZSTb8+do5Qa/ddPAI3A2NqHHXo8JZqy7J+E5mDcx2DLIvaKoTEaj3VMEgJSLFdC4AoLz2Kxh1aehtm4p080iMyLhLvZaSdZFME4CUv0AyXwoYzgfOXAtU3xe+JqRp/xslMBHeKnkjPzQt6QDyueyPAoDfbJHA5RalmPXh1Zvw9dESPP6a/2aDisDlGuV1SucON9cj6hwMSojCKLppKgpnj0LRTVPjcv7AmhKla2bJ+FGtBifKsVPyBuK/Xv0nTlc7UdfYGLRT8JS8gXC43Sg+VY6VO/aqz/vWgiiaJ7dKKHFtRXHVauw+dQ/c3jMwJtjRP3kuhqT+HGZ9BowJyQAEnA0/qKPm1ZoSrwuSqAs9yVVZntFlNf/avRGi/gBgvBDQpcszS+o+lJd6nH+R60q8lfI4+qZx9a2NiP/2eBn+45E38O3xshbH0S+4dHTI4EbZXI9LN0Qdi8s3RGGs/+tGn0dDu58/sKZE7ZYxGlodiuZbO1JW7cT8F1/zm00S7jiFb3ZEETiDRDlO2TH4mGMdymu/wqjMe3HGfQCuxp8AQO2+AdBUU7LTv07El287cON3kMxT5eUZ/QBIhvMgUl8GzizyX95Rhqup7cU/i2rYWaihacrrrGYDLhpxNqxmgzqIjWPmiToPgxKiMMpLzmDVA3I9gj479FTRlrTWLhxYU6J00mQm2eCsbwgKMAKLWn0DlnD75ESzf44yg2RM1kMwJ6QjwzwGu0/d43dML2shdLoEuBpLcKDiSZgTMpEgWZFlKcQ3Fc8BAOyNq8Mv1ah73dgh9MMAyQZIVsD5HETKM5AkI0TS3XJg4kvZebiJstySkWLDr66Wl5pWf7g3ZEARKoDxfb2rrgFWs5Fj5ok0gMs3RHHSWrtwYE2JIrDNV6Es2ShLNL41KC3Vo7RWq+K722//5Lkw6pKhk0ww6pLRP3muX/2Jbzuwopd1PFJMeTg/8/dytkUJSHyXahRK1qN+H5CQA/n/goQ8cM3zA4T3NFC9LPgmQ50LchZkekEephfkhV3KCawXCWQ1G2Ex6bHn0A8cM0/UyZgpIYqDtF7JsNhM2L/jcJu6a3wFtvsGTm4NXPJRMis2kwEF/XNhMxqQl3pL0H43oXYBLnFuQW7iTOglK07UrFd/lmQYiN62aTjp2qm+1iPq4PHWwyPq5MCl/gn5xE0FrIHj4eE9JS/XJKQAwgs0jaMXgLyRn2kq4C1vbhG23iAHLTprUyjUqJ5q3db9sJqNAERMAYUS1Oh0wLGSiqCfszWYqGMxU0IUBzPmT8Dw8efAVeNut0mvgRmU1/fsxyeHvsPre/b7/VqhBi0C+OTQd4AENePhmx3xLXpVak2q6g+hUTiRaRmDHNsktWvn3LRfwKrPwrlpvwAgByo/Oj/ErpO/g9tTgRKnT7dL7VoI9y55iSZwU73qZRD1+yFq32reRdj5F3mwmtKJA8iBjWW2PLa+qXB2cN9MPHvHVRjcNxOnK514au1WrP5wL+ZOzI96qNq6rfvxwa6D+LGsCsk2c1C2RWkNLrr+ohg+MSKKFjMlRHHQHjNIQmmpriSwdiSwyDUzyYZx53rUvWt8x8P77mejZFEC55cA8hh6u3EQqtyHATQXzOal3gKjLlluEfbsAer+IWc8AHnWiHD6Z0sav5FbhQPVrpULX5VARv3eImdUANx+zSQMHZDjN4OkpR2AW/q5EtRkpNgwd2J+ULZFaQlmazBRx2CmhCgOwtWLKGIdMR9YV9ISZcLrvNH56i7CSsajxLkFDV6XmtnonzwXvW1TMS77CUzsvQKZ5nEYZF/YtEdOs0OVL+Co4014RC3MCZnq88cc61DvdSDR0AeSZTaQdLf/+Hj3xpA1IUG8pwDhhGQqkLMl3lNAzcOA97T63OOvbfGbQZKRYoPVbMSeQyewZd+RsBmRT7/8LuwST7i6E7YGE3UsZkqIOkFLI+aVbMimg0cwJW+gXztvYF1Ja3zrTnwzKTm2STDorMixTYLbUwGTLh0GXRKMCSnQQY/zM3+PBJ0BEDqkmM7D7lP3oM5Tpg5Iy7FORKNw+o2r333qHgyyL8QgowGAgBC1zUsztlv9dwYGgkfIK5T6Et8ZJ+6NEMaLAdPFAPb5TWmdOzEfo4f0wadffodJIweGzYiwq4ZI+xiUEHWClpZ3lECioH8f2ExGAM0BRTQtvkD4IMZ3aUbOkkyBXmdBjft7eFCHbyqew1mJU9DLeiHMCenonzxXDUB8i2HzUm9R55qUOLdAr7MApgvl1t7at5uLVSWbPFRNlwFhf7R5hDzgv6wDBLX/ApBrTQwDAMmE26+x+wUloVp+lYxJqLkjg/tm4vZrJkW8Lw4RdRwu3xB1gsDl
nbReyWrbrlK0unzDlqDi1XDCtf2Gay9WakGU2STOxh/gEbUoq9uFrT/dgLK6najznEaDx4E6T7kagJgTMtXX5tgmIcc6ESMy7sKApCsxIuMu5CbOhKTLAnRJgLEASPwdYL1BHj0PQHgd8mRWhNkVOBz3RoiGoxANB4NGx4daeikalxd24qtvTQrQfjsEp2XbseCO2UjLtrfpPEQ9GYMSIg2YMX+CWiuiBBIHS8v8akIAOfi4Y/pFuOPSi/wCkFhnmABygLLr5J34zvE6iqtWqc8fc6zDT65N2H3qHjUAkWeVNP+8xLUVjvrDgCQXwf7k/ATCewqi4QikhGxIlssAQwGElAJJ3w9oPARR91HzCHnfnYFbqjcxTYWkA+D+xC+7ERhQNBe0SmFrSAJrUlobWR8pduoQtR2Xb4g6QeC01w1rtqFyVGpQViSwJmTe6HzMys8DADjrG/B/m3ciLzsTEwb1RfGp07CZDGpNivI6m9GAWcPzYDOGHpVvTsjEIPtCAAKmhDS/LhxlyabEuQUZ5jEor/0Kw9JuAyChuGoVjjnWYZB9AU5Ub1B3EZbEIAhRC1H3MaAfAkmfK09thQCEq3lpRpcl15VItvBLOcpxkk1uL24aM69QAgqrWX5vFpNc8BpuuivQvC+OIpqR9S1hpw5R2zEoIeoEgYWuFScdIWtFAmtCXt+zHzaTARDNz901YxLysrOQlZiIyto69bX7fyqFzWiAxWSATpIwsm8OSr2ZfoPTALnzJjdpBiCAFNN5MOjkjhvfvXGUwtghqT9HkqEfIAGNQv5LP9MyBmW1u9E/ea68zGM7Xx4j7y0Dql+FSH4AEHqgcZ+cIVEow9Xcu+SAQ5chL/coxbGAHJDYH5XPV/dRUFGsMjxt+MDeOCvTDq8XWLf1K5yudPq1+ba0n00kRbCRDFFTOnWIKHYMSog6QaxzTMqqnXjkg0+Rl52JR64swvINW7B8wxbcNWMSntu6CxcO6oeRuTlItphh1utRU1+PL078BHdDI5JNZpgNc0NuxGfSZSDVdC5qG0tR2TQEzbeItbz2K2RbLoJBSsZJ12fwiDroJRtO1LwPANBLtuaN+Xw3z7PeAEl/NiDcEDjb/834dtk0DUkD4D/TxHK1XBwrmeS24gCnK51w1dXDYjLgx7IqfHWkRM14NGdRjHDV1bdpsz1laQYAAw+iOGJQQqRh4Vp675oxCcP7ZOPP187B/Bdfw5KV8nLE+X17w2oywqzXo66hEVWuOtTWN6Cmvh5VrjqU69YFXaPOUwa39zQshixY9Fk4Wv2mWkOSZSlEoqEP+iVdAWOCHToY0CAcOF23268tWJ5ZImBOyACMNzRnOyQrAAlCeCDpz4GwPwVU/488WK12bXPw4d4IYbwQaDzkX/yqbuCXIW/UV/XboAyI7/KLb9ARuBMwEPtme1yaIeoYDEqIOohvHcmM+RMwYeZwDC8chD/910tBQ9Z8Z5UAzUs1yvOvff4V8rIz0eD1YN7ofDVgUZZ3LEYDat0NWLljLwDA6ZZ3Hb7tZ2XqxntKFuSYYx1KnFuQZSlElftbvwmuJl067MZBENCh2v0dvGjE/vLH4PZUQC/ZoJesakdOo3Ah11YESRJytqN2LaAfIteReMsBw9mQ9P0hku6GpLP5ZUkg2SDpDBDC1Ty/BJB/Xb2s6TXpEPZHseBSPUYP6QMAatdNqGAjcCfgttSMcGmGqGMwKCFqJ4HFq4F860g2rNmG4YWDkJhiw4z5E4IGqIXLkCjPTxjUF/UeD864avH6nv3ITLJhyfhR6ua9+b2z8cmh79RWYN9zKCPmM8xj1PoRANDrrEgyDgDQ3DJsTsiER9RCKWxV6lHMCZlIMZ0LY4IdjcKljqrXSzZ5eJpkk5dudMkQog5oPAgkZEA0HpeDDPNlzceYCiAaiiGkFECXBsnQVPQq2SBZZstj5qt+C2F/FJIuBUA19hz6AVazARkptlaXZFqrGYm09iQe0nNSUXTTVKz/60aUl5zp0GsTaRFbgonaiRJ0zJg/IeTPN6zZhs8+OqAGLX/6r5ewff2XIetKQm2wl5lkg81owK5jJ3Co9DS8QmDf8RKUVTvVrpxZw/PUDfiUYCWwHVhp5d1f/hhKXFvloWeSDY1eF4y6ZL+23zpPGYqrVqNROGFKSFNnlfRPngujLhn1nio1syIfu0reAdg0AZCsEJIJkmgAEvpB1P4DqPpVU9ZkaPMxXieQMBCSLgMwjvXpsvHhPSUHM95K7DjwPc7tl4Wx5/ULauONZeZIe7UEx6LopqkonD0KRTdN7fBrE2kRMyVE7aS14lVlYFq4732Fmtw6b3Q+Cgbk4pND32Hl9r04XeMK6sqxGAyA1LwJ339OHueXcfFt/3V7KnDMsQ5jsh6CMcGOU66daBROv+UboHlfnL6Jl6HRWwOguUV4f/ljAKC2CQOiqfOmEhAuSMIJIdkg6azyko4yct73GJ1N3jE4IRUSGiEMQ+ULu1bIr3FvlLtyDOdD0lnx81m9kWwzw+GsC1qSCbXxXkaKDQsuHQVAwuoP9wRlQ9qrJTgW6/+60e+RqKdjUELUTloKMtoqM8kGm8mAXUdPhNwhWOnKUYIQpYbE9zWZSTaMyXoIycYBEMKLRuECADXj4bs8ozAnZEIvWdHodUHSJaDe60CJcwvy038Doy4ZOTZ5KmrfpDkw6BJRXX8Uwv0vtfVXCSqEUtgKBO1towYeohbC0NRO7FtXYr6sqTNHB9F4BI+/9jkmjRwYcrklVIAxd2I+phfkAZDgqqsPWsrpzH1xykvOYNUDb7R+IFEPweUboi5g3uh8FPTPhbO+AWXVzrBTWjcdPAKnux6bDh4Jes280fkw6pJR0/ADfnJuhl6yocS5RZ3aGhiQAHKWJMsyDgBwunYPKt3fIDexCMYEO7yiAVmWQpTXfgWP1w0JCbAZ+wJKpkPZw6bxm4DprUvljfVsS+XjnH+RJ7aaCoCGL+R5JEqbsPliQLJANP4oD2Or+i2+PV4WckdfIPTI+XVb9+ODXQfxwa6DarAS6TIPR8cTdSwGJUQaowQcedmZQfvhKMs1gWPlFVPyBsJmMqq7C/u+5vU9+/GTaxN2nbwTbm85Mi2jkWObpO6BYzcOwYU5z8NuHKKeT95kLxGmhFT0sk5AlnUcAAk/OTfCi0akmIbg/Mzf40D5U6j1nEKN+3t5bxvL1XIAkvg7IOmP8mPTBFfJMgeS4Tx5BL39Ufl590a5tqTuH80BTO1aeX8cUQtJZwC8ZerwtEiDinBLN751JC2dK3B0PIMUovji8g1RG7TWcROLcLsE/9/mnWrAorQKbzp4BP85eZy6pOMbgIRa4jl45mUAzTUhJc7mze3y03+DNHM+xvZ6DJ+d/A1yE2cizTwcHlGHBMkMj7cW9d7mZZ4S5xaM7fUYvMKNgSnXQicl4Ez9ftgbS/wHokk2QDiVxiA5E6IfBAhA0qXISzWAXFtiair4TLobqF4mByj6c+U6E5/haaFqR0IJt3Tju8zT0rkC55NwiBpRfDEoIWqDwHHx7UEJLDYdPKJmPBSBrcKBhax
KIKIEL0pwolA6Z/SSFQadFTm2SXB7KtA/eS4OnfkbRmXeC71kxoiMu2Az9AaEDtUNR7G//DHk2CahxLlFnXGSmzgTJ13b4REuJEhW2PS90eh1Ae6m2SPujRC6TMAwUm5VNo6HlJAF4fkJqN8F1P2judZElw5hLJADj6S7IRmGy8PSziySa0r0AyAs1wDe08hIMYWsHQnV2iuPoTcAkPyO9a0jaanQNXA+CYeoEcUXgxKiNmit4yaWTIpvhuNgqX+dR6i9cHwfFUvGj8Ks/DzYTAY88kHzX6DKjJKy2j0oq90NvWTDkJSb0TtxEswJGaj1nESioQ+cDT8AAKrc3+JQ5V/VepOxvR6DEI3om3gZ9DoLEiQjjjrewqHKF+D2npY36Etv2tMGkLtr9LmQO3MAQC+PnU/ICjFOvilT0jQsDc7nmrpuCgDo5M39dPmYO7EaL/xjZ1BWI1TG43SlE0+tbTmAiKbQlUPUiOKLQQlRG7TWcdPemRTfgEWZ7hqYDclMsuGCfmchyWTEBf3OQl52pppxOeaQOz1KnFswIuMuJBr6wNVQCggdelkvRKPHCUf9Ubg95Ug2DoDbW64GJPnpv4FZnwaPtwF1jacgSToI4QUg/HYU9uuusd4gd9U0lgHwAJ5iQNTKX+6NgO1W+biGLyAsc4HGo/IIeudzQPL/QJKsALwQjUcA53MQtv9Cut0WcmhaJK29nTkojYhax6CEKI5i3XgvUGaSDUsKRwESsHL7XrWbJtzU12x7MvQJOpydmY4/zLoYRr38R73O8wwOnnkeeam3wKbvC4MuERJOw9n4A4y6ZLi9Z7D71D0wJaQhzTwCNn0uhqXdhuKq1dhf/hjy03+DQ2f+hnTLcL8x9Qq7cQhgv0Pew0aXDgDy0DQAkmk8BHKAqt82zytRMirGAnmMfNLtkEQNhHWJPNvEcxpwb5KDGOOFkPS5mDE2GbXu+lYzIKFEWotCRJ2DQQlRHLV1domSDclIsmJOfh5qGxvhdDfg/zbvDLt08/qe/chItGLKkLOh1yfg0MnT6qC1u85u3vcmy1IIvW4wEo25OF79HtzechxzrEOdpwyD7AtgNw6C3TQYjZ4aKIPRKt3foLrhCKobjmCQfQGal2XkepVx2U9A0mcBhnMhTBfJAUbdR02ZkKsgwSwXtjr/0pxRcW8EdJkQMALOP0PYfgFJlwMICWjYJ3ffWGZD1G2CaDyBuvohftdVRBJwdOagNCJqHYMSog4UbY2Jkg2pb2xEbUMjTlRUqkFIqKmvirMz0lDtroejUt4lWFniUWpKAGDXyTsxrtcTSDINgCRJ6l43eam3QK+zolHUQ3gbUVG3D2fZLkGCzowESe4GahRO5CYWARLUnYL7J8+FvmkvHSHq5cxHY6W8PGN/FJJkgtClA7oMuQ1YmWOS+DtI5ikQte8C9duAxsMQaWsh6WzycZ4fodSUoPp+rN/zewBS0BJOJAFHqPqRlpZ00rLtKLr+IuzY8CUKZ4zA+r9/iorSqlY/NyKKTUxBydNPP41HHnkEpaWlGDFiBP785z+joKCgve+NqNuJtsZk08EjKOjfB89t3YXz+/YOqh8B4LcZ38odezFvdD7sFjPKa1z4uuQkCgbIA9T+b/NOdalF6aI54/43LPpMuWsGvoWwu+FsOI5EQx+kmvNhSkiFx9sIAEiQzCiuWgW9ZIUpIQ3ZlomwJpwFj3DD661vmn6kl79EfdPMkfUQhtFyfsNyGYSoBWoe9nkXOsBwPqA/V64pcdwDYfuF3BbsLYcwDIWk6wWR8hdYTBU4r38vAICrrl4NJqKdzKoEI1az0W/XYV9KC/DwCYNhSTQDYCswUTxFHZS89tpruP322/Hss89i7NixePLJJzF9+nQcOnQIWVlZ8bhHom4j2hoTZRja+X17B7X6AnImxWYyYFZ+HgDA2ZQVAeRlnEGZ6Zhx3mDkptqRly0v3RxzrPMLPo5Wv6kGK8pOv4DceWPT90aZ63NYDTmobTyJLGsBPKJO3ahv0lkvwaLPgt00GIBATcMJmIUFqP8SwjgMqP2HXMxquQqSlAABMyQJar0JAHmPG8NQSLoMiJRnIEkGiLpMoOFrwHwZ0HAASOgLAQMkXSLO6aPDp19+B6vZgItHDcLoIX3w+xfWR124qiz37Dl0Ap9++V3IDIvS+qtkSnZs+BIL7pgdU8ZEybow20IUXtRByeOPP46bb74ZN9xwAwDg2WefxXvvvYe//e1vuOuuu9r9Bom6i1jagwPrRnyLWzMSrbhseB5+qqrGKUc1jpRVwGY0AGgufH3kyiJkJNkw7dyByLEnIccqX9c3CPEdL1/nKUOjcKK3bSoavS6cqFmP4qrV6sRXi74XTtS8D0DOqiRIJgjhgYCABB283gaIhkOAYRAkyQiReJNcV+KpgkACAD2gSwYScpvfpPeU3FljfxISBCBZANMUSJIOgFeuRdElQnjKIBq+xMOrD+Pb42UY3DcTU0adA32CDnMn5kdduKoEIVv2HUHRuDwsuHR00IZ9vi3A3+0/gQV3zI55eBoHrxG1LqqgpL6+Hnv27MHdd9+tPqfT6TBt2jTs2LEj5Gvcbjfcbrf6fVWV/C+ERjRAHfFI1CYdsFuCt77Np5j0s/MxtLAfautceP3pj4J+7nHXBT1X6q7Dnz/YrH7/yrZdqHU68fa+r7H8Z9NRX1eLVEMCKhvq0ctsQMFZWRD1bjz8wVYAwAPrNuDO6Rfh8KlyfHLoO9w/tw+KKz6Go74UrurX0TdpNo673kWd57R6jX+7XocpMw8GXTIqXeWoqCwFAPRJKUBdjQ79Db/AF5UP4N+u11FXrUOqeSiMCSlIkMwA9KjWpQOog0A94FgGWK+EpEuDkMohQUBILqDqcaDBA+gyActCAJdAqq5vCm7qAFEH0XgcaPgKaPg3kPQroH4PULsKBYN/jVNl5SgYnI2TZeVwuOrw+ke74KkP/v3zlW63YvaEoXh329cor3Lh5Kk6PPvGZiyeeQHG5/UGIKG84gxeev9zNIb5vP+x8mPU1rnw0avbwx4TTuBrPaIhqtfHRHjjfw3qMRoh/zcrRBz/8hZR+PHHHwUAsX37dr/n77jjDlFQUBDyNffdd5+AHH7wi1/84he/+MWvLv515MiRaEKHqMS9++buu+/G7bffrn5fWVmJfv364fjx47Db7fG+vGY4HA7k5ubixIkTSE5O7uzb6TB833zfPQHfN993T1BVVYW+ffsiLS0tbteIKijJyMhAQkICTp486ff8yZMnkZ2dHfI1JpMJJpMp6Hm73d6jPkxFcnIy33cPwvfds/B99yw99X3rdPFbMo/qzEajEaNHj8bGjc27dXq9XmzcuBGFhYXtfnNERETUc0S9fHP77bdj8eLFGDNmDAoKCvDkk0/C6XSq3ThEREREsYg6KLnmmmtQVlaGe++9F6WlpRg5ciQ2bNiAXr16RfR6k8mE++67L+SSTnfG98333RPwffN99wR83/F735IQ8eztISIiIopMBwx4ICIiImodgxIiIiLSBAYlREREpAkMSoiIiEgT2hyUPP300+
jfvz/MZjPGjh2LXbt2tXj866+/jry8PJjNZuTn5+P999/3+7kQAvfeey9ycnJgsVgwbdo0HD58uK232e6ied8vvPACLrroIqSmpiI1NRXTpk0LOn7JkiWQJMnva8aMGfF+G1GL5n2vXLky6D2ZzWa/Y7rj5z158uSg9y1JEmbNmqUe0xU+761bt2LOnDno3bs3JEnC22+/3eprNm/ejFGjRsFkMmHQoEFYuXJl0DHR/n9GR4v2fb/11lu45JJLkJmZieTkZBQWFuKDDz7wO+aPf/xj0Oedl5cXx3cRvWjf9+bNm0P+d15aWup3XHf7vEP92ZUkCUOHDlWP0frnvWzZMlxwwQVISkpCVlYWrrjiChw6dKjV13XE399tCkpee+013H777bjvvvuwd+9ejBgxAtOnT8epU6dCHr99+3Zcd911uPHGG/HFF1/giiuuwBVXXIEDBw6ox/zpT3/C//7v/+LZZ5/FZ599BpvNhunTp6OuruXNtjpStO978+bNuO666/DJJ59gx44dyM3NxaWXXooff/zR77gZM2agpKRE/XrllVc64u1ELNr3DcgTD33f0/fff+/38+74eb/11lt+7/nAgQNISEjAvHnz/I7T+uftdDoxYsQIPP300xEdf/ToUcyaNQsXX3wx9u3bh9tuuw033XST31/Qsfw31NGifd9bt27FJZdcgvfffx979uzBxRdfjDlz5uCLL77wO27o0KF+n/e//vWveNx+zKJ934pDhw75va+srCz1Z93x837qqaf83u+JEyeQlpYW9Odby5/3li1bsHTpUuzcuRMfffQRGhoacOmll8LpdIZ9TYf9/d2WjXMKCgrE0qVL1e89Ho/o3bu3WLZsWcjjr776ajFr1iy/58aOHSt+8YtfCCGE8Hq9Ijs7WzzyyCPqzysrK4XJZBKvvPJKW261XUX7vgM1NjaKpKQk8dJLL6nPLV68WFx++eXtfavtKtr3vWLFCmG328Oer6d83k888YRISkoSNTU16nNd4fP2BUCsW7euxWPuvPNOMXToUL/nrrnmGjF9+nT1+7b+Xna0SN53KOedd564//771e/vu+8+MWLEiPa7sTiL5H1/8sknAoA4c+ZM2GN6wue9bt06IUmSOHbsmPpcV/u8T506JQCILVu2hD2mo/7+jjlTUl9fjz179mDatGnqczqdDtOmTcOOHTtCvmbHjh1+xwPA9OnT1eOPHj2K0tJSv2PsdjvGjh0b9pwdLZb3HcjlcqGhoSFoU6PNmzcjKysLQ4YMwS9/+UuUl5e36723Razvu6amBv369UNubi4uv/xyfP311+rPesrn/eKLL+Laa6+FzWbze17Ln3csWvvz3R6/l12B1+tFdXV10J/vw4cPo3fv3jj77LOxYMECHD9+vJPusH2NHDkSOTk5uOSSS7Bt2zb1+Z7yeb/44ouYNm0a+vXr5/d8V/q8q6qqAKDFjfY66u/vmIOS06dPw+PxBE1y7dWrV9CaoqK0tLTF45XHaM7Z0WJ534F+97vfoXfv3n4f3owZM/Dyyy9j48aNePjhh7FlyxYUFRXB4/G06/3HKpb3PWTIEPztb3/DO++8g1WrVsHr9WL8+PH44YcfAPSMz3vXrl04cOAAbrrpJr/ntf55xyLcn2+Hw4Ha2tp2+bPTFTz66KOoqanB1VdfrT43duxYrFy5Ehs2bMAzzzyDo0eP4qKLLkJ1dXUn3mnb5OTk4Nlnn8Wbb76JN998E7m5uZg8eTL27t0LoH3+v1LrfvrpJ6xfvz7oz3dX+ry9Xi9uu+02TJgwAcOGDQt7XEf9/R31mHlqm+XLl+PVV1/F5s2b/Yo+r732WvXX+fn5GD58OAYOHIjNmzdj6tSpnXGrbVZYWOi3UeP48eNx7rnn4rnnnsMDDzzQiXfWcV588UXk5+ejoKDA7/nu+HkTsGbNGtx///145513/GorioqK1F8PHz4cY8eORb9+/bB27VrceOONnXGrbTZkyBAMGTJE/X78+PE4cuQInnjiCfz973/vxDvrOC+99BJSUlJwxRVX+D3flT7vpUuX4sCBA5qpeYk5U5KRkYGEhAScPHnS7/mTJ08iOzs75Guys7NbPF55jOacHS2W96149NFHsXz5cnz44YcYPnx4i8eeffbZyMjIQHFxcZvvuT205X0rDAYDzj//fPU9dffP2+l04tVXX43o/4S09nnHItyf7+TkZFgslnb5b0jLXn31Vdx0001Yu3ZtUJo7UEpKCgYPHtylP+9QCgoK1PfU3T9vIQT+9re/4frrr4fRaGzxWK1+3rfeeiveffddfPLJJ+jTp0+Lx3bU398xByVGoxGjR4/Gxo0b1ee8Xi82btzo969jX4WFhX7HA8BHH32kHj9gwABkZ2f7HeNwOPDZZ5+FPWdHi+V9A3JV8gMPPIANGzZgzJgxrV7nhx9+QHl5OXJyctrlvtsq1vfty+PxYP/+/ep76s6fNyC3z7ndbixcuLDV62jt845Fa3++2+O/Ia165ZVXcMMNN+CVV17xa/0Op6amBkeOHOnSn3co+/btU99Td/68AbmDpbi4OKJ/dGjt8xZC4NZbb8W6deuwadMmDBgwoNXXdNjf31GV6AZ49dVXhclkEitXrhT//ve/xS233CJSUlJEaWmpEEKI66+/Xtx1113q8du2bRN6vV48+uij4ptvvhH33XefMBgMYv/+/eoxy5cvFykpKeKdd94RX331lbj88svFgAEDRG1tbVtutV1F+76XL18ujEajeOONN0RJSYn6VV1dLYQQorq6Wvz2t78VO3bsEEePHhUff/yxGDVqlDjnnHNEXV1dp7zHUKJ93/fff7/44IMPxJEjR8SePXvEtddeK8xms/j666/VY7rj56248MILxTXXXBP0fFf5vKurq8UXX3whvvjiCwFAPP744+KLL74Q33//vRBCiLvuuktcf/316vHfffedsFqt4o477hDffPONePrpp0VCQoLYsGGDekxrv5daEO37Xr16tdDr9eLpp5/2+/NdWVmpHvOb3/xGbN68WRw9elRs27ZNTJs2TWRkZIhTp051+PsLJ9r3/cQTT4i3335bHD58WOzfv1/86le/EjqdTnz88cfqMd3x81YsXLhQjB07NuQ5tf55//KXvxR2u11s3rzZ779Zl8ulHtNZf3+3KSgRQog///nPom/fvsJoNIqCggKxc+dO9WeTJk0Sixcv9jt+7dq1YvDgwcJoNIqhQ4eK9957z+/nXq9X/OEPfxC9evUSJpNJTJ06VRw6dKitt9nuonnf/fr1EwCCvu677z4hhBAul0tceumlIjMzUxgMBtGvXz9x8803a+oPriKa933bbbepx/bq1UvMnDlT7N271+983fHzFkKIgwcPCgDiww8/DDpXV/m8lZbPwC/lvS5evFhMmjQp6DUjR44URqNRnH322WLFihVB523p91ILon3fkyZNavF4IeTW6JycHGE0GsVZZ50lrrnmGlFcXNyxb6wV0b7vhx9+WAwcOFCYzWaRlpYmJk+eLDZt2hR03u72eQsht7paLBbx/PPPhzyn1
j/vUO8XgN+f1876+1tqukEiIiKiTsW9b4iIiEgTGJQQERGRJjAoISIiIk1gUEJERESawKCEiIiINIFBCREREWkCgxIiIiLSBAYlREREpAkMSoiIiEgTGJQQERGRJjAoISIiIk1gUEJERESa8P8BQ4gHyiAcCmAAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "n2, _, ye = np.histogram2d(xdata, ydata, bins=(20, 5), range=(xr, (0, np.max(ydata))))\n", "\n", @@ -117,15 +141,990 @@ "* $\\sigma > 0$,\n", "* $\\tau > 0$.\n", "\n", - "In addition, it can be beneficial to use $-1 < \\mu < 1$ (optional), but it is not required. We use `truncnorm` and `truncexpon`, which are normalised inside the data range (0, 2)." + "In addition, it can be beneficial to use $0 < \\mu < 2$, but it is not required. We use `truncnorm` and `truncexpon`, which are normalised inside the data range (0, 2)." ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 15, "id": "uniform-drama", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Migrad
FCN = 768.1 Nfcn = 111
EDM = 3.76e-06 (Goal: 0.0002)
Valid Minimum Below EDM threshold (goal x 10)
No parameters at limit Below call limit
Hesse ok Covariance accurate
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Name Value Hesse Error Minos Error- Minos Error+ Limit- Limit+ Fixed
0 z 0.537 0.015 0 1
1 mu 0.996 0.004 0 2
2 sigma 0.1006 0.0035 0
3 tau 1.05 0.08 0
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
z mu sigma tau
z 0.000231 -0.003e-3 (-0.052) 0.019e-3 (0.353) -0.24e-3 (-0.206)
mu -0.003e-3 (-0.052) 1.5e-05 -0.001e-3 (-0.069) -0.015e-3 (-0.053)
sigma 0.019e-3 (0.353) -0.001e-3 (-0.069) 1.25e-05 -0.043e-3 (-0.163)
tau -0.24e-3 (-0.206) -0.015e-3 (-0.053) -0.043e-3 (-0.163) 0.00568
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2023-08-23T14:43:37.279667\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.7.2, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "┌─────────────────────────────────────────────────────────────────────────┐\n", + "│ Migrad │\n", + "├──────────────────────────────────┬──────────────────────────────────────┤\n", + "│ FCN = 768.1 │ Nfcn = 111 │\n", + "│ EDM = 3.76e-06 (Goal: 0.0002) │ │\n", + "├──────────────────────────────────┼──────────────────────────────────────┤\n", + "│ Valid Minimum │ Below EDM threshold (goal x 10) │\n", + "├──────────────────────────────────┼──────────────────────────────────────┤\n", + "│ No parameters at limit │ Below call limit │\n", + "├──────────────────────────────────┼──────────────────────────────────────┤\n", + "│ Hesse ok │ Covariance accurate │\n", + "└──────────────────────────────────┴──────────────────────────────────────┘\n", + "┌───┬───────┬───────────┬───────────┬────────────┬────────────┬─────────┬─────────┬───────┐\n", + "│ │ Name │ Value │ Hesse Err │ Minos Err- │ Minos Err+ │ Limit- │ Limit+ │ Fixed │\n", + "├───┼───────┼───────────┼───────────┼────────────┼────────────┼─────────┼─────────┼───────┤\n", + "│ 0 │ z │ 0.537 │ 0.015 │ │ │ 0 │ 1 │ │\n", + "│ 1 │ mu │ 0.996 │ 0.004 │ │ │ 0 │ 2 │ │\n", + "│ 2 │ sigma │ 0.1006 │ 0.0035 │ │ │ 0 │ │ │\n", + "│ 3 │ tau │ 1.05 │ 0.08 │ │ │ 0 │ │ │\n", + "└───┴───────┴───────────┴───────────┴────────────┴────────────┴─────────┴─────────┴───────┘\n", + "┌───────┬─────────────────────────────────────────┐\n", + "│ │ z mu sigma tau │\n", + "├───────┼─────────────────────────────────────────┤\n", + "│ z │ 0.000231 -0.003e-3 0.019e-3 -0.24e-3 │\n", + "│ mu │ -0.003e-3 1.5e-05 -0.001e-3 -0.015e-3 │\n", + "│ sigma │ 0.019e-3 -0.001e-3 1.25e-05 -0.043e-3 │\n", + "│ tau │ -0.24e-3 -0.015e-3 -0.043e-3 0.00568 │\n", + "└───────┴─────────────────────────────────────────┘" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "def pdf(x, z, mu, sigma, tau):\n", " return (z * truncnorm.pdf(x, *xr, mu, sigma) + \n", @@ -133,8 +1132,1011 @@ "\n", "c = cost.UnbinnedNLL(xmix, pdf)\n", "\n", - "m = Minuit(c, z=0.4, mu=0.1, sigma=0.2, tau=2)\n", + "m = Minuit(c, z=0.4, mu=1, sigma=0.2, tau=1)\n", + "m.limits[\"z\"] = (0, 1)\n", + "m.limits[\"mu\"] = (0, 2)\n", + "m.limits[\"sigma\", \"tau\"] = (0, None)\n", + "m.migrad()" + ] + }, + { + "cell_type": "markdown", + "id": "fa50a81d", + "metadata": {}, + "source": [ + "If the gradient of the model is available, it can be passed to the cost function to enable the computation of its gradient, which Minuit then uses to potentially improve the minimization. We use a numerically computed gradient here obtained from the `jacobi` library. This is for demonstration purpose only and generally not recommended, since `jacobi` computes the gradient much more accurately than what is required for Minuit." 
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
+ "id": "8081f7f5",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
Migrad
FCN = 768.1 Nfcn = 87, Ngrad = 5
EDM = 3.31e-05 (Goal: 0.0002) time = 0.1 sec
Valid Minimum Below EDM threshold (goal x 10)
No parameters at limit Below call limit
Hesse ok Covariance accurate
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Name Value Hesse Error Minos Error- Minos Error+ Limit- Limit+ Fixed
0 z 0.537 0.015 0 1
1 mu 0.996 0.004 0 2
2 sigma 0.1006 0.0035 0
3 tau 1.05 0.08 0
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
z mu sigma tau
z 0.000231 -0.003e-3 (-0.052) 0.019e-3 (0.353) -0.24e-3 (-0.206)
mu -0.003e-3 (-0.052) 1.5e-05 -0.001e-3 (-0.069) -0.015e-3 (-0.053)
sigma 0.019e-3 (0.353) -0.001e-3 (-0.069) 1.25e-05 -0.043e-3 (-0.163)
tau -0.24e-3 (-0.206) -0.015e-3 (-0.053) -0.043e-3 (-0.163) 0.00569
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2023-08-23T14:43:38.309566\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.7.2, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "┌─────────────────────────────────────────────────────────────────────────┐\n", + "│ Migrad │\n", + "├──────────────────────────────────┬──────────────────────────────────────┤\n", + "│ FCN = 768.1 │ Nfcn = 87, Ngrad = 5 │\n", + "│ EDM = 3.31e-05 (Goal: 0.0002) │ time = 0.1 sec │\n", + "├──────────────────────────────────┼──────────────────────────────────────┤\n", + "│ Valid Minimum │ Below EDM threshold (goal x 10) │\n", + "├──────────────────────────────────┼──────────────────────────────────────┤\n", + "│ No parameters at limit │ Below call limit │\n", + "├──────────────────────────────────┼──────────────────────────────────────┤\n", + "│ Hesse ok │ Covariance accurate │\n", + "└──────────────────────────────────┴──────────────────────────────────────┘\n", + "┌───┬───────┬───────────┬───────────┬────────────┬────────────┬─────────┬─────────┬───────┐\n", + "│ │ Name │ Value │ Hesse Err │ Minos Err- │ Minos Err+ │ Limit- │ Limit+ │ Fixed │\n", + "├───┼───────┼───────────┼───────────┼────────────┼────────────┼─────────┼─────────┼───────┤\n", + "│ 0 │ z │ 0.537 │ 0.015 │ │ │ 0 │ 1 │ │\n", + "│ 1 │ mu │ 0.996 │ 0.004 │ │ │ 0 │ 2 │ │\n", + "│ 2 │ sigma │ 0.1006 │ 0.0035 │ │ │ 0 │ │ │\n", + "│ 3 │ tau │ 1.05 │ 0.08 │ │ │ 0 │ │ │\n", + "└───┴───────┴───────────┴───────────┴────────────┴────────────┴─────────┴─────────┴───────┘\n", + "┌───────┬─────────────────────────────────────────┐\n", + "│ │ z mu sigma tau │\n", + "├───────┼─────────────────────────────────────────┤\n", + "│ z │ 0.000231 -0.003e-3 0.019e-3 -0.24e-3 │\n", + "│ mu │ -0.003e-3 1.5e-05 -0.001e-3 -0.015e-3 │\n", + "│ sigma │ 0.019e-3 -0.001e-3 1.25e-05 -0.043e-3 │\n", + "│ tau │ -0.24e-3 -0.015e-3 -0.043e-3 0.00569 │\n", + "└───────┴─────────────────────────────────────────┘" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def grad(x, *par):\n", + " return jacobi(lambda par: pdf(x, *par), par)[0].T\n", + "\n", + "c = cost.UnbinnedNLL(xmix, pdf, grad=grad)\n", + "\n", + "m = Minuit(c, z=0.4, mu=1, sigma=0.2, tau=1, grad=c.gradient)\n", "m.limits[\"z\"] = (0, 1)\n", + "m.limits[\"mu\"] = (0, 2)\n", "m.limits[\"sigma\", \"tau\"] = (0, None)\n", "m.migrad()" ] @@ -149,10 +2151,143 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 17, "id": "9da33a94", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Migrad
FCN = 147.6 Nfcn = 134
EDM = 2.1e-06 (Goal: 0.0002)
Valid Minimum Below EDM threshold (goal x 10)
No parameters at limit Below call limit
Hesse ok Covariance accurate
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Name Value Hesse Error Minos Error- Minos Error+ Limit- Limit+ Fixed
0 mu 0.9946 0.0031
1 sigma 0.0986 0.0022 0
2 tau 0.972 0.031 0
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
mu sigma tau
mu 9.73e-06 0e-6 -0e-6
sigma 0e-6 4.86e-06 -0e-6
tau -0e-6 -0e-6 0.000944
" + ], + "text/plain": [ + "┌─────────────────────────────────────────────────────────────────────────┐\n", + "│ Migrad │\n", + "├──────────────────────────────────┬──────────────────────────────────────┤\n", + "│ FCN = 147.6 │ Nfcn = 134 │\n", + "│ EDM = 2.1e-06 (Goal: 0.0002) │ │\n", + "├──────────────────────────────────┼──────────────────────────────────────┤\n", + "│ Valid Minimum │ Below EDM threshold (goal x 10) │\n", + "├──────────────────────────────────┼──────────────────────────────────────┤\n", + "│ No parameters at limit │ Below call limit │\n", + "├──────────────────────────────────┼──────────────────────────────────────┤\n", + "│ Hesse ok │ Covariance accurate │\n", + "└──────────────────────────────────┴──────────────────────────────────────┘\n", + "┌───┬───────┬───────────┬───────────┬────────────┬────────────┬─────────┬─────────┬───────┐\n", + "│ │ Name │ Value │ Hesse Err │ Minos Err- │ Minos Err+ │ Limit- │ Limit+ │ Fixed │\n", + "├───┼───────┼───────────┼───────────┼────────────┼────────────┼─────────┼─────────┼───────┤\n", + "│ 0 │ mu │ 0.9946 │ 0.0031 │ │ │ │ │ │\n", + "│ 1 │ sigma │ 0.0986 │ 0.0022 │ │ │ 0 │ │ │\n", + "│ 2 │ tau │ 0.972 │ 0.031 │ │ │ 0 │ │ │\n", + "└───┴───────┴───────────┴───────────┴────────────┴────────────┴─────────┴─────────┴───────┘\n", + "┌───────┬────────────────────────────┐\n", + "│ │ mu sigma tau │\n", + "├───────┼────────────────────────────┤\n", + "│ mu │ 9.73e-06 0e-6 -0e-6 │\n", + "│ sigma │ 0e-6 4.86e-06 -0e-6 │\n", + "│ tau │ -0e-6 -0e-6 0.000944 │\n", + "└───────┴────────────────────────────┘" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "def logpdf(xy, mu, sigma, tau):\n", " x, y = xy\n", @@ -164,6 +2299,191 @@ "m.migrad()" ] }, + { + "cell_type": "markdown", + "id": "c272cd2b", + "metadata": {}, + "source": [ + "And we can also use a gradient as before." + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "220d17c6", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Migrad
FCN = 147.6 Nfcn = 81, Ngrad = 9
EDM = 3.73e-05 (Goal: 0.0002) time = 0.2 sec
Valid Minimum Below EDM threshold (goal x 10)
No parameters at limit Below call limit
Hesse ok Covariance accurate
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Name Value Hesse Error Minos Error- Minos Error+ Limit- Limit+ Fixed
0 mu 0.9946 0.0031
1 sigma 0.0986 0.0022 0
2 tau 0.972 0.031 0
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
mu sigma tau
mu 9.73e-06 0e-6 -0e-6
sigma 0e-6 4.87e-06 -0e-6
tau -0e-6 -0e-6 0.000944
" + ], + "text/plain": [ + "┌─────────────────────────────────────────────────────────────────────────┐\n", + "│ Migrad │\n", + "├──────────────────────────────────┬──────────────────────────────────────┤\n", + "│ FCN = 147.6 │ Nfcn = 81, Ngrad = 9 │\n", + "│ EDM = 3.73e-05 (Goal: 0.0002) │ time = 0.2 sec │\n", + "├──────────────────────────────────┼──────────────────────────────────────┤\n", + "│ Valid Minimum │ Below EDM threshold (goal x 10) │\n", + "├──────────────────────────────────┼──────────────────────────────────────┤\n", + "│ No parameters at limit │ Below call limit │\n", + "├──────────────────────────────────┼──────────────────────────────────────┤\n", + "│ Hesse ok │ Covariance accurate │\n", + "└──────────────────────────────────┴──────────────────────────────────────┘\n", + "┌───┬───────┬───────────┬───────────┬────────────┬────────────┬─────────┬─────────┬───────┐\n", + "│ │ Name │ Value │ Hesse Err │ Minos Err- │ Minos Err+ │ Limit- │ Limit+ │ Fixed │\n", + "├───┼───────┼───────────┼───────────┼────────────┼────────────┼─────────┼─────────┼───────┤\n", + "│ 0 │ mu │ 0.9946 │ 0.0031 │ │ │ │ │ │\n", + "│ 1 │ sigma │ 0.0986 │ 0.0022 │ │ │ 0 │ │ │\n", + "│ 2 │ tau │ 0.972 │ 0.031 │ │ │ 0 │ │ │\n", + "└───┴───────┴───────────┴───────────┴────────────┴────────────┴─────────┴─────────┴───────┘\n", + "┌───────┬────────────────────────────┐\n", + "│ │ mu sigma tau │\n", + "├───────┼────────────────────────────┤\n", + "│ mu │ 9.73e-06 0e-6 -0e-6 │\n", + "│ sigma │ 0e-6 4.87e-06 -0e-6 │\n", + "│ tau │ -0e-6 -0e-6 0.000944 │\n", + "└───────┴────────────────────────────┘" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def grad(xy, *par):\n", + " return jacobi(lambda p: logpdf(xy, *p), par)[0].T\n", + "\n", + "c = cost.UnbinnedNLL((xdata, ydata), logpdf, log=True, grad=grad)\n", + "m = Minuit(c, mu=1, sigma=2, tau=2, grad=c.gradient)\n", + "m.limits[\"sigma\", \"tau\"] = (0, None)\n", + "m.migrad()" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "4f9704d9", + "metadata": {}, + "outputs": [ + { + "ename": "ValueError", + "evalue": "Iteration of zero-sized operands is not enabled", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[19], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m c\u001b[39m.\u001b[39;49mcovariance()\n", + "File \u001b[0;32m~/Extern/iminuit/src/iminuit/cost.py:870\u001b[0m, in \u001b[0;36mMaskedCost.covariance\u001b[0;34m(self, *args)\u001b[0m\n\u001b[1;32m 852\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mcovariance\u001b[39m(\u001b[39mself\u001b[39m, \u001b[39m*\u001b[39margs: \u001b[39mfloat\u001b[39m) \u001b[39m-\u001b[39m\u001b[39m>\u001b[39m NDArray:\n\u001b[1;32m 853\u001b[0m \u001b[39m \u001b[39m\u001b[39m\"\"\"\u001b[39;00m\n\u001b[1;32m 854\u001b[0m \u001b[39m Estimate covariance of the parameters with the sandwich estimator.\u001b[39;00m\n\u001b[1;32m 855\u001b[0m \n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 868\u001b[0m \u001b[39m The array has shape (K, K) for K arguments.\u001b[39;00m\n\u001b[1;32m 869\u001b[0m \u001b[39m \"\"\"\u001b[39;00m\n\u001b[0;32m--> 870\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_sandwich(args)\n", + "File \u001b[0;32m~/Extern/iminuit/src/iminuit/cost.py:984\u001b[0m, in 
\u001b[0;36mUnbinnedCost._sandwich\u001b[0;34m(self, args)\u001b[0m\n\u001b[1;32m 983\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39m_sandwich\u001b[39m(\u001b[39mself\u001b[39m, args: Sequence[\u001b[39mfloat\u001b[39m]) \u001b[39m-\u001b[39m\u001b[39m>\u001b[39m NDArray:\n\u001b[0;32m--> 984\u001b[0m \u001b[39mreturn\u001b[39;00m np\u001b[39m.\u001b[39mlinalg\u001b[39m.\u001b[39minv(\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_fisher_information(args))\n", + "File \u001b[0;32m~/Extern/iminuit/src/iminuit/cost.py:980\u001b[0m, in \u001b[0;36mUnbinnedCost._fisher_information\u001b[0;34m(self, args)\u001b[0m\n\u001b[1;32m 979\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39m_fisher_information\u001b[39m(\u001b[39mself\u001b[39m, args: Sequence[\u001b[39mfloat\u001b[39m]) \u001b[39m-\u001b[39m\u001b[39m>\u001b[39m NDArray:\n\u001b[0;32m--> 980\u001b[0m g \u001b[39m=\u001b[39m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_pointwise_grad(args)\n\u001b[1;32m 981\u001b[0m \u001b[39mreturn\u001b[39;00m np\u001b[39m.\u001b[39meinsum(\u001b[39m\"\u001b[39m\u001b[39mji,ki->jk\u001b[39m\u001b[39m\"\u001b[39m, g, g)\n", + "File \u001b[0;32m~/Extern/iminuit/src/iminuit/cost.py:1076\u001b[0m, in \u001b[0;36mUnbinnedNLL._pointwise_grad\u001b[0;34m(self, args)\u001b[0m\n\u001b[1;32m 1074\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mValueError\u001b[39;00m(\u001b[39m\"\u001b[39m\u001b[39ma gradient is required to use this functionality\u001b[39m\u001b[39m\"\u001b[39m)\n\u001b[1;32m 1075\u001b[0m data \u001b[39m=\u001b[39m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_masked\n\u001b[0;32m-> 1076\u001b[0m g \u001b[39m=\u001b[39m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_grad(data, \u001b[39m*\u001b[39;49margs)\n\u001b[1;32m 1077\u001b[0m g \u001b[39m=\u001b[39m _normalize_model_output(g)\n\u001b[1;32m 1078\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_log:\n", + "Cell \u001b[0;32mIn[18], line 2\u001b[0m, in \u001b[0;36mgrad\u001b[0;34m(xy, *par)\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mgrad\u001b[39m(xy, \u001b[39m*\u001b[39mpar):\n\u001b[0;32m----> 2\u001b[0m \u001b[39mreturn\u001b[39;00m jacobi(\u001b[39mlambda\u001b[39;49;00m p: logpdf(xy, \u001b[39m*\u001b[39;49mp), par)[\u001b[39m0\u001b[39m]\u001b[39m.\u001b[39mT\n", + "File \u001b[0;32m~/Extern/iminuit/py39/lib/python3.9/site-packages/jacobi/_jacobi.py:112\u001b[0m, in \u001b[0;36mjacobi\u001b[0;34m(fn, x, diagonal, method, mask, rtol, maxiter, maxgrad, step, diagnostic, *args)\u001b[0m\n\u001b[1;32m 110\u001b[0m jac \u001b[39m=\u001b[39m \u001b[39mNone\u001b[39;00m\n\u001b[1;32m 111\u001b[0m err \u001b[39m=\u001b[39m \u001b[39mNone\u001b[39;00m\n\u001b[0;32m--> 112\u001b[0m it \u001b[39m=\u001b[39m np\u001b[39m.\u001b[39;49mnditer(x, flags\u001b[39m=\u001b[39;49m[\u001b[39m\"\u001b[39;49m\u001b[39mc_index\u001b[39;49m\u001b[39m\"\u001b[39;49m, \u001b[39m\"\u001b[39;49m\u001b[39mmulti_index\u001b[39;49m\u001b[39m\"\u001b[39;49m])\n\u001b[1;32m 113\u001b[0m \u001b[39mwhile\u001b[39;00m \u001b[39mnot\u001b[39;00m it\u001b[39m.\u001b[39mfinished:\n\u001b[1;32m 114\u001b[0m k \u001b[39m=\u001b[39m it\u001b[39m.\u001b[39mindex\n", + "\u001b[0;31mValueError\u001b[0m: Iteration of zero-sized operands is not enabled" + ] + } + ], + "source": [ + "c.covariance()" + ] + }, { "cell_type": "markdown", "id": "introductory-watershed", @@ -808,7 +3128,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.9" + "version": "3.9.16" }, 
"vscode": { "interpreter": { diff --git a/doc/plots/interactive.py b/doc/plots/interactive.py index e7a62ce11..b0c2b4451 100644 --- a/doc/plots/interactive.py +++ b/doc/plots/interactive.py @@ -3,7 +3,7 @@ from matplotlib import pyplot as plt -# custom visualization, x and y are taken from outer scope +# custom visualization; x, y, model are taken from outer scope def viz(args): plt.plot(x, y, "ok") xm = np.linspace(x[0], x[-1], 100) @@ -18,4 +18,5 @@ def model(x, a, b): y = np.array([1.03, 1.58, 2.03, 2.37, 3.09]) c = cost.LeastSquares(x, y, 0.1, model) m = Minuit(c, 0.5, 0.5) -m.interactive(viz) # calling m.interactive() uses LeastSquares.visualize +m.interactive(viz) +# m.interactive() also works and calls LeastSquares.visualize diff --git a/pyproject.toml b/pyproject.toml index 19c684e22..baf08f66e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -95,10 +95,13 @@ filterwarnings = [ [tool.ruff] select = [ - "E", "F", # flake8 - "D", # pydocstyle + "E", + "F", # flake8 + "D", # pydocstyle +] +extend-ignore = [ + "D212", # multi-line-summary-first-line ] -extend-ignore = ["D203", "D212"] src = ["src"] unfixable = [ "F841", # Removes unused variables @@ -112,8 +115,6 @@ convention = "numpy" ".ci/*.py" = ["D"] "bench/*.py" = ["D"] "doc/*.py" = ["D"] -"setup.py" = ["D"] -"cmake_ext.py" = ["D"] [tool.mypy] ignore_missing_imports = true diff --git a/src/iminuit/_optional_dependencies.py b/src/iminuit/_optional_dependencies.py index 7c097fa99..719c952a9 100644 --- a/src/iminuit/_optional_dependencies.py +++ b/src/iminuit/_optional_dependencies.py @@ -1,13 +1,17 @@ import contextlib import warnings from iminuit.warnings import OptionalDependencyWarning +from typing import Dict, Optional @contextlib.contextmanager -def optional_module_for(functionality, *, replace=None, stacklevel=3): +def optional_module_for( + functionality: str, *, replace: Optional[Dict[str, str]] = None, stacklevel: int = 3 +): try: yield except ModuleNotFoundError as e: + assert e.name is not None package = e.name.split(".")[0] if replace: package = replace.get(package, package) diff --git a/src/iminuit/cost.py b/src/iminuit/cost.py index 0f9bba498..d7f0a9ee4 100644 --- a/src/iminuit/cost.py +++ b/src/iminuit/cost.py @@ -18,8 +18,9 @@ - Data are binned: :class:`ExtendedBinnedNLL`, also supports histogram of weighted samples -- Fit a template to binned data with bin-wise uncertainties on the template: - :class:`Template`, which also supports weighted data and weighted templates +- Fit a template to binned data with bin-wise uncertainties on the template + + - :class:`Template`, also supports weighted data and weighted template histograms - Fit of a function f(x) to (x, y, yerror) pairs with normal-distributed fluctuations. x is one- or multi-dimensional, y is one-dimensional. @@ -28,8 +29,9 @@ - y values contain outliers: :class:`LeastSquares` with loss function set to "soft_l1" -- Include constraints from external fits or apply regularisation: - :class:`NormalConstraint` +- Include constraints from external fits or apply regularisation + + - :class:`NormalConstraint` Combining cost functions ------------------------ @@ -52,6 +54,18 @@ can have negative amplitudes. To achieve this, simply reset the limits with :attr:`iminuit.Minuit.limits` after creating the Minuit instance. 
+User-defined gradients
+----------------------
+If the user provides a model gradient, the cost functions defined here (except
+:class:`Template`) also make their own gradient available, which
+:class:`iminuit.Minuit` then uses automatically (see the constructor for details)
+to potentially improve the fit (faster convergence or better robustness).
+
+Note that it is perfectly normal to use Minuit without a user-defined gradient,
+and Minuit does not always benefit from one. If the gradient is expensive to
+compute, the time to converge may even increase. If you have trouble with the
+fitting process, a user-defined gradient is unlikely to resolve the issues.
+
 Notes
 -----
 The cost functions defined here have been optimized with knowledge about implementation
@@ -62,16 +76,19 @@ the histogram, the sum of weights and the sum of squared weights is needed then,
 see class documentation for details.
 """
+from __future__ import annotations
+
 from .util import (
     describe,
     merge_signatures,
     PerformanceWarning,
     _smart_sampling,
     _detect_log_spacing,
+    is_positive_definite,
 )
-from .typing import Model, LossFunction
+from .typing import Model, ModelGradient, LossFunction
 import numpy as np
-from numpy.typing import ArrayLike, NDArray
+from numpy.typing import NDArray, ArrayLike
 from collections.abc import Sequence as ABCSequence
 import abc
 from typing import (
@@ -85,14 +102,16 @@ class documentation for details.
     Iterable,
     Optional,
     overload,
+    TypeVar,
+    Callable,
+    cast,
 )
 import warnings
-from ._deprecated import deprecated_parameter
+from ._deprecated import deprecated_parameter, deprecated

 __all__ = [
     "CHISQUARE",
     "NEGATIVE_LOG_LIKELIHOOD",
-    "BohmZechTransform",
     "chi2",
     "multinominal_chi2",
     "poisson_chi2",
@@ -110,6 +129,8 @@ class documentation for details.
     "LeastSquares",
 ]

+T = TypeVar("T", float, NDArray)
+
 CHISQUARE = 1.0
 NEGATIVE_LOG_LIKELIHOOD = 0.5

@@ -130,20 +151,13 @@ def _z_squared(y, ye, ym):
     return z * z


-def _soft_l1_loss(z_sqr):
-    return np.sum(2 * (np.sqrt(1 + z_sqr) - 1))
-
-
-def _soft_l1_cost(y, ye, ym):
-    return _soft_l1_loss(_z_squared(y, ye, ym))
-
-
 def _replace_none(x, replacement):
     if x is None:
         return replacement
     return x


+@deprecated("The class is deprecated and will be removed without replacement")
 class BohmZechTransform:
     """
     Apply Bohm-Zech transform.
@@ -153,8 +167,10 @@ class BohmZechTransform:
     :meta private:
     """

-    _scale: np.ndarray
-    _obs: np.ndarray
+    __slots__ = "_obs", "_scale"
+
+    _obs: NDArray
+    _scale: NDArray

     def __init__(self, val: ArrayLike, var: ArrayLike):
         """
@@ -232,6 +248,21 @@ def chi2(y: ArrayLike, ye: ArrayLike, ym: ArrayLike) -> float:
     return np.sum(_z_squared(y, ye, ym))


+def _chi2_grad(y: NDArray, ye: NDArray, ym: NDArray, ymg: NDArray) -> NDArray:
+    # chain rule: ymg is the model gradient with shape (K, N)
+    return -2 * np.sum((y - ym) * ymg * ye**-2, axis=1)
+
+
+def _soft_l1_cost(y: NDArray, ye: NDArray, ym: NDArray) -> float:
+    z_sqr = _z_squared(y, ye, ym)
+    return 2 * np.sum(np.sqrt(1 + z_sqr) - 1)
+
+
+def _soft_l1_cost_grad(y: NDArray, ye: NDArray, ym: NDArray, ymg: NDArray) -> NDArray:
+    # chain rule: derivative of the soft-l1 cost with respect to the parameters
+    inv_ye = 1 / ye
+    z = (y - ym) * inv_ye
+    return -2 * np.sum(z * ymg * inv_ye * (1 + z**2) ** -0.5, axis=1)
+
+
 def multinominal_chi2(n: ArrayLike, mu: ArrayLike) -> float:
     """
     Compute asymptotically chi2-distributed cost for binomially-distributed data.
@@ -260,6 +291,10 @@ def multinominal_chi2(n: ArrayLike, mu: ArrayLike) -> float: return 2 * np.sum(n * (_safe_log(n) - _safe_log(mu))) +def _multinominal_chi2_grad(n: NDArray, mu: NDArray, gmu: NDArray) -> NDArray: + return -2 * np.sum(n * gmu / mu, axis=tuple(range(1, gmu.ndim))) + + def poisson_chi2(n: ArrayLike, mu: ArrayLike) -> float: """ Compute asymptotically chi2-distributed cost for Poisson-distributed data. @@ -287,6 +322,10 @@ def poisson_chi2(n: ArrayLike, mu: ArrayLike) -> float: return 2 * np.sum(mu - n + n * (_safe_log(n) - _safe_log(mu))) +def _poisson_chi2_grad(n: NDArray, mu: NDArray, gmu: NDArray) -> NDArray: + return 2 * np.sum(gmu * (1.0 - n / mu), axis=tuple(range(1, gmu.ndim))) + + def template_chi2_jsc(n: ArrayLike, mu: ArrayLike, mu_var: ArrayLike) -> float: """ Compute asymptotically chi2-distributed cost for a template fit. @@ -470,23 +509,6 @@ def chi2(y: ArrayLike, ye: ArrayLike, ym: ArrayLike) -> float: # noqa chi2.__doc__ = _chi2_np.__doc__ - _soft_l1_loss_np = _soft_l1_loss - _soft_l1_loss_nb = jit( - nogil=True, - cache=True, - error_model="numpy", - )(_soft_l1_loss_np) - - def _soft_l1_loss(z_sqr): - if z_sqr.dtype in (np.float32, np.float64): - return _soft_l1_loss_nb(z_sqr) - # fallback to numpy for float128 - return _soft_l1_loss_np(z_sqr) - - @nb_overload(_soft_l1_loss, inline="always") - def _ol_soft_l1_loss(z_sqr): - return _soft_l1_loss_np # pragma: no cover - _soft_l1_cost_np = _soft_l1_cost _soft_l1_cost_nb = jit( nogil=True, @@ -494,7 +516,7 @@ def _ol_soft_l1_loss(z_sqr): error_model="numpy", )(_soft_l1_cost_np) - def _soft_l1_cost(y, ye, ym): + def _soft_l1_cost(y: NDArray, ye: NDArray, ym: NDArray) -> float: if ym.dtype in (np.float32, np.float64): return _soft_l1_cost_nb(y, ye, ym) # fallback to numpy for float128 @@ -538,6 +560,11 @@ def ndata(self): """ return self._ndata() + @property + def npar(self): + """Return total number of model parameters.""" + return len(self._parameters) + @abc.abstractmethod def _ndata(self): NotImplemented # pragma: no cover @@ -597,13 +624,44 @@ def __call__(self, *args: float) -> float: ------- float """ - r = self._call(args) + r = self._value(args) if self.verbose >= 1: print(args, "->", r) return r + def grad(self, *args: float) -> NDArray: + """ + Compute gradient of the cost function. + + This requires that a model gradient is provided. + + Parameters + ---------- + *args : float + Parameter values. + + Returns + ------- + ndarray of float + The length of the array is equal to the length of args. + """ + return self._grad(args) + + @property + def has_grad(self) -> bool: + """Return True if cost function can compute a gradient.""" + return self._has_grad() + + @abc.abstractmethod + def _value(self, args: Sequence[float]) -> float: + ... # pragma: no cover + + @abc.abstractmethod + def _grad(self, args: Sequence[float]) -> NDArray: + ... # pragma: no cover + @abc.abstractmethod - def _call(self, args: Sequence[float]) -> float: + def _has_grad(self) -> bool: ... 
# pragma: no cover
@@ -625,9 +683,16 @@ def __init__(self, value: float):
     def _ndata(self):
         return 0

-    def _call(self, args: Sequence[float]) -> float:
+    def _value(self, args: Sequence[float]) -> float:
         return self.value

+    def _grad(self, args: Sequence[float]) -> NDArray:
+        return np.zeros(0)
+
+    @staticmethod
+    def _has_grad():
+        return True
+

 class CostSum(Cost, ABCSequence):
     """
@@ -683,12 +748,22 @@ def _split(self, args: Sequence[float]):
             component_args = tuple(args[i] for i in cmap)
             yield component, component_args

-    def _call(self, args: Sequence[float]) -> float:
+    def _value(self, args: Sequence[float]) -> float:
         r = 0.0
-        for comp, cargs in self._split(args):
-            r += comp._call(cargs) / comp.errordef
+        for component, component_args in self._split(args):
+            r += component._value(component_args) / component.errordef
         return r

+    def _grad(self, args: Sequence[float]) -> NDArray:
+        # accumulate the component gradients in the slots of the mapped parameters
+        r = np.zeros(self.npar)
+        for component, indices in zip(self._items, self._maps):
+            component_args = tuple(args[i] for i in indices)
+            r[indices] += component._grad(component_args) / component.errordef
+        return r
+
+    def _has_grad(self) -> bool:
+        return all(component.has_grad for component in self._items)
+
     def _ndata(self):
         return sum(c.ndata for c in self._items)

@@ -795,6 +870,56 @@ def _update_cache(self):
         self._masked = self._data[_replace_none(self._mask, ...)]


+class MaskedCostWithPulls(MaskedCost):
+    """
+    Base class for cost functions with pulls.
+
+    :meta private:
+    """
+
+    def pulls(self, args: Sequence[float]) -> NDArray:
+        """
+        Return studentized residuals (aka pulls).
+
+        Parameters
+        ----------
+        args : sequence of float
+            Parameter values.
+
+        Returns
+        -------
+        array
+            Array of pull values. If the cost function is masked, the array contains NaN
+            values where the mask value is False.
+
+        Notes
+        -----
+        Pulls allow one to estimate how well a model fits the data. A pull is a value
+        computed for each data point. It is given by (observed - predicted) /
+        standard-deviation. If the model is correct, the expectation value of each pull
+        is zero and its variance is one in the asymptotic limit of infinite samples.
+        Under these conditions, the chi-square statistic computed from the sum of
+        squared pulls has a known probability distribution, so it serves as a
+        goodness-of-fit statistic.
+
+        Beware: the sum of squared pulls is in general not identical to the value
+        returned by the cost function, even if the cost function returns a chi-square
+        distributed test-statistic. The cost function is computed in a slightly
+        different way that makes the return value approach the asymptotic chi-square
+        distribution faster than a test statistic based on the sum of squared pulls.
+        In summary, only use pulls for plots. Compute the chi-square test statistic
+        directly from the cost function.
+        """
+        return self._pulls(args)
+
+    def _ndata(self):
+        return np.prod(self._masked.shape[: self._ndim])
+
+    @abc.abstractmethod
+    def _pulls(self, args: Sequence[float]) -> NDArray:
+        ...  # pragma: no cover
+
+
 class UnbinnedCost(MaskedCost):
     """
     Base class for unbinned cost functions.
@@ -802,12 +927,20 @@ class UnbinnedCost(MaskedCost):
     :meta private:
     """

-    __slots__ = "_model", "_log"
+    __slots__ = "_model", "_model_grad", "_log"

-    def __init__(self, data, model: Model, verbose: int, log: bool):
+    def __init__(
+        self,
+        data,
+        model: Model,
+        verbose: int,
+        log: bool,
+        grad: Optional[ModelGradient],
+    ):
         """For internal use."""
         self._model = model
         self._log = log
+        self._model_grad = grad
         super().__init__(_model_parameters(model), _norm(data), verbose)

     @abc.abstractproperty
@@ -824,6 +957,11 @@ def _ndata(self):
         # unbinned likelihoods have infinite degrees of freedom
         return np.inf

+    def _npoints(self):
+        # cannot use len(self._masked) because multi-dimensional data has format
+        # (K, N) with K dimensions and N points
+        return self._masked.shape[-1]
+
     @deprecated_parameter(bins="nbins")
     def visualize(
         self,
@@ -846,7 +984,6 @@
             it is interpreted as the point locations.
         bins : int, optional
             number of bins. Default is 50 bins.
-
         """
         from matplotlib import pyplot as plt

@@ -877,6 +1014,48 @@
         plt.errorbar(cx, n, n**0.5, fmt="ok")
         plt.fill_between(xm, 0, ym * dx, fc="C0")

+    def fisher_information(self, *args: float) -> NDArray:
+        """
+        Estimate Fisher information for model and sample.
+
+        The estimated Fisher information is only meaningful if the arguments provided
+        are estimates of the true values.
+
+        Parameters
+        ----------
+        *args: float
+            Estimates of model parameters.
+        """
+        g = self._pointwise_score(args)
+        return np.einsum("ji,ki->jk", g, g)
+
+    def covariance(self, *args: float) -> NDArray:
+        """
+        Estimate covariance of the parameters with the sandwich estimator.
+
+        This requires that the model gradient is provided, and that the arguments are
+        the maximum-likelihood estimates. The sandwich estimator is only asymptotically
+        correct.
+
+        Parameters
+        ----------
+        *args : float
+            Maximum-likelihood estimates of the parameter values.
+
+        Returns
+        -------
+        ndarray of float
+            The array has shape (K, K) for K arguments.
+        """
+        return np.linalg.inv(self.fisher_information(*args))
+
+    @abc.abstractmethod
+    def _pointwise_score(self, args: Sequence[float]) -> NDArray:
+        ...  # pragma: no cover
+
+    def _has_grad(self) -> bool:
+        return self._model_grad is not None
+

 class UnbinnedNLL(UnbinnedCost):
     """
@@ -907,8 +1086,10 @@ def __init__(
         self,
         data: ArrayLike,
         pdf: Model,
+        *,
         verbose: int = 0,
         log: bool = False,
+        grad: Optional[ModelGradient] = None,
     ):
         """
         Initialize UnbinnedNLL with data and model.
@@ -923,26 +1104,56 @@
             Probability density function of the form f(data, par0, [par1, ...]), where
             data is the data sample and par0, ... are model parameters. If the data are
             multivariate, data passed to f has shape (D, N), where D is the number of
-            dimensions and N the number of data points.
+            dimensions and N the number of data points. Must return an array with the
+            shape (N,).
         verbose : int, optional
             Verbosity level. 0: is no output (default). 1: print current args and
             negative log-likelihood value.
         log : bool, optional
             Distributions of the exponential family (normal, exponential, poisson, ...)
             allow one to compute the logarithm of the pdf directly, which is more
-            accurate and efficient than effectively doing ``log(exp(logpdf))``. Set this
-            to True, if the model returns the logarithm of the pdf instead of the pdf.
+            accurate and efficient than numerically computing ``log(pdf)``. Set this
+            to True if the model returns the logpdf instead of the pdf.
             Default is False.
+        grad : callable or None, optional
+            Optionally pass the gradient of the pdf. Has the same calling signature as
+            the pdf, but must return an array with the shape (K, N), where N is the
+            number of data points and K is the number of parameters. If `log` is True,
+            the function must return the gradient of the logpdf instead of the pdf. The
+            gradient can be used by Minuit to improve or speed up convergence and to
+            compute the sandwich estimator for the variance of the parameter estimates.
+            Default is None.
         """
-        super().__init__(data, pdf, verbose, log)
+        super().__init__(data, pdf, verbose, log, grad)

-    def _call(self, args: Sequence[float]) -> float:
-        data = self._masked
-        x = self._model(data, *args)
-        x = _normalize_model_output(x)
+    def _value(self, args: Sequence[float]) -> float:
+        f = self._eval_model(args)
+        if self._log:
+            return -2.0 * np.sum(f)
+        return 2.0 * _unbinned_nll(f)
+
+    def _grad(self, args: Sequence[float]) -> NDArray:
+        g = self._pointwise_score(args)
+        return -2.0 * np.sum(g, axis=1)
+
+    def _pointwise_score(self, args: Sequence[float]) -> NDArray:
+        g = self._eval_model_grad(args)
         if self._log:
-            return -2.0 * np.sum(x)
-        return 2.0 * _unbinned_nll(x)
+            return g
+        f = self._eval_model(args)
+        return g / f
+
+    def _eval_model(self, args: Sequence[float]) -> float:
+        data = self._masked
+        return _normalize_output(self._model(data, *args), "model", self._npoints())
+
+    def _eval_model_grad(self, args: Sequence[float]) -> NDArray:
+        if self._model_grad is None:
+            raise ValueError("no gradient available")  # pragma: no cover
+        data = self._masked
+        return _normalize_output(
+            self._model_grad(data, *args), "model gradient", self.npar, self._npoints()
+        )


 class ExtendedUnbinnedNLL(UnbinnedCost):
@@ -983,8 +1194,10 @@ def __init__(
         self,
         data: ArrayLike,
         scaled_pdf: Model,
+        *,
         verbose: int = 0,
         log: bool = False,
+        grad: Optional[ModelGradient] = None,
     ):
         """
         Initialize cost function with data and model.
@@ -1013,32 +1226,70 @@
            accurate and efficient than effectively doing ``log(exp(logpdf))``. Set this
            to True, if the model returns the logarithm of the density as the second
            argument instead of the density. Default is False.
+        grad : callable or None, optional
+            Optionally pass the gradient of the density function. Has the same calling
+            signature as the density function, but must return two arrays. The first
+            array has shape (K,), where K is the number of parameters, while the second
+            has shape (K, N), where N is the number of data points. The first array is
+            the gradient of the integrated density. The second array is the gradient of
+            the density itself. If `log` is True, the second array must be the gradient
+            of the log-density instead. The gradient can be used by Minuit to improve or
+            speed up convergence and to compute the sandwich estimator for the variance
+            of the parameter estimates. Default is None.
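+
+            As an illustrative sketch (hypothetical functions, not part of this
+            diff), a scaled unit-width normal density with yield ``n`` and
+            location ``mu``, whose integral is ``n``, and its gradient::
+
+                def density(x, n, mu):
+                    f = np.exp(-0.5 * (x - mu) ** 2) / np.sqrt(2 * np.pi)
+                    return n, n * f
+
+                def density_grad(x, n, mu):
+                    f = np.exp(-0.5 * (x - mu) ** 2) / np.sqrt(2 * np.pi)
+                    # first array: d/dn and d/dmu of the integral (which is n)
+                    # second array: d/dn and d/dmu of the density itself
+                    return np.array([1.0, 0.0]), np.stack([f, n * f * (x - mu)])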
""" - super().__init__(data, scaled_pdf, verbose, log) + super().__init__(data, scaled_pdf, verbose, log, grad) + + def _value(self, args: Sequence[float]) -> float: + fint, f = self._eval_model(args) + if self._log: + return 2 * (fint - np.sum(f)) + return 2 * (fint + _unbinned_nll(f)) + + def _grad(self, args: Sequence[float]) -> NDArray: + g = self._pointwise_score(args) + return -2 * np.sum(g, axis=1) - def _call(self, args: Sequence[float]) -> float: + def _pointwise_score(self, args: Sequence[float]) -> NDArray: + gint, g = self._eval_model_grad(args) + m = self._npoints() + if self._log: + return g - (gint / m)[:, np.newaxis] + _, f = self._eval_model(args) + return g / f - (gint / m)[:, np.newaxis] + + def _eval_model(self, args: Sequence[float]) -> Tuple[float, float]: + data = self._masked + fint, f = self._model(data, *args) + f = _normalize_output(f, "model", self._npoints(), msg="in second position") + return fint, f + + def _eval_model_grad(self, args: Sequence[float]) -> Tuple[NDArray, NDArray]: + if self._model_grad is None: + raise ValueError("no gradient available") # pragma: no cover data = self._masked - ns, x = self._model(data, *args) - x = _normalize_model_output( - x, "Model should return numpy array in second position" + gint, g = self._model_grad(data, *args) + gint = _normalize_output( + gint, "model gradient", self.npar, msg="in first position" ) - if self._log: - return 2 * (ns - np.sum(x)) - return 2 * (ns + _unbinned_nll(x)) + g = _normalize_output( + g, "model gradient", self.npar, self._npoints(), msg="in second position" + ) + return gint, g -class BinnedCost(MaskedCost): +class BinnedCost(MaskedCostWithPulls): """ Base class for binned cost functions. :meta private: """ - __slots__ = "_xe", "_ndim", "_bztrafo" + __slots__ = "_xe", "_ndim", "_bohm_zech_scale", "_bohm_zech_n" _xe: Union[NDArray, Tuple[NDArray, ...]] _ndim: int - _bztrafo: Optional[BohmZechTransform] + _bohm_zech_scale: Optional[NDArray] + _bohm_zech_n: Optional[NDArray] n = MaskedCost.data @@ -1053,7 +1304,6 @@ def __init__( n: ArrayLike, xe: Union[ArrayLike, Sequence[ArrayLike]], verbose: int, - *updater, ): """For internal use.""" if not isinstance(xe, Iterable): @@ -1062,12 +1312,13 @@ def __init__( shape = _shape_from_xe(xe) self._ndim = len(shape) if self._ndim == 1: - self._xe = _norm(xe) # type:ignore + self._xe = _norm(cast(ArrayLike, xe)) else: self._xe = tuple(_norm(xei) for xei in xe) n = _norm(n) - is_weighted = n.ndim > self._ndim + + is_weighted = n.ndim > self._ndim and n.shape[-1] == 2 if n.ndim != (self._ndim + int(is_weighted)): raise ValueError("n must either have same dimension as xe or one extra") @@ -1080,13 +1331,9 @@ def __init__( "xe must be longer by one element along each dimension" ) - if is_weighted: - if n.shape[-1] != 2: - raise ValueError("n must have shape (..., 2)") - self._bztrafo = BohmZechTransform(n[..., 0], n[..., 1]) - else: - self._bztrafo = None - + self._bohm_zech_scale = None + self._bohm_zech_n = None + self._set_bohm_zech(n, is_weighted) super().__init__(parameters, n, verbose) def prediction( @@ -1103,45 +1350,11 @@ def prediction( Returns ------- NDArray - Model prediction for each bin. + Model prediction for each bin. The expectation is always returned for all + bins, even if some bins are temporarily masked. """ return self._pred(args) - def pulls(self, args: Sequence[float]) -> NDArray: - """ - Return studentized residuals (aka pulls). - - Parameters - ---------- - args : sequence of float - Parameter values. 
-
-        Returns
-        -------
-        array
-            Array of pull values. If the cost function is masked, the array contains NaN
-            values where the mask value is False.
-
-        Notes
-        -----
-        Pulls allow one to estimate how well a model fits the data. A pull is a value
-        computed for each data bin. It is given by (observed - predicted) /
-        standard-deviation. If the model is correct, the expectation value of each pull
-        is zero and its variance is one in the asymptotic limit of infinite samples.
-        Under these conditions, the chi-square statistic is computed from the sum of
-        pulls squared has a known probability distribution if the model is correct. It
-        therefore serves as a goodness-of-fit statistic.
-
-        Beware: the sum of pulls squared in general is not identical to the value
-        returned by the cost function, even if the cost function returns a chi-square
-        distributed test-statistic. The cost function is computed in a slightly
-        differently way that makes the return value approach the asymptotic chi-square
-        distribution faster than a test statistic based on sum of pulls squared. In
-        summary, only use pulls for plots. Compute the chi-square test statistic
-        directly from the cost function.
-        """
-        return self._pulls(args)
-
     def visualize(self, args: Sequence[float]) -> None:
         """
         Visualize data and model agreement (requires matplotlib).
@@ -1161,9 +1374,9 @@ def visualize(self, args: Sequence[float]) -> None:
         comparison to a model, the visualization shows all data bins as a single
         sequence.
         """
-        return self._vis(args)
+        return self._visualize(args)

-    def _vis(self, args: Sequence[float]) -> None:
+    def _visualize(self, args: Sequence[float]) -> None:
         from matplotlib import pyplot as plt

         n, ne = self._n_err()
@@ -1188,31 +1401,20 @@ def _visualize(self, args: Sequence[float]) -> None:
     def _pred(self, args: Sequence[float]) -> Union[NDArray, Tuple[NDArray, NDArray]]:
         ...  # pragma: no cover

-    def _ndata(self):
-        return np.prod(self._masked.shape[: self._ndim])
-
-    def _update_cache(self):
-        super()._update_cache()
-        if self._bztrafo:
-            ma = _replace_none(self._mask, ...)
-            self._bztrafo = BohmZechTransform(self._data[ma, 0], self._data[ma, 1])
-
     def _n_err(self) -> Tuple[NDArray, NDArray]:
         d = self.data
-        if self._bztrafo:
-            n = d[..., 0].copy()
-            err = d[..., 1] ** 0.5
-        else:
+        if self._bohm_zech_scale is None:
             n = d.copy()
             err = d**0.5
-        if self.mask is not None:
-            ma = ~self.mask
-            n[ma] = np.nan
-            err[ma] = np.nan
+        else:
+            n = d[..., 0].copy()
+            err = d[..., 1] ** 0.5
         # mask values where error is zero
         ma = err == 0
-        err[ma] = np.nan
+        if self.mask is not None:
+            ma |= ~self.mask
         n[ma] = np.nan
+        err[ma] = np.nan
         return n, err

     def _pulls(self, args: Sequence[float]) -> NDArray:
@@ -1220,6 +1422,49 @@ def _pulls(self, args: Sequence[float]) -> NDArray:
         mu = self._pred(args)
         n, ne = self._n_err()
         return (n - mu) / ne

+    def _set_bohm_zech(self, n: NDArray, is_weighted: bool):
+        if not is_weighted:
+            return
+        val = n[..., 0]
+        var = n[..., 1]
+        self._bohm_zech_scale = np.ones_like(val)
+        np.divide(val, var, out=self._bohm_zech_scale, where=var > 0)
+        self._bohm_zech_n = val * self._bohm_zech_scale
+
+    def _update_cache(self):
+        super()._update_cache()
+        self._set_bohm_zech(self._masked, self._bohm_zech_scale is not None)
+
+    @overload
+    def _transformed(self, val: NDArray) -> Tuple[NDArray, NDArray]:
+        ...  # pragma: no cover
+
+    @overload
+    def _transformed(
+        self, val: NDArray, var: NDArray
+    ) -> Tuple[NDArray, NDArray, NDArray]:
+        ...
# pragma: no cover + + def _transformed(self, val, var=None): + s = self._bohm_zech_scale + ma = self.mask + if ma is not None: + val = val[ma] + if var is not None: + var = var[ma] + if s is None: + if var is None: + return self._masked, val + return self._masked, val, var + if var is None: + return self._bohm_zech_n, val * s + return self._bohm_zech_n, val * s, var * s**2 + + def _counts(self): + if self._bohm_zech_scale is None: + return self._masked + return self._masked[..., 0] + class BinnedCostWithModel(BinnedCost): """ @@ -1228,14 +1473,15 @@ class BinnedCostWithModel(BinnedCost): :meta private: """ - __slots__ = "_xe_shape", "_model", "_model_xe" + __slots__ = "_xe_shape", "_model", "_model_xe", "_model_len", "_model_grad" _model_xe: np.ndarray _xe_shape: Union[Tuple[int], Tuple[int, ...]] - def __init__(self, n, xe, model, verbose): + def __init__(self, n, xe, model, verbose, grad): """For internal use.""" self._model = model + self._model_grad = grad super().__init__(_model_parameters(model), n, xe, verbose) @@ -1247,16 +1493,11 @@ def __init__(self, n, xe, model, verbose): self._model_xe = np.row_stack( [x.flatten() for x in np.meshgrid(*self.xe, indexing="ij")] ) + self._model_len = np.prod(self._xe_shape) def _pred(self, args: Sequence[float]) -> NDArray: d = self._model(self._model_xe, *args) - d = _normalize_model_output(d) - expected_shape = (np.prod(self._xe_shape),) - if d.shape != expected_shape: - raise ValueError( - f"Expected model to return an array of shape {expected_shape}, " - f"but it returns an array of shape {d.shape}" - ) + d = _normalize_output(d, "model", self._model_len) if self._ndim > 1: d = d.reshape(self._xe_shape) for i in range(self._ndim): @@ -1266,6 +1507,18 @@ def _pred(self, args: Sequence[float]) -> NDArray: d[d < 0] = 0 return d + def _pred_grad(self, args: Sequence[float]) -> NDArray: + d = self._model_grad(self._model_xe, *args) + d = _normalize_output(d, "model gradient", self.npar, self._model_len) + if self._ndim > 1: + d = d.reshape((self.npar, *self._xe_shape)) + for i in range(1, self._ndim + 1): + d = np.diff(d, axis=i) + return d + + def _has_grad(self) -> bool: + return self._model_grad is not None + class Template(BinnedCost): """ @@ -1317,7 +1570,7 @@ class Template(BinnedCost): .. 
[6] Langenbruch, Eur.Phys.J.C 82 (2022) 5, 393 """ - __slots__ = "_model_data", "_model_xe", "_xe_shape", "_impl" + __slots__ = "_model_data", "_model_xe", "_xe_shape", "_impl", "_model_len" _model_data: List[ Union[ @@ -1333,6 +1586,7 @@ def __init__( n: ArrayLike, xe: Union[ArrayLike, Sequence[ArrayLike]], model_or_template: Collection[Union[Model, ArrayLike]], + *, name: Optional[Sequence[str]] = None, verbose: int = 0, method: str = "da", @@ -1451,6 +1705,7 @@ def __init__( self._model_xe = np.row_stack( [x.flatten() for x in np.meshgrid(*self.xe, indexing="ij")] ) + self._model_len = np.prod(self._xe_shape) def _pred(self, args: Sequence[float]) -> Tuple[NDArray, NDArray]: mu: NDArray = 0 # type:ignore @@ -1464,7 +1719,7 @@ def _pred(self, args: Sequence[float]) -> Tuple[NDArray, NDArray]: i += 1 elif isinstance(t1, Model) and isinstance(t2, int): d = t1(self._model_xe, *args[i : i + t2]) - d = _normalize_model_output(d) + d = _normalize_output(d, "model", self._model_len) if self._ndim > 1: d = d.reshape(self._xe_shape) for j in range(self._ndim): @@ -1479,22 +1734,18 @@ def _pred(self, args: Sequence[float]) -> Tuple[NDArray, NDArray]: assert False # pragma: no cover return mu, mu_var - def _call(self, args: Sequence[float]) -> float: + def _value(self, args: Sequence[float]) -> float: mu, mu_var = self._pred(args) - - ma = self.mask - if ma is not None: - mu = mu[ma] - mu_var = mu_var[ma] - - if self._bztrafo: - n, mu, mu_var = self._bztrafo(mu, mu_var) - else: - n = self._masked - + n, mu, mu_var = self._transformed(mu, mu_var) ma = mu > 0 return self._impl(n[ma], mu[ma], mu_var[ma]) + def _grad(self, args: Sequence[float]) -> NDArray: + raise NotImplementedError # pragma: no cover + + def _has_grad(self) -> bool: + return False + def _errordef(self) -> float: return NEGATIVE_LOG_LIKELIHOOD if self._impl is template_nll_asy else CHISQUARE @@ -1523,7 +1774,7 @@ def prediction(self, args: Sequence[float]) -> Tuple[NDArray, NDArray]: mu, mu_var = self._pred(args) return mu, np.sqrt(mu_var) - def _vis(self, args: Sequence[float]) -> None: + def _visualize(self, args: Sequence[float]) -> None: from matplotlib import pyplot as plt n, ne = self._n_err() @@ -1578,7 +1829,9 @@ def __init__( n: ArrayLike, xe: Union[ArrayLike, Sequence[ArrayLike]], cdf: Model, + *, verbose: int = 0, + grad: Optional[ModelGradient] = None, ): """ Initialize cost function with data and model. @@ -1604,26 +1857,44 @@ def __init__( Verbosity level. 0: is no output (default). 1: print current args and negative log-likelihood value. 
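+        grad : callable or None, optional
+            Optionally pass the gradient of the cdf. Has the same calling signature as
+            the cdf, but must return an array with shape (K, N), where N is the number
+            of bin edges and K is the number of parameters. The gradient can be used by
+            Minuit to potentially improve or speed up convergence. Default is None.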
""" - super().__init__(n, xe, cdf, verbose) + super().__init__(n, xe, cdf, verbose, grad) def _pred(self, args: Sequence[float]) -> NDArray: + # must return array of full length, mask not applied yet p = super()._pred(args) + # normalise probability of remaining bins ma = self.mask if ma is not None: - p /= np.sum(p[ma]) # normalise probability of remaining bins - scale = np.sum(self._masked[..., 0] if self._bztrafo else self._masked) - return p * scale + p /= np.sum(p[ma]) + # scale probabilities with total number of entries of unmasked bins in histogram + return p * np.sum(self._counts()) - def _call(self, args: Sequence[float]) -> float: + def _value(self, args: Sequence[float]) -> float: mu = self._pred(args) + n, mu = self._transformed(mu) + return multinominal_chi2(n, mu) + + def _grad(self, args: Sequence[float]) -> NDArray: + # pg and p must be arrays of full length, mask not applied yet + pg = super()._pred_grad(args) + p = super()._pred(args) + ma = self.mask + # normalise probability of remaining bins + if ma is not None: + scale = np.sum(p[ma]) + pg = pg / scale - p * np.sum(pg[:, ma]) / scale**2 + p /= scale + # scale probabilities with total number of entries of unmasked bins in histogram + scale = np.sum(self._counts()) + mu = p * scale + gmu = pg * scale ma = self.mask if ma is not None: mu = mu[ma] - if self._bztrafo: - n, mu = self._bztrafo(mu) - else: - n = self._masked - return multinominal_chi2(n, mu) + gmu = gmu[:, ma] + # don't need to scale mu and gmu, because scale factor cancels + n = self._masked if self._bohm_zech_scale is None else self._bohm_zech_n + return _multinominal_chi2_grad(n, mu, gmu) class ExtendedBinnedNLL(BinnedCostWithModel): @@ -1656,7 +1927,9 @@ def __init__( n: ArrayLike, xe: Union[ArrayLike, Sequence[ArrayLike]], scaled_cdf: Model, + *, verbose: int = 0, + grad: Optional[ModelGradient] = None, ): """ Initialize cost function with data and model. @@ -1680,22 +1953,29 @@ def __init__( Verbosity level. 0: is no output (default). 1: print current args and negative log-likelihood value. """ - super().__init__(n, xe, scaled_cdf, verbose) + super().__init__(n, xe, scaled_cdf, verbose, grad) - def _call(self, args: Sequence[float]) -> float: + def _value(self, args: Sequence[float]) -> float: mu = self._pred(args) + n, mu = self._transformed(mu) + return poisson_chi2(n, mu) + + def _grad(self, args: Sequence[float]) -> NDArray: + mu = self._pred(args) + gmu = self._pred_grad(args) ma = self.mask if ma is not None: mu = mu[ma] - if self._bztrafo: - n, mu = self._bztrafo(mu) - else: - n = self._masked - # assert isinstance(n, np.ndarray) - return poisson_chi2(n, mu) + gmu = gmu[:, ma] + n = self._counts() + s = self._bohm_zech_scale + if s is None: + return _poisson_chi2_grad(n, mu, gmu) + # use original n and mu because Bohm-Zech scale factor cancels + return _poisson_chi2_grad(n, mu, s * gmu) -class LeastSquares(MaskedCost): +class LeastSquares(MaskedCostWithPulls): """ Least-squares cost function (aka chisquare function). @@ -1704,9 +1984,14 @@ class LeastSquares(MaskedCost): :meth:`__init__` for details on how to use a multivariate model. 
""" - __slots__ = "_loss", "_cost", "_model", "_ndim" + __slots__ = "_loss", "_cost", "_cost_grad", "_model", "_model_grad", "_ndim" _loss: Union[str, LossFunction] + _cost: Callable[[ArrayLike, ArrayLike, ArrayLike], float] + _cost_grad: Optional[Callable[[NDArray, NDArray, NDArray, NDArray], NDArray]] + _model: Model + _model_grad: Optional[ModelGradient] + _ndim: int @property def x(self): @@ -1759,14 +2044,17 @@ def loss(self, loss: Union[str, LossFunction]): if isinstance(loss, str): if loss == "linear": self._cost = chi2 + self._cost_grad = _chi2_grad elif loss == "soft_l1": - self._cost = _soft_l1_cost + self._cost = _soft_l1_cost # type: ignore + self._cost_grad = _soft_l1_cost_grad else: raise ValueError(f"unknown loss {loss!r}") elif isinstance(loss, LossFunction): self._cost = lambda y, ye, ym: np.sum( loss(_z_squared(y, ye, ym)) # type:ignore ) + self._cost_grad = None else: raise ValueError("loss must be str or LossFunction") @@ -1778,6 +2066,7 @@ def __init__( model: Model, loss: Union[str, LossFunction] = "linear", verbose: int = 0, + grad: Optional[ModelGradient] = None, ): """ Initialize cost function with data and model. @@ -1826,19 +2115,13 @@ def __init__( self._ndim = x.ndim self._model = model + self._model_grad = grad self.loss = loss x = np.atleast_2d(x) data = np.column_stack(np.broadcast_arrays(*x, y, yerror)) super().__init__(_model_parameters(self._model), data, verbose) - def _call(self, args: Sequence[float]) -> float: - x = self._masked.T[0] if self._ndim == 1 else self._masked.T[: self._ndim] - y, yerror = self._masked.T[self._ndim :] - ym = self._model(x, *args) - ym = _normalize_model_output(ym) - return self._cost(y, yerror, ym) - def _ndata(self): return len(self._masked) @@ -1894,7 +2177,7 @@ def prediction(self, args: Sequence[float]) -> NDArray: """ return self.model(self.x, *args) - def pulls(self, args: Sequence[float]) -> NDArray: # noqa: D102 + def _pulls(self, args: Sequence[float]) -> NDArray: y = self.y.copy() ye = self.yerror.copy() ym = self.prediction(args) @@ -1905,8 +2188,33 @@ def pulls(self, args: Sequence[float]) -> NDArray: # noqa: D102 ye[ma] = np.nan return (y - ym) / ye + def _pred(self, args: Sequence[float]) -> NDArray: + x = self._masked.T[0] if self._ndim == 1 else self._masked.T[: self._ndim] + ym = self._model(x, *args) + return _normalize_output(ym, "model", self._ndata()) -LeastSquares.pulls.__doc__ = BinnedCost.pulls.__doc__ + def _pred_grad(self, args: Sequence[float]) -> NDArray: + if self._model_grad is None: + raise ValueError("no gradient available") # pragma: no cover + x = self._masked.T[0] if self._ndim == 1 else self._masked.T[: self._ndim] + ymg = self._model_grad(x, *args) + return _normalize_output(ymg, "model gradient", self.npar, self._ndata()) + + def _value(self, args: Sequence[float]) -> float: + y, ye = self._masked.T[self._ndim :] + ym = self._pred(args) + return self._cost(y, ye, ym) + + def _grad(self, args: Sequence[float]) -> NDArray: + if self._cost_grad is None: + raise ValueError("no cost gradient available") # pragma: no cover + y, ye = self._masked.T[self._ndim :] + ym = self._pred(args) + ymg = self._pred_grad(args) + return self._cost_grad(y, ye, ym, ymg) + + def _has_grad(self) -> bool: + return self._model_grad is not None and self._cost_grad is not None class NormalConstraint(Cost): @@ -1934,7 +2242,7 @@ class NormalConstraint(Cost): result. 
""" - __slots__ = "_value", "_cov", "_covinv" + __slots__ = "_expected", "_cov", "_covinv" def __init__( self, @@ -1955,12 +2263,25 @@ def __init__( Expected error(s). If 1D, must have same length as `args`. If 2D, must be the covariance matrix of the parameters. """ - self._value = _norm(value) + tp_args = (args,) if isinstance(args, str) else tuple(args) + nargs = len(tp_args) + self._expected = _norm(value) + if self._expected.ndim > 1: + raise ValueError("value must be a scalar or one-dimensional") + # args can be a vector of values, in this case we have nargs == 1 + if nargs > 1 and len(self._expected) != nargs: + raise ValueError("size of value does not match size of args") self._cov = _norm(error) + if len(self._cov) != len(self._expected): + raise ValueError("size of error does not match size of value") if self._cov.ndim < 2: self._cov **= 2 + elif self._cov.ndim == 2: + if not is_positive_definite(self._cov): + raise ValueError("covariance matrix is not positive definite") + else: + raise ValueError("covariance matrix cannot have more than two dimensions") self._covinv = _covinv(self._cov) - tp_args = (args,) if isinstance(args, str) else tuple(args) super().__init__({k: None for k in tp_args}, False) @property @@ -1974,26 +2295,38 @@ def covariance(self): @covariance.setter def covariance(self, value): + value = np.asarray(value) + if value.ndim == 2 and not is_positive_definite(value): + raise ValueError("covariance matrix is not positive definite") self._cov[:] = value self._covinv = _covinv(self._cov) @property def value(self): """Get expected parameter values.""" - return self._value + return self._expected @value.setter def value(self, value): - self._value[:] = value + self._expected[:] = value - def _call(self, args: Sequence[float]) -> float: - delta = self._value - args + def _value(self, args: Sequence[float]) -> float: + delta = args - self._expected if self._covinv.ndim < 2: return np.sum(delta**2 * self._covinv) return np.einsum("i,ij,j", delta, self._covinv, delta) + def _grad(self, args: Sequence[float]) -> NDArray: + delta = args - self._expected + if self._covinv.ndim < 2: + return 2 * delta * self._covinv + return 2 * self._covinv @ delta + + def _has_grad(self) -> bool: + return True + def _ndata(self): - return len(self._value) + return len(self._expected) def visualize(self, args: ArrayLike): """ @@ -2046,15 +2379,21 @@ def _covinv(array): return np.linalg.inv(array) if array.ndim == 2 else 1.0 / array -def _normalize_model_output(x, msg="Model should return numpy array"): +def _normalize_output(x, kind, *shape, msg=None): if not isinstance(x, np.ndarray): - warnings.warn( - f"{msg}, but returns {type(x)}", - PerformanceWarning, - ) + if msg is None: + msg = f"{kind} should return numpy array, but returns {type(x)}" + else: + msg = f"{kind} should return numpy array {msg}, but returns {type(x)}" + warnings.warn(msg, PerformanceWarning) x = np.array(x) if x.dtype.kind != "f": return x.astype(float) + if x.ndim < len(shape): + return x.reshape(*shape) + elif x.shape != shape: + msg = f"output of {kind} has shape {x.shape!r}, but {shape!r} is required" + raise ValueError(msg) return x diff --git a/src/iminuit/minuit.py b/src/iminuit/minuit.py index 7f38d0f6e..3a50c1655 100644 --- a/src/iminuit/minuit.py +++ b/src/iminuit/minuit.py @@ -1,4 +1,5 @@ """Minuit class.""" +from __future__ import annotations import warnings from iminuit import util as mutil @@ -18,7 +19,6 @@ import numpy as np from typing import ( Union, - Sequence, Optional, Callable, Tuple, @@ -30,12 
+30,9 @@
     Set,
     Sized,
 )
-from iminuit.typing import UserBound
+from iminuit.typing import UserBound, Cost, CostGradient
 from iminuit._optional_dependencies import optional_module_for
-
-# Better use numpy.typing.ArrayLike in the future, but this
-# requires dropping Python-3.6 support
-_ArrayLike = Sequence
+from numpy.typing import ArrayLike
 
 MnPrint.global_level = 0
 
@@ -488,9 +485,9 @@ def ngrad(self) -> int:
 
     def __init__(
         self,
-        fcn: Callable,
-        *args: Union[float, _ArrayLike[float]],
-        grad: Callable = None,
+        fcn: Cost,
+        *args: Union[float, ArrayLike],
+        grad: Union[CostGradient, bool, None] = None,
         name: Collection[str] = None,
         **kwds: float,
     ):
@@ -508,11 +505,18 @@ def __init__(
         *args :
             Starting values for the minimization as positional arguments.
             See notes for details on how to set starting values.
-        grad :
-            Function that calculates the gradient and returns an iterable object with
-            one entry for each parameter, which is the derivative for that parameter. If
-            None (default), Minuit will calculate the gradient numerically.
-        name :
+        grad : callable, bool, or None, optional
+            If grad is a callable, it must be a function that calculates the gradient
+            and returns an iterable object with one entry for each parameter, which is
+            the derivative of `fcn` for that parameter. If None (default), Minuit will
+            call the function :func:`iminuit.util.gradient` on `fcn`. If this function
+            returns a callable, it will be used; otherwise, Minuit will internally
+            compute the gradient numerically. Please see the documentation of
+            :func:`iminuit.util.gradient` for how gradients are detected. Passing a
+            boolean overrides this detection. If grad=True is used, a ValueError is
+            raised if no usable gradient is found. If grad=False, Minuit will
+            internally compute the gradient numerically.
+        name : sequence of str, optional
             If it is set, it overrides iminuit's function signature detection.
         **kwds :
             Starting values for the minimization as keyword arguments.
@@ -605,7 +609,7 @@ def fcn_np(x):
 
         See Also
         --------
-        migrad, hesse, minos, scan, simplex
+        migrad, hesse, minos, scan, simplex, iminuit.util.gradient
         """
         array_call = False
         if len(args) == 1 and isinstance(args[0], Iterable):
@@ -637,9 +641,24 @@ def fcn_np(x):
         # set self.tol to default value
         self.tol = None  # type:ignore
         self._strategy = MnStrategy(1)
+
+        if grad is None:
+            grad = mutil.gradient(fcn)
+        elif grad is True:
+            grad = mutil.gradient(fcn)
+            if grad is None:
+                raise ValueError(
+                    "gradient enforced, but iminuit.util.gradient returned None"
+                )
+        elif grad is False:
+            grad = None
+
+        if grad is not None and not isinstance(grad, CostGradient):
+            raise ValueError("provided gradient is not a CostGradient")
+
         self._fcn = FCN(
             fcn,
-            getattr(fcn, "grad", grad),
+            grad,
             array_call,
             getattr(fcn, "errordef", 1.0),
         )
@@ -1327,6 +1346,10 @@ def visualize(self, plot: Callable = None, **kwargs):
         kwargs :
             Other keyword arguments are forwarded to the plot function.
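To illustrate the `grad` argument semantics documented in the hunk above (callable, True, False), a minimal sketch against the patched API:

import numpy as np
from iminuit import Minuit

def cost(a, b):
    return a**2 + (b - 1) ** 2

def cost_grad(a, b):
    # one entry per parameter, as required of a CostGradient
    return np.array([2 * a, 2 * (b - 1)])

m = Minuit(cost, a=1.0, b=0.0, grad=cost_grad)
m.migrad()
assert m.ngrad > 0  # the analytic gradient was used

m2 = Minuit(cost, a=1.0, b=0.0, grad=False)  # force numerical gradient
m2.migrad()
assert m2.ngrad == 0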
+ + See Also + -------- + Minuit.interactive """ return self._visualize(plot)(self.values, **kwargs) @@ -1528,7 +1551,7 @@ def mnprofile( *, size: int = 30, bound: Union[float, UserBound] = 2, - grid: _ArrayLike[float] = None, + grid: ArrayLike = None, subtract_min: bool = False, ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: r""" @@ -1634,7 +1657,7 @@ def profile( *, size: int = 100, bound: Union[float, UserBound] = 2, - grid: _ArrayLike[float] = None, + grid: ArrayLike = None, subtract_min: bool = False, ) -> Tuple[np.ndarray, np.ndarray]: r""" @@ -1772,7 +1795,7 @@ def contour( *, size: int = 50, bound: Union[float, Iterable[Tuple[float, float]]] = 2, - grid: Tuple[_ArrayLike, _ArrayLike] = None, + grid: Tuple[ArrayLike, ArrayLike] = None, subtract_min: bool = False, ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: r""" @@ -2012,7 +2035,7 @@ def draw_mncontour( x: Union[int, str], y: Union[int, str], *, - cl: Union[float, _ArrayLike[float]] = None, + cl: Union[float, ArrayLike] = None, size: int = 100, interpolated: int = 0, experimental: bool = False, @@ -2078,7 +2101,7 @@ def draw_mncontour( def draw_mnmatrix( self, *, - cl: Union[float, _ArrayLike[float]] = None, + cl: Union[float, ArrayLike] = None, size: int = 100, experimental: bool = False, figsize=None, @@ -2240,6 +2263,10 @@ def interactive( -------- .. plot:: plots/interactive.py :include-source: + + See Also + -------- + Minuit.visualize """ with warnings.catch_warnings(): # ipywidgets produces deprecation warnings through use of internal APIs :( diff --git a/src/iminuit/typing.py b/src/iminuit/typing.py index a330d2c72..5f8033c10 100644 --- a/src/iminuit/typing.py +++ b/src/iminuit/typing.py @@ -29,6 +29,33 @@ def __call__(self, x: np.ndarray, *args: float) -> np.ndarray: ... # pragma: no cover +@runtime_checkable +class ModelGradient(Protocol): + """Type for user-defined model gradient.""" + + def __call__(self, x: np.ndarray, *args: float) -> np.ndarray: + """Evaluate model gradient at locations x and return results as an array.""" + ... # pragma: no cover + + +@runtime_checkable +class Cost(Protocol): + """Type for user-defined cost function.""" + + def __call__(self, *args: float) -> float: + """Evaluate cost and return results as a float.""" + ... # pragma: no cover + + +@runtime_checkable +class CostGradient(Protocol): + """Type for user-defined gradient of a cost function.""" + + def __call__(self, *args: float) -> np.ndarray: + """Evaluate gradient and return results as an array.""" + ... 
# pragma: no cover
+
+
 @runtime_checkable
 class LossFunction(Protocol):
     """Type for user-defined loss function for LeastSquares clas."""
diff --git a/src/iminuit/util.py b/src/iminuit/util.py
index 4751ae150..8f62ac03a 100644
--- a/src/iminuit/util.py
+++ b/src/iminuit/util.py
@@ -8,10 +8,10 @@
 from collections import OrderedDict
 from argparse import Namespace
 from iminuit import _repr_html, _repr_text, _deprecated
-from iminuit.typing import Key, UserBound
+from iminuit.typing import Key, UserBound, Cost, CostGradient
 from iminuit.warnings import IMinuitWarning, HesseFailedWarning, PerformanceWarning
 import numpy as np
-from numpy.typing import NDArray
+from numpy.typing import NDArray, ArrayLike
 from typing import (
     overload,
     Any,
@@ -52,6 +52,8 @@
     "make_with_signature",
     "merge_signatures",
     "describe",
+    "gradient",
+    "is_positive_definite",
 )
@@ -1066,7 +1068,7 @@ def __call__(self, *args: object) -> object:
 
 def merge_signatures(
     callables: Iterable[Callable], annotations: bool = False
-) -> Tuple[Any, List[Tuple[int, ...]]]:
+) -> Tuple[Any, List[List[int]]]:
     """
     Merge signatures of callables with positional arguments.
 
@@ -1078,7 +1080,7 @@ def g(x, p): ...
 
     parameters, mapping = merge_signatures(f, g)
     # parameters is ('x', 'y', 'z', 'p')
-    # mapping is ((0, 1, 2), (0, 3))
+    # mapping is ([0, 1, 2], [0, 3])
 
     Parameters
     ----------
@@ -1108,7 +1110,7 @@ def g(x, p): ...
             amap.append(len(args))
             args.append(k)
             anns.append(ann)
-        mapping.append(tuple(amap))
+        mapping.append(amap)
 
     if annotations:
         return {k: a for (k, a) in zip(args, anns)}, mapping
@@ -1605,3 +1607,60 @@ def _detect_log_spacing(x: NDArray) -> bool:
     lin_rel_std = np.std(d_lin) / np.mean(d_lin)
     log_rel_std = np.std(d_log) / np.mean(d_log)
     return log_rel_std < lin_rel_std
+
+
+def gradient(fcn: Cost) -> Optional[CostGradient]:
+    """
+    Return a callable which computes the gradient of fcn, or None.
+
+    Parameters
+    ----------
+    fcn : Cost
+        Cost function which may provide a callable gradient. How the gradient
+        is detected is specified below in the Notes.
+
+    Notes
+    -----
+    This function checks whether the following attributes exist: `fcn.grad` and
+    `fcn.has_grad`. If `fcn.grad` exists and is a CostGradient, it is returned unless
+    `fcn.has_grad` exists and is False. If no usable gradient is detected, None is
+    returned.
+
+    Returns
+    -------
+    callable or None
+        The gradient function, or None if no usable gradient was detected.
+    """
+    grad = getattr(fcn, "grad", None)
+    has_grad = getattr(fcn, "has_grad", True)
+    if grad and isinstance(grad, CostGradient) and has_grad:
+        return grad
+    return None
+
+
+def is_positive_definite(m: ArrayLike) -> bool:
+    """
+    Return True if argument is a positive definite matrix.
+
+    This test is somewhat expensive, because we attempt a Cholesky decomposition.
+
+    Parameters
+    ----------
+    m : array-like
+        Matrix to be tested.
+
+    Returns
+    -------
+    bool
+        True if matrix is positive definite and False otherwise.
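The detection contract of `iminuit.util.gradient` above in miniature: an object exposing a callable `grad` attribute is picked up, and `has_grad = False` opts out. A small sketch with hypothetical class names:

import numpy as np
from iminuit import util

class MyCost:
    def __call__(self, a):
        return (a - 1) ** 2

    def grad(self, a):
        return np.array([2 * (a - 1)])

assert util.gradient(MyCost()) is not None  # bound method satisfies CostGradient

class NoGrad(MyCost):
    has_grad = False  # opt out of gradient detection

assert util.gradient(NoGrad()) is None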
+ """ + m = np.atleast_2d(m) + if np.all(m.T == m): + # maybe check this first https://en.wikipedia.org/wiki/Diagonally_dominant_matrix + # and only try cholesky if that fails + try: + np.linalg.cholesky(m) + except np.linalg.LinAlgError: + return False + return True + return False diff --git a/tests/test_cost.py b/tests/test_cost.py index f1c918ec5..05a1e4073 100644 --- a/tests/test_cost.py +++ b/tests/test_cost.py @@ -14,7 +14,6 @@ NormalConstraint, Template, multinominal_chi2, - _soft_l1_loss, PerformanceWarning, ) from iminuit.util import describe @@ -55,6 +54,27 @@ def expon_cdf(x, a): return 1 - np.exp(-x / a) +def numerical_cost_gradient(fcn): + jacobi = pytest.importorskip("jacobi").jacobi + return lambda *args: jacobi(lambda p: fcn(*p), args)[0] + + +def numerical_model_gradient(fcn): + jacobi = pytest.importorskip("jacobi").jacobi + return lambda x, *args: jacobi(lambda p: fcn(x, *p), args)[0].T + + +def numerical_extended_model_gradient(fcn): + jacobi = pytest.importorskip("jacobi").jacobi + + def fn(x, *args): + fint = jacobi(lambda p: fcn(x, *p)[0], args)[0] + f = jacobi(lambda p: fcn(x, *p)[1], args)[0].T + return fint, f + + return fn + + @pytest.fixture def unbinned(): rng = np.random.default_rng(1) @@ -143,48 +163,106 @@ def model( } -def test_Constant(): +def test_Constant_1(): c = Constant(2.5) assert c.value == 2.5 assert c.ndata == 0 + assert c.has_grad + assert_equal(c.grad(), []) + + +def test_Constant_2(): + c = NormalConstraint(("a", "b"), (1, 2), (3, 4)) + Constant(10.2) + assert_allclose(c(1, 2), 10.2) + assert_allclose(c(4, 2), 10.2 + 1) + + +def test_Constant_3(): + c = NormalConstraint(("a", "b"), (1, 2), (3, 4)) + Constant(10.2) + ref = numerical_cost_gradient(c) + assert c.has_grad + assert_allclose(c.grad(1, 2), ref(1, 2)) + assert_allclose(c.grad(2, 3), ref(2, 3)) + assert_allclose(c.grad(-1, -2), ref(-1, -2)) @pytest.mark.parametrize("verbose", (0, 1)) @pytest.mark.parametrize("model", (logpdf, pdf)) -def test_UnbinnedNLL(unbinned, verbose, model): +@pytest.mark.parametrize("use_grad", (False, True)) +def test_UnbinnedNLL(unbinned, verbose, model, use_grad): mle, x = unbinned - cost = UnbinnedNLL(x, model, verbose=verbose, log=model is logpdf) + cost = UnbinnedNLL( + x, + model, + verbose=verbose, + log=model is logpdf, + grad=numerical_model_gradient(model), + ) assert cost.ndata == np.inf - m = Minuit(cost, mu=0, sigma=1) + if use_grad: + ref = numerical_cost_gradient(cost) + assert_allclose(cost.grad(1, 2), ref(1, 2), rtol=1e-3) + assert_allclose(cost.grad(-1, 3), ref(-1, 3), rtol=1e-3) + + m = Minuit(cost, mu=0, sigma=1, grad=use_grad) m.limits["sigma"] = (0, None) m.migrad() + assert m.valid assert_allclose(m.values, mle[1:], atol=1e-3) assert m.errors["mu"] == pytest.approx(1000**-0.5, rel=0.05) + if use_grad: + assert m.ngrad > 0 + else: + assert m.ngrad == 0 + assert_equal(m.fmin.reduced_chi2, np.nan) + if use_grad and verbose == 0: + fi = cost.fisher_information(*m.values) + cov = cost.covariance(*m.values) + assert_allclose(cov, np.linalg.inv(fi)) + assert_allclose(np.diag(cov) ** 0.5, m.errors, rtol=5e-2) + rho1 = m.covariance.correlation()[0, 1] + rho2 = cov[0, 1] / (cov[0, 0] * cov[1, 1]) ** 0.5 + assert_allclose(rho1, rho2, atol=0.01) + -def test_UnbinnedNLL_2D(): +@pytest.mark.parametrize("use_grad", (False, True)) +def test_UnbinnedNLL_2D(use_grad): def model(x_y, mux, muy, sx, sy, rho): return mvnorm(mux, muy, sx, sy, rho).pdf(x_y.T) truth = 0.1, 0.2, 0.3, 0.4, 0.5 - x, y = mvnorm(*truth).rvs(size=1000, random_state=1).T + x, y = 
mvnorm(*truth).rvs(size=100, random_state=1).T - cost = UnbinnedNLL((x, y), model) - m = Minuit(cost, *truth) + cost = UnbinnedNLL((x, y), model, grad=numerical_model_gradient(model)) + m = Minuit(cost, *truth, grad=use_grad) m.limits["sx", "sy"] = (0, None) m.limits["rho"] = (-1, 1) m.migrad() assert m.valid - assert_allclose(m.values, truth, atol=0.02) + if use_grad: + assert m.ngrad > 0 + + ref = numerical_cost_gradient(cost) + assert_allclose(cost.grad(*m.values), ref(*m.values), rtol=0.01) + else: + assert m.ngrad == 0 + + assert_allclose(m.values, truth, atol=0.2) -def test_UnbinnedNLL_mask(): - c = UnbinnedNLL([1, np.nan, 2], lambda x, a: x + a) +@pytest.mark.parametrize("use_grad", (False, True)) +def test_UnbinnedNLL_mask(use_grad): + c = UnbinnedNLL( + [1, np.nan, 2], + lambda x, a: x + a**2, + grad=lambda x, a: np.ones_like(x) * 2 * a, + ) assert c.mask is None assert np.isnan(c(0)) @@ -192,6 +270,11 @@ def test_UnbinnedNLL_mask(): assert_equal(c.mask, (True, False, True)) assert not np.isnan(c(0)) + assert_allclose(c.grad(0), [0]) + + ref = numerical_cost_gradient(c) + assert_allclose(c.grad(1), ref(1), atol=1e-3) + @pytest.mark.parametrize("log", (False, True)) def test_UnbinnedNLL_properties(log): @@ -281,7 +364,8 @@ def model( @pytest.mark.parametrize("verbose", (0, 1)) @pytest.mark.parametrize("model", (logpdf, pdf)) -def test_ExtendedUnbinnedNLL(unbinned, verbose, model): +@pytest.mark.parametrize("use_grad", (False, True)) +def test_ExtendedUnbinnedNLL(unbinned, verbose, model, use_grad): mle, x = unbinned log = model is logpdf @@ -291,39 +375,67 @@ def density(x, n, mu, sigma): return n, np.log(n) + logpdf(x, mu, sigma) return n, n * pdf(x, mu, sigma) - cost = ExtendedUnbinnedNLL(x, density, verbose=verbose, log=log) + cost = ExtendedUnbinnedNLL( + x, + density, + verbose=verbose, + log=log, + grad=numerical_extended_model_gradient(density), + ) assert cost.ndata == np.inf - m = Minuit(cost, n=len(x), mu=0, sigma=1) + m = Minuit(cost, n=len(x), mu=0, sigma=1, grad=use_grad) m.limits["n"] = (0, None) m.limits["sigma"] = (0, None) m.migrad() + assert m.valid assert_allclose(m.values, mle, atol=1e-3) assert m.errors["mu"] == pytest.approx(1000**-0.5, rel=0.05) assert_equal(m.fmin.reduced_chi2, np.nan) + if use_grad: + assert m.ngrad > 0 + else: + assert m.ngrad == 0 -def test_ExtendedUnbinnedNLL_2D(): + +@pytest.mark.parametrize("use_grad", (False, True)) +def test_ExtendedUnbinnedNLL_2D(use_grad): def model(x_y, n, mux, muy, sx, sy, rho): - return n * 1000, n * 1000 * mvnorm(mux, muy, sx, sy, rho).pdf(x_y.T) + return n, n * mvnorm(mux, muy, sx, sy, rho).pdf(x_y.T) - truth = 1.0, 0.1, 0.2, 0.3, 0.4, 0.5 - x, y = mvnorm(*truth[1:]).rvs(size=int(truth[0] * 1000)).T + truth = 100.0, 0.1, 0.2, 0.3, 0.4, 0.5 + x, y = mvnorm(*truth[1:]).rvs(size=int(truth[0]), random_state=1).T - cost = ExtendedUnbinnedNLL((x, y), model) + cost = ExtendedUnbinnedNLL( + (x, y), model, grad=numerical_extended_model_gradient(model) + ) - m = Minuit(cost, *truth) + m = Minuit(cost, *truth, grad=use_grad) m.limits["n", "sx", "sy"] = (0, None) m.limits["rho"] = (-1, 1) m.migrad() assert m.valid - assert_allclose(m.values, truth, atol=0.1) + assert_allclose(m.values, truth, atol=0.5) + + if use_grad: + assert m.ngrad > 0 + + ref = numerical_cost_gradient(cost) + assert_allclose(cost.grad(*m.values), ref(*m.values), rtol=0.01) + else: + assert m.ngrad == 0 def test_ExtendedUnbinnedNLL_mask(): - c = ExtendedUnbinnedNLL([1, np.nan, 2], lambda x, a: (1, x + a)) + def model(x, a): + return 1, x + a + + c = 
ExtendedUnbinnedNLL( + [1, np.nan, 2], model, grad=numerical_extended_model_gradient(model) + ) assert c.ndata == np.inf assert np.isnan(c(0)) @@ -331,6 +443,10 @@ def test_ExtendedUnbinnedNLL_mask(): assert not np.isnan(c(0)) assert c.ndata == np.inf + ref = numerical_cost_gradient(c) + assert_allclose(c.grad(0), ref(0)) + assert_allclose(c.grad(1.5), ref(1.5)) + @pytest.mark.parametrize("log", (False, True)) def test_ExtendedUnbinnedNLL_properties(log): @@ -400,15 +516,22 @@ def test_ExtendedUnbinnedNLL_pickle(): @pytest.mark.parametrize("verbose", (0, 1)) -def test_BinnedNLL(binned, verbose): +@pytest.mark.parametrize("use_grad", (False, True)) +def test_BinnedNLL(binned, verbose, use_grad): mle, nx, xe = binned - cost = BinnedNLL(nx, xe, cdf, verbose=verbose) + cost = BinnedNLL(nx, xe, cdf, verbose=verbose, grad=numerical_model_gradient(cdf)) assert cost.ndata == len(nx) - m = Minuit(cost, mu=0, sigma=1) + if use_grad: + ref = numerical_cost_gradient(cost) + assert_allclose(cost.grad(*mle[1:]), ref(*mle[1:])) + assert_allclose(cost.grad(-1, 2), ref(-1, 2)) + + m = Minuit(cost, mu=0, sigma=1, grad=use_grad) m.limits["sigma"] = (0, None) m.migrad() + assert m.valid # binning loses information compared to unbinned case assert_allclose(m.values, mle[1:], rtol=0.15) assert m.errors["mu"] == pytest.approx(1000**-0.5, rel=0.05) @@ -416,6 +539,11 @@ def test_BinnedNLL(binned, verbose): assert_allclose(m.fmin.reduced_chi2, 1, atol=0.15) + if use_grad: + assert m.ngrad > 0 + else: + assert m.ngrad == 0 + def test_BinnedNLL_pulls(binned): mle, nx, xe = binned @@ -428,7 +556,8 @@ def test_BinnedNLL_pulls(binned): assert np.nanvar(pulls) == pytest.approx(1, abs=0.2) -def test_BinnedNLL_weighted(): +@pytest.mark.parametrize("use_grad", (False, True)) +def test_BinnedNLL_weighted(use_grad): xe = np.array([0, 0.2, 0.4, 0.8, 1.5, 10]) p = np.diff(expon_cdf(xe, 1)) n = p * 1000 @@ -442,31 +571,24 @@ def test_BinnedNLL_weighted(): # variance * 4 = (sigma * 2)^2, constructed so that # fitted parameter has twice the uncertainty w = np.transpose((n, 4 * n)) - c = BinnedNLL(w, xe, expon_cdf) + c = BinnedNLL(w, xe, expon_cdf, grad=numerical_model_gradient(expon_cdf)) assert_equal(c.data, w) - m2 = Minuit(c, 1) + + if use_grad: + ref = numerical_cost_gradient(c) + assert_allclose(c.grad(*m1.values), ref(*m1.values)) + assert_allclose(c.grad(12), ref(12)) + + m2 = Minuit(c, 1, grad=use_grad) m2.migrad() + assert m2.valid assert m2.values[0] == pytest.approx(1, rel=1e-2) assert m2.errors[0] == pytest.approx(2 * m1.errors[0], rel=1e-2) - -def test_ExtendedBinnedNLL_weighted_pulls(): - rng = np.random.default_rng(1) - xe = np.linspace(0, 5, 500) - p = np.diff(expon_cdf(xe, 2)) - m = p * 100000 - n = rng.poisson(m * 4) / 4 - nvar = m * 4 / 4**2 - w = np.transpose((n, nvar)) - c = ExtendedBinnedNLL(w, xe, lambda x, n, s: n * expon_cdf(x, s)) - m = Minuit(c, np.sum(n), 2) - m.limits["n"] = (0.1, None) - m.limits["s"] = (0.1, None) - m.migrad() - assert m.valid - pulls = c.pulls(m.values) - assert np.nanmean(pulls) == pytest.approx(0, abs=0.1) - assert np.nanvar(pulls) == pytest.approx(1, abs=0.2) + if use_grad: + assert m2.ngrad > 0 + else: + assert m2.ngrad == 0 def test_BinnedNLL_bad_input_1(): @@ -485,7 +607,7 @@ def test_BinnedNLL_bad_input_3(): def test_BinnedNLL_bad_input_4(): - with pytest.raises(ValueError, match="n must have shape"): + with pytest.raises(ValueError, match="n must either have same dimension as xe"): BinnedNLL([[1, 2, 3]], [1, 2], lambda x, a: 0) @@ -507,7 +629,8 @@ def 
test_BinnedNLL_bad_input_6(): BinnedNLL(1, 2, lambda x, a: 0) -def test_BinnedNLL_2D(): +@pytest.mark.parametrize("use_grad", (False, True)) +def test_BinnedNLL_2D(use_grad): truth = (0.1, 0.2, 0.3, 0.4, 0.5) x, y = mvnorm(*truth).rvs(size=1000, random_state=1).T @@ -516,15 +639,25 @@ def test_BinnedNLL_2D(): def model(xy, mux, muy, sx, sy, rho): return mvnorm(mux, muy, sx, sy, rho).cdf(xy.T) - cost = BinnedNLL(w, (xe, ye), model) + cost = BinnedNLL(w, (xe, ye), model, grad=numerical_model_gradient(model)) assert cost.ndata == np.prod(w.shape) - m = Minuit(cost, *truth) + + if use_grad: + ref = numerical_cost_gradient(cost) + assert_allclose(cost.grad(*truth), ref(*truth)) + + m = Minuit(cost, *truth, grad=use_grad) m.limits["sx", "sy"] = (0, None) m.limits["rho"] = (-1, 1) m.migrad() assert m.valid assert_allclose(m.values, truth, atol=0.05) + if use_grad: + assert m.ngrad > 0 + else: + assert m.ngrad == 0 + assert cost.ndata == np.prod(w.shape) w2 = w.copy() w2[1, 1] += 1 @@ -552,14 +685,22 @@ def model(xy, mux, muy, sx, sy, rho): def test_BinnedNLL_mask(): - c = BinnedNLL([5, 1000, 1], [0, 1, 2, 3], expon_cdf) + c = BinnedNLL( + [5, 1000, 1], [0, 1, 2, 3], expon_cdf, grad=numerical_model_gradient(expon_cdf) + ) assert c.ndata == 3 + ref = numerical_cost_gradient(c) + assert_allclose(c.grad(2), ref(2)) + c_unmasked = c(1) c.mask = np.arange(3) != 1 assert c(1) < c_unmasked assert c.ndata == 2 + ref = numerical_cost_gradient(c) + assert_allclose(c.grad(2), ref(2)) + def test_BinnedNLL_properties(): def cdf(x, a, b): @@ -611,16 +752,24 @@ def test_BinnedNLL_pickle(): @pytest.mark.parametrize("verbose", (0, 1)) -def test_ExtendedBinnedNLL(binned, verbose): +@pytest.mark.parametrize("use_grad", (False, True)) +def test_ExtendedBinnedNLL(binned, verbose, use_grad): mle, nx, xe = binned - cost = ExtendedBinnedNLL(nx, xe, scaled_cdf, verbose=verbose) + cost = ExtendedBinnedNLL( + nx, xe, scaled_cdf, verbose=verbose, grad=numerical_model_gradient(scaled_cdf) + ) assert cost.ndata == len(nx) - m = Minuit(cost, n=mle[0], mu=0, sigma=1) + if use_grad: + ref = numerical_cost_gradient(cost) + assert_allclose(cost.grad(*mle), ref(*mle)) + + m = Minuit(cost, n=mle[0], mu=0, sigma=1, grad=use_grad) m.limits["n"] = (0, None) m.limits["sigma"] = (0, None) m.migrad() + assert m.valid # binning loses information compared to unbinned case assert_allclose(m.values, mle, rtol=0.15) assert m.errors["mu"] == pytest.approx(1000**-0.5, rel=0.05) @@ -628,8 +777,14 @@ def test_ExtendedBinnedNLL(binned, verbose): assert_allclose(m.fmin.reduced_chi2, 1, 0.1) + if use_grad: + assert m.ngrad > 0 + else: + assert m.ngrad == 0 -def test_ExtendedBinnedNLL_weighted(): + +@pytest.mark.parametrize("use_grad", (False, True)) +def test_ExtendedBinnedNLL_weighted(use_grad): xe = np.array([0, 1, 10]) n = np.diff(expon_cdf(xe, 1)) m1 = Minuit(ExtendedBinnedNLL(n, xe, expon_cdf), 1) @@ -637,36 +792,58 @@ def test_ExtendedBinnedNLL_weighted(): assert_allclose(m1.values, (1,), rtol=1e-2) w = np.transpose((n, 4 * n)) - m2 = Minuit(ExtendedBinnedNLL(w, xe, expon_cdf), 1) + c = ExtendedBinnedNLL(w, xe, expon_cdf, grad=numerical_model_gradient(expon_cdf)) + if use_grad: + ref = numerical_cost_gradient(c) + assert_allclose(c.grad(1), ref(1), atol=1e-14) + assert_allclose(c.grad(12), ref(12)) + m2 = Minuit(c, 1.1, grad=use_grad) m2.migrad() + assert m2.valid assert_allclose(m2.values, (1,), rtol=1e-2) assert m2.errors[0] == pytest.approx(2 * m1.errors[0], rel=1e-2) + if use_grad: + assert m2.ngrad > 0 + else: + assert m2.ngrad == 0 + def 
test_ExtendedBinnedNLL_bad_input(): with pytest.raises(ValueError): ExtendedBinnedNLL([1], [1], lambda x, a: 0) -def test_ExtendedBinnedNLL_2D(): +@pytest.mark.parametrize("use_grad", (False, True)) +def test_ExtendedBinnedNLL_2D(use_grad): truth = (1.0, 0.1, 0.2, 0.3, 0.4, 0.5) - x, y = mvnorm(*truth[1:]).rvs(size=int(truth[0] * 1000), random_state=1).T + x, y = mvnorm(*truth[1:]).rvs(size=int(truth[0] * 100), random_state=1).T w, xe, ye = np.histogram2d(x, y, bins=(10, 20)) def model(xy, n, mux, muy, sx, sy, rho): - return n * 1000 * mvnorm(mux, muy, sx, sy, rho).cdf(np.transpose(xy)) + return n * 100 * mvnorm(mux, muy, sx, sy, rho).cdf(np.transpose(xy)) - cost = ExtendedBinnedNLL(w, (xe, ye), model) + cost = ExtendedBinnedNLL(w, (xe, ye), model, grad=numerical_model_gradient(model)) assert cost.ndata == np.prod(w.shape) - m = Minuit(cost, *truth) + + if use_grad: + ref = numerical_cost_gradient(cost) + assert_allclose(cost.grad(*truth), ref(*truth)) + + m = Minuit(cost, *truth, grad=use_grad) m.limits["n", "sx", "sy"] = (0, None) m.limits["rho"] = (-1, 1) m.migrad() assert m.valid assert_allclose(m.values, truth, atol=0.1) + if use_grad: + assert m.ngrad > 0 + else: + assert m.ngrad == 0 + def test_ExtendedBinnedNLL_3D(): norm = pytest.importorskip("scipy.stats").norm @@ -698,7 +875,9 @@ def model(xyz, n, mux, muy, sx, sy, rho, muz, sz): def test_ExtendedBinnedNLL_mask(): - c = ExtendedBinnedNLL([1, 1000, 2], [0, 1, 2, 3], expon_cdf) + c = ExtendedBinnedNLL( + [1, 1000, 2], [0, 1, 2, 3], expon_cdf, grad=numerical_model_gradient(expon_cdf) + ) assert c.ndata == 3 c_unmasked = c(2) @@ -706,6 +885,9 @@ def test_ExtendedBinnedNLL_mask(): assert c(2) < c_unmasked assert c.ndata == 2 + ref = numerical_cost_gradient(c) + assert_allclose(c.grad(2), ref(2)) + def test_ExtendedBinnedNLL_properties(): def cdf(x, a): @@ -748,9 +930,32 @@ def test_ExtendedBinnedNLL_pickle(): assert_equal(c.data, c2.data) +def test_ExtendedBinnedNLL_weighted_pulls(): + rng = np.random.default_rng(1) + xe = np.linspace(0, 5, 500) + p = np.diff(expon_cdf(xe, 2)) + m = p * 100000 + n = rng.poisson(m * 4) / 4 + nvar = m * 4 / 4**2 + w = np.transpose((n, nvar)) + c = ExtendedBinnedNLL(w, xe, lambda x, n, s: n * expon_cdf(x, s)) + m = Minuit(c, np.sum(n), 2) + m.limits["n"] = (0.1, None) + m.limits["s"] = (0.1, None) + m.migrad() + assert m.valid + pulls = c.pulls(m.values) + assert np.nanmean(pulls) == pytest.approx(0, abs=0.1) + assert np.nanvar(pulls) == pytest.approx(1, abs=0.2) + + @pytest.mark.parametrize("loss", ["linear", "soft_l1", np.arctan]) @pytest.mark.parametrize("verbose", (0, 1)) -def test_LeastSquares(loss, verbose): +@pytest.mark.parametrize("use_grad", (False, True)) +def test_LeastSquares(loss, verbose, use_grad): + if use_grad and loss is np.arctan: + pytest.skip() + rng = np.random.default_rng(1) x = np.linspace(0, 1, 1000) @@ -760,10 +965,22 @@ def test_LeastSquares(loss, verbose): def model(x, a, b): return a + b * x - cost = LeastSquares(x, y, ye, model, loss=loss, verbose=verbose) + cost = LeastSquares( + x, + y, + ye, + model, + loss=loss, + verbose=verbose, + grad=numerical_model_gradient(model), + ) assert cost.ndata == len(x) - m = Minuit(cost, a=0, b=0) + if use_grad: + ref = numerical_cost_gradient(cost) + assert_allclose(cost.grad(1, 2), ref(1, 2)) + + m = Minuit(cost, a=0, b=0, grad=use_grad) m.migrad() assert_allclose(m.values, (1, 2), rtol=0.05) assert cost.loss == loss @@ -776,6 +993,11 @@ def model(x, a, b): assert_allclose(m.fmin.reduced_chi2, 1, atol=5e-2) + if use_grad: + assert 
m.ngrad > 0 + else: + assert m.ngrad == 0 + def test_LeastSquares_2D(): x = np.array([1.0, 2.0, 3.0]) @@ -787,7 +1009,12 @@ def model(xy, a, b): x, y = xy return a * x + b * y - c = LeastSquares((x, y), z, ze, model) + c = LeastSquares((x, y), z, ze, model, grad=numerical_model_gradient(model)) + assert c.ndata == 3 + + ref = numerical_cost_gradient(c) + assert_allclose(c.grad(1, 2), ref(1, 2)) + assert_equal(c.x, (x, y)) assert_equal(c.y, z) assert_equal(c.yerror, ze) @@ -815,19 +1042,28 @@ def test_LeastSquares_bad_input(): LeastSquares([1], [1], [1], lambda x, a: 0, loss=[1, 2, 3]) -def test_LeastSquares_mask(): - c = LeastSquares([1, 2, 3], [3, np.nan, 4], [1, 1, 1], lambda x, a: x + a) +@pytest.mark.parametrize("use_grad", (False, True)) +def test_LeastSquares_mask(use_grad): + c = LeastSquares( + [1, 2, 3], + [3, np.nan, 4], + [1, 1, 1], + lambda x, a: x + a, + grad=lambda x, a: np.ones_like(x), + ) assert c.ndata == 3 assert np.isnan(c(0)) + assert np.all(np.isnan(c.grad(1))) - m = Minuit(c, 1) + m = Minuit(c, 1, grad=use_grad) assert m.ndof == 2 m.migrad() assert not m.valid c.mask = np.arange(3) != 1 - assert not np.isnan(c(0)) assert c.ndata == 2 + assert not np.isnan(c(0)) + assert not np.any(np.isnan(c.grad(0))) assert m.ndof == 1 m.migrad() @@ -910,25 +1146,47 @@ def test_LeastSquares_pulls(): assert_equal(c.pulls((0, 1)), [10, np.nan]) -def test_CostSum_1(): +@pytest.mark.parametrize("use_grad", (False, True)) +def test_CostSum_1(use_grad): def model1(x, a): return a + x + def grad1(x, a): + return np.ones((1, len(x))) + def model2(x, b, a): return a + b * x + def grad2(x, b, a): + g = np.empty((2, len(x))) + g[0] = x + g[1] = 1 + return g + def model3(x, c): return c - lsq1 = LeastSquares(1, 2, 3, model1) + def grad3(x, c): + return np.ones((1, len(x))) + + lsq1 = LeastSquares(1, 2, 3, model1, grad=grad1) assert describe(lsq1) == ["a"] - lsq2 = LeastSquares(1, 3, 4, model2) + if use_grad: + assert_allclose(lsq1.grad(2), numerical_cost_gradient(lsq1)(2)) + + lsq2 = LeastSquares(1, 3, 4, model2, grad=grad2) assert describe(lsq2) == ["b", "a"] - lsq3 = LeastSquares(1, 1, 1, model3) + if use_grad: + assert_allclose(lsq2.grad(2, 3), numerical_cost_gradient(lsq2)(2, 3)) + + lsq3 = LeastSquares(1, 1, 1, model3, grad=grad3) assert describe(lsq3) == ["c"] + if use_grad: + assert_allclose(lsq3.grad(4), numerical_cost_gradient(lsq3)(4)) + lsq12 = lsq1 + lsq2 assert lsq12._items == [lsq1, lsq2] assert isinstance(lsq12, CostSum) @@ -939,33 +1197,74 @@ def model3(x, c): assert lsq12(1, 2) == lsq1(1) + lsq2(2, 1) - m = Minuit(lsq12, a=0, b=0) - m.migrad() - assert m.parameters == ("a", "b") - assert_allclose(m.values, (1, 2)) - assert_allclose(m.errors, (3, 5)) - assert_allclose(m.covariance, ((9, -9), (-9, 25)), atol=1e-10) + if use_grad: + a = 2 + b = 3 + ref = np.zeros(2) + ref[0] += lsq1.grad(a) + ref[[1, 0]] += lsq2.grad(b, a) + assert_allclose(lsq12.grad(a, b), ref) lsq121 = lsq12 + lsq1 assert lsq121._items == [lsq1, lsq2, lsq1] assert describe(lsq121) == ["a", "b"] assert lsq121.ndata == 3 + if use_grad: + a = 2 + b = 3 + ref = np.zeros(2) + ref[0] += lsq1.grad(a) + ref[[1, 0]] += lsq2.grad(b, a) + ref[0] += lsq1.grad(a) + assert_allclose(lsq121.grad(a, b), ref) + lsq312 = lsq3 + lsq12 assert lsq312._items == [lsq3, lsq1, lsq2] assert describe(lsq312) == ["c", "a", "b"] assert lsq312.ndata == 3 + if use_grad: + a = 2 + b = 3 + c = 4 + ref = np.zeros(3) + ref[0] += lsq3.grad(c) + ref[1] += lsq1.grad(a) + ref[[2, 1]] += lsq2.grad(b, a) + assert_allclose(lsq312.grad(c, a, b), ref) + 
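The CostSum gradient checks above scatter-add each term's gradient through its parameter mapping, i.e. the `merge_signatures` mapping that the util.py hunk changed from tuples to lists. The accumulation pattern in isolation, with hypothetical numbers mirroring lsq312 above:

import numpy as np

# CostSum over parameters (c, a, b); each term sees a subset in its own order,
# encoded as an index mapping into the merged parameter list
mappings = [[0], [1], [2, 1]]           # lsq3(c), lsq1(a), lsq2(b, a)
term_grads = [np.array([0.5]),          # d lsq3 / d(c)
              np.array([1.0]),          # d lsq1 / d(a)
              np.array([2.0, 3.0])]     # d lsq2 / d(b, a)

total = np.zeros(3)
for idx, g in zip(mappings, term_grads):
    total[idx] += g                     # scatter-add into the merged gradient
assert np.allclose(total, [0.5, 4.0, 2.0])  # d/dc, d/da, d/db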
lsq31212 = lsq312 + lsq12 assert lsq31212._items == [lsq3, lsq1, lsq2, lsq1, lsq2] assert describe(lsq31212) == ["c", "a", "b"] assert lsq31212.ndata == 5 + if use_grad: + a = 2 + b = 3 + c = 4 + ref = np.zeros(3) + ref[:] += lsq312.grad(c, a, b) + ref[1:] += lsq12.grad(a, b) + assert_allclose(lsq31212.grad(c, a, b), ref) + lsq31212 += lsq1 assert lsq31212._items == [lsq3, lsq1, lsq2, lsq1, lsq2, lsq1] assert describe(lsq31212) == ["c", "a", "b"] assert lsq31212.ndata == 6 + m = Minuit(lsq12, a=0, b=0, grad=use_grad) + m.migrad() + assert m.parameters == ("a", "b") + assert_allclose(m.values, (1, 2)) + assert_allclose(m.errors, (3, 5)) + assert_allclose(m.covariance, ((9, -9), (-9, 25)), atol=1e-10) + + if use_grad: + assert m.ngrad > 0 + else: + assert m.ngrad == 0 + def test_CostSum_2(): ref = NormalConstraint("a", 1, 2), NormalConstraint(("b", "a"), (1, 1), (2, 2)) @@ -1023,28 +1322,115 @@ def test_CostSum_visualize(): def test_NormalConstraint_1(): - def model(x, a): - return a * np.ones_like(x) - - lsq1 = LeastSquares(0, 1, 1, model) - lsq2 = lsq1 + NormalConstraint("a", 1, 0.1) - assert describe(lsq1) == ["a"] - assert describe(lsq2) == ["a"] - assert lsq1.ndata == 1 - assert lsq2.ndata == 2 - - m = Minuit(lsq1, 0) + c1 = NormalConstraint("a", 1, 1.5) + c2 = NormalConstraint(("a", "b"), (1, 2), (3, 4)) + c3 = NormalConstraint(("a", "b"), (1, 2), ((1, 0.5), (0.5, 4))) + + assert describe(c1) == ["a"] + assert describe(c2) == ["a", "b"] + assert describe(c3) == ["a", "b"] + assert c1.ndata == 1 + assert c2.ndata == 2 + assert c3.ndata == 2 + assert c1.has_grad + assert c2.has_grad + assert c3.has_grad + assert c1.value == 1 + assert_equal(c1.covariance, [1.5**2]) + assert_equal(c2.value, (1, 2)) + assert_equal(c2.covariance, (9, 16)) + assert_equal(c3.value, (1, 2)) + assert_equal(c3.covariance, ((1, 0.5), (0.5, 4))) + + assert_allclose(c1(1), 0) + assert_allclose(c1(1 + 1.5), 1) + assert_allclose(c1(1 - 1.5), 1) + + assert_allclose(c2(1, 2), 0) + assert_allclose(c2(1 + 3, 2), 1) + assert_allclose(c2(1 - 3, 2), 1) + assert_allclose(c2(1, 2 + 4), 1) + assert_allclose(c2(1, 2 - 4), 1) + + def ref(x, y): + d = np.subtract((x, y), (1, 2)) + c = ((1, 0.5), (0.5, 4)) + cinv = np.linalg.inv(c) + return d.T @ cinv @ d + + assert_allclose(c3(1, 2), 0) + assert_allclose(c3(2, 2), ref(2, 2)) + assert_allclose(c3(3, 4), ref(3, 4)) + assert_allclose(c3(-1, -2), ref(-1, -2)) + + ref = numerical_cost_gradient(c1) + assert_allclose(c1.grad(1), [0]) + assert_allclose(c1.grad(2), ref(2)) + assert_allclose(c1.grad(-2), ref(-2)) + + ref = numerical_cost_gradient(c2) + assert_allclose(c2.grad(1, 2), [0, 0]) + assert_allclose(c2.grad(2, 3), ref(2, 3)) + assert_allclose(c2.grad(-2, -4), ref(-2, -4)) + + ref = numerical_cost_gradient(c3) + assert_allclose(c3.grad(1, 2), [0, 0]) + assert_allclose(c3.grad(2, 3), ref(2, 3)) + assert_allclose(c3.grad(-2, -4), ref(-2, -4)) + + c1.value = 2 + c1.covariance = 4 + assert_equal(c1.value, 2) + assert_equal(c1.covariance, 4) + assert_allclose(c1(2), 0) + assert_allclose(c1(4), 1) + assert_allclose(c1(0), 1) + + c2.value = (2, 3) + c2.covariance = (1, 4) + assert_equal(c2.value, (2, 3)) + assert_equal(c2.covariance, (1, 4)) + assert_allclose(c2(2, 3), 0) + assert_allclose(c2(1, 3), 1) + assert_allclose(c2(2, 5), 1) + + c3.value = (2, 3) + c3.covariance = [(4, 1), (1, 5)] + assert_equal(c3.value, (2, 3)) + assert_equal(c3.covariance, [(4, 1), (1, 5)]) + + def ref(x, y): + d = np.subtract((x, y), (2, 3)) + c = ((4, 1), (1, 5)) + cinv = np.linalg.inv(c) + return d.T @ cinv 
@ d + + assert_allclose(c3(2, 3), 0) + assert_allclose(c3(1, 3), ref(1, 3)) + assert_allclose(c3(2, 5), ref(2, 5)) + + +@pytest.mark.parametrize("use_grad", (False, True)) +def test_NormalConstraint_2(use_grad): + c1 = NormalConstraint("a", 1, 1) + c2 = c1 + NormalConstraint("a", 1, 0.1) + + m = Minuit(c1, 0, grad=use_grad) m.migrad() + if use_grad: + assert m.ngrad > 0 assert_allclose(m.values, (1,), atol=1e-2) assert_allclose(m.errors, (1,), rtol=1e-2) - m = Minuit(lsq2, 0) + m = Minuit(c2, 0, grad=use_grad) m.migrad() + if use_grad: + assert m.ngrad > 0 assert_allclose(m.values, (1,), atol=1e-2) assert_allclose(m.errors, (0.1,), rtol=1e-2) -def test_NormalConstraint_2(): +def test_NormalConstraint_3(): lsq1 = NormalConstraint(("a", "b"), (1, 2), (2, 2)) lsq2 = lsq1 + NormalConstraint("b", 2, 0.1) + NormalConstraint("a", 1, 0.01) sa = 0.1 @@ -1075,16 +1461,6 @@ def test_NormalConstraint_2(): assert_allclose(m.covariance, cov, rtol=1e-2) -def test_NormalConstraint_properties(): - nc = NormalConstraint(("a", "b"), (1, 2), (3, 4)) - assert_equal(nc.value, (1, 2)) - assert_equal(nc.covariance, (9, 16)) - nc.value = (2, 3) - nc.covariance = (1, 2) - assert_equal(nc.value, (2, 3)) - assert_equal(nc.covariance, (1, 2)) - - def test_NormalConstraint_visualize(): pytest.importorskip("matplotlib") @@ -1104,21 +1480,43 @@ def test_NormalConstraint_pickle(): assert_equal(c.covariance, c2.covariance) +def test_NormalConstraint_bad_input_1(): + with pytest.raises(ValueError, match="scalar or one-dimensional"): + NormalConstraint("par", [[[1, 2]]], np.eye(2)) + + +def test_NormalConstraint_bad_input_2(): + with pytest.raises(ValueError, match="not match size of args"): + NormalConstraint(["a", "b", "c"], [1, 2], np.eye(2)) + + +def test_NormalConstraint_bad_input_3(): + with pytest.raises(ValueError, match="size of error does not match size of value"): + NormalConstraint(["a", "b"], [1, 2], np.eye(3)) + + +def test_NormalConstraint_bad_input_4(): + with pytest.raises(ValueError, match="positive definite"): + NormalConstraint(["a", "b"], [1, 2], [[1, 1], [1, 1]]) + + +def test_NormalConstraint_bad_input_5(): + n = NormalConstraint(["a", "b"], [1, 2], [[1, 0], [0, 1]]) + + with pytest.raises(ValueError, match="positive definite"): + n.covariance = [[1, 1], [1, 1]] + + +def test_NormalConstraint_bad_input_6(): + with pytest.raises(ValueError, match="cannot have more than two dimensions"): + NormalConstraint(["a", "b"], [1, 2], np.ones((2, 2, 2))) + + dtypes_to_test = [np.float32] if hasattr(np, "float128"): # not available on all platforms dtypes_to_test.append(np.float128) -@pytest.mark.parametrize("dtype", dtypes_to_test) -def test_soft_l1_loss(dtype): - v = np.array([0], dtype=dtype) - assert _soft_l1_loss(v) == v - v[:] = 0.1 - assert _soft_l1_loss(v) == pytest.approx(0.1, abs=0.01) - v[:] = 1e10 - assert _soft_l1_loss(v) == pytest.approx(2e5, rel=0.01) - - def test_multinominal_chi2(): zero = np.array(0) one = np.array(1) @@ -1469,10 +1867,7 @@ def cdf(xe, a): c = cost(n, edges, cdf) with pytest.raises( ValueError, - match=( - r"Expected model to return an array of shape \(3,\), " - r"but it returns an array of shape \(2,\)" - ), + match=(r"output of model has shape \(2,\), but \(3,\) is required"), ): c(1) @@ -1489,9 +1884,25 @@ def cdf(xye, a): c = cost(n, edges, cdf) with pytest.raises( ValueError, - match=( - r"Expected model to return an array of shape \(12,\), " - r"but it returns an array of shape \(11,\)" - ), + match=(r"output of model has shape \(11,\), but \(12,\) is required"), ): c(1) + 
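The reworded shape errors above come from `_normalize_output` in the cost.py hunk: a binned model must return one value per bin edge. A quick illustration of the failure mode with a hypothetical toy model:

import numpy as np
import pytest
from iminuit.cost import BinnedNLL

def bad_cdf(xe, a):
    return np.ones(2)  # wrong length: one value per bin edge is expected here

c = BinnedNLL([1, 2, 3], [0, 1, 2, 3], bad_cdf)
with pytest.raises(ValueError, match=r"output of model has shape \(2,\)"):
    c(1)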
+ +def test_BohmZechTransform(): + from iminuit.cost import BohmZechTransform + + with pytest.warns(np.VisibleDeprecationWarning): + val = np.array([1.0, 2.0]) + var = np.array([3.0, 4.0]) + tr = BohmZechTransform(val, var) + s = val / var + mu = np.array([2.0, 2.0]) + ns, mus = tr(mu) + assert_allclose(ns, val * s) + assert_allclose(mus, mu * s) + var2 = mu**2 + ns, mus, vars = tr(mu, var2) + assert_allclose(ns, val * s) + assert_allclose(mus, mu * s) + assert_allclose(vars, var2 * s**2) diff --git a/tests/test_minuit.py b/tests/test_minuit.py index 7f1e01d95..fd7e7909a 100644 --- a/tests/test_minuit.py +++ b/tests/test_minuit.py @@ -1693,3 +1693,19 @@ def cost(a, b: Annotated[float, 0.1:1]): assert m2.limits["y"] == (0.1, 1.0) m.migrad() assert_allclose(m.values, (0, 0.1), atol=1e-2) + + +def test_enforced_grad(): + def cost(a, b): + return a**2 + b**2 + + with pytest.raises(ValueError): + Minuit(cost, 0, 0, grad=True) + + +def test_bad_grad(): + def cost(a, b): + return a**2 + b**2 + + with pytest.raises(ValueError, match="provided gradient is not a CostGradient"): + Minuit(cost, 0, 0, grad="foo") diff --git a/tests/test_util.py b/tests/test_util.py index 21d486dc0..793f34721 100644 --- a/tests/test_util.py +++ b/tests/test_util.py @@ -514,8 +514,8 @@ def g(x, a, b): args, (pf, pg) = util.merge_signatures([f, g]) assert args == ["x", "y", "z", "a", "b"] - assert pf == (0, 1, 2) - assert pg == (0, 3, 4) + assert pf == [0, 1, 2] + assert pg == [0, 3, 4] @pytest.mark.skipif(not scipy_available, reason="needs scipy") @@ -768,3 +768,9 @@ def test_optional_module_for_3(): ): with optional_module_for("foo", replace={"foobarbaz": "foo"}): import foobarbaz # noqa + + +def test_positive_definite(): + assert util.is_positive_definite([[1, 0], [0, 1]]) + assert not util.is_positive_definite([[1, 1], [1, 1]]) + assert not util.is_positive_definite([[1, 0], [1, 1]]) diff --git a/tests/test_without_numba.py b/tests/test_without_numba.py index 60b8e2e76..674f02833 100644 --- a/tests/test_without_numba.py +++ b/tests/test_without_numba.py @@ -19,7 +19,6 @@ def npa(*args, **kwargs): ("multinominal_chi2", (npa(1, 0), npa(1.2, 0))), ("chi2", (npa(1.2, 0), npa(1.2, 1.0), npa(1.2, 0.1))), ("poisson_chi2", (npa(1, 0), npa(1.2, 0.1))), - ("_soft_l1_loss", (npa(1.2, 0),)), ("_soft_l1_cost", (npa(1.2, 0), npa(1.2, 0.1), npa(1.0, 1.0))), ), )
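For reference on the deprecated `BohmZechTransform` exercised in the last cost test above: each bin with weighted count `val` and variance `var` gets a scale factor s = val/var, so the scaled count val*s has a variance equal to its expectation and can be treated as Poisson-distributed; predictions are scaled with the same factor. The arithmetic in plain numpy, with hypothetical numbers:

import numpy as np

val = np.array([10.0, 20.0])  # sum of weights per bin
var = np.array([20.0, 80.0])  # sum of squared weights per bin

s = val / var                 # Bohm-Zech scale factor per bin
n_eq = val * s                # equivalent Poisson counts
assert np.allclose(var * s**2, n_eq)  # scaled variance equals scaled counts

mu = np.array([11.0, 19.0])   # model prediction
mu_scaled = mu * s            # compared with n_eq in the likelihood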