From 4f1eb25903bbe6be3d8c6f5d46ee30544c1c304e Mon Sep 17 00:00:00 2001 From: Aman Deep Singh Date: Wed, 11 Jul 2018 10:46:49 +0530 Subject: [PATCH] Added POMDP-value-iteration (#929) * Added POMDP value iteration * Added plot_pomdp_utility function * Added tests for pomdp-value-iteration * Updated README.md * Fixed notebook import * Changed colors * Added notebook sections for POMDP and pomdp_value_iteration * Fixed notebook parsing error * Replace pomdp.ipynb * Updated README.md * Fixed line endings * Fixed line endings * Fixed line endings * Fixed line endings * Removed numpy dependency * Added docstrings * Fix tests * Added a test for pomdp_value_iteration * Remove numpy dependencies from mdp.ipynb * Added POMDP to mdp_apps.ipynb --- README.md | 2 +- mdp.ipynb | 778 +++++++++++++++++++++++++++++++++++++++++++++- mdp.py | 209 ++++++++++++- mdp_apps.ipynb | 382 ++++++++++++++++++++++- notebook.py | 21 ++ pomdp.ipynb | 240 -------------- tests/test_mdp.py | 40 +++ 7 files changed, 1418 insertions(+), 254 deletions(-) delete mode 100644 pomdp.ipynb diff --git a/README.md b/README.md index d89a90bca..2b3a50488 100644 --- a/README.md +++ b/README.md @@ -131,7 +131,7 @@ Here is a table of algorithms, the figure, name of the algorithm in the book and | 16.9 | Information-Gathering-Agent | | | | | | 17.4 | Value-Iteration | `value_iteration` | [`mdp.py`][mdp] | Done | Included | | 17.7 | Policy-Iteration | `policy_iteration` | [`mdp.py`][mdp] | Done | Included | -| 17.9 | POMDP-Value-Iteration | | | | | +| 17.9 | POMDP-Value-Iteration | `pomdp_value_iteration` | [`mdp.py`][mdp] | Done | Included | | 18.5 | Decision-Tree-Learning | `DecisionTreeLearner` | [`learning.py`][learning] | Done | Included | | 18.8 | Cross-Validation | `cross_validation` | [`learning.py`][learning] | | | | 18.11 | Decision-List-Learning | `DecisionListLearner` | [`learning.py`][learning]\* | | | diff --git a/mdp.ipynb b/mdp.ipynb index aa74514e0..b9952f528 100644 --- a/mdp.ipynb +++ b/mdp.ipynb @@ -4,9 +4,10 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Markov decision processes (MDPs)\n", + "# Making Complex Decisions\n", + "---\n", "\n", - "This IPy notebook acts as supporting material for topics covered in **Chapter 17 Making Complex Decisions** of the book* Artificial Intelligence: A Modern Approach*. We makes use of the implementations in mdp.py module. This notebook also includes a brief summary of the main topics as a review. Let us import everything from the mdp module to get started." + "This Jupyter notebook acts as supporting material for topics covered in **Chapter 17 Making Complex Decisions** of the book* Artificial Intelligence: A Modern Approach*. We make use of the implementations in mdp.py module. This notebook also includes a brief summary of the main topics as a review. Let us import everything from the mdp module to get started." ] }, { @@ -16,7 +17,7 @@ "outputs": [], "source": [ "from mdp import *\n", - "from notebook import psource, pseudocode" + "from notebook import psource, pseudocode, plot_pomdp_utility" ] }, { @@ -30,7 +31,10 @@ "* Grid MDP\n", "* Value Iteration\n", " * Value Iteration Visualization\n", - "* Policy Iteration" + "* Policy Iteration\n", + "* POMDPs\n", + "* POMDP Value Iteration\n", + " - Value Iteration Visualization" ] }, { @@ -2170,6 +2174,769 @@ "For in-depth knowledge about sequential decision problems, refer **Section 17.1** in the AIMA book." 
] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## POMDP\n", + "---\n", + "Partially Observable Markov Decision Problems\n", + "\n", + "In retrospect, a Markov decision process or MDP is defined as:\n", + "- a sequential decision problem for a fully observable, stochastic environment with a Markovian transition model and additive rewards.\n", + "\n", + "An MDP consists of a set of states (with an initial state $s_0$); a set $A(s)$ of actions\n", + "in each state; a transition model $P(s' | s, a)$; and a reward function $R(s)$.\n", + "\n", + "The MDP seeks to make sequential decisions to occupy states so as to maximise some combination of the reward function $R(s)$.\n", + "\n", + "The characteristic problem of the MDP is hence to identify the optimal policy function $\\pi^*(s)$ that provides the _utility-maximising_ action $a$ to be taken when the current state is $s$.\n", + "\n", + "### Belief vector\n", + "\n", + "**Note**: The book refers to the _belief vector_ as the _belief state_. We use the latter terminology here to retain our ability to refer to the belief vector as a _probability distribution over states_.\n", + "\n", + "The solution of an MDP is subject to certain properties of the problem which are assumed and justified in [Section 17.1]. One critical assumption is that the agent is **fully aware of its current state at all times**.\n", + "\n", + "A tedious (but rewarding, as we will see) way of expressing this is in terms of the **belief vector** $b$ of the agent. The belief vector is a function mapping states to probabilities or certainties of being in those states.\n", + "\n", + "Consider an agent that is fully aware that it is in state $s_i$ in the statespace $(s_1, s_2, ... s_n)$ at the current time.\n", + "\n", + "Its belief vector is the vector $(b(s_1), b(s_2), ... b(s_n))$ given by the function $b(s)$:\n", + "\\begin{align*}\n", + "b(s) &= 0 \\quad \\text{if }s \\neq s_i \\\\ &= 1 \\quad \\text{if } s = s_i\n", + "\\end{align*}\n", + "\n", + "Note that $b(s)$ is a probability distribution that necessarily sums to $1$ over all $s$.\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "### POMDPs - a conceptual outline\n", + "\n", + "The POMDP really has only two modifications to the **problem formulation** compared to the MDP.\n", + "\n", + "- **Belief state** - In the real world, the current state of an agent is often not known with complete certainty. This makes the concept of a belief vector extremely relevant. It allows the agent to represent different degrees of certainty with which it _believes_ it is in each state.\n", + "\n", + "- **Evidence percepts** - In the real world, agents often have certain kinds of evidence, collected from sensors. They can use the probability distribution of observed evidence, conditional on state, to consolidate their information. This is a known distribution $P(e\\ |\\ s)$ - $e$ being an evidence, and $s$ being the state it is conditional on.\n", + "\n", + "Consider the world we used for the MDP. \n", + "\n", + "![title](images/grid_mdp.jpg)\n", + "\n", + "#### Using the belief vector\n", + "An agent beginning at $(1, 1)$ may not be certain that it is indeed in $(1, 1)$. 
Consider a belief vector $b$ such that:\n", + "\\begin{align*}\n", + " b((1,1)) &= 0.8 \\\\\n", + " b((2,1)) &= 0.1 \\\\\n", + " b((1,2)) &= 0.1 \\\\\n", + " b(s) &= 0 \\quad \\quad \\forall \\text{ other } s\n", + "\\end{align*}\n", + "\n", + "By horizontally catenating each row, we can represent this as an 11-dimensional vector (omitting $(2, 2)$).\n", + "\n", + "Thus, taking $s_1 = (1, 1)$, $s_2 = (1, 2)$, ... $s_{11} = (4,3)$, we have $b$:\n", + "\n", + "$b = (0.8, 0.1, 0, 0, 0.1, 0, 0, 0, 0, 0, 0)$ \n", + "\n", + "This fully represents the certainty to which the agent is aware of its state.\n", + "\n", + "#### Using evidence\n", + "The evidence observed here could be the number of adjacent 'walls' or 'dead ends' observed by the agent. We assume that the agent cannot 'orient' the walls - only count them.\n", + "\n", + "In this case, $e$ can take only two values, 1 and 2. This gives $P(e\\ |\\ s)$ as:\n", + "\\begin{align*}\n", + " P(e=2\\ |\\ s) &= \\frac{1}{7} \\quad \\forall \\quad s \\in \\{s_1, s_2, s_4, s_5, s_8, s_9, s_{11}\\}\\\\\n", + " P(e=1\\ |\\ s) &= \\frac{1}{4} \\quad \\forall \\quad s \\in \\{s_3, s_6, s_7, s_{10}\\} \\\\\n", + " P(e\\ |\\ s) &= 0 \\quad \\forall \\quad \\text{ other } s, e\n", + "\\end{align*}\n", + "\n", + "Note that the implications of the evidence on the state must be known **a priori** to the agent. Ways of reliably learning this distribution from percepts are beyond the scope of this notebook." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### POMDPs - a rigorous outline\n", + "\n", + "A POMDP is thus a sequential decision problem for for a *partially* observable, stochastic environment with a Markovian transition model, a known 'sensor model' for inferring state from observation, and additive rewards. \n", + "\n", + "Practically, a POMDP has the following, which an MDP also has:\n", + "- a set of states, each denoted by $s$\n", + "- a set of actions available in each state, $A(s)$\n", + "- a reward accrued on attaining some state, $R(s)$\n", + "- a transition probability $P(s'\\ |\\ s, a)$ of action $a$ changing the state from $s$ to $s'$\n", + "\n", + "And the following, which an MDP does not:\n", + "- a sensor model $P(e\\ |\\ s)$ on evidence conditional on states\n", + "\n", + "Additionally, the POMDP is now uncertain of its current state hence has:\n", + "- a belief vector $b$ representing the certainty of being in each state (as a probability distribution)\n", + "\n", + "\n", + "#### New uncertainties\n", + "\n", + "It is useful to intuitively appreciate the new uncertainties that have arisen in the agent's awareness of its own state.\n", + "\n", + "- At any point, the agent has belief vector $b$, the distribution of its believed likelihood of being in each state $s$.\n", + "- For each of these states $s$ that the agent may **actually** be in, it has some set of actions given by $A(s)$.\n", + "- Each of these actions may transport it to some other state $s'$, assuming an initial state $s$, with probability $P(s'\\ |\\ s, a)$\n", + "- Once the action is performed, the agent receives a percept $e$. $P(e\\ |\\ s)$ now tells it the chances of having perceived $e$ for each state $s$. 
The agent must use this information to update its new belief state appropriately.\n", + "\n", + "#### Evolution of the belief vector - the `FORWARD` function\n", + "\n", + "The new belief vector $b'(s')$ after an action $a$ on the belief vector $b(s)$ and the noting of evidence $e$ is:\n", + "$$ b'(s') = \\alpha P(e\\ |\\ s') \\sum_s P(s'\\ | s, a) b(s)$$ \n", + "\n", + "where $\\alpha$ is a normalising constant (to retain the interpretation of $b$ as a probability distribution.\n", + "\n", + "This equation is just counts the sum of likelihoods of going to a state $s'$ from every possible state $s$, times the initial likelihood of being in each $s$. This is multiplied by the likelihood that the known evidence actually implies the new state $s'$. \n", + "\n", + "This function is represented as `b' = FORWARD(b, a, e)`\n", + "\n", + "#### Probability distribution of the evolving belief vector\n", + "\n", + "The goal here is to find $P(b'\\ |\\ b, a)$ - the probability that action $a$ transforms belief vector $b$ into belief vector $b'$. The following steps illustrate this -\n", + "\n", + "The probability of observing evidence $e$ when action $a$ is enacted on belief vector $b$ can be distributed over each possible new state $s'$ resulting from it:\n", + "\\begin{align*}\n", + " P(e\\ |\\ b, a) &= \\sum_{s'} P(e\\ |\\ b, a, s') P(s'\\ |\\ b, a) \\\\\n", + " &= \\sum_{s'} P(e\\ |\\ s') P(s'\\ |\\ b, a) \\\\\n", + " &= \\sum_{s'} P(e\\ |\\ s') \\sum_s P(s'\\ |\\ s, a) b(s)\n", + "\\end{align*}\n", + "\n", + "The probability of getting belief vector $b'$ from $b$ by application of action $a$ can thus be summed over all possible evidences $e$:\n", + "\\begin{align*}\n", + " P(b'\\ |\\ b, a) &= \\sum_{e} P(b'\\ |\\ b, a, e) P(e\\ |\\ b, a) \\\\\n", + " &= \\sum_{e} P(b'\\ |\\ b, a, e) \\sum_{s'} P(e\\ |\\ s') \\sum_s P(s'\\ |\\ s, a) b(s)\n", + "\\end{align*}\n", + "\n", + "where $P(b'\\ |\\ b, a, e) = 1$ if $b' = $ `FORWARD(b, a, e)` and $= 0$ otherwise.\n", + "\n", + "Given initial and final belief states $b$ and $b'$, the transition probabilities still depend on the action $a$ and observed evidence $e$. Some belief states may be achievable by certain actions, but have non-zero probabilities for states prohibited by the evidence $e$. Thus, the above condition thus ensures that only valid combinations of $(b', b, a, e)$ are considered.\n", + "\n", + "#### A modified rewardspace\n", + "\n", + "For MDPs, the reward space was simple - one reward per available state. However, for a belief vector $b(s)$, the expected reward is now:\n", + "$$\\rho(b) = \\sum_s b(s) R(s)$$\n", + "\n", + "Thus, as the belief vector can take infinite values of the distribution over states, so can the reward for each belief vector vary over a hyperplane in the belief space, or space of states (planes in an $N$-dimensional space are formed by a linear combination of the axes)." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now that we know the basics, let's have a look at the `POMDP` class." + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "
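Before that, here is a minimal sketch of the `FORWARD` update described above. It is only an illustration for this discussion (the function below is not defined in `mdp.py`, and its argument layout is an assumption chosen for readability):

```python
def forward(b, T_a, E, e):
    """One FORWARD step: b'(s') = alpha * P(e | s') * sum_s P(s' | s, a) b(s).

    b   : current belief, a list of probabilities over states
    T_a : transition matrix for the chosen action, T_a[s][s1] = P(s1 | s, a)
    E   : sensor model, E[s1][e] = P(e | s1)
    e   : index of the percept that was observed
    """
    n = len(b)
    predicted = [sum(T_a[s][s1] * b[s] for s in range(n)) for s1 in range(n)]  # prediction step
    weighted = [E[s1][e] * predicted[s1] for s1 in range(n)]                   # weight by the evidence
    alpha = 1 / sum(weighted)                                                  # normalising constant
    return [alpha * p for p in weighted]

# e.g. with the two-state world used later in this notebook:
# forward([0.5, 0.5], [[0.9, 0.1], [0.1, 0.9]], [[0.6, 0.4], [0.4, 0.6]], 0) -> [0.6, 0.4]
```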

\n", + "\n", + "
class POMDP(MDP):\n",
+       "\n",
+       "    """A Partially Observable Markov Decision Process, defined by\n",
+       "    a transition model P(s'|s,a), actions A(s), a reward function R(s),\n",
+       "    and a sensor model P(e|s). We also keep track of a gamma value,\n",
+       "    for use by algorithms. The transition and the sensor models\n",
+       "    are defined as matrices. We also keep track of the possible states\n",
+       "    and actions for each state. [page 659]."""\n",
+       "\n",
+       "    def __init__(self, actions, transitions=None, evidences=None, rewards=None, states=None, gamma=0.95):\n",
+       "        """Initialize variables of the pomdp"""\n",
+       "\n",
+       "        if not (0 < gamma <= 1):\n",
+       "            raise ValueError('A POMDP must have 0 < gamma <= 1')\n",
+       "\n",
+       "        self.states = states\n",
+       "        self.actions = actions\n",
+       "\n",
+       "        # transition model cannot be undefined\n",
+       "        self.t_prob = transitions or {}\n",
+       "        if not self.t_prob:\n",
+       "            print('Warning: Transition model is undefined')\n",
+       "        \n",
+       "        # sensor model cannot be undefined\n",
+       "        self.e_prob = evidences or {}\n",
+       "        if not self.e_prob:\n",
+       "            print('Warning: Sensor model is undefined')\n",
+       "        \n",
+       "        self.gamma = gamma\n",
+       "        self.rewards = rewards\n",
+       "\n",
+       "    def remove_dominated_plans(self, input_values):\n",
+       "        """\n",
+       "        Remove dominated plans.\n",
+       "        This method finds all the lines contributing to the\n",
+       "        upper surface and removes those which don't.\n",
+       "        """\n",
+       "\n",
+       "        values = [val for action in input_values for val in input_values[action]]\n",
+       "        values.sort(key=lambda x: x[0], reverse=True)\n",
+       "\n",
+       "        best = [values[0]]\n",
+       "        y1_max = max(val[1] for val in values)\n",
+       "        tgt = values[0]\n",
+       "        prev_b = 0\n",
+       "        prev_ix = 0\n",
+       "        while tgt[1] != y1_max:\n",
+       "            min_b = 1\n",
+       "            min_ix = 0\n",
+       "            for i in range(prev_ix + 1, len(values)):\n",
+       "                if values[i][0] - tgt[0] + tgt[1] - values[i][1] != 0:\n",
+       "                    trans_b = (values[i][0] - tgt[0]) / (values[i][0] - tgt[0] + tgt[1] - values[i][1])\n",
+       "                    if 0 <= trans_b <= 1 and trans_b > prev_b and trans_b < min_b:\n",
+       "                        min_b = trans_b\n",
+       "                        min_ix = i\n",
+       "            prev_b = min_b\n",
+       "            prev_ix = min_ix\n",
+       "            tgt = values[min_ix]\n",
+       "            best.append(tgt)\n",
+       "\n",
+       "        return self.generate_mapping(best, input_values)\n",
+       "\n",
+       "    def remove_dominated_plans_fast(self, input_values):\n",
+       "        """\n",
+       "        Remove dominated plans using approximations.\n",
+       "        Resamples the upper boundary at intervals of 100 and\n",
+       "        finds the maximum values at these points.\n",
+       "        """\n",
+       "\n",
+       "        values = [val for action in input_values for val in input_values[action]]\n",
+       "        values.sort(key=lambda x: x[0], reverse=True)\n",
+       "\n",
+       "        best = []\n",
+       "        sr = 100\n",
+       "        for i in range(sr + 1):\n",
+       "            x = i / float(sr)\n",
+       "            maximum = (values[0][1] - values[0][0]) * x + values[0][0]\n",
+       "            tgt = values[0]\n",
+       "            for value in values:\n",
+       "                val = (value[1] - value[0]) * x + value[0]\n",
+       "                if val > maximum:\n",
+       "                    maximum = val\n",
+       "                    tgt = value\n",
+       "\n",
+       "            if all(any(tgt != v) for v in best):\n",
+       "                best.append(tgt)\n",
+       "\n",
+       "        return self.generate_mapping(best, input_values)\n",
+       "\n",
+       "    def generate_mapping(self, best, input_values):\n",
+       "        """Generate mappings after removing dominated plans"""\n",
+       "\n",
+       "        mapping = defaultdict(list)\n",
+       "        for value in best:\n",
+       "            for action in input_values:\n",
+       "                if any(all(value == v) for v in input_values[action]):\n",
+       "                    mapping[action].append(value)\n",
+       "\n",
+       "        return mapping\n",
+       "\n",
+       "    def max_difference(self, U1, U2):\n",
+       "        """Find maximum difference between two utility mappings"""\n",
+       "\n",
+       "        for k, v in U1.items():\n",
+       "            sum1 = 0\n",
+       "            for element in U1[k]:\n",
+       "                sum1 += sum(element)\n",
+       "            sum2 = 0\n",
+       "            for element in U2[k]:\n",
+       "                sum2 += sum(element)\n",
+       "        return abs(sum1 - sum2)\n",
+       "
\n", + "\n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "psource(POMDP)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The `POMDP` class includes all variables of the `MDP` class and additionally also stores the sensor model in `e_prob`.\n", + "
\n", + "
\n", + "`remove_dominated_plans`, `remove_dominated_plans_fast`, `generate_mapping` and `max_difference` are helper methods for `pomdp_value_iteration` which will be explained shortly." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To understand how we can model a partially observable MDP, let's take a simple example.\n", + "Let's consider a simple two state world.\n", + "The states are labelled 0 and 1, with the reward at state 0 being 0 and at state 1 being 1.\n", + "
\n", + "There are two actions:\n", + "
\n", + "`Stay`: stays put with probability 0.9 and\n", + "`Go`: switches to the other state with probability 0.9.\n", + "
\n", + "For now, let's assume the discount factor `gamma` to be 1.\n", + "
\n", + "The sensor reports the correct state with probability 0.6.\n", + "
\n", + "This is a simple problem with a trivial solution.\n", + "Obviously the agent should `Stay` when it thinks it is in state 1 and `Go` when it thinks it is in state 0.\n", + "
\n", + "The belief space can be viewed as one-dimensional because the two probabilities must sum to 1." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's model this POMDP using the `POMDP` class." + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": {}, + "outputs": [], + "source": [ + "# transition probability P(s'|s,a)\n", + "t_prob = [[[0.9, 0.1], [0.1, 0.9]], [[0.1, 0.9], [0.9, 0.1]]]\n", + "# evidence function P(e|s)\n", + "e_prob = [[[0.6, 0.4], [0.4, 0.6]], [[0.6, 0.4], [0.4, 0.6]]]\n", + "# reward function\n", + "rewards = [[0.0, 0.0], [1.0, 1.0]]\n", + "# discount factor\n", + "gamma = 0.95\n", + "# actions\n", + "actions = ('0', '1')\n", + "# states\n", + "states = ('0', '1')" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": {}, + "outputs": [], + "source": [ + "pomdp = POMDP(actions, t_prob, e_prob, rewards, states, gamma)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We have defined our `POMDP` object." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## POMDP VALUE ITERATION\n", + "Defining a POMDP is useless unless we can find a way to solve it. As POMDPs can have infinitely many belief states, we cannot calculate one utility value for each state as we did in `value_iteration` for MDPs.\n", + "
\n", + "Instead of thinking about policies, we should think about conditional plans and how the expected utility of executing a fixed conditional plan varies with the initial belief state.\n", + "
\n", + "If we bound the depth of the conditional plans, then there are only finitely many such plans and the continuous space of belief states will generally be divided inte _regions_, each corresponding to a particular conditional plan that is optimal in that region. The utility function, being the maximum of a collection of hyperplanes, will be piecewise linear and convex.\n", + "
\n", + "For the one-step plans `Stay` and `Go`, the utility values are as follows\n", + "
\n", + "
\n", + "$$\\alpha_{|Stay|}(0) = R(0) + \\gamma(0.9R(0) + 0.1R(1)) = 0.1$$\n", + "$$\\alpha_{|Stay|}(1) = R(1) + \\gamma(0.9R(1) + 0.1R(0)) = 1.9$$\n", + "$$\\alpha_{|Go|}(0) = R(0) + \\gamma(0.9R(1) + 0.1R(0)) = 0.9$$\n", + "$$\\alpha_{|Go|}(1) = R(1) + \\gamma(0.9R(0) + 0.1R(1)) = 1.1$$" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The utility function can be found by `pomdp_value_iteration`.\n", + "
\n", + "To summarize, it generates a set of all plans consisting of an action and, for each possible next percept, a plan in U with computed utility vectors.\n", + "The dominated plans are then removed from this set and the process is repeated till the maximum difference between the utility functions of two consecutive iterations reaches a value less than a threshold value." + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": {}, + "outputs": [ + { + "data": { + "text/markdown": [ + "### AIMA3e\n", + "__function__ POMDP-VALUE-ITERATION(_pomdp_, _ε_) __returns__ a utility function \n", + " __inputs__: _pomdp_, a POMDP with states _S_, actions _A_(_s_), transition model _P_(_s′_ | _s_, _a_), \n", + "      sensor model _P_(_e_ | _s_), rewards _R_(_s_), discount _γ_ \n", + "     _ε_, the maximum error allowed in the utility of any state \n", + " __local variables__: _U_, _U′_, sets of plans _p_ with associated utility vectors _αp_ \n", + "\n", + " _U′_ ← a set containing just the empty plan \\[\\], with _α\\[\\]_(_s_) = _R_(_s_) \n", + " __repeat__ \n", + "   _U_ ← _U′_ \n", + "   _U′_ ← the set of all plans consisting of an action and, for each possible next percept, \n", + "     a plan in _U_ with utility vectors computed according to Equation(__??__) \n", + "   _U′_ ← REMOVE\\-DOMINATED\\-PLANS(_U′_) \n", + " __until__ MAX\\-DIFFERENCE(_U_, _U′_) < _ε_(1 − _γ_) ⁄ _γ_ \n", + " __return__ _U_ \n", + "\n", + "---\n", + "__Figure ??__ A high\\-level sketch of the value iteration algorithm for POMDPs. The REMOVE\\-DOMINATED\\-PLANS step and MAX\\-DIFFERENCE test are typically implemented as linear programs." + ], + "text/plain": [ + "" + ] + }, + "execution_count": 35, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pseudocode('POMDP-Value-Iteration')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's have a look at the `pomdp_value_iteration` function." + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "

\n", + "\n", + "
def pomdp_value_iteration(pomdp, epsilon=0.1):\n",
+       "    """Solving a POMDP by value iteration."""\n",
+       "\n",
+       "    U = {'':[[0]* len(pomdp.states)]}\n",
+       "    count = 0\n",
+       "    while True:\n",
+       "        count += 1\n",
+       "        prev_U = U\n",
+       "        values = [val for action in U for val in U[action]]\n",
+       "        value_matxs = []\n",
+       "        for i in values:\n",
+       "            for j in values:\n",
+       "                value_matxs.append([i, j])\n",
+       "\n",
+       "        U1 = defaultdict(list)\n",
+       "        for action in pomdp.actions:\n",
+       "            for u in value_matxs:\n",
+       "                u1 = Matrix.matmul(Matrix.matmul(pomdp.t_prob[int(action)], Matrix.multiply(pomdp.e_prob[int(action)], Matrix.transpose(u))), [[1], [1]])\n",
+       "                u1 = Matrix.add(Matrix.scalar_multiply(pomdp.gamma, Matrix.transpose(u1)), [pomdp.rewards[int(action)]])\n",
+       "                U1[action].append(u1[0])\n",
+       "\n",
+       "        U = pomdp.remove_dominated_plans_fast(U1)\n",
+       "        # replace with U = pomdp.remove_dominated_plans(U1) for accurate calculations\n",
+       "        \n",
+       "        if count > 10:\n",
+       "            if pomdp.max_difference(U, prev_U) < epsilon * (1 - pomdp.gamma) / pomdp.gamma:\n",
+       "                return U\n",
+       "
\n", + "\n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "psource(pomdp_value_iteration)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This function uses two aptly named helper methods from the `POMDP` class, `remove_dominated_plans` and `max_difference`." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's try solving a simple one-dimensional POMDP using value-iteration.\n", + "
\n", + "Consider the problem of a user listening to voicemails.\n", + "At the end of each message, they can either _save_ or _delete_ a message.\n", + "This forms the unobservable state _S = {save, delete}_.\n", + "It is the task of the POMDP solver to guess which goal the user has.\n", + "
\n", + "The belief space has two elements, _b(s = save)_ and _b(s = delete)_.\n", + "For example, for the belief state _b = (1, 0)_, the left end of the line segment indicates _b(s = save) = 1_ and _b(s = delete) = 0_.\n", + "The intermediate points represent varying degrees of certainty in the user's goal.\n", + "
\n", + "The machine has three available actions: it can _ask_ what the user wishes to do in order to infer his or her current goal, or it can _doSave_ or _doDelete_ and move to the next message.\n", + "If the user says _save_, then an error may occur with probability 0.2, whereas if the user says _delete_, an error may occur with a probability 0.3.\n", + "
\n", + "The machine receives a large positive reward (+5) for getting the user's goal correct, a very large negative reward (-20) for taking the action _doDelete_ when the user wanted _save_, and a smaller but still significant negative reward (-10) for taking the action _doSave_ when the user wanted _delete_. \n", + "There is also a small negative reward for taking the _ask_ action (-1).\n", + "The discount factor is set to 0.95 for this example.\n", + "
\n", + "Let's define the POMDP." + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": {}, + "outputs": [], + "source": [ + "# transition function P(s'|s,a)\n", + "t_prob = [[[0.65, 0.35], [0.65, 0.35]], [[0.65, 0.35], [0.65, 0.35]], [[1.0, 0.0], [0.0, 1.0]]]\n", + "# evidence function P(e|s)\n", + "e_prob = [[[0.5, 0.5], [0.5, 0.5]], [[0.5, 0.5], [0.5, 0.5]], [[0.8, 0.2], [0.3, 0.7]]]\n", + "# reward function\n", + "rewards = [[5, -10], [-20, 5], [-1, -1]]\n", + "\n", + "gamma = 0.95\n", + "actions = ('0', '1', '2')\n", + "states = ('0', '1')\n", + "\n", + "pomdp = POMDP(actions, t_prob, e_prob, rewards, states, gamma)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We have defined the `POMDP` object.\n", + "Let's run `pomdp_value_iteration` to find the utility function." + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "metadata": {}, + "outputs": [], + "source": [ + "utility = pomdp_value_iteration(pomdp, epsilon=0.1)" + ] + }, + { + "cell_type": "code", + "execution_count": 39, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYEAAAD8CAYAAACRkhiPAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAIABJREFUeJzsnXd81dX9/5+fm733JiEBEkYYYRNkrwABW9yjWq2tP7WuVq24v0WtiKNaF1K0al3ggNIEhIDKEhlhRCBkD7L33rnn98eH+ykrZN2bm3Gej0cekuQzziee+359zjnv83orQggkEolEMjDRmbsBEolEIjEfUgQkEolkACNFQCKRSAYwUgQkEolkACNFQCKRSAYwUgQkEolkACNFQCKRSAYwUgQkEolkACNFQCKRSAYwluZuwPl4enqK4OBgczdDIpFI+hTx8fElQgivrpzbq0QgODiYI0eOmLsZEolE0qdQFCWrq+fK6SCJRCIZwEgRkEgkkgGMFAGJRCIZwEgRkEgkkgGMFAGJRCIZwEgRkEgkkgGMFAGJRCIZwEgRkEgkkgGMFAGJRCIZwEgRkEgkkgHMgBaBF198kfDwcMaOHUtERAQHDx40d5MkfYBNmzahKApnzpy54nGOjo491CJJW1hYWBAREUF4eDjjxo3j9ddfR6/XX/GczMxMRo8e3e4xn3/+uTGbajYGrAgcOHCAmJgYjh49SkJCAjt37iQwMNDczZL0Ab744gtmzJjBl19+ae6mSNrBzs6O48ePc+rUKeLi4ti6dSt//etfu31dKQL9gPz8fDw9PbGxsQHA09MTf39/Vq1axeTJkxk9ejR33303QggSExOZMmWKdm5mZiZjx44FID4+ntmzZzNx4kSioqLIz883y/NIeoaamhr279/PBx98oIlAfn4+s2bNIiIigtGjR7N3794LzikpKSEyMpLY2FhzNFlyDm9vb9atW8fbb7+NEILW1lYee+wxJk+ezNixY3n//fcvOaetY1auXMnevXuJiIjg73//e4eu1WsRQvSar4kTJ4qeorq6WowbN06EhoaKe++9V/z4449CCCFKS0u1Y37zm9+ILVu2CCGEGDdunEhLSxNCCLF69Wrx/PPPi6amJhEZGSmKioqEEEJ8+eWX4s477+yxZ5D0PP/+97/F7373OyGEEJGRkSI+Pl68+uqr4oUXXhBCCNHS0iKqqqqEEEI4ODiIgoICMWXKFLFjxw6ztXkg4+DgcMnPXF1dRUFBgXj//ffF888/L4QQoqGhQUycOFGkp6eLjIwMER4eLoQQbR7zww8/iOjoaO2abR3XUwBHRBfjrlGspBVF+RBYBhQJIUaf+5k7sAEIBjKBG4QQ5ca4nzFwdHQkPj6evXv38sMPP3DjjTeyevVqnJycWLNmDXV1dZSVlREeHs7y5cu54YYb2LhxIytXrmTDhg1s2LCBpKQkTp48ycKFCwH1rcHPz8/MTyYxJV988QUPP/wwADfddBNffPEFy5cv53e/+x3Nzc38+te/JiIiAoDm5mbmz5/PO++8w+zZs83ZbMl5qDETduzYQUJCAl9//TUAlZWVpKSkEBYWph3b1jHW1tYXXLOt40JCQnrikbpHV9Xj/C9gFjABOHnez9YAK8/9eyXwcnvXCRsdJhpbGk2ilO3x1VdfiQULFghvb2+RnZ0thBDiueeeE88995wQQojU1FQxfvx4kZSUJCZMmCCEECIhIUFMmzbNLO2V9DwlJSXC1tZWBAUFicGDB4tBgwaJwMBAodfrRW5urli3bp0YPXq0+Pjjj4UQQtjb24vbb79dPPHEE2Zu+cDl4pFAWlqacHd3F3q9XlxzzTXiu+++u+Sc80cCbR1z8UigreN6gvz87o0EjLImIITYA5Rd9ONfAR+f+/fHwK/bu05yaTKeazy5buN1fHT8IwprCo3RvMuSlJRESkqK9v3x48cZPnw4oK4P1NTUaKoOMHToUCwsLHj++ee58cYbARg+fDjFxcUcOHAAUN/8Tp06ZbI2S8zL119/ze23305WVhaZmZmcPXuWkJAQ9uzZg7e3N3/4wx+46667OHr0KACKovDhhx9y5swZVq9ebebWS4qLi7nnnnu4//77URSFqKgo3nvvPZqbmwFITk6mtrb2gnPaOsbJyYnq6up2jzMlubnw0EPQ3cGGKSuL+Qgh8gGEEPmKoni3d8JQ96HMGz2P2JRYvkn8BgWFyQGTWRa6jOiwaMb7jkdRFKM0rqamhgceeICKigosLS0ZNmwY69atw9XVlTFjxhAcHMzkyZMvOOfGG2/kscceIyMjAwBra2u+/vprHnzwQSorK2lpaeHhhx8mPDzcKG2U9C6++OILVq5cecHPrr32Wu644w4cHBywsrLC0dGRTz75RPu9hYUFX375JcuXL8fZ2Z
n77ruvp5s9oKmvryciIoLm5mYsLS257bbb+POf/wzA73//ezIzM5kwYQJCCLy8vNi8efMF57d1zNixY7G0tGTcuHHccccdPPTQQ+1ey1hkZ8PLL8P69aDXw223wb/+1fXrKeLc/Fh3URQlGIgR/1sTqBBCuJ73+3IhhNtlzrsbuBsgKChoYlZWFkIIjhccJyY5htiUWA7lHkIg8HfyZ+mwpSwLW8aCIQtwsHYwStslEomkt5OZCS+99L+Af+edsHKlOhJQFCVeCDGpK9c1pQgkAXPOjQL8gB+FEMOvdI1JkyaJy9UYLqwpZFvqNmJTYtmeup3qpmpsLGyYEzyHZWHLiA6NJsStDyzAmIGdZeos3QJ3dzO3RCLpOgO5H6elwd/+Bp98Ajod/P738PjjEBT0v2N6qwi8ApQKIVYrirIScBdC/OVK12hLBM6nqbWJfdn7iEmOISY5hpQydV5/lNcookOjWRa2jOmB07HUmXKmq+8w59gxAH4cP97MLZFIus5A7MdJSWrw/+wzsLKCu++Gv/wFAgIuPdbsIqAoyhfAHMATKASeAzYDG4EgIBu4Xghx8eLxBXREBC4muTSZ2ORYYlNi2Z21mxZ9C662riwetphloctYPGwxHvYeXXiq/sFA/PBI+h8DqR+fPg0vvghffgk2NnDvvfDoo3Cl7PPuiIBRXpeFEDe38av5xrj+lQjzCCMsMow/Rf6JqsYq4tLiiEmJYWvKVr48+SU6RUfkoEhtlDDae7TRFpclEonEWPzyC7zwAnz1Fdjbq4H/kUfAu92Umu7Rr+ZMnG2cuXbUtVw76lr0Qs+RvCPa4vKT3z/Jk98/SZBLkCYIc4PnYmdlZ+5mSySSAczx4/D88/Dtt+DkBE88AX/6E3h69sz9e5UItOfu1xl0io4pAVOYEjCFVXNXkVedx9aUrcQkx/DxiY9578h72FnaMX/IfC0FdZDzIKPdXyKRSK7EkSNq8N+yBVxc4Nln1bz/nl77NtrCsDFQFEUA6HQ6fH19iYqK4qmnnmLo0KFGvU9DSwO7M3eri8spMWRWZAIwzmecNkqYEjAFC52FUe9rDpLq6gAYbm9v5pZIJF2nP/Xjn3+GVatg2zZwc1Pf+h94AFxd2z+3Lcy+MGwsDCLQFs7OzkRERPDII4+wfPlyo8ztCyFILEnUpo32Z++nVbTiae/JkmFLWBa2jEVDF+Fq243/QxKJZMCzb58a/OPiwMNDnfO/7z5wdu7+tfuNCLi6uopp06aRnZ1NWloaTU1N7Z5jZWVFSEgIt9xyC3/6059w7uZftLy+nO1p24lJjmFb6jbK6suwUCyYOXimNkoY7jG8zywu/7ekBIDlPTXBKJGYgL7aj4WA3bvV4P/DD+oi72OPwT33gDFrDvUbETh/JGBjY4OXlxe+vr5YW1tTX19PRkYGlZWVtNdmRVHw9PRk9uzZPPfcc+1WCWqLVn0rP+f8TGxKLDHJMfxS9AsAQ9yGaOsIswfPxsbSpkvX7wkGUmqdpP/S1/qxELBrlxr89+4FX191g9fdd6uZP8am34hASEiIWLRoEQkJCWRkZFBWVqYZMhmwtrbGw8MDV1dXdDod5eXlFBYW0tra2u717ezsGD16NA888AC33HILFhadm/PPrswmNjmWmJQYvs/4noaWBhysHFg0dBHRodEsDV2Kn1PvspLuax8eieRy9JV+LARs364G/wMH1I1dK1fCXXeBnQkTEfuNCFxus1hZWRlxcXHs2bOH48ePk5GRQUlJySXiYGVlhYuLC/b29uj1ekpLS6mvr2/3nhYWFvj7+3Pttdfy7LPP4uZ2ib3RZalrruP7jO81UcipygFgot9EloUtY1nYMib4TUCnmLd4W1/58EgkV6K392MhIDZWDf6HD6uWDk88ofr72PTAREG/FoG2qKqqYufOnezZs4djx46RlpZGSUkJjY2NFxxnaWmJg4MDNjY21NfXU1tb26FUVFdXVyIjI3n22WeZNm3aFY8VQpBQmKBNG/2c8zMCgY+DD9Gh0USHRbNwyEKcbJw69GzGpLd/eCSSjtBb+7Fer6Z4rloFx46pZm5PPgm33w4X1Z0xKQNSBNqitraW77//nt27d3P06FFSU1MpLi6moaHhguMsLCywtbVFp9NRX19PS0tLu9e2sbFh2LBh3H333dx3331YWl5+m0VxbTHfpX5HbEos36V+R2VjJVY6K+YEz9EWl4e6GzfttS1664dHIukMva0f6/Xq5q7nn4eEBBg2DJ56Cm69VfX56WmkCHSAhoYGdu/ezY8//kh8fDwpKSkUFhZeMmWk0+mwsrJCr9dfMuV0OXQ6Hd7e3ixdupQXXnjhkvKSza3N/HT2J21PwpmSMwAM9xiuOaDOCJqBlYVpes7Zc+IXaGtrkutLJD1Bb+nHra2wcaNq73D6NAwfDk8/DTfdBG28E/YIUgS6QVNTE/v27eOHH37gyJEjJCcnU1BQQN25zSkGFEVBp9Oh1+vbzU4CcHBwYMKECTz11FNERUVpP08rSyM2RTW8+zHzR5pam3C2cSZqaBTLwpaxZNgSvBy8jP6cEomk67S0wBdfqMZuSUkwahQ88wxcfz10Mr/EJPQbEXBxcRGPP/44V199NeHh4WbNxW9paeHgwYPs3LmTw4cPk5ycTF5e3mVLximK0iFhsLS0JCgoiN/+9rc8/vjjNNHEzvSdmigU1BSgoDB10FQtBXWcz7hu/R02FBUBcKOpXagkEhNirn7c3AyffqoG/7Q0GDtWDf7XXKN6+/cW+o0IXGnHsJWVFa6urvj7+zN27FiWL1/OwoULce3OXusuoNfriY+PZ+fOnRw8eJAzZ86Ql5d3Qb3RzqAoCi4uLsybN49bH7qVX/S/EJMSw5E8dUQ0yHmQurgcGs38IfOxt+pcknFvm0uVSLpCT/fjpib4+GPVzz8zEyZMUIP/1Vf3ruBvYECIQEfQ6XTY2dnh6elJSEgIM2bMYMWKFYwbN67TewI6i16vJyEhgbi4OA4ePEhiYiI5OTlUV1d3aJRwMVbWVvgM9sF3iS9nfM9Q01SDraUtc4PnamsJg10Ht3sdKQKS/kBP9ePGRvjwQ7WM49mzMHkyPPccLF0KvdkkoF+JgCFrx9bWFmtray0Dp7GxkZqaGhobGzu0MexKGAqC+/v7M3r0aBYvXszVV1+Nuwns+4QQJCYmsmPHDn7++WdOnz7N2bNnqaqq6rxrqgI6Bx36kXqYD6ODRmvTRtMGTbtsNTUpApL+gKn7cX29Wrj95ZchNxciI9Xgv2hR7w7+BvqNCDg4OAhfX18qKyupra2lqanpioHS0tISKyurC8SiqamJhoYGWlpauvQGbkBRFGxtbXFzc2PIkCFMnTqVa6+9lsmTJ7eZGtpZkpOTiYuL48CBA5w6dYrs7GwqKio6Lw6WoAvUMf+e+dy55E6ihkXhbqcKmhQBSX/AVP24rg7efx/WrIGCApg5Uw3+8+b1jeBvoN+IwOWygxobG0lOTubMmTOkpKSQlZVFTk4OhYWFlJaWUlVVpQnGlZ7FwsICS0tLLYC3tLTQ0tLS7
VGFhYUFjo6O+Pn5MXLkSKKiorjmmmvw8up6hk9WVhbbt29n//79nDx5kuzsbMrLyzvXVh24ertif+2vCPntH9k3eXKX2yORmBtji0BNDbz3Hrz6KhQVqUH/2Wdh9myjXL7H6dci0Bnq6+s5c+YMiYmJpKWlkZmZSV5eHgUFBZSVlVFVVUVdXR3Nzc3tCoZhDaG1tbXDaaFtoSgKVlZWuLm5MXjwYCZPnsy1117LzJkzOzWqyM3NJS4ujn379pGQkEBmZiZlZWWdEgdnZ2eioqJ4++238ZYZQ5I+Qsk5R2HPbm7DraqCd96B116D0lJ1uueZZ2DGDGO00nxIEegC1dXVJCYmkpSURGpqKtnZ2eTk5FBUVERZWRnV1dWaYFwJ3blUASFEt4TCcC07Ozt8fHwYOXIk8+bN44YbbmDQoCtXPCsqKiIuLo69e/eSkJCgWWh0dFrJysqK4cOHs3r1aqKjo7v1DBJJb6SiAt56C/7+dygvVxd6n3kG2nGE6bVUVFTw1VdfsWvXLk6fPs0vv/wiRcCUlJeXc/r0aZKSkkhLSyM7O5u8vDyKioooLy+nurqa+vr6Du0wNgaWlpY4OzszePBgJkyYwIoVK4iKirpkVFFWVsZTGzeSfPAg9clJnDx9kuqKjqeyenp6cvPNN/Paa69hZY698BLJOT7KzwfgDr/OufSWlcGbb6pflZVqiuczz8CkLoXLnqG5uZm9e/eyefNm4uPjtenghoaGK436pQj0FoqLizXBSE9P1wSjuLhYEwzDwrUpURQFGxsbWp2csAsO5rGrr+bWW28lJCSEyspK/vXtv/h629ecOHqCmuwa6KB+2djYMHHiRN577z3Gjh1r0meQSAx0dk2gpER963/rLaiuVjd3Pf009Ib8iIyMDDZu3MjevXtJSUmhqKiI2traDiezWFpaYm9vj4eHB8HBwQwdOpT169f3DxFQFEVYWVlhZWWFjY0Ntra22Nvb4+joiKOjIy4uLri6uuLm5oanpydeXl54e3vj6+uLv78/fn5+2NnZ9YmqX0IICgoKNMHIyMjQBKOkpITy8nJqamraU/9uY2FhgZ2dHU4eTuj8dRRZFdHc2AzpQCnQgRklRVHw8fHhkUce4dFHHzVZWyUDl46KQFGROt//zjtq5s/116vBf8yYnmgl1NXVsWXLFrZv384vv/xCTk4OlZWV7WY6GtDpdNjY2ODg4ICzs7MW7ywsLCgtLaW4uJiKigrq6uoufpHsHyJga2srvL29qa+vp7Gxkaampi5l8Oh0OiwsLC4REwcHh0vExMPDAy8vL3x8fPD19cXPzw8/Pz8cHBx6jZjo9XrOnj3L6dOnSUlJISMjg7Nnz5Kfn09JSQkVFRVG20PRJjrQWejQt+ihg13G2tqa6dOn8/nnn19irCeRdIb2RCA/H155BdauVTd83XST6uo5apTx2iCE4MiRI3z77bccOnSI9PR0rW5JR0b2Bv8xQ1q7ra0tlpaWtLS0UFtb290Xvt4rAoqiZALVQCvQcqWGXmk6qLm5mZKSEvLz88nPz6ewsJDi4mJKSkooLS2lvLycyspKqqurqampoa6ujrq6ugvEpLNZPoqiXLAX4XJi4uLigru7uyYm3t7e+Pj44OfnR0BAAI6Ojj0qJnq9noyMDE0wXjt0iMbCQkIbGjTBqK2tpbGxsfP7EYyAoih4eHjw2GOP8eijj2oL6xLJlWhLBHJy1Bz/detUk7dbb1WDf1hY5+9RXFzMV199xffff09iYiKFhYVUV1e3m01owBDkDZ93vV5v9M+Y4QXX1tYWJycnfHx8GDZsGF999VWvF4FJQoiS9o7tiTWBlpYWSktLLxCToqIiSkpKKCsr08SkqqqKmpoaamtrLxiZNDc3d0lMzh+Z2NjYtDkycXd3v0BM/P398ff3x9nZuUticqU3qJaWFlJTU0lMTCQlJYXMzExycnIoKCigtLSUyspKampq2t2DYQysrKwYO3Ysr7/+OjNmzJDiILmAi/txdjasXg0ffKB6+99+u1rJa9iwy5/f0tJCXFwcMTExHD16VNuY2dDQYJaXofOxsrLC1tYWZ2dnfHx8CA0NZcKECUyePJkJEybg4uLS7jV6dYpobxMBY6HX6ykrKyMvL4+8vLwLRiYGMamoqLhETBoaGi6Y5uqqmBhGJnZ2djg4OODk5ISTk9Ml01wuHh54+/gQHBDAoEGDcHFx6ZKYNDU1kZycTGJiIqmpqWRkZJCYnkja2TRKS0tpqmlSF5dN8Hny9vbm1ltv5emnnzaJtYek91N3bpqkMNuCl16Cjz5Sf/6736k1fJubU9iwYQM//fQTKSkpFBcXa/PmPT3lbWFhgb29Pc7Ozvj6+hIaGsqkSZOYOnUqERERODo6Gv2evV0EMoBy1Jnk94UQ69o6ti+JgLHQ6/WUl5dfMjIpLi6+YJqrLTFpbm7usphYWlpqIxODmDg6Ol6wIOXh4YGnp+cF01z+/v64u7tfICZFtUVsS9lGbEos2xK3UZNbg0WZBcH6YLyavbCutaaqrIrCwkLKy8tpbGw02ofTwcEBb29vJk+ezF133cX8+fNNbhgo6TlqampYu3Yna9e6k5Y2HWjFwuJftLb+DThr8vsbpl9cXFzw8/MjNDSUqVOnEhkZybhx47DtBQWbersI+Ash8hRF8QbigAeEEHvO+/3dwN0AQUFBE7Oyskzanv6KEILKykry8vI0QSkuLmZ7Rga15eX4NzVdMDKpqakxqphYW1trYmLvYA/WUKfUUSbKqNHVgD14eXoxMXQi88bMY96YeQwOHIy1tTVJSUnaLm+DLUhqaio5OTlGW+jW6XQ4OzsTHBzMwoULeeihhwgICDDKtSVdRwjBgQMH2LRpE4cOHSIlJYWysrLzpiDDgKeAW4Em4H3gFSCvy/c0vPwYgnpYWBjTp0/nqquuIjw8HOueLA5sJHq1CFxwM0X5P6BGCPHq5X4/EEcCpqaz+dVCCKqrq8nNzb1kzcSQutqemHR2CH5x1oQmJufSg52dnXFxccHOzk6zBbm4LKixsLS0xMXFhbFjx3Lbbbdxyy23YGNjY5J7DQSys7N588032bJlCzk5OZ0YAY5CDf43AfXAe8CrQOElRxrSKl1cXPD399eC+rx58xg+fLjRDB97M71WBBRFcQB0Qojqc/+OA1YJIb673PFSBIyPuVxEhRDU1tZqI5OCggKKiorIyc/hRMYJknOTyS3KpamuCRrBRthgrbeGFmhtbtVGJuZetGsLa2tr3N3dmTJlCo8//jjTp083d5N6jMrKSj788EO2bt1KQkICZWVlRtz8OAZ4GrgOqMPLayPTp//ML2GuuE6bxpEVK3pN6nZvojeLwBBg07lvLYHPhRAvtnW8FAHj05utpPVCT3xePDHJMcSmxBKfHw9AoHOgVjhnXsg89E168vPzNTNAw8iktLSUsrIyKioqtPTg2tpa6urqqK+vp6amxuQ7s6+EoihYW1vj5eVFZGQkDz/8MNOmTet1mU/19fVs2bKF//znPxw/fpzCwkItjdiYWFtb4+Pjw4QJE5g7dy7R
0dEMHToURVE4dgyefx42bQInJ3jwQXj4YfD0VM/tzf24N9BrRaCzSBEwPn3pw5NXncfWlK3EpsQSlxZHbXMtdpZ2zAuZp4lCoEtgl6+vLjCu5b333iMrK8ukO7E7i8GSfPjw4cyZM4fIyEjGjx+Pv79/p32b6urq2L59O9u3byc+Pl4rYtTU1GT0Z9bpdDg4ODBixAjuvfdebrvttk5Nvxw+rAb///4XXFzUwP/QQ+DmduFxfakfmwMpApI26asfnsaWRnZn7SYmOYaY5BgyKjIAGOszVqumNjVgKha67mcBnTp1imeffZbvv/+eysrKTq1nGIKgYa+Hs7MzDg4OVFRUkJaWRlVVlVlHI11Fp9Ph6OjIoEGDCA8PZ86cOVx//fXdqpNxPgcOqMF/2zY14P/5z/DAA6oQXI6+2o97CikCkn6NEIIzJWe0aaN92ftoFa142HmwJHQJy0KXETUsCldbV6Pds7a2ljVr1vDvf/+bs2fPdjqQW1hY4O7uTnBwMGPHjuWqq65i0aJFBAQEUFVVRVxcHDt37uTHH38kPT2dpnN++T2JYUe8g4MDnp6e+Pn5aenBHh4eF+yC9/X1JSAgAF9f326lRO7dC6tWwc6d6lTPI4/AH/+oTgFJuo4UAcmAory+nB1pO4hJiWFrylbK6suwUCyYETRDmzYa4TnC6AuIQgj++9//smbNGo4dO0ZdXZ1Rr9+X0Ol0mqWKwZ/Lzs7uArNHNze3c7vgPaioiGDXrumcOuWNh0cLjzwieOABK0ywb2pAIkVA0iavZmcD8GhQkJlbYhpa9a0czD2ojRISChMAGOI2RJs2mj14NjaWHUvzNLi77tu3jz179mhOkGVlZVoZU3Nx/pu7t7c3YWFhzJw5kxtuuIHAwEAOHjzIp59+yp49e8jJyaG6utpo2VXn+2hZWFhc4o/T2trahtnjAuBZYCZqbv/LwD9R0z6vbPbo5OSkjUyyrKywd3Pj6qFD8fb21jYt+vv7Y29vb5Rn7MtIEZC0yUCbS82uzGZrylZikmPYlbGLhuYGbOtsCW8Mx7nAmfrceory1WJABk8oY38GDEHSWAZijo6OBAcHEx4ezrRp01i4cCGjRo3q0kinoqKCr7/+mi1btnDixAmKi4tpaGgw2t9ADejW2Nr+moaGx2hoGI+1dSEhIRsJCfkea2v171FdXX2B2aNhr0lzc3OX/LnOF5OLzR7PF5OLnYPP3wXf02aPxkSKgKRN+psI6PV60tLSOHToED/99BMnT54kNzeXsrIyzSvGmBkwhk1shsVfRVEuCVodCfSGdFFbW1taW1s7VVhIUZRLAqKiKDg5OTFo0CBGjhzJlClTWLhwIePGjet2CqoQgmPHjrFhwwZ2795NWloalZWVHayctwz1zX8ykAX8DfgIdbfvhZz/d/Xw8NDWHYKDgxk2bBhhYWF4enpSUlLCbfv20Vxayl329hQXF3fI7LGr/lyWlpYXWKpcSUwutlQZNGgQTk5OPS4mUgQkbdLbRaClpYWkpCQOHTrE4cOHOXXqFDk5OZSXl2s1no2+YUwHWAD2YOliqQYe72DsmuwoLfnqFwU+AAAgAElEQVRf4Y6uVnsaNmwY06ZN47rrrmPMmDHtBoS8vDxeeukltmzZQm5ubodEzGBZfPGxiqLg6OiIv78/I0aMYMqUKSxYsIBJkyYZfX9CbW3tuf0F/2XPHjcKC/+AXh+BWpHoReDfdLhk3RVQFAWsrLCwsyPo3IK1j48PgwYNIiQkhGHDhjFy5EhCQ0Mvm54qhLjA7LGgoMDsZo+GXfDni4mhpolBDDtj9ihFQNImPS0CjY2NnDp1imPHjhEfH8+pU6c4e/asVg3JFEFdURTtrdLDw0ObMw4MDCQkJISAgABycnKIj4/n5MmT5OXlUVlZ2eGpIIMtgaurK4MGDSIiIoKlS5eyZMkSk1lKNDc38/HHH7N27VpOnz7dIZsMw9/BcP7Fz+bg4ICfnx/Dhw9n0qRJLFiwgMjIyC6b7en18M03aqrnL7+oNs5PPw233AKGrQ1CCJKSkti4cSO7d+8mMTHR6AaC53N+ZS4XFxc8PT3x9fUlMDCQ4OBgQkNDGTVqFEOGDOm0KAohKC8v13bBn2+pYti42BNmj/b29tjb21/gHPz1119LEZBcniUJ6kLpti7WA66vrychIYFjx45x4sSJC97U6+vrTfOmzv9qJDs6OuLu7q6lMBre/sLCwhg5ciSBgYHEx8dfUO2prKysU9WerKyssLGzwcrZiibXJmr8aiAchg8ZTnRoNMvCljEjaAZWFp3btGUKDh8+zOrVq9mzZw+lpaUdCiiGRdeWlpbLBl97e3t8fX0JCwtj4sSJzJ07l1mzZrW5Sa21FTZuhBdegNOnYcQINfjfeCN01aanvr6eHTt28J///Efz+6+urjbpHgsLCwtNMFxdXfHy8tIEw9DHRo0aRWBgoFFHURebPRYWFlJYWHhJgSxDPQ/DLvh2xESKgKRj1NTUcPz4cY4dO8bJkyc5ffo0OTk5VFRUmDSow//e0gxvL+cH9uDgYO1DFxQUpH3oioqKtLnpxMRECgoKqKmp6XC1J0MNZXd3dwYPHszkyZP51a9+dcXCNenl6cQmxxKTEsOPmT/S1NqEs40zUUOjWBa2jCXDluDlYJxNU8agpKSEN954gw0bNpCVldWhuXuD572VlRVNTU3U1dVd8v/d1tYWX19fhg0bxsSJE5k5cy6FhXN5+WVrkpMhPByeeQauuw56yrk7NTWVb7/9lh9//JEzZ85QWFjYI4VhDHbSjo6OmmD4+fkRFBRESEgIw4cPZ+TIkfj5+fWYLcj5Zo+jRo2SIjAQEUJQUVGhvaWfPn2axMRE8vLyTP6mbkCn013w4bg4sIeGhhIeHn5BYDfQ0tLC9u3biY2N5dixY52u9mRYtHV2dsbf35/Ro0ezYMECVqxY0aFqTB2hpqmGnek7iU2OJTYllvyafBQUpg6aqo0SxvmM63VZJS0tLXz77be89dZbnDhxgurq6g6dZyhwDurbeXV1Na2tCvAbVFfPYShKAu7u7zBmTCoTJkQwZ84c5s2bh4ODg8mepzM0NDSwZ88eNm/ezJEjR8jKyqKioqLH0nstLS21z4RhFOvv709QUBBDhw4lNDSU0aNHG233Ncg1gX6DEIKSkhKOHj3KiRMnOHPmDMnJyeTm5mpz6oZayabk/GGym5ubtmgVFBSkBfaRI0cSHBx8xbee06dP880331xQ7ckwTdNevzNkadjb2+Pp6cmwYcO46qqruO666xgxwvgbwTqCXug5XnBcs7I4nHcYgACnAE0Q5oXMw8G6dwTDy3Hy5EleeeUV4uLiKCgoaOf/gxXwW+BJIAQbm5O4uPyD1tbNVFaWXzJVYzDLGzJkCOPHj2fWrFksXLhQE5Xu8HxmJgDPBAd3+1oGzp49y5YtW4iLi9NGmbW1td3OLju/b7bXzw2lJZ2cnHBzc8Pb2xt/f38GDx7MkCFDGD58OOHh4bhdbKZ06T2lCPRG9Ho9BQUFxMfH88svv3DmzBnS0tK0oG4IiD1
hl3y5+U8fHx9t/rOjgd1AZWUlmzdvJi4ujpMnT5Kfn6+ZlHXkeQztcXV1JSgoiHHjxrF8+XIWLFjQZ/z7C2oKtGpq29O2U9NUg42FDfNC5hEdGk10WDTBrsHmbma7VFVVsXbtWj755BPS0tJoaBDA74CVQBBwEFgFbNXOURQFNzc3fH19sbGxoampidLSUkpLSy+ZjrKyssLT05OQkBAiIiKYOXMmixYt6lSpUHNluTU3N7N//37+85//cPDgQTIyMqioqDDKwrZOp9M+a0KIdsXHysoKOzs7nJ2dtVrkhpTaVatWSRHoCVpbW8nNzeXIkSMkJCSQmppKeno6ubm5VFZWatMvPfU3vTiwn58JYQjsf9XrsfX3Z8/EiR2+bmtrq9bxjxw5QmZmprbY2pG3JMNiq6OjIz4+PowaNUozIPPx8enOI/damlqb2JO1R1tLSC1LBSDcK5xlYctYFraMaYOmYanrvQVO6uvhn/+El1+GvDwYObIMO7tXSE9/n4qK8g5dw9bWlkGDBuHj44O1tTW1tbXk5uZSXFx8yXSMpaUlHh4emr/SjBkziIqKumwf6e2pznl5eWzbto0dO3ZoGWjGsDI/P9UU1BfLNvbCSBHoCk1NTWRlZWmpjKmpqWRmZpKXl3fBm3pP/o2uFNjPT3Hr6Bv75T48Z8+e5ZtvvtEWW4uKiqitre2wgBnmPD08PAgJCWHKlCmsWLGCKVOm9DqvfHORXJqsWVnsydpDi74FN1s3loQuITo0msXDFuNu1/E3YVNSWwvvvw9r1kBhIcyaBc89B3PnwsWzbqmpqbz++uvExsaSn5/foUVoRVHw9PQkNDQULy8vLC0tKSkpIT09naKiokvqFhjM9wYPHsyYMWO46qqrWB8QgI2PT68VgY7Q3NzM4cOHiYmJ4aeffiI9PZ3S0lLq6+uNEWOkCIC6IJSenk58fDynT58mPT2drKwsLagbdmka45k7M+/XkcBumIrpboH0hoYGtm3bxrZt2zhx4gTH0tJoqalB6eACsU6nw9raGmdnZwICAhgzZgxRUVEsX74cJ2n12CUqGyqJS48jJlk1vCuuK0an6Lgq8CptLWGUV9dsILpDTQ28+y68+ioUF8P8+Wq2z+zZnbtObW0tH3/8MR988AFJSUnU1tZ26DxbW1sGDx5MeHg4vr6+6PV6UlJSSE1N1bJ+LkCnw8PNjaCgIMaMGcP06dOJiooi2IjrBL2B4uJidu7cydatWzlx4gT5+fkd2bHdP0WgtraW1NRULahnZGSQk5Oj/VGM+aZuqHMLalA3fLVFW4H9/F2Mhjf27gZ2A0IIjh8/zubNmzlw4ACpqamUlpZqC8YdeUbDYqvBgGz69OnccMMNWoUniWnRCz2Hcw9ro4RjBepIbbDLYG3aaE7wHGwtu27X3B5VVfD22/D661BaClFRavC/6irj3UMIwe7du3nzzTfZu3cv5eXlHX4J8fDwICwsjAkTJuDn50dlZSX/2rePmqwsOGcPcvE5Li4uBAYGEh4eTmRkJIsWLWL48OHGe6BeRktLCydOnCA2Npa9e/eyc+fO/iECOp1OQPtv1h28lra13mDm1d5uPUO6oyGwG/xMLjcVY6zAfj6lpaVs2rSJXbt2cerUKfLz86murqapqanDOfG2tra4ubkRGBjIhAkTuPrqq5k9e3afWWwdaORU5WjV1Ham76SuuQ57K3sWDFmgLi6HRhPgHGCUe1VUwD/+AX//u/rv6Gg1+E+dapTLd4js7GzefvttNm3aRHZ2dofTNm1tbbURwLRp0wgICCApKYnDhw+TnJxMfn7+JSMQRVFwcXHR/JWmTZvGokWLCA8P73cvPP0mO0hRlMs2xvCWbmFhcYFDY2tra7vFyA0blAx57BcHdsMbe0hIiEkCu4GWlhZ2797Nli1biI+PJysri/LychoaGjq12Ork5ISvry/h4eHMnTuXa665Bm9vb5O1W9JzNLQ08GPmj1oKalZlFgARvhGaLfZk/8mdrqZWVgZvvAFvvqmOAn71KzX4dyJXwKQ0NjbyxRdfsH79ehISEqipqemwnYe7uzuhoaFMmjSJefPm4e7uzt69ezl06BBnzpzRFmjPx2C+FxAQwIgRI5g6dSoLFy4kIiKiz65p9RsRsLCwEJaWlu2mTbYV2C82lBoyZIhJA/v5pKWl8c0337Bv3z6SkpK0xdbOGJDZ2dnh4eHB0KFDmTp1Ktdccw3jx4/vVsd8Ij0dgJeGDOnyNSQ9jxCC08WntWmj/Wf3oxd6vOy9WBq6lOjQaBYNXYSLbdub4kpK1Cmft95S5/+vvVa1d4iI6MEH6SJCCA4cOMC7777Lrl27KCwqQnQwldqQoRQeHs6MGTOIjo6mvr6enTt38vPPP3PmzBlyc3Oprq6+5LPp5OSEn5+fZr43f/58Jk+e3GNxpKv0GxHQ6XTC09PzAuMncwZ2A7W1tcTGxrJt2zYSEhK0lNCO5sQbFlsNQ9Nx48axePFilixZgqOJSyv19tQ6Sccoqy9je+p2YlJi2JayjfKGcix1lswMmqmtJYR5hAFqhs9rr6mLvnV1cMMNavAfPdrMD9ENDP34Sz8/1q5dy1dffUV6evqli8dtoCgK7u7uDB06lEmTJjF//nwWLVpEZmYmO3bs4Oeff9YsVKqqqi5rvufr68uIESO08yMjIy/rWmoO+o0ImGOfQGtrK0ePHmXTpk0cPHjwgrStzhiQOTg44OXlxfDhw5k1axbXXHMNISEhZp97lCLQ/2jRt/Bzzs/aKOFk0UkAgi0icTv6N05vm0lzk46bb1Z46ikYOdLMDTYCV+rHTU1NbNq0iX/+85/Ex8dTWVnZ4XVFW1tb/P39GTVqFNOnT2f58uWEh4eTnJysicOpU6fIzs6msrLykpc+e3t7fHx8NPO9efPmMXPmTKytrbv/0J1AisAVKCgoYNOmTfzwww+cOnWKwsJCampqurTYGhwczMSJE1m+fPkVXRZ7E1IE+j8/n8ph5V/L2Lt5BPpWHYz5FIf5/yBqagjLQpexNHQpPo59e5NeZ/uxoTDO2rVr+e6778jPz+9UER83NzdCQkK0wL5kyRKcnZ1JT09nx44dWkEjg9/Vxet6tra2+Pj4aOZ7c+bMYc6cOdjZ2XXuwTvIgBWB5uZmdu7cSUxMDMeOHdOMohobGzu82GrIiff19dUMyK6++mo8PT278yi9BikC/ZesLFi9Gj78UPX2/+1v4aFH6sjU7dJGCbnVuQBM9p/MsrBlRIdGM95vPDqlby2AGqsfl5WVsX79er788kuSkpI6tVHLxsYGPz8/Ro4cSWRkJMuWLdMquZ09e5YdO3awf/9+fvnlF7KysigrK7skDtnY2ODt7c3QoUOZMGECs2fPZt68ed2eFu7VIqAoymLgTdRaTuuFEKvbOvZ8ERBCcObMGTZt2sS+ffs0AzJDTnxnqj0ZDMimTZvGihUrGDNmTK9f6DEWvzl9GoBPR40yc0skxiI9HV56CT76SN3R+7vfwcqVcPGeKSEEJwpPaFYWB3MOIhD4OfqxNHQpy8KWsWDIAhytTb
suZQxM2Y9bW1v573//ywcffMCBAwc6vKcB/peGGhISwvjx45k3bx5Lly7VDN8KCgqIi4tj7969JCQkkJmZSWlp6RXN9yIiIpg1axYLFizA1dW1o+3onSKgKIoFkAwsBHKAw8DNQojTlztep9MJQ/pnexhy+g2LrREREURHRzN//nyTL7ZKJOYgJQX+9jf497/V4i2//z08/jgEBnbs/OLaYralqoZ336V+R1VjFdYW1swJnqOloA5xk1lkBk6dOsW6deuIiYnh7NmzHayxrGJtba0tJEdGRrJ06dILSnyWlpZq4nD8+HEyMjIoKSm5rPmeh4cHQ4YMYdy4ccycOZOFCxdeMlPRm0UgEvg/IUTUue+fABBCvNTG8eLcfzUDMm9vb0aMGMHs2bO55pprCAwMNPtiq0TSk5w5Ay++CJ9/DtbWcM898Nhj4O/f9Ws2tzazL3sfsSmxxCTHkFSaBMBIz5GalcX0wOm9oppab6KqqopPPvmEzz77jJMnT1JbW9vh6SRFUXB2diY4OJhx48Yxd+5coqOjL6grUFlZyc6dO9mzZw/Hjh0jPT29TfM9d3d3zXxv/fr1vVYErgMWCyF+f+7724CpQoj7L3f8xIkTRXx8vMnaMxB5OCUFgDdCQ83cEklnOXVKLeG4YQPY2cF998Ejj4Cvr/HvlVqWqk0b7c7cTbO+GVdbV62a2uJhi/G0N986WW/ux3q9nri4OD788EN2795NSUlJp2oSWFlZaRlGkZGRLF68mOnTp1+wP6i2tpZdu3axe/dujh07RmpqKsXFxeenyPZaEbgeiLpIBKYIIR4475i7gbsBgoKCJmZlZZmsPQMRuTDc9zhxQg3+X38Njo5w//3w5z+DEQtRXZHqxmri0uO0amqFtYXoFB3TBk3Tpo3GeI/p0RF5X+zHaWlprF+/ns2bN5ORkXGJW2p7ODs7a7U2Zs+ezdVXX32JzXZDQwM//PADS5cu7bUi0KnpoN5eT6Av0hc/PAOVo0fh+edh82ZwdoYHH4SHHwYPD/O1SS/0xOfFa9NG8fnqSD3QOfCCamp2VqZJfTTQX/pxXV0dn3/+OZ999hlHjx697K7lK2FlZYWXlxdhYWFMnTqVqKgoZs6ciZWVVa8VAUvUheH5QC7qwvAtQohTlzteioDx6S8fnv7MoUNq8I+JAVdXNfA/+CC0U1HQLORX52uGdzvSdlDbXIutpS3zQ+Zr1dSCXIKMft/+3I/1ej379+9n/fr1qkVGYWFXitF0WQRMuudZCNGiKMr9wHbUFNEP2xIAiWSgceAArFoF330H7u7qFND994NL23ZAZsfPyY+7JtzFXRPuorGlkT1Ze1TDuxR1XwJbYazPWG2UMDVgaqcN7wYaOp2OmTNnMnPmzAt+npOTwwcffMCmTZtITk6mvr7eJPfv05vFJO1zd5Ka9bGuH3ur9zX27FHf/HfuBE9PePRRddG3L9fsEUKQVJqkbVLbm7WXVtGKh52HVk0tamgUbnZdG97IfqxSX1/P119/zWeffcbhw4cpLy83TCf1zumgziJFQNJfEQJ+/BH++lfYvRt8fNQ0z3vuAQcHc7fO+FQ0VLA9dTuxKbFsTdlKaX0pFooFM4JmaKOEEZ4jZLq3ERBCoNPppAhIJL0RIdQ3/lWrYN8+8PNTN3j94Q9gb2/u1vUMrfpWDuUe0kYJJwpPABDiGqI5oM4ePBsbS1n4qKv02s1inUWKgPGRw2jzIARs26YG/4MHYdAg1drhrrvA1nSVI/sEZyvPEpuipp/uSt9FfUs9DlYOLBy6kOjQaJaGLsXf6cKdcLIfX5nuiEDvMMOWmIzki+qxSkyLEPDf/6rBPz4eBg+G999Xzd1khU+VQJdA7pl0D/dMuof65np+yPxBGyVsPrMZgAl+E7Q9CZP8J8l+bEKkCEgkRkCvV/P7n38ejh+HIUPggw/gttugDziOmw07KzuWhi5laehShBCcLDqp7Ul4Ye8LrNqzCh8HHxT3qbj7zKRq1FCcbZzN3ex+hZwO6uf05/zq3kBrK3zzjRr8T56E0FC1itctt6gmb5KuU1pXynep3xGTEsPXSbG0NFdjpbNi1uBZ2lrCMPdh5m5mr0CuCUjaRIqAaWhtVT19XngBEhNhxAi1ePuNN8IAcSnvUWbHH6ay/ASLRDKxKbGcLlaNiMM8wrRpoxlBM7C26NmKXr0FuSYgaZMIaattVFpaVDfPF1+E5GS1bu+GDWoRdxn8Tcd4Z1dwns2a0N+zZuEaMsoztGmjtw+/zes/v46zjTNRQ6OIDo1mSegSvB28zd3sPoEcCUgkHaC5WfXxf/FFtajLuHHw7LPw61+Drm8V6ep31DTVsCt9lyYK+TX5KChMCZiiVVOL8I3o13sS5HSQRGIiGhvh44/VYi5ZWTBxohr8ly9Xq3pJehdCCI4VHNNssQ/nHkYgCHAK0LyN5ofMx8G6f+3QkyIgaRNZXrJrNDSotXtXr4azZ2HqVDX4L1kig7856Go/LqwpZFvqNmKSY9iRtoPqpmpsLGyYGzJXW0sIdg02QYt7FrkmIGmTnE56mA906uth3TpYswby8uCqq2D9eli4UAZ/c9LVfuzj6MMdEXdwR8QdNLU2sTdrrzZtdP+2+7l/2/2Ee4VrVhaRgZFY6gZWWBxYTyuRtEFtLaxdC6+8AoWFMHs2fPopzJkjg39/wdrCmvlD5jN/yHxej3qd5NJkbdro9Z9fZ81Pa3CzdWPxsMVaNTV3O3dzN9vkSBGQDGiqq+Hdd+HVV6GkBObPV7N9Zs82d8skpibMI4ywyDD+FPknKhsq1WpqKbHEJsfyxckv0Ck6pgdO16aNwr3C++XishQByYCkshLefhtefx3KymDxYjXPf/p0c7dMYg5cbF24btR1XDfqOvRCz5G8I2qdhOQYVu5aycpdKxnsMlibNpobMhdby/5hAiVFoJ8T2ZsrlJiB8nL4xz/gjTegogKWLVOD/5Qp5m6Z5Er0ZD/WKTqmBExhSsAUVs1dRW5VrlZN7aMTH/HukXext7Jnfsh8LQU1wDmgx9pnbGR2kGRAUFqqBv5//AOqqtT8/qefVlM+JZKO0tDSwO7M3Vo1tcyKTAAifCO0UcJk/8k9Xk1NpohKJG1QXKxO+bz9NtTUwHXXqcF/3Dhzt0zS1xFCkFiSqDmg7s/eT6toxcveiyWhS1gWuoxFQxfhYmv6UYwUAUmbXHvyJADfjB5t5pb0LIWF6mLvu++qaZ833ghPPaXaPEj6Hn2hH5fVl2nV1LalbqOsvgxLnSUzg2Zqo4QwjzCTLC7LfQKSNiltbjZ3E3qUvDw1zXPtWmhqUt08n3pKNXiT9F36Qj92t3Pn5jE3c/OYm2nVt/Jzzs/aKOHRuEd5NO5RhroN1RxQZw2e1SsM76QISPoFOTnw8svwz3+qJm+33QZPPqlaO0skPY2FzoKrgq7iqqCreGnBS2RVZLE1ZSsxKTG8H/8+bx58E0drRxYNXaRVU/N19DVLW6UISPo0WVmqtcOHH6qFXe64A554Qi3qIpH0Fga7Dubeyfdy7+R7qWuu4/uM77VRw
reJ3wIw2X+yNm003m88OqVnnAmlCEj6JOnpqqnbxx+rO3rvukut4Tt4sLlbJpFcGXsre21KSAhBQmGCJgh/3f1X/m/3/+Hr6KsJwoIhC3C0Np0lvBSBfs58NzdzN8GopKSods6ffqpW7rrnHvjLXyAw0Nwtk5iS/taPDSiKwjjfcYzzHcdTs56iuLb4f9XUTn/NB8c+wNrCmjnBc1QX1NBohroPNW4bTJUdpCjK/wF/AIrP/ehJIcTWK50js4MkbZGYqAb/L75QC7b/v/8Hjz0G/v7mbplEYhqaW5vZf3a/5m90puQMACM8R2hWFlcFXoWVhVXvTBE9JwI1QohXO3qOFAHJxZw8qZZw3LgR7Ozgj3+ERx4BHx9zt0wi6VnSytI0B9TdWbtpam3CxcaFxcMWs+H6DTJFVHJ5liQkALBt7Fgzt6RznDihFm//5htwdFTn+//0J/DyMnfLJOagr/ZjYzLUfSgPTn2QB6c+SHVjNTvTd6qGdymx3bquqUXgfkVRbgeOAI8IIcpNfD/JRdS3tpq7CZ0iPl4N/v/5Dzg7q74+Dz0EHh7mbpnEnPS1fmxqnGycWDFyBStGrkAv9Fg82nWbim7lICmKslNRlJOX+foV8B4wFIgA8oHX2rjG3YqiHFEU5UhxcfHlDpEMAA4eVM3cJk2C3bvhr39V0z9XrZICIJFcie6mknZrJCCEWNCR4xRF+ScQ08Y11gHrQF0T6E57JH2Pn35SA/327eDuri7+3n+/OgqQSCSmx2S7ERRF8Tvv2xXASVPdS9L32LMHFixQyzceParu9s3MVHf5SgGQSHoOU64JrFEUJQIQQCbw/0x4L0kbLOtFcylCwA8/qG/+u3erGT6vvaamezo4mLt1kt5Mb+rH/Q3pIioxOUJAXJwa/PfvV3P7H38c/vAHNe1TIpF0j+7sE+gZcwrJgEQI2LoVIiMhKkpd6H3nHUhLgwcflAIgkfQGpAj0c+YcO8acY8d69J5CqCmekydDdDQUFMD770NqKtx3H9j2j9Kskh7EHP14oCBFQGI09Hp1c9f48Wr5xooK1d0zJQXuvlu1e5BIJL0LKQKSbtPaChs2wNixavnGujrV3fPMGbjzTrCyMncLJRJJW0gRkHSZlhb47DO1ZONNN6kjgc8+U83ebr9ddfmUSCS9GykCkk7T0qK+6Y8aBb/5jRrsN25Uzd5uuQUsur6DXSKR9DDyXa2fc4O3t9Gu1dQE//63WswlPR0iIuDbb+FXvwKdfJ2QmBBj9mPJhUgR6OfcFxDQ7Ws0NsJHH8FLL6lpnpMmwRtvqF4/itL9Nkok7WGMfiy5PPL9rZ9T19pKXRcdGBsa1Lz+YcPUCl6+vmre/6FDsHy5FABJz9Gdfiy5MnIk0M9Zes6H/cfx4zt8Tl0d/POfqp9Pfr7q7/Phh6rXjwz8EnPQlX4s6RhSBCQatbWwdi288goUFsKcOWq2z5w5MvhLJP0VKQISqqvVaZ/XXoOSEvWNf+NGmDXL3C2TSCSmRorAAKayEt56C/7+dygrg8WL1Upe06ebu2USiaSnkCIwACkvhzffVL8qKtRF3qefhilTzN0yiUTS00gR6Ofc4eur/bu0VH3rf+stqKqCFSvU4D9hghkbKJF0gPP7scS4SBHo59zh50dxMaxcqc7719aq/j5PP616/UgkfYE7/PzaP0jSJaQI9GMKCuD5l1v5aJ2O+nqFm26Cp56C8HBzt0wi6RwlTU0AeFpbm7kl/Q8pAv2QvDxYs0b18G9o0uGzpJz4V90ZMcLcLZNIusZ1p04Bcp+AKZAi0I84e1bd4LV+vWrydvvtcOpXp7ELamLECHdzN08ikfRCpG1EPyAzU2czTncAAA2TSURBVLV1GDpUffu//XZITlZ3+doFNZm7eRKJpBcjRwJ9mLQ01dTt449VF8/f/14t4D54sLlbJpFI+gpSBPogycnw4ouqpYOlJdx7L/zlLzBokLlbJpFI+hpSBPoQiYlq8P/iC7Ve74MPwmOPwZWy5+6VFrySfoDsx6ZDikAf4Jdf4IUX4KuvwN4eHnlE/fLxaf/cG2UxDkk/QPZj09GthWFFUa5XFOWUoih6RVEmXfS7JxRFSVUUJUlRlKjuNXNgcvw4XHutuqlr2zZ44gl1EXjNmo4JAMDZhgbONjSYtJ0SiamR/dh0dHckcBK4Bnj//B8qijIKuAkIB/yBnYqihAkhZFWIDnDkCDz/PGzZAi4u8Oyz8NBD4N6FLM/bEhMBmV8t6dvIfmw6uiUCQohEAOVSs/lfAV8KIRqBDEVRUoEpwIHu3K+/c/AgrFqlVu9yc1P//cAD4Opq7pZJJJL+iqnWBAKAn8/7PufczySXYf9+NeDv2AEeHmoh9z/+EZydzd0yiUTS32lXBBRF2QlczsLvKSHEf9o67TI/E21c/27gboCgoKD2mtOv2L1bDf7ffw9eXupc/733gqOjuVsmkUgGCu2KgBBiQReumwMEnvf9ICCvjeuvA9YBTJo06bJC0Z8QQg36q1bBnj1q8fbXX4e77wYHB3O3TiKRDDRMNR20BfhcUZTXUReGQ4FDJrpXn0AIdbpn1Sr46Sfw94d//EPd5WtnZ7r7PhIY2P5BEkkvR/Zj09EtEVAUZQXwFuAFxCqKclwIESWEOKUoykbgNNAC/HGgZgYJoS70rloFhw5BYCC8+y7ceSfY2pr+/ss9PU1/E4nExMh+bDq6mx20CdjUxu9eBF7szvX7MkKoKZ6rVsHRoxAcDOvWwW9/Cz1piZ5UVwfAcHv7nrupRGJkZD82HXLHsJHR6+Hbb9UdvidOqM6eH34Iv/kNWFn1fHv+X1ISIPOrJX0b2Y9Nh7SSNhKtrfDll+ru3uuvh/p6+OQTOHNGnfoxhwBIJBJJe0gR6CYtLfDpp2rJxptvVkcCn38Op0/DbbepLp8SiUTSW5Ei0EWam+Gjj2DkSDXYW1vDxo1w8qQqBhYW5m6hRCKRtI98T+0kTU3qNM/f/gYZGTB+PGzaBFdfrRZ2kUgkkr6EFIEO0tgI//qXWskrOxsmTVLz/KOj4VLrpN7D07LMmKQfIPux6ZAi0A4NDWrh9tWrITcXpk1T6/hGRfXu4G9gQVesRyWSXobsx6ZDikAb1NWpef1r1kB+PsyYoa4BzJ/fN4K/gePV1QBEODmZuSUSSdeR/dh0SBG4iJoaWLsWXnkFiopg7lw122f27L4V/A08nJoKyPxqSd9G9mPTIUXgHNXV8M478NprUFICCxfCM8/AzJnmbplEIpGYjgEvApWV8NZb8Pe/Q1kZLFmiBv/ISHO3TCKRSEzPgBWB8nJ44w14801VCJYvV4P/5MnmbplEIpH0HANOBEpL1bf+f/xDnQJasUIN/nKqUSKRDEQGjAgUFanz/e+8o2b+XH89PP00jBlj7paZlr8NGWLuJkgk3Ub2Y9PR70WgoEDN9HnvPXXD1003wVNPwahR5m5ZzzDdxcXcTZBIuo3sx6aj34pAbq6a479unWr18JvfwJNPwvDh5m5Zz/JTZSUgP0SSvo3sx6aj34lAdja8/LK6y1evh9tvhyee
gGHDzN0y8/Bkejog86slfRvZj01HvxGBzEzV1+df/1K/v/NOWLkSQkLM2iyJRCLp1fR5EUhLUx09P/lEdfH8wx/g8cchKMjcLZNIJJLeT58VgaQkNfh/9plateu+++Avf4GAAHO3TCKRSPoOfU4ETp+GF19USzna2MBDD8Gjj/L/27vfGCuuMo7j31+pQAh/w9qUWBAaoeFPiVZC2jdVQ6MNUYhNVUwaW20kFOUFEqMNSW3AvrFpNEZrwdigjVr+NBTQEixarTFuBUNKKRUC2BYQslIUX7Si4OOLmXY3ZHfv7M7OzN6Z3ychmd2Ze+6Th3Pvs3PmzBmmTKk6MjOz9tM2ReCll5KHt2/dCmPGJF/8a9bANddUHdnw9p2mXhG3WnE/Ls6wLwIHDsD69cnTu8aNS2b6rF4NHR1VR9YevPSu1YH7cXGGbRHYty/58t+1CyZMgAceSIZ+/GyJgdl7/jzgh3JYe3M/Lk6uIiDpU8CDwGxgYUTsT38/HXgFOJIe2hkRK7K02dkJ69bB7t0waVKyvWoVTJyYJ9Lm+uZrrwH+8Fh7cz8uTt4zgUPAHcCGXvYdj4j3D6Sxo0eTJZwnT07m/K9cCePH54zQzMz6lKsIRMQrABqiR2699Vayzs+KFTB27JA0aWZm/biqwLZnSDog6XeSMj2f68Ybk1k/LgBmZuVoeSYgaS9wbS+71kbEjj5edgaYFhFvSPog8LSkuRHxr17aXw4sB5jm23zNzErVsghExG0DbTQiLgIX0+0/SzoOzAL293LsRmAjwIIFC2Kg72X929C0ZVOtltyPi1PIFFFJ7wbOR8RlSdcDM4ETRbyX9e+GMWOqDsEsN/fj4uS6JiDpk5JOAbcAv5S0J911K3BQ0ovANmBFRJzPF6oNxq5z59h17lzVYZjl4n5cnLyzg7YD23v5/VPAU3natqHxyMmTAHzCt1hbG3M/Lk6Rs4PMzGyYcxEwM2swFwEzswZzETAza7Bhu4qoDY0nZs+uOgSz3NyPi+MiUHNTR4+uOgSz3NyPi+PhoJrb3NXF5q6uqsMwy8X9uDg+E6i5H5w+DcBn/BxOa2Pux8XxmYCZWYO5CJiZNZiLgJlZg7kImJk1mC8M19y2uXOrDsEsN/fj4rgI1FzHyJFVh2CWm/txcTwcVHObzpxh05kzVYdhlov7cXFcBGpu09mzbDp7tuowzHJxPy6Oi4CZWYO5CJiZNZiLgJlZg7kImJk1mKeI1twz8+dXHYJZbu7HxXERqLkxI0ZUHYJZbu7HxfFwUM09evo0j6bL8Jq1K/fj4rgI1NyWri62+GEc1ubcj4uTqwhIeljSXyQdlLRd0sQe++6XdEzSEUkfyx+qmZkNtbxnAs8C8yJiPnAUuB9A0hxgGTAXuB14VJIH9czMhplcRSAifhURl9IfO4Hr0u2lwJMRcTEi/gocAxbmeS8zMxt6Q3lN4AvA7nT7PcDJHvtOpb8zM7NhpOUUUUl7gWt72bU2Inakx6wFLgE/fftlvRwffbS/HFie/nhR0qFWMTVEB3BuqBrr7T+kjQxpLtpco3NxRT9udC6ucMNgX9iyCETEbf3tl3Q38HFgUUS8/UV/Cpja47DrgL/10f5GYGPa1v6IWJAh7tpzLro5F92ci27ORTdJ+wf72ryzg24HvgYsiYg3e+zaCSyTNErSDGAm8Kc872VmZkMv7x3D3wNGAc9KAuiMiBUR8bKkLcBhkmGiL0XE5ZzvZWZmQyxXEYiI9/Wz7yHgoQE2uTFPPDXjXHRzLro5F92ci26DzoW6h/HNzKxpvGyEmVmDVVIEJN2eLidxTNLXe9k/StLmdP8LkqaXH2U5MuTiK5IOp0tz/FrSe6uIswytctHjuDslhaTazgzJkgtJn077xsuSflZ2jGXJ8BmZJuk5SQfSz8niKuIsmqTHJXX1NY1eie+meToo6aZMDUdEqf+AEcBx4HpgJPAiMOeKY1YCj6Xby4DNZcc5jHLxEWBMun1fk3ORHjcOeJ7kDvUFVcddYb+YCRwAJqU/X1N13BXmYiNwX7o9B3i16rgLysWtwE3AoT72Lya5YVfAzcALWdqt4kxgIXAsIk5ExH+AJ0mWmehpKfDjdHsbsEjp9KOaaZmLiHguuqff9lyao26y9AuA9cC3gH+XGVzJsuTii8D3I+IfABFR1yU2s+QigPHp9gT6uCep3UXE88D5fg5ZCvwkEp3ARElTWrVbRRHIsqTEO8dEsjbRBWByKdGVa6DLa9xL99IcddMyF5I+AEyNiF+UGVgFsvSLWcAsSX+Q1Jnes1NHWXLxIHCXpFPAM8CqckIbdga1XE8VTxbLsqRE5mUn2txAlte4C1gAfKjQiKrTby4kXQV8G7inrIAqlKVfXE0yJPRhkrPD30uaFxH/LDi2smXJxWeBTRHxiKRbgCfSXPyv+PCGlUF9b1ZxJpBlSYl3jpF0NckpXn+nQe0q0/Iakm4D1pLcmX2xpNjK1ioX44B5wG8lvUoy5rmzpheHs35GdkTEfyNZqfcISVGomyy5uBfYAhARfwRGk6wr1DSZl+vpqYoisA+YKWmGpJEkF353XnHMTuDudPtO4DeRXvmomZa5SIdANpAUgLqO+0KLXETEhYjoiIjpETGd5PrIkogY9Jopw1iWz8jTJJMGkNRBMjx0otQoy5ElF68DiwAkzSYpAn8vNcrhYSfwuXSW0M3AhYg40+pFpQ8HRcQlSV8G9pBc+X88kmUm1gH7I2In8COSU7pjJGcAy8qOswwZc/EwMBbYml4bfz0illQWdEEy5qIRMuZiD/BRSYeBy8BXI+KN6qIuRsZcrAF+KGk1yfDHPXX8o1HSz0mG/zrS6x/fAN4FEBGPkVwPWUzy/JY3gc9nareGuTIzs4x8x7CZWYO5CJiZNZiLgJlZg7kImJk1mIuAmVmDuQiYmTWYi4CZWYO5CJiZNdj/AfYEjbWN5IUkAAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "%matplotlib inline\n", + "plot_pomdp_utility(utility)" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -2221,7 +2988,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.1" + "version": "3.6.4" }, "widgets": { "state": { @@ -4714,4 +5481,3 @@ "nbformat": 4, "nbformat_minor": 1 } - diff --git a/mdp.py b/mdp.py index b9a6eaea0..657334d59 100644 --- a/mdp.py +++ b/mdp.py @@ -9,6 +9,8 @@ from utils import argmax, 
vector_add, orientations, turn_right, turn_left import random +import numpy as np +from collections import defaultdict class MDP: @@ -51,11 +53,13 @@ def __init__(self, init, actlist, terminals, transitions=None, reward=None, stat def R(self, state): """Return a numeric reward for this state.""" + return self.reward[state] def T(self, state, action): """Transition model. From a state and an action, return a list of (probability, result-state) pairs.""" + if not self.transitions: raise ValueError("Transition model is missing") else: @@ -65,6 +69,7 @@ def actions(self, state): """Return a list of actions that can be performed in this state. By default, a fixed list of actions, except for terminal states. Override this method if you need to specialize by state.""" + if state in self.terminals: return [None] else: @@ -106,7 +111,10 @@ def check_consistency(self): class MDP2(MDP): - """Inherits from MDP. Handles terminal states, and transitions to and from terminal states better.""" + """ + Inherits from MDP. Handles terminal states, and transitions to and from terminal states better. + """ + def __init__(self, init, actlist, terminals, transitions, reward=None, gamma=0.9): MDP.__init__(self, init, actlist, terminals, transitions, reward, gamma=gamma) @@ -160,11 +168,13 @@ def T(self, state, action): def go(self, state, direction): """Return the state that results from going in this direction.""" + state1 = vector_add(state, direction) return state1 if state1 in self.states else state def to_grid(self, mapping): """Convert a mapping from (x, y) to v into a [[..., v, ...]] grid.""" + return list(reversed([[mapping.get((x, y), None) for x in range(self.cols)] for y in range(self.rows)])) @@ -190,6 +200,7 @@ def to_arrows(self, policy): def value_iteration(mdp, epsilon=0.001): """Solving an MDP by value iteration. [Figure 17.4]""" + U1 = {s: 0 for s in mdp.states} R, T, gamma = mdp.R, mdp.T, mdp.gamma while True: @@ -206,6 +217,7 @@ def value_iteration(mdp, epsilon=0.001): def best_policy(mdp, U): """Given an MDP and a utility function U, determine the best policy, as a mapping from state to action. (Equation 17.4)""" + pi = {} for s in mdp.states: pi[s] = argmax(mdp.actions(s), key=lambda a: expected_utility(a, s, U, mdp)) @@ -214,6 +226,7 @@ def best_policy(mdp, U): def expected_utility(a, s, U, mdp): """The expected utility of doing a in state s, according to the MDP and U.""" + return sum(p*U[s1] for (p, s1) in mdp.T(s, a)) # ______________________________________________________________________________ @@ -221,6 +234,7 @@ def expected_utility(a, s, U, mdp): def policy_iteration(mdp): """Solve an MDP by policy iteration [Figure 17.7]""" + U = {s: 0 for s in mdp.states} pi = {s: random.choice(mdp.actions(s)) for s in mdp.states} while True: @@ -238,6 +252,7 @@ def policy_iteration(mdp): def policy_evaluation(pi, U, mdp, k=20): """Return an updated utility mapping U from each state in the MDP to its utility, using an approximation (modified policy iteration).""" + R, T, gamma = mdp.R, mdp.T, mdp.gamma for i in range(k): for s in mdp.states: @@ -245,6 +260,198 @@ def policy_evaluation(pi, U, mdp, k=20): return U +class POMDP(MDP): + + """A Partially Observable Markov Decision Process, defined by + a transition model P(s'|s,a), actions A(s), a reward function R(s), + and a sensor model P(e|s). We also keep track of a gamma value, + for use by algorithms. The transition and the sensor models + are defined as matrices. We also keep track of the possible states + and actions for each state. 
[page 659].""" + + def __init__(self, actions, transitions=None, evidences=None, rewards=None, states=None, gamma=0.95): + """Initialize variables of the pomdp""" + + if not (0 < gamma <= 1): + raise ValueError('A POMDP must have 0 < gamma <= 1') + + self.states = states + self.actions = actions + + # transition model cannot be undefined + self.t_prob = transitions or {} + if not self.t_prob: + print('Warning: Transition model is undefined') + + # sensor model cannot be undefined + self.e_prob = evidences or {} + if not self.e_prob: + print('Warning: Sensor model is undefined') + + self.gamma = gamma + self.rewards = rewards + + def remove_dominated_plans(self, input_values): + """ + Remove dominated plans. + This method finds all the lines contributing to the + upper surface and removes those which don't. + """ + + values = [val for action in input_values for val in input_values[action]] + values.sort(key=lambda x: x[0], reverse=True) + + best = [values[0]] + y1_max = max(val[1] for val in values) + tgt = values[0] + prev_b = 0 + prev_ix = 0 + while tgt[1] != y1_max: + min_b = 1 + min_ix = 0 + for i in range(prev_ix + 1, len(values)): + if values[i][0] - tgt[0] + tgt[1] - values[i][1] != 0: + trans_b = (values[i][0] - tgt[0]) / (values[i][0] - tgt[0] + tgt[1] - values[i][1]) + if 0 <= trans_b <= 1 and trans_b > prev_b and trans_b < min_b: + min_b = trans_b + min_ix = i + prev_b = min_b + prev_ix = min_ix + tgt = values[min_ix] + best.append(tgt) + + return self.generate_mapping(best, input_values) + + def remove_dominated_plans_fast(self, input_values): + """ + Remove dominated plans using approximations. + Resamples the upper boundary at intervals of 100 and + finds the maximum values at these points. + """ + + values = [val for action in input_values for val in input_values[action]] + values.sort(key=lambda x: x[0], reverse=True) + + best = [] + sr = 100 + for i in range(sr + 1): + x = i / float(sr) + maximum = (values[0][1] - values[0][0]) * x + values[0][0] + tgt = values[0] + for value in values: + val = (value[1] - value[0]) * x + value[0] + if val > maximum: + maximum = val + tgt = value + + if all(any(tgt != v) for v in best): + best.append(np.array(tgt)) + + return self.generate_mapping(best, input_values) + + def generate_mapping(self, best, input_values): + """Generate mappings after removing dominated plans""" + + mapping = defaultdict(list) + for value in best: + for action in input_values: + if any(all(value == v) for v in input_values[action]): + mapping[action].append(value) + + return mapping + + def max_difference(self, U1, U2): + """Find maximum difference between two utility mappings""" + + for k, v in U1.items(): + sum1 = 0 + for element in U1[k]: + sum1 += sum(element) + sum2 = 0 + for element in U2[k]: + sum2 += sum(element) + return abs(sum1 - sum2) + + +class Matrix: + """Matrix operations class""" + + @staticmethod + def add(A, B): + """Add two matrices A and B""" + + res = [] + for i in range(len(A)): + row = [] + for j in range(len(A[0])): + row.append(A[i][j] + B[i][j]) + res.append(row) + return res + + @staticmethod + def scalar_multiply(a, B): + """Multiply scalar a to matrix B""" + + for i in range(len(B)): + for j in range(len(B[0])): + B[i][j] = a * B[i][j] + return B + + @staticmethod + def multiply(A, B): + """Multiply two matrices A and B element-wise""" + + matrix = [] + for i in range(len(B)): + row = [] + for j in range(len(B[0])): + row.append(B[i][j] * A[j][i]) + matrix.append(row) + + return matrix + + @staticmethod + def matmul(A, B): + 
"""Inner-product of two matrices""" + + return [[sum(ele_a*ele_b for ele_a, ele_b in zip(row_a, col_b)) for col_b in list(zip(*B))] for row_a in A] + + @staticmethod + def transpose(A): + """Transpose a matrix""" + + return [list(i) for i in zip(*A)] + + +def pomdp_value_iteration(pomdp, epsilon=0.1): + """Solving a POMDP by value iteration.""" + + U = {'':[[0]* len(pomdp.states)]} + count = 0 + while True: + count += 1 + prev_U = U + values = [val for action in U for val in U[action]] + value_matxs = [] + for i in values: + for j in values: + value_matxs.append([i, j]) + + U1 = defaultdict(list) + for action in pomdp.actions: + for u in value_matxs: + u1 = Matrix.matmul(Matrix.matmul(pomdp.t_prob[int(action)], Matrix.multiply(pomdp.e_prob[int(action)], Matrix.transpose(u))), [[1], [1]]) + u1 = Matrix.add(Matrix.scalar_multiply(pomdp.gamma, Matrix.transpose(u1)), [pomdp.rewards[int(action)]]) + U1[action].append(u1[0]) + + U = pomdp.remove_dominated_plans_fast(U1) + # replace with U = pomdp.remove_dominated_plans(U1) for accurate calculations + + if count > 10: + if pomdp.max_difference(U, prev_U) < epsilon * (1 - pomdp.gamma) / pomdp.gamma: + return U + + __doc__ += """ >>> pi = best_policy(sequential_decision_environment, value_iteration(sequential_decision_environment, .01)) diff --git a/mdp_apps.ipynb b/mdp_apps.ipynb index 50dce5427..da3ae7b06 100644 --- a/mdp_apps.ipynb +++ b/mdp_apps.ipynb @@ -7,15 +7,13 @@ "# APPLICATIONS OF MARKOV DECISION PROCESSES\n", "---\n", "In this notebook we will take a look at some indicative applications of markov decision processes. \n", - "We will cover content from [`mdp.py`](https://github.com/aimacode/aima-python/blob/master/mdp.py), for chapter 17 of Stuart Russel's and Peter Norvig's book [*Artificial Intelligence: A Modern Approach*](http://aima.cs.berkeley.edu/)." + "We will cover content from [`mdp.py`](https://github.com/aimacode/aima-python/blob/master/mdp.py), for **Chapter 17 Making Complex Decisions** of Stuart Russel's and Peter Norvig's book [*Artificial Intelligence: A Modern Approach*](http://aima.cs.berkeley.edu/).\n" ] }, { "cell_type": "code", "execution_count": 1, - "metadata": { - "collapsed": true - }, + "metadata": {}, "outputs": [], "source": [ "from mdp import *\n", @@ -33,7 +31,14 @@ " - State, action and next state dependent reward function\n", "- Grid MDP\n", " - Pathfinding problem\n", - "\n", + "- POMDP\n", + " - Two state POMDP" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ "## SIMPLE MDP\n", "---\n", "### State dependent reward function\n", @@ -1429,6 +1434,371 @@ "As you can infer, we can find the path to the terminal state starting from any given state using this policy.\n", "All maze problems can be solved by formulating it as a MDP." ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## POMDP\n", + "### Two state POMDP\n", + "Let's consider a problem where we have two doors, one to our left and one to our right.\n", + "One of these doors opens to a room with a tiger in it, and the other one opens to an empty hall.\n", + "
\n", + "We will call our two states `0` and `1` for `left` and `right` respectively.\n", + "
\n", + "The possible actions we can take are as follows:\n", + "
\n", + "1. __Open-left__: Open the left door.\n", + "Represented by `0`.\n", + "2. __Open-right__: Open the right door.\n", + "Represented by `1`.\n", + "3. __Listen__: Listen carefully to one side and possibly hear the tiger breathing.\n", + "Represented by `2`.\n", + "\n", + "
\n", + "The possible observations we can get are as follows:\n", + "
\n", + "1. __TL__: Tiger seems to be at the left door.\n", + "2. __TR__: Tiger seems to be at the right door.\n", + "\n", + "
\n", + "The reward function is as follows:\n", + "
\n", + "We get +10 reward for opening the door to the empty hall and we get -100 reward for opening the other door and setting the tiger free.\n", + "
\n", + "Listening costs us -1 reward.\n", + "
\n", + "We want to minimize our chances of setting the tiger free.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Our transition probabilities can be defined as:\n", + "
\n", + "
\n", + "Action `0` (Open left door)\n", + "$\\\\\n", + " P(0) = \n", + " \\left[ {\\begin{array}{cc}\n", + " 0.5 & 0.5 \\\\\n", + " 0.5 & 0.5 \\\\\n", + " \\end{array}}\\right] \\\\\n", + " \\\\\n", + " $\n", + " \n", + "Action `1` (Open right door)\n", + "$\\\\\n", + " P(1) = \n", + " \\left[ {\\begin{array}{cc}\n", + " 0.5 & 0.5 \\\\\n", + " 0.5 & 0.5 \\\\\n", + " \\end{array}}\\right] \\\\\n", + " \\\\\n", + " $\n", + " \n", + "Action `2` (Listen)\n", + "$\\\\\n", + " P(2) = \n", + " \\left[ {\\begin{array}{cc}\n", + " 1.0 & 0.0 \\\\\n", + " 0.0 & 1.0 \\\\\n", + " \\end{array}}\\right] \\\\\n", + " \\\\\n", + " $\n", + " \n", + "
\n", + "
\n", + "Our observation probabilities can be defined as:\n", + "
\n", + "
\n", + "$\\\\\n", + " O(0) = \n", + " \\left[ {\\begin{array}{ccc}\n", + " Open left & TL & TR \\\\\n", + " Tiger: left & 0.5 & 0.5 \\\\\n", + " Tiger: right & 0.5 & 0.5 \\\\\n", + " \\end{array}}\\right] \\\\\n", + " \\\\\n", + " $\n", + "\n", + "$\\\\\n", + " O(1) = \n", + " \\left[ {\\begin{array}{ccc}\n", + " Open right & TL & TR \\\\\n", + " Tiger: left & 0.5 & 0.5 \\\\\n", + " Tiger: right & 0.5 & 0.5 \\\\\n", + " \\end{array}}\\right] \\\\\n", + " \\\\\n", + " $\n", + "\n", + "$\\\\\n", + " O(2) = \n", + " \\left[ {\\begin{array}{ccc}\n", + " Listen & TL & TR \\\\\n", + " Tiger: left & 0.85 & 0.15 \\\\\n", + " Tiger: right & 0.15 & 0.85 \\\\\n", + " \\end{array}}\\right] \\\\\n", + " \\\\\n", + " $\n", + "\n", + "
\n", + "
\n", + "The rewards of this POMDP are defined as:\n", + "
\n", + "
\n", + "$\\\\\n", + " R(0) = \n", + " \\left[ {\\begin{array}{cc}\n", + " Openleft & Reward \\\\\n", + " Tiger: left & -100 \\\\\n", + " Tiger: right & +10 \\\\\n", + " \\end{array}}\\right] \\\\\n", + " \\\\\n", + " $\n", + " \n", + "$\\\\\n", + " R(1) = \n", + " \\left[ {\\begin{array}{cc}\n", + " Openright & Reward \\\\\n", + " Tiger: left & +10 \\\\\n", + " Tiger: right & -100 \\\\\n", + " \\end{array}}\\right] \\\\\n", + " \\\\\n", + " $\n", + " \n", + "$\\\\\n", + " R(2) = \n", + " \\left[ {\\begin{array}{cc}\n", + " Listen & Reward \\\\\n", + " Tiger: left & -1 \\\\\n", + " Tiger: right & -1 \\\\\n", + " \\end{array}}\\right] \\\\\n", + " \\\\\n", + " $\n", + " \n", + "
\n", + "Based on these matrices, we will initialize our variables." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's first define our transition state." + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "metadata": {}, + "outputs": [], + "source": [ + "t_prob = [[[0.5, 0.5], \n", + " [0.5, 0.5]], \n", + " \n", + " [[0.5, 0.5], \n", + " [0.5, 0.5]], \n", + " \n", + " [[1.0, 0.0], \n", + " [0.0, 1.0]]]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Followed by the observation model." + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "metadata": {}, + "outputs": [], + "source": [ + "e_prob = [[[0.5, 0.5], \n", + " [0.5, 0.5]], \n", + " \n", + " [[0.5, 0.5], \n", + " [0.5, 0.5]], \n", + " \n", + " [[0.85, 0.15], \n", + " [0.15, 0.85]]]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "And the reward model." + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": {}, + "outputs": [], + "source": [ + "rewards = [[-100, 10], \n", + " [10, -100], \n", + " [-1, -1]]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's now define our states, observations and actions.\n", + "
\n", + "We will use `gamma` = 0.95 for this example.\n", + "
" + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "metadata": {}, + "outputs": [], + "source": [ + "# 0: open-left, 1: open-right, 2: listen\n", + "actions = ('0', '1', '2')\n", + "# 0: left, 1: right\n", + "states = ('0', '1')\n", + "\n", + "gamma = 0.95" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We have all the required variables to instantiate an object of the `POMDP` class." + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "metadata": {}, + "outputs": [], + "source": [ + "pomdp = POMDP(actions, t_prob, e_prob, rewards, states, gamma)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can now find the utility function by running `pomdp_value_iteration` on our `pomdp` object." + ] + }, + { + "cell_type": "code", + "execution_count": 45, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "defaultdict(list,\n", + " {'0': [array([-83.05169196, 26.94830804])],\n", + " '1': [array([ 26.94830804, -83.05169196])],\n", + " '2': [array([23.55049363, -0.76359097]),\n", + " array([23.55049363, -0.76359097]),\n", + " array([23.55049363, -0.76359097]),\n", + " array([23.55049363, -0.76359097]),\n", + " array([23.24120177, 1.56028929]),\n", + " array([23.24120177, 1.56028929]),\n", + " array([23.24120177, 1.56028929]),\n", + " array([20.0874279 , 15.03900771]),\n", + " array([20.0874279 , 15.03900771]),\n", + " array([20.0874279 , 15.03900771]),\n", + " array([20.0874279 , 15.03900771]),\n", + " array([17.91696135, 17.91696135]),\n", + " array([17.91696135, 17.91696135]),\n", + " array([17.91696135, 17.91696135]),\n", + " array([17.91696135, 17.91696135]),\n", + " array([17.91696135, 17.91696135]),\n", + " array([15.03900771, 20.0874279 ]),\n", + " array([15.03900771, 20.0874279 ]),\n", + " array([15.03900771, 20.0874279 ]),\n", + " array([15.03900771, 20.0874279 ]),\n", + " array([ 1.56028929, 23.24120177]),\n", + " array([ 1.56028929, 23.24120177]),\n", + " array([ 1.56028929, 23.24120177]),\n", + " array([-0.76359097, 23.55049363]),\n", + " array([-0.76359097, 23.55049363]),\n", + " array([-0.76359097, 23.55049363]),\n", + " array([-0.76359097, 23.55049363])]})" + ] + }, + "execution_count": 45, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "utility = pomdp_value_iteration(pomdp, epsilon=3)\n", + "utility" + ] + }, + { + "cell_type": "code", + "execution_count": 46, + "metadata": {}, + "outputs": [], + "source": [ + "import matplotlib.pyplot as plt\n", + "%matplotlib inline\n", + "\n", + "def plot_utility(utility):\n", + " open_left = utility['0'][0]\n", + " open_right = utility['1'][0]\n", + " listen_left = utility['2'][0]\n", + " listen_right = utility['2'][-1]\n", + " left = (open_left[0] - listen_left[0]) / (open_left[0] - listen_left[0] + listen_left[1] - open_left[1])\n", + " right = (open_right[0] - listen_right[0]) / (open_right[0] - listen_right[0] + listen_right[1] - open_right[1])\n", + " \n", + " colors = ['g', 'b', 'k']\n", + " for action in utility:\n", + " for value in utility[action]:\n", + " plt.plot(value, color=colors[int(action)])\n", + " plt.vlines([left, right], -10, 35, linestyles='dashed', colors='c')\n", + " plt.ylim(-10, 35)\n", + " plt.xlim(0, 1)\n", + " plt.text(left/2 - 0.35, 30, 'open-left')\n", + " plt.text((right + left)/2 - 0.04, 30, 'listen')\n", + " plt.text((right + 1)/2 + 0.22, 30, 'open-right')\n", + " plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 47, + "metadata": {}, + "outputs": [ + { + "data": { + 
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAYEAAAD8CAYAAACRkhiPAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAIABJREFUeJzsnXlcVNX7xz9nZthXWRSU1Q0UJBREQITUci/XMpc0yg03yi3zq4ZkmltuZWaWmfnT1EpLLS0zFTUVRRQXElkURUUUBVkHnt8fA8Q4wzZzZwHO+/WalzJz7znPXc793HOec56HERE4HA6H0zgR6doADofD4egOLgIcDofTiOEiwOFwOI0YLgIcDofTiOEiwOFwOI0YLgIcDofTiFFbBBhjxoyxs4yxeMbYFcbYorLvv2WMpTDGLpZ9fNU3l8PhcDhCIhGgjEIAPYgolzFmACCGMfZb2W+ziWiPAHVwOBwORwOoLQIkW22WW/anQdmHr0DjcDicegATYsUwY0wM4DyA1gA+J6L3GWPfAgiCrKdwBMBcIipUsu8EABMAwMzMzM/T01NtexoKiXl5AAAPU1MdW8Lh6Ae8TSjn/PnzD4nIXpV9BRGBisIYswbwM4BpALIA3ANgCGATgJtEFF3d/v7+/hQbGyuYPfWdF+PiAAB/d+yoY0s4HP2AtwnlMMbOE5G/KvsKOjuIiLIB/A2gDxFlkIxCAFsABAhZF4fD4XDUR22fAGPMHkAxEWUzxkwAvARgGWPMkYgyGGMMwCAACerW1diY7+qqaxM4HL2CtwnhEWJ2kCOArWV+ARGAXUS0nzH2V5lAMAAXAUwSoK5GxUs2Nro2gcPRK3ibEB4hZgddAqAwQEdEPdQtu7FzMScHAOBrYaFjSzgc/YC3CeERoifA0RDvJiUB4E4wDqcc3iaEh4eN4HA4nEYMFwEOh8NpxHAR4HA4nEYMFwEOh8NpxHDHsB6zpGVLXZvA4egVvE0IDxcBPSbYykrXJnA4egVvE8LDh4P0mFNPnuDUkye6NoPD0Rt4mxAe3hPQY+YlJwPgc6I5nHJ4mxAe3hPgcDicRgwXAQ6Hw2nEcBFQg7feegt79lSfPfP69evw9fVFx44dcf78eWzYsEFL1nGqw9zcHABw9+5dDBs2rMrtsrOz+TXjqMTGjRvx3XffVbvNt99+i6lTpyr9bcmSJZowSwEuAhpm7969GDhwIOLi4mBra8sfKHpG8+bNqxVyLgIcVZBKpZg0aRLGjBmjchlcBFTk008/hbe3N7y9vbFmzRqkpqbC09MTY8eOhY+PD4YNG4a8shR158+fR1hYGPz8/NC7d29kZGQAAF588UW8//77CAgIQNu2bXHixIka61VW1sGDB7FmzRps3rwZ3bt3x9y5c3Hz5k34+vpi9uzZNZa5pnVrrGndWr0TwqmW1NRUeHt7AwCuXLmCgIAA+Pr6wsfHBzdu3FB6zVasWIHOnTvDx8cHH374YUU57dq1w/jx4+Hl5YVevXohPz9fZ8fVUKlrm9Dm8+DFF1/EvHnzEBYWhrVr1yIqKgorV64EAJw7dw4+Pj4ICgrC7NmzK+45QNYb7dOnD9q0aYM5c+YAAObOnYv8/Hz4+vpi1KhRKp2rWkNEevPx8/MjdYiNjSVvb2/Kzc2lnJwcat++PV24cIEAUExMDBERhYeH04oVK6ioqIiCgoLowYMHRES0c+dOCg8PJyKisLAwmjFjBhERHThwgHr27Km0vrFjx9Lu3burLevDDz+kFStWEBFRSkoKeXl5qXWMHGEwMzMjIvlrMnXqVPr++++JiKiwsJDy8vIUrtmhQ4do/PjxVFpaSiUlJdS/f386duwYpaSkkFgspri4OCIieu2112jbtm1aPipOZbT9PAgLC6OIiIiKvyu3fS8vLzp58iQREb3//vsV99SWLVvI3d2dsrOzKT8/n1xcXOjWrVtE9N89WhsAxJKKz10hMosZAzgOwAiyKad7iOhDxpg7gJ0AbABcAPAmERWpW191xMTEYPDgwTAzMwMADBkyBCdOnICzszO6du0KABg9ejTWrVuHPn36ICEhAS+//DIAoKSkBI6OjhVlDRkyBADg5+eH1NTUautNTEystixV+fPRIwA8kYa2CAoKwscff4z09HQMGTIEbdq0Udjm8OHDOHz4MDqWTVHMzc3FjRs34OLiAnd3d/j6+gKo3X3DqTt1aRO6eB4MHz5c4bvs7Gzk5OQgODgYADBy5Ejs37+/4veePXvCqmwRXPv27ZGWlgZnZ+caj08ohFgnUAigBxHlMsYMAMQwxn4DMAPAaiLayRjbCOAdAF8IUF+VyARREVmGS/m/iQheXl44ffq00n2MjIwAAGKxGFKpFAAQHh6OuLg4NG/eHAcPHpSrt7qyVGVxWhoALgLaYuTIkejSpQsOHDiA3r17Y/PmzWj5XJgCIsIHH3yAiRMnyn2fmppacc8AsvuGDwcJT13ahC6eB+WCUxs7ni/7+fK1hdo+gbLeSG7ZnwZlHwLQA0C5x20rZHmGNUpoaCj27t2LvLw8PHv2DD///DO6deuGW7duVVzcHTt2ICQkBB4eHsjMzKz4vri4GFeuXKm2/C1btuDixYtyAgCg1mVZWFggpywzEkf/SE5ORsuWLTF9+nS8+uqruHTpksI16927N7755hvk5spu+Tt37uDBgwe6MplTDbp6HjxPkyZNYGFhgX/++QcAsHPnzlrZb2BggOLi4lptqw6COIYZY2LG2EUADwD8AeAmgGwiKpe0dAAthKirOjp16oS33noLAQEB6NKlC8aNG4cmTZqgXbt22Lp1K3x8fPDo0SNERETA0NAQe/bswfvvv48XXngBvr6+OHXqlEr11rYsW1tbdO3aFd7e3rVyDHO0yw8//ABvb2/4+vri+vXrGDNmjMI169WrF0aOHImgoCB06NABw4YN48Kup+jqeaCMr7/+GhMmTEBQUBCIqGL4pzomTJgAHx8fjTuGWU1dlToVxpg1gJ8BLASwhYhal33vDOAgEXVQss8EABMAwMXFxS+trLsnFKmpqRgwYAASEhIELVcbvBgXB4AvkedwylG3TejqeZCbm1uxNuWTTz5BRkYG1q5dK1j5jLHzROSvyr6CThElomwAfwMIBGDNGCv3OTgBuFvFPpuIyJ+I/O3t7YU0h8PhcPSCAwcOwNfXF97e3jhx4gTmz5+va5MqULsnwBizB1BMRNmMMRMAhwEsAzAWwI+VHMOXiKjaVTf+/v4UGxurlj0NicSy+csepqY6toTD0Q94m1COOj0BIWYHOQLYyhgTQ9az2EVE+xljVwHsZIwtBhAH4GsB6mpU8Budw5GHtwnhUVsEiOgSAIUBOiJKBhCgbvmNmV8fPgQAvGJnp2NLOBz9gLcJ4eH5BPSYVbdvA+A3PIdTDm8TwtPgYgdxOBwOp/ZwEeBwOJxGDBcBDofDacRwEeBwOJxGDHcM6zHb2rXTtQkcjl7B24TwcBHQY5yNjXVtAoejV/A2ITx8OEiP+eHBA/zAI1RyO
BXwNiE8etUTuHdP1xboF1/cuQMAGN60qY4t4XD0A94mFPm/y/+n1v561RO4cweoRTpfDofD4QC4m3MXEQci1CpDr0TA0BCYMAEoLNS1JRwOh6P/RP4eiUKpeg9MvRIBFxfg+nVg2TJdW8LhcDj6zf5/92PP1T1YGLZQrXL0SgSsrIA33gA+/hhITNS1NRwOh6Of5BblYvKByfCy98Ks4FlqlaVXjmEAWLMG+P13YOJE4OhR4Lmc0I2KPV5eujaBw9EreJuQseCvBbj99DZOvn0ShmJDtcrSq54AADRrBixfDhw7BmzZomtrdIudoSHsDNW7wBxOQ4K3CeD83fNYd3YdJvlNQrBzsNrl6Z0IAMA77wDdugGzZgGNeUrwtxkZ+DYjQ9dmcDh6Q2NvE9JSKcb/Oh7NzJph6UtLBSlTbRFgjDkzxo4yxq4xxq4wxiLLvo9ijN1hjF0s+/SrtVEi4MsvgdxcYMYMdS2sv3x77x6+5YsnOJwKGnubWHdmHeLuxWFd33WwNrYWpEwhegJSADOJqB1kCeanMMbal/22moh8yz4H61Jou3bABx8A27cDhw4JYCWHw+HUY1KzU7Hg6AIMaDsAQ9sNFaxctUWAiDKI6ELZ/3MAXAPQQt1yAZkItG0LREQAZfmlORwOp9FBRJhycAoYGD7v9zmYgDNmBPUJMMbcIMs3fKbsq6mMsUuMsW8YY02q2GcCYyyWMRabmZkp95uxMbBpE5CSAkRHC2kph8Ph1B92X92NgzcOYnGPxXCxchG0bMFEgDFmDuBHAO8S0VMAXwBoBcAXQAaAVcr2I6JNRORPRP729vYKv4eFAW+/DaxcCVy6JJS1HA6HUz94nP8Y03+bDj9HP0wLmCZ4+YyI1C+EMQMA+wEcIqJPlfzuBmA/EXlXV46/vz/FxsYqfP/oEeDpCbi7A6dOAWKx2ibXC/JKSgAApo3lgDmcGmiMbWLirxOxOW4zzo0/h06OnZRuwxg7T0T+qpQvxOwgBuBrANcqCwBjzLHSZoMBJKhah40NsHo1cPYs8MUXqtta3zAVixvVzc7h1ERjaxMxt2Kw6cImvNvl3SoFQF3U7gkwxkIAnABwGUBp2dfzAIyAbCiIAKQCmEhE1U7wraonAABEQJ8+wOnTwLVrQAtBXM/6zYaysLmTG8PBcji1oDG1iaKSInT8siOeFT1DwuQEmBuaV7mtOj0BtcNGEFEMAGWu6jpNCQWA/Pz8Kn9jTNYL8PYGpk0DfvqprqXXP3aVrZRrDDc8h1MbGlObWH5yOa5mXsWBkQeqFYCHDx+qVY9erRi+evUqJBIJunbtintKFoS0bAl8+CHw88/A3r06MJDD4XC0wL9Z/2Lx8cV43et19GujuM62oKAAw4cPh7GxMZRNqKkLeiUCAFBSUoJTp07B0dERhoaGGDx4MHJzcyt+nzED8PEBpk4Fnj7VoaEcDoejAYgIk/ZPgrHEGGv7rK34XiqV4t1334W5uTlMTEywa9cuFAqQfEXvRKAyxcXF2Lt3LywsLGBqaoqIiAgwJsVXXwF37wLz5+vaQg6HwxGWrfFbcTT1KJa/vBwO5g5Ys2YNbG1tYWBggLVr1+LZs2eC1qdXIuDp6YnQ0FAYGRkp/Jafn4+NGzfCwMAAvXs3QUDAx/jsM9mMIQ6Hw2kIZD7LxMzDM+Fp6omoV6PAGMN7772HR48eKWwrkUjQoUMH7Nq1S606BVknIBSVZwfdunULM2bMwOHDh5GTk1PlPmJxM3z11XKEh4/RlpkcDocjOKdPn0bvL3sjxyUH2AggU3EbIyMjdOnSBcuWLUNgYGDF9zpdJ6ApXFxcsGfPHjx9+hQ5OTmYOnUq7OzsFLYrKbmPt98eC5FIhFatWuH48eM6sJbD4XDqTmpqKgICAiAWixE8Ohg57jlADOQEwMLCAkOHDkVaWhoKCgpw7NgxOQFQF70VgcqYm5tj/fr1yMzMhFQqxSeffAJXV1e5IEpEhOTkZISFhYExBl9fXyTW8xyVK2/dwspbt3RtBoejNzSENpGdnY0+ffrAwMAA7u7uOHfuHEpFpcAAAFkATgB2dnaYOnUqcnJy8PTpU+zZswcuLsLGDCqnXohAZcRiMd5//32kpqaitLQUn322E4x5A5BfRRgfHw9PT08wxtCjRw+159Lqgv1ZWdiflaVrMzgcvaG+tgmpVIoxY8bAxMQETZo0waFDhyCVSv/bIAyADTDeYTykBVJkZmZi/fr1MDeven2AUNQ7EXieKVOGY+3aywCkWLToNEJCQmD4XPq5o0ePwt7eHgYGBhg+fDgKCgp0YyyHw2lU/O9//4OlpSUMDAywbds2hWePt7c3ln+7HJIwCd7yfQub5m2CWMthMeq9CADA5MlAQADw2WeB2LfvBAoLC5GcnIxBgwbBzMysYjupVIpdu3bBxMQEJiYmmDlzprwaczgcjpps2rQJtra2YIxhyZIlchNbxGIxQkJCcPr0aRAR4i/F4yfpT7A2tsbKl1fqxN4GIQJiMfDVV7Joo3PmyL5zd3fHzz//jNzcXGRnZyMiIgLW1v+lYysoKMCnn34KAwODCp8Dh8PhqMLvv/8OBwcHMMYwceJEuSmdRkZGGDRoEJKTkyGVSnHixIkKx+7G2I34J/0frO69GramtjqxvUGIACBbRTxzJvD118CxY/K/WVlZYcOGDXj8+DGkUik++ugjuZlGz549w/Tp08EYg4WFBXbv3q1l65VjIhbDpBFFTORwakKf2sSlS5fg7OwMxhj69u2L+/fvV/xWvrg1OzsbBQUF+Pnnn+Hu7i63/52nd/DBkQ/wcsuXMarDKG2b/x9EpDcfPz8/Uodnz4jc3Yk8PIgKCmq3z7Zt28jBwYEgi3Yq97G0tKQ//vhDLZs4HE7DIT09nVxdXZU+L8zMzOjDDz8kqVRaq7KG/DCEjBcbU1JWktp2AYglFZ+7DaYnAACmprJIo4mJwNKltdtn9OjRyMjIABHh2LFjaFEpOuHTp0/x8ssvgzGGJk2a4OTJkxqynMPh6Cv37t2Du7s7GGNwcnJCWlpaxW8WFhb46quvQETIzc1FVFRUrRy7vyT+gp+u/YSFoQvRyqaVJs2vGVXVQxMfdXsC5YwcSWRoSHTtmuplXL9+nZycnJQqvrW1NR0+fFgQW6sjOiWFolNSNF4Ph1Nf0FabuHnzJrm5uVU5QrBv3z6Vy35a8JScPnUi7w3eVCQtEsRe6LInwBhzZowdZYxdY4xdYYxFln1vwxj7gzF2o+xfpYnmNcHq1YCZGTBhAlBaWvP2yvDw8MDt27dBREhNTYWTk1PFb9nZ2ejVq1dFD2Hnzp0CWS7PkcePceTxY42UzeHURzTZJs6ePQs3NzcwxtCqVSukpqZW/GZpaYnDhw+DiPDkyRO8+uqrKtez4OgC3Hl6B1+98hUMxAYCWK4eQgwHSQHMJKJ2AAIBTGGMtQcwF8ARImoD4EjZ39Vy/fp1DBs2DPPmzcPOnTuV5hSoDU2bAitWACdOAN98o1IRcri6ulYIwvXr1+Ho+F/mzOzsbIwYMQKMMVhbW2P58uUoKcuD
yuFw9Jvdu3fDyckJjDF06dJFYajnl19+qXjwv/zyy2rXd+7OOaw7sw4R/hEIdFIt9EN2djb27duHRYsWYcSIEejWrZtaNgkeQI4xtg/AZ2WfF4kooyzf8N9E5FHDvtUawxiDSCSCRCKBkZERTE1NYWVlhWbNmsHZ2Rnt2rVDQEAAgoKCYGZmju7dgfh44Pp1oFkz4Y6xnNOnT2Pw4MFyswLKMTc3x1tvvYWlS5eqvOrvxbg4AMDfHTuqZSeH01BQt02UlJRgzZo1WLFihdJ2a2Zmhs8//xxjx45Vy05lSEul6PxVZ9zPvY9rU67BCEaIjY3FmTNncOXKFdy6dQv37t3D48eP8ezZMxQWFqK4uBilpaWoxXNa5QBygooAY8wNwHEA3gBuEZF1pd8eE5HCkBBjbAKACQBgbm7u161bN9y7dw9ZWVnIyclBQUEBiouLUVJSUpsTUZVlEItFMDAwgLGxMczMzGBjYwNHR0e4urqiQ4cOCAwMRMeOHSGRqJZxc8+ePQrzg8sxMTFB//79sWbNGjnHc01wEeBw5FGlTeTn52Pu3Ln4/vvvq2yf//vf//C///1PJZukUikSExNx8uRJXL58GcnJycjIyEBWVhZyc3ORn5+P4uJiSAOkQC8APwC4Vrc6yl+An3+GNWvWDO7u7vjqq690LwKMMXMAxwB8TEQ/McayayMClaku0fzzFBQU4OzZszhz5gyuXbuG1NRU3L9/H9nZ2cjNzUVhYSGkUilKSkoh8+fUHZFIBLFYXHHiLS0tYWNjAycnJ7i7u8PX1xchISFo3bq13H6rVq3CRx99hCdPniiUaWhoiKCgICxfvhwBAQHV1j80IQEA8KO3t0r2czgNjdq2iTt37uDdd9/FwYMHkZeXp/C7kZERwsPDsX79erkXv3v37uHvv//GpUuX8O+//+L27dvIysrCkydPkJ+fj6KiIpSUlKC0rs5GawCTASQDot0iSMSy0QwzMzNYWlqiWbNmcHFxgYeHBzp37oyQkJA6jSCoE0paEBFgjBkA2A/gEBF9WvZdIuo4HFQXEagthYWAry9QUAAkJAD5+Q9x8uRJnD9/Hjdu3MCtW7eQmZmJJ0+eIC8vD0VFRZBKpRWe87rCGANjDBKJBAYGBigtLUV+fr7SbcViMby8vLBw4UIMHTpU3UPlcBo158+fx+zZs3HixIkqw8EYGBhAIpGgtLQUUqm0tkMtSil/STQ0NISJiQksLS1hZ2cHZ2dntGrVCh07dkRISAhatGiBfv/XDzG3YnB18lU4Wzmrc5hK0akIMFk8560AHhHRu5W+XwEgi4g+YYzNBWBDRHOqK0sTIgAAx48DYWHA7NnA8uWqlZGUlIRTp04hPj4eSUlJuHv3Lh4+fIicnJyK7p5KbwjPIZFIYGpqCjMzMzRp0gT29vZwc3ODl5cX/Pz8EBwcDGNjY7Xq4HDqC1KpFHFxcfjnn39w+fJlpKWlISMjA48ePcKzZ8/w7NkzFBcXq1UHY0yux29hYQFbW1s4ODigVatWFcPF7du3V2m4eGfCToz4cQTW9F6DyMBItWytCl2LQAiAEwAuAyh/As4DcAbALgAuAG4BeI2IFAfkKqEpEQCA8eOBLVuA2FhZz0AblN/AZ8+eRUJCApKTk3Hv3j08fPgQ9+/fF2QW0fM3sLm5OWxsbNC8eXO4ubnBx8cHwcHBKt/AHI5QJCUlISYmBhcvXkRKSgrS09PlfH9FRUUoLS1V+0UKQMX0bUdHRzRt2rTiRapz584ICAjQ2ovU4/zH8PzcEy5WLvjnnX8gFmkm5IXOh4OEQpMi8Pgx4OkJuLoCp0/Lgs7pA1euXMGoUaNw+fLlKm9+kUgEMzMzGBsbo7CwsMLfIWRX1sLCAnZ2dnBycqroyoaGhsqtj+BwAODhw4c4fvw4Ll68iMTERNy+fRsPHz5EdnZ2xbi5UEOqhoaGMDIyQklJCXJzc6uN+uvi4oLPP/8cAwYMUOfwBGXCrxPwTdw3iJ0QC18Hzb19chGoJTt2ACNHAmvXAtOna6walfnrr78wbtw4pKamVtl4jI2NERwcjFWrVsH3uS7NvXv3EBMTgwsXLuDGjRtIT0/Hw4cP5fwd6gxZlc9QKBcPMzMzWFlZoWnTpnBxcUHbtm3h5+eHkJAQuYitHP2koKAAp06dwvnz53HlyhWkpqYiMzNTboqiUC8bBgYGCi8bLVu2xAsvvIDg4GCFyRX37t3Du+++i99++w1Pnz6tsnw7Ozt89NFHmDRpkkr2aZITaScQ+m0oZgXNwopeKzRaFxeBWkIE9OsHxMQAV68CzsL7ZwTju+++wzvvvQepkilt5UgkEnh7e2PRokUqr2CUSqW4efMmYmJicOnSJSQnJ1f4OypPb1Nnim7l6W3lMyLKp7e5ubnB29sb/v7+CAwM5ENWKiCVSnH16lWcOnUKly5dQmpqKu7evYtHjx4hNzdXkGnWz09RLB92dHBwQMuWLeHt7Y2AgAC1pllfvHgRM2fOxKlTp6pN/CQyNcX7kZFYsmSJSvVog0JpIXy/9EV+cT6uTL4CM0OzmndSAy4CdSAlBfDyAnr1Avbu1WhValM+J7rn/v1YtWqV0imn5YhEIri5uWHatGmYNm2axrMTFRQUIC4uDmfOnEFCQgLS0tJw7949PHr0CHl5eRUPHnXeIiv7O0xMTGBubg47Ozs4ODigdevW8PHxQWBgIDw8PBqMeKSnp+P48eOIi4vDzZs3K3pz5RMQhOrNVV5wWT4BwdXVFe3bt4efnx+CgoK0ktpw//79WLhwIS5fvlztUI+xsTFee+01JE+bBolEovdrZ6KPRePDvz/EwZEH0bdNX43X12BEoKYVwxwOh6P32AKIgGxB2I9aq1VlEWhQoaQ5HA5H57wCoBjA77o2pHbolQj4+flpLWz12bMExghTpug+hLa6n9u3byMoKEhhCKi8218ZY2Nj9OzZE/Hx8Tq3W9Of4uJixMfH44svvsCkSZMqFu6Ym5tDLBZDtsRFeMpntpiamsLBwQEBAQEIDw/H6tWrcerUKeTn5+v83Gj6k5GRgZEjR8LKykrpuXn+u7Zt2+LUqVM6t1vdz9cXvgbcgE2vbQLlaq9ete5XdQsQEm34BCoTGQmsXw+cOgUEqhbQT6O8e+MGAGBNmza13ufChQt48803ce3aNbmbo3ylZOXVyxKJBD4+Pli0aJFeTaurCWWzoDIzM/H06VPBZkEp+xBRhY9D3bns1YUkad26NTp06KA0JIk+c+nSJcycORMnT56Uu88MDAwgFosVnL3NmzfH2rVrMWzYsFrXoUqb0BYPnj2A52ee8G7qjb/f+hsipr137AbjE9C2COTkAO3bA02aAOfPAwa6D+0th7oB5Pbv34/Jkyfj9u3bct+bmZnB0NAQjyvFZReJRGjZsiUiIyMRERGhcccyAOTm5uL06dM4e/YsEhMTkZaWhvv371dMadXEeghLS0vY2trCyckJbdq0qZii6ObmpvbxPHz4X0iSxMTECnHS1Px5ExMTWFtbw97eHk5OTvDw8ICfnx+6du0ql0Nbkxw8eBA
LFizApUuX5By75b2tp0+fyh2rjY0N5s2bh5kzZ6pUnz4HVRz902jsurIL8ZPi0c6+nVbr5iKgBvv2AYMGydJRzq0x44F2EfKG37hxIxYsWICHDx9WfMcYg52dHYyMjHD37l25t1sHBwe8+eabWLx4MQwNDastu6qV0c9PURRqplBDWxldvpL28uXLciFJnj59Kje9U6gZQeUhScqDlnl5eaFLly61WklbUlKCTZs24dNPP0VycrKcTfb29jA0NMS9e/fkVsObmZlh3LhxWLlypdrXRl9F4FDSIfTZ3gcLQxdiUfdFWq+fi4CaDB0KHDwoCzDXSsfpPiujqRv+/fffx4YNG5Cbm1vxnUgkQquyg09KSpJ7WDPGKrr0ssis+vFAasxUjqlz9erVivDFjx8/Flx4RSIRGGMV5VX+3dHREba2trh+/bpcDB8jIyMMHDgQW7duFfQ66qMI5BXnwXvenOb2AAAgAElEQVSDNwzEBoifFA9jifbvW3VEoH69MmmIdeuAP/4AIiKAQ4cADfkLtUp2djaOHTtWbbRUkUhU0ahLS0txo2y89XmICEVFRQrflz/MKw9NlEdR9PDwgK+vL0JDQ7U2NNGYkEgk6Ny5Mzp37lyn/Wo7BFe+sKy6uftEhLt37+Lu3bsKv5WWluLAgQNwdnZWOgTXkEKSRB+LRkp2Cv4e+7dOBEBdeE+gjM8/B6ZOBb7/Hhg1SicmKDAhMRHSggK89eRJjXkT1B03LxeE59/wRSIROnXqBAcHB5w4cUJuwZqBgQF8fHwQHR2Nfv36qXWsHN2TkJCAWbNm4fjx43KOXRMTE/j7+8PMzAxHjx5FYWGh3H7ls6wYYxoLSeLp6YmOHTtil709jK2tscmj2qj0WuPS/Uvo9GUnjH1hLL4e+LXO7ODDQQJQUgJ07QrcvClLR2lrK2z55Uv7y0Pi3rx5U/AMalVlH6prBrWkpCSMGDECFy5ckGvUlpaWiIiIQGlpKbZt2yaXA7p8OGnmzJmYOHGiSvZztM+hQ4cwf/58xMfHyw3nWFlZ4ZVXXkFwcDCio6PlrjVjDG5ubti8eTN69OhRZdnlGbf++ecfXLp0CUlJSRVRdDURksTY2BimpqYV4SxcXV01GpKkpLQEwd8EI+VxCq5PvQ4bExtBy68LXAQE4tIlwM8PePPN6hPUp6enIyYmBnFxcbhx4wbu3LlT4cjT1NL+qnIpa3ppf0xMDN5++20FP4G9vT0WL16MoqIirF27VsFJ6OjoiLFjx2LRokU1OpY52uXLL7/EqlWrcPPmTaWTAXr27InJkycjJSVF7po3a9YMy5cvx5gxYzRqX0FBAWJjYxEbG4uEhISK3m95DoE65t5VSlUhSZo3b46WLVvCx8cHISEhaNWqVZXi8dnZzzDtt2n4fvD3GOWj2+EDLgIqkpubi5iYGJw7dw6JiYm4desWrly5j0ePnsLU9Bmk0sKKh7lQUxStrKxga2sLZ2dntG3btuJmUzY+OiExEQD0puu7Z88eREZGKowBu7i44Msvv4RUKlUaB8ba2hqvvvoqVq9eDRsb3b0tNVaKioqwaNEifPvtt3LXrvK04B49emD06NGIj4+XEwYrKyvMnDkTCxYs0IXpClTXJrKzsytezq5fv45bt27hwYMHePLkCZ49eyb3cqZue5bYSFDwTgGMHxrD55IPnJ2c0aZNG3Tq1AkhISFwcHBQ6zjris5FgDH2DYABAB4QkXfZd1EAxgPILNtsHhEdrK4cVUVAKpXi3LlzOHPmDK5evYqUlBTcu3evIiSupoKZ2drawtHREa1bt4aXlxe6du0qaDAzfZwJUc6qVavw8ccfy601YIzBy8sL27dvR2lpKWbNmoWTJ0/KLRIyNTVFaGgoPv30U7Rrp9251I2JR48eYcaMGfjll1/krpFEIkGHDh0QHR2NwMBAvP766zh+/LjclE4TExOMHj0aGzZs0LvptppoE6mpqTh16lTdgvYNB9AawAYAj6souIyagvZ5eHio3bPXBxEIBZAL4LvnRCCXiFbWthx/f3/avn17xZzpmzdv4u7duwrZh4QOa9ykSRM4ODhUTFEUibpgxgx/LFhgjOholaoRBH0WgXKkUilmzZqFzZs349mzZxXfi8ViBAUFYffu3QCAGTNm4ODBgwqOZV9fX3z88cd4+eWXtW57Q+PatWuYMWMGjh8/Lpdc3djYGF27dsXKlSvh6emJ8PBw/Pzzz3IOXgMDA/Tt2xfbt2/XSvRQVdGHNrH3+l4M/mEwIr0i4fnQUy/CdxsYGKgsAkLGrnADkFDp7ygAs+pYBtX2wxgjkUhEhoaGZG5uTk2bNqW2bdtSSEgIvfHGGxQVFUW//vorPX78mFRh9GgiAwOiK1dU2l0Qwi5coLALF3RnQB3Jz8+nYcOGkaGhody1MjQ0pMGDB1N+fj7l5eXRe++9R82aNZPbRiQSUdu2bWnz5s26Pox6xZEjRyggIEDhnFtZWdGIESMoIyODiouLacaMGWRmZia3jVgspqCgILp9+7auD6PW6LpNPCl4Qi1WtSCfL3yoSFpU5/3z8/PpyJEjtHz5cho7dix1796d2rdvT46OjmRpaUlGRkYkFoupLKJyXT6xpOqzW9UdFQpSLgKpAC4B+AZAkyr2mwAgFkAsY4xsbW3J3d2d/P39adCgQTRr1izaunUrpaSk1PmEq8ODB0Q2NkQhIUQlJVqtugJd3/DqkJmZSWFhYSQWi+VuVlNTU4qIiKDi4mKSSqW0evVqatmyJYlEIrntWrRoQfPnz6fCwkJdH4resXnzZvLw8FA4Z82aNaP33nuP8vLyiIho3bp1ZGNjo/Dy1L59ezp//ryOj0I1dN0mph2cRiyK0T+3/9FqvZmZmfTjjz/SggUL6PXXX6egoCBq06YN2dvbk7m5ud6KQDMAYsgilX4M4JuayvDz89PUOVSJb76RnaFNm3RTf+S//1Lkv//qpnIBSUhIIB8fH4W3G2tra1q6dGnFdj/99BP5+vqSRCKR265JkyYUHh5OWVlZOjwK3VFYWEgLFy6kFi1aKPSeWrZsSatXryapVEpERD/++CM1b95c4U3R2dmZfv31Vx0fifrosk2cST9DLIrR1ANTdVJ/deilCNT2t8offROB0lKiF18ksrIiysjQtTUNgyNHjpC7u7uCIDg4OND27dsrtouNjaXu3buTsbGxQk+iX79+lJiYqMOj0DxZWVn09ttvK7zJSyQS8vX1pZ9++qli27Nnz1Lbtm0VzqmdnR198cUXOjyKhkORtIh8vvChFqta0JOCJ7o2RwG9FAEAjpX+/x6AnTWVoW8iQESUmEhkZEQ0fLiuLWl4bNmyhZo2baowXNGqVSs6duxYxXbp6en0+uuvk6WlpYKvITAwkI4cOaLDoxCOxMRE6t+/P5mamsodp7GxMXXv3p1iY2Mrtk1JSaHOnTsrDAmZm5vTnDlzdHgUDZNlMcsIUaCfrv5U88Y6QOciAGAHgAzI8umkA3gHwDYAl8t8Ar9UFoWqPvooAkRE0dGyM3XwoHbrHXXlCo3SpWdai0RHR5OVlZ
XCcEfHjh3p+vXrFdvl5eVRZGSkgniIRCLy9PSkLVu26O4gVODo0aMUGBio4Ni1tLSk119/ndLT0yu2zcnJob59+5KBgYGCSLz55ptUXFyswyPRDrpoE8mPkslksQkN2jlIq/XWBZ2LgFAffRWBwkKidu2IXF2JcnO1V6+unWC6oLi4mMaPH08mJiYKwyA9e/akzMzMim2lUimtXLmS3Nzc5IZCGGPUokULWrhwYcVYuT6xdetW8vT0VHCaN23alCIjIyscu0Sy8zF27FiFYTGJREK9evVSefZbfUXbbaK0tJR6b+tNFkss6PYT/Z1FxUVAC5w4ITtbM2dqr87GKAKVycnJoQEDBii8+RoZGdEbb7xB+fn5ctvv2bOHfHx8FBzLNjY2NG7cOMrOztbJcUilUoqKiiInJycFsXJzc6OVK1cqiNWCBQsUhr9EIhH5+/vTjRs3dHIc+oC228T2S9sJUaB1/6zTWp2qwEVAS0yYQCQWE2nrHmzsIlCZ27dvU1BQkMLbs7m5Oc2aNUth+zNnzlBYWBgZGRnJbW9mZkYDBgygpKQkjdqbnZ1N48aNU+rY9fHxoT179ijss3nzZrK3t1fwkbRt25ZOnTqlUXvrC9psE1l5WWS/3J4CvgogaYn+9Sgrw0VASzx6RNSsGZG/P5E2Rhm4CCjn/Pnz1K5dO4XZMDY2NrRuneIbW1paGg0bNowsLCwUHMvBwcFyTmh1SEpKoldffVVhUZaRkRGFhYXRmTNnFPb57bffyMXFRWFKZ/PmzWn37t2C2NWQ0GabeGffOyReJKaLGRe1Up86cBHQIjt3ys7amjWar2vuzZs09+ZNzVdUj/n111/J2dlZ4SHaokUL2rt3r8L2OTk5NHXqVIU3brFYTO3ataNt27bVqf5jx45RcHCwgmPXwsKChg0bRmlpaQr7xMfHk7e3t4KINWnShFauXKnyuWgMaKtN/J3yNyEKNOdw/ZhpxUVAi5SWEvXtS2RmRqSkfXN0yGeffUa2trZKh1POnj2rsL1UKqVly5YpdSw7OztTdHS0Usfytm3bqF27dgpDU/b29jR16lTKyclR2CcjI4NCQkIU9jEzM6PIyMhGMbOnvlBQXEAe6z3IfY07PSt6pmtzagUXAS2TkkJkakr0yisyUeDoH3PmzClfTi/nWA0ICKgyBMmuXbvI29tb4UFtbW1N/v7+1KJFC6WO3WXLlikVi/z8fBo8eLDSWErDhg1TcGxz9IMPj35IiAL9fuN3XZtSa7gI6ICVK2VnT4l/TzCGXL5MQy5f1lwFjYDi4mJ68803FaZYGhgYUN++fZW+tRMR/fHHH+Tg4KAwzFQ+1LN+/foq64uIiFBY8CUWiyksLExuiiun7mi6TVzLvEaGHxnSyB9HaqwOTcBFQAcUFxN17Ejk6EikqZmH3DEsLI8fP6ZevXopTCE1Njam8PBwunHjBg0cOFChB2FoaEi2trYKaxcMDQ2pa9euFBMTQ0uXLiVra2uFoSgfHx9KSEjQ9aE3GDTZJkpKS6jbN92oySdN6H7ufY3UoSm4COiIc+eIRCKiiAjNlM9FQHPcuHGD/P39lb7pl7/tDxkyhJKTk+X2y8nJoYiICLKzs6tyX3d39wYTykLf0GSb+Or8V4Qo0Obz9S+cuToiIAJHZfz9genTgY0bgdOndW0Np7b88MMPGDx4MOLKEpQoIycnBydOnMBff/0l9/2FCxdw+PBhZGVlVblvamoqwsPDsXTpUrmMXRz95X7ufcz+YzZCXUPxdse3dW2OVuEioCYffQQ4OQETJgDFxbq2hqOMkpISLFu2DK6urhCJRHjjjTeQkJCA0tJSuLi4YMmSJZBKpSAi7NixA46OjgCAzMxMjBs3DowxGBoaQiQSISwsDDdv3gQRwcrKCtHR0RVvVNu3b4e3tzdEIhFu3bqFefPmQSKRwN7eHpMnT0Zubq6OzwSnKt479B7yivPw5YAvwRjTtTnaRdUuhCY+9W04qJxffpENrC1ZImy50SkpFK3lZDoNhZycHJoyZYrCsI1YLCZvb2+5sNXKyMzMpFatWikd7rG2tq4xKUtMTAx17dpV6fqBIUOGKF0/wKkZTbSJ3278RogCRR2NErRcbQLuE9A9w4bJQk434rAuOictLY2GDBmidGVwuQO3OvLz8+mNN95QCDVhYGBAbm5uCt+LxWLq1q0bZdSQbCIpKUmpw9nIyIi6detGp0+fFvI0cOpAbmEuua1xI8/PPKmguEDX5qgMFwE94M4dIktLop49+doBbXL69Gnq1q2bwgPa3NycBg4cWKsYQbNmzVJ4QFeVf7c2eZSrIzs7myZOnKiwqK28h7Jr1y61zgenbsw+PJsQBTqWKkzoEF3BRUBP2LBBdka/+06Y8vrEx1Of+HhhCmtA7Nq1izp06KAw1dPW1pYmTpxYq2ihVeXfbdeuXa3z72ZkZFBoaKjSPMpTp06tcRWwVCql6OhocnZ2rtMitMaMkG0iLiOOxIvENG7fOEHK0yU6FwHIEsk/gHxmMRsAfwC4Ufav0kTzlT/1XQRKSoiCgojs7IiEWBPEp4jKUDW8w/Ps3btXIU8vIEz+3drmUa6OrVu31jkcRWNDqDYhLZFS502dqemKpvQo75EAlukWfRCBUACdnhOB5QDmlv1/LoBlNZVT30WAiOjyZSKJhGjsWPXLaswiUFOgt61bt9aqHF3k3z18+DC5ubkpiI2DgwPt2LGjVmUcPXq0ysB0Q4cObbSOZaHaxNp/1hKiQP936f8EsEr36FwEZDYo5BhORFlKSQCOABJrKqMhiAAR0bx5sjOr7nqhxiYC6enpNGzYMKW5hIODg+no0aO1KiclJYUCAgL0Iv9ubfMoV4cqIaobKkK0iVvZt8h8iTn13tabShuIA09fRSD7ud8fV7HfBACxAGJdXFw0dY60Sl4eUevWRG3aEKkTI6wxiEBsbGyVyV9effXVWid/qQ/5d6OiopRmC+vUqVOts4WpkqymISFEmxi4YyCZLDah5EfJNW9cT6jXIlD501B6AkREf/4pO7vz56texoq0NFrRALv9QqWBLC4upvDw8HqXf7e4uJjeeeedKvMo19ZuVdJW1nfUbRM/Xf2JEAVaHrNcQKt0j76KQKMdDipnzBiZf6Cxxw+TSqW0atUqcnd3V3hYOTk5UVRUVJ0eVg0p/251eZRHjhxZpx7Mli1byNPTU2EYTFkC+8ZIdn42NV/VnF744gUqkhbp2hxB0VcRWPGcY3h5TWU0NBHIzCSytSUKDpbNHGpM5OXlUWRkpMJ4uEgkIk9PT9qyZUudyqsq/26bNm3oxIkTmjkILVPXPMrVceTIEQoMDFRwLFtaWtLrr79O6enpGjoK/WXKgSnEohidSW94PhSdiwCAHQAyABQDSAfwDgBbAEcgmyJ6BIBNTeU0NBEgIvr2W9lZ3rix7vvWN59Aeno6vf7660odu4GBgXWOrHn48GFydXVVmGXj6OhY61k29ZXq8ih/9tlndSorMTGR+vfvr5DjwNjYmLp3705xcXEaOgrhUbVNnL59mlgUo+kHp2vAKt2jc
xEQ6tMQRaC0lKhHDyIrK6K7d+u2b30Qgbi4OOrRo4fCuLypqSn179+fEhMT61Redfl3ly9vWOO4taWueZSrIysri8LDw6lJkyYK/ghfX1/at2+fho5CGFRpE0XSIuqwoQM5fepETwueasgy3cJFQM/5919ZXKHXXqvbfvoqAvv27SNfX18Fx26TJk0oPDycsrKy6lReRkYGdevWTWn+3enTp+vFzB59oao8yh4eHkrzKFdHYWEhzZ8/X2EBnUgkopYtW9Lq1av1zrGsSptYemIpIQq091rdBLM+wUWgHrB4sexs799f+330RQSkUimtW7eOWrZsqeB0bNGiBc2fP58KCwvrVCbPv6s+yvIoi8XiavMoV8emTZvIw8ND4Ro3a9aMZs6cqReO5bq2iaSsJDJebEyDdw7WoFW6h4tAPaCwkMjLi8jFhai2q/91KQJ5eXk0c+ZMatasmcJbooeHB23atKnOZRYXF9PUqVOV5t8NDQ2tMRonRznFxcU0atQopXmU+/Xrp1K4icOHD1Pnzp0VZi1ZWVnRiBEjdHat6tImSktL6eXvXiaLJRaU/qRhO8K5CNQTYmJkZ3zGjNpt/3l6On2uxVkcGRkZNGLECLKyslJ4mHTu3JkOHz6sUrk8/672qCqPsomJCYWHh6s0tHb16lXq06ePUsdyz549tepYrkub+D7+e0IU6LMzdXOk10e4CNQjJk2S5SWOjdW1JTLi4+OpZ8+eSh27ffr0oatXr6pU7o4dO8jBwUHBmenm5qaymHDqxo0bN6hTp04KwzuWlpa0YMEClcp88OABjR07VqljuWPHjmoH4hOKh88ekt1yO+ryVReSluiXX0MTcBGoRzx+TOTgQNSpE1FNL2XPpFJ6pgHH3K+//kqdOnVS6tgdO3YsPXjwQKVyjx07Rq1atVKY2dO0adM6rwvgCMuJEyeodevWCtfG3t6eNm9WLbF6YWEhzZ07lxwdHRWGDFu3bk3r168X3LFc2zYRvjecJNESir/XOEKxcxGoZ+zaJTvzn35a/XaChc2VSmn9+vXUunVrhbdCR0dHmjt3bp0du+VU97YZFRWltu0c4dmxY4fCgxsAubq6qtVL27hxI7Vp00bhXnBwcKDZs2erfI9VpjZt4mjKUUIUaO4fc9Wur77ARaCeUVpK1L8/kZkZUWpq1dupIwKFhYU0e/ZshSEZkUhEbdq0oY2qrF4r4/Hjx9SzZ0+l487jx4/nUzrrEcuXL1fqr+nQoYNa/prff/+d/P39lTqWR48erXJvs6Y2kV+cT23Xt6WWa1tSXpHuZzNpCy4C9ZDUVJkI9O9fdTrKuorAgwcPaPTo0QqN2sDAgPz9/en3339X2d7i4mIaOXKk0vy7AwYM4AlP6jnFxcU0ffp0hXDVtc2jXB2XL1+m3r17KwTMMzExoV69etHly5drXVZNbWLBXwsIUaDDSY3L78RFoJ7y6aeyK1BVWtnaiMDly5epV69eShtY796969TAlFGX/LuchkF+fj4NHTpU6RqOoUOHqrWGo7oXFT8/Pzpw4EC1+1fXJq48uEIG0QY06sdRKttXX+EiUE8pLpY5iB0cZA7j56nqhj9w4AD5+fkpdLWtra3V6mqXI0T+XU7DQN08ytVRWFhIc+bMUepYrmrIsqo2UVJaQiHfhJDNMhu6n3tfZZvqK1wE6jHnz8umjE6apPjblrt3aUtZwKGqnG6Ojo40Z84ctZ1umsy/y2kYJCQkUIcOHZTmUVY3rpNUKqUNGzbUOHmhcpuozKbYTYQo0DcXvlHLjvoKF4F6zowZsisRE/PfdzVNv9uwYYPa0+/Onj1LHh4eCo3a1ta2zpEqOY0LIfIoV0dV05itra1pzJgxcr3djJwMslpqRS9++2KDSRdZV7gI1HNycmThJDw8smjUqDEK46USiYQ6deokyBu5PuXf5TQMqsr10Lp1a0FyPcTHx9NLL71Upd+r79d9yfAjQ7qeeV2Ao6mfcBGox5QvyTcyMlW4wa27dCG/nTvVriMnJ4f69eunNP/uqFGj+JROjmAIkUe5OoIOHSL7Pn3+C23SGoQokKi7SK3QJvUdvRYBAKkALgO4WJOhjUUEqgrOJZFYEWMj6ORJ2XQ8ddYJlOffVZbHVp/z73IaBtXlUX7ppZdUvv8qt4nM7EyyXGhJ4uliglhedNq2batSkMP6Sn0QAbvabNuQRWDz5s3Utm3basP03r0rSz7Tvbts7YAqIlBV/l2h3sQ4nLoiZB7lym1i5qGZhCjQibQTFeHOW7VqpdDGmjdvrlK48/oEFwE9pLqEHa1ataJ169Ypdexu3Ci7Kt9+W3sR0PSYLIcjFLdv36bAwECleZRr45MqbxMX7l4g8SIxjf9lvNLthE58pO/ouwikALgA4DyACUp+nwAgFkCsi4uLps6RVhAidV9JCVHXrmUJ6o/EVykCjTn/LqdhcPbs2TrnUQ67cIFCz58j/03+1GxFM3qU96jGeqpLgdqvX786p0DVR/RdBJqX/dsUQDyA0Kq2rY89gcTEROrXr5/SWOs9evRQKdZ6QgKRgQFR6Ot5tPP+/Urfa26eNoejS/bu3UtOTk4KLzVOTk5yeZR33r9PY/74iBAF2nm57pMm0tPTafjw4QpDpoaGhhQYGEhHjhwR8rC0hl6LgFxlQBSAWVX9Xl9E4MiRIxQYGKiwrN7S0pKGDx9O6QIkgpk/X3Z1duzg+Xc5jYvq8ij/cuwXMvvYjPp+31ftNQF5eXkUGRlJTZs2VRiy9fT0rFfhz/VWBACYAbCo9P9TAPpUtb0+i8CWLVvI09NTwenUtGlTioyMFDT/an5+Pg0aNJQA4WO3cDj1CYXYVSNAmAfy7e4raOwqqVRKq1atInd3d7meNmOMWrRoQQsXLhQ8N4KQ6LMItCwbAooHcAXA/6rbXp9EQCqV0sKFC6lFixYKN4W7uzutWrVK0Juiqvy7gJicnXn+XU7jpri4mLpN7EaIAiFIPvCcqnmUq2PPnj30wgsvKDiWbWxsaNy4cZSdnS1ofeqityJQ14+uRSA7O5vGjRunEDxNIpHQCy+8QHv27BG8zuriufvt3k3NXnlIEgmRmsFAOZx6TXZ+NjmudCTz1Z4UfOQPeumll5Tms3jnnXcEHyI9c+YMhYWFKYRRNzMzowEDBlBSUpKg9akCFwE1SEpKogEDBijEUTcyMqKwsDA6c+aM4HXWNv9u2IULFHwknuzsiIKCZDOHOJzGSMT+CBItElGnv76XmzGn7cx2aWlpNGzYMKWO5aCgIDp69KjgddYGLgJ15NixYxQcHKzUsTts2DBKS0sTvE5VcryWz4n+7jvZldqwQXCzOBy959StU8SiGEX+Flnt2pnq2pgmnLw5OTk0ffp0BceyWCymdu3a0datWwWvsyq4CNSCrVu3Urt27RRm2TRt2pSmT5+ukcxY6r6llN/wpaVEPXsSWVoS3bkjuJkcjt5SJC0i7w3e5PypMz0teFrrBZS17W0LhVQqpWXLlpGbm5uCD9HZ2Zmio6M16ljmIqAEqVRK0dHR5OzsrHBR3NzcaNmyZRq5KI8f
PxZsvPKXzEz6JTOTiIhu3CAyNiYaNkxwkzkcvWXJ8SWEKNAv138hIvk2UVs0lUe5Onbt2kUdOnRQeA7Y2trS+PHjBXcscxEoIzs7m8aPH68wx1gikVCHDh1oV1V5HNVEW/l3lyyRXbFffhGkOA5Hr7mRdYOMPjKioT8MFaS88hl4yvIoh4aGUmYdxaW2nD59mkJDQxWeD+bm5jRw4EBKTk5Wu45GLQLJyck0cOBAhTy4RkZGFBoaSqdPn65zmbVlzpw5SvPvBgYGCjKH+fqzZ3T92bOKv4uKiLy9iZydZTkIOJyGSmlpKfXc2pMsl1rSnaf/jYE+3yZURZN5lKsjLS2NhgwZQhYWFgr1du3alWIqZ5aqA41OBGJiYqhr164KF9DCwoKGDBmiEcduOZ999lmV+XfPnj0raF3Kxj9PnSJijOjddwWtisPRK767+B0hCrThrPxsCHXCq1dFRoZuVuXn5OTQlClTyM7OTuFF0svLi7Zv317rshqFCGzfvp28vLwULpSdnR1NmTJFI47dcmob10RoqrrhIyJkeYnPndNY1RyOzsh8lkl2y+0oaHMQlZTKz4vWhAhURlfxuaRSKX3yySfk6uqq4MN0cXGhJUuWVOvDbJAiIJVKacmSJeTi4qJwUlxdXemTTz7RqLddH/LvVnXDZ2cTOToS+foS8dBBnIbG2J/HkiRaQpfvK66Q1LQIVEaXkXp37txJ3t7eSl96IyIiFF56G4wIdFLhXSoAABI+SURBVOzYkSIiIpR2j7y9vWmnAKkWq6O6WOezZs3SaN3KqO6G37NHdvVWrtSyURyOBjmSfIQQBfrgzw+U/q5NEaiMLnN2xMTEUEhIiFLH8qBBgyg5ObnhiMDzjt2QkBCVHSW1RZ/z71Z3w5eWEr3yCpGpKVFKinbt4nA0QX5xPrVZ14ZarW1FeUXKAzLqSgQqo8vsfcnJyTRo0CCFCSkNRgREIlGFsmkSTeU/FZo/srLoj2oyIKWlEZmZEfXtKxMFDqc+M//IfEIU6M+bf1a5TU1tQpvo+jmSnZ1NERER5VPiG4YIaDpsRFRUVIPLv7tmjewqanikjMPRKAn3E0gSLaE3f3pT16aohK5HFLgIVMOWLVvqbf7duKdPKe7p02q3kUqJ/P2JmjUjelRzpj0OR+8oKS2h4K+DyXaZLT3IfVDttrVpE7pG3TzKqsBF4DkOHz5Mbm5uOvHqC0ltxz8vXCASi4kmTNCCURyOwGw8t5EQBfo27tsat9UHn0Bd0NYsQ3VEQAQNwxjrwxhLZIwlMcbmaqqeK1euwMfHByKRCL169UJqaioAwNraGsuXLwcR4e7du3jjjTc0ZYLO6NgReO89YNMmICZG19ZwOLUnIycD7//5Pnq498CYF8bo2hzB6dy5M65fv47S0lLs3bsXTk5OAICsrCxMnToVjDE4Oztj3759OrNRoyLAGBMD+BxAXwDtAYxgjLUXqvyHDx8iLCwMEokE3t7euHz5MogIZmZmmDp1KoqLi/H48WPMnj1bqCr1lqgowNUVmDABKCzUtTUcTu2I/D0SBdICbOy/EYwxXZujUQYOHIjbt2+DiLBu3TrY2NgAANLT0zFo0CCIRCK0b98eFy5c0Kpdmu4JBABIIqJkIioCsBPAQHUKLCgowLBhw2BkZAR7e3scP34cJSUlMDQ0xNChQ5Gfn4/c3FysX78eEolEkIOoD5iZAV98AVy7BixfrmtrOJyaOfDvAey+uhvzQ+ejjW0bXZujVaZNm4asrCwQEWbNmgVzc3MQEa5duwY/Pz9IJBIEBQUhPT1d47ZoWgRaALhd6e/0su8qYIxNYIzFMsZiMzMzlRYilUoxbdo0mJubw8TEBD/++COKioogFovRrVs3ZGRkoLCwEHv27IGxsbHmjkbP6dsXGD4c+Phj4N9/dW0Nh1M1uUW5mHxwMtrbt8ecrnN0bY5OWbFiBXJyclBcXIyRI0fCyMgIJSUl+Oeff+Ds7AxDQ0O88soryM3N1YwBqjoTavMB8BqAzZX+fhPA+qq2f94xrIs44PrEyexsOlnHuOMZGUTW1kQvvsjXDnD0lxm/zyBEgWLS6rYYVJU2UR/JzMyknj171jovCfR1dhCAIACHKv39AYAPqtrez8+PduzYQY6Ojgoze1xdXTWSEaghsmmT7Mp+842uLeFwFIm9E0uiRSKa+OtEXZtSL6hNhkJ1RIDJ9tcMjDEJgH8B9ARwB8A5ACOJ6EoV28sZY29vj6VLl+Kdd97RmI36zKknTwAAwVZWddqvtBQICwOuXgWuXwfs7TVhHYdTd6SlUnTZ3AV3c+7i2pRrsDa2rtP+qraJhsLx48fx9ttvIzk5Gc89u88Tkb8qZWrUJ0BEUgBTARwCcA3ArqoEoBxLS0ssWLAARIQHDx40WgEAgHnJyZiXnFzn/UQi2XTRnBxgxgwNGMbhqMj6M+txIeMC1vVZV2cBAFRvEw2F0NBQJCUlobS0FNu3b4eDg4PaZWq0J1BX/Pz86Pz587o2Q294MS4OAPB3x44q7f/hh0B0NHD4MPDyy0JaxuHUnbTsNHht8MKLbi/i1xG/qjQlVN020VBhjOlnT6CuNPR5wtrmgw+Atm2BSZOAvDxdW8NpzBARphycAgLh836f87auR+iVCHCExdgY+PJLIDkZ+OgjXVvDaczsuboHB24cwEfdP4KrtauuzeFUgotAA+fFF4HwcGDlSuDyZV1bw2mMZBdkY/rv09HJsROmd5mua3M4z9F4ltTWQ9a0bi1IOStWAPv3A+PHAydPAmKxIMVyOLXigz8/wINnD3Bg5AFIROo9coRqE5z/4D0BPcbXwgK+FhZql2NrC6xeDZw5A2zcKIBhHE4tOXnrJDae34jILpHo5NhJ7fKEahOc/9Cr2UH+/v4UGxurazP0hj8fPQIAvFQWaEodiIA+fYDTp2XxhVq0qHkfDkcdikqK0PHLjsgtysWVyVdgbmiudplCtomGRIOZHcSRZ3FaGhanpQlSFmPAhg1AcTEwnQ/LcrTAipMrcDXzKjb02yCIAADCtgmODC4CjYhWrWQhp3/6CdBh+HJOI+BG1g18dPwjvNb+NfRv21/X5nCqgYtAI2PGDMDHB5g6VbaimMMRGiLCxP0TYSwxxto+a3VtDqcGuAg0MgwMZCEl7twB5s/XtTWchsh38d/haOpRfPLSJ3C0cNS1OZwa4CLQCOnSBZg8GVi/Hjh3TtfWcBoSD/MeYubhmQh2DsYEvwm6NodTC/g6AT3mSw8PjZW9ZAmwd69s7cC5c7IeAoejLjMPz8TTwqfYNGATREz4d0xNtonGCu8J6DEepqbwMDXVSNmWlrKeQHw8sGaNRqrgNDL+TP4T38V/hzld58CrqZdG6tBkm2is8HUCesyvDx8CAF6xs9NYHYMGyaKMXrkCuLtrrBpOAye/OB8dvugAxhguTboEEwMTjdSjjTZRH+HrBBooq27fxqrbt2veUA3Wr5eFkZg8WbagjMNRhcXHF+Pm45v4csCXGhMAQDttorGhMRFgjEUxxu4wxi6Wffppqi6O6jg7y/wDv/8O/PC
Drq3h1EcSHiRg+anlGPvCWPRw76Frczh1RNM9gdVE5Fv2OajhujgqMnkyEBAAREYCZavyOZxaUUqlmPDrBFgZWWFlr5W6NoejAnw4iPP/7d17cBX1FcDx7zEYGFCoEkvRgtApVMFHFWSktlQeUygMMFSpMKMtFc1AgVFsOgYZrRYYlPoYsbyCj9Q6KMgfCCjyUBF0CJSpaYAgDgJieDSGolWp4eHpH7+tydCLWXNz97d37/nMZLibu3f3cNi9h939PcjLc30HjhyBu+/2HY3JJgu2LmBT1SYeHfgoBS3tPn02ynQRmCgiFSLytIicl+F9mTRceaXrTfzkk7Bxo+9oTDY4+OlBil8rpn/n/txyxS2+wzGNlFbrIBFZB6Sa6XgqUAbUAApMA9qr6q0ptlEIFAJ07Nixxwc2ONRXPvziCwA6tGgRyf4+/xwuvxyaN4fycvenMWcy8sWRrHxvJdvGb+P750czzn/U50S2SKd1UFqdxVR1QJj1RGQhsPIM2ygBSsA1EU0nnqSJ+kBv1QrmzXNDTj/4oJuo3phUVuxawdLKpczoNyOyAgD25Z8JmWwdVH/QkBHA9kztK6kWV1ezuLo60n0OHAijR7sWQ+++G+muTZb47PhnTHhlAt0v6E7Rj4oi3bePcyLpMvlMYJaIbBORCqAvMDmD+0qkeQcOMO/Agcj3+9hj0LIljBtnfQfM/7v39Xup+ncVC4cuJD8vP9J9+zonkixjRUBVb1HVy1X1ClUdpqqHMrUv07TatXMT07/5JjzzjO9oTJxsPbiV2VtmM67nOHp36O07HNMErImoSenWW6FPHygqArv6NgAnvzzJ7Stup12rdszsP9N3OKaJWBEwKYnAggWuxdBku5FngMfLHqf8cDmzfz6bNi3a+A7HNBErAuaMLrkEpkyBRYtg9Wrf0Rif9n28j/vW38fQrkO54dIbfIdjmpCNIhpjNcePA1CQH+3Dt/pqa11HsuPHYft298DY5BZVZciiIWz4YAOVEyrp2Kajt1jicE7EkY0imlAF+fneD/bmzd2QEnv3wgMPeA3FeLJkxxJW7V7F9H7TvRYAiMc5kTRWBGKs9NAhSg/5b1TVpw+MHQuPPOImoTG54+h/jnLHq3fQo30PJvWa5Duc2JwTSWJFIMZKDx+m9PBh32EAMGsWtG0LhYVw6pTvaExUitcVU3OshoVDF5J3Vp7vcGJ1TiSFFQETyvnnu2kot2xxQ0uY5Htr/1uU/L2EO6+9k6vaX+U7HJMhVgRMaKNGuWElpkyBqirf0ZhMqj1ZS+GKQi5uczEPXG8Pg5LMioAJTcRdBZw6BZP83x42GTTr7VnsrNnJ3CFzaZXfync4JoOsCJhvpHNnuP9+WLbM/Zjkee/Ie8zYOIObut/E4C42K2zSWT+BGDsWPIFtmef/gVx9J07ANddATQ1UVkLr1r4jMk1FVen3bD/KD5ezc8JOvnNOqulC/InrOeGb9RNIqJZ5ebE82M8+2/UdOHgQpk71HY1pSqXlpazft56HBjwUuwIA8T0nspkVgRibe+AAc2M6bG6vXjBxIsyZA5s3+47GNIWPPv+IorVFXNfhOm67+jbf4aQU53MiW1kRiLEl1dUsifEQntOnw4UXur4DJ074jsak6641d/Fp7aeUDC3hLInnV0Pcz4lsFM9/aZMVWrd2VwIVFW4iGpO91r6/lucqnqP4x8V0u6Cb73BMhNIqAiIyUkR2iMiXItLztPemiMhuEdklIgPTC9PE1fDhMGKEazG0Z4/vaExjHDtxjHEvj6Nr267c85N7fIdjIpbulcB24BfAhvq/FJFuwCigOzAImCsi9jQnoZ54Apo1g/HjbTrKbDTtzWnsObqH+UPm06KZTeSea9IqAqq6U1V3pXhrOPCCqtaq6l5gN9ArnX2Z+LroIpg5E9asgeef9x2N+Sa2/XMbD296mDE/HEPfzn19h2M8aJJ+AiKyHihS1a3B8p+BMlV9Llh+ClilqktTfLYQKAwWL8NdXRgoAGp8BxETlos6los6los6P1DVcxvzwWYNrSAi64BUDYanqupLZ/pYit+lrDaqWgKUBPva2tgOD0ljuahjuahjuahjuagjIo3uZdtgEVDVAY3YbhXQod7yd4GDjdiOMcaYDMpUE9HlwCgRaS4inYEuwJYM7csYY0wjpdtEdISIVAG9gZdFZDWAqu4AlgCVwKvABFUNMxVJSTrxJIzloo7loo7loo7lok6jcxGrAeSMMcZEy3oMG2NMDrMiYIwxOcxLERCRQcFwErtFpDjF+81FZHHw/mYR6RR9lNEIkYu7RKRSRCpE5DURudhHnFFoKBf11rtRRPT0oUqSJEwuROSXwbGxQ0QWRR1jVEKcIx1F5A0ReSc4TxI5E46IPC0i1SKSsi+VOLODPFWIyNWhNqyqkf4AecD7wPeAfOAfQLfT1vktMD94PQpYHHWcMcpFX6Bl8Hp8LuciWO9c3DAlZUBP33F7PC66AO8A5wXL3/Ydt8dclADjg9fdgH2+485QLvoAVwPbz/D+YGAVrp/WtcDmMNv1cSXQC9itqntU9TjwAm6YifqGA38JXi8F+otIqg5o2a7BXKjqG6p6LFgsw/W5SKIwxwXANGAW8EWUwUUsTC5uB+ao6lEAVU3q+MphcqHA/+a3a0NC+ySp6gbgX1+zynDgWXXKgG+JSPuGtuujCFwEfFhvuSr4Xcp1VPUk8AnQNpLoohUmF/WNxVX6JGowFyJyFdBBVVdGGZgHYY6LrkBXEXlbRMpEZFBk0UUrTC7uB24Omqu/AkyKJrTY+abfJ0CIHsMZEGZIidDDTmS50H9PEbkZ6An8NKMR+fO1uRCRs4DHgDFRBeRRmOOiGe6W0PW4q8ONInKZqn6c4diiFiYXo4FSVX1ERHoDfw1y8WXmw4uVRn1v+rgSCDOkxFfriEgz3CXe110GZatQw2uIyABgKjBMVWsjii1qDeXiXNwAg+tFZB/unufyhD4cDnuOvKSqJ9SN1LsLVxSSJkwuxuI6p6Kqm4AWuMHlck2jhuvxUQT+BnQRkc4iko978Lv8tHWWA78OXt8IvK7Bk4+EaTAXwS2QBbgCkNT7vtBALlT1E1UtUNVOqtoJ93xkmAYj1yZMmHNkGa7RACJSgLs9lMRpfcLkYj/QH0BELsUVgY8ijTIelgO/CloJXQt8oqqHGvpQ5LeDVPWkiEwEVuOe/D+tqjtE5I/AVlVdDjyFu6TbjbsCGBV1nFEImYs/AecALwbPxver6jBvQWdIyFzkhJC5WA38TEQqgVPA71X1iL+oMyNkLn4HLBSRybjbH2OS+J9GEXked/uvIHj+8QfgbABVnY97HjIYN3/LMeA3obabwFwZY4wJyXoMG2NMDrMiYIwxOcyKgDHG5DArAsYYk8OsCBhjTA6zImCMMTnMioAxxuSw/wJvKdH74RNWdQAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + 
"source": [ + "plot_utility(utility)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Hence, we get a piecewise-continuous utility function consistent with the given POMDP." + ] } ], "metadata": { @@ -1447,7 +1817,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.1" + "version": "3.6.4" } }, "nbformat": 4, diff --git a/notebook.py b/notebook.py index 263f7a44b..80062d9f6 100644 --- a/notebook.py +++ b/notebook.py @@ -1087,3 +1087,24 @@ def gaussian_kernel(l=5, sig=1.0): xx, yy = np.meshgrid(ax, ax) kernel = np.exp(-(xx**2 + yy**2) / (2. * sig**2)) return kernel + +# Plots utility function for a POMDP +def plot_pomdp_utility(utility): + save = utility['0'][0] + delete = utility['1'][0] + ask_save = utility['2'][0] + ask_delete = utility['2'][-1] + left = (save[0] - ask_save[0]) / (save[0] - ask_save[0] + ask_save[1] - save[1]) + right = (delete[0] - ask_delete[0]) / (delete[0] - ask_delete[0] + ask_delete[1] - delete[1]) + + colors = ['g', 'b', 'k'] + for action in utility: + for value in utility[action]: + plt.plot(value, color=colors[int(action)]) + plt.vlines([left, right], -20, 10, linestyles='dashed', colors='c') + plt.ylim(-20, 13) + plt.xlim(0, 1) + plt.text(left/2 - 0.05, 10, 'Save') + plt.text((right + left)/2 - 0.02, 10, 'Ask') + plt.text((right + 1)/2 - 0.07, 10, 'Delete') + plt.show() diff --git a/pomdp.ipynb b/pomdp.ipynb deleted file mode 100644 index 1c8391818..000000000 --- a/pomdp.ipynb +++ /dev/null @@ -1,240 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Partially Observable Markov decision processes (POMDPs)\n", - "\n", - "This Jupyter notebook acts as supporting material for POMDPs, covered in **Chapter 17 Making Complex Decisions** of the book* Artificial Intelligence: A Modern Approach*. We make use of the implementations of POMPDPs in mdp.py module. This notebook has been separated from the notebook `mdp.py` as the topics are considerably more advanced.\n", - "\n", - "**Note that it is essential to work through and understand the mdp.ipynb notebook before diving into this one.**\n", - "\n", - "Let us import everything from the mdp module to get started." - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "from mdp import *\n", - "from notebook import psource, pseudocode" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## CONTENTS\n", - "\n", - "1. Overview of MDPs\n", - "2. POMDPs - a conceptual outline\n", - "3. POMDPs - a rigorous outline\n", - "4. Value Iteration\n", - " - Value Iteration Visualization" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 1. 
OVERVIEW\n", - "\n", - "We first review Markov property and MDPs as in [Section 17.1] of the book.\n", - "\n", - "- A stochastic process is said to have the **Markov property**, or to have a **Markovian transition model** if the conditional probability distribution of future states of the process (conditional on both past and present states) depends only on the present state, not on the sequence of events that preceded it.\n", - "\n", - " -- (Source: [Wikipedia](https://en.wikipedia.org/wiki/Markov_property))\n", - "\n", - "A Markov decision process or MDP is defined as:\n", - "- a sequential decision problem for a fully observable, stochastic environment with a Markovian transition model and additive rewards.\n", - "\n", - "An MDP consists of a set of states (with an initial state $s_0$); a set $A(s)$ of actions\n", - "in each state; a transition model $P(s' | s, a)$; and a reward function $R(s)$.\n", - "\n", - "The MDP seeks to make sequential decisions to occupy states so as to maximise some combination of the reward function $R(s)$.\n", - "\n", - "The characteristic problem of the MDP is hence to identify the optimal policy function $\\pi^*(s)$ that provides the _utility-maximising_ action $a$ to be taken when the current state is $s$.\n", - "\n", - "### Belief vector\n", - "\n", - "**Note**: The book refers to the _belief vector_ as the _belief state_. We use the latter terminology here to retain our ability to refer to the belief vector as a _probability distribution over states_.\n", - "\n", - "The solution of an MDP is subject to certain properties of the problem which are assumed and justified in [Section 17.1]. One critical assumption is that the agent is **fully aware of its current state at all times**.\n", - "\n", - "A tedious (but rewarding, as we will see) way of expressing this is in terms of the **belief vector** $b$ of the agent. The belief vector is a function mapping states to probabilities or certainties of being in those states.\n", - "\n", - "Consider an agent that is fully aware that it is in state $s_i$ in the statespace $(s_1, s_2, ... s_n)$ at the current time.\n", - "\n", - "Its belief vector is the vector $(b(s_1), b(s_2), ... b(s_n))$ given by the function $b(s)$:\n", - "\\begin{align*}\n", - "b(s) &= 0 \\quad \\text{if }s \\neq s_i \\\\ &= 1 \\quad \\text{if } s = s_i\n", - "\\end{align*}\n", - "\n", - "Note that $b(s)$ is a probability distribution that necessarily sums to $1$ over all $s$.\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "collapsed": true - }, - "source": [ - "## 2. POMDPs - a conceptual outline\n", - "\n", - "The POMDP really has only two modifications to the **problem formulation** compared to the MDP.\n", - "\n", - "- **Belief state** - In the real world, the current state of an agent is often not known with complete certainty. This makes the concept of a belief vector extremely relevant. It allows the agent to represent different degrees of certainty with which it _believes_ it is in each state.\n", - "\n", - "- **Evidence percepts** - In the real world, agents often have certain kinds of evidence, collected from sensors. They can use the probability distribution of observed evidence, conditional on state, to consolidate their information. This is a known distribution $P(e\\ |\\ s)$ - $e$ being an evidence, and $s$ being the state it is conditional on.\n", - "\n", - "Consider the world we used for the MDP. 
\n", - "\n", - "![title](images/grid_mdp.jpg)\n", - "\n", - "#### Using the belief vector\n", - "An agent beginning at $(1, 1)$ may not be certain that it is indeed in $(1, 1)$. Consider a belief vector $b$ such that:\n", - "\\begin{align*}\n", - " b((1,1)) &= 0.8 \\\\\n", - " b((2,1)) &= 0.1 \\\\\n", - " b((1,2)) &= 0.1 \\\\\n", - " b(s) &= 0 \\quad \\quad \\forall \\text{ other } s\n", - "\\end{align*}\n", - "\n", - "By horizontally catenating each row, we can represent this as an 11-dimensional vector (omitting $(2, 2)$).\n", - "\n", - "Thus, taking $s_1 = (1, 1)$, $s_2 = (1, 2)$, ... $s_{11} = (4,3)$, we have $b$:\n", - "\n", - "$b = (0.8, 0.1, 0, 0, 0.1, 0, 0, 0, 0, 0, 0)$ \n", - "\n", - "This fully represents the certainty to which the agent is aware of its state.\n", - "\n", - "#### Using evidence\n", - "The evidence observed here could be the number of adjacent 'walls' or 'dead ends' observed by the agent. We assume that the agent cannot 'orient' the walls - only count them.\n", - "\n", - "In this case, $e$ can take only two values, 1 and 2. This gives $P(e\\ |\\ s)$ as:\n", - "\\begin{align*}\n", - " P(e=2\\ |\\ s) &= \\frac{1}{7} \\quad \\forall \\quad s \\in \\{s_1, s_2, s_4, s_5, s_8, s_9, s_{11}\\}\\\\\n", - " P(e=1\\ |\\ s) &= \\frac{1}{4} \\quad \\forall \\quad s \\in \\{s_3, s_6, s_7, s_{10}\\} \\\\\n", - " P(e\\ |\\ s) &= 0 \\quad \\forall \\quad \\text{ other } s, e\n", - "\\end{align*}\n", - "\n", - "Note that the implications of the evidence on the state must be known **a priori** to the agent. Ways of reliably learning this distribution from percepts are beyond the scope of this notebook." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 3. POMDPs - a rigorous outline\n", - "\n", - "A POMDP is thus a sequential decision problem for for a *partially* observable, stochastic environment with a Markovian transition model, a known 'sensor model' for inferring state from observation, and additive rewards. \n", - "\n", - "Practically, a POMDP has the following, which an MDP also has:\n", - "- a set of states, each denoted by $s$\n", - "- a set of actions available in each state, $A(s)$\n", - "- a reward accrued on attaining some state, $R(s)$\n", - "- a transition probability $P(s'\\ |\\ s, a)$ of action $a$ changing the state from $s$ to $s'$\n", - "\n", - "And the following, which an MDP does not:\n", - "- a sensor model $P(e\\ |\\ s)$ on evidence conditional on states\n", - "\n", - "Additionally, the POMDP is now uncertain of its current state hence has:\n", - "- a belief vector $b$ representing the certainty of being in each state (as a probability distribution)\n", - "\n", - "\n", - "#### New uncertainties\n", - "\n", - "It is useful to intuitively appreciate the new uncertainties that have arisen in the agent's awareness of its own state.\n", - "\n", - "- At any point, the agent has belief vector $b$, the distribution of its believed likelihood of being in each state $s$.\n", - "- For each of these states $s$ that the agent may **actually** be in, it has some set of actions given by $A(s)$.\n", - "- Each of these actions may transport it to some other state $s'$, assuming an initial state $s$, with probability $P(s'\\ |\\ s, a)$\n", - "- Once the action is performed, the agent receives a percept $e$. $P(e\\ |\\ s)$ now tells it the chances of having perceived $e$ for each state $s$. 
The agent must use this information to update its new belief state appropriately.\n", - "\n", - "#### Evolution of the belief vector - the `FORWARD` function\n", - "\n", - "The new belief vector $b'(s')$ after an action $a$ on the belief vector $b(s)$ and the noting of evidence $e$ is:\n", - "$$ b'(s') = \\alpha P(e\\ |\\ s') \\sum_s P(s'\\ | s, a) b(s)$$ \n", - "\n", - "where $\\alpha$ is a normalising constant (to retain the interpretation of $b$ as a probability distribution.\n", - "\n", - "This equation is just counts the sum of likelihoods of going to a state $s'$ from every possible state $s$, times the initial likelihood of being in each $s$. This is multiplied by the likelihood that the known evidence actually implies the new state $s'$. \n", - "\n", - "This function is represented as `b' = FORWARD(b, a, e)`\n", - "\n", - "#### Probability distribution of the evolving belief vector\n", - "\n", - "The goal here is to find $P(b'\\ |\\ b, a)$ - the probability that action $a$ transforms belief vector $b$ into belief vector $b'$. The following steps illustrate this -\n", - "\n", - "The probability of observing evidence $e$ when action $a$ is enacted on belief vector $b$ can be distributed over each possible new state $s'$ resulting from it:\n", - "\\begin{align*}\n", - " P(e\\ |\\ b, a) &= \\sum_{s'} P(e\\ |\\ b, a, s') P(s'\\ |\\ b, a) \\\\\n", - " &= \\sum_{s'} P(e\\ |\\ s') P(s'\\ |\\ b, a) \\\\\n", - " &= \\sum_{s'} P(e\\ |\\ s') \\sum_s P(s'\\ |\\ s, a) b(s)\n", - "\\end{align*}\n", - "\n", - "The probability of getting belief vector $b'$ from $b$ by application of action $a$ can thus be summed over all possible evidences $e$:\n", - "\\begin{align*}\n", - " P(b'\\ |\\ b, a) &= \\sum_{e} P(b'\\ |\\ b, a, e) P(e\\ |\\ b, a) \\\\\n", - " &= \\sum_{e} P(b'\\ |\\ b, a, e) \\sum_{s'} P(e\\ |\\ s') \\sum_s P(s'\\ |\\ s, a) b(s)\n", - "\\end{align*}\n", - "\n", - "where $P(b'\\ |\\ b, a, e) = 1$ if $b' = $ `FORWARD(b, a, e)` and $= 0$ otherwise.\n", - "\n", - "Given initial and final belief states $b$ and $b'$, the transition probabilities still depend on the action $a$ and observed evidence $e$. Some belief states may be achievable by certain actions, but have non-zero probabilities for states prohibited by the evidence $e$. Thus, the above condition thus ensures that only valid combinations of $(b', b, a, e)$ are considered.\n", - "\n", - "#### A modified rewardspace\n", - "\n", - "For MDPs, the reward space was simple - one reward per available state. However, for a belief vector $b(s)$, the expected reward is now:\n", - "$$\\rho(b) = \\sum_s b(s) R(s)$$\n", - "\n", - "Thus, as the belief vector can take infinite values of the distribution over states, so can the reward for each belief vector vary over a hyperplane in the belief space, or space of states (planes in an $N$-dimensional space are formed by a linear combination of the axes)." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.1" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/tests/test_mdp.py b/tests/test_mdp.py index 00710bc9f..5552f7570 100644 --- a/tests/test_mdp.py +++ b/tests/test_mdp.py @@ -119,3 +119,43 @@ def test_transition_model(): assert mdp.T("a","plan3") == [(0.2, 'a'), (0.5, 'b'), (0.3, 'c')] assert mdp.T("b","plan2") == [(0.6, 'a'), (0.2, 'b'), (0.1, 'c'), (0.1, 'd')] assert mdp.T("c","plan1") == [(0.3, 'a'), (0.5, 'b'), (0.1, 'c'), (0.1, 'd')] + + +def test_pomdp_value_iteration(): + t_prob = [[[0.65, 0.35], [0.65, 0.35]], [[0.65, 0.35], [0.65, 0.35]], [[1.0, 0.0], [0.0, 1.0]]] + e_prob = [[[0.5, 0.5], [0.5, 0.5]], [[0.5, 0.5], [0.5, 0.5]], [[0.8, 0.2], [0.3, 0.7]]] + rewards = [[5, -10], [-20, 5], [-1, -1]] + + gamma = 0.95 + actions = ('0', '1', '2') + states = ('0', '1') + + pomdp = POMDP(actions, t_prob, e_prob, rewards, states, gamma) + utility = pomdp_value_iteration(pomdp, epsilon=5) + + for _, v in utility.items(): + sum_ = 0 + for element in v: + sum_ += sum(element) + # exact value was found to be -9.73231 + assert -9.76 < sum_ < -9.70 + + +def test_pomdp_value_iteration2(): + t_prob = [[[0.5, 0.5], [0.5, 0.5]], [[0.5, 0.5], [0.5, 0.5]], [[1.0, 0.0], [0.0, 1.0]]] + e_prob = [[[0.5, 0.5], [0.5, 0.5]], [[0.5, 0.5], [0.5, 0.5]], [[0.85, 0.15], [0.15, 0.85]]] + rewards = [[-100, 10], [10, -100], [-1, -1]] + + gamma = 0.95 + actions = ('0', '1', '2') + states = ('0', '1') + + pomdp = POMDP(actions, t_prob, e_prob, rewards, states, gamma) + utility = pomdp_value_iteration(pomdp, epsilon=100) + + for _, v in utility.items(): + sum_ = 0 + for element in v: + sum_ += sum(element) + # exact value was found to be -77.28259 + assert -77.31 < sum_ < -77.25
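
A note on reading the output of `pomdp_value_iteration`, which the tests above exercise: the returned `utility` maps each action label to a list of alpha-vectors with one entry per state, and the value of an arbitrary belief vector is the largest dot product against those vectors. The helper below is not part of this patch; it is a minimal editorial sketch, assuming the two-state `utility` dictionaries produced by this code, of how such a mapping could be queried.

    # Editorial sketch (not part of the patch): query the alpha-vector mapping
    # returned by pomdp_value_iteration. utility[action] holds vectors of
    # per-state values; a belief's value is the maximum dot product over them.

    def belief_value_and_action(utility, belief):
        """Return (value, action) for a belief vector such as [P(s0), P(s1)]."""
        best_value, best_action = float('-inf'), None
        for action, vectors in utility.items():
            for alpha in vectors:
                value = sum(b * a for b, a in zip(belief, alpha))
                if value > best_value:
                    best_value, best_action = value, action
        return best_value, best_action

    # With the tiger-problem utility computed in mdp_apps.ipynb, a uniform
    # belief [0.5, 0.5] should come out as action '2' (listen), while a belief
    # that is almost certain the tiger is behind the left door, for example
    # [0.99, 0.01], should come out as action '1' (open the right door).

This is also the reading behind `plot_pomdp_utility` and `plot_utility`: each plotted segment is one alpha-vector, and the value function is the upper envelope of those segments over the belief axis.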
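
The markdown removed from pomdp.ipynb above also defines the belief update $b'(s') = \alpha P(e\ |\ s') \sum_s P(s'\ |\ s, a) b(s)$ (the `FORWARD` function), but the patch does not ship a corresponding implementation. The sketch below is an editorial illustration only, written against the `t_prob`/`e_prob` lists of the tiger example, reading `t_prob[a][s][s1]` as $P(s'\ |\ s, a)$ and `e_prob[a][s1][e]` as $P(e\ |\ s')$; `forward` is a hypothetical name, not an aima-python API.

    # Editorial sketch (not part of the patch): the FORWARD belief update from
    # the notebook text, using plain nested lists like t_prob and e_prob above.

    def forward(belief, action, evidence, t_prob, e_prob):
        """Posterior belief after taking `action` and observing `evidence`."""
        n = len(belief)
        # Prediction step: sum_s P(s' | s, a) * b(s)
        predicted = [sum(t_prob[action][s][s1] * belief[s] for s in range(n))
                     for s1 in range(n)]
        # Correction step: weight by the sensor model P(e | s') and normalise
        unnormalised = [e_prob[action][s1][evidence] * predicted[s1]
                        for s1 in range(n)]
        total = sum(unnormalised)
        return [u / total for u in unnormalised]

    # Listening (action 2) from a uniform belief and hearing the tiger on the
    # left (evidence 0) should give [0.85, 0.15] with the matrices above.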