');\n",
+ " this._root_extra_style(this.root)\n",
+ " this.root.attr('style', 'display: inline-block');\n",
+ "\n",
+ " $(parent_element).append(this.root);\n",
+ "\n",
+ " this._init_header(this);\n",
+ " this._init_canvas(this);\n",
+ " this._init_toolbar(this);\n",
+ "\n",
+ " var fig = this;\n",
+ "\n",
+ " this.waiting = false;\n",
+ "\n",
+ " this.ws.onopen = function () {\n",
+ " fig.send_message(\"supports_binary\", {value: fig.supports_binary});\n",
+ " fig.send_message(\"send_image_mode\", {});\n",
+ " fig.send_message(\"refresh\", {});\n",
+ " }\n",
+ "\n",
+ " this.imageObj.onload = function() {\n",
+ " if (fig.image_mode == 'full') {\n",
+ " // Full images could contain transparency (where diff images\n",
+ " // almost always do), so we need to clear the canvas so that\n",
+ " // there is no ghosting.\n",
+ " fig.context.clearRect(0, 0, fig.canvas.width, fig.canvas.height);\n",
+ " }\n",
+ " fig.context.drawImage(fig.imageObj, 0, 0);\n",
+ " };\n",
+ "\n",
+ " this.imageObj.onunload = function() {\n",
+ " this.ws.close();\n",
+ " }\n",
+ "\n",
+ " this.ws.onmessage = this._make_on_message_function(this);\n",
+ "\n",
+ " this.ondownload = ondownload;\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype._init_header = function() {\n",
+ "    var titlebar = $(\n",
+ "        '<div class=\"ui-dialog-titlebar ui-widget-header ui-corner-all ' +\n",
+ "        'ui-helper-clearfix\"/>');\n",
+ "    var titletext = $(\n",
+ "        '<div class=\"ui-dialog-title\" style=\"width: 100%; ' +\n",
+ "        'text-align: center; padding: 3px;\"/>');\n",
+ " titlebar.append(titletext)\n",
+ " this.root.append(titlebar);\n",
+ " this.header = titletext[0];\n",
+ "}\n",
+ "\n",
+ "\n",
+ "\n",
+ "mpl.figure.prototype._canvas_extra_style = function(canvas_div) {\n",
+ "\n",
+ "}\n",
+ "\n",
+ "\n",
+ "mpl.figure.prototype._root_extra_style = function(canvas_div) {\n",
+ "\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype._init_canvas = function() {\n",
+ " var fig = this;\n",
+ "\n",
+ "    var canvas_div = $('<div/>');\n",
+ "\n",
+ " canvas_div.attr('style', 'position: relative; clear: both; outline: 0');\n",
+ "\n",
+ " function canvas_keyboard_event(event) {\n",
+ " return fig.key_event(event, event['data']);\n",
+ " }\n",
+ "\n",
+ " canvas_div.keydown('key_press', canvas_keyboard_event);\n",
+ " canvas_div.keyup('key_release', canvas_keyboard_event);\n",
+ " this.canvas_div = canvas_div\n",
+ " this._canvas_extra_style(canvas_div)\n",
+ " this.root.append(canvas_div);\n",
+ "\n",
+ "    var canvas = $('<canvas/>');\n",
+ " canvas.addClass('mpl-canvas');\n",
+ " canvas.attr('style', \"left: 0; top: 0; z-index: 0; outline: 0\")\n",
+ "\n",
+ " this.canvas = canvas[0];\n",
+ " this.context = canvas[0].getContext(\"2d\");\n",
+ "\n",
+ "    var rubberband = $('<canvas/>');\n",
+ " rubberband.attr('style', \"position: absolute; left: 0; top: 0; z-index: 1;\")\n",
+ "\n",
+ " var pass_mouse_events = true;\n",
+ "\n",
+ " canvas_div.resizable({\n",
+ " start: function(event, ui) {\n",
+ " pass_mouse_events = false;\n",
+ " },\n",
+ " resize: function(event, ui) {\n",
+ " fig.request_resize(ui.size.width, ui.size.height);\n",
+ " },\n",
+ " stop: function(event, ui) {\n",
+ " pass_mouse_events = true;\n",
+ " fig.request_resize(ui.size.width, ui.size.height);\n",
+ " },\n",
+ " });\n",
+ "\n",
+ " function mouse_event_fn(event) {\n",
+ " if (pass_mouse_events)\n",
+ " return fig.mouse_event(event, event['data']);\n",
+ " }\n",
+ "\n",
+ " rubberband.mousedown('button_press', mouse_event_fn);\n",
+ " rubberband.mouseup('button_release', mouse_event_fn);\n",
+ " // Throttle sequential mouse events to 1 every 20ms.\n",
+ " rubberband.mousemove('motion_notify', mouse_event_fn);\n",
+ "\n",
+ " rubberband.mouseenter('figure_enter', mouse_event_fn);\n",
+ " rubberband.mouseleave('figure_leave', mouse_event_fn);\n",
+ "\n",
+ " canvas_div.on(\"wheel\", function (event) {\n",
+ " event = event.originalEvent;\n",
+ " event['data'] = 'scroll'\n",
+ " if (event.deltaY < 0) {\n",
+ " event.step = 1;\n",
+ " } else {\n",
+ " event.step = -1;\n",
+ " }\n",
+ " mouse_event_fn(event);\n",
+ " });\n",
+ "\n",
+ " canvas_div.append(canvas);\n",
+ " canvas_div.append(rubberband);\n",
+ "\n",
+ " this.rubberband = rubberband;\n",
+ " this.rubberband_canvas = rubberband[0];\n",
+ " this.rubberband_context = rubberband[0].getContext(\"2d\");\n",
+ " this.rubberband_context.strokeStyle = \"#000000\";\n",
+ "\n",
+ " this._resize_canvas = function(width, height) {\n",
+ " // Keep the size of the canvas, canvas container, and rubber band\n",
+ " // canvas in synch.\n",
+ " canvas_div.css('width', width)\n",
+ " canvas_div.css('height', height)\n",
+ "\n",
+ " canvas.attr('width', width);\n",
+ " canvas.attr('height', height);\n",
+ "\n",
+ " rubberband.attr('width', width);\n",
+ " rubberband.attr('height', height);\n",
+ " }\n",
+ "\n",
+ " // Set the figure to an initial 600x600px, this will subsequently be updated\n",
+ " // upon first draw.\n",
+ " this._resize_canvas(600, 600);\n",
+ "\n",
+ " // Disable right mouse context menu.\n",
+ " $(this.rubberband_canvas).bind(\"contextmenu\",function(e){\n",
+ " return false;\n",
+ " });\n",
+ "\n",
+ " function set_focus () {\n",
+ " canvas.focus();\n",
+ " canvas_div.focus();\n",
+ " }\n",
+ "\n",
+ " window.setTimeout(set_focus, 100);\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype._init_toolbar = function() {\n",
+ " var fig = this;\n",
+ "\n",
+ "    var nav_element = $('<div/>')\n",
+ " nav_element.attr('style', 'width: 100%');\n",
+ " this.root.append(nav_element);\n",
+ "\n",
+ " // Define a callback function for later on.\n",
+ " function toolbar_event(event) {\n",
+ " return fig.toolbar_button_onclick(event['data']);\n",
+ " }\n",
+ " function toolbar_mouse_event(event) {\n",
+ " return fig.toolbar_button_onmouseover(event['data']);\n",
+ " }\n",
+ "\n",
+ " for(var toolbar_ind in mpl.toolbar_items) {\n",
+ " var name = mpl.toolbar_items[toolbar_ind][0];\n",
+ " var tooltip = mpl.toolbar_items[toolbar_ind][1];\n",
+ " var image = mpl.toolbar_items[toolbar_ind][2];\n",
+ " var method_name = mpl.toolbar_items[toolbar_ind][3];\n",
+ "\n",
+ " if (!name) {\n",
+ " // put a spacer in here.\n",
+ " continue;\n",
+ " }\n",
+ "        var button = $('<button/>');\n",
+ " button.addClass('ui-button ui-widget ui-state-default ui-corner-all ' +\n",
+ " 'ui-button-icon-only');\n",
+ " button.attr('role', 'button');\n",
+ " button.attr('aria-disabled', 'false');\n",
+ " button.click(method_name, toolbar_event);\n",
+ " button.mouseover(tooltip, toolbar_mouse_event);\n",
+ "\n",
+ "        var icon_img = $('<span/>');\n",
+ " icon_img.addClass('ui-button-icon-primary ui-icon');\n",
+ " icon_img.addClass(image);\n",
+ " icon_img.addClass('ui-corner-all');\n",
+ "\n",
+ "        var tooltip_span = $('<span/>');\n",
+ " tooltip_span.addClass('ui-button-text');\n",
+ " tooltip_span.html(tooltip);\n",
+ "\n",
+ " button.append(icon_img);\n",
+ " button.append(tooltip_span);\n",
+ "\n",
+ " nav_element.append(button);\n",
+ " }\n",
+ "\n",
+ "    var fmt_picker_span = $('<span/>');\n",
+ "\n",
+ "    var fmt_picker = $('<select/>');\n",
+ " fmt_picker.addClass('mpl-toolbar-option ui-widget ui-widget-content');\n",
+ " fmt_picker_span.append(fmt_picker);\n",
+ " nav_element.append(fmt_picker_span);\n",
+ " this.format_dropdown = fmt_picker[0];\n",
+ "\n",
+ " for (var ind in mpl.extensions) {\n",
+ " var fmt = mpl.extensions[ind];\n",
+ "        var option = $(\n",
+ "            '<option/>', {selected: fmt === mpl.default_extension}).html(fmt);\n",
+ " fmt_picker.append(option)\n",
+ " }\n",
+ "\n",
+ " // Add hover states to the ui-buttons\n",
+ " $( \".ui-button\" ).hover(\n",
+ " function() { $(this).addClass(\"ui-state-hover\");},\n",
+ " function() { $(this).removeClass(\"ui-state-hover\");}\n",
+ " );\n",
+ "\n",
+ "    var status_bar = $('<span class=\"mpl-message\"/>');\n",
+ " nav_element.append(status_bar);\n",
+ " this.message = status_bar[0];\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.request_resize = function(x_pixels, y_pixels) {\n",
+ " // Request matplotlib to resize the figure. Matplotlib will then trigger a resize in the client,\n",
+ " // which will in turn request a refresh of the image.\n",
+ " this.send_message('resize', {'width': x_pixels, 'height': y_pixels});\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.send_message = function(type, properties) {\n",
+ " properties['type'] = type;\n",
+ " properties['figure_id'] = this.id;\n",
+ " this.ws.send(JSON.stringify(properties));\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.send_draw_message = function() {\n",
+ " if (!this.waiting) {\n",
+ " this.waiting = true;\n",
+ " this.ws.send(JSON.stringify({type: \"draw\", figure_id: this.id}));\n",
+ " }\n",
+ "}\n",
+ "\n",
+ "\n",
+ "mpl.figure.prototype.handle_save = function(fig, msg) {\n",
+ " var format_dropdown = fig.format_dropdown;\n",
+ " var format = format_dropdown.options[format_dropdown.selectedIndex].value;\n",
+ " fig.ondownload(fig, format);\n",
+ "}\n",
+ "\n",
+ "\n",
+ "mpl.figure.prototype.handle_resize = function(fig, msg) {\n",
+ " var size = msg['size'];\n",
+ " if (size[0] != fig.canvas.width || size[1] != fig.canvas.height) {\n",
+ " fig._resize_canvas(size[0], size[1]);\n",
+ " fig.send_message(\"refresh\", {});\n",
+ " };\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.handle_rubberband = function(fig, msg) {\n",
+ " var x0 = msg['x0'];\n",
+ " var y0 = fig.canvas.height - msg['y0'];\n",
+ " var x1 = msg['x1'];\n",
+ " var y1 = fig.canvas.height - msg['y1'];\n",
+ " x0 = Math.floor(x0) + 0.5;\n",
+ " y0 = Math.floor(y0) + 0.5;\n",
+ " x1 = Math.floor(x1) + 0.5;\n",
+ " y1 = Math.floor(y1) + 0.5;\n",
+ " var min_x = Math.min(x0, x1);\n",
+ " var min_y = Math.min(y0, y1);\n",
+ " var width = Math.abs(x1 - x0);\n",
+ " var height = Math.abs(y1 - y0);\n",
+ "\n",
+ " fig.rubberband_context.clearRect(\n",
+ " 0, 0, fig.canvas.width, fig.canvas.height);\n",
+ "\n",
+ " fig.rubberband_context.strokeRect(min_x, min_y, width, height);\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.handle_figure_label = function(fig, msg) {\n",
+ " // Updates the figure title.\n",
+ " fig.header.textContent = msg['label'];\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.handle_cursor = function(fig, msg) {\n",
+ " var cursor = msg['cursor'];\n",
+ " switch(cursor)\n",
+ " {\n",
+ " case 0:\n",
+ " cursor = 'pointer';\n",
+ " break;\n",
+ " case 1:\n",
+ " cursor = 'default';\n",
+ " break;\n",
+ " case 2:\n",
+ " cursor = 'crosshair';\n",
+ " break;\n",
+ " case 3:\n",
+ " cursor = 'move';\n",
+ " break;\n",
+ " }\n",
+ " fig.rubberband_canvas.style.cursor = cursor;\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.handle_message = function(fig, msg) {\n",
+ " fig.message.textContent = msg['message'];\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.handle_draw = function(fig, msg) {\n",
+ " // Request the server to send over a new figure.\n",
+ " fig.send_draw_message();\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.handle_image_mode = function(fig, msg) {\n",
+ " fig.image_mode = msg['mode'];\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.updated_canvas_event = function() {\n",
+ " // Called whenever the canvas gets updated.\n",
+ " this.send_message(\"ack\", {});\n",
+ "}\n",
+ "\n",
+ "// A function to construct a web socket function for onmessage handling.\n",
+ "// Called in the figure constructor.\n",
+ "mpl.figure.prototype._make_on_message_function = function(fig) {\n",
+ " return function socket_on_message(evt) {\n",
+ " if (evt.data instanceof Blob) {\n",
+ " /* FIXME: We get \"Resource interpreted as Image but\n",
+ " * transferred with MIME type text/plain:\" errors on\n",
+ " * Chrome. But how to set the MIME type? It doesn't seem\n",
+ " * to be part of the websocket stream */\n",
+ " evt.data.type = \"image/png\";\n",
+ "\n",
+ " /* Free the memory for the previous frames */\n",
+ " if (fig.imageObj.src) {\n",
+ " (window.URL || window.webkitURL).revokeObjectURL(\n",
+ " fig.imageObj.src);\n",
+ " }\n",
+ "\n",
+ " fig.imageObj.src = (window.URL || window.webkitURL).createObjectURL(\n",
+ " evt.data);\n",
+ " fig.updated_canvas_event();\n",
+ " fig.waiting = false;\n",
+ " return;\n",
+ " }\n",
+ " else if (typeof evt.data === 'string' && evt.data.slice(0, 21) == \"data:image/png;base64\") {\n",
+ " fig.imageObj.src = evt.data;\n",
+ " fig.updated_canvas_event();\n",
+ " fig.waiting = false;\n",
+ " return;\n",
+ " }\n",
+ "\n",
+ " var msg = JSON.parse(evt.data);\n",
+ " var msg_type = msg['type'];\n",
+ "\n",
+ " // Call the \"handle_{type}\" callback, which takes\n",
+ " // the figure and JSON message as its only arguments.\n",
+ " try {\n",
+ " var callback = fig[\"handle_\" + msg_type];\n",
+ " } catch (e) {\n",
+ " console.log(\"No handler for the '\" + msg_type + \"' message type: \", msg);\n",
+ " return;\n",
+ " }\n",
+ "\n",
+ " if (callback) {\n",
+ " try {\n",
+ " // console.log(\"Handling '\" + msg_type + \"' message: \", msg);\n",
+ " callback(fig, msg);\n",
+ " } catch (e) {\n",
+ " console.log(\"Exception inside the 'handler_\" + msg_type + \"' callback:\", e, e.stack, msg);\n",
+ " }\n",
+ " }\n",
+ " };\n",
+ "}\n",
+ "\n",
+ "// from http://stackoverflow.com/questions/1114465/getting-mouse-location-in-canvas\n",
+ "mpl.findpos = function(e) {\n",
+ " //this section is from http://www.quirksmode.org/js/events_properties.html\n",
+ " var targ;\n",
+ " if (!e)\n",
+ " e = window.event;\n",
+ " if (e.target)\n",
+ " targ = e.target;\n",
+ " else if (e.srcElement)\n",
+ " targ = e.srcElement;\n",
+ " if (targ.nodeType == 3) // defeat Safari bug\n",
+ " targ = targ.parentNode;\n",
+ "\n",
+ " // jQuery normalizes the pageX and pageY\n",
+ " // pageX,Y are the mouse positions relative to the document\n",
+ " // offset() returns the position of the element relative to the document\n",
+ " var x = e.pageX - $(targ).offset().left;\n",
+ " var y = e.pageY - $(targ).offset().top;\n",
+ "\n",
+ " return {\"x\": x, \"y\": y};\n",
+ "};\n",
+ "\n",
+ "/*\n",
+ " * return a copy of an object with only non-object keys\n",
+ " * we need this to avoid circular references\n",
+ " * http://stackoverflow.com/a/24161582/3208463\n",
+ " */\n",
+ "function simpleKeys (original) {\n",
+ " return Object.keys(original).reduce(function (obj, key) {\n",
+ " if (typeof original[key] !== 'object')\n",
+ " obj[key] = original[key]\n",
+ " return obj;\n",
+ " }, {});\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.mouse_event = function(event, name) {\n",
+ " var canvas_pos = mpl.findpos(event)\n",
+ "\n",
+ " if (name === 'button_press')\n",
+ " {\n",
+ " this.canvas.focus();\n",
+ " this.canvas_div.focus();\n",
+ " }\n",
+ "\n",
+ " var x = canvas_pos.x;\n",
+ " var y = canvas_pos.y;\n",
+ "\n",
+ " this.send_message(name, {x: x, y: y, button: event.button,\n",
+ " step: event.step,\n",
+ " guiEvent: simpleKeys(event)});\n",
+ "\n",
+ " /* This prevents the web browser from automatically changing to\n",
+ " * the text insertion cursor when the button is pressed. We want\n",
+ " * to control all of the cursor setting manually through the\n",
+ " * 'cursor' event from matplotlib */\n",
+ " event.preventDefault();\n",
+ " return false;\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype._key_event_extra = function(event, name) {\n",
+ " // Handle any extra behaviour associated with a key event\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.key_event = function(event, name) {\n",
+ "\n",
+ " // Prevent repeat events\n",
+ " if (name == 'key_press')\n",
+ " {\n",
+ " if (event.which === this._key)\n",
+ " return;\n",
+ " else\n",
+ " this._key = event.which;\n",
+ " }\n",
+ " if (name == 'key_release')\n",
+ " this._key = null;\n",
+ "\n",
+ " var value = '';\n",
+ " if (event.ctrlKey && event.which != 17)\n",
+ " value += \"ctrl+\";\n",
+ " if (event.altKey && event.which != 18)\n",
+ " value += \"alt+\";\n",
+ " if (event.shiftKey && event.which != 16)\n",
+ " value += \"shift+\";\n",
+ "\n",
+ " value += 'k';\n",
+ " value += event.which.toString();\n",
+ "\n",
+ " this._key_event_extra(event, name);\n",
+ "\n",
+ " this.send_message(name, {key: value,\n",
+ " guiEvent: simpleKeys(event)});\n",
+ " return false;\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.toolbar_button_onclick = function(name) {\n",
+ " if (name == 'download') {\n",
+ " this.handle_save(this, null);\n",
+ " } else {\n",
+ " this.send_message(\"toolbar_button\", {name: name});\n",
+ " }\n",
+ "};\n",
+ "\n",
+ "mpl.figure.prototype.toolbar_button_onmouseover = function(tooltip) {\n",
+ " this.message.textContent = tooltip;\n",
+ "};\n",
+ "mpl.toolbar_items = [[\"Home\", \"Reset original view\", \"fa fa-home icon-home\", \"home\"], [\"Back\", \"Back to previous view\", \"fa fa-arrow-left icon-arrow-left\", \"back\"], [\"Forward\", \"Forward to next view\", \"fa fa-arrow-right icon-arrow-right\", \"forward\"], [\"\", \"\", \"\", \"\"], [\"Pan\", \"Pan axes with left mouse, zoom with right\", \"fa fa-arrows icon-move\", \"pan\"], [\"Zoom\", \"Zoom to rectangle\", \"fa fa-square-o icon-check-empty\", \"zoom\"], [\"\", \"\", \"\", \"\"], [\"Download\", \"Download plot\", \"fa fa-floppy-o icon-save\", \"download\"]];\n",
+ "\n",
+ "mpl.extensions = [\"eps\", \"jpeg\", \"pdf\", \"png\", \"ps\", \"raw\", \"svg\", \"tif\"];\n",
+ "\n",
+ "mpl.default_extension = \"png\";var comm_websocket_adapter = function(comm) {\n",
+ " // Create a \"websocket\"-like object which calls the given IPython comm\n",
+ " // object with the appropriate methods. Currently this is a non binary\n",
+ " // socket, so there is still some room for performance tuning.\n",
+ " var ws = {};\n",
+ "\n",
+ " ws.close = function() {\n",
+ " comm.close()\n",
+ " };\n",
+ " ws.send = function(m) {\n",
+ " //console.log('sending', m);\n",
+ " comm.send(m);\n",
+ " };\n",
+ " // Register the callback with on_msg.\n",
+ " comm.on_msg(function(msg) {\n",
+ " //console.log('receiving', msg['content']['data'], msg);\n",
+ "        // Pass the mpl event to the overridden (by mpl) onmessage function.\n",
+ " ws.onmessage(msg['content']['data'])\n",
+ " });\n",
+ " return ws;\n",
+ "}\n",
+ "\n",
+ "mpl.mpl_figure_comm = function(comm, msg) {\n",
+ " // This is the function which gets called when the mpl process\n",
+ " // starts-up an IPython Comm through the \"matplotlib\" channel.\n",
+ "\n",
+ " var id = msg.content.data.id;\n",
+ " // Get hold of the div created by the display call when the Comm\n",
+ " // socket was opened in Python.\n",
+ " var element = $(\"#\" + id);\n",
+ " var ws_proxy = comm_websocket_adapter(comm)\n",
+ "\n",
+ " function ondownload(figure, format) {\n",
+ " window.open(figure.imageObj.src);\n",
+ " }\n",
+ "\n",
+ " var fig = new mpl.figure(id, ws_proxy,\n",
+ " ondownload,\n",
+ " element.get(0));\n",
+ "\n",
+ " // Call onopen now - mpl needs it, as it is assuming we've passed it a real\n",
+ " // web socket which is closed, not our websocket->open comm proxy.\n",
+ " ws_proxy.onopen();\n",
+ "\n",
+ " fig.parent_element = element.get(0);\n",
+ "    fig.cell_info = mpl.find_output_cell(\"<div id='\" + id + \"'></div>\");\n",
+ " if (!fig.cell_info) {\n",
+ " console.error(\"Failed to find cell for figure\", id, fig);\n",
+ " return;\n",
+ " }\n",
+ "\n",
+ " var output_index = fig.cell_info[2]\n",
+ " var cell = fig.cell_info[0];\n",
+ "\n",
+ "};\n",
+ "\n",
+ "mpl.figure.prototype.handle_close = function(fig, msg) {\n",
+ " fig.root.unbind('remove')\n",
+ "\n",
+ " // Update the output cell to use the data from the current canvas.\n",
+ " fig.push_to_output();\n",
+ " var dataURL = fig.canvas.toDataURL();\n",
+ " // Re-enable the keyboard manager in IPython - without this line, in FF,\n",
+ " // the notebook keyboard shortcuts fail.\n",
+ " IPython.keyboard_manager.enable()\n",
+ "    $(fig.parent_element).html('<img src=\"' + dataURL + '\">');\n",
+ " fig.close_ws(fig, msg);\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.close_ws = function(fig, msg){\n",
+ " fig.send_message('closing', msg);\n",
+ " // fig.ws.close()\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.push_to_output = function(remove_interactive) {\n",
+ " // Turn the data on the canvas into data in the output cell.\n",
+ " var dataURL = this.canvas.toDataURL();\n",
+ "    this.cell_info[1]['text/html'] = '<img src=\"' + dataURL + '\">';\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.updated_canvas_event = function() {\n",
+ " // Tell IPython that the notebook contents must change.\n",
+ " IPython.notebook.set_dirty(true);\n",
+ " this.send_message(\"ack\", {});\n",
+ " var fig = this;\n",
+ " // Wait a second, then push the new image to the DOM so\n",
+ " // that it is saved nicely (might be nice to debounce this).\n",
+ " setTimeout(function () { fig.push_to_output() }, 1000);\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype._init_toolbar = function() {\n",
+ " var fig = this;\n",
+ "\n",
+ "    var nav_element = $('<div/>')\n",
+ " nav_element.attr('style', 'width: 100%');\n",
+ " this.root.append(nav_element);\n",
+ "\n",
+ " // Define a callback function for later on.\n",
+ " function toolbar_event(event) {\n",
+ " return fig.toolbar_button_onclick(event['data']);\n",
+ " }\n",
+ " function toolbar_mouse_event(event) {\n",
+ " return fig.toolbar_button_onmouseover(event['data']);\n",
+ " }\n",
+ "\n",
+ " for(var toolbar_ind in mpl.toolbar_items){\n",
+ " var name = mpl.toolbar_items[toolbar_ind][0];\n",
+ " var tooltip = mpl.toolbar_items[toolbar_ind][1];\n",
+ " var image = mpl.toolbar_items[toolbar_ind][2];\n",
+ " var method_name = mpl.toolbar_items[toolbar_ind][3];\n",
+ "\n",
+ " if (!name) { continue; };\n",
+ "\n",
+ "        var button = $('<button class=\"btn btn-default\" href=\"#\" title=\"' + name + '\"><i class=\"fa ' + image + ' fa-lg\"></i></button>');\n",
+ " button.click(method_name, toolbar_event);\n",
+ " button.mouseover(tooltip, toolbar_mouse_event);\n",
+ " nav_element.append(button);\n",
+ " }\n",
+ "\n",
+ " // Add the status bar.\n",
+ "    var status_bar = $('<span class=\"mpl-message\" style=\"text-align:right; float: right;\"/>');\n",
+ " nav_element.append(status_bar);\n",
+ " this.message = status_bar[0];\n",
+ "\n",
+ " // Add the close button to the window.\n",
+ "    var buttongrp = $('<div class=\"btn-group inline pull-right\"></div>');\n",
+ "    var button = $('<button class=\"btn btn-mini btn-primary\" href=\"#\" title=\"Stop Interaction\"><i class=\"fa fa-power-off icon-remove icon-large\"></i></button>');\n",
+ " button.click(function (evt) { fig.handle_close(fig, {}); } );\n",
+ " button.mouseover('Stop Interaction', toolbar_mouse_event);\n",
+ " buttongrp.append(button);\n",
+ " var titlebar = this.root.find($('.ui-dialog-titlebar'));\n",
+ " titlebar.prepend(buttongrp);\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype._root_extra_style = function(el){\n",
+ " var fig = this\n",
+ " el.on(\"remove\", function(){\n",
+ "\tfig.close_ws(fig, {});\n",
+ " });\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype._canvas_extra_style = function(el){\n",
+ "    // this is important to make the div 'focusable'\n",
+ "    el.attr('tabindex', 0)\n",
+ "    // reach out to IPython and tell the keyboard manager to turn itself\n",
+ "    // off when our div gets focus\n",
+ "\n",
+ " // location in version 3\n",
+ " if (IPython.notebook.keyboard_manager) {\n",
+ " IPython.notebook.keyboard_manager.register_events(el);\n",
+ " }\n",
+ " else {\n",
+ " // location in version 2\n",
+ " IPython.keyboard_manager.register_events(el);\n",
+ " }\n",
+ "\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype._key_event_extra = function(event, name) {\n",
+ " var manager = IPython.notebook.keyboard_manager;\n",
+ " if (!manager)\n",
+ " manager = IPython.keyboard_manager;\n",
+ "\n",
+ " // Check for shift+enter\n",
+ " if (event.shiftKey && event.which == 13) {\n",
+ " this.canvas_div.blur();\n",
+ " event.shiftKey = false;\n",
+ " // Send a \"J\" for go to next cell\n",
+ " event.which = 74;\n",
+ " event.keyCode = 74;\n",
+ " manager.command_mode();\n",
+ " manager.handle_keydown(event);\n",
+ " }\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.handle_save = function(fig, msg) {\n",
+ " fig.ondownload(fig, null);\n",
+ "}\n",
+ "\n",
+ "\n",
+ "mpl.find_output_cell = function(html_output) {\n",
+ " // Return the cell and output element which can be found *uniquely* in the notebook.\n",
+ " // Note - this is a bit hacky, but it is done because the \"notebook_saving.Notebook\"\n",
+ " // IPython event is triggered only after the cells have been serialised, which for\n",
+ " // our purposes (turning an active figure into a static one), is too late.\n",
+ " var cells = IPython.notebook.get_cells();\n",
+ " var ncells = cells.length;\n",
+ "    for (var i=0; i<ncells; i++) {\n",
+ "        var cell = cells[i];\n",
+ "        if (cell.cell_type === 'code'){\n",
+ "            for (var j=0; j<cell.output_area.outputs.length; j++) {\n",
+ "                var data = cell.output_area.outputs[j];\n",
+ "                if (data.data) {\n",
+ "                    // IPython >= 3 moved mimebundle to data attribute of output\n",
+ "                    data = data.data;\n",
+ "                }\n",
+ "                if (data['text/html'] == html_output) {\n",
+ "                    return [cell, data, j];\n",
+ "                }\n",
+ "            }\n",
+ "        }\n",
+ "    }\n",
+ "}\n",
+ "\n",
+ "// Register the function which deals with the matplotlib target/channel.\n",
+ "// The kernel may be null if the page has been refreshed.\n",
+ "if (IPython.notebook.kernel != null) {\n",
+ " IPython.notebook.kernel.comm_manager.register_target('matplotlib', mpl.mpl_figure_comm);\n",
+ "}\n"
+ ],
+ "text/plain": [
+ "<IPython.core.display.Javascript object>"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/html": [
+ ""
+ ],
+ "text/plain": [
+ "<IPython.core.display.HTML object>"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "[]"
+ ]
+ },
+ "execution_count": 12,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "fig, ax = plt.subplots()\n",
+ "ax.scatter(tr_data.transpose()[0], tr_data.transpose()[1], marker = 'o', s = 100, c = tr_features, cmap=plt.cm.coolwarm )\n",
+ "plt.plot()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "points=tf.Variable(data)\n",
+ "cluster_assignments = tf.Variable(tf.zeros([N], dtype=tf.int64))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "sess = tf.Session()\n",
+ "sess.run(tf.initialize_all_variables())"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "metadata": {
+ "collapsed": false,
+ "scrolled": true
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0]\n"
+ ]
+ },
+ {
+ "data": {
+ "application/javascript": [
+ "/* Put everything inside the global mpl namespace */\n",
+ "window.mpl = {};\n",
+ "\n",
+ "mpl.get_websocket_type = function() {\n",
+ " if (typeof(WebSocket) !== 'undefined') {\n",
+ " return WebSocket;\n",
+ " } else if (typeof(MozWebSocket) !== 'undefined') {\n",
+ " return MozWebSocket;\n",
+ " } else {\n",
+ " alert('Your browser does not have WebSocket support.' +\n",
+ " 'Please try Chrome, Safari or Firefox ≥ 6. ' +\n",
+ " 'Firefox 4 and 5 are also supported but you ' +\n",
+ " 'have to enable WebSockets in about:config.');\n",
+ " };\n",
+ "}\n",
+ "\n",
+ "mpl.figure = function(figure_id, websocket, ondownload, parent_element) {\n",
+ " this.id = figure_id;\n",
+ "\n",
+ " this.ws = websocket;\n",
+ "\n",
+ " this.supports_binary = (this.ws.binaryType != undefined);\n",
+ "\n",
+ " if (!this.supports_binary) {\n",
+ " var warnings = document.getElementById(\"mpl-warnings\");\n",
+ " if (warnings) {\n",
+ " warnings.style.display = 'block';\n",
+ " warnings.textContent = (\n",
+ " \"This browser does not support binary websocket messages. \" +\n",
+ " \"Performance may be slow.\");\n",
+ " }\n",
+ " }\n",
+ "\n",
+ " this.imageObj = new Image();\n",
+ "\n",
+ " this.context = undefined;\n",
+ " this.message = undefined;\n",
+ " this.canvas = undefined;\n",
+ " this.rubberband_canvas = undefined;\n",
+ " this.rubberband_context = undefined;\n",
+ " this.format_dropdown = undefined;\n",
+ "\n",
+ " this.image_mode = 'full';\n",
+ "\n",
+ "    this.root = $('<div/>');\n",
+ " this._root_extra_style(this.root)\n",
+ " this.root.attr('style', 'display: inline-block');\n",
+ "\n",
+ " $(parent_element).append(this.root);\n",
+ "\n",
+ " this._init_header(this);\n",
+ " this._init_canvas(this);\n",
+ " this._init_toolbar(this);\n",
+ "\n",
+ " var fig = this;\n",
+ "\n",
+ " this.waiting = false;\n",
+ "\n",
+ " this.ws.onopen = function () {\n",
+ " fig.send_message(\"supports_binary\", {value: fig.supports_binary});\n",
+ " fig.send_message(\"send_image_mode\", {});\n",
+ " fig.send_message(\"refresh\", {});\n",
+ " }\n",
+ "\n",
+ " this.imageObj.onload = function() {\n",
+ " if (fig.image_mode == 'full') {\n",
+ " // Full images could contain transparency (where diff images\n",
+ " // almost always do), so we need to clear the canvas so that\n",
+ " // there is no ghosting.\n",
+ " fig.context.clearRect(0, 0, fig.canvas.width, fig.canvas.height);\n",
+ " }\n",
+ " fig.context.drawImage(fig.imageObj, 0, 0);\n",
+ " };\n",
+ "\n",
+ " this.imageObj.onunload = function() {\n",
+ " this.ws.close();\n",
+ " }\n",
+ "\n",
+ " this.ws.onmessage = this._make_on_message_function(this);\n",
+ "\n",
+ " this.ondownload = ondownload;\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype._init_header = function() {\n",
+ "    var titlebar = $(\n",
+ "        '<div class=\"ui-dialog-titlebar ui-widget-header ui-corner-all ' +\n",
+ "        'ui-helper-clearfix\"/>');\n",
+ "    var titletext = $(\n",
+ "        '<div class=\"ui-dialog-title\" style=\"width: 100%; ' +\n",
+ "        'text-align: center; padding: 3px;\"/>');\n",
+ " titlebar.append(titletext)\n",
+ " this.root.append(titlebar);\n",
+ " this.header = titletext[0];\n",
+ "}\n",
+ "\n",
+ "\n",
+ "\n",
+ "mpl.figure.prototype._canvas_extra_style = function(canvas_div) {\n",
+ "\n",
+ "}\n",
+ "\n",
+ "\n",
+ "mpl.figure.prototype._root_extra_style = function(canvas_div) {\n",
+ "\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype._init_canvas = function() {\n",
+ " var fig = this;\n",
+ "\n",
+ "    var canvas_div = $('<div/>');\n",
+ "\n",
+ " canvas_div.attr('style', 'position: relative; clear: both; outline: 0');\n",
+ "\n",
+ " function canvas_keyboard_event(event) {\n",
+ " return fig.key_event(event, event['data']);\n",
+ " }\n",
+ "\n",
+ " canvas_div.keydown('key_press', canvas_keyboard_event);\n",
+ " canvas_div.keyup('key_release', canvas_keyboard_event);\n",
+ " this.canvas_div = canvas_div\n",
+ " this._canvas_extra_style(canvas_div)\n",
+ " this.root.append(canvas_div);\n",
+ "\n",
+ "    var canvas = $('<canvas/>');\n",
+ " canvas.addClass('mpl-canvas');\n",
+ " canvas.attr('style', \"left: 0; top: 0; z-index: 0; outline: 0\")\n",
+ "\n",
+ " this.canvas = canvas[0];\n",
+ " this.context = canvas[0].getContext(\"2d\");\n",
+ "\n",
+ "    var rubberband = $('<canvas/>');\n",
+ " rubberband.attr('style', \"position: absolute; left: 0; top: 0; z-index: 1;\")\n",
+ "\n",
+ " var pass_mouse_events = true;\n",
+ "\n",
+ " canvas_div.resizable({\n",
+ " start: function(event, ui) {\n",
+ " pass_mouse_events = false;\n",
+ " },\n",
+ " resize: function(event, ui) {\n",
+ " fig.request_resize(ui.size.width, ui.size.height);\n",
+ " },\n",
+ " stop: function(event, ui) {\n",
+ " pass_mouse_events = true;\n",
+ " fig.request_resize(ui.size.width, ui.size.height);\n",
+ " },\n",
+ " });\n",
+ "\n",
+ " function mouse_event_fn(event) {\n",
+ " if (pass_mouse_events)\n",
+ " return fig.mouse_event(event, event['data']);\n",
+ " }\n",
+ "\n",
+ " rubberband.mousedown('button_press', mouse_event_fn);\n",
+ " rubberband.mouseup('button_release', mouse_event_fn);\n",
+ " // Throttle sequential mouse events to 1 every 20ms.\n",
+ " rubberband.mousemove('motion_notify', mouse_event_fn);\n",
+ "\n",
+ " rubberband.mouseenter('figure_enter', mouse_event_fn);\n",
+ " rubberband.mouseleave('figure_leave', mouse_event_fn);\n",
+ "\n",
+ " canvas_div.on(\"wheel\", function (event) {\n",
+ " event = event.originalEvent;\n",
+ " event['data'] = 'scroll'\n",
+ " if (event.deltaY < 0) {\n",
+ " event.step = 1;\n",
+ " } else {\n",
+ " event.step = -1;\n",
+ " }\n",
+ " mouse_event_fn(event);\n",
+ " });\n",
+ "\n",
+ " canvas_div.append(canvas);\n",
+ " canvas_div.append(rubberband);\n",
+ "\n",
+ " this.rubberband = rubberband;\n",
+ " this.rubberband_canvas = rubberband[0];\n",
+ " this.rubberband_context = rubberband[0].getContext(\"2d\");\n",
+ " this.rubberband_context.strokeStyle = \"#000000\";\n",
+ "\n",
+ " this._resize_canvas = function(width, height) {\n",
+ " // Keep the size of the canvas, canvas container, and rubber band\n",
+ " // canvas in synch.\n",
+ " canvas_div.css('width', width)\n",
+ " canvas_div.css('height', height)\n",
+ "\n",
+ " canvas.attr('width', width);\n",
+ " canvas.attr('height', height);\n",
+ "\n",
+ " rubberband.attr('width', width);\n",
+ " rubberband.attr('height', height);\n",
+ " }\n",
+ "\n",
+ " // Set the figure to an initial 600x600px, this will subsequently be updated\n",
+ " // upon first draw.\n",
+ " this._resize_canvas(600, 600);\n",
+ "\n",
+ " // Disable right mouse context menu.\n",
+ " $(this.rubberband_canvas).bind(\"contextmenu\",function(e){\n",
+ " return false;\n",
+ " });\n",
+ "\n",
+ " function set_focus () {\n",
+ " canvas.focus();\n",
+ " canvas_div.focus();\n",
+ " }\n",
+ "\n",
+ " window.setTimeout(set_focus, 100);\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype._init_toolbar = function() {\n",
+ " var fig = this;\n",
+ "\n",
+ "    var nav_element = $('<div/>')\n",
+ " nav_element.attr('style', 'width: 100%');\n",
+ " this.root.append(nav_element);\n",
+ "\n",
+ " // Define a callback function for later on.\n",
+ " function toolbar_event(event) {\n",
+ " return fig.toolbar_button_onclick(event['data']);\n",
+ " }\n",
+ " function toolbar_mouse_event(event) {\n",
+ " return fig.toolbar_button_onmouseover(event['data']);\n",
+ " }\n",
+ "\n",
+ " for(var toolbar_ind in mpl.toolbar_items) {\n",
+ " var name = mpl.toolbar_items[toolbar_ind][0];\n",
+ " var tooltip = mpl.toolbar_items[toolbar_ind][1];\n",
+ " var image = mpl.toolbar_items[toolbar_ind][2];\n",
+ " var method_name = mpl.toolbar_items[toolbar_ind][3];\n",
+ "\n",
+ " if (!name) {\n",
+ " // put a spacer in here.\n",
+ " continue;\n",
+ " }\n",
+ "        var button = $('<button/>');\n",
+ " button.addClass('ui-button ui-widget ui-state-default ui-corner-all ' +\n",
+ " 'ui-button-icon-only');\n",
+ " button.attr('role', 'button');\n",
+ " button.attr('aria-disabled', 'false');\n",
+ " button.click(method_name, toolbar_event);\n",
+ " button.mouseover(tooltip, toolbar_mouse_event);\n",
+ "\n",
+ "        var icon_img = $('<span/>');\n",
+ " icon_img.addClass('ui-button-icon-primary ui-icon');\n",
+ " icon_img.addClass(image);\n",
+ " icon_img.addClass('ui-corner-all');\n",
+ "\n",
+ "        var tooltip_span = $('<span/>');\n",
+ " tooltip_span.addClass('ui-button-text');\n",
+ " tooltip_span.html(tooltip);\n",
+ "\n",
+ " button.append(icon_img);\n",
+ " button.append(tooltip_span);\n",
+ "\n",
+ " nav_element.append(button);\n",
+ " }\n",
+ "\n",
+ "    var fmt_picker_span = $('<span/>');\n",
+ "\n",
+ "    var fmt_picker = $('<select/>');\n",
+ " fmt_picker.addClass('mpl-toolbar-option ui-widget ui-widget-content');\n",
+ " fmt_picker_span.append(fmt_picker);\n",
+ " nav_element.append(fmt_picker_span);\n",
+ " this.format_dropdown = fmt_picker[0];\n",
+ "\n",
+ " for (var ind in mpl.extensions) {\n",
+ " var fmt = mpl.extensions[ind];\n",
+ "        var option = $(\n",
+ "            '<option/>', {selected: fmt === mpl.default_extension}).html(fmt);\n",
+ " fmt_picker.append(option)\n",
+ " }\n",
+ "\n",
+ " // Add hover states to the ui-buttons\n",
+ " $( \".ui-button\" ).hover(\n",
+ " function() { $(this).addClass(\"ui-state-hover\");},\n",
+ " function() { $(this).removeClass(\"ui-state-hover\");}\n",
+ " );\n",
+ "\n",
+ "    var status_bar = $('<span class=\"mpl-message\"/>');\n",
+ " nav_element.append(status_bar);\n",
+ " this.message = status_bar[0];\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.request_resize = function(x_pixels, y_pixels) {\n",
+ " // Request matplotlib to resize the figure. Matplotlib will then trigger a resize in the client,\n",
+ " // which will in turn request a refresh of the image.\n",
+ " this.send_message('resize', {'width': x_pixels, 'height': y_pixels});\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.send_message = function(type, properties) {\n",
+ " properties['type'] = type;\n",
+ " properties['figure_id'] = this.id;\n",
+ " this.ws.send(JSON.stringify(properties));\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.send_draw_message = function() {\n",
+ " if (!this.waiting) {\n",
+ " this.waiting = true;\n",
+ " this.ws.send(JSON.stringify({type: \"draw\", figure_id: this.id}));\n",
+ " }\n",
+ "}\n",
+ "\n",
+ "\n",
+ "mpl.figure.prototype.handle_save = function(fig, msg) {\n",
+ " var format_dropdown = fig.format_dropdown;\n",
+ " var format = format_dropdown.options[format_dropdown.selectedIndex].value;\n",
+ " fig.ondownload(fig, format);\n",
+ "}\n",
+ "\n",
+ "\n",
+ "mpl.figure.prototype.handle_resize = function(fig, msg) {\n",
+ " var size = msg['size'];\n",
+ " if (size[0] != fig.canvas.width || size[1] != fig.canvas.height) {\n",
+ " fig._resize_canvas(size[0], size[1]);\n",
+ " fig.send_message(\"refresh\", {});\n",
+ " };\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.handle_rubberband = function(fig, msg) {\n",
+ " var x0 = msg['x0'];\n",
+ " var y0 = fig.canvas.height - msg['y0'];\n",
+ " var x1 = msg['x1'];\n",
+ " var y1 = fig.canvas.height - msg['y1'];\n",
+ " x0 = Math.floor(x0) + 0.5;\n",
+ " y0 = Math.floor(y0) + 0.5;\n",
+ " x1 = Math.floor(x1) + 0.5;\n",
+ " y1 = Math.floor(y1) + 0.5;\n",
+ " var min_x = Math.min(x0, x1);\n",
+ " var min_y = Math.min(y0, y1);\n",
+ " var width = Math.abs(x1 - x0);\n",
+ " var height = Math.abs(y1 - y0);\n",
+ "\n",
+ " fig.rubberband_context.clearRect(\n",
+ " 0, 0, fig.canvas.width, fig.canvas.height);\n",
+ "\n",
+ " fig.rubberband_context.strokeRect(min_x, min_y, width, height);\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.handle_figure_label = function(fig, msg) {\n",
+ " // Updates the figure title.\n",
+ " fig.header.textContent = msg['label'];\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.handle_cursor = function(fig, msg) {\n",
+ " var cursor = msg['cursor'];\n",
+ " switch(cursor)\n",
+ " {\n",
+ " case 0:\n",
+ " cursor = 'pointer';\n",
+ " break;\n",
+ " case 1:\n",
+ " cursor = 'default';\n",
+ " break;\n",
+ " case 2:\n",
+ " cursor = 'crosshair';\n",
+ " break;\n",
+ " case 3:\n",
+ " cursor = 'move';\n",
+ " break;\n",
+ " }\n",
+ " fig.rubberband_canvas.style.cursor = cursor;\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.handle_message = function(fig, msg) {\n",
+ " fig.message.textContent = msg['message'];\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.handle_draw = function(fig, msg) {\n",
+ " // Request the server to send over a new figure.\n",
+ " fig.send_draw_message();\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.handle_image_mode = function(fig, msg) {\n",
+ " fig.image_mode = msg['mode'];\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.updated_canvas_event = function() {\n",
+ " // Called whenever the canvas gets updated.\n",
+ " this.send_message(\"ack\", {});\n",
+ "}\n",
+ "\n",
+ "// A function to construct a web socket function for onmessage handling.\n",
+ "// Called in the figure constructor.\n",
+ "mpl.figure.prototype._make_on_message_function = function(fig) {\n",
+ " return function socket_on_message(evt) {\n",
+ " if (evt.data instanceof Blob) {\n",
+ " /* FIXME: We get \"Resource interpreted as Image but\n",
+ " * transferred with MIME type text/plain:\" errors on\n",
+ " * Chrome. But how to set the MIME type? It doesn't seem\n",
+ " * to be part of the websocket stream */\n",
+ " evt.data.type = \"image/png\";\n",
+ "\n",
+ " /* Free the memory for the previous frames */\n",
+ " if (fig.imageObj.src) {\n",
+ " (window.URL || window.webkitURL).revokeObjectURL(\n",
+ " fig.imageObj.src);\n",
+ " }\n",
+ "\n",
+ " fig.imageObj.src = (window.URL || window.webkitURL).createObjectURL(\n",
+ " evt.data);\n",
+ " fig.updated_canvas_event();\n",
+ " fig.waiting = false;\n",
+ " return;\n",
+ " }\n",
+ " else if (typeof evt.data === 'string' && evt.data.slice(0, 21) == \"data:image/png;base64\") {\n",
+ " fig.imageObj.src = evt.data;\n",
+ " fig.updated_canvas_event();\n",
+ " fig.waiting = false;\n",
+ " return;\n",
+ " }\n",
+ "\n",
+ " var msg = JSON.parse(evt.data);\n",
+ " var msg_type = msg['type'];\n",
+ "\n",
+ " // Call the \"handle_{type}\" callback, which takes\n",
+ " // the figure and JSON message as its only arguments.\n",
+ " try {\n",
+ " var callback = fig[\"handle_\" + msg_type];\n",
+ " } catch (e) {\n",
+ " console.log(\"No handler for the '\" + msg_type + \"' message type: \", msg);\n",
+ " return;\n",
+ " }\n",
+ "\n",
+ " if (callback) {\n",
+ " try {\n",
+ " // console.log(\"Handling '\" + msg_type + \"' message: \", msg);\n",
+ " callback(fig, msg);\n",
+ " } catch (e) {\n",
+ " console.log(\"Exception inside the 'handler_\" + msg_type + \"' callback:\", e, e.stack, msg);\n",
+ " }\n",
+ " }\n",
+ " };\n",
+ "}\n",
+ "\n",
+ "// from http://stackoverflow.com/questions/1114465/getting-mouse-location-in-canvas\n",
+ "mpl.findpos = function(e) {\n",
+ " //this section is from http://www.quirksmode.org/js/events_properties.html\n",
+ " var targ;\n",
+ " if (!e)\n",
+ " e = window.event;\n",
+ " if (e.target)\n",
+ " targ = e.target;\n",
+ " else if (e.srcElement)\n",
+ " targ = e.srcElement;\n",
+ " if (targ.nodeType == 3) // defeat Safari bug\n",
+ " targ = targ.parentNode;\n",
+ "\n",
+ " // jQuery normalizes the pageX and pageY\n",
+ " // pageX,Y are the mouse positions relative to the document\n",
+ " // offset() returns the position of the element relative to the document\n",
+ " var x = e.pageX - $(targ).offset().left;\n",
+ " var y = e.pageY - $(targ).offset().top;\n",
+ "\n",
+ " return {\"x\": x, \"y\": y};\n",
+ "};\n",
+ "\n",
+ "/*\n",
+ " * return a copy of an object with only non-object keys\n",
+ " * we need this to avoid circular references\n",
+ " * http://stackoverflow.com/a/24161582/3208463\n",
+ " */\n",
+ "function simpleKeys (original) {\n",
+ " return Object.keys(original).reduce(function (obj, key) {\n",
+ " if (typeof original[key] !== 'object')\n",
+ " obj[key] = original[key]\n",
+ " return obj;\n",
+ " }, {});\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.mouse_event = function(event, name) {\n",
+ " var canvas_pos = mpl.findpos(event)\n",
+ "\n",
+ " if (name === 'button_press')\n",
+ " {\n",
+ " this.canvas.focus();\n",
+ " this.canvas_div.focus();\n",
+ " }\n",
+ "\n",
+ " var x = canvas_pos.x;\n",
+ " var y = canvas_pos.y;\n",
+ "\n",
+ " this.send_message(name, {x: x, y: y, button: event.button,\n",
+ " step: event.step,\n",
+ " guiEvent: simpleKeys(event)});\n",
+ "\n",
+ " /* This prevents the web browser from automatically changing to\n",
+ " * the text insertion cursor when the button is pressed. We want\n",
+ " * to control all of the cursor setting manually through the\n",
+ " * 'cursor' event from matplotlib */\n",
+ " event.preventDefault();\n",
+ " return false;\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype._key_event_extra = function(event, name) {\n",
+ " // Handle any extra behaviour associated with a key event\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.key_event = function(event, name) {\n",
+ "\n",
+ " // Prevent repeat events\n",
+ " if (name == 'key_press')\n",
+ " {\n",
+ " if (event.which === this._key)\n",
+ " return;\n",
+ " else\n",
+ " this._key = event.which;\n",
+ " }\n",
+ " if (name == 'key_release')\n",
+ " this._key = null;\n",
+ "\n",
+ " var value = '';\n",
+ " if (event.ctrlKey && event.which != 17)\n",
+ " value += \"ctrl+\";\n",
+ " if (event.altKey && event.which != 18)\n",
+ " value += \"alt+\";\n",
+ " if (event.shiftKey && event.which != 16)\n",
+ " value += \"shift+\";\n",
+ "\n",
+ " value += 'k';\n",
+ " value += event.which.toString();\n",
+ "\n",
+ " this._key_event_extra(event, name);\n",
+ "\n",
+ " this.send_message(name, {key: value,\n",
+ " guiEvent: simpleKeys(event)});\n",
+ " return false;\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.toolbar_button_onclick = function(name) {\n",
+ " if (name == 'download') {\n",
+ " this.handle_save(this, null);\n",
+ " } else {\n",
+ " this.send_message(\"toolbar_button\", {name: name});\n",
+ " }\n",
+ "};\n",
+ "\n",
+ "mpl.figure.prototype.toolbar_button_onmouseover = function(tooltip) {\n",
+ " this.message.textContent = tooltip;\n",
+ "};\n",
+ "mpl.toolbar_items = [[\"Home\", \"Reset original view\", \"fa fa-home icon-home\", \"home\"], [\"Back\", \"Back to previous view\", \"fa fa-arrow-left icon-arrow-left\", \"back\"], [\"Forward\", \"Forward to next view\", \"fa fa-arrow-right icon-arrow-right\", \"forward\"], [\"\", \"\", \"\", \"\"], [\"Pan\", \"Pan axes with left mouse, zoom with right\", \"fa fa-arrows icon-move\", \"pan\"], [\"Zoom\", \"Zoom to rectangle\", \"fa fa-square-o icon-check-empty\", \"zoom\"], [\"\", \"\", \"\", \"\"], [\"Download\", \"Download plot\", \"fa fa-floppy-o icon-save\", \"download\"]];\n",
+ "\n",
+ "mpl.extensions = [\"eps\", \"jpeg\", \"pdf\", \"png\", \"ps\", \"raw\", \"svg\", \"tif\"];\n",
+ "\n",
+ "mpl.default_extension = \"png\";var comm_websocket_adapter = function(comm) {\n",
+ " // Create a \"websocket\"-like object which calls the given IPython comm\n",
+ " // object with the appropriate methods. Currently this is a non binary\n",
+ " // socket, so there is still some room for performance tuning.\n",
+ " var ws = {};\n",
+ "\n",
+ " ws.close = function() {\n",
+ " comm.close()\n",
+ " };\n",
+ " ws.send = function(m) {\n",
+ " //console.log('sending', m);\n",
+ " comm.send(m);\n",
+ " };\n",
+ " // Register the callback with on_msg.\n",
+ " comm.on_msg(function(msg) {\n",
+ " //console.log('receiving', msg['content']['data'], msg);\n",
+ "        // Pass the mpl event to the overridden (by mpl) onmessage function.\n",
+ " ws.onmessage(msg['content']['data'])\n",
+ " });\n",
+ " return ws;\n",
+ "}\n",
+ "\n",
+ "mpl.mpl_figure_comm = function(comm, msg) {\n",
+ " // This is the function which gets called when the mpl process\n",
+ " // starts-up an IPython Comm through the \"matplotlib\" channel.\n",
+ "\n",
+ " var id = msg.content.data.id;\n",
+ " // Get hold of the div created by the display call when the Comm\n",
+ " // socket was opened in Python.\n",
+ " var element = $(\"#\" + id);\n",
+ " var ws_proxy = comm_websocket_adapter(comm)\n",
+ "\n",
+ " function ondownload(figure, format) {\n",
+ " window.open(figure.imageObj.src);\n",
+ " }\n",
+ "\n",
+ " var fig = new mpl.figure(id, ws_proxy,\n",
+ " ondownload,\n",
+ " element.get(0));\n",
+ "\n",
+ " // Call onopen now - mpl needs it, as it is assuming we've passed it a real\n",
+ " // web socket which is closed, not our websocket->open comm proxy.\n",
+ " ws_proxy.onopen();\n",
+ "\n",
+ " fig.parent_element = element.get(0);\n",
+ "    fig.cell_info = mpl.find_output_cell(\"<div id='\" + id + \"'></div>\");\n",
+ " if (!fig.cell_info) {\n",
+ " console.error(\"Failed to find cell for figure\", id, fig);\n",
+ " return;\n",
+ " }\n",
+ "\n",
+ " var output_index = fig.cell_info[2]\n",
+ " var cell = fig.cell_info[0];\n",
+ "\n",
+ "};\n",
+ "\n",
+ "mpl.figure.prototype.handle_close = function(fig, msg) {\n",
+ " fig.root.unbind('remove')\n",
+ "\n",
+ " // Update the output cell to use the data from the current canvas.\n",
+ " fig.push_to_output();\n",
+ " var dataURL = fig.canvas.toDataURL();\n",
+ " // Re-enable the keyboard manager in IPython - without this line, in FF,\n",
+ " // the notebook keyboard shortcuts fail.\n",
+ " IPython.keyboard_manager.enable()\n",
+ "    $(fig.parent_element).html('<img src=\"' + dataURL + '\">');\n",
+ " fig.close_ws(fig, msg);\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.close_ws = function(fig, msg){\n",
+ " fig.send_message('closing', msg);\n",
+ " // fig.ws.close()\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.push_to_output = function(remove_interactive) {\n",
+ " // Turn the data on the canvas into data in the output cell.\n",
+ " var dataURL = this.canvas.toDataURL();\n",
+ "    this.cell_info[1]['text/html'] = '<img src=\"' + dataURL + '\">';\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.updated_canvas_event = function() {\n",
+ " // Tell IPython that the notebook contents must change.\n",
+ " IPython.notebook.set_dirty(true);\n",
+ " this.send_message(\"ack\", {});\n",
+ " var fig = this;\n",
+ " // Wait a second, then push the new image to the DOM so\n",
+ " // that it is saved nicely (might be nice to debounce this).\n",
+ " setTimeout(function () { fig.push_to_output() }, 1000);\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype._init_toolbar = function() {\n",
+ " var fig = this;\n",
+ "\n",
+ "    var nav_element = $('<div/>')\n",
+ " nav_element.attr('style', 'width: 100%');\n",
+ " this.root.append(nav_element);\n",
+ "\n",
+ " // Define a callback function for later on.\n",
+ " function toolbar_event(event) {\n",
+ " return fig.toolbar_button_onclick(event['data']);\n",
+ " }\n",
+ " function toolbar_mouse_event(event) {\n",
+ " return fig.toolbar_button_onmouseover(event['data']);\n",
+ " }\n",
+ "\n",
+ " for(var toolbar_ind in mpl.toolbar_items){\n",
+ " var name = mpl.toolbar_items[toolbar_ind][0];\n",
+ " var tooltip = mpl.toolbar_items[toolbar_ind][1];\n",
+ " var image = mpl.toolbar_items[toolbar_ind][2];\n",
+ " var method_name = mpl.toolbar_items[toolbar_ind][3];\n",
+ "\n",
+ " if (!name) { continue; };\n",
+ "\n",
+ "        var button = $('<button class=\"btn btn-default\" href=\"#\" title=\"' + name + '\"><i class=\"fa ' + image + ' fa-lg\"></i></button>');\n",
+ " button.click(method_name, toolbar_event);\n",
+ " button.mouseover(tooltip, toolbar_mouse_event);\n",
+ " nav_element.append(button);\n",
+ " }\n",
+ "\n",
+ " // Add the status bar.\n",
+ "    var status_bar = $('<span class=\"mpl-message\" style=\"text-align:right; float: right;\"/>');\n",
+ " nav_element.append(status_bar);\n",
+ " this.message = status_bar[0];\n",
+ "\n",
+ " // Add the close button to the window.\n",
+ "    var buttongrp = $('<div class=\"btn-group inline pull-right\"></div>');\n",
+ "    var button = $('<button class=\"btn btn-mini btn-primary\" href=\"#\" title=\"Stop Interaction\"><i class=\"fa fa-power-off icon-remove icon-large\"></i></button>');\n",
+ " button.click(function (evt) { fig.handle_close(fig, {}); } );\n",
+ " button.mouseover('Stop Interaction', toolbar_mouse_event);\n",
+ " buttongrp.append(button);\n",
+ " var titlebar = this.root.find($('.ui-dialog-titlebar'));\n",
+ " titlebar.prepend(buttongrp);\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype._root_extra_style = function(el){\n",
+ " var fig = this\n",
+ " el.on(\"remove\", function(){\n",
+ "\tfig.close_ws(fig, {});\n",
+ " });\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype._canvas_extra_style = function(el){\n",
+ "    // this is important to make the div 'focusable'\n",
+ "    el.attr('tabindex', 0)\n",
+ "    // reach out to IPython and tell the keyboard manager to turn itself\n",
+ "    // off when our div gets focus\n",
+ "\n",
+ " // location in version 3\n",
+ " if (IPython.notebook.keyboard_manager) {\n",
+ " IPython.notebook.keyboard_manager.register_events(el);\n",
+ " }\n",
+ " else {\n",
+ " // location in version 2\n",
+ " IPython.keyboard_manager.register_events(el);\n",
+ " }\n",
+ "\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype._key_event_extra = function(event, name) {\n",
+ " var manager = IPython.notebook.keyboard_manager;\n",
+ " if (!manager)\n",
+ " manager = IPython.keyboard_manager;\n",
+ "\n",
+ " // Check for shift+enter\n",
+ " if (event.shiftKey && event.which == 13) {\n",
+ " this.canvas_div.blur();\n",
+ " event.shiftKey = false;\n",
+ " // Send a \"J\" for go to next cell\n",
+ " event.which = 74;\n",
+ " event.keyCode = 74;\n",
+ " manager.command_mode();\n",
+ " manager.handle_keydown(event);\n",
+ " }\n",
+ "}\n",
+ "\n",
+ "mpl.figure.prototype.handle_save = function(fig, msg) {\n",
+ " fig.ondownload(fig, null);\n",
+ "}\n",
+ "\n",
+ "\n",
+ "mpl.find_output_cell = function(html_output) {\n",
+ " // Return the cell and output element which can be found *uniquely* in the notebook.\n",
+ " // Note - this is a bit hacky, but it is done because the \"notebook_saving.Notebook\"\n",
+ " // IPython event is triggered only after the cells have been serialised, which for\n",
+ " // our purposes (turning an active figure into a static one), is too late.\n",
+ " var cells = IPython.notebook.get_cells();\n",
+ " var ncells = cells.length;\n",
+ "    for (var i=0; i<ncells; i++) {\n",
+ "        var cell = cells[i];\n",
+ "        if (cell.cell_type === 'code'){\n",
+ "            for (var j=0; j<cell.output_area.outputs.length; j++) {\n",
+ "                var data = cell.output_area.outputs[j];\n",
+ "                if (data.data) {\n",
+ "                    // IPython >= 3 moved mimebundle to data attribute of output\n",
+ "                    data = data.data;\n",
+ "                }\n",
+ "                if (data['text/html'] == html_output) {\n",
+ "                    return [cell, data, j];\n",
+ "                }\n",
+ "            }\n",
+ "        }\n",
+ "    }\n",
+ "}\n",
+ "\n",
+ "// Register the function which deals with the matplotlib target/channel.\n",
+ "// The kernel may be null if the page has been refreshed.\n",
+ "if (IPython.notebook.kernel != null) {\n",
+ " IPython.notebook.kernel.comm_manager.register_target('matplotlib', mpl.mpl_figure_comm);\n",
+ "}\n"
+ ],
+ "text/plain": [
+ "<IPython.core.display.Javascript object>"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/html": [
+ ""
+ ],
+ "text/plain": [
+ "<IPython.core.display.HTML object>"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/plain": [
+ "[]"
+ ]
+ },
+ "execution_count": 14,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "test=[]\n",
+ "\n",
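+ "# 1-NN search: label each test point with the class of its nearest training point\n",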
+ "for i, j in zip(te_data, te_features):\n",
+ "    distances = tf.reduce_sum(tf.square(tf.sub(i, tr_data)), reduction_indices=1)\n",
+ "    neighbor = tf.arg_min(distances, 0)\n",
+ " \n",
+ " #print tr_features[sess.run(neighbor)]\n",
+ " #print j\n",
+ " test.append(tr_features[sess.run(neighbor)])\n",
+ "print test\n",
+ "fig, ax = plt.subplots()\n",
+ "ax.scatter(te_data.transpose()[0], te_data.transpose()[1], marker = 'o', s = 100, c = test, cmap=plt.cm.coolwarm )\n",
+ "plt.plot()\n",
+ "\n",
+ "#rep_points_v = tf.reshape(points, [1, N, 2])\n",
+ "#rep_points_h = tf.reshape(points, [N, 2])\n",
+ "#sum_squares = tf.reduce_sum(tf.square(rep_points - rep_points), reduction_indices=2)\n",
+ "#print(sess.run(tf.square(rep_points_v - rep_points_h)))\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 17,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Found in 180.42 seconds\n",
+ "Cluster assignments: [0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0]\n"
+ ]
+ }
+ ],
+ "source": [
+ "end = time.time()\n",
+ "print (\"Found in %.2f seconds\" % (end-start))\n",
+ "print \"Cluster assignments:\", test"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "celltoolbar": "Edit Metadata",
+ "kernelspec": {
+ "display_name": "Python 2",
+ "language": "python",
+ "name": "python2"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.11+"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/2/CH2_NN.py b/Building-Machine-Learning-Projects-with-TensorFlow-master/2/CH2_NN.py
new file mode 100644
index 000000000..4d52788cd
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/2/CH2_NN.py
@@ -0,0 +1,55 @@
+import tensorflow as tf
+import numpy as np
+import time
+
+import matplotlib
+import matplotlib.pyplot as plt
+
+from sklearn.datasets.samples_generator import make_circles
+
+N=210
+K=2
+# Maximum number of iterations, in case the stopping condition is never met
+MAX_ITERS = 1000
+cut=int(N*0.7)
+
+start = time.time()
+
+data, features = make_circles(n_samples=N, shuffle=True, noise= 0.12, factor=0.4)
+tr_data, tr_features= data[:cut], features[:cut]
+te_data,te_features=data[cut:], features[cut:]
+
+fig, ax = plt.subplots()
+ax.scatter(tr_data.transpose()[0], tr_data.transpose()[1], marker = 'o', s = 100, c = tr_features, cmap=plt.cm.coolwarm )
+plt.plot()
+
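+# Note: points and cluster_assignments below are carried over from the k-means
+# example and are not used by the nearest-neighbour loop further down.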
+points=tf.Variable(data)
+cluster_assignments = tf.Variable(tf.zeros([N], dtype=tf.int64))
+
+sess = tf.Session()
+sess.run(tf.initialize_all_variables())
+
+test=[]
+
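+# 1-NN classification: for every test sample, find the training sample with the
+# smallest squared Euclidean distance and adopt its label.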
+for i, j in zip(te_data, te_features):
+    distances = tf.reduce_sum(tf.square(tf.sub(i, tr_data)), reduction_indices=1)
+    neighbor = tf.arg_min(distances, 0)
+
+ #print tr_features[sess.run(neighbor)]
+ #print j
+ test.append(tr_features[sess.run(neighbor)])
+print test
+fig, ax = plt.subplots()
+ax.scatter(te_data.transpose()[0], te_data.transpose()[1], marker = 'o', s = 100, c = test, cmap=plt.cm.coolwarm )
+plt.plot()
+
+#rep_points_v = tf.reshape(points, [1, N, 2])
+#rep_points_h = tf.reshape(points, [N, 2])
+#sum_squares = tf.reduce_sum(tf.square(rep_points - rep_points), reduction_indices=2)
+#print(sess.run(tf.square(rep_points_v - rep_points_h)))
+
+end = time.time()
+print ("Found in %.2f seconds" % (end-start))
+print "Cluster assignments:", test
+
+
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/2/CH2_kmeans.py b/Building-Machine-Learning-Projects-with-TensorFlow-master/2/CH2_kmeans.py
new file mode 100644
index 000000000..3b88065e1
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/2/CH2_kmeans.py
@@ -0,0 +1,114 @@
+import tensorflow as tf
+import numpy as np
+import time
+
+import matplotlib
+import matplotlib.pyplot as plt
+
+from sklearn.datasets.samples_generator import make_blobs
+from sklearn.datasets.samples_generator import make_circles
+
+DATA_TYPE = 'blobs'
+N=200
+# Number of clusters; for the circles data set, 2 are enough
+if (DATA_TYPE == 'circle'):
+ K=2
+else:
+ K=4
+
+
+# Maximum number of iterations, in case the stopping condition is never met
+MAX_ITERS = 1000
+
+
+start = time.time()
+
+
+centers = [(-2, -2), (-2, 1.5), (1.5, -2), (2, 1.5)]
+if (DATA_TYPE == 'circle'):
+ data, features = make_circles(n_samples=200, shuffle=True, noise= 0.01, factor=0.4)
+else:
+ data, features = make_blobs (n_samples=200, centers=centers, n_features = 2, cluster_std=0.8, shuffle=False, random_state=42)
+
+
+fig, ax = plt.subplots()
+ax.scatter(np.asarray(centers).transpose()[0], np.asarray(centers).transpose()[1], marker = 'o', s = 250)
+plt.show()
+
+
+fig, ax = plt.subplots()
+if (DATA_TYPE == 'blobs'):
+    ax.scatter(np.asarray(centers).transpose()[0], np.asarray(centers).transpose()[1], marker = 'o', s = 250)
+    ax.scatter(data.transpose()[0], data.transpose()[1], marker = 'o', s = 100, c = features, cmap=plt.cm.coolwarm )
+    plt.show()
+
+
+points=tf.Variable(data)
+cluster_assignments = tf.Variable(tf.zeros([N], dtype=tf.int64))
+
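+# Seed the centroids with the first K data points (a simple, non-random init)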
+centroids = tf.Variable(tf.slice(points.initialized_value(), [0,0], [K,2]))
+
+sess = tf.Session()
+sess.run(tf.initialize_all_variables())
+
+sess.run(centroids)
+
+
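+# Tile/reshape lifts points [N,2] and centroids [K,2] to a common [N,K,2]
+# shape; sum_squares[i,j] is then the squared Euclidean distance from point i
+# to centroid j.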
+rep_centroids = tf.reshape(tf.tile(centroids, [N, 1]), [N, K, 2])
+rep_points = tf.reshape(tf.tile(points, [1, K]), [N, K, 2])
+sum_squares = tf.reduce_sum(tf.square(rep_points - rep_centroids),
+                            reduction_indices=2)
+
+
+best_centroids = tf.argmin(sum_squares, 1)
+
+
+did_assignments_change = tf.reduce_any(tf.not_equal(best_centroids, cluster_assignments))
+
+
+def bucket_mean(data, bucket_ids, num_buckets):
+    total = tf.unsorted_segment_sum(data, bucket_ids, num_buckets)
+    count = tf.unsorted_segment_sum(tf.ones_like(data), bucket_ids, num_buckets)
+    return total / count
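+
+# A small worked example (hypothetical values) of what bucket_mean returns:
+#   data       = [[0., 0.], [2., 2.], [4., 4.]]
+#   bucket_ids = [0, 0, 1]
+#   bucket_mean -> [[1., 1.], [4., 4.]]   (the mean of each cluster's members)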
+
+
+means = bucket_mean(points, best_centroids, K)
+
+
+with tf.control_dependencies([did_assignments_change]):
+    do_updates = tf.group(
+        centroids.assign(means),
+        cluster_assignments.assign(best_centroids))
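+# control_dependencies forces the change check to run before the two assign
+# ops, so each sess.run(do_updates) first evaluates did_assignments_change
+# and then applies the centroid and label updates together.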
+
+changed = True
+iters = 0
+
+
+# Colour indexes for the centroid markers (one entry per cluster)
+if (DATA_TYPE == 'blobs'):
+    colourindexes = [2, 1, 4, 3]
+else:
+    colourindexes = [2, 1]
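+
+# Lloyd's algorithm: alternate the assignment step (nearest centroid) and the
+# update step (per-cluster mean) until no label changes or MAX_ITERS is hit;
+# each iteration is also saved to kmeans<iter>.png.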
+while changed and iters < MAX_ITERS:
+    fig, ax = plt.subplots()
+    iters += 1
+    [changed, _] = sess.run([did_assignments_change, do_updates])
+    [centers, assignments] = sess.run([centroids, cluster_assignments])
+    ax.scatter(sess.run(points).transpose()[0], sess.run(points).transpose()[1], marker = 'o', s = 200, c = assignments, cmap=plt.cm.coolwarm )
+    ax.scatter(centers[:,0], centers[:,1], marker = '^', s = 550, c = colourindexes, cmap=plt.cm.plasma)
+    ax.set_title('Iteration ' + str(iters))
+    plt.savefig("kmeans" + str(iters) + ".png")
+
+
+ax.scatter(sess.run(points).transpose()[0], sess.run(points).transpose()[1], marker = 'o', s = 200, c = assignments, cmap=plt.cm.coolwarm )
+plt.show()
+
+
+end = time.time()
+print ("Found in %.2f seconds" % (end-start)), iters, "iterations"
+print "Centroids:"
+print centers
+print "Cluster assignments:", assignments
+
+
+
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/4/Univariate_logistic_regression_keras.ipynb b/Building-Machine-Learning-Projects-with-TensorFlow-master/4/Univariate_logistic_regression_keras.ipynb
new file mode 100644
index 000000000..825346885
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/4/Univariate_logistic_regression_keras.ipynb
@@ -0,0 +1,310 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "Using TensorFlow backend.\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ " age chd\n",
+ "count 100.000000 100.00000\n",
+ "mean 44.380000 0.43000\n",
+ "std 11.721327 0.49757\n",
+ "min 20.000000 0.00000\n",
+ "25% 34.750000 0.00000\n",
+ "50% 44.000000 0.00000\n",
+ "75% 55.000000 1.00000\n",
+ "max 69.000000 1.00000\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "/usr/lib/python2.7/dist-packages/sklearn/utils/validation.py:420: DataConversionWarning: Data with input dtype int64 was converted to float64 by StandardScaler.\n",
+ " warnings.warn(msg, DataConversionWarning)\n",
+ "/usr/lib/python2.7/dist-packages/sklearn/utils/validation.py:420: DataConversionWarning: Data with input dtype int64 was converted to float64 by StandardScaler.\n",
+ " warnings.warn(msg, DataConversionWarning)\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Train on 60 samples, validate on 30 samples\n",
+ "Epoch 1/100\n",
+ "0s - loss: 0.5174 - acc: 0.7500 - val_loss: 0.6135 - val_acc: 0.6667\n",
+ "Epoch 2/100\n",
+ "0s - loss: 0.5170 - acc: 0.7500 - val_loss: 0.6132 - val_acc: 0.6667\n",
+ "Epoch 3/100\n",
+ "0s - loss: 0.5169 - acc: 0.7500 - val_loss: 0.6130 - val_acc: 0.6667\n",
+ "Epoch 4/100\n",
+ "0s - loss: 0.5167 - acc: 0.7500 - val_loss: 0.6127 - val_acc: 0.6667\n",
+ "Epoch 5/100\n",
+ "0s - loss: 0.5165 - acc: 0.7500 - val_loss: 0.6125 - val_acc: 0.6667\n",
+ "Epoch 6/100\n",
+ "0s - loss: 0.5164 - acc: 0.7500 - val_loss: 0.6123 - val_acc: 0.6667\n",
+ "Epoch 7/100\n",
+ "0s - loss: 0.5163 - acc: 0.7500 - val_loss: 0.6121 - val_acc: 0.6667\n",
+ "Epoch 8/100\n",
+ "0s - loss: 0.5162 - acc: 0.7500 - val_loss: 0.6119 - val_acc: 0.6667\n",
+ "Epoch 9/100\n",
+ "0s - loss: 0.5161 - acc: 0.7500 - val_loss: 0.6118 - val_acc: 0.6667\n",
+ "Epoch 10/100\n",
+ "0s - loss: 0.5160 - acc: 0.7500 - val_loss: 0.6116 - val_acc: 0.6667\n",
+ "Epoch 11/100\n",
+ "0s - loss: 0.5159 - acc: 0.7500 - val_loss: 0.6115 - val_acc: 0.6667\n",
+ "Epoch 12/100\n",
+ "0s - loss: 0.5158 - acc: 0.7500 - val_loss: 0.6113 - val_acc: 0.6667\n",
+ "Epoch 13/100\n",
+ "0s - loss: 0.5157 - acc: 0.7500 - val_loss: 0.6113 - val_acc: 0.6667\n",
+ "Epoch 14/100\n",
+ "0s - loss: 0.5156 - acc: 0.7500 - val_loss: 0.6111 - val_acc: 0.6667\n",
+ "Epoch 15/100\n",
+ "0s - loss: 0.5155 - acc: 0.7500 - val_loss: 0.6110 - val_acc: 0.6667\n",
+ "Epoch 16/100\n",
+ "0s - loss: 0.5155 - acc: 0.7500 - val_loss: 0.6109 - val_acc: 0.6667\n",
+ "Epoch 17/100\n",
+ "0s - loss: 0.5153 - acc: 0.7500 - val_loss: 0.6108 - val_acc: 0.6667\n",
+ "Epoch 18/100\n",
+ "0s - loss: 0.5152 - acc: 0.7500 - val_loss: 0.6107 - val_acc: 0.6667\n",
+ "Epoch 19/100\n",
+ "0s - loss: 0.5152 - acc: 0.7500 - val_loss: 0.6105 - val_acc: 0.6667\n",
+ "Epoch 20/100\n",
+ "0s - loss: 0.5151 - acc: 0.7500 - val_loss: 0.6104 - val_acc: 0.6667\n",
+ "Epoch 21/100\n",
+ "0s - loss: 0.5150 - acc: 0.7500 - val_loss: 0.6103 - val_acc: 0.6667\n",
+ "Epoch 22/100\n",
+ "0s - loss: 0.5150 - acc: 0.7500 - val_loss: 0.6102 - val_acc: 0.6667\n",
+ "Epoch 23/100\n",
+ "0s - loss: 0.5148 - acc: 0.7500 - val_loss: 0.6100 - val_acc: 0.6667\n",
+ "Epoch 24/100\n",
+ "0s - loss: 0.5148 - acc: 0.7500 - val_loss: 0.6099 - val_acc: 0.6667\n",
+ "Epoch 25/100\n",
+ "0s - loss: 0.5147 - acc: 0.7500 - val_loss: 0.6097 - val_acc: 0.6667\n",
+ "Epoch 26/100\n",
+ "0s - loss: 0.5146 - acc: 0.7500 - val_loss: 0.6096 - val_acc: 0.6667\n",
+ "Epoch 27/100\n",
+ "0s - loss: 0.5145 - acc: 0.7500 - val_loss: 0.6095 - val_acc: 0.6667\n",
+ "Epoch 28/100\n",
+ "0s - loss: 0.5144 - acc: 0.7500 - val_loss: 0.6093 - val_acc: 0.6667\n",
+ "Epoch 29/100\n",
+ "0s - loss: 0.5143 - acc: 0.7500 - val_loss: 0.6092 - val_acc: 0.6667\n",
+ "Epoch 30/100\n",
+ "0s - loss: 0.5143 - acc: 0.7500 - val_loss: 0.6090 - val_acc: 0.6667\n",
+ "Epoch 31/100\n",
+ "0s - loss: 0.5142 - acc: 0.7500 - val_loss: 0.6089 - val_acc: 0.6667\n",
+ "Epoch 32/100\n",
+ "0s - loss: 0.5142 - acc: 0.7500 - val_loss: 0.6088 - val_acc: 0.6667\n",
+ "Epoch 33/100\n",
+ "0s - loss: 0.5140 - acc: 0.7500 - val_loss: 0.6087 - val_acc: 0.6667\n",
+ "Epoch 34/100\n",
+ "0s - loss: 0.5140 - acc: 0.7500 - val_loss: 0.6085 - val_acc: 0.6667\n",
+ "Epoch 35/100\n",
+ "0s - loss: 0.5139 - acc: 0.7500 - val_loss: 0.6085 - val_acc: 0.6667\n",
+ "Epoch 36/100\n",
+ "0s - loss: 0.5138 - acc: 0.7500 - val_loss: 0.6084 - val_acc: 0.6667\n",
+ "Epoch 37/100\n",
+ "0s - loss: 0.5137 - acc: 0.7500 - val_loss: 0.6083 - val_acc: 0.6667\n",
+ "Epoch 38/100\n",
+ "0s - loss: 0.5137 - acc: 0.7500 - val_loss: 0.6082 - val_acc: 0.6667\n",
+ "Epoch 39/100\n",
+ "0s - loss: 0.5137 - acc: 0.7500 - val_loss: 0.6081 - val_acc: 0.6667\n",
+ "Epoch 40/100\n",
+ "0s - loss: 0.5136 - acc: 0.7500 - val_loss: 0.6080 - val_acc: 0.6667\n",
+ "Epoch 41/100\n",
+ "0s - loss: 0.5134 - acc: 0.7500 - val_loss: 0.6078 - val_acc: 0.6667\n",
+ "Epoch 42/100\n",
+ "0s - loss: 0.5134 - acc: 0.7500 - val_loss: 0.6077 - val_acc: 0.6667\n",
+ "Epoch 43/100\n",
+ "0s - loss: 0.5134 - acc: 0.7500 - val_loss: 0.6077 - val_acc: 0.6667\n",
+ "Epoch 44/100\n",
+ "0s - loss: 0.5132 - acc: 0.7500 - val_loss: 0.6076 - val_acc: 0.6667\n",
+ "Epoch 45/100\n",
+ "0s - loss: 0.5132 - acc: 0.7500 - val_loss: 0.6075 - val_acc: 0.6667\n",
+ "Epoch 46/100\n",
+ "0s - loss: 0.5131 - acc: 0.7500 - val_loss: 0.6074 - val_acc: 0.6667\n",
+ "Epoch 47/100\n",
+ "0s - loss: 0.5131 - acc: 0.7500 - val_loss: 0.6073 - val_acc: 0.6667\n",
+ "Epoch 48/100\n",
+ "0s - loss: 0.5130 - acc: 0.7500 - val_loss: 0.6072 - val_acc: 0.6667\n",
+ "Epoch 49/100\n",
+ "0s - loss: 0.5129 - acc: 0.7500 - val_loss: 0.6070 - val_acc: 0.6667\n",
+ "Epoch 50/100\n",
+ "0s - loss: 0.5129 - acc: 0.7500 - val_loss: 0.6069 - val_acc: 0.6667\n",
+ "Epoch 51/100\n",
+ "0s - loss: 0.5128 - acc: 0.7500 - val_loss: 0.6068 - val_acc: 0.6667\n",
+ "Epoch 52/100\n",
+ "0s - loss: 0.5127 - acc: 0.7500 - val_loss: 0.6067 - val_acc: 0.6667\n",
+ "Epoch 53/100\n",
+ "0s - loss: 0.5127 - acc: 0.7500 - val_loss: 0.6066 - val_acc: 0.6667\n",
+ "Epoch 54/100\n",
+ "0s - loss: 0.5126 - acc: 0.7500 - val_loss: 0.6064 - val_acc: 0.6667\n",
+ "Epoch 55/100\n",
+ "0s - loss: 0.5125 - acc: 0.7500 - val_loss: 0.6063 - val_acc: 0.6667\n",
+ "Epoch 56/100\n",
+ "0s - loss: 0.5125 - acc: 0.7500 - val_loss: 0.6062 - val_acc: 0.6667\n",
+ "Epoch 57/100\n",
+ "0s - loss: 0.5124 - acc: 0.7500 - val_loss: 0.6061 - val_acc: 0.6667\n",
+ "Epoch 58/100\n",
+ "0s - loss: 0.5123 - acc: 0.7500 - val_loss: 0.6059 - val_acc: 0.6667\n",
+ "Epoch 59/100\n",
+ "0s - loss: 0.5122 - acc: 0.7500 - val_loss: 0.6058 - val_acc: 0.6667\n",
+ "Epoch 60/100\n",
+ "0s - loss: 0.5122 - acc: 0.7500 - val_loss: 0.6057 - val_acc: 0.6667\n",
+ "Epoch 61/100\n",
+ "0s - loss: 0.5121 - acc: 0.7500 - val_loss: 0.6056 - val_acc: 0.6667\n",
+ "Epoch 62/100\n",
+ "0s - loss: 0.5120 - acc: 0.7500 - val_loss: 0.6055 - val_acc: 0.6667\n",
+ "Epoch 63/100\n",
+ "0s - loss: 0.5120 - acc: 0.7500 - val_loss: 0.6054 - val_acc: 0.6667\n",
+ "Epoch 64/100\n",
+ "0s - loss: 0.5119 - acc: 0.7500 - val_loss: 0.6052 - val_acc: 0.6667\n",
+ "Epoch 65/100\n",
+ "0s - loss: 0.5119 - acc: 0.7500 - val_loss: 0.6051 - val_acc: 0.6667\n",
+ "Epoch 66/100\n",
+ "0s - loss: 0.5118 - acc: 0.7500 - val_loss: 0.6051 - val_acc: 0.6667\n",
+ "Epoch 67/100\n",
+ "0s - loss: 0.5117 - acc: 0.7500 - val_loss: 0.6050 - val_acc: 0.6667\n",
+ "Epoch 68/100\n",
+ "0s - loss: 0.5117 - acc: 0.7500 - val_loss: 0.6049 - val_acc: 0.6667\n",
+ "Epoch 69/100\n",
+ "0s - loss: 0.5116 - acc: 0.7500 - val_loss: 0.6048 - val_acc: 0.6667\n",
+ "Epoch 70/100\n",
+ "0s - loss: 0.5115 - acc: 0.7500 - val_loss: 0.6047 - val_acc: 0.6667\n",
+ "Epoch 71/100\n",
+ "0s - loss: 0.5115 - acc: 0.7500 - val_loss: 0.6046 - val_acc: 0.6667\n",
+ "Epoch 72/100\n",
+ "0s - loss: 0.5114 - acc: 0.7500 - val_loss: 0.6044 - val_acc: 0.6667\n",
+ "Epoch 73/100\n",
+ "0s - loss: 0.5113 - acc: 0.7500 - val_loss: 0.6043 - val_acc: 0.6667\n",
+ "Epoch 74/100\n",
+ "0s - loss: 0.5113 - acc: 0.7500 - val_loss: 0.6041 - val_acc: 0.6667\n",
+ "Epoch 75/100\n",
+ "0s - loss: 0.5112 - acc: 0.7500 - val_loss: 0.6040 - val_acc: 0.6667\n",
+ "Epoch 76/100\n",
+ "0s - loss: 0.5113 - acc: 0.7500 - val_loss: 0.6039 - val_acc: 0.6667\n",
+ "Epoch 77/100\n",
+ "0s - loss: 0.5111 - acc: 0.7500 - val_loss: 0.6038 - val_acc: 0.6667\n",
+ "Epoch 78/100\n",
+ "0s - loss: 0.5112 - acc: 0.7500 - val_loss: 0.6037 - val_acc: 0.6667\n",
+ "Epoch 79/100\n",
+ "0s - loss: 0.5111 - acc: 0.7500 - val_loss: 0.6037 - val_acc: 0.6667\n",
+ "Epoch 80/100\n",
+ "0s - loss: 0.5110 - acc: 0.7500 - val_loss: 0.6036 - val_acc: 0.6667\n",
+ "Epoch 81/100\n",
+ "0s - loss: 0.5109 - acc: 0.7500 - val_loss: 0.6035 - val_acc: 0.6667\n",
+ "Epoch 82/100\n",
+ "0s - loss: 0.5109 - acc: 0.7500 - val_loss: 0.6034 - val_acc: 0.6667\n",
+ "Epoch 83/100\n",
+ "0s - loss: 0.5109 - acc: 0.7500 - val_loss: 0.6034 - val_acc: 0.6667\n",
+ "Epoch 84/100\n",
+ "0s - loss: 0.5108 - acc: 0.7500 - val_loss: 0.6033 - val_acc: 0.6667\n",
+ "Epoch 85/100\n",
+ "0s - loss: 0.5107 - acc: 0.7500 - val_loss: 0.6032 - val_acc: 0.6667\n",
+ "Epoch 86/100\n",
+ "0s - loss: 0.5107 - acc: 0.7500 - val_loss: 0.6032 - val_acc: 0.6667\n",
+ "Epoch 87/100\n",
+ "0s - loss: 0.5106 - acc: 0.7500 - val_loss: 0.6031 - val_acc: 0.6667\n",
+ "Epoch 88/100\n",
+ "0s - loss: 0.5106 - acc: 0.7500 - val_loss: 0.6030 - val_acc: 0.6667\n",
+ "Epoch 89/100\n",
+ "0s - loss: 0.5105 - acc: 0.7500 - val_loss: 0.6029 - val_acc: 0.6667\n",
+ "Epoch 90/100\n",
+ "0s - loss: 0.5105 - acc: 0.7500 - val_loss: 0.6028 - val_acc: 0.6667\n",
+ "Epoch 91/100\n",
+ "0s - loss: 0.5104 - acc: 0.7500 - val_loss: 0.6027 - val_acc: 0.6667\n",
+ "Epoch 92/100\n",
+ "0s - loss: 0.5104 - acc: 0.7500 - val_loss: 0.6026 - val_acc: 0.6667\n",
+ "Epoch 93/100\n",
+ "0s - loss: 0.5104 - acc: 0.7500 - val_loss: 0.6025 - val_acc: 0.6667\n",
+ "Epoch 94/100\n",
+ "0s - loss: 0.5103 - acc: 0.7500 - val_loss: 0.6025 - val_acc: 0.6667\n",
+ "Epoch 95/100\n",
+ "0s - loss: 0.5102 - acc: 0.7500 - val_loss: 0.6024 - val_acc: 0.6667\n",
+ "Epoch 96/100\n",
+ "0s - loss: 0.5102 - acc: 0.7500 - val_loss: 0.6023 - val_acc: 0.6667\n",
+ "Epoch 97/100\n",
+ "0s - loss: 0.5101 - acc: 0.7500 - val_loss: 0.6022 - val_acc: 0.6667\n",
+ "Epoch 98/100\n",
+ "0s - loss: 0.5102 - acc: 0.7500 - val_loss: 0.6021 - val_acc: 0.6667\n",
+ "Epoch 99/100\n",
+ "0s - loss: 0.5100 - acc: 0.7500 - val_loss: 0.6020 - val_acc: 0.6667\n",
+ "Epoch 100/100\n",
+ "0s - loss: 0.5100 - acc: 0.7500 - val_loss: 0.6019 - val_acc: 0.6667\n",
+ "['loss', 'acc']\n",
+ "[0.58956193923950195, 0.80000001192092896]\n"
+ ]
+ }
+ ],
+ "source": [
+ "import tensorflow as tf\n",
+ "from keras.models import Sequential\n",
+ "from keras.layers import Dense\n",
+ "from sklearn import datasets, metrics, preprocessing\n",
+ "from sklearn.utils import shuffle\n",
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "\n",
+ "#Load the dataset\n",
+ "df = pd.read_csv(\"data/CHD.csv\", header=0)\n",
+ "#Describe the input data\n",
+ "print df.describe()\n",
+ "\n",
+ "#Normalize the input data\n",
+ "a = preprocessing.StandardScaler()\n",
+ "X =a.fit_transform(df['age'].reshape(-1, 1))\n",
+ "\n",
+ "#Shuffle the data \n",
+ "x,y = shuffle(X, df['chd'])\n",
+ "\n",
+ "#Define the model as a logistic regression with\n",
+ "model = Sequential()\n",
+ "model.add(Dense(1, activation='sigmoid', input_dim=1))\n",
+ "model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['accuracy'])\n",
+ "\n",
+ "# Fit the model with the first 90 elements, and spliting 70%/30% of them for training/validation sets\n",
+ "\n",
+ "model.fit(x[:90], y[:90], nb_epoch=100, validation_split=0.33, shuffle=True,verbose=2 )\n",
+ "\n",
+ "#Evaluate the model with the last 10 elements\n",
+ "scores = model.evaluate(x[90:], y[90:], verbose=2)\n",
+ "print model.metrics_names\n",
+ "print scores\n"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 2",
+ "language": "python",
+ "name": "python2"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.11+"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/5/CH5_Nonlinear.ipynb b/Building-Machine-Learning-Projects-with-TensorFlow-master/5/CH5_Nonlinear.ipynb
new file mode 100644
index 000000000..dc120f785
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/5/CH5_Nonlinear.ipynb
@@ -0,0 +1,2146 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {
+ "collapsed": false,
+ "scrolled": false
+ },
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "Using TensorFlow backend.\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAbYAAAEoCAYAAAA0ZdRDAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXt8nFWd/9/fJHPLPSmhhd7SC22B3hUWBGyLUAFXQOUi\noBZbEHBrkR8rRVwoFOvaZUHpaintIkWhTVBEqKLpsqTVohjAUlgDgkpKKdgJlItgC6X9/v4458k8\nM5lJZpKZTDI579drXvPcn/M8cz7zPed7vuccUVUcDofD4SgUivKdAIfD4XA4sokzbA6Hw+EoKJxh\nczgcDkdB4Qybw+FwOAoKZ9gcDofDUVA4w+ZwOByOgsIZtn6KiMwSkR2+9f8TkY/28Fp3isjS7KXO\n4cgNXl4VkeNF5NksXO9FETkxG2lzDBycYevfdHQyVNXJqvrrfCbG4egrVHWLqh6e73T0J0Rknoj8\nJt/pGAg4w+ZIiYgU5zsNDoejA8FX2HWkxhm2PkJERojIfSISFZF2Efm+iLwuIkf6jqkTkXdFZEiS\n8ztcKiKyREQaReQuEXlbRJ4RkZm+Y2eIyJMi8paINADhhGv9s4hsFZE3RGSLiExJuM9VIrINeEdE\nikRksYi8bO/1rIjMycU7cgw+UuXVJK74pHnQauHHItJg9z0hIlNT3OsoEfmtzfc7ReS/RKTEt/9I\nEdlodfmqiFxtt4uIXC0if7babRCRartvtIgcEJELReQle+4lIvJhEdkmIrtF5L8S0jFfRFrtsb8U\nkVG+fQfs+c/bc79nt08CbgOOFZG/i8juLP0EBYkzbH2AiBQBPwdeBEYDw4F7gPXA532Hngc8rKqv\np3HZTwLrgCpgA/B9e68AcD9wF1AL/Bj4jC8tM4A7gIvt/tuBB+15Hp8FTgWqgfHAvwAfUtVK4ONA\nW9oP73CkoLu8iq2diMgEus6DpwONQA1GUz9L4W3YD3zV3utY4ETgy/Ye5cD/AA8Bh2Dy/f/a8xbZ\ne5wAHAq8AaxMuPbR9pxzge8C19jrTwbOEZET7H3OAK4GzgTqgN/YNPv5BPAhYJo9d66qPgdcCvxO\nVStUtTbJ8zkszrD1DUdjxHKVqu5R1fdV9bfADzHGzOPzwI/SvOYWVW1SM9jnjwCvlHosUKKqK1R1\nv6reBzzuO+9iYJWqPqGGHwHvAcf4jrlVVV9R1fcwfwZBYLKIlKjqS6r6YmaP73Ak5Ri6zqse3eXB\nJ1X1flXdD9yCqfUdk3gRVf2DqrbYfP8SsBqYZXf/M/Cqqn7X6vNdVfXScgnwDVV9VVX3AUuBs2yB\nFYwBXmrPexh4F1ivqq+r6isY4zXDd61/V9XnVfUA8G1guoiM9CX131X176q6A2gGpqf3Oh0ezrD1\nDSOB7TYjd6CqLcC71u0yERgHPJjmNf/mW/4HELZCOwTYmXDsdt/yaOBK6+bYLSJvACMwJVGPl31p\n/AumlHs9sEtE1onIIWmm0eHoikPpOq8CKfPgMN8hO3zHKib/HkoCInKYiGywbsY3gWXAQXb3SOAv\nKdI5Grjf0wzQCuwDhvqOifqW9wC7EtbLfde61Xet1zGGcbjveP+5//Cd60gTZ9j6hh3AKF8Jz89d\nmJra54GfqOr7vbzXq8SLBGCUb3kHsExVa+2nRlXLVbXRd0xcA7WqNqjqCRhRgillOhy9pbu82kGS\nPLjct7ujtiMigimoJRpMMG1UzwLjVLUa+AYmIAOMLsalSOdLwKkJmilT1VdTP1pKdgCXJNHfY2mc\n6wJH0sQZtr6hBSPib4tIqYiEROQjdt89wKeACzCuyZ7iCfR3wAci8hURKRGRT2NcoR5rgEtF5GgA\nESkTkdNEpCzpRUUmiMgcEQkC72NKnweSHetwZEh3eRVIKw9+SETOtO1qVwB7gd8nuV8F8Laq/sMG\nY1zm2/dzYJiILBKRoIiUexrBtEN/ywvyEBPkdbo/iRk88yrgGhE5wl6rSkTOSvPcXcCIhPZwRxKc\nYesDrAvyk8BhmNLfDuAcu+9l4A9mUbd0dZnubmOvtw/4NPBFjJvjbOA+X1qexLSzfc+6Qp4H5nVx\nnxCmhtYOvIJp8P56N2lxOLqlu7zqo7s8+AAmaOMNTAHx07a9DeLz878CF4jI2xhj1eBLyzvAyZgg\nkb9hdDHb7r7V3mOjiLwF/JZ4A5yomZTrqvoz+ywN1h36NHBKmtd6BPgj8DcRieJIiaQz0aiIVAH/\njYnwOQDMx/zwjRjXQBtwjqq+lbOUFjAicgewU1Wvy3daHLnFaSm7iMgSjGvxC/lOi6P/kG6N7Vbg\nITsSwDTgOUzI6sOqOhFTknCl+B4gIvUYV+Qd+U2Jo49wWnI4cky3hk1EKoETVPVOAFX9wJYmz8AE\nPmC/z8xZKgsUMeM3Pg38h6p2igZzFBZOSw5H39CtK1JEpmH6e7RiSphPYEJvd6pqje+43a7ToMOR\nGqclh6NvSMcVWQLMBL6vqjMxnQ+vpvsGU4fDEY/TksPRB5R0fwgvAztU9Qm7fh9GjLtEZKiq7rKd\nJZNG6YiIE6mjYFHVTEK9nZYcji7IUE8p6bbGpqq7gB12vDaAj2FCTh8ELrTb5mHCYVNdw3366LNk\nyZK8p2GwfDJFnZYy/gzW/DwYnzubpFNjAzMI6D22Y+BfMf1OioF7RWQ+Zhicc7KaMoejMHFacjhy\nTFqGTVW3AUcl2XVSdpPjcBQ2TksOR+5xI48UGLNnz853EhyOrDFY8/Ngfe5skdbII726gYjm+h4D\nmfb2dtra2qivr6euri7fyXFkgIigWWrsTvN+faIllycd+SCbenI1tjyyfn0jI0aM5fjjP8mIEWNZ\nv76x+5Mcjhyyfn0jo0dPYs6ceQwfPobrr1/K448/Tnt7e76T5nCkTdo1NjvlyhPAy6p6uh0KqgEz\nG+2TwOdV9YMk57kaWxLa29s5+OCRmLLFCEwk+AdEoztdKXmA0NMSZn/VUnt7O6NHT2LPnlMwE1tX\nA28SCNRTUrKLO+5YyXnnnZuz+zsGN/mqsV2OGTHBYzlws6pOAN4EFmQjQYOFBx54APP6H8OMgfsY\nUGK3OwqcfqmltrY2oArTvW4dZmL1x9i3r5U9e5pZsODLrubmGBCkZdhEZARwGmZUco8TiU0xcRdm\nIF9Hmjz66KOYORan2i1TgeF2u6NQ6a9aam9v58Ybv8WePa9iJp/+nP32589DrfFzOPo36dbYvgN8\nDTvUj4gMAd5QM88YpJiK3ZGaDz74ADPJ79N2y9PATrs9v7S3t7t2ldzR77S0fn0jo0ZNZMOGZzBd\n6nYBZcBf8OfPPXv+wvvvxyZ4H0j5ZCCl1dF70hnd/xPALlV9iviZYvssGqwQeeeddzCunmMw848e\nA+y123tGNsTrBQ+cfPKljB49yQW0ZJH+qKX29nYWLPgye/duAhYDH2Amqf47po3tGGCc/a5gzpxT\nuP32Naxf38jIkeP56EfPYuTI8R35p
D8aEJenBx/pdNA+DjhdRE4DIpjp1W8FqkSkyJY0R2CqH0m5\n/vrrO5Znz57t+mgAra2tQAAIYkrJQWC/3Z4569c3smDBlwkG63n//bYeNfR7f3J79jSzZ89U4GkW\nLJjDSSed6AJagE2bNrFp06beXKLfaWnr1q0cOHAIcAhmgukSYm2/U4FNmAmei4A97Nt3CJdeugjY\nb48NA/u44IJ5vP3221xxxdVxefCkk07Ma9cBl6f7L1nQU2oyHMtrFvCgXW4EzrXLtwGXpjhHHZ0J\nBAIKpQrNCi32u1QDgUDG14pGoxqJ1CpsU1CFbRqJ1Go0Gs3oOi0tLVpVNdNew3wqK2doS0tLxmka\nDNi83dNx8fKupXXrGjQYLFeIKCxUGKEwVOGwuDxg1kMKNQoz7XfQ5lm1+S6sJSUVcfk5EKjUUKhS\nq6pmaiRSq+vWNXTcOxqNaktLS8Z5NFNcnh449EZPiZ/eiHEM8Hti09oHUpyT27cxQAEUhilUK0y0\n30O1J+8rW+KNRqMaDFbFGchgsCrnfz4DlSwatj7XUjQa1UCgQqFCoUohbL9LraGL5QGzHk7YVqrQ\naI1YVGGMwiEKtdb41dr8XaawOq6wtW5dg0YitUkNXrbJVqHPkXvyZth6dANn2JJiDFuyUnDm7ytb\n4o1Go1pUFLF/ZIcpRLSoKOz+BFKQTSGm88mmlpqamqxxut/mwykKDdaABW1+nGG/I9Zwqe8zTKHS\nl3cDKQzi/fZ7tVZWztCmpqY+NzSeIa2snJFzQ+roOdnUkxt5JK8UY9ownrTf6U62EE9dXR133LGS\nSGQOlZUziUTmcMcdKzNuQzDtLQI8BNwDPMSBA0Vs3bq1R+ly9F9+8YuHMO1q79vvHcDhwH8BI4E/\nAbfb7+HAK8QiJDcBbwG/IT7vDiW+e8BYoNRe/yu8++6fePPNNwkG6+OO27OnhttvX5OjJ4XzzjuX\n7duf4+GHb2f79udcJ/NBQM/+SR1Z4hDi/wgOwYRYZ855553L9OlTaWlp4eijj+bwww/P+Bpvvvmm\nTcORQJv9PsRuzy9u/MLs0d7ezqpVd2IKVn/AzGt6JSbysQp4G3gVMwmB6YZioiWPxxivKFBPYh83\n01Phabv+NGZWnk9h8lQR+/fv5/OfX4BqEcYYlmEmEX+FG274Fkcf/WFmzJiRk9+3rq6uz/KNy6v9\ngGxV/VJ9KFBXZG8bv4EUrpueva9stFs0NjZat1SFwij7HdTGxsYepSlbeM9WVjatX7mSGKCuyJaW\nFq2omGFdj+UKJb62tmF2W6Ir0nNRhhTWpsi7tfZ7ql1ebs+P+trlwgrFce5u+IxCREOh4RoOV3f8\nvtkMMOmrYJW+bD8sNLKpp3TENAJ4BDPT7zPAIru9BtiI8VU0AVUpzs/5C+lrspF5jWHzGttn+Brb\nM39f2WpjM4YtYP98Rtrvkrwatv4c0JKpEPuLlmL5xTM846yBKbFGbbmaYKbh1tg1qIl2LLZ5tjJJ\n3h1q885EjQWUqN3f4lu+x96r2WcUQz6DWKOBQLmuWrVaw+EajUQmazhck1RjnrHasGGDXnfddbpl\ny5ak+1etWt0nxsYFqvSOvjZsw4Dpdrncim8SZny7q+z2xcC3U5yf6/fRp2Qr88ZqbM0aC/fvWY0t\nW1GRS5cu1WQBLUuXLs04TdnCBDmMj3s2GKdNTU15S5NHDwxbv9HSqlWrk9S6ytXUyiIKo62hC9v3\nH/HVtO63hq9z3u18TX+NrdYuj/cZu6iamlx8xKUJYlqhsEBhhZaUVMRpLNZVIaz+2t/cuad27I9E\narWiYkqnNKWr10xreS0tLRqJTInLq5HIZNe1IE361LB1OgF+hpnt9zlgqMYE+1yK43P6MvqabGVe\n8yfg/VH4/zh6VmPLRq1m8uTJSf9kJk+enHGasoWpRXZOU77do6q9F2I+tRRzR8YXGEz0Y0ThY9Zo\nxIyL2e5FPw6z315tr9j+TkOtAZtut5fZY2rU1Pw8t2Wzvec9SQsuMQPruStDHb95NBpVUwDzjol3\niW7YsMEWPu9XuE5NxGfs+qHQEd0WjNata9BwuFrLyiZ2uEe7M3Stra1J09PY2OhqbWmQN8OGaTFu\ns6XNNxL27U5xTi7fRZ+TKvO2trZmdB1j2IaqKSX7v3tm2AKB+HaRQKA8YzEVFRXZP5X4P5mioqKM\n05Qt1qxZo7FapNfmE9I1a9bkLU0evRFivrWUrDAUX7uqtvm8Wo2LsNoakqHW4AWssVtrDUiNwhqf\n0VqrcKS93jJ7vleA87seq9W4NmPpEPHa4uI1NnfuXFX154mwdu5MPl6PPfZYFRmisW4KiXotjWvL\nS/ZuTB+/Mvu8ZVpUFNFgsFyDwZEaClUmPdcUesdooou2rGyia29Lg2watrSjIkWkHPgJcLmqviMi\nmnBI4noHhTSk1o4dOzCRY3OA0cB2oJIdO3ZkFIlo5h56GxNa70WHnYZI5sMGtrW1UVo6gbfe+hXm\nv7KeSOTjtLW1ZRSVdeDAAWJh3V5k2yt2e37YvHkzJmsdwLyjA4CyefNmLrrooj5NS7aGAOovWvrg\ng/cxkZBeOP8PgDr7GYKZQedd4A1gL3AWppJ5LKZ7wFd8V1sKjLJJPw1T8fwbJrryGuAjwKmYobkO\nAtoxnthDgZ0UFx9HODyWAwdeJhyO8MYb1SRGXXrDzf385z+324PEBhL38utOnnvudVT3YoYF+z2w\n0Kb5EGA38AP27j085dBaW7duZd++PZgh7yqBtzlw4D3efz+EGQntNS64YF6nc+vr69m/f7d9R56m\nP8277/4GeLXT/bzoyfLyct55552OKMp77rmHe++9l3POOYcLLrig0+9WKFGXeR9SC9Mt4FcYIXrb\nniXeffJsinNzaeT7HOMa69w2lqlrbMqUKWraMPyuyBKdMmVKxmnKbrtfMEkpPX+/4ZAhQ9QEKlQr\nTLDfw3TIkCF5S5MHPShh9hctxTpoNys0qQkSieWfUKhai4q8CMgWhWttDcmrbaVqS1tuj6nz5e+x\nGnNjJg5IUK5wrYbD1drU1KTRaFTPP/98TeZ+Pv/881VV9cMf/rBvf4MmBsAUFxfb9ajNL9vsMx6m\nsaCW1O3QCxYsSHi+5iTPG4nzGkSjUftOQ2pqoONsGhuS3s9rAzTNGhGNRMZoJFKrNTVD1e+CHTmy\nPi5tyQLXEl2kW7ZsSRpM09/piZ5SfdIV4w+BWxK2LQcW2+VBEzxiMu+wTu6GTIMZjDul2ArhYPtd\n3GMXWzZGVzCGzftD8CLbxuXVsJHFIJtcpE0zN2z9Qkudg3IaFEq1tHRKR/6JdbOYqpFIra5atVpv\nvHGZlpSU+YyW1wYW9P2ZB9S48QL68Y+fokVFYTVtXZ2NFdymUKvl5bF2apO2xNFPgh0amzVrlsa7\nzKNqIjhLFKrU1IBL1bTfzfAdk17hb/jw4Rrv4mzRZC7Pk046SVVj2gsGD1Wot8b0Np9Rjb9frC
Dq\nz9O1CtckNaB33323qmrS8wKBiri2wMmTZ6g/qtkLphkI9Klhw4xIvh94CtiK6dF5CmYa+4cx/oSN\nQHWK83P/RvqQWNtELHP1JFDDNIAHfAIOKgR61cjc3/rWZQOTpqqEP9GqAWnY+pOWkrWxBQIVHbUm\n/3GJeaq1tVVFvD5tXqGsSM8991xtbGzULVu26Nq1azvandeta9BAwAsi8RuHcWpqUlM1FKrsuEes\nzbjM/kGXxbUZ33LLLUnyaYV6/evq6uqsnryxL73jlqsX8NJV4c8Ytu5rbKNHj07wlnzGvotpGiss\n1KrnkfGMjGmLG6vx42rWK3woqQGdM2dOkvOmaWwoM68WXKXJasUDpebW5zW2Xt2gH/wBZZvEkmxP\nG4W9kOVgcKgGg+V5b1yeN2+e7w9hsv0O6rx58/KWppixvV9jgQoDt8bWm0+2n7k3+XjdugYNhSo1\nFDpUA4EyXbVqdZfHt7a2aiAQHyRi8pcxGonnx6ISJ3QK9DCFQi+i2HNBGhfeyJH1unLlSo11VahN\nOK5EFy26vMvC39SpU7VzxLIkFK5KdNq0aQldbU7WWE3WbxRDCheoF2SWKgDNBNt03j579uyOd9i1\nwU1mgEv1yiuvTPt3zSfOsPUDsjWSQV+NiJAuRtBhNa4dM0pEftOD70/F+3MSZ9iyRG/yX6bneoY0\nFDpCzUgj9RoKVac0il1d36sFlpTUKgR15swPdbjsVFVra+usQfFqMUMUArpgwUXdpvOzn/2sxkZg\nOVhNzbFGTdvZKPs9TL/0pS8l1Njq7XlelwjPKBYptCqM1xUrViTtMmSOFY21YcYiSI8//nhVTeY+\nTnSRJnOZjusXEcTp4AybI6fMmzdPa2tr81pT8zDtJRGFi9UEj1ysEFERyXfSCsKw9TWesWptbe11\nga47w7py5Uo94YQT9Nprr41zjXbHhg0bNL5d9/6E9Wb1B4x5Bru4uMZ33BqFL6jxgJzYUfvy+rQl\nBnuFQtW6cOFCNa5Tf9/BUr3hhhtU1R/wk36NraiorN8UmrvDGTbHoGHx4sW+GlvMLbR48eJ8J80Z\ntgKlpaVFRSo75Tn/euJ0TrGoyMRI52L1+tL5z0kW7BXreB7fZ7Nz22Nsf1FRWMPhmg538sKFizQU\nqtZQ6AgNhVL31euP9BvDZhu+n8NMkLg4xTE5fBWORJqbm/OdhKwSa0/xIkjNcn8ohWbbsHWnp8Go\npXzk51iNaq3ClWoiOsNaXFyq4fColB20VY3BKi4uU5GDFSJaXBxOeU6yGqdxsVZoUdFQDQQqOp2T\nrO0x8Tr9rXkjXfqFYcP0tPwzppdywEZ6TUpyXE5fhiOeJUuW5DsJWWfdugYtKSnToqKDtKSkrN+U\nQrMqxDT0NBi1lK/8HBtrcnpHO2C6BsN/XE+MTDQa1YsuuijlOQPVcHVHNvXUm/nYjgZeUNXtACLS\nAJxhS5wOR9Y477xzOemkEwtitIUucHrqR6TKc+nkvcS53zLNr3V1dQwfPjzleX05t9xApTeGbThm\nXB2PlzHidDiyziAQs9NTP2MQ5LmCRUwNsAcninwG+Liqfsmufw44WlUXJRzXsxs4HAMAVc18cM8k\npKMnpyVHoZMtPfWmxrYTM+qpxwi7LY5sJdThKHC61ZPTksORHkW9OPdxYLyIjBaRIPBZ4MHsJMvh\nGHQ4PTkcWaLHNTZV3S8iCzFj2xUBd6jqs1lLmcMxiHB6cjiyR4/b2BwOh8Ph6I/0xhXZJSJyiog8\nJyLPi8jiXN3HYRCRNhHZJiJbRaQl3+kpNETkDhHZJSJP+7bViMhGEfmTiDSJSFUO71+QehKRESLy\niIj8UUSeEZFFdnvKdysiK0TkBRF5SkSm5y/1vUNEikTkDyLyoF2vF5HH7G+8XkRK7PagiDTYZ/6d\niIzq+sr9ExGpEpEfi8iz9vf+p1z9zjkxbCJSBHwP+DhwJHCeiEzKxb0cHRwAZqvqDFV1YeLZ505M\nfvZzNfCwqk4EHgG+nosbF7iePgD+n6oeiZnm+l/ssyV9tyJyKjBOVQ8DLgFW5SfZWeFyoNW3vhy4\nWVUnYKYvX2C3LwB222f+LvAffZrK7HEr8JCqHg5Mw/TRzMnvnKsaW0dnU1XdB3idTR25Q8hhDXyw\no6pbgDcSNp8B3GWX7wLOzNHtC1ZPqvo3VX3KLr+DmU18BJ3frfe8Z2Ama0VVfw9UicjQPk10FhCR\nEcBpwH/7Np8I3GeX/fnJ/y5+AnysL9KYTUSkEjhBVe8EUNUPVPUtcvQ75+qPMFln0+E5upfDoECT\niDwuIhfnOzGDhINVdReYP2jg4BzdZ1DoSUTqgenAY8DQhHfr/aklvoudDMx38R3gaxjdIiJDgDdU\n9YDd7/+NO55ZVfcDb4pIbd8mt9eMAV4TkTut+3W1iJSSo9/ZlfALh+NU9cOYUuC/iMjx+U7QIMRF\nYvUQESnH1EYutzW3xHdZMO9WRD4B7LI1VX/fxHT7KQ7E/owlwEzg+6o6E3gX44bMye+cK8OWVudt\nR/ZQ1VftdztwP244pr5gl+ceEZFhQDRH9yloPdkgiZ8AP1LVB+zmVO92JzDSd/pAfBfHAaeLyF+B\n9RgX5K0Yd5v3n+x/ro5nFpFioFJVd/dtknvNy8AOVX3Crt+HMXQ5+Z1zZdhcZ9M+RERKbYkXESkD\n5gL/l99UFSRCfGn5QeBCuzwPeCDxhCxR6Hr6AdCqqrf6tvnf7YXE3u2DwBcAROQY4E3PlTVQUNVr\nVHWUqo7F/JaPqOrngGbgbHuYPz89aNex+x/py/RmA/sb7RCRCXbTx4A/kqvfOVvTBCR+MHNL/Ql4\nAbg6V/dxHwXjv34K2Ao84953Tt7xOuAV4D3gJeCLQA3wsM3nG4HqHN6/IPWEqb3s9+XfP9hnrU31\nbjERon8GtgEz8/0MvXz+WcCDdnkM8HvMfHyNQMBuDwH32t/+MaA+3+nu4bNOwxTSngJ+ClTl6nd2\nHbQdDofDUVC44BGHw+FwFBTOsDkcDoejoHCGzeFwOBwFhTNsDofD4SgonGFzOBwOR0HhDJvD4XA4\nCgpn2BwOh8NRUDjD5nA4HI6Cwhk2h8PhcBQUzrBlARF5UUROzHc6HA5H7xCR20TkG2kee6eILM11\nmhyZU5LvBDgcDkd/QVUvy9a1ROQAMF5V/5qtazrSw9XY+gl2OoqCoJCexeHoBW4g3jzhDFv2mCEi\n20TkDRFZb6cXQUQuFpEXROQ1EfmZiBzinSAiB0TkyyLyPGZEb0TkOyKyS0Testc7wm4Pish/ish2\nEXlVRFaKSMjumyUiO0Tk6yLSLiJ/FZHzffepFJEfikjUuk2/4dvXJiIz7PIFNk2H2/X5InK/XRYR\nuVpE/mzv0SAi1XbfaHvefBHZDvxvbl+1w9EZEblQRB70rb8gIo2+9ZdEZKqITBSRjSLyuog8KyJn\n+46Jcy+KyFUi8oqIvCwiC2w+H+u7ba2I/FxE3haR3
4nIGHveZswUR0/bfWfj6DOcYcseZ2PmQRuD\nmZ7hQhGZA3wLOAs4BDPdSUPCeWcARwFHiMhc4ASM+6IKOAd43R63HBgPTLXfw4HrfNcZhpkC4lDM\nvEarReQwu+97QAVQD8wGviAiX7T7NtttAB8F/mK/wUypsckuLwJOt+k7FHgDWJnwLB8FJgEfT/qG\nHI7cshk4HsAWIAPAsXZ9LFCGmQblf4C7gYMw86GtFJFJiRcTkVOAr2ImAh2P0UliLexcYAlQjdHO\nMgBVnWX3T1HVSlX9cbYe0tE9zrBlj1tVdZeqvglsAGYAFwB3qOo2Vd0HfB04VkT8syF/S1XfUtX3\ngH1AOcbIiar+SWOT610MXGGPfRf4NnCe7zoKXKuq+1T118AvgHPsjLznYubw+oeqbgduBj5vz9uM\nMWBgjNa/+9b9hu0S4Buq+qp9lqXAWb4ZfxVYoqp77LM4HH2Kqr4I/F1EpmMKWU3AK3Zyy48CvwH+\nGXhRVX+ohm2Y2ZyT1ajOBu5U1edUdS9wfZJj7lfVJ1X1AHAPMD1hvyQ5x5FjXPBI9vDP7voPTK2m\nFnjS26iq74rI65ja1kt288u+/c0i8j3g+8AoEfkp8K9ABCgFnhTp0EkR8aJ5w4rPY7tNw0GYkutL\nCfuG2+Xate+vAAAgAElEQVTNwE12WvYizISG14vIaMwU9NvscaOB+22DOPbe+4Chvuu+jMORXzYD\nczA1rE0Yz8JsTM1tMyYfHyMiu+3xAhQDP0xyrUMxE2N67KCzofqbb/kfmIKpI8+4GlvuUMyMy/Xe\nBhEpA4YQbwDiXBuq+j1V/TBwBDAR+BrwGkY0R6pqrf1UW3elR42IRHzro+z9X8MYoNG+faOBnfZ+\nfwH2AF8Bfq2q72DE+iVgi++cl4BTffevUdUyVX011bM4HHng1xhDdjzGkP0a43n4qF3fAWxKyMeV\nqrowybVeBUb41kfh8viAwBm23LIe09Y21QZ6fAt4TFV3JDtYRD4sIkeLSAnG2OwFDqiZ5nwN8F0R\nqbPHDrdtch2nAzeISEBETgA+AdxrXSSNwDIRKbc1sSuAH/nO3QwstN9gSrr+dYDbgW95blQRqROR\n0xPu73DkG6/GFlHVVzDux1MwBcqtwM+BCSLyOREpsXr5sIhMTHKte4EvisgkESkF/i3DtPwNGNvt\nUY6s4wxbdkhailPVR4BrgZ9iakhjMI3Vqc6rxBiw3cCLmNrWTXbfYkzD92Mi8iawEZjgO/dVjNvl\nFYzRukRVX7D7FmFqfH/FlGDvVtU7feduxrhQfp1iHeBW4AFgo4i8BfwWOLq7d+Bw9CU2z/8dm3dV\n9e+YoI4ttk3tHUyQ12cxWnkF014dSnKtXwErgGZM1PLv7K5025CvB34oIrtF5KyePpMjc8RUBro5\nSKQK+G9gMnAAmI/5oRsxbq024BxVfStnKXWkRERmAT9S1VHdHuzIK05LAxcbOfkMELKeEEc/Jd0a\n263AQ6p6OCaU/TngauBhVZ0IPIKJ+HM4HF3jtDSAEJEzbR/SGkyXmwedUev/dFtjE5FKYKuqjkvY\n/hwwS1V32Yi6TaraqS+II/e4GtvAwGlp4CEiv8REVH6AaXv+F18XHEc/JR3DNg1YDbRiSphPYDot\n7lTVGt9xu1W1NodpdTgGNE5LDkffkE4/thJgJqak8oSIfAfjOkm0iEktpIi4oAJHwaKqmUSDOi05\nHF2QoZ5Skk4b28vADlV9wq7fhxHnLhEZCmDdJ9FUF1DVAfdZsmRJ3tMwWD4D9V33gEGppcGYN9xz\nZ/7JJt0aNjX+5B12WBqAjwF/BB7EjEkIMA8TCu5wOFLgtORw9A3pDqm1CLhHRAKYvlBfxAxDc6+I\nzMcM0XRObpLocBQUTksOR45Jy7CpGS/wqCS7TspucvoPs2fPzncSBg2D6V0PRi31hsGUN/wM1ufO\nFml10O7VDUQ01/coFNrb22lra6O+vp66urp8J8fRDSKCZqmxO837DWotOX0UNtnUkxtSq5+wfn0j\no0ZNYM6czzNq1ATWr2/s/iSHo4Bob2/n8ccfp729vdO+9esbGT16EieffCmjR09y+nB0iaux9SGP\nPvooGzdu5KijjmLo0KEdJc/29naGDx/Hvn0lmOEkXyQQ2MfOnX91JdN+jKuxZY/16xtZsODLBIP1\nvP9+G3fcsZKTTjqRtrY2ysvLmTnzI+zd+++YMYWDRCKfYfv25zr04WpzA5+s6imDUMwi4A+YIWXA\nTMfyGGacu/VASYrz1KF68smnKkQUDlGIaHHxRI1EanXdugZtampSKFXYpqD2u1Sbmpo0Go1qS0uL\nRqPRfD+CIwGbt3sS1uy05CMajWokUhuX/4PBKg2Hq7WqaqYWF5cqhKxGxitUaShUry0tLaqqum5d\ng0YitVpVNbNDU46BR0/1lOyTiSvycsyICR7LgZtVdQLwJrAgU6PaX+nKJdITHn30Uf7nfzYBD2EG\nBn+M/fufY8+eZhYs+DJvvvkmZk7DqfaMqcAh/OIXDzn3S2EyaLSUDm1tbZSUDMdoox2YyvvvH8Te\nvd/nrbd+xf79JZh5dn8HvAD8mvfe28Xmzb+hvb2dBQu+zJ49zbz11pMdmsqWdh0Dk7QMm4iMAE7D\njErucSKmgynAXcCnspu0/JALX/7GjRsx8xWWYQrnMQMmMpxt27Zh5j+8GDNl28+AV1ix4vZOgn32\n2WezanQdfctg0lI6tLe389Of/oy///0FTP6fgJnzdidmooM1QBVmjk9/wW8k//ZvN7B161aCwfq4\nfYHAaNra2nqdLqezAUw61Trgx8B0zEy0D2Im7Xvet38E8HSKc3Nafc0myVwikUhtr92AW7ZssW7I\nZoXaBJdjRCGoELDLh9nvYoV6hbUKrQqqodAoDQTKtaxssoZC1XrTTTc7N2UeoQeuk8GipXTwXIjG\nvVijcLn9HuZzPY5TCFtNxOsmHB6tjY2NPs1GFe7RcLi6w42fKdFoVG+8cZlzbeaBnugp1afbfmwi\n8glgl6o+JSKz/bvSNZ7XX399x/Ls2bP7bR+NtrY2gsF69uw5BHgcqO8o/fWmQfq4445j7tzZbNx4\nGqb0eQzG9fg65jWGMIOHP4YpeT5tj3kVWIYZiWkq773XDkxg374dwNf52tf+jXC4DpF3uOOOlZx3\n3rk9TqOjezZt2sSmTZt6fP5g0lIqvCCP8vJy60K8D+PJ2Amcj3HXfwrjetxETA9HEdPNK8B+9u4t\nYd68Szn//LO4665j2b8f4FD27n2PM8+8FNU3+MY3vsYll1ycln7Xr29k/vxL2bv3feB37Nlj7r1g\nwRxOOulEF5SSZXqrpy7pzvJhfGMvYUZJeBV4B7gbM55dkT3mGOCXKc7PsZ3PHtFoVAOBCltqnKlQ\no4FAedZqRFu2bNHrrrtOFyxYoDBc4RqFEXZ5gi2Nep/xCovtcnOSEmutwhEKZQrNGonUamtra49q\ncC5ApWeQ
YQlzMGkpGf4gj1CoUktKRtp8PFOh2gZWtVgtzEzQwzib12+zx8ZqaLHanV8fVfYzPi3v\nRjQa1XC4WmGpwoy4e1dWzugIVHHkjkz11NUnUzfKLGKRXI3AuXb5NuDSFOfk9GVkk2g0qsFgVafo\nrGz+4ZvoyLD9VClUWGF2drVApUKDFfsYhSb7iSpMtfvrFRo1GDxUg8HKjN0nLqKs5/RGiIWupUTi\n3fxRhf9Ikefvt5qoTrJvitXCTKuLWrstYAuCiYbwHrt8rUJIg8HxGgyW66JFl3dyVd544zJrHKd0\nSlc2miMc3dNfDNsY4PfEprUPpDgnt28ji7S0tGhVVXxJMZultVhb2zY1tbVxCssUhqhpU4tYgUYU\nTtVYzexK375SW3INdgjW/BGMU1PTbEhbjLlqUxwsZNGwFZyWEolpyzNIE22e9Rujw2x+9vK318ZW\nZguCNWq8F9V2ebm91lSrj+UJhrBVYbVdPtJeO9ZtIBCo1HXrGpLoYLk9Z1xcYc/v2XBejuyTN8PW\noxsMIDHm+o/+sssus+JVW2qttEKt0FhwSYvGgkyiampqiSXbGrstWU3PnFdWNlWbmpo6niuZCHNt\nyAudbAoxnc9A0lIiMVdfjcZqbRXauVY21JenPVdj2PcptXm8XpMHYk2224eqqRWWWz1594v3yAQC\nlbpixQqtqIh3P0K9XnLJZR2a8Xs2AoFKLS6OaEXFFOflyCLOsOUQLwNXVs7Iaqa96aabVSQxuuta\nu16TpPQ6Q2PtB4lulhkKozXmikzcd5tCSEOhSl24cJGGw9UaDh+m4XB13PO4GlvvcIYtM4y7z5+X\nb+6oGcVqXGs1eXvzEJvfvZpcmcK0hOMm2/ObfYW+w9TU8E622xKvPU6DwUOt0YyPrPR0kEwnJg3V\nCssz0oyr6aXGGbYc09raqmvXrtXW1tasXG/Bgos11n5Q4hNcyBomz7+fKJyQLXHWJOyrtkKs0+Qu\nmLCatoIqe78Ku16hIuG49oVcGfLBgDNsmZHcQJTYfDzFbmvV5G1vl1uDFbS6CKY47giN7x7QYHVQ\nqqm72xxujaanzXFaUlKhN964rMMQJXo2YgXPWi0vn9zJy5HMgLn27K7pU8OG6VfzCGZCxGeARXZ7\nDbAR+BPQBFSlOD/nLySbrFvXoKFQpYbDozQUqux15mttbbVCnKbGzTjFinetwg0ac0GWWnGNs8sB\nNe1u0zTWLjHD7vOMWa3GIibrrQEL+gTt1QTL7adeIaLB4OEaidR2CNeVIntGD6IiB5WWkuEvSAWD\nVXa4LK/9zDM452t8e3OJxmp1I9TU1uYoiMbX+EZYDS2w26JWI/doLMrS05J37cRCYXPcejBYrkuW\n3KChUGIwi9dUMFVDoco4Ha1atbqTAevKO+L0Z+hrwzYMmG6Xy634JmGGAbrKbl8MfDvF+bl+H1kj\nGo1qUVHEZvAJChEtKgr3KsOtXbtWYazPCPkjw7wG8Co1bQJhNSHPQYXTFE7Rzu0Nlep12DaGrkVh\nuppSb7GammA04T7eeYniLO3knnSkTw8M26DRUlf4/8hXrFihcLDGB4tEFI6zehimppA3RKFITSHR\na3ceatc/o/BJ376wmkKeZ9D8elCrw3I1NTV/LWy81ZO3Pq7jXkVFlb70xYK0IKI33XSz3njjMg2H\nq7W0dIK9f8xgRiK12tTUlLQ923UGj5FXVyRmvKeTgOeAoRoT7HMpjs/py8gmjY2Nmsy90djY2ONr\nmhqbv4Y1yq6P0Vh7Q1RNGP9BVsT3++7vtRX42yH8pcZmK7SwwnUaX0L1SqczrSgPTRCycaf42wii\n0ag2NTVpU1NTj/vFDRZ6K8RC1lK6xPRxrcY8CwGbX+tt/h9itx1q9x9k83tAY8EgnmdksZpC3HI1\nBTnPxe/3YHj3S+aWbFZ/wc/o0ttXqrDQXtd0Cxg71nQhMIa5ymqqxqYzqqBaWjpF16xZ06nWF4nU\n2oAa18atmkfDhhnosM2WNt9I2Lc7xTm5fBdZZdmyZRqLWoyV4pYtW9ar6y5cuMhnzEJaXDzMCibV\n8Fpe+L5nyFoVjtFYG8BYjZVYS62IhtprNlhhe0EpXQnXuFOCwUna2Nioq1attq6hg+29whoKHamR\nSK3edNPNSdsdB7MbpZfh/gWtpUxI1Edd3VCb/7wa2cEKXmfuZo1FDgc1VtOr82nXK9CN0VjB0Gt7\n82qHVfaaETXdBWo15gKd3sk4GT0WacybE1Lj+gz5dOn3kER8RtFzt3reIOMGPeGEWVpaOjXu/6ar\naOZC11peDJsV4BPAGZpEfMDrKc7L5bvIKrmosXl4ASlbtmzx+dpjpchwuEZXrVqtn/jEP2u8S9Jf\nWxO7b5QV1G1WPM0JRus/rHFLjKYcb8/r7E6JjVXp3atOYx3EvX49RpBnn32uRqPRTo3hq1atjhNe\ntoNw/PQHkfdUiINBS5mSmFe8UXqWLLnB1mr8Xo+JGiu4FVudeIFUfndjSGGLmr5sITUFwqDVULOa\ndu7D1RhKz4DV22vFuxNjXpHhGuumE1VjIMs1vi3c85DUJNFZmU2/1zevc9BYOFyt8+dfrKFQtVZU\nmKCu00//lAaD5VpaOk7D4Wq98cZl2traqo2NjbpixYqO95ZMF/1BK+nQ54YNKAF+BVzu2/Zsgvvk\n2RTn6pIlSzo+zc3NOX05vcG0sXklKzMYcW/b2JLhb0D3MqnfGJgMf5wVbVhNCfCaBOEuj0unEUip\nGldIpT0vsU2tQk3p8mC7b4bG+gQl9ikKq2mEr9TONctSDYWqNBAo18RCQCg0RsPhat/8c6atcuHC\nRR3P5/8D64no8hVd1tzcHJeXeyLEwaKlbNLa2qrFxWUa694Sstqo9hmZFjVtbf6gk/N9ebNKodF+\ne8fVa7KCrNFIiU9PtdY4jVdTAPS6GbRYIzVFY53GE6/ltYerxkZDifqObbDnJits3maP9Xtzxvh0\n7B8oOqL/9E8f6aSL/hyJmQ09pfqkK8YfArckbFsOLLbLBdPgne2oyFR09Yc+efIMm5FHaqx/W6N2\n7ut2hMa6BZSoKQXeY8VUZj/+BvmAwue0c2fwajV9gLzrNmhsUseQdm5k90KdSzVW0vWEO9H+eXjR\nmTGRn3/+59VvjOfOPbVL0fmNoPe+NmzY0GnYs3y1S/TQsA0aLWWLWLi91xbdZI2S38h4n1FqXIbe\nSD7jrCEoUzP6SFBNDWukPcZrz/M04nWludYue4bIc93/P413O1ZZg7RQux7Wy3NJRjU2LJh3XJNN\nT6KWJmjMqI6zuqlV+LR2jiT1jF+sfT7WhhfTeiBQqVu2bEnrvfd1Ta9PDRtwHLAfeArYipn59xSg\nFngYE9m1EahOcX7u30iWyWfVPdaY7o+c9JY715CMMP3+e0+knoDH2m1eRNk9akp9/lLiao01wvvv\nqZp6AOZoEuHW+P4EEo1eshFUImrcQUZ03kDOTU1N+slPfkoTo1MDg
aH2ueLbQfM1WkqmQhyMWsoG\nqULlL7ro4qR5KhAYpqFQpS5adLnOmzdPg0Gvpldh83qzxtrmwnbZmyrn4xorEHqjoIz3GRi1Rshr\nB/fO97SXmL/DGmvDK8tAY34t1Vh9RjXmYj1MOw8UPd7qyayXlExUE3jjuUe9EVlKOjwo/gLjlVde\nqWvWrEnazNAXNb28BY/06AaDVIw9xXQP8I+OYGpPkchkG9jhDx4RNQar1gqo3oqsWAG7bYT9LtFY\no3mpmpJdRGORXN61x2vnkucoTe4u8VyeXt+7Bt85iUbPa9vThGO8ESRqtaTkYA0EKjW1EfSnf2DW\n2HrzGcxaSjWQwPz5nnEzXoD58y9O2sbkheN75y9cuMi60r1amld781yUno5qbN5t9uVFz0tSq7Hg\nlNsUvu7TideeVqyxbgieAZxs9/kDV0L2XlNTaGmZxtyfYzW169MfUZ18HjtT4A3rTTfdrJFIrRYX\ne22WRqNFRWWdmhn6QmPOsBUw8TW2WAZtbGzUaDSqra2t+tWvflVPPvlkvfzyy/Xuu+/Wc889V42R\n8yIavZEZytQ0dpcrhGxmrVITmOJFVXoG0ev745VkE8XgRZBVaGwwZm+A2cUpBBTW+IlTk123Ocl6\ni3Z2MdX7zo8F3XgD2eYDZ9j6llSelHSDlJJFGfq7tjQ1Nektt9yiF154oV5zzTW6Zs0aLSubqGbU\nE6+9rVpNG91Qm6fv9+lgstXX2WqM5Bc0FmHsGc8jNBas4gW1HG61Wq6mS4M3vFcynTRrLJCmXOPd\nqKdqrEtDhcZmP/Dr6Air30M7ufS78rj0hVfEGbYCJxb+bIyC5zboCtM2WKWh0FgNBMr1Yx87WQOB\nCg2FxmooVNXRkBwOV2tZ2QQNBMo0NtzWBI01SJeqCRrx2ie8UVAWarwr5CtqamTjNeYmrVZT4qxQ\nYzy9EVZaNdbwXmP/IKrUuH/8ovM6yCZ2qE1W44sqDNc1a9b0wS+SHGfYCpuYC7RZYx2+zR9/IFCp\nV199jYbDNRoKHWHzthkir6SkQufPv9i2b4V8xswbGzMxSMsLEmnWWFcdv5Y8Xfrbyr3uEUE1XpsK\nn64iaqJBk3X58QJuglpWllh4nKGxDurxHhdXY3NizAo9CZVPp9+Lf9vcuaeq3105Zcp06+70+gV5\nbQ+ekLxoM7Fi9UTi1aKq1NQQk4/jF2sTmWDPSywx+kumXtTnOGvkEsO5t2lJScWADPfv6cdpqe/x\nXKDhsImgDIWOSDqVjVfj84/DGo1G9eqrE6OZzcDnxcUT7LfJ16Y9zPOihDS+GcHrAI7GIiNDPmPq\neV6GW015TQX+tj/PKJq+f8XFQ5IMExarsZWUVMS5bgdaG5uY6/UMETkF+C5QBNyhqsuTHKO9uYcj\ntzz66KNs3LiRuXPnctxxx9He3k5zczO7du1izJgxvP7667z44nZuuOGbmEj1A8yZczyPPtpCSclI\n3ntvO1BEJDKWDz7Yzrx55zF2bD033XQLr732NjAc2MncubO5++67GDVqAnv3fh84GbgTuB6YCGyn\nqGgPxcVB9u2rw0wwDfAN4CNEIp/h8ssv4TvfuY2iohGo7uQHP7iN8847t69fWQcigqpKFq/XpZ6c\nlvJDe3s7bW1tlJeX884771BfX09dXV3a599++xouv/wqAoHR7Nu3nW9+81pmzTqh43r+7x07dgBQ\nVlbG5s2baWtrY9++fRx33HEcd9xxPPPMM/z5z39m//79LFmyDNUSIIiZjL0UeI9AoJ59+7YDnwIe\nAiqAVzBxSh8HRhKJLOA73/k2V1xxNe+/fxD79+8ADgAjKCr6G3fffQcnnXQibW1tGT9vT8mqnnpq\nETHi+zMwGghgIr0mJTkuF8Y95wyWPkLp0lVtMFXbh9fJ1h9enDgIbklJmYbDIzrGrPTaPYybp3OJ\nsT91NiWbrpM09DRQtdQbCkWHmebbdJ7b37QQClV2dNr2D8RcXj5ZQ6FKXbDgoqTBN6miIvNBVvXU\n4xPhGOCXvvWrsX1xEo7L4avIHUuWLMl3EgqWRKN40UUXJRVTfzJiyciyYetWTwNVS71hsOow3efu\nSiMDbUiubOqppBeVveHADt/6y8DRvbieY5BQV1cX59oYPnx4UldH4nEFjtOTI2O60kjivsGkp6J8\nJ8DhcDgcjmzS4+ARETkGuF5VT7HrV2Oqkp0avHudSoejn6JZauxOR09OS45CJ2t66oVhK8YMAfQx\nTAhbC3Ceqj6bjYQ5HIMJpyeHI3v0uI1NVfeLyELM2HZeeLITocPRA5yeHI7s0at+bA6Hw+Fw9Ddc\n8EgCInKKiDwnIs+LyOJ8p6cQEJERIvKIiPxRRJ4RkUV2e42IbBSRP4lIk4hU+c5ZISIviMhTIjI9\nf6nvHxRSvsxmfhCRefad/ElEvpCP50kXESkSkT+IyIN2vV5EHrPpXy8iJXZ7UEQa7PP+TkRG+a7x\ndbv9WRGZm69nSRcRqRKRH9v0/lFE/qlPfuds9RsohA9pdjp3n4zf6zBgul0ux7QlTcLMQ3aV3d4x\nDxlwKvALu/xPwGP5foY8v7+CypfZyg9ADfAXoAqo9pbz/XxdPPcVwN3Ag3a9ETjbLt8GXGKXLwNW\n2uVzgQa7fARmuqMSoN7mCcn3c3XzzGuBL9rlEvtb5fx3djW2eI4GXlDV7aq6D2gAzshzmgY8qvo3\nVX3KLr+DmTF6BObd3mUPu4vYuz4DMyEnqvp7oEpEhvZpovsXBZUvs5gfPg5sVNW3VPVNTPvkKX32\nIBkgIiOA04D/9m0+EbjPLt8FnGmX/e/hJ/Y4gNMxRu4DVW0DXqAf93UUkUrgBFW9E8Cm+y364Hd2\nhi2eZJ1kh+cpLQWJiNQD04HHgKGqugvMnx3gGa/E32Eng/t3KNh82cP84D3/QMon3wG+BiiAiAwB\n3lDVA3a//zfteC5V3Q+8JSK1DKznBRgDvCYid1oX7GoRKaUPfmdn2Bx9hoiUY0qgl9uSemLkkotk\nGkT0Ij9kbeDpvkBEPgHssrVUf9rTfY4B9bw+SoCZwPdVdSbwLmaouJz/zs6wxbMTGOVbH2G3OXqJ\nbRj/CfAjVX3Abt7luRhFZBgQtdt3AiN9pw/236Hg8mWW8sNAeS/HAaeLyF+B9RjX4q0YV5v3H+xP\ne8fz2v6Nlaq6m4Gni5eBHar6hF2/D2Pocv47O8MWz+PAeBEZLSJB4LPAg3lOU6HwA6BVVW/1bXsQ\nuNAuXwg84Nv+BegYkeNNz3UxSCnEfJmN/NAEnGwj72owcyE15T7pmaGq16jqKFUdi/ntHlHVzwHN\nwNn2sHnEP+88u3w28Ihv+2dt1OQYYDymI3+/xP5GO0Rkgt30MeCP9MXvnO+omf72wTRK/gnTMHt1\nvtNTCB9MiXU/JppvK/AH+55rgYft+94IVPvO+R4m6msbMDPfz5DvTyHly2zmB/vH+ALwPPCFfD9b\nGs8+i1hU5Bjg9zbtjUDA
bg8B99rnegyo953/dfsengXm5vt50njeaZiC2VPATzGRjTn/nV0HbYfD\n4XAUFM4V6XA4HI6Cwhk2h8PhcBQUzrA5HA6Ho6Bwhs3hcDgcBYUzbA6Hw+EoKJxhczgcDkdB4Qyb\nw+FwOAoKZ9gcDofDUVA4w+ZwOByOgsIZtgGEiBwQkbF9eL+/22lFHI4Bj50+ZWmax74oIid2f6Sj\nP+IM28AiZ+OfiUiziMyPu5lqhZoJDR0Oh2PA4AzbwKJH8xPZqS8cDscAxOk3c5xh6yUislhE/iwi\nb4vI/4nImb59F4tIq2/fdLt9hIjcJyJREWkXkRW+c+bbc14XkV+KyKgU9w2KyH+KyHYReVVEVopI\nyO6bJSI7ROQqEXkV+IGIVIvIBnvP1+3yofb4bwInAN+zaV1ht3e4PkWkUkR+aM9/UUS+4UvLPBH5\njYjcJCK7ReQvItLl1O0ORyps/vpXEdlm3eFrRORgEXnI5s+NIlJljz3damu3iDwiIpN815khIk+K\nyFsi0gCEE+7zzyKyVUTeEJEtIjIlgzQOFZF37TQq3raZVh/Fdj2llkXkuyLykk3b4yJyvG/fEhH5\nsYj8SETeJDaFjSNd8j2twUD/AJ/BTHUOZu6kv2OmOj8bM535TLtvLGYSvSLMFA7/iRFaEPiIPeYM\nzLQME+xx1wCP+u51ABhrl78D/AwzDUQZZk6jZXbfLGAf8C0ggJkGoxb4lF0uw0yTcb/v2s3A/IRn\n2++73w+B+4FSYDRmyokv2n3zgPeA+Zha5aXAznz/Nu4zMD/Ai8BvgYOAQ4BdwBPAVKuX/wWuBQ4D\n3sFM3FkMfA0ztUmJzfdtwCK77zPA+8BSe48Z9roftnn28/a+AV8aTuwmnT8HLvGt3wLcape70/L5\nQLXddwXwKhC0+5ZYPX3Srofy/ZsMtE/eE1BoH8z8UqcDvwK+kmT/MVZQRUn2PeQZC7tehJlOfaRd\n9xu2d4AxvmOPBf5ql2cBez2RpkjndOB133oyw3YAY5CLrNAm+vZ9CTNhomfYnvfti1ijeHC+fw/3\nGXgfa1TO863/BPi+b30hppD1b0CDb7tgCpMfxXggXk647qM+w7YSuCFh/3PACb40dGfYzgG22OUi\na5w+ZNe71HKSa+0GptjlJcCmfP8OA/njXJG9RES+4HNnvAEciSlpjgT+kuSUkcB2VT2QZN9o4Fbr\nVvVSpsMAACAASURBVNkNvI4JGBmecM86TM3pSd+xvwSG+A5rV9V9vnMiInK7iLRZ98ZmoFpE0mm3\nOwhTCn7Jt217Qrr+5i2o6h7Mn0x5Gtd2OJLhnzF9T5L1ckxtbru3UY1VeBmTLw8FdiZcc7tveTRw\npacfq90R9rx0eQA4XERGA3MxMz4/6bt+Si1bV2ur73+jEqMzjx0ZpMORQEm+EzCQsT7z1cAcVf2d\n3bbV7n4JGJfktB3AKBEpSmLcXgK+qarru7n1a8A/gCNV9dUUxyRGUF6Jcd0cpartIjINM3Ox2GO7\nirh8DePaHI0p1WKXE/84HI6+QoFXMO5JPyOJ5csRCftGYWZnBqPDZar67z1OgOp7InIvxo05CfiR\nb3dKLdv2tK9h/jda7bbdxAeHuRmge4GrsfWOMoy77jURKRKRLwKT7b47gH8VkZkAIjJOREYCLRiX\nxbdFpFREQiLyEXvO7cA1InKEPadKRM5KvKktma4Bvmtrb4jIcBGZ20VaKzAl3bdFpBa4PmH/Lozb\nsRPWAN8LLBORcltCvYJ4ITscfc29wGkiMkdESkTkXzEu+N8CvwP2ichX7L5PA0f7zl0DXCoiRwOI\nSJmInCYiZRmm4UfAhcAniddDV1quwBQUXxcTBHad3ebIEs6w9QJVfRa4GXgM44o7Ethi9/0EWAas\nE5G3MW0CtdZIfBJTe3oJU3I8x57zM+DbQIN1Fz4N+KML/aW4xZjS52P22I2YhupUfBfjvnwNI/yH\nEvbfCpxtI7i+m+R+izC1xL8CvwbuVtU7u7ifK3E6ekpi3kmal1T1BeBzwPeAduATmICLD6wb/tPA\nFzFuwLOB+3znPglcjIkE3o0J9PBHH6aVf1X1t5jC7R9UdYdve1dabrKf5zFtef/AuR6zitjGyq4P\nMqG1/42pjRzARL89j4msG42JPjpHVd/KWUodjgLAaanwEJH/Be5R1R/kOy0OQ7o1tluBh1T1cGAa\npp3lauBhVZ0IPAJ8PTdJdDgKCqelAkJEjsJ0HWjMd1ocMbqtsYlIJbBVVcclbH8OmKWqu0RkGCY8\ndVLSizgcDqelAYiIPITpOuD9UXrBVt/CBIycASxSVdfe3I9Ix7BNw0T+tWJKmE8AX8V0wPX3ut+t\nqrU5TKvDMaBxWnI4+oZ0XJElwExMB8mZmE6GV5NmA6/D4ejAacnh6APS6cf2MrBDVZ+w6/dhxLhL\nRIb63CfRZCeLiBOpo2BR1UwGpnZacji6IEM9paTbGpuq7gJ2iIgXSv4x4I/Ag5j+G2DCZB/o4hqD\n6rNkyZK8p8E9c+4/maJOSy5fuedO+ckm6Y48sgi4R0QCmH5MX8QMLHqvmDm8tmP7Yjkcji5xWnI4\nckxahk1VtwFHJdl1UnaT43AUNk5LDkfucSOP5IDZs2fnOwl9zmB8ZkfuGaz5arA+d7ZIa+SRXt1A\nRHN9D0f2aG9vp62tjfr6eurq6vKdnH6NiKBZauxO835OS4OAwarBbOrJ1dgGEO3t7Tz++OO0t7fn\n5Prr1zcyevQkTj75UkaPnsT69dkfTCEbz5Dr9+Bw5Iv16xsZOXI8H/3oWYwcOT4tDTo9JCGDiJUi\nzDQnD9r1eszgv88D64GSFOepo/esW9egkUitVlXN1EikVteta8jq9aPRqEYitQrbFFRhm0YitRqN\nRrN2/RtvXJbxM0SjUW1paelIR67fQybYvN2T6C+npUFCYv7t7tiioohCROEwhYgWFYW7PLc/6aG3\n9FRPyT6ZiPEK4G6fGBuBs+3ybfimSE84L7dvYxDQndHJRDyprr927VqtqJhir28+lZUztKWlpdOx\nmd5r3boGDYerFUozMpyJol21anVOjW+m9MKwOS0NAjI1Oo2NjdaoNSu02O+INjY2Jj0+14XRvqbP\nDRtmwr7/AWb7xNgOFNnlY4BfpTg3x6+j8GlpadGqqplJjU4m4vEbJW/ZMxYVFTOsqJZ3EknisZnW\nuIz47lFI/gxdnxcTbShUadOZ3jVyTU+E6LQ0OIjl35iR6s7oLFu2TGGYQq3VSq3CUF22bJmqqra2\nturatWu1tbVVVbv+XxiI5MOw/RiYDszCdCYdAjzv2z8CeDrFuTl+HYVPqpJZa2trgnjWaiBQplu2\nbOl0jVWrVmsoVKmlpUdqIFCpgUC5lpVNVggp/L+4EmJ5+WQNh6v1xhuXdRizsrLDreHz0tCsoVBl\nh8hSERNf1Ao1vdJlMtGWl0/WUKi635RQe2jYnJYGAS0tLRqJjI0zUuFwfYfRSeb5W
Lp0aYLGtilE\ndOnSpbpw4eVWq0MVQrpw4SJXY+uNYcNM3vc9uzzbJ8YXfMc4MWaZVG1LlZUzOmpLjY2NKnKwQpnC\nIVYU4xUiOn/+xR3XWrVqtd03zQptuUK5XT7S7jtEoVpDoXq99NIvJ9TizleoVJhgBdRgz52goVC1\nrlq1OqV7Ml58DQo1CuPSql0mE61naP3vIV9kKkSnpcIlUa+tra2azK3Y2traoeWysmlxefjKK69U\nGBdXmINxeuGFFyoE7PUm2O8SbW1t1Zqag+N0X1tbl8/X0Cv62rB9CzPT81+BV4F3bPtANMF98ssU\n5+uSJUs6Ps3Nzbl+PwMev3sxHK7RSy+9TFtbW+PEY0pwxTZTj01a0rvpppt1w4YNWlRUlrCvc3uX\nWa9SCGgwWNXpWnC/NWbNnWpeENGKiikpDY3fKHs1wXRKlcmMuWrv2xR7SnNzc1xe7oFhc1rKEvnK\nA8lYt65BQ6FKDYdHaShUqevWNWhLS4sWF4+Iq7EVFw/XpqYmq6+YwQsGqzQajerKlSt9WltrvyN6\n+umn2+3LFE633xE966yzbC2uSmGi/Q7q3Xffne9Xkha91VNXn8wOtu4Tu9wInGuXbwMuTXFOTl/O\nQCQajWpTU5M2NTV1EmaymooxOuEO98OaNWt8Rm2bFci0hJLeVIWgQomaCCv/vlFJSoYz1LSDRRTq\nE/YdZu/RoKbmlni9qXZ/6qCWnv4R9ac/sER6I0SnpZ6TzUjAxHYrj1T5LvH4+EhGU5sqKgrrhg0b\nkhY2b775Zk3WjtbU1GRrbCUKYYWR9rtYjz/+eGvAQgoH2++ADhs2zP43xIwklOr06dN7/D7ySX8x\nbGOA3xOb1j6Q4pzcvo0Bxrp1DRoIVNgMOV6Dwao4YSZrW4oZnZAGApXWKHnuQ9Vk7Vfg3SOxbWyb\nFUx1wrZae51xalybiTW2Zrt+v2+9SeE2e62o9jSoZaCSRcPmtJQm2WxXMl6PmBFZuHCRqqY2nOb4\nmAFbuHBRykjGyy67TI17sFVN7atVYZyeeeaZSQ3e0qVL9aqrrrJGq8YavRqFoB5zzDFqXJGxNjYo\n0SFDhlgjWa2mxlatMFQPP/zwrL7zviJvhq1HN3Bi7CAajdqw95pOwmxtbdWWlhZfQEii0WnVzu5D\nv8FZbtcPtxm80nest2+8/R6qsXayGfa6y/T/t3fucVZV1+H/rnnceQLDxBGYARkEkUdABEOw1Agp\nJmqMRm0koG2imDZaFU1+bRKMovlVf7X92XxiLYniMzFEUk1UqilqAFM0AlXwxSC+QJGnGogPHsPM\n6h9rH+5jzjCX4Z47d87s7+dzP+fefc85e+97zzrr7LXWXjs5Ouyl5oMLvks4ZTfUtf00d56h7vsK\ndz7ry/Lly7WsrHdK29pGWB7OKK5QyKUgZvPyshQEZaRPS6mo+HSHkYDhPrC2SmT58uWhinP58uWh\nCmnOnDlqD5i1mvRh99fZs2c7ZVSuZiEpVyjRE088UcP8aGeccYaeddZZITJeqVVVVRrmY6usrAzt\nw5lnnpmPvyLn5FKefOaRPHLbbfPZs6cae0Af60rH0tran+OPn8Qpp3yLCRP+nJNO+izmajnGbS8A\nVgMD0o6DQcCpwHDgOuAEzH1TAdQCe7FI8n8A+mNunUuAPwEjgXXA/wFagFuBzwF3Acux/NgzgAS2\n6PM+YBfwNDa4eBZ4HfgDUAZcSHn5ycyceS5Tp57K3r1HAOdiA5CxlJYO5rbb5h/IbDJw4DE0NBx9\nWFlOfMaFeBP2/1ZXV7N79+vAMmAVsIzdu9+guroagKeffpq5c+fy9NNPHzgmLKPOfffdhy2qsAx4\nzm1LuOOOO2hp6UeqnLW09OPxxx8HGjAZXOW2Da5tO4GlwBq33UVLSws2D78COMJti6ipqQE2Ay+6\n878IbGbo0KF88skntJXxAe6hpgSTuVfdtpTi4uLQPojkLctb4ZIrDdneC/+Uqaqpo7U+bUZsbU19\nZW6btJtDWUgQSIUbTf2twnJNmhmDp7vUKMjUwI86147j3cgsGM3VupGXus9lbp9kJgSYpOZTS3/i\nLC4+UqdP/1q79ZaX14SMRPuqmTAP3ZxUCKZO/IgtMsICMlSDScw1GSOhPrpw4UI95ZTT0q7VL3zh\ntHZNl9OnTw8dOZ12WnCOdDm75pprNCxQ46STTgo9z6c//enQ0ZcFfJRqukyV6KJFi/Tyyy8PrXv4\n8OHa1qw5TI86KsxXPlSnT5/exf9e58ilPHlhzBNJ39n9TplUOoXQSysqhriL8nZNBmekKpmhWlRU\npsXF/TU9CrKPppsEyxRma7jivN0JxEAnWJVq9vlMQUpVgL8J+b48pKyvKy8LOdcQhTL96lenh/gO\nxyjcorD9kCaWFsr8Ha/YoiE9IMPM50FqqW9+85saZn778pe/HKoU5s+fr4nEyDSlkEiMdKbCtvt/\n7Wtf06RpcazbDtDJkyeH1nv00UeHKrCRI0eGKp0TTjhBwwI+5syZ48L6+6b1G2p0xIgRGqYMjzkm\neJ/eh/PPP7+r/8JOkUt56tAUKSIDRWSJiLwiIi+JyBWuvK+IPC4ir4rIYhHpE82YMh40Njayb98G\nzAT4JnAzicQ2nnrqt5iJ71pgNvDfWAzBUuBSzLywhdbWv6KlZR82zeldzDQIZpb4AzAfEGAEcBTp\n5oxhwO+ACZhJpAQzP+7FzCup+9YCZwHz3HcDaWv+nAycCIwHpmKBfA1AP9JNNQMxU+j9LFq02PU/\n1QTzOvAjYBi7d79GY2Nj2m/Wnqlxw4YNJBKNae0qLR3Mhg0b2vzuhYSXpexYunQpra1g1/ZrwLO0\ntgpLly5ly5YthJnfXn75Zdpeqw387ne/Y9++17Fr/0ZgAvv2veauqRZsOuF4t22hqqoK+ABoBj50\n2/cpKysLrdf234+Z8Y912/0cddRRhJkc9+7di8nGFGxZvinAADZv3kyfPn2wGSCKybcCH7mHmsAU\nuZ7AFPnee+8BNZgMBrLYh3Xr1nXuh48R2fjY9gPfVtXR2N3s70RkBPA94ElVPRZYAnw/umZ2f+rq\n6rjzznlUVEyld+8vUlFxNffccweTJ09m9uxvATcRrmS+jP0FD2DKbj2wErvQjwaaMGV2M/Z3XoXd\nDDIVyMOkC0YR5mvLFL53gXuB6cDHwKaM7zdh/jzF/HPrMGX9vnsdC3zLbdcB/wp8hURiCHPmfIeK\niqmYop3kzvMmpszTL8WDrTSQfEhItqu5eWMbxViAeFnKgiVLlgD1pD8k1bNkyRJGjRpFmB+qvr6e\nttfqu06BlQCPYVMGHwNKeeqpp7CHtFeB29x2kCsvwvzMb7ptcYpCSm3TAFdeBezBfG17gEr27NmD\nKc7PAaPdtoUhQ4YQpvDGjh3Lpz71KdfWFdhC6iuAUpqbm0N/j/Lyclfng64PDwK7GDduXGd/+vhw\nqEM84CFstd91QD9X1h9Y187+UY1cuyVh
kYCLFy925o+wSdOlCkXadu5Yo1r0YqbZsVJtLkxgouyr\n8A1NZg0JXsPU/HP93L6fdvsG8+MCM8zkDNPIFa6ehKabQW9XM41mTjlI96EtXrxYy8oa1DKeJNtT\nVTU2Ld1QR6bG9iZv5xMO03TiZSmcM844I8TsV6ZnnHGGi0Jsa3477rjjUq77pLlu9OjRmjQtBvPG\n+uuQIUNC5a22tjbUhNirVy/Xpl5qvr1eCgl3nrYRi6effromTfSBL7DGhfv3c98FMtdP58+f7+ax\nZcr5MOeva2sGtT4H8hrIZ7HOmTOnq//CTnG48pT6OqSoSBFpxPLcPesEcZuTtq3AkZ1Xr/Ek1ZS2\nY8cOHn/8cVavXk11dTUbNmw4UA5QUvIhZkqcQqppxJ4ea2j7NBqYS2pJf3qtB0qBM13Zq1hU5Du0\nHZktBLZiT7FvYxGPR7t633LnegFoxUyKrcCj2GCjGPgxNmprxZ5ah2a05UgSiTGUlZ3EnXfOo66u\njrfe2sjeve+78yfb09q66cCIKxtT44wZ09m4cR1PPnkbGzeuY8aM6R38G4WFl6X2GT16NHbdLyNp\n9iti9OjRzvTXB5OPMW7bh127dmHRxs8BV7ttozNdBlGLzxFELR5xxBHYAHoKSXnb70yOW0iXlS0u\nyrHVHVPutq0kEgnCIhbfeustbPSWOvray+rVqzET56+xCORfAx/y8ssvc/bZZxM26jz99NPd7/Fr\n4KduW+xGf41YTu2ZbtvofqOeTUm2O4pINWYPm62qH4mIZuyS+fkA11133YH3U6ZM6bbLnre3sm1T\nUxMrV65k4sSJAKxcuZIdO97n2mtvIJFoZPfuN2hu3otqEWZG2ExxcSWwBxFIJPqzf/9eTLG1YhmW\nmjGT4EjMbFfptvWYKaMF82kFZo2xJBXeY8Bp7nxb3Hdnu+MbMKW2H/OxgQn1UGxKwSZXx0r3eQam\n8MZiN5hTMX9dL2yO8TNADcXFf0NLi2a0ZTP79vUH/sQzzzzDtGmf56qrvocJeZOrt5bS0h3ceeft\nB37TdFOjnSvM1FhXV5fXFYaXLVvGsmXLDvs8XpYOTkNDA3YNpj+wNTQ0UFtbi/mkH8Mepj4GTqe2\ntp4NG5owufk6gVJoaUmEnmvbtm2YDOzGHtp2A/vdtIH3sWuzEdjgymv45JMPgQWu/j7ATGcmbOvb\nW79+PaZoA/NhI9DAO++8id12z0k7/8aNGxk+fDgirahOcufchEgrEyZMwB5uz005pjdf+tKXeOih\nxZgsXk8gc6YgC59cyVMo2QzrsH/ivzBBDMqaSDefNLVzbFQj17yQuWRLkLg0SPx70UV/k2GGKNZk\nOpyZzpxwTYj5pFKDlDnpZr0StajH7SnmiGPUoiWv0WRGkXKFBk2PlkqNpBzjygMzRRC1+EO1CLH0\ncPvkOUelmGjCUnUdoxZdFpgzzSSzaNGiAwuJVlcfp5lL4ATrSqVHRm7Xysrhunjx4ja/eyGYGjuC\nTphOerIsZUsyZ2K6uXHevHkuajEwyx2vgZnSTI5tTZGDBw8OPZeF0Je573prMHVm/PjxKbIw0G0r\ntb6+XpMRi4Gs1+jAgQNDz19cXKxh5sOioiINEo7beWoU+uu0adNSIqeTEZy9ex+fkt0kvY61a9fq\nZZddoammyCB7SnekM/LU3itbYfwZ8K8ZZTcB33Xvvwv8UzvHRvpjRElwcw1bqwwqtLIyPNw2aTtP\nqM0vq3YXXqqCCNJklTvlEJzjnJBzpobgj9C2voFyJ5xLU8pqXDuOckLyEw3zHVg4fl+Fr6plFwmy\nkgc+gLD+BYrR6iwqqkrLD2m+giEZdQ3TG2644ZDC9As9M0knFVuPlKVDwbJzBFNZAhnroyeeeKKe\ncsop7rrdrvbgZWngTIH1c9fkQLft53xmCU1XhAmXjipTERZrXV2dk697FK5120qtqAgeElPLK5zv\nra1CLSsLlGa67JSXB9NllmpqCq6TTz65Xd/y4sWLD5ptpb18l92NvCo2LLa7BZtWvxpb0v5UzLnz\nJGZYfhyoaef46H+RCAhPRhzkUwyUwne0bVBGMiGwCcglaiOrTGUUnKtB2zqMU5egSR2FjdUgx2Rb\nBXWZJufeVKiN7vpqMo/j9pA2VDuhb9TkZO+xrrzKtS0QxMA5XarJXJWzFdoubtjeE+bChQu7xUgs\nWw5VEHuqLB0qNjpqm82+vr5eL7744tBr69hjjw1VGKZ42k5uTiqq9PMkEgkNU4RWHiQzCBRYlUt3\ndUyb8x955JEa9iBpeR/bKu0rr7xSVcMtFYUybzNq8j5iO6wKuqkwhicjzlRa80OEI135FRXVOEFJ\nOCUTRCre7/bvo2HLwECQEHVpRnlpSJ19nNA1umOCp86Epo8yExpMDLdjblJ7sv1nhXK99NLL3PGj\nNJndYanaqDMwmdan9Cc8a4hNsA0UoglvMME2+L6QR2LZkktBzObVXWXpUBkzZoy79lKz3JfpmDFj\n3IgtGCEFD1sl2ru3jdCS+U8ta357JkEbUbWNQBw0aJC7tvuoPbRahpFhw4K60uXURpdty2+66SYV\nSX+QFKlMyTmZvjRN6mgrTD7i9EDYHl6x5YHwEVuqmbHK3dhnavrTV7q50gQxKK9RW3aiXJNh8vdr\nMiFxIDzXqCnQwZpMrlrtlMsAV3cfTZoK+7lzBouOzlYzBc7RYGFPq9uWxRAp1+Li1BUCkrb5WbMu\ndjeCIZr0/R2j5h+o1PLyo7SkpEpLS6sPKmTtpUSKE16xRUMyfVW6MrrmmmtcKqy2IyQbmbXN6AGE\nKp6SkpLQ8iuvvFLDTJ1Tp04NVYTnn3++1tbWaaqiDRb7XLDgfk0k+mhJybFpq3h01i8WlwfC9vCK\nLU+kPiXZBVqlVVXDtbS0V9qN/V/+5Wa95557dMaMC9pcsDbvJjBJBMJSr3CqU0aBYC11wny9E+TA\nPJgqqMGaTEvduX7hFN4NbvsLTQaDVGhZ2bGafPI1hVxaWn1gJYHly5eH2ubXrl2rt9xyS0qGfqu/\nvLzmwBpy2QiZF0QvS53BfLRt55h95zvfaTfL/syZMzXMhGhmzbYmwYkTJ2rS+pE0s69du7bNQruJ\nRB+97777QutdtGiRqlrAy0knnaTz5s1L60u267p5vGLLK+0ts5LtBZtcIj5z5DdE206uLNFkBGOJ\n2ggsNTdkSYbw9lYzq4x05b0PCHVJSZUuXrxYf/rT27W8vEarqoZreXlN1iOnMFPsoeRz7Al4xRYN\ntpBuW2U0f/58VVX9whdOS5ObINmxJTJIjWYsalcRrl271o2oqjWR6KeJRPUB2QgeaKuqxqZZJMaM\nGZdW75gx3XNBz0KlYBSbc3yvw/I0fbedfSL8KQqTpUuXpn3OND3MnHnBAeWX+qS3fPlyvfbaa3X5\n8uW6aNEinTVrll566WVaWlqlZWUNWlbW2wl1Unhnzbr4wHSE9hRYZ0ZOh+qwzuxzTyDXiq0jeeop\
nsmQ+2mAli6UKL6RF3qpqmqwELFhwv5aUVGpRUS8tKakMMf2Z+T3V9NeebLRXHshlMFKLCi9PXaTY\nsKnwrwODsVQXa4ARIftF+mMUInPnzm1Tdjimh7BFEg9lOfvOcigO67A+x52cCmIW8tSTZMl8tDVa\nXHyElpVlb2mIi+nPy9PhvbLOPBLCROA1Vd0IICL3Y2nhfWrpEEaOHMnIkSM7dWxmho32zpXrTBwz\nZkxn2rTPh2Zb8eQcL08pBNfenDlzuPHGG7O+9tqTgcORP0/343AUWwOWgDBgEyacnhiR77RVPRgv\nTxnU1dXR0NDgrz/PISM2AuzEgSLnAl9U1b9xny8AJqrqFRn7da4Cj6cboKqSi/NkI09eljxxJ1fy\ndDgjtnexFS0DghUw08hVQz2emNOhPHlZ8niy45CWrclgFTBMRAaLSAL4GvBIbprl8fQ4vDx5PDmi\n0yM2VW0Rkcuw3HZFwJ2q2pSzlnk8PQgvTx5P7ui0j83j8Xg8nkLkcEyRB0VEThWRdSKyXkS+G1U9\nXY2I3Cki20TkxZSyviLyuIi8KiKLRaRPV7Yx14jIQBFZIiKviMhLInKFK49tv0WkTERWiMhq1+e5\nrrxRRJ511/kvReRw/NaZdfYRkf8QkSb3W3825r/xcPf7Pu+2u0Tkijj3GUBErhKRl0XkRRH5hYgk\noryuCgERme3kKJL7RySKTUSKgFuBLwKjgRkiMiKKugqAu7F+pvI94ElVPRZYAnw/762Klv3At1V1\nNHAi8Hfu/41tv1V1LzBVVY8HxgGnichnsbXUblbV4cBOYFYOq/0x8JiqjgSOw+a0xfk3Xq+qx6vq\neGACtjz2b4hxn0WkHrgcGK+qYzH30Ayiva66FBEZjfXnBEyWzhCRoeTwf45qxHZgsqmqNgPBZNPY\noarLgT9mFJ8F3Ove3wt8Ja+NihhV3aqqa9z7j7AVoAcS/35/4t6WYTcgBaYCD7rye4Gzc1GXiPQG\nTlLVu13d+1V1FzH/jVOYBryhqu8Q/z4XA1VuVFYBbCai66pAGAmsUNW9qtoC/B44BziTHP3PUSm2\nsMmmDRHVVYgcqarbwJQAcGQXtycyRKQRe+p6FugX536LSJGIrAa2Ak8AbwA7VbXV7bIJqM9RdUOA\n90Tkbmeau11EKon5b5zCdGCBex/bPqvqZuBm4G1sescubAHaqK6rQuBl4CRneqwETgcGkcP/OTIf\nmyeNWEboiEg18AAw243cMvsZq36raqszRQ7ErBJRmtdLgPHAvzvT3MeYqSbWvzGAiJRiT+//4Ypi\n22cRqcFGpIMx5VWFJcOOLaq6DjO1PgE8hq0m3xK2a2friEqxZTV5O8ZsE5F+ACLSH9jexe3JOc5s\n8gDwc1V92BXHvt8AqvonYBnmX6xxPmXI7XW+CXhHVf/HfX4QU3Q94Tc+DXhOVd9zn+Pc52nAm6r6\ngTPL/QaYTHTXVUGgqner6gmqOgXzIb5KDv/nqBRbT5tsKu4V8AjwDff+68DDmQfEgLuAtar645Sy\n2PZbRI4IorREpAI4BVgLLAW+6nbLWZ+dSeYdERnuiv4CeIUY/8YpzAB+mfI5zn1+G5gkIuUiIiT/\n50iuq0JBROrc9ijMf7iAHP7Pkc1jE5FTsaiuYLLpP0VSURcjIguAKcCngG3AXOAhzIwyCNgInKeq\nO7uqjblGRCZjDt+XMHOBAnOAlcCviGG/RWQM5tAucq+FqnqDiAzBgqP6YiaVC1zAVC7qPA645tQ7\nWwAAEVtJREFUA1vG5k3gQizQIJa/MYDzuWwEjlbVD11ZLfHu81zs4b8Zu4YuxkZpkVxXhYCI/B6o\nxfp8laouy+X/7CdoezwejydW+OARj8fj8cQKr9g8Ho/HEyu8YvN4PB5PrPCKzePxeDyxwis2j8fj\n8cQKr9g8Ho/HEyu8YvN4PB5PrPCKzePxeDyxwis2j8fj8cQKr9i6MSLyExG5Otf7ejweT3fGp9Ty\neDweT6zwI7ZuSsqSFh6Px+NJwd8cCwwRGSEiS0XkjyLykoh82ZXfLSLzRORREfkQmOLKfphy7D+I\nyGYR2SQis0SkVUSOTjn+h+79ySLyjoh8W0S2ici7IvKNruivx+Px5Bqv2AoIt3jnIuC/gDrgCuA+\nETnG7TID+L+q2gt4OuPYU4Ergc8Dw7CldA5mZ+4P9MJW7b0Y+PdgvTGPx+PpznjFVlhMAqpU9SZV\n3a+qS4H/BGa67x9W1WcBVHVvxrFfBe5W1XWquge4roO69mFKskVVfwt8BBybq454PB5PV+EVW2FR\nD7yTUfY20ODeZ353sGPfIX1V70zeV9XWlM+fANVZttPj8XgKFq/YCovN2OqxqRwFbHLvD2Za3IKt\nupt6nA959Xg8PQ6v2AqLFcAnLgikRESmAGdgS8R3xK+AC13wSSXwgwjb6fF4PAWLV2wFhKo2A18G\nTgfeA24F/kpV17d3SMqx/wXcAiwF1gN/cF9l+uLarb4zbfZ4PJ5CI6sJ2iKyAdgFtALNqjpRRPoC\nC4HBwAbgPFXdFV1TPYeCiIwAXgLKMnxpHo/HE2uyHbG1AlNU9XhVnejKvgc8qarHAkuA70fRQE/2\niMhXRCThHjpuAh7xSs3j8fQ0slVsErLvWcC97v29wFdy1ShPp/lbYDvwGtAMXNq1zfF4PJ78k60p\n8k3gA8wPc5uq3iEif1TVvin7fKCqtdE11ePxeDyejinJcr/JwDbgBeBmEXkVqHQKbxem8EJHfyLi\ngxI8sUVVDzZX0OPxdAFZmSJVdQswG1NsW4CJwG7gh6p6PBbFt+Ugx3fJa+7cuT2q3p7W5wUL7qei\nopaysv5UVNSyYMH9ea3f4/EUJh0qNhGpFJHhmPK6DzgSi7Z7G/gLt9vXgYejaqQnO5qamlizZg1N\nTU1d3ZTI2bFjB7NmXcru3Q+yd+8Z7N79ILNmXcqOHTu6umkej6eLyWbE1g9YiWXEmAdsVdXHMeV2\njojsAS4B/n9krfR0yOWXX8moURN4+OH/ZtSoCVx++eyublKkbNiwAagBzsXSaZ6Lam9X7vF4ejLZ\n+NhGYSO1K4B1QBAg8iNgqPtcgkXg/TDsBNddd92B91OmTGHKlCmdbe8hka96urrepqYmbr31duBZ\nLManlltvncSll36LkSNH5qUN+e5zdXU1u3dvIbXPe/ZMoro6unSXy5YtY9myZZGd3+Px5IYOoyJF\n5EbgAqAKqARKgQVAGfCAqv6HiDwEDFHV40KOV++PiJYbb7yRq6++C3g9pXQYN9xwEXPmzOmqZkXK\nqlWrOPnkWeze/eKBsoqKMTz11F185jOfyUsbRAT1wSMeT8HRoSlSVecAfwY8j03Kfk9V/xrzrz0o\nIoIFkhRUZvgdO3awatWqHuFz2b59O5Y/ObjJvwhsduXxpLGxkZaWt4FlwCpgGS0t79DY2Nil7fJ4\nPF1PthO0fwT8PS6foIh8Chu9rcYiJUuxZU8Kgl/+ciGD
Bg3jc5/7SwYNGsYvf7mwq5sUKUceeSTQ\nG5gKjHfb3q48vjQ37wG+CJwKfNF99ng8PZ1sTJFnAbdh4fx9sAUqJwNvAu9j89hKsJyEw0OO17lz\n5x74HLWPbceOHfTvP5jWVsUWod5BURFs3fo2dXV1kdXblTQ1NTFq1ATMQrwL+5tmsnbtc3nzseWb\nX/3qV0yffj72TDUQW9lnHwsXLuC8886LpM5MH9v111/vTZEeTyGSxVydG7FFK9/ElFsL8FvM/Hiu\n22cS8Nt2jtd8snDhQoUyhb4K4902oQsXLsxrO/LJ9u3bFUoUKhSGuW2xbt++vaubFhkXXHCB6+cL\nCuq2FXrBBRfkrQ3u2u6SeYP+5V/+1f4rKx+bqg5S1aOx+WofAtcCWzHfGxTQPLZnnnkGKMZ8L8+5\nbYkrjycPP/wwNnJ5FksT+SyQcOXxZMWKFdhIbawrGQs0uHKPx9OT6TDcX0TKgN8DY7BIyJ2qukpE\n1gJXichVwEeYD67L2bp1KzDAvVYBjcAAVx5Pnn/+eaCe9Jt8vSuPJ/v27cPyPS/DAnY/Bt5l3754\n+xU9Hk/HdKjYVHWviExV1U9EpAbYKCLTsezx04GHsMnav8Ay/rchn/PYdu7ciflbjgH6YwPL3ezc\nOSyyOruaAQMGkIyKHEsQFWnl8aRfv35s3LgJS4gT+Nj20q9fv8jq9PPYPJ7uQVZJkFU1iHjch43O\nJmFmyHNUtVVEfgxc397xqYotajZt2uTeFWFP8kUZ5fGjuLiYZFRkcJPv7crjySuvvAIkMLNroMwn\nufJoyHwou/76di95j8fThWSTK/IIEakRkdXY8KcFu5t86JSaYDHmBcHAgQMJ87FZeTzZv38/sBP7\na/a67U5XHk8+/vhjwnxsVu7xeHoy2cxjG4dFRY7AHpErsCHBIBHZh81f+0vMydHltLS0YP611Bve\nAFceT5544gnsr/w9lvXs90CRK48npaWl2GWYOin9XVfu8Xh6MtmYIl8GTlLVNSJSDWwAzsTMknNV\n9WYRmQTMbe8E+fSxmcmxrb9p06b43vDWr18PNJAZPGLl8WT8+PGsWPEcZhUPzK/NjB8/IbI6vY/N\n4+keZKPY9gNvuffBsGcnpuCC3JAHDffPp49t27ZtWDOnYBGRG4AWVx5n3iVTmUOvLm1RlDQ0NAD/\nA7Rik9JbAXXl0eB9bB5P9yAbxTYAuFdEirBw/2Lg37B12S4RkfOwu+gPImvlIWBRkcOApzGl1gj8\nGTt3vtGFrYqW5uZmLNvIVGAwsBHoTXPzvi5tV5S88MILmFW8OaW03JV7PJ6eTDY+tj9iI7Qy7K75\ngKp+BPw75szZiOWQ/FFUjTwUVBUzS20BPuO277ryeLJ3715s1PIglv3sQeBPrjyeDBs2DFNqlZgp\nshJoduUej6cnk60p8u+BG4C7gItFZARwIfCkqv6ziPw/YFZ7J8inj82WEtmL+V4aMBPdXix4M55U\nVFTwySctpM/pKqOiIr7h/jZKDaJfA/Pria48GryPzePpHnSYBBlARH6GLVfzbbf22q3AT4A/V9Vt\nInIt8B1V7RNyrOZztGQKrMI1703gaGyB792xHbUNGzaMN97YTGYS5KFD63n99dcPfnA3ZcCAAWzd\nWkXmGnT9+3/Mli1b8tIGvx6bx1OYZJNSazJwPvCSiLyCObDuAY4CnhCRVsyZ1RpdMw+VBiyeJeAf\nSb8BxgsLcW8BLiI1YCbOoe/19fVs3dpEZsBMfX08VzPweDzZk42P7SLgPUwJ7gbOw6IhSzEzZSvw\nUwpKsQURghDMb4ozthzPIOBVzMf2KjAotsv0AJxwwgnY5TsJS582CShy5R6PpyeTjY/tbmAe5sz4\ngao+LCLjgB3Aac4U2R/LSBtKPn1sRnDDC3xs8bYW1dbWYqHvr2BpxF4BNlNb++kubVeUVFRUYM9S\n6SZnK48G72PzeLoH2SRBXi4iDwJ7VPXHKV+9AnwDuIkCmsdmtJLpb4oztlL2fix4ZBCWKKY51ito\nV1VVYYlwLiEZMJNw5dHg57F5PN2DbH1sXwH2uXyRiim1o4EfiMj3geeBc6Js6KFRjzU59XN857FV\nVlZiluE/kBohaOXx5LjjjsOS36Q/wFi5x+PpyWRjirwI+ACoVtXjAURkGGaeHOzOsUlVd0bWykOm\nbUqtOGMjs7b5MeM8Yps6dSrFxUJLy4VYHNPbFBcLU6dO7eqmeTyeLiZbH9sDwK9Tyr5JlnPYoCt8\nbL2Bk7GR2mb3eXfEdXYdZ599Nldf/Y9kKvOzzz67axsWIXV1dfz853dx0UXfwvJww1133RVpwIz3\nsXk83YNs57FNxhRZhfv8GlnMYXP7dtE8tsdIrqx8OnGexwZw+eWzufXW+QQBM5dd9k3+7d9+3NFh\n3Z4dO3awYcMGGhsb8x4F6uexeTyFSYeKTUQWAH8B1GEe+rlYeP+rWJTGBuBkVe3bzvFdoNgEKCcZ\nFbkH0FgrNoCmpiZWrlzJxIkTGTnSz+eKGq/YPJ7CJNsR22BgkaqOdZ8/UNVaEdmAee5HA8+r6sSQ\nY/Oq2MaNG8cLL7yIpVtKYAEGLRx33FjWrFmTt3Z44o9XbB5PYZKNjy2MbSLSDxuxnQc8FKbUAvLp\nY5s4cSIvvLAe+BLwO8wM+SgTJ7bbvJyzbNmyPPgRC6vuntBn72PzeLoH2Sq2wL4X8Ag2h02wSWLt\nzmGD/M5jmzZtGvPn/wJ4FDNHPgoI06ZNy1sbesJNvlDqzWfdfh6bx9M96DCllvOxPQMMF5G3ReRC\n4J+AUzAn1pXAF0Tkm5G2NEumTp1KSUkxlpFiDPATSkqKfRi4x+Px9BCyyTzSXtqOaSIyQFW3iEgd\nlhC5SVWX57aJh0ZdXR0/+9l8LrroEvbvL6Gk5Cruumt+rPMmejwejydJVsEjWZ1IZC7woar+a0Z5\nvEMRPT0aHzzi8RQenQ0eQUQqgSJV/UhEqoAvAG2cDl7wPR6Px5NPOq3YgH7Ab9yIrAT4hao+nptm\neTwej8fTOXJmivR4PB6PpxDIZqHRrBCRU0VknYisF5Hvhnz/dRHZLiLPu9dFOar3ThHZJiIvHmSf\nW0TkNRFZ49aSi7xeETlZRHam9PcHOap3oIgsEZFXROQlEbminf2i6HOHdUfY7zIRWSEiq13dc0P2\nSYjI/a7ffxCRo/JUbyTXtsfj6SSqetgvTEG+jmX7LwXWACMy9vk6cEsu6ss4758D44AX2/n+NOBR\n9/6zwLN5qvdk4JEI+tsfGOfeV2OpzTJ/66j6nE3dkfTbnbvSbYuBZ4GJGd9fAsxz76cD9+ep3kiu\nbf/yL//q3CtXI7aJwGuqulFVm4H7gbNC9st5IIna9II/HmSXs4CfuX1XAH1c1pSo64Vo+rtVVde4\n9x8BTdh8wlSi6nM2dUNES5ar6ifubRnm1820o58F3OveP4DlOM1HvRD3Zdo9nm5ErhRbA7Zsc8Am\nwm945zjT2K9
EZGCO6u6IzLa9S3jbomCSM2E9KiKjcn1yEWnERo0rMr6KvM8HqRsi6reIFLnFbrcC\nT6jqqoxdDvRbVVuAnSJSm4d6oWuubY/HE0LOfGxZ8AjQqKrjgCdJPlnHleeAwWqLs94KPJTLk4tI\nNTYqme1GT3mjg7oj67eqtrrzDgQ+m4XSzMkoKot6e9q17fEUNLlSbO9iyxgHDHRlB1DVPzozJcAd\nwIQc1Z1N2wYdrG1RoKofBSYsVf0tUJqL0QOAiJRgiuXnqhqWpzOyPndUd5T9TqnjT8BS4NSMrzbh\n+i0ixUBvVf0g6nq78Nr2eDwh5EqxrQKGichgEUkAX8OeYg8gIv1TPp4FrM1R3dA2SXMqjwB/7dow\nCdipqtuirjfVpyUiE7GpFbm6yd4FrFXV9lYSjbLPB607qn6LyBEi0se9r8Byla7L2G0RFsgB8FVg\nST7qjfja9ng8h8jhTNA+gKq2iMhlwOOYsrxTVZtE5Hpglar+J3CFiJwJNAMfYKsDHDYuSfMU4FMi\n8ja2EGrCmqW3q+pjInK6iLyOLad9YT7qBf5SRC7B+rsbi9LLRb2TgfOBl5zfR4E5WERq1H3usG4i\n6jcwALhXRIqwa2yh62fqNXYn8HOxFd7fxx6w8lFvJNe2x+PpHH6Ctsfj8XhiRT6DRzwej8fjiRyv\n2Dwej8cTK7xi83g8Hk+s8IrN4/F4PLHCKzaPx+PxxAqv2Dwej8cTK7xi83g8Hk+s8IrN4/F4PLHi\nfwHge5kM4IlySAAAAABJRU5ErkJggg==\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "%matplotlib inline \n",
+ "import matplotlib.pyplot as plt\n",
+ "\n",
+ "import pandas as pd\n",
+ "\n",
+ "from sklearn import datasets, cross_validation, metrics\n",
+ "from sklearn import preprocessing\n",
+ "\n",
+ "from tensorflow.contrib import learn\n",
+ "\n",
+ "from keras.models import Sequential\n",
+ "from keras.layers import Dense\n",
+ "\n",
+ "# Read the original dataset\n",
+ "df = pd.read_csv(\"data/mpg.csv\", header=0)\n",
+ "# Convert the displacement column as float\n",
+ "df['displacement']=df['displacement'].astype(float)\n",
+ "# We get data columns from the dataset\n",
+ "# First and last (mpg and car names) are ignored for X\n",
+ "X = df[df.columns[1:8]]\n",
+ "y = df['mpg']\n",
+ "\n",
+ "plt.figure() # Create a new figure\n",
+ "f, ax1 = plt.subplots()\n",
+ "for i in range (1,8):\n",
+ " number = 420 + i\n",
+ " ax1.locator_params(nbins=3)\n",
+ " ax1 = plt.subplot(number)\n",
+ " plt.title(list(df)[i])\n",
+ " ax1.scatter(df[df.columns[i]],y) #Plot a scatter draw of the datapoints\n",
+ "plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)\n"
+ ]
+ },
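+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Before fitting the regressor in the next cell, the sketch below shows one way the attributes could be standardized and split using the sklearn modules imported above. The scaler choice and the test_size fraction are illustrative assumptions, not necessarily the exact pipeline behind the 199/99 train/validation split reported in the training log.\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "# A minimal sketch (not part of the original notebook): standardize the\n",
+ "# attributes so they contribute on comparable scales, then hold out a\n",
+ "# validation set. test_size=0.33 is an assumed fraction.\n",
+ "scaler = preprocessing.StandardScaler()\n",
+ "X_scaled = scaler.fit_transform(X)\n",
+ "\n",
+ "# train_test_split lives in sklearn.cross_validation in the old sklearn\n",
+ "# releases this notebook targets (later versions moved it to\n",
+ "# sklearn.model_selection).\n",
+ "X_train, X_val, y_train, y_val = cross_validation.train_test_split(\n",
+ "    X_scaled, y, test_size=0.33)\n",
+ "print(X_train.shape, X_val.shape)\n"
+ ]
+ },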
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Train on 199 samples, validate on 99 samples\n",
+ "Epoch 1/1000\n",
+ "1s - loss: 558.5263 - val_loss: 654.6665\n",
+ "Epoch 2/1000\n",
+ "0s - loss: 558.1055 - val_loss: 654.1592\n",
+ "Epoch 3/1000\n",
+ "0s - loss: 557.6125 - val_loss: 653.5768\n",
+ "Epoch 4/1000\n",
+ "0s - loss: 557.0580 - val_loss: 652.8745\n",
+ "Epoch 5/1000\n",
+ "0s - loss: 556.3781 - val_loss: 652.0314\n",
+ "Epoch 6/1000\n",
+ "0s - loss: 555.5521 - val_loss: 650.9948\n",
+ "Epoch 7/1000\n",
+ "0s - loss: 554.5409 - val_loss: 649.6848\n",
+ "Epoch 8/1000\n",
+ "0s - loss: 553.2281 - val_loss: 647.9902\n",
+ "Epoch 9/1000\n",
+ "0s - loss: 551.5695 - val_loss: 645.7861\n",
+ "Epoch 10/1000\n",
+ "0s - loss: 549.4351 - val_loss: 642.9671\n",
+ "Epoch 11/1000\n",
+ "0s - loss: 546.6794 - val_loss: 639.4501\n",
+ "Epoch 12/1000\n",
+ "0s - loss: 543.3316 - val_loss: 635.0900\n",
+ "Epoch 13/1000\n",
+ "0s - loss: 539.1375 - val_loss: 629.8408\n",
+ "Epoch 14/1000\n",
+ "0s - loss: 534.2557 - val_loss: 623.4576\n",
+ "Epoch 15/1000\n",
+ "0s - loss: 528.1510 - val_loss: 615.8885\n",
+ "Epoch 16/1000\n",
+ "0s - loss: 520.9657 - val_loss: 606.9808\n",
+ "Epoch 17/1000\n",
+ "0s - loss: 512.7480 - val_loss: 596.5796\n",
+ "Epoch 18/1000\n",
+ "0s - loss: 503.0699 - val_loss: 584.9360\n",
+ "Epoch 19/1000\n",
+ "0s - loss: 492.4659 - val_loss: 571.7550\n",
+ "Epoch 20/1000\n",
+ "0s - loss: 480.0820 - val_loss: 557.0076\n",
+ "Epoch 21/1000\n",
+ "0s - loss: 466.5228 - val_loss: 540.3196\n",
+ "Epoch 22/1000\n",
+ "0s - loss: 451.3413 - val_loss: 522.0204\n",
+ "Epoch 23/1000\n",
+ "0s - loss: 434.7479 - val_loss: 502.3460\n",
+ "Epoch 24/1000\n",
+ "0s - loss: 417.2623 - val_loss: 481.3055\n",
+ "Epoch 25/1000\n",
+ "0s - loss: 398.1072 - val_loss: 459.2307\n",
+ "Epoch 26/1000\n",
+ "0s - loss: 378.4173 - val_loss: 435.4537\n",
+ "Epoch 27/1000\n",
+ "0s - loss: 357.3919 - val_loss: 411.0292\n",
+ "Epoch 28/1000\n",
+ "0s - loss: 335.7903 - val_loss: 385.8197\n",
+ "Epoch 29/1000\n",
+ "0s - loss: 313.9249 - val_loss: 360.2250\n",
+ "Epoch 30/1000\n",
+ "0s - loss: 291.4822 - val_loss: 334.4138\n",
+ "Epoch 31/1000\n",
+ "0s - loss: 269.3034 - val_loss: 308.6426\n",
+ "Epoch 32/1000\n",
+ "0s - loss: 247.2679 - val_loss: 283.8862\n",
+ "Epoch 33/1000\n",
+ "0s - loss: 226.5211 - val_loss: 259.0792\n",
+ "Epoch 34/1000\n",
+ "0s - loss: 205.6577 - val_loss: 235.2786\n",
+ "Epoch 35/1000\n",
+ "0s - loss: 185.7510 - val_loss: 212.8807\n",
+ "Epoch 36/1000\n",
+ "0s - loss: 167.4288 - val_loss: 191.7367\n",
+ "Epoch 37/1000\n",
+ "0s - loss: 150.2742 - val_loss: 172.1115\n",
+ "Epoch 38/1000\n",
+ "0s - loss: 134.6755 - val_loss: 154.2002\n",
+ "Epoch 39/1000\n",
+ "0s - loss: 121.0863 - val_loss: 138.5698\n",
+ "Epoch 40/1000\n",
+ "0s - loss: 109.3291 - val_loss: 125.2109\n",
+ "Epoch 41/1000\n",
+ "0s - loss: 99.2151 - val_loss: 113.8334\n",
+ "Epoch 42/1000\n",
+ "0s - loss: 90.7812 - val_loss: 104.1874\n",
+ "Epoch 43/1000\n",
+ "0s - loss: 83.8845 - val_loss: 96.3072\n",
+ "Epoch 44/1000\n",
+ "0s - loss: 78.5124 - val_loss: 89.5210\n",
+ "Epoch 45/1000\n",
+ "0s - loss: 73.7539 - val_loss: 83.9138\n",
+ "Epoch 46/1000\n",
+ "0s - loss: 70.4146 - val_loss: 79.3055\n",
+ "Epoch 47/1000\n",
+ "0s - loss: 67.4559 - val_loss: 75.9448\n",
+ "Epoch 48/1000\n",
+ "0s - loss: 65.2832 - val_loss: 73.1432\n",
+ "Epoch 49/1000\n",
+ "0s - loss: 63.5577 - val_loss: 70.8700\n",
+ "Epoch 50/1000\n",
+ "0s - loss: 62.0218 - val_loss: 68.7976\n",
+ "Epoch 51/1000\n",
+ "0s - loss: 60.7051 - val_loss: 66.9989\n",
+ "Epoch 52/1000\n",
+ "0s - loss: 59.4057 - val_loss: 65.4240\n",
+ "Epoch 53/1000\n",
+ "0s - loss: 58.2864 - val_loss: 63.8596\n",
+ "Epoch 54/1000\n",
+ "0s - loss: 57.2141 - val_loss: 62.5386\n",
+ "Epoch 55/1000\n",
+ "0s - loss: 56.1734 - val_loss: 61.2891\n",
+ "Epoch 56/1000\n",
+ "0s - loss: 55.2154 - val_loss: 59.9946\n",
+ "Epoch 57/1000\n",
+ "0s - loss: 54.2305 - val_loss: 58.8878\n",
+ "Epoch 58/1000\n",
+ "0s - loss: 53.2856 - val_loss: 57.9529\n",
+ "Epoch 59/1000\n",
+ "0s - loss: 52.4155 - val_loss: 57.0137\n",
+ "Epoch 60/1000\n",
+ "0s - loss: 51.5627 - val_loss: 56.1253\n",
+ "Epoch 61/1000\n",
+ "0s - loss: 50.6566 - val_loss: 55.1741\n",
+ "Epoch 62/1000\n",
+ "0s - loss: 49.8042 - val_loss: 54.1428\n",
+ "Epoch 63/1000\n",
+ "0s - loss: 48.9593 - val_loss: 53.0814\n",
+ "Epoch 64/1000\n",
+ "0s - loss: 48.0463 - val_loss: 52.2091\n",
+ "Epoch 65/1000\n",
+ "0s - loss: 47.2787 - val_loss: 51.4066\n",
+ "Epoch 66/1000\n",
+ "0s - loss: 46.4950 - val_loss: 50.6026\n",
+ "Epoch 67/1000\n",
+ "0s - loss: 45.7162 - val_loss: 49.8387\n",
+ "Epoch 68/1000\n",
+ "0s - loss: 44.9000 - val_loss: 48.8503\n",
+ "Epoch 69/1000\n",
+ "0s - loss: 44.0981 - val_loss: 47.8598\n",
+ "Epoch 70/1000\n",
+ "0s - loss: 43.2790 - val_loss: 46.9632\n",
+ "Epoch 71/1000\n",
+ "0s - loss: 42.4892 - val_loss: 46.0565\n",
+ "Epoch 72/1000\n",
+ "0s - loss: 41.7355 - val_loss: 45.2484\n",
+ "Epoch 73/1000\n",
+ "0s - loss: 41.0153 - val_loss: 44.2624\n",
+ "Epoch 74/1000\n",
+ "0s - loss: 40.2367 - val_loss: 43.3511\n",
+ "Epoch 75/1000\n",
+ "0s - loss: 39.5221 - val_loss: 42.3832\n",
+ "Epoch 76/1000\n",
+ "0s - loss: 38.8046 - val_loss: 41.6003\n",
+ "Epoch 77/1000\n",
+ "0s - loss: 38.1185 - val_loss: 40.8792\n",
+ "Epoch 78/1000\n",
+ "0s - loss: 37.4434 - val_loss: 40.2698\n",
+ "Epoch 79/1000\n",
+ "0s - loss: 36.8288 - val_loss: 39.4087\n",
+ "Epoch 80/1000\n",
+ "0s - loss: 36.1033 - val_loss: 38.7083\n",
+ "Epoch 81/1000\n",
+ "0s - loss: 35.4795 - val_loss: 37.9890\n",
+ "Epoch 82/1000\n",
+ "0s - loss: 34.8946 - val_loss: 37.3058\n",
+ "Epoch 83/1000\n",
+ "0s - loss: 34.2956 - val_loss: 36.6926\n",
+ "Epoch 84/1000\n",
+ "0s - loss: 33.7105 - val_loss: 35.9810\n",
+ "Epoch 85/1000\n",
+ "0s - loss: 33.1437 - val_loss: 35.3054\n",
+ "Epoch 86/1000\n",
+ "0s - loss: 32.6085 - val_loss: 34.6045\n",
+ "Epoch 87/1000\n",
+ "0s - loss: 32.0571 - val_loss: 34.0986\n",
+ "Epoch 88/1000\n",
+ "0s - loss: 31.5470 - val_loss: 33.4284\n",
+ "Epoch 89/1000\n",
+ "0s - loss: 31.0101 - val_loss: 32.8609\n",
+ "Epoch 90/1000\n",
+ "0s - loss: 30.5110 - val_loss: 32.2255\n",
+ "Epoch 91/1000\n",
+ "0s - loss: 30.0178 - val_loss: 31.7290\n",
+ "Epoch 92/1000\n",
+ "0s - loss: 29.5419 - val_loss: 31.1889\n",
+ "Epoch 93/1000\n",
+ "0s - loss: 29.0682 - val_loss: 30.6831\n",
+ "Epoch 94/1000\n",
+ "0s - loss: 28.6259 - val_loss: 30.1810\n",
+ "Epoch 95/1000\n",
+ "0s - loss: 28.2424 - val_loss: 29.5969\n",
+ "Epoch 96/1000\n",
+ "0s - loss: 27.7890 - val_loss: 29.1868\n",
+ "Epoch 97/1000\n",
+ "0s - loss: 27.3230 - val_loss: 28.7001\n",
+ "Epoch 98/1000\n",
+ "0s - loss: 26.9161 - val_loss: 28.1194\n",
+ "Epoch 99/1000\n",
+ "0s - loss: 26.4610 - val_loss: 27.7726\n",
+ "Epoch 100/1000\n",
+ "0s - loss: 26.0527 - val_loss: 27.4751\n",
+ "Epoch 101/1000\n",
+ "0s - loss: 25.6621 - val_loss: 27.3332\n",
+ "Epoch 102/1000\n",
+ "0s - loss: 25.2772 - val_loss: 27.1132\n",
+ "Epoch 103/1000\n",
+ "0s - loss: 24.9488 - val_loss: 26.8085\n",
+ "Epoch 104/1000\n",
+ "0s - loss: 24.6094 - val_loss: 26.4109\n",
+ "Epoch 105/1000\n",
+ "0s - loss: 24.2957 - val_loss: 26.1175\n",
+ "Epoch 106/1000\n",
+ "0s - loss: 23.9492 - val_loss: 25.7465\n",
+ "Epoch 107/1000\n",
+ "0s - loss: 23.6356 - val_loss: 25.4041\n",
+ "Epoch 108/1000\n",
+ "0s - loss: 23.3148 - val_loss: 25.1255\n",
+ "Epoch 109/1000\n",
+ "0s - loss: 23.0030 - val_loss: 24.6227\n",
+ "Epoch 110/1000\n",
+ "0s - loss: 22.6659 - val_loss: 24.2252\n",
+ "Epoch 111/1000\n",
+ "0s - loss: 22.3886 - val_loss: 23.8649\n",
+ "Epoch 112/1000\n",
+ "0s - loss: 22.1207 - val_loss: 23.5177\n",
+ "Epoch 113/1000\n",
+ "0s - loss: 21.8381 - val_loss: 23.2578\n",
+ "Epoch 114/1000\n",
+ "0s - loss: 21.5674 - val_loss: 23.0608\n",
+ "Epoch 115/1000\n",
+ "0s - loss: 21.3335 - val_loss: 22.6728\n",
+ "Epoch 116/1000\n",
+ "0s - loss: 21.0512 - val_loss: 22.4515\n",
+ "Epoch 117/1000\n",
+ "0s - loss: 20.8030 - val_loss: 22.2170\n",
+ "Epoch 118/1000\n",
+ "0s - loss: 20.5617 - val_loss: 22.1506\n",
+ "Epoch 119/1000\n",
+ "0s - loss: 20.3085 - val_loss: 22.0291\n",
+ "Epoch 120/1000\n",
+ "0s - loss: 20.1044 - val_loss: 21.9321\n",
+ "Epoch 121/1000\n",
+ "0s - loss: 19.8803 - val_loss: 21.7703\n",
+ "Epoch 122/1000\n",
+ "0s - loss: 19.6635 - val_loss: 21.5782\n",
+ "Epoch 123/1000\n",
+ "0s - loss: 19.4809 - val_loss: 21.4122\n",
+ "Epoch 124/1000\n",
+ "0s - loss: 19.2352 - val_loss: 20.8901\n",
+ "Epoch 125/1000\n",
+ "0s - loss: 18.9900 - val_loss: 20.4883\n",
+ "Epoch 126/1000\n",
+ "0s - loss: 18.8357 - val_loss: 20.1262\n",
+ "Epoch 127/1000\n",
+ "0s - loss: 18.6249 - val_loss: 19.8610\n",
+ "Epoch 128/1000\n",
+ "0s - loss: 18.4243 - val_loss: 19.6879\n",
+ "Epoch 129/1000\n",
+ "0s - loss: 18.2305 - val_loss: 19.6142\n",
+ "Epoch 130/1000\n",
+ "0s - loss: 18.0415 - val_loss: 19.4990\n",
+ "Epoch 131/1000\n",
+ "0s - loss: 17.8619 - val_loss: 19.5216\n",
+ "Epoch 132/1000\n",
+ "0s - loss: 17.6978 - val_loss: 19.4191\n",
+ "Epoch 133/1000\n",
+ "0s - loss: 17.5418 - val_loss: 19.4368\n",
+ "Epoch 134/1000\n",
+ "0s - loss: 17.3724 - val_loss: 19.2885\n",
+ "Epoch 135/1000\n",
+ "0s - loss: 17.2083 - val_loss: 19.1405\n",
+ "Epoch 136/1000\n",
+ "0s - loss: 17.0625 - val_loss: 19.0243\n",
+ "Epoch 137/1000\n",
+ "0s - loss: 16.9099 - val_loss: 18.9540\n",
+ "Epoch 138/1000\n",
+ "0s - loss: 16.7690 - val_loss: 18.8810\n",
+ "Epoch 139/1000\n",
+ "0s - loss: 16.6320 - val_loss: 18.7722\n",
+ "Epoch 140/1000\n",
+ "0s - loss: 16.4673 - val_loss: 18.5046\n",
+ "Epoch 141/1000\n",
+ "0s - loss: 16.3105 - val_loss: 18.2977\n",
+ "Epoch 142/1000\n",
+ "0s - loss: 16.1779 - val_loss: 18.1081\n",
+ "Epoch 143/1000\n",
+ "0s - loss: 16.0545 - val_loss: 17.7971\n",
+ "Epoch 144/1000\n",
+ "0s - loss: 15.8665 - val_loss: 17.6339\n",
+ "Epoch 145/1000\n",
+ "0s - loss: 15.7209 - val_loss: 17.5999\n",
+ "Epoch 146/1000\n",
+ "0s - loss: 15.5814 - val_loss: 17.5209\n",
+ "Epoch 147/1000\n",
+ "0s - loss: 15.4729 - val_loss: 17.5430\n",
+ "Epoch 148/1000\n",
+ "0s - loss: 15.3287 - val_loss: 17.4168\n",
+ "Epoch 149/1000\n",
+ "0s - loss: 15.1966 - val_loss: 17.2938\n",
+ "Epoch 150/1000\n",
+ "0s - loss: 15.0744 - val_loss: 17.1271\n",
+ "Epoch 151/1000\n",
+ "0s - loss: 14.9217 - val_loss: 16.9917\n",
+ "Epoch 152/1000\n",
+ "0s - loss: 14.8080 - val_loss: 16.9379\n",
+ "Epoch 153/1000\n",
+ "0s - loss: 14.6678 - val_loss: 16.8606\n",
+ "Epoch 154/1000\n",
+ "0s - loss: 14.5542 - val_loss: 16.8267\n",
+ "Epoch 155/1000\n",
+ "0s - loss: 14.4550 - val_loss: 16.7855\n",
+ "Epoch 156/1000\n",
+ "0s - loss: 14.3367 - val_loss: 16.6983\n",
+ "Epoch 157/1000\n",
+ "0s - loss: 14.2422 - val_loss: 16.6656\n",
+ "Epoch 158/1000\n",
+ "0s - loss: 14.1242 - val_loss: 16.5611\n",
+ "Epoch 159/1000\n",
+ "0s - loss: 14.0109 - val_loss: 16.5001\n",
+ "Epoch 160/1000\n",
+ "0s - loss: 13.9265 - val_loss: 16.3473\n",
+ "Epoch 161/1000\n",
+ "0s - loss: 13.8226 - val_loss: 16.3721\n",
+ "Epoch 162/1000\n",
+ "0s - loss: 13.7344 - val_loss: 16.2892\n",
+ "Epoch 163/1000\n",
+ "0s - loss: 13.6374 - val_loss: 16.2301\n",
+ "Epoch 164/1000\n",
+ "0s - loss: 13.5383 - val_loss: 16.1375\n",
+ "Epoch 165/1000\n",
+ "0s - loss: 13.4399 - val_loss: 16.0151\n",
+ "Epoch 166/1000\n",
+ "0s - loss: 13.3784 - val_loss: 15.7967\n",
+ "Epoch 167/1000\n",
+ "0s - loss: 13.2947 - val_loss: 15.8945\n",
+ "Epoch 168/1000\n",
+ "0s - loss: 13.1630 - val_loss: 15.9627\n",
+ "Epoch 169/1000\n",
+ "0s - loss: 13.0564 - val_loss: 15.9207\n",
+ "Epoch 170/1000\n",
+ "0s - loss: 12.9800 - val_loss: 15.8314\n",
+ "Epoch 171/1000\n",
+ "0s - loss: 12.9055 - val_loss: 15.8024\n",
+ "Epoch 172/1000\n",
+ "0s - loss: 12.8103 - val_loss: 15.7358\n",
+ "Epoch 173/1000\n",
+ "0s - loss: 12.7421 - val_loss: 15.6642\n",
+ "Epoch 174/1000\n",
+ "0s - loss: 12.6465 - val_loss: 15.4539\n",
+ "Epoch 175/1000\n",
+ "0s - loss: 12.5590 - val_loss: 15.2575\n",
+ "Epoch 176/1000\n",
+ "0s - loss: 12.4746 - val_loss: 15.2122\n",
+ "Epoch 177/1000\n",
+ "0s - loss: 12.4023 - val_loss: 15.1549\n",
+ "Epoch 178/1000\n",
+ "0s - loss: 12.3257 - val_loss: 15.0895\n",
+ "Epoch 179/1000\n",
+ "0s - loss: 12.2710 - val_loss: 15.1340\n",
+ "Epoch 180/1000\n",
+ "0s - loss: 12.1735 - val_loss: 15.0553\n",
+ "Epoch 181/1000\n",
+ "0s - loss: 12.1063 - val_loss: 14.9831\n",
+ "Epoch 182/1000\n",
+ "0s - loss: 12.0296 - val_loss: 14.9070\n",
+ "Epoch 183/1000\n",
+ "0s - loss: 11.9599 - val_loss: 14.9547\n",
+ "Epoch 184/1000\n",
+ "0s - loss: 11.8919 - val_loss: 14.8804\n",
+ "Epoch 185/1000\n",
+ "0s - loss: 11.8193 - val_loss: 14.7802\n",
+ "Epoch 186/1000\n",
+ "0s - loss: 11.7457 - val_loss: 14.8727\n",
+ "Epoch 187/1000\n",
+ "0s - loss: 11.6716 - val_loss: 14.8496\n",
+ "Epoch 188/1000\n",
+ "0s - loss: 11.6186 - val_loss: 14.8986\n",
+ "Epoch 189/1000\n",
+ "0s - loss: 11.5596 - val_loss: 14.8658\n",
+ "Epoch 190/1000\n",
+ "0s - loss: 11.5208 - val_loss: 14.8170\n",
+ "Epoch 191/1000\n",
+ "0s - loss: 11.4240 - val_loss: 14.7112\n",
+ "Epoch 192/1000\n",
+ "0s - loss: 11.3490 - val_loss: 14.5987\n",
+ "Epoch 193/1000\n",
+ "0s - loss: 11.2963 - val_loss: 14.5401\n",
+ "Epoch 194/1000\n",
+ "0s - loss: 11.2172 - val_loss: 14.4682\n",
+ "Epoch 195/1000\n",
+ "0s - loss: 11.1804 - val_loss: 14.5257\n",
+ "Epoch 196/1000\n",
+ "0s - loss: 11.0938 - val_loss: 14.3685\n",
+ "Epoch 197/1000\n",
+ "0s - loss: 11.0115 - val_loss: 14.1099\n",
+ "Epoch 198/1000\n",
+ "0s - loss: 10.9974 - val_loss: 13.8335\n",
+ "Epoch 199/1000\n",
+ "0s - loss: 10.9434 - val_loss: 13.7582\n",
+ "Epoch 200/1000\n",
+ "0s - loss: 10.9009 - val_loss: 13.7337\n",
+ "Epoch 201/1000\n",
+ "0s - loss: 10.8457 - val_loss: 13.7478\n",
+ "Epoch 202/1000\n",
+ "0s - loss: 10.8053 - val_loss: 13.7183\n",
+ "Epoch 203/1000\n",
+ "0s - loss: 10.7184 - val_loss: 13.8440\n",
+ "Epoch 204/1000\n",
+ "0s - loss: 10.6708 - val_loss: 14.0264\n",
+ "Epoch 205/1000\n",
+ "0s - loss: 10.6425 - val_loss: 14.1505\n",
+ "Epoch 206/1000\n",
+ "0s - loss: 10.5987 - val_loss: 14.0667\n",
+ "Epoch 207/1000\n",
+ "0s - loss: 10.5411 - val_loss: 13.9295\n",
+ "Epoch 208/1000\n",
+ "0s - loss: 10.4896 - val_loss: 13.7917\n",
+ "Epoch 209/1000\n",
+ "0s - loss: 10.4518 - val_loss: 13.6881\n",
+ "Epoch 210/1000\n",
+ "0s - loss: 10.4086 - val_loss: 13.6611\n",
+ "Epoch 211/1000\n",
+ "0s - loss: 10.3522 - val_loss: 13.5923\n",
+ "Epoch 212/1000\n",
+ "0s - loss: 10.3142 - val_loss: 13.5912\n",
+ "Epoch 213/1000\n",
+ "0s - loss: 10.2715 - val_loss: 13.5013\n",
+ "Epoch 214/1000\n",
+ "0s - loss: 10.2201 - val_loss: 13.3928\n",
+ "Epoch 215/1000\n",
+ "0s - loss: 10.1903 - val_loss: 13.3539\n",
+ "Epoch 216/1000\n",
+ "0s - loss: 10.1502 - val_loss: 13.3764\n",
+ "Epoch 217/1000\n",
+ "0s - loss: 10.0995 - val_loss: 13.4284\n",
+ "Epoch 218/1000\n",
+ "0s - loss: 10.0742 - val_loss: 13.5177\n",
+ "Epoch 219/1000\n",
+ "0s - loss: 10.0321 - val_loss: 13.4997\n",
+ "Epoch 220/1000\n",
+ "0s - loss: 9.9920 - val_loss: 13.4955\n",
+ "Epoch 221/1000\n",
+ "0s - loss: 9.9589 - val_loss: 13.4585\n",
+ "Epoch 222/1000\n",
+ "0s - loss: 9.9229 - val_loss: 13.4878\n",
+ "Epoch 223/1000\n",
+ "0s - loss: 9.8802 - val_loss: 13.5946\n",
+ "Epoch 224/1000\n",
+ "0s - loss: 9.8532 - val_loss: 13.5498\n",
+ "Epoch 225/1000\n",
+ "0s - loss: 9.8247 - val_loss: 13.4091\n",
+ "Epoch 226/1000\n",
+ "0s - loss: 9.7690 - val_loss: 13.3662\n",
+ "Epoch 227/1000\n",
+ "0s - loss: 9.7325 - val_loss: 13.2502\n",
+ "Epoch 228/1000\n",
+ "0s - loss: 9.7238 - val_loss: 13.0802\n",
+ "Epoch 229/1000\n",
+ "0s - loss: 9.6833 - val_loss: 13.0371\n",
+ "Epoch 230/1000\n",
+ "0s - loss: 9.6526 - val_loss: 13.1047\n",
+ "Epoch 231/1000\n",
+ "0s - loss: 9.6076 - val_loss: 12.9906\n",
+ "Epoch 232/1000\n",
+ "0s - loss: 9.5903 - val_loss: 12.9417\n",
+ "Epoch 233/1000\n",
+ "0s - loss: 9.5517 - val_loss: 12.9617\n",
+ "Epoch 234/1000\n",
+ "0s - loss: 9.5240 - val_loss: 13.0106\n",
+ "Epoch 235/1000\n",
+ "0s - loss: 9.5107 - val_loss: 13.0079\n",
+ "Epoch 236/1000\n",
+ "0s - loss: 9.4719 - val_loss: 12.9160\n",
+ "Epoch 237/1000\n",
+ "0s - loss: 9.4648 - val_loss: 12.9961\n",
+ "Epoch 238/1000\n",
+ "0s - loss: 9.4071 - val_loss: 12.8877\n",
+ "Epoch 239/1000\n",
+ "0s - loss: 9.3821 - val_loss: 12.8120\n",
+ "Epoch 240/1000\n",
+ "0s - loss: 9.3425 - val_loss: 12.6740\n",
+ "Epoch 241/1000\n",
+ "0s - loss: 9.3320 - val_loss: 12.6152\n",
+ "Epoch 242/1000\n",
+ "0s - loss: 9.3038 - val_loss: 12.6289\n",
+ "Epoch 243/1000\n",
+ "0s - loss: 9.2794 - val_loss: 12.6285\n",
+ "Epoch 244/1000\n",
+ "0s - loss: 9.2539 - val_loss: 12.5829\n",
+ "Epoch 245/1000\n",
+ "0s - loss: 9.2226 - val_loss: 12.5892\n",
+ "Epoch 246/1000\n",
+ "0s - loss: 9.1986 - val_loss: 12.5987\n",
+ "Epoch 247/1000\n",
+ "0s - loss: 9.1752 - val_loss: 12.6143\n",
+ "Epoch 248/1000\n",
+ "0s - loss: 9.1682 - val_loss: 12.6907\n",
+ "Epoch 249/1000\n",
+ "0s - loss: 9.1433 - val_loss: 12.5107\n",
+ "Epoch 250/1000\n",
+ "0s - loss: 9.1189 - val_loss: 12.4652\n",
+ "Epoch 251/1000\n",
+ "0s - loss: 9.0703 - val_loss: 12.5609\n",
+ "Epoch 252/1000\n",
+ "0s - loss: 9.0387 - val_loss: 12.7475\n",
+ "Epoch 253/1000\n",
+ "0s - loss: 9.0328 - val_loss: 12.8889\n",
+ "Epoch 254/1000\n",
+ "0s - loss: 9.0400 - val_loss: 12.7758\n",
+ "Epoch 255/1000\n",
+ "0s - loss: 9.0083 - val_loss: 12.6581\n",
+ "Epoch 256/1000\n",
+ "0s - loss: 8.9724 - val_loss: 12.5758\n",
+ "Epoch 257/1000\n",
+ "0s - loss: 8.9448 - val_loss: 12.5477\n",
+ "Epoch 258/1000\n",
+ "0s - loss: 8.9396 - val_loss: 12.6669\n",
+ "Epoch 259/1000\n",
+ "0s - loss: 8.9123 - val_loss: 12.5319\n",
+ "Epoch 260/1000\n",
+ "0s - loss: 8.8873 - val_loss: 12.3621\n",
+ "Epoch 261/1000\n",
+ "0s - loss: 8.8512 - val_loss: 12.2950\n",
+ "Epoch 262/1000\n",
+ "0s - loss: 8.8215 - val_loss: 12.2297\n",
+ "Epoch 263/1000\n",
+ "0s - loss: 8.8084 - val_loss: 12.1445\n",
+ "Epoch 264/1000\n",
+ "0s - loss: 8.8144 - val_loss: 12.0887\n",
+ "Epoch 265/1000\n",
+ "0s - loss: 8.7955 - val_loss: 12.0791\n",
+ "Epoch 266/1000\n",
+ "0s - loss: 8.7906 - val_loss: 12.0525\n",
+ "Epoch 267/1000\n",
+ "0s - loss: 8.7840 - val_loss: 11.9313\n",
+ "Epoch 268/1000\n",
+ "0s - loss: 8.7599 - val_loss: 11.9774\n",
+ "Epoch 269/1000\n",
+ "0s - loss: 8.7293 - val_loss: 12.0157\n",
+ "Epoch 270/1000\n",
+ "0s - loss: 8.7055 - val_loss: 11.9781\n",
+ "Epoch 271/1000\n",
+ "0s - loss: 8.6896 - val_loss: 11.9659\n",
+ "Epoch 272/1000\n",
+ "0s - loss: 8.6698 - val_loss: 11.8892\n",
+ "Epoch 273/1000\n",
+ "0s - loss: 8.6575 - val_loss: 11.9701\n",
+ "Epoch 274/1000\n",
+ "0s - loss: 8.6268 - val_loss: 12.1620\n",
+ "Epoch 275/1000\n",
+ "0s - loss: 8.6157 - val_loss: 12.3965\n",
+ "Epoch 276/1000\n",
+ "0s - loss: 8.6212 - val_loss: 12.3896\n",
+ "Epoch 277/1000\n",
+ "0s - loss: 8.6034 - val_loss: 12.1589\n",
+ "Epoch 278/1000\n",
+ "0s - loss: 8.5703 - val_loss: 12.0821\n",
+ "Epoch 279/1000\n",
+ "0s - loss: 8.5598 - val_loss: 12.0395\n",
+ "Epoch 280/1000\n",
+ "0s - loss: 8.5561 - val_loss: 11.9060\n",
+ "Epoch 281/1000\n",
+ "0s - loss: 8.5385 - val_loss: 11.9489\n",
+ "Epoch 282/1000\n",
+ "0s - loss: 8.5171 - val_loss: 11.9287\n",
+ "Epoch 283/1000\n",
+ "0s - loss: 8.5042 - val_loss: 11.8829\n",
+ "Epoch 284/1000\n",
+ "0s - loss: 8.5107 - val_loss: 11.9078\n",
+ "Epoch 285/1000\n",
+ "0s - loss: 8.4916 - val_loss: 11.9009\n",
+ "Epoch 286/1000\n",
+ "0s - loss: 8.4753 - val_loss: 11.7782\n",
+ "Epoch 287/1000\n",
+ "0s - loss: 8.4730 - val_loss: 11.7852\n",
+ "Epoch 288/1000\n",
+ "0s - loss: 8.4721 - val_loss: 11.8649\n",
+ "Epoch 289/1000\n",
+ "0s - loss: 8.4548 - val_loss: 11.9191\n",
+ "Epoch 290/1000\n",
+ "0s - loss: 8.4425 - val_loss: 11.8398\n",
+ "Epoch 291/1000\n",
+ "0s - loss: 8.4242 - val_loss: 11.8511\n",
+ "Epoch 292/1000\n",
+ "0s - loss: 8.4201 - val_loss: 11.7995\n",
+ "Epoch 293/1000\n",
+ "0s - loss: 8.4012 - val_loss: 11.8587\n",
+ "Epoch 294/1000\n",
+ "0s - loss: 8.4044 - val_loss: 11.8841\n",
+ "Epoch 295/1000\n",
+ "0s - loss: 8.3899 - val_loss: 11.9541\n",
+ "Epoch 296/1000\n",
+ "0s - loss: 8.3873 - val_loss: 11.9119\n",
+ "Epoch 297/1000\n",
+ "0s - loss: 8.3795 - val_loss: 11.7851\n",
+ "Epoch 298/1000\n",
+ "0s - loss: 8.3632 - val_loss: 11.6978\n",
+ "Epoch 299/1000\n",
+ "0s - loss: 8.3391 - val_loss: 11.5268\n",
+ "Epoch 300/1000\n",
+ "0s - loss: 8.3606 - val_loss: 11.4162\n",
+ "Epoch 301/1000\n",
+ "0s - loss: 8.3575 - val_loss: 11.5051\n",
+ "Epoch 302/1000\n",
+ "0s - loss: 8.3332 - val_loss: 11.5816\n",
+ "Epoch 303/1000\n",
+ "0s - loss: 8.3282 - val_loss: 11.6179\n",
+ "Epoch 304/1000\n",
+ "0s - loss: 8.3220 - val_loss: 11.7089\n",
+ "Epoch 305/1000\n",
+ "0s - loss: 8.3157 - val_loss: 11.7198\n",
+ "Epoch 306/1000\n",
+ "0s - loss: 8.3032 - val_loss: 11.7339\n",
+ "Epoch 307/1000\n",
+ "0s - loss: 8.3079 - val_loss: 11.8089\n",
+ "Epoch 308/1000\n",
+ "0s - loss: 8.3082 - val_loss: 11.8763\n",
+ "Epoch 309/1000\n",
+ "0s - loss: 8.3239 - val_loss: 11.8680\n",
+ "Epoch 310/1000\n",
+ "0s - loss: 8.3201 - val_loss: 11.6964\n",
+ "Epoch 311/1000\n",
+ "0s - loss: 8.2837 - val_loss: 11.6511\n",
+ "Epoch 312/1000\n",
+ "0s - loss: 8.2681 - val_loss: 11.5479\n",
+ "Epoch 313/1000\n",
+ "0s - loss: 8.2656 - val_loss: 11.5266\n",
+ "Epoch 314/1000\n",
+ "0s - loss: 8.2514 - val_loss: 11.4685\n",
+ "Epoch 315/1000\n",
+ "0s - loss: 8.2437 - val_loss: 11.3839\n",
+ "Epoch 316/1000\n",
+ "0s - loss: 8.2447 - val_loss: 11.4023\n",
+ "Epoch 317/1000\n",
+ "0s - loss: 8.2245 - val_loss: 11.6527\n",
+ "Epoch 318/1000\n",
+ "0s - loss: 8.2267 - val_loss: 11.7483\n",
+ "Epoch 319/1000\n",
+ "0s - loss: 8.2287 - val_loss: 11.5875\n",
+ "Epoch 320/1000\n",
+ "0s - loss: 8.2140 - val_loss: 11.4581\n",
+ "Epoch 321/1000\n",
+ "0s - loss: 8.2021 - val_loss: 11.4381\n",
+ "Epoch 322/1000\n",
+ "0s - loss: 8.2018 - val_loss: 11.4169\n",
+ "Epoch 323/1000\n",
+ "0s - loss: 8.1867 - val_loss: 11.4154\n",
+ "Epoch 324/1000\n",
+ "0s - loss: 8.1980 - val_loss: 11.5104\n",
+ "Epoch 325/1000\n",
+ "0s - loss: 8.1647 - val_loss: 11.3666\n",
+ "Epoch 326/1000\n",
+ "0s - loss: 8.1756 - val_loss: 11.2822\n",
+ "Epoch 327/1000\n",
+ "0s - loss: 8.1696 - val_loss: 11.2677\n",
+ "Epoch 328/1000\n",
+ "0s - loss: 8.1570 - val_loss: 11.3119\n",
+ "Epoch 329/1000\n",
+ "0s - loss: 8.1568 - val_loss: 11.3125\n",
+ "Epoch 330/1000\n",
+ "0s - loss: 8.1442 - val_loss: 11.3657\n",
+ "Epoch 331/1000\n",
+ "0s - loss: 8.1267 - val_loss: 11.4253\n",
+ "Epoch 332/1000\n",
+ "0s - loss: 8.1424 - val_loss: 11.5049\n",
+ "Epoch 333/1000\n",
+ "0s - loss: 8.1254 - val_loss: 11.4423\n",
+ "Epoch 334/1000\n",
+ "0s - loss: 8.1204 - val_loss: 11.3855\n",
+ "Epoch 335/1000\n",
+ "0s - loss: 8.1203 - val_loss: 11.3545\n",
+ "Epoch 336/1000\n",
+ "0s - loss: 8.1200 - val_loss: 11.3624\n",
+ "Epoch 337/1000\n",
+ "0s - loss: 8.0956 - val_loss: 11.2366\n",
+ "Epoch 338/1000\n",
+ "0s - loss: 8.0906 - val_loss: 11.1804\n",
+ "Epoch 339/1000\n",
+ "0s - loss: 8.0975 - val_loss: 11.2618\n",
+ "Epoch 340/1000\n",
+ "0s - loss: 8.0842 - val_loss: 11.4501\n",
+ "Epoch 341/1000\n",
+ "0s - loss: 8.0980 - val_loss: 11.5825\n",
+ "Epoch 342/1000\n",
+ "0s - loss: 8.0878 - val_loss: 11.5529\n",
+ "Epoch 343/1000\n",
+ "0s - loss: 8.0632 - val_loss: 11.4353\n",
+ "Epoch 344/1000\n",
+ "0s - loss: 8.0348 - val_loss: 11.2123\n",
+ "Epoch 345/1000\n",
+ "0s - loss: 8.0200 - val_loss: 11.1293\n",
+ "Epoch 346/1000\n",
+ "0s - loss: 8.0155 - val_loss: 11.1220\n",
+ "Epoch 347/1000\n",
+ "0s - loss: 8.0029 - val_loss: 11.0208\n",
+ "Epoch 348/1000\n",
+ "0s - loss: 8.0177 - val_loss: 11.1227\n",
+ "Epoch 349/1000\n",
+ "0s - loss: 7.9681 - val_loss: 11.0681\n",
+ "Epoch 350/1000\n",
+ "0s - loss: 7.9982 - val_loss: 11.2902\n",
+ "Epoch 351/1000\n",
+ "0s - loss: 7.9389 - val_loss: 11.1377\n",
+ "Epoch 352/1000\n",
+ "0s - loss: 7.9432 - val_loss: 10.8971\n",
+ "Epoch 353/1000\n",
+ "0s - loss: 7.9307 - val_loss: 10.8650\n",
+ "Epoch 354/1000\n",
+ "0s - loss: 7.9150 - val_loss: 10.9361\n",
+ "Epoch 355/1000\n",
+ "0s - loss: 7.9224 - val_loss: 11.1138\n",
+ "Epoch 356/1000\n",
+ "0s - loss: 7.8936 - val_loss: 11.0083\n",
+ "Epoch 357/1000\n",
+ "0s - loss: 7.8813 - val_loss: 10.9296\n",
+ "Epoch 358/1000\n",
+ "0s - loss: 7.8658 - val_loss: 10.7349\n",
+ "Epoch 359/1000\n",
+ "0s - loss: 7.9032 - val_loss: 10.6942\n",
+ "Epoch 360/1000\n",
+ "0s - loss: 7.8775 - val_loss: 10.8303\n",
+ "Epoch 361/1000\n",
+ "0s - loss: 7.8377 - val_loss: 11.1259\n",
+ "Epoch 362/1000\n",
+ "0s - loss: 7.8699 - val_loss: 11.2624\n",
+ "Epoch 363/1000\n",
+ "0s - loss: 7.8676 - val_loss: 11.0654\n",
+ "Epoch 364/1000\n",
+ "0s - loss: 7.8514 - val_loss: 11.0146\n",
+ "Epoch 365/1000\n",
+ "0s - loss: 7.8735 - val_loss: 10.8282\n",
+ "Epoch 366/1000\n",
+ "0s - loss: 7.8320 - val_loss: 10.8432\n",
+ "Epoch 367/1000\n",
+ "0s - loss: 7.8254 - val_loss: 10.8759\n",
+ "Epoch 368/1000\n",
+ "0s - loss: 7.8238 - val_loss: 11.0631\n",
+ "Epoch 369/1000\n",
+ "0s - loss: 7.8251 - val_loss: 11.1738\n",
+ "Epoch 370/1000\n",
+ "0s - loss: 7.8164 - val_loss: 11.0347\n",
+ "Epoch 371/1000\n",
+ "0s - loss: 7.8062 - val_loss: 11.0105\n",
+ "Epoch 372/1000\n",
+ "0s - loss: 7.7965 - val_loss: 10.8947\n",
+ "Epoch 373/1000\n",
+ "0s - loss: 7.7955 - val_loss: 10.7648\n",
+ "Epoch 374/1000\n",
+ "0s - loss: 7.8384 - val_loss: 10.6471\n",
+ "Epoch 375/1000\n",
+ "0s - loss: 7.7998 - val_loss: 10.6820\n",
+ "Epoch 376/1000\n",
+ "0s - loss: 7.7854 - val_loss: 10.7553\n",
+ "Epoch 377/1000\n",
+ "0s - loss: 7.7667 - val_loss: 10.7826\n",
+ "Epoch 378/1000\n",
+ "0s - loss: 7.7576 - val_loss: 10.7903\n",
+ "Epoch 379/1000\n",
+ "0s - loss: 7.7672 - val_loss: 10.7327\n",
+ "Epoch 380/1000\n",
+ "0s - loss: 7.7557 - val_loss: 10.8170\n",
+ "Epoch 381/1000\n",
+ "0s - loss: 7.7400 - val_loss: 10.8690\n",
+ "Epoch 382/1000\n",
+ "0s - loss: 7.7314 - val_loss: 10.8287\n",
+ "Epoch 383/1000\n",
+ "0s - loss: 7.7187 - val_loss: 10.9466\n",
+ "Epoch 384/1000\n",
+ "0s - loss: 7.7442 - val_loss: 11.0996\n",
+ "Epoch 385/1000\n",
+ "0s - loss: 7.7236 - val_loss: 11.0335\n",
+ "Epoch 386/1000\n",
+ "0s - loss: 7.7195 - val_loss: 10.8625\n",
+ "Epoch 387/1000\n",
+ "0s - loss: 7.7134 - val_loss: 10.7955\n",
+ "Epoch 388/1000\n",
+ "0s - loss: 7.7091 - val_loss: 10.8024\n",
+ "Epoch 389/1000\n",
+ "0s - loss: 7.6993 - val_loss: 10.9470\n",
+ "Epoch 390/1000\n",
+ "0s - loss: 7.7107 - val_loss: 11.1068\n",
+ "Epoch 391/1000\n",
+ "0s - loss: 7.7326 - val_loss: 11.1625\n",
+ "Epoch 392/1000\n",
+ "0s - loss: 7.7093 - val_loss: 11.0270\n",
+ "Epoch 393/1000\n",
+ "0s - loss: 7.6944 - val_loss: 10.8553\n",
+ "Epoch 394/1000\n",
+ "0s - loss: 7.6865 - val_loss: 10.7728\n",
+ "Epoch 395/1000\n",
+ "0s - loss: 7.6968 - val_loss: 10.6200\n",
+ "Epoch 396/1000\n",
+ "0s - loss: 7.7078 - val_loss: 10.7841\n",
+ "Epoch 397/1000\n",
+ "0s - loss: 7.6727 - val_loss: 10.7226\n",
+ "Epoch 398/1000\n",
+ "0s - loss: 7.6936 - val_loss: 10.5775\n",
+ "Epoch 399/1000\n",
+ "0s - loss: 7.6952 - val_loss: 10.4995\n",
+ "Epoch 400/1000\n",
+ "0s - loss: 7.7006 - val_loss: 10.4518\n",
+ "Epoch 401/1000\n",
+ "0s - loss: 7.7189 - val_loss: 10.3639\n",
+ "Epoch 402/1000\n",
+ "0s - loss: 7.7255 - val_loss: 10.4779\n",
+ "Epoch 403/1000\n",
+ "0s - loss: 7.6600 - val_loss: 10.7322\n",
+ "Epoch 404/1000\n",
+ "0s - loss: 7.6410 - val_loss: 11.1461\n",
+ "Epoch 405/1000\n",
+ "0s - loss: 7.7009 - val_loss: 11.4412\n",
+ "Epoch 406/1000\n",
+ "0s - loss: 7.7221 - val_loss: 11.3018\n",
+ "Epoch 407/1000\n",
+ "0s - loss: 7.6986 - val_loss: 11.1412\n",
+ "Epoch 408/1000\n",
+ "0s - loss: 7.7201 - val_loss: 10.7272\n",
+ "Epoch 409/1000\n",
+ "0s - loss: 7.6533 - val_loss: 10.5904\n",
+ "Epoch 410/1000\n",
+ "0s - loss: 7.6437 - val_loss: 10.4873\n",
+ "Epoch 411/1000\n",
+ "0s - loss: 7.6502 - val_loss: 10.5390\n",
+ "Epoch 412/1000\n",
+ "0s - loss: 7.6454 - val_loss: 10.5559\n",
+ "Epoch 413/1000\n",
+ "0s - loss: 7.6390 - val_loss: 10.6162\n",
+ "Epoch 414/1000\n",
+ "0s - loss: 7.6293 - val_loss: 10.7414\n",
+ "Epoch 415/1000\n",
+ "0s - loss: 7.6264 - val_loss: 10.7096\n",
+ "Epoch 416/1000\n",
+ "0s - loss: 7.6205 - val_loss: 10.6559\n",
+ "Epoch 417/1000\n",
+ "0s - loss: 7.6253 - val_loss: 10.5487\n",
+ "Epoch 418/1000\n",
+ "0s - loss: 7.6516 - val_loss: 10.4718\n",
+ "Epoch 419/1000\n",
+ "0s - loss: 7.6138 - val_loss: 10.6924\n",
+ "Epoch 420/1000\n",
+ "0s - loss: 7.6022 - val_loss: 10.8662\n",
+ "Epoch 421/1000\n",
+ "0s - loss: 7.6207 - val_loss: 10.8832\n",
+ "Epoch 422/1000\n",
+ "0s - loss: 7.6210 - val_loss: 10.8810\n",
+ "Epoch 423/1000\n",
+ "0s - loss: 7.6247 - val_loss: 10.9177\n",
+ "Epoch 424/1000\n",
+ "0s - loss: 7.6221 - val_loss: 10.7813\n",
+ "Epoch 425/1000\n",
+ "0s - loss: 7.6053 - val_loss: 10.6974\n",
+ "Epoch 426/1000\n",
+ "0s - loss: 7.6505 - val_loss: 10.3854\n",
+ "Epoch 427/1000\n",
+ "0s - loss: 7.6392 - val_loss: 10.4074\n",
+ "Epoch 428/1000\n",
+ "0s - loss: 7.6357 - val_loss: 10.5096\n",
+ "Epoch 429/1000\n",
+ "0s - loss: 7.6221 - val_loss: 10.4471\n",
+ "Epoch 430/1000\n",
+ "0s - loss: 7.6144 - val_loss: 10.4258\n",
+ "Epoch 431/1000\n",
+ "0s - loss: 7.6247 - val_loss: 10.3588\n",
+ "Epoch 432/1000\n",
+ "0s - loss: 7.6213 - val_loss: 10.4823\n",
+ "Epoch 433/1000\n",
+ "0s - loss: 7.6010 - val_loss: 10.6238\n",
+ "Epoch 434/1000\n",
+ "0s - loss: 7.5874 - val_loss: 10.5674\n",
+ "Epoch 435/1000\n",
+ "0s - loss: 7.6327 - val_loss: 10.4552\n",
+ "Epoch 436/1000\n",
+ "0s - loss: 7.5729 - val_loss: 10.6180\n",
+ "Epoch 437/1000\n",
+ "0s - loss: 7.5802 - val_loss: 10.7595\n",
+ "Epoch 438/1000\n",
+ "0s - loss: 7.5751 - val_loss: 10.8116\n",
+ "Epoch 439/1000\n",
+ "0s - loss: 7.5855 - val_loss: 10.7126\n",
+ "Epoch 440/1000\n",
+ "0s - loss: 7.5696 - val_loss: 10.6216\n",
+ "Epoch 441/1000\n",
+ "0s - loss: 7.5609 - val_loss: 10.4592\n",
+ "Epoch 442/1000\n",
+ "0s - loss: 7.5753 - val_loss: 10.4247\n",
+ "Epoch 443/1000\n",
+ "0s - loss: 7.5979 - val_loss: 10.4444\n",
+ "Epoch 444/1000\n",
+ "0s - loss: 7.6085 - val_loss: 10.9150\n",
+ "Epoch 445/1000\n",
+ "0s - loss: 7.5751 - val_loss: 11.0150\n",
+ "Epoch 446/1000\n",
+ "0s - loss: 7.5958 - val_loss: 10.9615\n",
+ "Epoch 447/1000\n",
+ "0s - loss: 7.5837 - val_loss: 10.7457\n",
+ "Epoch 448/1000\n",
+ "0s - loss: 7.5681 - val_loss: 10.6972\n",
+ "Epoch 449/1000\n",
+ "0s - loss: 7.5665 - val_loss: 10.5583\n",
+ "Epoch 450/1000\n",
+ "0s - loss: 7.5440 - val_loss: 10.5990\n",
+ "Epoch 451/1000\n",
+ "0s - loss: 7.5411 - val_loss: 10.5325\n",
+ "Epoch 452/1000\n",
+ "0s - loss: 7.5446 - val_loss: 10.4850\n",
+ "Epoch 453/1000\n",
+ "0s - loss: 7.5387 - val_loss: 10.4341\n",
+ "Epoch 454/1000\n",
+ "0s - loss: 7.5437 - val_loss: 10.4888\n",
+ "Epoch 455/1000\n",
+ "0s - loss: 7.5331 - val_loss: 10.5360\n",
+ "Epoch 456/1000\n",
+ "0s - loss: 7.5290 - val_loss: 10.5906\n",
+ "Epoch 457/1000\n",
+ "0s - loss: 7.5240 - val_loss: 10.7664\n",
+ "Epoch 458/1000\n",
+ "0s - loss: 7.5880 - val_loss: 10.9494\n",
+ "Epoch 459/1000\n",
+ "0s - loss: 7.5755 - val_loss: 10.7004\n",
+ "Epoch 460/1000\n",
+ "0s - loss: 7.5331 - val_loss: 10.5778\n",
+ "Epoch 461/1000\n",
+ "0s - loss: 7.5183 - val_loss: 10.5781\n",
+ "Epoch 462/1000\n",
+ "0s - loss: 7.5303 - val_loss: 10.6520\n",
+ "Epoch 463/1000\n",
+ "0s - loss: 7.5217 - val_loss: 10.5437\n",
+ "Epoch 464/1000\n",
+ "0s - loss: 7.5393 - val_loss: 10.3459\n",
+ "Epoch 465/1000\n",
+ "0s - loss: 7.5230 - val_loss: 10.3179\n",
+ "Epoch 466/1000\n",
+ "0s - loss: 7.5840 - val_loss: 10.1277\n",
+ "Epoch 467/1000\n",
+ "0s - loss: 7.5594 - val_loss: 10.2970\n",
+ "Epoch 468/1000\n",
+ "0s - loss: 7.5249 - val_loss: 10.2439\n",
+ "Epoch 469/1000\n",
+ "0s - loss: 7.5267 - val_loss: 10.2097\n",
+ "Epoch 470/1000\n",
+ "0s - loss: 7.5314 - val_loss: 10.2946\n",
+ "Epoch 471/1000\n",
+ "0s - loss: 7.5007 - val_loss: 10.4302\n",
+ "Epoch 472/1000\n",
+ "0s - loss: 7.5191 - val_loss: 10.7165\n",
+ "Epoch 473/1000\n",
+ "0s - loss: 7.5035 - val_loss: 10.7278\n",
+ "Epoch 474/1000\n",
+ "0s - loss: 7.5249 - val_loss: 10.5478\n",
+ "Epoch 475/1000\n",
+ "0s - loss: 7.4949 - val_loss: 10.4361\n",
+ "Epoch 476/1000\n",
+ "0s - loss: 7.4883 - val_loss: 10.3843\n",
+ "Epoch 477/1000\n",
+ "0s - loss: 7.4957 - val_loss: 10.4680\n",
+ "Epoch 478/1000\n",
+ "0s - loss: 7.4931 - val_loss: 10.5048\n",
+ "Epoch 479/1000\n",
+ "0s - loss: 7.5201 - val_loss: 10.2524\n",
+ "Epoch 480/1000\n",
+ "0s - loss: 7.4964 - val_loss: 10.2942\n",
+ "Epoch 481/1000\n",
+ "0s - loss: 7.4940 - val_loss: 10.7032\n",
+ "Epoch 482/1000\n",
+ "0s - loss: 7.5009 - val_loss: 10.8538\n",
+ "Epoch 483/1000\n",
+ "0s - loss: 7.5238 - val_loss: 10.5513\n",
+ "Epoch 484/1000\n",
+ "0s - loss: 7.4755 - val_loss: 10.5335\n",
+ "Epoch 485/1000\n",
+ "0s - loss: 7.4864 - val_loss: 10.5989\n",
+ "Epoch 486/1000\n",
+ "0s - loss: 7.4811 - val_loss: 10.6167\n",
+ "Epoch 487/1000\n",
+ "0s - loss: 7.4720 - val_loss: 10.6700\n",
+ "Epoch 488/1000\n",
+ "0s - loss: 7.4787 - val_loss: 10.5564\n",
+ "Epoch 489/1000\n",
+ "0s - loss: 7.4696 - val_loss: 10.3576\n",
+ "Epoch 490/1000\n",
+ "0s - loss: 7.4602 - val_loss: 10.2715\n",
+ "Epoch 491/1000\n",
+ "0s - loss: 7.4755 - val_loss: 10.1893\n",
+ "Epoch 492/1000\n",
+ "0s - loss: 7.4735 - val_loss: 10.2862\n",
+ "Epoch 493/1000\n",
+ "0s - loss: 7.4617 - val_loss: 10.2662\n",
+ "Epoch 494/1000\n",
+ "0s - loss: 7.4589 - val_loss: 10.3302\n",
+ "Epoch 495/1000\n",
+ "0s - loss: 7.4576 - val_loss: 10.2899\n",
+ "Epoch 496/1000\n",
+ "0s - loss: 7.5020 - val_loss: 10.1326\n",
+ "Epoch 497/1000\n",
+ "0s - loss: 7.4768 - val_loss: 10.3433\n",
+ "Epoch 498/1000\n",
+ "0s - loss: 7.4450 - val_loss: 10.4103\n",
+ "Epoch 499/1000\n",
+ "0s - loss: 7.4543 - val_loss: 10.5700\n",
+ "Epoch 500/1000\n",
+ "0s - loss: 7.4388 - val_loss: 10.4204\n",
+ "Epoch 501/1000\n",
+ "0s - loss: 7.4381 - val_loss: 10.3797\n",
+ "Epoch 502/1000\n",
+ "0s - loss: 7.4377 - val_loss: 10.3496\n",
+ "Epoch 503/1000\n",
+ "0s - loss: 7.4329 - val_loss: 10.3707\n",
+ "Epoch 504/1000\n",
+ "0s - loss: 7.4341 - val_loss: 10.4708\n",
+ "Epoch 505/1000\n",
+ "0s - loss: 7.4391 - val_loss: 10.6921\n",
+ "Epoch 506/1000\n",
+ "0s - loss: 7.4478 - val_loss: 10.6650\n",
+ "Epoch 507/1000\n",
+ "0s - loss: 7.4418 - val_loss: 10.5200\n",
+ "Epoch 508/1000\n",
+ "0s - loss: 7.4331 - val_loss: 10.5249\n",
+ "Epoch 509/1000\n",
+ "0s - loss: 7.4261 - val_loss: 10.4752\n",
+ "Epoch 510/1000\n",
+ "0s - loss: 7.4276 - val_loss: 10.3147\n",
+ "Epoch 511/1000\n",
+ "0s - loss: 7.4161 - val_loss: 10.3581\n",
+ "Epoch 512/1000\n",
+ "0s - loss: 7.4251 - val_loss: 10.4221\n",
+ "Epoch 513/1000\n",
+ "0s - loss: 7.4245 - val_loss: 10.2926\n",
+ "Epoch 514/1000\n",
+ "0s - loss: 7.4143 - val_loss: 10.3159\n",
+ "Epoch 515/1000\n",
+ "0s - loss: 7.4126 - val_loss: 10.5865\n",
+ "Epoch 516/1000\n",
+ "0s - loss: 7.4217 - val_loss: 10.4909\n",
+ "Epoch 517/1000\n",
+ "0s - loss: 7.4339 - val_loss: 10.5381\n",
+ "Epoch 518/1000\n",
+ "0s - loss: 7.4640 - val_loss: 10.7960\n",
+ "Epoch 519/1000\n",
+ "0s - loss: 7.4516 - val_loss: 10.6543\n",
+ "Epoch 520/1000\n",
+ "0s - loss: 7.4247 - val_loss: 10.4150\n",
+ "Epoch 521/1000\n",
+ "0s - loss: 7.4017 - val_loss: 10.2173\n",
+ "Epoch 522/1000\n",
+ "0s - loss: 7.4030 - val_loss: 10.3301\n",
+ "Epoch 523/1000\n",
+ "0s - loss: 7.3812 - val_loss: 10.3615\n",
+ "Epoch 524/1000\n",
+ "0s - loss: 7.3845 - val_loss: 10.3322\n",
+ "Epoch 525/1000\n",
+ "0s - loss: 7.3992 - val_loss: 10.3930\n",
+ "Epoch 526/1000\n",
+ "0s - loss: 7.3927 - val_loss: 10.3845\n",
+ "Epoch 527/1000\n",
+ "0s - loss: 7.3966 - val_loss: 10.3080\n",
+ "Epoch 528/1000\n",
+ "0s - loss: 7.4120 - val_loss: 10.1934\n",
+ "Epoch 529/1000\n",
+ "0s - loss: 7.3964 - val_loss: 10.1839\n",
+ "Epoch 530/1000\n",
+ "0s - loss: 7.3920 - val_loss: 10.2711\n",
+ "Epoch 531/1000\n",
+ "0s - loss: 7.3867 - val_loss: 10.3361\n",
+ "Epoch 532/1000\n",
+ "0s - loss: 7.3939 - val_loss: 10.3948\n",
+ "Epoch 533/1000\n",
+ "0s - loss: 7.3758 - val_loss: 10.1905\n",
+ "Epoch 534/1000\n",
+ "0s - loss: 7.3851 - val_loss: 10.0880\n",
+ "Epoch 535/1000\n",
+ "0s - loss: 7.3898 - val_loss: 10.0592\n",
+ "Epoch 536/1000\n",
+ "0s - loss: 7.3894 - val_loss: 10.1627\n",
+ "Epoch 537/1000\n",
+ "0s - loss: 7.3774 - val_loss: 10.2003\n",
+ "Epoch 538/1000\n",
+ "0s - loss: 7.3809 - val_loss: 10.2461\n",
+ "Epoch 539/1000\n",
+ "0s - loss: 7.4235 - val_loss: 10.0521\n",
+ "Epoch 540/1000\n",
+ "0s - loss: 7.3799 - val_loss: 10.1640\n",
+ "Epoch 541/1000\n",
+ "0s - loss: 7.3736 - val_loss: 10.1857\n",
+ "Epoch 542/1000\n",
+ "0s - loss: 7.3726 - val_loss: 10.1890\n",
+ "Epoch 543/1000\n",
+ "0s - loss: 7.3783 - val_loss: 10.4599\n",
+ "Epoch 544/1000\n",
+ "0s - loss: 7.3736 - val_loss: 10.5228\n",
+ "Epoch 545/1000\n",
+ "0s - loss: 7.3766 - val_loss: 10.4928\n",
+ "Epoch 546/1000\n",
+ "0s - loss: 7.3686 - val_loss: 10.4066\n",
+ "Epoch 547/1000\n",
+ "0s - loss: 7.3549 - val_loss: 10.3289\n",
+ "Epoch 548/1000\n",
+ "0s - loss: 7.3567 - val_loss: 10.2604\n",
+ "Epoch 549/1000\n",
+ "0s - loss: 7.3596 - val_loss: 10.4552\n",
+ "Epoch 550/1000\n",
+ "0s - loss: 7.3638 - val_loss: 10.4449\n",
+ "Epoch 551/1000\n",
+ "0s - loss: 7.3492 - val_loss: 10.2628\n",
+ "Epoch 552/1000\n",
+ "0s - loss: 7.3520 - val_loss: 10.1344\n",
+ "Epoch 553/1000\n",
+ "0s - loss: 7.3573 - val_loss: 10.1745\n",
+ "Epoch 554/1000\n",
+ "0s - loss: 7.3460 - val_loss: 10.0620\n",
+ "Epoch 555/1000\n",
+ "0s - loss: 7.3663 - val_loss: 9.9054\n",
+ "Epoch 556/1000\n",
+ "0s - loss: 7.3777 - val_loss: 9.9367\n",
+ "Epoch 557/1000\n",
+ "0s - loss: 7.3554 - val_loss: 10.1113\n",
+ "Epoch 558/1000\n",
+ "0s - loss: 7.3280 - val_loss: 10.2085\n",
+ "Epoch 559/1000\n",
+ "0s - loss: 7.3467 - val_loss: 10.3174\n",
+ "Epoch 560/1000\n",
+ "0s - loss: 7.3365 - val_loss: 10.1736\n",
+ "Epoch 561/1000\n",
+ "0s - loss: 7.3331 - val_loss: 10.1039\n",
+ "Epoch 562/1000\n",
+ "0s - loss: 7.3442 - val_loss: 9.9918\n",
+ "Epoch 563/1000\n",
+ "0s - loss: 7.3531 - val_loss: 9.9874\n",
+ "Epoch 564/1000\n",
+ "0s - loss: 7.3445 - val_loss: 10.2712\n",
+ "Epoch 565/1000\n",
+ "0s - loss: 7.3472 - val_loss: 10.4470\n",
+ "Epoch 566/1000\n",
+ "0s - loss: 7.3470 - val_loss: 10.2780\n",
+ "Epoch 567/1000\n",
+ "0s - loss: 7.3305 - val_loss: 10.3648\n",
+ "Epoch 568/1000\n",
+ "0s - loss: 7.3328 - val_loss: 10.2149\n",
+ "Epoch 569/1000\n",
+ "0s - loss: 7.3182 - val_loss: 10.2667\n",
+ "Epoch 570/1000\n",
+ "0s - loss: 7.3238 - val_loss: 10.3270\n",
+ "Epoch 571/1000\n",
+ "0s - loss: 7.3491 - val_loss: 10.4982\n",
+ "Epoch 572/1000\n",
+ "0s - loss: 7.3435 - val_loss: 10.4769\n",
+ "Epoch 573/1000\n",
+ "0s - loss: 7.3310 - val_loss: 10.3134\n",
+ "Epoch 574/1000\n",
+ "0s - loss: 7.3015 - val_loss: 9.9874\n",
+ "Epoch 575/1000\n",
+ "0s - loss: 7.3351 - val_loss: 9.9836\n",
+ "Epoch 576/1000\n",
+ "0s - loss: 7.3304 - val_loss: 9.8903\n",
+ "Epoch 577/1000\n",
+ "0s - loss: 7.3247 - val_loss: 9.9627\n",
+ "Epoch 578/1000\n",
+ "0s - loss: 7.3170 - val_loss: 10.1574\n",
+ "Epoch 579/1000\n",
+ "0s - loss: 7.3259 - val_loss: 10.2674\n",
+ "Epoch 580/1000\n",
+ "0s - loss: 7.3426 - val_loss: 10.1408\n",
+ "Epoch 581/1000\n",
+ "0s - loss: 7.3019 - val_loss: 10.1300\n",
+ "Epoch 582/1000\n",
+ "0s - loss: 7.3159 - val_loss: 10.0377\n",
+ "Epoch 583/1000\n",
+ "0s - loss: 7.3222 - val_loss: 9.9694\n",
+ "Epoch 584/1000\n",
+ "0s - loss: 7.3389 - val_loss: 10.2158\n",
+ "Epoch 585/1000\n",
+ "0s - loss: 7.3035 - val_loss: 10.2447\n",
+ "Epoch 586/1000\n",
+ "0s - loss: 7.2950 - val_loss: 10.2473\n",
+ "Epoch 587/1000\n",
+ "0s - loss: 7.2897 - val_loss: 10.1436\n",
+ "Epoch 588/1000\n",
+ "0s - loss: 7.2925 - val_loss: 10.1359\n",
+ "Epoch 589/1000\n",
+ "0s - loss: 7.2878 - val_loss: 10.0842\n",
+ "Epoch 590/1000\n",
+ "0s - loss: 7.2975 - val_loss: 10.1652\n",
+ "Epoch 591/1000\n",
+ "0s - loss: 7.3006 - val_loss: 10.1450\n",
+ "Epoch 592/1000\n",
+ "0s - loss: 7.3775 - val_loss: 10.5111\n",
+ "Epoch 593/1000\n",
+ "0s - loss: 7.3058 - val_loss: 10.2529\n",
+ "Epoch 594/1000\n",
+ "0s - loss: 7.2889 - val_loss: 10.1708\n",
+ "Epoch 595/1000\n",
+ "0s - loss: 7.2832 - val_loss: 10.2061\n",
+ "Epoch 596/1000\n",
+ "0s - loss: 7.2917 - val_loss: 10.2151\n",
+ "Epoch 597/1000\n",
+ "0s - loss: 7.2854 - val_loss: 10.2681\n",
+ "Epoch 598/1000\n",
+ "0s - loss: 7.2891 - val_loss: 10.2904\n",
+ "Epoch 599/1000\n",
+ "0s - loss: 7.2845 - val_loss: 10.3920\n",
+ "Epoch 600/1000\n",
+ "0s - loss: 7.3085 - val_loss: 10.2477\n",
+ "Epoch 601/1000\n",
+ "0s - loss: 7.2750 - val_loss: 10.0589\n",
+ "Epoch 602/1000\n",
+ "0s - loss: 7.2856 - val_loss: 9.9591\n",
+ "Epoch 603/1000\n",
+ "0s - loss: 7.2710 - val_loss: 10.0745\n",
+ "Epoch 604/1000\n",
+ "0s - loss: 7.2656 - val_loss: 10.2803\n",
+ "Epoch 605/1000\n",
+ "0s - loss: 7.3136 - val_loss: 10.5413\n",
+ "Epoch 606/1000\n",
+ "0s - loss: 7.3149 - val_loss: 10.4886\n",
+ "Epoch 607/1000\n",
+ "0s - loss: 7.3071 - val_loss: 10.2908\n",
+ "Epoch 608/1000\n",
+ "0s - loss: 7.2932 - val_loss: 10.4981\n",
+ "Epoch 609/1000\n",
+ "0s - loss: 7.3056 - val_loss: 10.4701\n",
+ "Epoch 610/1000\n",
+ "0s - loss: 7.3061 - val_loss: 10.3230\n",
+ "Epoch 611/1000\n",
+ "0s - loss: 7.2873 - val_loss: 10.2525\n",
+ "Epoch 612/1000\n",
+ "0s - loss: 7.2625 - val_loss: 10.1370\n",
+ "Epoch 613/1000\n",
+ "0s - loss: 7.2592 - val_loss: 10.0855\n",
+ "Epoch 614/1000\n",
+ "0s - loss: 7.2652 - val_loss: 10.0586\n",
+ "Epoch 615/1000\n",
+ "0s - loss: 7.2739 - val_loss: 10.2048\n",
+ "Epoch 616/1000\n",
+ "0s - loss: 7.2867 - val_loss: 10.0613\n",
+ "Epoch 617/1000\n",
+ "0s - loss: 7.2534 - val_loss: 10.0500\n",
+ "Epoch 618/1000\n",
+ "0s - loss: 7.2994 - val_loss: 10.4155\n",
+ "Epoch 619/1000\n",
+ "0s - loss: 7.2898 - val_loss: 10.4540\n",
+ "Epoch 620/1000\n",
+ "0s - loss: 7.2791 - val_loss: 10.2250\n",
+ "Epoch 621/1000\n",
+ "0s - loss: 7.2556 - val_loss: 10.1293\n",
+ "Epoch 622/1000\n",
+ "0s - loss: 7.2616 - val_loss: 10.0188\n",
+ "Epoch 623/1000\n",
+ "0s - loss: 7.2433 - val_loss: 9.9727\n",
+ "Epoch 624/1000\n",
+ "0s - loss: 7.2692 - val_loss: 9.9556\n",
+ "Epoch 625/1000\n",
+ "0s - loss: 7.2415 - val_loss: 10.2360\n",
+ "Epoch 626/1000\n",
+ "0s - loss: 7.2455 - val_loss: 10.4704\n",
+ "Epoch 627/1000\n",
+ "0s - loss: 7.2815 - val_loss: 10.5971\n",
+ "Epoch 628/1000\n",
+ "0s - loss: 7.2804 - val_loss: 10.4287\n",
+ "Epoch 629/1000\n",
+ "0s - loss: 7.2563 - val_loss: 10.2884\n",
+ "Epoch 630/1000\n",
+ "0s - loss: 7.2330 - val_loss: 10.0593\n",
+ "Epoch 631/1000\n",
+ "0s - loss: 7.2288 - val_loss: 9.9681\n",
+ "Epoch 632/1000\n",
+ "0s - loss: 7.2240 - val_loss: 9.8445\n",
+ "Epoch 633/1000\n",
+ "0s - loss: 7.2384 - val_loss: 9.8650\n",
+ "Epoch 634/1000\n",
+ "0s - loss: 7.2224 - val_loss: 9.9030\n",
+ "Epoch 635/1000\n",
+ "0s - loss: 7.2178 - val_loss: 9.9857\n",
+ "Epoch 636/1000\n",
+ "0s - loss: 7.2060 - val_loss: 9.9932\n",
+ "Epoch 637/1000\n",
+ "0s - loss: 7.1928 - val_loss: 10.1384\n",
+ "Epoch 638/1000\n",
+ "0s - loss: 7.2087 - val_loss: 10.0650\n",
+ "Epoch 639/1000\n",
+ "0s - loss: 7.1924 - val_loss: 10.1360\n",
+ "Epoch 640/1000\n",
+ "0s - loss: 7.1922 - val_loss: 10.2236\n",
+ "Epoch 641/1000\n",
+ "0s - loss: 7.2100 - val_loss: 10.0820\n",
+ "Epoch 642/1000\n",
+ "0s - loss: 7.1812 - val_loss: 10.0331\n",
+ "Epoch 643/1000\n",
+ "0s - loss: 7.1742 - val_loss: 9.9082\n",
+ "Epoch 644/1000\n",
+ "0s - loss: 7.1927 - val_loss: 9.8744\n",
+ "Epoch 645/1000\n",
+ "0s - loss: 7.1919 - val_loss: 9.9550\n",
+ "Epoch 646/1000\n",
+ "0s - loss: 7.2273 - val_loss: 10.0534\n",
+ "Epoch 647/1000\n",
+ "0s - loss: 7.1807 - val_loss: 9.9498\n",
+ "Epoch 648/1000\n",
+ "0s - loss: 7.2351 - val_loss: 9.6298\n",
+ "Epoch 649/1000\n",
+ "0s - loss: 7.2691 - val_loss: 9.9665\n",
+ "Epoch 650/1000\n",
+ "0s - loss: 7.1814 - val_loss: 9.9562\n",
+ "Epoch 651/1000\n",
+ "0s - loss: 7.1730 - val_loss: 9.9009\n",
+ "Epoch 652/1000\n",
+ "0s - loss: 7.1692 - val_loss: 9.8869\n",
+ "Epoch 653/1000\n",
+ "0s - loss: 7.1786 - val_loss: 10.1116\n",
+ "Epoch 654/1000\n",
+ "0s - loss: 7.1759 - val_loss: 9.9204\n",
+ "Epoch 655/1000\n",
+ "0s - loss: 7.1684 - val_loss: 9.9511\n",
+ "Epoch 656/1000\n",
+ "0s - loss: 7.1634 - val_loss: 10.0238\n",
+ "Epoch 657/1000\n",
+ "0s - loss: 7.1625 - val_loss: 9.9729\n",
+ "Epoch 658/1000\n",
+ "0s - loss: 7.1592 - val_loss: 9.9302\n",
+ "Epoch 659/1000\n",
+ "0s - loss: 7.1531 - val_loss: 9.8003\n",
+ "Epoch 660/1000\n",
+ "0s - loss: 7.1689 - val_loss: 9.6241\n",
+ "Epoch 661/1000\n",
+ "0s - loss: 7.1926 - val_loss: 9.5935\n",
+ "Epoch 662/1000\n",
+ "0s - loss: 7.1995 - val_loss: 9.8668\n",
+ "Epoch 663/1000\n",
+ "0s - loss: 7.1207 - val_loss: 9.9770\n",
+ "Epoch 664/1000\n",
+ "0s - loss: 7.1614 - val_loss: 10.0944\n",
+ "Epoch 665/1000\n",
+ "0s - loss: 7.1376 - val_loss: 9.9018\n",
+ "Epoch 666/1000\n",
+ "0s - loss: 7.1264 - val_loss: 9.8169\n",
+ "Epoch 667/1000\n",
+ "0s - loss: 7.1322 - val_loss: 9.5914\n",
+ "Epoch 668/1000\n",
+ "0s - loss: 7.1756 - val_loss: 9.4677\n",
+ "Epoch 669/1000\n",
+ "0s - loss: 7.2228 - val_loss: 9.5309\n",
+ "Epoch 670/1000\n",
+ "0s - loss: 7.1221 - val_loss: 9.8272\n",
+ "Epoch 671/1000\n",
+ "0s - loss: 7.1694 - val_loss: 10.2424\n",
+ "Epoch 672/1000\n",
+ "0s - loss: 7.1469 - val_loss: 10.2019\n",
+ "Epoch 673/1000\n",
+ "0s - loss: 7.1341 - val_loss: 10.1415\n",
+ "Epoch 674/1000\n",
+ "0s - loss: 7.1294 - val_loss: 9.8550\n",
+ "Epoch 675/1000\n",
+ "0s - loss: 7.1236 - val_loss: 9.8194\n",
+ "Epoch 676/1000\n",
+ "0s - loss: 7.1136 - val_loss: 9.7638\n",
+ "Epoch 677/1000\n",
+ "0s - loss: 7.1250 - val_loss: 9.8803\n",
+ "Epoch 678/1000\n",
+ "0s - loss: 7.1117 - val_loss: 9.9392\n",
+ "Epoch 679/1000\n",
+ "0s - loss: 7.1156 - val_loss: 9.9271\n",
+ "Epoch 680/1000\n",
+ "0s - loss: 7.1538 - val_loss: 9.6088\n",
+ "Epoch 681/1000\n",
+ "0s - loss: 7.1304 - val_loss: 9.6990\n",
+ "Epoch 682/1000\n",
+ "0s - loss: 7.1071 - val_loss: 9.9984\n",
+ "Epoch 683/1000\n",
+ "0s - loss: 7.1481 - val_loss: 10.3292\n",
+ "Epoch 684/1000\n",
+ "0s - loss: 7.1585 - val_loss: 10.2097\n",
+ "Epoch 685/1000\n",
+ "0s - loss: 7.1660 - val_loss: 9.6619\n",
+ "Epoch 686/1000\n",
+ "0s - loss: 7.1311 - val_loss: 9.6285\n",
+ "Epoch 687/1000\n",
+ "0s - loss: 7.1451 - val_loss: 9.8866\n",
+ "Epoch 688/1000\n",
+ "0s - loss: 7.1050 - val_loss: 9.8680\n",
+ "Epoch 689/1000\n",
+ "0s - loss: 7.1146 - val_loss: 9.6843\n",
+ "Epoch 690/1000\n",
+ "0s - loss: 7.1193 - val_loss: 9.9650\n",
+ "Epoch 691/1000\n",
+ "0s - loss: 7.1163 - val_loss: 10.1442\n",
+ "Epoch 692/1000\n",
+ "0s - loss: 7.1099 - val_loss: 10.1598\n",
+ "Epoch 693/1000\n",
+ "0s - loss: 7.1075 - val_loss: 10.1014\n",
+ "Epoch 694/1000\n",
+ "0s - loss: 7.0965 - val_loss: 10.0233\n",
+ "Epoch 695/1000\n",
+ "0s - loss: 7.1083 - val_loss: 10.1571\n",
+ "Epoch 696/1000\n",
+ "0s - loss: 7.0982 - val_loss: 10.0523\n",
+ "Epoch 697/1000\n",
+ "0s - loss: 7.1096 - val_loss: 9.8419\n",
+ "Epoch 698/1000\n",
+ "0s - loss: 7.0903 - val_loss: 9.9749\n",
+ "Epoch 699/1000\n",
+ "0s - loss: 7.0576 - val_loss: 9.6691\n",
+ "Epoch 700/1000\n",
+ "0s - loss: 7.0936 - val_loss: 9.6816\n",
+ "Epoch 701/1000\n",
+ "0s - loss: 7.1219 - val_loss: 9.5613\n",
+ "Epoch 702/1000\n",
+ "0s - loss: 7.0909 - val_loss: 9.7389\n",
+ "Epoch 703/1000\n",
+ "0s - loss: 7.0848 - val_loss: 9.9158\n",
+ "Epoch 704/1000\n",
+ "0s - loss: 7.0797 - val_loss: 9.9673\n",
+ "Epoch 705/1000\n",
+ "0s - loss: 7.0823 - val_loss: 9.9838\n",
+ "Epoch 706/1000\n",
+ "0s - loss: 7.0787 - val_loss: 9.9805\n",
+ "Epoch 707/1000\n",
+ "0s - loss: 7.0865 - val_loss: 9.8578\n",
+ "Epoch 708/1000\n",
+ "0s - loss: 7.0739 - val_loss: 9.7663\n",
+ "Epoch 709/1000\n",
+ "0s - loss: 7.1175 - val_loss: 9.8952\n",
+ "Epoch 710/1000\n",
+ "0s - loss: 7.0679 - val_loss: 9.8783\n",
+ "Epoch 711/1000\n",
+ "0s - loss: 7.0992 - val_loss: 9.7650\n",
+ "Epoch 712/1000\n",
+ "0s - loss: 7.1081 - val_loss: 9.9847\n",
+ "Epoch 713/1000\n",
+ "0s - loss: 7.0943 - val_loss: 10.2337\n",
+ "Epoch 714/1000\n",
+ "0s - loss: 7.1155 - val_loss: 10.2349\n",
+ "Epoch 715/1000\n",
+ "0s - loss: 7.0932 - val_loss: 10.0091\n",
+ "Epoch 716/1000\n",
+ "0s - loss: 7.0945 - val_loss: 9.7639\n",
+ "Epoch 717/1000\n",
+ "0s - loss: 7.0735 - val_loss: 9.9598\n",
+ "Epoch 718/1000\n",
+ "0s - loss: 7.0708 - val_loss: 9.8427\n",
+ "Epoch 719/1000\n",
+ "0s - loss: 7.0589 - val_loss: 9.8517\n",
+ "Epoch 720/1000\n",
+ "0s - loss: 7.0701 - val_loss: 9.8597\n",
+ "Epoch 721/1000\n",
+ "0s - loss: 7.0970 - val_loss: 9.6938\n",
+ "Epoch 722/1000\n",
+ "0s - loss: 7.0486 - val_loss: 9.7629\n",
+ "Epoch 723/1000\n",
+ "0s - loss: 7.0477 - val_loss: 9.8391\n",
+ "Epoch 724/1000\n",
+ "0s - loss: 7.0466 - val_loss: 9.8425\n",
+ "Epoch 725/1000\n",
+ "0s - loss: 7.0478 - val_loss: 9.8494\n",
+ "Epoch 726/1000\n",
+ "0s - loss: 7.0448 - val_loss: 9.8165\n",
+ "Epoch 727/1000\n",
+ "0s - loss: 7.0464 - val_loss: 9.5782\n",
+ "Epoch 728/1000\n",
+ "0s - loss: 7.0715 - val_loss: 9.5460\n",
+ "Epoch 729/1000\n",
+ "0s - loss: 7.0541 - val_loss: 9.8024\n",
+ "Epoch 730/1000\n",
+ "0s - loss: 7.0461 - val_loss: 9.7884\n",
+ "Epoch 731/1000\n",
+ "0s - loss: 7.0452 - val_loss: 9.7993\n",
+ "Epoch 732/1000\n",
+ "0s - loss: 7.0534 - val_loss: 9.8886\n",
+ "Epoch 733/1000\n",
+ "0s - loss: 7.0401 - val_loss: 9.8490\n",
+ "Epoch 734/1000\n",
+ "0s - loss: 7.0531 - val_loss: 9.8721\n",
+ "Epoch 735/1000\n",
+ "0s - loss: 7.1051 - val_loss: 9.5883\n",
+ "Epoch 736/1000\n",
+ "0s - loss: 7.0406 - val_loss: 9.6821\n",
+ "Epoch 737/1000\n",
+ "0s - loss: 7.0693 - val_loss: 9.8972\n",
+ "Epoch 738/1000\n",
+ "0s - loss: 7.0721 - val_loss: 9.6410\n",
+ "Epoch 739/1000\n",
+ "0s - loss: 7.0562 - val_loss: 9.8917\n",
+ "Epoch 740/1000\n",
+ "0s - loss: 7.0402 - val_loss: 9.9640\n",
+ "Epoch 741/1000\n",
+ "0s - loss: 7.0679 - val_loss: 9.6972\n",
+ "Epoch 742/1000\n",
+ "0s - loss: 7.0200 - val_loss: 9.4677\n",
+ "Epoch 743/1000\n",
+ "0s - loss: 7.1283 - val_loss: 9.3255\n",
+ "Epoch 744/1000\n",
+ "0s - loss: 7.0938 - val_loss: 9.5011\n",
+ "Epoch 745/1000\n",
+ "0s - loss: 7.0472 - val_loss: 9.7615\n",
+ "Epoch 746/1000\n",
+ "0s - loss: 7.0363 - val_loss: 9.9264\n",
+ "Epoch 747/1000\n",
+ "0s - loss: 7.0348 - val_loss: 10.0550\n",
+ "Epoch 748/1000\n",
+ "0s - loss: 7.0511 - val_loss: 10.0585\n",
+ "Epoch 749/1000\n",
+ "0s - loss: 7.0511 - val_loss: 9.8607\n",
+ "Epoch 750/1000\n",
+ "0s - loss: 7.0340 - val_loss: 9.6419\n",
+ "Epoch 751/1000\n",
+ "0s - loss: 7.0152 - val_loss: 9.7737\n",
+ "Epoch 752/1000\n",
+ "0s - loss: 7.0244 - val_loss: 9.9911\n",
+ "Epoch 753/1000\n",
+ "0s - loss: 7.0350 - val_loss: 9.9427\n",
+ "Epoch 754/1000\n",
+ "0s - loss: 7.0655 - val_loss: 9.7622\n",
+ "Epoch 755/1000\n",
+ "0s - loss: 7.0352 - val_loss: 10.0412\n",
+ "Epoch 756/1000\n",
+ "0s - loss: 7.0536 - val_loss: 10.2095\n",
+ "Epoch 757/1000\n",
+ "0s - loss: 7.0722 - val_loss: 10.0711\n",
+ "Epoch 758/1000\n",
+ "0s - loss: 7.0229 - val_loss: 10.0303\n",
+ "Epoch 759/1000\n",
+ "0s - loss: 7.0469 - val_loss: 9.7785\n",
+ "Epoch 760/1000\n",
+ "0s - loss: 7.0256 - val_loss: 9.7082\n",
+ "Epoch 761/1000\n",
+ "0s - loss: 7.0278 - val_loss: 9.8486\n",
+ "Epoch 762/1000\n",
+ "0s - loss: 7.0223 - val_loss: 9.7083\n",
+ "Epoch 763/1000\n",
+ "0s - loss: 7.0196 - val_loss: 9.7462\n",
+ "Epoch 764/1000\n",
+ "0s - loss: 6.9926 - val_loss: 9.8832\n",
+ "Epoch 765/1000\n",
+ "0s - loss: 7.0053 - val_loss: 10.0246\n",
+ "Epoch 766/1000\n",
+ "0s - loss: 7.0409 - val_loss: 10.1664\n",
+ "Epoch 767/1000\n",
+ "0s - loss: 7.0375 - val_loss: 10.1345\n",
+ "Epoch 768/1000\n",
+ "0s - loss: 7.0331 - val_loss: 10.0566\n",
+ "Epoch 769/1000\n",
+ "0s - loss: 7.0115 - val_loss: 10.0263\n",
+ "Epoch 770/1000\n",
+ "0s - loss: 7.0115 - val_loss: 9.9855\n",
+ "Epoch 771/1000\n",
+ "0s - loss: 6.9938 - val_loss: 9.8025\n",
+ "Epoch 772/1000\n",
+ "0s - loss: 6.9954 - val_loss: 9.7895\n",
+ "Epoch 773/1000\n",
+ "0s - loss: 6.9769 - val_loss: 9.7211\n",
+ "Epoch 774/1000\n",
+ "0s - loss: 6.9778 - val_loss: 9.6848\n",
+ "Epoch 775/1000\n",
+ "0s - loss: 6.9773 - val_loss: 9.7152\n",
+ "Epoch 776/1000\n",
+ "0s - loss: 6.9643 - val_loss: 9.7161\n",
+ "Epoch 777/1000\n",
+ "0s - loss: 6.9572 - val_loss: 9.8465\n",
+ "Epoch 778/1000\n",
+ "0s - loss: 6.9902 - val_loss: 10.0346\n",
+ "Epoch 779/1000\n",
+ "0s - loss: 6.9692 - val_loss: 9.8583\n",
+ "Epoch 780/1000\n",
+ "0s - loss: 6.9531 - val_loss: 9.8203\n",
+ "Epoch 781/1000\n",
+ "0s - loss: 6.9532 - val_loss: 9.8267\n",
+ "Epoch 782/1000\n",
+ "0s - loss: 6.9598 - val_loss: 9.7866\n",
+ "Epoch 783/1000\n",
+ "0s - loss: 6.9967 - val_loss: 10.1662\n",
+ "Epoch 784/1000\n",
+ "0s - loss: 6.9763 - val_loss: 10.1285\n",
+ "Epoch 785/1000\n",
+ "0s - loss: 6.9640 - val_loss: 9.9857\n",
+ "Epoch 786/1000\n",
+ "0s - loss: 6.9655 - val_loss: 9.8088\n",
+ "Epoch 787/1000\n",
+ "0s - loss: 6.9495 - val_loss: 9.7072\n",
+ "Epoch 788/1000\n",
+ "0s - loss: 6.9717 - val_loss: 9.4951\n",
+ "Epoch 789/1000\n",
+ "0s - loss: 6.9674 - val_loss: 9.5096\n",
+ "Epoch 790/1000\n",
+ "0s - loss: 6.9520 - val_loss: 9.5795\n",
+ "Epoch 791/1000\n",
+ "0s - loss: 6.9528 - val_loss: 9.7407\n",
+ "Epoch 792/1000\n",
+ "0s - loss: 6.9519 - val_loss: 9.8536\n",
+ "Epoch 793/1000\n",
+ "0s - loss: 6.9443 - val_loss: 9.8120\n",
+ "Epoch 794/1000\n",
+ "0s - loss: 6.9392 - val_loss: 9.8287\n",
+ "Epoch 795/1000\n",
+ "0s - loss: 6.9331 - val_loss: 9.8130\n",
+ "Epoch 796/1000\n",
+ "0s - loss: 6.9544 - val_loss: 9.9168\n",
+ "Epoch 797/1000\n",
+ "0s - loss: 6.9442 - val_loss: 9.6829\n",
+ "Epoch 798/1000\n",
+ "0s - loss: 6.9732 - val_loss: 9.4329\n",
+ "Epoch 799/1000\n",
+ "0s - loss: 6.9475 - val_loss: 9.5505\n",
+ "Epoch 800/1000\n",
+ "0s - loss: 6.9252 - val_loss: 9.6780\n",
+ "Epoch 801/1000\n",
+ "0s - loss: 6.9545 - val_loss: 9.8744\n",
+ "Epoch 802/1000\n",
+ "0s - loss: 6.9281 - val_loss: 9.5915\n",
+ "Epoch 803/1000\n",
+ "0s - loss: 7.0609 - val_loss: 9.2334\n",
+ "Epoch 804/1000\n",
+ "0s - loss: 7.0305 - val_loss: 9.3938\n",
+ "Epoch 805/1000\n",
+ "0s - loss: 6.9366 - val_loss: 9.7316\n",
+ "Epoch 806/1000\n",
+ "0s - loss: 6.9368 - val_loss: 10.0449\n",
+ "Epoch 807/1000\n",
+ "0s - loss: 6.9448 - val_loss: 10.0714\n",
+ "Epoch 808/1000\n",
+ "0s - loss: 6.9565 - val_loss: 9.9018\n",
+ "Epoch 809/1000\n",
+ "0s - loss: 6.9824 - val_loss: 9.4973\n",
+ "Epoch 810/1000\n",
+ "0s - loss: 6.9154 - val_loss: 9.6698\n",
+ "Epoch 811/1000\n",
+ "0s - loss: 6.9244 - val_loss: 9.9868\n",
+ "Epoch 812/1000\n",
+ "0s - loss: 6.9601 - val_loss: 10.2352\n",
+ "Epoch 813/1000\n",
+ "0s - loss: 6.9547 - val_loss: 9.9705\n",
+ "Epoch 814/1000\n",
+ "0s - loss: 6.9387 - val_loss: 9.6667\n",
+ "Epoch 815/1000\n",
+ "0s - loss: 6.9093 - val_loss: 9.8577\n",
+ "Epoch 816/1000\n",
+ "0s - loss: 6.9209 - val_loss: 9.6485\n",
+ "Epoch 817/1000\n",
+ "0s - loss: 6.9048 - val_loss: 9.6576\n",
+ "Epoch 818/1000\n",
+ "0s - loss: 6.9051 - val_loss: 9.6882\n",
+ "Epoch 819/1000\n",
+ "0s - loss: 6.9024 - val_loss: 9.6886\n",
+ "Epoch 820/1000\n",
+ "0s - loss: 6.9172 - val_loss: 9.6644\n",
+ "Epoch 821/1000\n",
+ "0s - loss: 6.9513 - val_loss: 10.0582\n",
+ "Epoch 822/1000\n",
+ "0s - loss: 6.9429 - val_loss: 9.9693\n",
+ "Epoch 823/1000\n",
+ "0s - loss: 6.8838 - val_loss: 9.5948\n",
+ "Epoch 824/1000\n",
+ "0s - loss: 6.9588 - val_loss: 9.3062\n",
+ "Epoch 825/1000\n",
+ "0s - loss: 6.9751 - val_loss: 9.4388\n",
+ "Epoch 826/1000\n",
+ "0s - loss: 6.9372 - val_loss: 9.5712\n",
+ "Epoch 827/1000\n",
+ "0s - loss: 6.9173 - val_loss: 9.7322\n",
+ "Epoch 828/1000\n",
+ "0s - loss: 6.9278 - val_loss: 9.7984\n",
+ "Epoch 829/1000\n",
+ "0s - loss: 6.9414 - val_loss: 9.9805\n",
+ "Epoch 830/1000\n",
+ "0s - loss: 6.9224 - val_loss: 9.8754\n",
+ "Epoch 831/1000\n",
+ "0s - loss: 6.9103 - val_loss: 9.9649\n",
+ "Epoch 832/1000\n",
+ "0s - loss: 6.9216 - val_loss: 10.0827\n",
+ "Epoch 833/1000\n",
+ "0s - loss: 6.9478 - val_loss: 10.0666\n",
+ "Epoch 834/1000\n",
+ "0s - loss: 6.8892 - val_loss: 9.5971\n",
+ "Epoch 835/1000\n",
+ "0s - loss: 7.0854 - val_loss: 9.1630\n",
+ "Epoch 836/1000\n",
+ "0s - loss: 7.1181 - val_loss: 9.2584\n",
+ "Epoch 837/1000\n",
+ "0s - loss: 7.0183 - val_loss: 9.5478\n",
+ "Epoch 838/1000\n",
+ "0s - loss: 6.9077 - val_loss: 9.8269\n",
+ "Epoch 839/1000\n",
+ "0s - loss: 6.8964 - val_loss: 9.8702\n",
+ "Epoch 840/1000\n",
+ "0s - loss: 6.8957 - val_loss: 9.8755\n",
+ "Epoch 841/1000\n",
+ "0s - loss: 6.9010 - val_loss: 9.9531\n",
+ "Epoch 842/1000\n",
+ "0s - loss: 6.9054 - val_loss: 9.8704\n",
+ "Epoch 843/1000\n",
+ "0s - loss: 6.8965 - val_loss: 10.0415\n",
+ "Epoch 844/1000\n",
+ "0s - loss: 6.9253 - val_loss: 10.1834\n",
+ "Epoch 845/1000\n",
+ "0s - loss: 6.9789 - val_loss: 10.3540\n",
+ "Epoch 846/1000\n",
+ "0s - loss: 6.9703 - val_loss: 9.9366\n",
+ "Epoch 847/1000\n",
+ "0s - loss: 6.8811 - val_loss: 9.7014\n",
+ "Epoch 848/1000\n",
+ "0s - loss: 6.8948 - val_loss: 9.7033\n",
+ "Epoch 849/1000\n",
+ "0s - loss: 6.8904 - val_loss: 9.7017\n",
+ "Epoch 850/1000\n",
+ "0s - loss: 6.8816 - val_loss: 9.6060\n",
+ "Epoch 851/1000\n",
+ "0s - loss: 6.9092 - val_loss: 9.4696\n",
+ "Epoch 852/1000\n",
+ "0s - loss: 6.9205 - val_loss: 9.4838\n",
+ "Epoch 853/1000\n",
+ "0s - loss: 6.8918 - val_loss: 9.6495\n",
+ "Epoch 854/1000\n",
+ "0s - loss: 6.8794 - val_loss: 9.7015\n",
+ "Epoch 855/1000\n",
+ "0s - loss: 6.9231 - val_loss: 9.5852\n",
+ "Epoch 856/1000\n",
+ "0s - loss: 6.8931 - val_loss: 9.7922\n",
+ "Epoch 857/1000\n",
+ "0s - loss: 6.8721 - val_loss: 9.8046\n",
+ "Epoch 858/1000\n",
+ "0s - loss: 6.8756 - val_loss: 9.8446\n",
+ "Epoch 859/1000\n",
+ "0s - loss: 6.8766 - val_loss: 9.7762\n",
+ "Epoch 860/1000\n",
+ "0s - loss: 6.8718 - val_loss: 9.6024\n",
+ "Epoch 861/1000\n",
+ "0s - loss: 6.9005 - val_loss: 9.4156\n",
+ "Epoch 862/1000\n",
+ "0s - loss: 6.9300 - val_loss: 9.4512\n",
+ "Epoch 863/1000\n",
+ "0s - loss: 6.9184 - val_loss: 9.5185\n",
+ "Epoch 864/1000\n",
+ "0s - loss: 6.9044 - val_loss: 9.7656\n",
+ "Epoch 865/1000\n",
+ "0s - loss: 6.8805 - val_loss: 9.8036\n",
+ "Epoch 866/1000\n",
+ "0s - loss: 6.8813 - val_loss: 9.8981\n",
+ "Epoch 867/1000\n",
+ "0s - loss: 6.9482 - val_loss: 10.1740\n",
+ "Epoch 868/1000\n",
+ "0s - loss: 6.8805 - val_loss: 9.6598\n",
+ "Epoch 869/1000\n",
+ "0s - loss: 6.8879 - val_loss: 9.4575\n",
+ "Epoch 870/1000\n",
+ "0s - loss: 6.9365 - val_loss: 9.4378\n",
+ "Epoch 871/1000\n",
+ "0s - loss: 6.9177 - val_loss: 9.5994\n",
+ "Epoch 872/1000\n",
+ "0s - loss: 6.8441 - val_loss: 10.2577\n",
+ "Epoch 873/1000\n",
+ "0s - loss: 6.9625 - val_loss: 10.3729\n",
+ "Epoch 874/1000\n",
+ "0s - loss: 6.9348 - val_loss: 9.9135\n",
+ "Epoch 875/1000\n",
+ "0s - loss: 6.8642 - val_loss: 9.6878\n",
+ "Epoch 876/1000\n",
+ "0s - loss: 6.8998 - val_loss: 9.5717\n",
+ "Epoch 877/1000\n",
+ "0s - loss: 6.8568 - val_loss: 10.0258\n",
+ "Epoch 878/1000\n",
+ "0s - loss: 6.9055 - val_loss: 10.1910\n",
+ "Epoch 879/1000\n",
+ "0s - loss: 6.9205 - val_loss: 9.8503\n",
+ "Epoch 880/1000\n",
+ "0s - loss: 6.8690 - val_loss: 9.6842\n",
+ "Epoch 881/1000\n",
+ "0s - loss: 6.8710 - val_loss: 9.7441\n",
+ "Epoch 882/1000\n",
+ "0s - loss: 6.8867 - val_loss: 9.6026\n",
+ "Epoch 883/1000\n",
+ "0s - loss: 6.8913 - val_loss: 9.5797\n",
+ "Epoch 884/1000\n",
+ "0s - loss: 6.8788 - val_loss: 9.9564\n",
+ "Epoch 885/1000\n",
+ "0s - loss: 6.9464 - val_loss: 10.2895\n",
+ "Epoch 886/1000\n",
+ "0s - loss: 6.9576 - val_loss: 10.1720\n",
+ "Epoch 887/1000\n",
+ "0s - loss: 6.9065 - val_loss: 9.9135\n",
+ "Epoch 888/1000\n",
+ "0s - loss: 6.8565 - val_loss: 9.5755\n",
+ "Epoch 889/1000\n",
+ "0s - loss: 6.8879 - val_loss: 9.4012\n",
+ "Epoch 890/1000\n",
+ "0s - loss: 6.8799 - val_loss: 9.4772\n",
+ "Epoch 891/1000\n",
+ "0s - loss: 6.8638 - val_loss: 9.5824\n",
+ "Epoch 892/1000\n",
+ "0s - loss: 6.8623 - val_loss: 9.6295\n",
+ "Epoch 893/1000\n",
+ "0s - loss: 6.8857 - val_loss: 9.7806\n",
+ "Epoch 894/1000\n",
+ "0s - loss: 6.8549 - val_loss: 9.4955\n",
+ "Epoch 895/1000\n",
+ "0s - loss: 6.8735 - val_loss: 9.5176\n",
+ "Epoch 896/1000\n",
+ "0s - loss: 6.9015 - val_loss: 9.8826\n",
+ "Epoch 897/1000\n",
+ "0s - loss: 6.9207 - val_loss: 10.1765\n",
+ "Epoch 898/1000\n",
+ "0s - loss: 6.9177 - val_loss: 10.1402\n",
+ "Epoch 899/1000\n",
+ "0s - loss: 6.9144 - val_loss: 9.8987\n",
+ "Epoch 900/1000\n",
+ "0s - loss: 6.8668 - val_loss: 9.8062\n",
+ "Epoch 901/1000\n",
+ "0s - loss: 6.8656 - val_loss: 9.5354\n",
+ "Epoch 902/1000\n",
+ "0s - loss: 6.8748 - val_loss: 9.5189\n",
+ "Epoch 903/1000\n",
+ "0s - loss: 6.8680 - val_loss: 9.5157\n",
+ "Epoch 904/1000\n",
+ "0s - loss: 6.8913 - val_loss: 9.8827\n",
+ "Epoch 905/1000\n",
+ "0s - loss: 6.8744 - val_loss: 9.8446\n",
+ "Epoch 906/1000\n",
+ "0s - loss: 6.8681 - val_loss: 9.8132\n",
+ "Epoch 907/1000\n",
+ "0s - loss: 6.8641 - val_loss: 9.6817\n",
+ "Epoch 908/1000\n",
+ "0s - loss: 6.8816 - val_loss: 9.7919\n",
+ "Epoch 909/1000\n",
+ "0s - loss: 6.8545 - val_loss: 9.6069\n",
+ "Epoch 910/1000\n",
+ "0s - loss: 6.8496 - val_loss: 9.5663\n",
+ "Epoch 911/1000\n",
+ "0s - loss: 6.9753 - val_loss: 9.2802\n",
+ "Epoch 912/1000\n",
+ "0s - loss: 6.9398 - val_loss: 9.4622\n",
+ "Epoch 913/1000\n",
+ "0s - loss: 6.8845 - val_loss: 9.5526\n",
+ "Epoch 914/1000\n",
+ "0s - loss: 6.8906 - val_loss: 9.7940\n",
+ "Epoch 915/1000\n",
+ "0s - loss: 6.8764 - val_loss: 9.6684\n",
+ "Epoch 916/1000\n",
+ "0s - loss: 6.8513 - val_loss: 9.7581\n",
+ "Epoch 917/1000\n",
+ "0s - loss: 6.9080 - val_loss: 10.1138\n",
+ "Epoch 918/1000\n",
+ "0s - loss: 6.9075 - val_loss: 9.8867\n",
+ "Epoch 919/1000\n",
+ "0s - loss: 6.8566 - val_loss: 9.7181\n",
+ "Epoch 920/1000\n",
+ "0s - loss: 6.8579 - val_loss: 9.6089\n",
+ "Epoch 921/1000\n",
+ "0s - loss: 6.8458 - val_loss: 9.7560\n",
+ "Epoch 922/1000\n",
+ "0s - loss: 6.8494 - val_loss: 9.7760\n",
+ "Epoch 923/1000\n",
+ "0s - loss: 6.8708 - val_loss: 9.8110\n",
+ "Epoch 924/1000\n",
+ "0s - loss: 6.8485 - val_loss: 9.5868\n",
+ "Epoch 925/1000\n",
+ "0s - loss: 6.8759 - val_loss: 9.4506\n",
+ "Epoch 926/1000\n",
+ "0s - loss: 6.8607 - val_loss: 9.5612\n",
+ "Epoch 927/1000\n",
+ "0s - loss: 6.8592 - val_loss: 9.8613\n",
+ "Epoch 928/1000\n",
+ "0s - loss: 6.8764 - val_loss: 9.9875\n",
+ "Epoch 929/1000\n",
+ "0s - loss: 6.8927 - val_loss: 9.9327\n",
+ "Epoch 930/1000\n",
+ "0s - loss: 6.8696 - val_loss: 9.5220\n",
+ "Epoch 931/1000\n",
+ "0s - loss: 6.8535 - val_loss: 9.4219\n",
+ "Epoch 932/1000\n",
+ "0s - loss: 6.8617 - val_loss: 9.4907\n",
+ "Epoch 933/1000\n",
+ "0s - loss: 6.8641 - val_loss: 9.4480\n",
+ "Epoch 934/1000\n",
+ "0s - loss: 6.8683 - val_loss: 9.4132\n",
+ "Epoch 935/1000\n",
+ "0s - loss: 6.8730 - val_loss: 9.4276\n",
+ "Epoch 936/1000\n",
+ "0s - loss: 6.8620 - val_loss: 9.5121\n",
+ "Epoch 937/1000\n",
+ "0s - loss: 6.8471 - val_loss: 9.6378\n",
+ "Epoch 938/1000\n",
+ "0s - loss: 6.8805 - val_loss: 9.5233\n",
+ "Epoch 939/1000\n",
+ "0s - loss: 6.8603 - val_loss: 9.6506\n",
+ "Epoch 940/1000\n",
+ "0s - loss: 6.8558 - val_loss: 9.7673\n",
+ "Epoch 941/1000\n",
+ "0s - loss: 6.8468 - val_loss: 9.6891\n",
+ "Epoch 942/1000\n",
+ "0s - loss: 6.8399 - val_loss: 9.7591\n",
+ "Epoch 943/1000\n",
+ "0s - loss: 6.9042 - val_loss: 9.5560\n",
+ "Epoch 944/1000\n",
+ "0s - loss: 6.8441 - val_loss: 10.0014\n",
+ "Epoch 945/1000\n",
+ "0s - loss: 6.8663 - val_loss: 10.0668\n",
+ "Epoch 946/1000\n",
+ "0s - loss: 6.8788 - val_loss: 9.8825\n",
+ "Epoch 947/1000\n",
+ "0s - loss: 6.8282 - val_loss: 9.6162\n",
+ "Epoch 948/1000\n",
+ "0s - loss: 6.8535 - val_loss: 9.5697\n",
+ "Epoch 949/1000\n",
+ "0s - loss: 6.8519 - val_loss: 9.8834\n",
+ "Epoch 950/1000\n",
+ "0s - loss: 6.8618 - val_loss: 10.0135\n",
+ "Epoch 951/1000\n",
+ "0s - loss: 6.8740 - val_loss: 10.1331\n",
+ "Epoch 952/1000\n",
+ "0s - loss: 6.8871 - val_loss: 10.0984\n",
+ "Epoch 953/1000\n",
+ "0s - loss: 6.8641 - val_loss: 9.8316\n",
+ "Epoch 954/1000\n",
+ "0s - loss: 6.8550 - val_loss: 9.8152\n",
+ "Epoch 955/1000\n",
+ "0s - loss: 6.9007 - val_loss: 9.5681\n",
+ "Epoch 956/1000\n",
+ "0s - loss: 6.8400 - val_loss: 9.7079\n",
+ "Epoch 957/1000\n",
+ "0s - loss: 6.8434 - val_loss: 9.6331\n",
+ "Epoch 958/1000\n",
+ "0s - loss: 6.8449 - val_loss: 9.5090\n",
+ "Epoch 959/1000\n",
+ "0s - loss: 6.8638 - val_loss: 9.2868\n",
+ "Epoch 960/1000\n",
+ "0s - loss: 6.9117 - val_loss: 9.4991\n",
+ "Epoch 961/1000\n",
+ "0s - loss: 6.8508 - val_loss: 9.5273\n",
+ "Epoch 962/1000\n",
+ "0s - loss: 6.8727 - val_loss: 9.3635\n",
+ "Epoch 963/1000\n",
+ "0s - loss: 6.8744 - val_loss: 9.4758\n",
+ "Epoch 964/1000\n",
+ "0s - loss: 6.8471 - val_loss: 9.5344\n",
+ "Epoch 965/1000\n",
+ "0s - loss: 6.8581 - val_loss: 9.4198\n",
+ "Epoch 966/1000\n",
+ "0s - loss: 6.8460 - val_loss: 9.3622\n",
+ "Epoch 967/1000\n",
+ "0s - loss: 6.8642 - val_loss: 9.6134\n",
+ "Epoch 968/1000\n",
+ "0s - loss: 6.8351 - val_loss: 9.6498\n",
+ "Epoch 969/1000\n",
+ "0s - loss: 6.8439 - val_loss: 9.7189\n",
+ "Epoch 970/1000\n",
+ "0s - loss: 6.8292 - val_loss: 9.6398\n",
+ "Epoch 971/1000\n",
+ "0s - loss: 6.8407 - val_loss: 9.7949\n",
+ "Epoch 972/1000\n",
+ "0s - loss: 6.8648 - val_loss: 9.8820\n",
+ "Epoch 973/1000\n",
+ "0s - loss: 6.8471 - val_loss: 9.7040\n",
+ "Epoch 974/1000\n",
+ "0s - loss: 6.8289 - val_loss: 9.3830\n",
+ "Epoch 975/1000\n",
+ "0s - loss: 6.8533 - val_loss: 9.3661\n",
+ "Epoch 976/1000\n",
+ "0s - loss: 6.8200 - val_loss: 9.5863\n",
+ "Epoch 977/1000\n",
+ "0s - loss: 6.8610 - val_loss: 9.9952\n",
+ "Epoch 978/1000\n",
+ "0s - loss: 6.8941 - val_loss: 10.1854\n",
+ "Epoch 979/1000\n",
+ "0s - loss: 6.9012 - val_loss: 10.0414\n",
+ "Epoch 980/1000\n",
+ "0s - loss: 6.8357 - val_loss: 9.5292\n",
+ "Epoch 981/1000\n",
+ "0s - loss: 6.8663 - val_loss: 9.4074\n",
+ "Epoch 982/1000\n",
+ "0s - loss: 6.8386 - val_loss: 9.6499\n",
+ "Epoch 983/1000\n",
+ "0s - loss: 6.8190 - val_loss: 9.7233\n",
+ "Epoch 984/1000\n",
+ "0s - loss: 6.8458 - val_loss: 9.8233\n",
+ "Epoch 985/1000\n",
+ "0s - loss: 6.8554 - val_loss: 9.3096\n",
+ "Epoch 986/1000\n",
+ "0s - loss: 6.8756 - val_loss: 9.3255\n",
+ "Epoch 987/1000\n",
+ "0s - loss: 6.8636 - val_loss: 9.4233\n",
+ "Epoch 988/1000\n",
+ "0s - loss: 6.8575 - val_loss: 9.7051\n",
+ "Epoch 989/1000\n",
+ "0s - loss: 6.8163 - val_loss: 9.9605\n",
+ "Epoch 990/1000\n",
+ "0s - loss: 6.8583 - val_loss: 9.9562\n",
+ "Epoch 991/1000\n",
+ "0s - loss: 6.8623 - val_loss: 9.7375\n",
+ "Epoch 992/1000\n",
+ "0s - loss: 6.8466 - val_loss: 9.9335\n",
+ "Epoch 993/1000\n",
+ "0s - loss: 6.8488 - val_loss: 9.9882\n",
+ "Epoch 994/1000\n",
+ "0s - loss: 6.9191 - val_loss: 10.2011\n",
+ "Epoch 995/1000\n",
+ "0s - loss: 6.8788 - val_loss: 9.7320\n",
+ "Epoch 996/1000\n",
+ "0s - loss: 6.8663 - val_loss: 9.3897\n",
+ "Epoch 997/1000\n",
+ "0s - loss: 6.8409 - val_loss: 9.4664\n",
+ "Epoch 998/1000\n",
+ "0s - loss: 6.8450 - val_loss: 9.6415\n",
+ "Epoch 999/1000\n",
+ "0s - loss: 6.8483 - val_loss: 9.8600\n",
+ "Epoch 1000/1000\n",
+ "0s - loss: 6.8648 - val_loss: 9.8251\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 2,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# Split the datasets\n",
+ "\n",
+ "X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y,\n",
+ " test_size=0.25)\n",
+ "\n",
+ "# Scale the data for convergency optimization\n",
+ "scaler = preprocessing.StandardScaler()\n",
+ "\n",
+ "# Set the transform parameters\n",
+ "X_train = scaler.fit_transform(X_train)\n",
+ "\n",
+ "# Build a 2 layer fully connected DNN with 10 and 5 units respectively\n",
+ "\n",
+ "model = Sequential()\n",
+ "model.add(Dense(10, input_dim=7, init='normal', activation='relu'))\n",
+ "model.add(Dense(5, init='normal', activation='relu'))\n",
+ "model.add(Dense(1, init='normal'))\n",
+ "\n",
+ "Compile the model, whith the mean squared error as a loss function\n",
+ "model.compile(loss='mean_squared_error', optimizer='adam')\n",
+ "\n",
+ "Fit the model, in 1000 epochs\n",
+ "model.fit(X_train, y_train, nb_epoch=1000, validation_split=0.33, shuffle=True,verbose=2 )\n",
+ "\n"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 2",
+ "language": "python",
+ "name": "python2"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.11+"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/5/CH5_linear_regression_nn.ipynb b/Building-Machine-Learning-Projects-with-TensorFlow-master/5/CH5_linear_regression_nn.ipynb
new file mode 100644
index 000000000..0d3abcebe
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/5/CH5_linear_regression_nn.ipynb
@@ -0,0 +1,144 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 70,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 70,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAMEAAACQCAYAAACxkA/OAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAHTNJREFUeJztnXuYVNWV6H+L7qrqgn6BtIg8ulFUUK8CMyYm5o6S6MTE\nRByN42uMD3xLMDd6g5oxYHwkaDQfeqM8QgSNPBIzGk3MJeamyQxR00RaMBdQvNqNIKHLRKII/aB7\n3T/WPlR124/qrqqu6q79+77z9alzdp29T/VZZ6+19tpri6ri8eQzQ7LdAI8n23gh8OQ9Xgg8eY8X\nAk/e44XAk/d4IfDkPV4I+oiI3CYii9NdNolrtYnIEUmWnSsiT6Sj3sFMYbYbkAuIyOXAN4Ajgb8D\nzwC3qerfu/qOqn432ev3pmwyl8tEeRF5DHhHVb/d+yYNbPK+JxCRm4HvAjcDpcDJQCXwgoh0+pIQ\nkYL+a+HHq89i3YMTVc3bDSgBPgTO63B8GNAAXO4+zwV+BjwB7AGudMeeSPjOV4E6IAb8O/A28NmE\n7z/h9iuBNle+3tVze8J1TgJeBN4HdgIPA4UJ59uAI7q4nypgLdabrXHffTzh/E+BXe7aa4HJ7vjV\nQDPQCHwA/MIdnwO86Y79GTgn2/+zTGz53hN8GogATyceVNWPgOeBMxIOnw38VFXLgRVBUQARORb4\nIXARMBooAw7vUFdHteQU4CjgdODbInKMO94KfB0YAXwK+CxwQ5L3swJYD4wE7gYu63D+eUzlOxTY\nENyHqi4BngTuU9VSVZ3hyr8JnKKqpcCdwE9EZFSSbRkw5LsQjATeU9W2Ts7tcucDXlLV5wBUtbFD\n2fOAZ1X1JVU9APSkVyswT1WbVXUTsBE40V17g6rWqLEdWAyc2tONiMg44B+Bb6tqi6r+F/Bcu0pV\nl6nqPlVtAb4DnCgiJV02UvXnqrrb7f8M2AZ8oqe2DDTyXQjeA0aKSGe/w2h3PuCdbq5zeOJ5Vd0P\n/LWHuncn7O8DigFE5CgReU5EdonIHuAe2gtjd21439UdUB/siMgQEfmeiLzprvs2JoxdXltEvioi\ntSLyvoi8DxyXZFsGFPkuBC8BTcC5iQdFpBj4AvDbhMPdeVl2AWMTvh8FDuljmx4FtgBHOtXrWyRn\nDO8Chru6A8Yn7F8CfBmzU8ox+0ESrt3u/kRkPNYL3aCqw1V1OPB/k2zLgCKvhUBVP8DUgodF5PMi\nUigiVcBqYDvwkyQv9RTwZRE5WURCwLweynf3IJUAH6jqPhGZBFyfTAOc6vQn4E4RCYnIZ7CHPqAY\nE/j3RWQY5hFLfPB3A4njD8MwI/w914tcARyfTFsGGnktBACqej9wO/B9zKvyEqZGnO5052SusRn4\nGiY872LelAbsoev0K918vgW4REQ+ABYBq3r4biIXYy7evwJ3AMsTzj2OCfZOzNPzYofvLgWOE5G/\nich/qOoW4EHgZeAvmCq0rpu6ByziXGGpX8j06j8BO1T17LRcdIDi3rR7gImqWt9TeU92SWdPcBOw\nOY3XG1CIyJdEJOoE4AFgkxeAgUFahEBExgJfBH6UjusNUGZgqtAOzBd/YXab40mWtKhDIvIzzJVX\nBtyc7+qQZ2CRcgCdiJwF7FbVV0XkNLrwfIiIn9HvySiq2if3bTrUoVOAs0XkLWAlMF1EHu+sYKZj\nQObOndsvsSb9Uc9gqaO/6kmFlIVAVW9X1fGqegSmB/9OVb+a6nU9nv4i78cJPJ60TqpR1d8Dv0/n\nNXvDaaedNmjqGSx19Gc9fSVtg2U9ViSi/VWXJ/8QETSLhrHHM6DxQuDJe/xEe0/aiMVi1NXVUVVV\nBUBtbS0AU6dOpaKiIost6x4vBJ60sHLlambOvIFwuIp9+7bR1tZGa6sChxMOx1i2bBEXXXRBtpvZ\nKd4w9qRMLBajsnIS+/dXYxPyjsI07bXu8wsUFd3I9u1vZKxH8IaxJ6vU1dURDlcBJwC12Dz+CdgE\nuUnAAzQ2NrNo0ZLsNbIbfE/gSZl4TzAHm7DWBBQAIaw3OAHYRDQ6nfr6rRnpDbLaE4hIRET+6CZk\nvyYic1O9pmfgcfXVl2KzSm/DHqtGbJr1Ca7ECRQWjqeuri4r7euWNAUvDXV/C7DpeJ/opIx6BhcN\nDQ161133aChUohBSqFIoU3hS4XiF4QobFVRho0Yi5drQ0JCRtrjnq0/Pb1q8Q6q6z+1GMI+T13sG\nOStXrubKK6+jsbEJCLvtXSzBxRnAddg8/U9hxvG7LFiwICddpemaWTZERGqxCdkvqOr6dFzXk5vE\nYjFmzryBxsYfYg/9SOy99xVMEB4GDmBz+euA7xAKFXLuuedkqcXdk66eoA2YKiKlwDMicqxaBoZ2\nzJs37+D+aaedlvOBVZ7Oqa2tZciQccBULN1RK5b7qwp7+O/DvEKBPXAxRUX3U1dXl7aeYO3ataxd\nuzYt10q7d0hE7gA+UtUHOxzXdNfl6X/iatB+YCiWJmkn5gmKYJ6h0Vj2mmoCz1AkcirvvDNIxwlE\nZKSIlLn9KKYQbk31up7cI64GPY098LdiKY7CwFVABfAQphXPAaZjKVZPZsGC+3LSHoD02ASjgWoR\neRX4I7BGVZ9Pw3U9OUQsFuP555+nsLAS0/UPwcYEAMZgOYj/BnwSWADcC5QSidSxcOECrr326iy0\nOjn8YJmnRxYtWsLs2bdQUDCS/fvfxTzhB7CHXzC74CVshPh6YAThcIw77pjDtdde3S89QCrqkBcC\nT7csWrSE6677Gqb/F2LqTxHmAn0EU4sOYCpRFfAWBQXNvPbaK0yePLnf2uljhzwZYcuWLcya9T+w\nN/8V2KI+UcwTdCFwGOYZOoCNEDdQWNjKE0881q8CkCo+lNrTKStXrubSS6+ktbUMe/MvwgLjhmG+\n/4+wVZ+ew1Z62khBwTw2bfrTgBIA8OpQ3pI4AaaiouLg5+LiYt555x1mzLiAxsZWoAXrCcZiybpD\n2EpSfwcCN2kVUEco1MLOnW9lxQuUijrke4I8JJgAIzKa1tZ3uOSSC1m58ina2oppaooRDo+mubkY\n0/fbMMO3zu23YlnnHwQWAi+4c1VEo59P64BYf+F7gjwjFosxduxRNDffDszHPNxvYcsifB+Lf9yO\nhUAIJgiKPfxh4MfAL7B1Sdowr1DmQ6V7wvcEnqSpra2luXkEJgDV2AjvFZhv/2jsIX8Ee+APuPOH\nYAvdDAPOcdt9FBaeQEHBqUQiE2hpqWfp0kcGXC8AXgjyjj179mBBbkFsTwxbc3Actpbf65gh3IQF\nxz2LLX6zF3MmbnLf20Uo1Mwrr7zI3r17D9oWA5K+xmAHG2Yx/Q5b1O01YHYX5dIRNu5JgRUrVmlR\nUblCWCHqYv0bFGa6z2crjFE4UmGymxuwUWGVwjCFiMJQhSM1HC7TFStWZfuWDkIK8wlStglE5DDg\nMLXU7MXAK8AMVd3aoZymWpen77SfA
nkX5tYU7O1+CCIxAOJqdRSLDZqPjQu8yezZ13LKKZ+mvLw8\n59KoZNUmUNW/YBFTqOpeEdmCjaf7ILos0tEFumjREvbvL8Me6j9grs5/xAzfQ1HdgwlGKzY/YC+2\niP2hwDZuvfVmvvvde7JyLxmnr11IZxuBwxiKOzmXiV7Q0wkLFy7WSKRcS0qmajQ6QhcuXOzUoGEK\nJ7rpjmucarNRYbM7N1zhDqcaVbsyj2pRUeamRaYLsj29Eg4ugP0UcJOq7u2sjJ9Uk3ks1ucm4GWa\nmsx1eeONn6K1dRTm338DywBxL+Ye3YKFQZcDo7BlnccB/wKMJBJ5jx//eGFOqT6Qg5NqRKQQ+CXw\na1Vd0EUZTUddnq6JxWKMGzeRpqYJwKvu6BbgHzDNdzy2sNByzB4oxNSi24C5mFdoLeb9WUso9GU2\nbqwZEGEQuRBA92Ngc1cC4OkfLAlWJeby3AQsAU7CHHhnA/8P+BnwLUwgbsMe/Huxga9mbGL8ROCL\nPPzwgwNCAFKmr3pUsGGvllbs1VMLbADO7KRcptRBj6OhoUGj0REK852OH+j2JW4bpTDR2QBRhafd\n3/kKxW7/MC0oiOrChYuzfTu9gmy6SJPFq0P9Q/s5wMcAvwUqsbDn912pQP2xhLnx+cCWNzQcvp4d\nO97MOTugO3JBHfLkEK2tLdiS0tuwALeJ2IN+GxYVOg+bCfsUNno8DrMDKoCLCYcn5GamuAzhe4JB\nRCwWY8yYI2lpAYv7mYfNA27G3vzzMeEYArwJrMcC5WJY4Fz/ZIbIBL4nyANisRjr168nFot1eby6\nupqWllYsxHki8E3MJXo1MJdwuIRQqIFQKIYZzlXYbLFbgFMxIcjtzBAZoa/GRG83vGHcZ1asWKXR\n6AgtK5umRUXletdd92hDQ8PBWKBhw47RwsIgtudIFw80wg2EXdMu5qegoFhnzZqt0egILS2dqqFQ\nsYbDZVpcfLxGIqUDziAOwBvGg5f2C2AE2RwOoajoPZqbm2lri2Au0G3EY///CzOIb8fGA6Ikpkgv\nKjqNDRv+cDD6E2gXYjEQ8fMJBjHxlIejsWRWa4ETaGz8DOaRXouFad0MDMfGAE7GhKHNfW8kiSnS\nRcawd+9eTjrppIP1DNSHPx14myBHicVi3H33vcyYcQEffRR4eSqxh/lBzKg9HOsdLgfew4zdiLvC\nUswuaMDmCWxyxzehuvNgD+DB2wS5SDzuPwhwC+L5g8GvqMIENwBWrFCqUKRwmCt3lLMLyhQudvMH\n4jZBLs0DSBekYBOk6wFfCuwGNnVTJpO/waAhPur7pMI0F/EZGLrByO4oJwgzFQ5RGJ0gJMsShKXY\nRYZOUojqkCER3bx5c7ZvMSOkIgTpUoceAz6fpmvlNfFF8M7AotLXAs9jxm8lZsZ9gOn7j2Nx/3uw\n/KB3A1/D5gefidkDr7ty9RQXH8vevZ0G+OY1aRECVV1HfEzekwJVVVU0N9dhs1X/CfgiNrnlDcwz\n9HvM7x942kZiuv8W4B7M99/IJZecTzjcgKVLOQnYRUtLvbcFOqOvXUjHDXtNeXUoBTZv3qzLli3T\niy++VNvPA17ldP7EMYBqhdVODSrTjuuDBZNpgvGAaHTEoLQFAsiFSTXJ4CfVtCcx69ucObfz3HNr\nsOmMuzEvzxGYSvPfgZXAxZiXaDxwmruKApe6Y3E3aChUybRpU6iv3zrgxwA6I52TanxPkCWCUeBo\n9AiFwoSQ5lKFcQr/zb3xL3Y9wD3O+1OU0EPYWz8SKdVwuOxjPUGuT4lMJ+RITyBu83RB8OZvbm7m\niiuuo6npaeBL2M82GgtwWw5chBm807FIzwJsHbA2V3aeO2cjxXfffTdjxoxh5szphEKVAzoRVlbo\nq/QkbsAKLCa3Ccvhd0UnZTL7Kshx2r/5IwpHq01kjygc7v4el/DGH5ZwLKo2GeZyNwYQuE1rFCbo\nQw89pKrmXq2pqcmrHiCAbI8TJFVRHgtB3PdfnWDUjlB4VGG8e8jPcqpOqTNyH1U41pULMkDc9zFV\nCKK6evXqbN9i1klFCHzsUD9QW1uLyFgsl2cVZvg2A9/AVJyJWLz/i1ja83uwlWAS5wGMwNYFa8Vi\ng8YCOxgyRJk+fXp/3s6gw8cOZZiVK1dz1lnnsm/fNmxhi7cwf/8fsCS4Q9yxnVi+/78BkzEbAOzB\n3w8IhYUFnH/+uUQiIYqKmohEQvzkJ8u87p8iPpQ6g8RiMcaPP5rGRgFmYSu9KzbA9RKWFLcaC3u+\nAwuIu5t4L/Ar4DwS1wOORqfzyivrBn4S3DTjQ6kzRMdUhsl+p7a2lu3bt/P007+gsbEYy+3zQ8wD\n9Da2+N13MBUnGOkdA+zAVoMJVoYM1Kf2/v+OYdCeFOmrMdHbjQFmGCfO5kocbU30wHT0xqxYsUpD\noRKFkDNyw24/MGZrFE5QWOzOByO9850BPDYh+C3RgM5f/3+y4L1D6SXuzek8DKGsbJqGQiUaDpcd\nFJJ4vs8S9yA/6h70wM2pCg+4c08qTHHu0AkJD3ogJKoWKjFCLUI0qtHo8YM+9CEVvBCkmZqaGi0r\nC8KYbbM5uOUaz+kf7OvBUduhQ492Ls+jFW5SqFA4ooObc75abH/UHYtoPElu4txgVajWSKRU161b\nl7f+/2TxQpBmOusJIpFSLSmZ6j7fozZ41V5ICguL1WL4w+7hjnTysC9OOJb4N6hrvkJUS0qm+Dd/\nL8i6EGDB61uxeN85XZTJ6I+QbgKbIIjANHVnuNPTyzUetdmg8KSGw8UqUqQWBxRxakwQ6xNxak+g\nLp3oVJ9AqBa7c0dpJFKuCxcu9m/+XpJVISCeyakSc4O8CkzqpFyGf4b0E4Q2r1u3Ts8//8KEN/xR\nTmcvcarNYRoPfS5ROMY9/IHQLHE9xKwEG6C6U9VnsM78yjTZFoKTsZTswedbO+sNBpoQBD1BJDI+\nQa0JFrQodQ/xcG0fAjFWLfqzVM0YDozb452QlDmhCbxBFjJRVHScV31SJBUhSMeI8RgsF3jADnds\nwBKLxZg58wb277+RpqYY8HXi+TrXYfk8/xnz87/g/p6HTXPcAdyEjQJPxrTEf8Xi/e/HokDvBEop\nKPgr999/N//5n49RX7+Viy66oD9v0+Pwk2oSCFIZLl/+hFvfawE2yeVR7MH/NhbOUISN/L6DpT+J\nYtOsAfYBD2Dr/p5MUdERwE5aW1tpaZmDxQdZ9udQ6EYuu+xSP+rbB3JqUg2mDv3vhM8DSh1qaGjQ\nNWvW6Jw5t6lIVOOTVkrUQpwDt2ZE46lLnlSb6NLR4xMYyo9qQcHQdq7Nu+7q3KO0bNkybwCnAbJs\nExQQN4zDmGE8uZNyGf4ZjN7E1MdHeKMJen+Q6uQBd2yi0+WfVPP5T3QPenGCkfvoxx5wOFLXrFnT\nrl3t3a6BK3Twz//tD7IqBFY/Z2K5PbYBt3ZRJqM/gmpyoQ4Bmzdv1nC41Lkm71M4VONJq0Y4T8
54\n9/YPHvxyZ/RuVLjKCc3FCkM0nihL3d+h7YQgsX3Fxcdrx3kBPhwiNbIuBElVlGEh6CrU4f77Hzi4\nnGmQ0dmWOC1VGKnmzy/u4AG6yqlFJU44OssEN1ThG+57RRpfAnWqwnAtLBzW6UPd0NCgy5YtSxh4\ns620dKrW1NRk9DcazHghUAt1aP9gNWgoNCrhwV7lHtIJGl+vKwhuu0NtkGuUe9tHtH2gW+IDXqYQ\n0kjkOLUBryBMYpX77tEKUb3uuuu7bGtXAut7gr6TihAMmkk1Gza8yocfbiW+auORtLTsAY7GxvCu\nw7K5rXbH1mLjfOOBh7CljD7AXJ2HY67OT7pzCjQCDQwZ0kwoVERT09vYJJk9mJdoMjZgPhdQZs/+\nWpdtraioYOnSR4hGp1NaOo1odLqfGJ9N+io9vd3IYE/Q+aqNT6oNUgWG75EHewgrE3a9QrHGjdoH\nND4qPEyDJLZQpLNn36RLlixxQXRBTtChrvcIUqZMVIjqrFmzk263D49ID+T7HOO6ujoKCyuBK7AJ\nKhOxt/Qb2Ns+gk1b3AQ8g01amQD8BbgAS2y1CTgec3aFsIUuzJ9fUHANxx57HLNm3UxT00hszu8f\niPv7r+WRR37A/v37Of3005Ne+7eiosK//XOBvkpPbzcy2BMsXLi4w9u/XOMhCkepuTyDVIYRtdie\nwP9fnmAbHK4wRuPZoG2LRie7HqBazTt0YrvzJSVTvFGbZchnmyAWi/H1r38Te+Nfg63a/m/YYPgo\nbGXGYPEKxXqJXdhI8Hxsfd9CLPPzY5iO/zaJi1q0te0kHJ6ApT78PtbDxM8fOLDdJ7odyPRVenq7\nkaGeoKamRocNO0bjaQuDVIZBFOcdTs8vdDZAmcZj+KvV8vmfkPBmX6WJC92Fw2UHZ5TFvTn2/eLi\nE/1AV45APtsEVVVVtLbuxmJ7gmVL3wV+idkH12Bv+11uKwB+gOXvOYt4AtxNWIDcZIqKoixfvojy\n8nKmTp1KRUUFpaWl7dIc/uAHC5g2bYrP+DAY6Kv0mPDxFeDP2BM1rYeyGXsLmE2Q6PPvGPpcpBby\nEFao0ngcv02IKSiIJpXC3HtzcheytYSriByDpVBbBNyiqhu6Kaup1NUd69ev53Ofu5YPP/w3LH5v\nFObh+TsW0vQ6Zg/cgkWBTsJCnIySkqk89dR8hg8f7t/sA5Ss5R1S1dddA7KajbqqqooDB+qBodgt\nfYgZyv+B5e75CJEzUf0+UELcsLWEVgcObD+o9njyjwFvE0B8BPbyy6+iuflwzBa4CtP5R1NY2MDj\njy9nypQTqKmp4e236/ne904lHK7iwIHtfrQ2z+lRHRKRFzD94uAhTLf4lqo+58pUAzf3pA7NnTv3\n4OdMTKrZsmULJ554Mi0twUDXzykouJnXXvvTxwaw+pJdzpM7dJxUc+edd/ZZHUpLLtJkhSBTNkEi\nK1euZubMGxgyZCxtbTtYuvQRTj/9s/6BH+SkYhOkc7AsJ1apueiiC6iv30p19Y+or98KQGXlJM44\n4zoqKyexcuXqLLfQk2uk6h06B0u1PBIban1VVb/QRdl+6QkSicViVFZOYv/+ahKzOtfXb/U9wiAj\naz2Bqj6jquNUNaqqo7sSgGwRXxi7fVbnurq67DXKk3MM+Nih7ogvjB2P8/ELWns6MqiFwE9e8SRD\nXqxU492hg59UbIK8EALP4CdXXKQez4DEC4En7/FC4Ml7vBB48p6UhEBE7hORLSLyqoj8XERK09Ww\nvpC2LMU5UM9gqaM/6+krqfYEvwGOU9UpWB7S21JvUt8ZTP/UwVJHf9bTV1INm/itqra5jy8DY1Nv\nksfTv6TTJrgS+HUar+fx9AvpmlTzLWyi/XndXMePlHkyStZGjEXkcuBq4LOq2pTSxTyeLJDSHGMR\nORP4n8A/eQHwDFRSnVSzDVui6a/u0MuqekM6Gubx9Bf9FkDn8eQqGRsxFpGviMifRaRVRKZ1U65O\nRDaKSK2I1GSojjNFZKuIvCEic3pTh/v+cBH5jYi8LiJrRKSsi3KtIrLB3cszSV6727aJSFhEVonI\nNhF5SUTG96H9PdVxmYg0uLZvEJEr+1DHUhHZLSKbuinzkLuPV0VkSrrrEJFTRWRPwn38e1IX7mvq\nup424BjgKOB3dJOiEVv1enim6sAEPVhdM4SlnpvUy3rmA990+3OA73VR7oNeXrfHtgHXA4+4/QuA\nVRmo4zLgoRT/358BpgCbujj/BeBXbv+TmOqc7jpOBZ7t7XUz1hOo6uuquo2es1AIfeyRkqzjE8A2\nVa1X1RZgFTCjl1XNAJa7/eXAOV2U662LLpm2Jdb9FPC5DNQBKWYLUdV1wPvdFJkBPO7K/hEoE5FR\n3ZTvSx3Qh/vIhQA6BdaIyHoRuToD1x+DLSoWsMMd6w2HqupuAFX9C5bKujMiIlIjIi+KSDKClkzb\nDpZR1VZgj4iM6EXbk73/c52a8lMRycTIf8d27OyiHalyslNHfyUixybzhVRdpD0OpCXBKaq6S0Qq\ngBdEZIuT+HTW0SPd1NOZXtmVN6HS3csE4HcisklV305XGxPalW6eBVaoaouIXIP1PL3tcXKBV7D/\nwT4R+QK2NtfRPX0p1YS8Z6TyfXeNXe5vTESexrrvdQnnU61jJ7ZEZcBYd6xjO7qsxxljo1R1t4gc\nBjR0Vi7hXt4WkbXAVGzZm1TatgMYB7wrIgVAqar+rZtr9roOVU1UMX6Epe5ONzux++iyHamiqnsT\n9n8tIo+IyIiefq/+Uoc6fXuJyFARKXb7w4B/xtY7SFsdwHpgoohUikgYuBB78/WGZ4HL3f5lwC8+\nVrlIubs+IjIS+DSwuYfrJtO251ydAOdjToDe0GMdTrADZiTR7q4Quv4/PAt81dV3MrAnUDHTVUei\njSEin8CGAHp+YaTiEejBkj8H0wH3Y0vE/NodHw380u1PwLwVtcBrwK3prsN9PhNbpGBbb+tw3x8B\n/NZd4zdAuTv+D8Bit/8pLMFRLbARuDzJa3+sbcCdwJfcfgT4qTv/MlDVh/b3VMe92MunFvg/wNF9\nqGMFtkRQE7AdW0r0WuCahDL/C/NUbaSHRV36UgdwY8J9vAh8Mpnr+sEyT96TC94hjyereCHw5D1e\nCDx5jxcCT97jhcCT93gh8OQ9Xgg8ec//B65Jgc0mFNxhAAAAAElFTkSuQmCC\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "import tensorflow as tf\n",
+ "import numpy as np\n",
+ "from sklearn.utils import shuffle\n",
+ "%matplotlib inline\n",
+ "import matplotlib.pyplot as plt \n",
+ "\n",
+ "\n",
+ "\n",
+ "trainsamples = 200\n",
+ "testsamples = 60\n",
+ "\n",
+ "#Here we will represent the model, a simple imput, a hidden layer of sigmoid activation\n",
+ "def model(X, hidden_weights1, hidden_bias1, ow):\n",
+ " hidden_layer = tf.nn.sigmoid(tf.matmul(X, hidden_weights1)+ b) \n",
+ " return tf.matmul(hidden_layer, ow) \n",
+ "\n",
+ "dsX = np.linspace(-1, 1, trainsamples + testsamples).transpose()\n",
+ "dsY = 0.4* pow(dsX,2) +2 * dsX + np.random.randn(*dsX.shape) * 0.22 + 0.8 \n",
+ "\n",
+ "plt.figure() # Create a new figure\n",
+ "plt.title('Original data')\n",
+ "plt.scatter(dsX,dsY) #Plot a scatter draw of the datapoints\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 68,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "X = tf.placeholder(\"float\")\n",
+ "Y = tf.placeholder(\"float\")\n",
+ "\n",
+ "hw1 = tf.Variable(tf.random_normal([1, 10], stddev=0.01)) # Create first hidden layer\n",
+ "ow = tf.Variable(tf.random_normal([10, 1], stddev=0.01)) # Create output connection\n",
+ "b = tf.Variable(tf.random_normal([10], stddev=0.01)) # Create bias\n",
+ "\n",
+ "model_y = model(X, hw1, b, ow) #\n",
+ "\n",
+ "cost = tf.pow(model_y-Y, 2)/(2) # Cost function\n",
+ "\n",
+ "train_op = tf.train.AdamOptimizer(0.0001).minimize(cost) # construct an optimizer\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 69,
+ "metadata": {
+ "collapsed": false,
+ "scrolled": true
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Average cost for epoch 1:[[ 0.00753353]]\n",
+ "Average cost for epoch 2:[[ 0.00381996]]\n",
+ "Average cost for epoch 3:[[ 0.00134867]]\n",
+ "Average cost for epoch 4:[[ 0.01020064]]\n",
+ "Average cost for epoch 5:[[ 0.00240157]]\n",
+ "Average cost for epoch 6:[[ 0.01248318]]\n",
+ "Average cost for epoch 7:[[ 0.05143405]]\n",
+ "Average cost for epoch 8:[[ 0.00621457]]\n",
+ "Average cost for epoch 9:[[ 0.0007379]]\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Launch the graph in a session\n",
+ "with tf.Session() as sess:\n",
+ " tf.initialize_all_variables().run() #Initialize all variables\n",
+ " \n",
+ " for i in range(1,10):\n",
+ " \n",
+ " trainX, trainY =dsX[0:trainsamples], dsY[0:trainsamples]\n",
+ " for x1,y1 in zip (trainX, trainY):\n",
+ " sess.run(train_op, feed_dict={X: [[x1]], Y: y1})\n",
+ " testX, testY = dsX[trainsamples:trainsamples + testsamples], dsY[0:trainsamples:trainsamples+testsamples]\n",
+ " \n",
+ " cost1=0.\n",
+ " for x1,y1 in zip (testX, testY):\n",
+ " cost1 += sess.run(cost, feed_dict={X: [[x1]], Y: y1}) / testsamples \n",
+ " print \"Average cost for epoch \" + str (i) + \":\" + str(cost1)\n",
+ " dsX, dsY = shuffle (dsX, dsY) #We randomize the samples to implement a better training \n",
+ " \n"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 2",
+ "language": "python",
+ "name": "python2"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.11+"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/5/Ch5_third_example.ipynb b/Building-Machine-Learning-Projects-with-TensorFlow-master/5/Ch5_third_example.ipynb
new file mode 100644
index 000000000..9c0f47af0
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/5/Ch5_third_example.ipynb
@@ -0,0 +1,273 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ " Wine Alcohol Malic.acid Ash Acl Mg \\\n",
+ "count 178.000000 178.000000 178.000000 178.000000 178.000000 178.000000 \n",
+ "mean 1.938202 13.000618 2.336348 2.366517 19.494944 99.741573 \n",
+ "std 0.775035 0.811827 1.117146 0.274344 3.339564 14.282484 \n",
+ "min 1.000000 11.030000 0.740000 1.360000 10.600000 70.000000 \n",
+ "25% 1.000000 12.362500 1.602500 2.210000 17.200000 88.000000 \n",
+ "50% 2.000000 13.050000 1.865000 2.360000 19.500000 98.000000 \n",
+ "75% 3.000000 13.677500 3.082500 2.557500 21.500000 107.000000 \n",
+ "max 3.000000 14.830000 5.800000 3.230000 30.000000 162.000000 \n",
+ "\n",
+ " Phenols Flavanoids Nonflavanoid.phenols Proanth Color.int \\\n",
+ "count 178.000000 178.000000 178.000000 178.000000 178.000000 \n",
+ "mean 2.295112 2.029270 0.361854 1.590899 5.058090 \n",
+ "std 0.625851 0.998859 0.124453 0.572359 2.318286 \n",
+ "min 0.980000 0.340000 0.130000 0.410000 1.280000 \n",
+ "25% 1.742500 1.205000 0.270000 1.250000 3.220000 \n",
+ "50% 2.355000 2.135000 0.340000 1.555000 4.690000 \n",
+ "75% 2.800000 2.875000 0.437500 1.950000 6.200000 \n",
+ "max 3.880000 5.080000 0.660000 3.580000 13.000000 \n",
+ "\n",
+ " Hue OD Proline \n",
+ "count 178.000000 178.000000 178.000000 \n",
+ "mean 0.957449 2.611685 746.893258 \n",
+ "std 0.228572 0.709990 314.907474 \n",
+ "min 0.480000 1.270000 278.000000 \n",
+ "25% 0.782500 1.937500 500.500000 \n",
+ "50% 0.965000 2.780000 673.500000 \n",
+ "75% 1.120000 3.170000 985.000000 \n",
+ "max 1.710000 4.000000 1680.000000 \n"
+ ]
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAbYAAAEoCAYAAAA0ZdRDAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvXuYXFWZ6P17+1Ldlb6mQ6cDnU4q0AQQEkgwLRhnSBQi\nF+WiM44KIjq3bz4z4Xk8esbxO0c4M8cZnU/HkXH8zvMJijjEDIMExKNj5KPDSBQ6mBAuCYRgKjeg\nuwNJIBfpXN7vj712165du7qrq6u6uqve3/Psp3ettfba79q13l6113rf9YqqYhiGYRjlQlWpBTAM\nwzCMQmIDm2EYhlFW2MBmGIZhlBU2sBmGYRhlhQ1shmEYRllhA5thGIZRVtjANokQke+JyN+Ms47b\nROQHpbq/YUxmRGSuiJwSkSr3+aci8okJuneXiLwpIpIlP2/dNdKxga1EiMh6EXlDRGqLUL05Jxpl\niYgkReR3ItIWSt/sBqw5OVQzrB+qerWqTshgoqp7VLVZR3YeNt0tADawlQARmQu8BzgFXFticQxj\nKqHATuBjfoKIXADEsUHBcNjAVhpuBn4N3A3ckq2QiFznfokeEpGXRGSFSz9dRB4SkddFZLuI/Eno\n0joR+b6b9nhWRBYH6jxXRHpF5IDL+2Dhm2cYReUHwCcDnz8JfN//ICJXi8gmpze7ROS2bBU5Xfh0\n4POfishWpzvPichFWa67T0RedXq0XkTeEcirF5Gvu7fLAyLynyJSFzENmnDXHhKRnwOn5f9IjCA2\nsJWGm4F/BVYD7xeR9nABEenBU9b/oqotwO8DSZf9b8BuYBbwh8DficiywOUfdHW3AA8D/+LqrHGf\n/wNoB1YB94rI2YVtnmEUlSeAJhE5xw0Sf4SnT/7a1WHgE05vrgH+DxEZdWZERP4Q+BJwk6o2482m\nvJ6l+E+Bs4CZwCbg3kDe14FFwCVAG/Bf8WZnIP2tcjWwEW9A+5+kD9bGOLCBbYIRkfcAc4D7VHUT\nsAP4eETRTwN3qeqjAKr6qqpuF5HZwKXAX6nqcVXdAtyJN1j6PK6qP3dz+T8AFrr0S4EGVf2qqp5Q\n1V7gJwSmdQxjiuC/tV0BbANe8TNU9T9V9Xl3/hywBrgshzr/GPgHp5eo6m9VdU9UQVW9W1WPqupx\n4G+AC0WkyRmGfApYpaqvqccTrtwwbi3wncCXnB7/Eu9Hp1EAbGCbeG4G1qnqAff5h0T/UusCXo5I\nPwN4Q1WPBtJ2AZ2Bz68Fzo8C9e6X7elAWFHD1xrGVOBf8X4Q3gLcE8wQkXeJyKMiMiAiB4E/J7dp\nvmw6l4aIVInIV0Rkh6t/J96b2GnuqAN+O0o1pwMHVPVYIG1XDjIaOVBTagEqCRGpBz4CVInIqy65\nDmgRkYWh4nvwpjrCvAK0iUiDqh5xaXOAfTmI8Aqe8gaZA7yYi/yGMVlQ1d0ishO4Cm92A1LTfPcC\ndwDvV9XjIvINYEYO1WbTuTAfx5vuf6+TowU4gDcVuh/4navn2RHqeBWYLiLxwOA2h9SUpTEO7I1t\nYrkBOAGcB1zojnOBX5I+lQhwF/ApEVkuHmeIyDmquhf4FfD3bkF6Id4Uykgmy/7aw5PAURH5ryJS\n49blPoD31mgYU41P4w0u/sDg9/NGvLeh426tOjzVH+lHhjel/znf2EpEzsriPtAEvA0cEJEG4O9x\ng6qb/v8e8I/OyKtKRC4JuPWIK7cbeAr4HyJS65YozJCrQNjANrHcDHxXVfep6oB/4Bl3fByo9guq\n6ka8ufp/Ag4B6/F+0eHKzsN7A/sR8N/delk2fKU7jqc8V+P9svwW3iL7S8FyhjGJCfqg7fTXw0J5\n/yfwtyJyCPhveMZWkXWE6rsf+DKwWkTeBNYC02HYkfsLrug9eMZb+4Dn8H5oBvkc3tvaRjzjk6+Q\n+l8bvPfH8QxMXgf+OwHLTmN8SK6BRt0azVPAXlU13yvDKCCmX4ZROMbyxnYrsLVYghhGhWP6ZRgF\nIqeBzZmYX403B20YRgEx/TKMwpLrG9s3gM9jazCGUQxMvwyjgIw6sInINUC/qj6NZ9GTzaLIMIwx\nYvplGIVnVOMREfk74CY8M/U4nqnrA6p6c6ic/do0KhZVzWtAMv0yjNEZq37lbBUJICKX4e1dmGG1\nJSKjRGMoX26//XZuv/32UotRMiq9/SKS98AWqsf0axJS6f271OSjX+bHZhiGYZQVY9pSS1UfAx4r\nkiyGUdGYfhlGYbA3tgKwbNmyUotQUiq9/UZ5Y/176jGmNbYRK7I1AKNCKdQa2yj3MP0yKpJ89Mt2\n9y8ig4OD9Pb20t/fz+WXX855553H4OAgyWSSRCIBQDKZZGhoiB07djBjxgxef/11enp60soODQ3x\n2GOPMTAwwLFjx6iurubqq68mFosBsGjRItrb2xkcHGTz5s0cPHiQN998k4GBAbq7u1mwYAF79njR\narq6unj22WfZsWMHM2fOBBixXH9/P4sXLyYWi9HY2Dic798zqs2bN2+OLBNse9S1xsQT7DOtra2R\n3+u2bdvo6+vL6JfZvsdgH+jq6mLPnj08//zz7Nu3j2XLlhGLxdLuB7B582Z2797NwMAAM2fOZM6c\nOQwNDbF+/Xo6Ozs5//zz6erq4vDhw/T397Nx40aWLFlCR0fHsC499NBDvPDCC9xwww0sXbo0Te7T\nTjuNZDJJY2Mjhw8fTpPdL9fd3U0sFhvO89sZvAaIbHuUXls/LyGqWpDDq8rwWb16jdbUNCmcpTBN\nIaZXXHGVxuNt2tKyWGOxFq2tbdRY7DyFuMJ09/dshbiuWOGVra3tcOl+PXXuqHXp3RqLtejKlau0\ntrbJlUmvC2IK3S6vOpRXm1GfV64mrVxV1Wnu8yyFaVpb26irV6/JaHMs1jJcR7DM6tVrhtsej7dl\nXDuVcX2/YLoUdRRDv1avXhPoM2dFfq8rV97qvvf5af0y2/eYXmd3oL/69/D7nHe/6mrvyOyzfj89\nPdD/4ypyRpq+VFef4/pc+j1mz06kyV1dPU3j8QUKcY3H5w3LHm5fbe1MjcfbdOXKWzUeb9N4/Ex3\nzQKtrW3SWKwlo+3Bvp2tjJE/+ejXpFa8qcrAwIDW109X2KKg7m+rQn0obbrCgDuPK/QG8uIKd7i/\n4WuaI+qKK7QorM1yzUj3aQhd0xtRR9zV3ebyp2t9fasODAwMtzkeb8u4b319q27dujUjLx5vG752\nqjMVBzavj7a6vpHeT/3vdevWrSP0g8zvMbPOAdcnw/eIK2wN6EW4/4X7WzC9LaKPh/trVP8N6oDX\nhz1Zo+57d0BP/PsPZLQjHm8L9e3oMuXSz0tFPvplxiNFIJlMItIJ+LFDF+JFnGkPpSWApDufDTQE\n8jqBLS49fM1MdywMlZ+FF+Gma4T7dIbuMxsvfFXwPg0R953t6p7r8hNUV88kmUwOt7mqKvO+1dUz\n6evrIxZLpOXV1s4dvtaYeJLJJNXVH
XjRj9L7qf+99vX1kdmXOvH6gfc5+D1m1pnE65Phe3QCfaT0\nItz/gvdJhNLnktnHGyI+R+lNMlBHA16w66h+/ltXX0Pg/smMdtTWzg317egy1s8nHhvYikAikUB1\nH/CMS3kGL3zTYCgtiac4zwB7gSOBvH14cUj3Rlwz4I5nQuVfA1rwAgFnu8++0H32AodD9zkScd+9\nru5dLj/JyZMDw2sKiUSCU6cy73vy5AA9PT0MDSXT8o4f3zV8rTHxJBIJTp7sB3YS7qf+99rT00Nm\nX9qH1w+8z8HvMbPOBF6fDN9jH9BDSi/C/S94n2QofReZffxIxOcovUkE6jiCF5Iwqp+f6eo7Erh/\nIqMdx4/vCvXt6DLWz0vAWF/xsh3YVGQaUWts/vpEc/OiiDU2f1qkW4NrGTU17Rq9xlYznJ5aY2t0\nZVrS6vLW2Pzrq0N54TW26HJVVTPc5w4dfY0tc73GX4dobl5UdmsPTMGpSFV/PczvM9nW2FZp1Npv\ntu8xs86YO0ZbYwv32SpNrekG19j8NTdPX6qr57s+l36Prq5EmtzeGtsFCnGtr08E1tjS21dT0+7W\n2FZpPN6m9fVePfH4BU5fWzLaHuzb2coY+ZOPfpm5fxExq8jKsIqcyub+ZhVpVpGTnXz0K5dNkGfj\nhULvAE4B31HVOyLK2cBmVCTjGdhMvwxjZIo1sM0CZqnq0yLSCPwGuE5VXwiVM8UzKpJxDmymX4Yx\nAkVx0FbV1/BWgFHVwyKyDc9k6YURL5xAxjrFFZzi27FjR9pUxdDQED/96U85cOAAN954I0uXLh2e\nUnz00UfZt28fy5cv5xOf+ASQmv5YtmwZv/zlL3nsscf40Ic+xEUXXTQ8tdLd3c3x48dZs2YNr776\nKsePH+fd7343b731FhdffDE9PT3s2LFjeCqksbGRZ599ll/96lcMDQ1x9dVXD0+5hNvnT/vs3r2b\nY8eODU8bZisbfk65puX7rI2RKaZ+Bb+r7du3c+edd3Lw4EHa29upqqri0KFDdHR0MHfuXHbt2kV/\nfz8tLS3MmDGD/v5+fv3rX9PW1sb555/PkSNHOHjwID09PZxzzjm0trbS1dXFhg0b2LBhA62trVx6\n6aUsX758uF9s2LCBdevWZUwZBqfsoqato/pYeDo03MZcpsmzTTkWE9OXEjGWBTlSNrONEXkFWioc\nG2N1/PXL19bOCSwa12lNTVPAkMNfoK7RBQsWaXW172fjL353qMi0gMFF0Il0mlvIro5Iqw0tovvX\nVWumI7Rfdpb6jqjh9qWcYX05vDp8J9Nw2fBzyjUt32ddKVAg45FC6lfwu6qqasjoT6l+6RsP1QXy\n6yINN1L9NbhJQF1afdXV03T16jV6xRVXpfXJqqrT0pyXY7EWZzSS7swf1cfCTtQrV65Ka6PveD3S\n5gHZHLGL2YdNXwpDPvo1FqVrBJ7CmyaZFFaRUU7BIzlEpsr36kiOlykn5Hp3tGbJz+ZEOt1dtzaQ\n5lt9ZavncR3ZQTS9fSln2KaIej25gmXDz6m+vjXy2Xl1Rt+vnJ2sx0MhBrZC6le0s3xUX/X7uO9E\n3ev6emtk30vvr9l0Y7r7sRW1gUBzhJ4MDJ/X1TVnyF1X1xzZvx9//PGINkZvHjCSo3mx+rDpS+HI\nR79y2itSRGqA+4EfqOpD2coFg/EtW7as6LtiJ5NJYrEEx45lOkRGvfanygcdLzeS6UA6F885sx1Q\nYEaWfN+JNMohepCgI6tnG6Aj1LMO+B9kdxBNbx/gnGGPAfVkOpkeSisbfk7V1TPxAjan0qqqZgNv\nZ73fWJ51ObN+/XrWr19fsPoKrV9RepFysE+Q7sjcDkxz5+HPwbLBTQQ6gVfx9GI64b5/6tQbeHYw\nYYdrySjrvaAuARKIvE5VVXp9Iq1E9e9169ZFtNHXpwTV1UeG+2a0o3m6jhS6D4/1f5ORoiD6lcvo\nh2e19Y+jlJmAsTsde2OzN7bJAON8Yyu0ftkbm72xlRP56FcuSrcUOAk8DWwGNgFXRpSbsIYGGavj\nb2qNrUtTDqGx0Bqbv7ZQowsWXOTWAqLW2KpD5XNdYzszdF21ZjpC17iy3ibI1dXzs6yxNWr6BsYp\nJ9Oo9bHgc8o1Ld9nXSmMZ2Arln4FvytvjS29P6X6ZY3rm3WB/NhwP0/v935/DW4SEEurz19jW7Hi\nqrQ+WVU1I815ObXGlu4YHtXHwk7UmWtsFwTkzbbGFu2IPRFrbKYv4yMf/SoLB22zijSryFIyWR20\nzSoyHbOKnJoUxY9tDDcv2cBmGKVksg5shlEOWKDRMmekbXvuvfde7rvvPj7ykY+wYsWK4V+xzz77\nLFu2bAHgwgsvZPny5ezfv3/4l+u+ffvo7+9n3rx57Ny5k3g8TnNzMwD79u3jySefRETo6emhqamJ\n5557jkOHDrFw4ULOP//84S2Rent70+7T2dnJv//7v7N3715uueUW3vWud5FMJtm+fTu9vb1cf/31\nfOADH8i5zVFbIRmFI/icN2zYwKZNm2hqauKpp55i27ZtHD58mLfffpvm5mY6Ojpobm5m4cKFvPOd\n72TOnDk0NDQMz35EbbuV65tLLuWi6g6++e3fv59HHnmEjo6OtDdIo4IY69xltgPbBLmoBH1i/A2U\nff+Y6dM70tYPoMr59vh+RuENaINlOyLSqjXsn5RaV6kLrMmcpTU1TSoS5fOUGdBUpDNt3WXBgoty\nanNUgMjJBFN0E2Sf1HM+M/Sd12rm5thR/ph+n/D83MLBSP2gnaP5c+Xi9xUukx5gt9v1xVSfr6lp\nmnT9xRgb+ejXlFC8SidbEE/PouzLGm3x9VX1LNOigjz2Bj5HBVus12gLtpbQ4Vu0tWi6dVxvFpla\nMtIefvjhMbTZs3qbbNZlU3lgS7cUDls4+taPwbQo697WQB/wv3vfIjizL0R9f7lYEWa39vT7VXRg\n0/r66ZOqvxhjIx/9snhsUwDfJybaB+hJooM0/hDPJykqyGMw0GhUsMXTXLnw/TpCR9LldbnPIwV6\n7CTTH7CTBx98cAxtngs0WPDGApJ6zg3AGWR+Z+HguB1kfrdzXPpM0v07ITMIaHTwzajvO1wuuk/4\nAXazBzYV6bT+UmHYwDYFSCQSGYE6U4ET30V0kMaP4TmJ74zICwYajQq2uN+VC9+vP3QkXN4e93mk\nQI/7gNcz0q6//voxtNkLEGnBGwtH6jkfAV4h8zsLB8ftJ/O73e3SB1w9wWCk4SCg0cE3o77vcLno\nPuEH2M0e2FR1n/WXSmOsr3jZDmwqsqgEfWL8NTbfP6atzQ9GmgrS6Pn2+H5GUWtsftmOiLRqDfsn\njb7GFvZ5qgrVWRsIEjnWNbbMAJGTCabwVKRq6jn7QTXTfdxqNHONzfeJG3mNze+fftDO0fy5cvH7\nCpdJD7B7luuLqT5va2xTn3z0y8z9pxBmFTk5rSLLwdzfrCKNyYr5sRlGCSiHgc0wJiv56FdOa2
wi\ncqWIvCAi20Xkr/ITr3wp5Ia4U5FKb/94Mf2a3Fj/nnqMOrCJSBXwLeD9wPnAx0Tk3GILNpWo9I5f\n6e0fD6Zfkx/r31OPXN7YeoCXVHWXqh4H1gDXFVcsw6gYTL8Mo8DkMrB14tnr+ux1aYZhjB/TL8Mo\nMKMaj4jIh4H3q+qfuc83AT2quipUzla2jYolX+MR0y/DGJ2x6lcumyDvw9tawGe2SxvXjQ3DAEy/\nDKPg5DIVuRHoFpG5IhIDPgr8uLhiGUbFYPplGAVm1Dc2VT0pIiuBdXgD4V2quq3okhlGBWD6ZRiF\np2AO2oZhGIYxGRjzJsgicpeI9IvIM4G06SKyTkReFJGfi0jLSHVMZbK0/x9EZJuIPC0iPxKR5lLK\nWEyi2h/I+y8ickpE2kohW7HJ1nYR+Uv3/T8rIl8p8D3NebuEiEhSRLaIyGYR6Su1POVOocaXfHb3\n/x6eM2mQLwCPqOo5wKPAX+dR71Qhqv3rgPNV9SLgJSqv/YjIbOAKvC34y5WMtovIMuCDwAJVXQB8\nrVA3M+ftScEpYJmqLlLVnlILUwEUZHwZ88Cmqo8DB0LJ1wHfd+ffB6JjkZQBUe1X1UdU9ZT7+ASe\nZVtZkuX7B/gG8PkJFmdCydL2vwC+oqonXJn9BbylOW+XHsHCe00YhRpfCvWFzVTVfifYa3gRByuV\nTwM/K7UQE4mIXAvsUdVnSy1LCZgP/L6IPCEivSLyzgLWbc7bpUeBn4vIRhH501ILU6GMeXzJxY8t\nHyrSIkVE/i/guKquLrUsE4WIxIEv4k1DDieXSJxSUANMV9VLRGQJcB9wZollMgrHUlV9VUTagV+I\nyDb3VmGUjlHHl0K9sfWLSAeAiMzCC6VbUYjILcDVwMdLLMpEcxZe6OItIrITbxr2NyJSKW/te4AH\nAFR1I3BKRGYUqO6cnLeN4qGqr7q/g8BavOlhY2IZ8/iS78AmpP8q/zFwizv/JPBQnvVOFdLaLyJX\n4q0vXauqb5dMqoljuP2q+pyqzlLVM1V1Ht502SJVLdcfN+G+/yDwXgARmQ/UqurrBbqXOW+XEBGZ\nJiKN7rwBWAE8V1qpKoJxjy/5mPuvBn4FzBeR3SLyKeArwBUi8iLwPve5LMnS/n8GGvGmKjaJyLdL\nKmQRydL+IEqZTkVmaft3gTNF5FlgNXBzoe6nqicB33n7eWCNOW9PKB3A4yKyGc8o7GFVXVdimcqa\nQo0v5qBtGIZhlBVmxmoYhmGUFTawGYZhGGWFDWyGYRhGWWEDm2EYhlFW2MBmGIZhlBU2sBmGYRhl\nhQ1shmEYRllhA5thGIZRVtjAZhiGYZQVNrCVGSJymYjsGb2kYRj54vbvPOWCwRqTDPtSpgAisl5E\n3hCR2hwvsX3SDGOMmJ6VDzawTXJEZC7wHrwQ9deWWBzDKEtMz8oLG9gmPzcDvwbuJhW6ARG5WkSe\nF5E3RWSPiHw2cI2IyGdFpF9E9rlYcYZhZCebntWLyNdFJCkiB0TkP0WkrkQyGjlSrAjaRuG4Gfga\nXmyuJ0Sk3QU9vBP4A1X9lYi0APMC18wCmoAz8GJI3S8ia1X10ATLbhhThWx69nXgPOASoB94F95b\nnTGJsTe2SYyIvAcvgvJ9qroJ2EEqQvcQcL6INKnqIVV9OnDpEPC3qnpSVX8GHAbOmUjZDWOqkE3P\nRESATwGrVPU19XhCVY+XUl5jdGxgm9zcDKxT1QPu8w/xIsgCfBi4BtglIr0ickngutdVNfir8ihe\nIFTDMDLJpmenAfXAb0slmJEfNhU5SRGReuAjQJWIvOqSY0CriCxQ1d8A14tINfCXwH14vzoNw8iR\nLHpWB7QApwPHgLOAZ0sjoZEPNrBNXm4ATgAXAsGpj/uAT4nIU8BPVPVNEXkLOFkCGQ1jqjOSnt0M\nfBf4hoh8Am+NrQf4jSsjEyinMQZsYJu83Ax8V1X3BRNF5F+A/we4APiWcxB9kdTaWxTmb2MY0Yyk\nZ98Ezgb+J55RSQOwBXi/K2Z6NUkR1dy+G/cP9Clgr6qan4dhFBDTL8MoHGMxHrkV2FosQQyjwjH9\nMowCkdPAJiKzgavxfKcMwyggpl+GUVhyfWP7BvB5bE7ZMIqB6ZdhFJBRjUdE5BqgX1WfFpFlZLEE\nEhFTSqNiUdW8LORMvwxjdMaqX7m8sS0FrhWR3+I5Li4XkXuy3Lwij9tuu63kMlj7S3eME9OvSX5U\nev8u9ZEPow5sqvpFVZ2jqmcCHwUeVdWb87qbYRhpmH4ZRuGxLbUMwzCMsmJMDtqq+hjwWJFkmbIs\nW7as1CKUlEpvf6Ew/ZqcWP+eeuTsoD1qRSJaqLoMYyohImiexiNjuIfpl1GR5KNftqVWERkcHCSZ\nTJJIJABIJpM0Njby7LPP0t/fz+LFizly5Ai7d+9m586dHDlyhJkzZzJ37ly2b9+OqtLX18fs2bM5\nefIkZ5xxBjfddBOnnXYayWSS7du309vby/XXX88HPvABtm3bRl9fH93d3cRiMYaGhti0aRMHDx7k\n0KFD3HDDDcyfP5/e3l527NjBzJkzaW5uprW1lUWLFtHe3p5V/nCeYUxFonTS16Ply5czf/58EokE\nTz75JA8++CD79+9n8+bN/MEf/AFf+MIX0sqfccYZvPLKK1x//fUAPPjgg1x88cVMmzaNnp4egDR9\n7O/vZ+PGjaxYsYK2tjb6+vqoqalh+/btrFixgvnz52fVN9PFMVJAyxU1UqxevUbj8TZtaVmssViL\n1tY2ajy+QCGuUKcwy51Pd3/PUpimEHPHWa6cnxd36dVaU9OkIp0urVshrm1t7e7zGQrxQL5/7ekK\ndSoyLaLuaVpb26irV6+JlD8eb0vLM9Jxfb/YlmET1p5yJUonoSGgH3GtqpqvUO/SGt3fs93fGpcf\nD+jv6QrVaXV4Olnrzv3yTRF53Wl1idRF6lul62I++mWKVwQGBgY0Hm9T2KKg7u90hQF33qLQprDW\ndexwuRaFre48mNfilK7XXR/Miyvc7dKj8lvdscXJEa57utbXt+rAwECk/PF4mw4MDJT60U5KbGCb\n/ETr5DSnN0F98XXj7gjdjDu93BLQs6aIOrZmuXZtljy/rlT9vr6ZLuanX2YVWQSSySSxWAJY6FIW\nAgkg6c47gC7gEDA7olwH0AfMC+V1Ae14m4yH6+/Ei4eYyJI/E5jrzpMRdSeorp5JMpmMlL+2di7J\nZDKfx2EYJSdaJ+vx9C+oL0k83fgtmbo5G08vF+LpUgPQFFFHH56uhvXzUJY8v65U/b6+mS7mhw1s\nRSCRSDA0lASecSnP4ClMwp33A3vwYhnujSjnh33aGcrbAwwCR1y5YN4+4EyXHpU/AOxy54mIupOc\nPDlAIpGIlP/48V3D6xKGMdWI1snf4elfUF8SeLpxJpm6uRdPL5/B06UjwFsRdfTg6WpYP1uy5Pl1\nper39c10MU/G+oqX7cCmStLw58WbmxcF1tgu0NQaW
4c7b9Hsa2wxTV8niylUuTW24Hx+cI3tdDdf\n7+efqal5/NiY19iamxdV5Lz+WMCmIqcEUTrp6UBqnauq6uyAbjRo+lpYjcuPB/R3lkJVWh2eTtVo\n+vpcY6CumlC9qTW2KH2rdF3MR7/M3L+ImFVkZWDm/lMHs4qceuSjX6MObC6kxj14Cz+ngO+o6h0R\n5UzxjIpkPAOb6ZdhjEyxBrZZwCz1dh9vBH4DXKeqL4TKmeIZFck4BzbTL8MYgaI4aKvqa8Br7vyw\niGzDM/F5YcQLC0QxXsG3bdvGI488QkdHBwsWLODw4cNp9YfvOTg4yObNm3n++ed56aWX6O7u5oIL\nLmBoaIgf/ehHAHz4wx+mo6Mjo57Nmzdz8OBBAN58800GBgY4ceIE27ZtA+Ad73gHv/vd79i/fz+7\nd+/mxIkTXHzxxXR2drJ48eK0KcUTJ05QU1PD5ZdfDnjTHDNmzGDnzp3E43HmzJlDV1cXe/bsAUg7\nX7RoEZCaDj18+DBDQ0Ps2LGDnp6e4enNkZ5zob+LSp5e8Sm1fk0kxeg/mzdvBkibSr/33nu57777\n+MhHPsKNN96YVd//+Z//mX/7t3/j0ksvTZvev+mmm1i6dCnr1q1j1qxZvPbaayxZsoRYLMbu3bv5\nxS9+wcv51t/NAAAgAElEQVQvv8ySJUt46623uPjiizn77LPZuHEjDQ0NbN++neXLlw/L09XVlVXf\nfF2M+v8TlReFvwTR09PDeeedV9RnPmUYy4IcKZv1xoi8wqwUBiiGY+LKlbdmGGvE4/OG6w/fc+XK\nVRqLtUQYXNS5wzfGiGtVVUtaPbW1TYEys0L39a+vDfwNLjb7RiUNoXTfoTumnv9LMK/WHdPcwnTd\n8HlNTZMzYPGMSaqrZweurdOamqYRn3Ohv4tycjqlQMYjE61fE0kx+o+nl91pxk+zZ89L04lp05pD\nOhsPbJQQ05RTddgZOxbQNT+tOqCnQeOtOk03BvHzGobzqqvPCeR5+ubLEfX/x9fTeHzBiM8r9f/M\ncwRfuXJV0Z55qchHv8aidI3AU3jTJEW32iqGY+LWrVHOkdPVc1zu1fr61ggnTt/5MujUPKDQrJlO\nzp6TdH19q9bXt7rBabpGO0z7jtj1gcErXNcdWdLXBq4N59VpuqNpML9ZMx24M8uFn3Ohv4tyczot\nxMA20fo1kUxE/4HpWlPjO1xH6VGUPvgbJUQ5YzdHpPkD2Mj6k+5w3RDKC26UkF4+9f8n8/9F1POK\n/n8W161bt5aVjuWjXzntFSkiNcD9wA9U9aFs5W6//fbh82XLlo1rV2zfMfHYsUzHxHxfqfv6+oh2\niD4CNFBdPROIk+mU2UC6U/NGvLX+JqKcpKurZ3LypOK5CTYQ7TDt3/cocBzPxyV83y0R8s7Gc/Ts\nAt6MyPPTNpLphD0zQp7McuHnXOjvohjf7USyfv161q9fX7D6SqFfE0kx+k9VVdjJOcGpUzuAWWTq\n5BbgEjL1oQNP36OcsWfiBTMPprUArYymPymH6048P7dwveH/MV751P+fzP8XUc/L+38Wfg6z6evr\n4x3veMeU1bGC6Fcuox+e1dY/jlKmoKO0vbHZG9tUgXG+sZVCvyYSe2OzN7bxkI9+5aJ0S4GTwNPA\nZmATcGVEuYI3qBiOiStXrtLwGlt9fSJjjtu/Z/Y1Nt+ROrXGJtIcWmNr1NQ6V0fovv4aW42rJ7wx\nqr/GFg+lt2rmGoCfV+vq82VKyeevsdXXJ9Sb8w9uohzTmpqmEZ9zob+LcnI6Hc/AVkr9mkiK0X9S\neplaY+vqSqTpREND1BrbBQEd8te+w87YsYCu+WlVmnKuDq6ZhzdS8POmaWqNzd8M+fRhffPliPr/\n4+tpPH7BKGts/v8zbx0wao1tqutYPvo16R20zSrSrCInO+agnRtmFWlWkflQFD+2Mdx8yiueYeSD\nDWyGUTzy0S/bBNkwDMMoKyyCdgEJv/Zv27aNtWvXMjg4SHt7O5dddhmxWCxy78gtW7YwODjI8ePH\nWbhwIeeffz5DQ0Ns3LiR+fPns3XrVl555RXmzZvH22+/DcC8efMA2LlzJw0NDVx22WUcOXIE8KY/\nNmzYwIYNGzjjjDO49NJL2blzJydOnGDXrl0cOnSIpUuXct1110VOwQbly2U6ZLzPyqgswtOIEN3f\nonQquP9icO/VtrY2tm/fzuHDh3nsscfo7Oyko6ODwcHBYZ3x9e+CCy4A4NixY1x++eW8/PLL3H33\n3cyYMYMlS5YM76Hq62Bwf0f/3oXY09H0oEiMdVEu28EUX9weL2FnyBUrrtKwAzfUaG3tHK2tbdJY\nrGXYCTM6WrZvVHK2ppy4wzvzh52+/cXwTOfxaIfTGhWpy3BMT8mX6UBajGc1VRe1fbDd/cdEavMC\nb/OA6uqGNH3wnZJXrlyV1k+uuOIqDTojV1UFI2OE+/h0DUewSOlFnaYMts7SlOFWUI9qAvWeruGI\n2LW1M8cd6brc9KBY5KNfpngFINr82LdsDKc1qu9eMLLJcFy96LyPq2fWPz1UNni9f8109cyUo1wR\n/J0VMt0DYrHGgPzZzZcLYS5cTmbIPjaw5c7AwIBzhcmlP/um9aop8/ls+Vs0Fc16rdOZsFm972Lj\nH/UKD0fUO93l+dGyo0z/vfvkG+m6HPWgWOSjX7bGVgCio/N2Eu0s2kDKgdN3rkyS6eTZCUwH1uE5\ndc4Llc3m9N2K53garq+VKGdOaOHUqWkBh9coWTx5CxG51yICVzbJZJLq6mD/TJK9P/ubI0AqwnSU\nPvmfp5PawCAYMd7PT+Dphn+0Aw9G1JsATiMVLbuZaN05lHeka9OD4mIDWwGIjs67D29v23DaEVIR\nc/1IugkyI1rvAw4AK/CiX+8MlY2Kkp0EDuJF4A7Xd5DMyL17gUNUVR3l1KmRZPHkLUTkXosIXNkk\nEglOngz2zwTZ+7MfmRpSEaaj9Mn/fMCVaSE9Yryfn8TTDf8YBK6PqDcJ7CcVLftNonWnJe9I16YH\nRWasr3jZDspkqiRfws6Q3hpbugO3t8bWpbW1jRqLtQw7YY68xtYdOA86Xsc10+k75tLqIu4d5XCa\nucbW3LxoWL4oB9JiPKupvraATUWOidTmBV7/rK6elqYPvlOyv8aWrlOpNWKRoPN1uI+3arrj9Ehr\nbDWaqUfBNbZZGo6IXVPTPu5I1+WmB8UiH/0yP7YCYlaR+T+rqYz5sY0ds4rMr3wlUjQHbRG5Evgn\nvKnLu1T1qxFlykrxDCNXxjuwmX4ZRnaK4qAtIlXAt4D3A+cDHxORc/MTsTwp5E7vU5FKb/94MP2a\n/Fj/nnrkYjzSA7ykqrtU9TiwBriuuGJNLSq941d6+8eJ6dckx/r31COXga0TzyTIZ69LMwxj/Jh+\nGUaBMXN/wzAMo6wY1XhERC4BblfVK93nL+CZX341VM5Wto2KJV/jEdMvwxidgltFikg18CLwPuBV\nPHf8j6nq
tnyFNAzDw/TLMArPqLv7q+pJEVmJt7eTb45sSmcYBcD0yzAKT8EctA3DMAxjMjBu4xER\nuUtE+kXkmdFLlxciMltEHhWR50XkWRFZVWqZJhIRqRORJ0Vks2v/baWWaaIRkSoR2SQiPy5S/VeK\nyAsisl1E/qoY9zBSZNNpEZkuIutE5EUR+bmItJRa1nIlrFMikhCRJ5wO/FBERp1pLIRV5PfwnEsr\nkRPAZ1X1fOBS4DOV5Fyrqm8Dy1V1EXARcJWI9JRYrInmVmBrMSo25+2SkE2nvwA8oqrnAI8Cf11C\nGcudsE59Ffi6qs7H2839j0erYNwDm6o+jretdsWhqq+p6tPu/DCwjQrzQVLVo+60Dm/NtmLmtkVk\nNnA1cGeRbmHO2xNMFp2ejffcv++KfR8vLIBRYLLo1HuBH7nz7wM3jFaP+bEVCBFJ4L21PFlaSSYW\nN22wGS9Gzy9UdWOpZZpAvgF8nuIN5ua8XUICOv0E0KGq/eANfngB34zCk6ZTIjIDOKCqp1z+XuCM\n0Sqxga0AiEgjcD9wq/uVVzGo6ik3FTkbeJeIvKPUMk0EInIN0O9+3Ys7jDIhQqfDP14qZmZioojQ\nqeGssdY16iKcMTJuIfN+4Aeq+lCp5SkVqvqmiPQCV1KkNadJxlLgWhG5GogDTSJyj6reXMB77APm\nBD7PdmlGEcmi0/0i0qGq/SIyCy+SqVFYMnQK+CbQIiJV7q0tJx0o1BtbJf9i/S6wVVW/WWpBJhoR\nOc23DhOROHAF8EJppZoYVPWLqjpHVc8EPgo8WuBBDWAj0C0ic0Uk5u5TFOtLI40onf4xcIs7/yRQ\nsT9ii0UWnboJ6AX+0BXL6dkXwtx/NfArYL6I7BaRT423zqmCiCwFbgTe60zeN7nYWpXC6UCviDyN\nt7b4c1X9aYllKhtU9STgO28/D6wx5+3iMoJOfxW4QkT8XWK+Uko5K4wvAJ8Vke1AG3DXaBeYg7Zh\nGIZRVpjxiGEYhlFW2MBmGIZhlBU2sBmGYRhlhQ1shmEYRllhA5thGIZRVtjAZhiGYZQVNrAZhmEY\nZYUNbIZhGEZZYQObYRiGUVbYwGYYRlkjIr0i8ukS3HeniLx3ou9r2MA2JRCRpIj8TkTaQumbReSU\niMzJdq1hVApOT46KyJsi8qqIfFdEGkotlzHx2MA2NVBgJ/AxP0FELsAL7WCbfRqGhwLXqGozsBh4\nJ/DfSiuSUQpsYJs6/AAvZIPPJ0mFqkdE2kTkYRE5JCJPisjfisgvJ1xKwygtAqCqrwL/AVyAN+Al\nRORx9zb3H8HZDxG5REQ2iMgBNwtyWSCvV0T+ZoRrrxWR50TkDRF5VETOjRRKZImIbHT6+aqIfK1Y\nD8CwgW0q8QReMMtzRKQK+CPgXwP53wbewgtZfwvewGdvc0ZFIiJdwNXAJrzB7mN4OtEO1AGfc+U6\ngZ8Af6Oq0136j0RkRqC6bNfOB1YDq1zez4CHXaDSMN8E/klVW4CzgPsK2V4jHRvYphb+W9sVwDbg\nFZdeA3wI+JKqvu1idn0/ugrDKGseFJE3gP/EC1D5dy79e6r6sqq+jTeoXOTSbwT+t6r+HEBV/z/g\nKbxBkVGu/QjwE1V91MXO+xre8sC7I+QawgsaO0NVj6pqX6EabGRiA9vU4l+Bj+O9kd0TSG8HqoG9\ngbQ9EyeWYUwarlPVNlWdp6p/6QYjgNcCZY4Cje58LvARN5X4hogcAJYCswLls117BrDLz1AvuOUe\noDNCrj8GzgFecEsF1+TZPiMHol6ZjUmKqu4WkZ3AVUDQfHkAOAHMBna4tK4JFs8wJgMyxvJ7gHtU\n9c/zuNcreGt4QbpI/4EJgKq+jPejFBH5MHC/iLSp6rE87muMgr2xTT0+Dbw3oBACnAQeAG4Xkbhb\nwL65VAIaxhTiX4EPisgKEakSkXoRuUxEzsjh2vuAa0RkuYjUiMjngN8Bvw4XFJEbReQ09/EQ3vr3\nqUI1wkjHBrapwbARiKruVNVNEXl/CbQCr+Ktr60G3sYwKodsxlJZjahUdS9wHfBFYBBvavFzpP43\njnTtduAm4Fvu2muAD6rqiYhrrwSeF5E3gW8AfxSYJjUKjHjTwjkU9CzxngL2quq1RZXKGDci8hWg\nQ1U/VWpZjNEx/TKMwjGWN7Zbga3FEsQYH84NYIE778FbrH6gtFIZY8D0yzAKRE4Dm4jMxjN/vbO4\n4hjjoAl4QEQOAz8E/m9VfbjEMhk5YPplGIUlV6vIbwCfB1qKKIsxDlT1KeDsUsth5IXpl2EUkFHf\n2Jy/Rb+qPo1ngTdWc1rDMLJg+mUYhWdU4xER+Ts8y58TeF71TcADqnpzqJxt32RULKqa14Bk+mUY\nozNm/VLVnA/gMuDHWfK0UrnttttKLUJJqfT2u74/Jl2KOky/oqn0/lXp7c9Hv8yPzTAMwygrxrSl\nlqo+BjxWJFkMo6Ix/TKMwmBvbAVg2bJlpRahpFR6+43iUun9q9Lbnw857zwyakUiWqi6DGMqISJ5\nG4+M4R6mX0ZFko9+lfXu/oODgySTSRKJBO3t7Wnpvb299Pf3c/nll/Pyyy9zxx13APDJT36S9vZ2\ndu/ezcDAAN3d3SxYsIA9e/bwxBNPsHHjRs466yyWLFnC9u3bWbFiBUuXLh2uc8eOHcycOZM5c+bQ\n1dXFhg0b2LRpE4sXL+a6664blsOXrb+/n/Xr13PuuecO50fJvW3bNvr6+uju7iYWi9HY2Mjhw4cz\n2mYYhSSqLwbTADZv3szBgwdpbW2lq6uLu+++m0cffZT3ve99LFiwgJ/97Ge0tbVx4sQJZs2axdtv\nv83Ro0c5cuQIx48fZ+nSpcTjcX72s59x8cUXc+WVV7Jnjxd1aWhoiI0bN7JkyRI6OjpGlQNg0aJF\nkTqR7f/BaHnGFGSs1ibZDiaZ1dbq1Ws0Hm/TlpbFGo+36erVa4bTq6riCnGFs93fWOC8WmGawlnu\nc4dCnTuC6TUK3QpxXbDgokCdZ7lytaHPMRWp09Wr1wzLVl0925Xx6hGp05Urb82Qe+XKW125+Qpx\nrapqUYhrPL4grW1GaaBAVpEjHaXQrygdCqbV1jZpTU2T67/TnI7UhvQkpnB6QH9mZeiFp3PhtFnu\nmri7Pq7V1efkJEdtbWOGTmT7fzBanlF68tGvKa142RgYGNB4vE1hi4IqbNF4vE23bt2qdXXNTllS\neTBdYUChNyKvVaHJlQmmxxW2uvP6iOvirr7gPeo1Fmt0svUqtEVcU5+WFi1vXGFtWtsGBgZK/dgr\nlnIc2LLpUH19q0sbcLoR7JctWfSn2fX/qD7fmtHnvbLNLi/zmkw5wro5XevrW4d1IltbBgYGRswz\nJgf56FdZGo8kk0lisQSw0KUspLZ2Ln19fYi04sUCTOVBAkgCDXixOoN5p+EFqJ4XSu8E+gJlOkP5\ns119wXu0c+rUNKqqulxeIqLO9rS0aHk78UI6pdqWTCZzf0CGMQpRO
lRVNZvq6g6XlsQLPh3slx1k\n6s8cvGhK84ju83MI93mvTKurP/OaTDnCupmgunrmsE5k+3+QTCZHzDOmLmU5sCUSCYaGksAzLuUZ\njh/fRU9PD6oH8YLmpvI85UgAR/CC3wbz9uOFWtoZSt8H9ATK7Avl73X1Be8xSFXVUU6d2uPykhF1\nDqalRcu7j9S2gl7b/HUGwygEUTp06tReTp7sd2kJvNBlwX7ZT6b+7AYO4ulPVJ/fTbjPe2UOuvoz\nr8mUI6ybSU6eHBjWiWz/DxKJxIh5xhRmrK942Q4m0VSkamrevLl5UcQamz912K2pdQD/PGqNLeaO\nkdbY/DrHssbWqZlrbKsy5F65cpUG1wRFvOnJePwCWxOYBFCGU5Gq0ToUTKutbXRrW34fr3N6EV5j\nmxXQn44MvYheY/PXtuPqr8tVV8/PSY6R1tjC/w9GyzNKTz76Vdbm/mYVaUwE5Wzub1aRRqnJR79y\n2QR5NnAP3gT6KeA7qnpHRLlJN7AZxkQwnoHN9MswRqZYA9ssYJaqPi0ijcBvgOtU9YVQOVM8oyIZ\n58Bm+mUYI1AUB21VfQ14zZ0fFpFteGZ5L4x4YYHJdapgw4YNrFu3jhUrVnDgwAHWrFnDOeecw7ve\n9S4WLVrE9u3b+drXvsYTTzzB0NAQV1xxBclkkp07d3LTTTfxJ3/yJ/T19TFjxgxef/11enp6eOON\nN1i3bh1Llizh6NGj9Pf309bWxubNm+ns7OT8889naGiI9evX09zcTE1NDd3d3Sxfvnx4ajE4TQKk\nTaGM1q5CTpP4dYWnMsdzj3yvtemfyaNfuZLLd+ZPmwd16LzzzgM8/bzzzjv53e9+R09PD52dnQC0\ntrYO68ZDDz3Epk2bmDVrFi+99BLJZJKlS5fy3ve+d1jPzj33XHbv3s3atWtZtmwZX/rSl9L68dDQ\nEI899hi7d+/m7bffpqmpiVdeeYWFCxdyySWXDE9X+rL29PQA8MgjjxCPx4eXEvbs2TM8zRqc4gxe\nd955542oV6NNkRpFYCwLcqTs4hsj8gq0VJhJrg6UV1xxVcDIwl+Q9o1COlRkmlvcDjpb+wvU/mJ3\ntfObCTtwnx4qFzQ4qYk0FqmqqteVK2/V2toml9atNTVNWlvbOOxYGou1jNiuQjqP+nXF4wuc8ck8\njcfbhg1W8rlHvvKVk1MshQtbUxL9ypVcvrPUZgK+7nh6s3LlKqeftRG65elMdbVvdOVviFATKisj\n6GD18OYGsdh5EXqdrp+1tY2B/xfzA7ofNmKZPvx/JBZridwwYcWKq5xenanBjRNWrlyVpvv+9cbY\nyEe/xqJ0jcBTeNMkE2a1lasD5eOPP64p59CHA+fq/rap59RcrymHzijnzkwnac9RNOyM6jt1Z3PQ\nnu7S6yLu0aqec3d6erhdhXQejarLeya9GnYmz/Ue+cpXbk6xhRjYSqVfuZLLd7Z169Yseuf3sVgW\nPRkInNep55zdECrbO8q1cXdtlPN41LVRzuTRmyoE07JvmHC3Zjqf12f835jK/bxU5KNfOe0VKSI1\nwP3AD1T1oWzlbr/99uHzZcuWFWRXat+B8tixTAfK4Gv9unXrSDmH3kGmo+hcPKfmdlIOoRuJdrx+\nm0wn0nBaAu/H9RJXX11E/iAwFCHLHDzn7vR7h9uVa9tzIaqulANsujN5rvfIV75CtqsUrF+/nvXr\n1xesvlLqV67k8p319fWRuZmA38c6gdeJdsZO4ulRgpSP5ptAc6CsX0e2azvdNb5ezw1dG9bBDkDJ\n1P3wpgr709JUm4FpEdf9lkzn83agLS2tqmr2lOnnpaIg+pXL6IdntfWPo5Qpymhtb2z2xjbZYZxv\nbKXUr1yxNzZ7YysV+ehXLkq3FDgJPA1sBjYBV0aUK1rDcnWgXLHCnzPvVqgKnAfX2Ko13dnaV7bg\nGtv00LW+k+lIa2w1gfzgGtsqra1tHL6fv8bmO5bGYi0jtquQzqOpNbYLFOJaX59IW2PL5x75yldO\nTrHjGdgmg37lSi7fWWozAV83PL1ZuXKV08+aCN0Kr7HF3CAVXicfbY1tVWCNLazX6fpZW9sY+H8R\n3AA9vMbWMpyWWmNL3zDBX2Orr09ocOOEsO7bGlt+5KNfU8ZB26wizSpyslLODtphzCrSrCInmqL4\nsY3h5pNC8Qxjoqmkgc0wJhoLNDoOgr+4/F9p4P2SbGho4LHHHiOZTA5vATRr1iw2btzI/PnzOXHi\nBP39/TzwwAOcPHmSxYsXD28ntGfPHnbv3s1zzz3H0NAQN954I21tbaxduxaAhQsXsnPnTjo6Oujs\n7GTHjh3MmDGDnTt3cvDgQQ4dOsQNN9zA/PnzM375hX8l+rIHywTbNtXfjAwjSLbtvvw3vubmZvr7\n+2lpaeHd7343x48fZ+3atYgI7e3tLFiwgJdffplzzz2X48ePc++991JXV8cll1zCTTfdxKZNm/jy\nl7/MkSNH+MxnPsMHP/jBjG3tnn32WXbs2EE8HqempobLL7+c0047Le8ZDHu7KxBjnbvMdjAJ1gDy\nJbX2dKYGfW/S/dyCawZhX5x6Tfeb8efnff+2cPnq0OfpgTKna3BtInOjZm99wPfZ8X3SamvnaMo/\nL+UzU07+YpMVynQT5MlMtiCoInGnA9NDOlYb0ic/P6ivwSCp2YILe/VVVbVHXO8Zv9TUNOXl12k+\nb9Hko18Vr3gpa69eTVlw+dZN0UEM060mfUvLKKvJeo22BAtbX9UH0kYKQuoHNs0W0DFdhvr66YGA\njF6aWWUVHhvYJpYoC836+laNxfyApmtD+tGbgw4GA54OaCo4apQORtXnW3S2jFnfBgYGnJ6ObCVd\nqeSjX2UZj20spAINNuD54AQDGyaJ9nObGUjzfePmkuljc1qg3pH8ZdpJ+ZIliA7IOJtUYNOogI4z\nM2QQ6QwEZPTSLIiiMdWJCg5aXT0T1Rl4+nqIdJ2L8mMLBwKeQ0qvk3g6li24cFR9CTz/u3Rdz0Xf\nksmk09P0+/k+b8bYqfiBLRVo8AheQM9gYMME0QFGBwJpLXiO2OGgi0k8506/3nAdwSCkg6QCkyaJ\nDsi4l1Rg06iAjgMZMqjuCwRk9NIsiKIx1YkKDnry5AAir+PpawvpOhcVQDgcCHg3Kb1O4OlYlO73\nZKkviTegput6LvqWSCScnqbf79Spvaar+TLWV7xsB1N4qsSfr/f9UNKDIUatsYV9ceo03W8m2xqb\nXz7sY9caqNNfW+sIlUn5+3hrbKvSfNJqa7s06J8XXmMrB3+xyQo2FTnhZAuCmlpjawnpT1gHU/5p\n6foa9G/NHly4qmpGxPWtCjGtqWnKy6/TfN6iyUe/zNzfYVaRRr6YuX9pMKvIysD82AyjBNjAZhjF\nIx/9ymmNTUSuFJEXRGS7iPxVfuKVL4XcEHcqUuntHy+mXyNT6f2r0tufD6MObCJSBXwLeD9wPvAx\nETm32IJNJSq941V6+8eD
6dfoVHr/qvT250Mub2w9wEuquktVjwNrgOuKK5ZhVAymX4ZRYHIZ2Drx\nbFh99ro0wzDGj+mXYRSYUY1HROTDwPtV9c/c55uAHlVdFSpnK9tGxZKv8Yjpl2GMzlj1K5dNkPfh\nueX7zHZp47qxYRiA6ZdhFJxcpiI3At0iMldEYsBHgR8XVyzDqBhMvwyjwIz6xqaqJ0VkJbAObyC8\nS1W3FV0yw6gATL8Mo/AUzEHbMAzDMCYDeW2CLCJJEdkiIptFpM+lTReRdSLyooj8XERaCitq6RCR\nu0SkX0SeCaRlba+I3CEiL4nI0yJyUWmkLhxZ2n+biOwVkU3uuDKQ99eu/dtEZEVppC4MIjJbRB4V\nkedF5FkRWeXSJ+T7r2Tn7WzPvpIQkSqnXxU3PS0iLSLy7+7/yPMi8q5cr813d/9TwDJVXaSqPS7t\nC8AjqnoO8Cjw13nWPRn5Hp4DbZDI9orIVcBZqno28OfA/5pIQYtEVPsB/lFVF7vjPwBE5DzgI8B5\nwFXAt0VkKhs+nAA+q6rnA5cCn3EO1EX//s15O+uzryRuBbaWWogS8U3gp6p6HnAhkPMUfb4Dm0Rc\nex3wfXf+feD6POuedKjq48CBUHK4vdcF0u9x1z0JtIhIx0TIWSyytB+8fhDmOmCNqp5Q1STwEp4T\n8pREVV9T1afd+WE85ZrNxHz/Fe28neXZV4yPn4jMBq4G7iy1LBONiDQDv6eq3wNw/0/ezPX6fAc2\nBX4uIhtF5E9cWoeq9jshXsOL2lfOzAy11//nFXa43Uf5KuNn3HTbnYGpuLJtv4gkgIuAJ8js78X4\n/s152xF49k+WVpIJ5RvA5/H+31Ya84D9IvI9NxX7/4pIPNeL8x3YlqrqO/F+TXxGRH6PzIdfaV9G\npbX323hTbhcBrwFfL7E8RUVEGoH7gVvd20Ol9/cJI+LZlz0icg3Q795YhejZkXKmBlgM/IuqLgaO\n4k3/50ReA5uqvur+DgIP4k2Z9PtTLiIyCy8cbTmTrb378OLD+0Q63E51VHUwEEflO6SmG8uu/SJS\ng/eP9Qeq+pBLnojvPyfn7XImy7OvBJYC14rIb4EfAstF5J4SyzSR7AX2qOpT7vP9eANdTox5YBOR\nae4XFCLSAKwAnsVzKr3FFfskUG6dMPyrKdjeW0i198fAzQAicglw0J+ymuKktd/9M/f5EPCcO/8x\n8HaHMD0AAA/+SURBVFERiYnIPKAb6JswKYvDd4GtqvrNQNpEfP/mvB397MseVf2iqs5R1TPxvvdH\nVfXmUss1UTid2SMi813S+xiLEc1YQ27jzX0+DWzGG9C+4NLbgEeAF/GcTVvHWvdkPYDVwCvA28Bu\n4FPA9GztxbNk2wFsARaXWv4itf8e4BnXFx7EW3Pyy/+1a/82YEWp5R9n25cCJwN9fhNw5Uj9vZDf\nv7vXi3hGOF8o9fOYDM++1HKV4DlcBvy41HKUoN0X4v24exp4AGjJ9Vpz0DYMwzDKinyNRwzDMAxj\nUmIDm2EYhlFW2MBmGIZhlBU2sBmGYRhlhQ1shmEYRllhA5thGIZRVtjAZhiGYZQVNrAZhmEYZYUN\nbIZhGEZZYQNbEXF7/J1yASMnJSLyUxH5RJa8SS+/YRhGmJpSC1AuiEgSLwbdCbzNghUv8vGk3rNM\nVa8erciECGIYhlEg7Jd44VDgGlVtVtUmVW3G2zjYMAzDmEBsYCssIwYDFJFbRGSriLwpIjtE5M8C\neVtF5OrA52oRGRCRi9zn+0TkVRE5ICLrReQdgbLfE5FvichPXN2/diFj/Px3i0ifu/ZJEbk0kNcr\nIp9251Ui8jURGRSRHcA1EfK/7O7xsoh8bBzPyjAMoyjYwDax9ANXu7e5TwHf8AcuvGCCHw+UvRIY\nVC+CLsBPgbPwpjs3AfeG6v4j4DagFXgZ+DKAiEwHfgL8EzADL9z8/3bpYf4MLyr6hcA7gT/wM0Rk\nGvBN4P1O/nfjhZMwDMOYVNjAVlgeFJE33PFAOFNVf6aqSXf+S7w4Xr/nslfjRcytd58/hjfY+dfe\nrapHVfU48DfAhSLSFKh+rar+RlVP4Q16/oB5DbBdVVer6ilVXQO8AHwwQv4/BP5JVV9R1YPA34fy\nTwILRKReVftVdVuOz8UwDGPCsIGtsFynqm3u+FA4U0SuctOEr4vIAeAq4DQAVX0ZL0LsB0UkDlyL\nN9j5U4RfcdOXB4GdeGt6pwWqfy1wfhRodOdnALtCouwCOiPkPwPYEyqHk+8o3lvhXwCvisjDInLO\nSA/DMAyjFNjAVliyrrGJSAy4H/gHoF1VpwM/C12zBm868jrgeVX9rUv/ON4b1ntVtRVIuOtGXNNz\nvOLKB5kD7Iso+yrQFfg8N5ipqr9Q1RXALLyozt/J4f6GYRgTig1sxccffGLu2K+qp0TkKmBFqOwa\nl/YXuLc1RxPwNnBARBrwpghzNcP/KXC2iHzUGaT8EXAe8HBE2fuAVSLS6dbg/mq4ESIzReRat9Z2\nHDiMNzVpGIYxqbCBrXBkG2gUQFUPA6uAfxeRN4CPAg+lFVR9Dfg1cAnwb4Gse4DdeG9ZzwG/ylko\n1TeADwCfA/a7v9eo6oEIub8D/BzYAjwF/CiQVwV81smwH/h9vAHYMAxjUiGquf3wd7tPPAXsVdVr\niyqVYRiGYeTJWN7YbsUzbjAMwzCMSUtOA5uIzMbzb7qzuOIYhmEYxvjI9Y3tG8DnsX0DDcMwjEnO\nqJsgi8g1QL+qPi0iy8hiYi4iNugZFYuq5uJ6YRjGBJDLG9tSvB0xfou3E8ZyEbknqqCqVuRx2223\nlVwGa3/pDsMwJhejDmyq+kVVnaOqZ+KZqD+qqjcXXzTDMAzDGDvmx2YYhmGUFWMKNKqqjwGPFUmW\nKcuyZctKLUJJqfT2G4YxucjZQXvUikTU1huMSkREUDMeMYxJw5je2MbL4OAgyWSSRCJBe3s7AD/5\nyU+44447OHz4MLNnz+bw4cMcPXqU5uZm3nzzTVpbWznnnHN46623aG5u5sSJE9xwww20tbXxyCOP\n0NHRwfLly2lvb2dwcJDNmzcDsGjRIvbv388jjzxCPB6nubmZ1tZWurq62LNnz3AZX45s8hmGYRhT\njAJahulIrF69RuPxNm1pWazxeJuuXr1GL7hgkUKdwjSFs9x5XOF09zeYdlYor3b4upqaJl25cpXW\n1ja5tG6tqmpQiLnydcPp3rWzFKZpbW2jrl69Jqt8hpELru+X3DrTDjvs8I4JmYocHBxk7txzOXas\nF1gIPEMs9h6Gho4DcWA9cDpwDvAA8GGgN5C2fvg6WA7chRfJ5YlA+iV4m+f/ZyDtMuAUUB1Rx4+A\nD1Ffr2za9Csuvvg9afLF48vZtesFe3MzRsWmIg1jcjEhVpHJZJJYLIE3aAAsRLUJaAfmufSkO2/A\nCwkWTEtdB7OBQ4EyfnonXpiwYNocoDVLHQ1AgurqmfT19WXIV1s7l2QyWYjmG4ZhG
BPIhAxsiUSC\noaEk3tsSwDOIvAUM4gWDfgYvFuZO4AheEOdgWuo62Au0BMr46fvwgkgH03YDB7PUcQRIcvLkAD09\nPRnyHT++i0QiUYjmG4ZhGBPIhAxs7e3t3HXXt4nHl9PcvJh4fDl33/0dFiw4F2+AudQdR/D2Wq7F\nm1q8FDjqzrtJTTd+HC/W5aVANzU172Hlyj+ltvbkcFpV1bvdtdMC9/DrqAWuobb2ON/97v/ivPPO\ny5Dvrru+bdOQhmEYU5AJNfc3q0ijHLE1NsOYXIw6sLmQNfcAHXiWGN9R1Tsiyo06sBlGOWIDm2FM\nLnIZ2GYBs9Tb3b8R+A1wnaq+ECpnA5tRkdjAZhiTi1EdtFX1NTyrDFT1sIhswzNBfGHEC/Mgn6nA\n4PRjV1cXhw8fTrt+27ZtadORPlHTkk8++SR33303DQ0NtLW10d7ezsKFC9m5cyfxeByAgYEBuru7\n6ezsZMeOHXR3d9PX18eGDRu48MILOf300xkYGCAej3Ps2DHi8Tgvvvgi+/btY/ny5XziE58Yls2X\n/eDBg2lyDQ0NsXHjRlasWMHSpUvTngswfE24DX77Gxsbs063jpWpMj07VeQ0DGMCGIvTG56ZYhJo\njMjT8ZCPg/Tq1Ws0FmtxjtfTFGIaj88bvn7lyludQ/bZ7m91wBk87hy4/WvrnNN3lMP4WcP1Q0fg\n2rMVagKO43Wh+qeHnMs7VGSarl69JkL22tA1nswLFlw0/FxisRatqopHyJhyPq+t9eXrznBCn4jv\npBSUWk7MQdsOOybVkXtBaASewpuGjMrXfBkYGNB4vE1hi4IqbNF4vE0HBgbGdI03KLQq9GpdXbP7\nBx/Mjyv0hsoPuPMWhXqXtsWlT89av3f+cKDO1lD53oj7tyn0am1tk9bXh+uOK9ydRea1oc8tWWSM\nuud0ra9vHfFZFuo7KQWTQU4b2OywY3IdOe0VKSI1wP3AD1T1oWzlbr/99uHzZcuW5bzru+/AfexY\npoN0tmmlZDJJVVXYSTuBZ9rfgEgrUEemE3dDqHwSWIJnG3MMz2l8IbCRTMfuVP3e+YOknL073F+/\nfNDR3L9+LtDAqVNt1NQ0kek0/tuIazrxHNKDn6uzyNjg6kmXubr6yP/f3v2GVlXHcRx/f+ecW8Qm\nI/PPhttEVhBiGrhAejAUG4X6SNwIwvBBEGH0IBIf9dQn/YHyQbSMMiwSRj2KPXDrYdncRJq2IBJ1\nZgyhWelc49uDc047u7vXze3unrN7Pi84zHvPub/7vbvDL+d8v7/zu+/vMp+FfCdJSCLO/v5++vv7\nl2RsESmC+WQ/gq7It+c4xhdKZ2w6Y1uoNMSJzti0aUvVNvcBsBOYAoaAQeA80JHnOF+MqE5SW7tt\nATW26RpYdXVzrMZ2xKfrTYVqbNFrV3lQL6uKHVPl96+xbQ7HjG6sXJUz/urw5yYvXGOLxo5qbHUz\nYo5qbLW128IaW7XPrLHNfM/KyjUzYi5Gje1BvpMkJB2nEps2benaUrUem7oi1RW5UEnGqXZ/kXRJ\nVWITWY6U2ETSpST3ihQRESmVkq6gnSX5Lo3FLz2Oj49z584ddu/eza1bt+jt7WXPnj3U19fT09PD\n0NAQY2NjHDhwgLt37zIwMEBnZydtbW309fVx4cIFAG7fvs3g4CDNzc3s3buX9vZ2xsbGOHXqFKOj\no7S0tDAxMQHA1q1baW9vZ2Rk5P/3a21tnXFpM36PzbVr17Jly5ZZl3cLfT4RkTTQpcglcPr0lxw+\n/ApVVcFyPd3dJwA4dOhl7t1bQ7DkTgWwARgl6M1pIlhmB4J2/vXhvkmC1QgaCZbbuUewwsGG8HFF\nzj6LjREfZzXBEj7R66PXTIaP1wM3MJvCPT7mFDU1jwHX6e4+QVfXwbyfr6vrYHF/icuILkWKpEyx\nulBYZFdkucjXfl5dvTrW3l9oGsGwQ23Ovp4C7f99BcapCbsrq/Psq49NB+grMD0g9/HMKRE1NfU+\nPDyceHt92qCuSG3aUrWpxlZk+VYLX7HiUcwaKLwqeDPwA8Ek7/i+P5k94TqaZJ5vnAaCSelr8uxr\nCl8Xn6SeO6E736Ty5vC9gonPWm1cRNJOia3I8q0WPjX1B+7XKbwq+G/ADuBmzr46gsuBuSuF/11g\nnOvABDNXJo/2XQlfF72e8Gd8/Pjq5fHYmolWFddq4yKSdqqxLYGoBrVyZROTk1dyamyPMF0bi9fY\nNjJdY6tkujYW1cQaCJJSvMZ2laCOFt8XWZEzTh3BGWC+8VYxu8YW7Z+ipqYVGJ1VY4t/PtXYVGMT\nSQsltiWirsjsUGITSZd5JTYz6wDeJTjN6Hb343mOUWKTTFJiE0mXOWtsZlYBvA88CzwBdJnZ40sd\n2HKS9Tu9Z/3zi0i6zKd5ZAfwi7tfcfdJ4Atg/9KGtbxk/T/2rH9+EUmX+SS2BoIuhci18DkREZHU\nUbu/iIiUlTmbR8zsaeAtd+8IHx8luNPC8Zzj1DkimaXmEZH0mE9iWwH8DOwCbhDcIqPL3S8tfXgi\nIiIPZs67+7v7lJm9CvQy3e6vpCYiIqlUtAnaIiIiabDo5hEz6zCzy2Y2YmZvFiOo5cLMGs3srJn9\nZGYXzexI0jGVmplVmNl5M/sm6VhKzczqzOwrM7sU/g20JR2TiCzyjC2cvD1CUH8bBc4Bne5+uTjh\npZuZrQPWufuQmT0MDAD7s/L5AczsdeApoNbd9yUdTymZ2SfAd+5+0swqgYfcfTzhsEQyb7FnbJme\nvO3uv7v7UPjvv4BLZGiOn5k1As8BHyUdS6mZWS3wjLufBHD3f5XURNJhsYlNk7dDZtYMPAl8n2wk\nJfUO8AaQxUJtCzBmZifDS7EfmllN0kGJiCZoF0V4GfIM8Fp45lb2zOx54GZ4xmrhliWVwHbgA3ff\nDvwDHE02JBGBxSe26wQLiUUaw+cyI6ytnAE+c/evk46nhHYC+8zsV+A00G5mnyYcUyldA666+4/h\n4zMEiU5EErbYxHYO2GxmTWZWBXQCWeuO+xgYdvf3kg6klNz9mLtvdPdNBN/7WXd/Mem4SsXdbwJX\nzaw1fGoXMJxgSCISmnOC9v1kffK2me0EXgAumtkgQa3pmLt/m2xkUiJHgM/NbCXwK/BSwvGICJqg\nLSIiZUbNIyIiUlaU2EREpKwosYmISFlRYhMRkbKixCYiImVFiU1ERMqKEpuIiJQVJTYRESkr/wHF\nmoOHC55syAAAAABJRU5ErkJggg==\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "%matplotlib inline \n",
+ "import matplotlib.pyplot as plt\n",
+ "\n",
+ "import pandas as pd\n",
+ "from sklearn.utils import shuffle\n",
+ "import tensorflow as tf\n",
+ "\n",
+ "from sklearn import preprocessing\n",
+ "\n",
+ "df = pd.read_csv(\"./wine.csv\", header=0)\n",
+ "print (df.describe())\n",
+ "\n",
+ "for i in range (1,8):\n",
+ " number = 420 + i\n",
+ " ax1 = plt.subplot(number)\n",
+ " ax1.locator_params(nbins=3)\n",
+ " plt.title(list(df)[i])\n",
+ " ax1.scatter(df[df.columns[i]],df['Wine']) #Plot a scatter draw of the datapoints\n",
+ "plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {
+ "collapsed": false,
+ "scrolled": true
+ },
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "Exception AssertionError: AssertionError() in > ignored\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "0.973684\n",
+ "0.921053\n",
+ "0.921053\n",
+ "0.947368\n",
+ "0.921053\n",
+ "0.921053\n",
+ "0.921053\n",
+ "0.894737\n",
+ "0.947368\n",
+ "0.921053\n",
+ "0.894737\n",
+ "0.921053\n",
+ "0.947368\n",
+ "0.973684\n",
+ "0.947368\n",
+ "0.921053\n",
+ "0.973684\n",
+ "0.921053\n",
+ "0.947368\n",
+ "0.921053\n",
+ "1.0\n",
+ "0.947368\n",
+ "0.973684\n",
+ "0.921053\n",
+ "0.921053\n",
+ "0.973684\n",
+ "0.973684\n",
+ "0.947368\n",
+ "0.894737\n",
+ "0.921053\n",
+ "0.921053\n",
+ "0.947368\n",
+ "0.947368\n",
+ "0.973684\n",
+ "1.0\n",
+ "0.973684\n",
+ "0.973684\n",
+ "0.921053\n",
+ "0.973684\n",
+ "0.921053\n",
+ "0.973684\n",
+ "1.0\n",
+ "1.0\n",
+ "1.0\n",
+ "0.973684\n",
+ "0.973684\n",
+ "0.973684\n",
+ "0.947368\n",
+ "1.0\n",
+ "0.973684\n",
+ "0.947368\n",
+ "0.947368\n",
+ "1.0\n",
+ "0.973684\n",
+ "1.0\n",
+ "0.894737\n",
+ "0.921053\n",
+ "0.947368\n",
+ "0.947368\n",
+ "0.947368\n",
+ "0.973684\n",
+ "1.0\n",
+ "0.973684\n",
+ "0.947368\n",
+ "0.947368\n",
+ "0.921053\n",
+ "0.973684\n",
+ "0.921053\n",
+ "1.0\n",
+ "1.0\n",
+ "0.973684\n",
+ "0.947368\n",
+ "0.973684\n",
+ "0.973684\n",
+ "1.0\n",
+ "0.973684\n",
+ "0.973684\n",
+ "0.973684\n",
+ "1.0\n",
+ "0.947368\n",
+ "1.0\n",
+ "0.973684\n",
+ "1.0\n",
+ "0.973684\n",
+ "0.947368\n",
+ "0.947368\n",
+ "0.973684\n",
+ "0.921053\n",
+ "0.973684\n",
+ "0.973684\n",
+ "0.973684\n",
+ "0.973684\n",
+ "1.0\n",
+ "0.947368\n",
+ "1.0\n",
+ "0.921053\n",
+ "0.973684\n",
+ "0.973684\n",
+ "0.973684\n",
+ "1.0\n"
+ ]
+ }
+ ],
+ "source": [
+ "\n",
+ "#mnist = input_data.read_data_sets(\".\", one_hot=True)\n",
+ "\n",
+ "sess = tf.InteractiveSession()\n",
+ "\n",
+ "X = df[df.columns[1:13]].values\n",
+ "\n",
+ "\n",
+ "\n",
+ "y = df['Wine'].values-1\n",
+ "Y = tf.one_hot(indices = y, depth=3, on_value = 1., off_value = 0., axis = 1 , name = \"a\").eval()\n",
+ "X, Y = shuffle (X, Y)\n",
+ "\n",
+ "scaler = preprocessing.StandardScaler()\n",
+ "X = scaler.fit_transform(X)\n",
+ "\n",
+ "# Create the model\n",
+ " x = tf.placeholder(tf.float32, [None, 12])\n",
+ " W = tf.Variable(tf.zeros([12, 3]))\n",
+ " b = tf.Variable(tf.zeros([3]))\n",
+ " y = tf.nn.softmax(tf.matmul(x, W) + b)\n",
+ "\n",
+ "# Define loss and optimizer\n",
+ "y_ = tf.placeholder(tf.float32, [None, 3])\n",
+ "cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1]))\n",
+ "train_step = tf.train.GradientDescentOptimizer(0.1).minimize(cross_entropy)\n",
+ "\n",
+ "\n",
+ "# Train\n",
+ "tf.initialize_all_variables().run()\n",
+ "for i in range(100):\n",
+ " X,Y =shuffle (X, Y, random_state=1)\n",
+ "\n",
+ " Xtr=X[0:140,:]\n",
+ " Ytr=Y[0:140,:]\n",
+ "\n",
+ " Xt=X[140:178,:]\n",
+ " Yt=Y[140:178,:]\n",
+ " Xtr, Ytr = shuffle (Xtr, Ytr, random_state=0)\n",
+ " #batch_xs, batch_ys = mnist.train.next_batch(100)\n",
+ " batch_xs, batch_ys = Xtr , Ytr\n",
+ " train_step.run({x: batch_xs, y_: batch_ys})\n",
+ " cost = sess.run (cross_entropy, feed_dict={x: batch_xs, y_: batch_ys})\n",
+ " # Test trained model\n",
+ " correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))\n",
+ " accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))\n",
+ " print(accuracy.eval({x: Xt, y_: Yt}))\n",
+ " \n"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 2",
+ "language": "python",
+ "name": "python2"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.11+"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/5/data/mpg.csv b/Building-Machine-Learning-Projects-with-TensorFlow-master/5/data/mpg.csv
new file mode 100644
index 000000000..6e6b23cc9
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/5/data/mpg.csv
@@ -0,0 +1,399 @@
+mpg,cylinders,displacement,horsepower,weight,acceleration,model_year,origin,name
+18,8,307,130,3504,12,70,1,chevrolet chevelle malibu
+15,8,350,165,3693,11.5,70,1,buick skylark 320
+18,8,318,150,3436,11,70,1,plymouth satellite
+16,8,304,150,3433,12,70,1,amc rebel sst
+17,8,302,140,3449,10.5,70,1,ford torino
+15,8,429,198,4341,10,70,1,ford galaxie 500
+14,8,454,220,4354,9,70,1,chevrolet impala
+14,8,440,215,4312,8.5,70,1,plymouth fury iii
+14,8,455,225,4425,10,70,1,pontiac catalina
+15,8,390,190,3850,8.5,70,1,amc ambassador dpl
+15,8,383,170,3563,10,70,1,dodge challenger se
+14,8,340,160,3609,8,70,1,plymouth 'cuda 340
+15,8,400,150,3761,9.5,70,1,chevrolet monte carlo
+14,8,455,225,3086,10,70,1,buick estate wagon (sw)
+24,4,113,95,2372,15,70,3,toyota corona mark ii
+22,6,198,95,2833,15.5,70,1,plymouth duster
+18,6,199,97,2774,15.5,70,1,amc hornet
+21,6,200,85,2587,16,70,1,ford maverick
+27,4,97,88,2130,14.5,70,3,datsun pl510
+26,4,97,46,1835,20.5,70,2,volkswagen 1131 deluxe sedan
+25,4,110,87,2672,17.5,70,2,peugeot 504
+24,4,107,90,2430,14.5,70,2,audi 100 ls
+25,4,104,95,2375,17.5,70,2,saab 99e
+26,4,121,113,2234,12.5,70,2,bmw 2002
+21,6,199,90,2648,15,70,1,amc gremlin
+10,8,360,215,4615,14,70,1,ford f250
+10,8,307,200,4376,15,70,1,chevy c20
+11,8,318,210,4382,13.5,70,1,dodge d200
+9,8,304,193,4732,18.5,70,1,hi 1200d
+27,4,97,88,2130,14.5,71,3,datsun pl510
+28,4,140,90,2264,15.5,71,1,chevrolet vega 2300
+25,4,113,95,2228,14,71,3,toyota corona
+25,4,98,0,2046,19,71,1,ford pinto
+19,6,232,100,2634,13,71,1,amc gremlin
+16,6,225,105,3439,15.5,71,1,plymouth satellite custom
+17,6,250,100,3329,15.5,71,1,chevrolet chevelle malibu
+19,6,250,88,3302,15.5,71,1,ford torino 500
+18,6,232,100,3288,15.5,71,1,amc matador
+14,8,350,165,4209,12,71,1,chevrolet impala
+14,8,400,175,4464,11.5,71,1,pontiac catalina brougham
+14,8,351,153,4154,13.5,71,1,ford galaxie 500
+14,8,318,150,4096,13,71,1,plymouth fury iii
+12,8,383,180,4955,11.5,71,1,dodge monaco (sw)
+13,8,400,170,4746,12,71,1,ford country squire (sw)
+13,8,400,175,5140,12,71,1,pontiac safari (sw)
+18,6,258,110,2962,13.5,71,1,amc hornet sportabout (sw)
+22,4,140,72,2408,19,71,1,chevrolet vega (sw)
+19,6,250,100,3282,15,71,1,pontiac firebird
+18,6,250,88,3139,14.5,71,1,ford mustang
+23,4,122,86,2220,14,71,1,mercury capri 2000
+28,4,116,90,2123,14,71,2,opel 1900
+30,4,79,70,2074,19.5,71,2,peugeot 304
+30,4,88,76,2065,14.5,71,2,fiat 124b
+31,4,71,65,1773,19,71,3,toyota corolla 1200
+35,4,72,69,1613,18,71,3,datsun 1200
+27,4,97,60,1834,19,71,2,volkswagen model 111
+26,4,91,70,1955,20.5,71,1,plymouth cricket
+24,4,113,95,2278,15.5,72,3,toyota corona hardtop
+25,4,97.5,80,2126,17,72,1,dodge colt hardtop
+23,4,97,54,2254,23.5,72,2,volkswagen type 3
+20,4,140,90,2408,19.5,72,1,chevrolet vega
+21,4,122,86,2226,16.5,72,1,ford pinto runabout
+13,8,350,165,4274,12,72,1,chevrolet impala
+14,8,400,175,4385,12,72,1,pontiac catalina
+15,8,318,150,4135,13.5,72,1,plymouth fury iii
+14,8,351,153,4129,13,72,1,ford galaxie 500
+17,8,304,150,3672,11.5,72,1,amc ambassador sst
+11,8,429,208,4633,11,72,1,mercury marquis
+13,8,350,155,4502,13.5,72,1,buick lesabre custom
+12,8,350,160,4456,13.5,72,1,oldsmobile delta 88 royale
+13,8,400,190,4422,12.5,72,1,chrysler newport royal
+19,3,70,97,2330,13.5,72,3,mazda rx2 coupe
+15,8,304,150,3892,12.5,72,1,amc matador (sw)
+13,8,307,130,4098,14,72,1,chevrolet chevelle concours (sw)
+13,8,302,140,4294,16,72,1,ford gran torino (sw)
+14,8,318,150,4077,14,72,1,plymouth satellite custom (sw)
+18,4,121,112,2933,14.5,72,2,volvo 145e (sw)
+22,4,121,76,2511,18,72,2,volkswagen 411 (sw)
+21,4,120,87,2979,19.5,72,2,peugeot 504 (sw)
+26,4,96,69,2189,18,72,2,renault 12 (sw)
+22,4,122,86,2395,16,72,1,ford pinto (sw)
+28,4,97,92,2288,17,72,3,datsun 510 (sw)
+23,4,120,97,2506,14.5,72,3,toyouta corona mark ii (sw)
+28,4,98,80,2164,15,72,1,dodge colt (sw)
+27,4,97,88,2100,16.5,72,3,toyota corolla 1600 (sw)
+13,8,350,175,4100,13,73,1,buick century 350
+14,8,304,150,3672,11.5,73,1,amc matador
+13,8,350,145,3988,13,73,1,chevrolet malibu
+14,8,302,137,4042,14.5,73,1,ford gran torino
+15,8,318,150,3777,12.5,73,1,dodge coronet custom
+12,8,429,198,4952,11.5,73,1,mercury marquis brougham
+13,8,400,150,4464,12,73,1,chevrolet caprice classic
+13,8,351,158,4363,13,73,1,ford ltd
+14,8,318,150,4237,14.5,73,1,plymouth fury gran sedan
+13,8,440,215,4735,11,73,1,chrysler new yorker brougham
+12,8,455,225,4951,11,73,1,buick electra 225 custom
+13,8,360,175,3821,11,73,1,amc ambassador brougham
+18,6,225,105,3121,16.5,73,1,plymouth valiant
+16,6,250,100,3278,18,73,1,chevrolet nova custom
+18,6,232,100,2945,16,73,1,amc hornet
+18,6,250,88,3021,16.5,73,1,ford maverick
+23,6,198,95,2904,16,73,1,plymouth duster
+26,4,97,46,1950,21,73,2,volkswagen super beetle
+11,8,400,150,4997,14,73,1,chevrolet impala
+12,8,400,167,4906,12.5,73,1,ford country
+13,8,360,170,4654,13,73,1,plymouth custom suburb
+12,8,350,180,4499,12.5,73,1,oldsmobile vista cruiser
+18,6,232,100,2789,15,73,1,amc gremlin
+20,4,97,88,2279,19,73,3,toyota carina
+21,4,140,72,2401,19.5,73,1,chevrolet vega
+22,4,108,94,2379,16.5,73,3,datsun 610
+18,3,70,90,2124,13.5,73,3,maxda rx3
+19,4,122,85,2310,18.5,73,1,ford pinto
+21,6,155,107,2472,14,73,1,mercury capri v6
+26,4,98,90,2265,15.5,73,2,fiat 124 sport coupe
+15,8,350,145,4082,13,73,1,chevrolet monte carlo s
+16,8,400,230,4278,9.5,73,1,pontiac grand prix
+29,4,68,49,1867,19.5,73,2,fiat 128
+24,4,116,75,2158,15.5,73,2,opel manta
+20,4,114,91,2582,14,73,2,audi 100ls
+19,4,121,112,2868,15.5,73,2,volvo 144ea
+15,8,318,150,3399,11,73,1,dodge dart custom
+24,4,121,110,2660,14,73,2,saab 99le
+20,6,156,122,2807,13.5,73,3,toyota mark ii
+11,8,350,180,3664,11,73,1,oldsmobile omega
+20,6,198,95,3102,16.5,74,1,plymouth duster
+21,6,200,0,2875,17,74,1,ford maverick
+19,6,232,100,2901,16,74,1,amc hornet
+15,6,250,100,3336,17,74,1,chevrolet nova
+31,4,79,67,1950,19,74,3,datsun b210
+26,4,122,80,2451,16.5,74,1,ford pinto
+32,4,71,65,1836,21,74,3,toyota corolla 1200
+25,4,140,75,2542,17,74,1,chevrolet vega
+16,6,250,100,3781,17,74,1,chevrolet chevelle malibu classic
+16,6,258,110,3632,18,74,1,amc matador
+18,6,225,105,3613,16.5,74,1,plymouth satellite sebring
+16,8,302,140,4141,14,74,1,ford gran torino
+13,8,350,150,4699,14.5,74,1,buick century luxus (sw)
+14,8,318,150,4457,13.5,74,1,dodge coronet custom (sw)
+14,8,302,140,4638,16,74,1,ford gran torino (sw)
+14,8,304,150,4257,15.5,74,1,amc matador (sw)
+29,4,98,83,2219,16.5,74,2,audi fox
+26,4,79,67,1963,15.5,74,2,volkswagen dasher
+26,4,97,78,2300,14.5,74,2,opel manta
+31,4,76,52,1649,16.5,74,3,toyota corona
+32,4,83,61,2003,19,74,3,datsun 710
+28,4,90,75,2125,14.5,74,1,dodge colt
+24,4,90,75,2108,15.5,74,2,fiat 128
+26,4,116,75,2246,14,74,2,fiat 124 tc
+24,4,120,97,2489,15,74,3,honda civic
+26,4,108,93,2391,15.5,74,3,subaru
+31,4,79,67,2000,16,74,2,fiat x1.9
+19,6,225,95,3264,16,75,1,plymouth valiant custom
+18,6,250,105,3459,16,75,1,chevrolet nova
+15,6,250,72,3432,21,75,1,mercury monarch
+15,6,250,72,3158,19.5,75,1,ford maverick
+16,8,400,170,4668,11.5,75,1,pontiac catalina
+15,8,350,145,4440,14,75,1,chevrolet bel air
+16,8,318,150,4498,14.5,75,1,plymouth grand fury
+14,8,351,148,4657,13.5,75,1,ford ltd
+17,6,231,110,3907,21,75,1,buick century
+16,6,250,105,3897,18.5,75,1,chevroelt chevelle malibu
+15,6,258,110,3730,19,75,1,amc matador
+18,6,225,95,3785,19,75,1,plymouth fury
+21,6,231,110,3039,15,75,1,buick skyhawk
+20,8,262,110,3221,13.5,75,1,chevrolet monza 2+2
+13,8,302,129,3169,12,75,1,ford mustang ii
+29,4,97,75,2171,16,75,3,toyota corolla
+23,4,140,83,2639,17,75,1,ford pinto
+20,6,232,100,2914,16,75,1,amc gremlin
+23,4,140,78,2592,18.5,75,1,pontiac astro
+24,4,134,96,2702,13.5,75,3,toyota corona
+25,4,90,71,2223,16.5,75,2,volkswagen dasher
+24,4,119,97,2545,17,75,3,datsun 710
+18,6,171,97,2984,14.5,75,1,ford pinto
+29,4,90,70,1937,14,75,2,volkswagen rabbit
+19,6,232,90,3211,17,75,1,amc pacer
+23,4,115,95,2694,15,75,2,audi 100ls
+23,4,120,88,2957,17,75,2,peugeot 504
+22,4,121,98,2945,14.5,75,2,volvo 244dl
+25,4,121,115,2671,13.5,75,2,saab 99le
+33,4,91,53,1795,17.5,75,3,honda civic cvcc
+28,4,107,86,2464,15.5,76,2,fiat 131
+25,4,116,81,2220,16.9,76,2,opel 1900
+25,4,140,92,2572,14.9,76,1,capri ii
+26,4,98,79,2255,17.7,76,1,dodge colt
+27,4,101,83,2202,15.3,76,2,renault 12tl
+17.5,8,305,140,4215,13,76,1,chevrolet chevelle malibu classic
+16,8,318,150,4190,13,76,1,dodge coronet brougham
+15.5,8,304,120,3962,13.9,76,1,amc matador
+14.5,8,351,152,4215,12.8,76,1,ford gran torino
+22,6,225,100,3233,15.4,76,1,plymouth valiant
+22,6,250,105,3353,14.5,76,1,chevrolet nova
+24,6,200,81,3012,17.6,76,1,ford maverick
+22.5,6,232,90,3085,17.6,76,1,amc hornet
+29,4,85,52,2035,22.2,76,1,chevrolet chevette
+24.5,4,98,60,2164,22.1,76,1,chevrolet woody
+29,4,90,70,1937,14.2,76,2,vw rabbit
+33,4,91,53,1795,17.4,76,3,honda civic
+20,6,225,100,3651,17.7,76,1,dodge aspen se
+18,6,250,78,3574,21,76,1,ford granada ghia
+18.5,6,250,110,3645,16.2,76,1,pontiac ventura sj
+17.5,6,258,95,3193,17.8,76,1,amc pacer d/l
+29.5,4,97,71,1825,12.2,76,2,volkswagen rabbit
+32,4,85,70,1990,17,76,3,datsun b-210
+28,4,97,75,2155,16.4,76,3,toyota corolla
+26.5,4,140,72,2565,13.6,76,1,ford pinto
+20,4,130,102,3150,15.7,76,2,volvo 245
+13,8,318,150,3940,13.2,76,1,plymouth volare premier v8
+19,4,120,88,3270,21.9,76,2,peugeot 504
+19,6,156,108,2930,15.5,76,3,toyota mark ii
+16.5,6,168,120,3820,16.7,76,2,mercedes-benz 280s
+16.5,8,350,180,4380,12.1,76,1,cadillac seville
+13,8,350,145,4055,12,76,1,chevy c10
+13,8,302,130,3870,15,76,1,ford f108
+13,8,318,150,3755,14,76,1,dodge d100
+31.5,4,98,68,2045,18.5,77,3,honda accord cvcc
+30,4,111,80,2155,14.8,77,1,buick opel isuzu deluxe
+36,4,79,58,1825,18.6,77,2,renault 5 gtl
+25.5,4,122,96,2300,15.5,77,1,plymouth arrow gs
+33.5,4,85,70,1945,16.8,77,3,datsun f-10 hatchback
+17.5,8,305,145,3880,12.5,77,1,chevrolet caprice classic
+17,8,260,110,4060,19,77,1,oldsmobile cutlass supreme
+15.5,8,318,145,4140,13.7,77,1,dodge monaco brougham
+15,8,302,130,4295,14.9,77,1,mercury cougar brougham
+17.5,6,250,110,3520,16.4,77,1,chevrolet concours
+20.5,6,231,105,3425,16.9,77,1,buick skylark
+19,6,225,100,3630,17.7,77,1,plymouth volare custom
+18.5,6,250,98,3525,19,77,1,ford granada
+16,8,400,180,4220,11.1,77,1,pontiac grand prix lj
+15.5,8,350,170,4165,11.4,77,1,chevrolet monte carlo landau
+15.5,8,400,190,4325,12.2,77,1,chrysler cordoba
+16,8,351,149,4335,14.5,77,1,ford thunderbird
+29,4,97,78,1940,14.5,77,2,volkswagen rabbit custom
+24.5,4,151,88,2740,16,77,1,pontiac sunbird coupe
+26,4,97,75,2265,18.2,77,3,toyota corolla liftback
+25.5,4,140,89,2755,15.8,77,1,ford mustang ii 2+2
+30.5,4,98,63,2051,17,77,1,chevrolet chevette
+33.5,4,98,83,2075,15.9,77,1,dodge colt m/m
+30,4,97,67,1985,16.4,77,3,subaru dl
+30.5,4,97,78,2190,14.1,77,2,volkswagen dasher
+22,6,146,97,2815,14.5,77,3,datsun 810
+21.5,4,121,110,2600,12.8,77,2,bmw 320i
+21.5,3,80,110,2720,13.5,77,3,mazda rx-4
+43.1,4,90,48,1985,21.5,78,2,volkswagen rabbit custom diesel
+36.1,4,98,66,1800,14.4,78,1,ford fiesta
+32.8,4,78,52,1985,19.4,78,3,mazda glc deluxe
+39.4,4,85,70,2070,18.6,78,3,datsun b210 gx
+36.1,4,91,60,1800,16.4,78,3,honda civic cvcc
+19.9,8,260,110,3365,15.5,78,1,oldsmobile cutlass salon brougham
+19.4,8,318,140,3735,13.2,78,1,dodge diplomat
+20.2,8,302,139,3570,12.8,78,1,mercury monarch ghia
+19.2,6,231,105,3535,19.2,78,1,pontiac phoenix lj
+20.5,6,200,95,3155,18.2,78,1,chevrolet malibu
+20.2,6,200,85,2965,15.8,78,1,ford fairmont (auto)
+25.1,4,140,88,2720,15.4,78,1,ford fairmont (man)
+20.5,6,225,100,3430,17.2,78,1,plymouth volare
+19.4,6,232,90,3210,17.2,78,1,amc concord
+20.6,6,231,105,3380,15.8,78,1,buick century special
+20.8,6,200,85,3070,16.7,78,1,mercury zephyr
+18.6,6,225,110,3620,18.7,78,1,dodge aspen
+18.1,6,258,120,3410,15.1,78,1,amc concord d/l
+19.2,8,305,145,3425,13.2,78,1,chevrolet monte carlo landau
+17.7,6,231,165,3445,13.4,78,1,buick regal sport coupe (turbo)
+18.1,8,302,139,3205,11.2,78,1,ford futura
+17.5,8,318,140,4080,13.7,78,1,dodge magnum xe
+30,4,98,68,2155,16.5,78,1,chevrolet chevette
+27.5,4,134,95,2560,14.2,78,3,toyota corona
+27.2,4,119,97,2300,14.7,78,3,datsun 510
+30.9,4,105,75,2230,14.5,78,1,dodge omni
+21.1,4,134,95,2515,14.8,78,3,toyota celica gt liftback
+23.2,4,156,105,2745,16.7,78,1,plymouth sapporo
+23.8,4,151,85,2855,17.6,78,1,oldsmobile starfire sx
+23.9,4,119,97,2405,14.9,78,3,datsun 200-sx
+20.3,5,131,103,2830,15.9,78,2,audi 5000
+17,6,163,125,3140,13.6,78,2,volvo 264gl
+21.6,4,121,115,2795,15.7,78,2,saab 99gle
+16.2,6,163,133,3410,15.8,78,2,peugeot 604sl
+31.5,4,89,71,1990,14.9,78,2,volkswagen scirocco
+29.5,4,98,68,2135,16.6,78,3,honda accord lx
+21.5,6,231,115,3245,15.4,79,1,pontiac lemans v6
+19.8,6,200,85,2990,18.2,79,1,mercury zephyr 6
+22.3,4,140,88,2890,17.3,79,1,ford fairmont 4
+20.2,6,232,90,3265,18.2,79,1,amc concord dl 6
+20.6,6,225,110,3360,16.6,79,1,dodge aspen 6
+17,8,305,130,3840,15.4,79,1,chevrolet caprice classic
+17.6,8,302,129,3725,13.4,79,1,ford ltd landau
+16.5,8,351,138,3955,13.2,79,1,mercury grand marquis
+18.2,8,318,135,3830,15.2,79,1,dodge st. regis
+16.9,8,350,155,4360,14.9,79,1,buick estate wagon (sw)
+15.5,8,351,142,4054,14.3,79,1,ford country squire (sw)
+19.2,8,267,125,3605,15,79,1,chevrolet malibu classic (sw)
+18.5,8,360,150,3940,13,79,1,chrysler lebaron town @ country (sw)
+31.9,4,89,71,1925,14,79,2,vw rabbit custom
+34.1,4,86,65,1975,15.2,79,3,maxda glc deluxe
+35.7,4,98,80,1915,14.4,79,1,dodge colt hatchback custom
+27.4,4,121,80,2670,15,79,1,amc spirit dl
+25.4,5,183,77,3530,20.1,79,2,mercedes benz 300d
+23,8,350,125,3900,17.4,79,1,cadillac eldorado
+27.2,4,141,71,3190,24.8,79,2,peugeot 504
+23.9,8,260,90,3420,22.2,79,1,oldsmobile cutlass salon brougham
+34.2,4,105,70,2200,13.2,79,1,plymouth horizon
+34.5,4,105,70,2150,14.9,79,1,plymouth horizon tc3
+31.8,4,85,65,2020,19.2,79,3,datsun 210
+37.3,4,91,69,2130,14.7,79,2,fiat strada custom
+28.4,4,151,90,2670,16,79,1,buick skylark limited
+28.8,6,173,115,2595,11.3,79,1,chevrolet citation
+26.8,6,173,115,2700,12.9,79,1,oldsmobile omega brougham
+33.5,4,151,90,2556,13.2,79,1,pontiac phoenix
+41.5,4,98,76,2144,14.7,80,2,vw rabbit
+38.1,4,89,60,1968,18.8,80,3,toyota corolla tercel
+32.1,4,98,70,2120,15.5,80,1,chevrolet chevette
+37.2,4,86,65,2019,16.4,80,3,datsun 310
+28,4,151,90,2678,16.5,80,1,chevrolet citation
+26.4,4,140,88,2870,18.1,80,1,ford fairmont
+24.3,4,151,90,3003,20.1,80,1,amc concord
+19.1,6,225,90,3381,18.7,80,1,dodge aspen
+34.3,4,97,78,2188,15.8,80,2,audi 4000
+29.8,4,134,90,2711,15.5,80,3,toyota corona liftback
+31.3,4,120,75,2542,17.5,80,3,mazda 626
+37,4,119,92,2434,15,80,3,datsun 510 hatchback
+32.2,4,108,75,2265,15.2,80,3,toyota corolla
+46.6,4,86,65,2110,17.9,80,3,mazda glc
+27.9,4,156,105,2800,14.4,80,1,dodge colt
+40.8,4,85,65,2110,19.2,80,3,datsun 210
+44.3,4,90,48,2085,21.7,80,2,vw rabbit c (diesel)
+43.4,4,90,48,2335,23.7,80,2,vw dasher (diesel)
+36.4,5,121,67,2950,19.9,80,2,audi 5000s (diesel)
+30,4,146,67,3250,21.8,80,2,mercedes-benz 240d
+44.6,4,91,67,1850,13.8,80,3,honda civic 1500 gl
+40.9,4,85,0,1835,17.3,80,2,renault lecar deluxe
+33.8,4,97,67,2145,18,80,3,subaru dl
+29.8,4,89,62,1845,15.3,80,2,vokswagen rabbit
+32.7,6,168,132,2910,11.4,80,3,datsun 280-zx
+23.7,3,70,100,2420,12.5,80,3,mazda rx-7 gs
+35,4,122,88,2500,15.1,80,2,triumph tr7 coupe
+23.6,4,140,0,2905,14.3,80,1,ford mustang cobra
+32.4,4,107,72,2290,17,80,3,honda accord
+27.2,4,135,84,2490,15.7,81,1,plymouth reliant
+26.6,4,151,84,2635,16.4,81,1,buick skylark
+25.8,4,156,92,2620,14.4,81,1,dodge aries wagon (sw)
+23.5,6,173,110,2725,12.6,81,1,chevrolet citation
+30,4,135,84,2385,12.9,81,1,plymouth reliant
+39.1,4,79,58,1755,16.9,81,3,toyota starlet
+39,4,86,64,1875,16.4,81,1,plymouth champ
+35.1,4,81,60,1760,16.1,81,3,honda civic 1300
+32.3,4,97,67,2065,17.8,81,3,subaru
+37,4,85,65,1975,19.4,81,3,datsun 210 mpg
+37.7,4,89,62,2050,17.3,81,3,toyota tercel
+34.1,4,91,68,1985,16,81,3,mazda glc 4
+34.7,4,105,63,2215,14.9,81,1,plymouth horizon 4
+34.4,4,98,65,2045,16.2,81,1,ford escort 4w
+29.9,4,98,65,2380,20.7,81,1,ford escort 2h
+33,4,105,74,2190,14.2,81,2,volkswagen jetta
+34.5,4,100,0,2320,15.8,81,2,renault 18i
+33.7,4,107,75,2210,14.4,81,3,honda prelude
+32.4,4,108,75,2350,16.8,81,3,toyota corolla
+32.9,4,119,100,2615,14.8,81,3,datsun 200sx
+31.6,4,120,74,2635,18.3,81,3,mazda 626
+28.1,4,141,80,3230,20.4,81,2,peugeot 505s turbo diesel
+30.7,6,145,76,3160,19.6,81,2,volvo diesel
+25.4,6,168,116,2900,12.6,81,3,toyota cressida
+24.2,6,146,120,2930,13.8,81,3,datsun 810 maxima
+22.4,6,231,110,3415,15.8,81,1,buick century
+26.6,8,350,105,3725,19,81,1,oldsmobile cutlass ls
+20.2,6,200,88,3060,17.1,81,1,ford granada gl
+17.6,6,225,85,3465,16.6,81,1,chrysler lebaron salon
+28,4,112,88,2605,19.6,82,1,chevrolet cavalier
+27,4,112,88,2640,18.6,82,1,chevrolet cavalier wagon
+34,4,112,88,2395,18,82,1,chevrolet cavalier 2-door
+31,4,112,85,2575,16.2,82,1,pontiac j2000 se hatchback
+29,4,135,84,2525,16,82,1,dodge aries se
+27,4,151,90,2735,18,82,1,pontiac phoenix
+24,4,140,92,2865,16.4,82,1,ford fairmont futura
+23,4,151,0,3035,20.5,82,1,amc concord dl
+36,4,105,74,1980,15.3,82,2,volkswagen rabbit l
+37,4,91,68,2025,18.2,82,3,mazda glc custom l
+31,4,91,68,1970,17.6,82,3,mazda glc custom
+38,4,105,63,2125,14.7,82,1,plymouth horizon miser
+36,4,98,70,2125,17.3,82,1,mercury lynx l
+36,4,120,88,2160,14.5,82,3,nissan stanza xe
+36,4,107,75,2205,14.5,82,3,honda accord
+34,4,108,70,2245,16.9,82,3,toyota corolla
+38,4,91,67,1965,15,82,3,honda civic
+32,4,91,67,1965,15.7,82,3,honda civic (auto)
+38,4,91,67,1995,16.2,82,3,datsun 310 gx
+25,6,181,110,2945,16.4,82,1,buick century limited
+38,6,262,85,3015,17,82,1,oldsmobile cutlass ciera (diesel)
+26,4,156,92,2585,14.5,82,1,chrysler lebaron medallion
+22,6,232,112,2835,14.7,82,1,ford granada l
+32,4,144,96,2665,13.9,82,3,toyota celica gt
+36,4,135,84,2370,13,82,1,dodge charger 2.2
+27,4,151,90,2950,17.3,82,1,chevrolet camaro
+27,4,140,86,2790,15.6,82,1,ford mustang gl
+44,4,97,52,2130,24.6,82,2,vw pickup
+32,4,135,84,2295,11.6,82,1,dodge rampage
+28,4,120,79,2625,18.6,82,1,ford ranger
+31,4,119,82,2720,19.4,82,1,chevy s-10
\ No newline at end of file
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/5/data/wine.csv b/Building-Machine-Learning-Projects-with-TensorFlow-master/5/data/wine.csv
new file mode 100644
index 000000000..10e3f2cdb
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/5/data/wine.csv
@@ -0,0 +1,179 @@
+Wine,Alcohol,Malic.acid,Ash,Acl,Mg,Phenols,Flavanoids,Nonflavanoid.phenols,Proanth,Color.int,Hue,OD,Proline
+1,14.23,1.71,2.43,15.6,127,2.8,3.06,.28,2.29,5.64,1.04,3.92,1065
+1,13.2,1.78,2.14,11.2,100,2.65,2.76,.26,1.28,4.38,1.05,3.4,1050
+1,13.16,2.36,2.67,18.6,101,2.8,3.24,.3,2.81,5.68,1.03,3.17,1185
+1,14.37,1.95,2.5,16.8,113,3.85,3.49,.24,2.18,7.8,.86,3.45,1480
+1,13.24,2.59,2.87,21,118,2.8,2.69,.39,1.82,4.32,1.04,2.93,735
+1,14.2,1.76,2.45,15.2,112,3.27,3.39,.34,1.97,6.75,1.05,2.85,1450
+1,14.39,1.87,2.45,14.6,96,2.5,2.52,.3,1.98,5.25,1.02,3.58,1290
+1,14.06,2.15,2.61,17.6,121,2.6,2.51,.31,1.25,5.05,1.06,3.58,1295
+1,14.83,1.64,2.17,14,97,2.8,2.98,.29,1.98,5.2,1.08,2.85,1045
+1,13.86,1.35,2.27,16,98,2.98,3.15,.22,1.85,7.22,1.01,3.55,1045
+1,14.1,2.16,2.3,18,105,2.95,3.32,.22,2.38,5.75,1.25,3.17,1510
+1,14.12,1.48,2.32,16.8,95,2.2,2.43,.26,1.57,5,1.17,2.82,1280
+1,13.75,1.73,2.41,16,89,2.6,2.76,.29,1.81,5.6,1.15,2.9,1320
+1,14.75,1.73,2.39,11.4,91,3.1,3.69,.43,2.81,5.4,1.25,2.73,1150
+1,14.38,1.87,2.38,12,102,3.3,3.64,.29,2.96,7.5,1.2,3,1547
+1,13.63,1.81,2.7,17.2,112,2.85,2.91,.3,1.46,7.3,1.28,2.88,1310
+1,14.3,1.92,2.72,20,120,2.8,3.14,.33,1.97,6.2,1.07,2.65,1280
+1,13.83,1.57,2.62,20,115,2.95,3.4,.4,1.72,6.6,1.13,2.57,1130
+1,14.19,1.59,2.48,16.5,108,3.3,3.93,.32,1.86,8.7,1.23,2.82,1680
+1,13.64,3.1,2.56,15.2,116,2.7,3.03,.17,1.66,5.1,.96,3.36,845
+1,14.06,1.63,2.28,16,126,3,3.17,.24,2.1,5.65,1.09,3.71,780
+1,12.93,3.8,2.65,18.6,102,2.41,2.41,.25,1.98,4.5,1.03,3.52,770
+1,13.71,1.86,2.36,16.6,101,2.61,2.88,.27,1.69,3.8,1.11,4,1035
+1,12.85,1.6,2.52,17.8,95,2.48,2.37,.26,1.46,3.93,1.09,3.63,1015
+1,13.5,1.81,2.61,20,96,2.53,2.61,.28,1.66,3.52,1.12,3.82,845
+1,13.05,2.05,3.22,25,124,2.63,2.68,.47,1.92,3.58,1.13,3.2,830
+1,13.39,1.77,2.62,16.1,93,2.85,2.94,.34,1.45,4.8,.92,3.22,1195
+1,13.3,1.72,2.14,17,94,2.4,2.19,.27,1.35,3.95,1.02,2.77,1285
+1,13.87,1.9,2.8,19.4,107,2.95,2.97,.37,1.76,4.5,1.25,3.4,915
+1,14.02,1.68,2.21,16,96,2.65,2.33,.26,1.98,4.7,1.04,3.59,1035
+1,13.73,1.5,2.7,22.5,101,3,3.25,.29,2.38,5.7,1.19,2.71,1285
+1,13.58,1.66,2.36,19.1,106,2.86,3.19,.22,1.95,6.9,1.09,2.88,1515
+1,13.68,1.83,2.36,17.2,104,2.42,2.69,.42,1.97,3.84,1.23,2.87,990
+1,13.76,1.53,2.7,19.5,132,2.95,2.74,.5,1.35,5.4,1.25,3,1235
+1,13.51,1.8,2.65,19,110,2.35,2.53,.29,1.54,4.2,1.1,2.87,1095
+1,13.48,1.81,2.41,20.5,100,2.7,2.98,.26,1.86,5.1,1.04,3.47,920
+1,13.28,1.64,2.84,15.5,110,2.6,2.68,.34,1.36,4.6,1.09,2.78,880
+1,13.05,1.65,2.55,18,98,2.45,2.43,.29,1.44,4.25,1.12,2.51,1105
+1,13.07,1.5,2.1,15.5,98,2.4,2.64,.28,1.37,3.7,1.18,2.69,1020
+1,14.22,3.99,2.51,13.2,128,3,3.04,.2,2.08,5.1,.89,3.53,760
+1,13.56,1.71,2.31,16.2,117,3.15,3.29,.34,2.34,6.13,.95,3.38,795
+1,13.41,3.84,2.12,18.8,90,2.45,2.68,.27,1.48,4.28,.91,3,1035
+1,13.88,1.89,2.59,15,101,3.25,3.56,.17,1.7,5.43,.88,3.56,1095
+1,13.24,3.98,2.29,17.5,103,2.64,2.63,.32,1.66,4.36,.82,3,680
+1,13.05,1.77,2.1,17,107,3,3,.28,2.03,5.04,.88,3.35,885
+1,14.21,4.04,2.44,18.9,111,2.85,2.65,.3,1.25,5.24,.87,3.33,1080
+1,14.38,3.59,2.28,16,102,3.25,3.17,.27,2.19,4.9,1.04,3.44,1065
+1,13.9,1.68,2.12,16,101,3.1,3.39,.21,2.14,6.1,.91,3.33,985
+1,14.1,2.02,2.4,18.8,103,2.75,2.92,.32,2.38,6.2,1.07,2.75,1060
+1,13.94,1.73,2.27,17.4,108,2.88,3.54,.32,2.08,8.90,1.12,3.1,1260
+1,13.05,1.73,2.04,12.4,92,2.72,3.27,.17,2.91,7.2,1.12,2.91,1150
+1,13.83,1.65,2.6,17.2,94,2.45,2.99,.22,2.29,5.6,1.24,3.37,1265
+1,13.82,1.75,2.42,14,111,3.88,3.74,.32,1.87,7.05,1.01,3.26,1190
+1,13.77,1.9,2.68,17.1,115,3,2.79,.39,1.68,6.3,1.13,2.93,1375
+1,13.74,1.67,2.25,16.4,118,2.6,2.9,.21,1.62,5.85,.92,3.2,1060
+1,13.56,1.73,2.46,20.5,116,2.96,2.78,.2,2.45,6.25,.98,3.03,1120
+1,14.22,1.7,2.3,16.3,118,3.2,3,.26,2.03,6.38,.94,3.31,970
+1,13.29,1.97,2.68,16.8,102,3,3.23,.31,1.66,6,1.07,2.84,1270
+1,13.72,1.43,2.5,16.7,108,3.4,3.67,.19,2.04,6.8,.89,2.87,1285
+2,12.37,.94,1.36,10.6,88,1.98,.57,.28,.42,1.95,1.05,1.82,520
+2,12.33,1.1,2.28,16,101,2.05,1.09,.63,.41,3.27,1.25,1.67,680
+2,12.64,1.36,2.02,16.8,100,2.02,1.41,.53,.62,5.75,.98,1.59,450
+2,13.67,1.25,1.92,18,94,2.1,1.79,.32,.73,3.8,1.23,2.46,630
+2,12.37,1.13,2.16,19,87,3.5,3.1,.19,1.87,4.45,1.22,2.87,420
+2,12.17,1.45,2.53,19,104,1.89,1.75,.45,1.03,2.95,1.45,2.23,355
+2,12.37,1.21,2.56,18.1,98,2.42,2.65,.37,2.08,4.6,1.19,2.3,678
+2,13.11,1.01,1.7,15,78,2.98,3.18,.26,2.28,5.3,1.12,3.18,502
+2,12.37,1.17,1.92,19.6,78,2.11,2,.27,1.04,4.68,1.12,3.48,510
+2,13.34,.94,2.36,17,110,2.53,1.3,.55,.42,3.17,1.02,1.93,750
+2,12.21,1.19,1.75,16.8,151,1.85,1.28,.14,2.5,2.85,1.28,3.07,718
+2,12.29,1.61,2.21,20.4,103,1.1,1.02,.37,1.46,3.05,.906,1.82,870
+2,13.86,1.51,2.67,25,86,2.95,2.86,.21,1.87,3.38,1.36,3.16,410
+2,13.49,1.66,2.24,24,87,1.88,1.84,.27,1.03,3.74,.98,2.78,472
+2,12.99,1.67,2.6,30,139,3.3,2.89,.21,1.96,3.35,1.31,3.5,985
+2,11.96,1.09,2.3,21,101,3.38,2.14,.13,1.65,3.21,.99,3.13,886
+2,11.66,1.88,1.92,16,97,1.61,1.57,.34,1.15,3.8,1.23,2.14,428
+2,13.03,.9,1.71,16,86,1.95,2.03,.24,1.46,4.6,1.19,2.48,392
+2,11.84,2.89,2.23,18,112,1.72,1.32,.43,.95,2.65,.96,2.52,500
+2,12.33,.99,1.95,14.8,136,1.9,1.85,.35,2.76,3.4,1.06,2.31,750
+2,12.7,3.87,2.4,23,101,2.83,2.55,.43,1.95,2.57,1.19,3.13,463
+2,12,.92,2,19,86,2.42,2.26,.3,1.43,2.5,1.38,3.12,278
+2,12.72,1.81,2.2,18.8,86,2.2,2.53,.26,1.77,3.9,1.16,3.14,714
+2,12.08,1.13,2.51,24,78,2,1.58,.4,1.4,2.2,1.31,2.72,630
+2,13.05,3.86,2.32,22.5,85,1.65,1.59,.61,1.62,4.8,.84,2.01,515
+2,11.84,.89,2.58,18,94,2.2,2.21,.22,2.35,3.05,.79,3.08,520
+2,12.67,.98,2.24,18,99,2.2,1.94,.3,1.46,2.62,1.23,3.16,450
+2,12.16,1.61,2.31,22.8,90,1.78,1.69,.43,1.56,2.45,1.33,2.26,495
+2,11.65,1.67,2.62,26,88,1.92,1.61,.4,1.34,2.6,1.36,3.21,562
+2,11.64,2.06,2.46,21.6,84,1.95,1.69,.48,1.35,2.8,1,2.75,680
+2,12.08,1.33,2.3,23.6,70,2.2,1.59,.42,1.38,1.74,1.07,3.21,625
+2,12.08,1.83,2.32,18.5,81,1.6,1.5,.52,1.64,2.4,1.08,2.27,480
+2,12,1.51,2.42,22,86,1.45,1.25,.5,1.63,3.6,1.05,2.65,450
+2,12.69,1.53,2.26,20.7,80,1.38,1.46,.58,1.62,3.05,.96,2.06,495
+2,12.29,2.83,2.22,18,88,2.45,2.25,.25,1.99,2.15,1.15,3.3,290
+2,11.62,1.99,2.28,18,98,3.02,2.26,.17,1.35,3.25,1.16,2.96,345
+2,12.47,1.52,2.2,19,162,2.5,2.27,.32,3.28,2.6,1.16,2.63,937
+2,11.81,2.12,2.74,21.5,134,1.6,.99,.14,1.56,2.5,.95,2.26,625
+2,12.29,1.41,1.98,16,85,2.55,2.5,.29,1.77,2.9,1.23,2.74,428
+2,12.37,1.07,2.1,18.5,88,3.52,3.75,.24,1.95,4.5,1.04,2.77,660
+2,12.29,3.17,2.21,18,88,2.85,2.99,.45,2.81,2.3,1.42,2.83,406
+2,12.08,2.08,1.7,17.5,97,2.23,2.17,.26,1.4,3.3,1.27,2.96,710
+2,12.6,1.34,1.9,18.5,88,1.45,1.36,.29,1.35,2.45,1.04,2.77,562
+2,12.34,2.45,2.46,21,98,2.56,2.11,.34,1.31,2.8,.8,3.38,438
+2,11.82,1.72,1.88,19.5,86,2.5,1.64,.37,1.42,2.06,.94,2.44,415
+2,12.51,1.73,1.98,20.5,85,2.2,1.92,.32,1.48,2.94,1.04,3.57,672
+2,12.42,2.55,2.27,22,90,1.68,1.84,.66,1.42,2.7,.86,3.3,315
+2,12.25,1.73,2.12,19,80,1.65,2.03,.37,1.63,3.4,1,3.17,510
+2,12.72,1.75,2.28,22.5,84,1.38,1.76,.48,1.63,3.3,.88,2.42,488
+2,12.22,1.29,1.94,19,92,2.36,2.04,.39,2.08,2.7,.86,3.02,312
+2,11.61,1.35,2.7,20,94,2.74,2.92,.29,2.49,2.65,.96,3.26,680
+2,11.46,3.74,1.82,19.5,107,3.18,2.58,.24,3.58,2.9,.75,2.81,562
+2,12.52,2.43,2.17,21,88,2.55,2.27,.26,1.22,2,.9,2.78,325
+2,11.76,2.68,2.92,20,103,1.75,2.03,.6,1.05,3.8,1.23,2.5,607
+2,11.41,.74,2.5,21,88,2.48,2.01,.42,1.44,3.08,1.1,2.31,434
+2,12.08,1.39,2.5,22.5,84,2.56,2.29,.43,1.04,2.9,.93,3.19,385
+2,11.03,1.51,2.2,21.5,85,2.46,2.17,.52,2.01,1.9,1.71,2.87,407
+2,11.82,1.47,1.99,20.8,86,1.98,1.6,.3,1.53,1.95,.95,3.33,495
+2,12.42,1.61,2.19,22.5,108,2,2.09,.34,1.61,2.06,1.06,2.96,345
+2,12.77,3.43,1.98,16,80,1.63,1.25,.43,.83,3.4,.7,2.12,372
+2,12,3.43,2,19,87,2,1.64,.37,1.87,1.28,.93,3.05,564
+2,11.45,2.4,2.42,20,96,2.9,2.79,.32,1.83,3.25,.8,3.39,625
+2,11.56,2.05,3.23,28.5,119,3.18,5.08,.47,1.87,6,.93,3.69,465
+2,12.42,4.43,2.73,26.5,102,2.2,2.13,.43,1.71,2.08,.92,3.12,365
+2,13.05,5.8,2.13,21.5,86,2.62,2.65,.3,2.01,2.6,.73,3.1,380
+2,11.87,4.31,2.39,21,82,2.86,3.03,.21,2.91,2.8,.75,3.64,380
+2,12.07,2.16,2.17,21,85,2.6,2.65,.37,1.35,2.76,.86,3.28,378
+2,12.43,1.53,2.29,21.5,86,2.74,3.15,.39,1.77,3.94,.69,2.84,352
+2,11.79,2.13,2.78,28.5,92,2.13,2.24,.58,1.76,3,.97,2.44,466
+2,12.37,1.63,2.3,24.5,88,2.22,2.45,.4,1.9,2.12,.89,2.78,342
+2,12.04,4.3,2.38,22,80,2.1,1.75,.42,1.35,2.6,.79,2.57,580
+3,12.86,1.35,2.32,18,122,1.51,1.25,.21,.94,4.1,.76,1.29,630
+3,12.88,2.99,2.4,20,104,1.3,1.22,.24,.83,5.4,.74,1.42,530
+3,12.81,2.31,2.4,24,98,1.15,1.09,.27,.83,5.7,.66,1.36,560
+3,12.7,3.55,2.36,21.5,106,1.7,1.2,.17,.84,5,.78,1.29,600
+3,12.51,1.24,2.25,17.5,85,2,.58,.6,1.25,5.45,.75,1.51,650
+3,12.6,2.46,2.2,18.5,94,1.62,.66,.63,.94,7.1,.73,1.58,695
+3,12.25,4.72,2.54,21,89,1.38,.47,.53,.8,3.85,.75,1.27,720
+3,12.53,5.51,2.64,25,96,1.79,.6,.63,1.1,5,.82,1.69,515
+3,13.49,3.59,2.19,19.5,88,1.62,.48,.58,.88,5.7,.81,1.82,580
+3,12.84,2.96,2.61,24,101,2.32,.6,.53,.81,4.92,.89,2.15,590
+3,12.93,2.81,2.7,21,96,1.54,.5,.53,.75,4.6,.77,2.31,600
+3,13.36,2.56,2.35,20,89,1.4,.5,.37,.64,5.6,.7,2.47,780
+3,13.52,3.17,2.72,23.5,97,1.55,.52,.5,.55,4.35,.89,2.06,520
+3,13.62,4.95,2.35,20,92,2,.8,.47,1.02,4.4,.91,2.05,550
+3,12.25,3.88,2.2,18.5,112,1.38,.78,.29,1.14,8.21,.65,2,855
+3,13.16,3.57,2.15,21,102,1.5,.55,.43,1.3,4,.6,1.68,830
+3,13.88,5.04,2.23,20,80,.98,.34,.4,.68,4.9,.58,1.33,415
+3,12.87,4.61,2.48,21.5,86,1.7,.65,.47,.86,7.65,.54,1.86,625
+3,13.32,3.24,2.38,21.5,92,1.93,.76,.45,1.25,8.42,.55,1.62,650
+3,13.08,3.9,2.36,21.5,113,1.41,1.39,.34,1.14,9.40,.57,1.33,550
+3,13.5,3.12,2.62,24,123,1.4,1.57,.22,1.25,8.60,.59,1.3,500
+3,12.79,2.67,2.48,22,112,1.48,1.36,.24,1.26,10.8,.48,1.47,480
+3,13.11,1.9,2.75,25.5,116,2.2,1.28,.26,1.56,7.1,.61,1.33,425
+3,13.23,3.3,2.28,18.5,98,1.8,.83,.61,1.87,10.52,.56,1.51,675
+3,12.58,1.29,2.1,20,103,1.48,.58,.53,1.4,7.6,.58,1.55,640
+3,13.17,5.19,2.32,22,93,1.74,.63,.61,1.55,7.9,.6,1.48,725
+3,13.84,4.12,2.38,19.5,89,1.8,.83,.48,1.56,9.01,.57,1.64,480
+3,12.45,3.03,2.64,27,97,1.9,.58,.63,1.14,7.5,.67,1.73,880
+3,14.34,1.68,2.7,25,98,2.8,1.31,.53,2.7,13,.57,1.96,660
+3,13.48,1.67,2.64,22.5,89,2.6,1.1,.52,2.29,11.75,.57,1.78,620
+3,12.36,3.83,2.38,21,88,2.3,.92,.5,1.04,7.65,.56,1.58,520
+3,13.69,3.26,2.54,20,107,1.83,.56,.5,.8,5.88,.96,1.82,680
+3,12.85,3.27,2.58,22,106,1.65,.6,.6,.96,5.58,.87,2.11,570
+3,12.96,3.45,2.35,18.5,106,1.39,.7,.4,.94,5.28,.68,1.75,675
+3,13.78,2.76,2.3,22,90,1.35,.68,.41,1.03,9.58,.7,1.68,615
+3,13.73,4.36,2.26,22.5,88,1.28,.47,.52,1.15,6.62,.78,1.75,520
+3,13.45,3.7,2.6,23,111,1.7,.92,.43,1.46,10.68,.85,1.56,695
+3,12.82,3.37,2.3,19.5,88,1.48,.66,.4,.97,10.26,.72,1.75,685
+3,13.58,2.58,2.69,24.5,105,1.55,.84,.39,1.54,8.66,.74,1.8,750
+3,13.4,4.6,2.86,25,112,1.98,.96,.27,1.11,8.5,.67,1.92,630
+3,12.2,3.03,2.32,19,96,1.25,.49,.4,.73,5.5,.66,1.83,510
+3,12.77,2.39,2.28,19.5,86,1.39,.51,.48,.64,9.899999,.57,1.63,470
+3,14.16,2.51,2.48,20,91,1.68,.7,.44,1.24,9.7,.62,1.71,660
+3,13.71,5.65,2.45,20.5,95,1.68,.61,.52,1.06,7.7,.64,1.74,740
+3,13.4,3.91,2.48,23,102,1.8,.75,.43,1.41,7.3,.7,1.56,750
+3,13.27,4.28,2.26,20,120,1.59,.69,.43,1.35,10.2,.59,1.56,835
+3,13.17,2.59,2.37,20,120,1.65,.68,.53,1.46,9.3,.6,1.62,840
+3,14.13,4.1,2.74,24.5,96,2.05,.76,.56,1.35,9.2,.61,1.6,560
\ No newline at end of file
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/6/CIFAR.ipynb b/Building-Machine-Learning-Projects-with-TensorFlow-master/6/CIFAR.ipynb
new file mode 100644
index 000000000..f1f59a289
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/6/CIFAR.ipynb
@@ -0,0 +1,736 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "Using TensorFlow backend.\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "(10000, 32, 32, 3)\n",
+ "[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 1.]\n"
+ ]
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP4AAAD9CAYAAACcAsr/AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvV+sdV13F/QbY659vkSvjCmora201UpI4cNCMeGCJhhC\nUNMLlaAXIP65sHphwgV6ZbwSuTEIFwoSIol/UBMEo7HaKDGQgk248aKAYEst1MKFIfZ7n3POXnMM\nL8bfufba55znedvzfsn3zCf7WWvts/Zac801f+M3xphjjkmqis/lc/lcvrUKf9UV+Fw+l8/l/ctn\n4H8un8u3YPkM/M/lc/kWLJ+B/7l8Lt+C5TPwP5fP5VuwfAb+5/K5fAuWLwV8IvqtRPSXiOivENHv\n/cWq1OfyuXwuv7SFPnUcn4gYwF8B8JsB/E0APwHgd6jqX/rFq97n8rl8Lr8UZfsSv/1BAP+nqv51\nACCi/xLADwNYgE9EnyOEPpfP5Ssqqkpn338Z4H87gP+7Hf8sTBjclJ/4qb8MAPiP/4P/EL/73/jX\ncN13XPfrsn3erxBVTBWICqaobVUgB62EDvtMjMF8sx3slgwRiAgg+JbwX/3hP4Z/9l/9XZgiEJFl\nO0WgaveXw1ZVoAooBFCFAoAKfA/k97j9wK7h9+mfH/sv/hR+6J/7J+u7OZe/n1+PwMwg3PkbEbo2\nd9TsVDW/69s/89/89/hN/8xvy7/3z/E68czHe1ibrH99qS73yo//6R/DP/5P/+a4INS3cWFVzb/h\n8Ld7hQ7tclPu/O3P/w//K37Db/uhvPy97e0Nb+9/tn88Nf4W7xPtPf/4f/dj+I0//Fvu9Iuy3v+9\n3/lv3qvVZ+eelVOheFqy8+XvPl6hefvdPr281Lk/1bz71Gu+9/0+9Zr3QP/qNT/pV19t+TKM/zcA\nfGc7/g7/7qb8kT/whwAAf/Ev/O/4tb/h1+NX/7p/DKLDmdS2mw6IKkgJIgQmxVQCK0NUluudMT4z\nYxCDidoxOQX7mc68oQHEv3h1K5cF00kxtWsjxmbqBKN2vtr2JcZXVai4BhOf9hz5ucPw5M9GfPi+\nXyCfhPKpQlidblNr8SdRuGbTtYJ4yn79ttd6/hkIXmXZ+O3dc1a2P+gZB40Aa7vSy2L2YwRBHGq2\n7O3z5jEdv1jb4dgmBECD5b1eUXdV68Z5Zz3W2877mZ/8q/iZv/zX7j9PK18G+D8B4HuJ6LsA/ByA\n3wHgnz878Ud+j6kcP/Hjfx5f//U/gOu+Q5mhPKAsth0GLlbCJIKogpVcxS7F5Ah6wIEfgCcCc+0b\n4NHAD4AI3/8DvzYBWZjpgFe/t4FVVRL8CXyggJHAJxDpCWBN1de8XgHuV/yqf2R5qqPKHircDeBT\nDWwdnLr62cGtqRovQuuw/c5f+T1Zz/hu+R3y4sedNzPfWwXBd3zfd68mQ2I8df2C4Ano7fBWCNy7\n90t1+ge+57tKOGZ1joSB7JTHS70IejIhTWEqUaMjBcJNpiCQertkHepm/+A/+j34zl/5vfm3P/ff\n/ujd5/lkr75X+LcC+AMwk+GPqurvOzlH/4+/8VN5HHb9vu+4zh3Xfbf9fW+29K19DcDYufp3FiYG\no4DCzubsqNb2O1BJ1ikT+5yYItjnxC4T07dp92uz/x38BpQAPHAL/CNrA+zC7Mx2TuHifo3QClQE\nIroAng9CoDX0oV1oEWLJ3rEPNLZvoE7m70JBV8C9oZz1q9fs/LvfHTSU5dyi4Zt6vsb29+7Zr3vL\n6I3tuwZw51Z6qMeNXd/f22E/tL9FQw2N9Y5vp1fk9//u3/NL4tyDqv6PAL7vtfPSyQZAgumHd3Jm\n+wyGJMMrRMlBwRAE8KPQAfgF9GicEATafqjO+vGd4ACexigLKEMAuTDADRg0AVIvQRtYAXEprw1Y\nHYzxgKQu8a3HgLkLkVvWv3knbe9MyMQzVcfW5bnX79a/Kc779/G7e+LhJdY7K/X3pur371MItLve\nES7Htjpzdr5QEbQ7IN54/+7WBEFj/xeYXrXUfO8/yf6x73WI/kvtRfRHt2vef4xevhTw31pugD8Y\noiUAovMLOtDMtlf/ju7A3kjdpSPo5lhbAx33j2p+Abqp/OldL498P89+Guzfgd88slQveWXak85G\n/cUySosgEHfVn0DM2ZEO3RhBlApr1wK9aRZ+VuGlEL4y2WE/q5aaF+HkKb50KXAn3BfNpP89T2ra\nwGtAf+n7u+d2QYhWl+N5r9j3N8cd9A38QAe993oFlOrZ410cBcBr5V2BrwDGYKgWy6sy4AKAo3Oi\nwF/AB6K3UUg+vz4t+1i+D3guoEcAf/1dnFHM3If1JFm/M7z95Bb47CoZiCB0++KPwF+6aXu+UBWL\n6VfWR2eN1jET9IqmuZgAU3eWrix6vn92HD0t7kldEnxEeRPrL8IHC/g76MsTcBAMOFf538r4N+dV\nrVYBCdw49BK8L4B+Afthm/dP0Bvrk7d/FzAE1wjeCP53AT43xlcdUNZi+txnb0xNsAfrSmsIwrq1\n/bbV1gVdUnYgxL5A0xyoftGYvjOktvF3Pw7Wj/vU29YF7Avz3yuLAKD0Qao/FBEvrH+m6mv0BCog\nFPirTaP+L9nbr/t9Fj6yNn8j7j9GxT9T9VMGBdg743c6Prne2f0/5tlTY6uarOLm7Kd0//5vAf2p\nAPD7lwYAQNvozhvKuwOfVTDYWX+MagR00GMFYOto9+z81G2pqX3uUEsJHYLE90XMkQZnzHCelYai\nGN3uzX7eO1pnf21Olwb8cMqcvZVmB6ZDrT1zaHWpyWQfar4CtBEDdHOpmSptP6/fOmp241cwT/GU\nFJ2vdczzJ1zKPdDdgj3/UCyr63HWXNvf/TfA60z/GuhfYvs87toGHc+tneOIwkugP/MJ3ArMonfH\n/ZvZHngv4LcXMIihLBjK9oLGQGjF2VkT9AXWKF3lj70OmASif5/tkX8TdFB05xp5XZnNxBge36QE\njP5ADewJet+/AT5KANwDfgo9VaiUVpJ6xNKBg9GjKg3oIdiatiKi6yiB2/jar9c2p+Vgh5CDzDpz\nh/+NLrCUjwH94rvo4M/jLqS6BLu9xseq+i/Z/Sd/WXaPokJxC+C43pcBfwjgJLqPLO/E+E0lVTLQ\nt0IE8KTFCbUyfjv38JABGrNdHcyoDhO2EdRs2x6QoyqACozMFUyAEmMwgAUanXZpBf4S0OM2Pgr4\n9nwF/DNb3qrogOTwugvQ45ai8yrMueMPmbb7gemPNn0JgCPwz8sip7RqbppPtznDvXdwpvXr4BNB\nfwR/MPuicfUbnj/VGejO/v7q35bnuxUEJz0mr3FPff9Y8GfptiAawb2xvAvwu8Tl5omOCjMRJs0b\nlu8CYLne4QlVFAIHNbgYrjm+oAo04VBx9ykmQLDhM1WCglOAJNtTaSapXSgtQmABPlbmvzGEm80m\nSqmNQMwDIVTIXx451csV6OmPCLbPZ
9UWI+DC6uU31jVJ43iqNgrA27swQZA256Ec++tHgz61Kfjz\nRhMsas/aSA1c9+79Un3Ozl3+dtee6YIqa/kqg78F/FFuWb/tffM593pLEaCcNquAIG5bH1l+sfF1\nucKi5gkJBIAIQCSQ8KarGht1xheBeoCOVE9JMicQRpvokBUlgMSiCgv45SgLcyOCKBbgdxuf4rK1\nDxBI7RlUFGBYFCORkf6J2httoqnJ2HZ2IRBsfxAABfz2Xpbdrr6XOlkCgBLwUT2z+f33dzrfJ4H+\n8Nwh7M4Zv3ZeUvNfqs+L+4edJFy099HOLjl0C/T4/q3gf9Epms691zW5KO9u44MYxEgmZCIIE1T4\nhuVrHzi0ago4qGLCADJR34Ha+LI28LcZcEolvQOgpu7TzUtGqO5KpZYrHcAf7F6AxwH4C/NTfgMR\nGOAhYHHBRTWkt1BnA4Y006WY/sDyHfi+D1rpgTQrtJYkEwM9xchBdrODqq9Lk7UqfyLoj+DP4wMT\nLmi8ve9L4/ofY4aADrfQVTuKSnRhdQ/oZ999FOhvTPy3U/5XoOoDSgwSApO4Ol5q7Sn4l07v/x3U\nuhl/czU0umJ8AvQqAp2+dZAH3SfoyStqKomBRAhEZnerGgiMaakYV26BTw3c2Q73VH+xyjru7aNz\nPaVpRCure5BRA3568A/7oT739xNwOrfYkULKmM16XBcC9W4Or8rbvnY/AfQhtA/n53WbAoCzurff\nvDSc96pw6g9F9W2rRjt+mfHfCv5e3sL+Lw4bt/IuwD8tAQgLTgP7TiPntLF7B1oFa0lWqKn1CXY6\n4qp0QhMqjjIgDPfF5iZY2K8SmW+fCaTsjR4gCrWfjfmZsaj57Tlfk8UV+NNQz1aL8mG0l54N1P0N\nodV09b8JTq22isZpCv3yXRwT1s5EOLZrvYvb7/qf3w7007/5PTqY3tbFj9X8RNDHvr/U/FOXeVm/\nQz1PtI232Pqnf2uGWCppqqaJvI3sAbwT8DtcY99swgitBZTjwRyYkAX41hAFn7ymNtGwiGQrhPbd\nESRxgkYDw5gs6kUGfitsgoUK8GiaCTzK0C65RhWGBr0wgdYzRCfO34QQAAFk2hCBXK1bQZzglrbf\nWb1/FiQuFkfUFCG4kO+mtBU6OfvQwrg9yAf/OMA3rU697tquFbf5lPxOR7a8azuf/F2PN61qZs9c\nKni4zlu9+8Ad0IduT1UNdQeL8dfbGuTdgQ+ggc0PyDu1D62pwmz+I/ijacVtm9apKz4Pub1l/NbR\nXEqmXtvr47/lsN/8WMKBEh0xgNw6cL8ltWvF38MXsAquqEa1hTn2GExmj6sSiBSkYYGs4O5sX6y/\nmky9iVbY1rMffRNdY+lCof92Kf1Vv8LyrwK+6fDR1je3OKnHWzv/2bkvCqXDTjDusWba64/XAd/3\n3wL6HFp1NqHc/yZT9XvjdVaLLwhwwPtQn4iZAMIwozqvhHXPe7KzX78Jtf3lpwlaLbArGjrROr2H\n9SJm8ikC2vVMq013lDX9WLU726wq7tlIswAOcMBMDPXhT3bwm07XHr2RecQpRP6ABIs/66IttIod\nzZIEexMGR01gbdyTclDR4/nvsv4p4Nf9kgNvB/b96t1e4yWB0TXE5Sxdj29FU/3+reDv+2egr30U\n2wMA6Zub5itg/N7JVnvSAOFTdcP5BvKtTdWtpu2A70xfIDL2atqAM2Cpv7ahsPFvJKbb5hxKAd28\n0gVM7duzXREBKSyYhsWGH49visKoaPgmNo86bgVJPldn+9jPc7wijTX9VguIXwf7UcHv2+Zw0rJ1\nS7FZQX/OqAfAZ3V7hz5v277/kvPrLYA/q1v97XDLE8Y/Vq1f917o7r39M9CT0sL2oKgX4Y2E//7A\nD5DbJjobecexwBUDPS3AFyWY8u+EFQzW1N28PlaALPVIddvUZruJFtuhjWC7dCfifMk3L5SOB50C\n1g5LRDZsB4s0tDn79dKqfdbrUfgdmvOxq/iLyt9s/ThP+29OCi2fpuajtkfH50ts3zEaDH0P9DfA\nbxe4Fap68gJwq2md1emNDP/Sdyc1vWnS5fCkUkdn30vC4C7ok9zKyUgfwfbAV+DVj5dE/l94sgk+\nlh9DfOT7aqG8lswCICV4cG/rUMF8qP0Tbq6tfawu8f0B9P5/1DMqbYKg68l3ulwArnVygth1dQLE\nCO9sqmpxPdc8XD+BUozpr5A72vjd0YcbULUq54fSkUg5mtBAT+vWWyQrsPi4mqA5Pv9roD8Ff2vH\n04d4oZO/5rx7izZwa4oc/39Dde5IpA7we8fx/k9Bv2gGH8/2wDsB/7rvub/kxiM1kLOlzsoOHOD0\nHmqOruigCiIG93B/AZQBhkDFA4Tsp+DBYGWwDmyIEYPo5ARi3+fYZx9GO4iK+I/OGKDKEex9KyKY\nmTp7Yk7J/aV/n3RMqy5lyHOEEU8RgGYDsO3PcI2EZoS13ktCDyLk1F8+2PCUe7elLK+o+AL2twD/\nBpQ3wD8/+Ig+vl7upm1vmXLxO9SXt9WJP90ZWkge8nIviOiuje/trwj1XtOkqvOCe74JGX+fBfzh\n9qqyqzEhspiBhalao1CxE9sYG0T8mM2KJ4lOS2AhqO+TMngwBkbpAR3wweiH4+i4i2c5GreDyZ8r\nO3k8aO/4vo3cfasAMOBXudel4/kZ6Om4ZIKmzXVYnieYQFu9qUYU4loB9r7/atF1Z8XmLejvgf1j\ngP9ird7I/uu9zoBS3536Gz6inAqIdsM3qfuIIKl20dOGUNioz9vr9+7A10h9rSYAwL6NhlY0h1fZ\nMoQI72WQLzLBTBBneAZsmI/dHnZBwMoYOupFMIHYhIEGo4XtHo3vrG4hsFYPbQIpwBNbACUoOjMc\nwN8Xy5gN9JbVp/wKtV+aR2d8G/Kz56BJIBJ73jmX36f2pA10XuNI0x2AZw7ghyql+UqqhACJZ2qU\nf2Two/nxirrf7nDTf+ou9LIAeKHcFwDtuXoNTgVA/8UL9/qI+rxF3e9sv14jbPsA/dtZ/32Avzuj\nkalFQ9W2rMX2QI5Rx378ppxO9ujsDC3CYBaoWIQdMYGF0wtPAjArFIwRTCgCZrbz/BYK3KTmMjCa\nK1GWzhwRcQF+dzgeO3Rcu32XbH8AfQDfWBy5X0xgaIuQYnLQsTrzJ+CB9A4SboBn03HtX2TrTQHA\ntZ/19luXcAsFLfwjXjNd4Xpz31dYv5cj8FfPi9bzfWJ5mfU76E8EwN2LvvSnFdD36vPa8F4/P4LN\nYti3nuWbzKu/ML5n3xk0HPQBNXu9LgIA348hJhvWJ7DYlhz0xfhqAoBXtZ8iv378xqfkDh0F2oXF\no1MrIB44A9dCVG3KrHdccQHQU1sFu3fmj2sewX5cJqvA3xxuXgiwZ2idXgFnfAKmqy5EsOlKQM7M\nc9ALx0IZzvjEtgYBcwN+hE5386A8yQkUOoAonv2VD6I93gj8he+jw0NTOzsFXXm8Xigry8cj5H
67\n9rFONzVszramV9wIw7NyD/D5N0WbHHWsRQmAAP29+xzL+wPfAYdMdlHj0wyfootSeYlCwStnB8Hm\nzYv4/HkxB2GwPLEuqn5MKCFWX5mnOuIxe018l50fpbYjJvloJf44rgNgp66/C/CfAT4SeHYHG3uM\nfpTudU/TJ3wfhfObouLagQiEFTbHSKGQXH+Ag/XdBMt8CeEP0DIPlq7dMX/YviQEsn1O1Py63i3r\nh99lYcBX+vhRuNT9Dqp9R3nsddZf6vaLW+4C3m/WVXzFrbofAuB4rdfKOwG/eqaqAupsrwOkwNR6\nuTYxpha7rA1lMB+LTcNlZgPNkeVjvr9YQ5Dfl9FAqR24LVONKsjz8KnP7Sd10yPU5b6ophTo+8y5\n4xbAOfAd/AZ4G7EIgRd2d5Qc+gx7nEIDoNsOoWpmgdg8f3LtJpJ8JMuffI7+AH/0JSEQsMJCQ5U4\ntO9bWP+losHu0am1sf3dH7103VdA3377VtCf+gxeruHp70/te9xnfa2mOaj9r5f3tfEBKJt6b8tn\nqUWwMUNkYsQqt8Q+LVZd9eRkkFC14UwWYMwhMhXsMrGLYMoMJRGxyRdCvngHQu9otjqCGb3O+XM3\nRZrTJTQSChUUhwg2LTNAl2mzAvvnzxVqWqptt6yYtrQoTDZSPl0IiqExS3Gz8GB3iBrrc07PTQHC\nsexYCROvfAIk2idMoco8XMJyme+fQrAl/4BaB05tCvlde0Op0dVz1WhOxR3AE7eg6FmxAhrdSEDt\nHdT70mO0n7xoNwuuu/MUa2lXat+9JgLqWaPWQVQUz4g2Gy8qRPZN6AD5vN9Mqv5sjJ+sIJqr6AgT\nhGItPcuzTwq33ckExQugj+QaUwL0E1dfHguhIru9lIEqQHnvgQS9AGm7l3qLVKtDg6gJPIpKxmCh\nRZSPeQuCyPW3sqoLDKiBO6OwCvgEyuw8BM0RDGiZSY7+1AFUA/QuYJvwOfoSjj6FphQvavuSbjyf\n5ygM9EYI9GdRb9wAGzXUdx9lAn4xS9owrr8ntHaM90YJOT0IgHq6rvqfqvZ6/J0Lge55Xq/4KuiP\n3yRZ+B/7fBDAhT2VcOgKv7bvcOMDeLl8KeAT0U8D+Dsw3FxV9QfPzttnyx2XoDfHnIHeOyiLZd1V\nRVi5lqiDivG1VO1MQhFs72C/yo7rNPCn7cy+RQFYFI3xkaCXxm7xqrptbdNzDwJA/cIar6YDxoTS\nAhA09TcBH9Fa1WnLaeT16I42UW8rj2+AjWvWkl2utrNAw7chpWGEz8RfZm7PHG5dVV+z/pS5cwb4\nekYkdR5t7piN2PtttG+AfoRmEg5JKpNEPMFqTHjqgC0m7cLg8HRH0Ofvb/QFpNpIy1tZf9/Of41/\nj8pE3begnU5VasfoZgDW795QvizjC4AfUtX/96WTOuObI86G4JjMng/wY4h15OGOPpBnyOGb6aaI\n3HEN/HNKLnp5nTue5zRVlhUM9/4Tg11FD9ALtH2wsH2+2OyIBj729xI+AHbGV4o5APnAtdpuEwSd\n7Z0PV8DDO6pqe6kOdm7Kn5ayyGSeECWPigxBqc1u5wbEXs1DlysmidpVvZdMPyKYPcVXBzywMH5n\n+byjt10BtbG++3yC6YePPIzwReSiojH8WiMswAp0Wp6wb3tdjnBv+6dgP/x++c25kn/23cL6S33p\n9hwXAGHvfwzYe/mywA9ifrEE4zs2oGJAH85MlmUXgF5yLJ9BGC4YlBWV207OQe82fYHePqwDEbeX\nGgQBg6wuQjgwf63h159ytTcjI5cm44vGirjdRm4M2W3gA9tXV22Axwn7x9/EVUJa6wZ/vry/s1kJ\ngPW7qkexemQa673wyPalZamBXmwl4f5MsXSXNiAGs8ZBgaqEarZ3e65g+vQB8cBgxgSBMDNXWQjJ\nqH+02/I41HwlCzhvgXojDJYoOl1Mxnysw7VeY/z+G4Ayk1kj+HzzVv0uJA4e/49Q978s8BXAj5K5\nFP+wqv6Rs5M64zPBpbi9L6aI3zcJwnAAEUPE1f9Q83tIb6SX6jb+NFV/nxPX3YA/hsW1C4BBkdPf\nOkOF6Nh7rKG5YuGlw1D4CKzzWD09FIEiUQeK8aPjS6ijAa4VbBpyWxvgT4VBY/7kg7LNlyE+VMcN\nlu0q95qC248hHjW52pRAe5bWRsH6s5ky62hGrYqEDvq4blyaQuh2hishy0wYTAn6bZgAIACz14+o\ngbB4uYO7drsAWut3rOrt76mdSe2v93WKkxospeCLVcBgZff13VRd3ziKl+XLAv83qurPEdG3Afif\niegnVfXPHk+azcaPmXYGcGf3EADqTA/CJIbwWJa5CqAn+zfQS65zv+M6zcZ/njsGgI1MewBz+g+i\nQyhCAKxOvgX0AEIBJt9l9xGkAFP7QyTuAJBgCYD06yZAkg2dwV0ovAT4kvnub4APZRLa2npV86NN\nHeDNCEISS34ijTVRdSkQh8ki6bfo8w9qFSSsW2+LVqObli1eprx/xBqMtO0ZW4CfOesIVSjb+yC/\n1wKkM8Afvl+1krNaNoFwiJjLt0R1zv2nPS96OKJ883WfzugmrOI9rwLoLeVLAV9Vf863f5uI/iSA\nHwRwA/w/8Z/88dz//h/4Nfg1v+7r+WKIqvubSjwxJ2EHwKrgOUH7Xh5kaaqzA/9pXvFhb5/5nPvb\ntmHbLr71/X3D2La2BLbc7KtIC9hp+xJTa5FviwCPex9pIJN4RJxMjDGw62yq8rrMlQ2vcUYZMpn4\nqOCa8G6zL/RZ3m1Qe+UKE4wk5502TwJiFaFKFd4/zuQiEDWnaYB8AXs7b4ZGAyyA7/6Ee4yfayqc\ndN6IVRguAIaDfxsj+qC1nyo44i4WB6Uu54VAMqG2CqUA74vApc72/vbzOq+w/ksSwBmlLIkejHNo\nm8Pfogf8zb/60/i5v/YzL9ykyicDn4j+LgCsqr9ARH83gN8C4N89O/d3/si/0o4E4fFemBHqLLJj\nh4JVbDIO71AemUp6GUby48f9ikcH/BfXK77Yn/FhXvGFA/+yXQr8l93255ZALuBr2q8BdATow6cg\nHvWGBkTUZBdQRQyyWGjwpoKpbXKOdo+4bQPkCfw85qb2WiRB+hoQgAnhae0q0mCUjXwA1QHoncl7\nvWZuJ6bOBnwtQRC/BQ7aTDF/1eJWJKmr6XFs5ks5GZP5mVPl3zhiO9zJ14b+iBrGEozRx7Q0H3Sg\nNsDe2Ox9/0zdvwV6v8fLpewyG8pr39mFkBGbWdNzdv/7v/cfwrf/w78ij//ij/5vd+/6ZRj/lwP4\nk27fbwD+M1X9n85OHFtbctIXobDeEOvOwTqsCmQCUxQ7CUA7LMsso68BV6u/duA/48NuwP/G/owv\ndhMA2+WSwL9cLtjmjstm4I/8dJL+AslxaTTQQ7UJAfUgo4HBCqLhce6EwcM8/mBbFVgVE1bXiZqD\nP2X68GPsUwP+CnrmAn7+o9oWYDtb29a0gbD/gVAPI
iZgAb7cgr5PKpruQ5mqCfTjfrzHAn2Ba7FJ\nD123w7B3+gJ+V/ud8dnnWghD2IRsgL6bcm6rHITcet8zdl7m2IcG4G24Ap6Wo/XaJQDuq+Ja70Tj\nFUU7hBkRu2UKLRrHJ5RPBr6q/hSAr7/pJqOAb6mp4Sy6yv/oQORr4qifP0EZGRbA17b/tD/jw/Vq\nwN+f8cX1Gb+wP+Mb+xWXqwH+cnnAZe64zB3btuNymU3N7TH4BXybmFPgj+1lbFC26Dj1mYXM4XCy\neg8ohComQKDpfJxzYsrux4SJHcyjgX/kpBmmUdGB4WdoxzNSebmHzJ7H1PMM6Q1AEOUUz0zMeci/\nv45ChOPOYyT68N3Jtlk/C/Dt2LmsOR4LQiYaelfubE8N9MPbeWOGKHs0ImGy5WHIQKYDq3ehmH2x\n7d1T7QvwK7cfvQjWt+8LgPvcb578XJsw8uk1IVhAr+G8bhZ8SnmXyL3O+GZaGihsLC0A7gARxZ4d\nWCFitn7Z9npQ+RWP12dj/KsB/xtX+/zC/myA3y+47Dsedgf/ZeIic1144rAIRcTrkxboY4aeqgID\nPqas2UG34ck+qH80v9vnjn3ffUvYIwgI2sBu29GOgcbcDfzJClOKbVUg07SLJSKvfcjt0hglWcKI\n0wfRA6P7BRW+AAAgAElEQVQK/KHim9ANxlfMg2obz9y/I3/Pyf509Lq3E9HAD/JZhN3GD+ATJjNY\naGV8u1m7aIF/Ga/oWvWhGreCgU6+cyHQNJ0wL/TmwW4fMtX7mF0HzX3fxSoEfF+rfe9Zcy+VdwF+\nZ3zDunU6gatd3lAS9rQvcSXTVEye1dG6it+B/+H6lGz/jeszfiGA3wB/nTsu8oCHOXGZM0EegOnH\nULFJPn4cAqCiCinVTTibDh6pm2roqEQ21YAI+37FlRljj7Fpe5EEdSEyHPj2GcRgduAvS7T2kQOG\nCsVEXG8b0yqOk3rYg6Fs6kNnfQdFV/eD7d0/EcyfY/faRwZqCNRqQeHz8mMHeAN6BB+BipHbOKi3\nizefq/rdwWdj+ZJ2fw/nDdAkJx80mqwBtcotVnT93/dWxx2dfl8jJyEEGvqXMbfeOBV3H1fu+zgZ\nzltr/PHlfRi/Ad9cApYvX5Vssom/DHWvvs4J2XfQPu0z54mKX4Lg6fpU4L8+4xvXJ/zC9Rn/3/UZ\nDw9XPOw7LtMA/zAnrlPwECp97xBaDrJQ8ynA3j7WCQfmmP6iqVT94cksPNMPIpcfE67DOuk1nIN+\nr/AbMG8enTZMCLgguEtL6im7PRTZLlfDm9SGMJnZHGAMH7Y7ajotBl+K7cvOD+avdpcmAFbgd0Ue\n7gnXXOAxnG/3Om6q+hQxHwH6UvM3NjV/hknE5dzDzbW1/gUQqQAVawEG5FK1134F3+bfmhDwMOoO\ndj38/uaCMLJI4eDmSXxHOAL9ZHZennfSlq9Iha9kmWzVCNUZiFC9jBZjBVElkoBO6NwX51502FJX\np4E0mCKcQTmmXbHzFkI7IXNvrNcYoQmEAD48dqAEArIHpMNNjZV5MMY2LMknYAxOniSTB8AbaLgA\ngWJTYAMdbPrVzl9dYccuHcHGW9sevfqUzxc2rnnjy0u/57CcRT/O9EV0T35vK+CWSdGM+GMmmar5\nsvVeWyp6d2CiTBXECfXjFUYN3Ai9oY77WR1MK3RC7a4z0c70x0I7Je8FfQPwl1JGzjr82M/wOnXh\npPXzPOzaVX+0F8o7Ab8fuRJH7KAfaX7acNlMKaCwY02QKnqsPjpAfZiwg97mlxf4MypfJ1S4hgjD\no681ew4uBMK+X461edjdmA/gb5cNmyoGBjYyQWDj0ANgAfkkJFbFcNBvkeiSGDZcV959ArWhrjDq\nik3LLl9BXw4tBzzW9othuD4kN2Nas893ME9+BecsOQe0qeghVBtrAd4Zb/FqavhBEFSLFtiPNju1\ns6MkwAOEugqAOi8hehAeHfx1zfoNEuDr3w7fdbBr1em8WH88/r2nWwsTEr5sW+yfLpvV9l/Be5Z3\nXyabmDByAUpktBkLQadAJmcIZ7Czyr6wfAkBOBCL8Zm0wjxHAN/vjcb4QuvssiUCrYJ1wsOvUiMA\nEVseFQ3GhxIe1ObYPxBM9dfwBzBIhmUBGhYhuIEwyaIUw/uOjP0jpCOugz9AH+Bq//oogpKmj0Td\nkSkJ5FmgP9nusuY0qICd0hqC8eN9WGUK/GG3JlybEADWbVftc9Yj+rh8G53oP/YWqL2A9+2/eF1n\ntnIwvuZV4nHOhMGB9fM7zSbQ0BhOUXgO+rvlgHBrR8rfH6/1TQX8NQc+QT1/fWTXZiWbobfvnnWX\nIDnk1NXyYHg0tj9hfLZrsjYnWmbyjWi1lt++jVdHzH8O7R2CfGwqbAGfAGd95JRZuPdZZAPSJ8Ag\nHm7gkIcH+5wEtuFLk9gFCdUGdgoB0I9dh9FQ99s/Uuw0obsN9SXLz92GFKWcd3IC/H6cEY0qbfJN\nsSs01PkCfWCrR7cuzL9oA7RsuQ8/YmX+g6WTzLt4GM4Y+siIadNjObOzt1/tFOh+WtXhBvir4KgW\nWL87snzfFrrR+oIubbtESLSfvFbenfEtgYKBXmCOL/UotzmGZc8NffDI+FqdrHc+0glA3Ja3+7Fr\nFsyUC2xYp3DQJyOGStuGrWZj/7k6ukzrQAI9mJ4UIJ8lxh5SehFb1SIYv/0UipiSPOwZuxzDup9A\nP2xruFB8a4BX95sofLrqnBZHIKbGX6/XA+h1EQS1r+ndLzX/AHwt0FWHDUEbyrl1+BXs3jewgp9R\nLB9BO6ni04p6PexXxKCuKj9pgZn6bzvswzxZeb+HHJ8zPer8/s60rlVPqj3E4PUSAtR/lw/g6v4y\nKuEH37SMH0E7Bn41AKhaIMywDDyaL1t9WG+vF+edrRlTKMeeLKo+o9n4Lkjy5fiwV0zuyeAaT+YR\nanJf/CIEQoIe5KAnS/Lpzr1tG3iYM30CBLhHugNZLIGgCFRGMqko2ow52weRL0BCuZ/gb53bti4M\nCJgqIM9CZBbLtElM+zXB3mfX2Xh8i8iDJvgzQUmCHgcphcb01htfUu/hp6UNDyzMXgLArlvnRc9A\nA7Eu/5/b+Ei2vPm+XQV5DWSfO4bf3mgU2jSDBfhxxi3T93KP9bOxjmyPQ1vnpQlvhf47MX7fD9+7\nVoPBJeswT7Z4lotQ9YPxo0W1WttKDLuhVP2hPmTGoeqXjY+MB9htbNo92Hvb2n7TBOK4AR/O8mFF\nkBLGGHjYLtgfZgoJyy3AuUQXqdgL4xg1cNC1obJJFSAD4gK+b+P42NEX4M/p90Sq+jFlucfYRyKN\nHowjUEx4XRCGRGfVehfZ1VQd6E3Vj/eO+p4iSg1dzW/HjfFTGCwSovrTwr4pmI5C4Pb8qOIiCLT9\npvW1Yv8V/Mv9k5COwF/uupR73vz4GxbwY23Q/F3Z/1A0R/DL5X2A395UsMziUAv18otHzC8esX94\nxHx8wv74
hPn0jP3peTHtjvtdENTUXmDzNfnsYwOI5myDr7OnGB4JyLGvPtPL2dYGAzRVNxWFTMXO\nE1eaeKarZfcRAokBfxsbtjH8U/t2X8062JCeP4+eqMCE9BtEBxTxaU42i9Y98BYNOOe+OOOiU5EL\nwMHD6rdtNgFKJnYhg7WqmSqqraOtJYWvNqIhNLOz/Uth2xT+IK74prF7THyq7SoM4G0U6oYmMbRR\nDS0H5M0wbcCXVvY+EyLrcYq4/MEC+s72+V07DuzGI4R/yIXxqW0fbU3tudP8bRVfAiPuv7ez8j6r\n5baoMxWtFFmdYfcd8uED5odHzA9PmA76+fSE+fxcnSkYAchtMFBEexnDAptGB3LgO9uwd1xOkN/7\nuKqtcI++yYAY5tqx4xkMErb5RoIV+NvApQF/kAkk9u1oz2GNY5sCfWl04Vgzb4blDphQH3YL8EdY\nbXnho2OwLxu2bZsF/YjPv5/TMhFNTSZeSjDJ7UvNP+eptIK5s3eee6MFRL69sue5v+doI7/nwurt\nUz4ISdOk2/lLA988RT8+nHsDpiYOQg52kN9oBd4zNZ5f87dx6RvQ45hOK27Q4/jt+Ab8byzvv0y2\nADIF+3Xieb/iet1xvV5x3a+QD4+Qx0fMD48QB788PkOenlFz1Smdd7FvkjycQ5ZWa4CxeUc0VtXG\n+JW/rs9SEwc7u1CIYJ0M2mnzDPYpYJ0g3Q1AopBdV5bfav8yLHnERjZuD47ncAdIsnMr4evwHhWd\ne1eb7Te1gN/j6QP8FpfubUCMMYY970VBc9ooKDw5ioZZsdqOL7Egte8SxAgBsKrr/fzcLoxf2ZeO\njj1q9yyGPTB7G33JCM+DkHixX77h+9O2uAH92m4GeF1MC3LpF2SVwIWDvmVw1gQ1AHdeV6ShaRA3\n6tcbyrszvohi7mKAf77i6fkZz76Vxw/QRwO/PD7Z5+kZ+nTNgJzItmqpuT3bauqU5EC3EM8N1LQA\ntOw71tElmF0qoCYEADngK1KvHFsiiomJqyft06mYLNhZMLZbxr848C88IIMrhbhH941IJ+58kypu\nvn/Nzh2OuF0F1zSTKpa+R+RZPnsHoav6uoUDkDLmj7UWIgX85j2RXLzGO/BYGZxSw+rMD5TmGl28\nGL+ceZXroIb4Vmnoqv7C7H3UoYR4CIio+Vuhf6sh6HJWAT78Hu0sbVvt2kFT8aGJz5jXkO++x+Uv\nGXbsx5pCuX4XSV7h93lLeV/G11L19+s0wD8+4/HpCY9Pjwb6p0fo0xP08QnqbK/PzzkdU0ZM0FCL\nQx9AePSso/nceHhEHFqcPdD2FaOFoEpT/YcCU8skCK9OzCycKmZ7y7RAGRLsJNhpOruXet/3ZQzo\n2IBtgIdiYPMX33tOgJ6sns3miwxFlkJccHVmrznxa2acPiGFmaFDsdGWWpI68KcoiCWHIxfDVMvD\nfVYWVR9NVU/WLpPMztE8NzCdtj2Vf6DYv9v656p+5lMIYSA9G1C65rKBVxgfHkZv91cZ2K7WpIkG\nOUQ/R30f4A3M2lEHLkq5D8HQ9CnNOQUxrOdAb3XNGJK34f4rsvF3wb7vBvynZ3z48IgPHx6hzx+A\np0dogP/pCXDG3wYbcISBoUv+ffWFHiPvXHjRN2LUUJ8DqTF5dBoDe1f1A/Sav4kQYbPzFbuKA4cx\nIWCaZl6MgctW6v1lK8bHtgFbD9dlaKzx7SXtPjTGp6hrSyHueQVr+S6xlXliXysu30DINjXCzYsI\n/BkiYJYc9rQ63LL9sRz7V9bX7XNzZDZ1vwO+/d4ERXPANvav5CPReYBy7HVVv9KZZeRl/qR5+JdH\numf5n39777tbx15s2/UTj1Qjc3kitdyaofE10OeRGwZ+MXIVP6YEA+UreEt5d8YX8YQU14nnpyse\nH5/w+OEJX3zhoH9+BJ6fgKcn2z4/A8/PkDGwbQLISBDSGGXjO3OSwhxnru7nv2icUN9FzJPfBEDu\nO/NTML479uJTi2IQSMU6uo/r3wC/fUhs+C4SgE5fOTgap4M+OkAPJFVX52Ne/3Xupe5Cb7ZhmhAB\nYMYI5nd7foqCRcAyQZOPuvjSz/VwHKXs9RaAgxA2JQQWdV/77yrx6uLZ7xoDChi2WQEf2pA2De6e\nS+/I+Uv3bKA57pUGUedGf8jfdjX/rP0WFd+Bih57d+vSI/8vbPkW4YKI49doVEWZa6+U9wG+J9lV\nV/V1KmS3IBnZJ/brjv16BfY9PzonMC3qzNPMZMNLfHxu+JUZzyy+nXhm9n029kZ46SvfH6t4p5ca\nysvsWoSh7uQTD/8VgDwNb3fkhAYRqbWf9x1Pz89u0/tUXQJIBfvlAfvDA+blinl5wHx4gOw7ZM4D\nZdLit1gdVF0QFOvFIhdd3QcQKsOqkncHaaatNm1lbpunynZTBr4sWPbo7nCjrDZcy0o/DFcefM6M\nuKt9H8eRSmvkb0vFr9JU95PPURPov+sXWrX2zsrdICgQ5xmaXzRvff0p+sMC/laNUuMbn+uRz9dn\nJQd0rnuvaJN0ivF7WPTb+P6dgK9SYk6nhcoG6Oduc+/ndQeuO3TfgX3mR6dkhplI2yUSmoNlwAnA\nX5nxTAF6wpXYPfzGstHhLOMNzKkn5dxjNSvC7H3CEDKV36PybNwL9VK1JH288es+8XTdsY1niyMg\n0woggv3hirkb6OVht1mH8wEqMxewJJ/H3z/GYHLesVUr+cYyb37NwLMEwoQWRJXrb4yBIYJtWK4+\nC3K0uIEwqQxD0enQIu/codeESOW/N0ADnblXxh9kIc0mKFyALB59lNRvQE+nJ9p38XKO1Il24wb4\n7sNIsZr7LgziOG97PK77LBqAn1Pz7t1PQasovgU9ZXs3KWERra7C5fXac67Xebm8E/BjR4vxpyWL\nMPAb8HXfgWsA3pg+ga+AsgOeA/SMwWIMT5bgwsBf+5vjNT3+TqoJfI3AHeRY/abA7oxPHfwCi5pJ\npsECflXYQh7PV8sFn89s0YH79Yp5vWI+XCFzz4/OCR4DvFmADfsnxvyXYamFdsr2txDjfYmNqKnJ\n7BGMnH6Qcr4VS48xMLRy58UwH5PlPeylx1X0TMOhORjw/RNZhGgFfuwPB//oKj4K+MWJ9Smmj6nS\nHYUhjNvNsjNWX+zbdALegB6Lml+Cfq1SVwL0Zhu5ESjV91j3vsAaFz3oOQ7u+F2Y9Ouy2Ks78C3l\nnVT91ohSHVV2wdwn5tUCeOBsr/sB/CKp4k9RDLaQVmZLS52gP/loZMMhW40FmR2HTM33DmQCIPoN\nYUiM58PDcsmZHxBp6ncG9tj2ep14omum1YLaXIO5XzH3r7lqb2DXOQGZgAjGtmETy/e/6YYBdacf\nlS0LVGfz/RjmEzHtaWZev2lJQXhAh2IoQ4fZlBbPiFT3B9tkoTEUmwNf4A5PkgKfWmcrT3uBPtq3\nmw2XFtNwBvpQf8MRGGP5YzEh4qGpnjc0oLTxSxta2ufoq2jsaRtdA
L+wPnQBfaE4hEGTNfF9u1cH\nf2bVcU0kQNrt+w7/GpnzMwP9qfYrMh2XN80iB95Q3p/xp/q8ewd9Y/xQ9XWfwGxq/hRbrUacfcgW\nTwjWuhLhCsKVUFsiXAFg+BrwgyHDxv9p2HEAXtU6eVfhu6ofjA8ft0cP+um5AFUtrRbZS45MP/v1\niut1w9z3FfhiwFcRywSsF1xUIAAuTCDhrF+q+e1fVDgTbE5ry323iThDDPQbhgGJOTsmHxifB2NT\nO1+BHB2gSGLprzBV/nJBeNBOY3x20G+bxTFs26rm0wp++/hICmLYte0jQFNoMuCtar9VrfH1AfDL\nfJbj77oQuAH9kfUL+Ii/l1zw/1por/qsvLTHC7gFkhRvq8BqbN9G9eqZPkrBr/L+Nr6s4Jd9d9bf\nF6Y/sr649DPbz0HvTHMF7EO2Ak8dw1Vo9nF0BrYBqC+gqRW0o+0YQHPuUU6/DeeeE7n5GrQWjhRV\nWA0c9DIw9yuu28Dz84DsYdfvzvQT4VF8kOmTYWC23CBT97VFoAWrNVU2VX1Pl7XvJmj26xW62Ump\n2otkvIOxNINZMJQhDnqBLV4ymDGlFvSI94fG9n2+PDftYQwzG2w4c+DBgY8j4EOg6JrjEOoTroLo\nqiclCP3hUTZ/HSfi4qZnjH8H+AV6v88R9NEOVR2/RAd6nZSqujP1yvZU59Md+CauDfnaPPulCXx8\neWevvp469+bVVGFNxt+B3bPtTnFTwcfigcOWcIXiqmigV1xhx7wNjG1gXjboZllxSW0YkB3wo73I\n2CbwFe7RJwtwCZMygnky3bSF0JreL5BBmDvhyoRtMC5MBnoJto98/QIKb7wjgpis3jJz6LCDPjtW\nqLlh48/dGN/DoBXhzyCIOwmjrDa+D2WagQGJEY9wTuY/LFNpjyG2Pe/9xYOXLpszfnPWHRk/2gxK\niLURIbXCctf4g5ERwlAP+8na8T71VuVvRQF7F16PJIEj6NvvFznjtezOvL4NgUBUanp66fE6dhUW\nwtuieIv28yK4IzXul3cK4ImXaKmxhn+2AVwG4WEjPGwWQe8JtHJr+xzvuiRs5efCVdVArw54P95V\nMQme3mraBBdqH/iwIGh5kQq4VhKCqn0c8AF+KMzj72pciiStrk3+9JGZdhdb3PN5v+JyZTyyjaGT\nO+KYB8a42vDa2MwJKqli+BClxQIIGLOp2MIDMgQim48quBVuiLZrES2JNO36M/MTSPhVIl6iq9w5\nEtAcg6HmD16iFS/bhoftgofLJc2BFfgGgBCEKgIln0PgQUvVn09Q274LARcZefNL7yr+BUCR78BM\nKgIgRBns5HR8u5++gDgu0JWQ8B+dVfUMmQtg60dZlTOJ0O2lQ1WO13mpvNO0XFn2mRSDgY2BywAu\nG+Frm4F+OtAFDFHr2AC787YAV5qds72qA94W5JhxTISJ6QLAPqGxR/orxwQ8Fy8ApLYR2okewa+6\nCKBIx5UJOpoaDJSEN9OglvJ+YhtyJDbVnrk84vvYsY09035BIte/5RtQGOg28hVleEDGlhN0Mje/\nd0YVgU5vjwB5Zh0q8KtEEpHIONxBHzEA7MNuMX/CBE/OUXCmf9i2Aj46+Ckn9AizC5zpwthYUbLP\nNLYrRd8FQ9MECTk8mTZwFwKu+ov7UcjNpHg50j1lL+Gngb1rASEp9AzkDQEvlUUDeI3JbwTD26n/\nVeAT0R8F8E8B+HlV/dX+3d8D4E8A+C4APw3gt6vq37l7jdT10UCvCfqHDbheLP/cBGO6AJg5i97X\nzotZcLBppBZ2aqC/BtBFUwhMVWd7Bz3g01kL+Oq1C9DntjF+mScO/BBA3UTIwJZFMV7Abxqtz1Vw\n4A/y2YJcY945s293h2DEovsCH+x9bkPk7GMIDQgLdAhUNnNcRiwAeYSXqC9iogfAR+7BYP2V8eFP\n0nPcF9u7ek+8zE8I4F8uFzxsl4rAS8DXcKDwjkmUU42n+vumUveroWlV59Gv2xjfgb4C3+45xa8Z\noCePoGsM7y122p8V8JBvrEyfoH8LAA9/9+t0LN8M0nl7faxaf1bewvh/DMAfBPDH23f/FoAfU9Xf\nT0S/F8C/7d+dFmoIYbiq3xj/YSPsFxsr3tW7WOTeV1P3MdPy8gw1BuCJDngp4Pt2ErC7ut/VfCUL\nua1s/Gj7lIx/o+rP7v0FIgmiMdIB8tQsWQrNwmbXXfdZoHeQjjFyPv9l2zDHDhm7O/cQKg5YFTFz\nK+YkiC8woTwgIyxTqo7vbClzgoRawo4+h7/yDcaUZdNm/D2mPc/LAhcZsOOfsO8fkvE3d8TG3PvQ\nHqydJpk/Jn0kLFAfRsxOnphvAC0jG4Q2UuHRkkhtqwkBau/LL6yemDUnTKUd3bZeh0UYdOYHzkG/\nPARur32n2HOeq/oL+A/btyn6bwC+qv5ZIvquw9c/DOA3+f5/CuDP4CXgN1WfXdXfyBzsDwPYN8Lc\nGHvLl2MS10DPYMu6Cx9jFmBS2PGl5k9xwIvUFiEgnE0QbF/AF6IV9A58caY/gr9pePmE1PaO4K9v\nTGjMsPFhHV1leoJOG/66jg37Zp+5b8v9AoQR/76Rsf7moNfRh/sC76GhSB4X6NdtTyyawTGoBCe1\njh23SD3LNTAiYGfbmqofjN+BX7P22AWzmUlux4mlX5NDx2/m9vIGom7lZAx2ry35uGOYGjPahdhT\nlVEKFMI9ANX8jXSU9v2ozRmgj3R++Fsf3bsL+tNb3OhDbyqfauP/MlX9eQBQ1f+HiH7ZSycvwA/n\nHqszPuFhEOaF3BYNkNgEFoUtjhizmUQFwoo5CTtMxU973j3s5jzzzLGE1b6nAr7lrHMBQ0fgq6/h\n12172wKrQ4VCqtOq6gf4U50MVZ8EO2IIa0Kng3579vn7G/btgrntmNtsNyn1OB1XsGXElWI4rgRT\nrj4EZF6/HP5TSaY/bvvqwcGqxdLcnGgHxh+H/AMB/sslJ+HkkCzViEDOfhSBzgGh2bLrIkVYN737\nKEeQe03wcfOGV+BTzEAMX4H6aAZRRmNnWy9gbkI09rrG0c9d2PweDE80gHvHd8uXU/l/sZx7L2oY\n6TLTUvVDzZ8DmBtBNsuKmza9sk95NbXfFq0kqBgTFOMH6CUZ32xoFwJY2d62ajZxMr5ZU+rLVSnY\nHWEB+M76+VAHG7JYuGCP+sb7m6jNTiTLdwWZtsRzsP1lPONh23C9XLHvF8x9r45LXB0Zdm8hi3PX\nGI5rPichsflNU4zhDjH9Z6CPuf19qCwEWK1TX069XM/OQ3O3ZqpcXM1/2LZlxt2SYYeM6S0V2IQO\nXxOvAX/tYBVzEXXLVk7fg68n2MCe2wC1qjO9eFh0tWky+EL9lcSlJixhMTduzIMXwX/yN8U6Tt9O\nW+ZcHEH/CQLgU4H/80T0y1X154no7wPwt146+Q/9wf8o97/+9V+F7//+77NFW5kwNsa2MS6XAVPm\n2z8SDAspgbgKN2C+gN6J
[... remainder of base64-encoded PNG data omitted: inline matplotlib figure output, the CIFAR-10 sample image rendered by plt.imshow in this cell's source ...]",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "%matplotlib inline\n",
+ "import glob\n",
+ "import numpy as np\n",
+ "import matplotlib.pyplot as plt\n",
+ "import tensorflow as tf\n",
+ "from tensorflow.contrib import learn\n",
+ "from keras.models import Sequential\n",
+ "from keras.layers import Dense, Dropout, Activation, Flatten\n",
+ "from keras.layers import Convolution2D, MaxPooling2D\n",
+ "from keras.utils import np_utils\n",
+ "from keras import backend as K\n",
+ "from keras.utils.np_utils import to_categorical\n",
+ "\n",
+ "datadir='data/cifar-10-batches-bin/'\n",
+ "\n",
+ "plt.ion()\n",
+ "G = glob.glob (datadir + '*.bin')\n",
+ "A = np.fromfile(G[0],dtype=np.uint8).reshape([10000,3073])\n",
+ "labels = to_categorical(A [:,0])\n",
+ "images = A [:,1:].reshape([10000,3,32,32]).transpose (0,2,3,1)\n",
+ "print images.shape\n",
+ "plt.imshow(images[15])\n",
+ "print labels[11]\n",
+ "images_unroll = A [:,1:]\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Train on 8000 samples, validate on 1000 samples\n",
+ "Epoch 1/300\n",
+ "8000/8000 [==============================] - 35s - loss: 4.8452 - acc: 0.1071 - val_loss: 2.3276 - val_acc: 0.0970\n",
+ "Epoch 2/300\n",
+ "8000/8000 [==============================] - 31s - loss: 2.3178 - acc: 0.1036 - val_loss: 2.3125 - val_acc: 0.0930\n",
+ "Epoch 3/300\n",
+ "8000/8000 [==============================] - 30s - loss: 2.3068 - acc: 0.1046 - val_loss: 2.3099 - val_acc: 0.0960\n",
+ "Epoch 4/300\n",
+ "8000/8000 [==============================] - 30s - loss: 2.3049 - acc: 0.0980 - val_loss: 2.3094 - val_acc: 0.0920\n",
+ "Epoch 5/300\n",
+ "8000/8000 [==============================] - 31s - loss: 2.3023 - acc: 0.1031 - val_loss: 2.3088 - val_acc: 0.0920\n",
+ "Epoch 6/300\n",
+ "8000/8000 [==============================] - 30s - loss: 2.3033 - acc: 0.1044 - val_loss: 2.3094 - val_acc: 0.0900\n",
+ "Epoch 7/300\n",
+ "8000/8000 [==============================] - 30s - loss: 2.3027 - acc: 0.1002 - val_loss: 2.3078 - val_acc: 0.0920\n",
+ "Epoch 8/300\n",
+ "8000/8000 [==============================] - 30s - loss: 2.3037 - acc: 0.1005 - val_loss: 2.3083 - val_acc: 0.0910\n",
+ "Epoch 9/300\n",
+ "8000/8000 [==============================] - 31s - loss: 2.3018 - acc: 0.1007 - val_loss: 2.3069 - val_acc: 0.0940\n",
+ "Epoch 10/300\n",
+ "8000/8000 [==============================] - 31s - loss: 2.3007 - acc: 0.1061 - val_loss: 2.3066 - val_acc: 0.0910\n",
+ "Epoch 11/300\n",
+ "8000/8000 [==============================] - 33s - loss: 2.3005 - acc: 0.1069 - val_loss: 2.3051 - val_acc: 0.0930\n",
+ "Epoch 12/300\n",
+ "8000/8000 [==============================] - 31s - loss: 2.3001 - acc: 0.1085 - val_loss: 2.3013 - val_acc: 0.1050\n",
+ "Epoch 13/300\n",
+ "8000/8000 [==============================] - 31s - loss: 2.2989 - acc: 0.1156 - val_loss: 2.3052 - val_acc: 0.0950\n",
+ "Epoch 14/300\n",
+ "8000/8000 [==============================] - 30s - loss: 2.2936 - acc: 0.1141 - val_loss: 2.2973 - val_acc: 0.1090\n",
+ "Epoch 15/300\n",
+ "8000/8000 [==============================] - 30s - loss: 2.2880 - acc: 0.1151 - val_loss: 2.2937 - val_acc: 0.1040\n",
+ "Epoch 16/300\n",
+ "8000/8000 [==============================] - 29s - loss: 2.2847 - acc: 0.1256 - val_loss: 2.3009 - val_acc: 0.1000\n",
+ "Epoch 17/300\n",
+ "8000/8000 [==============================] - 35s - loss: 2.2858 - acc: 0.1149 - val_loss: 2.2849 - val_acc: 0.1120\n",
+ "Epoch 18/300\n",
+ "8000/8000 [==============================] - 65s - loss: 2.2803 - acc: 0.1248 - val_loss: 2.2851 - val_acc: 0.1150\n",
+ "Epoch 19/300\n",
+ "8000/8000 [==============================] - 36s - loss: 2.2764 - acc: 0.1229 - val_loss: 2.2937 - val_acc: 0.1030\n",
+ "Epoch 20/300\n",
+ "8000/8000 [==============================] - 34s - loss: 2.2711 - acc: 0.1270 - val_loss: 2.2916 - val_acc: 0.1060\n",
+ "Epoch 21/300\n",
+ "8000/8000 [==============================] - 39s - loss: 2.2709 - acc: 0.1294 - val_loss: 2.2807 - val_acc: 0.1150\n",
+ "Epoch 22/300\n",
+ "8000/8000 [==============================] - 30s - loss: 2.2674 - acc: 0.1287 - val_loss: 2.2760 - val_acc: 0.1160\n",
+ "Epoch 23/300\n",
+ "8000/8000 [==============================] - 30s - loss: 2.2609 - acc: 0.1341 - val_loss: 2.2811 - val_acc: 0.1310\n",
+ "Epoch 24/300\n",
+ "8000/8000 [==============================] - 31s - loss: 2.2557 - acc: 0.1365 - val_loss: 2.2839 - val_acc: 0.1160\n",
+ "Epoch 25/300\n",
+ "8000/8000 [==============================] - 30s - loss: 2.2558 - acc: 0.1376 - val_loss: 2.2738 - val_acc: 0.1340\n",
+ "Epoch 26/300\n",
+ "8000/8000 [==============================] - 31s - loss: 2.2490 - acc: 0.1424 - val_loss: 2.2583 - val_acc: 0.1370\n",
+ "Epoch 27/300\n",
+ "8000/8000 [==============================] - 30s - loss: 2.2386 - acc: 0.1499 - val_loss: 2.2539 - val_acc: 0.1470\n",
+ "Epoch 28/300\n",
+ "8000/8000 [==============================] - 33s - loss: 2.2395 - acc: 0.1508 - val_loss: 2.2479 - val_acc: 0.1340\n",
+ "Epoch 29/300\n",
+ "8000/8000 [==============================] - 30s - loss: 2.2309 - acc: 0.1510 - val_loss: 2.2333 - val_acc: 0.1400\n",
+ "Epoch 30/300\n",
+ "8000/8000 [==============================] - 33s - loss: 2.2157 - acc: 0.1579 - val_loss: 2.2194 - val_acc: 0.1520\n",
+ "Epoch 31/300\n",
+ "8000/8000 [==============================] - 36s - loss: 2.2076 - acc: 0.1678 - val_loss: 2.2088 - val_acc: 0.1500\n",
+ "Epoch 32/300\n",
+ "8000/8000 [==============================] - 33s - loss: 2.1879 - acc: 0.1748 - val_loss: 2.1946 - val_acc: 0.1600\n",
+ "Epoch 33/300\n",
+ "8000/8000 [==============================] - 37s - loss: 2.1737 - acc: 0.1794 - val_loss: 2.1617 - val_acc: 0.1900\n",
+ "Epoch 34/300\n",
+ "8000/8000 [==============================] - 40s - loss: 2.1644 - acc: 0.1816 - val_loss: 2.2325 - val_acc: 0.1390\n",
+ "Epoch 35/300\n",
+ "8000/8000 [==============================] - 32s - loss: 2.1531 - acc: 0.1920 - val_loss: 2.1349 - val_acc: 0.2140\n",
+ "Epoch 36/300\n",
+ "8000/8000 [==============================] - 32s - loss: 2.1458 - acc: 0.1884 - val_loss: 2.1262 - val_acc: 0.1900\n",
+ "Epoch 37/300\n",
+ "8000/8000 [==============================] - 37s - loss: 2.1260 - acc: 0.2006 - val_loss: 2.0920 - val_acc: 0.2140\n",
+ "Epoch 38/300\n",
+ "8000/8000 [==============================] - 32s - loss: 2.1143 - acc: 0.1988 - val_loss: 2.1078 - val_acc: 0.2080\n",
+ "Epoch 39/300\n",
+ "8000/8000 [==============================] - 31s - loss: 2.1082 - acc: 0.2072 - val_loss: 2.1701 - val_acc: 0.1800\n",
+ "Epoch 40/300\n",
+ "8000/8000 [==============================] - 32s - loss: 2.0913 - acc: 0.2103 - val_loss: 2.0795 - val_acc: 0.2330\n",
+ "Epoch 41/300\n",
+ "8000/8000 [==============================] - 34s - loss: 2.0791 - acc: 0.2153 - val_loss: 2.0515 - val_acc: 0.2440\n",
+ "Epoch 42/300\n",
+ "8000/8000 [==============================] - 33s - loss: 2.0531 - acc: 0.2242 - val_loss: 2.0803 - val_acc: 0.2230\n",
+ "Epoch 43/300\n",
+ "8000/8000 [==============================] - 35s - loss: 2.0529 - acc: 0.2296 - val_loss: 2.0194 - val_acc: 0.2540\n",
+ "Epoch 44/300\n",
+ "8000/8000 [==============================] - 38s - loss: 2.0362 - acc: 0.2314 - val_loss: 1.9901 - val_acc: 0.2610\n",
+ "Epoch 45/300\n",
+ "8000/8000 [==============================] - 53s - loss: 2.0207 - acc: 0.2350 - val_loss: 1.9814 - val_acc: 0.2420\n",
+ "Epoch 46/300\n",
+ "8000/8000 [==============================] - 38s - loss: 2.0095 - acc: 0.2385 - val_loss: 1.9604 - val_acc: 0.2700\n",
+ "Epoch 47/300\n",
+ "8000/8000 [==============================] - 47s - loss: 2.0013 - acc: 0.2441 - val_loss: 1.9835 - val_acc: 0.2520\n",
+ "Epoch 48/300\n",
+ "8000/8000 [==============================] - 45s - loss: 1.9788 - acc: 0.2519 - val_loss: 2.0620 - val_acc: 0.2070\n",
+ "Epoch 49/300\n",
+ "8000/8000 [==============================] - 46s - loss: 1.9658 - acc: 0.2490 - val_loss: 1.9148 - val_acc: 0.2930\n",
+ "Epoch 50/300\n",
+ "8000/8000 [==============================] - 44s - loss: 1.9591 - acc: 0.2625 - val_loss: 2.0140 - val_acc: 0.2320\n",
+ "Epoch 51/300\n",
+ "8000/8000 [==============================] - 46s - loss: 1.9563 - acc: 0.2519 - val_loss: 1.9288 - val_acc: 0.2780\n",
+ "Epoch 52/300\n",
+ "8000/8000 [==============================] - 51s - loss: 1.9377 - acc: 0.2673 - val_loss: 1.8916 - val_acc: 0.2930\n",
+ "Epoch 53/300\n",
+ "8000/8000 [==============================] - 46s - loss: 1.9376 - acc: 0.2610 - val_loss: 1.9621 - val_acc: 0.2450\n",
+ "Epoch 54/300\n",
+ "8000/8000 [==============================] - 45s - loss: 1.9212 - acc: 0.2744 - val_loss: 1.8836 - val_acc: 0.2890\n",
+ "Epoch 55/300\n",
+ "8000/8000 [==============================] - 43s - loss: 1.9141 - acc: 0.2709 - val_loss: 1.8822 - val_acc: 0.2890\n",
+ "Epoch 56/300\n",
+ "8000/8000 [==============================] - 45s - loss: 1.9053 - acc: 0.2811 - val_loss: 1.8775 - val_acc: 0.2800\n",
+ "Epoch 57/300\n",
+ "8000/8000 [==============================] - 47s - loss: 1.8986 - acc: 0.2755 - val_loss: 1.8852 - val_acc: 0.3020\n",
+ "Epoch 58/300\n",
+ "8000/8000 [==============================] - 46s - loss: 1.8760 - acc: 0.2878 - val_loss: 1.8774 - val_acc: 0.3010\n",
+ "Epoch 59/300\n",
+ "8000/8000 [==============================] - 47s - loss: 1.8763 - acc: 0.2900 - val_loss: 1.8485 - val_acc: 0.2940\n",
+ "Epoch 60/300\n",
+ "8000/8000 [==============================] - 34s - loss: 1.8659 - acc: 0.2890 - val_loss: 1.8394 - val_acc: 0.3090\n",
+ "Epoch 61/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.8688 - acc: 0.2914 - val_loss: 1.8524 - val_acc: 0.2960\n",
+ "Epoch 62/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.8580 - acc: 0.2970 - val_loss: 1.8333 - val_acc: 0.2950\n",
+ "Epoch 63/300\n",
+ "8000/8000 [==============================] - 34s - loss: 1.8458 - acc: 0.3001 - val_loss: 1.8283 - val_acc: 0.3060\n",
+ "Epoch 64/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.8422 - acc: 0.3055 - val_loss: 1.8252 - val_acc: 0.2950\n",
+ "Epoch 65/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.8335 - acc: 0.3000 - val_loss: 1.8362 - val_acc: 0.3110\n",
+ "Epoch 66/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.8326 - acc: 0.3038 - val_loss: 1.8136 - val_acc: 0.2970\n",
+ "Epoch 67/300\n",
+ "8000/8000 [==============================] - 34s - loss: 1.8361 - acc: 0.3110 - val_loss: 1.8112 - val_acc: 0.3120\n",
+ "Epoch 68/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.8225 - acc: 0.3063 - val_loss: 1.8770 - val_acc: 0.2820\n",
+ "Epoch 69/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.8167 - acc: 0.3183 - val_loss: 1.8146 - val_acc: 0.3040\n",
+ "Epoch 70/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.8203 - acc: 0.3076 - val_loss: 1.8149 - val_acc: 0.3060\n",
+ "Epoch 71/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.8045 - acc: 0.3221 - val_loss: 1.8304 - val_acc: 0.2990\n",
+ "Epoch 72/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.8023 - acc: 0.3193 - val_loss: 1.7987 - val_acc: 0.3230\n",
+ "Epoch 73/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.7914 - acc: 0.3230 - val_loss: 1.8158 - val_acc: 0.3080\n",
+ "Epoch 74/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.7859 - acc: 0.3257 - val_loss: 1.7972 - val_acc: 0.3130\n",
+ "Epoch 75/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.7809 - acc: 0.3290 - val_loss: 1.8104 - val_acc: 0.2960\n",
+ "Epoch 76/300\n",
+ "8000/8000 [==============================] - 36s - loss: 1.7803 - acc: 0.3269 - val_loss: 1.8023 - val_acc: 0.3200\n",
+ "Epoch 77/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.7796 - acc: 0.3291 - val_loss: 1.7982 - val_acc: 0.3230\n",
+ "Epoch 78/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.7762 - acc: 0.3250 - val_loss: 1.8404 - val_acc: 0.3060\n",
+ "Epoch 79/300\n",
+ "8000/8000 [==============================] - 34s - loss: 1.7552 - acc: 0.3370 - val_loss: 1.7964 - val_acc: 0.3090\n",
+ "Epoch 80/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.7694 - acc: 0.3386 - val_loss: 1.8058 - val_acc: 0.3030\n",
+ "Epoch 81/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.7542 - acc: 0.3436 - val_loss: 1.7791 - val_acc: 0.3170\n",
+ "Epoch 82/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.7813 - acc: 0.3356 - val_loss: 1.7822 - val_acc: 0.3330\n",
+ "Epoch 83/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.7401 - acc: 0.3418 - val_loss: 1.8077 - val_acc: 0.3150\n",
+ "Epoch 84/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.7448 - acc: 0.3440 - val_loss: 1.7841 - val_acc: 0.3060\n",
+ "Epoch 85/300\n",
+ "8000/8000 [==============================] - 35s - loss: 1.7425 - acc: 0.3400 - val_loss: 1.8095 - val_acc: 0.3070\n",
+ "Epoch 86/300\n",
+ "8000/8000 [==============================] - 34s - loss: 1.7431 - acc: 0.3474 - val_loss: 1.7704 - val_acc: 0.3160\n",
+ "Epoch 87/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.7345 - acc: 0.3519 - val_loss: 1.8614 - val_acc: 0.2920\n",
+ "Epoch 88/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.7389 - acc: 0.3368 - val_loss: 1.8025 - val_acc: 0.3200\n",
+ "Epoch 89/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.7366 - acc: 0.3471 - val_loss: 1.7682 - val_acc: 0.3300\n",
+ "Epoch 90/300\n",
+ "8000/8000 [==============================] - 34s - loss: 1.7191 - acc: 0.3511 - val_loss: 1.7648 - val_acc: 0.3220\n",
+ "Epoch 91/300\n",
+ "8000/8000 [==============================] - 34s - loss: 1.7139 - acc: 0.3595 - val_loss: 1.7714 - val_acc: 0.3210\n",
+ "Epoch 92/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.7195 - acc: 0.3531 - val_loss: 1.7599 - val_acc: 0.3230\n",
+ "Epoch 93/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.7115 - acc: 0.3549 - val_loss: 1.8008 - val_acc: 0.3280\n",
+ "Epoch 94/300\n",
+ "8000/8000 [==============================] - 38s - loss: 1.7124 - acc: 0.3526 - val_loss: 1.7515 - val_acc: 0.3310\n",
+ "Epoch 95/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.7275 - acc: 0.3486 - val_loss: 1.7792 - val_acc: 0.3250\n",
+ "Epoch 96/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.6974 - acc: 0.3624 - val_loss: 1.8358 - val_acc: 0.3120\n",
+ "Epoch 97/300\n",
+ "8000/8000 [==============================] - 34s - loss: 1.6995 - acc: 0.3596 - val_loss: 1.8040 - val_acc: 0.3170\n",
+ "Epoch 98/300\n",
+ "8000/8000 [==============================] - 39s - loss: 1.7005 - acc: 0.3612 - val_loss: 1.7610 - val_acc: 0.3240\n",
+ "Epoch 99/300\n",
+ "8000/8000 [==============================] - 34s - loss: 1.6946 - acc: 0.3696 - val_loss: 1.8016 - val_acc: 0.3110\n",
+ "Epoch 100/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.6817 - acc: 0.3654 - val_loss: 1.7695 - val_acc: 0.3280\n",
+ "Epoch 101/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.6800 - acc: 0.3650 - val_loss: 1.7946 - val_acc: 0.3190\n",
+ "Epoch 102/300\n",
+ "8000/8000 [==============================] - 34s - loss: 1.6978 - acc: 0.3638 - val_loss: 1.7759 - val_acc: 0.3250\n",
+ "Epoch 103/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.6772 - acc: 0.3685 - val_loss: 1.8253 - val_acc: 0.3040\n",
+ "Epoch 104/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.6806 - acc: 0.3732 - val_loss: 1.7725 - val_acc: 0.3190\n",
+ "Epoch 105/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.6641 - acc: 0.3716 - val_loss: 1.7392 - val_acc: 0.3490\n",
+ "Epoch 106/300\n",
+ "8000/8000 [==============================] - 34s - loss: 1.6600 - acc: 0.3806 - val_loss: 1.7482 - val_acc: 0.3480\n",
+ "Epoch 107/300\n",
+ "8000/8000 [==============================] - 41s - loss: 1.6677 - acc: 0.3718 - val_loss: 1.7393 - val_acc: 0.3460\n",
+ "Epoch 108/300\n",
+ "8000/8000 [==============================] - 31s - loss: 1.6617 - acc: 0.3808 - val_loss: 1.9247 - val_acc: 0.3190\n",
+ "Epoch 109/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.6541 - acc: 0.3804 - val_loss: 1.7399 - val_acc: 0.3420\n",
+ "Epoch 110/300\n",
+ "8000/8000 [==============================] - 31s - loss: 1.6480 - acc: 0.3860 - val_loss: 1.7351 - val_acc: 0.3330\n",
+ "Epoch 111/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.6434 - acc: 0.3887 - val_loss: 1.7150 - val_acc: 0.3490\n",
+ "Epoch 112/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.6428 - acc: 0.3893 - val_loss: 1.7465 - val_acc: 0.3290\n",
+ "Epoch 113/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.6363 - acc: 0.3916 - val_loss: 1.7642 - val_acc: 0.3410\n",
+ "Epoch 114/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.6321 - acc: 0.3957 - val_loss: 1.7435 - val_acc: 0.3430\n",
+ "Epoch 115/300\n",
+ "8000/8000 [==============================] - 40s - loss: 1.6386 - acc: 0.3898 - val_loss: 1.7122 - val_acc: 0.3550\n",
+ "Epoch 116/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.6299 - acc: 0.3886 - val_loss: 1.7931 - val_acc: 0.3220\n",
+ "Epoch 117/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.6349 - acc: 0.3909 - val_loss: 1.7132 - val_acc: 0.3650\n",
+ "Epoch 118/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.6265 - acc: 0.4002 - val_loss: 1.7164 - val_acc: 0.3490\n",
+ "Epoch 119/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.6083 - acc: 0.4050 - val_loss: 1.7130 - val_acc: 0.3460\n",
+ "Epoch 120/300\n",
+ "8000/8000 [==============================] - 35s - loss: 1.6130 - acc: 0.3981 - val_loss: 1.7348 - val_acc: 0.3440\n",
+ "Epoch 121/300\n",
+ "8000/8000 [==============================] - 31s - loss: 1.6043 - acc: 0.3987 - val_loss: 1.7112 - val_acc: 0.3520\n",
+ "Epoch 122/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.6169 - acc: 0.3967 - val_loss: 1.7217 - val_acc: 0.3570\n",
+ "Epoch 123/300\n",
+ "8000/8000 [==============================] - 31s - loss: 1.5882 - acc: 0.4046 - val_loss: 1.7344 - val_acc: 0.3560\n",
+ "Epoch 124/300\n",
+ "8000/8000 [==============================] - 31s - loss: 1.6003 - acc: 0.4004 - val_loss: 1.7267 - val_acc: 0.3440\n",
+ "Epoch 125/300\n",
+ "8000/8000 [==============================] - 31s - loss: 1.5764 - acc: 0.4096 - val_loss: 1.7170 - val_acc: 0.3600\n",
+ "Epoch 126/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.5929 - acc: 0.4162 - val_loss: 1.7196 - val_acc: 0.3560\n",
+ "Epoch 127/300\n",
+ "8000/8000 [==============================] - 43s - loss: 1.5737 - acc: 0.4147 - val_loss: 1.7027 - val_acc: 0.3540\n",
+ "Epoch 128/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.5925 - acc: 0.4117 - val_loss: 1.7307 - val_acc: 0.3430\n",
+ "Epoch 129/300\n",
+ "8000/8000 [==============================] - 44s - loss: 1.5663 - acc: 0.4157 - val_loss: 1.7020 - val_acc: 0.3610\n",
+ "Epoch 130/300\n",
+ "8000/8000 [==============================] - 40s - loss: 1.5891 - acc: 0.4042 - val_loss: 1.7048 - val_acc: 0.3510\n",
+ "Epoch 131/300\n",
+ "8000/8000 [==============================] - 37s - loss: 1.5769 - acc: 0.4162 - val_loss: 1.6838 - val_acc: 0.3590\n",
+ "Epoch 132/300\n",
+ "8000/8000 [==============================] - 36s - loss: 1.5859 - acc: 0.4119 - val_loss: 1.7754 - val_acc: 0.3460\n",
+ "Epoch 133/300\n",
+ "8000/8000 [==============================] - 35s - loss: 1.5739 - acc: 0.4140 - val_loss: 1.7645 - val_acc: 0.3460\n",
+ "Epoch 134/300\n",
+ "8000/8000 [==============================] - 34s - loss: 1.5684 - acc: 0.4155 - val_loss: 1.7217 - val_acc: 0.3490\n",
+ "Epoch 135/300\n",
+ "8000/8000 [==============================] - 43s - loss: 1.5441 - acc: 0.4327 - val_loss: 1.7133 - val_acc: 0.3370\n",
+ "Epoch 136/300\n",
+ "8000/8000 [==============================] - 39s - loss: 1.5549 - acc: 0.4220 - val_loss: 1.7212 - val_acc: 0.3530\n",
+ "Epoch 137/300\n",
+ "8000/8000 [==============================] - 37s - loss: 1.5512 - acc: 0.4197 - val_loss: 1.6699 - val_acc: 0.3680\n",
+ "Epoch 138/300\n",
+ "8000/8000 [==============================] - 34s - loss: 1.5440 - acc: 0.4297 - val_loss: 1.7478 - val_acc: 0.3470\n",
+ "Epoch 139/300\n",
+ "8000/8000 [==============================] - 37s - loss: 1.5345 - acc: 0.4352 - val_loss: 1.7015 - val_acc: 0.3640\n",
+ "Epoch 140/300\n",
+ "8000/8000 [==============================] - 35s - loss: 1.5403 - acc: 0.4330 - val_loss: 1.7623 - val_acc: 0.3490\n",
+ "Epoch 141/300\n",
+ "8000/8000 [==============================] - 44s - loss: 1.5304 - acc: 0.4315 - val_loss: 1.6834 - val_acc: 0.3580\n",
+ "Epoch 142/300\n",
+ "8000/8000 [==============================] - 41s - loss: 1.5208 - acc: 0.4387 - val_loss: 1.7034 - val_acc: 0.3500\n",
+ "Epoch 143/300\n",
+ "8000/8000 [==============================] - 35s - loss: 1.5264 - acc: 0.4286 - val_loss: 1.6789 - val_acc: 0.3660\n",
+ "Epoch 144/300\n",
+ "8000/8000 [==============================] - 38s - loss: 1.5338 - acc: 0.4334 - val_loss: 1.7439 - val_acc: 0.3550\n",
+ "Epoch 145/300\n",
+ "8000/8000 [==============================] - 53s - loss: 1.5137 - acc: 0.4436 - val_loss: 1.7277 - val_acc: 0.3670\n",
+ "Epoch 146/300\n",
+ "8000/8000 [==============================] - 34s - loss: 1.5083 - acc: 0.4405 - val_loss: 1.6762 - val_acc: 0.3710\n",
+ "Epoch 147/300\n",
+ "8000/8000 [==============================] - 40s - loss: 1.5129 - acc: 0.4387 - val_loss: 1.6694 - val_acc: 0.3800\n",
+ "Epoch 148/300\n",
+ "8000/8000 [==============================] - 37s - loss: 1.4989 - acc: 0.4440 - val_loss: 1.7224 - val_acc: 0.3570\n",
+ "Epoch 149/300\n",
+ "8000/8000 [==============================] - 39s - loss: 1.5254 - acc: 0.4342 - val_loss: 1.6775 - val_acc: 0.3600\n",
+ "Epoch 150/300\n",
+ "8000/8000 [==============================] - 43s - loss: 1.4929 - acc: 0.4454 - val_loss: 1.7174 - val_acc: 0.3600\n",
+ "Epoch 151/300\n",
+ "8000/8000 [==============================] - 34s - loss: 1.4964 - acc: 0.4341 - val_loss: 1.7464 - val_acc: 0.3420\n",
+ "Epoch 152/300\n",
+ "8000/8000 [==============================] - 61s - loss: 1.5029 - acc: 0.4412 - val_loss: 1.6692 - val_acc: 0.3700\n",
+ "Epoch 153/300\n",
+ "8000/8000 [==============================] - 50s - loss: 1.4924 - acc: 0.4459 - val_loss: 1.6817 - val_acc: 0.3690\n",
+ "Epoch 154/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.4882 - acc: 0.4491 - val_loss: 1.6862 - val_acc: 0.3660\n",
+ "Epoch 155/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.4749 - acc: 0.4467 - val_loss: 1.6857 - val_acc: 0.3610\n",
+ "Epoch 156/300\n",
+ "8000/8000 [==============================] - 35s - loss: 1.4774 - acc: 0.4459 - val_loss: 1.6960 - val_acc: 0.3590\n",
+ "Epoch 157/300\n",
+ "8000/8000 [==============================] - 35s - loss: 1.4796 - acc: 0.4511 - val_loss: 1.7136 - val_acc: 0.3650\n",
+ "Epoch 158/300\n",
+ "8000/8000 [==============================] - 49s - loss: 1.4784 - acc: 0.4479 - val_loss: 1.7824 - val_acc: 0.3420\n",
+ "Epoch 159/300\n",
+ "8000/8000 [==============================] - 39s - loss: 1.4706 - acc: 0.4541 - val_loss: 1.6818 - val_acc: 0.3680\n",
+ "Epoch 160/300\n",
+ "8000/8000 [==============================] - 40s - loss: 1.4595 - acc: 0.4599 - val_loss: 1.6736 - val_acc: 0.3780\n",
+ "Epoch 161/300\n",
+ "8000/8000 [==============================] - 66s - loss: 1.4532 - acc: 0.4580 - val_loss: 1.6766 - val_acc: 0.3630\n",
+ "Epoch 162/300\n",
+ "8000/8000 [==============================] - 50s - loss: 1.4566 - acc: 0.4621 - val_loss: 1.6529 - val_acc: 0.3720\n",
+ "Epoch 163/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.4461 - acc: 0.4531 - val_loss: 1.7723 - val_acc: 0.3610\n",
+ "Epoch 164/300\n",
+ "8000/8000 [==============================] - 51s - loss: 1.4415 - acc: 0.4616 - val_loss: 1.7694 - val_acc: 0.3450\n",
+ "Epoch 165/300\n",
+ "8000/8000 [==============================] - 54s - loss: 1.4630 - acc: 0.4514 - val_loss: 1.7519 - val_acc: 0.3560\n",
+ "Epoch 166/300\n",
+ "8000/8000 [==============================] - 42s - loss: 1.4536 - acc: 0.4622 - val_loss: 1.6762 - val_acc: 0.3640\n",
+ "Epoch 167/300\n",
+ "8000/8000 [==============================] - 55s - loss: 1.4417 - acc: 0.4583 - val_loss: 1.6673 - val_acc: 0.3590\n",
+ "Epoch 168/300\n",
+ "8000/8000 [==============================] - 53s - loss: 1.4476 - acc: 0.4607 - val_loss: 1.6601 - val_acc: 0.3730\n",
+ "Epoch 169/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.4399 - acc: 0.4556 - val_loss: 1.6820 - val_acc: 0.3810\n",
+ "Epoch 170/300\n",
+ "8000/8000 [==============================] - 32s - loss: 1.4425 - acc: 0.4619 - val_loss: 1.6863 - val_acc: 0.3680\n",
+ "Epoch 171/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.4329 - acc: 0.4724 - val_loss: 1.7670 - val_acc: 0.3490\n",
+ "Epoch 172/300\n",
+ "8000/8000 [==============================] - 39s - loss: 1.4193 - acc: 0.4781 - val_loss: 1.7030 - val_acc: 0.3750\n",
+ "Epoch 173/300\n",
+ "8000/8000 [==============================] - 52s - loss: 1.4325 - acc: 0.4621 - val_loss: 1.7331 - val_acc: 0.3730\n",
+ "Epoch 174/300\n",
+ "8000/8000 [==============================] - 44s - loss: 1.4164 - acc: 0.4705 - val_loss: 1.7481 - val_acc: 0.3690\n",
+ "Epoch 175/300\n",
+ "8000/8000 [==============================] - 43s - loss: 1.4282 - acc: 0.4745 - val_loss: 1.7060 - val_acc: 0.3850\n",
+ "Epoch 176/300\n",
+ "8000/8000 [==============================] - 40s - loss: 1.4219 - acc: 0.4732 - val_loss: 1.7367 - val_acc: 0.3870\n",
+ "Epoch 177/300\n",
+ "8000/8000 [==============================] - 39s - loss: 1.4229 - acc: 0.4757 - val_loss: 1.7221 - val_acc: 0.3830\n",
+ "Epoch 178/300\n",
+ "8000/8000 [==============================] - 39s - loss: 1.4135 - acc: 0.4761 - val_loss: 1.7985 - val_acc: 0.3730\n",
+ "Epoch 179/300\n",
+ "8000/8000 [==============================] - 57s - loss: 1.4183 - acc: 0.4721 - val_loss: 1.7622 - val_acc: 0.3750\n",
+ "Epoch 180/300\n",
+ "8000/8000 [==============================] - 40s - loss: 1.3986 - acc: 0.4807 - val_loss: 1.7274 - val_acc: 0.3790\n",
+ "Epoch 181/300\n",
+ "8000/8000 [==============================] - 38s - loss: 1.4032 - acc: 0.4765 - val_loss: 1.7895 - val_acc: 0.3720\n",
+ "Epoch 182/300\n",
+ "8000/8000 [==============================] - 45s - loss: 1.3998 - acc: 0.4771 - val_loss: 1.7279 - val_acc: 0.3670\n",
+ "Epoch 183/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.4099 - acc: 0.4742 - val_loss: 1.7543 - val_acc: 0.3680\n",
+ "Epoch 184/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.4052 - acc: 0.4796 - val_loss: 1.7256 - val_acc: 0.3760\n",
+ "Epoch 185/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.3908 - acc: 0.4786 - val_loss: 1.7812 - val_acc: 0.3690\n",
+ "Epoch 186/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.3744 - acc: 0.4887 - val_loss: 1.7568 - val_acc: 0.3760\n",
+ "Epoch 187/300\n",
+ "8000/8000 [==============================] - 34s - loss: 1.3788 - acc: 0.4861 - val_loss: 1.7656 - val_acc: 0.3810\n",
+ "Epoch 188/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.3744 - acc: 0.4922 - val_loss: 1.7206 - val_acc: 0.3750\n",
+ "Epoch 189/300\n",
+ "8000/8000 [==============================] - 54s - loss: 1.3966 - acc: 0.4831 - val_loss: 1.7305 - val_acc: 0.3830\n",
+ "Epoch 190/300\n",
+ "8000/8000 [==============================] - 47s - loss: 1.3755 - acc: 0.4854 - val_loss: 1.8067 - val_acc: 0.3580\n",
+ "Epoch 191/300\n",
+ "8000/8000 [==============================] - 34s - loss: 1.3700 - acc: 0.4897 - val_loss: 1.7230 - val_acc: 0.3740\n",
+ "Epoch 192/300\n",
+ "8000/8000 [==============================] - 33s - loss: 1.3590 - acc: 0.4964 - val_loss: 1.7333 - val_acc: 0.3910\n",
+ "Epoch 193/300\n",
+ "8000/8000 [==============================] - 34s - loss: 1.3739 - acc: 0.4935 - val_loss: 1.7088 - val_acc: 0.3820\n",
+ "Epoch 194/300\n",
+ "8000/8000 [==============================] - 34s - loss: 1.3496 - acc: 0.4944 - val_loss: 1.7125 - val_acc: 0.3760\n",
+ "Epoch 195/300\n",
+ "8000/8000 [==============================] - 44s - loss: 1.3660 - acc: 0.4912 - val_loss: 1.7448 - val_acc: 0.3910\n",
+ "Epoch 196/300\n",
+ "8000/8000 [==============================] - 42s - loss: 1.3501 - acc: 0.4972 - val_loss: 1.7508 - val_acc: 0.3880\n",
+ "Epoch 197/300\n",
+ "8000/8000 [==============================] - 46s - loss: 1.3659 - acc: 0.4952 - val_loss: 1.8237 - val_acc: 0.3750\n",
+ "Epoch 198/300\n",
+ "8000/8000 [==============================] - 39s - loss: 1.3599 - acc: 0.4930 - val_loss: 1.8098 - val_acc: 0.3890\n",
+ "Epoch 199/300\n",
+ "8000/8000 [==============================] - 58s - loss: 1.3605 - acc: 0.4880 - val_loss: 1.7441 - val_acc: 0.3690\n",
+ "Epoch 200/300\n",
+ "8000/8000 [==============================] - 54s - loss: 1.3509 - acc: 0.4951 - val_loss: 1.7307 - val_acc: 0.3840\n",
+ "Epoch 201/300\n",
+ "8000/8000 [==============================] - 50s - loss: 1.3476 - acc: 0.4975 - val_loss: 1.7121 - val_acc: 0.3930\n",
+ "Epoch 202/300\n",
+ "8000/8000 [==============================] - 54s - loss: 1.3312 - acc: 0.5017 - val_loss: 1.7644 - val_acc: 0.3660\n",
+ "Epoch 203/300\n",
+ "8000/8000 [==============================] - 55s - loss: 1.3560 - acc: 0.4922 - val_loss: 1.6856 - val_acc: 0.3960\n",
+ "Epoch 204/300\n",
+ "8000/8000 [==============================] - 43s - loss: 1.3458 - acc: 0.4972 - val_loss: 1.7923 - val_acc: 0.3890\n",
+ "Epoch 205/300\n",
+ "8000/8000 [==============================] - 57s - loss: 1.3301 - acc: 0.5046 - val_loss: 1.7163 - val_acc: 0.3910\n",
+ "Epoch 206/300\n",
+ "8000/8000 [==============================] - 57s - loss: 1.3322 - acc: 0.5094 - val_loss: 1.7737 - val_acc: 0.3920\n",
+ "Epoch 207/300\n",
+ "8000/8000 [==============================] - 59s - loss: 1.3221 - acc: 0.5112 - val_loss: 1.7900 - val_acc: 0.3860\n",
+ "Epoch 208/300\n",
+ "8000/8000 [==============================] - 61s - loss: 1.3137 - acc: 0.5100 - val_loss: 1.7866 - val_acc: 0.3860\n",
+ "Epoch 209/300\n",
+ "8000/8000 [==============================] - 58s - loss: 1.3377 - acc: 0.5030 - val_loss: 1.7854 - val_acc: 0.3640\n",
+ "Epoch 210/300\n",
+ "8000/8000 [==============================] - 46s - loss: 1.3138 - acc: 0.5132 - val_loss: 1.7913 - val_acc: 0.3790\n",
+ "Epoch 211/300\n",
+ "8000/8000 [==============================] - 46s - loss: 1.3005 - acc: 0.5154 - val_loss: 1.7860 - val_acc: 0.3790\n",
+ "Epoch 212/300\n",
+ "8000/8000 [==============================] - 47s - loss: 1.3127 - acc: 0.5147 - val_loss: 1.7600 - val_acc: 0.3790\n",
+ "Epoch 213/300\n",
+ "8000/8000 [==============================] - 46s - loss: 1.3087 - acc: 0.5122 - val_loss: 1.7678 - val_acc: 0.3830\n",
+ "Epoch 214/300\n",
+ "8000/8000 [==============================] - 53s - loss: 1.3063 - acc: 0.5171 - val_loss: 1.7076 - val_acc: 0.3830\n",
+ "Epoch 215/300\n",
+ "8000/8000 [==============================] - 51s - loss: 1.3013 - acc: 0.5165 - val_loss: 1.7708 - val_acc: 0.3870\n",
+ "Epoch 216/300\n",
+ "8000/8000 [==============================] - 48s - loss: 1.3101 - acc: 0.5206 - val_loss: 1.7493 - val_acc: 0.3930\n",
+ "Epoch 217/300\n",
+ "8000/8000 [==============================] - 46s - loss: 1.3147 - acc: 0.5069 - val_loss: 1.9293 - val_acc: 0.3920\n",
+ "Epoch 218/300\n",
+ "8000/8000 [==============================] - 49s - loss: 1.3075 - acc: 0.5130 - val_loss: 1.7904 - val_acc: 0.3860\n",
+ "Epoch 219/300\n",
+ "8000/8000 [==============================] - 48s - loss: 1.3011 - acc: 0.5192 - val_loss: 1.8325 - val_acc: 0.3810\n",
+ "Epoch 220/300\n",
+ "8000/8000 [==============================] - 59s - loss: 1.3181 - acc: 0.5131 - val_loss: 1.7451 - val_acc: 0.3900\n",
+ "Epoch 221/300\n",
+ "8000/8000 [==============================] - 81s - loss: 1.3053 - acc: 0.5159 - val_loss: 1.8403 - val_acc: 0.3690\n",
+ "Epoch 222/300\n",
+ "8000/8000 [==============================] - 55s - loss: 1.2804 - acc: 0.5175 - val_loss: 1.7491 - val_acc: 0.3870\n",
+ "Epoch 223/300\n",
+ "8000/8000 [==============================] - 56s - loss: 1.2910 - acc: 0.5171 - val_loss: 1.7671 - val_acc: 0.3850\n",
+ "Epoch 224/300\n",
+ "8000/8000 [==============================] - 54s - loss: 1.2932 - acc: 0.5150 - val_loss: 1.7996 - val_acc: 0.3860\n",
+ "Epoch 225/300\n",
+ "8000/8000 [==============================] - 48s - loss: 1.3132 - acc: 0.5141 - val_loss: 1.8531 - val_acc: 0.3900\n",
+ "Epoch 226/300\n",
+ "8000/8000 [==============================] - 52s - loss: 1.2860 - acc: 0.5250 - val_loss: 1.7788 - val_acc: 0.3840\n",
+ "Epoch 227/300\n",
+ "8000/8000 [==============================] - 49s - loss: 1.2558 - acc: 0.5332 - val_loss: 1.8187 - val_acc: 0.3970\n",
+ "Epoch 228/300\n",
+ "8000/8000 [==============================] - 48s - loss: 1.2976 - acc: 0.5219 - val_loss: 1.8214 - val_acc: 0.3970\n",
+ "Epoch 229/300\n",
+ "8000/8000 [==============================] - 57s - loss: 1.2755 - acc: 0.5207 - val_loss: 1.7923 - val_acc: 0.3900\n",
+ "Epoch 230/300\n",
+ "8000/8000 [==============================] - 59s - loss: 1.2677 - acc: 0.5267 - val_loss: 1.7863 - val_acc: 0.3970\n",
+ "Epoch 231/300\n",
+ "8000/8000 [==============================] - 55s - loss: 1.2680 - acc: 0.5307 - val_loss: 1.7812 - val_acc: 0.3870\n",
+ "Epoch 232/300\n",
+ "8000/8000 [==============================] - 45s - loss: 1.2671 - acc: 0.5269 - val_loss: 1.7846 - val_acc: 0.3980\n",
+ "Epoch 233/300\n",
+ "8000/8000 [==============================] - 45s - loss: 1.2578 - acc: 0.5366 - val_loss: 1.8388 - val_acc: 0.3930\n",
+ "Epoch 234/300\n",
+ "8000/8000 [==============================] - 45s - loss: 1.2605 - acc: 0.5292 - val_loss: 1.8948 - val_acc: 0.4030\n",
+ "Epoch 235/300\n",
+ "8000/8000 [==============================] - 60s - loss: 1.2838 - acc: 0.5255 - val_loss: 1.8097 - val_acc: 0.4020\n",
+ "Epoch 236/300\n",
+ "8000/8000 [==============================] - 53s - loss: 1.2352 - acc: 0.5429 - val_loss: 1.8199 - val_acc: 0.3950\n",
+ "Epoch 237/300\n",
+ "8000/8000 [==============================] - 55s - loss: 1.2572 - acc: 0.5319 - val_loss: 1.8150 - val_acc: 0.3810\n",
+ "Epoch 238/300\n",
+ "8000/8000 [==============================] - 63s - loss: 1.2507 - acc: 0.5331 - val_loss: 1.8879 - val_acc: 0.3880\n",
+ "Epoch 239/300\n",
+ "8000/8000 [==============================] - 55s - loss: 1.2684 - acc: 0.5290 - val_loss: 1.7812 - val_acc: 0.3800\n",
+ "Epoch 240/300\n",
+ "8000/8000 [==============================] - 49s - loss: 1.2584 - acc: 0.5312 - val_loss: 1.7401 - val_acc: 0.3870\n",
+ "Epoch 241/300\n",
+ "8000/8000 [==============================] - 46s - loss: 1.2501 - acc: 0.5385 - val_loss: 1.8292 - val_acc: 0.3860\n",
+ "Epoch 242/300\n",
+ "8000/8000 [==============================] - 49s - loss: 1.2690 - acc: 0.5276 - val_loss: 1.7795 - val_acc: 0.3890\n",
+ "Epoch 243/300\n",
+ "8000/8000 [==============================] - 47s - loss: 1.2314 - acc: 0.5452 - val_loss: 1.7840 - val_acc: 0.3820\n",
+ "Epoch 244/300\n",
+ "8000/8000 [==============================] - 48s - loss: 1.2547 - acc: 0.5319 - val_loss: 1.7786 - val_acc: 0.4020\n",
+ "Epoch 245/300\n",
+ "8000/8000 [==============================] - 48s - loss: 1.2438 - acc: 0.5401 - val_loss: 1.8943 - val_acc: 0.3930\n",
+ "Epoch 246/300\n",
+ "8000/8000 [==============================] - 54s - loss: 1.2638 - acc: 0.5340 - val_loss: 1.8043 - val_acc: 0.4000\n",
+ "Epoch 247/300\n",
+ "8000/8000 [==============================] - 57s - loss: 1.2528 - acc: 0.5330 - val_loss: 1.8343 - val_acc: 0.3740\n",
+ "Epoch 248/300\n",
+ "8000/8000 [==============================] - 60s - loss: 1.2448 - acc: 0.5366 - val_loss: 1.7803 - val_acc: 0.3930\n",
+ "Epoch 249/300\n",
+ "8000/8000 [==============================] - 57s - loss: 1.2396 - acc: 0.5361 - val_loss: 1.7649 - val_acc: 0.3760\n",
+ "Epoch 250/300\n",
+ "8000/8000 [==============================] - 48s - loss: 1.2538 - acc: 0.5316 - val_loss: 1.8259 - val_acc: 0.3940\n",
+ "Epoch 251/300\n",
+ "8000/8000 [==============================] - 48s - loss: 1.2394 - acc: 0.5396 - val_loss: 1.8638 - val_acc: 0.3900\n",
+ "Epoch 252/300\n",
+ "8000/8000 [==============================] - 52s - loss: 1.2255 - acc: 0.5424 - val_loss: 1.8557 - val_acc: 0.3900\n",
+ "Epoch 253/300\n",
+ "8000/8000 [==============================] - 54s - loss: 1.2334 - acc: 0.5427 - val_loss: 1.8894 - val_acc: 0.4010\n",
+ "Epoch 254/300\n",
+ "8000/8000 [==============================] - 51s - loss: 1.2336 - acc: 0.5396 - val_loss: 1.7947 - val_acc: 0.3860\n",
+ "Epoch 255/300\n",
+ "8000/8000 [==============================] - 47s - loss: 1.2269 - acc: 0.5417 - val_loss: 1.8605 - val_acc: 0.4000\n",
+ "Epoch 256/300\n",
+ "8000/8000 [==============================] - 48s - loss: 1.2512 - acc: 0.5354 - val_loss: 1.8913 - val_acc: 0.4000\n",
+ "Epoch 257/300\n",
+ "8000/8000 [==============================] - 47s - loss: 1.2307 - acc: 0.5472 - val_loss: 1.8458 - val_acc: 0.3980\n",
+ "Epoch 258/300\n",
+ "8000/8000 [==============================] - 50s - loss: 1.2138 - acc: 0.5502 - val_loss: 1.8366 - val_acc: 0.3850\n",
+ "Epoch 259/300\n",
+ "8000/8000 [==============================] - 58s - loss: 1.2156 - acc: 0.5509 - val_loss: 1.8741 - val_acc: 0.3900\n",
+ "Epoch 260/300\n",
+ "8000/8000 [==============================] - 59s - loss: 1.2232 - acc: 0.5440 - val_loss: 1.8609 - val_acc: 0.4030\n",
+ "Epoch 261/300\n",
+ "8000/8000 [==============================] - 55s - loss: 1.2252 - acc: 0.5480 - val_loss: 1.8242 - val_acc: 0.3570\n",
+ "Epoch 262/300\n",
+ "8000/8000 [==============================] - 70s - loss: 1.2005 - acc: 0.5579 - val_loss: 1.8336 - val_acc: 0.3710\n",
+ "Epoch 263/300\n",
+ "8000/8000 [==============================] - 65s - loss: 1.2119 - acc: 0.5492 - val_loss: 2.0188 - val_acc: 0.4010\n",
+ "Epoch 264/300\n",
+ "8000/8000 [==============================] - 65s - loss: 1.2221 - acc: 0.5489 - val_loss: 1.9160 - val_acc: 0.3890\n",
+ "Epoch 265/300\n",
+ "8000/8000 [==============================] - 54s - loss: 1.2369 - acc: 0.5459 - val_loss: 1.8770 - val_acc: 0.3810\n",
+ "Epoch 266/300\n",
+ "8000/8000 [==============================] - 53s - loss: 1.2053 - acc: 0.5506 - val_loss: 1.8836 - val_acc: 0.3870\n",
+ "Epoch 267/300\n",
+ "8000/8000 [==============================] - 52s - loss: 1.2007 - acc: 0.5567 - val_loss: 1.9252 - val_acc: 0.4010\n",
+ "Epoch 268/300\n",
+ "8000/8000 [==============================] - 79s - loss: 1.2076 - acc: 0.5575 - val_loss: 1.8153 - val_acc: 0.3940\n",
+ "Epoch 269/300\n",
+ "8000/8000 [==============================] - 61s - loss: 1.2160 - acc: 0.5460 - val_loss: 1.8347 - val_acc: 0.3650\n",
+ "Epoch 270/300\n",
+ "8000/8000 [==============================] - 57s - loss: 1.2060 - acc: 0.5600 - val_loss: 1.8661 - val_acc: 0.3880\n",
+ "Epoch 271/300\n",
+ "8000/8000 [==============================] - 47s - loss: 1.1941 - acc: 0.5531 - val_loss: 1.8563 - val_acc: 0.3990\n",
+ "Epoch 272/300\n",
+ "8000/8000 [==============================] - 48s - loss: 1.2006 - acc: 0.5484 - val_loss: 1.8671 - val_acc: 0.3870\n",
+ "Epoch 273/300\n",
+ "8000/8000 [==============================] - 61s - loss: 1.1836 - acc: 0.5585 - val_loss: 1.8631 - val_acc: 0.3960\n",
+ "Epoch 274/300\n",
+ "8000/8000 [==============================] - 49s - loss: 1.2218 - acc: 0.5514 - val_loss: 1.8312 - val_acc: 0.3850\n",
+ "Epoch 275/300\n",
+ "8000/8000 [==============================] - 48s - loss: 1.1910 - acc: 0.5597 - val_loss: 1.8523 - val_acc: 0.3810\n",
+ "Epoch 276/300\n",
+ "8000/8000 [==============================] - 52s - loss: 1.1876 - acc: 0.5591 - val_loss: 1.8808 - val_acc: 0.3980\n",
+ "Epoch 277/300\n",
+ "8000/8000 [==============================] - 52s - loss: 1.1990 - acc: 0.5495 - val_loss: 1.8938 - val_acc: 0.3650\n",
+ "Epoch 278/300\n",
+ "8000/8000 [==============================] - 51s - loss: 1.1914 - acc: 0.5634 - val_loss: 1.9394 - val_acc: 0.3840\n",
+ "Epoch 279/300\n",
+ "8000/8000 [==============================] - 48s - loss: 1.1901 - acc: 0.5589 - val_loss: 1.9116 - val_acc: 0.4010\n",
+ "Epoch 280/300\n",
+ "8000/8000 [==============================] - 56s - loss: 1.1958 - acc: 0.5651 - val_loss: 1.8547 - val_acc: 0.3920\n",
+ "Epoch 281/300\n",
+ "8000/8000 [==============================] - 52s - loss: 1.1776 - acc: 0.5636 - val_loss: 1.9008 - val_acc: 0.3950\n",
+ "Epoch 282/300\n",
+ "8000/8000 [==============================] - 59s - loss: 1.1805 - acc: 0.5627 - val_loss: 2.0467 - val_acc: 0.3920\n",
+ "Epoch 283/300\n",
+ "8000/8000 [==============================] - 61s - loss: 1.1777 - acc: 0.5629 - val_loss: 1.8486 - val_acc: 0.4030\n",
+ "Epoch 284/300\n",
+ "8000/8000 [==============================] - 60s - loss: 1.1762 - acc: 0.5661 - val_loss: 1.9355 - val_acc: 0.3890\n",
+ "Epoch 285/300\n",
+ "8000/8000 [==============================] - 57s - loss: 1.1931 - acc: 0.5631 - val_loss: 1.9267 - val_acc: 0.3890\n",
+ "Epoch 286/300\n",
+ "8000/8000 [==============================] - 56s - loss: 1.1783 - acc: 0.5679 - val_loss: 2.0521 - val_acc: 0.3980\n",
+ "Epoch 287/300\n",
+ "8000/8000 [==============================] - 55s - loss: 1.1653 - acc: 0.5728 - val_loss: 1.9667 - val_acc: 0.3980\n",
+ "Epoch 288/300\n",
+ "8000/8000 [==============================] - 50s - loss: 1.1588 - acc: 0.5736 - val_loss: 1.8695 - val_acc: 0.3830\n",
+ "Epoch 289/300\n",
+ "8000/8000 [==============================] - 60s - loss: 1.1642 - acc: 0.5691 - val_loss: 1.8775 - val_acc: 0.4070\n",
+ "Epoch 290/300\n",
+ "8000/8000 [==============================] - 57s - loss: 1.1659 - acc: 0.5666 - val_loss: 1.8620 - val_acc: 0.4020\n",
+ "Epoch 291/300\n",
+ "8000/8000 [==============================] - 57s - loss: 1.1721 - acc: 0.5661 - val_loss: 2.0072 - val_acc: 0.3950\n",
+ "Epoch 292/300\n",
+ "8000/8000 [==============================] - 56s - loss: 1.1731 - acc: 0.5670 - val_loss: 1.8663 - val_acc: 0.3990\n",
+ "Epoch 293/300\n",
+ "8000/8000 [==============================] - 59s - loss: 1.1578 - acc: 0.5705 - val_loss: 1.8047 - val_acc: 0.3820\n",
+ "Epoch 294/300\n",
+ "8000/8000 [==============================] - 65s - loss: 1.1630 - acc: 0.5667 - val_loss: 1.8694 - val_acc: 0.3930\n",
+ "Epoch 295/300\n",
+ "8000/8000 [==============================] - 55s - loss: 1.1650 - acc: 0.5680 - val_loss: 2.0402 - val_acc: 0.3910\n",
+ "Epoch 296/300\n",
+ "8000/8000 [==============================] - 46s - loss: 1.1567 - acc: 0.5767 - val_loss: 1.9987 - val_acc: 0.4020\n",
+ "Epoch 297/300\n",
+ "8000/8000 [==============================] - 56s - loss: 1.1833 - acc: 0.5616 - val_loss: 1.9215 - val_acc: 0.3960\n",
+ "Epoch 298/300\n",
+ "8000/8000 [==============================] - 52s - loss: 1.1536 - acc: 0.5725 - val_loss: 1.9892 - val_acc: 0.3960\n",
+ "Epoch 299/300\n",
+ "8000/8000 [==============================] - 47s - loss: 1.1581 - acc: 0.5760 - val_loss: 1.8705 - val_acc: 0.4040\n",
+ "Epoch 300/300\n",
+ "8000/8000 [==============================] - 56s - loss: 1.1555 - acc: 0.5740 - val_loss: 1.8611 - val_acc: 0.3990\n",
+ "('Test score:', 1.940684980392456)\n",
+ "('Test accuracy:', 0.39000000000000001)\n"
+ ]
+ }
+ ],
+ "source": [
+ "\n",
+ "\n",
+ "model = Sequential()\n",
+ "\n",
+ "model.add(Convolution2D(16, 5, 5,\n",
+ " border_mode='valid',\n",
+ " input_shape=(32,32,3) ))\n",
+ "model.add(Activation('relu'))\n",
+ "model.add(MaxPooling2D(pool_size=(2, 2)))\n",
+ "model.add(Convolution2D(16, 5, 5, border_mode='valid',\n",
+ " input_shape=(32,32,3)))\n",
+ "model.add(Activation('relu'))\n",
+ "model.add(MaxPooling2D(pool_size=(2, 2)))\n",
+ "model.add(Dropout(0.25))\n",
+ "\n",
+ "model.add(Flatten())\n",
+ "model.add(Dense(128))\n",
+ "model.add(Activation('relu'))\n",
+ "model.add(Dropout(0.5))\n",
+ "model.add(Dense(10))\n",
+ "model.add(Activation('softmax'))\n",
+ "\n",
+ "model.compile(loss='categorical_crossentropy',\n",
+ " optimizer='adadelta',\n",
+ " metrics=['accuracy'])\n",
+ "\n",
+ "model.fit(images[:8000], labels[:8000], batch_size=100, nb_epoch=300,\n",
+ " verbose=1, validation_data=(images[8000:], labels[8000:]))\n",
+ "score = model.evaluate(images[8000:], labels[8000:], verbose=0)\n",
+ "print('Test score:', score[0])\n",
+ "print('Test accuracy:', score[1])"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 2",
+ "language": "python",
+ "name": "python2"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.11+"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/6/Mnist_final.ipynb b/Building-Machine-Learning-Projects-with-TensorFlow-master/6/Mnist_final.ipynb
new file mode 100644
index 000000000..ec592b19f
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/6/Mnist_final.ipynb
@@ -0,0 +1,384 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Extracting /tmp/data/train-images-idx3-ubyte.gz\n",
+ "Extracting /tmp/data/train-labels-idx1-ubyte.gz\n",
+ "Extracting /tmp/data/t10k-images-idx3-ubyte.gz\n",
+ "Extracting /tmp/data/t10k-labels-idx1-ubyte.gz\n"
+ ]
+ },
+ {
+ "ename": "NameError",
+ "evalue": "name 'random' is not defined",
+ "output_type": "error",
+ "traceback": [
+ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
+ "\u001b[1;31mNameError\u001b[0m Traceback (most recent call last)",
+ "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m()\u001b[0m\n\u001b[0;32m 8\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 9\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 10\u001b[1;33m \u001b[0mplt\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mimshow\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mmnist\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mtrain\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mimages\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mrandom\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mrandom\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m*\u001b[0m\u001b[1;36m60000\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mreshape\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;36m28\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;36m28\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0morder\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;34m'C'\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mcmap\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;34m'Greys'\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0minterpolation\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;34m'nearest'\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m",
+ "\u001b[1;31mNameError\u001b[0m: name 'random' is not defined"
+ ]
+ }
+ ],
+ "source": [
+ "import tensorflow as tf\n",
+ "%matplotlib inline\n",
+ "import matplotlib.pyplot as plt\n",
+ "\n",
+ "# Import MINST data\n",
+ "from tensorflow.examples.tutorials.mnist import input_data\n",
+ "mnist = input_data.read_data_sets(\"/tmp/data/\", one_hot=True)\n",
+ "#Show the first training image\n",
+ "plt.imshow(mnist.train.images[0].reshape((28, 28), order='C'), cmap='Greys', interpolation='nearest')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 2,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP4AAAD8CAYAAABXXhlaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADilJREFUeJzt3W+MVfWdx/HPVwwqf4SxkZlExKrLdhMTQthdFeHBbdit\nZtMEg0YtfSDthjTG3W1CTCqEZO6YjaGbSMLG9ImlBgiGdklYrA8qJeaodEPFFVwpoMTNWAQZlbCG\n0Qda+O6DuUzvjPf+7p05989hvu9XMvHM+Z57z9ejn3vOuefM+Zm7C0AsV3W7AQCdR/CBgAg+EBDB\nBwIi+EBABB8IKFfwzew+MzthZu+Z2U9a1RSA9rLJXsc3s6skvSdphaQzkg5JesTdT4xbjhsFgC5x\nd6s1P88e/05JJ939A3f/StIuSSvrrHz0p7+/f8zvRfuhv6nbX5F7a0d/KXmCf5OkU1W/f1iZB6Dg\n+HIPCOjqHK89LWlB1e/zK/O+plwuj07PnTs3xyrbr1QqdbuFJPqbvCL3JuXvL8syZVnW1LJ5vtyb\nJuldjXy595GkNyR9z92Pj1vOJ7sOAJNnZvI6X+5Neo/v7hfN7J8k7dPIKcPW8aEHUEyT3uM3vQL2\n+EBXpPb4fLkHBETwgYAIPhAQwQcCIvhAQAQfCIjgAwERfCAggg8ERPCBgAg+EBDBBwIi+EBABB8I\niOADARF8ICCCDwRE8IGACD4QEMEHAiL4QEAEHwiI4AMBEXwgIIIPBETwgYAIPhAQwQcCIvhAQAQf\nCIjgAwFdnefFZjYo6TNJlyR95e53tqIpAO2VK/gaCXzJ3c+3ohkAnZH3UN9a8B4AOixvaF3Sy2Z2\nyMzWtqIhAO2X91B/mbt/ZGY3SvqtmR139wPjFyqXy6PTpVJJpVIp52oBjJdlmbIsa2pZc/eWrNTM\n+iVdcPfN4+Z7q9YBoHlmJne3WrVJH+qb2Qwzm1WZninpO5KOTvb9AHROnkP9Xkl7zMwr77PT3fe1\npi0A7dSyQ/26K+BQH+iKthzqA7hyEXwgIIIPBETwgYAIPhAQwQcCIvhAQHnv1UeX7d+/P1k3q3kZ\nd1RPT0+yfvRo+mbMpUuXJusLFy5M1tEd7PGBgAg+EBDBBwIi+EBABB8IiOADARF8IKAr/jr+a6+9\nlqwfPHgwWX/mmWda2U7HnTt3Ltfrp02blqx/+eWXyfqMGTOS9VmzZiXry5cvT9Z37NiRa/2ojT0+\nEBDBBwIi+EBABB8IiOADARF8ICCCDwRU+Ofqb9q0KVnfuHFjsn7x4sVJrxvd98ADDyTr27ZtS9Zn\nzpzZynauKDxXH8AYBB8IiOADARF8ICCCDwRE8IGACD4QUMPr+Ga2VdJ3JQ25+6LKvB5Jv5R0i6RB\nSQ+5+2d1Xp/rOv6tt96arA8ODibrd999d7I+e/bsibbUUitWrEjWV61a1aFOJmffvn3J+pYtW5L1\nkydP5lp/o+v827dvT9an8t/z572O/7yke8fNe1LSfnf/lqRXJK3P1yKATmoYfHc/IOn8uNkrJV2+\nZWqbpPtb3BeANprsOf48dx+SJHc/K2le61oC0G6teuZe8iS+XC6PTpdKJZVKpRatFsBlWZYpy7Km\nlp1s8IfMrNfdh8ysT9LHqYWrgw+gPcbvVAcGBuou2+yhvlV+LntR0prK9KOS9k6kQQDd1TD4ZvaC\npP+S9Jdm9kcz+4GkTZL+3szelbSi8juAK0Th/x7/008/Tdbff//9ZH3x4sXJ+jXXXDPhntC88+fH\nXxAaq9F9DIcPH861/p07dybrq1evzvX+Rcbf4wMYg+ADARF8ICCCDwRE8IGACD4QEMEHAir8dXxM\nbQcPHkzWly5dmuv9e3t7k/WzZ8/mev8i4zo+gDEIPhAQwQcCIvhAQAQfCIjgAwERfCAggg8ERPCB\ngAg+EBDBBwIi+EBABB8IiOADARF8IKBWjZ0H1LR3b3qQpQMHDrR1/Z9//nmyfurUqWT95ptvbmU7\nhcEeHwiI4AMBEXwgIIIPBETwgYAIPhAQwQcCavhcfTPbKum7kobcfVFlXr+ktZI+riy2wd1/U+f1\nPFe/jYaHh5P1PXv2JOsbN25sZTtf0+g6ebf/35g7d26yfv78+Q510np5n6v/vKR7a8zf7O5LKj81\nQw+gmBoG390PSKr1sVfzkwRA8eU5x3/czI6Y2c/NbE7LOgLQdpO9V/9nkp5ydzezf5W0WdI/1lu4\nXC6PTpdKJZVKpUmuFkA9WZYpy7Kmlm1q0Ewzu0XSry9/uddsrVLny7024su9fPhyr8F7qOqc3sz6\nqmqrJB2dfHsAOq3hob6ZvSCpJOkbZvZHSf2Svm1miyVdkjQo6Udt7BFAizUMvruvrjH7+Tb0EtKx\nY8eS9UOHDiXrmzZtStZPnDgx4Z4ieeKJJ7rdQldw5x4QEMEHAiL4QEAEHwiI4AMBEXwgIIIPBMRz\n9XM6d+5csv7YY48l67t3707W231L6+23356s9/X1JeuNPPvss8n69OnTk/XVq2vdRvJnb7/99oR7\nqrZgwYJcr79SsccHAiL4QEAEHwiI4AMBEXwgIIIPBETwgYC4jt/Arl27kvWnnnoqWT9+/HiyPnv2\n7GT9hhtuSNaffvrpZL3R+O6LFtV8YtqoOXO6+xzVG2+8MdfrG/V/7721nhw/9bHHBwIi+EBABB8I\niOADARF8ICCCDwRE8IGAuI7fwKuvvpqsN7pOv2bNmmR9w4YNyfrChQuT9Svd6dOnk/VG4w40cu21\n1ybr8+bNy/X+Vyr2+EBABB8IiOADARF8ICCCDwRE8IGACD4QUMPr+GY2X9J2Sb2SLkl6zt3/3cx6\nJP1S0i2SBiU95O6ftbHXrti8eXOyvmTJkmR97dq1rWxnyjl16lSyfubMmVzv/+CDD+Z6/VTVzB7/\nT5LWufsdkpZKetzM/krSk5L2u/u3JL0iaX372gTQSg2D7+5n3f1IZXpY0nFJ8yWtlLStstg2Sfe3\nq0kArTWhc3wz+6akxZIOSup19yFp5MNBUsx7H4ErUNP36pvZLEm7Jf3Y3YfNbPygbnUHeSuXy6PT\npVJJpVJpYl0CaCjLMmVZ1tSyTQXfzK7WSOh3uPveyuwhM+t19yEz65P0cb3XVwcfQHuM36kODAzU\nXbbZQ/1fSDrm7luq5r0oaU1l+lFJe8e/CEAxNXM5b5mk70t6x8wOa+SQfoOkn0r6lZn9UNIHkh5q\nZ6MAWqdh8N39d5Km1Sn/XWvbKZ7rrrsuWec6fT6NnnfQSKNxB9atW5fr/acq7twDAiL4QEAEHwiI\n4AMBEXwgIIIPBETwgYB4rj7a6q677krW33rrrVzv//DDDyfrt912W673n6rY4wMBEXwgIIIPBETw\ngYAIPhAQwQcCIvhAQOZe91F5rVmBmbd7HSiu66+/Plm/cOFCst7T05Osv/nmm8l65Ov4ZiZ3t1o1\n9vhAQAQfCIjgAwERfCAggg8ERPCBg
Ag+EBB/j49cXn/99WT9iy++SNbnzJmTrL/00kvJeuTr9Hmw\nxwcCIvhAQAQfCIjgAwERfCAggg8E1DD4ZjbfzF4xsz+Y2Ttm9s+V+f1m9qGZvVX5ua/97QJohWau\n4/9J0jp3P2JmsyT9t5n9tlLb7O6b29ceuu3ixYvJ+vr165P16dOnJ+tr165N1u+5555kHZPTMPju\nflbS2cr0sJkdl3RTpVzzj/wBFNuEzvHN7JuSFkv6fWXW42Z2xMx+bmbpW7AAFEbTwa8c5u+W9GN3\nH5b0M0m3u/tijRwRcMgPXCGaulffzK7WSOh3uPteSXL3T6oWeU7Sr+u9vlwuj06XSiWVSqVJtAog\nJcsyZVnW1LLN/pHOLyQdc/ctl2eYWV/l/F+SVkk6Wu/F1cEH0B7jd6oDAwN1l20YfDNbJun7kt4x\ns8OSXNIGSavNbLGkS5IGJf0oT9MAOqeZb/V/J2lajdJvWt8OgE7gufpIunTpUrK+c+fOZH3JkiXJ\n+h133DHhntAcnqsPYAyCDwRE8IGACD4QEMEHAiL4QEAEHwiI6/jAFMV1fABjEHwgIIIPBNTx4Df7\n98LdQn/5FLm/IvcmdbY/gj8O/eVT5P6K3Js0xYMPoPsIPhBQR67jt3UFAOqqdx2/7cEHUDwc6gMB\nEXwgoI4F38zuM7MTZvaemf2kU+ttlpkNmtnbZnbYzN4oQD9bzWzIzP6nal6Pme0zs3fN7OVujl5U\np7/CDKRaY7DXf6nML8Q27PZgtB05xzezqyS9J2mFpDOSDkl6xN1PtH3lTTKz/5X01+5+vtu9SJKZ\nLZc0LGm7uy+qzPuppHPu/m+VD88ed3+yQP31S7pQhIFUzaxPUl/1YK+SVkr6gQqwDRP9PawObMNO\n7fHvlHTS3T9w968k7dLIv2SRmAp06uPuBySN/xBaKWlbZXqbpPs72lSVOv1JBRlI1d3PuvuRyvSw\npOOS5qsg27BOfx0bjLZT/6PfJOlU1e8f6s//kkXhkl42s0Nmlh67uXvmufuQNDqK8bwu91NL4QZS\nrRrs9aCk3qJtw24MRluYPVwBLHP3v5H0DxrZ8Mu73VATinYttnADqdYY7HX8NuvqNuzWYLSdCv5p\nSQuqfp9fmVcY7v5R5Z+fSNqjkdOTohkys15p9Bzx4y73M4a7f1L11JXnJP1tN/upNdirCrQN6w1G\n24lt2KngH5L0F2Z2i5lNl/SIpBc7tO6GzGxG5ZNXZjZT0neUGAS0g0xjz/delLSmMv2opL3jX9Bh\nY/qrBOmy5ECqHfK1wV5VrG1YczDaqnrbtmHH7tyrXJbYopEPm63uvqkjK26Cmd2qkb28a2Q8wZ3d\n7s/MXpBUkvQNSUOS+iX9p6T/kHSzpA8kPeTu/1eg/r6tkXPV0YFUL59Pd6G/ZZJek/SORv67Xh7s\n9Q1Jv1KXt2Giv9XqwDbkll0gIL7cAwIi+EBABB8IiOADARF8ICCCDwRE8IGACD4Q0P8DZarNQLmR\n4gYAAAAASUVORK5CYII=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "# Parameters\n",
+ "batch_size = 128\n",
+ "learning_rate = 0.05\n",
+ "number_iterations = 2000\n",
+ "steps = 10\n",
+ "\n",
+ "# Network Parameters\n",
+ "n_input = 784 # 28x28 images\n",
+ "n_classes = 10 # 10 digit classes\n",
+ "dropout = 0.80 # Dropout probability\n",
+ "\n",
+ "# tf Graph input\n",
+ "X = tf.placeholder(tf.float32, [None, n_input])\n",
+ "Y = tf.placeholder(tf.float32, [None, n_classes])\n",
+ "keep_prob = tf.placeholder(tf.float32) #dropout (keep probability)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Iter 1280, Minibatch Loss= 37.628696, Training Accuracy= 0.17188\n",
+ "Testing Accuracy: 0.136719\n"
+ ]
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP4AAAD8CAYAAABXXhlaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADeFJREFUeJzt3V2MXPV5x/Hf4xhLJCtiK6p3AdcYWhqqIGNaglThixOc\nBqgiGWLJdckFTouVC9pGzY0xNzvmRYorgbS9MBfEiZYSyzFBKbhSC1gwArdysVI7xY3BkWoTk9jL\ni3jJSoiX8vRij7fj9cz/zO6ZmXN2n+9HWnHmPDNzHo/4zXmfv7m7AMSyqOoGAAwewQcCIvhAQAQf\nCIjgAwERfCCgUsE3s5vN7BUzO25mW3vVFID+srmexzezRZKOS1on6TeSDkna5O6vzHgeFwoAFXF3\naze/zBr/ekm/dPfX3P1jSXskre+w8Om/0dHRcx7X7Y/+Fm5/de6tH/2llAn+pZJOtTx+PZ8HoOY4\nuAcEtLjEa38taWXL4xX5vPM0Go3p6aVLl5ZYZP9lWVZ1C0n0N3d17k0q31+z2VSz2ezquWUO7n1G\n0quaOrh3WtJLkv7C3Y/NeJ7PdRkA5s7M5B0O7s15je/u/2tmfy3pGU3tMuyaGXoA9TTnNX7XC2CN\nD1Qitcbn4B4QEMEHAiL4QEAEHwiI4AMBEXwgIIIPBETwgYAIPhAQwQcCIvhAQAQfCIjgAwERfCAg\ngg8ERPCBgAg+EBDBBwIi+EBABB8IiOADARF8ICCCDwRE8IGACD4QEMEHAiL4QEAEHwiI4AMBEXwg\nIIIPBLS4zIvN7KSk9yR9Kuljd7++F01F4u6l6osWLezv7gMHDiTrzz77bLJ+4sSJZH3nzp3J+tDQ\nULI+X5UKvqYCn7n7O71oBsBglF1dWA/eA8CAlQ2tS3razA6Z2ZZeNASg/8pu6t/g7qfN7HckPWtm\nx9z9vJ2yRqMxPZ1lmbIsK7lYADM1m001m82unmtFB4+6ZWajkn7r7g/NmO+9WsZCxMG9NA7uzZ2Z\nyd2tXW3O/9eY2WfNbCif/pykr0k6Otf3AzA4ZTb1hyX91Mw8f58fufszvWkLQD/1bFO/4wKCb+p/\n+OGHyfr999+frK9duzZZv+mmm2bdU52cOXMmWd+2bVuyPj4+Xmr577//frLOpj6ABYPgAwERfCAg\ngg8ERPCBgAg+EBDBBwIqe60+ChRdcvrAAw8k61dffXWyPt/P458+fTpZf+yxx0q9f9Hns2TJklLv\nP1+xxgcCIvhAQAQfCIjgAwERfCAggg8ERPCBgDiP32cvvPBC1S3U2vLly5P1pUuXJutvv/12sr5m\nzZpknfP4AMIg+EBABB8IiOADARF8ICCCDwRE8IGAOI9f0kcffZSs79u3b0CdzE9jY2PJetF5+iIb\nNmwo9fqFijU+EBDBBwIi+EBABB8IiOADARF8ICCCDwRUeB7fzHZJ+rqkCXdfnc9bJunHki6TdFLS\nRnd/r4991tbzzz+frB85cqTU+7/xxhvJetH48iMjI6WWX6ToOgaztsOzTzt48GAv2znPlVde2df3\nn6+6WeP/UNLMUQnulrTf3b8o6TlJ23rdGID+KQy+ux+Q9M6M2esljefT45Ju7XFfAPporvv4y919\nQpLc/Yyk9O8nAaiVXl2r76lio9GYns6yTFmW9WixAM5qNptqNptdPXeuwZ8ws2F3nzCzEUnJI1Ct\nwQfQHzNXqtu3b+/43G439S3/O+spSZvz6TskPTmbBgFUqzD4ZrZb0r9L+gMz+5WZfUvS9yT9qZm9\nKmld/hjAPFG4qe/ut3cofbXHvcxLRb/7XtZ1112XrPf7PH2RovPw+/fvT9YnJydLLX/Lli3J+oUX\nXljq/RcqrtwDAiL4QEAEHwiI4AMBEXwgIIIPBETwgYD4Xf2ShoeHS9UnJiaS9Y0bN866p14qut+/\nqL+i3xMo684770zWP/jgg2S96PcEigwNDZV6fVVY4wMBEXwgIIIPBETwgYAIPhAQwQcCIvhAQOae\n/Lm88gsw834vo85GR0eT9fvuuy9Zv/zyy5P1F198MVm/5JJLkvUix48fT9avuuqqUu/fb6tWrUrW\nV69enaw//PDDyfrFF18825YGxszk7m0HNmCNDwRE8IGACD4QEMEHAiL4QEAEHwiI4AMBcT9+nxWd\nhy9y4sSJZL3oPPott9xSavn9vp++rHXr1iXrRffLj42NJet1Pk9fBmt8ICCCDwRE8IGACD4QEMEH\nAiL4QEAEHwio8H58M9sl6euSJtx9dT5vVNIWSWdP8t7j7v/a4fWh78cv+l33TZs2Jev79u3rZTvz\nztatW5P1e++9N1m/4IILetnOvFL2fvwfSrqpzfyH3P2P8r+2oQdQT4XBd/cDkt5pU2r7TQKg/srs\n499lZkfM7Ptm9vmedQSg7+Z6rf5OSfe6u5vZ/ZIekvRXnZ7caDSmp7MsU5Zlc1wsgE6azaaazWZX\nz51T8N39zZaHj0hKHoFqDT6A/pi5Ut2+fXvH53a7qW9q2ac3s5GW2jckHZ1VhwAqVbjGN7PdkjJJ\nXzCzX0kalfQVM1sj6VNJJyV9u489Augxfle/Yp988kmyvnnz5mR99+7dPexm8G677bZkfc+ePcl6\n5PP0RfhdfQDnIPhAQAQfCIjgAwERfCAggg8ERPCBgDiPX3Pvvvtusn7q1Klkveh+9SeeeGLWPfXS\n4cOHk/VrrrlmQJ0sPJzHB3AOgg8ERPCBgAg+EBDBBwIi+EBABB8IiPP4C1zRb7DdeOONfV3++vXr\nk/W9e/cm69xvP3ecxwdwDoIPBETwgYAIPhAQwQcCIvhAQAQfCIjz+PPcW2+9laxfccUVyfrk5GQv\n2zkP99tXh/P4AM5B8IGACD4QEMEHAiL4QEAEHwiI4AMBLS56gpmtkPSopGFJn0p6xN3/wcyWSfqx\npMsknZS00d3f62OvaOPo0aPJer/P069cubJUHdXoZo3/iaTvuvuXJP2JpLvM7CpJd0va7+5flPSc\npG39axNALxUG393PuPuRfHpS0jFJKyStlzSeP21c0q39ahJAb81qH9/MVklaI+mgpGF3n5Cmvhwk\nLe91cwD6o3Af/ywzG5L0E0nfcfdJM5t5AX7HC/Ibjcb0dJZlyrJsdl0CKNRsNgt/Y/Gsrm7SMbPF\nkv5Z0r+4+1g+75ikzN0nzGxE0vPu/odtXstNOn1U9Y9pFh28K7pJZ9myZb1sBy16cZPODyT94mzo\nc09J2pxP3yHpyTl3CGCgujmdd4Okb0p62cwOa2qT/h5JOyTtNbO/lPSapI39bBRA7xQG393/TdJn\nOpS/2tt2MFuPP/54sl60m2XWdktwWtGm/IMPPpissylfT1y5BwRE8IGACD4QEMEHAiL4QEAEHwiI\n4AMBdX2tPuanovP0Ra699tpkfcOGDaXeH9VgjQ8ERPCBgAg+EBDBBwIi+EBABB8IiOADAXEeH0mj\no6NVt4A+YI0PBETwgYAIPhAQwQcCI
vhAQAQfCIjgAwFxHn+e27FjR7J+0UUXlXr9kiVLZt0T6o81\nPhAQwQcCIvhAQAQfCIjgAwERfCCgwuCb2Qoze87M/tvMXjazv8nnj5rZ62b2n/nfzf1vF0AvWBfj\np49IGnH3I2Y2JOlnktZL+nNJv3X3hwpe70XLANB7ZiZ3bzuwQuEFPO5+RtKZfHrSzI5JuvTse/es\nSwADM6t9fDNbJWmNpP/IZ91lZkfM7Ptm9vke9wagT7oOfr6Z/xNJ33H3SUk7Jf2eu6/R1BZBcpMf\nQH10da2+mS3WVOj/0d2flCR3f7PlKY9I2tfp9Y1GY3o6yzJlWTaHVgGkNJtNNZvNrp5beHBPkszs\nUUlvuft3W+aN5Pv/MrO/k/Rld7+9zWs5uAdUIHVwr5uj+jdIekHSy5I8/7tH0u2a2t//VNJJSd92\n94k2ryf4QAVKBb8HCyf4QAVSwefKPSAggg8ERPCBgAg+EBDBBwIi+EBABB8IiOADARF8ICCCDwRE\n8IGACD4Q0MCD3+39wlWhv3Lq3F+de5MG2x/Bn4H+yqlzf3XuTVrgwQdQPYIPBDSQH+Lo6wIAdFTZ\nL/AAqB829YGACD4Q0MCCb2Y3m9krZnbczLYOarndMrOTZvZzMztsZi/VoJ9dZjZhZv/VMm+ZmT1j\nZq+a2dNVjl7Uob/aDKTaZrDXv83n1+IzrHow2oHs45vZIknHJa2T9BtJhyRtcvdX+r7wLpnZ/0j6\nY3d/p+peJMnM1kqalPSou6/O5+2Q9La7/33+5bnM3e+uUX+j6mIg1UFIDPb6LdXgMyw7GG1Zg1rj\nXy/pl+7+mrt/LGmPpv6RdWKq0a6Pux+QNPNLaL2k8Xx6XNKtA22qRYf+pJoMpOruZ9z9SD49KemY\npBWqyWfYob+BDUY7qP/RL5V0quXx6/r/f2RduKSnzeyQmW2pupkOlp8dtCQfxWh5xf20U7uBVFsG\nez0oabhun2EVg9HWZg1XAze4+3WS/kxTH/zaqhvqQt3OxdZuINU2g73O/Mwq/QyrGox2UMH/taSV\nLY9X5PNqw91P5/99U9JPNbV7UjcTZjYsTe8jvlFxP+dw9zdbhk16RNKXq+yn3WCvqtFn2Gkw2kF8\nhoMK/iFJv29ml5nZEkmbJD01oGUXMrPP5t+8MrPPSfqapKPVdiVpal+vdX/vKUmb8+k7JD058wUD\ndk5/eZDO+oaq/wx/IOkX7j7WMq9On+F5/Q3qMxzYlXv5aYkxTX3Z7HL37w1kwV0ws8s1tZZ3TQ0d\n/qOq+zOz3ZIySV+QNCFpVNI/SXpc0u9Kek3SRnd/t0b9fUVdDKQ6oP46Dfb6kqS9qvgzLDsYbenl\nc8kuEA8H94CACD4QEMEHAiL4QEAEHwiI4AMBEXwgIIIPBPR/vSbmShV5DoYAAAAASUVORK5CYII=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP4AAAD8CAYAAABXXhlaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADlVJREFUeJzt3X+MVfWZx/HPw5AlaSWCKEwiU2vWWDeNBncXo6HGW+kW\nsyHB1Dhr6R/SVUNUdoFGg9U/ZjD7R4GosDFNlFKCBtNfsatutLANe7PaTRf8wYqC2kSx2MrI7rAK\nJqDuPPvHXGYv49zvuTPnnnsP87xfyYRzz3PuPQ8HPnPOuefc+zV3F4BYpnS6AQDtR/CBgAg+EBDB\nBwIi+EBABB8IKFfwzew6M3vDzN4yszWtagpAsWyi1/HNbIqktyQtlPRHSXsk3eTub4xajhsFgA5x\ndxtrfp49/hWSfufu77r7p5J+ImlJg5WP/PT19Z32uGw/9Dd5+ytzb0X0l5In+OdLOlT3+L3aPAAl\nx5t7QEBTczz3D5K+VPd4bm3e5/T3949Mz5gxI8cqi1epVDrdQhL9TVyZe5Py91etVlWtVptaNs+b\ne12S3tTwm3vvS9ot6dvufmDUcj7RdQCYODOTN3hzb8J7fHf/XzNbIWmnhk8ZtowOPYBymvAev+kV\nsMcHOiK1x+fNPSAggg8ERPCBgAg+EBDBBwIi+EBABB8IiOADARF8ICCCDwRE8IGACD4QEMEHAiL4\nQEAEHwiI4AMBEXwgIIIPBETwgYAIPhAQwQcCIvhAQAQfCIjgAwERfCAggg8ERPCBgAg+EBDBBwIi\n+EBABB8IaGqeJ5vZQUkfShqS9Km7X9GKpgAUK1fwNRz4irsfbUUzANoj76G+teA1ALRZ3tC6pB1m\ntsfMbmtFQwCKl/dQf4G7v29m50n6FzM74O4vjF6ov79/ZLpSqahSqeRcLYDRqtWqqtVqU8uau7dk\npWbWJ+mYuz84ar63ah0AmmdmcncbqzbhQ30z+4KZnVWb/qKkb0p6baKvB6B98hzqz5H0SzPz2uts\nd/edrWkLQJFadqjfcAXBD/X37duXrB89WuyV0JdffjlZP3LkSLK+aNGiXOvv6elJ1i+88MJcr4/G\nCjnUB3DmIvhAQAQfCIjgAwERfCAggg8ERPCBgLiOn+Hjjz9O1m+//fZk/bnnnkvWBwcHx93TmWTl\nypXJ+oYNG5L1rq6uVrYTCtfxAZyG4AMBEXwgIIIPBETwgYAIPhAQwQcCCn8dP+vz6IsXL07WX3zx\nxVa2E84DDzyQrK9atapNnUw+XMcHcBqCDwRE8IGACD4QEMEHAiL4QEAEHwgo/HX8Sy+9NFnfv39/\nrtefOjU9Zsn999+frPf29uZaf14vvfRSsn7rrbcm68eOHUvWp02blqznHWfxrrvuStavvfbaXK9f\nZlzHB3Aagg8ERPCBgAg+EBDBBwIi+EBABB8IKH2RWZKZbZG0WNKAu19WmzdT0k8lXSDpoKRed/+w\nwD4L8/rrryfrZmNeBm1a1nX6NWvW5Hr9omWNX3/uuecm6wsXLkzWT548mazv2LEjWc9yww035Hr+\nZNXMHn+rpEWj5t0j6dfu/hVJuyR9v9WNAShOZvDd/QVJR0fNXiJpW216m6TrW9wXgAJN9Bx/trsP\nSJK7H5Y0u3UtASha5jl+k5I34/f3949MVyqV3PdfA/i8arWqarXa1LITDf6Amc1x9wEz65b0QWrh\n+uADKMboneratWsbLtvsob7Vfk55WtKy2vTNkp4aT4MAOisz+Gb2hKR/l3Sxmf3ezL4r6QeS/srM\n3pS0sPYYwBki81Df3Zc2KH2jxb2UUtb47OvWrUvWV6xY0cp2Sueaa65J1h955JFkffny5a1sB03i\nzj0gIIIPBETwgYAIPhAQwQcCIvhAQAQfCKhV9+pPWjNnzkzWV69e3aZOyinr+wp6enra1AnGgz0+\nEBDBBwIi+EBABB8IiOADARF8ICCCDwTEdfwMg4ODyfrGjRuT9VWrVrWyndIZGhpK1l999dU2dYLx\nYI8PBETwgYAIPhAQwQcCIvhAQAQfCIjgAwGFv47vnhz2L/M69UcffdTKdkrn0KFDyfqmTZuS9Yce\neqiV7aBF2OMDARF8ICCCDwRE8IGACD4QEMEHAiL4QECWdR3bzLZIWixpwN0vq83rk3SbpA9qi93r\n7r9q8HzPWkcnTZmS/t2X9b3xXV1dyfrmzZuT9UsuuSRZnzdvXrI+bdq0ZH3v3r3J+oYNG5L1J598\nMln/5JNPkvWiXX755cn67t27k/Wsf/8zmZnJ3cf8D9zM33qrpEVjzH/Q3f+89jNm6AGUU2bw3f0F\nSUfHKKV3hQBKK89xzp1mttfMfmRmZ7esIwCFm+i9+j+UdL+7u5n9g6QHJd3SaOH+/v6R6Uqlokql\nMsHVAmikWq2qWq02teyEgu/uR+oebpb0TGr5+uADKMboneratWsbLtvsob6p7pzezLrrat+S9Nq4\nOgTQUZl7fDN7QlJF0iwz+72kPklfN7N5koYkHZS0vMAeAbRY5nX83Cso+XX89evXJ+v33Xdfsp71\nef28ent7k/Xp06cn69u3b0/WT5w4Me6e6mXdR5B1nfzkyZPJetb2Pfvs9PvKzz77bLJ+5ZVXJutn\nsrzX8QFMMgQfCIjgAwERfCAggg8ERPCBgAg+EFD46/hZHn/88WR92bJl7WmkQy6++OJkfefOncl6\nT09Psn7jjTcm61nfB5Dl0UcfTdZvuaXhR0zOeFzHB3Aagg8ERPCBgAg+EBDBBwIi+EBABB8IaKLf\nuRfG0qVLk/X58+cXuv477rgjWX/77beT9auuuipZ7+vrS9ZnzJiRrHd3dyfrKCf2+EBABB8IiOAD\nARF8ICCCDwRE8IGACD4QENfxM3R1dSXrWePb57Vr165CXx8xsccHAiL4QEAEHwiI4AMBEXwgIIIP\nBETwgYAyr+Ob2VxJj0maI2lI0mZ3/0czmynpp5IukHRQUq+7f1hgr5iE1q9fn6zn/V59jK2ZPf5n\nkr7n7l+VdJWkO83sEkn3SPq1u39F0i5J3y+uTQCtlBl8dz/s7ntr08clHZA0V9ISSdtqi22TdH1R\nTQJorXGd45vZlyXNk/RbSXPcfUAa/uUgaXarmwNQjKbv1TezsyT9QtJKdz9uZqMHxGs4QF5/f//I\ndKVSUaVSGV+XADJVq1VVq9Wmlm1q0EwzmyrpnyU95+6bavMOSKq4+4CZdUv6V3f/szGee0YPmoli\nvfPOO8n6RRddlOv1GTQz36CZP5a0/1Toa56WtKw2fbOkpybcIYC2auZy3gJJ35G0z8xe0fAh/b2S\n1kn6mZn9raR3JfUW2SiA1skMvrv/RlKjD6V/o7XtIJrzzjuv0y2ExJ17QEAEHwiI4AMBEXwgIIIP\nBETwgYAIPhBQU7fs5loBt+wiYWhoKFl/+OGHk/XVq1cn6+ecc06y/vzzzyfrRY+bUKRW3LILYBIh\n+EBABB8IiOADARF8ICCCDwRE8IGAm
v7OPaAIU6ak9z2zZs3K9fqDg4PJ+t13352sP/PMM7nWX1bs\n8YGACD4QEMEHAiL4QEAEHwiI4AMBEXwgID6Pj1L77LPPkvU1a9Yk6xs3bkzWp05N38oyffr0ZD1r\nHMitW7fmev08+Dw+gNMQfCAggg8ERPCBgAg+EBDBBwLKDL6ZzTWzXWb2upntM7O/q83vM7P3zOzl\n2s91xbcLoBUyr+ObWbekbnffa2ZnSXpJ0hJJfyPpmLs/mPF8ruOjMCdOnEjWV6xYkaxnXWfPq1qt\nJutXX311YetOXcfP/CIOdz8s6XBt+riZHZB0/qnXblmXANpmXOf4ZvZlSfMk/Udt1p1mttfMfmRm\nZ7e4NwAFaTr4tcP8X0ha6e7HJf1Q0p+6+zwNHxEkD/kBlEdT37lnZlM1HPrH3f0pSXL3I3WLbJbU\n8MvJ+vv7R6YrlUrm/c0Axq9arWa+p3BKUx/SMbPHJP2Xu3+vbl537fxfZrZa0nx3XzrGc3lzD4Xh\nzb3Gcr25Z2YLJH1H0j4ze0WSS7pX0lIzmydpSNJBSctb1jGAQjXzrv5vJHWNUfpV69sB0A58Hh+Y\npPg8PoDTEHwgIIIPBETwgYAIPhAQwQcCIvhAQAQfCIjgAwERfCAggg8E1PbgN/t54U6hv3zK3F+Z\ne5Pa2x/BH4X+8ilzf2XuTZrkwQfQeQQfCKgtn8cvdAUAGmr0efzCgw+gfDjUBwIi+EBAbQu+mV1n\nZm+Y2VtmtqZd622WmR00s/80s1fMbHcJ+tliZgNm9mrdvJlmttPM3jSzHZ0cvahBf6UZSHWMwV7/\nvja/FNuw04PRtuUc38ymSHpL0kJJf5S0R9JN7v5G4Stvkpm9Lekv3P1op3uRJDP7mqTjkh5z98tq\n89ZJ+m93X1/75TnT3e8pUX99amIg1XZIDPb6XZVgG+YdjDavdu3xr5D0O3d/190/lfQTDf8ly8RU\nolMfd39B0uhfQkskbatNb5N0fVubqtOgP6kkA6m6+2F331ubPi7pgKS5Ksk2bNBf2wajbdd/9PMl\nHap7/J7+/y9ZFi5ph5ntMbPbOt1MA7PdfUAaGcV4dof7GUvpBlKtG+z1t5LmlG0bdmIw2tLs4Upg\ngbv/paS/1vCG/1qnG2pC2a7Flm4g1TEGex29zTq6DTs1GG27gv8HSV+qezy3Nq803P392p9HJP1S\nw6cnZTNgZnOkkXPEDzrcz2nc/Ujd6CmbJc3vZD9jDfaqEm3DRoPRtmMbtiv4eyRdZGYXmNmfSLpJ\n0tNtWncmM/tC7TevzOyLkr4p6bXOdiVp+Fyv/nzvaUnLatM3S3pq9BPa7LT+akE65Vvq/Db8saT9\n7r6pbl6ZtuHn+mvXNmzbnXu1yxKbNPzLZou7/6AtK26CmV2o4b28a3g8we2d7s/MnpBUkTRL0oCk\nPkn/JOnnknokvSup193/p0T9fV3D56ojA6meOp/uQH8LJP2bpH0a/nc9Ndjrbkk/U4e3YaK/pWrD\nNuSWXSAg3twDAiL4QEAEHwiI4AMBEXwgIIIPBETwgYAIPhDQ/wEuJO4w9LufhwAAAABJRU5ErkJg\ngg==\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP4AAAD8CAYAAABXXhlaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADilJREFUeJzt3X+I3PWdx/HXO42ntGojodmI25pDvR6KS1ATOFJwymqj\nEogW8XJR0J4/+oc19QqxNkR29zhJUzCgQvFH0xpjQ1sLvViEMxYZipTU0Gta7cYkehdTm2STO6Ik\niMS67/6xk3R2OvP5zu58vzPf7Pv5gMXZ73tmvu/9xtd8v9/5fGc+5u4CEMusXjcAoPsIPhAQwQcC\nIvhAQAQfCIjgAwF1FHwzu87M3jSzPWb2zbyaAlAsm+44vpnNkrRH0qCkA5J2SFrh7m823I8LBYAe\ncXdrtryTPf5iSXvd/R13/0jSjyQtb7HyUz9DQ0OTfi/bD/3N3P7K3FsR/aV0EvwLJP2x7vd3a8sA\nlBxv7gEBze7gsX+S9Lm63/try/7G8PDwqdtz5szpYJXFq1QqvW4hif6mr8y9SZ33V61WVa1W27pv\nJ2/ufULSbk28uXdQ0muS/sXddzXcz6e7DgDTZ2byFm/uTXuP7+4fm9nXJG3TxCnDxsbQAyinae/x\n214Be3ygJ1J7fN7cAwIi+EBABB8IiOADARF8ICCCDwRE8IGACD4QEMEHAiL4QEAEHwiI4AMBEXwg\nIIIPBETwgYAIPhAQwQcCIvhAQAQfCIjgAwERfCAggg8ERPCBgDqZQgtdsGHDhmR99erVHT3/+Ph4\nsj5rVrH7hvrp1Zp56KGHCl1/VOzxgYAIPhAQwQcCIvhAQAQfCIjgAwERfCCgjsbxzWyfpPcljUv6\nyN0X59EU/uqZZ55J1s2aTn/etqxx+k6fP0vRz4/mOr2AZ1xSxd2P5tEMgO7o9FDfcngOAF3WaWhd\n0ktmtsPM7s6jIQDF6/RQf4m7HzSzz0h62cx2ufurjXeqvx67UqmoUql0uFoAjarVqqrValv3NXfP\nZaVmNiTpmLtvaFjuea0jooGBgWR9dHS0o+fP+rcp+s23rA/prF27ttD1z2RmJndv+g847UN9M/uk\nmZ1du/0pSV+S9MZ0nw9A93RyqN8n6Wdm5rXn+aG7b8unLQBFmnbw3f1/JS3MsZcZKetQetu29Gvl\n/v3782wnd2eeeWayvm7dumR9xYoVebaDNjEUBwRE8IGACD4QEMEHAiL4QEAEHwiI4AMB5XbJbssV\nBL9kN+tvP+OMM7rUSXNZ/T3++OPJ+mWXXZasX3311VPuCfko5JJdAKcvgg8ERPCBgAg+EBDBBwIi\n+EBABB8IqNPv3Avvww8/TNZvvfXWLnXS3E033ZSsP//8813qBGXCHh8IiOADARF8ICCCDwRE8IGA\nCD4QEMEHAmIcv0Nr1qxJ1rdu3Vro+rOuE3jyyScLXT9OT+zxgYAIPhAQwQcCIvhAQAQfCIjgAwER\nfCCgzHF8M9soaZmkMXcfqC07T9KPJV0oaZ+kW9z9/QL7LK0jR44U+vxXXXVVsv7YY48l62eddVae\n7WCGaGeP/wNJSxuWPSjpF+7+eUmvSPpW3o0BKE5m8N39VUlHGxYvl7SpdnuTpBtz7gtAgaZ7jj/P\n3cckyd0PSZqXX0sAipbXtfrJCdiGh4dP3a5UKqpUKjmtFsBJ1WpV1Wq1rftON/hjZtbn7mNmNl/S\n4dSd64MPoBiNO9WRkZGW9233UN9qPye9IOmO2u3bJRX7ETQAucoMvpltkfQrSf9gZvvN7CuSvi3p\nWjPbLWmw9juA04QVPXe9mXnR6+ill19+OVm//vrrC13/6tWrk/V169YVun6Ul5nJ3a1ZjSv3gIAI\nPhAQwQcCIvhAQAQfCIjgAwERfCAgxvELtn379mR9yZIlha7/nnvuSdYHBweT9WXLliXrfN6/vBjH\nBzAJwQcCIvhAQAQfCIjgAwERfCAggg8ExDh+wbL+9qNHG7/AeLL7778/Wd+yZcuUe6qX1d/FF1+c\nrF9yySXJ+sMPP5ysz58/P1nv6+tL1tEa4/gAJiH4QEAEHwiI4AMBEXwgIIIPBETwgYAYxy+5Dz74\nIFl/++23k/Wscf4XX3wxWX/rrbeS9RMnTiTrWS666KJk/c4770zWs75vYM6cOVPuaaZgHB/AJAQf\nCIjgAwERfCAggg8ERPCBgAg+EFDmOL6ZbZS0TNKYuw/Ulg1JulvS4drd1rj7f7V4POP4p7FqtZqs\nj46OJuurVq1K1tv4/y9Znzt3brJ+3333Jetr165N1k9nnY7j/0DS0ibLN7j7FbWfpqEHUE6ZwXf3\nVyU1+5qY9EsxgNLq5Bz/XjPbaWbfM7NP59YRgMLNnubjvivp393dzew/JG2Q1PKi6uHh4VO3K5WK\nKpXKNFcLoJVqtZr5nsxJ0wq+ux+p+/VpST9P3b8++ACK0bhTHRkZaXnfdg/1TXXn9GZW/9WoX5b0\nxpQ6BNBTmXt8M9siqSJprpntlzQk6YtmtlDSuKR9kr5aYI8Acsbn8XvswIEDyfrBgweT9SuvvDLP\ndkrnqaeeStYfeeSRZD3r+wTGx8eT9W3btiXr1157bbLeS3weH8AkBB8IiOADARF8ICCCDwRE8IGA\nCD4QEOP4Bcv62/v7+5P1a665JlnftGnTlHuaSbKugxgcHEzW9+zZk6zfdtttyXqZtz/j+AAmIfhA\nQAQfCIjgAwERfCAggg8ERPCBgKb7nXuo+fjjj5P1zZs3J+tjY2N5thPOOeeck6zfcMMNyXrWOP5M\nxR4fCIjgAwERfCAggg8ERPCBgAg+EBDBBwJiHL9D7733XrJ+1113damTmSnrOoeseRj37t2bYzcz\nB3t8ICCCDwRE8IGACD4QEMEHAiL4QEAEHwgocxzfzPolPSupT9K4pKfd/TEzO0/SjyVdKGmfpFvc\n/f0Cez0tdTqnwHPPPZesL1iwIFkfGBhI1rP6M2v6tey5ufnmm5P1WbOK3TeNj48n6zN1Toh2tuqf\nJX3D3S+T9E+S7jWzf5T0oKRfuPvnJb0i6VvFtQkgT5nBd/dD7r6zdvu4pF2S+iUtl3RyGpFNkm4s\nqkkA+ZrScZSZLZC0UNJ2SX3uPiZNvDhImpd3cwCK0fa1+mZ2tqSfSvq6ux83s8aTn5YnQ8PDw6du\nVyqVzOurAUxdtVpVtVpt675tBd/MZmsi9JvdfWtt8ZiZ9bn7mJnNl3S41ePrgw+gGI071ZGRkZb3\nbfdQ//uSRt390bplL0i6o3b7dklbGx8EoJzaGc5bIulWSa+b2W81cUi/RtJ6ST8xs3+V9I6kW4ps\nFEB+rOhxSjPzmToWKknHjh1L1hcvXpys9/rz4r0ex+/1+leuXJmsr1+/Plk///zz82wnV2Ymd2+6\nAblyDwiI4AMBEXwgIIIPBETwgYAIPhAQwQcCYhy/YIcPt7ySWZK0dOnSZH337t3J+okTJ6bcU71e\nj6Nnrf/cc89N1p944olk/dJLL03WL
7/88mT9dMY4PoBJCD4QEMEHAiL4QEAEHwiI4AMBEXwgIMbx\nSy7rO9RGR0eT9VWrViXrWf82ixYtStYfeOCBZL1TWfMGXHHFFYWu/3TGOD6ASQg+EBDBBwIi+EBA\nBB8IiOADARF8ICDG8YEZinF8AJMQfCAggg8ERPCBgAg+EBDBBwLKDL6Z9ZvZK2b2BzN73czuqy0f\nMrN3zey/az/XFd8ugDxkjuOb2XxJ8919p5mdLek3kpZL+mdJx9x9Q8bjGccHeiA1jj8768HufkjS\nodrt42a2S9IFJ587ty4BdM2UzvHNbIGkhZJ+XVt0r5ntNLPvmdmnc+4NQEHaDn7tMP+nkr7u7scl\nfVfSRe6+UBNHBMlDfgDlkXmoL0lmNlsTod/s7lslyd2P1N3laUk/b/X44eHhU7crlYoqlco0WgWQ\nUq1WM7+j8aS2PqRjZs9K+j93/0bdsvm183+Z2b9JWuTuK5s8ljf3gB5IvbnXzrv6SyT9UtLrkrz2\ns0bSSk2c749L2ifpq+4+1uTxBB/ogY6Cn8PKCT7QA3wsF8AkBB8IiOADARF8ICCCDwRE8IGACD4Q\nEMEHAiL4QEAEHwiI4AMBEXwgoK4Hv93PC/cK/XWmzP2VuTepu/0R/Ab015ky91fm3qQZHnwAvUfw\ngYC68kUcha4AQEs9+wYeAOXDoT4QEMEHAupa8M3sOjN708z2mNk3u7XedpnZPjP7nZn91sxeK0E/\nG81szMx+X7fsPDPbZma7zeylXs5e1KK/0kyk2mSy11W15aXYhr2ejLYr5/hmNkvSHkmDkg5I2iFp\nhbu/WfjK22Rm/yPpSnc/2uteJMnMviDpuKRn3X2gtmy9pP939+/UXjzPc/cHS9TfkNqYSLUbEpO9\nfkUl2IadTkbbqW7t8RdL2uvu77j7R5J+pIk/skxMJTr1cfdXJTW+CC2XtKl2e5OkG7vaVJ0W/Ukl\nmUjV3Q+5+87a7eOSdknqV0m2YYv+ujYZbbf+R79A0h/rfn9Xf/0jy8IlvWRmO8zs7l4308K8k5OW\n1GYxmtfjfpop3USqdZO9bpfUV7Zt2IvJaEuzhyuBJe5+laQbNLHhv9DrhtpQtrHY0k2k2mSy18Zt\n1tNt2KvJaLsV/D9J+lzd7/21ZaXh7gdr/z0i6WeaOD0pmzEz65NOnSMe7nE/k7j7kbppk56WtKiX\n/TSb7FUl2oatJqPtxjbsVvB3SLrYzC40s7+TtELSC11adyYz+2TtlVdm9ilJX5L0Rm+7kjRxrld/\nvveCpDtqt2+XtLXxAV02qb9akE76snq/Db8vadTdH61bVqZt+Df9dWsbdu3KvdqwxKOaeLHZ6O7f\n7sqK22Bmf6+JvbxrYurwH/a6PzPbIqkiaa6kMUlDkv5T0vOSPivpHUm3uPt7Jervi2pjItUu9ddq\nstfXJP1EPd6GnU5G2/H6uWQXiIc394CACD4QEMEHAiL4QEAEHwiI4AMBEXwgIIIPBPQXx2whLJK5\nHg0AAAAASUVORK5CYII=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP4AAAD8CAYAAABXXhlaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADbxJREFUeJzt3V2MHfV5x/HfY68qEYP8Eq1twMWxipqaCmOgsVSci4lo\nE6gjbOWCAhHCSUG5gDRqQAogwR6hXti9sEQvcoHjBFMFJWmEy4vUQCM8RCb4RRQH3CzGKFonJOu1\nW7nYy4u04KcXO7vdXfb85/i8zu7z/UgrZuc5Z+dh4HfOmfmfmb+5uwDEsqDXDQDoPoIPBETwgYAI\nPhAQwQcCIvhAQC0F38xuMLM3zewtM/tOu5oC0FnW7Di+mS2Q9Jak6yX9QdIhSbe4+5szHscXBYAe\ncXebbX0r7/gbJB1z9+PuPibpR5I219n45M/AwMC036v2Q3/zt78q99aJ/lJaCf6lkn435fd3inUA\nKo6Te0BAfS089/eSLpvy+6pi3SfUarXJ5SVLlrSwyc7LsqzXLSTRX/Oq3JvUen95nivP84Ye28rJ\nvYWSjmr85N6wpIOSbnX3wRmP82a3AaB5Ziavc3Kv6Xd8d//YzO6R9ILGDxl2zQw9gGpq+h2/4Q3w\njg/0ROodn5N7QEAEHwiI4AMBEXwgIIIPBETwgYAIPhAQwQcCIvhAQAQfCIjgAwERfCAggg8ERPCB\ngAg+EBDBBwIi+EBABB8IiOADARF8ICCCDwRE8IGACD4QEMEHAiL4QEAEHwiI4AMBEXwgIIIPBETw\ngYAIPhBQXytPNrMhSe9KOidpzN03tKMpAJ3VUvA1HvjM3U+3oxkA3dHqR31rw98A0GWthtYlPW9m\nh8zsrnY0BKDzWv2ov9Hdh82sX9J/mNmgu++b+aBarTa5nGWZsixrcbMAZsrzXHmeN/RYc/e2bNTM\nBiSddfcdM9Z7u7YBoHFmJne32WpNf9Q3s0+Z2YXF8iJJX5R0pNm/B6B7Wvmov0LSHjPz4u/80N1f\naE9bADqpbR/1626Aj/pAT3Tkoz6AuYvgAwERfCAggg8ERPCBgAg+EBDBBwJq9bv66LEPPvggWT92\n7FiyfvDgwWT97bffTtb37t2brA8ODibrt99+e7L+/vvvJ+uPP/54sn7dddcl6y+//HKyPl/xjg8E\nRPCBgAg+EBDBBwIi+EBABB8IiOADAXE9foe99957yfq2bduS9e3btyfrZfu2rH7u3Llkfa7r7+9P\n1kdGRrrUSfdxPT6AaQg+EBDBBwIi+EBABB8IiOADARF8ICDG8TvsyJH05ELr1q1r6e9fe+21yfra\ntWuT9YceeihZX7NmTbI+PDycrC9atChZP3PmTLK+ZMmSZH3ZsmXJOuP4jOMDKBB8ICCCDwRE8IGA\nCD4QEMEHAiL4QECl4/hmtkvSlyWNuPu6Yt1SST+WtFrSkKSb3f3dOs8PPY5fdr37qVOnkvWTJ08m\n61dccUWyvnDhwmS96s6ePZusL168OFlnHL/5cfwfSPrSjHX3S/q5u39W0ouSHmitRQDdVBp8d98n\n6fSM1Zsl7S6Wd0va0ua+AHRQs8f4y919RJLc/YSk5e1rCUCntWvuvORBfK1Wm1zOskxZlrVpswAm\n5HmuPM8bemxDF+mY2WpJz045uTcoKXP3ETNbKWmvu896NQgn9zi51wpO7jWvHRfpWPEz4RlJW4vl\nOyQ93XR3ALquNPhm9qSkX0r6UzP7rZl9TdI2SX9tZkclXV/8DmCOKD3Gd/fb6pT+qs29zEsLFqRf\nW1esWNFSfa776KOPkvV77723pb//wAOMNM+Gb+4BARF8ICCCDwRE8IGACD4QEMEHAiL4QEDcVx8d\nNTY2lqzfc889yfrOnTuT9WuuuSZZP3DgQLI+17/SnMJ99QFMQ/CBgAg+EBDBBwIi+EBABB8IiOAD\nATGOj4567rnnkvWbbropWV+6dGmyfuzYsWR92bJlyfp8xjg+gGkIPhAQwQcCIvhAQAQfCIjgAwER\nfCAgxvHRkrIpwNavX5+sDw8PJ+uvvvpqsn711Vcn65Exjg9gGoIPBETwgYAIPhAQwQcCIvhAQAQf\nCKiv7AFmtkvSlyWNuPu6Yt2ApLsknSwe9qC7/6xjXaJnyu6Lv2XLlmS9bJx+06ZNyfqVV16ZrKM5\njbzj/0DSl2ZZv8Pdryl+CD0wh5QG3933STo9S2nWbwQBqL5WjvHvNrPDZvY9M1vcto4AdFzpMX4d\n35X0iLu7mf2jpB2S/q7eg2u12uRylmXKsqzJzQKoJ89z5Xne0GMbukjHzFZLenbi5F6jtaLORTpz\nWNnJvbIX8VdeeSVZLzu5t2fPnmS9r6/Z9675rx0X6ZimHNOb2copta9IOtJ8ewC6rZHhvCclZZI+\nbWa/lTQg6Qtmtl7SOUlDkr7RwR4BtBnX4yOp1fvib9iwIVnft29fss5H+eZxPT6AaQg+EBDBBwIi\n+EBABB8IiOADARF8ICDG8YM7fvx4sr527dpkffHi9PVZr7/+erLe39+frKN5jOMDmIbgAwERfCAg\ngg8ERPCBgAg+EBDBBwLiYufgHnnkkWT9ww8/TNaXL1+erF900UXn3RM6j3d8ICCCDwRE8IGACD4Q\nEMEHAiL4QEAEHwiI6/HnuZdeeilZv/HGG5P1sv92+/fvT9avuuqqZB2dw/X4AKYh+EBABB8IiOAD\nARF8ICCCDwRE8IGASq/HN7NVkp6QtELSOUk73f2fzWyppB9LWi1pSNLN7v5uB3vFLD7++ONkfevW\nrcl62fX2d955Z7LOOP3c1Mg7/keSvu3ufy7pLyXdbWZ/Jul+ST93989KelHSA51rE0A7lQbf3U+4\n++FieVTSoKRVkjZL2l08bLekLZ1qEkB7ndcxvpl9RtJ6SfslrXD3EWn8xUFS+h5MACqj4XvumdmF\nkn4q6VvuPmpmM7/EXfdL3bVabXI5yzJlWXZ+XQIolee58jxv6LENXaRjZn2SnpP07+7+aLFuUFLm\n7iNmtlLSXnf/xAyLXKTTWWUn9y6//PJkvWzSzLKTe4899liyjt5px0U635f064nQF56RtLVYvkPS\n0013CKCrGhnO2yjpq5LeMLPXNP6R/kFJ2yX9xMy+Lum4pJs72SiA9uF6/DnuvvvuS9Z37NiRrPf1\npV/7jx49mqyvWbMmWUfvcD0+gGkIPhAQwQcCIvhAQAQfCIjgAwERfCAgxvErbnR0NFlfvXp1sn76\n9Olk/amnnkrWt2zhosu5inF8ANMQfCAggg8ERPCBgAg+EBDBBwIi+EBADd9zD71RduursnH6hx9+\nOFnftGnTefeEuY93fCAggg8ERPCBgAg+EBDBBwIi+EBABB8IiOvxe+zAgQPJetk8g/39/cl62X3x\nL7jggmQdcxfX4wOYhuADARF8ICCCDwRE8IGACD4QUGnwzWyVmb1oZv9lZm+Y2TeL9QNm9o6Z/Wfx\nc0Pn2wXQDqXj+Ga2UtJKdz9sZhdKe
lXSZkl/K+msuycnYI8+jl92X/xLLrmkpecPDQ0l65dddlmy\njvkrNY5feiMOdz8h6USxPGpmg5IunfjbbesSQNec1zG+mX1G0npJE183u9vMDpvZ98xscZt7A9Ah\nDQe/+Jj/U0nfcvdRSd+V9Cfuvl7jnwiSH/kBVEdD99wzsz6Nh/5f3P1pSXL3U1MeslPSs/WeX6vV\nJpezLCv9/jmA85fnufI8b+ixDV2kY2ZPSPpvd//2lHUri+N/mdk/SPqcu982y3M5uZfAyT10Sksn\n98xso6SvSnrDzF6T5JIelHSbma2XdE7SkKRvtK1jAB3VyFn9lyUtnKX0s/a3A6AbuK9+h505cyZZ\nHxsbS9ZvvfXWZP3iiy8+754AvrILBETwgYAIPhAQwQcCIvhAQAQfCIjgAwFxX31gnuK++gCmIfhA\nQAQfCKjrwW/0euFeob/WVLm/Kvcmdbc/gj8D/bWmyv1VuTdpngcfQO8RfCCgrozjd3QDAOqqN47f\n8eADqB4+6gMBEXwgoK4F38xuMLM3zewtM/tOt7bbKDMbMrNfmdlrZnawAv3sMrMRM3t9yrqlZvaC\nmR01s+d7OXtRnf4qM5HqLJO9/n2xvhL7sNeT0XblGN/MFkh6S9L1kv4g6ZCkW9z9zY5vvEFm9htJ\n17r76V73Iklm9nlJo5KecPd1xbrtkv7H3f+pePFc6u73V6i/ATUwkWo3JCZ7/ZoqsA9bnYy2Vd16\nx98g6Zi7H3f3MUk/0vi/ZJWYKnTo4+77JM18EdosaXexvFvSlq42NUWd/qSKTKTq7ifc/XCxPCpp\nUNIqVWQf1umva5PRdut/9Esl/W7K7+/o//8lq8IlPW9mh8zsrl43U8dydx+RJmcxXt7jfmZTuYlU\np0z2ul/Siqrtw15MRluZd7gK2OjufyHpbzS+4z/f64YaULWx2MpNpDrLZK8z91lP92GvJqPtVvB/\nL2nqJG6rinWV4e7DxT9PSdqj8cOTqhkxsxXS5DHiyR73M427n5py15Wdkj7Xy35mm+xVFdqH9Saj\n7cY+7FbwD0m63MxWm9kfSbpF0jNd2nYpM/tU8corM1sk6YuSjvS2K0njx3pTj/eekbS1WL5D0tMz\nn9Bl0/orgjThK+r9Pvy+pF+7+6NT1lVpH36iv27tw659c68YlnhU4y82u9x9W1c23AAzW6Pxd3nX\n+LRiP+x1f2b2pKRM0qcljUgakPRvkv5V0h9LOi7pZnf/3wr19wWNH6tOTqQ6cTzdg/42SvqFpDc0\n/t91YrLXg5J+oh7vw0R/t6kL+5Cv7AIBcXIPCIjgAwERfCAggg8ERPCBgAg+EBDBBwIi+EBA/wdC\nvz5vgHbXgQAAAABJRU5ErkJggg==\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP4AAAD8CAYAAABXXhlaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADkpJREFUeJzt3X+MVfWZx/HPI2hMUQtWfogjsqGRJSZodmWI2j9uQyjG\nYDA1cbUasbuahviD2MSUanQu6/4BRjSuscYIbRBRyjbpoiZUre51JZuuZBd2wSI2CpSfw6zjaoeo\nceXZP+ZA74xzv/fO3F9neN6vZOKd87l3zuPVz5x775lzjrm7AMRyWrsHANB6FB8IiOIDAVF8ICCK\nDwRE8YGA6iq+mV1tZu+Z2ftm9pNGDQWguWyk+/HN7DRJ70uaJ+mQpK2SbnT39wbdjz8UANrE3W2o\n5fVs8Tsl/cHd97n7l5I2SFpUYeUnv7q6ugZ8n7cv5jt158vzbM2YL6We4l8gaX/Z9weyZQByjg/3\ngIDG1vHYg5KmlX3fkS37mmKxePL2+PHj61hl8xUKhXaPkMR8I5fn2aT65yuVSiqVSjXdt54P98ZI\n2q3+D/cOS3pH0k3uvmvQ/Xyk6wAwcmYmr/Dh3oi3+O7+lZndJek19b9lWDO49ADyacRb/JpXwBYf\naIvUFp8P94CAKD4QEMUHAqL4QEAUHwiI4gMBUXwgIIoPBETxgYAoPhAQxQcCovhAQBQfCIjiAwFR\nfCAgig8ERPGBgCg+EBDFBwKi+EBAFB8IiOIDAVF8ICCKDwRE8YGAKD4QEMUHAqL4QEAUHwiI4gMB\nUXwgoLH1PNjM9kr6RNJxSV+6e2cjhkLtjh07lsx7e3uT+V133ZXMV6xYkcxnzZqVzJFPdRVf/YUv\nuPvHjRgGQGvU+1LfGvAzALRYvaV1Sa+a2VYzu6MRAwFovnpf6l/l7ofNbKKk181sl7tvGXynYrF4\n8nahUFChUKhztQAGK5VKKpVKNd3X3L0hKzWzLkl/cvfHBi33Rq0DX8eHe6jEzOTuNlQ24pf6ZvYN\nMzsruz1O0vck7RzpzwPQOvW81J8s6ddm5tnPWe/urzVmLADN1LCX+hVXwEv9ptq/f38ynzlzZjL/\n4osvkvnYseltw913353MZ8+encwPHTqUzG+55ZZk3tHRkcwja8pLfQCjF8UHAqL4QEAUHwiI4gMB\nUXwgIIoPBMR+/FHuww8/TOZXXnllMu/p6UnmDz30UDJ/66236sovueSSZH7w4MFkfu211ybzW2+9\nNZnPnTs3mY8bNy6Z5xn78QEMQPGBgCg+EBDFBwKi+EBAFB8IiOIDAdV7zj202dKlS5N5tf30Gzdu\nTObXX399Mj9+/Hgyr3a8/+mnn57Mt23blswfffTRZD5//vxkfs455yTzauewu/TSS5N5XrHFBwKi\n+EBAFB8IiOIDAVF8ICCKDwRE8YGAOB4/5/bs2ZPMZ8yYkcwXL16czFevXp3Mx4wZk8xHuwMHDiTz\nVatWJfPOzs5kftNNNw17pkbheHwAA1B8ICCKDwRE8YGAKD4QEMUHAqL4QEBV9+Ob2RpJCyV1u/vs\nbNkESb+UdJGkvZJucPdPKjye/fh1mDNnTjJ/9913k3m168+PHz9+2DPhz3p7e5P5ueee26JJvq7e\n/fi/kLRg0LJlkn7r7jMlvSnpp/WNCKCVqhbf3bdI+njQ4kWS1ma310q6rsFzAWiikb7Hn+Tu3ZLk\n7kckTWrcSACarVHn3Eu+iS8WiydvFwoFFQqFBq0WwAmlUqnqOQJPqOkgHTO7SNLLZR/u7ZJUcPdu\nM5si6V/cfVaFx/LhXh34cC/fTuUP9yTJsq8TXpJ0W3Z7saRNI54OQMtVLb6ZvSDp3yRdbGZ/NLMf\nSlohab6Z7ZY0L/sewCjB8fht9tFHHyXziRMnJvNrrrkmmb/yyivDngmnBo7HBzAAxQcCovhAQBQf\nCIjiAwFRfCAgig8E1Ki/1ccIPfLII3U9ftmyZQ2aBJGwxQcCovhAQBQfCIjiAwFRfCAgig8ERPGB\ngNiP32RfffVVMt+zZ08yr3YugyVLliTzrq6uZD537txkfuGFFyZzjE5s8YGAKD4QEMUHAqL4QEAU\nHwiI4gMBUXwgIM6r32QffPBBMr/44ouTebXnbtKk9PVKp06dmsx37tyZzO+5555k/sADDyTzCRMm\nJHM0D+fVBzAAxQcCovhAQBQfCIjiAwFRfCAgig8EVPV4fDNbI2mhpG53n50t65J0h6Sj2d3ud/ff\nNG3KUWzatGnJfMOGDcn87bffTuYPPvhgMp84cWIy7+3tTeb33XdfMp81a1Yy37FjRzKvNh+ao5Yt\n/i8kLRhi+WPu/lfZF6UHRpGqxXf3LZI+HiIa8i+CAORfPe/x7zSz7Wa22sy+2bCJADTdSM+59zNJ\nf+/ubmb/IOkxSX9X6c7FYvHk7UKhoEKhMMLVAqikVCqpVCrVdN8RFd/de8q+fVbSy6n7lxcfQHMM\n3qguX7684n1rfalvKntPb2ZTyrLvS0of4gUgV2rZnfeCpIKkb5nZHyV1SfqumV0m6bikvZJ+1MQZ\nATQYx+Mjqdp/u6effjqZr1q1Kpk///zzyfyKK65I5qiM4/EBDEDxgYAoPhAQxQcCovhAQBQfCIji\nAwGxHx916evrS+bVrhswb968ZL5u3bphz4R+7McHMADFBwKi+EBAFB8IiOIDAVF8ICCKDwTEfnw0\n1ebNm5P5woULk/m+ffuSeUdHx7BnioL9+AAGoPhAQBQfCIjiAwFRfCAgig8ERPGBgEZ67TzU6PHH\nH0/mS5YsSeZnnnlmI8dpuWrH41fz5JNPJvOVK1fW9fOjYosPBETxgYAoPhAQxQcCovhAQBQfCIji\nAwFV3Y9vZh2SnpM0WdJxSc+6+z+a2QRJv5R0kaS9km5w90+aOOuoNG7cuGRe7bz0o30//vTp05P5\neeedl8wPHz7cwGlwQi1b/P+T9GN3v0TSFZLuNLO/lLRM0m/dfaakNyX9tHljAmikqsV39yPuvj27\n3Sdpl6QOSYskrc3utlbSdc0aEkBjDes9vplNl3SZpN9Jmuzu3VL/LwdJkxo9HIDmqPlv9c3sLEm/\nkrTU3fvMbPCJ9CqeWK9YLJ68XSgUVCgUhjclgKpKpZJKpVJN962p+GY2Vv2lX+fum7LF3WY22d27\nzWyKpKOVHl9efADNMXijunz58or3rfWl/s8l/d7dnyhb9pKk27LbiyVtGvwgAPlUy+68qyTdLGmH\nmW1T/0v6+yWtlLTRzP5W0j5JNzRzUACNw3n161RtP/z8+fOT+RlnnJHM33jjjWQ+duzoPqXCvffe\nm8xffPHFZH7kyJFGjnNK4bz6AAag+EBAFB8IiOIDAVF8ICCKDwRE8YGA2I/fZOvXr0/mS5cuTeYL\nFixI5g8//HAynzp1ajI/erTiX1pLkqZNm5bMq+np6UnmM2bMSObnn39+Mt+9e/ewZ4qC/fgABqD4\nQEAUHwiI4gMBUXwgIIoPBETxgYBG9
8Hco8DNN9+czKudf3Dz5s3J/PLLL0/mZ599djL/9NNPk/mm\nTekTK23dujWZP/PMM8n82LFjyfz2229P5hgZtvhAQBQfCIjiAwFRfCAgig8ERPGBgCg+EBDH449y\nn3/+eTJ//fXXk/lTTz2VzD/77LNkvmXLlmReTbXHd3Z2JvMxY8bUtf5TGcfjAxiA4gMBUXwgIIoP\nBETxgYAoPhBQ1eKbWYeZvWlm75rZDjO7O1veZWYHzOw/s6+rmz8ugEaouh/fzKZImuLu283sLEn/\nIWmRpL+R9Cd3f6zK49mPD7RBaj9+1RNxuPsRSUey231mtkvSBSd+dsOmBNAyw3qPb2bTJV0m6d+z\nRXea2XYzW21m32zwbACapObiZy/zfyVpqbv3SfqZpBnufpn6XxEkX/IDyI+azrlnZmPVX/p17r5J\nkty9/KJoz0p6udLji8XiyduFQqHqeeYADF+pVFKpVKrpvjUdpGNmz0n6H3f/cdmyKdn7f5nZvZLm\nuPsPhngsH+4BbZD6cK+WT/WvkvSvknZI8uzrfkk/UP/7/eOS9kr6kbt3D/F4ig+0QV3Fb8DKKT7Q\nBhyWC2AAig8ERPGBgCg+EBDFBwKi+EBAFB8IiOIDAVF8ICCKDwRE8YGAKD4QUMuLX+vxwu3CfPXJ\n83x5nk1q7XwUfxDmq0+e58vzbNIpXnwA7UfxgYBaciKOpq4AQEVtOwMPgPzhpT4QEMUHAmpZ8c3s\najN7z8zeN7OftGq9tTKzvWb2X2a2zczeycE8a8ys28z+u2zZBDN7zcx2m9mr7bx6UYX5cnMh1SEu\n9npPtjwXz2G7L0bbkvf4ZnaapPclzZN0SNJWSTe6+3tNX3mNzOxDSX/t7h+3exZJMrPvSOqT9Jy7\nz86WrZT0kbs/kv3ynODuy3I0X5dquJBqKyQu9vpD5eA5rPditPVq1Ra/U9If3H2fu38paYP6/yXz\nxJSjtz7uvkXS4F9CiyStzW6vlXRdS4cqU2E+KScXUnX3I+6+PbvdJ2mXpA7l5DmsMF/LLkbbqv/R\nL5C0v+z7A/rzv2ReuKRXzWyrmd3R7mEqmHTioiXZVYwmtXmeoeTuQqplF3v9naTJeXsO23Ex2txs\n4XLgKne/XNI16n/iv9PugWqQt32xubuQ6hAXex38nLX1OWzXxWhbVfyDkqaVfd+RLcsNdz+c/bNH\n0q/V//Ykb7rNbLJ08j3i0TbPM4C795RdNulZSXPaOc9QF3tVjp7DShejbcVz2Krib5X0bTO7yMzO\nkHSjpJdatO6qzOwb2W9emdk4Sd+TtLO9U0nqf69X/n7vJUm3ZbcXS9o0+AEtNmC+rEgnfF/tfw5/\nLun37v5E2bI8PYdfm69Vz2HL/nIv2y3xhPp/2axx9xUtWXENzOwv1L+Vd/VfOnx9u+czsxckFSR9\nS1K3pC5J/yzpnyRdKGmfpBvc/X9zNN93VcOFVFs0X6WLvb4jaaPa/BzWezHautfPn+wC8fDhHhAQ\nxQcCovhAQBQfCIjiAwFRfCAgig8ERPGBgP4fDIwzi8Qky54AAAAASUVORK5CYII=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP4AAAD8CAYAAABXXhlaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADBBJREFUeJzt3U+sXHUZxvHnKY1JBQKE2DahFo1GNAbS+KeJoYtDQCVG\nUmABtS5AG8IClWhCoGzuYFyICxJcsAAKKcamapNa2AgSPDFokEZbBS2URFqg0gsaamxCCNLXxT29\nTi93/nTmzJnTvt9PMumZ3+/MnPee9Jnzd+bniBCAXJZMuwAAzSP4QEIEH0iI4AMJEXwgIYIPJDRW\n8G1fafsF2/tt315XUQAmy6Nex7e9RNJ+SZdL+oek3ZI2RMQLC+bjRgFgSiLCi7WPs8VfK+mliDgY\nEe9K2i5pfY+Fzz9mZmZOeN62B/WdvvW1ubZJ1NfPOMG/QNKrXc9fq9oAtBwn94CElo7x2kOSVnc9\nX1W1vU+n05mfPvfcc8dY5OQVRTHtEvqivtG1uTZp/PrKslRZlkPNO87JvTMkvai5k3uvS3pW0tci\nYt+C+WLUZQAYnW1Fj5N7I2/xI+I929+S9ITmDhm2LAw9gHYaeYs/9ALY4gNT0W+Lz8k9ICGCDyRE\n8IGECD6QEMEHEiL4QEIEH0iI4AMJEXwgIYIPJETwgYQIPpAQwQcSIvhAQgQfSIjgAwkRfCAhgg8k\nRPCBhAg+kBDBBxIi+EBCBB9IaJwhtHAa2LFjR9/+l19+uW//bbfdVmc5aAhbfCAhgg8kRPCBhAg+\nkBDBBxIi+EBCBB9IyOOMXW/7gKR/Szom6d2IWLvIPDHOMjBZg67jX3/99X3733vvvTrLQY1sKyK8\nWN+4N/Ack1RExFtjvg+ABo27q+8a3gNAw8YNbUh63PZu2zfVURCAyRt3V//SiHjd9ock/dr2voh4\neuFMnU5nfrooChVFMeZiASxUlqXKshxq3rFO7p3wRvaMpP9ExD0L2jm512Kc3Dt99Tu5N/Kuvu0P\n2j6rmj5T0pckPT/q+wFozji7+isk7bQd1fv8NCKeqKcsAJM0cvAj4mVJa2qsBaegt99+u2//smXL\nGqoEJ4NLcUBCBB9IiOADCRF8ICGCDyRE8IGECD6QEL+rj7Fs27atb/+mTZsaqgQngy0+kBDBBxIi\n+EBCBB9IiOADCRF8ICGCDyTEdXyM5eDBg9MuASNgiw8kRPCBhAg+kBDBBxIi+EBCBB9IiOADCdU2\nhFbPBTCEVquNO4TW2Wef3bf/yJEjJ10T6jGRIbQAnLoIPpAQwQcSIvhAQgQfSIjgAwkRfCChgcG3\nvcX2rO2/dLWdZ/sJ2y/aftz2OZMtE0CdhtniPyzpywva7pD0ZERcJOkpSZvrLgzA5AwMfkQ8Lemt\nBc3rJW2tprdKurrmugBM0KjH+MsjYlaSIuKwpOX1lQRg0ur6zb2+N+N3Op356aIoVBRFTYsFcFxZ\nlirLcqh5h/qSju0LJT0WEZdUz/dJKiJi1vZKSb+JiE/1eC1f0mkxvqRz+qrjSzquHsc9KunGavoG\nSbtGrg5A44a5nLdN0u8lfcL2K7a/IemHkr5o+0VJl1fPAZwiBh7jR8TGHl1X1FwLTkHr1q2bdgkY\nAXfuAQkRfCAhgg8kRPCBhAg+kBDBBxIi+EBCdd2rj6SuueaaaZeAEbDFBxIi+EBCBB9IiOADCRF8\nICGCDyRE8IGECD6QEMEHEiL4QEIEH0iI4AMJEXwgIYIPJETwgYT4Pn5y77zzTt/+Y8eO9e1neLRT\nE1t8ICGCDyRE8IGECD6QEMEHEiL4QEIEH0ho4HV821skfVXSbERcUrXNSLpJ0hvVbHdGxK8mViUm\nZvPmzX37lyzpv22wXWc5aMgwW/yHJX15kfZ7IuIz1YPQA6eQgcGPiKclvbVIFx/1wClqnGP8W2zv\ntf2g7XNqqwjAxI16r/59kr4fEWH7B5LukbSp18ydTmd+uigKFUUx4mIB9FKWpcqyHGpeD/MlC9sX\nSnrs+Mm9Yfuq/uCLHO21evXqvv2HDh3q23///ff37d+0qef2ABNmWxGx6CH5sLv6Vtcxve2VXX3X\nSnp+9PIANG2Yy3nbJBWSzrf9iqQZSZfZXiPpmKQDkm6eYI0AajYw+BGxcZHmhydQC4CGcOcekBDB\nBxIi+EBCBB9IiOADCRF8ICGCDyRE8IGECD6QEMEHEiL4QEIEH0iI4AMJEXwgIYIPJETwgYQIPpAQ\nwQcSIvhAQgQfSIjgAwkRfCAhgg8kRPCBhAg+kBDBBxIi+EBCBB9IiOADCRF8ICGCDyS0dNAMtldJ\nekTSCknHJD0QET+2fZ6kn0m6UNIBSddFxL8nWCsmYO3atX37d+7c2VAlaNIwW/z/SvpeRHxa0hck\n3WL7k5LukPRkRFwk6SlJmydXJoA6DQx+RByOiL3V9FFJ+yStkrRe0tZqtq2Srp5UkQDqdVLH+LY/\nImmNpGckrYiIWWnuw0HS8rqLAzAZA4/xj7N9lqQdkm6NiKO2Y8EsC5/P63Q689NFUagoipOrEsBA\nZVmqLMuh5h0q+LaXai70P4mIXVXzrO0VETFre6WkN3q9vjv4ACZj4Ub1rrvu6jnvsLv6D0n6W0Tc\n29X2qKQbq+kbJO1a+CIA7TTM5bxLJX1d0nO292hul/5OSXdL+rntb0o6KOm6SRYKoD4Dgx8Rv5N0\nRo/uK+otB03bsGFD3/5B1/H37NlTZzloCHfuAQkRfCAhgg8kRPCBhAg+kBDBBxIi+EBCjuh5i309\nC7Bj0svA6Pbv39+3/+KLL+7bv2zZsr79R44cOemaUA/biggv1scWH0iI4AMJEXwgIYIPJETwgYQI\nPpAQwQcS4jo++tq4cWPf/u3bt/ft37Wr/w8zXXXVVSddE4bDdXwAJyD4QEIEH0iI4AMJEXwgIYIP\nJETwgYS4jg+cpriOD+AEBB9IiOADCRF8ICGCDyRE8IGEBgbf9irbT9n+q+3nbH+7ap+x/ZrtP1WP\nKydfLoA6DLyOb3ulpJURsdf2WZL+KGm9pOsl/Sci7hnweq7jA1PQ7zr+0kEvjojDkg5X00dt75N0\nwfH3rq1KAI05qWN82x+RtEbSH6qmW2zvtf2g7XNqrg3AhAwd/Go3f4ekWyPiqKT7JH0sItZobo+g\n7y4/gPYYuKsvSbaXai70P4mIXZIUEW92zfKApMd6vb7T6cxPF0WhoihGKBVAP2VZqizLoeYd6ks6\nth+R9M+I+F5X28rq+F+2vyvp8xHxvl9m5OQeMB39Tu4Nc1b/Ukm/lfScpKged0raqLnj/WOSDki6\nOSJmF3k9wQemYKzg17Bwgg9MAV/LBXACgg8kRPCBhAg+kBDBBxIi+EBCBB9IiOADCRF8ICGCDyRE\n8IGECD6QUOPBH/b7wtNCfeNpc31trk1qtj6CvwD1jafN9bW5Nuk0Dz6A6SP4QEKN/BDHRBcAoKep\n/QIPgPZhVx9IiOADCTUWfNtX2n7B9
n7btze13GHZPmD7z7b32H62BfVssT1r+y9dbefZfsL2i7Yf\nn+boRT3qa81AqosM9vqdqr0V63Dag9E2coxve4mk/ZIul/QPSbslbYiIFya+8CHZ/rukz0bEW9Ou\nRZJsr5N0VNIjEXFJ1Xa3pH9FxI+qD8/zIuKOFtU3oyEGUm1Cn8Fev6EWrMNxB6MdV1Nb/LWSXoqI\ngxHxrqTtmvsj28Rq0aFPRDwtaeGH0HpJW6vprZKubrSoLj3qk1oykGpEHI6IvdX0UUn7JK1SS9Zh\nj/oaG4y2qf/oF0h6tev5a/r/H9kWIelx27tt3zTtYnpYfnzQkmoUo+VTrmcxrRtItWuw12ckrWjb\nOpzGYLSt2cK1wKUR8TlJX9Hcil837YKG0LZrsa0bSHWRwV4XrrOprsNpDUbbVPAPSVrd9XxV1dYa\nEfF69e+bknZq7vCkbWZtr5DmjxHfmHI9J4iIN7uGTXpA0uenWc9ig72qReuw12C0TazDpoK/W9LH\nbV9o+wOSNkh6tKFlD2T7g9Unr2yfKelLkp6fblWS5o71uo/3HpV0YzV9g6RdC1/QsBPqq4J03LWa\n/jp8SNLfIuLerrY2rcP31dfUOmzszr3qssS9mvuw2RIRP2xkwUOw/VHNbeVDc0OH/3Ta9dneJqmQ\ndL6kWUkzkn4p6ReSPizpoKTrIuJIi+q7TEMMpNpQfb0Ge31W0s815XU47mC0Yy+fW3aBfDi5ByRE\n8IGECD6QEMEHEiL4QEIEH0iI4AMJEXwgof8B3PZcgJfvmMYAAAAASUVORK5CYII=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP4AAAD8CAYAAABXXhlaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADttJREFUeJzt3X+MVfWZx/HPA8QfoCmGyEAcGNZtlg2bECOLSSOJt7Fb\nyKYKqYo/VqPdFdG4Wq0xRWPCsNmE1j9MWBL8w0LFWmxLTRdQo0jIzepCV2I7VoTRMQ22ggxqXAdE\nicizf8yFnZnO/Z47c+6PA8/7lUy8c55773m48TPnx/ee8zV3F4BYxrS6AQDNR/CBgAg+EBDBBwIi\n+EBABB8IKFfwzWyBmXWb2Ttm9sN6NQWgsWy04/hmNkbSO5KulHRA0i5JN7h795Dn8UUBoEXc3YZb\nnmeLf5mkHnd/z92/lPQLSQurrPzUz/Llywf9XrQf+jtz+ytyb43oLyVP8C+S9OcBv79fWQag4Di5\nBwQ0Lsdr90uaPuD39sqyv9DZ2Xnq8cSJE3OssvFKpVKrW0iiv9Ercm9S/v7K5bLK5XJNz81zcm+s\npLfVf3LvA0mvSbrR3fcOeZ6Pdh0ARs/M5FVO7o16i+/uX5nZv0raqv5DhrVDQw+gmEa9xa95BWzx\ngZZIbfE5uQcERPCBgAg+EBDBBwIi+EBABB8IiOADARF8ICCCDwRE8IGACD4QEMEHAiL4QEAEHwiI\n4AMBEXwgIIIPBETwgYAIPhAQwQcCIvhAQAQfCIjgAwHlmUILBdDT05Os79y5M1m/9957k/Vp06Yl\n6w8++GCyvmDBgmR98uTJyToagy0+EBDBBwIi+EBABB8IiOADARF8ICCCDwRkeeauN7N9kj6VdELS\nl+5+2TDP8TzrONN1d3cn6ytXrkzWn3322WT96NGjI+6pnmbPnp2sd3V1NamTeMxM7m7D1fJ+geeE\npJK7f5LzfQA0Ud5dfavDewBosryhdUkvmdkuM1tSj4YANF7eXf3L3f0DM7tQ0stmttfdXx36pM7O\nzlOPS6WSSqVSztUCGKpcLqtcLtf03Fwn9wa9kdlySYfd/bEhyzm5l8DJPU7uNUrq5N6od/XNbLyZ\nnVd5PEHStyXtHu37AWiePLv6bZJ+Y2ZeeZ+fu/vW+rQFoJHqtqtfdQXBd/UPHDiQrM+aNStZ7+vr\ny7X+m2++OVkfMya903fw4MFkfevW9N/6cePS25ZHH300Wb/vvvuSdVTXkF19AKcvgg8ERPCBgAg+\nEBDBBwIi+EBABB8IiHH8nF555ZVk/aGHHkrWd+zYkazPnDkzWb/nnnuS9bvuuitZNxt2mPeUL774\nItf6165dm2v9zz//fLKedd/+yBjHBzAIwQcCIvhAQAQfCIjgAwERfCAggg8ElPeee2e8Y8eOJetL\nly5N1rNurTVnzpxk/eWXX07WJ06cmKzndc455yTrq1evTtYPHTqUrG/ZsiVZf/fdd5N1jA5bfCAg\ngg8ERPCBgAg+EBDBBwIi+EBABB8IKPw4/vHjx5P1JUvSc4FmjdNnXU/f6nH6vLLG+bPuR5A1jo/G\nYIsPBETwgYAIPhAQwQcCIvhAQAQfCIjgAwFljuOb2VpJ35HU6+6zK8sukPRLSR2S9kla7O6fNrDP\nhtmwYUOy/vTTT+d6/46OjmS96OP0ec2aNavVLWAYtWzxfypp/pBlyyRtc/eZkrZLSn9LA0ChZAbf\n3V+V9MmQxQslra88Xi9pUZ37AtBAoz3Gn+zuvZLk7gclTa5fSwAarV7f1U9OjtfZ2XnqcalUUqlU\nqtNqAZxULpdVLpdreu5og99rZm3u3mtmUyQl76g4MPgAGmPoRnXFihVVn1vrrr5Vfk7aLOm2yuNb\nJW0aSYMAWisz+Ga2QdIOSX9jZn8ys+9J+pGkfzCztyVdWfkdwGkic1ff3W+qUvpWnXtpiblz5ybr\n5557brL++eefJ+urVq0acU9FsmbNmmT9/vvvT9ZvueWWeraDOuGbe0BABB8IiOADARF8ICCCDwRE\n8IGACD4QkLknv2affwVm3uh1NNKUKVOS9az536+//vpk/ZlnnhlxT800b968ZH3Hjh3Jent7e7L+\n1ltvJetnnXVWsn722Wcn65GZmdzdhquxxQcCIvhAQAQfCIjgAwERfCAggg8ERPCBgOp1z70z1qWX\nXpqsv/jii8n6c889l6xv3rw5Wb/66quT9UZbvHhxsp41jj927Nhk/fzzzx9xT8iPLT4QEMEHAiL4\nQEAEHwiI4AMBEXwgIIIPBMT1+BkOHz6crF933XXJ+tatW5P18ePHJ+sbNmxI1rO+Z5Bl7969yXrW\nv6+vry9Znzp1arLe09OTrGd9PqiO6/EBDELwgYAIPhAQwQcCIvhAQAQfCIjgAwFljuOb2VpJ35HU\n6+6zK8uWS1oi6eRN5R9292EvTD/dx/GzZI3zX3PNNcn6tm3b6tlO3dXw/0eu12/cuDFZv/baa5N1\nVJd3HP+nkuYPs/wxd7+08pO+GwWAQskMvru/KumTYUrpP/UACivPMf7dZtZlZj8xs6/VrSMADTfa\ne+6tkfRv7u5m9u+SHpP0L9We3NnZeepxqVRSqVQa5WoBVFMul1Uul2t6bk0X6ZhZh6QtJ0/u1Vqr\n1Dm5l8DJPU7uNUo9LtIxDTimN7OBU8h+V9Lu0bcHoNkyd/XNbIOkkqRJZvYnScslfdPMLpF0QtI+\nSUsb2COAOuN6/AY7cuRIsp513/ydO3cm68ePH0/Wv/rqq2S91bJ25bPuR5DX66+/nqzPmTMn1/uP\nG9e6qSu4Hh/AIAQfCIjgAwERfCAggg8ERPCBgAg+EBDj+Ke5PXv2JOvbt29P1ru6upL1devWjbin\nSCZMmJCsv/HGG8n6xRdfXM92BmEcH8AgBB8IiOADARF8ICCCDwRE8IGACD4QUOsuFkZdzJo1K1m/\n4oorkvWjR4/Ws51wsm6t1tHR0aRORoYtPhAQwQcCIvhAQAQfCIjgAwERfCAggg8ExDh+wWXdF/+R\nRx5J1j/++ONc6586dWqu1x84cCBZz5qCa+C8i8P56KOPkvVjx44l6w888ECyfuGFFybr48ePT9bH\njh2brLcKW3wgIIIPBETwgYAIPhAQwQcCIvhAQAQfCCjzvvpm1i7pKUltkk5IesLd/8PMLpD0S0kd\nkvZJWuzunw7zeu6rn8P+/fuT9WnTpiXrWdeD33777cn6HXfckaxn3bf/xhtvTNazxvFfeOGFZH3+\n/PnJemR576t/XNIP3P3vJH1D0t1m9reSlkna5u4zJW2X9FC9GgbQWJnBd/eD7t5VeXxE0l5J7ZIW\nSlpfedp6SYsa1SSA+hrRMb6ZzZB0iaTfSmpz916p/4+DpMn1bg5AY9T8XX0zO0/SryV9392PmNnQ\nA/eqB/IDv29dKpVUKpVG1iWATOVyWeVyuabn1hR8Mxun/tD/zN03VRb3mlmbu/ea2RRJh6q9PutC\nCwD5Dd2orlixoupza93VXydpj7uvG
rBss6TbKo9vlbRp6IsAFFPmFt/MLpf0T5LeNLPfq3+X/mFJ\nP5b0KzP7Z0nvSVrcyEYB1E9m8N39vyVVu6j4W/VtB0M9/vjjuV6/aFF6sGXZsmXJel9fX7L+5JNP\njrSlQWbMmJGsM07fGHxzDwiI4AMBEXwgIIIPBETwgYAIPhAQwQcCyrweP/cKuB4/l+7u7mR97ty5\nyfpnn32WrN95553J+saNG5P1rPv2T5o0KVnfvXt3st7W1paso7q81+MDOMMQfCAggg8ERPCBgAg+\nEBDBBwIi+EBAjOOf5np6epL1lStXJut5r6efPn16sr569epk/aqrrsq1flTHOD6AQQg+EBDBBwIi\n+EBABB8IiOADARF8ICDG8YEzFOP4AAYh+EBABB8IiOADARF8ICCCDwSUGXwzazez7Wb2lpm9aWb3\nVJYvN7P3zex3lZ8FjW8XQD1kjuOb2RRJU9y9y8zOk/S6pIWSrpd02N0fy3g94/hAC6TG8cdlvdjd\nD0o6WHl8xMz2Srro5HvXrUsATTOiY3wzmyHpEkn/U1l0t5l1mdlPzOxrde4NQIPUHPzKbv6vJX3f\n3Y9IWiPpr939EvXvESR3+QEUR+auviSZ2Tj1h/5n7r5Jktz9wwFPeULSlmqv7+zsPPW4VCqpVCqN\nolUAKeVyWeVyuabn1nSRjpk9Jekjd//BgGVTKsf/MrP7Jc1195uGeS0n94AWSJ3cq+Ws/uWS/kvS\nm5K88vOwpJvUf7x/QtI+SUvdvXeY1xN8oAVyBb8OKyf4QAtwWS6AQQg+EBDBBwIi+EBABB8IiOAD\nARF8ICCCDwRE8IGACD4QEMEHAiL4QEBND36t1wu3Cv3lU+T+ityb1Nz+CP4Q9JdPkfsrcm/SGR58\nAK1H8IGAmnIjjoauAEBVLbsDD4DiYVcfCIjgAwE1LfhmtsDMus3sHTP7YbPWWysz22dmb5jZ783s\ntQL0s9bMes3sDwOWXWBmW83sbTN7qZWzF1XprzATqQ4z2eu9leWF+AxbPRltU47xzWyMpHckXSnp\ngKRdkm5w9+6Gr7xGZvZHSXPc/ZNW9yJJZjZP0hFJT7n77MqyH0v62N0frfzxvMDdlxWov+WqYSLV\nZkhM9vo9FeAzzDsZbV7N2uJfJqnH3d9z9y8l/UL9/8giMRXo0MfdX5U09I/QQknrK4/XS1rU1KYG\nqNKfVJCJVN39oLt3VR4fkbRXUrsK8hlW6a9pk9E263/0iyT9ecDv7+v//5FF4ZJeMrNdZrak1c1U\nMfnkpCWVWYwmt7if4RRuItUBk73+VlJb0T7DVkxGW5gtXAFc7u5/L+kf1f/Bz2t1QzUo2lhs4SZS\nHWay16GfWUs/w1ZNRtus4O+XNH3A7+2VZYXh7h9U/vuhpN+o//CkaHrNrE06dYx4qMX9DOLuHw6Y\nNukJSXNb2c9wk72qQJ9htclom/EZNiv4uyR93cw6zOwsSTdI2tykdWcys/GVv7wyswmSvi1pd2u7\nktR/rDfweG+zpNsqj2+VtGnoC5psUH+VIJ30XbX+M1wnaY+7rxqwrEif4V/016zPsGnf3KsMS6xS\n/x+bte7+o6asuAZm9lfq38q7+qcO/3mr+zOzDZJKkiZJ6pW0XNJ/StooaZqk9yQtdvf/LVB/31QN\nE6k2qb9qk72+JulXavFnmHcy2tzr5yu7QDyc3AMCIvhAQAQfCIjgAwERfCAggg8ERPCBgAg+END/\nASuKY3dzPOkFAAAAAElFTkSuQmCC\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP4AAAD8CAYAAABXXhlaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADhBJREFUeJzt3X+MHPV5x/HPc1iV5ZywcVT7EK5J1dqOVGGhlIAq548J\nuAlCFoeCRCkIQRqB/3CoaRAKsZBuXfgjrhCSkYgQjoNM5FMSAonJP4XGMIpQZWxaO4XGhgh0jh3j\ng1Y28gkhg+/pHze+no/b76xvd3bH97xf0om5eWZ3Hs/x2fm5M+buAhBLX68bANB9BB8IiOADARF8\nICCCDwRE8IGA2gq+mV1vZofM7G0z+26nmgJQLZvteXwz65P0tqTrJB2TtE/Sre5+aNp0XCgA9Ii7\n20zj21njXy3p9+5+2N0/kfQTSYNNZj75MzQ0dM7vdfuhv7nbX517q6K/lHaCf5mkI1N+P1qMA1Bz\nHNwDAprXxmv/KGn5lN+XFeM+o9FoTA4vWrSojVlWL8uyXreQRH+zV+fepPb7y/NceZ63NG07B/cu\nkvSWJg7uvSdpr6S/d/eD06bz2c4DwOyZmbzJwb1Zr/Hd/YyZfVvSS5rYZdg+PfQA6mnWa/yWZ8Aa\nH+iJ1Bqfg3tAQAQfCIjgAwERfCAggg8ERPCBgAg+EBDBBwIi+EBABB8IiOADARF8ICCCDwRE8IGA\nCD4QEMEHAiL4QEAEHwiI4AMBEXwgIIIPBETwgYAIPhAQwQcCIvhAQAQfCIjgAwERfCAggg8ERPCB\ngAg+ENC8dl5sZiOSPpQ0LukTd7+6E02hdSdPnkzWd+/enawPDw8n688//3yy3tdX7brj4MGDyfrK\nlSsrnf9c1VbwNRH4zN1PdKIZAN3R7se1deA9AHRZu6F1SS+a2T4zu7sTDQGoXrub+mvc/T0z+1NJ\n/2ZmB9391ekTNRqNyeEsy5RlWZuzBTBdnufK87ylac3dOzJTMxuSdMrdH5s23js1D3wWB/c4uNeM\nmcndbabarP9qZrbAzPqL4c9J+pqkN2f7fgC6p51N/aWSfmFmXrzPTnd/qTNtAahSxzb1m84g+Kb+\n6dOnk/WyTdk77rgjWT916lSyfuTIkWS9TNnfzmzGLcmOWbRoUbL+zjvvJOsLFy7sZDsXlEo29QFc\nuAg+EBDBBwIi+EBABB8IiOADARF8IKB2r9Wf8z7++ONkfdOmTcn64cOHk/Vdu3add0+RnDiR/sb3\nmTNnutTJ3MIaHwiI4AMBEXwgIIIPBETwgYAIPhAQwQcC4jx+idtvvz1Zr/t5+IceeihZX716dVvv\nf++99ybro6Ojbb0/qsEaHwiI4AMBEXwgIIIPBETwgYAIPhAQwQcC4jx+m8oe4VR2HUCZtWvXJuvX\nXHNNW+9f5tixY5W+f5k1a9Yk6/Pnz+9SJ3MLa3wgIIIPBETwgYAIPhAQwQcCIvhAQAQfCKj0PL6Z\nbZe0TtKou68uxl0i6aeSLpc0IukWd/+wwj57ZufOncn6p59+mqz39/d3sp2ue/LJJ5P1qr9v/8AD\nDyTrCxYsqHT+c1Ura/ynJX192rgHJf3a3VdJelnS9zrdGIDqlAbf3V+VNP1xJoOSdhTDOyTd1OG+\nAFRotvv4S9x9VJLc/bikJZ1rCUDVOnWtvqeKjUZjcjjLMmVZ1qHZAjgrz3Pled7StLMN/qiZLXX3\nUTMbkPR+auKpwQdQjekr1c2bNzedttVNfSt+znpB0l3F8J2S6n2rWQDnKA2+mQ1L+ndJK83sD2b2\nTUnfl/S3ZvaWpOuK3wFcIMw9uXve/gzMvOp5oDoXXXRRsm5myXqZgYGBZP3o0aNtvX9kZiZ3n/EP\nxJV7QEAEHwiI4AMBEXwgIIIPBETwgYAIPhAQ99VHTz3++OO9biEk1vhAQAQfCIjgAwERfCAggg8E\nRPCBgAg+EBDn8YN7+OGHk/Xx8fFkva8vve5YvHhxsn7FFVck66gGa3wgIIIPBETwgYAIPhAQwQcC\nIvhAQAQfCIjz+HPc6dOnk/WRkZFkvew8fdl99QcHB5P1FStWJOuoBmt8ICCCDwRE8IGACD4QEMEH\nAiL4QEAEHwjIyp5db2bbJa2TNOruq4txQ5LulvR+Mdkmd//XJq/3snmgOu+++26yvnLlymS97G93\n8cUXJ+t79+5N1jmPXx0zk7vPeKFFK2v8pyV9fYbxj7n7l4qfGUMPoJ5Kg+/ur0o6MUMpfckWgNpq\nZx9/g5kdMLMfmtnCjnUEoHKzvVb/B5L+2d3dzB6R9JikbzWbuNFoTA5nWaYsy2Y5WwDN5HmuPM9b\nmrb04J4kmdnlkn519uBeq7WizsG9HuLgXlztHtyTJvbnJ9/AzAam1L4h6c3Ztweg20o39c1sWFIm\n6fNm9gdJQ5K+amZXShqXNCJpfYU9Auiw0uC7+20zjH66gl5QgaGhoUrf/8Ybb0zW2ZSvJ67cAwIi\n+EBABB8IiOADARF8ICCCDwRE8IGAuK/+Be7kyZPJ+qFDh5L1sktyx8fHk/Vrr702WZ/rypb/7t27\nk/Xh4eFk/bnnnjvvnlrBGh8IiOADARF8ICCCDwRE8IGACD4QEMEHAuI8fs2VnSdet25dsr5///5k\nvez59mW35rr55puT9ap99NFHyfprr72WrG/cuDFZL7vOYWxsLFkv+/s9++yzyXpVWOMDARF8ICCC\nDwRE8IGACD4QEMEHAiL4QECcx6+5su9z79mzp9L533///cl6f39/sn7mzJlkvew89+uvv56sP/ro\no8n6K6+8kqyXKTuPv2rVqmT9iSeeSNbXrl173j11Amt8ICCCDwRE8IGACD4QEMEHAiL4QEAEHwio\n9Dy+mS2T9IykpZLGJW1z98fN7BJJP5V0uaQRSbe4+4cV9ooeGB0dTdYfeeSRZP3EiRPJ+tatW5P1\nsvPoZfcTaFfZ9+VvuOGGZH3+/PmdbKdjWlnjfyrpO+7+V5L+RtIGM/uipAcl/drdV0l6WdL3qmsT\nQCeVBt/dj7v7gWJ4TNJBScskDUraUUy2Q9JNVTUJoLPOax/fzL4g6UpJeyQtdfdRaeLDQdKSTjcH\noBotX6tvZv2Sfi5po7uPmdn0na+mO2ONRmNyOMsyZVl2fl0CKJXnufI8b2naloJvZvM0Efofu/uu\nYvSomS1191EzG5D0frPXTw0+gGpMX6lu3ry56bStbur/SNLv3H3qIdgXJN1VDN8padf0FwGop1ZO\n562RdLukN8xsvyY26TdJ2iLpZ2b2D5IOS7qlykYBdI6VnSdtewZmXvU8LmRly2bevN7eMmF8fDxZ\n7+ur9hqwsvmX3fe/7H4C99xzz3n3dKEwM7n7jBc6cOUeEBDBBwIi+EBABB8IiOADARF8ICCCDwTE\nffV7bNu2bcl61d83L1N2nr7d/q666qpkvew8/ZYtW5L1Sy+99Lx7ioA1PhAQwQcCIvhAQAQfCIjg\nAwERfCAggg8ExHl8JC1evDhZHxwcT
NY3btyYrC9fvjxZX7hwYbKO2WGNDwRE8IGACD4QEMEHAiL4\nQEAEHwiI4AMBcV/9HitbNhs2bEjWn3rqqbbmv379+mT9vvvuS9ZXrFjR1vxRHe6rD+AcBB8IiOAD\nARF8ICCCDwRE8IGASoNvZsvM7GUz+28ze8PM7i3GD5nZUTP7z+Ln+urbBdAJpefxzWxA0oC7HzCz\nfkn/IWlQ0t9JOuXuj5W8nvP4QA+kzuOX3ojD3Y9LOl4Mj5nZQUmXnX3vjnUJoGvOax/fzL4g6UpJ\nrxWjNpjZATP7oZlxqxTgAtFy8IvN/J9L2ujuY5J+IOkv3P1KTWwRJDf5AdRHS/fcM7N5mgj9j919\nlyS5+wdTJtkm6VfNXt9oNCaHsyxTlmWzaBVASp7nyvO8pWlb+pKOmT0j6X/c/TtTxg0U+/8ys3+S\n9GV3v22G13JwD+iB1MG9Vo7qr5H0G0lvSPLiZ5Ok2zSxvz8uaUTSencfneH1BB/ogbaC34GZE3yg\nB/haLoBzEHwgIIIPBETwgYAIPhAQwQcCIvhAQAQfCIjgAwERfCAggg8ERPCBgLoe/Fa/L9wr9Nee\nOvdX596k7vZH8Kehv/bUub869ybN8eAD6D2CDwTUlRtxVDoDAE317A48AOqHTX0gIIIPBNS14JvZ\n9WZ2yMzeNrPvdmu+rTKzETP7rZntN7O9Nehnu5mNmtl/TRl3iZm9ZGZvmdmLvXx6UZP+avMg1Rke\n9vqPxfhaLMNeP4y2K/v4ZtYn6W1J10k6JmmfpFvd/VDlM2+Rmb0r6a/d/USve5EkM/uKpDFJz7j7\n6mLcFkn/6+7/Unx4XuLuD9aovyG18CDVbkg87PWbqsEybPdhtO3q1hr/akm/d/fD7v6JpJ9o4h9Z\nJ6Ya7fq4+6uSpn8IDUraUQzvkHRTV5uaokl/Uk0epOrux939QDE8JumgpGWqyTJs0l/XHkbbrf/R\nL5N0ZMrvR/X//8i6cEkvmtk+M7u71800seTsQ0uKpxgt6XE/M6ndg1SnPOx1j6SldVuGvXgYbW3W\ncDWwxt2vknSDJhb8V3rdUAvqdi62dg9SneFhr9OXWU+XYa8eRtut4P9R0vIpvy8rxtWGu79X/PcD\nSb/QxO5J3Yya2VJpch/x/R73cw53/2DKY5O2SfpyL/uZ6WGvqtEybPYw2m4sw24Ff5+kvzSzy83s\nTyTdKumFLs27lJktKD55ZWafk/Q1SW/2titJE/t6U/f3XpB0VzF8p6Rd01/QZef0VwTprG+o98vw\nR5J+5+5bp4yr0zL8TH/dWoZdu3KvOC2xVRMfNtvd/ftdmXELzOzPNbGWd008Onxnr/szs2FJmaTP\nSxqVNCTpl5KelfRnkg5LusXdT9aov6+qhQepdqm/Zg973SvpZ+rxMmz3YbRtz59LdoF4OLgHBETw\ngYAIPhAQwQcCIvhAQAQfCIjgAwERfCCg/wO4SOgpnCFWVAAAAABJRU5ErkJggg==\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP4AAAD8CAYAAABXXhlaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADmtJREFUeJzt3W+MVfWdx/HPF4kaSkQlMoMypZtdWs0GQuwW3ciDi+y2\nuJKgRC3VRKxi+gBrs31SywNnxmxi2QcmrqbxT2mFBtN2iS2S4Mo2eGN00x3cLesfEEkM/50RRQkT\nRWX97oO5sHeGub97Z8499x7m+34lE8+c77n39+XGz5xz7u/O/MzdBSCWSe1uAEDrEXwgIIIPBETw\ngYAIPhAQwQcCyhR8M1tiZm+b2Ttm9pNmNQUgXzbeeXwzmyTpHUmLJR2RtEPSCnd/e8RxfFAAaBN3\nt9H2ZznjL5C01933u/sXkn4jaVmNwc98dXd3D/u+aF/0N3H7K3JvefSXkiX4V0g6WPX9oco+AAXH\nm3tAQJMzPPawpK9WfT+rsu8sPT09Z7YvvvjiDEPmr1QqtbuFJPobvyL3JmXvr1wuq1wuN3Rsljf3\nzpO0R0Nv7r0nqU/S99x994jjfLxjABg/M5PXeHNv3Gd8d/9fM7tP0jYN3TKsGxl6AMU07jN+wwNw\nxgfaInXG5809ICCCDwRE8IGACD4QEMEHAiL4QEAEHwiI4AMBEXwgIIIPBETwgYAIPhAQwQcCIvhA\nQAQfCIjgAwERfCAggg8ERPCBgAg+EBDBBwIi+EBABB8IiOADARF8ICCCDwRE8IGACD4QEMEHAiL4\nQEAEHwhocpYHm9k+ScclfSnpC3df0Iym0DzHjh1L1jdt2pTp+W+55ZZk/dJLL830/MhHpuBrKPAl\nd/+oGc0AaI2sl/rWhOcA0GJZQ+uSXjSzHWZ2bzMaApC/rJf617n7e2Z2maR/N7Pd7v7KyIN6enrO\nbJdKJZVKpYzDAhipXC6rXC43dKy5e1MGNbNuSSfc/ZER+71ZY2DseHMvLjOTu9totXFf6pvZFDOb\nWtn+iqRvS3pzvM8HoHWyXOp3SPq9mXnleTa6+7bmtAUgT0271K85AJf6Sf39/cn6tm3pn6XV75+M\n5tChQ8n6qVOnkvV6Jk9OnzvMRr3SPKOzszNZ7+vrS9Y7OjqS9chyudQHcO4i+EBABB8IiOADARF8\nICCCDwRE8IGAmMfP6MiRI8n6tddem6x/8MEHyfrJkyfH3NNE8uCDDybr9T7HEBnz+ACGIfhAQAQf\nCIjgAwERfCAggg8ERPCBgJjHr2PLli3J+h133JGsDw4ONrOdcObMmZOs79mzp0WdnHuYxwcwDMEH\nAiL4QEAEHwiI4AMBEXwgIIIPBJR17bwJb/Xq1cl61nn6q666KllfunRpsn7//fcn69OmTRtzT9VW\nrFiRrG/dujXT89fDOov54IwPBETwgYAIPhAQwQcCIvhAQAQfCIjgAwHVncc3s3WSlkoacPd5lX2X\nSPqtpNmS9km6zd2P59hnbubOnZusHz58ONPzr1q1Kll/+OGHk/Xp06dnGr+ed999N1l/6aWXch3/\nnnvuSdYfe+yxXMePqpEz/q8kfWfEvgck/dHdvyFpu6SfNrsxAPmpG3x3f0XSRyN2L5O0vrK9XtJN\nTe4LQI7Ge48/w90HJMnd+yXNaF5LAPLWrM/qJ/+oXvX6ZqVSic9fAzkol8sql8sNHTve4A+YWYe7\nD5hZp6T3UwezsCGQv5En1d7e3prHNnqpb5Wv056XdFdle6WkzWNpEEB71Q2+mT0r6T8kfd3MDpjZ\n9yX9TNLfm9keSYsr3wM4R9S91Hf322uU/q7JveSi3vrqu3btStbvvvvuZL3ebczMmTOT9UmT8v0M\nVb2/F1Dv9/k//fTTTOPfeOONyfqaNWuS9fPPPz/T+Bgdn9wDAiL4QEAEHwiI4AMBEXwgIIIPBETw\ngYAs77XrzczzHqPO+JnqJ0+eTNaLPs/8+uuvJ+vz58/PdfxnnnkmWb/zzjtzHT8yM5O7j/o/OGd8\nICCCDwRE8IGACD4QEMEHAiL4QEAEHwhows/jb9y4MVlfuHBhst7V1ZWs5/379FkdOHAgWb/66quT\n9WPHjmUaf8qUKcn6ggULkvWVK1cm69dcc02yfuWVVybrExnz+ACGIfhAQAQfCIjgAwERfCAggg8E\nRPCBgCb8PD7SXnvttWR9yZIlyXrWef6sLrjggmR90aJFyfry5cuT9VtvvTVZnzZtWrLeTszjAxiG\n4AMBEXwgIIIPBETwgYAIPhAQwQcCqjuPb2brJC2VNODu8yr7uiXdK+n9ymFr3P3fajyeefxz2EMP\nPZSs79ixI1nv6+tL1o8ePTrmnlpp3rx5yfrLL7+crF900UXNbGdMss7j/0rSd0bZ/4i7X135GjX0\nAIqpbvDd/RVJH41SSi9BA6CwstzjrzaznWb2CzMr7ucWAZxl8jgf93NJD7m7m9k/SXpE0j21Du7p\n6TmzXSqVVCqVxjksgFrK5bLK5XJDx44r+O5e/Y7M05K2pI6vDj6AfIw8qfb29tY8ttFLfVPVPb2Z\ndVbVlkt6c0wdAmirumd8M3tWUknSdDM7IKlb0iIzmy/pS0n7JP0gxx4BNBm/j49cHTx4MFn/+OOP\nk/Xjx48n693d3cn67t27k/X+/v5kvZ6nnnoqWV+1alWm58+C38cHMAzBBwIi+EBABB8IiOADARF8\nICCCDwQ03s/qAw3p6urKVK9n27Ztyfp9992XrD/55JOZxj98+HCmx7cLZ3wgIIIPBETwgYAIPhAQ\nwQcCIvhAQAQfCIh5fBTaiRMnkvUNGzYk61nn6esp+roAtXDGBwIi+EBABB8IiOADARF8ICCCDwRE\n8IGAmMdvs88//zxZ/+STT5L1J554Ilm//PLLk/Wbb745Wb/wwguT9c8++yxZP3XqVLL+3HPPJetr\n165N1vfu3ZusZ3XZZZcl69dff32u4+eFMz4QEMEHAiL4QEAEHwiI4AMBEXwgIIIPBGT11q43s1mS\nNkjqkPSlpKfd/V/M7BJJv5U0W9I+Sbe5+1mLmZuZ1xsjss2bNyfr9ebZ65k9e3ayXu9zBIsWLUrW\nX3311WR9//79yXreJk1Kn9vmzJmTrG/fvj1Znzlz5ph7ahUzk7vbaLVGzvinJP3Y3f9a0t9KWm1m\nV0p6QNIf3f0bkrZL+mmzGgaQr7rBd/d+d99Z2R6UtFvSLEnLJK2vHLZe0k15NQmgucZ0j29mX5M0\nX9KfJHW4+4A09MNB0oxmNwcgHw1/Vt/MpkraJOlH7j5oZiNv3GveyPf09JzZLpVKKpVKY+sSQF3l\nclnlcrmhYxsKvplN1lDof+3up9+NGjCzDncfMLNOSe/Xenx18AHkY+RJtbe3t+axjV7q/1LSLnd/\ntGrf85LuqmyvlJR+expAYdQ945vZdZLukPSGmf1ZQ5f0ayStlfQ7M7tb0n5Jt+XZKIDmqTuPn3mA\n4PP4b731VrK+cOHCZP348bM+GhFK1
nn4xx9/PFlfvHjxmHs6V2SdxwcwwRB8ICCCDwRE8IGACD4Q\nEMEHAiL4QEDM4+ds7ty5yfquXbuS9aK/duedd16yPnXq1GS9s7MzWY88D58V8/gAhiH4QEAEHwiI\n4AMBEXwgIIIPBETwgYCYx2+zrVu3Jusffvhhsv7CCy8k6zfccMOYexqLrq6uZJ2/r9g+zOMDGIbg\nAwERfCAggg8ERPCBgAg+EBDBBwJiHh+YoJjHBzAMwQcCIvhAQAQfCIjgAwERfCCgusE3s1lmtt3M\n3jKzN8zsh5X93WZ2yMz+u/K1JP92ATRD3Xl8M+uU1OnuO81sqqT/krRM0nclnXD3R+o8nnl8oA1S\n8/iT6z3Y3fsl9Ve2B81st6QrTj9307oE0DJjusc3s69Jmi/pPyu7VpvZTjP7hZlNa3JvAHLScPAr\nl/mbJP3I3Qcl/VzSX7r7fA1dESQv+QEUR91LfUkys8kaCv2v3X2zJLn70apDnpa0pdbje3p6zmyX\nSiX+DhuQg3K5rHK53NCxDf2SjpltkPSBu/+4al9n5f5fZvaPkr7l7reP8lje3APaIPXmXiPv6l8n\n6WVJb0jyytcaSbdr6H7/S0n7JP3A3QdGeTzBB9ogU/CbMDjBB9qAX8sFMAzBBwIi+EBABB8IiOAD\nARF8ICCCDwRE8IGACD4QEMEHAiL4QEAEHwio5cFv9PeF24X+silyf0XuTWptfwR/BPrLpsj9Fbk3\naYIHH0D7EXwgoJb8IY5cBwBQU9v+Ag+A4uFSHwiI4AMBtSz4ZrbEzN42s3fM7CetGrdRZrbPzP7H\nzP5sZn0F6GedmQ2Y2etV+y4xs21mtsfMXmzn6kU1+ivMQqqjLPZ6f2V/IV7Ddi9G25J7fDObJOkd\nSYslHZG0Q9IKd38798EbZGbvSvqmu3/U7l4kycwWShqUtMHd51X2rZX0obv/c+WH5yXu/kCB+utW\nAwuptkJisdfvqwCvYdbFaLNq1Rl/gaS97r7f3b+Q9BsN/SOLxFSgWx93f0XSyB9CyyStr2yvl3RT\nS5uqUqM/qSALqbp7v7vvrGwPStotaZYK8hrW6K9li9G26n/0KyQdrPr+kP7/H1kULulFM9thZve2\nu5kaZpxetKSyitGMNvczmsItpFq12OufJHUU7TVsx2K0hTnDFcB17v43kv5BQy/8wnY31ICizcUW\nbiHVURZ7HfmatfU1bNditK0K/mFJX636flZlX2G4+3uV/x6V9HsN3Z4UzYCZdUhn7hHfb3M/w7j7\n0aplk56W9K129jPaYq8q0GtYazHaVryGrQr+Dkl/ZWazzex8SSskPd+isesysymVn7wys69I+rak\nN9vblaShe73q+73nJd1V2V4pafPIB7TYsP4qQTptudr/Gv5S0i53f7RqX5Few7P6a9Vr2LJP7lWm\nJR7V0A+bde7+s5YM3AAz+wsNneVdQ0uHb2x3f2b2rKSSpOmSBiR1S/qDpH+V1CVpv6Tb3P3jAvW3\nSA0spNqi/mot9ton6Xdq82uYdTHazOPzkV0gHt7cAwIi+EBABB8IiOADARF8ICCCDwRE8IGACD4Q\n0P8Biwwr3wnxaXIAAAAASUVORK5CYII=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP4AAAD8CAYAAABXXhlaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADU1JREFUeJzt3V2MHYV5xvHnsS0kEouADLYFSwhqTSoqWVbbgCpbMIE2\nQVUkQyQodYRwWiHERxs1N3EswR4DF0klQHCRG+IgOwQlbqRgchOcyIwQARfT4hZqA5HQYuPgBSob\nxzeI4rcX53i7XvbMObvna9bv/yetmJ13zpnXwz5nvs7MOCIEIJdFo24AwPARfCAhgg8kRPCBhAg+\nkBDBBxLqKfi2r7P9uu03bX+nX00BGCzP9zy+7UWS3pR0raTfS9or6eaIeH3GdHxRABiRiPBs43tZ\n418h6XcR8XZEfCzpp5LWt5n51M/4+Phpv9fth/7O3P7q3Nsg+qvSS/AvknRo2u/vtMYBqDkO7gEJ\nLenhtYclfX7a72OtcZ/SaDSmhs8999weZjl4RVGMuoVK9Dd/de5N6r2/sixVlmVX0/ZycG+xpDfU\nPLj3rqSXJP1dRByYMV3Mdx4A5s+2os3BvXmv8SPiE9t3S9ql5i7D1pmhB1BP817jdz0D1vjASFSt\n8Tm4ByRE8IGECD6QEMEHEiL4QEIEH0iI4AMJEXwgIYIPJETwgYQIPpAQwQcSIvhAQgQfSIjgAwkR\nfCAhgg8kRPCBhAg+kBDBBxIi+EBCBB9IiOADCRF8ICGCDyRE8IGECD6QEMEHEiL4QEIEH0iI4AMJ\nLenlxbYnJH0o6aSkjyPiin40BWCwegq+moEvIuJoP5oBMBy9buq7D+8BYMh6DW1Iesb2Xtu39aMh\nAIPX66b+2oh41/YFkn5t+0BEPD9zokajMTVcFIWKouhxtgBmKstSZVl2Na0joi8ztT0u6Q8R8dCM\n8dGveQDonm1FhGerzXtT3/ZnbC9tDX9W0lckvTbf9wMwPL1s6q+Q9Avb0Xqfn0TErv60BWCQ+rap\n33YGbOoDIzGQTX0ACxfBBxIi+EBCBB9IiOADCRF8ICGCDyRE8IGECD6QEMEHEiL4QEIEH0iI4AMJ\nEXwgIYIPJNTrPfdGbsuWLZX1K6+8srJ+9dVXV9bPPvvsOfeE+jhw4EBl/dixY5X1VatWVdbPP//8\nOfdUB6zxgYQIPpAQwQcSIvhAQgQfSIjgAwkRfCChBX9f/cWLF3eaf2X9lltuqazfd999lfWLL764\nsp7dBx98UFk/fvx4Zf25556rrO/YsaOyvmfPnp7m3+k8fqfvCYwS99UHcBqCDyRE8IGECD6QEMEH\nEiL4QEIEH0io43l821slfU3SZESsbo07T9LPJF0iaULSTRHxYZvXD/Q8/gUXXFBZP3r0aE/vv3Tp\n0sr6hg0bKutdLN8599RPg+5v165dlfWJiYnK+qiX37Jlyyrrk5OTA51/L3o9j/+4pK/OGLdJ0m8i\n4ouSdkv6bm8tAhimjsGPiOclzVxtrpe0rTW8TdL1fe4LwADNdx9/eURMSlJEHJG0vH8tARi0ft1z\nr3JHrNFoTA0XRaGiKPo0WwCnlGWpsiy7mna+wZ+0vSIiJm2vlPRe1cTTgw9gMGauVKtuRNvtpr5b\nP6c8LWlja/hWSTvn0iCA0eoYfNtPSnpB0mW2D9r+pqTvSfpr229Iurb1O4AFYsFfj3/o0KHK+qOP\nPlpZf/jhh/vZzqeM+jx0J9n763Se/uWXX66s1/l+DFyPD+A0BB9IiOADCRF8ICGCDyRE8IGECD6Q\n0II/jz9ou3fvrqzv3bu3sr5p06bK+qJFo/3sPXnyZGW91/46Pbfg4MGDlfVnn322st6pv07n2V94\n4YXK+oUXXlhZrzPO4wM4DcEHEiL4QEIEH0iI4AMJEXwgIYIPJNSve+6dsa655pqe6nfeeWc/21lw\nliyp/hO74447KuudztN3uh5/48aNlfWFfJ6+F6zxgYQIPpAQwQcSIvhAQgQfSIjgAwkRfCAhrsdH\nTz766KPK+r333ltZf/DBByvrnf52brzxxsr69u3bK+tnnXVWZX0h43p8AKch+EBCBB9IiOADCRF8\nICGCDyRE8IGEOl6Pb3urpK9JmoyI1a1x45Juk/Rea7LNEfGrgXWJ2jp8+HBlvdN5+l6tWrWqsn4m\nn6fvRTdr/MclfXWW8Q9FxJ+1fgg9sIB0DH5EPC/p6Cyl6lufAKitXvbx77K9z/YPbX+ubx0BGLj5\n3nPvB5Lui4iw/YCkhyT9Q7uJG43G1HBRFCqKYp6zBdBOWZYqy7Krabu6SMf2JZJ+eergXre1Vp2L\ndM5gb731VmX9sssu6+n9O/3tbN68ubJ+//339zT/hawfF+lY0/bpba+cVvu6pNfm3x6AYevmdN6T\nkgpJy2wflDQu6cu210g6KWlC0u0D7BFAn3UMfkRsmGX04wPoBQvQU089NdD3v+GGGyrr99xzz0Dn\nf6bim3tAQgQfSIjgAwkRfCAhgg8kRPCBhAg+kBD31UelF198sbK+bt26gc7/k08+Gej7n8m4rz6A\n0xB8ICGCDyRE8IGECD6QEMEHEiL4QELzveceIKl5rhgLD2t8ICGCDyRE8IGECD6QEMEHEiL4QEIE\nH0iI8/gYqQceeGDULaTEGh9IiOADCRF8ICGCDyRE8IGECD6QEMEHEup4Ht/2mKTtklZIOinpsYh4\n1PZ5kn4m6RJJE5JuiogPB9grRuDYsWMDff9LL710oO+P2XWzxv9fSd+OiD+V9JeS7rL9J5I2SfpN\nRHxR0m5J3x1cmwD6qWPwI+JIROxrDZ+QdEDSmKT1kra1Jtsm6fpBNQmgv+a0j2/7C5LWSNojaUVE\nTErNDwdJy/vdHIDB6Pq7+raXSvq5pG9FxAnbMx+I1/YBeY1GY2q4KAoVRTG3LgF0VJalyrLsatqu\ngm97iZqh/3FE7GyNnrS9IiImba+U9F67108PPoDBmLlS3bJlS9tpu93U/5Gk/RHxyLRxT0va2Bq+\nVdLOmS8CUE/dnM5bK+kbkl61/Yqam/SbJX1f0g7bfy/pbUk3DbJRAP3TMfgR8VtJi9uU/6q/7aBu\n7r777lG3gAHgm3tAQgQfSIjgAwkRfCAhgg8kRPCBhAg+kBD31UeliLaXYHRVRz2xxgcSIvhAQgQf\nSIjgAwkRfCAhgg8kRPCBhDiPj0pXXXVVZf2JJ54YUifoJ9b4QEIEH0iI4AMJEXwgIYIPJETwgYQI\nPpAQ5/FR6fbbb6+sdzqPv3r16sr62NjYnHtC71jjAwkRfCAhgg8kRPCBhAg+kBDBBxLqGHzbY7Z3\n2/5v26/a/sfW+HHb79j+j9bPdYNvF0A/uNN90W2vlLQyIvbZXirp3yWtl/S3kv4QEQ91eH1w7/WF\n6/jx45X1/fv3V9Yvv/zyyvo555wz557QHduKCM9W6/gFnog4IulIa/iE7QOSLjr13n3rEsDQzGkf\n3/YXJK2R9G+tUXfZ3mf7h7Y/1+feA
AxI18Fvbeb/XNK3IuKEpB9I+qOIWKPmFkHlJj+A+ujqu/q2\nl6gZ+h9HxE5Jioj3p03ymKRftnt9o9GYGi6KQkVRzKNVAFXKslRZll1N2/HgniTZ3i7pg4j49rRx\nK1v7/7L9z5K+FBEbZnktB/cWMA7uLVw9HdyzvVbSNyS9avsVSSFps6QNttdIOilpQlL1ZVwAaqOb\no/q/lbR4ltKv+t8OgGHoalO/pxmwqQ+MRNWmPl/ZBRIi+EBCBB9IiOADCRF8ICGCDyRE8IGECD6Q\nEMEHEiL4QEIEH0ho6MHv9nrhUaG/3tS5vzr3Jg23P4I/A/31ps791bk36QwPPoDRI/hAQkO5Hn+g\nMwDQVrvr8QcefAD1w6Y+kBDBBxIaWvBtX2f7ddtv2v7OsObbLdsTtv/T9iu2X6pBP1ttT9r+r2nj\nzrO9y/Ybtp8Z5dOL2vRXmwepzvKw139qja/FMhz1w2iHso9ve5GkNyVdK+n3kvZKujkiXh/4zLtk\n+y1Jfx4RR0fdiyTZXifphKTtEbG6Ne77kv4nIv6l9eF5XkRsqlF/4+riQarDUPGw12+qBsuw14fR\n9mpYa/wrJP0uIt6OiI8l/VTNf2SdWDXa9YmI5yXN/BBaL2lba3ibpOuH2tQ0bfqTavIg1Yg4EhH7\nWsMnJB2QNKaaLMM2/Q3tYbTD+kO/SNKhab+/o///R9ZFSHrG9l7bt426mTaWR8SkNPUU4+Uj7mc2\ntXuQ6rSHve6RtKJuy3AUD6OtzRquBtZGxF9I+hs1F/y6UTfUhbqdi63dg1RnedjrzGU20mU4qofR\nDiv4hyV9ftrvY61xtRER77b++76kX6i5e1I3k7ZXSFP7iO+NuJ/TRMT7056e8pikL42yn9ke9qoa\nLcN2D6MdxjIcVvD3Svpj25fYPkvSzZKeHtK8O7L9mdYnr2x/VtJXJL022q4kNff1pu/vPS1pY2v4\nVkk7Z75gyE7rrxWkU76u0S/DH0naHxGPTBtXp2X4qf6GtQyH9s291mmJR9T8sNkaEd8byoy7YPtS\nNdfyoebzBH8y6v5sPympkLRM0qSkcUlPSfpXSRdLelvSTRFxrEb9fVnNfdWpB6me2p8eQX9rJT0n\n6VU1/7+eetjrS5J2aMTLsKK/DRrCMuQru0BCHNwDEiL4QEIEH0iI4AMJEXwgIYIPJETwgYQIPpDQ\n/wEM6qsOGcxlNgAAAABJRU5ErkJggg==\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP4AAAD8CAYAAABXXhlaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADoJJREFUeJzt3X+sVPWZx/HPg7jB1ohI5BLBwupiTdYQ3S4mxiYOotVs\najD9Q13UaBVTo7hNa0yBkNzLahTWH1ES+4+lBBDTHyQs9g9XIDrZkA2Ku2V7bUFIlF8FLuwqWqJE\n9D77xx3YucOd75l7z/w48LxfyQ0z5zkz52Hgc885850zX3N3AYhlVKcbANB+BB8IiOADARF8ICCC\nDwRE8IGAcgXfzG41sx1mttPMftaspgC0lo10HN/MRknaKWmWpAOStkq6y9131KzHBwWADnF3G2p5\nnj3+tZJ2ufsedz8h6VeSZtfZ+Kmf7u7uQfeL9kN/Z29/Re6tFf2l5An+JEn7qu7vrywDUHC8uQcE\nNDrHY/8s6VtV9ydXlp2mp6fn1O0LL7wwxyZbr1QqdbqFJPobuSL3JuXvr1wuq1wuN7Runjf3zpH0\ngQbe3Dso6V1J/+ju22vW85FuA8DImZm8zpt7I97ju/vXZjZP0gYNnDIsrw09gGIa8R6/4Q2wxwc6\nIrXH5809ICCCDwRE8IGACD4QEMEHAiL4QEAEHwiI4AMBEXwgIIIPBETwgYAIPhAQwQcCIvhAQAQf\nCIjgAwERfCAggg8ERPCBgAg+EBDBBwIi+EBABB8IiOADARF8ICCCDwRE8IGACD4QEMEHAiL4QEAE\nHwhodJ4Hm9luSZ9K6pd0wt2vbUZTAForV/A1EPiSu3/SjGYAtEfeQ31rwnMAaLO8oXVJb5rZVjN7\nqBkNAWi9vIf617v7QTO7WNJGM9vu7ptrV+rp6Tl1u1QqqVQq5dwsgFrlclnlcrmhdc3dm7JRM+uW\n9Bd3f6FmuTdrGwAaZ2ZydxuqNuJDfTP7hpmdX7n9TUnfk/T+SJ8PQPvkOdTvkrTOzLzyPGvcfUNz\n2gLQSk071K+7AQ71z2jvvfdesn706NE2dTK0mTNnJuvnnHNOmzopnpYc6gM4cxF8ICCCDwRE8IGA\nCD4QEMEHAiL4QEB5P6uPDjty5EiyvmrVqmR9zZo1yfqOHTuS9ePHjyfrrTZ9+vRk/dVXX03Wr7rq\nqma2c8Zgjw8ERPCBgAg+EBDBBwIi+EBABB8IiOADAXE9fov19fUl6wcPHkzWq7+vcCi9vb3J+kcf\nfZSsn+26urqS9Tlz5iTrjz/+eLJ+ySWXDLunduF6fACDEHwgIIIPBETwgYAIPhAQwQcCIvhAQIzj\n55Q1jn7zzTcn64cPH25mO003derUZD3revgFCxYk619//XWyPnfu3GR9586dyXp/f3+ynmXs2LHJ\n+oMPPpisP/fcc7m2nwfj+AAGIfhAQAQfCIjgAwERfCAggg8ERPCBgDLH8c1suaTvS+pz9+mVZeMk\n/VrSFEm7Jd3h7p/WefwZPY6/ZMmSZD1rnPbjjz9uZjvDdvfddyfrd955Z7I+a9asZP28884bdk/N\ntHHjxmR93rx5yfqHH36YrGd9zmDChAnJ+v79+5P10aNbN7VF3nH8FZJuqVk2X9Imd/+2pLckpT+l\nAaBQMoPv7pslfVKzeLaklZXbKyXd3uS+ALTQSM/xJ7h7nyS5+yFJ6eMdAIXSrBOM5El89ffGlUol\nlUqlJm0WwEnlclnlcrmhdUca/D4z63L3PjObKCl5pUnWF0YCyK92p7p48eK66zZ6qG+Vn5Nel3R/\n5fZ9ktYPp0EAnZUZfDN7TdJ/SLrCzPaa2Q8lLZF0s5l9IGlW5T6AM0T46/F37dqVrF9zzTXJ+uef\nf97Mdk4zZsyYZH3ZsmXJ+gMPPJCsjxoV+zNcL7/8crL+2GOP5Xr+1atXJ+tZn7PIg+vxAQxC8IGA\nCD4QEMEHAiL4QEAEHwiI4AMBte5i4DPElVdemay3+jMI9957b7K+aNGiZH3atGnNbCecRx55JFnf\nsmVLsr5mzZpmttM27PGBgAg+EBDBBwIi+EBABB8IiOADARF8IKDw4/hZ86ebDXk5c8OeeOKJZP2p\np55K1s8999xc20da1r9v3n//omKPDwRE8IGACD4QEMEHAiL4QEAEHwiI4AMBhR/Hb7Ubb7wxWWec\nvrOOHz+erB86dCjX8y9cuDBZb+X36qewxwcCIvhAQAQfCIjgAwERfCAggg8ERPCBgDLH8c1suaTv\nS+pz9+mVZd2SHpJ0uLLaQnf/t5Z12ULz589P1pcuXZrr+Xt6epL1G264IVkfM2ZMru1HlzVOP2/e\nvGR906ZNubZ/zz335Hp8qzSyx18h6ZYhlr/g7n9X+TkjQw9ElRl8d98s6ZMhSmfnV5MAAeQ5x3/U\nzLaZ2S/MbGzTOgLQciP9rP7PJf2zu7uZPSXpBUkP1lu5+jy3VCqpVCqNcLMA6imXyyqXyw2tO6Lg\nu/uRqruvSPpdav2sN7gA5Fe7U128eHHddRs91DdVndOb2cSq2g8kvT+sDgF0VCPDea9JKkkab2Z7\nJXVLmmlmV0vql7Rb0o9a2COAJrNWz/9uZt7qbeRx+PDhZP2KK65I1j/77LNc29+wYUOyftNNN+V6\n/ug2btyYrN9yy1Aj1Y0bNSp90Lxu3bpk/bbbbsu1/RQzk7sPOfrGJ/eAgAg+EBDBBwIi+EBABB8I\niOADARF8IKDw4/hZJk+enKwfOHAg1/OPGzcuWd+yZUuyPm3atFzbL7r+/v5k/fnnn0/Wn3nmmWT9\n6NGjw+6pWtb/j7179+Z6/jwYxwcwCMEHAiL4QEAEHwiI4AMBEXwgIIIPBMQ4foZ9+/Yl6zNmzEjW\ns673z5L1vfovvvhisj537txkPet68lbr7e1N1p988slkfe3atc1s5zRdXV3J+tatW5P1rHH+VmIc\nH8AgBB8IiOADARF8ICCCDwRE8IGACD4QEOP4Ob3zzjvJ+nXXXdemToZ2wQUXJOvLli1L1rO+L2DP\nnj3J+qJFi5L1L774Ilk/ceJEsp7XpZdemqxv3rw51+M7iXF8AIMQfCAggg8ERPCBgAg+EBDBBwIi\n+EBAmeP4ZjZZ0ipJXZL6Jb3i7svMbJykX0uaImm3pDvc/dMhHn9Wj+NnjTNnjWM/++yzzWwHNaZM\nmZKsv/3228n61KlTm9hNe+Udx/9K0k/d/W8lXSfpUTO7UtJ8SZvc/duS3pK0oFkNA2itzOC7+yF3\n31a5fUzSdkmTJc2WtLKy2kpJt7eqSQDNNaxzfDObKulqSVskdbl7nzTwy0HShGY3B6A1Rje6opmd\nL2mtpB+7+zEzqz1xr3si39PTc+p2qVRSqVQaXpcAMpXLZZXL5YbWbSj4ZjZaA6Ff7e7rK4v7zKzL\n3fvMbKKkut8qWR18AK1Ru1NdvHhx3XUbPdT/paQ/uftLVctel3R/5fZ9ktbXPghAMWXu8c3sekl3\nS+o1s99r4JB+oaSlkn5jZg9I2iPpj
lY2CqB5uB6/xb788stk/emnn07WV6xYkaxnfe//me6yyy5L\n1t94441k/aKLLkrWx48fP+yezhRcjw9gEIIPBETwgYAIPhAQwQcCIvhAQAQfCIhx/II7ePBgst7X\n15esP/zww8n6V199Neyeql1++eXJ+oIF+a7WnjRpUrJ+8cUX53r+sxnj+AAGIfhAQAQfCIjgAwER\nfCAggg8ERPCBgBjHB85SjOMDGITgAwERfCAggg8ERPCBgAg+EBDBBwIi+EBABB8IiOADARF8ICCC\nDwRE8IGACD4QUGbwzWyymb1lZn80s14ze6yyvNvM9pvZf1V+bm19uwCaIfN6fDObKGmiu28zs/Ml\n/aek2ZLulPQXd38h4/Fcjw90QOp6/NFZD3b3Q5IOVW4fM7Ptkk7OcjDkkwIotmGd45vZVElXS3qn\nsuhRM9tmZr8ws7FN7g1AizQc/Mph/lpJP3b3Y5J+Lulyd79aA0cEyUN+AMWReagvSWY2WgOhX+3u\n6yXJ3Y9UrfKKpN/Ve3xPT8+p26VSSaVSaQStAkgpl8sql8sNrdvQl22a2SpJ/+PuP61aNrFy/i8z\n+4mkGe4+Z4jH8uYe0AGpN/caeVf/ekn/LqlXkld+Fkqao4Hz/X5JuyX9yN1Pm7qV4AOdkSv4Tdg4\nwQc6gK/XBjAIwQcCIvhAQAQfCIjgAwERfCAggg8ERPCBgAg+EBDBBwIi+EBABB8IqO3Bb/R64U6h\nv3yK3F+Re5Pa2x/Br0F/+RS5vyL3Jp3lwQfQeQQfCKgtX8TR0g0AqKtj38ADoHg41AcCIvhAQG0L\nvpndamY7zGynmf2sXdttlJntNrP/NrPfm9m7BehnuZn1mdkfqpaNM7MNZvaBmb3ZydmL6vRXmIlU\nh5js9Z8qywvxGnZ6Mtq2nOOb2ShJOyXNknRA0lZJd7n7jpZvvEFm9qGk77j7J53uRZLM7LuSjkla\n5e7TK8uWSvpfd/+Xyi/Pce4+v0D9dauBiVTbITHZ6w9VgNcw72S0ebVrj3+tpF3uvsfdT0j6lQb+\nkkViKtCpj7tvllT7S2i2pJWV2ysl3d7WpqrU6U8qyESq7n7I3bdVbh+TtF3SZBXkNazTX9smo23X\nf/RJkvZV3d+v//9LFoVLetPMtprZQ51upo4JJyctqcxiNKHD/QylcBOpVk32ukVSV9Few05MRluY\nPVwBXO/ufy/pHzTwwn+30w01oGhjsYWbSHWIyV5rX7OOvoadmoy2XcH/s6RvVd2fXFlWGO5+sPLn\nEUnrNHB6UjR9ZtYlnTpHPNzhfgZx9yNV0ya9ImlGJ/sZarJXFeg1rDcZbTtew3YFf6ukvzGzKWb2\nV5LukvR6m7adycy+UfnNKzP7pqTvSXq/s11JGjjXqz7fe13S/ZXb90laX/uANhvUXyVIJ/1AnX8N\nfynpT+7+UtWyIr2Gp/XXrtewbZ/cqwxLvKSBXzbL3X1JWzbcADP7aw3s5V0DU4ev6XR/ZvaapJKk\n8ZL6JHVL+ldJv5V0qaQ9ku5w96MF6m+mGphItU391Zvs9V1Jv1GHX8O8k9Hm3j4f2QXi4c09ICCC\nDwRE8IGACD4QEMEHAiL4QEAEHwiI4AMB/R9U4yve45rLUQAAAABJRU5ErkJggg==\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP4AAAD8CAYAAABXXhlaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADIxJREFUeJzt3V+MXOV5x/HvQ1AvElAMUb22cEOqVqSmwkDbIFXkYoLb\nBJVIRrlwaYpkUoS4oG1U38ThxkvVi6QXSFQoN4QEu7LlpJZSoKKFRnhU0eLaamOwsYEIZBIS2NCK\noviOsk8vduyuN7tnZnfO/LGf70ca+cx5z8x59si/Oee8Z868kZlIquWSSRcgafwMvlSQwZcKMvhS\nQQZfKsjgSwUNFfyIuDUiXo6IVyPiK20VJWm0Yq3X8SPiEuBVYCvwU+AocEdmvrxkOb8oIE1IZsZy\n84fZ498E/DAz38jM94EDwLYVVn7usXv37vOeT9vD+i7e+qa5tlHU12SY4F8F/HjR8zd78yRNOTv3\npIIuHeK1PwE+vuj5pt68XzA7O3tuet26dUOscvQ6nc6kS2hkfWs3zbXB8PV1u1263e5Ayw7Tufch\n4BUWOvfeAo4Af5SZp5Ysl2tdh6S1iwhyhc69Ne/xM/ODiPhT4BkWThkeXRp6SdNpzXv8gVfgHl+a\niKY9vp17UkEGXyrI4EsFGXypIIMvFWTwpYIMvlSQwZcKMvhSQQZfKsjgSwUZfKkggy8VZPClggy+\nVJDBlwoy+FJBBl8qyOBLBRl8qSCDLxVk8KWCDL5U0DBDaOki8PDDDze279ixo7H98ssvb7McjYl7\nfKkggy8VZPClggy+VJDBlwoy+FJBBl8qKIYZuz4iTgPvAfPA+5l50zLL5DDr0Ghdc801je2HDx9u\nbL/yyivbLEctiggyM5ZrG/YLPPNAJzPfHfJ9JI3RsIf60cJ7SBqzYUObwNMRcTQi7mmjIEmjN+yh\n/s2Z+VZE/DLwzxFxKjOfW7rQ7OzsuelOp0On0xlytZKW6na7dLvdgZYdqnPvvDeK2A38PDMfXDLf\nzr0pZufexaupc2/Nh/oR8eGIuKw3/RHgs8CJtb6fpPEZ5lB/BvheRGTvffZl5jPtlCVplFo71F9x\nBR7qT7WIZY8Ezzl58mRj++bNm9ssRy0ayaG+pAuXwZcKMvhSQQZfKsjgSwUZfKkggy8V5O/qF7dx\n48bGdq/jX5zc40sFGXypIIMvFWTwpYIMvlSQwZcKMvhSQV7HL27r1q2N7a+99tqYKtE4uceXCjL4\nUkEGXyrI4EsFGXypIIMvFWTwpYK8jq9GJ044ONLFyD2+VJDBlwoy+FJBBl8qyOBLBRl8qSCDLxXU\n9zp+RDwKfB6Yy8wtvXlXAN8BrgZOA9sz870R1qkRyczG9n379jW27927t81yNCaD7PG/DXxuybxd\nwPcz85PAs8BX2y5M0uj0DX5mPge8u2T2NmBPb3oPcHvLdUkaobWe46/PzDmAzHwbWN9eSZJGra3v\n6jeeKM7Ozp6b7nQ6dDqdllYr6axut0u32x1o2ejXuQMQEVcDTy7q3DsFdDJzLiI2AIcyc9nREyMi\nB1mHJuPOO+9sbN+/f39j+/z8fJvlqEURQWbGcm2DHupH73HWE8BdvekdwONrrk7S2PUNfkTsB/4N\nuCYifhQRXwK+Bvx+RLwCbO09l3SBGOhQf6gVeKg/1R577LHG9rvvvrux/YMPPmixGrWpjUN9SRcR\ngy8VZPClggy+VJDBlwoy+FJBBl8qyN/VL25ubq6xvd93MF544YXG9uuvv37VNWn03ONLBRl8qSCD\nLxVk8KWCDL5UkMGXCjL4UkHej19cv/vpr7322sb2e++9t7F9586dq65J7fB+fEnnMfhSQQZfKsjg\nSwUZfKkggy8VZPClgryOr0bXXXfdUK8/fvx4S5VotbyOL+k8Bl8qyOBLBRl8qSCDLxVk8KWCDL5U\nUN/f1Y+IR4HPA3OZuaU3bzdwD/Cz3mL3Z+Y/jaxKTcxtt93W2P7UU0+NqRK1aZA9/reBzy0z/8HM\n/K3ew9BLF5C+wc/M54B3l2la9htBkqbfMOf490XEsYj4ZkR8tLWKJI3cWsfO+wbwl5mZEfFXwIPA\n3SstPDs7e2660+nQ6XTWuFpJK+l2u3S73YGWHegmnYi4GnjybOfeoG29dm/SuYDt2rWrsb1f596L\nL77YZjlahTZu0gkWndNHxIZFbV8ATqy9PEnjNsjlvP1AB/hYRPwI2A18JiJuAOaB00DzbyxLmire\nj69Ghw4damzfunVrY/tLL73U2L558+ZV16TBeD++pPMYfKkggy8VZPClggy+VJDBlwoy+FJBa/2u\nvoq48cYbh3r9gQMHGtsfeOCBod5fa+MeXyrI4EsFGXypIIMvFWTwpYIMvlSQwZcK8jq+Gq1bt66x\nfWZmprH99ddfb7MctcQ9vlSQwZcKMvhSQQZfKsjgSwUZfKkggy8V5HV8DWXTpk2N7UeOHBlTJVoN\n9/hSQQZfKsjgSwUZfKkggy8VZPClggy+VFD0G7s+IjYBe4EZYB54JDP/JiKuAL4DXA2cBrZn5nvL\nvD77rUMXroMHDza2b9++vbF9fn6+zXK0SESQmbFc2yB7/P8FdmbmbwK/C9wXEb8B7AK+n5mfBJ4F\nvtpWwZJGq2/wM/PtzDzWmz4DnAI2AduAPb3F9gC3j6pISe1a1Tl+RHwCuAE4DMxk5hwsfDgA69su\nTtJoDPxd/Yi4DDgIfDkzz0TE0hP3FU/kZ2dnz013Oh06nc7qqpTUV7fbpdvtDrRs3849gIi4FPgH\n4B8z86HevFNAJzPnImIDcCgzNy/zWjv3LmJ27k2vYTv3AL4FnDwb+p4ngLt60zuAx9dcoaSx6nuo\nHxE3A38MHI+IH7BwSH8/8HXguxHxJ8AbQPNHu6Sp0Tf4mfmvwIdWaP69dsvRhWZubq6xfePGjWOq\nRKvhN/ekggy+VJDBlwoy+FJBBl8qyOBLBRl8qSB/V19Def755xvbb7nlljFVotVwjy8VZPClggy+\nVJDBlwoy+FJBBl8qyOBLBXkdX0OZmZlpbN+yZcuYKtFquMeXCjL4UkEGXyrI4EsFGXypIIMvFWTw\npYIGGkJrqBU4hJY0EW0MoSXpImLwpYIMvlSQwZcKMvhSQQZfKqhv8CNiU0Q8GxEvRcTxiPiz3vzd\nEfFmRPxn73Hr6MuV1Ia+1/EjYgOwITOPRcRlwH8A24A/BH6emQ/2eb3X8aUJaLqO3/eHODLzbeDt\n3vSZiDgFXHX2vVurUtLYrOocPyI+AdwA/Htv1n0RcSwivhkRH225NkkjMnDwe4f5B4EvZ+YZ4BvA\nr2XmDSwcETQe8kuaHgP95l5EXMpC6P82Mx8HyMx3Fi3yCPDkSq+fnZ09N93pdOh0OmsoVVKTbrdL\nt9sdaNmBbtKJiL3Af2XmzkXzNvTO/4mIvwA+lZlfXOa1du5JE9DUuTdIr/7NwL8Ax4HsPe4HvsjC\n+f48cBq4NzPnlnm9wZcmYKjgt7Byg
y9NgLflSjqPwZcKMvhSQQZfKsjgSwUZfKkggy8VZPClggy+\nVJDBlwoy+FJBBl8qaOzBH/R+4UmxvuFMc33TXBuMtz6Dv4T1DWea65vm2uAiD76kyTP4UkFj+SGO\nka5A0oom9gs8kqaPh/pSQQZfKmhswY+IWyPi5Yh4NSK+Mq71DioiTkfECxHxg4g4MgX1PBoRcxHx\n4qJ5V0TEMxHxSkQ8PcnRi1aob2oGUl1msNc/782fim046cFox3KOHxGXAK8CW4GfAkeBOzLz5ZGv\nfEAR8Trw25n57qRrAYiITwNngL2ZuaU37+vAf2fmX/c+PK/IzF1TVN9uBhhIdRwaBnv9ElOwDYcd\njHZY49rj3wT8MDPfyMz3gQMs/JHTJJiiU5/MfA5Y+iG0DdjTm94D3D7WohZZoT6YkoFUM/PtzDzW\nmz4DnAI2MSXbcIX6xjYY7bj+o18F/HjR8zf5/z9yWiTwdEQcjYh7Jl3MCtafHbSkN4rR+gnXs5yp\nG0h10WCvh4GZaduGkxiMdmr2cFPg5sz8HeAPWNjwn550QQOYtmuxUzeQ6jKDvS7dZhPdhpMajHZc\nwf8J8PFFzzf15k2NzHyr9+87wPdYOD2ZNnMRMQPnzhF/NuF6zpOZ7ywaNukR4FOTrGe5wV6Zom24\n0mC049iG4wr+UeDXI+LqiPgl4A7giTGtu6+I+HDvk5eI+AjwWeDEZKsCFs71Fp/vPQHc1ZveATy+\n9AVjdl59vSCd9QUmvw2/BZzMzIcWzZumbfgL9Y1rG47tm3u9yxIPsfBh82hmfm0sKx5ARPwqC3v5\nZGHo8H2Tri8i9gMd4GPAHLAb+Hvg74BfAd4Atmfm/0xRfZ9hgIFUx1TfSoO9HgG+y4S34bCD0Q69\nfr+yK9Vj555UkMGXCjL4UkEGXyrI4EsFGXypIIMvFWTwpYL+DwPQlgiqSU45AAAAAElFTkSuQmCC\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP4AAAD8CAYAAABXXhlaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADIhJREFUeJzt3X+o3fV9x/HnOw0qrRKlmASSNR2by8ogii7KsMgp1lZG\nIdI/MpeBsRuhoNvK+k+t/+Qi+6PZH4L7IyA2KcmoJF2k8wdsumKPw0mWMJtN16iFEVvb5FaHSvOP\nRH3vj3uSXm/v/Z6Te359k/fzARe/5/v5nvt554uv8/n+uucTmYmkWlZMuwBJk2fwpYIMvlSQwZcK\nMvhSQQZfKmio4EfE7RHxSkS8FhHfGFVRksYrlnsfPyJWAK8BtwK/AI4Cd2bmKwu280EBaUoyMxZb\nP8yIfyPwk8x8PTPPAAeALUt0fu5n586dH3ndth/ru3jra3Nt46ivyTDBXwf8bN7rN3rrJLWcF/ek\nglYO8d6fA5+a93p9b91vmJmZObd85ZVXDtHl+HU6nWmX0Mj6lq/NtcHw9XW7Xbrd7kDbDnNx72PA\nq8xd3DsJHAH+NDOPL9gul9uHpOWLCHKJi3vLHvEz84OI+EvgGeZOGfYsDL2kdlr2iD9wB4740lQ0\njfhe3JMKMvhSQQZfKsjgSwUZfKkggy8VZPClggy+VJDBlwoy+FJBBl8qyOBLBRl8qSCDLxVk8KWC\nDL5UkMGXCjL4UkEGXyrI4EsFGXypIIMvFWTwpYIMvlSQwZcKMvhSQQZfKsjgSwUZfKkggy8VZPCl\nglYO8+aIOAG8C3wInMnMG0dRlC4cGzdubGzfvHlzY/vevXsb2y+55JLzrkn9DRV85gLfycy3R1GM\npMkY9lA/RvA7JE3YsKFN4OmIOBoRO0ZRkKTxG/ZQ/+bMPBkRVwP/GhHHM/P5hRvNzMycW+50OnQ6\nnSG7lbRQt9ul2+0OtG1k5kg6jYidwK8y88EF63NUfah9vLjXXhFBZsZibcs+1I+Ij0fE5b3lTwBf\nAF5e7u+TNDnDHOqvAb4fEdn7Pd/NzGdGU5akcRrZof6SHXiof1FbsaL5oHHVqlWN7UeOHGlsv+aa\na867Js0Zy6G+pAuXwZcKMvhSQQZfKsjgSwUZfKkggy8VNOyz+lKje+65p7Hd+/TT4YgvFWTwpYIM\nvlSQwZcKMvhSQQZfKsjgSwV5H19jdcMNN0y7BC3CEV8qyOBLBRl8qSCDLxVk8KWCDL5UkMGXCjL4\nUkEGXyrI4EsFGXypIIMvFWTwpYIMvlSQwZcK6vv3+BGxB/gSMJuZm3rrrgIOAhuAE8DWzHx3jHVq\nSp577rnG9sxsbF+3bt0oy9GIDDLifwf44oJ19wE/yMyNwLPAN0ddmKTx6Rv8zHweeHvB6i3Avt7y\nPuCOEdclaYyWe46/OjNnATLzFLB6dCVJGrdRfede44nezMzMueVOp0On0xlRt5LO6na7dLvdgbaN\nfhdnACJiA/DkvIt7x4FOZs5GxFrgh5n5mSXem4P0oXbqd3Gv34f44cOHG9tvuumm8y1JA4oIMjMW\naxv0UD96P2c9AdzdW94OPL7s6iRNXN/gR8SjwAvA70XETyPiK8C3gNsi4lXg1t5rSReIvuf4mblt\niabPj7gWtVDEokeKA7dv2rRplOVoRHxyTyrI4EsFGXypIIMvFWTwpYIMvlSQwZcKGtWz+rpIHT16\ndNolaAwc8aWCDL5UkMGXCjL4UkEGXyrI4EsFGXypIO/jF/f+++83th84cKCx/dJLL21s7/f3+poO\nR3ypIIMvFWTwpYIMvlSQwZcKMvhSQQZfKsj7+MWdOXOmsf3FF19sbN+2balpF+Zcdtll512Txs8R\nXyrI4EsFGXypIIMvFWTwpYIMvlSQwZcK6nsfPyL2AF8CZjNzU2/dTmAH8MveZvdn5r+MrUqNzTvv\nvNPYnpmN7bfccssoy9GEDDLifwf44iLrH8zM63s/hl66gPQNfmY+D7y9SJNfrSJdoIY5x783Io5F\nxLcjYtXIKpI0dst9Vn838EBmZkT8LfAg8BdLbTwzM3NuudPp0Ol0ltmtpKV0u1263e5A20a/izcA\nEbEBePLsxb1B23rtOUgfmo6TJ082tq9bt66x/eGHH25s37Fjx3nXpNGICDJz0VPyQQ/1g3nn9BGx\ndl7bl4GXl1+epEkb5Hbeo0AH+GRE/BTYCXwuIq4DPgROAF8dY42SRmygQ/2hOvBQv9Uee+yxxvat\nW7c2tr/33nuN7StX+pUP0zKKQ31JFxGDLxVk8KWCDL5UkMGXCjL4UkEGXyrIm6wXuQ8++KCxff/+\n/UP9fu/TX5gc8aWCDL5UkMGXCjL4UkEGXyrI4EsFGXypIG/CXuT6fW/+U0891dh+7bXXjrIctYQj\nvlSQwZcKMvhSQQZfKsjgSwUZfKkggy8V5H18Ndq1a9e0S9AYOOJLBRl8qSCDLxVk8KWCDL5UkMGX\nCjL4UkF97+NHxHpgP7AG+BB4JDP/PiKuAg4CG4ATwNbMfHeMtWoZDh482Nh+9dVXN7bfdtttoyxH\nLTHIiP8+8PXM/APgj4B7I+L3gfuAH2TmRuBZ4JvjK1PSKPUNfmaeysxjveXTwHFgPbAF2NfbbB9w\nx7iKlDRa53WOHxGfBq4DDgNrMnMW5j4cgNWjLk7SeAz8rH5EXA4cAr6WmacjIhdssvD1OTMzM+eW\nO50OnU7n/KqU1Fe326Xb7Q60bWQumddfbxSxEngK+OfMfKi37jjQyczZiFgL/DAzP7PIe3OQPjQe\nu3fvbmx/4IEHGttPnTo1ynI0QRFBZsZibYMe6u8Ffnw29D1PAHf3lrcDjy+7QkkTNcjtvJuBPwNe\niogfMXdIfz+wC/heRPw58DqwdZyFShqdvsHPzH8HPrZE8+dHW45G7a233pp2CWohn9yTCjL4UkEG\nXyrI4EsFGXypIIMvFWTwpYL8Xv2L3KFDhxrbt2/fPqFK1CaO+FJBBl8qyOBLBRl8qSCDLxVk8KWC\nDL5UkPfxL3LXX399Y/sVV1wxoUrUJo74UkEGXyrI4EsFGXypIIMvFWTwpYIMvlSQ9/EvcnfddVdj\n+wsvvDChStQmjvhSQQZfKsjgSwUZfKkggy8VZPClgvoGPyLWR8SzEfE/EfFSRPxVb/3OiHgjIl7s\n/dw+/nIljUJkZvMGEWuBtZl5LCIuB/4T2AL8CfCrzHywz/uzXx+SRi8iyMxYrK3vAzyZeQo41Vs+\nHRHHgXVnf/fIqpQ0Med1jh8RnwauA/6jt+reiDgWEd+OiFUjrk3SmAwc/N5h/iHga5l5GtgN/E5m\nXsfcEUHjIb+k9hjoWf2IWMlc6P8hMx8HyMw3523yCPDkUu+fmZk5t9zpdOh0OssoVVKTbrdLt9sd\naNu+F/cAImI/8FZmfn3eurW9838i4m+AzZm5bZH3enFPmoKmi3uDXNW/Gfg34CUgez/3A9uYO9//\nEDgBfDUzZxd5v8GXpmCo4I+gc4MvT
UFT8H1yTyrI4EsFGXypIIMvFWTwpYIMvlSQwZcKMvhSQQZf\nKsjgSwUZfKkggy8VNPHgD/r3wtNifcNpc31trg0mW5/BX8D6htPm+tpcG1zkwZc0fQZfKmgiX8Qx\n1g4kLWlq38AjqX081JcKMvhSQRMLfkTcHhGvRMRrEfGNSfU7qIg4ERH/FRE/iogjLahnT0TMRsR/\nz1t3VUQ8ExGvRsTT05y9aIn6WjOR6iKTvf51b30r9uG0J6OdyDl+RKwAXgNuBX4BHAXuzMxXxt75\ngCLif4EbMvPtadcCEBGfBU4D+zNzU2/dLuD/MvPveh+eV2XmfS2qbycDTKQ6CQ2TvX6FFuzDYSej\nHdakRvwbgZ9k5uuZeQY4wNw/sk2CFp36ZObzwMIPoS3Avt7yPuCOiRY1zxL1QUsmUs3MU5l5rLd8\nGjgOrKcl+3CJ+iY2Ge2k/kdfB/xs3us3+PU/si0SeDoijkbEjmkXs4TVZyct6c1itHrK9SymdROp\nzpvs9TCwpm37cBqT0bZmhGuBmzPzD4E/Zm7Hf3baBQ2gbfdiWzeR6iKTvS7cZ1Pdh9OajHZSwf85\n8Kl5r9f31rVGZp7s/fdN4PvMnZ60zWxErIFz54i/nHI9H5GZb86bNukRYPM061lssldatA+Xmox2\nEvtwUsE/CvxuRGyIiEuAO4EnJtR3XxHx8d4nLxHxCeALwMvTrQqYO9ebf773BHB3b3k78PjCN0zY\nR+rrBemsLzP9fbgX+HFmPjRvXZv24W/UN6l9OLEn93q3JR5i7sNmT2Z+ayIdDyAifpu5UT6Zmzr8\nu9OuLyIeBTrAJ4FZYCfwT8A/Ar8FvA5szcx3WlTf5xhgItUJ1bfUZK9HgO8x5X047GS0Q/fvI7tS\nPV7ckwoy+FJBBl8qyOBLBRl8qSCDLxVk8KWCDL5U0P8D9gVPO2Pq+v4AAAAASUVORK5CYII=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP4AAAD8CAYAAABXXhlaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADghJREFUeJzt3X+MFfW5x/HPo6CBEpQ0AonYrblGbmKCRAuxseppyG31\nhgStRr00BttiGqW2sYmpNSZ7tt4/iolGb0z9w4LBBrfUml7sP8pFc6Jrg5B7S8UWtbEuxbIsW4KG\n9Qdiee4fe6CH9ZzvnN05P4Z93q9k4+w8c2Yej37OzJyZna+5uwDEclq3GwDQeQQfCIjgAwERfCAg\ngg8ERPCBgHIF38yuNrM3zOwtM/tRq5oC0F422ev4ZnaapLckLZO0T9IOSTe7+xvjluNGAaBL3N3q\nzc+zx18q6c/uvsfdj0r6paQVDTZ+4qe3t/ek34v2Q39Tt78i99aO/lLyBP9cSXtrfn+3Og9AwfHl\nHhDQtByv/ZukL9T8vqA67zPK5fKJ6bPPPjvHJtuvVCp1u4Uk+pu8Ivcm5e+vUqmoUqk0tWyeL/dO\nl/Smxr7cG5K0XdJ/uPvuccv5ZLcBYPLMTN7gy71J7/Hd/R9m9j1JWzR2yrBufOgBFNOk9/hNb4A9\nPtAVqT0+X+4BARF8ICCCDwRE8IGACD4QEMEHAiL4QEAEHwiI4AMBEXwgIIIPBETwgYAIPhAQwQcC\nIvhAQAQfCIjgAwERfCAggg8ERPCBgAg+EBDBBwIi+EBABB8IiOADARF8ICCCDwRE8IGACD4QEMEH\nAiL4QEDT8rzYzAYlvS/pmKSj7r60FU2hcwYGBpL1m266KVnv7+9P1q+88soJ94T2yxV8jQW+5O6H\nWtEMgM7Ie6hvLVgHgA7LG1qX9LyZ7TCz21rREID2y3uof7m7D5nZOZL+x8x2u/tnThrL5fKJ6VKp\npFKplHOzAMarVCqqVCpNLWvu3pKNmlmvpMPu/tC4+d6qbaD1+HJv6jIzubvVq036UN/MZprZrOr0\n5yR9TdLrk10fgM7Jc6g/T9JvzMyr69no7lta0xaAdpp08N39HUmLW9gLumDTpk3J+tDQULL+8ssv\nJ+sc6hcTl+KAgAg+EBDBBwIi+EBABB8IiOADARF8IKC89+qj4Pbu3ZusP/3007nWv3z58lyvR3ew\nxwcCIvhAQAQfCIjgAwERfCAggg8ERPCBgLiOP8XdcMMNyfqBAweS9aznI1544YUTbQkFwB4fCIjg\nAwERfCAggg8ERPCBgAg+EBDBBwLiOv4pbt++fcn6a6+9lmv9t99+e7I+Y8aMXOtvt6NHjybr06dP\n71AnxcIeHwiI4AMBEXwgIIIPBETwgYAIPhAQwQcCyryOb2brJC2XNOzui6rz5kjaJKlH0qCkG939\n/Tb2OWVlXWe+7777kvWs6/RHjhxJ1nt6epL1ZcuWJetFt3r16mT9scceS9ZnzpzZynYKo5k9/hOS\nvj5u3j2Strr7QkkvSvpxqxsD0D6ZwXf3AUmHxs1eIWlDdXqDpGtb3BeANprsOf5cdx+WJHffL2lu\n61oC0G6tulffU8VyuXxiulQqZT7HDcDEVSoVVSqVppadbPCHzWyeuw+b2XxJySc21gYfQHuM36n2\n9fU1XLbZQ32r/hz3rKRbq9OrJG2eSIMAuisz+Gb2lKTfSbrQzP5qZt+S9FNJ/2Zmb0paVv0dwCnC\n3JOn5/k3YObt3sapbHR0NFmfPXt2rvVPm5Y+m9u1a1eyvnDhwlzbz+vYsWPJ+qpVq5L1jRs3Jus7\nduxI1i+99NJkvcjMTO5u9WrcuQcERPCBgAg+EBDBBwIi+EBABB8IiOADAfFc/S7bs2dPrtdnXafv\n7+9P1rt9nT7L4OBgsr59+/ZkPeseknfeeSdZP5Wv46ewxwcCIvhAQAQfCIjgAwERfCAggg8ERPCB\ngLiO32YfffRRsn7XXXflWv/atWuT9euvvz7X+vN67733kvWRkZFk/bLLLkvWDx0a/wDok5111lnJ\n+lVXXZWsT1Xs8YGACD4QEMEHAiL4QEAEHwiI4AMBEXwgIK7jt9nQ0FCyvnXr1lzrf+WVV5L1Dz74\nINf6BwYGkvVXX301Wf/kk0+S9azn5h85ciRZz3L33Xcn6+ecc06u9Z+q2OMDARF8ICCCDwRE8IGA\nCD4QEMEHAiL4QECW9dxxM1snabmkYXdfVJ3XK+k2SQeqi93r7s81eL1nbWMqe/DBB5P1rOvM3Xbd\nddcl62eccUay/swzzyTrn376abKeNW5A1n0Ml1xySbJ++umnJ+unMjOTu1u9WjN7/Cckfb3O/Ifc\n/ZLqT93QAyimzOC7+4Ckeo85qftJAqD48pzjrzGznWb2czNLP98IQKFM9l79n0n6ibu7mf2npIck\nfafRwuVy+cR0qVRSqVSa5GYBNFKpVFSpVJpadlLBd/faJyQ+Lum3qeVrgw+gPcbvVPv6+hou2+yh\nvqnmnN7M5tfUviHp9Ql1CKCrMvf4ZvaUpJKkz5vZXyX1SvqqmS2WdEzSoKTvtrFHAC2WGXx3X1ln\n9hNt6GVKWrp0aVe3P3369GR95cp6/3n/6c4770zWL7744mQ967n627ZtS9Znz56drC9ZsiRZR33c\nuQcERPCBgAg+EBDBBwIi+EBABB8IiOADAWX+PX7uDQT/e/zDhw8n6/39/cn6wYMHk/Wenp5k/Zpr\nrknW58yZk6xnWb9+fbK+evXqZD3rOvxLL72UrJ955pnJemR5/x4fwBRD8IGACD4QEMEHAiL4QEAE\nHwiI4AMBcR0fSSMjI8n6+eefn6x/+OGHyfodd9yRrD/66KPJOhrjOj6AkxB8ICCCDwRE8IGACD4Q\nEMEHAiL4QECTHTsPQbzwwgvJetZ1+qxxEh9++OGJtoQWYI8PBETwgYAIPhAQwQcCIvhAQAQfCIjg\nAwFlXsc3swWSnpQ0T9IxSY+7+3+Z2RxJmyT1SBqUdKO7v9/GXtEGb7/9drK+Zs2aXOtftGhRsj5t\nGreSdEMze/xPJf3Q3S+S9GVJa8zsXyXdI2mruy+U9KKkH7evTQCtlBl8d9/v7jur06OSdktaIGmF\npA3VxTZIurZdTQJorQmd45vZFyUtlrRN0jx3H5bGPhwkzW11cwDao+kTLDObJenXkn7g7qNmNv5B\neg0frFcul09Ml0qlzPu3AUxcpVJRpVJpatmmgm9m0zQW+l+4++bq7GEzm+fuw2Y2X9KBRq+vDT6A\n9hi/U+3r62u4bLOH+usl/cndH6mZ96ykW6vTqyRtHv8iAMXUzOW8yyV9U9IuM/u9xg7p75W0VtKv\nzOzbkvZIurGdjQJoHZ6rP8V9/PHHyfp5552XrB88eDBZv+KKK5L15557LlmfMWNGso7J47n6AE5C\n8IGACD4QEMEHAiL4QEAEHwiI4AMB8cfQU1zWc+uzrtNneeCBB5J1rtMXE3t8ICCCDwRE8IGACD4Q\nEMEHAiL4QEAEHwiI6/hTXNYz2LKel
XDLLbck6xdddNFEW0IBsMcHAiL4QEAEHwiI4AMBEXwgIIIP\nBETwgYC4jj/F3X///cn6li1bkvULLrggWZ81a9aEe0L3sccHAiL4QEAEHwiI4AMBEXwgIIIPBJQZ\nfDNbYGYvmtkfzWyXmd1Znd9rZu+a2f9Vf65uf7sAWsGy/h7bzOZLmu/uO81slqT/lbRC0k2SDrv7\nQxmv96xtAGg9M5O7W71a5g087r5f0v7q9KiZ7ZZ07vF1t6xLAB0zoXN8M/uipMWSXq3OWmNmO83s\n52Z2Vot7A9AmTQe/epj/a0k/cPdRST+T9C/uvlhjRwTJQ34AxdHUvfpmNk1jof+Fu2+WJHcfqVnk\ncUm/bfT6crl8YrpUKqlUKk2iVQAplUol8xmLx2V+uSdJZvakpL+7+w9r5s2vnv/LzO6StMTdV9Z5\nLV/uAV2Q+nKvmW/1L5f0kqRdkrz6c6+klRo73z8maVDSd919uM7rCT7QBbmC34KNE3ygC1LB5849\nICCCDwRE8IGACD4QEMEHAiL4QEAEHwiI4AMBEXwgIIIPBETwgYAIPhBQx4Pf7N8Ldwv95VPk/orc\nm9TZ/gj+OPSXT5H7K3Jv0hQPPoDuI/hAQB15EEdbNwCgoa49gQdA8XCoDwRE8IGAOhZ8M7vazN4w\ns7fM7Eed2m6zzGzQzP5gZr83s+0F6GedmQ2b2Ws18+aY2RYze9PMnu/m6EUN+ivMQKp1Bnv9fnV+\nId7Dbg9G25FzfDM7TdJbkpZJ2idph6Sb3f2Ntm+8SWb2F0mXuvuhbvciSWb2FUmjkp5090XVeWsl\nHXT3B6ofnnPc/Z4C9derJgZS7YTEYK/fUgHew7yD0ebVqT3+Ukl/dvc97n5U0i819i9ZJKYCnfq4\n+4Ck8R9CKyRtqE5vkHRtR5uq0aA/qSADqbr7fnffWZ0elbRb0gIV5D1s0F/HBqPt1P/o50raW/P7\nu/rnv2RRuKTnzWyHmd3W7WYamHt80JLqKEZzu9xPPYUbSLVmsNdtkuYV7T3sxmC0hdnDFcDl7v4l\nSf+usTf+K91uqAlFuxZbuIFU6wz2Ov496+p72K3BaDsV/L9J+kLN7wuq8wrD3Yeq/xyR9BuNnZ4U\nzbCZzZNOnCMe6HI/J3H3kZphkx6XtKSb/dQb7FUFeg8bDUbbifewU8HfIekCM+sxszMk3Szp2Q5t\nO5OZzax+8srMPifpa5Je725XksbO9WrP956VdGt1epWkzeNf0GEn9VcN0nHfUPffw/WS/uTuj9TM\nK9J7+Jn+OvUeduzOvepliUc09mGzzt1/2pENN8HMztfYXt41NnT4xm73Z2ZPSSpJ+rykYUm9kv5b\n0tOSzpO0R9KN7v5egfr7qpoYSLVD/TUa7HW7pF+py+9h3sFoc2+fW3aBePhyDwiI4AMBEXwgIIIP\nBETwgYAIPhAQwQcCIvhAQP8PPQHL9DXNtssAAAAASUVORK5CYII=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP4AAAD8CAYAAABXXhlaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADt5JREFUeJzt3X+sVPWZx/HPg4QE0FSsAgJbMUu2GI0/qmg2kDANtTWG\nCPYPF9xE7BrTP2y3KX+IGA0XXZN2NSZsYqNSIEDAtgsWbGJUCJmou+mCbkFZfkiyQivIBddfhSjg\n3mf/uAN77+2d75l7z8ycc3ner+TGc88zM+dh4ueec+Y753zN3QUglmFFNwCg/Qg+EBDBBwIi+EBA\nBB8IiOADAeUKvpndZmb7zOw9M1vUrKYAtJYNdhzfzIZJek/SLElHJO2QNM/d9/V5HF8UAAri7tbf\n+jx7/JslHXD3Q+5+RtKvJM2ps/FzP0uWLOn1e9l+6O/87a/MvbWiv5Q8wZ8o6U89fv+gtg5AyfHh\nHhDQ8BzPPSzpGz1+n1Rb9xc6OjrOLV988cU5Ntl6lUql6BaS6G/wytyblL+/arWqarXa0GPzfLh3\ngaT96v5w70NJ2yXNd/e9fR7ng90GgMEzM3mdD/cGvcd39/81sx9Jek3dpwwr+oYeQDkNeo/f8AbY\n4wOFSO3x+XAPCIjgAwERfCAggg8ERPCBgAg+EBDBBwIi+EBABB8IiOADARF8ICCCDwRE8IGACD4Q\nEMEHAiL4QEAEHwiI4AMBEXwgIIIPBETwgYAIPhAQwQcCIvhAQAQfCIjgAwERfCAggg8ERPCBgAg+\nEBDBBwIanufJZnZQ0meSuiSdcfebm9EUgNbKFXx1B77i7p80oxkA7ZH3UN+a8BoA2ixvaF3Sq2a2\nw8zub0ZDAFov76H+dHf/0Mwuk7TFzPa6+5t9H9TR0XFuuVKpqFKp5NwsgL6q1aqq1WpDjzV3b8pG\nzWyJpD+7+9N91nuztgGgcWYmd7f+aoM+1DezUWZ2YW15tKTvSto92NcD0D55DvXHSfqtmXntdda5\n+2vNaQtAKzXtUL/uBjjUBwrRkkN9AEMXwQcCIvhAQAQfCIjgAwERfCAggg8ElPe7+sjw+eefJ+tb\nt27N9fpr165N1hv97nY9Wd/BMOt3mLhtZs2alaxv2LChTZ0MLezxgYAIPhAQwQcCIvhAQAQfCIjg\nAwERfCCg8Nfjv/7668n6G2+8kawvX748WT916lSyfuzYsWS9aGUfxx8xYkSyPm3atGR906ZNyfol\nl1wy4J7KguvxAfRC8IGACD4QEMEHAiL4QEAEHwiI4AMBnffj+DfccEOyvnt3evKfrq6uZrbTdCNH\njkzW58+fn6zPmTMnWb/lllsG3NNALFu2LFl/7rnnkvWPP/441/affPLJZH3hwoW5Xr9IjOMD6IXg\nAwERfCAggg8ERPCBgAg+EBDBBwLKHMc3sxWSZkvqdPdra+vGSPq1pCskHZR0l7t/Vuf5hY7jDxuW\n/ttW9PXkM2fOTNYfeeSRZH3q1KnJ+oQJEwbcUzudPHkyWZ8xY0ay/s477+Ta/ty5c5P1jRs35nr9\nIuUdx18l6Xt91j0kaau7f1PSNkmL87UIoJ0yg+/ub0r6pM/qOZJW15ZXS0r/2QRQKoM9xx/r7p2S\n5O5HJY1tXksAWq1Zc+clT+I7OjrOLVcqFVUqlSZtFsBZ1Wq14bkSBxv8TjMb5+6dZjZeUvKOkT2D\nD6A1+u5Uly5dWvexjR7qW+3nrJck3VtbXiBp80AaBFCszOCb2XpJ/y7pb8zsj2b2A0k/k3Srme2X\nNKv2O4AhIvNQ393vrlP6TpN7aYnJkycn64cOHWpPI3Vkfcchq172+wVs2bIlWc86Dcw7Tp91v4JH\nH3001+sPVXxzDwiI4AMBEXwgIIIPBETwgYAIPhAQwQcCOu/vq//RRx8l62vWrEnW169fn6zv2bMn\nWT916lSynlfWOPW8efOS9bz31d+1a1eyfueddybrX3zxRbI+evToZH327NnJetY4/VVXXZWsD2Xc\nVx9ALwQfCIjgAwERfCAggg8ERPCBgAg+ENB5P47fam+99Vayvn379mT9qaeeStaPHUve1SxzHDyv\nBuZdyPX6Wd9D2Lw5fXOnWbNm5dr++YxxfAC9EHwgIIIPBETwgYAIPhAQwQcCIvhAQIzjl1zW9e6L\nFi1K1rPua5+l1eP4U6dOTda3bduWrI8ZMyZZHzFixIB7Ol8wjg+gF4IPBETwgYAIPhAQwQcCIvhA\nQAQfCGh41gPMbIWk2ZI63f3a2rolku6XdPZi8Yfd/ZWWdRnYlClTkvVPP/20TZ20xr59+5L1CRMm\nJOu33nprsv7KK/xv2Z9G9virJH2vn/VPu/u3aj+8u8AQkhl8d39T0if9lPJ9ZQtAYfKc4z9gZjvN\n7Jdm9rWmdQSg5TLP8ev4haTH3N3N7J8kPS3pvnoP7ujoOLdcqVRUqVQGuVkA9VSrVVWr1YYe29BF\nOmZ2haTfnf1wr9Farc5FOjmcPHkyWc+62eSOHTtybb/VF+nkxYd79TXjIh1Tj3N6Mxvfo/Z9SbsH\n3x6AdmtkOG+9pIqkr5vZHyUtkfRtM7teUpekg5J+2MIeATQZ1+OX3H331f3oRJK0atWqXK+fNX/9\nxo0bc73+ypUrk/XFixcn68ePH8+1/fHjxyfrb7/9drJ++eWX59p+kbgeH0AvBB8IiOADARF8ICCC\nDwRE8IGACD4QEOP4BXvxxReT9fnz5yfrX331VbI+d+7cZP2xxx5L1q+++upkPa/Tp08n6y+88EKy\nntX/wYMHk/Wscfrnn38+Wb/99tuT9SIxjg+gF4IPBETwgYAIPhAQwQcCIvhAQAQfCIhx/BY7cOBA\nsn7dddcl66dOnUrWJ02alGv7Q33++MOHDyfrM2fOTNbff//9ZH3kyJHJetb7W+T1/IzjA+iF4AMB\nEXwgIIIPBETwgYAIPhAQwQcCGuzceWjQmTNnkvWscfosF1xwQbI+1Mfps0ycODFZz5pLLmsex6xx\n/htvvDFZP3LkSLJeFPb4QEAEHwiI4AMBEXwgIIIPBETwgYAIPhBQ5ji+mU2StEbSOEldkpa7+7+Y\n2RhJv5Z0haSDku5y989a2CswYFn3K7jjjjuS9WXLliXrnZ2dA+6pDBrZ438laaG7Xy3pbyU9YGZT\nJT0kaau7f1PSNkmLW9cmgGbKDL67H3X3nbXlE5L2SpokaY6k1bWHrZaUnrIFQGkM6BzfzCZLul7S\n7yWNc/dOqfuPg6SxzW4OQGs0/F19M7tQ0gZJP3H3E2bW90Z6dW+s19HRcW65Uqlkfj8awMBVq9XM\naxPOaij4ZjZc3aFf6+6ba6s7zWycu3ea2XhJx+o9v2fwAbRG353q0qVL6z620UP9lZL2uHvPjzhf\nknRvbXmBpM19nwSgnBoZzpsu6e8lvWtmf1D3If3Dkn4u6Tdm9g+SDkm6q5WNAmiezOC7+79JqnfR\n93ea287558orr0zWFyxYkKyvXr06W
c8aR77nnnuS9WeeeSZZv+iii5L1op0+fTpZ37NnT7K+cePG\nXNt/8MEHcz2/KHxzDwiI4AMBEXwgIIIPBETwgYAIPhAQwQcCslbPXW9m3uptDGVffvllsj53bvqi\nxy1btuTa/k033ZSsX3PNNcn6448/nqwPH55v6oZnn302Wd+1a1eyvmnTplzbHzVqVLKedd/9Sy+9\nNNf28zAzubv1V2OPDwRE8IGACD4QEMEHAiL4QEAEHwiI4AMBMY5fcvv370/Wn3jiiWR93bp1ubbf\n1dWVrA8bNrT3HVOmTEnWV6xYkazPmDGjme00FeP4AHoh+EBABB8IiOADARF8ICCCDwRE8IGAGMcf\n4rLuK//yyy+3qZOhafr06cn6ZZdd1qZOmo9xfAC9EHwgIIIPBETwgYAIPhAQwQcCygy+mU0ys21m\n9l9m9q6Z/bi2fomZfWBm/1n7ua317QJohsxxfDMbL2m8u+80swslvS1pjqS/k/Rnd3864/mM4wMF\nSI3jZ8524O5HJR2tLZ8ws72SJp597aZ1CaBtBnSOb2aTJV0v6T9qqx4ws51m9ksz+1qTewPQIg0H\nv3aYv0HST9z9hKRfSPprd79e3UcEyUN+AOXR0MRmZjZc3aFf6+6bJcndj/d4yHJJv6v3/I6OjnPL\nlUpFlUplEK0CSKlWq6pWqw09tqGLdMxsjaSP3H1hj3Xja+f/MrOfSprm7nf381w+3AMKkPpwr5FP\n9adLel3Su5K89vOwpLvVfb7fJemgpB+6e2c/zyf4QAFyBb8JGyf4QAG4LBdALwQfCIjgAwERfCAg\ngg8ERPCBgAg+EBDBBwIi+EBABB8IiOADARF8IKC2B7/R64WLQn/5lLm/Mvcmtbc/gt8H/eVT5v7K\n3Jt0ngcfQPEIPhBQW27E0dINAKirsDvwACgfDvWBgAg+EFDbgm9mt5nZPjN7z8wWtWu7jTKzg2a2\ny8z+YGbbS9DPCjPrNLN3eqwbY2avmdl+M3u1yNmL6vRXmolU+5ns9R9r60vxHhY9GW1bzvHNbJik\n9yTNknRE0g5J89x9X8s33iAz+29JN7r7J0X3IklmNkPSCUlr3P3a2rqfS/ofd//n2h/PMe7+UIn6\nW6IGJlJth8Rkrz9QCd7DvJPR5tWuPf7Nkg64+yF3PyPpV+r+R5aJqUSnPu7+pqS+f4TmSFpdW14t\naW5bm+qhTn9SSSZSdfej7r6ztnxC0l5Jk1SS97BOf22bjLZd/6NPlPSnHr9/oP//R5aFS3rVzHaY\n2f1FN1PH2LOTltRmMRpbcD/9Kd1Eqj0me/29pHFlew+LmIy2NHu4Epju7jdJul3db/yMohtqQNnG\nYks3kWo/k732fc8KfQ+Lmoy2XcE/LOkbPX6fVFtXGu7+Ye2/xyX9Vt2nJ2XTaWbjpHPniMcK7qcX\ndz/eY9qk5ZKmFdlPf5O9qkTvYb3JaNvxHrYr+DskTTGzK8xshKR5kl5q07Yzmdmo2l9emdloSd+V\ntLvYriR1n+v1PN97SdK9teUFkjb3fUKb9eqvFqSzvq/i38OVkva4+7Ie68r0Hv5Ff+16D9v2zb3a\nsMQydf+xWeHuP2vLhhtgZleqey/v6p46fF3R/ZnZekkVSV+X1ClpiaRNkv5V0l9JOiTpLnf/tET9\nfVsNTKTapv7qTfa6XdJvVPB7mHcy2tzb5yu7QDx8uAcERPCBgAg+EBDBBwIi+EBABB8IiOADARF8\nIKD/A/MRb6AUm78tAAAAAElFTkSuQmCC\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "# Create some wrappers for simplicity\n",
+ "def conv2d(x, W, b, strides=1):\n",
+ " # Conv2D wrapper, with bias and relu activation\n",
+ " x = tf.nn.conv2d(x, W, strides=[1, strides, strides, 1], padding='SAME')\n",
+ " x = tf.nn.bias_add(x, b)\n",
+ " return tf.nn.relu(x)\n",
+ "\n",
+ "\n",
+ "def subsampling(x, k=2):\n",
+ " # MaxPool2D wrapper\n",
+ " return tf.nn.max_pool(x, ksize=[1, k, k, 1], strides=[1, k, k, 1],\n",
+ " padding='SAME')\n",
+ "\n",
+ "\n",
+ "# Create model\n",
+ "def conv_net(x_in, weights, biases, dropout):\n",
+ " # Reshape input picture\n",
+ " x_in = tf.reshape(x_in, shape=[-1, 28, 28, 1])\n",
+ "\n",
+ " # Convolution Layer 1\n",
+ " conv_layer_1 = conv2d(x_in, weights['wc1'], biases['bc1'])\n",
+ " # Subsampling\n",
+ " conv_layer_1 = subsampling(conv_layer_1, k=2)\n",
+ "\n",
+ " # Convolution Layer 2\n",
+ " conv_layer_2 = conv2d(conv_layer_1, weights['wc2'], biases['bc2'])\n",
+ " # Subsampling\n",
+ " conv_layer_2 = subsampling(conv_layer_2, k=2)\n",
+ "\n",
+ " # Fully connected layer\n",
+ " # Reshape conv_layer_2 output to fit fully connected layer input\n",
+ " fully_connected_layer = tf.reshape(conv_layer_2, [-1, weights['wd1'].get_shape().as_list()[0]])\n",
+ " fully_connected_layer = tf.add(tf.matmul(fully_connected_layer, weights['wd1']), biases['bd1'])\n",
+ " fully_connected_layer = tf.nn.relu(fully_connected_layer)\n",
+ " # Apply Dropout\n",
+ " fully_connected_layer = tf.nn.dropout(fully_connected_layer, dropout)\n",
+ "\n",
+ " # Output, class prediction\n",
+ " prediction_output = tf.add(tf.matmul(fully_connected_layer, weights['out']), biases['out'])\n",
+ " return prediction_output\n",
+ "\n",
+ "# Store layers weight & bias\n",
+ "weights = {\n",
+ " # 5x5 convolutional units, 1 input, 32 outputs\n",
+ " 'wc1': tf.Variable(tf.random_normal([5, 5, 1, 32])),\n",
+ " # 5x5 convolutional units, 32 inputs, 64 outputs\n",
+ " 'wc2': tf.Variable(tf.random_normal([5, 5, 32, 64])),\n",
+ " # fully connected, 7*7*64 inputs, 1024 outputs\n",
+ " 'wd1': tf.Variable(tf.random_normal([7*7*64, 1024])),\n",
+ " # 1024 inputs, 10 outputs (class prediction)\n",
+ " 'out': tf.Variable(tf.random_normal([1024, n_classes]))\n",
+ "}\n",
+ "\n",
+ "biases = {\n",
+ " 'bc1': tf.Variable(tf.random_normal([32])),\n",
+ " 'bc2': tf.Variable(tf.random_normal([64])),\n",
+ " 'bd1': tf.Variable(tf.random_normal([1024])),\n",
+ " 'out': tf.Variable(tf.random_normal([n_classes]))\n",
+ "}\n",
+ "\n",
+ "# Construct model\n",
+ "pred = conv_net(X, weights, biases, keep_prob)\n",
+ "\n",
+ "# Define loss and optimizer\n",
+ "cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(pred, Y))\n",
+ "optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)\n",
+ "\n",
+ "# Evaluate model\n",
+ "correct_pred = tf.equal(tf.argmax(pred, 1), tf.argmax(Y, 1))\n",
+ "accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))\n",
+ "\n",
+ "# Initializing the variables\n",
+ "init = tf.initialize_all_variables()\n",
+ "\n",
+ "# Launch the graph\n",
+ "with tf.Session() as sess:\n",
+ " sess.run(init)\n",
+ " step = 1\n",
+ " # Keep training until reach max iterations\n",
+ " while step * batch_size < number_iterations:\n",
+ " batch_x, batch_y = mnist.train.next_batch(batch_size)\n",
+ " test = batch_x[0]\n",
+ " fig = plt.figure()\n",
+ " plt.imshow(test.reshape((28, 28), order='C'), cmap='Greys',\n",
+ " interpolation='nearest')\n",
+ " # Run optimization op (backprop)\n",
+ " sess.run(optimizer, feed_dict={X: batch_x, Y: batch_y,\n",
+ " keep_prob: dropout})\n",
+ " if step % steps == 0:\n",
+ " # Calculate batch loss and accuracy\n",
+ " loss, acc = sess.run([cost, accuracy], feed_dict={X: batch_x,\n",
+ " Y: batch_y,\n",
+ " keep_prob: 1.})\n",
+ " print \"Iter \" + str(step*batch_size) + \", Minibatch Loss= \" + \\\n",
+ " \"{:.6f}\".format(loss) + \", Training Accuracy= \" + \\\n",
+ " \"{:.5f}\".format(acc)\n",
+ " step += 1\n",
+ "\n",
+ " # Calculate accuracy for 256 mnist test images\n",
+ " print \"Testing Accuracy:\", \\\n",
+ " sess.run(accuracy, feed_dict={X: mnist.test.images[:256],\n",
+ " Y: mnist.test.labels[:256],\n",
+ " keep_prob: 1.})"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 2",
+ "language": "python",
+ "name": "python2"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.11+"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/6/convolution.ipynb b/Building-Machine-Learning-Projects-with-TensorFlow-master/6/convolution.ipynb
new file mode 100755
index 000000000..1c93ea19e
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/6/convolution.ipynb
@@ -0,0 +1,70 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "import tensorflow as tf\n",
+ "\n",
+ "#Generate the filename queue, and read the gif files contents\n",
+ "filename_queue = tf.train.string_input_producer(tf.train.match_filenames_once(\"data/test.gif\"))\n",
+ "reader = tf.WholeFileReader()\n",
+ "key, value = reader.read(filename_queue)\n",
+ "image=tf.image.decode_gif(value)\n",
+ "\n",
+ "#Define the kernel parameters\n",
+ "kernel=tf.constant(\n",
+ " [\n",
+ " [[[-1.]],[[-1.]],[[-1.]]],\n",
+ " [[[-1.]],[[8.]],[[-1.]]],\n",
+ " [[[-1.]],[[-1.]],[[-1.]]]\n",
+ " ] \n",
+ " )\n",
+ "\n",
+ "#Define the train coordinator\n",
+ "coord = tf.train.Coordinator()\n",
+ "\n",
+ "with tf.Session() as sess:\n",
+ " tf.initialize_all_variables().run()\n",
+ " threads = tf.train.start_queue_runners(coord=coord)\n",
+ " #Get first image\n",
+ " image_tensor = tf.image.rgb_to_grayscale(sess.run([image])[0])\n",
+ " #apply convolution, preserving the image size\n",
+ " imagen_convoluted_tensor=tf.nn.conv2d(tf.cast(image_tensor, tf.float32),kernel,[1,1,1,1],\"SAME\")\n",
+ " #Prepare to save the convolution option\n",
+ " file=open (\"blur2.png\", \"wb+\")\n",
+ " #Cast to uint8 (0..255), previous scalation, because the convolution could alter the scale of the final image\n",
+ " out=tf.image.encode_png(tf.reshape(tf.cast(imagen_convoluted_tensor/tf.reduce_max(imagen_convoluted_tensor)*255.,tf.uint8), tf.shape(imagen_convoluted_tensor.eval()[0]).eval()))\n",
+ " file.write(out.eval())\n",
+ " file.close()\n",
+ " coord.request_stop()\n",
+ "coord.join(threads)"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 2",
+ "language": "python",
+ "name": "python2"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.11+"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/6/image_subsampling.ipynb b/Building-Machine-Learning-Projects-with-TensorFlow-master/6/image_subsampling.ipynb
new file mode 100644
index 000000000..4c3ef9659
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/6/image_subsampling.ipynb
@@ -0,0 +1,67 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 35,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "import tensorflow as tf\n",
+ "\n",
+ "#Generate the filename queue, and read the gif files contents\n",
+ "filename_queue = tf.train.string_input_producer(tf.train.match_filenames_once(\"data/test.gif\"))\n",
+ "reader = tf.WholeFileReader()\n",
+ "key, value = reader.read(filename_queue)\n",
+ "image=tf.image.decode_gif(value)\n",
+ "\n",
+ "#Define the coordinator\n",
+ "coord = tf.train.Coordinator()\n",
+ "\n",
+ "def normalize_and_encode (img_tensor):\n",
+ " image_dimensions = tf.shape(img_tensor.eval()[0]).eval()\n",
+ " return tf.image.encode_jpeg(tf.reshape(tf.cast(img_tensor, tf.uint8), image_dimensions))\n",
+ "\n",
+ "with tf.Session() as sess:\n",
+ " maxfile=open (\"maxpool.jpeg\", \"wb+\")\n",
+ " avgfile=open (\"avgpool.jpeg\", \"wb+\")\n",
+ " tf.initialize_all_variables().run()\n",
+ " threads = tf.train.start_queue_runners(coord=coord)\n",
+ " \n",
+ " image_tensor = tf.image.rgb_to_grayscale(sess.run([image])[0])\n",
+ " \n",
+ " maxed_tensor=tf.nn.avg_pool(tf.cast(image_tensor, tf.float32),[1,2,2,1],[1,2,2,1],\"SAME\")\n",
+ " averaged_tensor=tf.nn.avg_pool(tf.cast(image_tensor, tf.float32),[1,2,2,1],[1,2,2,1],\"SAME\")\n",
+ " \n",
+ " maxfile.write(normalize_and_encode(maxed_tensor).eval())\n",
+ " avgfile.write(normalize_and_encode(averaged_tensor).eval())\n",
+ " coord.request_stop()\n",
+ " maxfile.close()\n",
+ " avgfile.close()\n",
+ "coord.join(threads)"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 2",
+ "language": "python",
+ "name": "python2"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.11+"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/CH7_time_series.ipynb b/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/CH7_time_series.ipynb
new file mode 100755
index 000000000..bf3a59454
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/CH7_time_series.ipynb
@@ -0,0 +1,289 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "Using TensorFlow backend.\n"
+ ]
+ }
+ ],
+ "source": [
+ "%matplotlib inline\n",
+ "%config InlineBackend.figure_formats = {'png', 'retina'}\n",
+ "\n",
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "import tensorflow as tf\n",
+ "from matplotlib import pyplot as plt\n",
+ "\n",
+ "\n",
+ "from tensorflow.python.framework import dtypes\n",
+ "from tensorflow.contrib import learn\n",
+ "\n",
+ "from keras.models import Sequential \n",
+ "from keras.layers.core import Dense, Activation \n",
+ "from keras.layers.recurrent import LSTM\n",
+ "from keras.layers import Dropout"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 2,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAwIAAAH/CAYAAADkL2pWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAWJQAAFiUBSVIk8AAAIABJREFUeJzsnXnYXDW9x795uxdKKVDBFq4UpBTkgiK77CAg+yYVAakV\nZBMRFBH02rK5sV1Q2aUFREGQC7JDWcqmVKkCCkKhQKVlLW3pvub+kTdMJpOcNcnJmfl9nud9zrxn\n5pyTyZzk5JvfEsY5B0EQBEEQBEEQnUVX1QUgCIIgCIIgCCI8JAQIgiAIgiAIogMhIUAQBEEQBEEQ\nHQgJAYIgCIIgCILoQEgIEARBEARBEEQHQkKAIAiCIAiCIDoQEgIEQRAEQRAE0YGQECAIgiAIgiCI\nDoSEAEEQBEEQBEF0ICQECIIgCIIgCKIDISFAEARBEARBEB0ICQGCIAiCIAiC6EBICBAEQRAEQRBE\nB0JCgCAIgiAIgiA6ECdCgDF2CGPsMsbY44yxOYyxFYyxGxI+35sxdhJj7BnG2PuMsbmMsRcZY5cy\nxv4r4biju4+ZyxibzRh7lDG2j4vvQBAEQRAEQRCdBOOclz8JY38HsCmAeQDeAjACwE2c868ZPtsD\nwEQA2wF4CcAEAIsBbAlgJwCzAWzHOf+3dtyFAE4D8B8AtwHoDeArAFYH8C3O+eWlvwhBEARBEARB\ndAiuhMBOAN7inL/W/fpRAL+1CIFDAfwBwEOc8z2198YC+DGA6zjnxyj7twXwFIApALbknH/Uvf+/\nAEwG0B/ACM75tNJfhiAIgiAIgiA6ACeuQZzziZzz1zJ+fD0AHMC9hvfu7N4O1vaf0H3M+VIEdF93\nGoBfA+gD4Ou5Ck0QBEEQBEEQHUwVwcL/AsAAfIkxxrT39oMY8D+k7d+le/uA4Xz3dZ9vV5eFJAiC\nIAiCIIh2pmfoC3LO72GM/RHAwQBeYIxNALAEwBYAvgDgMgAf+/szxvoDGApgLuf8XcMpp3Rvh3st\nOEEQBEEQBEG0EcGFAABwzr/MGBsD4IcANlLeehjA7znnK5R9A7u3cyynk/tXdVtKgiAIgiAIgmhf\nggsBxlgfADcC2BPAiQD+BGABhDXglwCeYIwdyjm/y9P1y0dHEwRBEARBEERGOOe6O3wUVBEjcCaA\nQwGcxTm/lnP+Hud8Huf8ge79vQBcqnxezvgPhBm5f7aX0hIEQRAEQRBEG1KFa9A+EAHBj+lvcM6f\nZ4zNAvApxtggzvkszvkCxth0AEMYY2sa4gQ26N6+kqcQLtKmEgLGGNWnI6gu3UL16RaqT3dQXbqF\n6tMtVJ/uaM2LExdVWAT6dG/1FKFgjPUGMKD73yXKW490b/cynG/v7u3DTkpHEARBEARBEB1AFULg\nCYh0n2d1D/xVzoawUkzinM9X9l/ZfcwPGWMfBwUzxtYFcBKARQDG+ysyQRAEQRAEQbQXrlYWPgDA\ngd3/rgURCDwVYtAPAB9wzk/v/uwQAH8GsDaANwHcD2AhRLDwVhCBw7tyzidp17gQwKkApgO4DUBv\nACMBrAbgW5zzKzKWlQPkGuQSMiG6g+rSLVSfbqH6dAfVpVuoPt1C9ekO6RoUa7CwKyEwBsCPEz7y\nBud8feXzqwM4AyJeYBiEZeJtCPeeX3DOjf7+jLGvQVgANgawAsCzAC7gnN+Xo6wkBBxDHYY7qC7d\nQvXpFqpPd1BduoXq0y1Un+7oCCFQJ0gIuIc6DHdQXbqF6tMtVJ/uoLp0C9WnW6g+3RG7EKgiRoAg\nCIIgCIIgiIohIUCUZsyYMVUXoW2gunQL1adbqD7dQXXpFqpPt1B9dg7kGkQQBEEQBEEQHiDXIIIg\nCIIgCIIgoqOKlYUJgiAIgiCCE/sqr0R9aBfPErIIEARBEARBEEQHQhYBgiAIgiA6inaZzSXC025W\nJbIIEARBEARBEEQHQkKAIAiCIAiCIDoQEgIEQRAEQRAE0YGQECAIgiAIgiCIDoSEAEEQBEEQBEF0\nICQECKLGLFgAvPhi1aUwwznw/PPA0qVVl4QgCIIgCBMkBAiixmy1FfCZzwCPP151SVr5zW+AzTYD\nDjus6pIQBEEQBGGChABB1Jh//Uts77mn2nKYuO46sb3jjmrLQRAEQRCEGRICBNEG9IxwacAu6l0I\ngiCICHnzzTfR1dWFHj16VF2UyqFHNUG0ATH2ZW22+CJBEETHM2rUKHR1dWHXXXetuiiEI0gIEEQb\nQEKAIAiC8A1jDIw697aChABBtAHkGkQQBEGEgHNedREIh9CjmiDagBiFAE0aEQRBEETckBAgiDYg\nRtcgsggQBEEQADB16lQcd9xxWH/99dGvXz+sttpq2GmnnfCb3/wGK1asMB4zb948jB8/HiNHjsR/\n//d/Y9CgQejfvz822GADHHfccXj11VcTr7l48WKce+652GijjdCvXz8MGTIEhx9+OF566SUfX7G2\nRDiPSBBEXkgIEARBEDFy991347DDDsOiRYvAGMPAgQOxYMECPPnkk3jiiSdwyy234M4770S/fv2a\njrv++utx8skngzGGHj16YODAgVi0aBGmTp2K1157Db/73e9w5513GgOX58+fj9122w2TJk0CYwy9\ne/fGwoUL8Yc//AH33HMPrr766lBfP3roUU0QbQC5BhEEQRCxMXXqVBx++OFYvHgxdtllF7z88sv4\n8MMPMXfuXFx11VXo27cvHn74YZxyyiktx66xxhr40Y9+hEmTJmHBggV4//33sXDhQrz00ks48sgj\nMX/+fHz1q1/FwoULW479zne+g0mTJqF///4YP3485s2bh1mzZuG5557DRhtthBNOOCHE168FJAQI\nog0gIUAQBEHExvnnn4/58+dj/fXXxz333INPf/rTAIBevXrhmGOOwaWXXgrOOa677jpMnTq16diR\nI0finHPOwec//3n0VB5yw4cPxw033IDdd98d77//Pm677bam46ZNm4Zx48aBMYYrrrgCRx111Mfr\nBWyyySa4//770bt3b8/fvD6QECAqYfx4YI89gHnzqi5Je0CuQQRRL8aNE33gggVVl4TIC2Px/MXO\n7bffDsYYTjvtNPTt27fl/WOOOQZDhw4F57xlQJ/GPvvsA845nnrqqZZrrlixAkOGDMFRRx3Vctyg\nQYPIIqBAj2qiEr7+deChhwBy03NDjEKgDg8pgqiK0aOpDyTam6lTp2LOnDkAgJ133tn4GcYYdt55\nZ3DOMXny5Jb3p0+fjjPOOANbbLEFBg0ahJ49e6KrqwtdXV049dRTwRjDjBkzmo6R59lhhx2sZdtp\np50Kfqv2I0KHAqKTWLy46hK0BzG6BpFFgCDSmT+/6hIQeaE0+tl4//33P349dOhQ6+fWXnvtls8D\nwMSJE7Hffvth3rx5Hy9iNnDgwI8tCwsXLsRHH32E+Vojev/998EYw5AhQ6zXTCpPp0GPaqJSYhzA\n1pEYLQIkBAgiHRpUEp3AokWLcn1+2
bJlHwcE77HHHnjiiSewcOFCfPjhh5gxYwZmzJiBiy66CJxz\nWuCsJDQMIyolxgFsXVD7vhjrkVyDCCIdSwp1gqg9gwcP/vj1tGnTmv5Xeeutt1o+/+c//xnTp0/H\n6quvjjvuuMMYX/Duu+9ar8s5b3EZUkl6r9OgOTuiUsgiUJxlyxqvY5wQISFAEOnE2HYJwgXrrbce\nVl11VQDAo48+avwM5xyPPfYYGGPYfPPNP94vxcHw4cONIgAAJkyYYNwvz/Pkk09ayzZx4sT0L9Ah\nkBAgKoWEQHFiFwLkGkQQ6ZBFgGhnDj74YHDOcemllxrdg6655hpMnz4djDF8+ctf/nj/wIEDAQBT\npkzBkiVLWo578MEH8eijj34cO6Bfs6urC9OnT8dNN93U8v6sWbNw5ZVXlvlabQU9qolKidGlpS4s\nXdp4HaMQIIsAQaQTY9sliDSWLl2KmTNnJv4tW7YMZ511FlZaaSXMmDEDe++9N1555RUAwJIlS3DN\nNdfglFNOAWMMxxxzDIYNG/bx+b/whS+gf//+mDlzJo466ii88847AESswXXXXYdDDz0Ua6yxhrFs\n//Vf/4XRo0eDc47jjjsON954I5Z1z5y98MIL2GuvvbCYMpV8DAkBolJICBQndiFAFgGCSCfGtksQ\naTz11FMYPHhw4t/TTz+N9dZbD7///e/Rt29fTJw4ESNGjMBqq62GAQMG4LjjjsOSJUuw++6745JL\nLmk6/8CBA/HTn/4UAHDrrbdiyJAhGDRoEFZZZRUcc8wxGD58OMaMGWMNFL7kkkuwzTbbYOHChTj6\n6KMxYMAADBo0CJttthleeuklXHHFFd7rqC7Qo5qoFHINKo4qBGJ0LyAhQBDpxNh2CSIJxljqX5fy\nANh3333xwgsv4Nhjj8WwYcOwcOFCrLTSSthhhx1wzTXX4P7770e/fv1arnPyySfj9ttvx/bbb4+V\nVloJy5cvx0YbbYRzzz0XTz31FFZeeeWPr6ez0kor4bHHHsM555yDDTfcEIwx9OvXD4cffjj++te/\nYptttvn4u3Q6rNPSLjHGOABKN1Uxsu3deCNw5JHVlqWuvPUWsM464vW4ccCoUZUWp4WvfAW45Rbx\nmpobQTQj+8AzzwR+8pNqy9JJyIEfjQGIouS9h5TPR6k6aM6OqBRyDSpO7K5BNNFCxMKsWcCttwKG\nmMPKibHtEgTROZAQICqFXIOKQ1mDCCIbhx4KHHYYMHZs1SVphVyDCIKoEnpUE5VCFoHiLF/eeB3j\nYIIsAkQsPPKI2BoyCVZOjCKeIIjOgYQAEZzYV8StC+rgP8bBBFkEiNiYPbvqErQSY9slCKJzoEc1\nEZzYXVrqgmoRiLEeSQgQsfHRR1WXoJUYrXkEQXQO9KgmghN7kGtdiN0iQK5BRCysskrVJbATY9sl\nCKJzICFABIcsAm5QhUBss4qTJgHjx1ddCnc88ABwwglxZp1Ruf564Mc/rroU8fDEE8BBBzVbAtSJ\niBigPpAgiCqhnC1EcMgi4IaYXYO23rrqErhlr73EdpNNgJNOqrYsSci1JL76VWDEiEqLEgUnnQS8\n8ELzvkmTgC98oZrymIhNxBME0VmQRYAIDgkBN8TuGtSOTJ9edQmyMX9+1SWIg6lTW/fFZtWhtksQ\nRJWQECCCowoBmg0rDgmB8KhubTFD8RkCU7uIra3EVh6CIDoLEgJEcChGwA2xryPQjsTmX26DhICd\n2NpKbOUhCKKzICFABIdcg9xAFoHw1EUIEHZiayuxlYcgiM6ChAARHBICbmgHIXDzzcDQocBzz4W9\n7je/CWy+ef6BfUjXoKefBoYMAe66K/+xZBEQmOohthn4OrTdDz8E1lsPOO+8qktSDrWuGWP0R3+F\n/toNEgJEcGLOdlMn2qEeDz8cmDED+PrXw173mmuAv/9dZJDJQ0iLwGGHAW+/Dey/f/5j2/BZ5YzY\n2kpswsTElVcCr78O/M//VF2ScixYUHUJCCI+KH0oERz1QRzbQ7lOxLyOQF6qGriqYioLIYVAjx7h\nrtWumPqX2NpKHfrA9hKWosJD1/vKKzeyeVX9m8vf83vfAy64oNqyENVDFgEiOO00gK2SdnANqpq8\nQiCka1CfPsWPba+Bm1ti63Pq0Hbb5X6qchKqDr8z0ZmQECCCQxYBN9TJNSht8EUWgVZ69y5+bLsM\n3MpiqofY2kpswsREu9xPNAnVTGxtgagGEgJEcOowkz1jhviLmTrUoyTvgDsUL74IzJ2b/fMhhcCr\nr/r9fFE4B55/Pr6FubIS2wAw9rYLxCcE3ngD+OCD/MeRECCIVkgIEMGJ3SLAuchkM3RonOWT1Omh\nFqsQOOUUYPjw7J8PJQQmTAAWL87++TffBDbYwF95VG64AdhsM+Dgg8NcrwymdhFbm46tPCZiEgJz\n5gDDhgGDB+c/Vu2HYu2TQlKHe4/wDwkBIjixz2SrM50xryRbJ9egPIPa0LzzTvbPdgXqMe+4I9/n\n//a35v993g/jxontPff4u4YrTFaL2ERzbOUxEZMQeOut4sfWafKEIEJBQoAITuwWATXFXMxCIHZB\npZKWti+mgYaOWrehhEDe6+j3qc9BTuz3mmT5cvOsb2zlj608JkLd91noWSLXYZVCoA6/M9GZRNS8\niU4h9lmZhQsbr2NeSbZOQkCtUxMxl7+KezRmIVAXbG03trqJ+d6XxCQEyqTVJdcggmglouZNdAqx\nD2DrIgTUB1lsgxudNCEQM+o9EGrwkNdCQhaBVmzBzLGVP/a2C8RlsSsjBGKfhApNbG2BqAYSAkRw\nyDXIDbEKqtmzW/eluQY9+6wIeI0RVQiEGjyUtQj4vB9iuteSsMWlxDYArEN9xioE8tYdCYFm6nDv\nEf4hIUAEJ9YBrKQuFoFY63HUqNZ9WSwC//3fzoviBHWQHWrwkHfWkywCrdgsArENAOtQnzEJAZW8\nEzVVugbV4XcmOhMnQoAxdghj7DLG2OOMsTmMsRWMsRtSjulijB3DGJvIGPuQMbaAMfYaY+xmxtin\nLccczRh7hjE2lzE2mzH2KGNsHxffgQhH7BaBugiBWLMG3Xln674sQiBPPv+QVOEaFHOMQEz3WhK2\nQWJs5Y9NmJiISQiov1/e/pksAs3E1haIaigRf9/EjwBsCmAegLcAjEj6MGNsJQB/ArALgL8DGA9g\nEYChAHYAMBzAq9oxFwI4DcB/AFwNoDeArwC4izH2Lc755Y6+C+GZWGeyJeQa5J4016CYqcI1SB94\nLV+ebCUg16BWbKIttgFgHeozJiGg/n55+2cSAgTRiish8B0Ab3HOX2OM7QTg0ZTPXw1gZwDf5Jxf\nq7/JGOuh/b8thAiYAmBLzvlH3fsvADAZwIWMsbs559NKfxPCO+qDL8bOuC4WgTo91GIPFubcPtip\nwjVItwgsXgz072//PGUNaqUuFoHYymMipqxBZSwClDWomTrce4R/nDRvzvlEzvlrWT7LGPscgM
MB\n3GwSAd3n05voCQA4gPOlCOj+3DQAvwbQB8DXi5SdCE/sM9l1EQKxugaZiF0I2PzJgThcg5LKB5Br\nkAmbEIhNJMVWHhMxWQTq6hpUl3ZDdB5V6PwjIAb1NzPGVmGMHckY+wFj7FjG2PqWY3bp3j5geO8+\nAAzArh7KSngg9hiBIq5BV1whgmRDPlxsguquu4AvfCH/6rQ+0V2D5s0DDj64mrKYyCoEFiwAvvxl\n4JZb/JYnrxDQBQq5BtlFmyz/ihXA0UcDV14ZrkxJ5YmZLELg1VeB/fYTGcB8otZXHVyDxowBzjgj\nzt85b5n++U9g332Bf/3LT3lsvP02sP/+wMSJYa/bKbhyDcrDFt3bdQFcB2A19U3G2BUATuZc3KKM\nsf4QsQNzOefvGs43pXs73EtpCefEbhFQB11ZZ5xOPFFsR48GdtzRfZlM2Orxu98FpkwBvvEN4MAD\nw5QljUWLmv+/7DLg//6vmrKYSBpoqwPKZ58Vf7fdBowc6a88+sArTQjo92kdZpl9k2YReOAB4IYb\nxN/xx4crl608MZPFNWjkSGDyZFGvafdrGdT6it01iHPgnHP8XycUu+wCfPABMHUq8OKL4a77ne+I\nCa677opzzFB3qrAIfAJiBv9iAI9ABBYPALA7RIDwCQD+R/n8wO7tHMv55P5VnZeU8ELsQqCM6Tmk\nC4xtQTGZx//DD8OVJQ19YPDRR+bPhcB0zyUNXGJYWdiWE19CrkGtpMUIvPNOuLIkUQdf9SwWgRkz\nxNa3O2WdXIPS2m3d+OADsZ05M+x13zVNARPOqEIIyGu+BOArnPMpnPMFnPNHAXwZwm3oNMZYFdYK\nIgCxuwaVyUoREpugGjQofFlM5VDRB9q9evkviw1TGZMe2FXco2Vdg0gIpFsEYklXG3MfI4k1RiB2\n16DYs6UVbcurBp52rUufU1eqEAKzIQb7d0n3Hwnn/HkAr0NYCDbq3i1n/AfCjNxvWM/UDmPM+jd2\n7Ng8pyJyEnu2mzIzTiEfmDEKAVt96QPZnhXKfNM9F5tFQE8VmjdYmGIE0tOHzrHZmAPTLhaBUNQp\na1DsSRKKQkKglbFjx1rHlLFThRB4uXtrG7jP6t72AwDO+QIA0wGszBhb0/D5Dbq3r+QpBOfc+kdC\nwC+xWwTUMuWd0QmZZciWNUhNM6n75vvGVl/6jHuVQsB0z82fbx9gVHGP5o0RiMk1KJZMW7bZ4jlz\nxHeo0j1NhYRAPsrECISehIpdCJBFwB1jx461jiljpwohMAEiRmAT/Q3GWG80BvZvKG890r3dy3C+\nvbu3DzsqH+GZOsUIHHQQ8MYb2Y/dd99wmQ1sDzW1/KF9OW0Pvphcg0wDgM03B/bcU7wePRro3RuY\nPt3+ed/UNUbghz8Udffcc/6unxWbEPjBD4B99qnWNaiMe0sVxCQE1Lr71rfyHUuuQW4ILQQIv1Qh\nBP4IYAaAkYyxLbX3fgzh6vMI5/w9Zf+VEOLhh4yxj29Bxti6AE6CWJV4vL8iEy6pk0UAyJ9e8LTT\n3JUlCdvgX30dekaqDkLAds899JDYjhsntjfdJLYxCIGY0ocm8ZOfiO0vflHN9VWSBtj33VftbG3d\nhIBK1e6cat0980y+Y8k1qJmi/USfPm7LkUaM44R2womBnjF2AACZqHCt7u12jLHuRyo+4JyfDghX\nH8bYKAB3AXiCMXY7hOvP1gC2B/AOgKZkbpzzPzPGLgZwKoDnGWO3AegNYCREtqBv0arC9aFOFgEg\n/wMjlKnf5hpUZQyGbQYs9hgBE7JOKVi4mSz1EUO7TmuHVbowlfFzrwK9f6lypeEy9xa5BtWTGPqT\ndsbV4/izAL6m/M8BDOv+A4Sbz+kfv8n5BMbYVhBpQneDsAK8A+ByAOdxzlsSu3HOv8cYex7CAnAs\ngBUAngVwAef8PkffgwhA3SwCeR8YoYSATVCp+0P7H9sefLHHCCR9rooZ0LwxAiQEWpEz7QMHmgOD\n9TiQkO4vdRYCy5fXQ8inHUuuQcXbaej2HUN/0s44ac6c87MBnJ3zmBcAHJbzmBsA3JDnGCI+6pQ1\nCMg/mA5l6rfVY4xCoI4WAUkVDyH9mmlCQP9O9OBstMM11zQLAfVeXbYsrLtaXVIUS6rsU3TK3Nvk\nGtRMXYQA4ZcKDXxEp1I316D77suXfYdcg1q5+27gr39t/B9bsLBk0qTG6yotAvo104KFySLQihxg\nf+IT5vfVe3XpUrEg1oQJwIMPxrko1kcfifKF6F8mTmwsHgXY+5cqKDN4feSRxv9kEWjw8svACy9U\nXQo7MfQn7QwJASI4dXMNeuUV4KSTsh/fya5BNsG0ZAmw1VaN1Y71PPmSEPdD0jW23rrxWtZjFfeo\nPkgh16D8yDoZPNj8vm4RGDoU+OIXRfaoMWP8lq2IENh7b1G+X//aT5kkDzwA7LwzsOGGjX3tYBG4\n917gpz9t/E8xAg1GjAA23TT7hBe5BrUXJASI4MRuETA9IK67LvvxnSwE0gass7tXD7H97iHKm3cA\nUMUMqF4/aYNFvYxVC4EYkBaBlVcGxo8XLkIqukVARWaM8kUR16CnnhLbP/7RfXlUHntMbKVoB1pj\nBKpErbsRI7Ifp1oDgDDfI60/jI2sFgwSAu0FCQEiOHWzCOT9fBWuQVlEQQjSBqyyzLY6DhE4mTdY\nOAaLQNZ6lVS9snAM7VoOsHv0AI4+WvypzJvXeK3Xb+/efstWJljYd92agqZjiutSv3+eNJZ6pqMQ\n3yN2IaDfSzG0WxOxlqtdICFABCd2i0DeMukPlE62CKTNbsry2B7CIYRA1gGA/FwMMQJZ69V2vEvq\nJgRkYLo+wFUXFNNnQvv29VcuwN3quKGIySJQdKJD//1D1GNabE9s5E2tTLQHJASI4MQUeGYibyen\nPxg7WQhkdWGpUgjUIX1oWYtA1e0qhoGCrBMpBPQZYdUi8O67ze/5XjCpjEWg6vsxJiGQpyy6ECDX\nIFGXWS30VVryY+hP2hkSAkRw1IfK2WcD660nslTEQl0sAnmyBv3tb8Dpp/sPXksb1IwfL7Z1sAhI\nTPdDyBljQNTL0qXA97/f8BVP+nwVrkFvvunvmkVIswiobUEXAiFdg+bMAU45BZg/P/+xPlDraf58\n4HvfA559trHP1H4WLwbeaVn9xw9F3ZSqsAjYhMDDD/u/dlayirwq3cNICPiFhAARHL0Tef11kaUi\nFupoEUhbR2DLLYELLwR+9jO/ZUpzYbngArG1PUhCzKDlNX+bPu/7QWhyDfrVr0T9bb996+djcA3a\nd19/1yyCGiMAJK+Gqw9iQwu9yy4DHnoo27EhB0VjxgAXXQRcf31jn6l/u+yycGUqalGOKUZg9939\nXzsLnDf/nkn9d5VWIRICfiEhQAQn9kZdRyGQ1TXolVf8linrjH4dXIOS0of6/o1NFoF//cv++Rhc\ng/75z8brGNq4bhFIEgJVugZJsloEQv62kye37jPd+
2+84b0oH+MqRiBEPx17jADnxSwCJATaCxIC\nRHCq9l9Oo6xrUKiVQossKOZ7oJ3l/PrDJ+/xZXGRPjSUEOjfX2yXLhULSqV9XlJ11qAY0GMETNlw\nJDEIgazWMN/9p1pPMt1vyOun4SrGjLIGCbKmsiXXoPaFhAARnNgbdTtbBHyLlCznX7q0HhaBtPSh\nPh+G8txyQLpsWbIQiME1KO9nfJPHIqC7BvkWAqbfJ+ugMWTdzprVuq/qYOGiA9KYXINiQv09Y7UI\nEH4hIUAEp+oZpTTqKARsokAvSwwWgaVLq11HwFX6UJ+/s6wfGbSaZhEgIdCKHiOQZBEILQRM9ZPV\njSRk3ZosAlUPAuvkGlQHIUCuQQQJASI4sTbqFSuAPfYAzj0333GhhcDs2cBnPwtcc01j3w03NDJR\nxO4aVBeLgMRWVp/WFd0isGgR8Oc/N97femvgnHNaP2/73yWxtl/J7bcDG28MvPii+L8uMQKnnJJN\nDIR0DZozJ/z106B1BNxx7bXNKy5ndQ164AFgm238f7/vfU/8bs895/c6nQ4JASI4VT9IbPz73/bM\nHYxV4yJi4sorzR3juHGt5QntGlQHIZA3a5Dtd/cp+GQZpUVAD/KeNElkdLGVperBepXXP+QQ4KWX\ngFtvFf92wFhyAAAgAElEQVTLOkyyCHz4YfP/VbgGAcC99xY/1gdlLBe+qJMQqINF4OCDG6+zWgQA\n4JlngLvu8lMmyUUX+T0/ISAhQASn6kFKVs45R3R+vXuLMuurj0pCm0kXLTLvlyulJgkB3wPtLEJj\nyZJ6WATSFhQLKQTScrSTa5AdGXCdZBFQFxdL+6wLZP2stRbwP//T2J/lnqq6bn2vRZIGuQb5I6tF\nQFL1vUi4gYQAEZxYLQL6w79HD/HwGDxY/D9zpvm40ELAdj0pVMg1KJk6WQTkzLTuumL7vO1/l9Tt\n4d+vn9gmWQT0AZDvPkrWIWON8uU91hdJ9QRULwTaYUGxWMljEQDS7xWiHpAQIIJTFyEgO7nVVxdb\n3X1AEvr72DrrGCwCZYVAXRYUA8IKAdu9ZytL1elDYxILcqCdZ5Y/1IJxXV35VzGuuv+sWgjUaUGx\nqt2o8kJCoDMhIdCGzJhhdx8xsWIF8Oab/sqjE9MgQUVmF5HITm611cTWZhEI3dnbzLeyfLoQULPN\nxOAaNHMm8N575vdidA2yff6DD9yUR7J8uWiHnAOvvSb2ZR0k6g/wd98V/YBvFiwQbktvvdW831Rn\nnItVxKdNC7fWBpDNNUjHt5VPtQj06tXYn2VwWrUQsLlIhkIXAkuXAs8+m/6bhXQNevNNcf4iExuy\nnVTxnExql6b6IiHQHpAQaDPeeAMYOhQYMSL7MV/7GrDuusAf/+irVM1U/SCzkWYRsAmBfff1VyYT\ntvozCYEVK4CBAxv/xxAsvO22wCWXFD++LK4sAhtv7PZePvJI0Q6HD28EjWYNWtXLcf75oh+YOtVd\n+STqAGX99YFPfhJYZ5304848E1hvPeBTnwL22899uWxkcQ3SCeUa1NXVXJ9ZJnA63TVIT488ejSw\nxRYi61IefAmB++8X7figg4oJgWuuEe3k9NOdFy0Vsgh0JiQE2oyJE8U2zwz/TTeJ7ZVXui+PiVgt\nAnqnJv+XAwnbzP/rr/srkwlbZy0HETGsIzBkSGPfiSfmP94nriwCgNvBxM03i+2rrzb2ZRUCtnLI\n/sAlan2kBTGr/Pznjdf33++uPGnEaBGQgyrGmq+VZZAd0ppiomohoE90/Pa34rV8jtnQf39fzyGZ\nve2uu4rdRz/+sdhWkTEnb7AwCYH2gIRAm1GmYfrOlCGJ1SKgl0vWpTTdhxikZsHWWUuhEkPWoIsv\nbuzba69sM8ZAvSwCec5VlKKuQRLd3c0FdY0RiNEiwFhze84yyK66H4rNNUiS9vwKFSysXqeIEDAt\n4haKvBYBn2OGWMcJ7QgJgTajTMMMJQRiGiSo6OWKVQjYOusVK8R7MWQNUv2e8zwMY7QIVCkEiroG\nSXy06Vjbr40iFoGQrkFq+8gyyPbdRurkGlRGCPi6j9XrFLmP5ISO2oeGIiYhULXlq5MgIdBmlGmY\nPmYPTcSq9PVyybqUK5PG0jElddZ6jv6qFhTLGwCpH+8TV+lD85yrKKpFQN6HJkJaBLJgqrNQEw06\n0iKQ57eKzTVI/UzVmWjaRQiEeA6VmUQYNMhtWbIQk2tQLM/bToCEQJuhNszXXwfOO0+smJuFUIOG\nWGcU81gEOAeuvx544YUwZVP5v/+zv5cmBEK5BqmD1hUrsv/mv/qVCJR98EH3ZZO8/LJ5v+6GE4NF\nQC1Tr172WULbwLUqi4DpM3nz5buib1+xzTOwCOkalGYRmDIFuOKKxv+zZrVmaQqJqYwhJ3dsFs9Y\nhEBW16C0+3HVVd2UJw8xBQuTEAhHwhwTUUfUhnn88WJAddttwD/+kX5sp8cI5BECDz8MjBoVpFhN\nzJ0LvP++/f3Fi6t1DTIJgeXLswuBl14C9tlHvF6xws+D5vDDzfv79m3O8hE6WNiE6hokRYHpN6yD\na1DfvsD8+W7LkoWVVhLbPPd+SNegzTZr7DfNtg8f3rpvxx39ZIQCirkGhRy0tYtr0LJlyTFAVQgB\nsgh0JmQRaDPUhvnss2L73HPZju10IWALFpaDWnUg8corYcqkY3oIH3lk43WaRcC3WV9eW7UuFf29\nQ6/YLGeOJTFYBFQLgMkiIK9fh2DhKjKMDB/eSJ+bRQjIz4Z0DTrgAGCnncT/WdNNhs5UpmK650PG\nT8XuGpRVCKTdY1W4BpFFoDMhIdBmqA0z78CeXIOa/9ctAmrHVJW/s6kzHjUKGDZMvE4TAsuX+31o\nqwMcdV+R3zz0g8AmBPSyr7VWY22JkEKgd+9WISB/X7nV3W983KdZvnMsbXzvvRuvs9xP3/ym2IZ0\nDWJM5MIHwqysnUba4C4mIaC+Tit3HVyD1DKp67+EIiYhEEtyjk6AhECbUQchUDeLgMk1qKr8ybbM\nDdKFZMkS+4yZxKdVQHV5SCpDFqoWApKkbBmhLQJ6wLCsI1kO/eEZk2tQFW1G/f5ZBhayvkO6BgEN\nF5GqA4GBYkIgZFst6gYXm0XAVGdq6tAqJpvyugb5hCwC4SAh0GaUEQKUPrT5/7oIgR49mgcS6mdM\nnanPPOA2i0ARfDwIkmZc9VSdNosA5+GEQFqwsKwjOZOn15kPcV/UIhBiIKFfV/3+We4nWb8hXYOA\nxu+s358xWAh0YrIIqOR9fsUQI6Dz4YfZjvVFXouAz2c5CYFwkBBoM2xCYMQI4XPYsyfw/PPmY30L\ngfnzgUMOEcHLeZgyBdhvP2DyZD/lkqTFCKgd0/e+57csNmyz03IgsddezZ35DTe0fj6ERUA3jxd5\nYPgYjM2caX9PtwjIutbrPKQQyOsaFIIiMQL/+U9y3btCrwdVCOSxCNx9N/DRR+7KpfLaa8C++4rX\naULAZ/Ys
G3VyDVJ5/XXgpz8Fzj4bOP309OOqdg0aNUo81+bMAd59V8SKbLBBtmPLkNR+bYPv5cuB\nr3yldb/P/s9Wlqri89oZEgJthjqYV1+//LIwOy5fDhx0kPlY365Bl18O3H579nSmkiOOEA/mrbf2\nUy5JVovAzJkie08VpAmBd95pfs+UKtOnRUB1edhkE/F6112LncvHjFDS75YnWDiUEFBdgbK4Bp17\nbvP7PspXRNTde6/7cphIco3KIwQA4MIL3ZRJ54gjgGnTxGv5e6qufSoTJpjP8ZnP+CkbkP77mu4p\nvdw+20VS+c46Cxg7Vvx2elvXy1T1OgIPPiiea08+KYTLn/6U/VhfZbKJjxdfND9LqhAChxzi75qd\nCgmBNsMmBFTee8+837cQKDp4luX1bSrMGiNQpcnS5hqUZQXajTcWW58WAdXlYfJk8dutu26xc/mo\n56TBoF6H8qFYpWuQHiOgt1HdNejb325+PxYhsGiR+3KY0L9vUYsA4M8iIEUA0AjutsUI2NpA0TaV\nhSJCQO9T5sxxV54s1zdhardJ/7sir1vk0qXABx+07vfVtxRJh2y7D6sQAm+/7e+anQoJgTYjS4zA\nKquY9/t2DUpaGTWJLINcF+gdZIwrC6dZBJKQ6ehCBQv36gUMHty8Pw+hhYBuEZAPxZhcg/Q2qguB\nHj2aB78+3AuKuAaF8nXPGiMwZIj5eLW+fU2MqL9J//5ia3MNsv1+Pu+7tHOb3tetjD7dwLL2JWmu\nQFW7BqV9JibXINsxVQiBquLz2hkSAm1GXiGgNvBYhYAtm4tr8gQLV0UZISAHHaGDhYF4YgSSxIUt\nRsBU9lgsArpY6dEjeUE5FxQ5ZyghoJfN5hpkay9qffvqD9X7WrcI6PWU5LPtiyJCQE4uyEkbNejV\nNVn7EpOAT3rfFXktAsuXmwe3MVkEqhACtuctCQH3kBDoQFZeufFabfi+s/kUnWELJQSyugZVmfUo\nLWtQEnLQsXChP1FjSh9aFNcWgaVL3VsEXA3IbPdUXtegrq709LFlyWoRWLq08dlQaTGTLAIxCgEp\nzm0xArEKAfn7SqQQWHttsZ05038fk4ZeR2nCwAd1EwLLluVbuZwsAu0BCYE2Q22YtgavPhzVB4/v\nzCN1tQjorkFVroNQxiIghcDRR4vP/+c/bssGuLUIuBQC55wjvvPf/mb/jE0ImHyLZRtydS/YAqqL\nuAap+GjTWb7zrFlikHvwweL/GFyD1DZiW7U1tGtQUYtA1a5Bu+0mFtWTAkBaGaUQuOwy8Z3yZohz\nUT5JmhCIyTXINLitwjXojjvE73bdddmOISHQHpAQaDOyCIHhwxuv1cbm2wc+9hiBrBaBLB20r9km\n07W7uoABA9KPlbOPMgjy+uvdlUtiSh9aFJf345gxYqtn1VHRV+W1WQQA965Bjz1m3q+vI2CyCHBu\nt8RUZRGYPFmU7Y47xP9ygLvOOu7Lo5LkGnT++cDmmwM33wyMHw989rPApZc2f75qIaBbTkL7jwPp\n98zy5cCjj4rkD889J/ZJQfCJT4jt/feL7ZFHui9fUYtArK5By5bFYxGYNElsv/GNbMdUsY4ACQH3\nkBBoM7IIAZs7UNVCwFbeqoKFbUIgSwcdMvVbjx5idi4NfaDrc7EpF24VPgY7K61kf0+vH9keqswa\npN77vXubZ/xVK0yI1VOLPPylEDj5ZLH11aaTLAJrrw08+ywwcqRYV+Xvfwf237/581W5BrVLjMDA\ngc37fbgHuXINitUisMceYluFELARk2tQFSsutztUpW1GFv9gtXNOW4XWJTYhIDtBWwdVdYyA7hpU\nJhNEWWyz07EIAZtFoGrXIEmSEIgxa5A6YO7Vy+waZHMLAqrLGqQjZ7rlwNdXX5OUPtSE3ieFEALq\nd6+ra5CE84ZfeVdXc/xZlnMVwZVFIET60CzX0Msp74UqXIPyHkOuQe0BCYE2Q22YttkYtYGpDbyq\nGIG0QVVsFoEs9RTSIpBVCMhBmMSnEIgxWBhoHaio6EIgKSakKiFgcg1SMwbpxJY1SN6DRVebTsOW\nAtiGXmchXIPU+ksLFo7RNUgXAtIa0K9fmL66TjECWdAtAvIejMkiEFPWIMI9JATaDLVhvv+++TNq\nA1MbuM1P2RVpQmD5cvEgfPDB5lz3vh8u8+YBDz3U2vHoQmDCBOEX+/rr6ecMaRHo0QNYbbX0Y3WL\nQNGYDZ3Zs4GHHxZlizFY2DTwMqEHXNfBNUi1CJgGva7K98EHon/gvJwQ6Nu3Uc68K4xnwaVFwJcQ\nUO+npBiBGTOAxx83nyMmISDXDAglBOoUI5CF0EKgyHlDuQb94x/Aq6+K108/bf4MuQa5h6q0zcji\n6mNzDZo5UwT5+cLWQaqDqtNPB/bcEzj++Mb7vh8uBx4o/DJ/8QtzuVR3knPPFeVLI7Rr0FprpR+r\n+++6GujsvDOw++4i+NhmEdhii/zndVWH6gqxSefUrQU2i0BVC4r17Nlar0uXhnEN2mwzYJddRBBo\nkXNKIdC7d6PO5ErXLkmKETBRhWuQihQCqtVRfoehQ4XIVpF9aCyuQQBw1VWN11myl5Ul9vShZYVA\np7oGzZkDfO5zwAYbAG+8AVx4oflz5BrkHhICbUbWLAUSvYH//e9uy6Oid2yf+ITIOqEOqq69Vry+\n4YbG53w/kB9+WGwfeKB5v+xwPv95YKONxOupU7OdM7Rr0BZbiMFaEiNHth7nApk55I477BaB668H\nhg3Ld15XFgFV+Kozrr/9LXDWWcA11wBXX93qXpWUPjSUEFAHql1drQPb2bPDuAbNmCG2Tz/d2o6f\neQbYa6/k42W9+x4olrUI5PXvLotMY9rV1ShLkkuEFAyxWASARurQzTePSwjEsKBYFmxCoNNcg955\np/F6yhT750gIuIeEQJuRpWHaLAJAsutEWfRr/eQnwKabpudkr6rhy+t2dQG//KV4Lc3gafi2CKiD\n+B49xP9nn5187OqrNw98XPvgL1litwh84hPABRfkO5+r8qm/hRyQfv3rwBFHiJSSxxwDHHts82yw\nev0qYwTShMDMmcmuQa7vQ1OMxWc/a5+9k6gWAZ+UjREIGTMFNItPW5yASmxCgPNGeQ88MIwQKBsj\nIFOcVrkejEodYgRCuAap/X2SgCch4B4SAm1GXiGgdwo+3XBss3WhBlV5UTsc+cCORQio/v6y/vLO\n8Lte5GnJErtFAMg/SPAhBBYtElt90A+0zg7bYgQA9ysL21AfiFLwqahCIESwsCnrUq9e6f2GvNd8\nu/mVdQ0KkU5ZLZMqBGxrCaj4HiRmObeeflq19sQcI6BbzmJxDdLXEaiTa5DLOlTHJUnPMhIC7iEh\n0GbkdQ3SP5/0ECqL3rHVSQjIYFzVfJmEb9cgNcONrEe1vLaOVP2MayGweHHygmKmwXcSrh6E6m8h\n729ToHRWi0BMrkEffujfNciU7lKFsXSRF8oiUNY1KLQQUIP8bSlEV
WKwCKj1IhM8AKL8MbkG2YKF\n5W8ek2uQSowWgRCuQWQRqA4SAm1Glka+dKnwwTv1VODdd5vfk/6ePkizCMybl379EH67EpNFwJaJ\nSefii92XBzALAZNFwJYvX60/16LvqacaGR9MQiSvEPDpGmQqi74vaR2BNHc2V6QJgZ/9DHjzzcb7\nOi4GjLNmNV7b/NeLCgHX7Tmva1DSSsxVWQRiFwLqPaAKgT594hYCumCOSQiYXIOef15k63LFsmXA\nmWcCEyfmPzaEa5D6e5EQCAsJgTYjq2vQjjsC//u/wOjRze+paTtdk2YRsPmQq98ppNVA7XDSYic2\n37z5/wsuaARYuiRJCKjlHTAg+XjAvUVAxYVFoA6uQS7ux6SBjS4ETAPbAw8UW18WgTlzGq9l/elk\nFQK9eolVfSWu23Nei4DOhhs2XvsabKv3nirY88QIVOkapAqBZcvCWwSKxgjIdhZKxGfFFiwMAN/6\nlrvrXH+9mDg45JD8x4awCCS5Bh1+eOM1CQH3kBBoM0wNUx8ULlvWcHF5/vnm93wKgTSLgC0jTwxC\nIKnz2WcfEfisM2+e+zIluQapnefKKwNf+5r9eMCvEMhqETjssMbrO+9sfs+Ha1CSEKjKNSjpHOpA\n1mQRABpt2ZcQsGVduuUW4K23xOu0AaA6GzthgtvyqeSNEdBZe+1Gu/G90vqzz5oHgFliBGKxCCxb\n1hwjENIicNhhog7vvtv8ubrECNhWFgaAF18sXx5J0vo3BxyQfKysq5VWEumYt9pK/O/LNUidANl6\na5HZTUJCwD0kBNoMU8Pccsvm/5PS04V0DZKN3TSjbTuuKiGQxM47m32nfTys5TlNwcJqefv0ATbZ\nJPlcMVgE1FzyulXFp2tQlhiBUOlDk86hlrNHj+SBrS/XILW/kEJq0CAxEBs6VPyfFiSqZjYaOtRf\nQKRel0VS5H7+82LrSwjI8w4f3ry/Lq5Bavl0i0DIYOE11xR9ht5vSOoUI2ByDQLcps5OEpi77ZZ8\nrKyr7bcXE4vbbtu83wVqe1PrY+edmye+SAi4x9HaokQsmBqmPuhJEgJVugbZOr3YhYBtptZHWZOy\nBumzi2muODFYBNR6s7nmlEW97+TrPK5BvtOHJg3qsrgGSUJYBKQQ0K+Vtkq1Phsrt64HtGUtAkDj\nu/gSArYsT3mEQCyuQXqwcIhBmp6iOMtzQ/0/xhgB9TuoFgGX9ZnlvrKhJ4HwsbBdUlrzrNZ5ohgk\nBNqMLEIg6QFXpWuQrYGrD/eQQiDrbIy6GJCKj1m7rK5BWYSAzwxRWS0C6mDI5ppTFtM9k8c1KBaL\ngE1wSkJkDZL3jH4t/UFtW7xJ1puvwZhLIeBr1t2FEIjRItCnT5hBmp6i2NZP18k1qGqLgO7SpX8H\nm/hyWYdq+fRFT0kI+IVcg9oMUyeuP3BicQ3SB7C6j/gppwh/3dgtAj16mIWAjxlF+f1NJvi8FoHf\n/16YeH0ENRexCNhcc8piOk8W16A33zTXT1UxAkVcg8aMAV54oXjZgGwWgaRyjBzZmk3K5ToMr78u\nfJZ79ADGjk0uSxbkdwttEcgSLOzLkqKSxyKg/rahYwTyCoGYXYNUfAmBpPtKf57Ysmnpde6yDnWB\nKdGFAOEeEgJthqlhqrPHQPIDbu5ct+VRSXMN0rnsMuDGG0WudEmVQuCqq8yfO+QQ88DSh+uNOqs1\ndCiwxhqNh69aj336mAPA/vd/m8v3l7+IlHKuKWIR0OvQVf2ZBk2mgazpN/zLX4Df/a51f11cgwDg\noIOKlUtiChY2XWujjUR2Ld33/Q9/aC2jywHtSScBf/2r+C1uvrn5vSwWAeljPmyY2Pp0DeLcvvp2\nlmDhv/9dbGNxDQIa/XPsQiBW1yDbgmJFzpVEUn+q/256ndrqPIQQIPxDQqDNMJnq1EVrgOSHb9aV\nc4uQZhHIclyVQuCb32y1mOywAzBkiHnA4VMIdHUBb7whZqttMQJrrtnI7iA55RTgt79t3mdLCVmG\nrCsLq7+9LhSSLFd5yGIlM13fhmoRcDGQ9ekaBADTphUrl8TkGmRqsy+8INYcWGst+7l8uAbNn29/\nL4sQ+OtfxQTIlCnif59CQA2a1ttIFtcg/Tw+yCsEJFmskC7IGiOQlj6UXIMaFHUNqkIIxJL2tZ2g\nGIE2w9RIBg2yf0b351Vn311TVAioHUTVrkH9+ony6r76ptlkHz74qhDQr2kKONOtQUDrYmNF/KjT\ncBEs7Kr+TIOmMguexZY+VH3fRFlBldU1SLouqYtk6fhwDUoaeGUZSHV1iXS7Ep8xAja3ICCbEOjR\nQ/weMQqBPn3M5XLt2uEqRiCWAWVS+tBQrkFpFoEQrkFJMQKEX8gi0GaYou0HDmzep3Y8spHJAaNP\ni4DNNShtIKp2EFULAX2/7neq4tsikFSuJBN9koBwRRHXIP0Yn65Bpu+clvlGxeVgImlQp9ZJUdeg\nsuSNEUgSAj5cg1zXic8YgSQhkCdGoErXIFu92CwCrix7krIxArG5BiUtKEauQUQISAi0GaZZ91VX\nbd6ndpCyYa+xhtj6FAKzZ7eWDchnEfA5E6aTRQhIYhACthR0OvogpEqLQNJv76r+ktJ/qsRoEVBJ\nCxb2JQSyZA1SyWIRcDkYcy0EfLgGLV8u3KayWASSLGGybAsX+kv/W/Q3sQkB15no8ggBzhvPtFhj\nBEK5BuURAnlcg2bOdDNrnxQsrCJ/z9mzw44H2hkSAm2GSQisuWbyZwBhNejZU6yG6+MBM28e8Mtf\nNu/TFxSzEZNrkG1/DEJAX1BM3ycJIQSyWgRWWcV+Dp8WgazlM1GVEBgwoJhrUFnyWgT0mCQVXQjE\n4BqkI9uyy5nsL35R1Iseh6CiCwHVXUkvGyCCs31QRgiYvpfrTHT6oDRpIcrTThOTXPfe22q9jTVG\nIJRrkNqG04SA7hokt088Ier36KPdli9NCNxxh3B53m678tclHAkBxtghjLHLGGOPM8bmMMZWMMZu\nyHH8td3HrGCMrWf5TBdj7FTG2HOMsQWMsZmMsXsYY9u6+A7tgmllzf32A778ZZGFB7C7SvTvL177\nWEvg3/9u3ZfVIhCza5CkimBhnayuQXpZfQwgs864H3qoyLp05ZWt74WOEcjjGhQqaxAAjBsnMkB9\n7WvZZr8feKB8mVR8uAa5jBFIqpMiK90OGCC2LjOoPfqo2N5+u9ia6k+K4o8+EltTP6N+16lT3ZVP\npeg9vfLKYSwCeWIEZJa0n/407hgBm0XAR2yF6dx636fHkdmsMA8/LLY33li+fLYYAcmuuzZef+Ur\nYjtpUvnrEu4sAj8CcBKAzQC8BSCz1maM7QdgNIC5KcfdAuAiAL0A/BLA7QB2APB49zkImC0CPXuK\nFH4nn2w/rqsrX9aKvNiCC7MQs0UgKUbAd7CwTkyuQaa6M+3r0we47TbguONa3wvtGpRVCIS2CIwa\nJWbA+vTJ5hq0xx4N0e+C
2F2DkgZLcnIjD7L8Ptwkk+pPv66sG3VthBD9X5FrjBkjtrG5BqmvQwmB\nvJaGUK5BOuo19ftRimFJ1kxNZUiLETjvvMZrX26QnYqrn/M7AIZzzgcCOBFAJh3LGFsDwNUAbgYw\nOeFzhwM4BMCTAD7LOT+Dc34sgF0ALAdwDWNsJdvxnYQtM4/tfwljjdkzHwNYU8eRtTFXZRGwdXZZ\nv0sMFoGYXIPyEjpYOGuZXQuBPLPiWV2DXD6oXVoEfLgGJX3Xfv3yn0+6NoUWAvK6MnObvLfUgWUI\nn+giLjPyNw/pGpRXCIRyDSorBHwFC+vnUv9P68eyWmHKkOYalJRhjiiHk5+Tcz6Rc/5agUOvgbAC\nnJTyuRO6P/cjzvnHtwvn/FkIS8FgAIcWuH7boXdCWd1AGIvXIlAH1yBTvfqoRzUPeVIZqo4RcPGg\nCC0E8lBFjIB6XRP6KsSuMAmBpHIkxQj4cA1KGiwVEQLqzLzrwWIRi4BahhDZVIrc07LsIS0CaTEC\nsVgE0voa/Tf1FSOQhB4PY8u4VIUQkKjXJIuAWyoLFmaMjQKwP4Bvcs5nJXyuD4BtASyAsAjo3Adh\ngdjV8F7HYYoRULE1IJ+uQUuXmldnrasQULGtEgr4X1lYJ/b0oXmZOVMsfiZ9pouS1TUoK6pF4Oab\nyw8W89zTWbMGufxNfboG/eY3wMSJ5cqX9F2LuAb16SN8pJctA/7xj4Zfvwvuv19sk4TAn/7UvAJx\nkhB44gmx+rVLivSxUvz5FAJPPgn8+c/2wFUdNcZDtQj4FgJJvvgm7rtP/OYStQ4nTAgT1Kw/q6QQ\n4By45RaxeCXg1zUobR0Btc3Mso4YiSJUIgQYY58C8L8AbuSc353y8fUB9AAwlXNuarrdeRgw3PBe\nx6F3QvqgT38AqZ2pLyFw+eUiWEtHlmWXXZKPjy1GwNQJmoISq4wRkA+TOlsEHnwQOOooc/xAHnxa\nBO6/Xzwoy1An16AsQkBfwFBFFwKXXALsvHO5Pse1axDQGNhuvrkIZn/ssWLn0ZGrPJvqb+jQxutJ\nk8x9nS4EdtwR2NZxugz9ugcemH6MrC/Tb+FCCCxdKlZx32671tlpG9/9buP1smWtkyixuAYtXQp8\n8EHjf30S5957y5cJaK0vdX2hwYOb35N90i23iMDcH/6w+RwhLQKbbCK2ZAXwR3AhwBhjAK6HCA4+\nJe/Iz18AACAASURBVMMh8nadY3lf7l/V8n5HoXfi+hoCtoVCurr8xQg89ZR5v2zY55+ffHxsQsAW\n9HrHHc37qowRSPKhjCFGYPPNgccfz3YuvV7zUlQIJAXcqsc/8kj+MqnY7ml1ltB03TPOaH4vKR1g\nGfLGCPTsCdx1l/k9W6awMm0l6V4r6kusZ00xZT0rg6n+1l+/8XratHyuQS5jB/T78cYbxYDwttvs\nx5hSndrOVwT1Hkxai8GGKUYglGuQHnibRo8ewKmnNv7/61/Ll8nEsGHArbcKq8OIESI72c47i/dk\nHctsV5IqhMCoUf6uSQiqqNrTILL9HMM5tw3uiYLonZtuptc7T9XM6ssiYMsVLzvkpIeIXp7YhIDa\n6R9wALDFFo3/qxQCeRZZqsIicMQRYoYvC7qYzYvpnkkTKttsA3z1q/b31TqbU7IXs93TpvpRr7vX\nXvb3fAmBLBYBANh3X2DttVv36xYBSZnZWR8DBN2SIFded4Wt/o49VmxnzjQLARsu/fD1+3HllYHD\nDgN2281+TJLlxUWfrQ4MZb+a53evMkagSP910EFuyqKi93mrry7SN8vfddSohnVTCoF585qPSYvL\nKINJCBx0UOtq5IR7ggoBxtgGAM4DMI5znjXbtXzMDrS8L/fPtrxvK4v1b6yar61m5BUCEp8xAgMt\nv1yRhh2bENBRH05VriycZwXaKiwCeR4kSa4mWShiEeA8eTCtHl9WCNhmc9OyU+nlMwWLu8A0C53l\nnkkSqy6FgI9BiW8hYLNUmDIWZakbl5l5bH1sURcs1xaBLFYpnSpjBIoIAfX+8OXCZIrl0bN56ULA\np0VA9UQwJcWIZd0HG2PHjrWOKWMntEVgYwB9AIxWFhBbwRhbAWCn7s+82r1v/+7/X4NIEboeY8xU\n3g26t6/kKQjn3PrXTkJAz+BRRdagThIC6ncKHSNgygsda4yA7X2TdagKIQAkD6bV42fnmoJoJc/A\nK2mtiBAWAdO1bOQRAmXatY9BiR5kHMoiIAdn778vtl1d2dx+fFoEJEWDsl1bBLJapfTjq4oRyCsE\nGGt+BrsSefp9ZMruJetG1re+qF5o1yD1Oj6epy4ZO3asdUwZO6Gzsb4B4FrLe/sCWBPAHwB81P1Z\ncM4XM8aeBrA9hEuRnmNib4jUog+7L2790O+5rBYBn+sI6P62aWVJ4hvfAJ5+2pydwjV5XYOA5u/k\nwyJw+eVimzZIDOUaZHvIF7UIrLpq6yyUD9egtAcZY8n3mHp82axG99yTfg3TvtiFQNbAZiB+i0AZ\nC4tpIJ8mBGTwaFYh0EkWAfl8yjMYffNN8Qf4ixGYPl3E7cgMO5IiQkC1Ml54oXAZG14yHUqatwDQ\nqBt5z+lCIPSCYr4t7IQgqEWAc/4c5/ybpj8AL3d/7Kzufc8rh14BkSL0vO50ogAAxtiWAA4D8B7E\nSsMdT5pFwPYA+uc//VkEbA/5IgPQv/0N+P3vy5UnK7YBxg9+0Hh9zDHN7/nuuF7ptnu99FLre0kr\nRaqkZZLKg22QYqu7T39abHff3fy+zE6hkjfYTqeoa5D+HX78Y7E966zm48sOwuR5dUyDz3XXFds1\n10z+HV0KAemKoZJlIJBHjPqIEVCz8ORFH9iWqc88QkpaT6WVibHmPl3NhKNStUUgSTTH4BqkItuV\nayEwejRw002tyTF23FFss5Z38GDgc59r3rfffuXLp/eDJku9vM8XLxZt0uYa5MOKrD4v5e+t3nMb\nbADCE06EAGPsAMbYOMbYOABymLSd3McYu6DM+TnnNwO4DcB2AP7OGPs5Y+w3AB6B+A7Hcs7nJZ2j\nU8ibNUiyfLnfdQRMFO1M5MqbvrHV1emnC+H00ksi6NV2jM8ZDOk6YLt2HtegMrM7eYXA888DU6cC\nG29sfv+440TaUJWyDx0X6UOvvhoYO1b85ued1zzw8TEjvfvu5u998MHA5Mni/kv6HV3GCJhW2C3q\nGmQ73rVFYP31G6K5CEXWH7Bh6gds9SfFnToQUu/fX/zCfFzVQqDI+fKgugaVFQJrrCG2rj02Xn3V\nvH/ttUUWqFmzgLffFn82LrlE3HvrrAP8+teN/VOm2I/Jit4PmsRb797CPXPZMmENsAkBlxMNEtM6\nAuo9t/rqwElpS88ShXDlGvRZAF9T/ucAhnX/AcLN5/QM50lqml8BcDKA0QC+BWARgMcAnMc5fyZf\ncduXNCFg6zz79vUnBGwp72LPApDkGvSZz5jf8+0aJDG5o1SRNSivEOjXT6Sts8FY6
2xY2fukqGuQ\nyuDBYt+IEeJ/9SHqw0xum81W60cPUvZlESgqBEK5Bpnqf9iwcoN53SJQpnx5LAJyv00I2O61ql2D\nipwvD2Vdg1RkznzXFoGk59w664jXAwYku3qp7X6jjZrPUZa0NYYkq60mBMDMmXbXIB9CIM01CEh+\ndhDFcSIEOOdnAzi75DkSl5XqXkzs0u4/woLe2HXzn61D6d3bX4xAuwmBJHwHC0tMQqCoRcCHECiD\nPggrO+Ne1DUoqQzqQ9SHEMiS/z6Ua5DJAheTRSAtlqIIuogoM2gsKwSyXDu0RaBXL7ulV8fFzHuR\nFLY2pEXAtRCw9YV5LLBqP6P2gy6elXr5kuJUpk0T7d4WLFyVECi6LgiRDC3R0GakCYGkTogsAs0U\neYD5ynKglyXNItBOQqDsoM5UxrziQq9/VxYB2z1WRAgkBRKXIYRrUJlBmem3LCseXVoE8rgGpVkE\nbIQQArZBahou+giXMQIyC5n8TV25CGV9zmVNP63WsYsBsP472M4pg4jfe6/1XpBld+l6KDG5Bul1\nFSJJSCdCQqDNKOoapOZND2URiH2lwCKDE3UA9vDD9kDQPBx/PLDees3m/7RFsvIMwsr8Drbftgzq\nKtfy/zK4cA3SP+9KCOy6q3l/WYtADDECVboGuRYCoS0C6qJZWe6vEK5Bap3mSad67LHAH/5Qrkym\n9KFF251suytWAL/8pXAVevnl5GOyUGTCy5ZRD2i2SoV2DQKA115rfa9q1yASAn6IfChG5EVV/Tvt\n1OxnCNg7lBUrGjncdXNgWfQO8nOfE0GPRR/WPmahTRR5+F98cfP/555bvhxXXSVS0qlBtHfd1fq5\noguK+QgWLos6EKvSNej73xf36557Nr/vyjXoscfM+7MIgcGDRVCs6RiXD2rTbHNMrkGm+6OseNRX\nQ3cdI2Ab0JgsAj/8oci2JVMH339/tmsUJUu/p35GroYs2Xvv1s+PHFmuTGmuQSeckM1K0atX435Z\nsQL49reF0P3+98uVD8juGgSIVcv32ENMFtnO4ds1yNbHyHHAe++1vpfkGlTWsqL+xqYFxQASAr4g\nIdBmyMY0frwYZGQd9HHeMAmaZgBdlAkQM9uTJwN//GPx87k0gydRRAisv75IceqDHj0av59cFl4l\na/pQl4MwlwMQFZcz2mWyBv385+J+1cvjO1g4ixDo6gKefNJ8jEshYLo/YhICPiwCeo71MhYBk2uQ\n7fc1ZQ0aOlRkjTnhBLFPF6Vly1fkXGpKWd3qaUoBXJY016DLLzenVAaAvfZqvL7qqsb9on7PsosC\nAnYhYPqtb7oJeOABYOutm+999RyuLQJZYwRk35b0nDX1L2Wtw2r5KEYgLCQE2gzZYabNOOn4FAJq\nB+HigeXSDJ5E0bL6MJsCzTnFsy4oZhoQ6Z1pjEJArcOy94wL1yCdGISA/jm1TDEIgSoXFCv7m+jr\nr4S2CNh8pG1UKQSyTDaVncnN4hpkWiALaG4L6mSK+pu6EAIuYuHUuq/KIpAkBGSdmSZqyj4LsggB\nsgj4gYRAmyEbUF4hsGJF4+HnUwi4cCWJ2SIA+AmkAhqDAsbSAyRDWQR8xAgAzfdv2UGOi3UEdNSH\naNHZ56R6zyoE1N9SrTP9HizzG5uOjX1BsdgtAnlcg7IQWgio7T6LENCFVV6yZA2y+durda0KAdcW\nARdCQO2r1DgMF2uVZI0RkPtNE26ynZomGqoSAq7Xg+hEyNBSczgXriibbCJmEGRjtDXyLK5Brhfs\nci0EQgTGAcXL6ssi8OKLYmt7sBSNEYjdIlC2o58+vXVfWSHgwiKQ9L2yDh7Uz9msA4C4z4vOKoZw\nDXKdNajs7+tSCJjaiK1/rkoILFrUcCWdPz/fsXoZTWWWmXqKkiVrkG2wrAsB+bm33mrs9+kaVFQI\nqN/HRT+b1yJgWsRM3me+XYNef11ss7gGlenbCAFZBGrOddcBW20FHHCA+D+Pa5D6uk6uQS4tApdc\nYn8vNtcgGdBmGxyYsgapwaSmzwFxCgGXFoFf/ap1X9oAK205e99CoKxFwNVvbDuurGtQ7BaBql2D\nQguBH/wA+NKXRADru+/aPyfTUa+1VmNfFouAnr0uL2WyBtksAmowrCkdc15sv0FRIaDiI31oWoyA\nGn8kSRICLi0Ckya17lPLZioTURwSAjXnppvE9qGHxDaPa9CZZzZec94wRfpMH5qn0do6KpdCYPx4\n+3tFH/4uBzmm+rLVi8k16Cc/EcuyP/ts8TIk4cs1SH3wle3o5aBuk00a+2wDxeeeE/WVJBD18vkQ\nAll9YW1CAACuvrrxumgdlhEC+homSe/FJgRcLihWxDVITR+ahbJtZMKEbJ/7y1+AE08UQbeSLEKg\n7OSIOsiUr0334N13NxYMk+hCIE/qUxeUEQKnnCK2tpXG85DVNSip70mKEfDhwqmPRUyZoUgIlIeE\nQJuRxzVI9ankvDVjhesyAfkare3h4dI1KMlkXbSD0Tv+MvWZJwe5yTVo4EAxI7755vZrxGgRUL9j\n2Y5eihU1RaBtgLXppqK+bIGHEt8xAmVdgwCR1lG2obJCQP+OWcqXVIcuZ9x9uAb5CKhX+9vYXIOy\nLiI5YgTw6183D0yzuAaVFWZZ+8F99gEuuKB5ny4E8iyG5oIyQuAb3xBbF4t8ZnUNSrI+yHOYns1l\n7kHOzcfr+0y/Xah04u0MCYGaoz+gyrgGyWNcz/IWjRGwBd26tAgkmayLdmz6g7BMeU0PgCyuQXke\nPjEKAfU7lh3kyDKqM4FlB4pqXbuebQfKuwZJTBlS8mDLUpXl/koKENVFgmshUHbgqR/vIkZAXZsg\nNtegvFZg9f7M4qtf9vcwPZPSFmWT6EJAt/b4pmjWIMDtIp95YwRMJLkGlWnDtnGB/ruTRcAPJATa\njKJCYMWKxjGuB3dFhYDtO8RuEXApBEwPgCwWgTwDXRIC5ahSCKQFiJsypOTBl0XApRAomtWo7DWy\nIsW86g5l69vk7x46fahLIeAjna6pn7FdRy+POmjt2TO8RSCPf7/+fJRld2ER0O+RtBgBE0lZg8rc\ng7Zxgf67m0QcWQTKQ0KgjfjwQ+CZZ8TrLK5BukXAl2tQ0RgBW4e0cCEwaxYwdizw5pulipbox+zK\nNaiMcMnzAKjCIuArRsCVaxDnjTKqFqZ2sQioJM3ExiYEdGtBmd/YdKyLdItp18iK7E/VvibNNUhS\nB4tAFtegMu3tyiuB005r3V/UIuBDCPzrX/b3yrgGyT6rrBC4+25g7tzmfUVcg8gi0J6QEKg5auM7\n6aTG6ywWAbVzDuUadNRR5s+oqz9KbDECCxeKVTbPPhvYffdyZUuyCIwYUeyc+kNPXXwnL6YHgO2h\nkDV9qE6MFgFXQkBdqt5FgK/EtxD4/Ofzny9pAOZaCGSpv6S2pb/n2iKQNDArQmjXIImtnl0GMwP5\nhYDNsgy4jRF49VXR15smU4oKgSTLWVHU
RARp5Uliiy2a/5dlLysE9tuvdV8Z16CePd2mACYhUC0k\nBGqO+hBUM8MUcQ2S/69Y4bZxycZ82mnAxRebP/OHPwDXXgvcdVdjX5Jr0FNPidevvlqubHpnuNVW\nYqn6iROBz3ym2Dn1h4rrjCO2h4IpfWgWYhQCukgtiuoqZxPBRfApBB55RNyHZcokcRkj8PLLredN\nIik7i/6e68xas2YVP5+JUK5BWYXA8883/x8qWFii3mtFRGJWklJZZ3UN0oWACdcWpKTymHjrLZH5\nb4cdzMf6GOxmFQJqqlh1YsClmM8qBMg1yA+0oFgboftCmkhyDWJMdAJLl4o/VyvkysZ84IH2wcGA\nAY0MCWr5urpaO8GFC93l6tfPvf/+whJQ1BoAtHb8ZToq00ydbfBdhUXAl2uQqxiBOgqBXXYpdj7f\nFoHhwxv7s/zuSW1Uf8+1EHCRdz3tGlnx4Rq05prN//tI3ZhEUvtx7Rpko6hFIDRZrjl0qDlFqKw3\nH4NdW7n0e9OWynn11YEPPjC/l5esMQI+0pYSZBGoPeoDVG0kRVyDgNZgNRekpTS1YTPjLljgbiY6\nawBVHnxbBGxQ1qBW5H2sm7JdZpXx4RpUhCSXDNeuQVnuy1BCwHRs1nUYylwjK7KNDBjQ2FdWCLh0\nyyhCUvtx6RqUdFxWIaDea1kSLbimzDMlBouAem31tcs4n6wWAdP9QEKgPCQE2oi8QkC3CKjHuRzg\npS1yZsPkhwgIn3tXi57pnYiLB0IMQqDurkGuYgRCWARcD+iL4sM1yCYEsrS/JIui/p5ri4Dr38RF\nG1a/s22ipV2FgA9s19H318E1yEZZi14SWYWA2pZ0i4Dtc3nJKgTyHEtkh4RAG1HWNQjwKwSKWARs\nnf28eeXKJAkhBMp0VHmEQDu5BrkQAkuXArvtJl736lWvYOEi+HANsq0j4NoiUDYzlI5rgVr0tzr7\nbOCss8RrdYBlK18VQqBI/5Q0cPaxwJuJdnINsuHTNShr1iCbRUAXAi4stzpZ2jFZBMpDQqDm5HUN\nUoNtQrkGlRECvjtuH65BadfIQx7LB7kGNXPvvY3sMbp1qezARA3qcyUE+vUDNtyweJlidw1Ss5r5\njhFwfV8Wrb+xYxuv1e9s61/1PtI24A5ldbzwQvP+T35SrJS8wQbpZQP8zLgXcQ2S9Tt6dPNnfFoE\nysSr+HQNyrqOgM0ioKfe9mERMLXj7bdv/p+EQHlICLQRWYTAySc3XptmXWOzCKhlnD27OWDRBT4s\nAmnXyENRiwC5BjXXnWvXoIEDGyLDlRCYPbtc2svYXIPUAdghhwC//KX5vTLlsx3r2lLlwnrj0iLg\ncuVj2yBszhzgu981v9erl1i35qWXWt+r2jUoi0Xg2muBL36xsZ9cgxokxQio7cBlCts8rkETJwIf\nfQR86lPJxxLZoaxBNUdtmFlcg2yBU3VwDRowoNUcWfaho3dePh4IoVyD2mlBMRcWAbU+XAsBoPEg\ndCUEymbCShICri0CeWME1lyz+Ry+YwRisQiouBQCOj4GYaZUjSq2+zU2i4BJCDDWHLwduxCI0TVI\nz+kfKkagq0v8dvL+I4tAecgiUHPyugapnbepw44ta5A+y60LgbLZQfROxEfgZxUWgbq7BplEal7U\nh7tr1yD1/DHHCJQtoy1GIK9FQD9evz9dWwRiFAJqfdRBCBRtIy4tAi6yBtliBNTnUexCAHDfX5R1\nDXIZ51MkWNintaTTICHQRqgN0zZAVsWCqSNwaRFYulSYl8tYBPTOTxcCZfOFxy4EisYI1N01qA4W\nAVez7a4IubJw3hgBvWz6+WK3CLh2DbINcIouzuVjEFZ0cGwqswsxn+U6QHYhoO4vIwTS+uiycWe+\nrAJFsgapr5NciPJSRgiQa1B5SAi0EVk6Nt01SP+cSyGw0UbAqqsC773XWr4sDB7c2rnoqxmWdacI\nMZtQhWtQKD9dX65Ba6zReO1DCLiYAXTlf+8KkyuHrxiBVVZJPzbN+qhSph2ajnV9X7p2DVp55WzH\nZL1PYxcCIYNdQwuBT34y+X1XQsB1HdrKpT9T1cXr1DIkCYa82O5Bfa0CFZ+B1J0GCYGaozY+6bOn\n++6p6A9nvQN06Rr02mvN58o6e//AA8CeewKXXNLaueyxR/P/w4aVK2PsFoE8QkAlz4PNdYzAr35V\n/HySs89u+PC66Oh9uAbFYhG4/HLg0EOBffZpfc+1ReChh0TbvPji9GNV62Nafbt2DXIxSzhhQvI1\n8tKzJ3DHHcCXvgSceWb2Y7LgWkiVoS5CwJVr0KxZye+XnXRwMeBNsv7rbLopsP/+QuDsuCPwxz82\n3ksSAj7E6I032o8h1yB3kBBoI2Rj+sUv7J/RLQK2xVd8uHxkfajtsQdw//3AkCGtD2A1bSNQvhMI\n0YmEcg1SCeUaZPpuaprIoqyxhhCEtmvkxfU6Auo5qhYCJ5wA3HqruX25jhHYfXfRNocOTT82z4yr\n63vQBbvtBnz72+6u0dUFHHCASGubNNOpEkIIuHatMP3Wru512ScA5RcUc2UR8I0LFxhZ/9/4Rvpn\ne/cG7rwTmDFDZOj5zGca7/myCJgmlHbeOTlLILkGuYOEQM1RG59sEEmmSD1GwKdrkE4Rf35TrnWV\nsuVsJ9cglVAWAZ+dcNmBdqfFCJjw5RqUhTwB364tAq4oW38qRVxEsh4TkxDwaREYPLjxOqtFwOYS\nWBch4MIiII+dO7dcWdQyJGUXyovpHkwbL5BrkDtICNQckxBIGuSonZ9v1yAdF0JA7+RdCwFyDcqH\nTyFQdjabhIC/YOG8pPUnMVoEgPL3oEqRe66OFgGfQkCNHbIJAf36tnoPlTWoLC5dYFwKAd8xAmki\nmCwC7iAh0EbIRprUgPRsLDaLwNtvAzfdVNw1xYQLIaBTVrCQEIh3ECbv1b/9DZg6Nf/xvtOHuhQC\nvoK7fa0snBefQqAuFoFOFgKufiM1a5ztnPPnp5cHqI9FoOyAV62njz4qV5aQWYPS7n2KEXAHCYE2\nIotrkMqyZXYhcNRRwJFHAuef76ZsjBUzjac9QNrdNaioEEsKGNeJ1SKgPsDXXz//8XVaR8CXEHDl\nGlS2fGk+8a6DXbfYovj5VFwONor0f0mDITVLVOxCwJXVTK0PW7CrntFKrXe1ztR4uZiFQFkXGHkc\nY8BWW4nX665b7lxAa6IOF/egusjb668nH0OuQe6glYXbiCyuQSrLlrV+Vn/w3HcfcM455ctWNN9/\naCFQd4vAAw+IGbGs6QmBegiBIpBFoHwZ1UFEER57TAQcnnhi63uTJwObby5eu7AI/OIXQjj36wcc\ncUTx86lU4Rr03e8CF10kXieJhwkTgMMPB958s3OEQFcX8MgjIiW1Gi+gsuGGIuPcqac2jnnwQTEb\nPnBg43N9+zZeuxYCY8aI9Nm2MubBpXvfOecA66wDfPnLxc6llmHLLYFx44DjjhPPKRfPkS23FL8v\
<remainder of base64-encoded PNG data omitted: 385x255 line plot of the first 1,500 electricity-load samples, legend 'Load'>\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {
+ "image/png": {
+ "height": 255,
+ "width": 385
+ }
+ },
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "df = pd.read_csv(\"data/elec_load.csv\", error_bad_lines=False)\n",
+ "plt.subplot()\n",
+ "plot_test, = plt.plot(df.values[:1500], label='Load')\n",
+ "plt.legend(handles=[plot_test])"
+ ]
+ },
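+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The cell below summarizes the full load series (140,256 samples, mean ~145.3, standard deviation ~48.5, range 0 to ~338.2) and renders a plot of it."
+ ]
+ },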
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ " Load\n",
+ "count 140256.000000\n",
+ "mean 145.332503\n",
+ "std 48.477976\n",
+ "min 0.000000\n",
+ "25% 106.850998\n",
+ "50% 151.428571\n",
+ "75% 177.557604\n",
+ "max 338.218126\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 3,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAxkAAAH/CAYAAAA7YRBJAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAWJQAAFiUBSVIk8AAAIABJREFUeJzsnXmYFMX9/981uywLiBwiilcABRT1p/EWg4hX1MREUYMH\nRkUNBoOoidGg+Yr3EY2KirdRCRqNIibBMx54IB5BjfFCQRBBueVcENj6/dFbOzU1VX1Wd9fMfl7P\ns8/M9llT3V1V7/4cxTjnIAiCIAiCIAiCsEUh7wIQBEEQBEEQBFFdkMggCIIgCIIgCMIqJDIIgiAI\ngiAIgrAKiQyCIAiCIAiCIKxCIoMgCIIgCIIgCKuQyCAIgiAIgiAIwiokMgiCIAiCIAiCsAqJDIIg\nCIIgCIIgrEIigyAIgiAIgiAIq5DIIAiCIAiCIAjCKiQyCIIgCIIgCIKwCokMgiAIgiAIgiCsQiKD\nIAiCIAiCIAirkMggCIIgCIIgCMIqTokMxtiWjLH7GWNzGWNrGGNfMsZuYox1jHCMgxljNzDG/s0Y\nW8QYa2SMvRpivx0YY48xxuYzxhoYY58yxkYzxuqT/SqCIAiCIAiCaFkwznneZQAAMMZ6AngTQBcA\nEwF8BmAvAAcC+BTAfpzzpSGO8ySAnwFYA+ALADsBeINzvr/PPnsDeAlADYC/A/i66bx7AngdwEGc\n83WxfxxBEARBEARBtCBcEhnPATgYwAjO+Vhp+Y0AzgNwJ+d8eIjj7A1gOTxhsg2ALwG8bhIZjLEC\ngP8B6APgZ5zzSdK6vwMYBOAPnPPr4/42giAIgiAIgmhJOCEymqwYXwD4knO+rbJuIwDfNP3blXPe\nEOG4P0CwyBgI4EUAr3DOD1TW9QAwA8AsznnPsOclCIIgCIIgiJaMKzEZA5s+n1dXcM5XAngDQFsA\n+6Rw7gMBcADPac79JYDpAH7QJDgIgiAIgiAIggjAFZHRB95Af7ph/edNn71TOjdyOjdBEARBEARB\nVB2uiIwOTZ/LDOvF8tBZpirk3ARBEARBEARRddTmXYBqgjGWf4ALQRAEQRAE0WLgnLO8y6DDFUuG\nsBZ0MKwXy7+rsnMTBEEQBEEQRNXhiiXjMwAM5riHXk2fpriJpOeGzXO7kLGrWmCMUX1agurSLlSf\ndqH6tAfVpV2oPu1C9WkPxpw0YDTjiiXj5abPQ9UVTSls9wOwGsDUFM79EjyBc5jm3D3giY/ZTZmm\nCIIgCIIgCIIIwAmRwTmfCS99bXfG2G+U1ZcDaAfgIXmODMZYH8ZYHyRnMoBPAOzPGDtSOj4DcD28\nrFd3WDgPQRAEQRAEQbQInJiMD2iekO8NAF0B/APewH8fAAfAm717P875Umn7RgCcc16jHGc/AGfC\nEwcbATgGwAIAz4htOOenKfvsBW9CvjoAjwP4CsBBAHYH8DqAgznn60L8Bt50/PA/nPCFzKr2oLq0\nC9WnXag+7UF1aReqT7tQfdpDuEu5GvjtjMgAAMbYlvAsF4cB2ATeTN8TAFzOOV+mbNsIoJFzXqss\nPwXA/T6n4eo+TfttD+AyeBMDtgcwG8DDAK7jnK8NWX4SGZahxsgeVJd2ofq0C9WnPagu7UL1aReq\nT3uQyGhBkMiwDzVG9qC6tAvVp12oPu1BdWkXqk+7UH3aw3WR4URMBkEQBEEQBEEQ1QOJDMJpLr30\n0ryLUDVQXdqF6tMuVJ/2oLq0C9WnXag+Ww7kLmURcpciCIIgCIIgsoDcpQiCIAiCIAiCaFG4MuM3\nQRAEQeSK67PnEgTRMqgWjxiyZBAEQRAEQRAEYRWyZBAEQRCERLW8RSQIorKoNmsqWTIIgiAIgiAI\ngrAKiQyCIAiCIAiCIKxCIoMgCIIgCIIgCKuQyCAIgiAIgiAIwiokMgiCIAiCIAiCsAqJDIIgCIIg\nCIIgrEIigyAIgiAIgiAIq5DIIAiCIAiCIAjCKiQyCIIgCIIgcmb27NkoFAqoqakpW3fZZZehUChg\n6NChOZQsGt27d0ehUMCrr76ad1Fyw+9atiRIZBAEoeX114ELLgC+/z7vkpSzdClw7rnA//6Xd0kI\nghCcdtppKBQKKBQK2HPPPX23HTJkSMUMmoloMMZizVx96qmnolAo4MADD0yhVEQekMggCEJL//7A\nDTcAd92Vd0nKOf984JZbgJ13zrskBEGoMMYwbdo0TJw40XebOAPRlkqXLl2w/fbbo1u3bnkXJTXo\nnqg+SGQQBOHLvHl5l6Cc6dPzLgFBEH5wzvF///d/gdsQ4Tj77LPx8ccf46qrrsq7KKlC90R1QSKD\nIAhfXHQppZddBOEmjDEMGDAAbdu2xUcffYSHH3447yIRBJETJDIIgvCFRAZBEFHYfPPNMWLECHDO\nMXr0aDQ2NsY6zoQJE3DYYYeha9euqK+vx9Zbb40hQ4bgvffe026vBttOnToVxx57LLbYYgvU1tbi\n/PPPBwBMnjwZhUIBPXv2BAA899xzOPjgg7HJJpugU6dOOPTQQzF16tTm4y5fvhwXX3wx+vTpg7Zt\n22KbbbbBRRddhDVr1mjLMXfuXNxwww04/PDD0bt3b7Rr1w4dOnTAbrvthtGjR2PZsmWR68IU+C2C\nrIP+HnroobJjrlq1CldffTX22msvdOzYEW3atEHv3r0xcuRIfP31177lGT9+PPbdd1+0b98em2yy\nCQ466CA8/fTTkX+XLWbOnIlhw4Zh2223RZs2bdC5c2cMGDAA9913n/H+W7lyJR544AEMHjwYO++8\nMzp16oS2bduiV69eGDZsGL744gvfc65duxZXXHEFdthhB7Rp0wZbbLEFTjjhBHzyySdp/MTKhHNO\nf5b+AHCvSgmi8gG8v8svz7sk5ey/f7F8BGELasOTceqpp3LGGD/hhBP4kiVLeIcOHXihUOD33ntv\n2bZDhgzhjDF+2mmnla1rbGzkv/zlLzljjBcKBd6qVSveuXNnXigUOGOM19TU8DvuuKNsv1mzZjXv\n8+ijj/JWrVrxQqHAO3XqxFu3bs3PO+88zjnnr7zyCmeM8R49evCxY8fyQqHAa2treceOHZvP0bZt\nWz5lyhS+cOFCvtNOO/FCocDbt2/P6+vrm7c58sgjtfVw7LHHNpejvr6ed+nShdfW1jbvt9122/G5\nc+f6ll9l9OjRvFAolNXXXnvtxbt166b922yzzZqP9+CDD5bs9/HHH/Mf/OAHzevr6up4+/btm8vY\nuXNnPmXKFO3vO/vss5v3q62t5Z07d+Y1NTW8UCjwMWPG8O7du/NCocAnT56s3d+EuH8GDhwYab9/\n/vOfvE2bNs1lEtdb/JZDDjmEr169umy/2267reQe69KlS8n13WijjfiLL76oPefKlSv53nvvXXKd\nxf3Tvn17/sgjjxivpR9R2yBp+9zHwLo/smQQBOFLbW3eJSiHLBkE4TadOnXCeeedB845rrjiCqxb\nty70vtdddx3GjRuHQqGAK6+8EkuXL
sXixYvx9ddf4xe/+AUaGxsxYsQIvP7668ZjnHHGGTj66KMx\na9YsLFmyBKtXr8a5555bss2CBQtw/vnn4+KLL8bixYuxdOlSfPnll+jXrx/WrFmDc889F2eddRY2\nbNiA119/HcuXL8eKFStw7733ora2FpMmTcKzzz5bdu6+ffvi1ltvxfTp09HQ0ICFCxdizZo1eOWV\nV7DXXns1v3WPijemLOWtt97CvHnztH+nnXYaAGDjjTdGv379mvdZvnw5jjjiCMyZMweDBw/GBx98\ngDVr1mD58uWYMWMGTjrpJCxduhTHHHMMli9fXnK+8ePHY+zYsWCM4YILLsDixYuxePFifPPNN/jl\nL3+J3/3ud1i4cGHk3xaXmTNn4oQTTsDatWsxcOBAfPbZZ1iyZAlWrFiBu+66C/X19XjxxRcxcuTI\nsn27dOmCSy65BG+//TZWr16NhQsXoqGhAZ988gmGDBmCVatW4cQTT0RDQ0PZvueeey7efvtttG3b\nFg888ABWrlyJpUuX4oMPPsAOO+yAX//611n8fPfJW+VU0x/oLRhRRQhLwXXX5V2ScgYO5GTJIKxD\nbXgyZEsG55wvX76cb7LJJs1vuGVMloyVK1c2W0AuvvjisnNs2LCB9+/fnxcKBT5gwICSdbIlYP/9\n9zeWU1gyCoUCP/3008vWf/XVV81vs1u3bs1nzpxZts3pp59u3N+PpUuX8q5du/Kamho+e/ZsY/lV\nRo8ebbT86BBv0mtra/nTTz9dsu7iiy/mjDE+ZMgQ4/6HH344LxQK/MYbbyxZvu222/JCocCHDh2q\n3e+QQw5p/g1ZWDKGDh3KGWO8V69evKGhoWz93Xff3Wz9mjFjRqTyHHLIIbxQKPCHHnqoZPns2bOb\nLTfqOs45X7JkCe/atStZMjhZMgiCCMDFmIwCtVyEozDmzl/etG/fHr///e/BOcc111yjfSOs8sIL\nL2D58uWoq6vDBRdcULa+UCjgj3/8IzjneO2117BgwQLtcUT8RRAXXXRR2bKtt94avXr1AmMMxx13\nHHr06FG2zUEHHQTOOf4XcbKejh07ol+/fuCcY8qUKZH2Dcu0adNw+umngzGGq6++GocffnjJ+oce\negiMMd86OvHEE8E5xwsvvNC87P3338fMmTMB6OsNAEaNGmXhF4RnwoQJzb+lvr6+bP0ZZ5yBLbfc\nEpxzPP7445GO/ZOf/AScc7zxxhtl52xsbMQWW2yBk08+uWy/Tp06kSWjCeqqCYLwhdylCIKIy4gR\nI7DZZpth/vz5GDNmTOD206ZNAwDssssu6NChg3ab/fffvzm4W2yvsu+++waeq76+Htttt512Xdeu\nXQEAO+20k3b9ZpttBgBYunSpdv0777yDoUOHYocddkD79u1LgrCfeuopAMC8FPKDL1y4EEcddRTW\nrFmDE044oUyoff31181B3Ycffji6deum/RPuRXPmzGneV9T1Zptthl69emnP369fP9Rm1GnMnDmz\nOYj+gAMO0G7DGMMBBxwAzrn2Xpk7dy4uvPBC7LHHHujUqRNqa2ubr9N5550HxljZdRLH6d+/v7Fs\nAwYMiPmrqgsHhw8EQbiEi5YMEhmEq3BK819CmzZtMGrUKIwcORJ/+tOfMHz4cLRv3964vfDn33LL\nLY3btG7dGl26dMGCBQuM/v+bbrppYNmEUNAhRIxp8juxXhdrcsMNN+DCCy8s2bZz586oq6sDACxb\ntgxr1qzBqlWrAssYhfXr12PQoEH4+uuvseeee+K+++4r2+abb75p/h4UO8EYK7E+ie232GIL4z51\ndXXo0qUL5s+fH7X4kZHL73e/bLXVVmXbA16WsSOPPBIrV65sngSwQ4cOzRaRhoYGLF++vOw6LVy4\nEIwx33rwK09LgiwZRNXBOWDILEjEwEWRQe5SBFE5DBs2DFtvvTWWLl2KG2+8MdQ+pvSwYclr5uiP\nP/642ZVoxIgR+Oijj7B27VosWrSoOSD7mGOOAWB/4rnhw4fjjTfeQLdu3TBx4kS0bt26bBs5net3\n332HDRs2+P7NmDHDahnTIur9sn79+ubg7kMPPRSvvfYaGhoasGTJkubrdOONN8oxt0QMqKsmqo5T\nTgHatAFmzcq7JNWBiyKDLBkEUTnU1dU1x1HcfPPNWLx4sXFbYYH46quvjNusXbu2+RhhLBZZ8sQT\nT6CxsRGHHXYYbr75Zmy//fZlgieNt/y333477r33XrRu3RoTJkwwWmBk683s2bMjnUPUtZ+b17p1\n67Bo0aJIx42LfO397hfhHiZv/+abb2Lu3Lno3LkzJk6ciH79+jVbmgSm67TpppuCc+5bD2m4wlUi\nJDKIqmPcOO9z/Ph8y1EtuBiTQZYMgqgsTjvtNGy77bZYsWIFrr32WuN2u+22GwDg888/L3HtkZk8\neTLWr19fsr0rfP3112CMYdddd9WuX716dclEfzaYPHlyc/zAnXfeib333tu4bffu3ZuFxjPPPBPp\nPKKu58+fb5yobsqUKc3XJm169uyJjh07AgBefvll7Tacc7zyyitgjJXcK0J49O7dWxswDgD//ve/\ntcvFcfxSKE+ePDn4B7QAqKsmqhYX38BXIiQyCIJISk1NDUaPHg3OOe644w6jgDj00EOx8cYbY926\ndfjTn/5Utr6xsRFXXHEFAC8AXARou0KHDh3AOceHH36oXX/llVdixYoV1s43a9YsHHvssdiwYQNG\njhyJU045JXCfU089FZxz3HDDDcbrIJBnJ991112bA+Wvu+467fbXXHNNhNInZ9CgQeCc45ZbbtG6\nTN1zzz2YO3duc6YwgUgq8Pnnn+P7778v2+/555/Hyy+/rHW7GzRoEAqFAubOnYvxmreZS5cuxZ13\n3pnkZ1UN1FUTVYuLg+NKxEWxRu5SBFF5nHjiiejbty8aGhrw0ksvaQdwbdu2xahRo8A5x5gxY3D1\n1Vc3B97OmzcPxx9/PN544w3U1NTgyiuvzPonBHLIIYcAACZNmoRrr722OXB60aJFuOCCC3Dttdei\nS5cuVs71/fff46ijjsKSJUtw8MEHh453ueiii9CzZ08sXLgQ++67L/7+97+XDNDnzJmDu+++G7vt\ntltzJiyBEIr3338/LrroomYRsmDBApx22ml45ZVX0K5du0S/a926dc2T/Jn+hLVk1KhRaNeuHebN\nm4cjjjgC06dPb66be+65ByNHjgRjDGeccUZJKuL99tsPbdu2xeLFi3HyySfj22+/BeDFdtx///04\n9thjjddpm222wdChQ8E5x7BhwzBu3Ljm8nz44Yc47LDDsHbt2kR1UDXkPVFHNf2BJnJyAjFJ25//\nnHdJKpfGxmI9Pvxw3qUp52c/4zQZH2EdasOToU7Gp+OJJ57gjLHmP93kchs2bOCnnnpq84R4tbW1\nvHPnziX/33nnnWX7+U1mJyMm4+vRo4dxmwMOOIAXCgX+4IMPRj7GMccc01zWQqHQXPZCocDPPP
PM\n5nq67LLLQpdfNxmf2J4xxjt16sQ333xz499jjz1WcrwZM2bwHXfcsaROu3Tpwtu2bdt8TNNkcyNG\njNBem0KhwG+99VbevXv3RJPxhfmTj/3Pf/6Tt23btrlMnTp14nV1dc2/4dBDD+WrV68uO9+YMWOa\n92GM8Y4dO/JWrVpxxhjffffd+W233WacHHDlypV83333bd6/vr6ed+zYkTPGePv27fnf/vY3moyP\n02R8RBVDloz4yFkZuYOJNchdiiDchDHmm9lp0KBB2H333Zu3021bKBTwl7/8BY8//jh+/OMfo1On\nTli1ahW22GILnHTSSXj77bcxbNgw3zIkLWcYTMd47LHHcO2116Jv377NwcT9+/fHQw89hLvvvjuw\njKZ1uvOJZcuWLcOCBQu0fwsXLiybCLFnz5547733MHbsWBx44IHo3Lkzli9fjlatWmGXXXbBsGHD\nMGnSJAwZMqSsHGPGjMFf//pX7LPPPs3xDAMHDsSkSZPwm9/8JvD3+SHfF6a/gtIB/PSnP8WHH36I\nM888Ez169EBDQwPatWuH/v3745577sGzzz6LNm3alJ1rxIgRmDBhAn70ox+hXbt22LBhA3bYYQdc\nccUVeOONN7DRRhsZr3G7du3wyiuv4PLLL0efPn3AGEObNm1wwgkn4J133sE+++yTqB6qBcZdHEFU\nKIwxz5xBdZor4pm+/XZg+PB8y1KprF4NCIv3X/8KnHRSvuVRGTQIePJJ7zs9boQtxICA2nCCIPIg\nahskbe+kmqH3gUTV4mIsQaVAlgyCIAiCIJJAXTVRtZC7VHxkkSHN3eQMLdwCTRAEQRDOQyKDqFpI\nZMRHTnNOlgyCIAiCIKJCXTVRtZC7VHxcd5ciSwZBEARBuA2JDKJqIUtGfFy3ZJDIIAiCIAi3IZFB\nVC1kyYiP65YMcpciCIIgCLehrpqoWkhkxEcO9nYx8JtEBkEQBEG4DXXVRFUhv3UnkREfWVi4aMkg\ndymCIAiCcBsSGURV4XosQaWwYUPxu4v1SJYMgiAIgnAb6qqJqsL1WIJKgSwZBEEQBEEkgUQGUVWQ\nyLCDyzEZr74KPPBA3qWwx8SJwEknAWvW5F0Sf8aOBUaOpOeKIAiCCAcl+SSqCnKXsoPL7lIDBuRd\nArscfbT3ueeewLnn5lsWP84+2/v81a+AHXfMtywEQRCE+5DIIKoKsmTYwXV3qWpk4cK8SxCOtWvz\nLkH6MPLHIwiCSAy5SxFVhSwyXHPzqSRIZGSPfO+6DI2/CYIgiDA4JTIYY1syxu5njM1ljK1hjH3J\nGLuJMdYx4nE6McZubtp/TdPx7mOMbWnYfhZjrNHwN8/OryOygNyl7CC7S5FYy4ZKERnVDOc89F/b\nthxA6d8LL4TfP80/UZ7hw/MvC/2l/7fRRsVrnndZRDkuvDD/slTyX7XgjLsUY6wngDcBdAEwEcBn\nAPYCMBLAjxlj+3HOl4Y4Tuem42wH4CUAjwDYHsBpAI5gjO3LOZ+l7MYBfAfgJgDqe7qVcX8TkT3k\nLmUHsmRkD4mMykJn0XHtWXGtPEQ6uHidXSwTkT0uWTLugCcwRnDOj+Gcj+KcHwxv4L89gKtCHuca\neALjRs75IU3HGQTgHACbARhr2O87zvkVnPPLlb8/J/tZRJaQyLBDNYiMxx4Dtt4a+O9/sz3vWWcB\ne+wRXTTIVri0efNNYKutgH/9K/q+5C5lxjWrXyU8u0uWANttB1x9dd4lSQbnwGGHAYMG5V0SgnAH\nJ0RGkxXjEACzOOeqCLgUwCoAJzPG2gQcpx2AIU3bX6asvh3AbHhWke4Wik04iMtZkSqJaqjHwYOB\nr78Ghg7N9rx33QX85z/A229H2y9LkTF4MDB3LnDkkdH3JZFhxrVnxTXRo+Ouu4AZM4CLL867JMlY\nvRp47jngySfzLokbuPYsEPnghMgAMLDp83l1Bed8JYA3ALQFsE/AcfYB0AbAG5zzVcpxOIDnlPPJ\ntGaMncQY+wNj7BzG2AGMMVfqhwiJ3LBRIxefarBkCPIqvyzUwpCluxQJhXRwbVBf6c8uQRCVjSsx\nGX3gxUVMN6z/HJ6lozeAlwOOg4DjsKbjqGwO4CHpfwbgS8bYaZzzV33OSTiEy5PIVRJUj8lxWWS0\nbh1/XxIoHroBvGuDetfKo6Na7if1BVe1/K64VMK9R6SPK2/qOzR9LjOsF8uDskzFPc79AA6CJzTa\nAdgZwJ0AugN4mjG2c8B5CUcgS4YdKsmSESSC8urso4qMLN2l6uri79vSB09+uCbIXX92geq5n/J8\nMePidXaxTET2uCIycqUp4PsVzvlCzvkazvnHnPPhAP4Mz01rdL4lJMJSCYPj5cuBZSYZ7AiVFJPh\n2sBOMHduNOtElpaMRYuibb9gQTrl0DF3rrvXNAjXyu1aeXS4JjIWL/biK6JC1l+CKMcVkSGGXB0M\n68Xy7zI6juDOps/9Q24PwJst1vQ3evToKIciIuK6JYNzoEMHoGNHN8snqASxJsjSAhCFU0/1skyF\nJSuR8eabwPz54bf/5htgs83SK4/Mk096Wa+yDtaPA7lL2cElkbFyJdClC9C5c/R9SWSUUgn3XqUw\nevRo45jSdVwRGZ/BHCsBAL2aPk2xFvJxEHAcv9gPlYVNn+1Cbg/Af0InEhnp4vrg+Pvvi9+jutNk\nSSV1mGvX5l0CM1HS52bVX4wfH237KVNK/0/zubrlFu/zwQfTO4ctdKLQtWfFxTZQpeDKKATA7Nne\nZ5w2RW7PXW7bicpj9OjRFTtpnyuPtwjmPlRdwRjbCMB+AFYDmBpwnKkAGgDs15TOVj4Ok47vFzwu\ns2/T58yQ2xM547olo6Gh+N3lydcqyV1KrtNKJqvBVtTzqJaiNAfSrt9rgsZGvQXNtfK7Jnp0uPQy\ntjZBKhyKySjFxTIR2eOEyOCcz4SXvrY7Y+w3yurL4VkSHuKcNw8nGGN9GGN95A2b0taOA7ARyuMo\nRsAL5H5WnvGbMbY9Y6ytWqamuTRug2f5GBfjZxE54LolQx4Qu+rmA7hfjzKVLDLyeOMZdVBHIqMc\n0wsC1wb1lVCfLomMmpr4+1aS9ZcgssKVFLYAMBzefBi3MMYOAvAJvHkvDgDwKYBLlO0/gScA1GZh\nVNM+5zPGfgjgbQB9AfwMwLcAVBEzGMBvGWOvwpusbwWAbQH8BEBrAJMA3Jj41xGZIHeqLjb0lWLJ\nIJGRDfIAPqv7Naklw/X7IQtM7jSu1Y1r5dFRLSIjT3epSrjORMvEGZHBOZ/JGNsDnuXiMACHA/gG\nwE0ALuec6/LxlD1anPMljLF94M0UfhSAHwFYDOA+AJdyzucpu7wML4bjhwD6wbOafAfgNXjWk4ge\nzESeuD44lrOWuCwy5E7SJbGmG9wFZYL5z3+A777zgu1dQ74HshqYRB1IkSWjHDm2SsalZwVwrzw6\nXBUZUee6IEsGQZTjjMgAAM75XACnh9zW+D6Oc/4dg
POa/oKO8yoAmmyvSqikmAxyl4rO+eeXLwtj\nydh//2hB2Fkhi4ysBiZRB3Wq+CGRYRYZrpXftfLocElkyGzYEC1Gg2IySnGxTET2OBGTQRC2cHVw\nLCB3qWSMHVu+LIzI+PBD+2WxAblLVSamFwSuvcGuhGvlUnYpub6its+UXYogynHo8SaI5Lg6OBZU\nisig7FLZkIe7lDqoCzovuUuVYxIZrpXfNdGjwyVLhnz9olqayV2qFNeeBSIfSGQQVQW5S9mhkjpM\n10WG332YhyVDHdQFiV0SGeWYhJlrz0ol1KdLIkO+flFfAlVSm5kFlXDvEelDIoOoKlxv6Csl8Nt1\ni5BMUOB33vhd5zxiMlRLRtDEY1m6S7l+rwnIXcoe1eIu5XrfQxB54NDjTRDJqUZLxksvAbffnk55\nTJjcpf77X+Dcc4H338+2PH6oloz164FrrsmnLDpMQcJA6UBm3TrguuuAd95JtzzqoM6vfEC2loxK\nIYy71K23Ai+HnfY1JSrhWoWxZCxeDFx+OTBnTrplSeIulUdMxuOPAw8/7GZfF7VM334LXHYZMH9+\nOuUxsXIlcMUVwOefZ3veloJT2aUIIimuv4GX3xqHfVN20EHeZ79+wA9/aL9MOkz1OGwYMHUq8MIL\nwEcfZVORaufuAAAgAElEQVSWIFSRcf/9wKhR+ZRFh98gXh7IvPmm9weke++qKWyDRAZllyonyF3q\nzTeBc87xvuf5m6pFZJxxBjBxIvDII8Ann6RXlkqyZHAOHHec990ll7O4HHUU8NZb3rPz7LPZnfeS\nS4BbbgGuvtp919tKhCwZRFXhuiUjic/vggV2y+KHSWRMn+59fvxxdmUJQh0kz5iRTzkA/T3nN4jP\n4x5VByRBIkO9T8ldKtiS4cpb0WoRGVOnep+ffppuWSopJkMun4vPTdQyvfWW9/nuu/bL4oewyq9Z\nk+15WwokMoiqwnVLRhJzfJaYOsxOnbIvSxDqILlVq3zKAUQXGXkMAl12l3LxmdURFJOxfHl2ZfHD\n5TZG4NJb+Epyl3I9Fi0uWU+aWiltTqVCIoOoKly3ZCQxx2eJKSYjz1mzTZ2+OkiOMoGWbSpRZAQF\nfpO7VDlBlgxXREYlzNdAgd/xqFbXng4dsj1fpbQ5lYpDjzdBJMf1DB9J3pRl+cbPZBHaeOPi96xF\nkqlTdcmSobvn/AbxleAuRZaMcoJiMlasyK4sflSCyKgWSwaJDDvk+SKLsA+JDKKqcN2SIXc+RxwB\nrFoVft+f/CT97CqCMG5nS5dmUxZBWJHhmiXjxBOBm2/2vo8fDwwaVPT/dcGSEVVk5PVcPf008NOf\nepmG8sY0AB0+HLj2WncsGeQuFQ353h43Ltq+WbtLuS4y4rYT5C5VXZDIIKqKSorJALw0l2FZvx44\n80y75TEhd5KmN3RZv601+SC7JDJ0ouG//wXOO8/7PmQI8OSTXgYsIJ97tFJjMn7yE2DSJOD//i+9\n84fFb/D+hz9Ee3lgm0qJ+9KRd5st39vixUCcfbN4eVCtMRlt22Z7vrzvuWqHRAZRVVSayFiyJNr+\nWeUQN9Vjnu5opjd3qjuSa5YMHStXep+VYMlQy5i3u1TWFjQdQW+q8xzcJ8mQlAfyNc/bvStJn0Hu\nUqW42P/qqJRyViokMoiqwnV3KbVMUTvVrAYvYURG1gOCSnCXCju4EHVaCZYM9Trn/VzlfX6g+By2\naaNfn2d60UqzZJjalzxIcq3IXaoUF55TIn9IZBBVRaVZMqJ2RlkN7E3ZpfJ861ipgd9+2+U9qAIq\nL7uUC8+1GLx37apfL+fcz/oaV0oGOwFZMuLhusiIS56inLAPiQyiqqg0S0aSXOxpYuow83SXMvkg\nr15dWhbX5snwW5fHPapet0pzl3KBIJEh36tioM95Nr8vrrtUVnWvnidMm5JV2ZLc26Y4trRwPSZD\nttZGuX4kMqoLEhlEVVFJKWwB4PbbgX/9K/z+Wf2mSnKXevppoG/f4hv5mprsyqTid3222674XdRp\nHveoes6oloy8Z/x2YVAg6mTTTfXr5Xt13Tpg5EjPTa1QAF59Nd2yxXGXuvtub34CMftxWsybB3Tu\nXBq8Xw2WjDlzgB//uPg/uUsVOeQQYPfd3eyPATfak2qGRAZRVVSaJQMAjjwy/P55u0vlKTL83rh/\n9pk3gAHMKTGz6OT87rmZM8vL4oIlI2ggmqW7VKUg6qxTJ+DnPy9fL79lXr8eGDOm+P+JJ6Zbtjju\nUsOGednizj03nTIJbr0V+O474IorisvCtClZpbmV665Tp/D7qZmosnhGgl4O5I2oyxdfBN57z7vu\nUfbLChfHCdUEiQyiqqi0mIyouBT4nfVgM2jAJAYopnJl4Z8etU5csGQE1Qu5S5UjnsPaWmDiROCC\nC0rXyyls1fpt3TrdsiXJLpXH/ehq4Pc224TfTxVBWfyOIDdH13D12Xa1XNUCiQyiqqhES4Yfamfl\nUkxG1paMShAZYa9vntmloooMcpcqR9SJyGSmZuwSKYqB8sFg2iIjSXaptOtWZ5HIs01RifsSRb3+\nJDLKceG5JbKHRAZRVVSbJUPtdPMQGa64SwUNmIIyNrlkycgzJiNq8gGyZJQjWzKA8sGzLDIWLSpd\nl6XIcM2SoRMZrsZkRKkL9Xdl8Ttcd5cCwr/0y/PlYKW0OZUKiQyiqpAbjLffBqZNK00nmTeVYskI\nk8JWlO3774HPP0+/TEEDpoULvU9THWfx5i+qJUM3kEk7O5bJkvHFF/qBiwspbF17eSBEhkgyoL7J\nXr68+F2dQDNLd6mlS4GPPoq3bxZMn176XJrO/+232ZTHlsjI05LhStYpNauUX9+Vp8ucC+1JNUMi\ng6gq5Abq6ae9rBZ77JFfeVSiNqCVYMk48kigd2/g2WfTLVOQyBgwwPusJEuGroNTB6y20YmMN94A\nevUCDj64fHsXJuO75JJ8z6+iukv5BSZnLTLU+tlpJ2Dq1Hj7psk//gH06QP89rfFZbr2bfLk7MpU\nDSJjr73SP3dYwk5Q6JLLHGEXEhlEVaHrJKO8yUubanKXEt+ff977HDcu3TKF9S+vpJgMXVnTvsa6\n7FLjx3vfX389ePs8LBnXXJPeOeOgukv5CcO8RQYAzJoVf9+0GDu2fJnu3n/ssfTLIogbk5GHu5RJ\nZLjS33FeWod+7XeeIsOFlxbVDIkMoqrIOztJEEndpbLKLmWaWMqvM0h7EB/m+GrHFnX/pNiwZGQl\nMsRgd926UveeoPLkHZPhwqBAdZdyyZKhuz5h/fezjMnQpTTNu/2uBkuGS4QVD3lmLXShPalmSGQQ\nVUXenVQQSS0ZLk/Gl/YgPozA2rChskSGbvu0Z4ZWRcb69dFEBmWXimbJUOMJ8rBkhB2QZlm3OpGR\nt6sMiQx7cF56PcmS0TIhkUFUFa43GNXoLiVI28oSRiSsW1dZ7lKm7dO8zqJ+6uq8zyBLRpbuUpVC\nUApbGRfc
pcIOSLO8tsuWlS+rFpFB2aXKrcquxmS4PmaodEhkEFWFywOgZ58F3nkn2j5ZZ5fiHHjk\nEW8GbcGDD3pZasR6U1lccJdat87caVSKJQPIRmTI7lJycO0TTwAff2wuS0t2l5o/33sexGR7Ydyl\nVEuGEHdpobs+jz4af1+byPUk2pQszx8EWTLs8cADwJIlxf/DiowPPwQmTEitWM289x5w3nnABx+k\nf66WTG3eBSAIm7j6VuLbb4HDD4++X9buUpMmASeeWL78uuuAa691312KLBnhzy0Gu998U7r+2GNL\ntyN3qSL9+5emaw5jyRCplQX19fbLJaOrn9deA95/H9h11+j7poXuTXxWMWcmbAV+k8jw2o3Bg4v/\nh3WXmjsXOOYY4N13veyQabHbbukdmyhClgyiqsj7TZiJxYtL/99+e+DPfy7+b2qAszYdT5umXy4G\nVuQu5U8lWjK++irc9qb/beLqSwKBOh9Mmzbep58lY8WK0v/TngdFrsMddyx+/+KLaPvmQd5zGlWS\nu5TrIgPw5qoShLVkCGbMsF8eIntIZBBVRd6dpAn1TeeQIZ6ptnNn739dECSQvWgydQQNDd6n69ml\n8hYZlWDJUGMy1JiBoLLkLTJcesaFyPCzZKiTo6VdfnF9unUDjj8+3r5p4SfGgGI7kxdxRYZ6/cmS\nUU5UkRF0rxCVAYkMoqpw1ZKhdkKiARUiQ7V0CLK2ZJisEcIHndyl/LFlyUjTKqSKjKDZlElkmGnb\n1vuMMiBKu40S9cNY9PiPvOs279mqKSYjPcK6SwlIZFQHJDKIqiLvTtKEKjLE/5ts4n3KAXIyWYsM\n0/mEpYXcpfxJOg+KIEt3qaDBilpGV5+xPAhjyVDJInkDEE9k5P2SJm9LRiVNxud6dikVsmS0TEhk\nEKFpbAR+9zvgH/8Iv8+0acBZZ5kH0bbJu5M0IbLQCEQDKkSGyZLx6qvplUmHqSMQ5VMtGffdV/w/\nbZER5vhjx3oB6jpctGSYBuw33minPIJ33/Wew7feAm67zVsWNpWqek+MHAn89rfpPGtyfTz2GPCH\nP3jnMm0j+OQTzwVx0CD9rOVpESYmQyXtNkocv1AoLVeY+5/cpYrfGxuBl14CjjsOmDLFf7+sLBkr\nVgDDhwNTp8azZHz7LfCrX+UzK3hUkRFFuBPuQtmliNA884w3+LnxxvBvM0V2iMZG4O670yubwNW3\nrCZ3qQ4dvE9dzngAOOOM9Mqkw9QRiPKpKWzl8rkQkzFmTLL9kxL2/hOdqmkw8qc/AVdfXcxelJQ9\n9/Q+77qruCzsW25dGf/8Z2DAAOBnP0teNhm5/uTMNKZtBAcfDMyb531/8sns2gHhLhVlQJSlu5R8\nrjAD+Lxf0rgkMjZsAE4+2buvZswwJ8UAsrNkXHEFcMcd3t/OO0ff/5RTgOefB556KjgWyzZ+L4l0\n9UUiozqgy0iExvS2PQyzZlkrhi95d5Im1EGP6JTkCdFcwNQ5irdmfu4EWYmMq64qLvvb36LvnyZR\n7z+/wXDa93JcS4ZAN89BXgiBkTVxLBlZukvJ5wozgM87haxrMRnivgrqv9Trn5bIlTPBxWkfRLan\nBQvslCcKLrlLufoyshohkUGEJslDn9VbCVcbD5PIEG+q8+7cBX4iI2gG16wCv3v3Li6rrwe23jrc\n/i5ZMoICv4PW2SCsJcN0T6TxTLv6/JqIE5ORlSWjUCi9dmEG8Gk/I667S5leogSVOyuRIRNHrJqy\nGGaBS4Hfrr6MrEZIZBChSTKoyEpkuNp4qOUSDajIme+6JQMoD6rOy5IhuxA1Nobv0F2yZATFZEQ5\nVlySuEsBJDIAN7NLieOHtWTIdZ53O5S3yDBllwq617OKyZDP43eOoOdoo43slCcKLlky8r7PWxIk\nMojQJHno1cDntHBVZFSKJcOv8f3+e39LRlbZpeTJzKJc7zxFhulNZ56WDNldqq7OPJAyDQ7SeKbj\nprBNe4I7E8KSEeXed81dSi572oN81y0ZJpERdK9nNU9GWJERdI916mSnPFFwSWS40t+2BEhkEKGR\nH/qHH/bcVO64I9y+Ld1dKoolY/VqoF8/+xmGguC8NFuUSpDIyMpdKq4lY8QIL0D4l7+0XzaBHFgt\nU19f+r9rloy6OvNAPUt3qTDo6kwM9rMmzksCl9yl7rkH2HXX4v8rVgATJqRbPj90ZcwyjbctS0Za\n/ZB8Hr96Caqzjh3tlCcKLrlLkcjIDhIZRGjkh/6SS4Cvv/bS6YWBLBml//uJjPHjgTff9NIFZ0lQ\nYL8qMtS6TntyKNFxyvdS1Ov94ovAuHHpDVxMGdRMIsOVmIxWrcozWQWV0SV3qajzQdhis828zygC\nO0t3qdNPLy5fs6Z821/9Cvj449JlxxyTXtmCBo66NiRL1xaTyIg64M2iH/I7R9AgOg+RQZaMlgmJ\nDCI08kO/fHm0fbMSGa5aMqK4S+XVAOoa+qefBrbZxvsugr8FaqfR0JBu/cv5/9VlUcl6ksOwlozN\nNy/6S6c9UJEtF61alVsyxPlFXanZqNIQGWF+s+4ey2PirrPPLrZrYZ7ZESO8zyxT2G69NXDnnd7/\nlTBDtO7aZikyTC9Rgu5119ylgu7HSnCXStNSSiIjO0hkEKGRG7ioDQAFfpf+72fJyGumU10n0Lp1\n8S1xkLsUoH9bagvZDUQuQxxhk3UnE8WSIQbzaQshWVTo3KVEHYlyqPelSzEZeSBbT8IMhEWQeFYx\nGeI5adfO+6wEkaGrmyyfVdO9VWnuUro6k6+/uBezhNylWiYkMojQyA1t1AFGS4/JiOIulZfIML1N\nEoOptWuDRUaagZuyG4i6LCppDPT87j3VCmCyZHBefLaytmSo7lKijkQ51EFqXpYMHXk89/LvDzNo\nEfWdpbsUUPqSQCaPOgtq23R1k5e7lIwr7lJJAr+XLCl+z+PaR7VkpFlGEhnZQSKDCI3cwMnf1671\n4jNWrDDvm4W71IoV8Rp3v3LbQi2XGKDo3KXyeuOoq7uamuIgZfHi0m10DXWaIkN2AxHE7czT6GRW\nrTKvi2LJEPdG3u5SqiXDdA/bJK4lY9Ei+2UJOq/cpoUZCItnfdkye2XSIdqzIJHhd7/mhasi4/vv\nvXJ8/304a23eImPZstLJMles8ProMPsmwe/59RMZumcizfaPREZ2kMggQiMPKuTvYkK0jTcG7r8/\neN80+Owz7/zybNBhePRRb7+0MzmFtWSsXw+cc066ZTERZMkYMKB0mz/+sXz7NGfs1cVktGrljruU\n/KZQJazI4Dw/kaFaMlSR0aFD6fo03jTGERmvvWa/HDrUwW5UkSGeo8mTgf/+1165ZB5/3MtMBxTb\nGGFFU0XG2LHplCEJuns+ywGh6Zn75htg552BLbYA2rcvHzCr+2UhMvwG7b17A5tu6vWLkyd7fdye\ne4bbNwl+z6/pOi5d6vUtKnmIjPHj0ztnS4VEBhGaMDEZZ56pX562J
eOhh+Ltd/753mfamZxMMRmq\nJSOLN7ImgkRGGLKyZNx+O3D44cBxx8U7VhqdrJ/AUt2lRF3r3KXyEBl1dcHuUk89Vbo+jfLFES5p\nDdhV1IGJ3AaGERlyfd97r50yqZx7bvG7uOdMloy5c/XHOOQQ++UKi+6eUi0HaVo2/O6/zz7zrLnr\n1wMrV/rvl0VMRtDzt2GDV+Zrry1f55IlY/Zs/fI8REZeL/iqGRIZRGjCiIyNN9YvT9uSoQ6QwqK+\nYU6LKDEZeWFyl1IHyDrEW7IsREah4KVOfvrp+KlL03g76ncN1fvM5IIE5GfJUJ9R1ZKx227xJ0IM\nSxxLRlbuhX7uUvL91LWrfn+5jUrrpYs8kBNzh8gxVTKmZyDNdMBB94xuvdqm+FkMkxJWHATN8J23\nu5RgwwZ9PEkeIsN0v5n2SbP9M7XVecVDVjMkMojQyA+gqZM0iYy0LRlxRUaYAbQNwoqMPBu5JJYM\nMaDJOvAbcMddyu+Y6n0mBoOuWDJatSp/RsXvEeWoqfGfJ8UGcVLYZiUy1LKZ3KVMz4u8PAuRITII\nmSwZpjfLaWa/iiMyVAuhCyIjyHKRt7uUvI0rIsNU3jxEhqmtJpFhH2dEBmNsS8bY/YyxuYyxNYyx\nLxljNzHGIk0bwxjrxBi7uWn/NU3Hu48xtmXa5652TIHfMrLIkBuPtB9e1y0ZYd2l8sSGyEgzJkOX\nwlZeHoW8LRmirl2JyairKx/4ikGB+CwUgrOLJSXOtczLkmFylzI9L3J9p2XZ9bNkqPVkegZcExni\nxYWYQC5o0tAkhH3mgmIyssjelMSSkUdMhumcpt+RR3aprLJgtiScqFLGWE8A0wCcAmAqgD8DmAFg\nJIApjLFQU8cwxjo37T8CwBdNx3kLwGkA3mWMdU/r3C2BMA+9nH9b7njTHjDFfTPomiUjzxS8SUSG\nuO5z5gCvv263XAKblgybnezy5cArr/gPdk3uUrqy2xYZJp9nWZiHcZeqqTHPiGyLsJaMd94B5s3z\n/nfNXUrN0iWQ69tlkZFmWx1GZHz1FfDee8VlQmRstZX3OW8e8NJL6Vz3sG2J2n7kYcmoNHephgbg\nxRfLrd1kyahunBAZAO4A0AXACM75MZzzUZzzgwHcBGB7AGFzBl0DYDsAN3LOD2k6ziAA5wDYDIAu\nn4atc1c9cmNgahjExE9AaSeQ9gRUrrtLhZ2ML+uZqGWSxGSIAc1ZZwH9+3sZTWyjS2EbF5uWjIED\nvb8HHjBvEzYmIw1LRvfu+uVx3KVk8orJ+OQTYK+9gC2bbNNqrEFa+LlLbbdd8fvuu+v3z9qSIYS/\nKbuUq5aMH/zAi//59ltvmRiUiut9/PHAQQelk6wjrshwOSZDRx4iY/Ro4OCDgdNPD7cPpbCtDnIX\nGU2WhEMAzOKcqyLgUgCrAJzMGGsTcJx2AIY0bX+Zsvp2ALMB/Fi2Ztg6d0tBfuhNDYN426Run/ZD\n7brIMFkyVHepOD7ptjBZMtTUpTraKE9IGtYMk7tUHGzej9OmeZ+TJpm3UevHhZgM+d7XuUutX++V\nxyTu8nKX+uab0v/F4PmnP/U+TZaEpPi5S/3hD8BFFwHvvw/ceivw+9+X3w9yudKKyZDv6yBLhusx\nGbNmeZ/CBbNzZ+9TXIc777RatJJjB5GXu5StmIw83KUEjzxS+r/pnshDZJC7lH1cqNKBTZ/Pqys4\n5ysBvAGgLYB9Ao6zD4A2AN7gnJdMM8Q55wCeU85n89wtgjAiQ3545W3yFhmm8uYdkxHHkpFWB2ES\nGZtsEryvOohOYxDlqruUQLbiqaj14ycqsxIZshuczl1qw4bSeIygjDo2SBKTMbCpNU/L5cHPktGu\nHXDNNcAuu3iD4euuA3bcsXT7LCwZchkr1V1KhvOiJUN92ZFGn+K6u1TUc6xfX/o8HHZY+H3TKpOK\nS5YMcpeyjwsiow8ADmC6Yf3nTZ+9QxwHAcdhynFsnbtFECbo0xSHkZe7VNCAzZUUtqr/ux9ZdhA1\nNeFEhhyLI/azjauWDEEUkeGaJcPkLmVylQLyi8lQEYNncQ9m9ZY26D5U26QsLBk2Uti6Ysng3OtP\nGhu9ulSfrzSsBbYCv7Nwlwrz+1VLhrgH83CXiroPiYzqwAWRId5PaCaWL1kelOkpznFsnbtFID/0\npkw6rlkyggZsecVkiHKJcov6TOJnmxTTW3XhpuBHnpaMOKRxP260kXmdGjxvsmTIIiNtYR7GXUq2\nZKi4ll1K3IMbNmQzAA26x9X1WYuMSk9hK1sx2rRJd/4O+ZxhCHKPysJdKgyqyBB1mKe7lIpL2aVI\nZNjHBZFBVAhyYyCC8lRMlowJE9JtNEydtiwyFi0Crrqq1Kc7bZHx5ZfeOZcvL12uWjJefBGYORN4\nvsxxr5wsLRl5u0t9+qnnhrJmjd0UtrY6Wbmz8rOKhRUZQH6WDD93qTQtGR98AFx/famoiYIYPLdu\nXSznxIl2yibjl11Kh58lIwvfbz93qSlTgH/+U7+fS+5Snzf5ErgmMlywZISBLBlFHnrIm8AVAG67\nTb8NiQz7xAyXtYqwFpjCS8Xy71I4jq1zl8B87tRLL70Uo0ePjnI4Zwjz0MsiQ208nn8e+PGP7ZbJ\nj+7di2KosRE4+WTg2WeBf/wDeOstb3naImPffYH584HeisOduEU23bS47PrrgbvuCj5mlpaMmhov\n20sQ3bqV72eDHXbwPteuNVsyBg8Gbr892nFtWTJk8eiXUrOjYgv1c5cSdZflPBk1NeXXbM2abNyl\ndt3V++zSpfze7t69GABsQrgB1dUV9x80yP5LjaTuUlmksJURlgzZJbOx0Tv3fvuVby/mQUnTkhF0\nTVRLxsMPe9+XL3dbZOQRkxEGtZwtVWTMnw+ccor3/f33vRTIOlwVGaNHj8Zll6n5jCoDFywZn6E8\nVkKmV9OnKW5CPg4CjqPGX9g6dwmcc+NfpQoMIFwDIg/e1EZCzQpjE/VcP/kJ8PbbpW+FX33V+/72\n28Xt0m5U5s/3Pqcrd5A4b9++wNCh3neTdUgla3epvn2BM8/033fw4PL9bDJtmjnL0Q03eGkto2BL\nZMjHEYPdjh2Bjz/23qb/5z/A1KnmoNU8J+NTB72qkFi6NFt3qdmzy3/z5Mle6ks/hLhLexCa1F0q\nqj99UoSbI2PFuvGbMFIMQF1ylxLPydCh2YgM1yfja0nuUjbbv++k18QLF5q3c1lkmMaUruOCyHi5\n6fNQdQVjbCMA+wFYDW+iPD+mAmgAsF9TOlv5OEw6/svSKlvnbhEktWSoLjU2URvNo4/2rARZDdii\nIjdmJ5/sfS5ZEm7ftN2ldG9cRXpQE61bl74Z9xvMxGHtWrO7VH09cNxx0Y5nq5OVr4UQGYMGeRaY\nn//cy/e/997lb7XF4CmLyfhMyIPgQqG8XpcsycZdSqCLaenWDfjFL/z3y0pkJHWXyjJGDSh1czTF\nZcik/ZY7zLHV9eKZ2nnn
[... remainder of base64-encoded PNG elided; the figure is a line plot of the first 1500 normalized load samples, labeled 'Normalized Load' (396x255 px, see metadata below) ...]\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {
+ "image/png": {
+ "height": 255,
+ "width": 396
+ }
+ },
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "print df.describe()\n",
+ "array=(df.values- 147.0) /339.0\n",
+ "plt.subplot()\n",
+ "plot_test, = plt.plot(array[:1500], label='Normalized Load')\n",
+ "plt.legend(handles=[plot_test])"
+ ]
+ },
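+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "As a minimal sketch (assuming `df` holds the raw load values, as above), the hardcoded scaling constants 147 and 339 can be derived from the data itself, which keeps the transform and its later inverse consistent if the dataset changes. The names `load_min` and `load_range` are illustrative, not from the original code:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "# Derive the min-max scaling parameters rather than hardcoding 147 and 339.\n",
+ "load_min = df.values.min()\n",
+ "load_range = df.values.max() - load_min\n",
+ "\n",
+ "# Same transform as above: the series minimum maps to 0, the maximum to 1.\n",
+ "array = (df.values - load_min) / load_range"
+ ]
+ },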
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "listX = []\n",
+ "listy = []\n",
+ "X={}\n",
+ "y={}\n",
+ "\n",
+ "for i in range(0,len(array)-6):\n",
+ " listX.append(array[i:i+5].reshape([5,1]))\n",
+ " listy.append(array[i+6])\n",
+ "\n",
+ "arrayX=np.array(listX)\n",
+ "arrayy=np.array(listy)\n",
+ "\n",
+ "\n",
+ "X['train']=arrayX[0:13000]\n",
+ "X['test']=arrayX[13000:14000]\n",
+ "\n",
+ "y['train']=arrayy[0:13000]\n",
+ "y['test']=arrayy[13000:14000]\n"
+ ]
+ },
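+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "A sketch of the same windowing written as a reusable helper; `make_windows`, `window`, and `horizon` are illustrative names, not from the original code. With `window=5` and `horizon=2` it reproduces the pairs built above: 5-sample inputs with the target two steps past the window, at index `i+6`:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "def make_windows(series, window=5, horizon=2):\n",
+ "    # Slide a fixed-length window over the series; the target is the\n",
+ "    # sample `horizon` steps after the last element of each window.\n",
+ "    inputs, targets = [], []\n",
+ "    for i in range(len(series) - window - horizon + 1):\n",
+ "        inputs.append(series[i:i + window].reshape([window, 1]))\n",
+ "        targets.append(series[i + window + horizon - 1])\n",
+ "    return np.array(inputs), np.array(targets)\n",
+ "\n",
+ "arrayX2, arrayy2 = make_windows(array, window=5, horizon=2)"
+ ]
+ },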
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Train on 11960 samples, validate on 1040 samples\n",
+ "Epoch 1/10\n",
+ "11960/11960 [==============================] - 31s - loss: 0.0026 - val_loss: 0.0020\n",
+ "Epoch 2/10\n",
+ "11960/11960 [==============================] - 24s - loss: 0.0019 - val_loss: 0.0018\n",
+ "Epoch 3/10\n",
+ "11960/11960 [==============================] - 24s - loss: 0.0017 - val_loss: 0.0014\n",
+ "Epoch 4/10\n",
+ "11960/11960 [==============================] - 23s - loss: 0.0016 - val_loss: 0.0013\n",
+ "Epoch 5/10\n",
+ "11960/11960 [==============================] - 31s - loss: 0.0015 - val_loss: 0.0013\n",
+ "Epoch 6/10\n",
+ "11960/11960 [==============================] - 25s - loss: 0.0014 - val_loss: 0.0012\n",
+ "Epoch 7/10\n",
+ "11960/11960 [==============================] - 28s - loss: 0.0014 - val_loss: 0.0011\n",
+ "Epoch 8/10\n",
+ "11960/11960 [==============================] - 35s - loss: 0.0013 - val_loss: 0.0012\n",
+ "Epoch 9/10\n",
+ "11960/11960 [==============================] - 24s - loss: 0.0013 - val_loss: 0.0012\n",
+ "Epoch 10/10\n",
+ "11960/11960 [==============================] - 23s - loss: 0.0013 - val_loss: 0.0011\n"
+ ]
+ }
+ ],
+ "source": [
+ "#Build the model\n",
+ "\n",
+ "model = Sequential()\n",
+ "\n",
+ "model.add(LSTM( input_dim=1, output_dim=50, return_sequences=True))\n",
+ "\n",
+ "model.add(Dropout(0.2))\n",
+ "\n",
+ "model.add(LSTM( input_dim=100, output_dim=200, return_sequences=False))\n",
+ "model.add(Dropout(0.2))\n",
+ "\n",
+ "model.add(Dense(output_dim=1))\n",
+ "model.add(Activation(\"linear\"))\n",
+ "\n",
+ "\n",
+ "model.compile(loss=\"mse\", optimizer=\"rmsprop\")\n",
+ " \n",
+ "\n",
+ "#Fit the model to the data\n",
+ "\n",
+ "model.fit(X['train'], y['train'], batch_size=512, nb_epoch=10, validation_split=0.08)\n",
+ "test_results = model.predict( X['test'])\n",
+ "\n",
+ "# Rescale the test dataset and predicted data\n",
+ "\n",
+ "test_results = test_results* 339 + 147\n",
+ "y['test'] = y['test'] * 339 + 147"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 6,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAwIAAAH/CAYAAADkL2pWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAWJQAAFiUBSVIk8AAAIABJREFUeJzsnXmYFNXZ9u9Tvcy+sS+yagQ1atxQXBBxQ01MTDTRqJEX\nEFyiSdTEvMYo4Ke+0bhhEhc0bBIWDYpGEZfAiCiyCIIKiugwwMAAszLTe9X5/qila+3pnumZrp5+\nftc110xXV9Wc6q7l3Od+nucwzjkIgiAIgiAIgsgthEw3gCAIgiAIgiCIroeEAEEQBEEQBEHkICQE\nCIIgCIIgCCIHISFAEARBEARBEDkICQGCIAiCIAiCyEFICBAEQRAEQRBEDkJCgCAIgiAIgiByEBIC\nBEEQBEEQBJGDkBAgCIIgCIIgiByEhABBEARBEARB5CAkBAiCIAiCIAgiByEhQBAEQRAEQRA5CAkB\ngiAIgiAIgshBSAgQBEEQBEEQRA7SYSHAGOvBGJvMGFvKGNvBGAswxhoZY6sZYxMZY8xhuzMZY28x\nxuqUbT5jjP2GMebYJsbYDxljq5T9H2aMrWWM/aqjx0AQBEEQBEEQuQbjnHdsB4xNBfAMgBoAKwFU\nA+gL4KcAygG8wjn/uWmbHwN4BUAQwGIA9QB+BGAkgJc557+w+T+/BjATwCFlmwiAKwEMAvBXzvkf\nOnQgBEEQBEEQBJFDpEMIjAVQxDl/07S8D4D1AI4AcCXn/FVleQmAnQBKAJzJOd+kLPdDFhJnALiG\nc75Et68hALYDaAFwMud8t7K8DMAGAMOVfX3SoYMhCIIgCIIgiByhw6FBnPNVZhGgLD8A4FkADMBY\n3VtXAegFYKEqApT1IwDuVda/2bS7SQD8AJ5WRYCyTROAh5RtburosRAEQRAEQRBErtDZycJR5XdM\nt+w8ABzACpv1PwAQAHAmY8xn2gYO2yxXfo/rQDsJgiAIgiAIIqfoNCHAGPMAuAFyp/9t3VsjlN9f\nm7fhnIsAvgPghRzuk8w2+wG0AjiCMZbf8ZYTBEEQBEEQRPenMx2BvwA4DsCbnPN3dcvLlN9NDtup\ny8vbsU2Zw/sEQRAEQRAEQejwdsZOGWO3A7gDwJcAXFXekzHWsexogiAIgiAIgkgBzrltOf1Mk3ZH\nQCnz+SSAzwGM45w3mlZpa/ReXa7fLtltnBwDgiAIgiAIgiB0pNURYIz9FsDjALYAuIBzfshmta8A\nnALgaACb9G8oeQXDICcXf2va5kxlm09M2/QDUARgN+c8lGxbO1o2leh+MMbovCAs0HlB2EHnBWEH\nnReEGYd5dV1D2hwBxtjdkEXApwDOcxABAPBfyOU+x9u8dy6AQgBrOOfRJLe5VPn9fnvaTRAEQRAE\nQRC5SIcnFAMAxtifAUyHPIHYxTbhQPp19ROKnc0536gsz4M8odjpAK7mnL+s22YogG2QJxQ7lXO+\nS1leofzPYUhyQjE1R4AUO2GGRnIIO+i8IOyg84Kwg84LwozqCLg1RyAdMwvfAGA25HCev8E+Tr+K\ncz5Xt82PAbwMIAxgEYB6AJdDDv15mXN+tc3/+TWAp5R1FwOIALgSwEAAf+Wc351ke0kIELbQDZyw\ng84Lwg46Lwg76LwgzOSCELgfwH1trFbJOTdM+MUYGw3gTwBGA8gH8A2AFyHPHmzbKMbYZQDuAnAy\n5LCmL5X1X0qhvSQECFvoBk7YQecFYQedF4QddF4QZrq9EMg2SAgQTtANnLCDzgvCDjovCDvovCDM\nuF0IdOaEYgRBEARBEARBuBQSAgShcP/992e6CYQLofOCsIPOC8IOOi+IbINCgwiCIAiCIAiiE6DQ\nIIIgCIIgCIIgXEdaZxYmCIIgCCJ7cfssqAThFrpLZAk5AgRBEARBEASRg5AjQBAEQRCEge4y2kkQ\n6aa7uWbkCBAEQRAEQRBEDkJCgCAIgiAIgiByEBICBEEQBEEQBJGDkBAgCIIgCIIgiByEhABBEARB\nEARB5CAkBAiCIAiCIAgiByEhQBAEQRAEQRA5CAkBgiAIgiAIgshBSAgQBEEQBEHkOLt27YIgCPB4\nPJb3JkyYAEEQMGPGjAy0rHMYO3YsBEHAvHnzMt2UjEIzCxMEQRAEQRCOMMbSOqPu3LlzUVVVhSuu\nuAInnHBC2vabCuk+pmyFhABBEARBEAThSP/+/TFixAj06tUrLfubM2cOPvjgAwwbNixjQoCQISFA\nEARBEARBOPLQQw/hoYceynQziE6AcgQIgiAIgiAIIgchIUAQBEEQBJEGhg4dCkEQ8MEHH2D37t2Y\nPHkyBg8ejIKCAgwfPhy///3v0dzcbNlOn4wbiUTw4IMP4sQTT0RpaSkEQbBss2vXLtx2220YOXIk\nioqKUFpailNPPRWPPPIIAoGAY/vC4TAeeOABHHPMMSgoKMCAAQNwzTXXYNu2bQmPK5lk4RUrVuDK\nK6/EoEGDkJ+fj/79+2P06NF48MEHsWfPHgByboAgCKisrATnXNuv+jN8+HDLfqPRKP72t79hzJgx\n6NmzJ/Lz8zF06FBMmjQJ27dvT9jut99+G+PGjUN5eTnKysowevRovPTSSwm3yTUoNIggCIIgCCIN\nqAmoO3bswJVXXom6ujoUFxdDEATs2rULjz32GJYtW4bVq1ejb9++lu2CwSDOOeccbNiwAT6fD4WF\nhRAE45jt0qVLcd111yEcDgMACgsLEYlEsGnTJnz66adYsGAB3nvvPfTu3duwXWtrK84//3ysW7cO\njDH4/X4Eg0EsWbIEb775Jp5//vk2j8uOaDSKiRMnYsGCBdo6ZWVlaG1txbp167Bu3TqIooj77rsP\nBQUF6NevH+rr6xGNRlFaWoqCggJtX3369DHse//+/Rg/fjy2bNkCxhgEQUBRURF2796N2bNnY+HC\nhViwYAGuuOIKS7seffRR3H333Vrby8rKsGHDBtxwww3YvHmz47HmHJzznPoBwOXDJgiCIAhCDz0j\nO8bQoUO5IAi8vLycH3300fyjjz7S3nv99dd5nz59uCAI/OKLLzZsN2HCBM4Y4yUlJbxHjx785Zdf\n5tFolHPOeXV1NY/FYpxzztetW8f9fj/3+/38vvvu4zU1NZxzziVJ4mvXruWjRo3ijDE+fvx4S9sm\nT57MGWO8qKiIz5s3T9vn1q1b+ahRo3h5eTlnjHFBECzbTpgwgQuCwKdPn25575ZbbuGMMe7z+fiM\nGTP4gQMHtPeqqqr4Y489xmfNmmXYZuzYsVwQBD537lzHzzIajfLTTjuNC4LAL7roIv7JJ59obd6/\nfz+/4447OGOMFxcX82+//daw7erVq7kgCFwQBH7DDTfw2tpazjnnTU1N/I9//CNnjPHy8vI222BH\nqteIbv2M94HtfjLegC4/YLrJEQRBEIQt9IzsGEOHDuWMMV5YWGjpnHLO+cqVK7XO9po1a7TlqhAQ\nBIG/9957jvs/66yzuCAIlo61SkNDA
x8wYAAXBIFv3LhRW75r1y7u8Xi4IAh83rx5lu3q6+t5nz59\nUhYCX3zxhdbhfuGFFxzbbSYZITBr1izOGONjx47loijarnPTTTdxQRD4bbfdZlg+btw4LggCv+CC\nC2y3U0URCQFOOQIEQRAEQaQHxtz1k5nPgOHnP/85hg0bZnlv7NixOPPMMwEAr7zyiuX9E044Aeef\nf77tfr/99lt89NFHKC8vx8SJE23XKS8vxyWXXAIAePfdd7XlS5cuhSRJGDBgAK6//nrLdhUVFbj5\n5pvbPjgT8+fPB+ccI0eOxKRJk1LePhFz584FYwy33367JTxK5dprrwXn3HCsDQ0NWLVqFQDg7rvv\ntt3unnvuSWtbsxnKESAIgiAIgkgjY8eOdXzv3HPPxUcffYRPP/3UsJwxhtGjRztu99FHHwEAWlpa\nMHDgQMf1WlpawDnH7t27tWXq/zrnnHMStitV1q5dC8YYLr300pS3TYQoili/fj0AYMqUKbjlllsc\n1wNgONZNmzaBcw6Px4OzzjrLdrthw4Zh0KBBWhJzLkNCgCAIgiCItCBH4BKJOurqewcPHrS8Z07w\n1bNv3z4AQCwWw4EDBxL+f8aYoXrQwYMHwRjDgAED2tVmJ2prawEAgwcPTnnbRNTX1yMSiYAxhvr6\n+oTrMsYQCoW01+rnWlZWZkhENjNw4EASAiAhQBAEQRAE4Qo8Ho/je5IkAQBOPPFEi5vQ3VCPFQA2\nb96M448/PoOt6d5QjgBBEARBEEQaqampafO9RKP/dqjlRvVhMMnSu3dvcM6Tald72rRr166Ut01E\nz549NVGU6r7Vz7WpqcngFJhpz/F2R0gIEARBEARBpJHKysqE7zHGcPLJJ6e0TzV/oL6+XoufTxb1\nf3344YcJ25UqZ5xxBjjnWL58eUrbqcm/3CGWzOv14tRTTwWAlPd90kkngTEGSZIcj7eqqgrV1dUp\n7be7QkKAIAiCIAgiTXDOsXjxYlRVVVne++CDD7BmzRoAwFVXXZXSfkeMGKF1vP/whz9oibJ2hEIh\nRCIR7fVPf/pTCIKAvXv3YsGCBZb1Gxoa8Oyzz6bUHgC4/vrrwRjD9u3bMWvWrKS3Ky0tBQA0NjY6\nrjNhwgRwzjFnzhxs3bo14f70+6moqMC4cePAOccjjzxiu/7DDz+cdFu7OyQECIIgCIIg0oQ6a+/4\n8ePx8ccfA5DFwRtvvIGrrroKjDFcdNFFCSsEOTFz5kzk5eWhsrIS48aNw5o1a7RRdUmS8Pnnn2PG\njBkYPnw49u/fr203ePBgTJw4EZxzTJ06FfPnz0csFgMAbN26FePHj9dmKk6FY489FlOnTgXnHLfc\ncgumT59uSIKuqqrC9OnT8dxzzxm2O+6448A5x9KlS9Hc3Gy770mTJuGMM85AMBjEeeedhxdeeAGH\nDx/W3q+trcWCBQswduxYzJw507DttGnTwBjD+++/jwkTJmjJ1c3Nzbjnnnswa9YslJeXp3y83ZJM\nT2TQ1T+gyVIIgiAIwhZ6RnYMdWbhF198kfft21ebLbiwsFCbwGrEiBF8//79hu3UCcXsZu418/bb\nb/OKigouCAJnjPH8/Hzeq1cv7vf7OWOMM8a4x+Ph1dXVhu1aWlr46NGjDdupswmXlJTwRYsWJZxQ\nzKl94XCYX3311dp+GWO8oqKCFxcXa/szb7d9+3ael5fHBUHgPp+PDxw4kA8dOpSfffbZhvUOHjzI\nzznnHG3fHo+H9+zZU9u3uv8ZM2ZY2vXoo49q2wmCwHv06MG9Xi8XBIHfddddSU1qZkeq1whoQjGC\nIAiCIIjc4aijjsKGDRswadIklJeXQ5IkDBs2DHfddRfWr1+vJdnqYUnOgHbxxRfj66+/xr333otT\nTjkF+fn5aGpqQllZGc466yz87//+LzZs2IBBgwYZtisqKsKqVaswY8YMjBgxAowxFBQU4JprrsH6\n9etxxhlnJGyH03K/34+FCxdi2bJluPzyy9GvXz8EAgGUlpZi9OjReOihh3DjjTcathkxYgTee+89\njB8/HuXl5aitrUV1dbUlgbdXr16orKzEggULcNlll6FPnz5oaWkBYwzHHHMMbrjhBixZsgR//OMf\nLe266667sHz5cowbNw4lJSUQRRGjRo3C/Pnz8eijjyb1WecCjOdY0V/GmGwL5NhxEwRBEERbqJ09\neka2j2HDhqG6uhorV67EmDFjMt0cohNI9RrRrZ+hua4TQ44AQRAEQRAEQeQgJAQIgiAIgiAIIgch\nIUAQBEEQBEEQOQgJAYIgCIIgiDSRbNIvQbgBShYmCIIgCAIAJQsTRFtQsjBBEARBEARBEFkPCQGC\nIAiCIAiCyEFICBAEQRAEQRBEDkJCgCAIgiAIgiByEBICBEEQBEEQBJGDkBAgCIIgCIIgiByEhABB\nEARBEARB5CAkBAiCIAiCIAgiByEhQBAEQRAEQRA5SFqEAGPsZ4yxmYyxDxhjTYwxiTE2L8H6fsbY\nrYyxTxhjBxljhxljXzLGnmKMDU6w3Q3KNocZY42MsZWMscvScQwEQRAEQRAEkUuwdEwjzhjbBOAE\nAC0A9gAYCWAB5/xXNut6AFQCOBPANgDvAQgDOA3AuQAaAZzJOd9u2u6vAO4AsBvAKwD8AK4G0BPA\nrznn/0iyrRyg6dMJgiAIwgxjDAA9IwnCiVSvEd36rNMa1QHSJQTOBbCHc75T+XslgJcchMCVAJYA\neJdzfrHpvWkA7gPwT875ZN3y0QDWANgB4DTOebOyfDCATwEUAhjJOa9Ooq0kBAiCIAjCBhICBJGY\n7iYE0hIaxDmv5JzvTHL14QA4gLds3lum/O5tWn6zss2DqghQ/m81gL8DyAPwPyk1miAIgiAIIo3M\nnTsX06dPx5YtWzLdFADAtGnTMH36dDQ3N7e9MpGTZCJZ+AsADMAlTJVJcX4EucP/rmn5ecrvFTb7\nW67sb1w6G0kQ3Q3z6EWujfhxznPumAmC6FrmzJmDGTNmYPPmzZluCgBgxowZmDFjBhobGzPdFMKl\ndLkQ4Jy/CeDfAC4EsJUx9iRj7BHG2H8B/AnATABavD9jrBDAQAAtnPNam13uUH4f3bktJ4jspeZw\nDfr8tQ+mrZoGAKhtqUWfv/bBPe/fk9mGdSEXzL8A58w+h8QAQRAEQShkpHwo5/wqADMgd95vA3An\n5EThSgALOeeSbvUy5XeTw+7U5eWd0FSC6BY8/cnTOBQ4hOmV0wEAz218DocCh/Dwhw9nuGVdgyiJ\n+O93/8Wa3WsQioUy3RyCIAiCcAVdLgQYY3mMsSWQKwDdAqA/5M7+pQCGAljNGPtRV7eLILozHsFj\neO0VvBlqSWZojbZqf0fESAZbQhBEd2Tu3LkQBAGVlZXgnGPChAkQBEH7GT58uGH9aDSKv/3tbxgz\nZgx69uyJ/Px8DB06FJMmTcL27dsd/guwbNkyXHrppejXrx/8fj969uyJkSNH4pe//CWWLFmira
f+\nf8YYOOcYOnSooT0TJ07stM+CyC4y4Qj8L4ArAdzDOX+Bc36Ac97COV+hLPcBeEq3vjriXwZ71OUp\nBcAxxhx/pk2blsquCML1eJgn4evuTkukRfs7LIYz2BKCILojBQUFWuecMYaysjL069dP++nTp4+2\n7v79+3Haaafh9ttvx5o1a9Dc3Iz8/Hzs3r0bs2fPxsknn4xXX33V8j/+9Kc/4YorrsCKFStw8OBB\nFBYWIhQKYceOHVi8eDF++9vfauuWl5ejX79+4JyDMYbevXsb2lNeTkEU6WTatGmOfUq3kwkhcBnk\nhOBV5jc451sANAAYwhirUJYFAOwFUMwY62uzv+8pv79OpRFq4qDdDwkBIhESlwwdy2zA7AiYX3d3\nDEIgRkKAIIj08vOf/xw1NTUYPXo0AOCpp55CTU2N9rN27VoAQCwWw+WXX46tW7fiwgsvxMcff4xQ\nKITGxkbU1NTgd7/7HUKhEH71q1/hu+++0/a/a9cu/OUvfwFjDPfccw8OHjyIxsZGtLa24sCBA3jl\nlVdw2WXx+VWffPJJ1NTUaK83bNhgaM/jjz/eRZ9MbjBt2jTHPqXbyYQQyFN+m0uEgjHmB1CivNT7\n9/9Vfo+32d+lyu/309I6gmiD8S+NR8nDJdjTvCfTTUkagQkJX3d3WiPx0CByBAiCyBRz5szBhg0b\nMGbMGCxfvhyjRo2CxyMPzPTt2xePPfYYpk6dikAggCeeeELbbt26dZAkCSNHjsQDDzyAHj16aO/1\n7NkTV1xxBWbNmuX4f7OhQ0pkhkwECq8G8H0A9zDGPuKc6zv805U2fcI5b9UtfxbA9QD+xBhbxjlv\nBADG2FAAtwIIAZjT+U0nCODdb+Xqtv/5+j+46dSbMtya5KDQIHIECKIrYNPdFQrB73dXB3ju3Llg\njOH222+HINgPyFx77bV47rnn8O678UrqpaWlAICmpiYEg0EUFBR0SXuJ7k9ahABj7McAfqK87Kf8\nPpMxNlv5+xDn/PfK3w8C+CGA8wFsZ4y9DSAI4CwAowAEAPxGv3/O+ceMsccB/A7AFsbYKwD8AH4B\nuVrQr5OZVZgg0okoiZluQtJQaBDlCBAEkVlEUcT69esBAFOmTMEtt9ziuB4A7N69W1t2+umno0eP\nHlr40a233ooLL7wQQ4cO7fR2E92bdDkCPwDwK91rDmCY8gMAVQB+DwCc8xrG2MkA7oacLzABcojS\nPgD/BPAI59wS7885v4sxtgWyA3AjAAnARgCPcs6Xp+k4CCJpJEOVW3ejdwA45znnCOirBpEjQBCd\nh9tG4N1EfX09IpEIGGOor69PuC5jDKFQvNRxeXk55s+fj+uvvx5bt27F1KlTAQD9+vXDRRddhIkT\nJ2LMmDGd2n6ie5KWQGHO+XTOuSfBz5Gm9es453/gnB/HOS/knOdzzodxzifbiQDddvM456dzzks4\n52Wc83EkAohMkU1CQORx9yIiRnLaEdh2aFsGW5J5omIU3zXISYiHAodQH0zcIXFClETsrN+ZzqZl\nFXWBOtQF6jLdDCKLkKT4M2Pz5s0QRTHhTywWM2x/ySWX4LvvvsPzzz+PX/ziFxg4cCBqa2sxb948\njB07FjfdlB2hqoS7yK2MQYJII9kkBPS184OxIBjicbwxKWa3SbdCLwQmvT4Jy7Yvy2BrMssPF/4Q\nw2cOx7s730XvR3uj5yM927WfSa9PwlFPH4WXtryU5ha6H845ej3aC70e7UVJmETS9OzZU0sM3rVr\nV7v2UVJSgkmTJmHhwoXYvXs3vvjiC0yZMgUAMGvWLCxfTmOjRGqQECCIdsKRPR0AgxCIBg0OQS6E\nyuirBgHAi5tezFBLMs87O98BAPxjwz86tJ+5n80FAMz8ZGaH25Rt6MVzLghpInnUBGA7gej1enHq\nqacCQNo67CNHjsSzzz6LM844AwBQWVlpeF+tY0+ClXCChABBtJNsdgT0nZdcSJ41z/vg9/gz1BL3\ncDh8WPu7I+dyVIqmozlZhf6Yc+H6IZJHre7T2Gg/x+mECRPAOcecOXOwdevWhPvS7yMaTXydqVWE\nwmHj+dhWewiChABBtJNsqhqkFwKhWMgoBHLAESAhYKUp3KT93ZFzOSrmoBDQHXMuXD9E8hx33HHg\nnGPp0qVobm62vD9p0iScccYZCAaDOO+88/DCCy/g8OG4KK+trcWCBQswduxYzJwZd9ueeeYZjB8/\nHgsXLsT+/fu15U1NTXjooYewatUqAMDFF19saQ8AzJs3z5CjQBAqJASInGL93vX4bP9n2Lx/c4f3\ntWrXKoRiobZXdAH6jkswmnuOgL5qENBxIfDJnk+yPlG0MRQfIexIeIteZGYLdYE6fLT7o3Zvn62O\nwOb9m1FzuKbtFYl2c/3118Pv9+PDDz9Er169cMQRR2DYsGE455xzAMjhQa+//jrOPvtsNDQ0YMqU\nKaioqECvXr1QUlKC/v374/rrr8fq1au1sB5ADu155513cO2112LAgAEoKSlBjx49UFFRgXvvvRcA\nMHXqVIwfb5x3dfLkyeCc44knnkBxcTGGDh2KYcOG4Q9/+EPXfSiEq8nEhGIEkRE27duEUS+M0l5X\n/7Yag8oGtXt/b3/zNqa8MQXzrpiXjuZ1KglDg3JgRDOdjsCa6jU4e/bZ6F/cHzV3Zm+nSi8E9Dkj\nqZKNQmDs3LH4/MDnqJxQiTFDUi+5mI2OwK7GXTjpuZMAUInPzmTEiBF477338PDDD2P9+vWora2F\nJEmGycN69eqFyspKLF68GAsWLMDGjRtRX18Pv9+PY445BqNGjcIPf/hDXH755do21157LUpKSvDe\ne+9hy5Yt2LdvH1paWjBgwACMGjUKkydPxqWXXmppz4QJEyBJEmbNmoUvv/wSe/bsAeccdXXZPZBB\npA8SAkTO8MneTwyvv6r7qkNCAADmb5mfHUJAMiYL55ojYI5j74gQWLFzBQBgX8u+DrUp0zSF4qFB\nueYIfH7gcwDAWzveap8QyEJHQD1movM5++yz8eabbyZchzGGq6++GldffXVS++zVqxcmTpyIiRMn\nptye9m5H5AYUGkTkDD7Bl+kmZIxczxEwx8B35FwwVyDKVvSd2Q7lCGRZsrD+WmjvcWejI0DVjQiC\nsIOEAJEzeAWjAaavpd/dyfWqQebQF/O5kAqBaKCjzXEdueQI7GqM12/f3by7XfvIRkeAhABBEHaQ\nECByBnPn74uDX+DJtU/im/pvMtSiriNhsnCWjGh2BPPI74cfi2hvWW1z4nF3INUcgY01G7W/s61q\n0M6GnbZ/V1ZV4u/r/o59h/dh8eeLcbD1oOM+yBEgCKK7QDkCRM5gFgK/efs3AOQ4/41TNtpt0m3Q\nj9oGogFDxzhbRjQ7grmju3ZdBJWVwNixqe+LHAHg1Fmnan9nW2hQdVO19ndVYxUA+TsdO3csAODX\ny38NABjZayS23brNdh/kCBAE0V0gR4DIGXwe+7jwH
XU7urglXY9eCDSGGnPOEbB0gjwRzJrVvn11\nS0egAzkC2dbBDEaD2t9qvofd6P/2Q9sd90GOAEEQ3QUSAkTO4DR7qsC6/2WgFwL1wfrcyxEwd3SF\nKGpr27evXHcEeHtjqlyCfjQ/GAuCc476YH2795Et1w8JAYIg7Oj+PSCCUHB6EOonbemu6IXAl981\n4HBrzPa97oolBt4TQWs7B/a7S9UgPal0ErMtFMiMOachLIZTFwLkCBAE0U0gIUDkDI5CoBtXDxIl\nEU9/8rRhJuW39r+IRZWbtNfZ0pHpCBZHwBNBSyCGxz9+HP/34f8ZwkXa4nDksPa3k8uUbaSSLJzt\nwtEsZILRIBpCDe3eBzkCBJHbrKleg7mb52a6Ge2GkoWJnMGpukl3Dg2as3kObn/7dsvycO/45GrZ\n0pHpCHaOQF3xB7jznTsBAMMrhuPnx/08qX01BOOdxnAsjAJfQdramSlScgSyrEqQGXP7Q7EQOQIE\nQbSbs2efDQA4uf/JOL7v8RluTep03x4QQZjIxdCgz2o/a3OdjiSKZgvqMZaERsoLPBEExGbt/ZZI\nS1L74ZwbRo9vvyPc7jKkmcApvj+Vc6DbOQKxoEHcGd5zcIqy0RHI9pAugnA7+opk2QQJASJncBIC\n3bkjnMxFcMniAAAgAElEQVSxpVpDPhtRv/uBhybICzwRhCPx4042xKcl0mI4j16YHcamTQk2cBlO\nncFURouzXgiYHIF7pwdxsMXeEXAKGSJHgCAIM9l6byQhQOQM6egEZRvJHNtL/xKxdGkXNCaDaGIn\nqoTxeCIIReOfTbJCwNIx9IZRV5eOFnYNTmE9Hc0RyKZKQub7wMKXg/hki32H38kpyEZHQP+9dZfc\nFoLINPrBtlAslMGWtB8SAjnE3ua9uOudu7LWvuooTp3i7myZJyMENm0W8bOfJb/PXY27cOeKO7G3\neW8HWta1qDdrHlGFQBRgqTsCllhyTxgtSlTRsu3L8MiaRzrc1s5i3d51+OXSX9q+11FHIFtGwlbv\nWo2n1z1tXHjmo/gwaD+phFPugF5QPbvhWcza2M5JKboQvXORjFPIGKMf+qEfmx89+nllUi064BZI\nCOQQV758JR77+DH8aOGPMt2UjOA0GtqtHQGexLGx1EKDfrrkp3h87eOOnUo3oo54c50j0B4hYBkh\n9oZx6JD8508W/wR3v3c3ttRu6XB7O4MZlTPw+lev27732ztEHDiQ3H7sOv3ZMhI2Zs4Y68LvL3Fc\nvyncZLtcP3iw9/BeTPnPFNffR/TORS6EAxJEV6AvJ+3kILodqhqUQ2ys2QgAru2odDZOD+qYFAPn\n3KL0uwPmkT/vc58jNvX7xpWE1DoFn+77FADw8e6PO9S2rkT9HKSITggI6XEEzBOTufVh0Bxudnxv\n0+YYHn4YeOKJtvdj56C5vRPcXpwGD+yWh2IhFPuLO7tJ7SZZRyCZMK+jnz4aO+rlGdn5/dzy2q0E\no0EUPlQIv5CHSCwm3wNWPAZcfKdhvTPfjWHNhx5sO7gNx/7jWADAiX1PxOabNuOEZ07A1gNb4dv4\nG0RPeQqlO/8HzfP/iU8+AUaNysRRJeZXr/4K87fMx6Ses/HibRNw/fXAvHnA2j1rMfrF0Zb1h5QN\nQdVvq8Cmy8/DV656BRceeSHK/q8MAHDbqNvQEGrAS1te0rbxCl75HiB5gBkxXHUVsMRZX3cp6nFc\ndORFWHHdCvR/rD/2t+xHXsOJCFeYimlsnAy8MQt79gADBxq3B+zPbX2hiVSrj7kFcgRyiO5cJjMZ\nEnVWOjJC1hpwb7yt+ZhjtSOAPacbV0rREchG1O9XCnfQEbDJEUh2JD3T6C1sC4KIYJJTKdg5At1W\nCDiEDdotT2UuikyQTkfAvH225Byo7RbgkTutAOC1ullfV8nXiv5cV68f9Vzn0TwAgMcrH7vo0tuo\n6tapbmhRkbzcqT/gETyG1xKXDOd2fbDecg/wC3nKTkUAHGEXps2o56jWcQ+XWlfyycepb//AkoEJ\n96sXAhQaRLie7jjinQqJcgE60pE59nj3xkdbjkvyAMEexmWCiN69U993Np1P6udgEAKd5Ai4lYQl\nUoWY1kFoCzshkA15NuaRbi/y2twmFUcgGMsiIdDBSmnm7Tnc6wLoUdvN4AEkJSDCRggcam5BLGY8\n19XrR72XiFE/AEDwuFsIqOelmh9VWCgv9zCP/fomQStxyRD61xBqsNwDvII3LqwE0ZVCgHMOzrkW\nysNDNkLAKx97SHdKDC0fmnC/+gEWcgQI16MfAZj32bwMtqRriYpRTFo2CYs+X+S4TlNLch0ZO9u8\nem8YgUC7m9ep6Efu/IIfAAMLVxhXYhJ8vq5tV1ejdgBiIflhKPgigNCOqkE2OQLZ4ggkFAJMREES\n86K1RFpw9StXW5a71RF4bftrmLhsIiJixNJR93ObjoCJVByBlpC78yQMoUHKfaGqsQrX/PsabK3d\nmvR+3trxFnY17TIsyzpHgHkA7uwIwNeKgwdNjkDE7AjIQqDuiPnAKc+5VwgoHftYKB9AXAg4OQLB\nWNDwnJO4ZLh26oP1FiFscFiEmCuFgHocHBz53nzwmM1Dz8YROKL0CO1vu0EQcgSIrIIhPoJ7w2s3\nZLAlXctr21/DPzf/Ezsbdjquc8TgGJrs8wIN2D7wvGF88UUHGtiJ6DtoPkF+cBUJJkeAiVk1KVZ7\nMIcGMW/7QoMscfa6qkFuR5/UZkGIIZZEX/6ptU+httVqgeyscqcQuGLxFZi9eTbmbp5rEXFeMQkh\nkIIjsPdA9jkC1y29Dos+X4Qz/3lm0vu57F+XWZZljRCQbEKDCmxGcX0B7NtndQQ453ERKOocpXF/\ndq8QUDrxYji50KBQLGQYQIpKUYNL0BRqsnSIBeaNOywuFgJqp73YXwwxZuOIKKJQ3359qNShwCHL\nJpQjQGQV2RTKkU7MFU0uHH6hZZ2YGMPHSeS+2j7wPGFscWn+tV4IeBUhkCeZHAFBRNT9kR0dwuwI\nQIi2KzTIMvLtDcPlA8EAZCfL4gg8/RWw7Qr5b0FMStDUBe0nTbjgwihefrmDjexEGkINloe0EE2v\nI1DjdiGgcwTOO1/+vqsaqwAkP7O2E9kyj4TawWXQOQI9vwIAHLXzr8ChEfIyTxT79xuFAAdHMBaM\n3wNEf3zHBfWIxdz5GajPv1jQFBok2IcGhWIhg9A1u2mhWMgqBPShVi4WAupgSJGvCJKdEBDk49bf\n0/VhcHYhdd2hahAJgRxC7wjkEuZO3mkDbEo7eKLw+62L29oXAMAbxvbt7WxcJ2MQAkw+wHxudQQi\nSaY56PcnSmLWdQBUIcDbmSxsSbL0ZIcQCItha9vrjgZiapJfLCkh4Pc4XCRCDM8/37E2djYW2z5S\n0uY2qTgCNQddLgR0jsC27RIWLUpfAYmsdgR6fg0AKDx0LhAql5cJUezbZxV8n37eai8EBBGHI4c7\nte3tRR3N
j5qEQKLvXh/3HhEjhsG0YCxoEQIekyPgxnui3hEo8hXHhaAej3xceiFjeObZJNmbHYFv\nv01Tg7sQEgI5RK5WDTKPBBawcutKQiypOH8nR6DZuTJjRtGPYHghP7gKmdURSFYINIYa4/vmIgJR\nlyZHmFA/h2hAEQKsfcnC6kOBqbfOLHEEHEd81Yc3S84RyPM4JNh6oujTp31t6yrM9wEpmF5HoPaQ\nu08EvSMAJruAOScE7ByBYjnJR2g8EhCVuHFP1BIaBADnjGtBa0DpGMaM10JTxJ1hIepofiQg5wi0\nFRoEAIfDcVETESOG0KBg1CoEuOQx3Evc6AhwxF3RQm9xXAjqUYSAwRHgiR0B/b01GAviyBEh1NSk\nqdFdRG72DAFcMO+CrOnEdIQn1z6Jnyz6CWJSzBIalMpoLucc1y29DtNXTU93EzsdcweARcqsKwmx\npDrzttUxvO6NE9d3WATID7lCmxyBZEODzNZntsREqh34WDgP4AycSdpNH0hdCPihPE2H/Re1l5yL\nbQe3pbfBacYxP4DHE/w66gj07du+tnUFDMxy7oqt6c0RONCQPY4ABBGi6Bwekgrr1rm3alAoFsKl\nCy7FsxueBaCrGsR1HVcA3mgFpNYKQFKEgBDF4cM2yaH+FntHAMD2w+swds5YrKpa1SnH0l7U0fxI\na3JVgwAY3A1zaFAwFrQI4QP7siNHQHU68j1FDo6AfFxOjoDdc8IyyJLfgM2bO97eriRnhcD7372P\nDTUbMt2MTud3K36HZV8tw1s73rKMABgeDG3wdd3XWLB1AaZVTktzCzsfc0hA4yGbUU0hmpQQsO0w\n+lpdKwQM+RFcFoLFHqsjIIqAlERf2Gx/Z0uVBHVUJxLyxEfyfPHOcUurlNTxW4TA8QsRHfiBIfne\njZ0iJ0fgogvUh7eIw0lENiQSAsmE1mUS87kbbWrbwmjLEWC6hNFDTe4WAnUBXX4HEyFJiTuDyXL6\n6e51BBZ9vgjLv1mOm9+8GYDJEdCNCPtDA+XOn84RCIXshEArJKhCwPgcebTqF6jcVYnz5p7XKcfS\nXtRnQLhVbm8yjsC6zfH7RVSMGp4jETGCQNjkfvHsyBFQnQ0/KzAIQQ2b0CBDjoBNaJBlQLmgIStc\nYj05KwQA95a86wyC0aAlRyCVCXDassfcjHnU+puvbW4ASToCtg+8gga0JijIkkn0N3CuCIFSn9UR\nAJCUK2AeCc0WR0A9Z8WoB4gpCcP++Jf2l0ckXHVV2/tR7xlebiy6r/8c3Hh9OAmBooLUHAGfx6HO\nrBB1bQldQC6UYAiNARDZO7LN7dpyBIZ8ORP48mcAgL217hUCUTGK6qbq+AJBrhSWrtAgMRkVnQHM\n573BEdCNCDMxX+786RwBWyHgDUFi9qFBbkU9hpAiBJLJEbjxVufQIADYucf0sJTc7wjoKz4J8Nl/\nfzahQea8ODOWAdWCeuzb1/H2diUkBHIEkYuWC99cTScR+huBPk48GzCPWn+1zV4ItLt8aEG9ax0B\n/ffGJQchoMTKJ5MnYB4h3d+YXY4AuAeIybGy8Ou+NCZi6dK296PeMzySUQj4hHgHuaOztnYGTrMK\nFxWkliPgOPIrxJKemThTWDp1dSPa3KYtRyAW8QEtckzU9m9COGStLugKdjXtMp6XLH2hQQCHKMbP\ni7//PQ27TBPmwS/NEeBGRwBiXnKOgDcIrgoBKTsmX1FFa+CwkiPWRtUgAECec2gQAPA808OSM9cL\nAYlL8RwvyWtxdADYOwI8sSNgHmBAfgMJgWwip4SAZBUCqcyEmc2z55nbu7/G5gbu6UBoUH6De4WA\n7juurZUfihX55gnFUhACphHSaybW47XXOtbGzoZzHv/eJA+Y5gjohUBqOQIeqdCwXC3Nql/HTTg5\nAoWFqTkCTiPk8LjcEQCzduqaB7a5naMjoBcCyvkkCUG8/37H2tlZ7Kw3zaGiFAhIiyMgxCBK8XC4\nX/+647tMF/rvPBwL60Z0BWOMuCoETI6A5ftX7hke5rFPNnUZ+lHwYKt8bMmEBsGf2BFQJ97SYJIh\n3ygchuvmpjEIAW5yBKK6GeeRoGoQOQLdDzc+sDsLkYuWZOETnz0Rd71zV1LbZ+vseW9/8zbW7V1n\nWNZwyN4RWOW5B6fNOs2q8HVksyOgUlxgGgkRUggNMo+QFjTg2mvb27quQf3O5NFBBiY6CIEfT8Ql\nCy5JmESvCQHR5AiwuBDIptCgwrx4jkAy57DdzJry9u52BBhj1ge22HZSg5MjEE8+98U7Eb4gaq1z\nrWWUNdVr8L2nv4cXN71ofGPyGXgteGebOQKf7f8MR808Cq9tj6t9vfsFADjjKRyOGZ8JyVYh62z0\nz6pXvnwFJzx7gvxCMnXkY3lym9tyBJSRcg/z2iebKkx9Y2o6mt9h1PPXJ/gQDMjP/2RCg/SOwONr\nH8cf3/9j4n/EJM0R8ObJ/9Mt54AKB9eEHeMmRyCqfihtzCOgOAKHw4dx4rMn4sEPHtTuK0Wsl7xS\nQQNVDcom3PjA7ixESbTYpC2RFjz28WNJbZ+ts+ct37HcutAuSUiI4fOKh7GhZgNW7FzhuD/bTmKW\n5AioFBYCl4+4PL6gA44A8htQblON1U1ocf2C/L0Lkl1okAScNBtvf/M29rfsd9yX+iAQTELAoxcC\nLgwNUhPaTuh1ChAqBf7zDN59F/AK8VG8QKDthHGnjnGy5XcziaVTJ/lwUtN92ssea2datnFyBNQq\nTLFAYTzUzBt0XWjQ5Ysuxzf13+DlL02zvfmC+ER4vM3QoMlvTMbOhp24YvEV2rLBZYONK130e8t2\nbrkf6itFXffqddrfAvcbngM8Zu8IWJOF5Q6yAC/AnbtPz3/qjkk11Pb7PX7tO0lUNcgj2QySJAOT\ngKDsNHtL5YvAbeFBekcAUsccgSVfLMGW2i24d+W92sBhnqgIAX+L6wYE2iKnhUCuOwIqycyGl62z\n56kdl5+M/El8oV1spxB/4CcaEc6m0CDOuTH8S0kWLigAll29DMM+/Ze8vEOOQD3KbKqxugmt8648\n+AQnRyAJ1HuGEDMKAaYbHXTjAIPqDB1bdjrwlwaMbLkJF1wQF0ceb3Ji0DE0SIi62xGwCw0SffhR\nyXT8tkkEpouof+9Gy3ZOwkcdDIke7hFPPveGcPBgWpvdYRy/L4W2QoP09eRTwS33w/qQ/aCVRyow\njOhL0Tz5/peSI+D+7pPafp/Hpwn1RKFB3pgSNuq3fu9Vv6lCacup9v9IEIGG4fI+esszarlbCDg4\nAknmCOirp6mOgFdSJih0eeEEO9x/JnciOSUEbHIEVHY27LRdridbHQH1QdgjX5cg6+AIaH8meDg6\nhQaFQkDMZaeTtTys0RruWaEmisrH1C5HoKAB9S4/HdSOuToCJqijXrryoXohkKj8pxZjahICeufF\njY6AKgh5pADgAnr2lJerI8Iev3xcbT28E4UGuf3hZwn5k3zIywPKS
gW5U2cT893QFLUVyOo9MNJU\nYQgNcpsjoAo9J9pTPjSZ89uNjoAegecbvm8xJHcK1blWknIEsiBHQL1f+wU/olHA4wF86iHaPOeE\niCoErEquNK8UUrjA/h8xCWg4Uv67h9yfcJsQ0OdLcNFrdAREvzxQJkgAEx2rBq14R0QwCJTkxWcl\nV88Rb0xZ5olQ+dBs4tVlMVRXJ17HjaN77UHk1tAgFXMimSiJllHxbM0RUC/8igJdgqxonyyskroQ\nkD8Ptzz8VCxhQYoj0L+//HJAP+VBlkpokPJ5lniVz7NkL2oPSAiFXJYZpsPsCHi4jSMgJJ40RkUT\nAlGjENAn07txgEE9F0TlQa4KAbWj6PXJn1FbD7BEoUFucwTM97CIZHUE+vbVOWE2Md9Ll0UxcaJ1\n3+o9MNqsdwTcJwQcy70qtCdZWH0mCnDuCLvlXug0aCWIRkcgqggBD7N3BHww3jPaCg0C5PtIKpN2\ndgZa+4V4xSA1MMAuLIwFlQGzPKsjUOArgBjOj6/LdSKTiUC9LATEnp8DaPte0tUYHAHRZ3QEJG88\nZ8gTcZxHYMb/E/GnPxlnWFddM0EVAr6g6469LXJaCLz87xguv9z5/ftW3oeih4rwXcN3XdeoTiJZ\nRyAUC2HQE4Pwk8U/MayTtY6A0nEp9uqEgK0jEO/gOIVQAc6OAOCeh5+KNVFYPq5x4+RXV/5UfhAU\nl6Y+j0C5T5lGdsiHwP0eTPi3O5Lj7DB3XDzcJkdAN8twoo689p5FCMT35cbBA/VcUEf0KpTLQR0R\n9vg66Ai4sGqQ/lqVuGRpu4f5cP31wJHKQCa4oIllDSGKl14yLuKcx++BwQp4oJxPvqDrQoMsib0m\n2jMRmCqsvch3XMctoUFOg1aCWGBJFgYAj8kRUJ8feVBmoVY6yIy3LQSOmnkULv3XpR1ofcfRRquV\nHKYi3W3Lrj/AA4oQMIUGMTDkefIQC8UdAaZPttc5AsEj3gKOWeo+R0CXLGxxBLhHJwSijjkCYCKW\nLTMOiKg5ZSxaLC844ynUn20NM3QzOS0EIMTw2WfObz/wwQMIi2E8ufbJrmtTJ5EoR+BQID6M9em+\nT7GvZR9e/+p1wzr6Ec/mcBJ1Nl2CeuF7orqMVsmL/u+8j5G9RqLCO0BepptcKlFcrf7B6d8ul8th\nfrkH5JaHn4pTeVh1NLiiXH4QqqPBqTgCw/JONixf/M2sdray8zE7Al47R8Abv/Mn+v7VhwIPG4VA\nMKYTAi4ODVJH9EqUwSstRyBZR8AxR8B9joD+YR2TYpbQoGf/7oPfD1x7LfDMM8B11ymx33oUp1A/\nx0hrtBUxKYZCbyEg5smzlAKA133zCLQVGpTK7PIqqtD1wiFMBO4ZFHGqlgUx3zggpIwOex0cAT9X\nhEB5FQCggPdMWDUIAL5r/A5vf/N2u9ueDtT2q8UMCnVVj+2EQEydbbvAKKDyvfngnCEaiH/nUswk\nBPafGH89aI3rRsX1joAU8yZ0BAxVg/T3c0GE12u8D2pCIBIPF4oe/0L6D6ATyXkhkAxufLCnSiJH\nQJ8I7BQzqr+hrlzTigcfTG/7Ogu1M8BCekfAB9+ecdh26zZccPRZ8jLdCEiih6MaPz6kbAgibzwO\nABB88s32cPvy6joNiyPAGbZti79Uv2vmSd0R8EolwMJlaWlnZ6OV/IQSBqMKAUE3GpqiI8CjxnkE\nAqK7HQE1NEiKyMeuCgEtRyBJR0ArR8jMJWjdlyys/x5jUsziCBQWygMjeXnATTcB8+cDPq/p/qc4\nhRs2xBepbkBZnnxPiQsBOTTITZPsthUapP9M7MJY9GJKCy9THQHuJAS4a4SAXflkwBoapI4OewX7\nHAGfpFRE6COHvZTEjsyqZGEvknMEovXKwFjRAcPyfG++PM9OVPedm4WAmIe7jn1afu0LJjVBZ1ei\nFwI8ZqoaJHkcQ4PMjoDHY7wutLwDnRAA3HUfaAv3n8mdiRDTHoiJaI996jYkLjnmCLTowhqcysnp\nhcC3u1tw773pbV9noXZcY636L5pDVPpq2oiZLiYymXkEuCQYbhwAXBcaYc4RKCxkGDky/lr9rlk7\nZhaWYj4tJtTtaKFBqiPAbEIaUhUCJkcgxuPbu3HgQHMEQkYhYK4a1NYonjnUQMOFycKGsn9ctAj8\nfJvTwDKCrjgC+pAfVQiU+hQh4JF3JOQFIYruug+0FRqklpUF7M97fbKt+rd6D9TK8JpJck6KrsBx\n0syoKTRINAkBc46AqDgCykh5XmB4VgkBNQla7wjYDvodloUAKzLGuOV7C+SiEDHdd64PDVKeIT3L\n4u5Yg8tSCfXJwpJo5wio333EcR4BMEkWAjbOKA8ZO5Nuc0QS4f4zuTMRYlqYRCKcRvgagg0ZTwZK\nFpE7OwL6Tr6TI6APDUJxLeCJoK651Tlm2CWoF35Ls+6B6IloFX60EbMkHQE7IcAF+TNw24io+SEo\nmELDtO9aEAEhisZg25aGegOUoj6gcVh6GtqJNAQbtI45U3IEbEMaPLrQIKeEWMTvBVKoqM113IQ6\nMhoNGkODtEpK3tQcAQ+sQiAadVflLP3D2s4RKLA7Dcz3P8UR0M86rnaIS3xyPHW+IO9IyJM/Yzc5\ng205Anua92h/m+97EpfQGGrUXqvx9ur5rU3MZ4ZJrnAEOOe286gAkBO8bRyBPK+9I+CJGWske5qO\ndH3VoIgY0b4/gScXGoSWfgAAXmgUAvt3F2D7dsQT4wFjGW6l6lrv8rg75jYhYAgNiibKEYgoMyPL\nuUAGgayGBtk8I6QgCYHsRIhpSXOJsBvhW1W1Cj0e6YFb37q1ExqWHvQiJSbFHHMEDKFBSTgCGLge\nuO176PtkGY79+7HpaWwnoXYGWpqMD8QeSk6Ul3XcEeBKZ8F1QsBki5u/f4MjMPUUXLe5r2GE0A5t\nRCXqi9dedikbajagxyM9MHGZXPZFTRb22wqB1BwBMYEQcHPVIDXGt705AtoIIzcKAV+++66BtkKD\nUnEE9EJAdQSKPfLDI98rf6bMJ394rhICbTgC+s/IfN9rCjUZSumqx60Ja9HBEWCiK4RAwhDPWL6t\nI5DvizsCkQgQjilCIGoUAuIh94cGHTXzKFz00kUA4tdrW6FBUKsGMeMAJ48UYPZsGO/55mRhABUl\n8VK6rhYCMXPVIJ0QUMIcH1r9EHo+0hN7D++Nr6eGBtk4AmKAhEB2IiQ3cmcXGvT4x3J8+DMbnklr\nk9KJXsBExahzaJBDQpV+ZNPSOS6vhshF7GzY6WpXRO24Hm7yAe88gv51v8CoQadg4UL5fW3ELC/+\npE/GEZDEeN1xzmIAk1wVEgDYOAKCvSPAPWGg71ZEeNBSStaMegMUo/Ln1me7HCPmFZOIsetiXvhU\nTthaWbUSQNwR8NmFBqWYLCwGEzgCLg4NigTscwQET5KOgPLZMMkkBPLk7d0kBJyShQuajwdWTrd3\nBMwDITaOwOGI3NMvEORw
kbgQyD5HQI/5vqcep4o6YKQ+FwbWTrLfkeAOIeCUHwBADg2ycQTy/fLn\n5fHL33s4ppzv0VLD5oE97hcCu5t3a3+r12ubjkCwh3UZAMTy8fXXAL78GfpJp8LXeAyw7tfx9xUh\nUOBVZ9l2YWiQrmqQaHYEIiUGRyAQAO5daRP/LFhzBFRigWLDazfdC9vC3WdyZ5NkXKvdg10/s5xb\n0XfkI2LE0RHQCwHzNiqJRjkNYUMuQ73wW5t9wEe/x33HLsInawWcqBQ40EYAdRVkknEEJJEBYPGR\nURcmS5ptccF0taudnoA/Xh43WUcgFpEfmKdF7gIAuFELDikbYngtKHWv/Z6OOwKxRELAxaFBoRZ7\nR0BIMkdAewCKxvufN09e7iYxrP8eo2LcERi07l9A5X3tdgTU+6Uf8oM/X+n8cK8LhYDJEbCEdOkw\n3/fMA0RPPxvEl1/Gn4cV1b8yvK8NBjDJFTkCjvkBUCbWs6kapAkBn/y9h6LKfSGsEwKSBw27BrdZ\nNchN2AkB2/5A0CFEIlaALVsA7D8JM45Yj5M//tIoBAQRl10mzzUAwPWhQWLUlCMQrLAIAVuYtWqQ\nSrSVHIHsJMmSd3aOQCojLZlC3+6IGHGuGqTryOtFT7JCwGn2RjegdlxaD8vfV5nR4dUJgSSrBik9\nXkmSP0vtweqJuE4ImEfEnHIEQt54hYi2JovTkq/D8ud55HD585PgvnCYfsX9jAuUB7ca020gnULA\nxY5AqMU+R4AlmSOg3hN4zOQI+N3nCOi/x2eei6GuUW57JCi3vb05AurIuCoECpXOD/fIT343dIJV\nzMLGy51r/5vve/qQUQB4c0UId98dF7qBVo8hvIYr90S3hAY55gcA4FFTaJAyOlxgEgJqaBAP6R4c\njUNQf9DnekfAgGQNDbIl5CQE4ufNsGHx0FqVwiIJb7wBFHizIzRIjJiqBoVMQiDoMLLl4AgwMERa\njaGyJASyhWQdAdMI3+6m3W3WZ3YDhtAgKbnQILMjUN1Ubci2t8PNE4wZHAEA5eXG97URM31okIMj\nEBEj2NeyT36hPATUsmwoq874aKjEJUPyX5uhQTb5IG19l2ZH4ITj5N+cxZIqP9qV6OObgXhoUJ6t\nI5BcsrD6IIm2Zo8jsLtpt+b0BA87OAKe1OYRMAsBj18Oj/uubo/dZhlBP2oXjsaw/6D8HYcDctuT\ncsTjg8AAACAASURBVAS8IaBkr60j4JXkc0AVApLgQkfANGAl8DyHNWFIDAZsQka98oRp6nMl0OIx\nzNLO1Y61y0KD7ApgiGFTaJAyOlyYJx+PoAoBxRHgQZ0QUCbOchQCpknpMlF10BKuK1odAVti+fBI\nNvdH3YSb3/ueVQhIXAJjcXfMrY6A9vyyOAI94snPnghavFX2O2EiJMnqCOR58xAOGK+tnBMCjLGf\nMcZmMsY+YIw1McYkxti8NrYRGGOTGWOVjLF6xliAMbaTMbaIMXaUwzY3MMY+YYwdZow1MsZWMsYu\na3fD2xEa9Nr21zD4ycF4actLCbZwB+ZOfTJVg/TH+o/1/8CQJ4fggQ8eSOwItDGKnEnMVYPMQsC2\nfKiDI3DarNNw4fwL5RfKQ0CdqAW3fh+7A1+lqdXtY9LrkzDoiUF4ddurAKyOQCkbYHht94Bsy91R\nb4BRxRHo00v5/DxR7Njhrvggc4ecqY6At+PlQ6MB5yeqmxyBt3a8hcFPDsb2Q9sBAKFm+SFfrISz\nagnjSghMslWDpKgpNMgfBS65DZe9PwhLvliSruZ3CGO1j5j2HasP7KRyBHp+A9x5BHZL67RF6v3S\nIyqOQJ58PklCGGCSq4SA+Rp3TPAFcOGsK1FVFX9tEQK+IMor4p3aQKtgCK/hokf9J65wRdSBkHJ/\nb8t7cmiQ1REozDcJAcUBk1p1D46G4fJvp6pBpiIKmRAC5v/JY9byoU7kw8YVUPJf+vUDBg+Gpdqi\n+v/ioUEhudyoi9A7ArGwyREwhAZF0XTlaPudCCLCYetgkSDlgZvuiW5yR9siXY7AvQBuBXAigD0A\nEvYIGGNFAN4F8DyAYgBzADwJ4EMAowAcbbPNXwHMBtBP2W4+gO8DeIMxdku7Wq2EBrUV36y/qP68\n8s/t+leZwBzm45QjEIqFtE6TvvM0rXIaAOD+VfcnTKDMBkdArRpkFgLagz+JHIEttVviL8yOAIAv\nom92tLkdYs7mOQCAp9fJk7qo1vhgfg6w7Se4XHjWsL6dMEzWEYiG5M+ztETQPovjvu+uTpClQ648\nuAu9NqP5aUwWdlPVoL+t+5vhdWuTMTRItfLVsJZkqwbFIiZHwBcDRv0DAFwzE7t5IiBNCLSm4Ago\nVJct0P5WQymlkCwEelQw5HmUToU35O5rIObsCLQEYrjxxvhrS+6XN4imw/L+PMwjj/pLekdADQ1y\nR/lQdSCkxNPL8l5TXZ69I6AKAa98D4gooUGoPxrYOBn9QucCG6fIy5wcARcIAXNHVXXwEoYGvfIv\nAEAes1ELSv7LaacBjAG9TB+pJgRcHBrEOY8LgYjJEdCHBuU1ySXS7WAigkHrMyLQnGfJm8o5RwDA\nbwEczTkvA3AL4BCDEud5AGMBTOGcn8I5v4Nzfg/n/AbO+XAAK/QrM8ZGA7gDwA4Ax3PO7+Sc3wbg\nFAD1AP7KGBuccquFGDhvexRM3zne1bgr5X+TKZJ1BID4Td9uNLMsryzrcwQOOwkBdcTMH39yJcoR\n0NAcgfiDMBJ1z0gwEB8ROyJ6PrD4VfQtGGh43y40KNkcAVUIFBXFk3AhxOTKEi7BEqKjPPgLfcXW\nlXWhQYnOde36SNChclNoUN/ivobXLY3G0KBiv/xZiB5ZCCdbNUgMGY9fHUEFgNI8Y4WVTGHoDAkx\nTeyFAinkCCiILfFYCHWkPBqQe1UVFcaRUDcJAfO5LIoJHs2eMNasib+0hgaF0KwKAUEVAjrh5LLQ\nIHUgpJhZhUBLCzPe/5TruUgRAqpDpp5DkUAe8MYs/A9bBew7Wd5GLwT0OQQxo8LMxP3A/L2rDp6j\nI9A0CPj8GgCAz64Qilf+LK+4Qn55pGkuSfUY3Z4sHA9tNVUNCvbQOvLefrJ7attfEkSEQjbho2Ke\ncV4FAIFg9kxEmxYhwDmv5JwnrjuowBg7CcA1ABZxzl9w2J/5yrkZssvwIOe8WbdeNYC/A8gD8D8p\nN1xILsFN3zk2l1RzMxZHIIE+O9gk3/TtRi8Glw1O2DnKBkegucFeCGgXuy/+5Fq/KYwxY9roFClx\noPoZVsNuEwJqaJAyLby542MbGtSWEFBugOGg/HkWFwN+bzw8yE12qEXUKoKlyFYIpBYaZOgARYzD\nbG4KDepT2MfwWgoXID8fUMulq0IgJsjXf7KOgGiywT3e+GdWkueOUrJOoUFiOA+CAHhtBv+d5lEJ\nh+P3TnXQJNKiOAI9dCOh3qCrhYCUqG/iDYOx+Dp2oUHNLXFHIBCAIUdAG2F3WWiQl
9tc75yhtNha\nNaioQBECqiMgquFk8vneV6+r9UKgVQk/iuUZ7w3IjCNg/t5jYbn95mIZGrq8BruKiOW9g6iqAiZM\nkF8frcZsKNup+Vj5uvKhjY1tnG9djMQlBMO60CCHqkFCH1kInNTnNOtOmCIEzK5xzOoItAbd4wy3\nRSaSha+F3KlfxBgrZYxdxxj7I2PsRsbYkQ7bnKf8XmHz3nLIDsS4lFtSUAf0/9SQJyBxCev2rjNU\nHJC4BM451u9db7ubDTUbEIgGsG7vujbLL3Yl+pGIqBRNeEM6+YxWfPih/ejF4LLBCRMosyFHIBLy\nweezhgPYhQZt+SKM1auBd9913q9qg+uFQCTmng4gEH8QShG5k+J47Dr0om7z/s3473f/1c6JqBjF\nh9UfAgAiwbgjkKf2KoWYK0YCVSznsjJiWexvIzTI4VyXuBS/hgydAGNn202OgEXUxPLRT1dMqUj5\nLGKsFfC3oCqyPuG8IOpnY46HFXRCoNTvDkfg8wOfx194IlopUIg+FBTIIQ5mnEKDQojf49QOcqhZ\nJwR88ZAIN3SCY1IM73/7Pjbt22RYLkoJ4mA9YQQCwE5lSE+tGqRVRvMG0dIaFwIAwLhOCKiOgMtC\ngwS7GZAZR57P6ggUF8QnFAOAqGQUAvprxxBaFOil7cdruq9mJDTI1FGNhuT2mwfCNHQTiOV5rUIg\nhiCGDIlfM9/7nvKGKTxKE8T+AKQh/8WanZtTbntnEQxyvPGm0h8Ie41tjxRrolbqtwEAcEzFydad\nCEpokJ0jYBECLquekYBMCIFTld9DAewEMBfAgwCeBfA1Y+xvTBfMzhgrBDAQQAvn3C5wa4fy25JX\n0CbHLwKmnoLKqtXaomfWP4PTXzgdV79ytbZMlETM3jwbo14YZbub02adhqKHinD6C6fjqpevSrkZ\nnYXZEUg0UtkcbMGrr9qPZhb5i7I3NEiMP/zLy60Pf21UXD+Toif50CCfEL/4oy4TAqqYlSKyAkjG\nEagL1AEAqhqrcNJzJ+H8eefjX1vl2NG73rkLOxvkXkIoEBcCWudJiGa8cpIeyzmrdFRK8trnCKgd\nfHk26viJNKDAOH7hJkfAOlkgwwBdzrjqCERZC3DdeMzNG4U3vn7DcX/a9dRgPGamCw1ygyNQF6jD\nzW/eHF+gJDt6mQ8As80PAJxDgyKeBsSU00L9TIPN8dAgfbUUNzgCL376Ii6Yf4HFwZYSCQFBAoQY\n9ioTqarHWcCVTq43pDkCgiYEdMJJ7Vi5JDRIq5oWsxECwYp4cjOgjQ6XFKqDGkYhEGq1OgJ+n34w\nQH7Dy4tw5HDjOZSJ+4H5HhYNtuEI6Mi3EQID/McaXmv72f8DAMCxveX3PYInXonvhvMx5l8nZTSc\nWj+oEQpLWhRINGwS/KEKLbcjVvYNAOCIgu/FhY1KCo5AIESOQCL6QH6KPg7gvwBGAigBcAGAbyCH\nAekzctVTrslhf+pyJ63bJq/vXKT9PeezOQCAZV8t05aJXMS8zxIWQdJ4a8db7W1G2jHnCCQcmfDK\no0F2o5lRMardWMreXgpsuRb5X10HBOS42fqQi0ODVOUu+WxHQ2xDAZTR4YRhLjZCwK05AlLYITTI\n5ti/qq3CCy8ANYdrtGVqSdKZ62Zqy8KBeGhQXAi4zBEwPYDV8oal+e0TAupyj3K8vSr/hRljZ+DG\ni88yrPfvV0V8ldkCUhot0bgQmNJvFgCgf//4+6oQiKAFGCwHiC/fsdxxf9rcIh/fAd/G23HzqXJn\nm+k+vxJ/5oWA/vwFAPhkheplcocvzyHFw+AIfH0ZoJZJLajXqqCoI+WtDXahQSFXOAJ7D+91eMdB\nCKiJvp6w1n5t4rSoIgR8QUhKrAdTug4C7EOD3HAfUAdCeDQfWLIER+/7M7D4FaDyXmDPGRhzjq77\n0yznTxUXGR2BGFfCySI+S5Ls4EGm7d9/ECOqH7XcV92QLKyGcjo6ArrQIM3hBYAvrgLW34xf97dW\nSbz/fuAHXy3FlJNuxn+u+Y+23FyVTSu5nQEMn70QiwsBJcftibEv4qGxjwIt/YD1t+CyIb9Aac2P\ngU0TcEGfa8HMIpJJCIWAiNkRiBZZHYEQOQLJ/M9tAK7mnO/gnAc45ysBXAX5TnUHY6zLCvXva43f\nNO3i6EVJtNQkzwYsjkCikAVlNNdu9CIqRTUFzHZeBCx9CaP3zweWypU09hzKDkfAbjTENiFIcQQS\nlT9TQ4P02+snYMsk6rmqWuNiKPkcgUORvbjx5iC++TbeGbY7Linqg9cL+P26WuVJluPtKsznuzoC\nWJxnkzGnc4ScqgZpQkD53HrWXIM/n/tnVOQby+1V7Yrhxz9ud7PTitppXfrzpTgmOBkADI5AkU8e\n1Y4g3nMbWGpMKtejdTDCJShZ8xTGDZMjMqPe+D3AqTpZV1LoM33HSg6QR5kJ3O8wwa6hE/fRncB8\nJT4wvwF1slmmdZBb6uxDg9pKuO4KkglPe3D0c/EXEUUce+NCQM2F8ISV+HdvEBAUR0CZk8MDm2Rh\nJTQo07ONG3KkvrwKl5fMALb9DFj5AJ5/nsFbdjC+clh+OJQ6OAIQ/SgoMDoCPo/uXOEeYPU9OJ5f\na7mvuiFHQA1tcnQEdPe/Av3FUXc08OY/MKB4kGWTadOATasG47nL/4FhFcPi2/uMD5pMHL+KoT8j\nxLTvNRKSz9uJJ03EH8fcJb9/4Hj8Y9wiHLf1NWDZbOSLfazldgV5HoGIedKcSLFFCHy1I3scgUzM\nitUIubP/BjcFo3LOtzDGvgMwHMAxALYiPuLvdAqryxsd3rdnWvzPD7AM7Bbdw+tcxLMSkGQVGRei\nvwAP1UdQ15rgglRmxm3LEQgrF9APfgCs/Fp2BD7aVI+33gIuvTSNjU8DhonQJJ9t6TTbUADFEUhY\n9UBxBDiLf15h7oJhMB3qiFgslHyOAACg4jtUVScWApB8Wi16bRTV467QICdHoCBfQKGv0DGfp01H\nQBmjUJ+XPQpMs+sILnIElE5rsb8YNcogud4RKPAVyLNi8rj9lWhEXzsXJB/y8uLffVA4oA02u6F8\nqqXzoTgCghLvnpQjwAU5iRAACuotQqC5Tr6hmJOF3VA20LHzpevwHdFXd0OIFgL5zbaOAAK9ZM/e\nF5TLsCI+OZ++app6T/Tni4hw2VFNpm59Z2EODRqsqyvYuzdQVVdl2WZAP2WCRKXDKHJ1IMmP/BJ5\nu5Ejge3bge9/X8A2dUPl3tK7tzXPxA1CQA1tcnQEdBQX6Dq0SuKz0/VihzmcJpM5U4b/7YlqjoAq\nBPLzjeHCPl/8nA0EgGgoH9D3G5TzP2QnBKIPG/qVi9Efi19M04F0MplwBNRHpFPHXe1+FQAA5zwA\nYC+AYsZYX5v11bSV1AoXTjP+NAYbwTnH6bNON4gAAKhuqtaSJNtiSNmQlJqRLnbU7cCqqlWGZfqL\nYPuOCA7VJbggPZGEjoAmBJSQkB/8APGH
ZH4Dbr21Q83vME2hJry67VVDp/WLg18AAATIte6TmkAI\nSNIRUO4eOiEQgQtiAnSoD8JoMPkcAQBAxU7srTG6SYCpmoQYF1ZaTKjbQoPMDyAx/lBTQ2Ls0BJi\nOcebX7+phZmo14AAoxCoKDBNwMPcEyKmdubqa4vx6KPyMr0jIDBBSxhW0RdK2LRvk6FIgt5h8/vj\n3/1+Hp9jww3J0pb7mJIj4FFm1nV0BPTXBBfksoIAUNCAQ4fkP9WR8sYD8jlkyBHwuUMIOMelx4WA\nocOqVlDxhrUcB/XcYcF4joDqCNgLAXmZf8inAJDxe4HqCHAlR0ovSgYPlvOgzPTuoXMEmAQR8Sph\n6v1z9WrgT38Cpt2v6z4pIqi01PpMycT1YHY1I0E/GItPJGhBCQ0aPBgoyu+YEDCHBmXSETD8b8bj\nlcMiPghCvHrak08Cd98tD5Ko58nXXwORVqsjAABhixAoAjDD2K8s/xb/n70vD5OjKrs/t6q7p2cm\nyWQmCQmEhKwQCIQtRNmEAAICAiIigks+QARRQRT1k4+fiCjKoriwKCrijiuLC8i+o7IFlSCQkIQA\n2SfLzPRWVff3x61bdWvrru6uW3WHzHmePJneqqq7qu59zz3nfd+lS2nd4guqIAsicC9YjsDu/hcI\nIQW4gf1y4aX77f+PDtkeX4e+r52DuvKxKyNfW7l5Zezt7Dhmx3YOo2Xs/L2dsfCWhXhpg8uHPJNB\njnW9jEQdRaBiVEBBQUBgmRoIAfbYA8IkuRErV2ZbKuw9t74HJ/3mJFz20GXOc3vcsAcAwILd7CSs\nc3qYNcjuKVBPEaBcEYD7pWtEMSJgT4RGlDXIN2EVBuyG3mOXY8WqoCLgNE0CwhUBxZKF/cFQpcy+\nbyMiwAP+P7/8Zxz3q+Mw6zuzPNvjigCfHP3WID5ZqAAetC7+p/t99/NVxfP/FiIR2OcH+2DBDxfA\nsAzHIqkRDaA6OjrCSw2qoAgExjHbGkSs+taggCJQ9ioCVbOKslGGRjSUtnQil2PBlVg/XQkiEBV8\nkggiwGuq60FrELVLY+Y6BUXADvrHld7O3jw0zllAGXjnR9j/GQ+HfCGECuWT778fuP56YJ99gH13\n2BcA0L3hAOcz3OZISc2xkbA8MOKMn+PHA5dfDkyb6rMGAdD14AKLCooAKqPR0wNoURGffV3Mm+e7\np1tRBHzWoCyLJwT2zfsFWTmPGnD++cDXv87+5uf5xz9GMNHcvv4rRogi4LeVa4Zy3ZWjkAUR+D2A\nNwC8nxDiL9T6/8CsPvdTStcKz98I9itfTAhxxC1CyDSwjsZlsO7ELWPt4Fq+zXY2E223SAliyTzP\nZFDcVD9AqaMIcAsFnzg6OoBddwVQtk9FcTMsamLdusBHU8MDyx8AAPx+ye8j3xO7gVDPSgC0PhGw\ngtYg0WetAvhEWB2MlyNg8aZJhUGsej1IBKIUATFZWCUi4J8MN6xhqzsdHa43vt7nHlvJkmf57xil\nCASq5NjXhKkAH3D87BvZ9z33XGA3bwGQwG/hJJkLAUzNrDnXQc5eBe7oCM8nUIEIBI7BtgYRs36y\nsJPvAgBUQzHXyVSEXAWr15ecVeQduqcAIOjrY8GE21m4ogQRiA4+XSLgKHmARxHwW4OsrYwI5DtL\nAUVgn/6v4tKDvwrc9Heg0xv1ZK0I8PwYWmFEt1gEFi5k9wAA3PTum3DZoZdB/90fnc/w34RqNWf1\nmBeE8FsrPYtItjVI04IxQKmSbbLw/KH/A5a812kiWA977ukb5w13zIwLz3WFjHME/IS4y5b1rHxk\n5TCuCDzzDALN4fj1Xw0lAj5ohhIVxOIgESJACDmBEHIzIeRmAF+wnz6AP0cIuYq/17b6LAIbkR4h\nhPySEHIVIeRhAF8EsBrAOeL2KaVPgFUZmgngeULINwkh3wPwT7BqQZ+xm4u1DH7j1Gu6FQdZT4Jb\nK+6V5wnqi/0NFYGoqkEbt7LAgN/gHR1sUDxsoQ6Ux7DVhOJmrFqVzHdoB/UGndjWoHwJGP2mh837\nt+sQAQhEgA6gqka+MAB3Zbc6FE8RMIbswUyvYLDcgAgIORdisnDWk7+IwLVsrww2tAbZsrq/QEAU\nEQhsy54s1kR0qU8TPBjasp4d49y5wff4j58rSRXD222Zj5E8MCoUgBm9MwLbe+hRAw891P6xt4Og\nNchmqGZ9RcATxFAN3V0EXYSpAqs2bsTSjax87uQuVj611xYMHCKgq0EEIldhBUXAQ3pCFAFOBIwt\nzBqkFcqu7c0OfHs6R+FLh30R9/52JoqjhVUABXoJcEXDqrCByj/+TRo1CZcccgluuJr1Abn+evc3\nsYhLBDjx9X/eQwRshXjPPYMLLLN3NvHqq+1/n2bAx6oFkxfgqSu/AlTG4LXXot9PQDB9OnDOOb57\nQBgz48JzXSFbq2AgHuiyVysFq5cfnoRqPxGwr/+qv6BELSwBsbZtEQEAewH4sP3vSLAgf7rw3Eni\nmyml9wJYAOAOAIcD+CSAqQCuB7BPWJdiSulnwboHvwngowA+BJZMfByl9IZ2v4BDBNpUBLImAlsq\nTuNl7w2YL3uaZgVgd4UNm0BWvM4GeF3zyoS//S0wJm+vIhf7lScCYYlrodYgAOhd5iECfs8lJwIW\nhPNdGHASMrME9yQ6AZ3tcwwkC/vVED6Y5SqoVN3vxe8N78RHhl2yMJd541qD/NeSnwjw+yBIBNj7\nVJgEeDDH/ezbbRd8T4AI2IqAWCRBrBymC4qA3w8MAIufN3DooW0felsIBB86OyfUaEAE/IpAERiV\nY2Pc6xv6nT4a2+UYEeizh7+OnFqKQGTwpbn3dVSOgGMNsklkdRMjAiQfVAR4MHX44UDZEm5+rZa5\nNYhf+2bJVQTCcNppzAZ67rmCIkBqTglR3lCt3vh5zsd0/PCHwAknhDSlIxZ+lHLSKB+rxKB+94AZ\n28XkHSlefhnYccf2FQG/XVCZqkEA65UBONagMDhdk4FIRSBQWW5EEQAopV+mlOp1/gU6BlNK/0Up\nPYVSOpFSWqSUTqOUfoJSurrOfn5KKX0bpXQ0pbSHUnoYpTS66HUTcMpjDndFoBqhCABAMaoVAwC9\nivWTbsXjrz0efM3TjMcdFPr6gJmTuYdWfSIQyxrES+Dt8BRWTr0Ctzx3C4DgeQ1TBFAYqLvqkjac\ngG4gXBEIkKCqTQT0Ciq1oCLgqbJTGBx+ycKGS4j8CbIirrm2hhNOACwaTxEI2Ix4ZYmMA0LTMlEy\nSiAg2LCGnfwJE4Lv8/8WJaOEvy39G2578TbnOcMynOtAt2vHRwXTYrCZFaJWxKlRP1nYrwjk88CE\n0WyMe+Jfq/G5ez4HAOiFjwgIioAS5UOjFAGh30NojsARn8dyk80BPJCu9DMiMNDxCjD/RgBujkBk\ngKjXMh8L/EQgagUYcKvpOIoAREWgEPp5cfyctJ2OM89kNrGAykyspgLpJOAEqpZ7jm+9Nfr9hLD8\
nBsBPBFpQBHzWoExzBKIIsRltDdp1V/fvznxMRaAaMp9oBrZsCT6tIrLIEVASjz1Zw0UXJaMIZJkk\n7rEGxZHkeCORiYvRf/ipuOmZm4Lvydmt2hFMHBpb5HkCm5QiAmHfPY41qLDVXg5YeAn69/0iFt2+\nCC9teCmECLDfTUwWRmFQLSJgKwLlCCIQmLCEWuIVI0gEuNQOABiYGJIsrFaOQGACEmTuMR1jIj9X\nqRm44w7Wkl4Evwb4aigPJv3JcXzVKGsiwIlgV74L69ay6zVMEZg0apLn8VBtCEf9/Cj8z+3/4zwn\nWoN4YMTHgbdNfhv7o38a+18FIhAx9lm1+uVD/YpAPg9MHc+i/fWTf+r8pr0GS7RwrEGKKQKRCyIv\nHQsAOGLGEZg2dprz9NTJ9vFPfRyPTzsOlFIhR0C4aN7+bXsH7B6ICqag1TInRHy8MoZYkBZ5rAJ4\nECtag7gi4B8/xVhB/DuwuETM1ImAM1/ZRGDy5GBukAjx+D1EIAFrkDJVgzwvRCsCIhEY1xOhCIQ1\nFPND2/asQcMeb6yu4eqr29/O0mUGDj88u2YqHmtQFBO/76vu3/aNjgkvRG+UJ9rRIBFwJkCthrVr\n/R9MH9wWE7hREU8R0AbtIusdrq79xtY3AttzrUFineKKEmSIg+cIWJVO6LpbKo0j0hqkV1D1EQH+\nDwDOHPUHYMMuIcnCilmDAoqAO6nVLfNrB7KGGUEEfPdBQFnhlSUyDoT4+S/mis69GUYELl94Ob5+\n+DeAJz4DAPjLQ8EbWUwW1nyKwO2n3o5r3vEj4JGL2RMqEIGIsc+sNpcjkM8DfV12tL8dK8RQzBUx\npcSK1UUpAllXDIxcBPrLdfjodj/Gb07+DWb1zcIdp96BZ85+Bvvs6Q7qRr6fNaCkJvs9hsYHtxND\nEciaEDk5DkONFQEOHsSagiKgRViDoqCCIuASAfZ9IhuJ2RBLXCauCGTZRyAqBqqTIzBxojs+7L27\n9025Qrg1SLPYxXHfyYuFJ7cxa9BbAloy1qDBkokHHgD8ZWbTgscaFHUDPn+6+3fNNs531imRY/vq\niBW0BIj+8KwDH8BdAQhrghUnR4AOBCOl/lJ/QBEY2BpCBHIVvPhis0csBzWzBpOaLNiPWP2ItgZV\nUTPc77Xk5Sr+ei9bXevp6MHM6nsAuDWpVU0WDtj0hNWtmb0Bt6ILeywwDG80x+8nTgSirTFqKAI8\n2bdD70B/P6to0tcXfN+Unin4/EGfQ9frrDpzOf964D2GZbg5AvBaBCeOmoiP73+GqygpUD6Vn/vJ\no71VjcxK89agvqL9o/Wy/IDP7P8ZbOpnwZ4/R0ArsHEn67EwKgCaN2cUvvmh/3F6X7x7l3dj7+33\n9pYGhhtEd+dHAVYOpOJT0GhjRSDr659/h+pA/RwBEY4iIBAB3o26HpEQ44agImBFjxWS4Cxc2b1T\nxkQLoAF4k8hbqBqkkCIQGQPVqRpECPDoo8BddwE7Tfa+qdARrgj85Md5LFkCHDZ3Hg6ffjh7chtM\nFh7+sBODHKtMq7BXw9JcGRXZfN0cAecF4a6OQwT4fsygIiDaQrIe+IH6RCCONcjcGjRRbyxtDCYH\n2VUiLOpVBO67L/vVQMC1hRRz4bYggMnBHjIgWIPEVd1ly6s48RQW4Y8qjHIGt7BkYZWIQDBZ2J3U\nZvbVIwJcEYhIFg5RxjxQRBHgyb55jR3omDF16ohDsDiNDma816yaM/npId15i0WgkHPHgqzBsKOr\nRQAAIABJREFUAwBHsbRhVJqzBhUKQsO4IlNb+zr7nNLCfkUg18F+86zHwqjga/Hi8KZSnlVgSpwg\nulNnb9Zrvl4ZVmNFIOvr30l2HoivCPCxzILp9EUglF0TLSsCmgkj5VuCj1XUjKcIJGoNUilHoI4i\nUO987rcfcNRRwWIIWo5tz/ARgfG9OcyZw/4WY6IRIjDc0LUOOOxipypEy+hbCsz7GUqlZA4rDsSV\nz/6SG9BHMnEhE36HCfGJgBVCBMRE0UoFePqNp/GlB74UGoingaaJgG/1xtwSogiU+/GNx77hfZKG\nKQJVrFxJsWxZkwedMCiokx9Q1KOJAOD7/oI1yBPMTXsImP0XAIwI+G0m4sCnUnJUvapBdRWBfAk4\n9FIsXv8P78f5fdZIEdBrwEFX4JkN2dbQ5IpAnrAbNkwRE9FdsMeFXDCCE5OFeWDk//6ju+3rYPyL\nwGEXY+NQ4zFFFvi5L2jhRKApRaDTK6P0FnuxYYP9ty9HQC+oQQSaDb484yUljr++qLMxQTe9Rei5\nNTIymNr3B3hi4FdNHUPS4GSmvCV+jgAhxB3P3nsaAECzGisCnm34XQXESp0U8YUry4inCDSyBvlt\npfXgVwQeedTCpk3xP58k6uUIxDmffiIwfgLbnp8IiN95hAgMR3CP/MR/A+/4Gt4YCMriTeOkD2Pz\nQHreIJEIbK64lYEiZTHDnRwnjrO/fyHkin31MM9DangtAYDXH16pAPNvmo/LHr4MNz51YxPfIDnw\n2u/iCn7OnsTidBYOswb9benfcN0/r/PtiA32p+zyYe/zehUrVjR71MmD+8MLWgMiIK5eRSgCAIB3\nfwwASzxdbdf1mmTnmIpkUCkiUKePgD9B1oN5vwAO/TIWb3zC+3FeYthqQARm3gMc8UVcuuLQVg47\nMfBrIBeXCHREz4weaxANX1V3iMB2/wHe8TWc+6fzWjjqZMDPfUH3RX9mA2tQSLLw9LHTPe/p6+zD\nK6+wv6dOZf9zRUBThQg06cv2VATTLKzdwuaRImFjguYrokFDFIHZfbPdBwdfgV9WTgsqqSnBopZD\nZsoD7MKPG8g741mfvaJjE4F694+4oh4IPomV+vXAYwKzxu7JRoqAiLDyoc3UUPErAt+7zsSHPhT/\n80kiOgYqxiKGh033xkCablcNsrzMbudxbs1R1yo7Yg0aPvB7HxPC6k3prYaJfjWRFESuClE3+POz\nd47ZPXsAv/XWG+ODShxr0MsbXo5z6InDtCyYpneF68BnXgIQPpAHZNzBIBF4eWPId7GtZF/c/8u4\n6/S7XEKRqyhx83NrUIGw0S6WIlCNUAQE5PV8gAjwa4DYfQSyyo/xI0wRmD4ddmt5giXnLQFuuzn2\n9vgKu0brd6dVBdwalAM70O7oiqkAgFHF6EhJTBaOUgR04q2f/viqx5o53ETBz31R8930TTYUy+eB\nd81+F3Yd75YS6SS9+M9/2N977MH+d3ME1CACgWD0h0/gF/OjVyg8RADAq2+yJiodnAj4I4WQqkFP\nnPkE/FixOZtVEa6IduW7YBk6NA3I5Rp8yIZ/TuS9J+IG00EiYKauCDi9UOzFu0aKQCNrUDPwEwEQ\nC3/6U9ObSQR8HBhPdwV+eg92/NujwI3PAmYhFhFYOH0hHvjIA/jqYazACrGJwKBlx3fX/RtzHn0S\nU3umOp8ZUQSGIyox+m63gLVb0iMCnuBfYMDRiTJu8Bdo
fmJj39HHAuWx3v1UG1uDOPxdWdPCuvUW\nTj7ZJQK7TdgN1hYWscbqIyASgYGJAIA3NoT0X7ATybqLBRw16yj0Fm2PgK4IEbAnwjxhXzpq0PMQ\nIdsapHfUIQJakAjwibOzW51GWkDw+v/ql4v417/c1a054+cAb+4Te1s8sCZW/VVlVcCJi454isCY\nOkumYvlQTgT8RGjpy96xRCxlnDb4mNgRQQTi5QjoKBSYavjJBZ90nn7lX30olVhJRn9nYS3PfvOs\n/fEBErzq7Viwy9TwN8NXGhjAynVs/sqDjQl+IkDNoCIwrmsc9p60t+d9vBNz2uC2oK6c21U47qq2\nP5C1DPZd4ybchikCqVuD7HuVz9ntVg1qBoHFxQyLBzjngurAsiMwq3AgsHovAPFzPg6ddii2H8Wq\nCWq6CeRKqNEy8qQArNsN48tv87xfjIlUmQsbYYQISFIE1g1sbPymhCASgTBFQLN8sx51T3uAvdso\nDeoewgAAtUoda5AvOYxmljFr4bbbgBdfZoF6Xss7+RpxrEGodgNVO3h4fT/2FAkhAnZlGR4MinXE\nVbj5uS2EE4F4igBb/cs1IAJr1rC/JzKehJy9ElzsYp9RxR7krxo0fmxncFXcd41HoWbVnMB62BAB\nm7joNB4RmDwxemYUrUHECg+md5/r/S2HjOxayzrWIOI74U02FOPvE5PLn3iARf9ip1bx/geyVwT8\nJPhjHwNmzYp+v18ReGOjTQRouCJAI/oI+IPAtnPuWgQnNl25+BWDOPzfoWyy36YeERDzApSyBlVb\nqBok3gMJKQJZqadupTd2vU4XXH5xrWKAu2BGNNPJpxyV6wVAAteWGBNlPQ7ExQgRkEQE1g+maA0S\nfJiVmomnnmJ/85sgXxPqQFMCCINWlDVoaFAHQKBR9/VaJUQR4J+fdReWz/uo83xmJcOIBez9Y1z8\nIOsAWtALdYlAwBpk5YCyvcz3xnz2vxbyXWxrEE+iEuuIq0AEuDUoD/alo4JAb44AC5oqfc8Ch10S\n/gErD8NgK6H8OuADX/8BHweIhRfeeBWLbmON2LKEf1W0pyvkAqDxhsCqWXUVAVNNa9Ddr9yNs+88\n2yEsfitTIyJw0QV1rEGWYA0KKSMMAP/v/3KBz2QFZxGEFgBLOMeNrEEhOQKAN7n8b7ez8WGvvdy3\n8vuf5CpAYSsue+4sPLj8wfa+RBvwj78LF9Z/P6+ww/GDn7GFrJzFAmn/aroVoggAwSAwe0UgfsUg\nDv93qFC2rbjBdEAN19KzBj39xtM44/Yz8MZWVvnLrLHv0sgW6GmIJs4JZvOrHYGYgpgN958UtlS2\n4Kw7zsKjKx8FIMwBIUSgmfHbWTDTTKCT3RvdOisi4L+2VKukGAfbLBHgtbCxYef6b2wRG4ayUQRW\nvmZgv/2A5cvdmyBfFUpi+gKfKGvQ0AC78HUi3NRmnT4Cc27H5pk/dHeTojXIoz4QCzjhTPzXuBcA\nIwK8lGtojoDfGmTlgfVzWLCz/NDondqKAF8pU0kRoNStGsQbnUQNxN7VH/tNdaRcPrFwNQBwB36q\nV4GdHsKnnjgRtyy+Bcf84pgWv0Ey8K+K9o5qkwgYXiIg3gfnzj+3tYNMEEf/4mjc9MxNuPpx1hnR\nb2VqRATmzilE9lERrUG8SZF/It1x+5gm7BTAzz01dW+55EbWoJAcAQAeD/Da19l1tLfggvEoAvv+\nAH9Z/SMsvKVB9C0RfhLcKBAWrU8AgNFvAgDyJvOU7Fn+lOdloxpPEVg9uDrO4SYOrnBwa1g7ikAN\njCQNB2vQ/Jvm4+bnbsZXHv4KgPCS32EQ51Dv8TdfTj1MEQgrWSsDlz10GX707I9w8M0HAxC+i70Y\nMF5YE602UdjQowgU2SJvF2ELAoF7QLAGjRABxfGF7pdxz2kPoedfX2xvQ3+8BfjWcrxvw7Oep8Uy\nnrLhsUDYQdx//+tOhnpFuPp9Vogoa9CgTQRyIhGwq6WIk0oUkUjTGuT5/rr37i7oBaxbx/4e6015\nYG8PUwRu/T2OXvYfYO3uwQ84H/SudqqqCGgWO1lRA7FTOx6IJwOH1KX2nOtcBcsG/gUgO1sAhz8Y\nGjs6+P2KRXcIzG2dEbmtUtVVBMIqz3z3Xd/Fz9/z8zaONjn8841/AkCAuMRZFYxSCGtmzbUGRayq\nR40FWYCfe2rqnipprVQN4s9/e6dVwNVvgAdHHiIg3P9h5VfThp8ENyICn3rbp/D8Oc9jnDaNPTF+\nCQBgjMnuib1xJhaf43ZN3TrAAqxGikBWXWW5esV7XrSjCBhaY0VAtapBzrEYzfVAAILnTFz0iYOw\nHIG0FIFVW1Z5HjvfJaQTNi8BHAfOgqHuKgKdYIpApDVIgaZ6cbHNEoEpo3fCEbPfgScfad4D58GW\nHYHNO+FDR87zPL2pmp4i4JHgbW93LifI42XRGuQ95VET/+BWPfh6CBGIIhJpKgJOgAYAee+dR40C\nBgaYlWXcuOBnAzkCVg6o9GCvKbMDydLeD/qIgEKKAODmCBA72StqIPZ0FDUba6W8QY1ILF7fKpTc\nzQ86577dLt3tIqgIBGfDO293z3/3pv0it3XoEVUMVdl1Ro2gNUjXdMzfYX47h5sYlm9aDgABK1Mj\nRQDwJQoKEPsI8GvAHwSqRATchkq5UEWgmapBHPvPnQwMsKTBUaO8nnvn/tcrbE6wkXU/FY5GgTAh\nBHtM3APb9dkXyXjWIn1UjVmiursI5k0U5rgOlgjUSBHIqpkU/901u9RtM4Gwf04w9XYVgfSrBjnH\nYsRTBEQiI56zRx8Fnn027BPRCFME0iICfjgLAvYCqHg/N0UEQnIEipQpAiPWoGEMPilOHN9Ep4ww\n2MFx71jvT7m5mpEiYHc11TRBHjc63OpI1LsCHjV5lwbZ+wriwG4HAKopAnzlM/S1EjvmnSMcYAFr\nkP0dFywA+63KEaO/T3lQThGwrUGcCEQpAp7Oq0ZjIrB1U9Bz6ikRKDSmiyKZacGfLBxmDZo5w71v\nA4mlApYtr+LNtfZ1FpFw6u9imxX4+XDui4SIgLPg0IQikFXRAD72WQFFoLnOwocJZcR32cX9e/fd\nvQm0/P6nesWjuq7YlE35zGatQRwdOfukjmJdAzvLjAgErh2bCLwVFQH/IpaVay5HQIWqQc6xhJT8\nDoN4n4rn7MADge23b26fYTkCWRVW8FuDCgVgyhT21Pwm1m2cOIG41qAOK1wRcPsIGKhWgczqpjQB\ndZZwUgYPZNoOVuxB32872VpLUREwg4pAqQSYPcJkWOoFOrYGFYGIFf2a7QH1vN6MNShFReCSByIS\nWwFUhtgIFEkEwqxBAPbd135c6gOKIWVw/NagjBUBcfKxqOVYg6hRP0egWUVgcEuQCKzcvNJ90Ole\n91HXVlrwB0NhdfLF1b8OUidS1qvYtNVWBGrhycKe3zIDFHNFlI0yNpVZG0+uCPDjbYcIVAw3WdiK\nSEAMGwtKRgl
d+Rg7Thj83FuGL0egiapBP/uphlPf474mBoK8bCgHv/83F5YAu/3OeX5p/1LMHjcb\naaNZaxCHeP4JNOQHdwIQRgTshmMNFIGsikb4K1w1owgEjtke60fXqTSuWtUgDjOkCWjDz7Sp4oQp\nArKJ0NrBtfjIbR/BE6+5vSze9Qu3/we1F0ALBeCJJ4A//xn48IdDNxUKJ04QkoULVniOAB8H9UIN\nJrIvJRwH26wi4BCBZoKVf78/+Jx9gfnr9A6Z6dVQDMsRKJXcAckyNBbQAg5xmUIWAACOnX1s+Dar\nMa1BEUQqzQnghqduiHytNMAmgtkRc3GoNQgCsSv7Znzng2rlCHj6R1DTUQRotb4i4GmhHkcR2By0\nBl1z5DXCBtVRBPzBUFig7iECWh39Wqth4xZvYK2aIsBrXXNwRcAy4uUIANFE4MyzDax4jdcmD14D\nQDgRyKqXAD/3ZoQiECdH4Jh3aZG158WKQYDv2tr1NufPtYNr4x90gvCMv2v2aIkIdNEJqJYiuurG\nVQQUsQY1owgE5q67voViMX654IAKlmLVID/iKgKiNejoWUcDAA6YckBL+wzLEZD9/S++72Lc9cpd\n2FxxS33f9cpd+NaT32IPTNcaNHkycPbZzZFDb9UgNsflauFVg/g9kOtg1+BwsAdts0SAD2wNg5U3\nhRH/d78CvrEeRU1YGrDCiUDFLCVwlPHgJQLs76EhnzzOA1pbETh/9GPY+LmNmNEbniBZrdgMOicS\ngSasQRk1FPPDrLDRW6wWICK0ahDYgLFkCbDnLn0RH6x5cg6yVgTECde0TCdHgBOBSEVADF6tnF1e\nNhpbQqxBi/ZahMsOvYw9EBSBqKAyLTi/yS33ovf6Ac9kxyESgc5cfUVg86AdWEcRAQUUAQ5KqasI\nVNtXBMqVGu74MyMCZpW9x08EAuoasgsEHUWgFl41KI4iEFgkAPD3vwPnnw9c4hMho0hgVivi/Pv3\n/PlPwPefiR30+BXgyIprxZg5Ahlbg/h43rIicOU64MkLmqrDr5I1iFd5a8YaNLVnKjZ+biMeXvRw\nS/vMQhHYWK7vwOA5Aq1alJyxjbiKACcC/muLjwUjRGAYgAcyYYO9gw2z4S2fRYDSOBR0IfilOggJ\nTooVKz0i4EkWJq4iwCcD07CtQYCjYHR25NDb2Rs58XNrUKGBIqBCjkA9mLXwoIUjyhqUzwNz5gCz\ndwxXBMb01rB8ufs4a0XA31SOW4PMSvxk4WuuIQCpf95KA+G2kJ3H2d4rMUcga2sQD0Kqo1DUw38A\n8f4vxiQCkdYgRXIEAGbJ4YqAmYA1CJoBqrGJjTcWjKMIZBUI8vvBMnItVw0KmxsWLACuvTa4ChhF\nAjMjAvbvXtvcB1i5lhQBaukBIjCq4D3pjRSBrL6/0/PCbFMRGGIrSI0q56haNciMqQj40dvZG0rs\n4yAsRyBrewwVcgRagTMWCDkC1iCLDfzjIB8L9IIazQXjYJslAnEmRWyeGlghvflmoJAXbhBLR3d3\nsPNijaZ39sMUgVJJUAQM3bUGUe8NEVkusMre15GrTwQicwwybCYkImr1kiMw2Zt56Dqg26e4rxiu\nCFBS82xTVAT608sTdxBlDTLKbLkijjXowgvj7Cg8COzrtH8nMUcga2uQ0EwmakVQPP89nfWJwNYh\nO7CuhgeTOS1Xf2FBMsRxYKA64CgCViU+EYgkb5rheKVr5fhEwJ+wnRb4/WD4cwTswDBqhbeRIhAF\nVRWBStle+GmFCJhBItDT4ZW+/feAatYgJJEjAOC009r4fEqBcNjiG+8s3Iw1qF1koQg0qlAXVjWo\nGbjJwpYzx1U3szmvzxci8LFA62Ax4AgRUBixylltngJREfjTn4BFi3wTnpUL3dZgpQQzpTHQSwQs\nABSbBodwwd0XAADMmmAN8klkUSuAZo0pHR5rUBNVg+pV8kkTZoSfmSNgDaKaZ3Lr7QxXBAwakSNw\nwpnY9NHtsPeN+2BLJb08EXHCfX7N8/jOP77DjrPchDUoDqxwRcD5nTJKFqaU4tTfnYpFty1ynnPu\nCysXORGKwd4Jx9QnAoNlO7COIAJAtvYg8Rp4+MkB5x6sVSJ83iGIVAR6VuK/21/KtlcOJ9ehikDG\n1iCzFp4jENZTBGisCEQhchzNSBHhwahpsDyHuCuhnu8RYg3qKXqJgD9+VM4a1K4iYOPkk9v4fErW\nIG4HFcHzeRoRoSQV/CxyBBoRGWomaQ1iK32ljWzOCxABew7Q8naHdzVCobrYZolAw0nRKKDwpNcI\neqydV+sJHqkeGmSVzRJ+8pO2DjE2PFWDAEAz8XzlT85DVjUoXBGInPgtHZ2dwNiiMGNW2CQQJ1nY\nU9s/QxiVJqxBZg4A8awaeL6/gJoVUT4UALrX4bk1z+K51c+1csgtIWrltTbUoHyoL3A9ebcGM16E\nIjBp1CT2x+g3nOfSzBEYrA3i1v/cilsW3+I85wQhlh6LCEzq893Ib+zr/q1XUa6xa9qohFuDgGzt\nQeI18L7TBvHCS/bxltkxxenuGXnODv6682d1GCkCpj9HoMrOcSQRaFER0IiGA6ccGHg+a2sQH8fj\nLviK598KUQRuPPZGtvp65/dDP6+aNYgaySgC06bV/4y4Ih3Ij0vJGjRYGww8Zwwx6atZa1A7yEIR\naARSZb9Dq0SAz5MGys5i18D6+ooAyY9Yg5RHQ0XgawM49oAZocmTXkUgnAggV8aVV6ZTQzYw2RIT\nW6tu9jz0KrSqN0egIRGgOrq64E0m3shqSqumCPCBp686L/AaJwJR59tD6uzVbnGw6M6Hf9D/m4cR\nojRXw6L2xYlArPKhAH5z8m/wk6Pdqie49fcYfd/N7uMIRWCH0TuwbY1a4zyXpjVoqDbk/O0EgYI1\nKA4RCJS5/OETwJIT2d96FVWrvjUIyFgREK+BwgCeepYdb7WUABEQUC3FLx+a1YowP/eGXxGwlVF/\ncQeOVhUBAHho0UN478yPeJ7L2hoEqje1Gu4hAkaQCBy808GoXlLF0z84G0uWBD/vv+dfWGLijDOa\nOfJk4FhTjWQUgVwThdYDn9dMlFJIGRyoDgSPZR2bs1O1BmWQI9DIGqRvYb9Dq0SA58aU6YCTI7B1\nLVtNCJQS9ikCI0RAYTRkyFYes2d7m8hweFaRIxQB5Et46SVgIHhvJo4AEdAMbDCXu497ViJfa00R\nmNUntM/sZzeTuLoSRQSWvFzBv/8d9xu0B6eDshVc9omyMXB4Jns7B0IcLPzJcVHYUAq2KUwzCIiy\nYFSG6ucI+FewCSGYsZ2QGWfmvQGyGR4EakTD9N7pnufStAaJkyBfDWxWEejMC9GCpTPSYzdkg15z\niECtpL4igMIAttg5DdWhZIkAJTV0dAT9tmFjwS0/N1IZA/1wcgT8ioCtjEZ5hVtVBAA2L3QXvb9f\nVtYot5FSc0RA/P5higDAzvM++7BiCvU+DwDrNpi4+eb0myo5Y4DRZtWg
FhBmDRoclP8bDFaDikBt\nLZu/m6ka1C78dluiW7AswMhGHAQAaJtY7NJqjkB3gU14/bU3Ac2CboxG/wZ7ATJKEciNEAHlEYcA\n9/QAY8a0pgiQPFsCWL++1SOMj0BirmZiI5a6j3tWOM0v4uYIgOrQdWBm70z3uRBFICrYW/VmBfvt\nF/srtAU+8BIrONrxOtixrEFCxSCOuERg1ZZVgefSDAKiLBjlgfqKgKePgI1RReF3pDo6C2KJ0eic\nC8+1ArYylFb1KHESdIKAJpOFPfeCTXi4xxi5MgywqGhoC/s9wlaVVckRQGEA0G2P6mCyRABjVoU2\nVwojAlddbQZKbaYBfj8Y1Zx7LgGgHOEJstGOIgAA3UU1rDEOCaZay4pAWLJwI4StBgNAtRryZolw\nrEG19hWBHXds/Jl6VYPyBQuUAoPBOD1RbK36ytWVe1jVKLS+Et4K/OqCnrMT1yWqAg0Vjf5kFIFB\nkzktcrVebGJ9GwM2Q6fLeK4EaMYIERjuGDMm/AKLkyNAcyUANBUiEKYIbNYEItCxBQXTqwhwRK7a\nWjoqFWDHMcIouHUygHjWIOiVVG4AT6BpBu/yKD8zh+dcmkFrUFwiELYao4I1qLSlfo5AWNdXf2+B\nbnE5KUIRAIJE4PHXHkfflX2487931jnyZJCEIuC5lm0LnRNEnnAmrKmsrnat3IF8Pjy48CsCaZbR\n9YwDp57kNLYqJ00EiptCtxVIvAcAzcCTTzbeZNJwkoWrOpATBiKrvsdDvAYa2Q3C4CmuAOD1N9Kx\nhYj4xF8+gf9u+C97UIcEhyGqfGjcQDrMHw6kvyrqdsFuL0fg9NOBv/2t8WfEccR/H3R2s2tRdlnp\nk3/jy+/aOBM8581f1dCPJK1B/vtGz7HfM8s8AbqhPSLgtwjTUh8oZYtBum/Y43PAup67gf8dg39s\nuKe1naaIESLgR60TeHYRgOjBz181KFJ206vpKAL+ZGFioqIJDTZu/zG6BnfD3pP2Bl5knmd+U9ZT\nBMplYN7Eedh/x/2xy+CZDomIRQRsWUx2HMQHbY1ooGaQ1BhD3SAk+lw2sgZxSdDB/ZchNzANd3/w\nbs/T3z7625HHlgai1Aej1IlCIfr7n7ffeZgzfg6+fribDOpZ1bZyoYpAGBGYOCpYbHtTeROO//Xx\njb9AmxCJAFfImq0a5AliOGGuhkS8Rgd6e8NVRb8iYFopXgMRZLC8hX2HOJXSPPfzxpnQt+7keX1i\nYTrwyMWhRCB0BZ2YAek8DTh5IoYO5IcavNuFGMS1Ehz5A+Grr7Fw9NFNb6YtXPfP69wHdUhwGLyK\nQK55IhBSMQZIPwh0FgPaVAR+/nNg112j33vF4Vdg1/G74qP7fNR57uYTbsb0sdMxpoMlqBaLbHtb\nJBeRWz/kCzaGJgCIlyic5IJFQBHIs2sgq5Xxo2cdDWMzm5tatQYVc0XP+Ga8dDiAoC0I8M0B+RJe\n2PpYaztNESNEQMBpu5yNP+w7BNzOkiPXrQtfFfLaSfRom1G+lJkiYIKNvA8evxJYcQiKuSKe+dgz\nwL3fAOAOzJFNQyxGBPJ6Ho+f+Tg+1PND56U4VYO4LWH16ua/TzMQiQBCiABKfejujraCec8l+7wo\ng3sUgT/+BHj4Elw1+VUcOfNIz3b2m7wfLl94uee5Wlr1Y1GnOotRjKyQArCyn0vOW4LPH/R55zm/\nIsDLZQJwfuOwTs2duSZm24QhVswIswY1rQjYFjonyV6E2RFIEOPwKwKDpeyvAWuwL9TT3xDfeQXj\nln3CfXzLfTinsgxYPyeUCBBCgqqAZng6cKcFp7Nws0SgxSZKHEFrjIWHW2vQmgyo1jIRgKWDUrYw\nEjdZNqgIZBMEOmNAJZmqQVH4wkFfwAvnveApq7rnpD2x7PxlOHlXtkJf7GTbk6kIiJ3EHdhJ8s18\ndxlIQxGIUu/+9IE/4Wu7/RWb+tnrrSoChBDP/GbdzeKosHnAPwdUskyOiIkRIiCgb0wB73kPcMEF\nLBh8//vDV4VCLQRhyJWwbp2EA/UhSARMmMQOxF9nF6U/GJwwocFGbUWAY/Jk929xYmmkCLz8coP9\ntIlGigBKfXUtEd6qQey7iCunHiJgW4/OOit8W3714LQPmvjFL6L3nSQibUi1zsgKKVHwKwLcY84e\ns984LLgLyzdIC61ag8TAL+y+HpMPWfIx6hABnyKwdTBFe1hUTkqDe6AedAj3VKkXz9kVcWNvTzMj\nm3fJhEcRKMQ3Z4fam5pAIBDWskkWdtCkNchDZGwyHDc/IPB5IHNrkFkL9r5phKSUXL7FpUVuAAAg\nAElEQVTI0NEp3xoU2sDTnq/iEEG51qBsVCGAzUmf/rT7uJ0yqp44y54fGioCAGpGxmNADIwQAQGc\nyX3rW8DGjcD06eHv8waPdRSB4uaMkoUNWDYR+Ntf2Xc66ij20oMPAldeCbzjHQ02aq8GcYjEQfy+\n0USAjfyyu+w6FYOIBmqEEYHeukFLmDVIJAIeb6BZP/HYn09QKln44Aej950EeAAcGQQaLRABnyJQ\nHgwqAmGeU0/VHQFpVA/yE4GB6kDTycJhQVC3Hq4IRKks/tWggYH0rEGRqlCDe6AeKkPC/V3qw+23\nsz+j6qoHaqhrRqqJojxXh/8WlpELKALiooYfMhSBTNGGNYgHO00RgQgilDYRcOyBkhWBeuBjS66b\nMQCZRCC0XLe9aJO1NUhLQxGICMKqQ5145BH29ze/2V7StNOkTkAoEfDNAdURRWB4QWwexW+eMMnJ\nv3IYSQQ+sSvWrZefLBjWR4Bq7K57/GH2RXgztEMOAS66KEbVJJ/SEVU5ITLIs61BtZCFiiTBB22d\n6LBCiUADRSCkalCkImDVD2gDicWaGbuRTyu4/OHLMfqK0Xh4xcPRQaCZr2sNCkMcRSAMUdagNPoJ\niMnaf1/1d4y+YjQ2luw8mTrBkHh/+4sAAHDL7oqgWmxFYGAondUgSml0AFPraokILFgAnHiClwhw\nHHdc9HF4QNJLlv3Nf36DUVeMwrVPXuvtLCzkeXzhC2wxJArtNsGLSpbNDG1ag4B2FYFsrUGcCDSj\nCPR1suu8Xasjn1uWTPksMH6JXCIQ1sCzCUUgqnFmK/DfQ7qenTXoqSc6YVnAO98JjzLQCgKLHIiw\nBvnmgEp1hAgMC9x8ws04dNqhOP9t58d6vz94rJeR//oa+bNgIAjUa6A6Gwj717ObcsqUJjdqeYnA\nvHnAeecxVi0iUhHQ2DGlRQQiFYFyb90VcU/wZ3sqPYqAaPcJsx4JCDQfI2bzv3sTuOQBVpfxs3/7\nbLg16LkPA6W+phUBfzAwbUpQEQhDlCKQRodhURG48vErvS/WSRYWV5L8uT8AoNeEkX7zjuh94xRg\n85TYOQIXXGimUkO9fqlaEpsIiL/H3/8O7LefcPB2QF0oAEccEf75QB8FzUiNCJx/Fxu/P333p719\nBO78AfabeAAe+MgDuOIKYNa
s6G1s170dFu21CJ/d/7MtHUNUIJwZ6uTHhMFDZOyFkWaIQKB8bsbW\nIKPcvCJw5wfuxAFTDsCDix5s6xg8avP+35SaLByqCMQgAg8vehgHTjkQvz7514kdy0FTD8KJc07E\n1J6pAAAtQ2vQIw+wOemEExLcqFCdMI4isHVIfSLQRL+8ty4W7bUIi/ZaFPpawxyBetYgAC+8VAbQ\nxEjaAgJVg2xbTl7LY8tmNhg1GwyCap5qF4QA3/te8G1Rq71azoKFdImAVQs5lsqYunWgPcGfEay5\n7znXDVb3whQBmUSA49VNrwYDwQ2zgNtuARDMD2kE8Zo/7ngLl36mA/P5PFFHEYjKEUjbGjShawJe\n2vCS+2LMYChUETCEkf4v16G6ilVAiqsIPPaYhaeegvSeGo1K1cYlAv4V/a0VcRmTXRfbbx9dgWh0\nYTTKhhD1pdRVFfDeq55k4fVz8Nf3PRY7afnmE25u/KYIZK0IBBaFrOZyBMLyZJohAoHFgIyrBtVa\nUAT2mrQXHjuj/UovHiIw+g0lFYGDdzoYj57xaKLHohENf3z/H3HT0zfh7D+dnYoiEIXHHmIX//EJ\nFq4jVt7RBuLkCAwOAyIwogg0QGjVIF/AUI8IrHi9JL2zZmDwzzObREeuA6USq3Mbp3SgiP+7WMcv\nf9n4fVGKAAWbANIkAmaYIkC1up5gz2Bdq998C3oV++8fva3AJEgsqRVTeNCxfmh94BrIUXf2bpoE\nCvjSpSZm7uRVBKLITaQ1SJOvCIhVg8QKHgBi+6Q9zcVy7B4/4kAh4i/1Ok2BInME/Cuimok0ikdF\nWsNstJojIDYpOpxVzMO559bZT4AMp6cIhBEBo+ZtoCgbWecIBFaGm7QGtZssHBgDMrYG8YaSWVTO\n8cQJY15PP0egCWuQDPDxNA1FICpHYGhzJ7q7W3BE1IFG3cEkTtWgwdIIEXhLwhM8Ug2nn17nzfkS\n/vMfuccTSBa2k+MKGrsgWWO0GBuy3O914KFDkaueIiKJAEmHCPCV0EhFAPU7Q3qtQfWJwKmn1/CH\nP0RvK1BHnZhSv/+0sdOcv/0rwqO73JmvmdUwP0zL9AS3u++Wx50R/cGirEE5kq41SPwbQOzKKaI6\nNHkHHdddB3z1EuEmELrSLlwYvo2ANYaYqSTLNupi3exCAIeoCPzqV8AvfsFyjKIwusPXclhijgCl\nFEs3LvXkCXFwYmTWgt3CZSIsWTZu6c0kEFgZbtIaJEsRSIsIDFQHsGZgjasIlJuvGpQUVFEEmlWE\nkwIfT7UMFQEYnaGlrtuBSARiKQLlESIw7OHprGtDXDV58UXUbxiTK0svoRlYDbTL5eUJuyBbWRH2\nyPt1UN/2QdNLFtZ0p1ScH/GtQSxajAqaTntfJyZNit5WoEuvJjcI3Gms2+zJX9GgO++uzC5b1vo+\nCnrBE9xe/9089twz/L1RioCGdK1BWyo+M25MRUDMZego6Pj4x4Exo9zAqKizC+PUU4F99w3fRtg1\nMBi/emXLaKQIbL99a9vlTZEAVjnstNPqdylNUxH4/tPfx6zvzsLF910MwKcIWF5FIDUiEKIIjB4d\n/l4ZCKwMN1k1yJsj0DwRCNgDU84RmHj1REy6ZhLWDq4FkK0i4CEC3euwebO8fYUrAuxc1lPEZYJ/\nf6JnlyMAo5gYEeBjW3d5Z+e5MCLgXxwtVUaIwLDHt4/+Nk6Ze4rHM/jJBZ/EodMOxdcO+xp22cV9\n7/0fvh+n7n4qfn/K790ncyXpHQWD1iCmCOSaJQKaBfz2VsytfRjHzj421kc8F/0b+wL/PkXYntwV\nccBnDap6J+H8H9h5qEcEPNYv2xrkn/h+/p6f44y9zsAxs4+peyxzJ8zFpxZ8Sti4JZUIiJO2J/hd\nfggWGJ/Fl77EHn7qU2ga3z/u+/jEfp/AXpP2Cu8YG4KoHIE0iIB4D2wuuzPuPpsuA0DqBkNXvfMq\nfPGgLzrVQgDv6vK1R12LC99+IX767Rn42teAW26J3tYH533Q22yOWKkQAR74duu+m/2HjwOo3x21\nHi7c/0KcuvupgU7aURhd8EW9EnMErnr8KgDA1x9jXbHDrEGwdGgas0emgbAcAZmVw/wIKgJaujkC\nGVuDhmps7nth3QsAXCKQhSLgL0W7YYO8fdVTBLIiAnwMTUMRiCx/ahYSIwIPLXoIJ+92Mha8/lPn\nuTDXRMCmlHXBgBgYSRZugImjJuLWk2/1PHfkzCMDnWUBYOH0hVg4nXkGDpp6EB5d+SiQl08EAsnC\ndo6ATltQBP5zCi655BTkY06cnolj81Tgd7dC3/2PMFGzrTFyL7HIZOHX9kft+ZMAADvsEP15z01r\nst/Lv3p4+rzTcfq8ev4vd1vffte38fTiCh6rfF+6LUS0g2yusOB3Z/IuvPSTv2DnLwBf+hJw4YVo\nqaHT2fueHfp8vRrbUdYgUUqVhbDf4tqjrsU/v3c+nkF9n+xnDwhWiBEn8fPfHq+aGADMmzgPd3/w\nbpBPTwPGrgCIKT1HCHCJUEErYtC0idB9XwVWsaSWuETAP4n1FHvwq/f+KvZxpKkIzOidgWX9rtwl\nrsaLzeTSyg/wHwOA1KxhHMEcgTasQS1UDVIlWZjDrBZASHqKkAj/AopMIsAV4U69GyXTXnmwiUC9\n+U8m0lQE6imiDZunxsQ+2++D377vtzhRaBJazyHgQBtRBLZZOKujuZJUbyAQrQgQy80RaAaHHBL/\nvZ6Jzx54NL6aSqz0FAFo3tKWQvnT2AOB/fl2Pb177mHvW7I1SAzKnVVw6lohCGmNBMTdpx+R1iBL\nfiTm/S0Y8+7Md+LFF9lzzSZtt9thtqvoXgOpKAI2ESLUvXh7et2br1VFoFmkmSMwfay342OoIkD1\nVIPAMEUgVSLgXxlu1hoUkizczGp6UBHIpnyoA7OAzs6YOXIJw19oZP1GeSvDnAAWtWADzMwUAZ4j\noMm/BuoRgaRzBF4SCtJtt13j9x962AgR2GbhDIi5slRF4M2tb+LZ1c96nyzwFYF4ioA4YI0dG5Pl\n2vCsINmBtM5XQlK0BhFo3tKW9mrW2LFNVAyxB852bQTOarLkIEBMEOar4MT+3rISFOsRgShrUKNG\nbO1gS2ULnnjtCc9vYVI28G5aX8TTTwOjR0cn90ah3Q6zEyfa90BK1iA+ERK4xz3/7VXoOmsMFifx\nH2i/w+iofHqKwKRR7kC1tbI1NFk4e0WAjQFp9JJYM7AG/3j9H94nm6wa1K41KJgjkJ41KPTaNfOZ\n5AcAweB0fb+8JXFOAAskSAS2WUWAsrgmaSLwla+wmOqee+K9f3SP+kRgxBokCY5EKtkatMM3Q+5y\nWxGgtXhEQCOas4K2YkVz+/cM/DU2COmaDliQXjUHEFZC/YqAPYk1JQvaxKndINqRhIkpdfDz2GFC\nFAEZCJTmFBBFBIgpLxI74EcH4D/rwstyvfIiuwePPDL+qmZBL6BqVjF3wty2jouXH03L
GuRUzxIU\ngZOOmIzbPtnciu6svjrdtmJgzvg53ifsHAFK5a7KLt+0PDRZGFYuW0XAtsYYhnx7yqRrwlZwSOs5\nAq2UD82walCgeh7AFIEWS+e2C78607+1Asvqqpts3/K+bEWggCARmDo1+f3FASfmRJOfIxBaNc1u\nEJo0EXjve4GTToo/njUq5KACRoiAJIjWINk5AgHYOQJWTCJACAHvkNGslWRMxxh866hv4ZFXnsEf\nrvskACCnZWANIrpPEWiFCLCBs10i4KxMpmkNshUBan/vpBWBP77/j/j32n9j/g7zI98TVcvZMuUN\nM1EkAAA2rGZBSTMK19NnP40fPfMjXHLIJW0dl6MopFw1iEAHfvwI9nz/7Thrn7NQaFLY+N+D/hel\nWgkf2OMDLR3H2fuejRWbV+DupXfj+TXPQ8sZsCxWRjjplXnx+h+sDUZag7JWBACgWs3Gpw40V0fe\nQ2QSUQTSswaFVs6pdWemCPgruVGtgk2bwivNtAtOOnLUJQJf+XIeR4xP3h4aF44ikEKeSGiwbVcB\nnD07+f01IgF3nHoHvvHYN/DYa4+NEIFtGWlZg8Kgdw7BBGBV2QzQqHxd3KowUbjg7Rfg1GnAH9bZ\n+9dda5Bsf6wTDND2FYF99y5gzcsNysHGQBbWoMee3gzkXGtQ0kHHiXNOxIlzTmzpszUjm6oJ620i\n0MzEu/t2u+NbR3+r7X3rQp5MKoqAmCOw8iC8o3xQ0yQAALoL3bjmqGtaPo68nseV77wSNbOG59c8\nj3zBRAVAqZQ8ERCtIBWjEqEIZJ8jADAi0Govh3bRerJw80QgrJcKkE6ycGjlnGp3JhWDgCARgF7B\n+vWSiIBNgjTTPVkzdirg7Xskv6+44PNgGopAaLBtK/yHHipvv1F49y7vRle+C0f87IhhQQRGcgQk\nwSECefnJwn7kOtkSpFFhN0KjCSise3KzmDABmDsXOPhgMQjKMkegeSJwykkFrFjRehdWDtEalFbV\noFdeY4qAachRBNrB8hUmvvvd9Pe7ZhVbEZIx8TaCeA1kkSOQ1eozBw8Cch3suGTkCYiKwJoNFQxu\nVVERYMeRZsKwH82siHuThZuvGhRAitYgvyKgQQOMYmaKQICY5CrSKgfxfWmmO9mLfVGygCbkCgJZ\nKAIduPfe+n1PZIKT6hEisA3D8UqmaQ2yk2NqO90FoAkikIB5V9eBxYuBhx7yroinSgTaVAQKeiGR\nQSMLaxCmPwAA6N+gRiDogWa21MugXbz5GrsH4ybKJom0rUF8BZxXDcr6/PNJMF9gxyWbCLz/tAqe\nfNyVQFRUBLJCmsnCAaRIBO565S7P4059FACilCKwcaOcfXESRGrqEAGnj0BGikBXRwcOP1zePhth\nhAiMQMgRSNEaZHvirCJbdqiV2AzQaCBv1xrEoevMO6elWDXImfBpeNWgZhKFkho4s7AGcWxYJ7dq\nUEvIqKHKlg3NW4OSgqiKpdlHgFA1FCH+/XOFdBQB5CrOPQ8IiaNWLlNFYNSYYUwEWrAGhYNKJwKP\nrHgEZ915lue5DruUpio5AshVpHUXdtSHqjpEIGtFIIcmLnwJ4PdSaCKzYhghApIgWoPSJgIcnAg0\nUgSSIgIcWViDENFHYOzY+NtKauD0W4NklQ4MHWAkVw1qhAc+8gC+ccQ3vE9qGQ2ERnZEwL0GUuos\nTNVUBHISFQEK4cbSvUTAqaJV6clUEdD07K1BzXz/sGThphpShkEzpROBJ1c9GXiugzCPZ1aKQCB5\nWZdIBLgiYLiTfUCdShnighiQPhHQkREDtDGiCIzAUz50cBAw04iFfESgAsZAGq3oJJEjIEK0RaRm\nDaK+qkH2JNYoUVpEYooAL5uWc0sHykBoTX9JVYPi4tBph+JzB37O+6StCFjRLQhaQpgi4oGRXY6A\neA8MDcnfnzPZZEwEOZwcgXw2ikB/uZ/9UerLVBHQdHaMssfBemgm+AxTBGbMaPMAJJdRBsIXszgR\nUEkR2LRJzr4cRUAha1DWioBO1VAERojANgxuDcoV2QyYRjDgJwJ09CoA6SsC4mpoekQgXBFopnRa\n0tYgXZe7EhIaCEuqGtQW7BbrSa8KDtYaLLXXMswREFSxNCqmqJojoOfTyRHwKwIbS7YZu9SbqSLA\nK6bIVgTqNYLba6/42/EQGZtUtl2HnVjSFQEPgbGRt2vqZ6YI+JOFU1AEVLIGiZXTgPSJgGaNEIG4\nGCECksCtQXoHGwFlTIS/+tevvE8Yvgt/TDwikESysIhMrEH+HAHaPBHo0JMZODgR4kGQrCCgnjUo\na4+4B/aKUNJkeKDawHxvZEcERDKcBhEQO+kC2Z9/Pgbodo6AjIWQeoqAYRno0LoAsyNjRUDOGLCx\ntBE/euZH2FphJemigo2VK4HJk+Nv16sIsL/bnh4kW4PWDa7Dzc/dHHg+Z6qnCMjOEaAKEQE+Br5W\n/g+wxy9RKstrrx22KKbREWtQXCRCBAgh7yWEfIcQ8jAhZDMhxCKE/LSJz//Q/oxFCAkVIgkhGiHk\n04SQxYSQIULIBkLInwkh+yfxHZIGtwZpHXIUgfuW3YfT/nCa90mfIoDlhwJobA06ZvYxAIB37PSO\nRI4tC2sQpZrTEIy90Lw1aEZvuxo4g1MtISeZCIQqAmpYQzywrUFJe+UHq3U2aDGFqLu7uWTJpCDe\nA6koAqrmCOQk5ghQX44A9TZO6NYZA3wrKgKn/u5UnHXnWTj3z+cCiOiqC2DKlOa2G2YNahYBqymR\nSwSO//XxWLxmceD5pS9mqwgcPPVg7xO6PGsQJx1Wxf2yWRMBPgZuNfqB956OtV0PSttXWLA9fiCZ\neKZV8O8/HIhAUutG/wdgHoABAKsAzKn/dheEkHcDOAPAVgD1KrjfCuC9AF4E8F0AfQDeD+BhQshJ\nlNI7Wzt0OeCry1qeRQFJE4EX1r0QfNJPBO7+JoDGisANx96Ag6YchPfNfV8ix5amNcitF655ZFG+\nmhVHEXjqo0/h5Y0vY89JeyZyTI41SDIRCM8RUK9qkJYzYSFlRaDcC4Bkkh8ApG8N8isCWRMBfg9o\naeYIUG8AOkrrw0Yk38isHvyKANHlEIF7lt0DALjjv3cAAGpmMgOtP1n4859vfhuvfOoV3LfsPnzm\nb5/B1upW6dagsERhAKgOZKsIXLbwMszqm4XfvvBb3P/q/VIVAX7/mxX3ywZ6WqQMv+V4IL8MwEIp\n+woE2/d8HbMmXihlX3GxzSkCAC4AsDOltAfAx4F42aeEkPEAfgDg1wCeqfO+D4CRgEcB7EUp/Tyl\n9KNgV5UJ4CZCSEZ9G8PRkWNEgEgiAp6KGRwiEfjHx4EyK5nTSBEY0zEG5+53LsZ3tWsGZcikoRjV\nPYlSzSQL77vDvjh191MTOybHGiQpCODgJEhbcor7pCLJoiJ4Lfmk74G6OQIlthqcGREQKmakmSOg\nSo6IkyMgUREI5AgQLzFe+V8VFIF07IFRikC
zEBWBSy7WccUVzW9jRu8MfHTfj7or0impYgFUs60a\n1JXvwjnzz8G0nmnsCYk5AjzYNCou63Xm4Yzg379VbrNTZx0Egu1/nodZM7IdBLc5IkApfYhSurSF\nj94EgAI4r8H7zrXf93+UUmdIpZQ+DaYUTABwcgv7lwauCJAcGwFlTIQBiETAcEe/tFvbZ2INsjRn\n4GcvNG8NSgppW4OsquB9UcQj7kEWOQJlFgRmkR8AeCtmpKkIUEXOv3sPpKgI+IgASowFvhUVAQ4+\nBiSlCIhEYGyP3lZ+gFhGOY2GYgFUs+0jwMEXBdNQBIyqe/6SLgLSLPz7N0vpEYGzz8rhkkuk7S4W\nnD4CjarbKYDMrhRCyCIAxwM4m1LaX+d9HQD2BzAEpgj48VcwBeIwCYfZMhxFICdJEQirEiESAbti\nSqGQflAgVgtINVlYJAIZJs06tgjJNcSd7y4miSuyIiyC6GyQTjpHoC4RsINAFaxBtVrypVM3lTfh\nsocuw/JNywEIFjlFzj+fBDWZOQL+PgJ+IlBOXxHwr4KuGfNnYOLzw0YREIlMWCWeZiAuCGVDBNh8\nsN12GexbgFOEQmKOgIpEwDn/Nmo1ecnCfiJw3XdzTRUKkYFtThFoFoSQnQBcC+BnlNI/NXj7TAA6\ngGWUhpmi8bL9/84JHmLbcG5+SUQgFDXBA2QrAu13hWwe4kpQuoqAmCOQnSzqrIbKLh/Kgz8xSVrF\nqkFEkjWoXrKwbYvLyhbg1NG3bVFJ3wef+Msn8KUHv4SFtzDPrbI5AnqGisCmnQDIa+gXhtAKbOfu\nKb2EsAxFoF1ribggJJMIRDbOsonA9tvL23cciIrA1q1y9sHv/1rFPX+9nRnJoTb8RGSoIi8Y8Afb\nWduigBEiUBeEjZS3gCUHnx/jI7yvYZSoxp9vooesfPCbn2pyrEGhOQJDgsffVgfStgUB6VqD+ERI\n/eVDtexuPj4A8oZi0vsImEFrUNaBoAeSrEElo85NZf8OaQaBIvg1kMvLIYOPv/Y4ALiKAL8PFFME\nSKo5Ar6T/QRLFmy7IVaTWHzOYlxxuNdcL6u7PJ8HREWg95ZlwE8ewD9OX9b09sQAqt0VZX+H9aRV\nMQ6neacftiKUOREQFAFZzSU9ROC6f+Puk5/EmI5sl8T9wfhgqSatsaoTbP/1Wpy45pnES6K3guFE\nBLJYN7wQwMEAjqGUSnLMZQ9+81M9RUVgUNBAa9kpAllYg6jlm7RyWWjRDG5DMblEwAmEPGVT1asa\nJEsRKNUaR5eygo9G4PdAvmCijOSvAf/k4uQImGooQk537TQVARErDsZBC0bjmmuAvfdOft/1MG/i\nPKwbXOd5LmlLiE50Tx8RrgjMnTAXL66cDpjTsefU5rcrBlChi03NHKM9DhaKJqqbmUVShl+/M9eJ\nLZUQpmXbAydNSn6fzUBUBFIhAuvm4sBpcvbTDAJEUquhvz+BBnUhcMbDp8/GDvtmJAP7MJyIQKqK\nACFkNoDLAdxMKb075sc4WeiJeJ0/39RQSwiJ/HfppZc2s6lQ8Jvf0jIiArY1SMZN1whZWIMCVqB8\nCacmVwioKTjNlHLs2GQlitezBmW9IiyCSuojUDYak72sFAGHDOblkEH/5MKvBdUUAU3LKEdg8xRM\nnQosWJDNb+H3R/dHZsG1v/1LLgFWr2MDrY48TJMtALWbJB1anrgJ8HGw0MG2I8seFKkI2JXDspgD\nRYiKgOwVcW4NyqJ3ih/+ewB6DRs2yNmXa43MZTb2XXrppZ44sjPfCVwKVC7JomRWc0jbGrQbgA4A\nZwgNxCxCiAXgEPs9r9jPHW8/XgpWInQGIaFa5Wz7/5eaORBKaeS/RIiAffNbJFtFYE7sjg7JIbOq\nQQJOOqWMX/5S7r6jwIkQT5SUde4da5ChdtUgSpLvLvvIikfwxfu/GPl6t503flhGJQT4NbB1/88A\nO9+ZOBHwd5V2JkJTDSLojAFZ5QhsmZLpSrB/NVSGIsBx+eXApZexgZZQduLHJmCUDS1I0QT4b1Ao\nsmtVFhEo5iJkBlsR0LLNmXUVAb0K05SzOCEqgrquxvgfpggkRQSuffJa3PjUjQBYHwm3n5CeapUw\nEZdeeqknjjQtE7gU7J/iSPtyWQ7ghxGvHQdgIoDfANhivxeU0goh5HEAB4FZih7yfe4YsNKi9yV/\nuK2D3/wmJOUIhI0mIYrALrsku984UMEaVKWltkrftQMnUVKiPxoQrUHqVQ3qLfaiv8yWQWUoAu/4\nSUTXyHW7AhOW4HvnnoLu44CTTkpun82A3wNm12rgtONRqSQ7+0dbg9Q4/06OgERFIJAjIFZNe+Fk\ndL09+X3GhWwi4N/+kpdqwFyA2HlSSZTNTcoa1CGZCHTm6ucIZA2nn0LO7v5rAXrCuaziirgKagAA\njOsc530iIUWgbJTx6bs/DQA4Z/45+OYT33RfpFpmRMAPjWjQiNa2spYGUiUClNLFAM4Oe40Q8gAY\nEfgipdSf5XQDGAm4nBByBKW0Yn9mPwCnAFgL4A/SDrwFcEXAzEoRsCfFZlvMJ4FsrEHeiTGOf1wW\n/FWDsrAGZb0itPRTS7Gsfxnm3zQfFHLLqHrw87swZtJ6fOT/7Q2SYSDol8VlW4MqBtsBtdWhrM+/\nsxigpagIVNnjg40v45E35mdaP96fKCnTGgQAxS57oLXUUQQysQYtOwyYcT8A4FtX9OLgDMcADidf\nRrNAARiGXCKQdd8Ejp5iD14870W859b3YMn6JYkpAuJ1SSlF1WQTy1HWd3A3sl8EEZHTcs7xqYxE\npgtCyAkATrQfckH2AELIzfbf6ymlF7W6fUrprwkhJ4F1F36WEHIngPFgJEAD8FDQw+YAACAASURB\nVFFKaZ2i4ukjp+VAQGDBBIiJoaFk7/zQ1ZoQa1DaiXKA1xokO/hzvdFeIhDHPy4LfmuQNCJQxxqU\n9WDY29mLPQp7AAC7BwBpiXIeVEeha/PUzNQgDn8gmPR9ECACppcIZH3+/YpA0vkhgC9QFXIE+qp7\nAci2kVSa1iAAKHYzIkDNBIlAgsnCQErWoFX7O0TggvPUSBrlCdiazkZCGXkCYh8RVRQBANhl/C54\n16x3MSKQkCIgXpeGZTjffZTBygWroggA2xgRALAXgA8LjymA6fY/gNl84hCBeiPPqQA+CeAMAJ8A\nUAbwIIDLKaV/b+5w5YMQgo5cBwtIcxWUSimU7xEaat16K8EUCuy6q/zd+iFag2QTAdca5J0YsyQC\n/oZiWViDsl4RBtzrgCsCstUhAICVnUdUhD8QTDxHwNetkisCUIQI8HuA2J2FZXRUjcoRqFXZb68S\nEZCtCHR02kTASNAalFCOQL4gt3qap/FZLjslOApOOWmNXZ8yFkQ8ikAGlQLrwWlSJ0ERqFk1Zxww\nDTUWwUS025QvLSSSRkMp/TKlVK/zb2aMbSyklOZCbEH8dYtS+m1K6Z6U0m5K6ThK6btVJAEcYrWA\nVD
...base64-encoded PNG data elided (matplotlib figure: 'predicted' vs. 'test' series, 385x255)...\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {
+ "image/png": {
+ "height": 255,
+ "width": 385
+ }
+ },
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "plt.subplot()\n",
+ "plot_predicted, = plt.plot(test_results, label='predicted')\n",
+ "\n",
+ "plot_test, = plt.plot(y['test'] , label='test')\n",
+ "plt.legend(handles=[plot_predicted, plot_test])"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 2",
+ "language": "python",
+ "name": "python2"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.11+"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/CH7_time_series.py b/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/CH7_time_series.py
new file mode 100644
index 000000000..9b0ac95eb
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/CH7_time_series.py
@@ -0,0 +1,110 @@
+import numpy as np
+import pandas as pd
+import tensorflow as tf
+from matplotlib import pyplot as plt
+
+
+from tensorflow.python.framework import dtypes
+from tensorflow.contrib import learn
+
+import logging
+logging.basicConfig(level=logging.INFO)
+
+
+from sklearn.metrics import mean_squared_error
+
+LOG_DIR = './ops_logs'
+TIMESTEPS = 5
+RNN_LAYERS = [{'steps': TIMESTEPS}]
+DENSE_LAYERS = None
+TRAINING_STEPS = 10000
+BATCH_SIZE = 100
+PRINT_STEPS = TRAINING_STEPS / 100
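+# Each training example is a TIMESTEPS-wide window of the series; note that
+# 'steps' in RNN_LAYERS is consumed below as the LSTM cell size (num_units).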
+
+def lstm_model(time_steps, rnn_layers, dense_layers=None):
+ def lstm_cells(layers):
+ return [tf.nn.rnn_cell.BasicLSTMCell(layer['steps'],state_is_tuple=True)
+ for layer in layers]
+
+ def dnn_layers(input_layers, layers):
+ return input_layers
+
+ def _lstm_model(X, y):
+ stacked_lstm = tf.nn.rnn_cell.MultiRNNCell(lstm_cells(rnn_layers), state_is_tuple=True)
+ x_ = learn.ops.split_squeeze(1, time_steps, X)
+ output, layers = tf.nn.rnn(stacked_lstm, x_, dtype=dtypes.float32)
+ output = dnn_layers(output[-1], dense_layers)
+ return learn.models.linear_regression(output, y)
+
+ return _lstm_model
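+# Data flow of the model_fn above: X arrives as [batch, time_steps, 1],
+# split_squeeze yields time_steps tensors of shape [batch, 1], tf.nn.rnn
+# unrolls them through the stacked LSTM, and only the last output feeds the
+# linear regression head (one scalar prediction per window). dense_layers is
+# accepted but unused: dnn_layers is a pass-through.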
+
+
+regressor = learn.TensorFlowEstimator(model_fn=lstm_model(TIMESTEPS, RNN_LAYERS, DENSE_LAYERS), n_classes=0,
+ verbose=2, steps=TRAINING_STEPS, optimizer='Adagrad',
+ learning_rate=0.03, batch_size=BATCH_SIZE)
+
+df = pd.read_csv("data/elec_load.csv", error_bad_lines=False)
+plt.subplot()
+plot_test, = plt.plot(df.values[:1500], label='Load')
+plt.legend(handles=[plot_test])
+
+
+print df.describe()
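+# Normalize the load to roughly [0, 1]; 147 and 339 are approximately the
+# series minimum and range as reported by df.describe() above (an assumption
+# about this dataset's stats, not computed here).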
+array = (df.values - 147.0) / 339.0
+plt.subplot()
+plot_test, = plt.plot(array[:1500], label='Normalized Load')
+plt.legend(handles=[plot_test])
+
+
+listX = []
+listy = []
+X={}
+y={}
+
+for i in range(0, len(array) - 5):
+    listX.append(array[i:i+5].reshape([5, 1]))
+    # target is the value immediately following the 5-step window
+    listy.append(array[i+5])
+
+arrayX=np.array(listX)
+arrayy=np.array(listy)
+
+
+X['train']=arrayX[0:12000]
+X['test']=arrayX[12000:13000]
+X['val']=arrayX[13000:14000]
+
+y['train']=arrayy[0:12000]
+y['test']=arrayy[12000:13000]
+y['val']=arrayy[13000:14000]
+
+
+# print y['test'][0]
+# print y2['test'][0]
+
+
+#X1, y2 = generate_data(np.sin, np.linspace(0, 100, 10000), TIMESTEPS, seperate=False)
+# create a lstm instance and validation monitor
+validation_monitor = learn.monitors.ValidationMonitor(X['val'], y['val'],
+ every_n_steps=PRINT_STEPS,
+ early_stopping_rounds=1000)
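+# The monitor evaluates on the validation slice every PRINT_STEPS steps and,
+# with early_stopping_rounds=1000, halts training once validation loss has
+# stopped improving for that many steps (semantics of this contrib.learn version).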
+
+regressor.fit(X['train'], y['train'], monitors=[validation_monitor], logdir=LOG_DIR)
+
+predicted = regressor.predict(X['test'])
+rmse = np.sqrt(((predicted - y['test']) ** 2).mean(axis=0))
+score = mean_squared_error(predicted, y['test'])
+print ("MSE: %f" % score)
+
+#plot_predicted, = plt.plot(array[:1000], label='predicted')
+
+plt.subplot()
+plot_predicted, = plt.plot(predicted, label='predicted')
+
+plot_test, = plt.plot(y['test'], label='test')
+plt.legend(handles=[plot_predicted, plot_test])
+
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/model.py b/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/model.py
new file mode 100755
index 000000000..c30455259
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/model.py
@@ -0,0 +1,125 @@
+import tensorflow as tf
+from tensorflow.contrib import rnn
+from tensorflow.contrib import legacy_seq2seq
+
+import numpy as np
+
+
+class Model():
+ def __init__(self, args, training=True):
+ self.args = args
+ if not training:
+ args.batch_size = 1
+ args.seq_length = 1
+
+ if args.model == 'rnn':
+ cell_fn = rnn.BasicRNNCell
+ elif args.model == 'gru':
+ cell_fn = rnn.GRUCell
+ elif args.model == 'lstm':
+ cell_fn = rnn.BasicLSTMCell
+ elif args.model == 'nas':
+ cell_fn = rnn.NASCell
+ else:
+ raise Exception("model type not supported: {}".format(args.model))
+
+ cells = []
+ for _ in range(args.num_layers):
+ cell = cell_fn(args.rnn_size)
+ if training and (args.output_keep_prob < 1.0 or args.input_keep_prob < 1.0):
+ cell = rnn.DropoutWrapper(cell,
+ input_keep_prob=args.input_keep_prob,
+ output_keep_prob=args.output_keep_prob)
+ cells.append(cell)
+
+ self.cell = cell = rnn.MultiRNNCell(cells, state_is_tuple=True)
+
+ self.input_data = tf.placeholder(
+ tf.int32, [args.batch_size, args.seq_length])
+ self.targets = tf.placeholder(
+ tf.int32, [args.batch_size, args.seq_length])
+ self.initial_state = cell.zero_state(args.batch_size, tf.float32)
+
+ with tf.variable_scope('rnnlm'):
+ softmax_w = tf.get_variable("softmax_w",
+ [args.rnn_size, args.vocab_size])
+ softmax_b = tf.get_variable("softmax_b", [args.vocab_size])
+
+ embedding = tf.get_variable("embedding", [args.vocab_size, args.rnn_size])
+ inputs = tf.nn.embedding_lookup(embedding, self.input_data)
+
+        # Apply dropout to the embedded inputs. Note this gates on
+        # output_keep_prob; input_keep_prob would arguably be the more
+        # consistent choice given the DropoutWrapper arguments above.
+        if training and args.output_keep_prob < 1.0:
+            inputs = tf.nn.dropout(inputs, args.output_keep_prob)
+
+ inputs = tf.split(inputs, args.seq_length, 1)
+ inputs = [tf.squeeze(input_, [1]) for input_ in inputs]
+
+ def loop(prev, _):
+ prev = tf.matmul(prev, softmax_w) + softmax_b
+ prev_symbol = tf.stop_gradient(tf.argmax(prev, 1))
+ return tf.nn.embedding_lookup(embedding, prev_symbol)
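+        # loop() is only wired in when training=False (see rnn_decoder below):
+        # at sampling time each step's argmax prediction is embedded and fed
+        # back as the next input instead of a ground-truth character.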
+
+ outputs, last_state = legacy_seq2seq.rnn_decoder(inputs, self.initial_state, cell, loop_function=loop if not training else None, scope='rnnlm')
+ output = tf.reshape(tf.concat(outputs, 1), [-1, args.rnn_size])
+
+
+ self.logits = tf.matmul(output, softmax_w) + softmax_b
+ self.probs = tf.nn.softmax(self.logits)
+ loss = legacy_seq2seq.sequence_loss_by_example(
+ [self.logits],
+ [tf.reshape(self.targets, [-1])],
+ [tf.ones([args.batch_size * args.seq_length])])
+        with tf.name_scope('cost'):
+            self.cost = tf.reduce_sum(loss) / args.batch_size / args.seq_length
+ self.final_state = last_state
+ self.lr = tf.Variable(0.0, trainable=False)
+ tvars = tf.trainable_variables()
+ grads, _ = tf.clip_by_global_norm(tf.gradients(self.cost, tvars),
+ args.grad_clip)
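+        # clip_by_global_norm rescales all gradients so their joint norm is at
+        # most grad_clip, a standard guard against exploding gradients in RNNs.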
+ with tf.name_scope('optimizer'):
+ optimizer = tf.train.AdamOptimizer(self.lr)
+ self.train_op = optimizer.apply_gradients(zip(grads, tvars))
+
+ # instrument tensorboard
+ tf.summary.histogram('logits', self.logits)
+ tf.summary.histogram('loss', loss)
+ tf.summary.scalar('train_loss', self.cost)
+
+ def sample(self, sess, chars, vocab, num=200, prime='The ', sampling_type=1):
+ state = sess.run(self.cell.zero_state(1, tf.float32))
+ for char in prime[:-1]:
+ x = np.zeros((1, 1))
+ x[0, 0] = vocab[char]
+ feed = {self.input_data: x, self.initial_state: state}
+ [state] = sess.run([self.final_state], feed)
+
+ def weighted_pick(weights):
+ t = np.cumsum(weights)
+ s = np.sum(weights)
+ return(int(np.searchsorted(t, np.random.rand(1)*s)))
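+        # Example: weights [0.2, 0.3, 0.5] give t = [0.2, 0.5, 1.0]; a uniform
+        # draw scaled by s lands in bin i with probability weights[i] / s.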
+
+ ret = prime
+ char = prime[-1]
+ for n in range(num):
+ x = np.zeros((1, 1))
+ x[0, 0] = vocab[char]
+ feed = {self.input_data: x, self.initial_state: state}
+ [probs, state] = sess.run([self.probs, self.final_state], feed)
+ p = probs[0]
+
+ if sampling_type == 0:
+ sample = np.argmax(p)
+ elif sampling_type == 2:
+ if char == ' ':
+ sample = weighted_pick(p)
+ else:
+ sample = np.argmax(p)
+ else: # sampling_type == 1 default:
+ sample = weighted_pick(p)
+
+ pred = chars[sample]
+ ret += pred
+ char = pred
+ return ret
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/sample.py b/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/sample.py
new file mode 100755
index 000000000..0de3a8192
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/sample.py
@@ -0,0 +1,46 @@
+from __future__ import print_function
+import tensorflow as tf
+
+import argparse
+import os
+from six.moves import cPickle
+
+from model import Model
+
+from six import text_type
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ parser.add_argument('--save_dir', type=str, default='save',
+ help='model directory to store checkpointed models')
+ parser.add_argument('-n', type=int, default=500,
+ help='number of characters to sample')
+ parser.add_argument('--prime', type=text_type, default=u' ',
+ help='prime text')
+ parser.add_argument('--sample', type=int, default=1,
+ help='0 to use max at each timestep, 1 to sample at '
+ 'each timestep, 2 to sample on spaces')
+
+ args = parser.parse_args()
+ sample(args)
+
+
+def sample(args):
+ with open(os.path.join(args.save_dir, 'config.pkl'), 'rb') as f:
+ saved_args = cPickle.load(f)
+ with open(os.path.join(args.save_dir, 'chars_vocab.pkl'), 'rb') as f:
+ chars, vocab = cPickle.load(f)
+ model = Model(saved_args, training=False)
+ with tf.Session() as sess:
+ tf.global_variables_initializer().run()
+ saver = tf.train.Saver(tf.global_variables())
+ ckpt = tf.train.get_checkpoint_state(args.save_dir)
+ if ckpt and ckpt.model_checkpoint_path:
+ saver.restore(sess, ckpt.model_checkpoint_path)
+ print(model.sample(sess, chars, vocab, args.n, args.prime,
+ args.sample).encode('utf-8'))
+
+if __name__ == '__main__':
+ main()
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/save/.gitignore b/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/save/.gitignore
new file mode 100755
index 000000000..5e7d2734c
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/save/.gitignore
@@ -0,0 +1,4 @@
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/train.py b/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/train.py
new file mode 100755
index 000000000..bdab9a50a
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/train.py
@@ -0,0 +1,140 @@
+from __future__ import print_function
+import tensorflow as tf
+
+import argparse
+import time
+import os
+from six.moves import cPickle
+
+from utils import TextLoader
+from model import Model
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ parser.add_argument('--data_dir', type=str, default='data/tinyshakespeare',
+ help='data directory containing input.txt')
+ parser.add_argument('--save_dir', type=str, default='save',
+ help='directory to store checkpointed models')
+ parser.add_argument('--log_dir', type=str, default='logs',
+ help='directory to store tensorboard logs')
+ parser.add_argument('--rnn_size', type=int, default=128,
+ help='size of RNN hidden state')
+ parser.add_argument('--num_layers', type=int, default=2,
+ help='number of layers in the RNN')
+ parser.add_argument('--model', type=str, default='lstm',
+ help='rnn, gru, lstm, or nas')
+ parser.add_argument('--batch_size', type=int, default=50,
+ help='minibatch size')
+ parser.add_argument('--seq_length', type=int, default=50,
+ help='RNN sequence length')
+ parser.add_argument('--num_epochs', type=int, default=50,
+ help='number of epochs')
+ parser.add_argument('--save_every', type=int, default=1000,
+ help='save frequency')
+ parser.add_argument('--grad_clip', type=float, default=5.,
+ help='clip gradients at this value')
+ parser.add_argument('--learning_rate', type=float, default=0.002,
+ help='learning rate')
+ parser.add_argument('--decay_rate', type=float, default=0.97,
+ help='decay rate for rmsprop')
+ parser.add_argument('--output_keep_prob', type=float, default=1.0,
+ help='probability of keeping weights in the hidden layer')
+ parser.add_argument('--input_keep_prob', type=float, default=1.0,
+ help='probability of keeping weights in the input layer')
+ parser.add_argument('--init_from', type=str, default=None,
+ help="""continue training from saved model at this path. Path must contain files saved by previous training process:
+ 'config.pkl' : configuration;
+ 'chars_vocab.pkl' : vocabulary definitions;
+ 'checkpoint' : paths to model file(s) (created by tf).
+ Note: this file contains absolute paths, be careful when moving files around;
+ 'model.ckpt-*' : file(s) with model definition (created by tf)
+ """)
+ args = parser.parse_args()
+ train(args)
+
+
+def train(args):
+ data_loader = TextLoader(args.data_dir, args.batch_size, args.seq_length)
+ args.vocab_size = data_loader.vocab_size
+
+ # check compatibility if training is continued from previously saved model
+ if args.init_from is not None:
+ # check if all necessary files exist
+        assert os.path.isdir(args.init_from), "%s must be a path" % args.init_from
+        assert os.path.isfile(os.path.join(args.init_from, "config.pkl")), "config.pkl file does not exist in path %s" % args.init_from
+        assert os.path.isfile(os.path.join(args.init_from, "chars_vocab.pkl")), "chars_vocab.pkl file does not exist in path %s" % args.init_from
+ ckpt = tf.train.get_checkpoint_state(args.init_from)
+ assert ckpt, "No checkpoint found"
+ assert ckpt.model_checkpoint_path, "No model path found in checkpoint"
+
+ # open old config and check if models are compatible
+ with open(os.path.join(args.init_from, 'config.pkl'), 'rb') as f:
+ saved_model_args = cPickle.load(f)
+ need_be_same = ["model", "rnn_size", "num_layers", "seq_length"]
+ for checkme in need_be_same:
+ assert vars(saved_model_args)[checkme]==vars(args)[checkme],"Command line argument and saved model disagree on '%s' "%checkme
+
+ # open saved vocab/dict and check if vocabs/dicts are compatible
+ with open(os.path.join(args.init_from, 'chars_vocab.pkl'), 'rb') as f:
+ saved_chars, saved_vocab = cPickle.load(f)
+ assert saved_chars==data_loader.chars, "Data and loaded model disagree on character set!"
+ assert saved_vocab==data_loader.vocab, "Data and loaded model disagree on dictionary mappings!"
+
+ if not os.path.isdir(args.save_dir):
+ os.makedirs(args.save_dir)
+ with open(os.path.join(args.save_dir, 'config.pkl'), 'wb') as f:
+ cPickle.dump(args, f)
+ with open(os.path.join(args.save_dir, 'chars_vocab.pkl'), 'wb') as f:
+ cPickle.dump((data_loader.chars, data_loader.vocab), f)
+
+ model = Model(args)
+
+ with tf.Session() as sess:
+ # instrument for tensorboard
+ summaries = tf.summary.merge_all()
+ writer = tf.summary.FileWriter(
+ os.path.join(args.log_dir, time.strftime("%Y-%m-%d-%H-%M-%S")))
+ writer.add_graph(sess.graph)
+
+ sess.run(tf.global_variables_initializer())
+ saver = tf.train.Saver(tf.global_variables())
+ # restore model
+ if args.init_from is not None:
+ saver.restore(sess, ckpt.model_checkpoint_path)
+ for e in range(args.num_epochs):
+ sess.run(tf.assign(model.lr,
+ args.learning_rate * (args.decay_rate ** e)))
+ data_loader.reset_batch_pointer()
+ state = sess.run(model.initial_state)
+ for b in range(data_loader.num_batches):
+ start = time.time()
+ x, y = data_loader.next_batch()
+ feed = {model.input_data: x, model.targets: y}
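+                # carry the final RNN state of the previous batch over as the
+                # initial state of this one (truncated backprop through time
+                # across batch boundaries)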
+ for i, (c, h) in enumerate(model.initial_state):
+ feed[c] = state[i].c
+ feed[h] = state[i].h
+                # instrument for tensorboard: a single sess.run performs the
+                # training step and collects the summaries (running the train
+                # op in a separate call would apply two updates per batch)
+                summ, train_loss, state, _ = sess.run([summaries, model.cost, model.final_state, model.train_op], feed)
+                writer.add_summary(summ, e * data_loader.num_batches + b)
+
+ end = time.time()
+ print("{}/{} (epoch {}), train_loss = {:.3f}, time/batch = {:.3f}"
+ .format(e * data_loader.num_batches + b,
+ args.num_epochs * data_loader.num_batches,
+ e, train_loss, end - start))
+ if (e * data_loader.num_batches + b) % args.save_every == 0\
+ or (e == args.num_epochs-1 and
+ b == data_loader.num_batches-1):
+                    # save periodically, and always after the last batch
+ checkpoint_path = os.path.join(args.save_dir, 'model.ckpt')
+ saver.save(sess, checkpoint_path,
+ global_step=e * data_loader.num_batches + b)
+ print("model saved to {}".format(checkpoint_path))
+
+
+if __name__ == '__main__':
+ main()
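+
+# Example usage (a sketch; the paths are hypothetical, any data_dir that
+# contains an input.txt works):
+#   python train.py --data_dir=data/tinyshakespeare --model=lstm --num_epochs=50
+# Resume training from a previous run's checkpoints:
+#   python train.py --init_from=save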
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/utils.py b/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/utils.py
new file mode 100755
index 000000000..39e64ca5a
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/7/Code/utils.py
@@ -0,0 +1,75 @@
+import codecs
+import os
+import collections
+from six.moves import cPickle
+import numpy as np
+
+
+class TextLoader():
+ def __init__(self, data_dir, batch_size, seq_length, encoding='utf-8'):
+ self.data_dir = data_dir
+ self.batch_size = batch_size
+ self.seq_length = seq_length
+ self.encoding = encoding
+
+ input_file = os.path.join(data_dir, "input.txt")
+ vocab_file = os.path.join(data_dir, "vocab.pkl")
+ tensor_file = os.path.join(data_dir, "data.npy")
+
+ if not (os.path.exists(vocab_file) and os.path.exists(tensor_file)):
+ print("reading text file")
+ self.preprocess(input_file, vocab_file, tensor_file)
+ else:
+ print("loading preprocessed files")
+ self.load_preprocessed(vocab_file, tensor_file)
+ self.create_batches()
+ self.reset_batch_pointer()
+
+ def preprocess(self, input_file, vocab_file, tensor_file):
+ with codecs.open(input_file, "r", encoding=self.encoding) as f:
+ data = f.read()
+ counter = collections.Counter(data)
+ count_pairs = sorted(counter.items(), key=lambda x: -x[1])
+ self.chars, _ = zip(*count_pairs)
+ self.vocab_size = len(self.chars)
+ self.vocab = dict(zip(self.chars, range(len(self.chars))))
+ with open(vocab_file, 'wb') as f:
+ cPickle.dump(self.chars, f)
+ self.tensor = np.array(list(map(self.vocab.get, data)))
+ np.save(tensor_file, self.tensor)
+
+ def load_preprocessed(self, vocab_file, tensor_file):
+ with open(vocab_file, 'rb') as f:
+ self.chars = cPickle.load(f)
+ self.vocab_size = len(self.chars)
+ self.vocab = dict(zip(self.chars, range(len(self.chars))))
+ self.tensor = np.load(tensor_file)
+ self.num_batches = int(self.tensor.size / (self.batch_size *
+ self.seq_length))
+
+ def create_batches(self):
+ self.num_batches = int(self.tensor.size / (self.batch_size *
+ self.seq_length))
+
+        # When the data (tensor) is too small, give a clearer error message
+        if self.num_batches == 0:
+            assert False, "Not enough data. Reduce seq_length and/or batch_size."
+
+ self.tensor = self.tensor[:self.num_batches * self.batch_size * self.seq_length]
+ xdata = self.tensor
+ ydata = np.copy(self.tensor)
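+        # targets are the inputs shifted one character to the left, with the
+        # first character wrapped around to the end (next-character prediction)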
+ ydata[:-1] = xdata[1:]
+ ydata[-1] = xdata[0]
+ self.x_batches = np.split(xdata.reshape(self.batch_size, -1),
+ self.num_batches, 1)
+ self.y_batches = np.split(ydata.reshape(self.batch_size, -1),
+ self.num_batches, 1)
+
+ def next_batch(self):
+ x, y = self.x_batches[self.pointer], self.y_batches[self.pointer]
+ self.pointer += 1
+ return x, y
+
+ def reset_batch_pointer(self):
+ self.pointer = 0
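+
+# Minimal usage sketch (the directory name is hypothetical; it must contain
+# an input.txt):
+#   loader = TextLoader('data/tinyshakespeare', batch_size=50, seq_length=50)
+#   x, y = loader.next_batch()  # int arrays of shape (batch_size, seq_length)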
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/8/LICENSE.txt b/Building-Machine-Learning-Projects-with-TensorFlow-master/8/LICENSE.txt
new file mode 100644
index 000000000..94a9ed024
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/8/LICENSE.txt
@@ -0,0 +1,674 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<http://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/8/neural_style.py b/Building-Machine-Learning-Projects-with-TensorFlow-master/8/neural_style.py
new file mode 100644
index 000000000..55132db12
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/8/neural_style.py
@@ -0,0 +1,206 @@
+# Copyright (c) 2015-2017 Anish Athalye. Released under GPLv3.
+
+import os
+
+import numpy as np
+import scipy.misc
+
+from stylize import stylize
+
+import math
+from argparse import ArgumentParser
+
+from PIL import Image
+
+# default arguments
+CONTENT_WEIGHT = 5e0
+CONTENT_WEIGHT_BLEND = 1
+STYLE_WEIGHT = 5e2
+TV_WEIGHT = 1e2
+STYLE_LAYER_WEIGHT_EXP = 1
+LEARNING_RATE = 1e1
+BETA1 = 0.9
+BETA2 = 0.999
+EPSILON = 1e-08
+STYLE_SCALE = 1.0
+ITERATIONS = 1000
+VGG_PATH = 'imagenet-vgg-verydeep-19.mat'
+POOLING = 'max'
+
+def build_parser():
+ parser = ArgumentParser()
+ parser.add_argument('--content',
+ dest='content', help='content image',
+ metavar='CONTENT', required=True)
+ parser.add_argument('--styles',
+ dest='styles',
+ nargs='+', help='one or more style images',
+ metavar='STYLE', required=True)
+ parser.add_argument('--output',
+ dest='output', help='output path',
+ metavar='OUTPUT', required=True)
+ parser.add_argument('--iterations', type=int,
+ dest='iterations', help='iterations (default %(default)s)',
+ metavar='ITERATIONS', default=ITERATIONS)
+ parser.add_argument('--print-iterations', type=int,
+ dest='print_iterations', help='statistics printing frequency',
+ metavar='PRINT_ITERATIONS')
+ parser.add_argument('--checkpoint-output',
+ dest='checkpoint_output', help='checkpoint output format, e.g. output%%s.jpg',
+ metavar='OUTPUT')
+ parser.add_argument('--checkpoint-iterations', type=int,
+ dest='checkpoint_iterations', help='checkpoint frequency',
+ metavar='CHECKPOINT_ITERATIONS')
+ parser.add_argument('--width', type=int,
+ dest='width', help='output width',
+ metavar='WIDTH')
+ parser.add_argument('--style-scales', type=float,
+ dest='style_scales',
+ nargs='+', help='one or more style scales',
+ metavar='STYLE_SCALE')
+ parser.add_argument('--network',
+ dest='network', help='path to network parameters (default %(default)s)',
+ metavar='VGG_PATH', default=VGG_PATH)
+ parser.add_argument('--content-weight-blend', type=float,
+ dest='content_weight_blend', help='content weight blend, conv4_2 * blend + conv5_2 * (1-blend) (default %(default)s)',
+ metavar='CONTENT_WEIGHT_BLEND', default=CONTENT_WEIGHT_BLEND)
+ parser.add_argument('--content-weight', type=float,
+ dest='content_weight', help='content weight (default %(default)s)',
+ metavar='CONTENT_WEIGHT', default=CONTENT_WEIGHT)
+ parser.add_argument('--style-weight', type=float,
+ dest='style_weight', help='style weight (default %(default)s)',
+ metavar='STYLE_WEIGHT', default=STYLE_WEIGHT)
+ parser.add_argument('--style-layer-weight-exp', type=float,
+            dest='style_layer_weight_exp', help='style layer weight exponential increase - weight(layer<n+1>) = weight_exp*weight(layer<n>) (default %(default)s)',
+ metavar='STYLE_LAYER_WEIGHT_EXP', default=STYLE_LAYER_WEIGHT_EXP)
+ parser.add_argument('--style-blend-weights', type=float,
+ dest='style_blend_weights', help='style blending weights',
+ nargs='+', metavar='STYLE_BLEND_WEIGHT')
+ parser.add_argument('--tv-weight', type=float,
+ dest='tv_weight', help='total variation regularization weight (default %(default)s)',
+ metavar='TV_WEIGHT', default=TV_WEIGHT)
+ parser.add_argument('--learning-rate', type=float,
+ dest='learning_rate', help='learning rate (default %(default)s)',
+ metavar='LEARNING_RATE', default=LEARNING_RATE)
+ parser.add_argument('--beta1', type=float,
+ dest='beta1', help='Adam: beta1 parameter (default %(default)s)',
+ metavar='BETA1', default=BETA1)
+ parser.add_argument('--beta2', type=float,
+ dest='beta2', help='Adam: beta2 parameter (default %(default)s)',
+ metavar='BETA2', default=BETA2)
+ parser.add_argument('--eps', type=float,
+ dest='epsilon', help='Adam: epsilon parameter (default %(default)s)',
+ metavar='EPSILON', default=EPSILON)
+ parser.add_argument('--initial',
+ dest='initial', help='initial image',
+ metavar='INITIAL')
+ parser.add_argument('--initial-noiseblend', type=float,
+ dest='initial_noiseblend', help='ratio of blending initial image with normalized noise (if no initial image specified, content image is used) (default %(default)s)',
+ metavar='INITIAL_NOISEBLEND')
+ parser.add_argument('--preserve-colors', action='store_true',
+            dest='preserve_colors', help="style-only transfer (preserve the content image's colors in the output)")
+ parser.add_argument('--pooling',
+ dest='pooling', help='pooling layer configuration: max or avg (default %(default)s)',
+ metavar='POOLING', default=POOLING)
+ return parser
+
+
+def main():
+ parser = build_parser()
+ options = parser.parse_args()
+
+ if not os.path.isfile(options.network):
+ parser.error("Network %s does not exist. (Did you forget to download it?)" % options.network)
+
+ content_image = imread(options.content)
+ style_images = [imread(style) for style in options.styles]
+
+ width = options.width
+ if width is not None:
+ new_shape = (int(math.floor(float(content_image.shape[0]) /
+ content_image.shape[1] * width)), width)
+ content_image = scipy.misc.imresize(content_image, new_shape)
+ target_shape = content_image.shape
+ for i in range(len(style_images)):
+ style_scale = STYLE_SCALE
+ if options.style_scales is not None:
+ style_scale = options.style_scales[i]
+ style_images[i] = scipy.misc.imresize(style_images[i], style_scale *
+ target_shape[1] / style_images[i].shape[1])
+
+ style_blend_weights = options.style_blend_weights
+ if style_blend_weights is None:
+ # default is equal weights
+ style_blend_weights = [1.0/len(style_images) for _ in style_images]
+ else:
+ total_blend_weight = sum(style_blend_weights)
+ style_blend_weights = [weight/total_blend_weight
+ for weight in style_blend_weights]
+
+ initial = options.initial
+ if initial is not None:
+ initial = scipy.misc.imresize(imread(initial), content_image.shape[:2])
+        # An initial image was specified but noiseblend was not: blend in no noise
+ if options.initial_noiseblend is None:
+ options.initial_noiseblend = 0.0
+ else:
+        # Neither an initial image nor noiseblend was provided; fall back to a randomly generated initial guess
+ if options.initial_noiseblend is None:
+ options.initial_noiseblend = 1.0
+ if options.initial_noiseblend < 1.0:
+ initial = content_image
+
+ if options.checkpoint_output and "%s" not in options.checkpoint_output:
+ parser.error("To save intermediate images, the checkpoint output "
+ "parameter must contain `%s` (e.g. `foo%s.jpg`)")
+
+ for iteration, image in stylize(
+ network=options.network,
+ initial=initial,
+ initial_noiseblend=options.initial_noiseblend,
+ content=content_image,
+ styles=style_images,
+ preserve_colors=options.preserve_colors,
+ iterations=options.iterations,
+ content_weight=options.content_weight,
+ content_weight_blend=options.content_weight_blend,
+ style_weight=options.style_weight,
+ style_layer_weight_exp=options.style_layer_weight_exp,
+ style_blend_weights=style_blend_weights,
+ tv_weight=options.tv_weight,
+ learning_rate=options.learning_rate,
+ beta1=options.beta1,
+ beta2=options.beta2,
+ epsilon=options.epsilon,
+ pooling=options.pooling,
+ print_iterations=options.print_iterations,
+ checkpoint_iterations=options.checkpoint_iterations
+ ):
+ output_file = None
+ combined_rgb = image
+ if iteration is not None:
+ if options.checkpoint_output:
+ output_file = options.checkpoint_output % iteration
+ else:
+ output_file = options.output
+ if output_file:
+ imsave(output_file, combined_rgb)
+
+
+def imread(path):
+ img = scipy.misc.imread(path).astype(np.float)
+ if len(img.shape) == 2:
+ # grayscale
+ img = np.dstack((img,img,img))
+ elif img.shape[2] == 4:
+ # PNG with alpha channel
+ img = img[:,:,:3]
+ return img
+
+
+def imsave(path, img):
+ img = np.clip(img, 0, 255).astype(np.uint8)
+ Image.fromarray(img).save(path, quality=95)
+
+if __name__ == '__main__':
+ main()
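+
+# Example usage (a sketch; the image paths are hypothetical):
+#   python neural_style.py --content content.jpg --styles style.jpg \
+#       --output out.jpg --network imagenet-vgg-verydeep-19.mat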
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/8/stylize.py b/Building-Machine-Learning-Projects-with-TensorFlow-master/8/stylize.py
new file mode 100644
index 000000000..92c42580d
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/8/stylize.py
@@ -0,0 +1,211 @@
+# Copyright (c) 2015-2017 Anish Athalye. Released under GPLv3.
+
+import vgg
+
+import tensorflow as tf
+import numpy as np
+
+from sys import stderr
+
+from PIL import Image
+
+CONTENT_LAYERS = ('relu4_2', 'relu5_2')
+STYLE_LAYERS = ('relu1_1', 'relu2_1', 'relu3_1', 'relu4_1', 'relu5_1')
+
+try:
+ reduce
+except NameError:
+ from functools import reduce
+
+
+def stylize(network, initial, initial_noiseblend, content, styles, preserve_colors, iterations,
+ content_weight, content_weight_blend, style_weight, style_layer_weight_exp, style_blend_weights, tv_weight,
+ learning_rate, beta1, beta2, epsilon, pooling,
+ print_iterations=None, checkpoint_iterations=None):
+ """
+ Stylize images.
+
+ This function yields tuples (iteration, image); `iteration` is None
+ if this is the final image (the last iteration). Other tuples are yielded
+ every `checkpoint_iterations` iterations.
+
+ :rtype: iterator[tuple[int|None,image]]
+ """
+ shape = (1,) + content.shape
+ style_shapes = [(1,) + style.shape for style in styles]
+ content_features = {}
+ style_features = [{} for _ in styles]
+
+ vgg_weights, vgg_mean_pixel = vgg.load_net(network)
+
+ layer_weight = 1.0
+ style_layers_weights = {}
+ for style_layer in STYLE_LAYERS:
+ style_layers_weights[style_layer] = layer_weight
+ layer_weight *= style_layer_weight_exp
+
+ # normalize style layer weights
+ layer_weights_sum = 0
+ for style_layer in STYLE_LAYERS:
+ layer_weights_sum += style_layers_weights[style_layer]
+ for style_layer in STYLE_LAYERS:
+ style_layers_weights[style_layer] /= layer_weights_sum
+
+ # compute content features in feedforward mode
+ g = tf.Graph()
+ with g.as_default(), g.device('/cpu:0'), tf.Session() as sess:
+ image = tf.placeholder('float', shape=shape)
+ net = vgg.net_preloaded(vgg_weights, image, pooling)
+ content_pre = np.array([vgg.preprocess(content, vgg_mean_pixel)])
+ for layer in CONTENT_LAYERS:
+ content_features[layer] = net[layer].eval(feed_dict={image: content_pre})
+
+ # compute style features in feedforward mode
+ for i in range(len(styles)):
+ g = tf.Graph()
+ with g.as_default(), g.device('/cpu:0'), tf.Session() as sess:
+ image = tf.placeholder('float', shape=style_shapes[i])
+ net = vgg.net_preloaded(vgg_weights, image, pooling)
+ style_pre = np.array([vgg.preprocess(styles[i], vgg_mean_pixel)])
+ for layer in STYLE_LAYERS:
+ features = net[layer].eval(feed_dict={image: style_pre})
+ features = np.reshape(features, (-1, features.shape[3]))
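+                # the Gram matrix of the flattened activations captures which
+                # features co-occur, and serves as the style representation
+                # for this layer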
+ gram = np.matmul(features.T, features) / features.size
+ style_features[i][layer] = gram
+
+ initial_content_noise_coeff = 1.0 - initial_noiseblend
+
+    # make stylized image using backpropagation
+ with tf.Graph().as_default():
+ if initial is None:
+ noise = np.random.normal(size=shape, scale=np.std(content) * 0.1)
+ initial = tf.random_normal(shape) * 0.256
+ else:
+ initial = np.array([vgg.preprocess(initial, vgg_mean_pixel)])
+ initial = initial.astype('float32')
+ noise = np.random.normal(size=shape, scale=np.std(content) * 0.1)
+ initial = (initial) * initial_content_noise_coeff + (tf.random_normal(shape) * 0.256) * (1.0 - initial_content_noise_coeff)
+ image = tf.Variable(initial)
+ net = vgg.net_preloaded(vgg_weights, image, pooling)
+
+ # content loss
+ content_layers_weights = {}
+ content_layers_weights['relu4_2'] = content_weight_blend
+ content_layers_weights['relu5_2'] = 1.0 - content_weight_blend
+
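+        # content loss: scaled squared error between the network's responses
+        # to the stylized image and the precomputed content features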
+ content_loss = 0
+ content_losses = []
+ for content_layer in CONTENT_LAYERS:
+ content_losses.append(content_layers_weights[content_layer] * content_weight * (2 * tf.nn.l2_loss(
+ net[content_layer] - content_features[content_layer]) /
+ content_features[content_layer].size))
+ content_loss += reduce(tf.add, content_losses)
+
+ # style loss
+ style_loss = 0
+ for i in range(len(styles)):
+ style_losses = []
+ for style_layer in STYLE_LAYERS:
+ layer = net[style_layer]
+                _, height, width, number = map(lambda d: d.value, layer.get_shape())
+ size = height * width * number
+ feats = tf.reshape(layer, (-1, number))
+ gram = tf.matmul(tf.transpose(feats), feats) / size
+ style_gram = style_features[i][style_layer]
+ style_losses.append(style_layers_weights[style_layer] * 2 * tf.nn.l2_loss(gram - style_gram) / style_gram.size)
+ style_loss += style_weight * style_blend_weights[i] * reduce(tf.add, style_losses)
+
+ # total variation denoising
+ tv_y_size = _tensor_size(image[:,1:,:,:])
+ tv_x_size = _tensor_size(image[:,:,1:,:])
+ tv_loss = tv_weight * 2 * (
+ (tf.nn.l2_loss(image[:,1:,:,:] - image[:,:shape[1]-1,:,:]) /
+ tv_y_size) +
+ (tf.nn.l2_loss(image[:,:,1:,:] - image[:,:,:shape[2]-1,:]) /
+ tv_x_size))
+ # overall loss
+ loss = content_loss + style_loss + tv_loss
+
+ # optimizer setup
+ train_step = tf.train.AdamOptimizer(learning_rate, beta1, beta2, epsilon).minimize(loss)
+
+ def print_progress():
+ stderr.write(' content loss: %g\n' % content_loss.eval())
+ stderr.write(' style loss: %g\n' % style_loss.eval())
+ stderr.write(' tv loss: %g\n' % tv_loss.eval())
+ stderr.write(' total loss: %g\n' % loss.eval())
+
+ # optimization
+ best_loss = float('inf')
+ best = None
+ with tf.Session() as sess:
+ sess.run(tf.global_variables_initializer())
+ stderr.write('Optimization started...\n')
+ if (print_iterations and print_iterations != 0):
+ print_progress()
+ for i in range(iterations):
+ stderr.write('Iteration %4d/%4d\n' % (i + 1, iterations))
+ train_step.run()
+
+ last_step = (i == iterations - 1)
+ if last_step or (print_iterations and i % print_iterations == 0):
+ print_progress()
+
+ if (checkpoint_iterations and i % checkpoint_iterations == 0) or last_step:
+ this_loss = loss.eval()
+ if this_loss < best_loss:
+ best_loss = this_loss
+ best = image.eval()
+
+ img_out = vgg.unprocess(best.reshape(shape[1:]), vgg_mean_pixel)
+
+                    if preserve_colors:
+ original_image = np.clip(content, 0, 255)
+ styled_image = np.clip(img_out, 0, 255)
+
+ # Luminosity transfer steps:
+                        # 1. Convert stylized RGB->grayscale according to Rec.601 luma (0.299, 0.587, 0.114)
+ # 2. Convert stylized grayscale into YUV (YCbCr)
+ # 3. Convert original image into YUV (YCbCr)
+ # 4. Recombine (stylizedYUV.Y, originalYUV.U, originalYUV.V)
+ # 5. Convert recombined image from YUV back to RGB
+
+ # 1
+ styled_grayscale = rgb2gray(styled_image)
+ styled_grayscale_rgb = gray2rgb(styled_grayscale)
+
+ # 2
+ styled_grayscale_yuv = np.array(Image.fromarray(styled_grayscale_rgb.astype(np.uint8)).convert('YCbCr'))
+
+ # 3
+ original_yuv = np.array(Image.fromarray(original_image.astype(np.uint8)).convert('YCbCr'))
+
+ # 4
+ w, h, _ = original_image.shape
+ combined_yuv = np.empty((w, h, 3), dtype=np.uint8)
+ combined_yuv[..., 0] = styled_grayscale_yuv[..., 0]
+ combined_yuv[..., 1] = original_yuv[..., 1]
+ combined_yuv[..., 2] = original_yuv[..., 2]
+
+ # 5
+ img_out = np.array(Image.fromarray(combined_yuv, 'YCbCr').convert('RGB'))
+
+
+ yield (
+ (None if last_step else i),
+ img_out
+ )
+
+
+def _tensor_size(tensor):
+ from operator import mul
+ return reduce(mul, (d.value for d in tensor.get_shape()), 1)
+
+def rgb2gray(rgb):
+ return np.dot(rgb[...,:3], [0.299, 0.587, 0.114])
+
+def gray2rgb(gray):
+ w, h = gray.shape
+ rgb = np.empty((w, h, 3), dtype=np.float32)
+ rgb[:, :, 2] = rgb[:, :, 1] = rgb[:, :, 0] = gray
+ return rgb
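+
+# Usage note: stylize() is a generator yielding (iteration, image) pairs,
+# with iteration None for the final image; see neural_style.py's main()
+# for a full call with all keyword arguments.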
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/8/stylize.pyc b/Building-Machine-Learning-Projects-with-TensorFlow-master/8/stylize.pyc
new file mode 100644
index 000000000..938ce58b4
Binary files /dev/null and b/Building-Machine-Learning-Projects-with-TensorFlow-master/8/stylize.pyc differ
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/8/vgg.py b/Building-Machine-Learning-Projects-with-TensorFlow-master/8/vgg.py
new file mode 100644
index 000000000..fb9860d82
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/8/vgg.py
@@ -0,0 +1,69 @@
+# Copyright (c) 2015-2017 Anish Athalye. Released under GPLv3.
+
+import tensorflow as tf
+import numpy as np
+import scipy.io
+
+VGG19_LAYERS = (
+ 'conv1_1', 'relu1_1', 'conv1_2', 'relu1_2', 'pool1',
+
+ 'conv2_1', 'relu2_1', 'conv2_2', 'relu2_2', 'pool2',
+
+ 'conv3_1', 'relu3_1', 'conv3_2', 'relu3_2', 'conv3_3',
+ 'relu3_3', 'conv3_4', 'relu3_4', 'pool3',
+
+ 'conv4_1', 'relu4_1', 'conv4_2', 'relu4_2', 'conv4_3',
+ 'relu4_3', 'conv4_4', 'relu4_4', 'pool4',
+
+ 'conv5_1', 'relu5_1', 'conv5_2', 'relu5_2', 'conv5_3',
+ 'relu5_3', 'conv5_4', 'relu5_4'
+)
+
+def load_net(data_path):
+ data = scipy.io.loadmat(data_path)
+ mean = data['normalization'][0][0][0]
+ mean_pixel = np.mean(mean, axis=(0, 1))
+ weights = data['layers'][0]
+ return weights, mean_pixel
+
+def net_preloaded(weights, input_image, pooling):
+ net = {}
+ current = input_image
+ for i, name in enumerate(VGG19_LAYERS):
+ kind = name[:4]
+ if kind == 'conv':
+ kernels, bias = weights[i][0][0][0][0]
+ # matconvnet: weights are [width, height, in_channels, out_channels]
+ # tensorflow: weights are [height, width, in_channels, out_channels]
+ kernels = np.transpose(kernels, (1, 0, 2, 3))
+ bias = bias.reshape(-1)
+ current = _conv_layer(current, kernels, bias)
+ elif kind == 'relu':
+ current = tf.nn.relu(current)
+ elif kind == 'pool':
+ current = _pool_layer(current, pooling)
+ net[name] = current
+
+ assert len(net) == len(VGG19_LAYERS)
+ return net
+
+def _conv_layer(input, weights, bias):
+ conv = tf.nn.conv2d(input, tf.constant(weights), strides=(1, 1, 1, 1),
+ padding='SAME')
+ return tf.nn.bias_add(conv, bias)
+
+
+def _pool_layer(input, pooling):
+ if pooling == 'avg':
+ return tf.nn.avg_pool(input, ksize=(1, 2, 2, 1), strides=(1, 2, 2, 1),
+ padding='SAME')
+ else:
+ return tf.nn.max_pool(input, ksize=(1, 2, 2, 1), strides=(1, 2, 2, 1),
+ padding='SAME')
+
+def preprocess(image, mean_pixel):
+ return image - mean_pixel
+
+
+def unprocess(image, mean_pixel):
+ return image + mean_pixel
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/8/vgg.pyc b/Building-Machine-Learning-Projects-with-TensorFlow-master/8/vgg.pyc
new file mode 100644
index 000000000..aba001e0d
Binary files /dev/null and b/Building-Machine-Learning-Projects-with-TensorFlow-master/8/vgg.pyc differ
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/9/cluster_pi_final.py b/Building-Machine-Learning-Projects-with-TensorFlow-master/9/cluster_pi_final.py
new file mode 100644
index 000000000..660ea48f7
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/9/cluster_pi_final.py
@@ -0,0 +1,26 @@
+import tensorflow as tf
+import numpy as np
+
+tf.app.flags.DEFINE_integer("numsamples", 100, "Number of samples per server")
+FLAGS = tf.app.flags.FLAGS
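+# Start a TensorFlow server on each host in the cluster spec below first,
+# then run e.g.: python cluster_pi_final.py --numsamples=100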
+
+print ("Sample number per server: " + str(FLAGS.numsamples) )
+cluster = tf.train.ClusterSpec({"local": ["ec2-52-90-57-240.compute-1.amazonaws.com:2222", "ec2-54-196-135-128.compute-1.amazonaws.com:2222"]})
+
+c=[]
+
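+# Monte Carlo estimate of pi: the samples are uniform in the unit square, so the
+# fraction with x^2 + y^2 <= 1 approximates pi/4 (a quarter of the unit circle).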
+def generate_sum():
+ i=tf.constant(np.random.uniform(size=FLAGS.numsamples*2), shape=[FLAGS.numsamples,2])
+ distances=tf.reduce_sum(tf.pow(i,2),1)
+ return (tf.reduce_sum(tf.cast(tf.greater_equal(tf.cast(1.0,tf.float64),distances),tf.int32)))
+
+
+with tf.device("/job:local/task:0"):
+ test1= generate_sum()
+
+with tf.device("/job:local/task:1"):
+ test2= generate_sum()
+
+with tf.Session("grpc://ec2-52-90-57-240.compute-1.amazonaws.com:2222") as sess:
+ result = sess.run(tf.cast(test1 + test2,tf.float64)/FLAGS.numsamples*2.0)
+ print(result)
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/9/gpu_pi.py b/Building-Machine-Learning-Projects-with-TensorFlow-master/9/gpu_pi.py
new file mode 100644
index 000000000..ca0d8935f
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/9/gpu_pi.py
@@ -0,0 +1,20 @@
+import tensorflow as tf
+import numpy as np
+c = []
+#Distribute the work between the GPUs
+for d in ['/gpu:0', '/gpu:1', '/gpu:2', '/gpu:3']:
+    with tf.device(d):
+        #Generate the random 2D samples
+        i = tf.constant(np.random.uniform(size=10000), shape=[5000, 2])
+        #Calculate the squared euclidean distance to the origin
+        distances = tf.reduce_sum(tf.pow(i, 2), 1)
+        #Count the samples inside the unit circle (squared distance <= 1)
+        tempsum = tf.reduce_sum(tf.cast(tf.greater_equal(tf.cast(1.0, tf.float64), distances), tf.float64))
+    with tf.Session() as sess:
+        #Append the current result to the results array
+        c.append(sess.run(tempsum))
+#Do the final ratio calculation on the CPU
+with tf.device('/cpu:0'):
+    with tf.Session() as sess:
+        total = tf.add_n(c)
+        print(sess.run(total/20000.0)*4.0)
\ No newline at end of file
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/9/start_server.py b/Building-Machine-Learning-Projects-with-TensorFlow-master/9/start_server.py
new file mode 100644
index 000000000..b690fba37
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/9/start_server.py
@@ -0,0 +1,8 @@
+import tensorflow as tf
+tf.app.flags.DEFINE_string("index", "0","Server index")
+FLAGS = tf.app.flags.FLAGS
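+# Start one server process per task, e.g.:
+#   python start_server.py --index=0
+#   python start_server.py --index=1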
+print(FLAGS.index)
+#cluster = tf.train.ClusterSpec({"local": ["ec2-52-90-57-240.compute-1.amazonaws.com:2222", "ec2-54-196-135-128.compute-1.amazonaws.com:2222"]})
+cluster = tf.train.ClusterSpec({"local": ["localhost:2222", "localhost:2223"]})
+server = tf.train.Server(cluster, job_name="local", task_index=int(FLAGS.index))
+server.join()
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/9/trainer.py b/Building-Machine-Learning-Projects-with-TensorFlow-master/9/trainer.py
new file mode 100644
index 000000000..f52bc3313
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/9/trainer.py
@@ -0,0 +1,93 @@
+import tensorflow as tf
+import numpy as np
+from sklearn.utils import shuffle
+
+# Here we define our cluster setup via the command line
+tf.app.flags.DEFINE_string("ps_hosts", "",
+ "Comma-separated list of hostname:port pairs")
+tf.app.flags.DEFINE_string("worker_hosts", "",
+ "Comma-separated list of hostname:port pairs")
+
+# Define the characteristics of the cluster node, and its task index
+tf.app.flags.DEFINE_string("job_name", "", "One of 'ps', 'worker'")
+tf.app.flags.DEFINE_integer("task_index", 0, "Index of task within the job")
+
+FLAGS = tf.app.flags.FLAGS
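+# Example launch (hypothetical hosts/ports; run one process per task):
+#   python trainer.py --ps_hosts=localhost:2230 --worker_hosts=localhost:2231,localhost:2232 --job_name=ps --task_index=0
+#   python trainer.py --ps_hosts=localhost:2230 --worker_hosts=localhost:2231,localhost:2232 --job_name=worker --task_index=0
+#   python trainer.py --ps_hosts=localhost:2230 --worker_hosts=localhost:2231,localhost:2232 --job_name=worker --task_index=1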
+
+
+def main(_):
+ ps_hosts = FLAGS.ps_hosts.split(",")
+ worker_hosts = FLAGS.worker_hosts.split(",")
+
+  # Create a cluster following the command-line parameters.
+ cluster = tf.train.ClusterSpec({"ps": ps_hosts, "worker": worker_hosts})
+
+ # Create the local task.
+ server = tf.train.Server(cluster,
+ job_name=FLAGS.job_name,
+ task_index=FLAGS.task_index)
+
+ if FLAGS.job_name == "ps":
+ server.join()
+ elif FLAGS.job_name == "worker":
+
+ # Assigns ops to the local worker by default.
+ with tf.device(tf.train.replica_device_setter(
+ worker_device="/job:worker/task:%d" % FLAGS.task_index,
+ cluster=cluster)):
+
+ #Define the training set, and the model parameters, loss function and training operation
+ trX = np.linspace(-1, 1, 101)
+ trY = 2 * trX + np.random.randn(*trX.shape) * 0.4 + 0.2 # create a y value
+ X = tf.placeholder("float", name="X") # create symbolic variables
+ Y = tf.placeholder("float", name = "Y")
+
+ def model(X, w, b):
+        return tf.multiply(X, w) + b # We just define the line as X*w + b0 (tf.mul was renamed in TF 1.0)
+
+ w = tf.Variable(-1.0, name="b0") # create a shared variable
+ b = tf.Variable(-2.0, name="b1") # create a shared variable
+ y_model = model(X, w, b)
+
+ loss = (tf.pow(Y-y_model, 2)) # use sqr error for cost function
+ global_step = tf.Variable(0)
+
+ train_op = tf.train.AdagradOptimizer(0.8).minimize(
+ loss, global_step=global_step)
+
+ #Create a saver, and a summary and init operation
+ saver = tf.train.Saver()
+      summary_op = tf.summary.merge_all()  # renamed from tf.merge_all_summaries() in TF 1.0
+      init_op = tf.global_variables_initializer()  # renamed from tf.initialize_all_variables() in TF 1.0
+
+ # Create a "supervisor", which oversees the training process.
+ sv = tf.train.Supervisor(is_chief=(FLAGS.task_index == 0),
+ logdir="/tmp/train_logs",
+ init_op=init_op,
+ summary_op=summary_op,
+ saver=saver,
+ global_step=global_step,
+ save_model_secs=600)
+
+ # The supervisor takes care of session initialization, restoring from
+ # a checkpoint, and closing when done or an error occurs.
+ with sv.managed_session(server.target) as sess:
+ # Loop until the supervisor shuts down
+ step = 0
+      while not sv.should_stop():
+        # Run a training step asynchronously.
+        # See `tf.train.SyncReplicasOptimizer` for additional details on how to
+        # perform *synchronous* training.
+        for i in range(100):
+          trX, trY = shuffle(trX, trY, random_state=0)
+          for (x, y) in zip(trX, trY):
+            _, step = sess.run([train_op, global_step], feed_dict={X: x, Y: y})
+        #Print the partial results, and the current node doing the calculation
+        print("Partial result from node: " + str(FLAGS.task_index) + ", w: " + str(w.eval(session=sess)) + ", b0: " + str(b.eval(session=sess)))
+ # Ask for all the services to stop.
+ sv.stop()
+
+
+
+if __name__ == "__main__":
+ tf.app.run()
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/ERRATA_AND_UPDATES.md b/Building-Machine-Learning-Projects-with-TensorFlow-master/ERRATA_AND_UPDATES.md
new file mode 100644
index 000000000..4da724dae
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/ERRATA_AND_UPDATES.md
@@ -0,0 +1,3 @@
+## We are updating all examples to make them compatible with TensorFlow 1.0
+
+This file summarizes the errata found in the book, along with the examples updated for newer TensorFlow releases (starting with 0.12).
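+
+For example, several operations were renamed in the TensorFlow 1.0 API. A minimal sketch of the renames that affect these examples (the 1.0 calls, with the old 0.x names in comments):
+
+```python
+import tensorflow as tf
+
+a = tf.constant(2.0)
+b = tf.constant(3.0)
+
+product = tf.multiply(a, b)                # was tf.mul(a, b)
+init = tf.global_variables_initializer()  # was tf.initialize_all_variables()
+summaries = tf.summary.merge_all()        # was tf.merge_all_summaries()
+```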
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/LICENSE b/Building-Machine-Learning-Projects-with-TensorFlow-master/LICENSE
new file mode 100644
index 000000000..9f90f100d
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2016 Packt
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/Building-Machine-Learning-Projects-with-TensorFlow-master/README.md b/Building-Machine-Learning-Projects-with-TensorFlow-master/README.md
new file mode 100644
index 000000000..6df467235
--- /dev/null
+++ b/Building-Machine-Learning-Projects-with-TensorFlow-master/README.md
@@ -0,0 +1,37 @@
+# Building Machine Learning Projects with TensorFlow
+This is the code repository for [Building Machine Learning Projects with TensorFlow](https://www.packtpub.com/big-data-and-business-intelligence/building-machine-learning-projects-tensorflow?utm_source=github&utm_medium=repository&utm_campaign=9781786466587), published by [Packt](https://www.packtpub.com). It contains all the supporting project files necessary to work through the book from start to finish.
+## Instructions and Navigations
+All of the code is organized into folders. Each folder starts with a number followed by the application name. For example, Chapter02.
+
+
+
+The code will look like the following:
+```
+>>> import tensorflow as tf
+>>> tens1 = tf.constant([[[1,2],[2,3]],[[3,4],[5,6]]])
+>>> print sess.run(tens1)[1,1,0]
+5
+```
+
+
+
+
+| Software Required | Hardware Required | Operating System |
+| ----------------- | ----------------- | ---------------- |
+| TensorFlow 0.10, Jupyter Notebook | Any x86 computer | Ubuntu Linux 16.04 |
+
+
+
+## Related Products
+* [Machine Learning with TensorFlow](https://www.packtpub.com/big-data-and-business-intelligence/machine-learning-tensorflow?utm_source=github&utm_medium=repository&utm_campaign=9781786462961)
+
+* [Getting Started with TensorFlow](https://www.packtpub.com/big-data-and-business-intelligence/getting-started-tensorflow?utm_source=github&utm_medium=repository&utm_campaign=9781786468574)
+
+* [Building Machine Learning Systems with Python - Second Edition](https://www.packtpub.com/big-data-and-business-intelligence/building-machine-learning-systems-python-second-edition?utm_source=github&utm_medium=repository&utm_campaign=9781784392772)
+### Suggestions and Feedback
+[Click here](https://docs.google.com/forms/d/e/1FAIpQLSe5qwunkGf6PUvzPirPDtuy1Du5Rlzew23UBp2S-P3wB-GcwQ/viewform) if you have any feedback or suggestions.
diff --git a/Hands_on_ML_TF/.gitignore b/Hands_on_ML_TF/.gitignore
new file mode 100644
index 000000000..3ac0a2250
--- /dev/null
+++ b/Hands_on_ML_TF/.gitignore
@@ -0,0 +1,11 @@
+*.bak
+*.ckpt
+*.pyc
+.DS_Store
+.ipynb_checkpoints
+checkpoint
+logs/*
+tf_logs/*
+images/**/*.png
+my_*
+datasets/words
diff --git a/Hands_on_ML_TF/09_up_and_running_with_tensorflow.zip b/Hands_on_ML_TF/09_up_and_running_with_tensorflow.zip
new file mode 100644
index 000000000..43261e5dd
Binary files /dev/null and b/Hands_on_ML_TF/09_up_and_running_with_tensorflow.zip differ
diff --git a/Hands_on_ML_TF/10_introduction_to_artificial_neural_networks.zip b/Hands_on_ML_TF/10_introduction_to_artificial_neural_networks.zip
new file mode 100644
index 000000000..f370062bd
Binary files /dev/null and b/Hands_on_ML_TF/10_introduction_to_artificial_neural_networks.zip differ
diff --git a/Hands_on_ML_TF/11_deep_learning.zip b/Hands_on_ML_TF/11_deep_learning.zip
new file mode 100644
index 000000000..f58a3b974
Binary files /dev/null and b/Hands_on_ML_TF/11_deep_learning.zip differ
diff --git a/Hands_on_ML_TF/12_distributed_tensorflow.md b/Hands_on_ML_TF/12_distributed_tensorflow.md
new file mode 100644
index 000000000..262d457c6
--- /dev/null
+++ b/Hands_on_ML_TF/12_distributed_tensorflow.md
@@ -0,0 +1,345 @@
+
+**Chapter 12 – Distributed TensorFlow**
+
+_This notebook contains all the sample code and solutions to the exercises in chapter 12._
+
+# Setup
+
+First, let's make sure this notebook works well in both Python 2 and 3, import a few common modules, ensure Matplotlib plots figures inline, and prepare a function to save the figures:
+
+
+```python
+# To support both python 2 and python 3
+from __future__ import division, print_function, unicode_literals
+
+# Common imports
+import numpy as np
+import os
+
+# to make this notebook's output stable across runs
+def reset_graph(seed=42):
+ tf.reset_default_graph()
+ tf.set_random_seed(seed)
+ np.random.seed(seed)
+
+# To plot pretty figures
+%matplotlib inline
+import matplotlib
+import matplotlib.pyplot as plt
+plt.rcParams['axes.labelsize'] = 14
+plt.rcParams['xtick.labelsize'] = 12
+plt.rcParams['ytick.labelsize'] = 12
+
+# Where to save the figures
+PROJECT_ROOT_DIR = "."
+CHAPTER_ID = "distributed"
+
+def save_fig(fig_id, tight_layout=True):
+ path = os.path.join(PROJECT_ROOT_DIR, "images", CHAPTER_ID, fig_id + ".png")
+ print("Saving figure", fig_id)
+ if tight_layout:
+ plt.tight_layout()
+ plt.savefig(path, format='png', dpi=300)
+```
+
+# Local server
+
+
+```python
+import tensorflow as tf
+```
+
+
+```python
+c = tf.constant("Hello distributed TensorFlow!")
+server = tf.train.Server.create_local_server()
+```
+
+
+```python
+with tf.Session(server.target) as sess:
+ print(sess.run(c))
+```
+
+ b'Hello distributed TensorFlow!'
+
+
+# Cluster
+
+
+```python
+cluster_spec = tf.train.ClusterSpec({
+ "ps": [
+ "127.0.0.1:2221", # /job:ps/task:0
+ "127.0.0.1:2222", # /job:ps/task:1
+ ],
+ "worker": [
+ "127.0.0.1:2223", # /job:worker/task:0
+ "127.0.0.1:2224", # /job:worker/task:1
+ "127.0.0.1:2225", # /job:worker/task:2
+ ]})
+```
+
+
+```python
+task_ps0 = tf.train.Server(cluster_spec, job_name="ps", task_index=0)
+task_ps1 = tf.train.Server(cluster_spec, job_name="ps", task_index=1)
+task_worker0 = tf.train.Server(cluster_spec, job_name="worker", task_index=0)
+task_worker1 = tf.train.Server(cluster_spec, job_name="worker", task_index=1)
+task_worker2 = tf.train.Server(cluster_spec, job_name="worker", task_index=2)
+```
+
+# Pinning operations across devices and servers
+
+
+```python
+reset_graph()
+
+with tf.device("/job:ps"):
+ a = tf.Variable(1.0, name="a")
+
+with tf.device("/job:worker"):
+ b = a + 2
+
+with tf.device("/job:worker/task:1"):
+ c = a + b
+```
+
+
+```python
+with tf.Session("grpc://127.0.0.1:2221") as sess:
+ sess.run(a.initializer)
+ print(c.eval())
+```
+
+ 4.0
+
+
+
+```python
+reset_graph()
+
+with tf.device(tf.train.replica_device_setter(
+ ps_tasks=2,
+ ps_device="/job:ps",
+ worker_device="/job:worker")):
+ v1 = tf.Variable(1.0, name="v1") # pinned to /job:ps/task:0 (defaults to /cpu:0)
+ v2 = tf.Variable(2.0, name="v2") # pinned to /job:ps/task:1 (defaults to /cpu:0)
+ v3 = tf.Variable(3.0, name="v3") # pinned to /job:ps/task:0 (defaults to /cpu:0)
+ s = v1 + v2 # pinned to /job:worker (defaults to task:0/cpu:0)
+ with tf.device("/task:1"):
+ p1 = 2 * s # pinned to /job:worker/task:1 (defaults to /cpu:0)
+ with tf.device("/cpu:0"):
+ p2 = 3 * s # pinned to /job:worker/task:1/cpu:0
+
+config = tf.ConfigProto()
+config.log_device_placement = True
+
+with tf.Session("grpc://127.0.0.1:2221", config=config) as sess:
+ v1.initializer.run()
+```
+
+# Readers
+
+
+```python
+reset_graph()
+
+test_csv = open("my_test.csv", "w")
+test_csv.write("x1, x2 , target\n")
+test_csv.write("1., , 0\n")
+test_csv.write("4., 5. , 1\n")
+test_csv.write("7., 8. , 0\n")
+test_csv.close()
+
+filename_queue = tf.FIFOQueue(capacity=10, dtypes=[tf.string], shapes=[()])
+filename = tf.placeholder(tf.string)
+enqueue_filename = filename_queue.enqueue([filename])
+close_filename_queue = filename_queue.close()
+
+reader = tf.TextLineReader(skip_header_lines=1)
+key, value = reader.read(filename_queue)
+
+x1, x2, target = tf.decode_csv(value, record_defaults=[[-1.], [-1.], [-1]])
+features = tf.stack([x1, x2])
+
+instance_queue = tf.RandomShuffleQueue(
+ capacity=10, min_after_dequeue=2,
+ dtypes=[tf.float32, tf.int32], shapes=[[2],[]],
+ name="instance_q", shared_name="shared_instance_q")
+enqueue_instance = instance_queue.enqueue([features, target])
+close_instance_queue = instance_queue.close()
+
+minibatch_instances, minibatch_targets = instance_queue.dequeue_up_to(2)
+
+with tf.Session() as sess:
+ sess.run(enqueue_filename, feed_dict={filename: "my_test.csv"})
+ sess.run(close_filename_queue)
+ try:
+ while True:
+ sess.run(enqueue_instance)
+ except tf.errors.OutOfRangeError as ex:
+ print("No more files to read")
+ sess.run(close_instance_queue)
+ try:
+ while True:
+ print(sess.run([minibatch_instances, minibatch_targets]))
+ except tf.errors.OutOfRangeError as ex:
+ print("No more training instances")
+```
+
+ No more files to read
+ [array([[ 4.00000000e+00, 5.00000000e+00],
+ [ 1.00000000e+00, 8.62997533e-19]], dtype=float32), array([1, 0], dtype=int32)]
+ [array([[ 7., 8.]], dtype=float32), array([0], dtype=int32)]
+ No more training instances
+
+
+
+```python
+#coord = tf.train.Coordinator()
+#threads = tf.train.start_queue_runners(coord=coord)
+#filename_queue = tf.train.string_input_producer(["test.csv"])
+#coord.request_stop()
+#coord.join(threads)
+```
+
+# Queue runners and coordinators
+
+
+```python
+reset_graph()
+
+filename_queue = tf.FIFOQueue(capacity=10, dtypes=[tf.string], shapes=[()])
+filename = tf.placeholder(tf.string)
+enqueue_filename = filename_queue.enqueue([filename])
+close_filename_queue = filename_queue.close()
+
+reader = tf.TextLineReader(skip_header_lines=1)
+key, value = reader.read(filename_queue)
+
+x1, x2, target = tf.decode_csv(value, record_defaults=[[-1.], [-1.], [-1]])
+features = tf.stack([x1, x2])
+
+instance_queue = tf.RandomShuffleQueue(
+ capacity=10, min_after_dequeue=2,
+ dtypes=[tf.float32, tf.int32], shapes=[[2],[]],
+ name="instance_q", shared_name="shared_instance_q")
+enqueue_instance = instance_queue.enqueue([features, target])
+close_instance_queue = instance_queue.close()
+
+minibatch_instances, minibatch_targets = instance_queue.dequeue_up_to(2)
+
+n_threads = 5
+queue_runner = tf.train.QueueRunner(instance_queue, [enqueue_instance] * n_threads)
+coord = tf.train.Coordinator()
+
+with tf.Session() as sess:
+ sess.run(enqueue_filename, feed_dict={filename: "my_test.csv"})
+ sess.run(close_filename_queue)
+ enqueue_threads = queue_runner.create_threads(sess, coord=coord, start=True)
+ try:
+ while True:
+ print(sess.run([minibatch_instances, minibatch_targets]))
+ except tf.errors.OutOfRangeError as ex:
+ print("No more training instances")
+```
+
+ [array([[ 7., 8.],
+ [ 4., 5.]], dtype=float32), array([0, 1], dtype=int32)]
+ [array([[ 1.00000000e+00, 8.62997533e-19]], dtype=float32), array([0], dtype=int32)]
+ No more training instances
+
+
+
+```python
+reset_graph()
+
+def read_and_push_instance(filename_queue, instance_queue):
+ reader = tf.TextLineReader(skip_header_lines=1)
+ key, value = reader.read(filename_queue)
+ x1, x2, target = tf.decode_csv(value, record_defaults=[[-1.], [-1.], [-1]])
+ features = tf.stack([x1, x2])
+ enqueue_instance = instance_queue.enqueue([features, target])
+ return enqueue_instance
+
+filename_queue = tf.FIFOQueue(capacity=10, dtypes=[tf.string], shapes=[()])
+filename = tf.placeholder(tf.string)
+enqueue_filename = filename_queue.enqueue([filename])
+close_filename_queue = filename_queue.close()
+
+instance_queue = tf.RandomShuffleQueue(
+ capacity=10, min_after_dequeue=2,
+ dtypes=[tf.float32, tf.int32], shapes=[[2],[]],
+ name="instance_q", shared_name="shared_instance_q")
+
+minibatch_instances, minibatch_targets = instance_queue.dequeue_up_to(2)
+
+read_and_enqueue_ops = [read_and_push_instance(filename_queue, instance_queue) for i in range(5)]
+queue_runner = tf.train.QueueRunner(instance_queue, read_and_enqueue_ops)
+
+with tf.Session() as sess:
+ sess.run(enqueue_filename, feed_dict={filename: "my_test.csv"})
+ sess.run(close_filename_queue)
+ coord = tf.train.Coordinator()
+ enqueue_threads = queue_runner.create_threads(sess, coord=coord, start=True)
+ try:
+ while True:
+ print(sess.run([minibatch_instances, minibatch_targets]))
+ except tf.errors.OutOfRangeError as ex:
+ print("No more training instances")
+
+
+```
+
+ [array([[ 4.00000000e+00, 5.00000000e+00],
+ [ 1.00000000e+00, 8.62997533e-19]], dtype=float32), array([1, 0], dtype=int32)]
+ [array([[ 7., 8.]], dtype=float32), array([0], dtype=int32)]
+ No more training instances
+
+
+# Setting a timeout
+
+
+```python
+reset_graph()
+
+q = tf.FIFOQueue(capacity=10, dtypes=[tf.float32], shapes=[()])
+v = tf.placeholder(tf.float32)
+enqueue = q.enqueue([v])
+dequeue = q.dequeue()
+output = dequeue + 1
+
+config = tf.ConfigProto()
+config.operation_timeout_in_ms = 1000
+
+with tf.Session(config=config) as sess:
+ sess.run(enqueue, feed_dict={v: 1.0})
+ sess.run(enqueue, feed_dict={v: 2.0})
+ sess.run(enqueue, feed_dict={v: 3.0})
+ print(sess.run(output))
+ print(sess.run(output, feed_dict={dequeue: 5}))
+ print(sess.run(output))
+ print(sess.run(output))
+ try:
+ print(sess.run(output))
+ except tf.errors.DeadlineExceededError as ex:
+ print("Timed out while dequeuing")
+
+```
+
+ 2.0
+ 6.0
+ 3.0
+ 4.0
+ Timed out while dequeuing
+
+
+# Exercise solutions
+
+**Coming soon**
+
+
+```python
+
+```
diff --git a/Hands_on_ML_TF/13_convolutional_neural_networks.zip b/Hands_on_ML_TF/13_convolutional_neural_networks.zip
new file mode 100644
index 000000000..0f18437b4
Binary files /dev/null and b/Hands_on_ML_TF/13_convolutional_neural_networks.zip differ
diff --git a/Hands_on_ML_TF/14_recurrent_neural_networks.zip b/Hands_on_ML_TF/14_recurrent_neural_networks.zip
new file mode 100644
index 000000000..662cd3eb4
Binary files /dev/null and b/Hands_on_ML_TF/14_recurrent_neural_networks.zip differ
diff --git a/Hands_on_ML_TF/15_autoencoders.zip b/Hands_on_ML_TF/15_autoencoders.zip
new file mode 100644
index 000000000..dad1dcec3
Binary files /dev/null and b/Hands_on_ML_TF/15_autoencoders.zip differ
diff --git a/Hands_on_ML_TF/16_reinforcement_learning.md b/Hands_on_ML_TF/16_reinforcement_learning.md
new file mode 100644
index 000000000..44b289abd
--- /dev/null
+++ b/Hands_on_ML_TF/16_reinforcement_learning.md
@@ -0,0 +1,1354 @@
+
+**Chapter 16 – Reinforcement Learning**
+
+This notebook contains all the sample code and solutions to the exercises in chapter 16.
+
+# Setup
+
+First, let's make sure this notebook works well in both Python 2 and 3, import a few common modules, ensure Matplotlib plots figures inline, and prepare a function to save the figures:
+
+
+```python
+# To support both python 2 and python 3
+from __future__ import division, print_function, unicode_literals
+
+# Common imports
+import numpy as np
+import numpy.random as rnd  # `rnd` is used by the Markov chain, MDP, and DQN examples below
+import os
+import sys
+
+# to make this notebook's output stable across runs
+def reset_graph(seed=42):
+ tf.reset_default_graph()
+ tf.set_random_seed(seed)
+ np.random.seed(seed)
+
+# To plot pretty figures and animations
+%matplotlib nbagg
+import matplotlib
+import matplotlib.animation as animation
+import matplotlib.pyplot as plt
+plt.rcParams['axes.labelsize'] = 14
+plt.rcParams['xtick.labelsize'] = 12
+plt.rcParams['ytick.labelsize'] = 12
+
+# Where to save the figures
+PROJECT_ROOT_DIR = "."
+CHAPTER_ID = "rl"
+
+def save_fig(fig_id, tight_layout=True):
+ path = os.path.join(PROJECT_ROOT_DIR, "images", CHAPTER_ID, fig_id + ".png")
+ print("Saving figure", fig_id)
+ if tight_layout:
+ plt.tight_layout()
+ plt.savefig(path, format='png', dpi=300)
+```
+
+Note: there may be minor differences between the output of this notebook and the examples shown in the book. You can safely ignore these differences. They are mainly due to the fact that most of the environments provided by OpenAI gym have some randomness.
+
+# Introduction to OpenAI gym
+
+In this notebook we will be using [OpenAI gym](https://gym.openai.com/), a great toolkit for developing and comparing Reinforcement Learning algorithms. It provides many environments for your learning *agents* to interact with. Let's start by importing `gym`:
+
+
+```python
+import gym
+```
+
+Next we will load the MsPacman environment, version 0.
+
+
+```python
+env = gym.make('MsPacman-v0')
+```
+
+ [2017-06-08 07:37:31,348] Making new env: MsPacman-v0
+
+
+Let's initialize the environment by calling its `reset()` method. This returns an observation:
+
+
+```python
+obs = env.reset()
+```
+
+Observations vary depending on the environment. In this case it is an RGB image represented as a 3D NumPy array of shape [height, width, channels] (with 3 channels: Red, Green and Blue). In other environments it may return different objects, as we will see later.
+
+
+```python
+obs.shape
+```
+
+
+
+
+ (210, 160, 3)
+
+
+
+An environment can be visualized by calling its `render()` method, and you can pick the rendering mode (the rendering options depend on the environment). In this example we will set `mode="rgb_array"` to get an image of the environment as a NumPy array:
+
+
+```python
+img = env.render(mode="rgb_array")
+```
+
+Let's plot this image:
+
+
+```python
+plt.figure(figsize=(5,4))
+plt.imshow(img)
+plt.axis("off")
+save_fig("MsPacman")
+plt.show()
+```
+
+ Saving figure MsPacman
+
+
+Welcome back to the 1980s! :)
+
+In this environment, the rendered image is simply equal to the observation (but in many environments this is not the case):
+
+
+```python
+(img == obs).all()
+```
+
+
+
+
+ True
+
+
+
+Let's create a little helper function to plot an environment:
+
+
+```python
+def plot_environment(env, figsize=(5,4)):
+ plt.close() # or else nbagg sometimes plots in the previous cell
+ plt.figure(figsize=figsize)
+ img = env.render(mode="rgb_array")
+ plt.imshow(img)
+ plt.axis("off")
+ plt.show()
+```
+
+Let's see how to interact with an environment. Your agent will need to select an action from an "action space" (the set of possible actions). Let's see what this environment's action space looks like:
+
+
+```python
+env.action_space
+```
+
+
+
+
+ Discrete(9)
+
+
+
+`Discrete(9)` means that the possible actions are integers 0 through 8, which represent the 9 possible positions of the joystick (0=center, 1=up, 2=right, 3=left, 4=down, 5=upper-right, 6=upper-left, 7=lower-right, 8=lower-left).
+
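+For example, `env.action_space.sample()` returns one of these 9 integers picked uniformly at random; we will use it later to play randomly.
+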
+Next we need to tell the environment which action to play, and it will compute the next step of the game. Let's go left for 110 steps, then lower left for 40 steps:
+
+
+```python
+env.reset()
+for step in range(110):
+ env.step(3) #left
+for step in range(40):
+ env.step(8) #lower-left
+```
+
+Where are we now?
+
+
+```python
+plot_environment(env)
+```
+
+The `step()` function actually returns several important objects:
+
+
+```python
+obs, reward, done, info = env.step(0)
+```
+
+The observation tells the agent what the environment looks like, as discussed earlier. This is a 210x160 RGB image:
+
+
+```python
+obs.shape
+```
+
+
+
+
+ (210, 160, 3)
+
+
+
+The environment also tells the agent how much reward it got during the last step:
+
+
+```python
+reward
+```
+
+
+
+
+ 0.0
+
+
+
+When the game is over, the environment returns `done=True`:
+
+
+```python
+done
+```
+
+
+
+
+ False
+
+
+
+Finally, `info` is an environment-specific dictionary that can provide some extra information about the internal state of the environment. This is useful for debugging, but your agent should not use this information for learning (it would be cheating).
+
+
+```python
+info
+```
+
+
+
+
+ {}
+
+
+
+Let's play one full game (with 3 lives), by moving in random directions for 10 steps at a time, recording each frame:
+
+
+```python
+frames = []
+
+n_max_steps = 1000
+n_change_steps = 10
+
+obs = env.reset()
+for step in range(n_max_steps):
+ img = env.render(mode="rgb_array")
+ frames.append(img)
+ if step % n_change_steps == 0:
+ action = env.action_space.sample() # play randomly
+ obs, reward, done, info = env.step(action)
+ if done:
+ break
+```
+
+Now show the animation (it's a bit jittery within Jupyter):
+
+
+```python
+def update_scene(num, frames, patch):
+ patch.set_data(frames[num])
+ return patch,
+
+def plot_animation(frames, repeat=False, interval=40):
+ plt.close() # or else nbagg sometimes plots in the previous cell
+ fig = plt.figure()
+ patch = plt.imshow(frames[0])
+ plt.axis('off')
+ return animation.FuncAnimation(fig, update_scene, fargs=(frames, patch), frames=len(frames), repeat=repeat, interval=interval)
+```
+
+
+```python
+video = plot_animation(frames)
+plt.show()
+```
+
+Once you have finished playing with an environment, you should close it to free up resources:
+
+
+```python
+env.close()
+```
+
+To code our first learning agent, we will be using a simpler environment: the Cart-Pole.
+
+# A simple environment: the Cart-Pole
+
+The Cart-Pole is a very simple environment composed of a cart that can move left or right, and a pole placed vertically on top of it. The agent must move the cart left or right to keep the pole upright.
+
+
+```python
+env = gym.make("CartPole-v0")
+```
+
+ [2017-06-08 07:38:24,279] Making new env: CartPole-v0
+
+
+
+```python
+obs = env.reset()
+```
+
+
+```python
+obs
+```
+
+
+
+
+ array([-0.03625774, -0.01502138, 0.03556243, -0.03981458])
+
+
+
+The observation is a 1D NumPy array composed of 4 floats: they represent the cart's horizontal position, its velocity, the angle of the pole (0 = vertical), and the angular velocity. Let's render the environment... unfortunately we need to fix an annoying rendering issue first.
+
+## Fixing the rendering issue
+
+Some environments (including the Cart-Pole) require access to your display, which opens up a separate window, even if you specify the `rgb_array` mode. In general you can safely ignore that window. However, if Jupyter is running on a headless server (i.e. without a screen) it will raise an exception. One way to avoid this is to install a fake X server like Xvfb. You can start Jupyter using the `xvfb-run` command:
+
+ $ xvfb-run -s "-screen 0 1400x900x24" jupyter notebook
+
+If Jupyter is running on a headless server but you don't want to worry about Xvfb, then you can just use the following rendering function for the Cart-Pole:
+
+
+```python
+from PIL import Image, ImageDraw
+
+try:
+ from pyglet.gl import gl_info
+ openai_cart_pole_rendering = True # no problem, let's use OpenAI gym's rendering function
+except Exception:
+ openai_cart_pole_rendering = False # probably no X server available, let's use our own rendering function
+
+def render_cart_pole(env, obs):
+ if openai_cart_pole_rendering:
+ # use OpenAI gym's rendering function
+ return env.render(mode="rgb_array")
+ else:
+ # rendering for the cart pole environment (in case OpenAI gym can't do it)
+ img_w = 600
+ img_h = 400
+ cart_w = img_w // 12
+ cart_h = img_h // 15
+ pole_len = img_h // 3.5
+ pole_w = img_w // 80 + 1
+ x_width = 2
+ max_ang = 0.2
+ bg_col = (255, 255, 255)
+ cart_col = 0x000000 # Blue Green Red
+ pole_col = 0x669acc # Blue Green Red
+
+ pos, vel, ang, ang_vel = obs
+ img = Image.new('RGB', (img_w, img_h), bg_col)
+ draw = ImageDraw.Draw(img)
+ cart_x = pos * img_w // x_width + img_w // x_width
+ cart_y = img_h * 95 // 100
+ top_pole_x = cart_x + pole_len * np.sin(ang)
+ top_pole_y = cart_y - cart_h // 2 - pole_len * np.cos(ang)
+ draw.line((0, cart_y, img_w, cart_y), fill=0)
+ draw.rectangle((cart_x - cart_w // 2, cart_y - cart_h // 2, cart_x + cart_w // 2, cart_y + cart_h // 2), fill=cart_col) # draw cart
+ draw.line((cart_x, cart_y - cart_h // 2, top_pole_x, top_pole_y), fill=pole_col, width=pole_w) # draw pole
+ return np.array(img)
+
+def plot_cart_pole(env, obs):
+ plt.close() # or else nbagg sometimes plots in the previous cell
+ img = render_cart_pole(env, obs)
+ plt.imshow(img)
+ plt.axis("off")
+ plt.show()
+```
+
+
+```python
+plot_cart_pole(env, obs)
+```
+
+Now let's look at the action space:
+
+
+```python
+env.action_space
+```
+
+
+
+
+ Discrete(2)
+
+
+
+Yep, just two possible actions: accelerate towards the left or towards the right. Let's push the cart left until the pole falls:
+
+
+```python
+obs = env.reset()
+while True:
+ obs, reward, done, info = env.step(0)
+ if done:
+ break
+```
+
+
+```python
+plt.close() # or else nbagg sometimes plots in the previous cell
+img = render_cart_pole(env, obs)
+plt.imshow(img)
+plt.axis("off")
+save_fig("cart_pole_plot")
+```
+
+ Saving figure cart_pole_plot
+
+
+
+```python
+img.shape
+```
+
+
+
+
+ (400, 600, 3)
+
+
+
+Notice that the game is over when the pole tilts too much, not when it actually falls. Now let's reset the environment and push the cart to right instead:
+
+
+```python
+obs = env.reset()
+while True:
+ obs, reward, done, info = env.step(1)
+ if done:
+ break
+```
+
+
+```python
+plot_cart_pole(env, obs)
+```
+
+Looks like it's doing what we're telling it to do. Now how can we make the pole remain upright? We will need to define a _policy_ for that. This is the strategy that the agent will use to select an action at each step. It can use all the past actions and observations to decide what to do.
+
+# A simple hard-coded policy
+
+Let's hard code a simple strategy: if the pole is tilting to the left, then push the cart to the left, and _vice versa_. Let's see if that works:
+
+
+```python
+frames = []
+
+n_max_steps = 1000
+n_change_steps = 10
+
+obs = env.reset()
+for step in range(n_max_steps):
+ img = render_cart_pole(env, obs)
+ frames.append(img)
+
+ # hard-coded policy
+ position, velocity, angle, angular_velocity = obs
+ if angle < 0:
+ action = 0
+ else:
+ action = 1
+
+ obs, reward, done, info = env.step(action)
+ if done:
+ break
+```
+
+
+```python
+video = plot_animation(frames)
+plt.show()
+```
+
+Nope, the system is unstable and after just a few wobbles, the pole ends up too tilted: game over. We will need to be smarter than that!
+
+# Neural Network Policies
+
+Let's create a neural network that will take observations as inputs, and output the action to take for each observation. To choose an action, the network will first estimate a probability for each action, then select an action randomly according to the estimated probabilities. In the case of the Cart-Pole environment, there are just two possible actions (left or right), so we only need one output neuron: it will output the probability `p` of the action 0 (left), and of course the probability of action 1 (right) will be `1 - p`.
+
+Note: instead of using the `fully_connected()` function from the `tensorflow.contrib.layers` module (as in the book), we now use the `dense()` function from the `tf.layers` module, which did not exist when this chapter was written. This is preferable because anything in contrib may change or be deleted without notice, while `tf.layers` is part of the official API. As you will see, the code is mostly the same.
+
+The main differences relevant to this chapter are:
+* the `_fn` suffix was removed from all the parameters that had it (for example, the `activation_fn` parameter was renamed to `activation`),
+* the `weights` parameter was renamed to `kernel`,
+* and the default activation is now `None` instead of `tf.nn.relu`.
+
+
+```python
+import tensorflow as tf
+
+# 1. Specify the network architecture
+n_inputs = 4 # == env.observation_space.shape[0]
+n_hidden = 4 # it's a simple task, we don't need more than this
+n_outputs = 1 # only outputs the probability of accelerating left
+initializer = tf.contrib.layers.variance_scaling_initializer()
+
+# 2. Build the neural network
+X = tf.placeholder(tf.float32, shape=[None, n_inputs])
+hidden = tf.layers.dense(X, n_hidden, activation=tf.nn.elu,
+ kernel_initializer=initializer)
+outputs = tf.layers.dense(hidden, n_outputs, activation=tf.nn.sigmoid,
+ kernel_initializer=initializer)
+
+# 3. Select a random action based on the estimated probabilities
+p_left_and_right = tf.concat(axis=1, values=[outputs, 1 - outputs])
+action = tf.multinomial(tf.log(p_left_and_right), num_samples=1)
+
+init = tf.global_variables_initializer()
+```
+
+In this particular environment, the past actions and observations can safely be ignored, since each observation contains the environment's full state. If there were some hidden state then you may need to consider past actions and observations in order to try to infer the hidden state of the environment. For example, if the environment only revealed the position of the cart but not its velocity, you would have to consider not only the current observation but also the previous observation in order to estimate the current velocity. Another example is if the observations are noisy: you may want to use the past few observations to estimate the most likely current state. Our problem is thus as simple as can be: the current observation is noise-free and contains the environment's full state.
+
+You may wonder why we are picking a random action based on the probability given by the policy network, rather than just picking the action with the highest probability. This approach lets the agent find the right balance between _exploring_ new actions and _exploiting_ the actions that are known to work well. Here's an analogy: suppose you go to a restaurant for the first time, and all the dishes look equally appealing so you randomly pick one. If it turns out to be good, you can increase the probability to order it next time, but you shouldn't increase that probability to 100%, or else you will never try out the other dishes, some of which may be even better than the one you tried.
+
+Let's randomly initialize this policy neural network and use it to play one game:
+
+
+```python
+n_max_steps = 1000
+frames = []
+
+with tf.Session() as sess:
+ init.run()
+ obs = env.reset()
+ for step in range(n_max_steps):
+ img = render_cart_pole(env, obs)
+ frames.append(img)
+ action_val = action.eval(feed_dict={X: obs.reshape(1, n_inputs)})
+ obs, reward, done, info = env.step(action_val[0][0])
+ if done:
+ break
+
+env.close()
+```
+
+Now let's look at how well this randomly initialized policy network performed:
+
+
+```python
+video = plot_animation(frames)
+plt.show()
+```
+
+Yeah... pretty bad. The neural network will have to learn to do better. First let's see if it is capable of learning the basic policy we used earlier: go left if the pole is tilting left, and go right if it is tilting right. The following code defines the same neural network but we add the target probabilities `y`, and the training operations (`cross_entropy`, `optimizer` and `training_op`):
+
+
+```python
+import tensorflow as tf
+
+reset_graph()
+
+n_inputs = 4
+n_hidden = 4
+n_outputs = 1
+
+learning_rate = 0.01
+
+initializer = tf.contrib.layers.variance_scaling_initializer()
+
+X = tf.placeholder(tf.float32, shape=[None, n_inputs])
+y = tf.placeholder(tf.float32, shape=[None, n_outputs])
+
+hidden = tf.layers.dense(X, n_hidden, activation=tf.nn.elu, kernel_initializer=initializer)
+logits = tf.layers.dense(hidden, n_outputs)
+outputs = tf.nn.sigmoid(logits) # probability of action 0 (left)
+p_left_and_right = tf.concat(axis=1, values=[outputs, 1 - outputs])
+action = tf.multinomial(tf.log(p_left_and_right), num_samples=1)
+
+cross_entropy = tf.nn.sigmoid_cross_entropy_with_logits(labels=y, logits=logits)
+optimizer = tf.train.AdamOptimizer(learning_rate)
+training_op = optimizer.minimize(cross_entropy)
+
+init = tf.global_variables_initializer()
+saver = tf.train.Saver()
+```
+
+We can make the same net play in 10 different environments in parallel, and train for 1000 iterations. We also reset environments when they are done.
+
+
+```python
+n_environments = 10
+n_iterations = 1000
+
+envs = [gym.make("CartPole-v0") for _ in range(n_environments)]
+observations = [env.reset() for env in envs]
+
+with tf.Session() as sess:
+ init.run()
+ for iteration in range(n_iterations):
+ target_probas = np.array([([1.] if obs[2] < 0 else [0.]) for obs in observations]) # if angle<0 we want proba(left)=1., or else proba(left)=0.
+ action_val, _ = sess.run([action, training_op], feed_dict={X: np.array(observations), y: target_probas})
+ for env_index, env in enumerate(envs):
+ obs, reward, done, info = env.step(action_val[env_index][0])
+ observations[env_index] = obs if not done else env.reset()
+ saver.save(sess, "./my_policy_net_basic.ckpt")
+
+for env in envs:
+ env.close()
+```
+
+ [2017-06-08 07:39:13,311] Making new env: CartPole-v0
+ [2017-06-08 07:39:13,315] Making new env: CartPole-v0
+ [2017-06-08 07:39:13,318] Making new env: CartPole-v0
+ [2017-06-08 07:39:13,320] Making new env: CartPole-v0
+ [2017-06-08 07:39:13,322] Making new env: CartPole-v0
+ [2017-06-08 07:39:13,324] Making new env: CartPole-v0
+ [2017-06-08 07:39:13,326] Making new env: CartPole-v0
+ [2017-06-08 07:39:13,328] Making new env: CartPole-v0
+ [2017-06-08 07:39:13,331] Making new env: CartPole-v0
+ [2017-06-08 07:39:13,333] Making new env: CartPole-v0
+
+
+
+```python
+def render_policy_net(model_path, action, X, n_max_steps = 1000):
+ frames = []
+ env = gym.make("CartPole-v0")
+ obs = env.reset()
+ with tf.Session() as sess:
+ saver.restore(sess, model_path)
+ for step in range(n_max_steps):
+ img = render_cart_pole(env, obs)
+ frames.append(img)
+ action_val = action.eval(feed_dict={X: obs.reshape(1, n_inputs)})
+ obs, reward, done, info = env.step(action_val[0][0])
+ if done:
+ break
+ env.close()
+ return frames
+```
+
+
+```python
+frames = render_policy_net("./my_policy_net_basic.ckpt", action, X)
+video = plot_animation(frames)
+plt.show()
+```
+
+ [2017-06-08 07:39:15,860] Making new env: CartPole-v0
+
+
+ INFO:tensorflow:Restoring parameters from ./my_policy_net_basic.ckpt
+
+
+ [2017-06-08 07:39:15,862] Restoring parameters from ./my_policy_net_basic.ckpt
+
+
+
+Looks like it learned the policy correctly. Now let's see if it can learn a better policy on its own.
+
+# Policy Gradients
+
+To train this neural network we will need to define the target probabilities `y`. If an action is good we should increase its probability, and conversely if it is bad we should reduce it. But how do we know whether an action is good or bad? The problem is that most actions have delayed effects, so when you win or lose points in a game, it is not clear which actions contributed to this result: was it just the last action? Or the last 10? Or just one action 50 steps earlier? This is called the _credit assignment problem_.
+
+The _Policy Gradients_ algorithm tackles this problem by first playing multiple games, then making the actions in good games slightly more likely, while actions in bad games are made slightly less likely. First we play, then we go back and think about what we did.
+
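+Concretely, the code below implements a REINFORCE-style gradient estimate (a sketch of what the update computes): the gradient of the log-likelihood of each chosen action is weighted by that action's normalized discounted score $A_t$, and these weighted gradients are averaged over all steps of all games in the batch:
+
+$$\nabla_\theta J(\theta) \approx \frac{1}{N} \sum_{t} A_t \, \nabla_\theta \log p_\theta(a_t \mid s_t)$$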
+
+```python
+import tensorflow as tf
+
+reset_graph()
+
+n_inputs = 4
+n_hidden = 4
+n_outputs = 1
+
+learning_rate = 0.01
+
+initializer = tf.contrib.layers.variance_scaling_initializer()
+
+X = tf.placeholder(tf.float32, shape=[None, n_inputs])
+
+hidden = tf.layers.dense(X, n_hidden, activation=tf.nn.elu, kernel_initializer=initializer)
+logits = tf.layers.dense(hidden, n_outputs)
+outputs = tf.nn.sigmoid(logits) # probability of action 0 (left)
+p_left_and_right = tf.concat(axis=1, values=[outputs, 1 - outputs])
+action = tf.multinomial(tf.log(p_left_and_right), num_samples=1)
+
+y = 1. - tf.to_float(action)
+cross_entropy = tf.nn.sigmoid_cross_entropy_with_logits(labels=y, logits=logits)
+optimizer = tf.train.AdamOptimizer(learning_rate)
+grads_and_vars = optimizer.compute_gradients(cross_entropy)
+gradients = [grad for grad, variable in grads_and_vars]
+gradient_placeholders = []
+grads_and_vars_feed = []
+for grad, variable in grads_and_vars:
+ gradient_placeholder = tf.placeholder(tf.float32, shape=grad.get_shape())
+ gradient_placeholders.append(gradient_placeholder)
+ grads_and_vars_feed.append((gradient_placeholder, variable))
+training_op = optimizer.apply_gradients(grads_and_vars_feed)
+
+init = tf.global_variables_initializer()
+saver = tf.train.Saver()
+```
+
+
+```python
+def discount_rewards(rewards, discount_rate):
+ discounted_rewards = np.zeros(len(rewards))
+ cumulative_rewards = 0
+ for step in reversed(range(len(rewards))):
+ cumulative_rewards = rewards[step] + cumulative_rewards * discount_rate
+ discounted_rewards[step] = cumulative_rewards
+ return discounted_rewards
+
+def discount_and_normalize_rewards(all_rewards, discount_rate):
+ all_discounted_rewards = [discount_rewards(rewards, discount_rate) for rewards in all_rewards]
+ flat_rewards = np.concatenate(all_discounted_rewards)
+ reward_mean = flat_rewards.mean()
+ reward_std = flat_rewards.std()
+ return [(discounted_rewards - reward_mean)/reward_std for discounted_rewards in all_discounted_rewards]
+```
+
+
+```python
+discount_rewards([10, 0, -50], discount_rate=0.8)
+```
+
+
+
+
+ array([-22., -40., -50.])
+
+
+
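+Working right to left with `discount_rate=0.8`: the last score stays $-50$, the middle one becomes $0 + 0.8 \times (-50) = -40$, and the first becomes $10 + 0.8 \times (-40) = -22$, matching the array above.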
+
+```python
+discount_and_normalize_rewards([[10, 0, -50], [10, 20]], discount_rate=0.8)
+```
+
+
+
+
+ [array([-0.28435071, -0.86597718, -1.18910299]),
+ array([ 1.26665318, 1.0727777 ])]
+
+
+
+
+```python
+env = gym.make("CartPole-v0")
+
+n_games_per_update = 10
+n_max_steps = 1000
+n_iterations = 250
+save_iterations = 10
+discount_rate = 0.95
+
+with tf.Session() as sess:
+ init.run()
+ for iteration in range(n_iterations):
+ print("\rIteration: {}".format(iteration), end="")
+ all_rewards = []
+ all_gradients = []
+ for game in range(n_games_per_update):
+ current_rewards = []
+ current_gradients = []
+ obs = env.reset()
+ for step in range(n_max_steps):
+ action_val, gradients_val = sess.run([action, gradients], feed_dict={X: obs.reshape(1, n_inputs)})
+ obs, reward, done, info = env.step(action_val[0][0])
+ current_rewards.append(reward)
+ current_gradients.append(gradients_val)
+ if done:
+ break
+ all_rewards.append(current_rewards)
+ all_gradients.append(current_gradients)
+
+ all_rewards = discount_and_normalize_rewards(all_rewards, discount_rate=discount_rate)
+ feed_dict = {}
+ for var_index, gradient_placeholder in enumerate(gradient_placeholders):
+ mean_gradients = np.mean([reward * all_gradients[game_index][step][var_index]
+ for game_index, rewards in enumerate(all_rewards)
+ for step, reward in enumerate(rewards)], axis=0)
+ feed_dict[gradient_placeholder] = mean_gradients
+ sess.run(training_op, feed_dict=feed_dict)
+ if iteration % save_iterations == 0:
+ saver.save(sess, "./my_policy_net_pg.ckpt")
+```
+
+ [2017-06-08 07:39:25,090] Making new env: CartPole-v0
+
+
+ Iteration: 249
+
+
+```python
+env.close()
+```
+
+
+```python
+frames = render_policy_net("./my_policy_net_pg.ckpt", action, X, n_max_steps=1000)
+video = plot_animation(frames)
+plt.show()
+```
+
+ [2017-06-08 08:01:35,953] Making new env: CartPole-v0
+
+
+ INFO:tensorflow:Restoring parameters from ./my_policy_net_pg.ckpt
+
+
+ [2017-06-08 08:01:35,956] Restoring parameters from ./my_policy_net_pg.ckpt
+
+
+# Markov Chains
+
+
+```python
+transition_probabilities = [
+ [0.7, 0.2, 0.0, 0.1], # from s0 to s0, s1, s2, s3
+ [0.0, 0.0, 0.9, 0.1], # from s1 to ...
+ [0.0, 1.0, 0.0, 0.0], # from s2 to ...
+ [0.0, 0.0, 0.0, 1.0], # from s3 to ...
+ ]
+
+n_max_steps = 50
+
+def print_sequence(start_state=0):
+ current_state = start_state
+ print("States:", end=" ")
+ for step in range(n_max_steps):
+ print(current_state, end=" ")
+ if current_state == 3:
+ break
+ current_state = rnd.choice(range(4), p=transition_probabilities[current_state])
+ else:
+ print("...", end="")
+ print()
+
+for _ in range(10):
+ print_sequence()
+```
+
+ States: 0 0 3
+ States: 0 1 2 1 2 1 2 1 2 1 3
+ States: 0 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 3
+ States: 0 3
+ States: 0 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 3
+ States: 0 1 3
+ States: 0 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 ...
+ States: 0 0 3
+ States: 0 0 0 1 2 1 2 1 3
+ States: 0 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 2 1 3
+
+
+# Markov Decision Process
+
+
+```python
+transition_probabilities = [
+ [[0.7, 0.3, 0.0], [1.0, 0.0, 0.0], [0.8, 0.2, 0.0]], # in s0, if action a0 then proba 0.7 to state s0 and 0.3 to state s1, etc.
+ [[0.0, 1.0, 0.0], None, [0.0, 0.0, 1.0]],
+ [None, [0.8, 0.1, 0.1], None],
+ ]
+
+rewards = [
+ [[+10, 0, 0], [0, 0, 0], [0, 0, 0]],
+ [[0, 0, 0], [0, 0, 0], [0, 0, -50]],
+ [[0, 0, 0], [+40, 0, 0], [0, 0, 0]],
+ ]
+
+possible_actions = [[0, 1, 2], [0, 2], [1]]
+
+def policy_fire(state):
+ return [0, 2, 1][state]
+
+def policy_random(state):
+ return rnd.choice(possible_actions[state])
+
+def policy_safe(state):
+ return [0, 0, 1][state]
+
+class MDPEnvironment(object):
+ def __init__(self, start_state=0):
+ self.start_state=start_state
+ self.reset()
+ def reset(self):
+ self.total_rewards = 0
+ self.state = self.start_state
+ def step(self, action):
+ next_state = rnd.choice(range(3), p=transition_probabilities[self.state][action])
+ reward = rewards[self.state][action][next_state]
+ self.state = next_state
+ self.total_rewards += reward
+ return self.state, reward
+
+def run_episode(policy, n_steps, start_state=0, display=True):
+ env = MDPEnvironment()
+ if display:
+ print("States (+rewards):", end=" ")
+ for step in range(n_steps):
+ if display:
+ if step == 10:
+ print("...", end=" ")
+ elif step < 10:
+ print(env.state, end=" ")
+ action = policy(env.state)
+ state, reward = env.step(action)
+ if display and step < 10:
+ if reward:
+ print("({})".format(reward), end=" ")
+ if display:
+ print("Total rewards =", env.total_rewards)
+ return env.total_rewards
+
+for policy in (policy_fire, policy_random, policy_safe):
+ all_totals = []
+ print(policy.__name__)
+ for episode in range(1000):
+ all_totals.append(run_episode(policy, n_steps=100, display=(episode<5)))
+ print("Summary: mean={:.1f}, std={:1f}, min={}, max={}".format(np.mean(all_totals), np.std(all_totals), np.min(all_totals), np.max(all_totals)))
+ print()
+```
+
+ policy_fire
+ States (+rewards): 0 (10) 0 (10) 0 1 (-50) 2 2 2 (40) 0 (10) 0 (10) 0 (10) ... Total rewards = 210
+ States (+rewards): 0 1 (-50) 2 (40) 0 (10) 0 (10) 0 1 (-50) 2 2 (40) 0 (10) ... Total rewards = 70
+ States (+rewards): 0 (10) 0 1 (-50) 2 (40) 0 (10) 0 (10) 0 (10) 0 (10) 0 (10) 0 (10) ... Total rewards = 70
+ States (+rewards): 0 1 (-50) 2 1 (-50) 2 (40) 0 (10) 0 1 (-50) 2 (40) 0 ... Total rewards = -10
+ States (+rewards): 0 1 (-50) 2 (40) 0 (10) 0 (10) 0 1 (-50) 2 (40) 0 (10) 0 (10) ... Total rewards = 290
+ Summary: mean=121.1, std=129.333766, min=-330, max=470
+
+ policy_random
+ States (+rewards): 0 1 (-50) 2 1 (-50) 2 (40) 0 1 (-50) 2 2 (40) 0 ... Total rewards = -60
+ States (+rewards): 0 (10) 0 0 0 0 0 (10) 0 0 0 (10) 0 ... Total rewards = -30
+ States (+rewards): 0 1 1 (-50) 2 (40) 0 0 1 1 1 1 ... Total rewards = 10
+ States (+rewards): 0 (10) 0 (10) 0 0 0 0 1 (-50) 2 (40) 0 0 ... Total rewards = 0
+ States (+rewards): 0 0 (10) 0 1 (-50) 2 (40) 0 0 0 0 (10) 0 (10) ... Total rewards = 40
+ Summary: mean=-22.1, std=88.152740, min=-380, max=200
+
+ policy_safe
+ States (+rewards): 0 1 1 1 1 1 1 1 1 1 ... Total rewards = 0
+ States (+rewards): 0 1 1 1 1 1 1 1 1 1 ... Total rewards = 0
+ States (+rewards): 0 (10) 0 (10) 0 (10) 0 1 1 1 1 1 1 ... Total rewards = 30
+ States (+rewards): 0 (10) 0 1 1 1 1 1 1 1 1 ... Total rewards = 10
+ States (+rewards): 0 1 1 1 1 1 1 1 1 1 ... Total rewards = 0
+ Summary: mean=22.3, std=26.244312, min=0, max=170
+
+
+
+# Q-Learning
+
+Q-Learning will learn the optimal policy by watching the random policy play.
+
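+The update applied at each step below is the standard Q-Learning rule, with learning rate $\alpha$ (`alpha`) and discount rate $\gamma$ (`gamma`):
+
+$$Q(s, a) \leftarrow (1 - \alpha)\,Q(s, a) + \alpha \left( r + \gamma \max_{a'} Q(s', a') \right)$$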
+
+```python
+n_states = 3
+n_actions = 3
+n_steps = 20000
+alpha = 0.01
+gamma = 0.99
+exploration_policy = policy_random
+q_values = np.full((n_states, n_actions), -np.inf)
+for state, actions in enumerate(possible_actions):
+ q_values[state][actions]=0
+
+env = MDPEnvironment()
+for step in range(n_steps):
+ action = exploration_policy(env.state)
+ state = env.state
+ next_state, reward = env.step(action)
+ next_value = np.max(q_values[next_state]) # greedy policy
+ q_values[state, action] = (1-alpha)*q_values[state, action] + alpha*(reward + gamma * next_value)
+```
+
+
+```python
+def optimal_policy(state):
+ return np.argmax(q_values[state])
+```
+
+
+```python
+q_values
+```
+
+
+
+
+ array([[ 39.13508139, 38.88079412, 35.23025716],
+ [ 18.9117071 , -inf, 20.54567816],
+ [ -inf, 72.53192111, -inf]])
+
+
+
+
+```python
+all_totals = []
+for episode in range(1000):
+ all_totals.append(run_episode(optimal_policy, n_steps=100, display=(episode<5)))
+print("Summary: mean={:.1f}, std={:1f}, min={}, max={}".format(np.mean(all_totals), np.std(all_totals), np.min(all_totals), np.max(all_totals)))
+print()
+```
+
+ States (+rewards): 0 (10) 0 (10) 0 1 (-50) 2 (40) 0 (10) 0 1 (-50) 2 (40) 0 (10) ... Total rewards = 230
+ States (+rewards): 0 (10) 0 (10) 0 (10) 0 1 (-50) 2 2 1 (-50) 2 (40) 0 (10) ... Total rewards = 90
+ States (+rewards): 0 1 (-50) 2 (40) 0 (10) 0 (10) 0 (10) 0 (10) 0 (10) 0 (10) 0 (10) ... Total rewards = 170
+ States (+rewards): 0 1 (-50) 2 (40) 0 (10) 0 (10) 0 (10) 0 (10) 0 (10) 0 (10) 0 (10) ... Total rewards = 220
+ States (+rewards): 0 1 (-50) 2 (40) 0 (10) 0 1 (-50) 2 (40) 0 (10) 0 (10) 0 (10) ... Total rewards = -50
+ Summary: mean=125.6, std=127.363464, min=-290, max=500
+
+
+
+# Learning to play MsPacman using Deep Q-Learning
+
+
+```python
+env = gym.make("MsPacman-v0")
+obs = env.reset()
+```
+
+ [2017-06-08 08:02:11,285] Making new env: MsPacman-v0
+
+
+
+```python
+obs.shape
+```
+
+
+
+
+ (210, 160, 3)
+
+
+
+
+```python
+env.action_space
+```
+
+
+
+
+ Discrete(9)
+
+
+
+## Preprocessing
+
+Preprocessing the images is optional but greatly speeds up training.
+
+
+```python
+mspacman_color = np.array([210, 164, 74]).mean()
+
+def preprocess_observation(obs):
+ img = obs[1:176:2, ::2] # crop and downsize
+ img = img.mean(axis=2) # to greyscale
+ img[img==mspacman_color] = 0 # Improve contrast
+    img = (img - 128) / 128 # normalize from -1. to 1.
+ return img.reshape(88, 80, 1)
+
+img = preprocess_observation(obs)
+```
+
+
+```python
+plt.figure(figsize=(11, 7))
+plt.subplot(121)
+plt.title("Original observation (160×210 RGB)")
+plt.imshow(obs)
+plt.axis("off")
+plt.subplot(122)
+plt.title("Preprocessed observation (88×80 greyscale)")
+plt.imshow(img.reshape(88, 80), interpolation="nearest", cmap="gray")
+plt.axis("off")
+save_fig("preprocessing_plot")
+plt.show()
+```
+
+ Saving figure preprocessing_plot
+
+
+## Build DQN
+
+Note: instead of using `tf.contrib.layers.convolution2d()` or `tf.contrib.layers.conv2d()` (as in the book), we now use `tf.layers.conv2d()`, which did not exist when this chapter was written. This is preferable because anything in contrib may change or be deleted without notice, while `tf.layers` is part of the official API. As you will see, the code is mostly the same, except that the parameter names have changed slightly:
+* the `num_outputs` parameter was renamed to `filters`,
+* the `stride` parameter was renamed to `strides`,
+* the `_fn` suffix was removed from parameter names that had it (e.g., `activation_fn` was renamed to `activation`),
+* the `weights_initializer` parameter was renamed to `kernel_initializer`,
+* the weights variable was renamed to `"kernel"` (instead of `"weights"`), and the biases variable was renamed from `"biases"` to `"bias"`,
+* and the default `activation` is now `None` instead of `tf.nn.relu`.
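+
+As a quick illustration of these renames, here is a hypothetical layer written both ways (`inputs` and `initializer` are placeholder names, not variables from the model below):
+
+```python
+# Old (tf.contrib.layers), as in the book:
+# conv = tf.contrib.layers.conv2d(inputs, num_outputs=32, kernel_size=(8, 8),
+#                                 stride=4, padding="SAME",
+#                                 activation_fn=tf.nn.relu,
+#                                 weights_initializer=initializer)
+
+# New (tf.layers); note that the activation must now be passed explicitly,
+# since the default changed from tf.nn.relu to None:
+conv = tf.layers.conv2d(inputs, filters=32, kernel_size=(8, 8),
+                        strides=4, padding="SAME",
+                        activation=tf.nn.relu,
+                        kernel_initializer=initializer)
+```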
+
+
+```python
+reset_graph()
+
+input_height = 88
+input_width = 80
+input_channels = 1
+conv_n_maps = [32, 64, 64]
+conv_kernel_sizes = [(8,8), (4,4), (3,3)]
+conv_strides = [4, 2, 1]
+conv_paddings = ["SAME"]*3
+conv_activation = [tf.nn.relu]*3
+n_hidden_inputs = 64 * 11 * 10 # conv3 has 64 maps of 11x10 each
+n_hidden = 512
+hidden_activation = tf.nn.relu
+n_outputs = env.action_space.n
+initializer = tf.contrib.layers.variance_scaling_initializer()
+
+learning_rate = 0.01
+
+def q_network(X_state, scope):
+ prev_layer = X_state
+ conv_layers = []
+ with tf.variable_scope(scope) as scope:
+ for n_maps, kernel_size, strides, padding, activation in zip(conv_n_maps, conv_kernel_sizes, conv_strides, conv_paddings, conv_activation):
+ prev_layer = tf.layers.conv2d(prev_layer, filters=n_maps, kernel_size=kernel_size, strides=strides, padding=padding, activation=activation, kernel_initializer=initializer)
+ conv_layers.append(prev_layer)
+ last_conv_layer_flat = tf.reshape(prev_layer, shape=[-1, n_hidden_inputs])
+ hidden = tf.layers.dense(last_conv_layer_flat, n_hidden, activation=hidden_activation, kernel_initializer=initializer)
+ outputs = tf.layers.dense(hidden, n_outputs)
+ trainable_vars = {var.name[len(scope.name):]: var for var in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=scope.name)}
+ return outputs, trainable_vars
+
+X_state = tf.placeholder(tf.float32, shape=[None, input_height, input_width, input_channels])
+actor_q_values, actor_vars = q_network(X_state, scope="q_networks/actor") # acts
+critic_q_values, critic_vars = q_network(X_state, scope="q_networks/critic") # learns
+
+copy_ops = [actor_var.assign(critic_vars[var_name])
+ for var_name, actor_var in actor_vars.items()]
+copy_critic_to_actor = tf.group(*copy_ops)
+
+with tf.variable_scope("train"):
+ X_action = tf.placeholder(tf.int32, shape=[None])
+ y = tf.placeholder(tf.float32, shape=[None, 1])
+ q_value = tf.reduce_sum(critic_q_values * tf.one_hot(X_action, n_outputs),
+ axis=1, keep_dims=True)
+ cost = tf.reduce_mean(tf.square(y - q_value))
+ global_step = tf.Variable(0, trainable=False, name='global_step')
+ optimizer = tf.train.AdamOptimizer(learning_rate)
+ training_op = optimizer.minimize(cost, global_step=global_step)
+
+init = tf.global_variables_initializer()
+saver = tf.train.Saver()
+```
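+
+A quick sanity check of `n_hidden_inputs` (not from the book): with `"SAME"` padding, each convolution divides the spatial dimensions by its stride, rounding up, so the 88×80 input becomes 22×20, then 11×10, then 11×10, and the flattened size is 64 × 11 × 10 = 7040:
+
+```python
+from math import ceil
+
+height, width = input_height, input_width   # 88, 80
+for stride in conv_strides:                 # [4, 2, 1], all "SAME" padding
+    height, width = ceil(height / stride), ceil(width / stride)
+print(height, width)                        # 11 10
+print(conv_n_maps[-1] * height * width)     # 7040, matches n_hidden_inputs
+```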
+
+
+```python
+actor_vars
+```
+
+
+
+
+    {'/conv2d/bias:0': <tf.Variable 'q_networks/actor/conv2d/bias:0' shape=(32,) dtype=float32_ref>,
+     '/conv2d/kernel:0': <tf.Variable 'q_networks/actor/conv2d/kernel:0' shape=(8, 8, 1, 32) dtype=float32_ref>,
+     '/conv2d_1/bias:0': <tf.Variable 'q_networks/actor/conv2d_1/bias:0' shape=(64,) dtype=float32_ref>,
+     '/conv2d_1/kernel:0': <tf.Variable 'q_networks/actor/conv2d_1/kernel:0' shape=(4, 4, 32, 64) dtype=float32_ref>,
+     '/conv2d_2/bias:0': <tf.Variable 'q_networks/actor/conv2d_2/bias:0' shape=(64,) dtype=float32_ref>,
+     '/conv2d_2/kernel:0': <tf.Variable 'q_networks/actor/conv2d_2/kernel:0' shape=(3, 3, 64, 64) dtype=float32_ref>,
+     '/dense/bias:0': <tf.Variable 'q_networks/actor/dense/bias:0' shape=(512,) dtype=float32_ref>,
+     '/dense/kernel:0': <tf.Variable 'q_networks/actor/dense/kernel:0' shape=(7040, 512) dtype=float32_ref>,
+     '/dense_1/bias:0': <tf.Variable 'q_networks/actor/dense_1/bias:0' shape=(9,) dtype=float32_ref>,
+     '/dense_1/kernel:0': <tf.Variable 'q_networks/actor/dense_1/kernel:0' shape=(512, 9) dtype=float32_ref>}
+
+
+
+
+```python
+from collections import deque
+
+replay_memory_size = 10000
+replay_memory = deque([], maxlen=replay_memory_size)
+
+def sample_memories(batch_size):
+ indices = rnd.permutation(len(replay_memory))[:batch_size]
+ cols = [[], [], [], [], []] # state, action, reward, next_state, continue
+ for idx in indices:
+ memory = replay_memory[idx]
+ for col, value in zip(cols, memory):
+ col.append(value)
+ cols = [np.array(col) for col in cols]
+ return cols[0], cols[1], cols[2].reshape(-1, 1), cols[3], cols[4].reshape(-1, 1)
+```
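+
+For example, `sample_memories(32)` returns five NumPy arrays: states, actions, rewards, next states and continues (1.0 while the game is running, 0.0 once it is over, as stored in the training loop below), with rewards and continues reshaped into column vectors so they broadcast correctly against the Q-Values when computing the training targets.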
+
+
+```python
+eps_min = 0.05
+eps_max = 1.0
+eps_decay_steps = 50000
+
+def epsilon_greedy(q_values, step):
+ epsilon = max(eps_min, eps_max - (eps_max-eps_min) * step/eps_decay_steps)
+ if rnd.rand() < epsilon:
+ return rnd.randint(n_outputs) # random action
+ else:
+ return np.argmax(q_values) # optimal action
+```
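+
+With these settings, ε decays linearly from 1.0 down to 0.05 over the first 50,000 training steps, then stays at 0.05. A quick check:
+
+```python
+for step in (0, 25000, 50000, 100000):
+    epsilon = max(eps_min, eps_max - (eps_max - eps_min) * step / eps_decay_steps)
+    print(step, epsilon)   # 0 -> 1.0, 25000 -> 0.525, 50000 and beyond -> 0.05
+```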
+
+
+```python
+n_steps = 100000 # total number of training steps
+training_start = 1000 # start training after 1,000 game iterations
+training_interval = 3 # run a training step every 3 game iterations
+save_steps = 50 # save the model every 50 training steps
+copy_steps = 25 # copy the critic to the actor every 25 training steps
+discount_rate = 0.95
+skip_start = 90 # Skip the start of every game (it's just waiting time).
+batch_size = 50
+iteration = 0 # game iterations
+checkpoint_path = "./my_dqn.ckpt"
+done = True # env needs to be reset
+
+with tf.Session() as sess:
+ if os.path.isfile(checkpoint_path):
+ saver.restore(sess, checkpoint_path)
+ else:
+ init.run()
+ while True:
+ step = global_step.eval()
+ if step >= n_steps:
+ break
+ iteration += 1
+ print("\rIteration {}\tTraining step {}/{} ({:.1f}%)".format(iteration, step, n_steps, step * 100 / n_steps), end="")
+ if done: # game over, start again
+ obs = env.reset()
+ for skip in range(skip_start): # skip boring game iterations at the start of each game
+ obs, reward, done, info = env.step(0)
+ state = preprocess_observation(obs)
+
+ # Actor evaluates what to do
+ q_values = actor_q_values.eval(feed_dict={X_state: [state]})
+ action = epsilon_greedy(q_values, step)
+
+ # Actor plays
+ obs, reward, done, info = env.step(action)
+ next_state = preprocess_observation(obs)
+
+ # Let's memorize what happened
+ replay_memory.append((state, action, reward, next_state, 1.0 - done))
+ state = next_state
+
+ if iteration < training_start or iteration % training_interval != 0:
+ continue
+
+ # Critic learns
+ X_state_val, X_action_val, rewards, X_next_state_val, continues = sample_memories(batch_size)
+ next_q_values = actor_q_values.eval(feed_dict={X_state: X_next_state_val})
+ y_val = rewards + continues * discount_rate * np.max(next_q_values, axis=1, keepdims=True)
+ training_op.run(feed_dict={X_state: X_state_val, X_action: X_action_val, y: y_val})
+
+ # Regularly copy critic to actor
+ if step % copy_steps == 0:
+ copy_critic_to_actor.run()
+
+ # And save regularly
+ if step % save_steps == 0:
+ saver.save(sess, checkpoint_path)
+```
+
+ Iteration 300999 Training step 99999/100000 (100.0%)
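+
+Below is a minimal sketch (not from the book) of how the trained network could be evaluated: restore the checkpoint saved above and play one game greedily, with no exploration.
+
+```python
+with tf.Session() as sess:
+    saver.restore(sess, checkpoint_path)
+    obs = env.reset()
+    done = False
+    total_reward = 0
+    while not done:
+        state = preprocess_observation(obs)
+        # Greedy action: always pick the highest estimated Q-Value
+        q_values = actor_q_values.eval(feed_dict={X_state: [state]})
+        obs, reward, done, info = env.step(np.argmax(q_values))
+        total_reward += reward
+    print("Total reward:", total_reward)
+```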
+
+# Exercise solutions
+
+Coming soon...
diff --git a/Hands_on_ML_TF/LICENSE b/Hands_on_ML_TF/LICENSE
new file mode 100644
index 000000000..4909afd04
--- /dev/null
+++ b/Hands_on_ML_TF/LICENSE
@@ -0,0 +1,178 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+
diff --git a/Hands_on_ML_TF/README.md b/Hands_on_ML_TF/README.md
new file mode 100644
index 000000000..bbec5aa13
--- /dev/null
+++ b/Hands_on_ML_TF/README.md
@@ -0,0 +1,107 @@
+Machine Learning Notebooks
+==========================
+
+This project aims at teaching you the fundamentals of Machine Learning in
+Python. It contains the example code and solutions to the exercises in my O'Reilly book [Hands-on Machine Learning with Scikit-Learn and TensorFlow](http://shop.oreilly.com/product/0636920052289.do):
+
+[](http://shop.oreilly.com/product/0636920052289.do)
+
+Simply open the [Jupyter](http://jupyter.org/) notebooks you are interested in:
+
+* Using [jupyter.org's notebook viewer](http://nbviewer.jupyter.org/github/ageron/handson-ml/blob/master/index.ipynb)
+ * note: [github.com's notebook viewer](https://github.com/ageron/handson-ml/blob/master/index.ipynb) also works but it is slower and the math formulas are not displayed correctly,
+* or by cloning this repository and running Jupyter locally. This option lets you play around with the code. In this case, follow the installation instructions below.
+
+# Installation
+
+First, you will need to install [git](https://git-scm.com/), if you don't have it already.
+
+Next, clone this repository by opening a terminal and typing the following commands:
+
+ $ cd $HOME # or any other development directory you prefer
+ $ git clone https://github.com/ageron/handson-ml.git
+ $ cd handson-ml
+
+If you want to go through chapter 16 on Reinforcement Learning, you will need to [install OpenAI gym](https://gym.openai.com/docs) and its dependencies for Atari simulations.
+
+If you are familiar with Python and you know how to install Python libraries, go ahead and install the libraries listed in `requirements.txt` and jump to the [Starting Jupyter](#starting-jupyter) section. If you need detailed instructions, please read on.
+
+## Python & Required Libraries
+Of course, you need Python. Python 2 is already preinstalled on most systems nowadays, and sometimes even Python 3. You can check which version(s) you have by typing the following commands:
+
+ $ python --version # for Python 2
+ $ python3 --version # for Python 3
+
+Any Python 3 version should be fine, preferably ≥3.5. If you don't have Python 3, I recommend installing it (Python ≥2.6 should also work, but since it is deprecated, Python 3 is preferable). To do so, you have several options: on Windows or MacOSX, you can just download it from [python.org](https://www.python.org/downloads/). On MacOSX, you can alternatively use [MacPorts](https://www.macports.org/) or [Homebrew](https://brew.sh/). On Linux, unless you know what you are doing, you should use your system's packaging system. For example, on Debian or Ubuntu, type:
+
+ $ sudo apt-get update
+ $ sudo apt-get install python3
+
+Another option is to download and install [Anaconda](https://www.continuum.io/downloads). This is a package that includes both Python and many scientific libraries. You should prefer the Python 3 version.
+
+If you choose to use Anaconda, read the next section, or else jump to the [Using pip](#using-pip) section.
+
+## Using Anaconda
+When using Anaconda, you can optionally create an isolated Python environment dedicated to this project. This is recommended as it makes it possible to have a different environment for each project (e.g. one for this project), with potentially different libraries and library versions:
+
+ $ conda create -n mlbook python=3.5 anaconda
+ $ source activate mlbook
+
+This creates a fresh Python 3.5 environment called `mlbook` (you can change the name if you want to), and it activates it. This environment contains all the scientific libraries that come with Anaconda. This includes all the libraries we will need (NumPy, Matplotlib, Pandas, Jupyter and a few others), except for TensorFlow, so let's install it:
+
+ $ conda install -n mlbook -c conda-forge tensorflow=1.0.0
+
+This installs TensorFlow 1.0.0 in the `mlbook` environment (fetching it from the `conda-forge` repository). If you chose not to create an `mlbook` environment, then just remove the `-n mlbook` option.
+
+Next, you can optionally install Jupyter extensions. These are useful to have nice tables of contents in the notebooks, but they are not required.
+
+ $ conda install -n mlbook -c conda-forge jupyter_contrib_nbextensions
+
+You are all set! Next, jump to the [Starting Jupyter](#starting-jupyter) section.
+
+## Using pip
+If you are not using Anaconda, you need to install several scientific Python libraries that are necessary for this project, in particular NumPy, Matplotlib, Pandas, Jupyter and TensorFlow (and a few others). For this, you can either use Python's integrated packaging system, pip, or you may prefer to use your system's own packaging system (if available, e.g. on Linux, or on MacOSX when using MacPorts or Homebrew). The advantage of using pip is that it is easy to create multiple isolated Python environments with different libraries and different library versions (e.g. one environment for each project). The advantage of using your system's packaging system is that there is less risk of having conflicts between your Python libraries and your system's other packages. Since I have many projects with different library requirements, I prefer to use pip with isolated environments.
+
+These are the commands you need to type in a terminal if you want to use pip to install the required libraries. Note: in all the following commands, if you chose to use Python 2 rather than Python 3, you must replace `pip3` with `pip`, and `python3` with `python`.
+
+First you need to make sure you have the latest version of pip installed:
+
+ $ pip3 install --user --upgrade pip
+
+The `--user` option will install the latest version of pip only for the current user. If you prefer to install it system wide (i.e. for all users), you must have administrator rights (e.g. use `sudo pip3` instead of `pip3` on Linux), and you should remove the `--user` option. The same is true of the command below that uses the `--user` option.
+
+Next, you can optionally create an isolated environment. This is recommended as it makes it possible to have a different environment for each project (e.g. one for this project), with potentially very different libraries, and different versions:
+
+ $ pip3 install --user --upgrade virtualenv
+ $ virtualenv -p `which python3` env
+
+This creates a new directory called `env` in the current directory, containing an isolated Python environment based on Python 3. If you installed multiple versions of Python 3 on your system, you can replace `` `which python3` `` with the path to the Python executable you prefer to use.
+
+Now you must activate this environment. You will need to run this command every time you want to use this environment.
+
+ $ source ./env/bin/activate
+
+Next, use pip to install the required python packages. If you are not using virtualenv, you should add the `--user` option (alternatively you could install the libraries system-wide, but this will probably require administrator rights, e.g. using `sudo pip3` instead of `pip3` on Linux).
+
+ $ pip3 install --upgrade -r requirements.txt
+
+Great! You're all set, you just need to start Jupyter now.
+
+## Starting Jupyter
+If you want to use the Jupyter extensions (optional, they are mainly useful to have nice tables of contents), you first need to install them:
+
+ $ jupyter contrib nbextension install --user
+
+Then you can activate an extension, such as the Table of Contents (2) extension:
+
+ $ jupyter nbextension enable toc2/main
+
+Okay! You can now start Jupyter, simply type:
+
+ $ jupyter notebook
+
+This should open up your browser, and you should see Jupyter's tree view, with the contents of the current directory. If your browser does not open automatically, visit [localhost:8888](http://localhost:8888/tree). Click on `index.ipynb` to get started!
+
+Note: you can also visit [http://localhost:8888/nbextensions](http://localhost:8888/nbextensions) to activate and configure Jupyter extensions.
+
+Congrats! You are ready to learn Machine Learning, hands on!
diff --git a/Hands_on_ML_TF/requirements.txt b/Hands_on_ML_TF/requirements.txt
new file mode 100644
index 000000000..6946d3bdf
--- /dev/null
+++ b/Hands_on_ML_TF/requirements.txt
@@ -0,0 +1,44 @@
+# First make sure to update pip:
+# $ sudo pip install --upgrade pip
+#
+# Then you probably want to work in a virtualenv (optional):
+# $ sudo pip install --upgrade virtualenv
+# Or if you prefer you can install virtualenv using your favorite packaging system. E.g., in Ubuntu:
+# $ sudo apt-get update && sudo apt-get install virtualenv
+# Then:
+# $ cd $my_work_dir
+# $ virtualenv my_env
+# $ . my_env/bin/activate
+#
+# Next, optionally uncomment the OpenAI gym lines (see below). If you do, make sure to install the dependencies first.
+#
+# Then install these requirements:
+# $ pip install --upgrade -r requirements.txt
+#
+# Finally, start jupyter:
+# $ jupyter notebook
+#
+
+jupyter==1.0.0
+matplotlib==1.5.3
+numexpr==2.6.1
+numpy==1.12.0
+pandas==0.19.1
+Pillow==3.4.2
+protobuf==3.0.0
+psutil==5.0.0
+scikit-learn==0.18.1
+scipy==0.18.1
+sympy==1.0
+tensorflow==1.0.0
+
+# Optional: OpenAI gym is only needed for the Reinforcement Learning chapter.
+# There are a few dependencies you need to install first, check out:
+# https://github.com/openai/gym#installing-everything
+#gym[all]==0.5.4
+# If you only want to install the Atari dependency, uncomment this line instead:
+#gym[atari]==0.5.4
+
+# Optional: these are useful Jupyter extensions, in particular to display
+# the table of contents.
+https://github.com/ipython-contrib/jupyter_contrib_nbextensions/tarball/master
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/.gitattributes b/TensorFlow-Machine-Learning-Cookbook-master/.gitattributes
new file mode 100644
index 000000000..bdb0cabc8
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/.gitattributes
@@ -0,0 +1,17 @@
+# Auto detect text files and perform LF normalization
+* text=auto
+
+# Custom for Visual Studio
+*.cs diff=csharp
+
+# Standard to msysgit
+*.doc diff=astextplain
+*.DOC diff=astextplain
+*.docx diff=astextplain
+*.DOCX diff=astextplain
+*.dot diff=astextplain
+*.DOT diff=astextplain
+*.pdf diff=astextplain
+*.PDF diff=astextplain
+*.rtf diff=astextplain
+*.RTF diff=astextplain
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/.gitignore b/TensorFlow-Machine-Learning-Cookbook-master/.gitignore
new file mode 100644
index 000000000..cd2946ad7
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/.gitignore
@@ -0,0 +1,47 @@
+# Windows image file caches
+Thumbs.db
+ehthumbs.db
+
+# Folder config file
+Desktop.ini
+
+# Recycle Bin used on file shares
+$RECYCLE.BIN/
+
+# Windows Installer files
+*.cab
+*.msi
+*.msm
+*.msp
+
+# Windows shortcuts
+*.lnk
+
+# =========================
+# Operating System Files
+# =========================
+
+# OSX
+# =========================
+
+.DS_Store
+.AppleDouble
+.LSOverride
+
+# Thumbnails
+._*
+
+# Files that might appear in the root of a volume
+.DocumentRevisions-V100
+.fseventsd
+.Spotlight-V100
+.TemporaryItems
+.Trashes
+.VolumeIcon.icns
+
+# Directories potentially created on remote AFP share
+.AppleDB
+.AppleDesktop
+Network Trash Folder
+Temporary Items
+.apdisk
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 01/activation_functions.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 01/activation_functions.py
new file mode 100644
index 000000000..ea6faca7e
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 01/activation_functions.py
@@ -0,0 +1,63 @@
+# Activation Functions
+#----------------------------------
+#
+# This function introduces activation
+# functions in Tensorflow
+
+# Implementing Activation Functions
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Open graph session
+sess = tf.Session()
+
+# X range
+x_vals = np.linspace(start=-10., stop=10., num=100)
+
+# ReLU activation
+print(sess.run(tf.nn.relu([-3., 3., 10.])))
+y_relu = sess.run(tf.nn.relu(x_vals))
+
+# ReLU-6 activation
+print(sess.run(tf.nn.relu6([-3., 3., 10.])))
+y_relu6 = sess.run(tf.nn.relu6(x_vals))
+
+# Sigmoid activation
+print(sess.run(tf.nn.sigmoid([-1., 0., 1.])))
+y_sigmoid = sess.run(tf.nn.sigmoid(x_vals))
+
+# Hyperbolic tangent activation
+print(sess.run(tf.nn.tanh([-1., 0., 1.])))
+y_tanh = sess.run(tf.nn.tanh(x_vals))
+
+# Softsign activation
+print(sess.run(tf.nn.softsign([-1., 0., 1.])))
+y_softsign = sess.run(tf.nn.softsign(x_vals))
+
+# Softplus activation
+print(sess.run(tf.nn.softplus([-1., 0., 1.])))
+y_softplus = sess.run(tf.nn.softplus(x_vals))
+
+# Exponential linear activation
+print(sess.run(tf.nn.elu([-1., 0., 1.])))
+y_elu = sess.run(tf.nn.elu(x_vals))
+
+# Plot the different functions
+plt.plot(x_vals, y_softplus, 'r--', label='Softplus', linewidth=2)
+plt.plot(x_vals, y_relu, 'b:', label='ReLU', linewidth=2)
+plt.plot(x_vals, y_relu6, 'g-.', label='ReLU6', linewidth=2)
+plt.plot(x_vals, y_elu, 'k-', label='ExpLU', linewidth=0.5)
+plt.ylim([-1.5,7])
+plt.legend(loc='upper left')
+plt.show()
+
+plt.plot(x_vals, y_sigmoid, 'r--', label='Sigmoid', linewidth=2)
+plt.plot(x_vals, y_tanh, 'b:', label='Tanh', linewidth=2)
+plt.plot(x_vals, y_softsign, 'g-.', label='Softsign', linewidth=2)
+plt.ylim([-2,2])
+plt.legend(loc='upper left')
+plt.show()
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 01/data_gathering.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 01/data_gathering.py
new file mode 100644
index 000000000..c5a7bb879
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 01/data_gathering.py
@@ -0,0 +1,143 @@
+# Data gathering
+#----------------------------------
+#
+# This function gives us the ways to access
+# the various data sets we will need
+
+# Data Gathering
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+
+# Iris Data
+from sklearn import datasets
+
+iris = datasets.load_iris()
+print(len(iris.data))
+print(len(iris.target))
+print(iris.data[0])
+print(set(iris.target))
+
+# Low Birthrate Data
+import requests
+
+birthdata_url = 'https://www.umass.edu/statdata/statdata/data/lowbwt.dat'
+birth_file = requests.get(birthdata_url)
+birth_data = birth_file.text.split('\r\n')[5:]
+birth_header = [x for x in birth_data[0].split(' ') if len(x)>=1]
+birth_data = [[float(x) for x in y.split(' ') if len(x)>=1] for y in birth_data[1:] if len(y)>=1]
+print(len(birth_data))
+print(len(birth_data[0]))
+
+
+# Housing Price Data
+import requests
+
+housing_url = 'https://archive.ics.uci.edu/ml/machine-learning-databases/housing/housing.data'
+housing_header = ['CRIM', 'ZN', 'INDUS', 'CHAS', 'NOX', 'RM', 'AGE', 'DIS', 'RAD', 'TAX', 'PTRATIO', 'B', 'LSTAT', 'MEDV']
+housing_file = requests.get(housing_url)
+housing_data = [[float(x) for x in y.split(' ') if len(x)>=1] for y in housing_file.text.split('\n') if len(y)>=1]
+print(len(housing_data))
+print(len(housing_data[0]))
+
+
+# MNIST Handwriting Data
+from tensorflow.examples.tutorials.mnist import input_data
+
+mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
+print(len(mnist.train.images))
+print(len(mnist.test.images))
+print(len(mnist.validation.images))
+print(mnist.train.labels[1,:])
+
+
+# Ham/Spam Text Data
+import requests
+import io
+from zipfile import ZipFile
+
+# Get/read zip file
+zip_url = 'http://archive.ics.uci.edu/ml/machine-learning-databases/00228/smsspamcollection.zip'
+r = requests.get(zip_url)
+z = ZipFile(io.BytesIO(r.content))
+file = z.read('SMSSpamCollection')
+# Format Data
+text_data = file.decode()
+text_data = text_data.encode('ascii',errors='ignore')
+text_data = text_data.decode().split('\n')
+text_data = [x.split('\t') for x in text_data if len(x)>=1]
+[text_data_target, text_data_train] = [list(x) for x in zip(*text_data)]
+print(len(text_data_train))
+print(set(text_data_target))
+print(text_data_train[1])
+
+
+# Movie Review Data
+import requests
+import io
+import tarfile
+
+movie_data_url = 'http://www.cs.cornell.edu/people/pabo/movie-review-data/rt-polaritydata.tar.gz'
+r = requests.get(movie_data_url)
+# Stream data into temp object
+stream_data = io.BytesIO(r.content)
+tmp = io.BytesIO()
+while True:
+ s = stream_data.read(16384)
+ if not s:
+ break
+ tmp.write(s)
+stream_data.close()
+tmp.seek(0)
+# Extract tar file
+tar_file = tarfile.open(fileobj=tmp, mode="r:gz")
+pos = tar_file.extractfile('rt-polaritydata/rt-polarity.pos')
+neg = tar_file.extractfile('rt-polaritydata/rt-polarity.neg')
+# Save pos/neg reviews
+pos_data = []
+for line in pos:
+ pos_data.append(line.decode('ISO-8859-1').encode('ascii',errors='ignore').decode())
+neg_data = []
+for line in neg:
+ neg_data.append(line.decode('ISO-8859-1').encode('ascii',errors='ignore').decode())
+tar_file.close()
+
+print(len(pos_data))
+print(len(neg_data))
+print(neg_data[0])
+
+
+# The Works of Shakespeare Data
+import requests
+
+shakespeare_url = 'http://www.gutenberg.org/cache/epub/100/pg100.txt'
+# Get Shakespeare text
+response = requests.get(shakespeare_url)
+shakespeare_file = response.content
+# Decode binary into string
+shakespeare_text = shakespeare_file.decode('utf-8')
+# Drop first few descriptive paragraphs.
+shakespeare_text = shakespeare_text[7675:]
+print(len(shakespeare_text))
+
+
+# English-German Sentence Translation Data
+import requests
+import io
+from zipfile import ZipFile
+sentence_url = 'http://www.manythings.org/anki/deu-eng.zip'
+r = requests.get(sentence_url)
+z = ZipFile(io.BytesIO(r.content))
+file = z.read('deu.txt')
+# Format Data
+eng_ger_data = file.decode()
+eng_ger_data = eng_ger_data.encode('ascii',errors='ignore')
+eng_ger_data = eng_ger_data.decode().split('\n')
+eng_ger_data = [x.split('\t') for x in eng_ger_data if len(x)>=1]
+[english_sentence, german_sentence] = [list(x) for x in zip(*eng_ger_data)]
+print(len(english_sentence))
+print(len(german_sentence))
+print(eng_ger_data[10])
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 01/matrices.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 01/matrices.py
new file mode 100644
index 000000000..29d09ea27
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 01/matrices.py
@@ -0,0 +1,58 @@
+# Matrices and Matrix Operations
+#----------------------------------
+#
+# This function introduces various ways to create
+# matrices and how to use them in Tensorflow
+
+import numpy as np
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Declaring matrices
+sess = tf.Session()
+
+# Declaring matrices
+
+# Identity matrix
+identity_matrix = tf.diag([1.0,1.0,1.0])
+print(sess.run(identity_matrix))
+
+# 2x3 random norm matrix
+A = tf.truncated_normal([2,3])
+print(sess.run(A))
+
+# 2x3 constant matrix
+B = tf.fill([2,3], 5.0)
+print(sess.run(B))
+
+# 3x2 random uniform matrix
+C = tf.random_uniform([3,2])
+print(sess.run(C))
+print(sess.run(C)) # Note that we are reinitializing, hence the new random variables
+
+# Create matrix from np array
+D = tf.convert_to_tensor(np.array([[1., 2., 3.], [-3., -7., -1.], [0., 5., -2.]]))
+print(sess.run(D))
+
+# Matrix addition/subtraction
+print(sess.run(A+B))
+print(sess.run(B-B))
+
+# Matrix Multiplication
+print(sess.run(tf.matmul(B, identity_matrix)))
+
+# Matrix Transpose
+print(sess.run(tf.transpose(C))) # Again, new random variables
+
+# Matrix Determinant
+print(sess.run(tf.matrix_determinant(D)))
+
+# Matrix Inverse
+print(sess.run(tf.matrix_inverse(D)))
+
+# Cholesky Decomposition
+print(sess.run(tf.cholesky(identity_matrix)))
+
+# Eigenvalues and Eigenvectors
+print(sess.run(tf.self_adjoint_eig(D)))
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 01/operations.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 01/operations.py
new file mode 100644
index 000000000..bc973c2b0
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 01/operations.py
@@ -0,0 +1,49 @@
+# Operations
+#----------------------------------
+#
+# This function introduces various operations
+# in Tensorflow
+
+# Declaring Operations
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Open graph session
+sess = tf.Session()
+
+# div() vs truediv() vs floordiv()
+print(sess.run(tf.div(3,4)))
+print(sess.run(tf.truediv(3,4)))
+print(sess.run(tf.floordiv(3.0,4.0)))
+
+# Mod function
+print(sess.run(tf.mod(22.0,5.0)))
+
+# Cross Product
+print(sess.run(tf.cross([1.,0.,0.],[0.,1.,0.])))
+
+# Trig functions
+print(sess.run(tf.sin(3.1416)))
+print(sess.run(tf.cos(3.1416)))
+# Tangent
+print(sess.run(tf.div(tf.sin(3.1416/4.), tf.cos(3.1416/4.))))
+
+# Custom operation
+test_nums = range(15)
+#from tensorflow.python.ops import math_ops
+#print(sess.run(tf.equal(test_num, 3)))
+def custom_polynomial(x_val):
+ # Return 3x^2 - x + 10
+ return(tf.sub(3 * tf.square(x_val), x_val) + 10)
+
+print(sess.run(custom_polynomial(11)))
+# What should we get with list comprehension
+expected_output = [3*x*x-x+10 for x in test_nums]
+print(expected_output)
+
+# Tensorflow custom function output
+for num in test_nums:
+ print(sess.run(custom_polynomial(num)))
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 01/placeholders.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 01/placeholders.py
new file mode 100644
index 000000000..eb51882cc
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 01/placeholders.py
@@ -0,0 +1,23 @@
+# Placeholders
+#----------------------------------
+#
+# This function introduces how to
+# use placeholders in Tensorflow
+
+import numpy as np
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Using Placeholders
+sess = tf.Session()
+
+x = tf.placeholder(tf.float32, shape=(4, 4))
+y = tf.identity(x)
+
+rand_array = np.random.rand(4, 4)
+
+merged = tf.merge_all_summaries()
+writer = tf.train.SummaryWriter("/tmp/variable_logs", sess.graph_def)
+
+print(sess.run(y, feed_dict={x: rand_array}))
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 01/tensors.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 01/tensors.py
new file mode 100644
index 000000000..a10ffe79b
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 01/tensors.py
@@ -0,0 +1,64 @@
+# Tensors
+
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Introduce tensors in tf
+
+# Get graph handle
+sess = tf.Session()
+
+my_tensor = tf.zeros([1,20])
+
+# Declare a variable
+my_var = tf.Variable(tf.zeros([1,20]))
+
+# Different kinds of variables
+row_dim = 2
+col_dim = 3
+
+# Zero initialized variable
+zero_var = tf.Variable(tf.zeros([row_dim, col_dim]))
+
+# One initialized variable
+ones_var = tf.Variable(tf.ones([row_dim, col_dim]))
+
+# Variables shaped like another variable
+sess.run(zero_var.initializer)
+sess.run(ones_var.initializer)
+zero_similar = tf.Variable(tf.zeros_like(zero_var))
+ones_similar = tf.Variable(tf.ones_like(ones_var))
+
+sess.run(ones_similar.initializer)
+sess.run(zero_similar.initializer)
+
+# Fill shape with a constant
+fill_var = tf.Variable(tf.fill([row_dim, col_dim], -1))
+
+# Create a variable from a constant
+const_var = tf.Variable(tf.constant([8, 6, 7, 5, 3, 0, 9]))
+# This can also be used to fill an array:
+const_fill_var = tf.Variable(tf.constant(-1, shape=[row_dim, col_dim]))
+
+# Sequence generation
+linear_var = tf.Variable(tf.linspace(start=0.0, stop=1.0, num=3)) # Generates [0.0, 0.5, 1.0] includes the end
+
+sequence_var = tf.Variable(tf.range(start=6, limit=15, delta=3)) # Generates [6, 9, 12] doesn't include the end
+
+# Random Numbers
+
+# Random Normal
+#rnorm_var = tf.random_normal([row_dim, col_dim], mean=0.0, stddev=1.0)
+
+# Initialize operation
+initialize_op = tf.initialize_all_variables()
+
+# Add summaries to tensorboard
+#merged = tf.merge_all_summaries()
+
+# Initialize graph writer:
+#writer = tf.train.SummaryWriter("/tmp/variable_logs", sess.graph_def)
+
+# Run initialization of variable
+sess.run(initialize_op)
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/back_propagation.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/back_propagation.py
new file mode 100644
index 000000000..6f757e5fe
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/back_propagation.py
@@ -0,0 +1,120 @@
+# Back Propagation
+#----------------------------------
+#
+# This python function shows how to implement back propagation
+# in regression and classification models.
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Regression Example:
+# We will create sample data as follows:
+# x-data: 100 random samples from a normal ~ N(1, 0.1)
+# target: 100 values of the value 10.
+# We will fit the model:
+# x-data * A = target
+# Theoretically, A = 10.
+
+# Create data
+x_vals = np.random.normal(1, 0.1, 100)
+y_vals = np.repeat(10., 100)
+x_data = tf.placeholder(shape=[1], dtype=tf.float32)
+y_target = tf.placeholder(shape=[1], dtype=tf.float32)
+
+# Create variable (one model parameter = A)
+A = tf.Variable(tf.random_normal(shape=[1]))
+
+# Add operation to graph
+my_output = tf.mul(x_data, A)
+
+# Add L2 loss operation to graph
+loss = tf.square(my_output - y_target)
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Create Optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.02)
+train_step = my_opt.minimize(loss)
+
+# Run Loop
+for i in range(100):
+ rand_index = np.random.choice(100)
+ rand_x = [x_vals[rand_index]]
+ rand_y = [y_vals[rand_index]]
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+ if (i+1)%25==0:
+ print('Step #' + str(i+1) + ' A = ' + str(sess.run(A)))
+ print('Loss = ' + str(sess.run(loss, feed_dict={x_data: rand_x, y_target: rand_y})))
+
+# Classification Example
+# We will create sample data as follows:
+# x-data: sample 50 random values from a normal = N(-1, 1)
+#         + sample 50 random values from a normal = N(3, 1)
+# target: 50 values of 0 + 50 values of 1.
+# These are essentially 100 values of the corresponding output index
+# We will fit the binary classification model:
+# If sigmoid(x+A) < 0.5 -> 0 else 1
+# Theoretically, A should be -(mean1 + mean2)/2 = -(-1 + 3)/2 = -1
+
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Create data
+x_vals = np.concatenate((np.random.normal(-1, 1, 50), np.random.normal(3, 1, 50)))
+y_vals = np.concatenate((np.repeat(0., 50), np.repeat(1., 50)))
+x_data = tf.placeholder(shape=[1], dtype=tf.float32)
+y_target = tf.placeholder(shape=[1], dtype=tf.float32)
+
+# Create variable (one model parameter = A)
+A = tf.Variable(tf.random_normal(mean=10, shape=[1]))
+
+# Add operation to graph
+# Want to create the operation sigmoid(x + A)
+# Note, the sigmoid() part is in the loss function
+my_output = tf.add(x_data, A)
+
+# Now we have to add another dimension to each (batch size of 1)
+my_output_expanded = tf.expand_dims(my_output, 0)
+y_target_expanded = tf.expand_dims(y_target, 0)
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Add classification loss (cross entropy)
+xentropy = tf.nn.sigmoid_cross_entropy_with_logits(my_output_expanded, y_target_expanded)
+
+# Create Optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.05)
+train_step = my_opt.minimize(xentropy)
+
+# Run loop
+for i in range(1400):
+ rand_index = np.random.choice(100)
+ rand_x = [x_vals[rand_index]]
+ rand_y = [y_vals[rand_index]]
+
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+ if (i+1)%200==0:
+ print('Step #' + str(i+1) + ' A = ' + str(sess.run(A)))
+ print('Loss = ' + str(sess.run(xentropy, feed_dict={x_data: rand_x, y_target: rand_y})))
+
+# Evaluate Predictions
+predictions = []
+for i in range(len(x_vals)):
+ x_val = [x_vals[i]]
+ prediction = sess.run(tf.round(tf.sigmoid(my_output)), feed_dict={x_data: x_val})
+ predictions.append(prediction[0])
+
+accuracy = sum(x==y for x,y in zip(predictions, y_vals))/100.
+print('Ending Accuracy = ' + str(np.round(accuracy, 2)))
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/batch_stochastic_training.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/batch_stochastic_training.py
new file mode 100644
index 000000000..1dc4fe405
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/batch_stochastic_training.py
@@ -0,0 +1,104 @@
+# Batch and Stochastic Training
+#----------------------------------
+#
+# This python function illustrates two different training methods:
+# batch and stochastic training. For each model, we will use
+# a regression model that predicts one model variable.
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# We will implement a regression example in stochastic and batch training
+
+# Stochastic Training:
+# Create graph
+sess = tf.Session()
+
+# Create data
+x_vals = np.random.normal(1, 0.1, 100)
+y_vals = np.repeat(10., 100)
+x_data = tf.placeholder(shape=[1], dtype=tf.float32)
+y_target = tf.placeholder(shape=[1], dtype=tf.float32)
+
+# Create variable (one model parameter = A)
+A = tf.Variable(tf.random_normal(shape=[1]))
+
+# Add operation to graph
+my_output = tf.mul(x_data, A)
+
+# Add L2 loss operation to graph
+loss = tf.square(my_output - y_target)
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Create Optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.02)
+train_step = my_opt.minimize(loss)
+
+loss_stochastic = []
+# Run Loop
+for i in range(100):
+ rand_index = np.random.choice(100)
+ rand_x = [x_vals[rand_index]]
+ rand_y = [y_vals[rand_index]]
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+ if (i+1)%5==0:
+ print('Step #' + str(i+1) + ' A = ' + str(sess.run(A)))
+ temp_loss = sess.run(loss, feed_dict={x_data: rand_x, y_target: rand_y})
+ print('Loss = ' + str(temp_loss))
+ loss_stochastic.append(temp_loss)
+
+
+# Batch Training:
+# Re-initialize graph
+ops.reset_default_graph()
+sess = tf.Session()
+
+# Declare batch size
+batch_size = 20
+
+# Create data
+x_vals = np.random.normal(1, 0.1, 100)
+y_vals = np.repeat(10., 100)
+x_data = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+y_target = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+
+# Create variable (one model parameter = A)
+A = tf.Variable(tf.random_normal(shape=[1,1]))
+
+# Add operation to graph
+my_output = tf.matmul(x_data, A)
+
+# Add L2 loss operation to graph
+loss = tf.reduce_mean(tf.square(my_output - y_target))
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Create Optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.02)
+train_step = my_opt.minimize(loss)
+
+loss_batch = []
+# Run Loop
+for i in range(100):
+ rand_index = np.random.choice(100, size=batch_size)
+ rand_x = np.transpose([x_vals[rand_index]])
+ rand_y = np.transpose([y_vals[rand_index]])
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+ if (i+1)%5==0:
+ print('Step #' + str(i+1) + ' A = ' + str(sess.run(A)))
+ temp_loss = sess.run(loss, feed_dict={x_data: rand_x, y_target: rand_y})
+ print('Loss = ' + str(temp_loss))
+ loss_batch.append(temp_loss)
+
+plt.plot(range(0, 100, 5), loss_stochastic, 'b-', label='Stochastic Loss')
+plt.plot(range(0, 100, 5), loss_batch, 'r--', label='Batch Loss, size=20')
+plt.legend(loc='upper right', prop={'size': 11})
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/combining_everything_together.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/combining_everything_together.py
new file mode 100644
index 000000000..dc923cd96
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/combining_everything_together.py
@@ -0,0 +1,100 @@
+# Combining Everything Together
+#----------------------------------
+# This file will perform binary classification on the
+# class if iris dataset. We will only predict if a flower is
+# I.setosa or not.
+#
+# We will create a simple binary classifier by creating a line
+# and running everything through a sigmoid to get a binary predictor.
+# The two features we will use are petal length and petal width.
+#
+# We will use batch training, but this can be easily
+# adapted to stochastic training.
+
+import matplotlib.pyplot as plt
+import numpy as np
+from sklearn import datasets
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Load the iris data
+# iris.target = {0, 1, 2}, where '0' is setosa
+# iris.data ~ [sepal.length, sepal.width, petal.length, petal.width]
+iris = datasets.load_iris()
+binary_target = np.array([1. if x==0 else 0. for x in iris.target])
+iris_2d = np.array([[x[2], x[3]] for x in iris.data])
+
+# Declare batch size
+batch_size = 20
+
+# Create graph
+sess = tf.Session()
+
+# Declare placeholders
+x1_data = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+x2_data = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+y_target = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+
+# Create variables A and b (0 = x1 - A*x2 + b)
+A = tf.Variable(tf.random_normal(shape=[1, 1]))
+b = tf.Variable(tf.random_normal(shape=[1, 1]))
+
+# Add model to graph:
+# x1 - A*x2 + b
+my_mult = tf.matmul(x2_data, A)
+my_add = tf.add(my_mult, b)
+my_output = tf.sub(x1_data, my_add)
+#my_output = tf.sub(x_data[0], tf.add(tf.matmul(x_data[1], A), b))
+
+# Add classification loss (cross entropy)
+xentropy = tf.nn.sigmoid_cross_entropy_with_logits(my_output, y_target)
+
+# Create Optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.05)
+train_step = my_opt.minimize(xentropy)
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Run Loop
+for i in range(1000):
+ rand_index = np.random.choice(len(iris_2d), size=batch_size)
+ #rand_x = np.transpose([iris_2d[rand_index]])
+ rand_x = iris_2d[rand_index]
+ rand_x1 = np.array([[x[0]] for x in rand_x])
+ rand_x2 = np.array([[x[1]] for x in rand_x])
+ #rand_y = np.transpose([binary_target[rand_index]])
+ rand_y = np.array([[y] for y in binary_target[rand_index]])
+ sess.run(train_step, feed_dict={x1_data: rand_x1, x2_data: rand_x2, y_target: rand_y})
+ if (i+1)%200==0:
+ print('Step #' + str(i+1) + ' A = ' + str(sess.run(A)) + ', b = ' + str(sess.run(b)))
+
+
+# Visualize Results
+# Pull out slope/intercept
+[[slope]] = sess.run(A)
+[[intercept]] = sess.run(b)
+
+# Create fitted line
+x = np.linspace(0, 3, num=50)
+ablineValues = []
+for i in x:
+ ablineValues.append(slope*i+intercept)
+
+# Plot the fitted line over the data
+setosa_x = [a[1] for i,a in enumerate(iris_2d) if binary_target[i]==1]
+setosa_y = [a[0] for i,a in enumerate(iris_2d) if binary_target[i]==1]
+non_setosa_x = [a[1] for i,a in enumerate(iris_2d) if binary_target[i]==0]
+non_setosa_y = [a[0] for i,a in enumerate(iris_2d) if binary_target[i]==0]
+plt.plot(setosa_x, setosa_y, 'rx', ms=10, mew=2, label='setosa')
+plt.plot(non_setosa_x, non_setosa_y, 'ro', label='Non-setosa')
+plt.plot(x, ablineValues, 'b-')
+plt.xlim([0.0, 2.7])
+plt.ylim([0.0, 7.1])
+plt.suptitle('Linear Separator For I.setosa', fontsize=20)
+plt.xlabel('Petal Width')
+plt.ylabel('Petal Length')
+plt.legend(loc='lower right')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/evaluating_models.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/evaluating_models.py
new file mode 100644
index 000000000..43c103ac7
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/evaluating_models.py
@@ -0,0 +1,158 @@
+# Evaluating models in Tensorflow
+#
+# This code will implement two models. The first
+# is a simple regression model, we will show how to
+# call the loss function, MSE during training, and
+# output it after for test and training sets.
+#
+# The second model will be a simple classification
+# model. We will also show how to print percent
+# classified correctly during training and after
+# for both the test and training sets.
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Regression Example:
+# We will create sample data as follows:
+# x-data: 100 random samples from a normal ~ N(1, 0.1)
+# target: 100 values of the value 10.
+# We will fit the model:
+# x-data * A = target
+# Theoretically, A = 10.
+
+# Declare batch size
+batch_size = 25
+
+# Create data
+x_vals = np.random.normal(1, 0.1, 100)
+y_vals = np.repeat(10., 100)
+x_data = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+y_target = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+
+# Split data into train/test = 80%/20%
+train_indices = np.random.choice(len(x_vals), round(len(x_vals)*0.8), replace=False)
+test_indices = np.array(list(set(range(len(x_vals))) - set(train_indices)))
+x_vals_train = x_vals[train_indices]
+x_vals_test = x_vals[test_indices]
+y_vals_train = y_vals[train_indices]
+y_vals_test = y_vals[test_indices]
+
+# Create variable (one model parameter = A)
+A = tf.Variable(tf.random_normal(shape=[1,1]))
+
+# Add operation to graph
+my_output = tf.matmul(x_data, A)
+
+# Add L2 loss operation to graph
+loss = tf.reduce_mean(tf.square(my_output - y_target))
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Create Optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.02)
+train_step = my_opt.minimize(loss)
+
+# Run Loop
+for i in range(100):
+ rand_index = np.random.choice(len(x_vals_train), size=batch_size)
+ rand_x = np.transpose([x_vals_train[rand_index]])
+ rand_y = np.transpose([y_vals_train[rand_index]])
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+ if (i+1)%25==0:
+ print('Step #' + str(i+1) + ' A = ' + str(sess.run(A)))
+ print('Loss = ' + str(sess.run(loss, feed_dict={x_data: rand_x, y_target: rand_y})))
+
+# Evaluate accuracy (loss) on test set
+mse_test = sess.run(loss, feed_dict={x_data: np.transpose([x_vals_test]), y_target: np.transpose([y_vals_test])})
+mse_train = sess.run(loss, feed_dict={x_data: np.transpose([x_vals_train]), y_target: np.transpose([y_vals_train])})
+print('MSE on test:' + str(np.round(mse_test, 2)))
+print('MSE on train:' + str(np.round(mse_train, 2)))
+
+# Classification Example
+# We will create sample data as follows:
+# x-data: sample 50 random values from a normal = N(-1, 1)
+#         + sample 50 random values from a normal = N(2, 1)
+# target: 50 values of 0 + 50 values of 1.
+# These are essentially 100 values of the corresponding output index
+# We will fit the binary classification model:
+# If sigmoid(x+A) < 0.5 -> 0 else 1
+# Theoretically, A should be -(mean1 + mean2)/2 = -(-1 + 2)/2 = -0.5
+
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Declare batch size
+batch_size = 25
+
+# Create data
+x_vals = np.concatenate((np.random.normal(-1, 1, 50), np.random.normal(2, 1, 50)))
+y_vals = np.concatenate((np.repeat(0., 50), np.repeat(1., 50)))
+x_data = tf.placeholder(shape=[1, None], dtype=tf.float32)
+y_target = tf.placeholder(shape=[1, None], dtype=tf.float32)
+
+# Split data into train/test = 80%/20%
+train_indices = np.random.choice(len(x_vals), round(len(x_vals)*0.8), replace=False)
+test_indices = np.array(list(set(range(len(x_vals))) - set(train_indices)))
+x_vals_train = x_vals[train_indices]
+x_vals_test = x_vals[test_indices]
+y_vals_train = y_vals[train_indices]
+y_vals_test = y_vals[test_indices]
+
+# Create variable (one model parameter = A)
+A = tf.Variable(tf.random_normal(mean=10, shape=[1]))
+
+# Add operation to graph
+# Want to create the operation sigmoid(x + A)
+# Note, the sigmoid() part is in the loss function
+my_output = tf.add(x_data, A)
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Add classification loss (cross entropy)
+xentropy = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(my_output, y_target))
+
+# Create Optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.05)
+train_step = my_opt.minimize(xentropy)
+
+# Run loop
+for i in range(1800):
+ rand_index = np.random.choice(len(x_vals_train), size=batch_size)
+ rand_x = [x_vals_train[rand_index]]
+ rand_y = [y_vals_train[rand_index]]
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+ if (i+1)%200==0:
+ print('Step #' + str(i+1) + ' A = ' + str(sess.run(A)))
+ print('Loss = ' + str(sess.run(xentropy, feed_dict={x_data: rand_x, y_target: rand_y})))
+
+# Evaluate Predictions on test set
+y_prediction = tf.squeeze(tf.round(tf.nn.sigmoid(tf.add(x_data, A))))
+correct_prediction = tf.equal(y_prediction, y_target)
+accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
+acc_value_test = sess.run(accuracy, feed_dict={x_data: [x_vals_test], y_target: [y_vals_test]})
+acc_value_train = sess.run(accuracy, feed_dict={x_data: [x_vals_train], y_target: [y_vals_train]})
+print('Accuracy on train set: ' + str(acc_value_train))
+print('Accuracy on test set: ' + str(acc_value_test))
+
+# Plot classification result
+A_result = -sess.run(A)
+bins = np.linspace(-5, 5, 50)
+plt.hist(x_vals[0:50], bins, alpha=0.5, label='N(-1,1)', color='white')
+plt.hist(x_vals[50:100], bins[0:50], alpha=0.5, label='N(2,1)', color='red')
+plt.plot((A_result, A_result), (0, 8), 'k--', linewidth=3, label='A = '+ str(np.round(A_result, 2)))
+plt.legend(loc='upper right')
+plt.title('Binary Classifier, Accuracy=' + str(np.round(acc_value_test, 2)))
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/layering_nested_operations.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/layering_nested_operations.py
new file mode 100644
index 000000000..4eb7f0c9a
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/layering_nested_operations.py
@@ -0,0 +1,36 @@
+# Layering Nested Operations
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Create tensors
+
+# Create data to feed in
+my_array = np.array([[1., 3., 5., 7., 9.],
+ [-2., 0., 2., 4., 6.],
+ [-6., -3., 0., 3., 6.]])
+x_vals = np.array([my_array, my_array + 1])
+x_data = tf.placeholder(tf.float32, shape=(3, 5))
+m1 = tf.constant([[1.],[0.],[-1.],[2.],[4.]])
+m2 = tf.constant([[2.]])
+a1 = tf.constant([[10.]])
+
+# 1st Operation Layer = Multiplication
+prod1 = tf.matmul(x_data, m1)
+
+# 2nd Operation Layer = Multiplication
+prod2 = tf.matmul(prod1, m2)
+
+# 3rd Operation Layer = Addition
+add1 = tf.add(prod2, a1)
+
+for x_val in x_vals:
+ print(sess.run(add1, feed_dict={x_data: x_val}))
+
+merged = tf.merge_all_summaries()
+my_writer = tf.train.SummaryWriter('/home/nick/OneDrive/Documents/tensor_flow_book/Code/2_Tensorflow_Way', sess.graph)
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/loss_functions.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/loss_functions.py
new file mode 100644
index 000000000..f001ef58c
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/loss_functions.py
@@ -0,0 +1,105 @@
+# Loss Functions
+#----------------------------------
+#
+# This python script illustrates the different
+# loss functions for regression and classification.
+
+import matplotlib.pyplot as plt
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+###### Numerical Predictions ######
+x_vals = tf.linspace(-1., 1., 500)
+target = tf.constant(0.)
+
+# L2 loss
+# L = (pred - actual)^2
+l2_y_vals = tf.square(target - x_vals)
+l2_y_out = sess.run(l2_y_vals)
+
+# L1 loss
+# L = abs(pred - actual)
+l1_y_vals = tf.abs(target - x_vals)
+l1_y_out = sess.run(l1_y_vals)
+
+# Pseudo-Huber loss
+# L = delta^2 * (sqrt(1 + ((pred - actual)/delta)^2) - 1)
+delta1 = tf.constant(0.25)
+phuber1_y_vals = tf.mul(tf.square(delta1), tf.sqrt(1. + tf.square((target - x_vals)/delta1)) - 1.)
+phuber1_y_out = sess.run(phuber1_y_vals)
+
+delta2 = tf.constant(5.)
+phuber2_y_vals = tf.mul(tf.square(delta2), tf.sqrt(1. + tf.square((target - x_vals)/delta2)) - 1.)
+phuber2_y_out = sess.run(phuber2_y_vals)
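+
+# Hedged aside (my addition, not in the original recipe): for residuals much
+# smaller than delta the pseudo-Huber loss behaves like L2/2, and for much
+# larger residuals it grows almost linearly, like delta * |residual|.
+# Quick numerical check at a small residual r = 0.01 with delta = 0.25:
+r = tf.constant(0.01)
+phuber_small = tf.mul(tf.square(delta1), tf.sqrt(1. + tf.square(r/delta1)) - 1.)
+print(sess.run(phuber_small))  # ~ 5e-05 = r^2 / 2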
+
+# Plot the output:
+x_array = sess.run(x_vals)
+plt.plot(x_array, l2_y_out, 'b-', label='L2 Loss')
+plt.plot(x_array, l1_y_out, 'r--', label='L1 Loss')
+plt.plot(x_array, phuber1_y_out, 'k-.', label='P-Huber Loss (0.25)')
+plt.plot(x_array, phuber2_y_out, 'g:', label='P-Huber Loss (5.0)')
+plt.ylim(-0.2, 0.4)
+plt.legend(loc='lower right', prop={'size': 11})
+plt.show()
+
+
+###### Categorical Predictions ######
+x_vals = tf.linspace(-3., 5., 500)
+target = tf.constant(1.)
+targets = tf.fill([500,], 1.)
+
+# Hinge loss
+# Use for predicting binary (-1, 1) classes
+# L = max(0, 1 - (pred * actual))
+hinge_y_vals = tf.maximum(0., 1. - tf.mul(target, x_vals))
+hinge_y_out = sess.run(hinge_y_vals)
+
+# Cross entropy loss
+# L = -actual * (log(pred)) - (1-actual)(log(1-pred))
+xentropy_y_vals = - tf.mul(target, tf.log(x_vals)) - tf.mul((1. - target), tf.log(1. - x_vals))
+xentropy_y_out = sess.run(xentropy_y_vals)
+
+# Sigmoid entropy loss
+# L = -actual * (log(sigmoid(pred))) - (1-actual)(log(1-sigmoid(pred)))
+# or
+# L = max(pred, 0) - pred * actual + log(1 + exp(-abs(pred)))
+xentropy_sigmoid_y_vals = tf.nn.sigmoid_cross_entropy_with_logits(x_vals, targets)
+xentropy_sigmoid_y_out = sess.run(xentropy_sigmoid_y_vals)
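+
+# Hedged cross-check (my addition): the numerically stable form above can be
+# rebuilt from primitive ops for the same logits x_vals and targets = 1.
+manual_sigmoid_xentropy = tf.maximum(x_vals, 0.) - tf.mul(x_vals, targets) + tf.log(1. + tf.exp(-tf.abs(x_vals)))
+print(sess.run(tf.reduce_max(tf.abs(manual_sigmoid_xentropy - xentropy_sigmoid_y_vals))))  # ~ 0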
+
+# Weighted (softmax) cross entropy loss
+# L = -actual * (log(pred)) * weights - (1-actual)(log(1-pred))
+# or
+# L = (1 - actual) * pred + (1 + (weights - 1) * actual) * log(1 + exp(-pred))
+weight = tf.constant(0.5)
+xentropy_weighted_y_vals = tf.nn.weighted_cross_entropy_with_logits(x_vals, targets, weight)
+xentropy_weighted_y_out = sess.run(xentropy_weighted_y_vals)
+
+# Plot the output
+x_array = sess.run(x_vals)
+plt.plot(x_array, hinge_y_out, 'b-', label='Hinge Loss')
+plt.plot(x_array, xentropy_y_out, 'r--', label='Cross Entropy Loss')
+plt.plot(x_array, xentropy_sigmoid_y_out, 'k-.', label='Cross Entropy Sigmoid Loss')
+plt.plot(x_array, xentropy_weighted_y_out, 'g:', label='Weighted Cross Entropy Loss (x0.5)')
+plt.ylim(-1.5, 3)
+#plt.xlim(-1, 3)
+plt.legend(loc='lower right', prop={'size': 11})
+plt.show()
+
+# Softmax entropy loss
+# L = -actual * (log(softmax(pred))) - (1-actual)(log(1-softmax(pred)))
+unscaled_logits = tf.constant([[1., -3., 10.]])
+target_dist = tf.constant([[0.1, 0.02, 0.88]])
+softmax_xentropy = tf.nn.softmax_cross_entropy_with_logits(unscaled_logits, target_dist)
+print(sess.run(softmax_xentropy))
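+
+# Hedged cross-check (my addition): the same value from primitive ops,
+# L = -sum(target * log(softmax(logits))).
+manual_softmax_xentropy = -tf.reduce_sum(tf.mul(target_dist, tf.log(tf.nn.softmax(unscaled_logits))), 1)
+print(sess.run(manual_softmax_xentropy))  # should match the value above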
+
+# Sparse entropy loss
+# Use when classes are mutually exclusive and each target is a single class index
+# L = sum( -actual * log(pred) )
+unscaled_logits = tf.constant([[1., -3., 10.]])
+sparse_target_dist = tf.constant([2])
+sparse_xentropy = tf.nn.sparse_softmax_cross_entropy_with_logits(unscaled_logits, sparse_target_dist)
+print(sess.run(sparse_xentropy))
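+
+# Hedged note (my addition): the sparse version with index 2 should agree
+# with the dense version fed the one-hot target [0, 0, 1].
+one_hot_dist = tf.constant([[0., 0., 1.]])
+print(sess.run(tf.nn.softmax_cross_entropy_with_logits(unscaled_logits, one_hot_dist)))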
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/multiple_layers.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/multiple_layers.py
new file mode 100644
index 000000000..cfe6f7f3e
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/multiple_layers.py
@@ -0,0 +1,48 @@
+# Layering Nested Operations
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Create tensors
+
+# Create a small random 'image' of size 4x4
+x_shape = [1, 4, 4, 1]
+x_val = np.random.uniform(size=x_shape)
+
+x_data = tf.placeholder(tf.float32, shape=x_shape)
+
+# Create a layer that takes a spatial moving window average
+# Our window will be 2x2 with a stride of 2 for height and width
+# The filter value will be 0.25 because we want the average of the 2x2 window
+my_filter = tf.constant(0.25, shape=[2, 2, 1, 1])
+my_strides = [1, 2, 2, 1]
+mov_avg_layer= tf.nn.conv2d(x_data, my_filter, my_strides,
+ padding='SAME', name='Moving_Avg_Window')
+
+# Define a custom layer which will be sigmoid(Ax+b) where
+# x is a 2x2 matrix and A and b are 2x2 matrices
+def custom_layer(input_matrix):
+    input_matrix_squeezed = tf.squeeze(input_matrix)
+ A = tf.constant([[1., 2.], [-1., 3.]])
+ b = tf.constant(1., shape=[2, 2])
+    temp1 = tf.matmul(A, input_matrix_squeezed)
+ temp = tf.add(temp1, b) # Ax + b
+ return(tf.sigmoid(temp))
+
+# Add custom layer to graph
+with tf.name_scope('Custom_Layer') as scope:
+ custom_layer1 = custom_layer(mov_avg_layer)
+
+# The output of the moving average layer is a 2x2 array, but has shape (1, 2, 2, 1)
+#print(sess.run(mov_avg_layer, feed_dict={x_data: x_val}))
+
+# After custom operation, size is now 2x2 (squeezed out size 1 dims)
+print(sess.run(custom_layer1, feed_dict={x_data: x_val}))
+
+merged = tf.merge_all_summaries()
+my_writer = tf.train.SummaryWriter('/home/nick/OneDrive/Documents/tensor_flow_book/Code/2_Tensorflow_Way', sess.graph)
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/operations_on_a_graph.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/operations_on_a_graph.py
new file mode 100644
index 000000000..cc4de31dc
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 02/operations_on_a_graph.py
@@ -0,0 +1,24 @@
+# Operations on a Computational Graph
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Create tensors
+
+# Create data to feed in
+x_vals = np.array([1., 3., 5., 7., 9.])
+x_data = tf.placeholder(tf.float32)
+m = tf.constant(3.)
+
+# Multiplication
+prod = tf.mul(x_data, m)
+for x_val in x_vals:
+ print(sess.run(prod, feed_dict={x_data: x_val}))
+
+merged = tf.merge_all_summaries()
+my_writer = tf.train.SummaryWriter('/home/nick/OneDrive/Documents/tensor_flow_book/Code/2_Tensorflow_Way', sess.graph)
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/deming_regression.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/deming_regression.py
new file mode 100644
index 000000000..b246bec61
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/deming_regression.py
@@ -0,0 +1,91 @@
+# Deming Regression
+#----------------------------------
+#
+# This function shows how to use Tensorflow to
+# solve linear Deming regression.
+# y = Ax + b
+#
+# We will use the iris data, specifically:
+# y = Sepal Length
+# x = Petal Width
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from sklearn import datasets
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Load the data
+# iris.data = [(Sepal Length, Sepal Width, Petal Length, Petal Width)]
+iris = datasets.load_iris()
+x_vals = np.array([x[3] for x in iris.data])
+y_vals = np.array([y[0] for y in iris.data])
+
+# Declare batch size
+batch_size = 50
+
+# Initialize placeholders
+x_data = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+y_target = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+
+# Create variables for linear regression
+A = tf.Variable(tf.random_normal(shape=[1,1]))
+b = tf.Variable(tf.random_normal(shape=[1,1]))
+
+# Declare model operations
+model_output = tf.add(tf.matmul(x_data, A), b)
+
+# Declare Deming loss function
+deming_numerator = tf.abs(tf.sub(y_target, tf.add(tf.matmul(x_data, A), b)))
+deming_denominator = tf.sqrt(tf.add(tf.square(A),1))
+loss = tf.reduce_mean(tf.truediv(deming_numerator, deming_denominator))
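+
+# Hedged aside (my addition): the quantity above is the vertical residual
+# |y - (Ax + b)| scaled by 1/sqrt(A^2 + 1), i.e. the perpendicular distance
+# from each point to the line, which is what Deming regression minimizes.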
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Declare optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.1)
+train_step = my_opt.minimize(loss)
+
+# Training loop
+loss_vec = []
+for i in range(250):
+ rand_index = np.random.choice(len(x_vals), size=batch_size)
+ rand_x = np.transpose([x_vals[rand_index]])
+ rand_y = np.transpose([y_vals[rand_index]])
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+ temp_loss = sess.run(loss, feed_dict={x_data: rand_x, y_target: rand_y})
+ loss_vec.append(temp_loss)
+ if (i+1)%50==0:
+ print('Step #' + str(i+1) + ' A = ' + str(sess.run(A)) + ' b = ' + str(sess.run(b)))
+ print('Loss = ' + str(temp_loss))
+
+# Get the optimal coefficients
+[slope] = sess.run(A)
+[y_intercept] = sess.run(b)
+
+# Get best fit line
+best_fit = []
+for i in x_vals:
+ best_fit.append(slope*i+y_intercept)
+
+# Plot the result
+plt.plot(x_vals, y_vals, 'o', label='Data Points')
+plt.plot(x_vals, best_fit, 'r-', label='Best fit line', linewidth=3)
+plt.legend(loc='upper left')
+plt.title('Sepal Length vs Petal Width')
+plt.xlabel('Petal Width')
+plt.ylabel('Sepal Length')
+plt.show()
+
+# Plot loss over time
+plt.plot(loss_vec, 'k-')
+plt.title('Deming Loss per Generation')
+plt.xlabel('Generation')
+plt.ylabel('Deming Loss')
+plt.show()
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/elasticnet_regression.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/elasticnet_regression.py
new file mode 100644
index 000000000..a4af8ebeb
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/elasticnet_regression.py
@@ -0,0 +1,81 @@
+# Elastic Net Regression
+#----------------------------------
+#
+# This function shows how to use Tensorflow to
+# solve elastic net regression.
+# y = Ax + b
+#
+# We will use the iris data, specifically:
+# y = Sepal Length
+# x = Sepal Width, Petal Length, Petal Width
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from sklearn import datasets
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Load the data
+# iris.data = [(Sepal Length, Sepal Width, Petal Length, Petal Width)]
+iris = datasets.load_iris()
+x_vals = np.array([[x[1], x[2], x[3]] for x in iris.data])
+y_vals = np.array([y[0] for y in iris.data])
+
+# Declare batch size
+batch_size = 50
+
+# Initialize placeholders
+x_data = tf.placeholder(shape=[None, 3], dtype=tf.float32)
+y_target = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+
+# Create variables for linear regression
+A = tf.Variable(tf.random_normal(shape=[3,1]))
+b = tf.Variable(tf.random_normal(shape=[1,1]))
+
+# Declare model operations
+model_output = tf.add(tf.matmul(x_data, A), b)
+
+# Declare the elastic net loss function
+elastic_param1 = tf.constant(1.)
+elastic_param2 = tf.constant(1.)
+l1_a_loss = tf.reduce_mean(tf.abs(A))
+l2_a_loss = tf.reduce_mean(tf.square(A))
+e1_term = tf.mul(elastic_param1, l1_a_loss)
+e2_term = tf.mul(elastic_param2, l2_a_loss)
+loss = tf.expand_dims(tf.add(tf.add(tf.reduce_mean(tf.square(y_target - model_output)), e1_term), e2_term), 0)
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
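+
+# Hedged peek (my addition): inspect the two elastic net penalty terms
+# right after initialization to see their relative magnitudes.
+print(sess.run([e1_term, e2_term]))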
+
+# Declare optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.001)
+train_step = my_opt.minimize(loss)
+
+# Training loop
+loss_vec = []
+for i in range(1000):
+ rand_index = np.random.choice(len(x_vals), size=batch_size)
+ rand_x = x_vals[rand_index]
+ rand_y = np.transpose([y_vals[rand_index]])
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+ temp_loss = sess.run(loss, feed_dict={x_data: rand_x, y_target: rand_y})
+ loss_vec.append(temp_loss[0])
+ if (i+1)%250==0:
+ print('Step #' + str(i+1) + ' A = ' + str(sess.run(A)) + ' b = ' + str(sess.run(b)))
+ print('Loss = ' + str(temp_loss))
+
+# Get the optimal coefficients
+[[sw_coef], [pl_coef], [pw_coef]] = sess.run(A)
+[y_intercept] = sess.run(b)
+
+# Plot loss over time
+plt.plot(loss_vec, 'k-')
+plt.title('Loss per Generation')
+plt.xlabel('Generation')
+plt.ylabel('Loss')
+plt.show()
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/lasso_and_ridge_regression.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/lasso_and_ridge_regression.py
new file mode 100644
index 000000000..18d31baca
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/lasso_and_ridge_regression.py
@@ -0,0 +1,100 @@
+# Lasso and Ridge Regression
+#----------------------------------
+#
+# This function shows how to use Tensorflow to
+# solve lasso or ridge regression.
+# y = Ax + b
+#
+# We will use the iris data, specifically:
+# y = Sepal Length
+# x = Petal Width
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from sklearn import datasets
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Load the data
+# iris.data = [(Sepal Length, Sepal Width, Petal Length, Petal Width)]
+iris = datasets.load_iris()
+x_vals = np.array([x[3] for x in iris.data])
+y_vals = np.array([y[0] for y in iris.data])
+
+# Declare batch size
+batch_size = 50
+
+# Initialize placeholders
+x_data = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+y_target = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+
+# Create variables for linear regression
+A = tf.Variable(tf.random_normal(shape=[1,1]))
+b = tf.Variable(tf.random_normal(shape=[1,1]))
+
+# Declare model operations
+model_output = tf.add(tf.matmul(x_data, A), b)
+
+# Declare Lasso loss function
+# Lasso Loss = L2_Loss + heaviside_step,
+# Where heaviside_step ~ 0 if A < constant, otherwise ~ 99
+#lasso_param = tf.constant(0.9)
+#heaviside_step = tf.truediv(1., tf.add(1., tf.exp(tf.mul(-100., tf.sub(A, lasso_param)))))
+#regularization_param = tf.mul(heaviside_step, 99.)
+#loss = tf.add(tf.reduce_mean(tf.square(y_target - model_output)), regularization_param)
+
+# Declare the Ridge loss function
+# Ridge loss = L2_loss + L2 norm of slope
+ridge_param = tf.constant(1.)
+ridge_loss = tf.reduce_mean(tf.square(A))
+loss = tf.expand_dims(tf.add(tf.reduce_mean(tf.square(y_target - model_output)), tf.mul(ridge_param, ridge_loss)), 0)
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Declare optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.001)
+train_step = my_opt.minimize(loss)
+
+# Training loop
+loss_vec = []
+for i in range(1500):
+ rand_index = np.random.choice(len(x_vals), size=batch_size)
+ rand_x = np.transpose([x_vals[rand_index]])
+ rand_y = np.transpose([y_vals[rand_index]])
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+ temp_loss = sess.run(loss, feed_dict={x_data: rand_x, y_target: rand_y})
+ loss_vec.append(temp_loss[0])
+ if (i+1)%300==0:
+ print('Step #' + str(i+1) + ' A = ' + str(sess.run(A)) + ' b = ' + str(sess.run(b)))
+ print('Loss = ' + str(temp_loss))
+
+# Get the optimal coefficients
+[slope] = sess.run(A)
+[y_intercept] = sess.run(b)
+
+# Get best fit line
+best_fit = []
+for i in x_vals:
+ best_fit.append(slope*i+y_intercept)
+
+# Plot the result
+plt.plot(x_vals, y_vals, 'o', label='Data Points')
+plt.plot(x_vals, best_fit, 'r-', label='Best fit line', linewidth=3)
+plt.legend(loc='upper left')
+plt.title('Sepal Length vs Petal Width')
+plt.xlabel('Petal Width')
+plt.ylabel('Sepal Length')
+plt.show()
+
+# Plot loss over time
+plt.plot(loss_vec, 'k-')
+plt.title('L2 Loss per Generation')
+plt.xlabel('Generation')
+plt.ylabel('L2 Loss')
+plt.show()
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/lin_reg_decomposition.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/lin_reg_decomposition.py
new file mode 100644
index 000000000..efe1ff0fb
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/lin_reg_decomposition.py
@@ -0,0 +1,66 @@
+# Linear Regression: Decomposition Method
+#----------------------------------
+#
+# This function shows how to use Tensorflow to
+# solve linear regression via the Cholesky decomposition.
+#
+# Given Ax=b, we solve the normal equations t(A)*A*x = t(A)*b.
+# With the Cholesky decomposition t(A)*A = L*L', we can solve for x via
+# 1) L*y = t(A)*b
+# 2) L'*x = y
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Create the data
+x_vals = np.linspace(0, 10, 100)
+y_vals = x_vals + np.random.normal(0, 1, 100)
+
+# Create design matrix
+x_vals_column = np.transpose(np.matrix(x_vals))
+ones_column = np.transpose(np.matrix(np.repeat(1, 100)))
+A = np.column_stack((x_vals_column, ones_column))
+
+# Create b matrix
+b = np.transpose(np.matrix(y_vals))
+
+# Create tensors
+A_tensor = tf.constant(A)
+b_tensor = tf.constant(b)
+
+# Find Cholesky Decomposition
+tA_A = tf.matmul(tf.transpose(A_tensor), A_tensor)
+L = tf.cholesky(tA_A)
+
+# Solve L*y=t(A)*b
+tA_b = tf.matmul(tf.transpose(A_tensor), b_tensor)
+sol1 = tf.matrix_solve(L, tA_b)
+
+# Solve L' * x = sol1
+sol2 = tf.matrix_solve(tf.transpose(L), sol1)
+
+solution_eval = sess.run(sol2)
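+
+# Optional sanity check (my addition, not in the original recipe): the
+# Cholesky factor should rebuild t(A)*A up to floating point error.
+print(np.allclose(sess.run(tf.matmul(L, tf.transpose(L))), sess.run(tA_A)))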
+
+# Extract coefficients
+slope = solution_eval[0][0]
+y_intercept = solution_eval[1][0]
+
+print('slope: ' + str(slope))
+print('y_intercept: ' + str(y_intercept))
+
+# Get best fit line
+best_fit = []
+for i in x_vals:
+ best_fit.append(slope*i+y_intercept)
+
+# Plot the results
+plt.plot(x_vals, y_vals, 'o', label='Data')
+plt.plot(x_vals, best_fit, 'r-', label='Best fit line', linewidth=3)
+plt.legend(loc='upper left')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/lin_reg_inverse.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/lin_reg_inverse.py
new file mode 100644
index 000000000..40166d5e3
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/lin_reg_inverse.py
@@ -0,0 +1,60 @@
+# Linear Regression: Inverse Matrix Method
+#----------------------------------
+#
+# This function shows how to use Tensorflow to
+# solve linear regression via the matrix inverse.
+#
+# Given Ax=b, solving for x:
+# x = (t(A) * A)^(-1) * t(A) * b
+# where t(A) is the transpose of A
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Create the data
+x_vals = np.linspace(0, 10, 100)
+y_vals = x_vals + np.random.normal(0, 1, 100)
+
+# Create design matrix
+x_vals_column = np.transpose(np.matrix(x_vals))
+ones_column = np.transpose(np.matrix(np.repeat(1, 100)))
+A = np.column_stack((x_vals_column, ones_column))
+
+# Create b matrix
+b = np.transpose(np.matrix(y_vals))
+
+# Create tensors
+A_tensor = tf.constant(A)
+b_tensor = tf.constant(b)
+
+# Matrix inverse solution
+tA_A = tf.matmul(tf.transpose(A_tensor), A_tensor)
+tA_A_inv = tf.matrix_inverse(tA_A)
+product = tf.matmul(tA_A_inv, tf.transpose(A_tensor))
+solution = tf.matmul(product, b_tensor)
+
+solution_eval = sess.run(solution)
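+
+# Hedged cross-check (my addition): NumPy's least-squares solver should
+# agree with the normal-equations solution above.
+lstsq_solution, _, _, _ = np.linalg.lstsq(A, b)
+print(np.allclose(solution_eval, lstsq_solution))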
+
+# Extract coefficients
+slope = solution_eval[0][0]
+y_intercept = solution_eval[1][0]
+
+print('slope: ' + str(slope))
+print('y_intercept: ' + str(y_intercept))
+
+# Get best fit line
+best_fit = []
+for i in x_vals:
+ best_fit.append(slope*i+y_intercept)
+
+# Plot the results
+plt.plot(x_vals, y_vals, 'o', label='Data')
+plt.plot(x_vals, best_fit, 'r-', label='Best fit line', linewidth=3)
+plt.legend(loc='upper left')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/lin_reg_l1_vs_l2.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/lin_reg_l1_vs_l2.py
new file mode 100644
index 000000000..09b459d34
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/lin_reg_l1_vs_l2.py
@@ -0,0 +1,111 @@
+# Linear Regression: L1 vs L2
+#----------------------------------
+#
+# This function shows how to use Tensorflow to
+# compare the effect of L1 vs L2 loss on linear regression.
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from sklearn import datasets
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Load the data
+# iris.data = [(Sepal Length, Sepal Width, Petal Length, Petal Width)]
+iris = datasets.load_iris()
+x_vals = np.array([x[3] for x in iris.data])
+y_vals = np.array([y[0] for y in iris.data])
+
+# Declare batch size and number of iterations
+batch_size = 25
+learning_rate = 0.4 # Will not converge with learning rate at 0.4
+iterations = 50
+
+# Initialize placeholders
+x_data = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+y_target = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+
+# Create variables for linear regression
+A = tf.Variable(tf.random_normal(shape=[1,1]))
+b = tf.Variable(tf.random_normal(shape=[1,1]))
+
+# Declare model operations
+model_output = tf.add(tf.matmul(x_data, A), b)
+
+# Declare loss functions
+loss_l1 = tf.reduce_mean(tf.abs(y_target - model_output))
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Declare optimizers
+my_opt_l1 = tf.train.GradientDescentOptimizer(learning_rate)
+train_step_l1 = my_opt_l1.minimize(loss_l1)
+
+# Training loop
+loss_vec_l1 = []
+for i in range(iterations):
+ rand_index = np.random.choice(len(x_vals), size=batch_size)
+ rand_x = np.transpose([x_vals[rand_index]])
+ rand_y = np.transpose([y_vals[rand_index]])
+ sess.run(train_step_l1, feed_dict={x_data: rand_x, y_target: rand_y})
+ temp_loss_l1 = sess.run(loss_l1, feed_dict={x_data: rand_x, y_target: rand_y})
+ loss_vec_l1.append(temp_loss_l1)
+ if (i+1)%25==0:
+ print('Step #' + str(i+1) + ' A = ' + str(sess.run(A)) + ' b = ' + str(sess.run(b)))
+
+
+# L2 Loss
+# Reinitialize graph
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Initialize placeholders
+x_data = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+y_target = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+
+# Create variables for linear regression
+A = tf.Variable(tf.random_normal(shape=[1,1]))
+b = tf.Variable(tf.random_normal(shape=[1,1]))
+
+# Declare model operations
+model_output = tf.add(tf.matmul(x_data, A), b)
+
+# Declare loss functions
+loss_l2 = tf.reduce_mean(tf.square(y_target - model_output))
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Declare optimizers
+my_opt_l2 = tf.train.GradientDescentOptimizer(learning_rate)
+train_step_l2 = my_opt_l2.minimize(loss_l2)
+
+loss_vec_l2 = []
+for i in range(iterations):
+ rand_index = np.random.choice(len(x_vals), size=batch_size)
+ rand_x = np.transpose([x_vals[rand_index]])
+ rand_y = np.transpose([y_vals[rand_index]])
+ sess.run(train_step_l2, feed_dict={x_data: rand_x, y_target: rand_y})
+ temp_loss_l2 = sess.run(loss_l2, feed_dict={x_data: rand_x, y_target: rand_y})
+ loss_vec_l2.append(temp_loss_l2)
+ if (i+1)%25==0:
+ print('Step #' + str(i+1) + ' A = ' + str(sess.run(A)) + ' b = ' + str(sess.run(b)))
+
+
+# Plot loss over time
+plt.plot(loss_vec_l1, 'k-', label='L1 Loss')
+plt.plot(loss_vec_l2, 'r--', label='L2 Loss')
+plt.title('L1 and L2 Loss per Generation')
+plt.xlabel('Generation')
+plt.ylabel('Loss')
+plt.legend(loc='upper right')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/lin_reg_tensorflow_way.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/lin_reg_tensorflow_way.py
new file mode 100644
index 000000000..4e7fe8227
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/lin_reg_tensorflow_way.py
@@ -0,0 +1,89 @@
+# Linear Regression: Tensorflow Way
+#----------------------------------
+#
+# This function shows how to use Tensorflow to
+# solve linear regression.
+# y = Ax + b
+#
+# We will use the iris data, specifically:
+# y = Sepal Length
+# x = Petal Width
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from sklearn import datasets
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Load the data
+# iris.data = [(Sepal Length, Sepal Width, Petal Length, Petal Width)]
+iris = datasets.load_iris()
+x_vals = np.array([x[3] for x in iris.data])
+y_vals = np.array([y[0] for y in iris.data])
+
+# Declare batch size
+batch_size = 25
+
+# Initialize placeholders
+x_data = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+y_target = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+
+# Create variables for linear regression
+A = tf.Variable(tf.random_normal(shape=[1,1]))
+b = tf.Variable(tf.random_normal(shape=[1,1]))
+
+# Declare model operations
+model_output = tf.add(tf.matmul(x_data, A), b)
+
+# Declare loss function (L2 loss)
+loss = tf.reduce_mean(tf.square(y_target - model_output))
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Declare optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.05)
+train_step = my_opt.minimize(loss)
+
+# Training loop
+loss_vec = []
+for i in range(100):
+ rand_index = np.random.choice(len(x_vals), size=batch_size)
+ rand_x = np.transpose([x_vals[rand_index]])
+ rand_y = np.transpose([y_vals[rand_index]])
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+ temp_loss = sess.run(loss, feed_dict={x_data: rand_x, y_target: rand_y})
+ loss_vec.append(temp_loss)
+ if (i+1)%25==0:
+ print('Step #' + str(i+1) + ' A = ' + str(sess.run(A)) + ' b = ' + str(sess.run(b)))
+ print('Loss = ' + str(temp_loss))
+
+# Get the optimal coefficients
+[slope] = sess.run(A)
+[y_intercept] = sess.run(b)
+
+# Get best fit line
+best_fit = []
+for i in x_vals:
+ best_fit.append(slope*i+y_intercept)
+
+# Plot the result
+plt.plot(x_vals, y_vals, 'o', label='Data Points')
+plt.plot(x_vals, best_fit, 'r-', label='Best fit line', linewidth=3)
+plt.legend(loc='upper left')
+plt.title('Sepal Length vs Petal Width')
+plt.xlabel('Petal Width')
+plt.ylabel('Sepal Length')
+plt.show()
+
+# Plot loss over time
+plt.plot(loss_vec, 'k-')
+plt.title('L2 Loss per Generation')
+plt.xlabel('Generation')
+plt.ylabel('L2 Loss')
+plt.show()
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/logistic_regression.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/logistic_regression.py
new file mode 100644
index 000000000..a28054fcb
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 03/logistic_regression.py
@@ -0,0 +1,114 @@
+# Logistic Regression
+#----------------------------------
+#
+# This function shows how to use Tensorflow to
+# solve logistic regression.
+# y = sigmoid(Ax + b)
+#
+# We will use the low birth weight data, specifically:
+# y = 0 or 1 = low birth weight
+# x = demographic and medical history data
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+import requests
+from sklearn import datasets
+from sklearn.preprocessing import normalize
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+birthdata_url = 'https://www.umass.edu/statdata/statdata/data/lowbwt.dat'
+birth_file = requests.get(birthdata_url)
+birth_data = birth_file.text.split('\r\n')[5:]
+birth_header = [x for x in birth_data[0].split(' ') if len(x)>=1]
+birth_data = [[float(x) for x in y.split(' ') if len(x)>=1] for y in birth_data[1:] if len(y)>=1]
+# Pull out target variable
+y_vals = np.array([x[1] for x in birth_data])
+# Pull out predictor variables (not id, not target, and not birthweight)
+x_vals = np.array([x[2:9] for x in birth_data])
+
+# Split data into train/test = 80%/20%
+train_indices = np.random.choice(len(x_vals), round(len(x_vals)*0.8), replace=False)
+test_indices = np.array(list(set(range(len(x_vals))) - set(train_indices)))
+x_vals_train = x_vals[train_indices]
+x_vals_test = x_vals[test_indices]
+y_vals_train = y_vals[train_indices]
+y_vals_test = y_vals[test_indices]
+
+# Normalize by column (min-max norm)
+def normalize_cols(m):
+ col_max = m.max(axis=0)
+ col_min = m.min(axis=0)
+ return (m-col_min) / (col_max - col_min)
+
+x_vals_train = np.nan_to_num(normalize_cols(x_vals_train))
+x_vals_test = np.nan_to_num(normalize_cols(x_vals_test))
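+
+# Note (my addition): if a column is constant, (col_max - col_min) is zero and
+# the division yields NaNs; np.nan_to_num maps those to 0 so training can proceed.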
+
+# Declare batch size
+batch_size = 25
+
+# Initialize placeholders
+x_data = tf.placeholder(shape=[None, 7], dtype=tf.float32)
+y_target = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+
+# Create variables for linear regression
+A = tf.Variable(tf.random_normal(shape=[7,1]))
+b = tf.Variable(tf.random_normal(shape=[1,1]))
+
+# Declare model operations
+model_output = tf.add(tf.matmul(x_data, A), b)
+
+# Declare loss function (Cross Entropy loss)
+loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(model_output, y_target))
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Declare optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.01)
+train_step = my_opt.minimize(loss)
+
+# Actual Prediction
+prediction = tf.round(tf.sigmoid(model_output))
+predictions_correct = tf.cast(tf.equal(prediction, y_target), tf.float32)
+accuracy = tf.reduce_mean(predictions_correct)
+
+# Training loop
+loss_vec = []
+train_acc = []
+test_acc = []
+for i in range(1500):
+ rand_index = np.random.choice(len(x_vals_train), size=batch_size)
+ rand_x = x_vals_train[rand_index]
+ rand_y = np.transpose([y_vals_train[rand_index]])
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+
+ temp_loss = sess.run(loss, feed_dict={x_data: rand_x, y_target: rand_y})
+ loss_vec.append(temp_loss)
+ temp_acc_train = sess.run(accuracy, feed_dict={x_data: x_vals_train, y_target: np.transpose([y_vals_train])})
+ train_acc.append(temp_acc_train)
+ temp_acc_test = sess.run(accuracy, feed_dict={x_data: x_vals_test, y_target: np.transpose([y_vals_test])})
+ test_acc.append(temp_acc_test)
+ if (i+1)%300==0:
+ print('Loss = ' + str(temp_loss))
+
+# Plot loss over time
+plt.plot(loss_vec, 'k-')
+plt.title('Cross Entropy Loss per Generation')
+plt.xlabel('Generation')
+plt.ylabel('Cross Entropy Loss')
+plt.show()
+
+# Plot train and test accuracy
+plt.plot(train_acc, 'k-', label='Train Set Accuracy')
+plt.plot(test_acc, 'r--', label='Test Set Accuracy')
+plt.title('Train and Test Accuracy')
+plt.xlabel('Generation')
+plt.ylabel('Accuracy')
+plt.legend(loc='lower right')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 04/linear_svm.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 04/linear_svm.py
new file mode 100644
index 000000000..b8317b4e9
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 04/linear_svm.py
@@ -0,0 +1,146 @@
+# Linear Support Vector Machine: Soft Margin
+#----------------------------------
+#
+# This function shows how to use Tensorflow to
+# create a soft margin SVM
+#
+# We will use the iris data, specifically:
+# x1 = Sepal Length
+# x2 = Petal Width
+# Class 1 : I. setosa
+# Class -1: not I. setosa
+#
+# We know here that x and y are linearly separable
+# for I. setosa classification.
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from sklearn import datasets
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Load the data
+# iris.data = [(Sepal Length, Sepal Width, Petal Length, Petal Width)]
+iris = datasets.load_iris()
+x_vals = np.array([[x[0], x[3]] for x in iris.data])
+y_vals = np.array([1 if y==0 else -1 for y in iris.target])
+
+# Split data into train/test sets
+train_indices = np.random.choice(len(x_vals), round(len(x_vals)*0.8), replace=False)
+test_indices = np.array(list(set(range(len(x_vals))) - set(train_indices)))
+x_vals_train = x_vals[train_indices]
+x_vals_test = x_vals[test_indices]
+y_vals_train = y_vals[train_indices]
+y_vals_test = y_vals[test_indices]
+
+# Declare batch size
+batch_size = 100
+
+# Initialize placeholders
+x_data = tf.placeholder(shape=[None, 2], dtype=tf.float32)
+y_target = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+
+# Create variables for linear regression
+A = tf.Variable(tf.random_normal(shape=[2,1]))
+b = tf.Variable(tf.random_normal(shape=[1,1]))
+
+# Declare model operations
+model_output = tf.sub(tf.matmul(x_data, A), b)
+
+# Declare the squared L2 norm of the weight vector
+l2_norm = tf.reduce_sum(tf.square(A))
+
+# Declare loss function
+# = max(0, 1-pred*actual) + alpha * L2_norm(A)^2
+# L2 regularization parameter, alpha
+alpha = tf.constant([0.01])
+# Margin term in loss
+classification_term = tf.reduce_mean(tf.maximum(0., tf.sub(1., tf.mul(model_output, y_target))))
+# Put terms together
+loss = tf.add(classification_term, tf.mul(alpha, l2_norm))
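+
+# Hedged micro-example (my addition): with label +1, a prediction of 2.0 lies
+# outside the margin (hinge term 0), while 0.2 lies inside (hinge term 0.8).
+print(sess.run(tf.maximum(0., tf.sub(1., tf.mul(2.0, 1.0)))))  # 0.0
+print(sess.run(tf.maximum(0., tf.sub(1., tf.mul(0.2, 1.0)))))  # 0.8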
+
+# Declare prediction function
+prediction = tf.sign(model_output)
+accuracy = tf.reduce_mean(tf.cast(tf.equal(prediction, y_target), tf.float32))
+
+# Declare optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.01)
+train_step = my_opt.minimize(loss)
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Training loop
+loss_vec = []
+train_accuracy = []
+test_accuracy = []
+for i in range(500):
+ rand_index = np.random.choice(len(x_vals_train), size=batch_size)
+ rand_x = x_vals_train[rand_index]
+ rand_y = np.transpose([y_vals_train[rand_index]])
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+
+ temp_loss = sess.run(loss, feed_dict={x_data: rand_x, y_target: rand_y})
+ loss_vec.append(temp_loss)
+
+ train_acc_temp = sess.run(accuracy, feed_dict={x_data: x_vals_train, y_target: np.transpose([y_vals_train])})
+ train_accuracy.append(train_acc_temp)
+
+ test_acc_temp = sess.run(accuracy, feed_dict={x_data: x_vals_test, y_target: np.transpose([y_vals_test])})
+ test_accuracy.append(test_acc_temp)
+
+ if (i+1)%100==0:
+ print('Step #' + str(i+1) + ' A = ' + str(sess.run(A)) + ' b = ' + str(sess.run(b)))
+ print('Loss = ' + str(temp_loss))
+
+# Extract coefficients
+[[a1], [a2]] = sess.run(A)
+[[b]] = sess.run(b)
+slope = -a2/a1
+y_intercept = b/a1
+
+# Extract x1 and x2 vals
+x1_vals = [d[1] for d in x_vals]
+
+# Get best fit line
+best_fit = []
+for i in x1_vals:
+ best_fit.append(slope*i+y_intercept)
+
+# Separate I. setosa
+setosa_x = [d[1] for i,d in enumerate(x_vals) if y_vals[i]==1]
+setosa_y = [d[0] for i,d in enumerate(x_vals) if y_vals[i]==1]
+not_setosa_x = [d[1] for i,d in enumerate(x_vals) if y_vals[i]==-1]
+not_setosa_y = [d[0] for i,d in enumerate(x_vals) if y_vals[i]==-1]
+
+# Plot data and line
+plt.plot(setosa_x, setosa_y, 'o', label='I. setosa')
+plt.plot(not_setosa_x, not_setosa_y, 'x', label='Non-setosa')
+plt.plot(x1_vals, best_fit, 'r-', label='Linear Separator', linewidth=3)
+plt.ylim([0, 10])
+plt.legend(loc='lower right')
+plt.title('Sepal Length vs Petal Width')
+plt.xlabel('Petal Width')
+plt.ylabel('Sepal Length')
+plt.show()
+
+# Plot train/test accuracies
+plt.plot(train_accuracy, 'k-', label='Training Accuracy')
+plt.plot(test_accuracy, 'r--', label='Test Accuracy')
+plt.title('Train and Test Set Accuracies')
+plt.xlabel('Generation')
+plt.ylabel('Accuracy')
+plt.legend(loc='lower right')
+plt.show()
+
+# Plot loss over time
+plt.plot(loss_vec, 'k-')
+plt.title('Loss per Generation')
+plt.xlabel('Generation')
+plt.ylabel('Loss')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 04/multiclass_svm.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 04/multiclass_svm.py
new file mode 100644
index 000000000..bb402aeee
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 04/multiclass_svm.py
@@ -0,0 +1,154 @@
+# Multi-class (Nonlinear) SVM Example
+#----------------------------------
+#
+# This function will illustrate how to
+# implement the Gaussian kernel with
+# multiple classes on the iris dataset.
+#
+# Gaussian Kernel:
+# K(x1, x2) = exp(-gamma * abs(x1 - x2)^2)
+#
+# X : (Sepal Length, Petal Width)
+# Y: (I. setosa, I. virginica, I. versicolor) (3 classes)
+#
+# Basic idea: introduce an extra dimension to do
+# one vs all classification.
+#
+# The prediction of a point will be the category with
+# the largest margin or distance to boundary.
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from sklearn import datasets
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Load the data
+# iris.data = [(Sepal Length, Sepal Width, Petal Length, Petal Width)]
+iris = datasets.load_iris()
+x_vals = np.array([[x[0], x[3]] for x in iris.data])
+y_vals1 = np.array([1 if y==0 else -1 for y in iris.target])
+y_vals2 = np.array([1 if y==1 else -1 for y in iris.target])
+y_vals3 = np.array([1 if y==2 else -1 for y in iris.target])
+y_vals = np.array([y_vals1, y_vals2, y_vals3])
+class1_x = [x[0] for i,x in enumerate(x_vals) if iris.target[i]==0]
+class1_y = [x[1] for i,x in enumerate(x_vals) if iris.target[i]==0]
+class2_x = [x[0] for i,x in enumerate(x_vals) if iris.target[i]==1]
+class2_y = [x[1] for i,x in enumerate(x_vals) if iris.target[i]==1]
+class3_x = [x[0] for i,x in enumerate(x_vals) if iris.target[i]==2]
+class3_y = [x[1] for i,x in enumerate(x_vals) if iris.target[i]==2]
+
+# Declare batch size
+batch_size = 50
+
+# Initialize placeholders
+x_data = tf.placeholder(shape=[None, 2], dtype=tf.float32)
+y_target = tf.placeholder(shape=[3, None], dtype=tf.float32)
+prediction_grid = tf.placeholder(shape=[None, 2], dtype=tf.float32)
+
+# Create variables for svm
+b = tf.Variable(tf.random_normal(shape=[3,batch_size]))
+
+# Gaussian (RBF) kernel
+gamma = tf.constant(-10.0)
+dist = tf.reduce_sum(tf.square(x_data), 1)
+dist = tf.reshape(dist, [-1,1])
+sq_dists = tf.add(tf.sub(dist, tf.mul(2., tf.matmul(x_data, tf.transpose(x_data)))), tf.transpose(dist))
+my_kernel = tf.exp(tf.mul(gamma, tf.abs(sq_dists)))
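+
+# Hedged aside (my addition): sq_dists above uses the expansion
+# ||xi - xj||^2 = ||xi||^2 - 2*xi.xj + ||xj||^2 to get all pairwise squared
+# distances in one shot. A small NumPy check with two points:
+pts = np.array([[0., 0.], [3., 4.]])
+d = np.sum(pts**2, 1).reshape(-1, 1) - 2.*pts.dot(pts.T) + np.sum(pts**2, 1)
+print(d)  # off-diagonal entries are 25 = 3^2 + 4^2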
+
+# Declare function to do reshape/batch multiplication
+def reshape_matmul(mat):
+ v1 = tf.expand_dims(mat, 1)
+ v2 = tf.reshape(v1, [3, batch_size, 1])
+ return(tf.batch_matmul(v2, v1))
+
+# Compute SVM Model
+model_output = tf.matmul(b, my_kernel)
+first_term = tf.reduce_sum(b)
+b_vec_cross = tf.matmul(tf.transpose(b), b)
+y_target_cross = reshape_matmul(y_target)
+
+second_term = tf.reduce_sum(tf.mul(my_kernel, tf.mul(b_vec_cross, y_target_cross)),[1,2])
+loss = tf.reduce_sum(tf.neg(tf.sub(first_term, second_term)))
+
+# Gaussian (RBF) prediction kernel
+rA = tf.reshape(tf.reduce_sum(tf.square(x_data), 1),[-1,1])
+rB = tf.reshape(tf.reduce_sum(tf.square(prediction_grid), 1),[-1,1])
+pred_sq_dist = tf.add(tf.sub(rA, tf.mul(2., tf.matmul(x_data, tf.transpose(prediction_grid)))), tf.transpose(rB))
+pred_kernel = tf.exp(tf.mul(gamma, tf.abs(pred_sq_dist)))
+
+prediction_output = tf.matmul(tf.mul(y_target,b), pred_kernel)
+prediction = tf.arg_max(prediction_output-tf.expand_dims(tf.reduce_mean(prediction_output,1), 1), 0)
+accuracy = tf.reduce_mean(tf.cast(tf.equal(prediction, tf.argmax(y_target,0)), tf.float32))
+
+# Declare optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.01)
+train_step = my_opt.minimize(loss)
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Training loop
+loss_vec = []
+batch_accuracy = []
+for i in range(100):
+ rand_index = np.random.choice(len(x_vals), size=batch_size)
+ rand_x = x_vals[rand_index]
+ rand_y = y_vals[:,rand_index]
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+
+ temp_loss = sess.run(loss, feed_dict={x_data: rand_x, y_target: rand_y})
+ loss_vec.append(temp_loss)
+
+ acc_temp = sess.run(accuracy, feed_dict={x_data: rand_x,
+ y_target: rand_y,
+ prediction_grid:rand_x})
+ batch_accuracy.append(acc_temp)
+
+ if (i+1)%25==0:
+ print('Step #' + str(i+1))
+ print('Loss = ' + str(temp_loss))
+
+# Create a mesh to plot points in
+x_min, x_max = x_vals[:, 0].min() - 1, x_vals[:, 0].max() + 1
+y_min, y_max = x_vals[:, 1].min() - 1, x_vals[:, 1].max() + 1
+xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.02),
+ np.arange(y_min, y_max, 0.02))
+grid_points = np.c_[xx.ravel(), yy.ravel()]
+grid_predictions = sess.run(prediction, feed_dict={x_data: rand_x,
+ y_target: rand_y,
+ prediction_grid: grid_points})
+grid_predictions = grid_predictions.reshape(xx.shape)
+
+# Plot points and grid
+plt.contourf(xx, yy, grid_predictions, cmap=plt.cm.Paired, alpha=0.8)
+plt.plot(class1_x, class1_y, 'ro', label='I. setosa')
+plt.plot(class2_x, class2_y, 'kx', label='I. versicolor')
+plt.plot(class3_x, class3_y, 'gv', label='I. virginica')
+plt.title('Gaussian SVM Results on Iris Data')
+plt.xlabel('Sepal Length')
+plt.ylabel('Petal Width')
+plt.legend(loc='lower right')
+plt.ylim([-0.5, 3.0])
+plt.xlim([3.5, 8.5])
+plt.show()
+
+# Plot batch accuracy
+plt.plot(batch_accuracy, 'k-', label='Accuracy')
+plt.title('Batch Accuracy')
+plt.xlabel('Generation')
+plt.ylabel('Accuracy')
+plt.legend(loc='lower right')
+plt.show()
+
+# Plot loss over time
+plt.plot(loss_vec, 'k-')
+plt.title('Loss per Generation')
+plt.xlabel('Generation')
+plt.ylabel('Loss')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 04/nonlinear_svm.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 04/nonlinear_svm.py
new file mode 100644
index 000000000..7aa28cc06
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 04/nonlinear_svm.py
@@ -0,0 +1,137 @@
+# Nonlinear SVM Example
+#----------------------------------
+#
+# This function will illustrate how to
+# implement the Gaussian kernel on
+# the iris dataset.
+#
+# Gaussian Kernel:
+# K(x1, x2) = exp(-gamma * abs(x1 - x2)^2)
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from sklearn import datasets
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Load the data
+# iris.data = [(Sepal Length, Sepal Width, Petal Length, Petal Width)]
+iris = datasets.load_iris()
+x_vals = np.array([[x[0], x[3]] for x in iris.data])
+y_vals = np.array([1 if y==0 else -1 for y in iris.target])
+class1_x = [x[0] for i,x in enumerate(x_vals) if y_vals[i]==1]
+class1_y = [x[1] for i,x in enumerate(x_vals) if y_vals[i]==1]
+class2_x = [x[0] for i,x in enumerate(x_vals) if y_vals[i]==-1]
+class2_y = [x[1] for i,x in enumerate(x_vals) if y_vals[i]==-1]
+
+# Declare batch size
+batch_size = 150
+
+# Initialize placeholders
+x_data = tf.placeholder(shape=[None, 2], dtype=tf.float32)
+y_target = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+prediction_grid = tf.placeholder(shape=[None, 2], dtype=tf.float32)
+
+# Create variables for svm
+b = tf.Variable(tf.random_normal(shape=[1,batch_size]))
+
+# Gaussian (RBF) kernel
+gamma = tf.constant(-25.0)
+dist = tf.reduce_sum(tf.square(x_data), 1)
+dist = tf.reshape(dist, [-1,1])
+sq_dists = tf.add(tf.sub(dist, tf.mul(2., tf.matmul(x_data, tf.transpose(x_data)))), tf.transpose(dist))
+my_kernel = tf.exp(tf.mul(gamma, tf.abs(sq_dists)))
+
+# Compute SVM Model
+model_output = tf.matmul(b, my_kernel)
+first_term = tf.reduce_sum(b)
+b_vec_cross = tf.matmul(tf.transpose(b), b)
+y_target_cross = tf.matmul(y_target, tf.transpose(y_target))
+second_term = tf.reduce_sum(tf.mul(my_kernel, tf.mul(b_vec_cross, y_target_cross)))
+loss = tf.neg(tf.sub(first_term, second_term))
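+
+# Hedged note (my addition): this is a simplified kernel-SVM dual objective,
+# sum(b) - sum(K * (b b') * (y y')); it is negated so that gradient descent
+# on 'loss' maximizes the dual.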
+
+# Gaussian (RBF) prediction kernel
+rA = tf.reshape(tf.reduce_sum(tf.square(x_data), 1),[-1,1])
+rB = tf.reshape(tf.reduce_sum(tf.square(prediction_grid), 1),[-1,1])
+pred_sq_dist = tf.add(tf.sub(rA, tf.mul(2., tf.matmul(x_data, tf.transpose(prediction_grid)))), tf.transpose(rB))
+pred_kernel = tf.exp(tf.mul(gamma, tf.abs(pred_sq_dist)))
+
+prediction_output = tf.matmul(tf.mul(tf.transpose(y_target),b), pred_kernel)
+prediction = tf.sign(prediction_output-tf.reduce_mean(prediction_output))
+accuracy = tf.reduce_mean(tf.cast(tf.equal(tf.squeeze(prediction), tf.squeeze(y_target)), tf.float32))
+
+# Declare optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.01)
+train_step = my_opt.minimize(loss)
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Training loop
+loss_vec = []
+batch_accuracy = []
+for i in range(300):
+ rand_index = np.random.choice(len(x_vals), size=batch_size)
+ rand_x = x_vals[rand_index]
+ rand_y = np.transpose([y_vals[rand_index]])
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+
+ temp_loss = sess.run(loss, feed_dict={x_data: rand_x, y_target: rand_y})
+ loss_vec.append(temp_loss)
+
+ acc_temp = sess.run(accuracy, feed_dict={x_data: rand_x,
+ y_target: rand_y,
+ prediction_grid:rand_x})
+ batch_accuracy.append(acc_temp)
+
+ if (i+1)%75==0:
+ print('Step #' + str(i+1))
+ print('Loss = ' + str(temp_loss))
+
+# Create a mesh to plot points in
+x_min, x_max = x_vals[:, 0].min() - 1, x_vals[:, 0].max() + 1
+y_min, y_max = x_vals[:, 1].min() - 1, x_vals[:, 1].max() + 1
+xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.02),
+ np.arange(y_min, y_max, 0.02))
+grid_points = np.c_[xx.ravel(), yy.ravel()]
+[grid_predictions] = sess.run(prediction, feed_dict={x_data: rand_x,
+ y_target: rand_y,
+ prediction_grid: grid_points})
+grid_predictions = grid_predictions.reshape(xx.shape)
+
+# Plot points and grid
+plt.contourf(xx, yy, grid_predictions, cmap=plt.cm.Paired, alpha=0.8)
+plt.plot(class1_x, class1_y, 'ro', label='I. setosa')
+plt.plot(class2_x, class2_y, 'kx', label='Non setosa')
+plt.title('Gaussian SVM Results on Iris Data')
+plt.xlabel('Sepal Length')
+plt.ylabel('Petal Width')
+plt.legend(loc='lower right')
+plt.ylim([-0.5, 3.0])
+plt.xlim([3.5, 8.5])
+plt.show()
+
+# Plot batch accuracy
+plt.plot(batch_accuracy, 'k-', label='Accuracy')
+plt.title('Batch Accuracy')
+plt.xlabel('Generation')
+plt.ylabel('Accuracy')
+plt.legend(loc='lower right')
+plt.show()
+
+# Plot loss over time
+plt.plot(loss_vec, 'k-')
+plt.title('Loss per Generation')
+plt.xlabel('Generation')
+plt.ylabel('Loss')
+plt.show()
+
+# sess.run(prediction_output, feed_dict={x_data: rand_x, y_target: rand_y, prediction_grid: grid_points})
+# sess.run(pred_kernel, feed_dict={x_data: rand_x, y_target: rand_y, prediction_grid: grid_points})
+# sess.run(model_output, feed_dict={x_data:rand_x, y_target: rand_y})
+# sess.run(second_term, feed_dict={x_data:rand_x, y_target: rand_y})
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 04/support_vector_regression.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 04/support_vector_regression.py
new file mode 100644
index 000000000..39cb506c6
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 04/support_vector_regression.py
@@ -0,0 +1,120 @@
+# SVM Regression
+#----------------------------------
+#
+# This function shows how to use Tensorflow to
+# solve support vector regression. We are going
+# to find the line that has the maximum margin
+# which INCLUDES as many points as possible
+#
+# We will use the iris data, specifically:
+# y = Sepal Length
+# x = Petal Width
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from sklearn import datasets
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Load the data
+# iris.data = [(Sepal Length, Sepal Width, Petal Length, Petal Width)]
+iris = datasets.load_iris()
+x_vals = np.array([x[3] for x in iris.data])
+y_vals = np.array([y[0] for y in iris.data])
+
+# Split data into train/test sets
+train_indices = np.random.choice(len(x_vals), round(len(x_vals)*0.8), replace=False)
+test_indices = np.array(list(set(range(len(x_vals))) - set(train_indices)))
+x_vals_train = x_vals[train_indices]
+x_vals_test = x_vals[test_indices]
+y_vals_train = y_vals[train_indices]
+y_vals_test = y_vals[test_indices]
+
+# Declare batch size
+batch_size = 50
+
+# Initialize placeholders
+x_data = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+y_target = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+
+# Create variables for linear regression
+A = tf.Variable(tf.random_normal(shape=[1,1]))
+b = tf.Variable(tf.random_normal(shape=[1,1]))
+
+# Declare model operations
+model_output = tf.add(tf.matmul(x_data, A), b)
+
+# Declare loss function
+# = max(0, abs(target - predicted) - epsilon)
+# epsilon = half of the margin width
+epsilon = tf.constant([0.5])
+# Margin term in loss
+loss = tf.reduce_mean(tf.maximum(0., tf.sub(tf.abs(tf.sub(model_output, y_target)), epsilon)))
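+
+# Hedged micro-example (my addition): with epsilon = 0.5, a residual of 0.3
+# falls inside the margin and costs 0, while a residual of 0.8 costs 0.3.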
+
+# Declare optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.075)
+train_step = my_opt.minimize(loss)
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Training loop
+train_loss = []
+test_loss = []
+for i in range(200):
+ rand_index = np.random.choice(len(x_vals_train), size=batch_size)
+ rand_x = np.transpose([x_vals_train[rand_index]])
+ rand_y = np.transpose([y_vals_train[rand_index]])
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+
+ temp_train_loss = sess.run(loss, feed_dict={x_data: np.transpose([x_vals_train]), y_target: np.transpose([y_vals_train])})
+ train_loss.append(temp_train_loss)
+
+ temp_test_loss = sess.run(loss, feed_dict={x_data: np.transpose([x_vals_test]), y_target: np.transpose([y_vals_test])})
+ test_loss.append(temp_test_loss)
+ if (i+1)%50==0:
+ print('-----------')
+ print('Generation: ' + str(i+1))
+ print('A = ' + str(sess.run(A)) + ' b = ' + str(sess.run(b)))
+ print('Train Loss = ' + str(temp_train_loss))
+ print('Test Loss = ' + str(temp_test_loss))
+
+# Extract Coefficients
+[[slope]] = sess.run(A)
+[[y_intercept]] = sess.run(b)
+[width] = sess.run(epsilon)
+
+# Get best fit line
+best_fit = []
+best_fit_upper = []
+best_fit_lower = []
+for i in x_vals:
+ best_fit.append(slope*i+y_intercept)
+ best_fit_upper.append(slope*i+y_intercept+width)
+ best_fit_lower.append(slope*i+y_intercept-width)
+
+# Plot fit with data
+plt.plot(x_vals, y_vals, 'o', label='Data Points')
+plt.plot(x_vals, best_fit, 'r-', label='SVM Regression Line', linewidth=3)
+plt.plot(x_vals, best_fit_upper, 'r--', linewidth=2)
+plt.plot(x_vals, best_fit_lower, 'r--', linewidth=2)
+plt.ylim([0, 10])
+plt.legend(loc='lower right')
+plt.title('Sepal Length vs Petal Width')
+plt.xlabel('Petal Width')
+plt.ylabel('Sepal Length')
+plt.show()
+
+# Plot loss over time
+plt.plot(train_loss, 'k-', label='Train Set Loss')
+plt.plot(test_loss, 'r--', label='Test Set Loss')
+plt.title('Epsilon-Insensitive Loss per Generation')
+plt.xlabel('Generation')
+plt.ylabel('Loss')
+plt.legend(loc='upper right')
+plt.show()
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 04/svm_kernels.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 04/svm_kernels.py
new file mode 100644
index 000000000..b781ea46d
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 04/svm_kernels.py
@@ -0,0 +1,145 @@
+# Illustration of Various Kernels
+#----------------------------------
+#
+# This function will illustrate how to
+# implement various kernels in Tensorflow.
+#
+# Linear Kernel:
+# K(x1, x2) = t(x1) * x2
+#
+# Gaussian Kernel (RBF):
+# K(x1, x2) = exp(-gamma * abs(x1 - x2)^2)
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from sklearn import datasets
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Generate non-linear data
+(x_vals, y_vals) = datasets.make_circles(n_samples=350, factor=.5, noise=.1)
+y_vals = np.array([1 if y==1 else -1 for y in y_vals])
+class1_x = [x[0] for i,x in enumerate(x_vals) if y_vals[i]==1]
+class1_y = [x[1] for i,x in enumerate(x_vals) if y_vals[i]==1]
+class2_x = [x[0] for i,x in enumerate(x_vals) if y_vals[i]==-1]
+class2_y = [x[1] for i,x in enumerate(x_vals) if y_vals[i]==-1]
+
+# Declare batch size
+batch_size = 350
+
+# Initialize placeholders
+x_data = tf.placeholder(shape=[None, 2], dtype=tf.float32)
+y_target = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+prediction_grid = tf.placeholder(shape=[None, 2], dtype=tf.float32)
+
+# Create variables for svm
+b = tf.Variable(tf.random_normal(shape=[1,batch_size]))
+
+# Apply kernel
+# Linear Kernel
+# my_kernel = tf.matmul(x_data, tf.transpose(x_data))
+
+# Gaussian (RBF) kernel
+gamma = tf.constant(-50.0)
+dist = tf.reduce_sum(tf.square(x_data), 1)
+dist = tf.reshape(dist, [-1,1])
+sq_dists = tf.add(tf.sub(dist, tf.mul(2., tf.matmul(x_data, tf.transpose(x_data)))), tf.transpose(dist))
+my_kernel = tf.exp(tf.mul(gamma, tf.abs(sq_dists)))
+
+# Compute SVM Model
+model_output = tf.matmul(b, my_kernel)
+first_term = tf.reduce_sum(b)
+b_vec_cross = tf.matmul(tf.transpose(b), b)
+y_target_cross = tf.matmul(y_target, tf.transpose(y_target))
+second_term = tf.reduce_sum(tf.mul(my_kernel, tf.mul(b_vec_cross, y_target_cross)))
+loss = tf.neg(tf.sub(first_term, second_term))
+
+# Create Prediction Kernel
+# Linear prediction kernel
+# my_kernel = tf.matmul(x_data, tf.transpose(prediction_grid))
+
+# Gaussian (RBF) prediction kernel
+rA = tf.reshape(tf.reduce_sum(tf.square(x_data), 1),[-1,1])
+rB = tf.reshape(tf.reduce_sum(tf.square(prediction_grid), 1),[-1,1])
+pred_sq_dist = tf.add(tf.sub(rA, tf.mul(2., tf.matmul(x_data, tf.transpose(prediction_grid)))), tf.transpose(rB))
+pred_kernel = tf.exp(tf.mul(gamma, tf.abs(pred_sq_dist)))
+
+prediction_output = tf.matmul(tf.mul(tf.transpose(y_target),b), pred_kernel)
+prediction = tf.sign(prediction_output-tf.reduce_mean(prediction_output))
+accuracy = tf.reduce_mean(tf.cast(tf.equal(tf.squeeze(prediction), tf.squeeze(y_target)), tf.float32))
+
+# Declare optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.002)
+train_step = my_opt.minimize(loss)
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Training loop
+loss_vec = []
+batch_accuracy = []
+for i in range(1000):
+ rand_index = np.random.choice(len(x_vals), size=batch_size)
+ rand_x = x_vals[rand_index]
+ rand_y = np.transpose([y_vals[rand_index]])
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+
+ temp_loss = sess.run(loss, feed_dict={x_data: rand_x, y_target: rand_y})
+ loss_vec.append(temp_loss)
+
+ acc_temp = sess.run(accuracy, feed_dict={x_data: rand_x,
+ y_target: rand_y,
+ prediction_grid:rand_x})
+ batch_accuracy.append(acc_temp)
+
+ if (i+1)%250==0:
+ print('Step #' + str(i+1))
+ print('Loss = ' + str(temp_loss))
+
+# Create a mesh to plot points in
+x_min, x_max = x_vals[:, 0].min() - 1, x_vals[:, 0].max() + 1
+y_min, y_max = x_vals[:, 1].min() - 1, x_vals[:, 1].max() + 1
+xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.02),
+ np.arange(y_min, y_max, 0.02))
+grid_points = np.c_[xx.ravel(), yy.ravel()]
+[grid_predictions] = sess.run(prediction, feed_dict={x_data: rand_x,
+ y_target: rand_y,
+ prediction_grid: grid_points})
+grid_predictions = grid_predictions.reshape(xx.shape)
+
+# Plot points and grid
+plt.contourf(xx, yy, grid_predictions, cmap=plt.cm.Paired, alpha=0.8)
+plt.plot(class1_x, class1_y, 'ro', label='Class 1')
+plt.plot(class2_x, class2_y, 'kx', label='Class -1')
+plt.title('Gaussian SVM Results')
+plt.xlabel('x')
+plt.ylabel('y')
+plt.legend(loc='lower right')
+plt.ylim([-1.5, 1.5])
+plt.xlim([-1.5, 1.5])
+plt.show()
+
+# Plot batch accuracy
+plt.plot(batch_accuracy, 'k-', label='Accuracy')
+plt.title('Batch Accuracy')
+plt.xlabel('Generation')
+plt.ylabel('Accuracy')
+plt.legend(loc='lower right')
+plt.show()
+
+# Plot loss over time
+plt.plot(loss_vec, 'k-')
+plt.title('Loss per Generation')
+plt.xlabel('Generation')
+plt.ylabel('Loss')
+plt.show()
+
+# sess.run(prediction_output, feed_dict={x_data: rand_x, y_target: rand_y, prediction_grid: grid_points})
+# sess.run(pred_kernel, feed_dict={x_data: rand_x, y_target: rand_y, prediction_grid: grid_points})
+# sess.run(model_output, feed_dict={x_data:rand_x, y_target: rand_y})
+# sess.run(second_term, feed_dict={x_data:rand_x, y_target: rand_y})
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 05/address_matching.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 05/address_matching.py
new file mode 100644
index 000000000..f43cf2b71
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 05/address_matching.py
@@ -0,0 +1,112 @@
+# Address Matching with k-Nearest Neighbors
+#----------------------------------
+#
+# This function illustrates a way to perform
+# address matching between two data sets.
+#
+# For each test address, we will return the
+# closest reference address to it.
+#
+# We will consider two distance functions:
+# 1) Edit distance for street number/name and
+# 2) Euclidean distance (L2) for the zip codes
+
+import random
+import string
+import numpy as np
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# First we generate the data sets we will need
+# n = Size of created data sets
+n = 10
+street_names = ['abbey', 'baker', 'canal', 'donner', 'elm']
+street_types = ['rd', 'st', 'ln', 'pass', 'ave']
+rand_zips = [random.randint(65000,65999) for i in range(5)]
+
+# Function to randomly create one typo in a string with probability 'prob'
+def create_typo(s, prob=0.75):
+ if random.uniform(0,1) < prob:
+ rand_ind = random.choice(range(len(s)))
+ s_list = list(s)
+ s_list[rand_ind]=random.choice(string.ascii_lowercase)
+ s = ''.join(s_list)
+ return(s)
+
+# Generate the reference dataset
+numbers = [random.randint(1, 9999) for i in range(n)]
+streets = [random.choice(street_names) for i in range(n)]
+street_suffs = [random.choice(street_types) for i in range(n)]
+zips = [random.choice(rand_zips) for i in range(n)]
+full_streets = [str(x) + ' ' + y + ' ' + z for x,y,z in zip(numbers, streets, street_suffs)]
+reference_data = [list(x) for x in zip(full_streets,zips)]
+
+# Generate test dataset with some typos
+typo_streets = [create_typo(x) for x in streets]
+typo_full_streets = [str(x) + ' ' + y + ' ' + z for x,y,z in zip(numbers, typo_streets, street_suffs)]
+test_data = [list(x) for x in zip(typo_full_streets,zips)]
+
+# Now we can perform address matching
+# Create graph
+sess = tf.Session()
+
+# Placeholders
+test_address = tf.sparse_placeholder(dtype=tf.string)
+test_zip = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+ref_address = tf.sparse_placeholder(dtype=tf.string)
+ref_zip = tf.placeholder(shape=[None, n], dtype=tf.float32)
+
+# Declare Zip code distance for a test zip and reference set
+zip_dist = tf.square(tf.sub(ref_zip, test_zip))
+
+# Declare Edit distance for address
+address_dist = tf.edit_distance(test_address, ref_address, normalize=True)
+
+# Create similarity scores
+zip_max = tf.gather(tf.squeeze(zip_dist), tf.argmax(zip_dist, 1))
+zip_min = tf.gather(tf.squeeze(zip_dist), tf.argmin(zip_dist, 1))
+zip_sim = tf.div(tf.sub(zip_max, zip_dist), tf.sub(zip_max, zip_min))
+address_sim = tf.sub(1., address_dist)
+
+# Combine distance functions
+address_weight = 0.5
+zip_weight = 1. - address_weight
+weighted_sim = tf.add(tf.transpose(tf.mul(address_weight, address_sim)), tf.mul(zip_weight, zip_sim))
+
+# Predict: Get max similarity entry
+top_match_index = tf.argmax(weighted_sim, 1)
+
+
+# Function to Create a character-sparse tensor from strings
+def sparse_from_word_vec(word_vec):
+ num_words = len(word_vec)
+ indices = [[xi, 0, yi] for xi,x in enumerate(word_vec) for yi,y in enumerate(x)]
+ chars = list(''.join(word_vec))
+ return(tf.SparseTensorValue(indices, chars, [num_words,1,1]))
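+
+# For illustration (a sketch with a made-up word list), the helper turns
+# ['ab', 'c'] into indices [[0,0,0],[0,0,1],[1,0,0]], values ['a','b','c'],
+# and shape [2,1,1] -- one character list per word, which is the layout
+# tf.edit_distance() expects:
+_demo_sparse = sparse_from_word_vec(['ab', 'c'])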
+
+# Loop through test indices
+reference_addresses = [x[0] for x in reference_data]
+reference_zips = np.array([[x[1] for x in reference_data]])
+
+# Create sparse address reference set
+sparse_ref_set = sparse_from_word_vec(reference_addresses)
+
+for i in range(n):
+ test_address_entry = test_data[i][0]
+ test_zip_entry = [[test_data[i][1]]]
+
+ # Create sparse address vectors
+ test_address_repeated = [test_address_entry] * n
+ sparse_test_set = sparse_from_word_vec(test_address_repeated)
+
+ feeddict={test_address: sparse_test_set,
+ test_zip: test_zip_entry,
+ ref_address: sparse_ref_set,
+ ref_zip: reference_zips}
+ best_match = sess.run(top_match_index, feed_dict=feeddict)
+ best_street = reference_addresses[best_match[0]]
+ [best_zip] = reference_zips[0][best_match]
+ [[test_zip_]] = test_zip_entry
+ print('Address: ' + str(test_address_entry) + ', ' + str(test_zip_))
+ print('Match : ' + str(best_street) + ', ' + str(best_zip))
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 05/image_recognition.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 05/image_recognition.py
new file mode 100644
index 000000000..7ae2d7793
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 05/image_recognition.py
@@ -0,0 +1,93 @@
+# MNIST Digit Prediction with k-Nearest Neighbors
+#-----------------------------------------------
+#
+# This script will load the MNIST data, and split
+# it into test/train and perform prediction with
+# nearest neighbors
+#
+# For each test integer, we will return the
+# closest image/integer.
+#
+# Integer images are represented as 28x28 matrices
+# of floating point numbers
+
+import random
+import numpy as np
+import tensorflow as tf
+import matplotlib.pyplot as plt
+from PIL import Image
+from tensorflow.examples.tutorials.mnist import input_data
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Load the data
+mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
+
+# Random sample
+train_size = 1000
+test_size = 102
+rand_train_indices = np.random.choice(len(mnist.train.images), train_size, replace=False)
+rand_test_indices = np.random.choice(len(mnist.test.images), test_size, replace=False)
+x_vals_train = mnist.train.images[rand_train_indices]
+x_vals_test = mnist.test.images[rand_test_indices]
+y_vals_train = mnist.train.labels[rand_train_indices]
+y_vals_test = mnist.test.labels[rand_test_indices]
+
+# Declare k-value and batch size
+k = 4
+batch_size=6
+
+# Placeholders
+x_data_train = tf.placeholder(shape=[None, 784], dtype=tf.float32)
+x_data_test = tf.placeholder(shape=[None, 784], dtype=tf.float32)
+y_target_train = tf.placeholder(shape=[None, 10], dtype=tf.float32)
+y_target_test = tf.placeholder(shape=[None, 10], dtype=tf.float32)
+
+# Declare distance metric
+# L1
+distance = tf.reduce_sum(tf.abs(tf.sub(x_data_train, tf.expand_dims(x_data_test,1))), reduction_indices=2)
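+# Shape note: expanding the test batch to [n_test, 1, 784] and subtracting
+# the [n_train, 784] train set broadcasts to [n_test, n_train, 784];
+# summing over axis 2 then yields the full [n_test, n_train] distance matrix.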
+
+# L2
+#distance = tf.sqrt(tf.reduce_sum(tf.square(tf.sub(x_data_train, tf.expand_dims(x_data_test,1))), reduction_indices=1))
+
+# Predict: Get min distance index (Nearest neighbor)
+top_k_xvals, top_k_indices = tf.nn.top_k(tf.neg(distance), k=k)
+prediction_indices = tf.gather(y_target_train, top_k_indices)
+# Predict the mode category
+count_of_predictions = tf.reduce_sum(prediction_indices, reduction_indices=1)
+prediction = tf.argmax(count_of_predictions, dimension=1)
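+# Worked example of the vote above (an illustrative sketch): with k=4
+# one-hot neighbor labels, summing over the k axis gives per-class counts
+# and argmax picks the mode, e.g. three votes for class 1 vs one for class 0:
+_votes = np.array([[[0, 1, 0], [0, 1, 0], [1, 0, 0], [0, 1, 0]]], dtype=np.float32)
+assert np.argmax(_votes.sum(axis=1), axis=1)[0] == 1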
+
+# Calculate how many loops over training data
+num_loops = int(np.ceil(len(x_vals_test)/batch_size))
+
+test_output = []
+actual_vals = []
+for i in range(num_loops):
+ min_index = i*batch_size
+ max_index = min((i+1)*batch_size,len(x_vals_test))
+ x_batch = x_vals_test[min_index:max_index]
+ y_batch = y_vals_test[min_index:max_index]
+ predictions = sess.run(prediction, feed_dict={x_data_train: x_vals_train, x_data_test: x_batch,
+ y_target_train: y_vals_train, y_target_test: y_batch})
+ test_output.extend(predictions)
+ actual_vals.extend(np.argmax(y_batch, axis=1))
+
+accuracy = sum([1./test_size for i in range(test_size) if test_output[i]==actual_vals[i]])
+print('Accuracy on test set: ' + str(accuracy))
+
+# Plot the last batch results:
+actuals = np.argmax(y_batch, axis=1)
+
+Nrows = 2
+Ncols = 3
+for i in range(len(actuals)):
+ plt.subplot(Nrows, Ncols, i+1)
+ plt.imshow(np.reshape(x_batch[i], [28,28]), cmap='Greys_r')
+ plt.title('Actual: ' + str(actuals[i]) + ' Pred: ' + str(predictions[i]),
+ fontsize=10)
+ frame = plt.gca()
+ frame.axes.get_xaxis().set_visible(False)
+ frame.axes.get_yaxis().set_visible(False)
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 05/mixed_distance_functions_knn.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 05/mixed_distance_functions_knn.py
new file mode 100644
index 000000000..7e55037dc
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 05/mixed_distance_functions_knn.py
@@ -0,0 +1,115 @@
+# Mixed Distance Functions for k-Nearest Neighbor
+#----------------------------------
+#
+# This function shows how to use different distance
+# metrics on different features for kNN.
+#
+# Data:
+#----------x-values-----------
+# CRIM : per capita crime rate by town
+# ZN : prop. of res. land zones
+# INDUS : prop. of non-retail business acres
+# CHAS : Charles river dummy variable
+# NOX : nitric oxides concentration (parts per 10 million)
+# RM : Avg. # of rooms per building
+# AGE : prop. of buildings built prior to 1940
+# DIS : Weighted distances to employment centers
+# RAD : Index of radial highway accessibility
+# TAX : Full tax rate value per $10k
+# PTRATIO: Pupil/Teacher ratio by town
+# B : 1000*(Bk-0.63)^2, Bk=prop. of blacks
+# LSTAT : % lower status of pop
+#------------y-value-----------
+# MEDV : Median Value of homes in $1,000's
+
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+import requests
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Load the data
+housing_url = 'https://archive.ics.uci.edu/ml/machine-learning-databases/housing/housing.data'
+housing_header = ['CRIM', 'ZN', 'INDUS', 'CHAS', 'NOX', 'RM', 'AGE', 'DIS', 'RAD', 'TAX', 'PTRATIO', 'B', 'LSTAT', 'MEDV']
+cols_used = ['CRIM', 'INDUS', 'NOX', 'RM', 'AGE', 'DIS', 'TAX', 'PTRATIO', 'B', 'LSTAT']
+num_features = len(cols_used)
+housing_file = requests.get(housing_url)
+housing_data = [[float(x) for x in y.split(' ') if len(x)>=1] for y in housing_file.text.split('\n') if len(y)>=1]
+
+y_vals = np.transpose([np.array([y[13] for y in housing_data])])
+x_vals = np.array([[x for i,x in enumerate(y) if housing_header[i] in cols_used] for y in housing_data])
+
+## Min-Max Scaling
+x_vals = (x_vals - x_vals.min(0)) / x_vals.ptp(0)
+
+## Create distance metric weight matrix weighted by standard deviation
+weight_diagonal = x_vals.std(0)
+weight_matrix = tf.cast(tf.diag(weight_diagonal), dtype=tf.float32)
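+# With a diagonal A, the weighted distance sqrt((x-y)^T * A * (x-y)) reduces
+# to a per-feature rescaling. A quick NumPy check of that identity
+# (an illustrative sketch):
+_d = np.random.randn(num_features)
+assert np.isclose(np.sqrt(_d.dot(np.diag(weight_diagonal)).dot(_d)),
+                  np.sqrt(np.sum(weight_diagonal * _d**2)))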
+
+# Split the data into train and test sets
+train_indices = np.random.choice(len(x_vals), round(len(x_vals)*0.8), replace=False)
+test_indices = np.array(list(set(range(len(x_vals))) - set(train_indices)))
+x_vals_train = x_vals[train_indices]
+x_vals_test = x_vals[test_indices]
+y_vals_train = y_vals[train_indices]
+y_vals_test = y_vals[test_indices]
+
+# Declare k-value and batch size
+k = 4
+batch_size=len(x_vals_test)
+
+# Placeholders
+x_data_train = tf.placeholder(shape=[None, num_features], dtype=tf.float32)
+x_data_test = tf.placeholder(shape=[None, num_features], dtype=tf.float32)
+y_target_train = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+y_target_test = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+
+# Declare weighted distance metric
+# Weighted - L2 = sqrt((x-y)^T * A * (x-y))
+subtraction_term = tf.sub(x_data_train, tf.expand_dims(x_data_test,1))
+first_product = tf.batch_matmul(subtraction_term, tf.tile(tf.expand_dims(weight_matrix,0), [batch_size,1,1]))
+second_product = tf.batch_matmul(first_product, tf.transpose(subtraction_term, perm=[0,2,1]))
+distance = tf.sqrt(tf.batch_matrix_diag_part(second_product))
+
+# Predict: Get min distance index (Nearest neighbor)
+top_k_xvals, top_k_indices = tf.nn.top_k(tf.neg(distance), k=k)
+x_sums = tf.expand_dims(tf.reduce_sum(top_k_xvals, 1),1)
+x_sums_repeated = tf.matmul(x_sums,tf.ones([1, k], tf.float32))
+x_val_weights = tf.expand_dims(tf.div(top_k_xvals,x_sums_repeated), 1)
+
+top_k_yvals = tf.gather(y_target_train, top_k_indices)
+prediction = tf.squeeze(tf.batch_matmul(x_val_weights,top_k_yvals), squeeze_dims=[1])
+
+# Calculate MSE
+mse = tf.div(tf.reduce_sum(tf.square(tf.sub(prediction, y_target_test))), batch_size)
+
+# Calculate how many loops over training data
+num_loops = int(np.ceil(len(x_vals_test)/batch_size))
+
+for i in range(num_loops):
+ min_index = i*batch_size
+ max_index = min((i+1)*batch_size,len(x_vals_test))
+ x_batch = x_vals_test[min_index:max_index]
+ y_batch = y_vals_test[min_index:max_index]
+ predictions = sess.run(prediction, feed_dict={x_data_train: x_vals_train, x_data_test: x_batch,
+ y_target_train: y_vals_train, y_target_test: y_batch})
+ batch_mse = sess.run(mse, feed_dict={x_data_train: x_vals_train, x_data_test: x_batch,
+ y_target_train: y_vals_train, y_target_test: y_batch})
+
+ print('Batch #' + str(i+1) + ' MSE: ' + str(np.round(batch_mse,3)))
+
+# Plot prediction and actual distribution
+bins = np.linspace(5, 50, 45)
+
+plt.hist(predictions, bins, alpha=0.5, label='Prediction')
+plt.hist(y_batch, bins, alpha=0.5, label='Actual')
+plt.title('Histogram of Predicted and Actual Values')
+plt.xlabel('Med Home Value in $1,000s')
+plt.ylabel('Frequency')
+plt.legend(loc='upper right')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 05/nearest_neighbor.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 05/nearest_neighbor.py
new file mode 100644
index 000000000..2d0f53770
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 05/nearest_neighbor.py
@@ -0,0 +1,115 @@
+# k-Nearest Neighbor
+#----------------------------------
+#
+# This function illustrates how to use
+# k-nearest neighbors in tensorflow
+#
+# We will use the 1970s Boston housing dataset
+# which is available through the UCI
+# ML data repository.
+#
+# Data:
+#----------x-values-----------
+# CRIM : per capita crime rate by town
+# ZN : prop. of res. land zones
+# INDUS : prop. of non-retail business acres
+# CHAS : Charles river dummy variable
+# NOX : nitric oxides concentration (parts per 10 million)
+# RM : Avg. # of rooms per building
+# AGE : prop. of buildings built prior to 1940
+# DIS : Weighted distances to employment centers
+# RAD : Index of radial highway accessibility
+# TAX : Full tax rate value per $10k
+# PTRATIO: Pupil/Teacher ratio by town
+# B : 1000*(Bk-0.63)^2, Bk=prop. of blacks
+# LSTAT : % lower status of pop
+#------------y-value-----------
+# MEDV : Median Value of homes in $1,000's
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+import requests
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+# Load the data
+housing_url = 'https://archive.ics.uci.edu/ml/machine-learning-databases/housing/housing.data'
+housing_header = ['CRIM', 'ZN', 'INDUS', 'CHAS', 'NOX', 'RM', 'AGE', 'DIS', 'RAD', 'TAX', 'PTRATIO', 'B', 'LSTAT', 'MEDV']
+cols_used = ['CRIM', 'INDUS', 'NOX', 'RM', 'AGE', 'DIS', 'TAX', 'PTRATIO', 'B', 'LSTAT']
+num_features = len(cols_used)
+housing_file = requests.get(housing_url)
+housing_data = [[float(x) for x in y.split(' ') if len(x)>=1] for y in housing_file.text.split('\n') if len(y)>=1]
+
+y_vals = np.transpose([np.array([y[13] for y in housing_data])])
+x_vals = np.array([[x for i,x in enumerate(y) if housing_header[i] in cols_used] for y in housing_data])
+
+## Min-Max Scaling
+x_vals = (x_vals - x_vals.min(0)) / x_vals.ptp(0)
+
+# Split the data into train and test sets
+train_indices = np.random.choice(len(x_vals), round(len(x_vals)*0.8), replace=False)
+test_indices = np.array(list(set(range(len(x_vals))) - set(train_indices)))
+x_vals_train = x_vals[train_indices]
+x_vals_test = x_vals[test_indices]
+y_vals_train = y_vals[train_indices]
+y_vals_test = y_vals[test_indices]
+
+# Declare k-value and batch size
+k = 4
+batch_size=len(x_vals_test)
+
+# Placeholders
+x_data_train = tf.placeholder(shape=[None, num_features], dtype=tf.float32)
+x_data_test = tf.placeholder(shape=[None, num_features], dtype=tf.float32)
+y_target_train = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+y_target_test = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+
+# Declare distance metric
+# L1
+distance = tf.reduce_sum(tf.abs(tf.sub(x_data_train, tf.expand_dims(x_data_test,1))), reduction_indices=2)
+
+# L2
+#distance = tf.sqrt(tf.reduce_sum(tf.square(tf.sub(x_data_train, tf.expand_dims(x_data_test,1))), reduction_indices=1))
+
+# Predict: Get min distance index (Nearest neighbor)
+top_k_xvals, top_k_indices = tf.nn.top_k(tf.neg(distance), k=k)
+x_sums = tf.expand_dims(tf.reduce_sum(top_k_xvals, 1),1)
+x_sums_repeated = tf.matmul(x_sums,tf.ones([1, k], tf.float32))
+x_val_weights = tf.expand_dims(tf.div(top_k_xvals,x_sums_repeated), 1)
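+# Note: top_k_xvals holds *negated* distances, so the weights above are
+# proportional to raw distance -- farther neighbors get the larger weights.
+# An inverse-distance weighting would look like this instead (a sketch,
+# not the recipe as published):
+# inv_dists = tf.div(1., tf.add(tf.neg(top_k_xvals), 1e-6))
+# x_val_weights = tf.expand_dims(tf.div(inv_dists,
+#                     tf.expand_dims(tf.reduce_sum(inv_dists, 1), 1)), 1)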
+
+top_k_yvals = tf.gather(y_target_train, top_k_indices)
+prediction = tf.squeeze(tf.batch_matmul(x_val_weights,top_k_yvals), squeeze_dims=[1])
+
+# Calculate MSE
+mse = tf.div(tf.reduce_sum(tf.square(tf.sub(prediction, y_target_test))), batch_size)
+
+# Calculate how many loops over training data
+num_loops = int(np.ceil(len(x_vals_test)/batch_size))
+
+for i in range(num_loops):
+ min_index = i*batch_size
+ max_index = min((i+1)*batch_size,len(x_vals_test))
+ x_batch = x_vals_test[min_index:max_index]
+ y_batch = y_vals_test[min_index:max_index]
+ predictions = sess.run(prediction, feed_dict={x_data_train: x_vals_train, x_data_test: x_batch,
+ y_target_train: y_vals_train, y_target_test: y_batch})
+ batch_mse = sess.run(mse, feed_dict={x_data_train: x_vals_train, x_data_test: x_batch,
+ y_target_train: y_vals_train, y_target_test: y_batch})
+
+ print('Batch #' + str(i+1) + ' MSE: ' + str(np.round(batch_mse,3)))
+
+# Plot prediction and actual distribution
+bins = np.linspace(5, 50, 45)
+
+plt.hist(predictions, bins, alpha=0.5, label='Prediction')
+plt.hist(y_batch, bins, alpha=0.5, label='Actual')
+plt.title('Histogram of Predicted and Actual Values')
+plt.xlabel('Med Home Value in $1,000s')
+plt.ylabel('Frequency')
+plt.legend(loc='upper right')
+plt.show()
+
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 05/text_distances.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 05/text_distances.py
new file mode 100644
index 000000000..d10aa49a7
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 05/text_distances.py
@@ -0,0 +1,86 @@
+# Text Distances
+#----------------------------------
+#
+# This function illustrates how to use
+# the Levenshtein distance (edit distance)
+# in Tensorflow.
+
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Start Graph Session
+sess = tf.Session()
+
+#----------------------------------
+# First compute the edit distance between 'bear' and 'beers'
+hypothesis = list('bear')
+truth = list('beers')
+h1 = tf.SparseTensor([[0,0,0], [0,0,1], [0,0,2], [0,0,3]],
+ hypothesis,
+ [1,1,1])
+
+t1 = tf.SparseTensor([[0,0,0], [0,0,1], [0,0,2], [0,0,3],[0,0,4]],
+ truth,
+ [1,1,1])
+
+print(sess.run(tf.edit_distance(h1, t1, normalize=False)))
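+# Expected output: [[ 2.]] -- turning 'bear' into 'beers' takes one
+# substitution (a -> e) and one insertion (s), so the raw edit distance is 2.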
+
+#----------------------------------
+# Compute the edit distance between ('bear','beer') and 'beers':
+hypothesis2 = list('bearbeer')
+truth2 = list('beersbeers')
+h2 = tf.SparseTensor([[0,0,0], [0,0,1], [0,0,2], [0,0,3], [0,1,0], [0,1,1], [0,1,2], [0,1,3]],
+ hypothesis2,
+ [1,2,4])
+
+t2 = tf.SparseTensor([[0,0,0], [0,0,1], [0,0,2], [0,0,3], [0,0,4], [0,1,0], [0,1,1], [0,1,2], [0,1,3], [0,1,4]],
+ truth2,
+ [1,2,5])
+
+print(sess.run(tf.edit_distance(h2, t2, normalize=True)))
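+# Expected output: [[ 0.4  0.2]] -- with normalize=True each distance is
+# divided by the truth word's length (5 here), so 2/5 and 1/5.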
+
+#----------------------------------
+# Now compute distance between four words and 'beers' more efficiently:
+hypothesis_words = ['bear','bar','tensor','flow']
+truth_word = ['beers']
+
+num_h_words = len(hypothesis_words)
+h_indices = [[xi, 0, yi] for xi,x in enumerate(hypothesis_words) for yi,y in enumerate(x)]
+h_chars = list(''.join(hypothesis_words))
+
+h3 = tf.SparseTensor(h_indices, h_chars, [num_h_words,1,1])
+
+truth_word_vec = truth_word*num_h_words
+t_indices = [[xi, 0, yi] for xi,x in enumerate(truth_word_vec) for yi,y in enumerate(x)]
+t_chars = list(''.join(truth_word_vec))
+
+t3 = tf.SparseTensor(t_indices, t_chars, [num_h_words,1,1])
+
+print(sess.run(tf.edit_distance(h3, t3, normalize=True)))
+
+#----------------------------------
+# Now we show how to use sparse tensors in a feed dictionary
+
+# Create input data
+hypothesis_words = ['bear','bar','tensor','flow']
+truth_word = ['beers']
+
+def create_sparse_vec(word_list):
+ num_words = len(word_list)
+ indices = [[xi, 0, yi] for xi,x in enumerate(word_list) for yi,y in enumerate(x)]
+ chars = list(''.join(word_list))
+ return(tf.SparseTensorValue(indices, chars, [num_words,1,1]))
+
+hyp_string_sparse = create_sparse_vec(hypothesis_words)
+truth_string_sparse = create_sparse_vec(truth_word*len(hypothesis_words))
+
+hyp_input = tf.sparse_placeholder(dtype=tf.string)
+truth_input = tf.sparse_placeholder(dtype=tf.string)
+
+edit_distances = tf.edit_distance(hyp_input, truth_input, normalize=True)
+
+feed_dict = {hyp_input: hyp_string_sparse,
+ truth_input: truth_string_sparse}
+
+print(sess.run(edit_distances, feed_dict=feed_dict))
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/activation_functions.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/activation_functions.py
new file mode 100644
index 000000000..bda81d421
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/activation_functions.py
@@ -0,0 +1,88 @@
+# Combining Gates and Activation Functions
+#----------------------------------
+#
+# This function shows how to implement
+# various gates with activation functions
+# in Tensorflow
+#
+# This function is an extension of the
+# prior gates, but with various activation
+# functions.
+
+import tensorflow as tf
+import numpy as np
+import matplotlib.pyplot as plt
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Start Graph Session
+sess = tf.Session()
+tf.set_random_seed(5)
+np.random.seed(42)
+
+batch_size = 50
+
+a1 = tf.Variable(tf.random_normal(shape=[1,1]))
+b1 = tf.Variable(tf.random_uniform(shape=[1,1]))
+a2 = tf.Variable(tf.random_normal(shape=[1,1]))
+b2 = tf.Variable(tf.random_uniform(shape=[1,1]))
+x = np.random.normal(2, 0.1, 500)
+x_data = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+
+sigmoid_activation = tf.sigmoid(tf.add(tf.matmul(x_data, a1), b1))
+
+relu_activation = tf.nn.relu(tf.add(tf.matmul(x_data, a2), b2))
+
+# Declare the loss function as the difference between
+# the output and a target value, 0.75.
+loss1 = tf.reduce_mean(tf.square(tf.sub(sigmoid_activation, 0.75)))
+loss2 = tf.reduce_mean(tf.square(tf.sub(relu_activation, 0.75)))
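+# Both gates are pushed toward the same target value (0.75). Expect the
+# ReLU gate to converge faster: its gradient is a constant 1 on the active
+# side, while the sigmoid's gradient shrinks toward zero as its output
+# saturates near 0 or 1.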
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Declare optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.01)
+train_step_sigmoid = my_opt.minimize(loss1)
+train_step_relu = my_opt.minimize(loss2)
+
+# Run loop across gate
+print('\nOptimizing Sigmoid AND Relu Output to 0.75')
+loss_vec_sigmoid = []
+loss_vec_relu = []
+activation_sigmoid = []
+activation_relu = []
+for i in range(750):
+ rand_indices = np.random.choice(len(x), size=batch_size)
+ x_vals = np.transpose([x[rand_indices]])
+ sess.run(train_step_sigmoid, feed_dict={x_data: x_vals})
+ sess.run(train_step_relu, feed_dict={x_data: x_vals})
+
+ loss_vec_sigmoid.append(sess.run(loss1, feed_dict={x_data: x_vals}))
+ loss_vec_relu.append(sess.run(loss2, feed_dict={x_data: x_vals}))
+
+ activation_sigmoid.append(np.mean(sess.run(sigmoid_activation, feed_dict={x_data: x_vals})))
+ activation_relu.append(np.mean(sess.run(relu_activation, feed_dict={x_data: x_vals})))
+
+
+# Plot the activation values
+plt.plot(activation_sigmoid, 'k-', label='Sigmoid Activation')
+plt.plot(activation_relu, 'r--', label='Relu Activation')
+plt.ylim([0, 1.0])
+plt.title('Activation Outputs')
+plt.xlabel('Generation')
+plt.ylabel('Outputs')
+plt.legend(loc='upper right')
+plt.show()
+
+
+# Plot the loss
+plt.plot(loss_vec_sigmoid, 'k-', label='Sigmoid Loss')
+plt.plot(loss_vec_relu, 'r--', label='Relu Loss')
+plt.ylim([0, 1.0])
+plt.title('Loss per Generation')
+plt.xlabel('Generation')
+plt.ylabel('Loss')
+plt.legend(loc='upper right')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/gates.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/gates.py
new file mode 100644
index 000000000..b313e7666
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/gates.py
@@ -0,0 +1,97 @@
+# Implementing Gates
+#----------------------------------
+#
+# This function shows how to implement
+# various gates in Tensorflow
+#
+# One gate will be one operation with
+# a variable and a placeholder.
+# We will ask Tensorflow to change the
+# variable based on our loss function
+
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Start Graph Session
+sess = tf.Session()
+
+#----------------------------------
+# Create a multiplication gate:
+# f(x) = a * x
+#
+# a --
+# |
+# |---- (multiply) --> output
+# x --|
+#
+
+a = tf.Variable(tf.constant(4.))
+x_val = 5.
+x_data = tf.placeholder(dtype=tf.float32)
+
+multiplication = tf.mul(a, x_data)
+
+# Declare the loss function as the difference between
+# the output and a target value, 50.
+loss = tf.square(tf.sub(multiplication, 50.))
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Declare optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.01)
+train_step = my_opt.minimize(loss)
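+# Each step applies a <- a - 0.01 * d/da[(a*x - 50)^2] = a - 0.02*(a*x - 50)*x.
+# From a=4, x=5: gradient = 2*(20 - 50)*5 = -300, so a jumps to 7.0, then
+# 8.5, and so on, converging toward a = 10 (since 10 * 5 = 50).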
+
+# Run loop across gate
+print('Optimizing a Multiplication Gate Output to 50.')
+for i in range(10):
+ sess.run(train_step, feed_dict={x_data: x_val})
+ a_val = sess.run(a)
+ mult_output = sess.run(multiplication, feed_dict={x_data: x_val})
+ print(str(a_val) + ' * ' + str(x_val) + ' = ' + str(mult_output))
+
+#----------------------------------
+# Create a nested gate:
+# f(x) = a * x + b
+#
+# a --
+# |
+# |-- (multiply)--
+# x --| |
+# |-- (add) --> output
+# b --|
+#
+#
+
+# Start a New Graph Session
+ops.reset_default_graph()
+sess = tf.Session()
+
+a = tf.Variable(tf.constant(1.))
+b = tf.Variable(tf.constant(1.))
+x_val = 5.
+x_data = tf.placeholder(dtype=tf.float32)
+
+two_gate = tf.add(tf.mul(a, x_data), b)
+
+# Declare the loss function as the difference between
+# the output and a target value, 50.
+loss = tf.square(tf.sub(two_gate, 50.))
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Declare optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.01)
+train_step = my_opt.minimize(loss)
+
+# Run loop across gate
+print('\nOptimizing Two Gate Output to 50.')
+for i in range(10):
+ sess.run(train_step, feed_dict={x_data: x_val})
+ a_val, b_val = (sess.run(a), sess.run(b))
+ two_gate_output = sess.run(two_gate, feed_dict={x_data: x_val})
+ print(str(a_val) + ' * ' + str(x_val) + ' + ' + str(b_val) + ' = ' + str(two_gate_output))
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/implementing_different_layers.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/implementing_different_layers.py
new file mode 100644
index 000000000..c04e0ab3b
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/implementing_different_layers.py
@@ -0,0 +1,242 @@
+# Implementing Different Layers
+#---------------------------------------
+#
+# We will illustrate how to use different types
+# of layers in Tensorflow
+#
+# The layers of interest are:
+# (1) Convolutional Layer
+# (2) Activation Layer
+# (3) Max-Pool Layer
+# (4) Fully Connected Layer
+#
+# We will generate two different data sets for this
+# script, a 1-D data set (row of data) and
+# a 2-D data set (similar to picture)
+
+import tensorflow as tf
+import numpy as np
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+#---------------------------------------------------|
+#-------------------1D-data-------------------------|
+#---------------------------------------------------|
+print('\n----------1D Arrays----------')
+
+# Create graph session
+sess = tf.Session()
+
+# Generate 1D data
+data_size = 25
+data_1d = np.random.normal(size=data_size)
+
+# Placeholder
+x_input_1d = tf.placeholder(dtype=tf.float32, shape=[data_size])
+
+#--------Convolution--------
+def conv_layer_1d(input_1d, my_filter):
+ # Tensorflow's 'conv2d()' function only works with 4D arrays:
+ # [batch, height, width, channels]. We have 1 batch, 1 channel,
+ # height = 1, and width = the length of the input.
+ # So next we create the 4D array by inserting dimensions of size 1.
+ input_2d = tf.expand_dims(input_1d, 0)
+ input_3d = tf.expand_dims(input_2d, 0)
+ input_4d = tf.expand_dims(input_3d, 3)
+ # Perform convolution with stride = 1, if we wanted to increase the stride,
+ # to say '2', then strides=[1,1,2,1]
+ convolution_output = tf.nn.conv2d(input_4d, filter=my_filter, strides=[1,1,1,1], padding="VALID")
+ # Get rid of extra dimensions
+ conv_output_1d = tf.squeeze(convolution_output)
+ return(conv_output_1d)
+
+# Create filter for convolution.
+my_filter = tf.Variable(tf.random_normal(shape=[1,5,1,1]))
+# Create convolution layer
+my_convolution_output = conv_layer_1d(x_input_1d, my_filter)
+
+#--------Activation--------
+def activation(input_1d):
+ return(tf.nn.relu(input_1d))
+
+# Create activation layer
+my_activation_output = activation(my_convolution_output)
+
+#--------Max Pool--------
+def max_pool(input_1d, width):
+ # Just like 'conv2d()' above, max_pool() works with 4D arrays:
+ # [batch_size=1, height=1, width=num_input, channels=1]
+ input_2d = tf.expand_dims(input_1d, 0)
+ input_3d = tf.expand_dims(input_2d, 0)
+ input_4d = tf.expand_dims(input_3d, 3)
+ # Perform the max pooling with strides = [1,1,1,1]
+ # If we wanted to increase the stride on our data dimension, say by
+ # a factor of '2', we put strides = [1, 1, 2, 1]
+ # We will also need to specify the width of the max-window ('width')
+ pool_output = tf.nn.max_pool(input_4d, ksize=[1, 1, width, 1],
+ strides=[1, 1, 1, 1],
+ padding='VALID')
+ # Get rid of extra dimensions
+ pool_output_1d = tf.squeeze(pool_output)
+ return(pool_output_1d)
+
+my_maxpool_output = max_pool(my_activation_output, width=5)
+
+#--------Fully Connected--------
+def fully_connected(input_layer, num_outputs):
+ # First we find the needed shape of the multiplication weight matrix:
+ # The dimension will be (length of input) by (num_outputs)
+ weight_shape = tf.squeeze(tf.pack([tf.shape(input_layer),[num_outputs]]))
+ # Initialize such weight
+ weight = tf.random_normal(weight_shape, stddev=0.1)
+ # Initialize the bias
+ bias = tf.random_normal(shape=[num_outputs])
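+ # Note: 'weight' and 'bias' above are plain tf.random_normal tensors,
+ # not tf.Variables, so they are re-sampled on every session run. That is
+ # fine for this forward-only demo; a trainable layer would wrap them in
+ # tf.Variable(...).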
+ # Make the 1D input array into a 2D array for matrix multiplication
+ input_layer_2d = tf.expand_dims(input_layer, 0)
+ # Perform the matrix multiplication and add the bias
+ full_output = tf.add(tf.matmul(input_layer_2d, weight), bias)
+ # Get rid of extra dimensions
+ full_output_1d = tf.squeeze(full_output)
+ return(full_output_1d)
+
+my_full_output = fully_connected(my_maxpool_output, 5)
+
+# Run graph
+# Initialize Variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+feed_dict = {x_input_1d: data_1d}
+
+# Convolution Output
+print('Input = array of length 25')
+print('Convolution w/filter, length = 5, stride size = 1, results in an array of length 21:')
+print(sess.run(my_convolution_output, feed_dict=feed_dict))
+
+# Activation Output
+print('\nInput = the above array of length 21')
+print('ReLU element wise returns the array of length 21:')
+print(sess.run(my_activation_output, feed_dict=feed_dict))
+
+# Max Pool Output
+print('\nInput = the above array of length 21')
+print('MaxPool, window length = 5, stride size = 1, results in the array of length 17:')
+print(sess.run(my_maxpool_output, feed_dict=feed_dict))
+
+# Fully Connected Output
+print('\nInput = the above array of length 17')
+print('Fully connected layer on the above array with five outputs:')
+print(sess.run(my_full_output, feed_dict=feed_dict))
+
+#---------------------------------------------------|
+#-------------------2D-data-------------------------|
+#---------------------------------------------------|
+print('\n----------2D Arrays----------')
+
+
+# Reset Graph
+ops.reset_default_graph()
+sess = tf.Session()
+
+#Generate 2D data
+data_size = [10,10]
+data_2d = np.random.normal(size=data_size)
+
+#--------Placeholder--------
+x_input_2d = tf.placeholder(dtype=tf.float32, shape=data_size)
+
+# Convolution
+def conv_layer_2d(input_2d, my_filter):
+ # Tensorflow's 'conv2d()' function only works with 4D arrays:
+ # [batch, height, width, channels]. We have 1 batch and
+ # 1 channel, but we do have height AND width this time.
+ # So next we create the 4D array by inserting dimension 1's.
+ input_3d = tf.expand_dims(input_2d, 0)
+ input_4d = tf.expand_dims(input_3d, 3)
+ # Note the stride difference below!
+ convolution_output = tf.nn.conv2d(input_4d, filter=my_filter, strides=[1,2,2,1], padding="VALID")
+ # Get rid of unnecessary dimensions
+ conv_output_2d = tf.squeeze(convolution_output)
+ return(conv_output_2d)
+
+# Create Convolutional Filter
+my_filter = tf.Variable(tf.random_normal(shape=[2,2,1,1]))
+# Create Convolutional Layer
+my_convolution_output = conv_layer_2d(x_input_2d, my_filter)
+
+#--------Activation--------
+def activation(input_2d):
+ return(tf.nn.relu(input_2d))
+
+# Create Activation Layer
+my_activation_output = activation(my_convolution_output)
+
+#--------Max Pool--------
+def max_pool(input_2d, width, height):
+ # Just like 'conv2d()' above, max_pool() works with 4D arrays:
+ # [batch_size=1, height=given, width=given, channels=1]
+ input_3d = tf.expand_dims(input_2d, 0)
+ input_4d = tf.expand_dims(input_3d, 3)
+ # Perform the max pooling with strides = [1,1,1,1]
+ # If we wanted to increase the stride on our data dimension, say by
+ # a factor of '2', we put strides = [1, 2, 2, 1]
+ pool_output = tf.nn.max_pool(input_4d, ksize=[1, height, width, 1],
+ strides=[1, 1, 1, 1],
+ padding='VALID')
+ # Get rid of unnecessary dimensions
+ pool_output_2d = tf.squeeze(pool_output)
+ return(pool_output_2d)
+
+# Create Max-Pool Layer
+my_maxpool_output = max_pool(my_activation_output, width=2, height=2)
+
+
+#--------Fully Connected--------
+def fully_connected(input_layer, num_outputs):
+ # In order to connect our whole W by H 2D array, we first flatten it
+ # out to a (W*H)-long 1D array.
+ flat_input = tf.reshape(input_layer, [-1])
+ # We then find out how long it is, and create an array for the shape of
+ # the multiplication weight = (WxH) by (num_outputs)
+ weight_shape = tf.squeeze(tf.pack([tf.shape(flat_input),[num_outputs]]))
+ # Initialize the weight
+ weight = tf.random_normal(weight_shape, stddev=0.1)
+ # Initialize the bias
+ bias = tf.random_normal(shape=[num_outputs])
+ # Now make the flat 1D array into a 2D array for multiplication
+ input_2d = tf.expand_dims(flat_input, 0)
+ # Multiply and add the bias
+ full_output = tf.add(tf.matmul(input_2d, weight), bias)
+ # Get rid of extra dimension
+ full_output_2d = tf.squeeze(full_output)
+ return(full_output_2d)
+
+# Create Fully Connected Layer
+my_full_output = fully_connected(my_maxpool_output, 5)
+
+# Run graph
+# Initialize Variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+feed_dict = {x_input_2d: data_2d}
+
+# Convolution Output
+print('Input = [10 X 10] array')
+print('2x2 Convolution, stride size = [2x2], results in the [5x5] array:')
+print(sess.run(my_convolution_output, feed_dict=feed_dict))
+
+# Activation Output
+print('\nInput = the above [5x5] array')
+print('ReLU element wise returns the [5x5] array:')
+print(sess.run(my_activation_output, feed_dict=feed_dict))
+
+# Max Pool Output
+print('\nInput = the above [5x5] array')
+print('MaxPool, stride size = [1x1], results in the [4x4] array:')
+print(sess.run(my_maxpool_output, feed_dict=feed_dict))
+
+# Fully Connected Output
+print('\nInput = the above [4x4] array')
+print('Fully connected layer on all four rows with five outputs:')
+print(sess.run(my_full_output, feed_dict=feed_dict))
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/improving_linear_regression.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/improving_linear_regression.py
new file mode 100644
index 000000000..b6ff94cfd
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/improving_linear_regression.py
@@ -0,0 +1,137 @@
+# Improving Linear Regression with Neural Networks (Logistic Regression)
+#----------------------------------
+#
+# This function shows how to use Tensorflow to
+# solve logistic regression with a multiple layer neural network
+# y = sigmoid(A3 * sigmoid(A2* sigmoid(A1*x + b1) + b2) + b3)
+#
+# We will use the low birth weight data, specifically:
+# y = 0 or 1 = low birth weight
+# x = demographic and medical history data
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+import requests
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Create graph
+sess = tf.Session()
+
+birthdata_url = 'https://www.umass.edu/statdata/statdata/data/lowbwt.dat'
+birth_file = requests.get(birthdata_url)
+birth_data = birth_file.text.split('\r\n')[5:]
+birth_header = [x for x in birth_data[0].split(' ') if len(x)>=1]
+birth_data = [[float(x) for x in y.split(' ') if len(x)>=1] for y in birth_data[1:] if len(y)>=1]
+# Pull out target variable
+y_vals = np.array([x[1] for x in birth_data])
+# Pull out predictor variables (not id, not target, and not birthweight)
+x_vals = np.array([x[2:9] for x in birth_data])
+
+# Split data into train/test = 80%/20%
+train_indices = np.random.choice(len(x_vals), round(len(x_vals)*0.8), replace=False)
+test_indices = np.array(list(set(range(len(x_vals))) - set(train_indices)))
+x_vals_train = x_vals[train_indices]
+x_vals_test = x_vals[test_indices]
+y_vals_train = y_vals[train_indices]
+y_vals_test = y_vals[test_indices]
+
+# Normalize by column (min-max norm)
+def normalize_cols(m):
+ col_max = m.max(axis=0)
+ col_min = m.min(axis=0)
+ return (m-col_min) / (col_max - col_min)
+
+x_vals_train = np.nan_to_num(normalize_cols(x_vals_train))
+x_vals_test = np.nan_to_num(normalize_cols(x_vals_test))
+
+# Declare batch size
+batch_size = 90
+
+# Initialize placeholders
+x_data = tf.placeholder(shape=[None, 7], dtype=tf.float32)
+y_target = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+
+
+# Create variable definition
+def init_variable(shape):
+ return(tf.Variable(tf.random_normal(shape=shape)))
+
+
+# Create a logistic layer definition
+def logistic(input_layer, multiplication_weight, bias_weight, activation = True):
+ linear_layer = tf.add(tf.matmul(input_layer, multiplication_weight), bias_weight)
+ # We separate the activation at the end because the loss function will
+ # implement the last sigmoid necessary
+ if activation:
+ return(tf.nn.sigmoid(linear_layer))
+ else:
+ return(linear_layer)
+
+
+# First logistic layer (7 inputs to 14 hidden nodes)
+A1 = init_variable(shape=[7,14])
+b1 = init_variable(shape=[14])
+logistic_layer1 = logistic(x_data, A1, b1)
+
+# Second logistic layer (14 hidden inputs to 5 hidden nodes)
+A2 = init_variable(shape=[14,5])
+b2 = init_variable(shape=[5])
+logistic_layer2 = logistic(logistic_layer1, A2, b2)
+
+# Final output layer (5 hidden nodes to 1 output)
+A3 = init_variable(shape=[5,1])
+b3 = init_variable(shape=[1])
+final_output = logistic(logistic_layer2, A3, b3, activation=False)
+
+# Declare loss function (Cross Entropy loss)
+loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(final_output, y_target))
+
+# Declare optimizer
+my_opt = tf.train.AdamOptimizer(learning_rate = 0.005)
+train_step = my_opt.minimize(loss)
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Actual Prediction
+prediction = tf.round(tf.nn.sigmoid(final_output))
+predictions_correct = tf.cast(tf.equal(prediction, y_target), tf.float32)
+accuracy = tf.reduce_mean(predictions_correct)
+
+# Training loop
+loss_vec = []
+train_acc = []
+test_acc = []
+for i in range(1500):
+ rand_index = np.random.choice(len(x_vals_train), size=batch_size)
+ rand_x = x_vals_train[rand_index]
+ rand_y = np.transpose([y_vals_train[rand_index]])
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+
+ temp_loss = sess.run(loss, feed_dict={x_data: rand_x, y_target: rand_y})
+ loss_vec.append(temp_loss)
+ temp_acc_train = sess.run(accuracy, feed_dict={x_data: x_vals_train, y_target: np.transpose([y_vals_train])})
+ train_acc.append(temp_acc_train)
+ temp_acc_test = sess.run(accuracy, feed_dict={x_data: x_vals_test, y_target: np.transpose([y_vals_test])})
+ test_acc.append(temp_acc_test)
+ if (i+1)%150==0:
+ print('Loss = ' + str(temp_loss))
+
+# Plot loss over time
+plt.plot(loss_vec, 'k-')
+plt.title('Cross Entropy Loss per Generation')
+plt.xlabel('Generation')
+plt.ylabel('Cross Entropy Loss')
+plt.show()
+
+# Plot train and test accuracy
+plt.plot(train_acc, 'k-', label='Train Set Accuracy')
+plt.plot(test_acc, 'r--', label='Test Set Accuracy')
+plt.title('Train and Test Accuracy')
+plt.xlabel('Generation')
+plt.ylabel('Accuracy')
+plt.legend(loc='lower right')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/single_hidden_layer_network.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/single_hidden_layer_network.py
new file mode 100644
index 000000000..0c60cb63e
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/single_hidden_layer_network.py
@@ -0,0 +1,103 @@
+# Implementing a one-layer Neural Network
+#---------------------------------------
+#
+# We will illustrate how to create a one hidden layer NN
+#
+# We will use the iris data for this exercise
+#
+# We will build a one-hidden layer neural network
+# to predict the fourth attribute, Petal Width from
+# the other three (Sepal length, Sepal width, Petal length).
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from sklearn import datasets
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+iris = datasets.load_iris()
+x_vals = np.array([x[0:3] for x in iris.data])
+y_vals = np.array([x[3] for x in iris.data])
+
+# Create graph session
+sess = tf.Session()
+
+# Set Seed
+seed = 3
+tf.set_random_seed(seed)
+np.random.seed(seed)
+
+# Split data into train/test = 80%/20%
+train_indices = np.random.choice(len(x_vals), round(len(x_vals)*0.8), replace=False)
+test_indices = np.array(list(set(range(len(x_vals))) - set(train_indices)))
+x_vals_train = x_vals[train_indices]
+x_vals_test = x_vals[test_indices]
+y_vals_train = y_vals[train_indices]
+y_vals_test = y_vals[test_indices]
+
+# Normalize by column (min-max norm)
+def normalize_cols(m):
+ col_max = m.max(axis=0)
+ col_min = m.min(axis=0)
+ return (m-col_min) / (col_max - col_min)
+
+x_vals_train = np.nan_to_num(normalize_cols(x_vals_train))
+x_vals_test = np.nan_to_num(normalize_cols(x_vals_test))
+
+# Declare batch size
+batch_size = 50
+
+# Initialize placeholders
+x_data = tf.placeholder(shape=[None, 3], dtype=tf.float32)
+y_target = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+
+# Create variables for both Neural Network Layers
+hidden_layer_nodes = 10
+A1 = tf.Variable(tf.random_normal(shape=[3,hidden_layer_nodes])) # inputs -> hidden nodes
+b1 = tf.Variable(tf.random_normal(shape=[hidden_layer_nodes])) # one biases for each hidden node
+A2 = tf.Variable(tf.random_normal(shape=[hidden_layer_nodes,1])) # hidden inputs -> 1 output
+b2 = tf.Variable(tf.random_normal(shape=[1])) # 1 bias for the output
+
+
+# Declare model operations
+hidden_output = tf.nn.relu(tf.add(tf.matmul(x_data, A1), b1))
+final_output = tf.nn.relu(tf.add(tf.matmul(hidden_output, A2), b2))
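+# Note: the ReLU on the output layer constrains predictions to be
+# nonnegative, which suits the target here (petal width >= 0); a plain
+# linear output is the more common choice for unconstrained regression.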
+
+# Declare loss function
+loss = tf.reduce_mean(tf.square(y_target - final_output))
+
+# Declare optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.005)
+train_step = my_opt.minimize(loss)
+
+# Initialize variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Training loop
+loss_vec = []
+test_loss = []
+for i in range(500):
+ rand_index = np.random.choice(len(x_vals_train), size=batch_size)
+ rand_x = x_vals_train[rand_index]
+ rand_y = np.transpose([y_vals_train[rand_index]])
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+
+ temp_loss = sess.run(loss, feed_dict={x_data: rand_x, y_target: rand_y})
+ loss_vec.append(np.sqrt(temp_loss))
+
+ test_temp_loss = sess.run(loss, feed_dict={x_data: x_vals_test, y_target: np.transpose([y_vals_test])})
+ test_loss.append(np.sqrt(test_temp_loss))
+ if (i+1)%50==0:
+ print('Generation: ' + str(i+1) + '. Loss = ' + str(temp_loss))
+
+
+# Plot loss (MSE) over time
+plt.plot(loss_vec, 'k-', label='Train Loss')
+plt.plot(test_loss, 'r--', label='Test Loss')
+plt.title('Loss (MSE) per Generation')
+plt.xlabel('Generation')
+plt.ylabel('Loss')
+plt.legend(loc='upper right')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/tic_tac_toe/base_tic_tac_toe_moves.csv b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/tic_tac_toe/base_tic_tac_toe_moves.csv
new file mode 100644
index 000000000..d403cb673
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/tic_tac_toe/base_tic_tac_toe_moves.csv
@@ -0,0 +1,31 @@
+0,0,0,0,-1,0,0,0,0,0
+0,-1,0,0,0,0,0,0,0,0
+0,0,0,0,0,-1,0,0,0,6
+-1,0,0,0,0,0,0,0,0,4
+0,0,0,0,0,0,1,-1,-1,3
+0,-1,0,0,1,0,0,0,-1,0
+0,-1,1,0,0,-1,0,0,0,7
+-1,0,0,0,-1,0,0,0,1,6
+0,0,1,0,0,-1,-1,0,0,4
+0,0,-1,0,0,0,0,-1,1,4
+1,0,0,-1,0,0,0,-1,0,2
+0,0,-1,0,1,0,-1,0,0,5
+-1,0,0,1,-1,-1,0,0,1,6
+-1,1,-1,0,1,0,0,1,0,8
+0,0,0,-1,0,1,1,-1,-1,1
+-1,1,0,0,0,-1,0,-1,1,3
+0,-1,1,0,1,-1,-1,0,0,8
+0,0,-1,1,0,-1,0,-1,1,0
+1,-1,0,0,-1,0,0,0,0,7
+1,0,-1,0,-1,0,0,0,0,6
+1,0,0,0,-1,0,-1,0,0,2
+1,0,0,0,-1,-1,0,0,0,3
+1,0,0,0,-1,0,0,0,-1,6
+1,-1,0,-1,-1,0,0,1,0,5
+1,-1,0,0,-1,0,-1,1,0,2
+1,-1,-1,0,-1,0,0,1,0,6
+1,-1,0,0,-1,-1,0,1,0,3
+1,0,-1,-1,-1,0,1,0,0,8
+1,-1,1,0,-1,0,-1,0,0,7
+1,0,0,1,-1,-1,-1,0,0,2
+1,0,0,-1,-1,0,1,0,-1,5
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/tic_tac_toe/tic_tac_toe_moves.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/tic_tac_toe/tic_tac_toe_moves.py
new file mode 100644
index 000000000..794a8f7c8
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/tic_tac_toe/tic_tac_toe_moves.py
@@ -0,0 +1,231 @@
+# Learning Optimal Tic-Tac-Toe Moves via a Neural Network
+#---------------------------------------
+#
+# We will build a one-hidden layer neural network
+# to predict tic-tac-toe optimal moves. This will
+# be accomplished by loading a small list of board
+# positions with the optimal play response from a csv,
+# then applying two random board transformations to each.
+#
+# We then train the neural network on the board + response
+#
+import tensorflow as tf
+import matplotlib.pyplot as plt
+import csv
+import random
+import numpy as np
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# X = 1
+# O = -1
+# empty = 0
+# response on 1-9 grid for placement of next '1'
+
+
+# For example, the 'test_board' is:
+#
+# O | - | -
+# -----------------
+# X | O | O
+# -----------------
+# - | - | X
+#
+# board above = [-1, 0, 0, 1, -1, -1, 0, 0, 1]
+# Optimal response would be position 6, where
+# the position numbers are:
+#
+# 0 | 1 | 2
+# -----------------
+# 3 | 4 | 5
+# -----------------
+# 6 | 7 | 8
+
+batch_size = 50
+symmetry = ['rotate180', 'rotate90', 'rotate270', 'flip_v', 'flip_h']
+
+
+# Print a board
+def print_board(board):
+ symbols = ['O',' ','X']
+ board_plus1 = [int(x) + 1 for x in board]
+ print(' ' + symbols[board_plus1[0]] + ' | ' + symbols[board_plus1[1]] + ' | ' + symbols[board_plus1[2]])
+ print('___________')
+ print(' ' + symbols[board_plus1[3]] + ' | ' + symbols[board_plus1[4]] + ' | ' + symbols[board_plus1[5]])
+ print('___________')
+ print(' ' + symbols[board_plus1[6]] + ' | ' + symbols[board_plus1[7]] + ' | ' + symbols[board_plus1[8]])
+
+
+## Given a board, a response, and a transformation, get the new board+response
+def get_symmetry(board, response, transformation):
+ '''
+ :param board: list of integers 9 long:
+ opposing mark = -1
+ friendly mark = 1
+ empty space = 0
+ :param transformation: one of five transformations on a board:
+ 'rotate180', 'rotate90', 'rotate270', 'flip_v', 'flip_h'
+ :return: tuple: (new_board, new_response)
+ '''
+ if transformation == 'rotate180':
+ new_response = 8 - response
+ return(board[::-1], new_response)
+ elif transformation == 'rotate90':
+ new_response = [6, 3, 0, 7, 4, 1, 8, 5, 2].index(response)
+ tuple_board = list(zip(*[board[6:9], board[3:6], board[0:3]]))
+ return([value for item in tuple_board for value in item], new_response)
+ elif transformation == 'rotate270':
+ new_response = [2, 5, 8, 1, 4, 7, 0, 3, 6].index(response)
+ tuple_board = list(zip(*[board[0:3], board[3:6], board[6:9]]))[::-1]
+ return([value for item in tuple_board for value in item], new_response)
+ elif transformation == 'flip_v':
+ new_response = [6, 7, 8, 3, 4, 5, 0, 1, 2].index(response)
+ return(board[6:9] + board[3:6] + board[0:3], new_response)
+ elif transformation == 'flip_h': # flip_h = rotate180, then flip_v
+ new_response = [2, 1, 0, 5, 4, 3, 8, 7, 6].index(response)
+ new_board = board[::-1]
+ return(new_board[6:9] + new_board[3:6] + new_board[0:3], new_response)
+ else:
+ raise ValueError('Method not implemented.')
+
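+# Quick self-check of the transforms above (an illustrative sketch):
+# applying 'rotate90' twice should agree with 'rotate180' for both the
+# board and the response index.
+_b, _r = [-1, 0, 0, 1, -1, -1, 0, 0, 1], 6
+_b90, _r90 = get_symmetry(_b, _r, 'rotate90')
+assert get_symmetry(_b90, _r90, 'rotate90') == get_symmetry(_b, _r, 'rotate180')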
+
+## Read in board move csv file
+def get_moves_from_csv(csv_file):
+ '''
+ :param csv_file: csv file location containing the boards w/ responses
+ :return: moves: list of moves with index of best response
+ '''
+ moves = []
+ with open(csv_file, 'rt') as csvfile:
+ reader = csv.reader(csvfile, delimiter=',')
+ for row in reader:
+ moves.append(([int(x) for x in row[0:9]],int(row[9])))
+ return(moves)
+
+## Get random board with optimal move
+def get_rand_move(moves, rand_transforms=2):
+ '''
+ :param moves: list of the boards w/responses
+ :param rand_transforms: how many random transforms performed on each
+ :return: (board, response), board is a list of 9 integers, response is 1 int
+ '''
+ (board, response) = random.choice(moves)
+ possible_transforms = ['rotate90', 'rotate180', 'rotate270', 'flip_v', 'flip_h']
+ for i in range(rand_transforms):
+ random_transform = random.choice(possible_transforms)
+ (board, response) = get_symmetry(board, response, random_transform)
+ return(board, response)
+
+
+# Initialize our graph session
+sess = tf.Session()
+
+# Get list of optimal moves w/ responses
+moves = get_moves_from_csv('base_tic_tac_toe_moves.csv')
+
+# Create a train set:
+train_length = 500
+train_set = []
+for t in range(train_length):
+ train_set.append(get_rand_move(moves))
+
+# To see if the network learns anything new, we will remove
+# all instances of the board [-1, 0, 0, 1, -1, -1, 0, 0, 1],
+# for which the optimal response is index '6'. We will
+# test this at the end.
+test_board = [-1, 0, 0, 1, -1, -1, 0, 0, 1]
+train_set = [x for x in train_set if x[0] != test_board]
+
+def init_weights(shape):
+ return(tf.Variable(tf.random_normal(shape)))
+
+
+def model(X, A1, A2, bias1, bias2):
+ layer1 = tf.nn.sigmoid(tf.add(tf.matmul(X, A1), bias1))
+ layer2 = tf.add(tf.matmul(layer1, A2), bias2)
+ return(layer2) # note that we don't take the softmax at the end because our cost function does that for us
+
+
+X = tf.placeholder(dtype=tf.float32, shape=[None, 9])
+Y = tf.placeholder(dtype=tf.int32, shape=[None])
+
+A1 = init_weights([9, 81])
+bias1 = init_weights([81])
+A2 = init_weights([81, 9])
+bias2 = init_weights([9])
+
+model_output = model(X, A1, A2, bias1, bias2)
+
+loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(model_output, Y))
+train_step = tf.train.GradientDescentOptimizer(0.025).minimize(loss)
+prediction = tf.argmax(model_output, 1)
+
+init = tf.initialize_all_variables()
+sess.run(init)
+
+loss_vec = []
+for i in range(10000):
+ rand_indices = np.random.choice(range(len(train_set)), batch_size, replace=False)
+ batch_data = [train_set[i] for i in rand_indices]
+ x_input = [x[0] for x in batch_data]
+ y_target = np.array([y[1] for y in batch_data])
+ sess.run(train_step, feed_dict={X: x_input, Y: y_target})
+
+ temp_loss = sess.run(loss, feed_dict={X: x_input, Y: y_target})
+ loss_vec.append(temp_loss)
+ if i%500==0:
+ print('iteration ' + str(i) + ' Loss: ' + str(temp_loss))
+
+
+# Plot loss
+plt.plot(loss_vec, 'k-', label='Loss')
+plt.title('Softmax Cross-Entropy Loss per Generation')
+plt.xlabel('Generation')
+plt.ylabel('Loss')
+plt.show()
+
+# Make Prediction:
+test_boards = [test_board]
+feed_dict = {X: test_boards}
+logits = sess.run(model_output, feed_dict=feed_dict)
+predictions = sess.run(prediction, feed_dict=feed_dict)
+print(predictions)
+
+# Declare function to check for win
+def check(board):
+ wins = [[0,1,2], [3,4,5], [6,7,8], [0,3,6], [1,4,7], [2,5,8], [0,4,8], [2,4,6]]
+ for i in range(len(wins)):
+ if board[wins[i][0]]==board[wins[i][1]]==board[wins[i][2]]==1.:
+ return(1)
+ elif board[wins[i][0]]==board[wins[i][1]]==board[wins[i][2]]==-1.:
+ return(1)
+ return(0)
+
+# Let's play against our model
+game_tracker = [0., 0., 0., 0., 0., 0., 0., 0., 0.]
+win_logical = False
+num_moves = 0
+while not win_logical:
+ player_index = input('Input index of your move (0-8): ')
+ num_moves += 1
+ # Add player move to game
+ game_tracker[int(player_index)] = 1.
+
+ # Get model's move by first getting all the logits for each index
+ [potential_moves] = sess.run(model_output, feed_dict={X: [game_tracker]})
+ # Now find allowed moves (where game tracker values = 0.0)
+ allowed_moves = [ix for ix,x in enumerate(game_tracker) if x==0.0]
+ # Find best move by taking argmax of logits if they are in allowed moves
+ model_move = np.argmax([x if ix in allowed_moves else -999.0 for ix,x in enumerate(potential_moves)])
+
+ # Add model move to game
+ game_tracker[int(model_move)] = -1.
+ print('Model has moved')
+ print_board(game_tracker)
+ # Now check for win or too many moves
+ if check(game_tracker)==1 or num_moves>=5:
+ print('Game Over!')
+ win_logical = True
+
+
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/using_a_multiple_layer_network.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/using_a_multiple_layer_network.py
new file mode 100644
index 000000000..eee189b8f
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 06/using_a_multiple_layer_network.py
@@ -0,0 +1,178 @@
+# Using a Multiple Layer Network
+#---------------------------------------
+#
+# We will illustrate how to use a Multiple
+# Layer Network in Tensorflow
+#
+# Low Birthrate data:
+#
+#Columns Variable Abbreviation
+#-----------------------------------------------------------------------------
+# Identification Code ID
+# Low Birth Weight (0 = Birth Weight >= 2500g, LOW
+# 1 = Birth Weight < 2500g)
+# Age of the Mother in Years AGE
+# Weight in Pounds at the Last Menstrual Period LWT
+# Race (1 = White, 2 = Black, 3 = Other) RACE
+# Smoking Status During Pregnancy (1 = Yes, 0 = No) SMOKE
+# History of Premature Labor (0 = None 1 = One, etc.) PTL
+# History of Hypertension (1 = Yes, 0 = No) HT
+# Presence of Uterine Irritability (1 = Yes, 0 = No) UI
+# Number of Physician Visits During the First Trimester FTV
+# (0 = None, 1 = One, 2 = Two, etc.)
+# Birth Weight in Grams BWT
+#------------------------------
+# The multiple layer neural network we will create is composed of
+# three fully connected hidden layers, with node sizes 25, 10, and 3
+
+import tensorflow as tf
+import matplotlib.pyplot as plt
+import requests
+import numpy as np
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Set Seed
+seed = 3
+tf.set_random_seed(seed)
+np.random.seed(seed)
+
+
+birthdata_url = 'https://www.umass.edu/statdata/statdata/data/lowbwt.dat'
+birth_file = requests.get(birthdata_url)
+birth_data = birth_file.text.split('\r\n')[5:]
+birth_header = [x for x in birth_data[0].split(' ') if len(x)>=1]
+birth_data = [[float(x) for x in y.split(' ') if len(x)>=1] for y in birth_data[1:] if len(y)>=1]
+
+
+batch_size = 100
+
+# Extract y-target (birth weight)
+y_vals = np.array([x[10] for x in birth_data])
+
+# Filter for features of interest
+cols_of_interest = ['AGE', 'LWT', 'RACE', 'SMOKE', 'PTL', 'HT', 'UI', 'FTV']
+x_vals = np.array([[x[ix] for ix, feature in enumerate(birth_header) if feature in cols_of_interest] for x in birth_data])
+
+# Create graph session
+sess = tf.Session()
+
+# Split data into train/test = 80%/20%
+train_indices = np.random.choice(len(x_vals), round(len(x_vals)*0.8), replace=False)
+test_indices = np.array(list(set(range(len(x_vals))) - set(train_indices)))
+x_vals_train = x_vals[train_indices]
+x_vals_test = x_vals[test_indices]
+y_vals_train = y_vals[train_indices]
+y_vals_test = y_vals[test_indices]
+
+
+# Normalize by column (min-max norm to be between 0 and 1)
+def normalize_cols(m):
+ col_max = m.max(axis=0)
+ col_min = m.min(axis=0)
+ return (m-col_min) / (col_max - col_min)
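+# For intuition: normalize_cols(np.array([[1., 10.], [3., 30.]])) returns
+# [[0., 0.], [1., 1.]] -- each column is scaled by its own min and max.
+# Note: below, the train and test sets are each scaled by their own column
+# statistics; reusing the training min/max on the test set would avoid a
+# slight train/test mismatch.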
+
+x_vals_train = np.nan_to_num(normalize_cols(x_vals_train))
+x_vals_test = np.nan_to_num(normalize_cols(x_vals_test))
+
+
+# Define Variable Functions (weights and bias)
+def init_weight(shape, st_dev):
+ weight = tf.Variable(tf.random_normal(shape, stddev=st_dev))
+ return(weight)
+
+
+def init_bias(shape, st_dev):
+ bias = tf.Variable(tf.random_normal(shape, stddev=st_dev))
+ return(bias)
+
+
+# Create Placeholders
+x_data = tf.placeholder(shape=[None, 8], dtype=tf.float32)
+y_target = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+
+
+# Create a fully connected layer:
+def fully_connected(input_layer, weights, biases):
+ layer = tf.add(tf.matmul(input_layer, weights), biases)
+ return(tf.nn.relu(layer))
+
+
+#--------Create the first layer (25 hidden nodes)--------
+weight_1 = init_weight(shape=[8, 25], st_dev=10.0)
+bias_1 = init_bias(shape=[25], st_dev=10.0)
+layer_1 = fully_connected(x_data, weight_1, bias_1)
+
+#--------Create second layer (10 hidden nodes)--------
+weight_2 = init_weight(shape=[25, 10], st_dev=10.0)
+bias_2 = init_bias(shape=[10], st_dev=10.0)
+layer_2 = fully_connected(layer_1, weight_2, bias_2)
+
+
+#--------Create third layer (3 hidden nodes)--------
+weight_3 = init_weight(shape=[10, 3], st_dev=10.0)
+bias_3 = init_bias(shape=[3], st_dev=10.0)
+layer_3 = fully_connected(layer_2, weight_3, bias_3)
+
+
+#--------Create output layer (1 output value)--------
+weight_4 = init_weight(shape=[3, 1], st_dev=10.0)
+bias_4 = init_bias(shape=[1], st_dev=10.0)
+final_output = fully_connected(layer_3, weight_4, bias_4)
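+# Note: the output layer reuses fully_connected(), so the final prediction
+# also passes through a ReLU. Birth weights are non-negative, so this is
+# harmless here, but a plain linear output layer would be more conventional
+# for regression.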
+
+# Declare loss function (L1)
+loss = tf.reduce_mean(tf.abs(y_target - final_output))
+
+# Declare optimizer
+my_opt = tf.train.AdamOptimizer(0.05)
+train_step = my_opt.minimize(loss)
+
+# Initialize Variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Training loop
+loss_vec = []
+test_loss = []
+for i in range(200):
+ rand_index = np.random.choice(len(x_vals_train), size=batch_size)
+ rand_x = x_vals_train[rand_index]
+ rand_y = np.transpose([y_vals_train[rand_index]])
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+
+ temp_loss = sess.run(loss, feed_dict={x_data: rand_x, y_target: rand_y})
+ loss_vec.append(temp_loss)
+
+ test_temp_loss = sess.run(loss, feed_dict={x_data: x_vals_test, y_target: np.transpose([y_vals_test])})
+ test_loss.append(test_temp_loss)
+ if (i+1)%25==0:
+ print('Generation: ' + str(i+1) + '. Loss = ' + str(temp_loss))
+
+
+# Plot loss over time
+plt.plot(loss_vec, 'k-', label='Train Loss')
+plt.plot(test_loss, 'r--', label='Test Loss')
+plt.title('Loss per Generation')
+plt.xlabel('Generation')
+plt.ylabel('Loss')
+plt.legend(loc="upper right")
+plt.show()
+
+# Find the % classified correctly above/below the cutoff of 2500 g
+# >= 2500 g = 0
+# < 2500 g = 1
+actuals = np.array([x[1] for x in birth_data])
+test_actuals = actuals[test_indices]
+train_actuals = actuals[train_indices]
+
+test_preds = [x[0] for x in sess.run(final_output, feed_dict={x_data: x_vals_test})]
+train_preds = [x[0] for x in sess.run(final_output, feed_dict={x_data: x_vals_train})]
+test_preds = np.array([1.0 if x<2500.0 else 0.0 for x in test_preds])
+train_preds = np.array([1.0 if x<2500.0 else 0.0 for x in train_preds])
+
+# Print out accuracies
+test_acc = np.mean([x==y for x,y in zip(test_preds, test_actuals)])
+train_acc = np.mean([x==y for x,y in zip(train_preds, train_actuals)])
+print('On predicting the category of low birthweight from regression output (<2500g):')
+print('Test Accuracy: {}'.format(test_acc))
+print('Train Accuracy: {}'.format(train_acc))
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/bag_of_words.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/bag_of_words.py
new file mode 100644
index 000000000..907467c6e
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/bag_of_words.py
@@ -0,0 +1,176 @@
+# Working with Bag of Words
+#---------------------------------------
+#
+# In this example, we will download and preprocess the ham/spam
+# text data. We will then use a one-hot-encoding to make a
+# bag of words set of features to use in logistic regression.
+#
+# We will use these one-hot-vectors for logistic regression to
+# predict if a text is spam or ham.
+
+import tensorflow as tf
+import matplotlib.pyplot as plt
+import os
+import numpy as np
+import csv
+import string
+import requests
+import io
+from zipfile import ZipFile
+from tensorflow.contrib import learn
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Start a graph session
+sess = tf.Session()
+
+# Check if data was downloaded, otherwise download it and save for future use
+save_file_name = os.path.join('temp','temp_spam_data.csv')
+if os.path.isfile(save_file_name):
+ text_data = []
+ with open(save_file_name, 'r') as temp_output_file:
+ reader = csv.reader(temp_output_file)
+ for row in reader:
+ text_data.append(row)
+else:
+ zip_url = 'http://archive.ics.uci.edu/ml/machine-learning-databases/00228/smsspamcollection.zip'
+ r = requests.get(zip_url)
+ z = ZipFile(io.BytesIO(r.content))
+ file = z.read('SMSSpamCollection')
+ # Format Data
+ text_data = file.decode()
+ text_data = text_data.encode('ascii',errors='ignore')
+ text_data = text_data.decode().split('\n')
+ text_data = [x.split('\t') for x in text_data if len(x)>=1]
+
+ # And write to csv
+ with open(save_file_name, 'w') as temp_output_file:
+ writer = csv.writer(temp_output_file)
+ writer.writerows(text_data)
+
+texts = [x[1] for x in text_data]
+target = [x[0] for x in text_data]
+
+# Relabel 'spam' as 1, 'ham' as 0
+target = [1 if x=='spam' else 0 for x in target]
+
+# Normalize text
+# Lower case
+texts = [x.lower() for x in texts]
+
+# Remove punctuation
+texts = [''.join(c for c in x if c not in string.punctuation) for x in texts]
+
+# Remove numbers
+texts = [''.join(c for c in x if c not in '0123456789') for x in texts]
+
+# Trim extra whitespace
+texts = [' '.join(x.split()) for x in texts]
+
+# Plot histogram of text lengths
+text_lengths = [len(x.split()) for x in texts]
+text_lengths = [x for x in text_lengths if x < 50]
+plt.hist(text_lengths, bins=25)
+plt.title('Histogram of # of Words in Texts')
+
+# Choose max text word length at 25
+sentence_size = 25
+min_word_freq = 3
+
+# Setup vocabulary processor
+vocab_processor = learn.preprocessing.VocabularyProcessor(sentence_size, min_frequency=min_word_freq)
+
+# We have to fit the transform first so it learns the vocabulary of unique words.
+vocab_processor.fit_transform(texts)
+embedding_size = len(vocab_processor.vocabulary_)
+
+# Split up data set into train/test
+train_indices = np.random.choice(len(texts), round(len(texts)*0.8), replace=False)
+test_indices = np.array(list(set(range(len(texts))) - set(train_indices)))
+texts_train = [x for ix, x in enumerate(texts) if ix in train_indices]
+texts_test = [x for ix, x in enumerate(texts) if ix in test_indices]
+target_train = [x for ix, x in enumerate(target) if ix in train_indices]
+target_test = [x for ix, x in enumerate(target) if ix in test_indices]
+
+# Setup Index Matrix for one-hot-encoding
+identity_mat = tf.diag(tf.ones(shape=[embedding_size]))
+
+# Create variables for logistic regression
+A = tf.Variable(tf.random_normal(shape=[embedding_size,1]))
+b = tf.Variable(tf.random_normal(shape=[1,1]))
+
+# Initialize placeholders
+x_data = tf.placeholder(shape=[sentence_size], dtype=tf.int32)
+y_target = tf.placeholder(shape=[1, 1], dtype=tf.float32)
+
+# Text-Vocab Embedding
+x_embed = tf.nn.embedding_lookup(identity_mat, x_data)
+x_col_sums = tf.reduce_sum(x_embed, 0)
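+# Each lookup grabs a row of the identity matrix, i.e. the one-hot vector for
+# that word index; summing the rows over the sentence gives the bag-of-words
+# count vector (illustrative: indices [2, 2, 5] give a 2 in position 2 and a
+# 1 in position 5).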
+
+# Declare model operations
+x_col_sums_2D = tf.expand_dims(x_col_sums, 0)
+model_output = tf.add(tf.matmul(x_col_sums_2D, A), b)
+
+# Declare loss function (Cross Entropy loss)
+loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(model_output, y_target))
+
+# Prediction operation
+prediction = tf.sigmoid(model_output)
+
+# Declare optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.001)
+train_step = my_opt.minimize(loss)
+
+# Initialize Variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Start Logistic Regression
+print('Starting Training Over {} Sentences.'.format(len(texts_train)))
+loss_vec = []
+train_acc_all = []
+train_acc_avg = []
+for ix, t in enumerate(vocab_processor.fit_transform(texts_train)):
+ y_data = [[target_train[ix]]]
+
+ sess.run(train_step, feed_dict={x_data: t, y_target: y_data})
+ temp_loss = sess.run(loss, feed_dict={x_data: t, y_target: y_data})
+ loss_vec.append(temp_loss)
+
+ if (ix+1)%10==0:
+ print('Training Observation #' + str(ix+1) + ': Loss = ' + str(temp_loss))
+
+ # Keep trailing average of past 50 observations accuracy
+ # Get prediction of single observation
+ [[temp_pred]] = sess.run(prediction, feed_dict={x_data:t, y_target:y_data})
+ # Get True/False if prediction is accurate
+ train_acc_temp = target_train[ix]==np.round(temp_pred)
+ train_acc_all.append(train_acc_temp)
+ if len(train_acc_all) >= 50:
+ train_acc_avg.append(np.mean(train_acc_all[-50:]))
+
+# Get test set accuracy
+print('Getting Test Set Accuracy For {} Sentences.'.format(len(texts_test)))
+test_acc_all = []
+for ix, t in enumerate(vocab_processor.fit_transform(texts_test)):
+ y_data = [[target_test[ix]]]
+
+ if (ix+1)%50==0:
+ print('Test Observation #' + str(ix+1))
+
+    # Record the accuracy of each test observation
+ # Get prediction of single observation
+ [[temp_pred]] = sess.run(prediction, feed_dict={x_data:t, y_target:y_data})
+ # Get True/False if prediction is accurate
+ test_acc_temp = target_test[ix]==np.round(temp_pred)
+ test_acc_all.append(test_acc_temp)
+
+print('\nOverall Test Accuracy: {}'.format(np.mean(test_acc_all)))
+
+# Plot training accuracy over time
+plt.plot(range(len(train_acc_avg)), train_acc_avg, 'k-', label='Train Accuracy')
+plt.title('Avg Training Acc Over Past 50 Generations')
+plt.xlabel('Generation')
+plt.ylabel('Training Accuracy')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/doc2vec.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/doc2vec.py
new file mode 100644
index 000000000..0780fb8c3
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/doc2vec.py
@@ -0,0 +1,303 @@
+# Doc2Vec Model
+#---------------------------------------
+#
+# In this example, we will download and preprocess the movie
+# review data.
+#
+# From this data set we will compute/fit a Doc2Vec model to get
+# Document vectors. From these document vectors, we will split the
+# documents into train/test and use these doc vectors to do sentiment
+# analysis on the movie review dataset.
+#
+import tensorflow as tf
+import matplotlib.pyplot as plt
+import numpy as np
+import random
+import os
+import pickle
+import string
+import requests
+import collections
+import io
+import tarfile
+import urllib.request
+import text_helpers
+from nltk.corpus import stopwords
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+os.chdir(os.path.dirname(os.path.realpath(__file__)))
+
+# Make a saving directory if it doesn't exist
+data_folder_name = 'temp'
+if not os.path.exists(data_folder_name):
+ os.makedirs(data_folder_name)
+
+# Start a graph session
+sess = tf.Session()
+
+# Declare model parameters
+batch_size = 500
+vocabulary_size = 7500
+generations = 100000
+model_learning_rate = 0.001
+
+embedding_size = 200 # Word embedding size
+doc_embedding_size = 100 # Document embedding size
+concatenated_size = embedding_size + doc_embedding_size
+
+num_sampled = int(batch_size/2) # Number of negative examples to sample.
+window_size = 3 # How many words to consider to the left.
+
+# Add checkpoints to training
+save_embeddings_every = 5000
+print_valid_every = 5000
+print_loss_every = 100
+
+# Declare stop words
+#stops = stopwords.words('english')
+stops = []
+
+# We pick a few test words for validation.
+valid_words = ['love', 'hate', 'happy', 'sad', 'man', 'woman']
+# Later we will have to transform these into indices
+
+# Load the movie review data
+print('Loading Data')
+texts, target = text_helpers.load_movie_data(data_folder_name)
+
+# Normalize text
+print('Normalizing Text Data')
+texts = text_helpers.normalize_text(texts, stops)
+
+# Texts must contain at least 3 words
+target = [target[ix] for ix, x in enumerate(texts) if len(x.split()) > window_size]
+texts = [x for x in texts if len(x.split()) > window_size]
+assert(len(target)==len(texts))
+
+# Build our data set and dictionaries
+print('Creating Dictionary')
+word_dictionary = text_helpers.build_dictionary(texts, vocabulary_size)
+word_dictionary_rev = dict(zip(word_dictionary.values(), word_dictionary.keys()))
+text_data = text_helpers.text_to_numbers(texts, word_dictionary)
+
+# Get validation word keys
+valid_examples = [word_dictionary[x] for x in valid_words]
+
+print('Creating Model')
+# Define Embeddings:
+embeddings = tf.Variable(tf.random_uniform([vocabulary_size, embedding_size], -1.0, 1.0))
+doc_embeddings = tf.Variable(tf.random_uniform([len(texts), doc_embedding_size], -1.0, 1.0))
+
+# NCE loss parameters
+nce_weights = tf.Variable(tf.truncated_normal([vocabulary_size, concatenated_size],
+ stddev=1.0 / np.sqrt(concatenated_size)))
+nce_biases = tf.Variable(tf.zeros([vocabulary_size]))
+
+# Create data/target placeholders
+x_inputs = tf.placeholder(tf.int32, shape=[None, window_size + 1]) # plus 1 for doc index
+y_target = tf.placeholder(tf.int32, shape=[None, 1])
+valid_dataset = tf.constant(valid_examples, dtype=tf.int32)
+
+# Lookup the word embedding
+# Add together element embeddings in window:
+embed = tf.zeros([batch_size, embedding_size])
+for element in range(window_size):
+ embed += tf.nn.embedding_lookup(embeddings, x_inputs[:, element])
+
+doc_indices = tf.slice(x_inputs, [0,window_size],[batch_size,1])
+doc_embed = tf.nn.embedding_lookup(doc_embeddings,doc_indices)
+
+# concatenate embeddings
+final_embed = tf.concat(1, [embed, tf.squeeze(doc_embed)])
+
+# Get loss from prediction
+loss = tf.reduce_mean(tf.nn.nce_loss(nce_weights, nce_biases, final_embed, y_target,
+ num_sampled, vocabulary_size))
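+# NCE avoids computing a full softmax over the vocabulary by contrasting the
+# true target word against num_sampled randomly drawn negative words, keeping
+# each training step cheap even for large vocabularies.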
+
+# Create optimizer
+optimizer = tf.train.GradientDescentOptimizer(learning_rate=model_learning_rate)
+train_step = optimizer.minimize(loss)
+
+# Cosine similarity between words
+norm = tf.sqrt(tf.reduce_sum(tf.square(embeddings), 1, keep_dims=True))
+normalized_embeddings = embeddings / norm
+valid_embeddings = tf.nn.embedding_lookup(normalized_embeddings, valid_dataset)
+similarity = tf.matmul(valid_embeddings, normalized_embeddings, transpose_b=True)
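+# With unit-normalized embedding rows, this matrix product is exactly the
+# cosine similarity between each validation word and every vocabulary word.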
+
+# Create model saving operation
+saver = tf.train.Saver({"embeddings": embeddings, "doc_embeddings": doc_embeddings})
+
+#Add variable initializer.
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Run the doc2vec model.
+print('Starting Training')
+loss_vec = []
+loss_x_vec = []
+for i in range(generations):
+ batch_inputs, batch_labels = text_helpers.generate_batch_data(text_data, batch_size,
+ window_size, method='doc2vec')
+ feed_dict = {x_inputs : batch_inputs, y_target : batch_labels}
+
+ # Run the train step
+ sess.run(train_step, feed_dict=feed_dict)
+
+ # Return the loss
+ if (i+1) % print_loss_every == 0:
+ loss_val = sess.run(loss, feed_dict=feed_dict)
+ loss_vec.append(loss_val)
+ loss_x_vec.append(i+1)
+ print('Loss at step {} : {}'.format(i+1, loss_val))
+
+ # Validation: Print some random words and top 5 related words
+ if (i+1) % print_valid_every == 0:
+ sim = sess.run(similarity, feed_dict=feed_dict)
+ for j in range(len(valid_words)):
+ valid_word = word_dictionary_rev[valid_examples[j]]
+ top_k = 5 # number of nearest neighbors
+ nearest = (-sim[j, :]).argsort()[1:top_k+1]
+ log_str = "Nearest to {}:".format(valid_word)
+ for k in range(top_k):
+ close_word = word_dictionary_rev[nearest[k]]
+ log_str = '{} {},'.format(log_str, close_word)
+ print(log_str)
+
+ # Save dictionary + embeddings
+ if (i+1) % save_embeddings_every == 0:
+ # Save vocabulary dictionary
+ with open(os.path.join(data_folder_name,'movie_vocab.pkl'), 'wb') as f:
+ pickle.dump(word_dictionary, f)
+
+ # Save embeddings
+ model_checkpoint_path = os.path.join(os.getcwd(),data_folder_name,'doc2vec_movie_embeddings.ckpt')
+ save_path = saver.save(sess, model_checkpoint_path)
+ print('Model saved in file: {}'.format(save_path))
+
+# Start logistic model-------------------------
+max_words = 20
+logistic_batch_size = 500
+
+# Split dataset into train and test sets
+# Need to keep the indices sorted to keep track of document index
+train_indices = np.sort(np.random.choice(len(target), round(0.8*len(target)), replace=False))
+test_indices = np.sort(np.array(list(set(range(len(target))) - set(train_indices))))
+texts_train = [x for ix, x in enumerate(texts) if ix in train_indices]
+texts_test = [x for ix, x in enumerate(texts) if ix in test_indices]
+target_train = np.array([x for ix, x in enumerate(target) if ix in train_indices])
+target_test = np.array([x for ix, x in enumerate(target) if ix in test_indices])
+
+# Convert texts to lists of indices
+text_data_train = np.array(text_helpers.text_to_numbers(texts_train, word_dictionary))
+text_data_test = np.array(text_helpers.text_to_numbers(texts_test, word_dictionary))
+
+# Pad/crop movie reviews to specific length
+text_data_train = np.array([x[0:max_words] for x in [y+[0]*max_words for y in text_data_train]])
+text_data_test = np.array([x[0:max_words] for x in [y+[0]*max_words for y in text_data_test]])
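+# Illustrative: with max_words = 20, a short review [3, 7, 2] is padded with
+# the RARE index 0 out to length 20, while a 50-word review is cropped to its
+# first 20 indices.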
+
+# Define Logistic placeholders
+log_x_inputs = tf.placeholder(tf.int32, shape=[None, max_words + 1]) # plus 1 for doc index
+log_y_target = tf.placeholder(tf.int32, shape=[None, 1])
+
+# Define logistic embedding lookup (needed if we have two different batch sizes)
+# Add together element embeddings in window:
+log_embed = tf.zeros([logistic_batch_size, embedding_size])
+for element in range(max_words):
+ log_embed += tf.nn.embedding_lookup(embeddings, log_x_inputs[:, element])
+
+log_doc_indices = tf.slice(log_x_inputs, [0,max_words],[logistic_batch_size,1])
+log_doc_embed = tf.nn.embedding_lookup(doc_embeddings,log_doc_indices)
+
+# concatenate embeddings
+log_final_embed = tf.concat(1, [log_embed, tf.squeeze(log_doc_embed)])
+
+# Define model:
+# Create variables for logistic regression
+A = tf.Variable(tf.random_normal(shape=[concatenated_size,1]))
+b = tf.Variable(tf.random_normal(shape=[1,1]))
+
+# Declare logistic model (sigmoid in loss function)
+model_output = tf.add(tf.matmul(log_final_embed, A), b)
+
+# Declare loss function (Cross Entropy loss)
+logistic_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(model_output, tf.cast(log_y_target, tf.float32)))
+
+# Actual Prediction
+prediction = tf.round(tf.sigmoid(model_output))
+predictions_correct = tf.cast(tf.equal(prediction, tf.cast(log_y_target, tf.float32)), tf.float32)
+accuracy = tf.reduce_mean(predictions_correct)
+
+# Declare optimizer
+logistic_opt = tf.train.GradientDescentOptimizer(learning_rate=0.01)
+logistic_train_step = logistic_opt.minimize(logistic_loss, var_list=[A, b])
+
+# Initialize Variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Start Logistic Regression
+print('Starting Logistic Doc2Vec Model Training')
+train_loss = []
+test_loss = []
+train_acc = []
+test_acc = []
+i_data = []
+for i in range(10000):
+ rand_index = np.random.choice(text_data_train.shape[0], size=logistic_batch_size)
+ rand_x = text_data_train[rand_index]
+ # Append review index at the end of text data
+ rand_x_doc_indices = train_indices[rand_index]
+ rand_x = np.hstack((rand_x, np.transpose([rand_x_doc_indices])))
+ rand_y = np.transpose([target_train[rand_index]])
+
+ feed_dict = {log_x_inputs : rand_x, log_y_target : rand_y}
+ sess.run(logistic_train_step, feed_dict=feed_dict)
+
+ # Only record loss and accuracy every 100 generations
+ if (i+1)%100==0:
+ rand_index_test = np.random.choice(text_data_test.shape[0], size=logistic_batch_size)
+ rand_x_test = text_data_test[rand_index_test]
+ # Append review index at the end of text data
+ rand_x_doc_indices_test = test_indices[rand_index_test]
+ rand_x_test = np.hstack((rand_x_test, np.transpose([rand_x_doc_indices_test])))
+ rand_y_test = np.transpose([target_test[rand_index_test]])
+
+ test_feed_dict = {log_x_inputs: rand_x_test, log_y_target: rand_y_test}
+
+ i_data.append(i+1)
+
+ train_loss_temp = sess.run(logistic_loss, feed_dict=feed_dict)
+ train_loss.append(train_loss_temp)
+
+ test_loss_temp = sess.run(logistic_loss, feed_dict=test_feed_dict)
+ test_loss.append(test_loss_temp)
+
+ train_acc_temp = sess.run(accuracy, feed_dict=feed_dict)
+ train_acc.append(train_acc_temp)
+
+ test_acc_temp = sess.run(accuracy, feed_dict=test_feed_dict)
+ test_acc.append(test_acc_temp)
+ if (i+1)%500==0:
+ acc_and_loss = [i+1, train_loss_temp, test_loss_temp, train_acc_temp, test_acc_temp]
+ acc_and_loss = [np.round(x,2) for x in acc_and_loss]
+ print('Generation # {}. Train Loss (Test Loss): {:.2f} ({:.2f}). Train Acc (Test Acc): {:.2f} ({:.2f})'.format(*acc_and_loss))
+
+
+# Plot loss over time
+plt.plot(i_data, train_loss, 'k-', label='Train Loss')
+plt.plot(i_data, test_loss, 'r--', label='Test Loss', linewidth=4)
+plt.title('Cross Entropy Loss per Generation')
+plt.xlabel('Generation')
+plt.ylabel('Cross Entropy Loss')
+plt.legend(loc='upper right')
+plt.show()
+
+# Plot train and test accuracy
+plt.plot(i_data, train_acc, 'k-', label='Train Set Accuracy')
+plt.plot(i_data, test_acc, 'r--', label='Test Set Accuracy', linewidth=4)
+plt.title('Train and Test Accuracy')
+plt.xlabel('Generation')
+plt.ylabel('Accuracy')
+plt.legend(loc='lower right')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/text_helpers.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/text_helpers.py
new file mode 100644
index 000000000..226fd31b5
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/text_helpers.py
@@ -0,0 +1,170 @@
+# Text Helper Functions
+#---------------------------------------
+#
+# We pull out text helper functions to reduce redundant code
+
+import string
+import os
+import urllib.request
+import io
+import tarfile
+import collections
+import numpy as np
+
+# Normalize text
+def normalize_text(texts, stops):
+ # Lower case
+ texts = [x.lower() for x in texts]
+
+ # Remove punctuation
+ texts = [''.join(c for c in x if c not in string.punctuation) for x in texts]
+
+ # Remove numbers
+ texts = [''.join(c for c in x if c not in '0123456789') for x in texts]
+
+ # Remove stopwords
+ texts = [' '.join([word for word in x.split() if word not in (stops)]) for x in texts]
+
+ # Trim extra whitespace
+ texts = [' '.join(x.split()) for x in texts]
+
+ return(texts)
+
+
+# Build dictionary of words
+def build_dictionary(sentences, vocabulary_size):
+ # Turn sentences (list of strings) into lists of words
+ split_sentences = [s.split() for s in sentences]
+ words = [x for sublist in split_sentences for x in sublist]
+
+ # Initialize list of [word, word_count] for each word, starting with unknown
+ count = [['RARE', -1]]
+
+ # Now add most frequent words, limited to the N-most frequent (N=vocabulary size)
+ count.extend(collections.Counter(words).most_common(vocabulary_size-1))
+
+ # Now create the dictionary
+ word_dict = {}
+    # For each word that we want in the dictionary, add it and assign it
+    # the current dictionary length as its index
+ for word, word_count in count:
+ word_dict[word] = len(word_dict)
+
+ return(word_dict)
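+# Illustrative (hypothetical input): build_dictionary(['a a b', 'b c'], 3)
+# returns {'RARE': 0, 'a': 1, 'b': 2}; the most frequent words receive the
+# smallest indices and all remaining words will later map to 'RARE'.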
+
+
+# Turn text data into lists of integers from dictionary
+def text_to_numbers(sentences, word_dict):
+ # Initialize the returned data
+ data = []
+ for sentence in sentences:
+ sentence_data = []
+ # For each word, either use selected index or rare word index
+ for word in sentence.split():
+ if word in word_dict:
+ word_ix = word_dict[word]
+ else:
+ word_ix = 0
+ sentence_data.append(word_ix)
+ data.append(sentence_data)
+ return(data)
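+# Illustrative: with word_dict = {'RARE': 0, 'movie': 1},
+# text_to_numbers(['great movie'], word_dict) returns [[0, 1]] because the
+# out-of-vocabulary word 'great' falls back to the RARE index 0.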
+
+
+# Generate data randomly (N words behind, target, N words ahead)
+def generate_batch_data(sentences, batch_size, window_size, method='skip_gram'):
+ # Fill up data batch
+ batch_data = []
+ label_data = []
+ while len(batch_data) < batch_size:
+ # select random sentence to start
+ rand_sentence_ix = int(np.random.choice(len(sentences), size=1))
+ rand_sentence = sentences[rand_sentence_ix]
+ # Generate consecutive windows to look at
+ window_sequences = [rand_sentence[max((ix-window_size),0):(ix+window_size+1)] for ix, x in enumerate(rand_sentence)]
+ # Denote which element of each window is the center word of interest
+        label_indices = [ix if ix<window_size else window_size for ix,x in enumerate(window_sequences)]
+
+        # Pull out center word of interest for each window and create a tuple for each window
+        if method=='skip_gram':
+            batch_and_labels = [(x[y], x[:y] + x[(y+1):]) for x,y in zip(window_sequences, label_indices)]
+            # Make it into a big list of tuples (target word, surrounding word)
+            tuple_data = [(x, y_) for x,y in batch_and_labels for y_ in y]
+            batch, labels = [list(x) for x in zip(*tuple_data)]
+        elif method=='cbow':
+            batch_and_labels = [(x[:y] + x[(y+1):], x[y]) for x,y in zip(window_sequences, label_indices)]
+            # Only keep windows with a consistent size of 2*window_size
+            batch_and_labels = [(x,y) for x,y in batch_and_labels if len(x)==2*window_size]
+            batch, labels = [list(x) for x in zip(*batch_and_labels)]
+        elif method=='doc2vec':
+            # For doc2vec we keep LHS window only to predict target word
+            batch_and_labels = [(rand_sentence[i:i+window_size], rand_sentence[i+window_size]) for i in range(0, len(rand_sentence)-window_size)]
+            batch, labels = [list(x) for x in zip(*batch_and_labels)]
+            # Add document index to batch: the last index in each batch row is the doc-index
+            batch = [x + [rand_sentence_ix] for x in batch]
+        else:
+            raise ValueError('Method {} not implemented yet.'.format(method))
+
+        # Extract batch and labels
+        batch_data.extend(batch[:batch_size])
+        label_data.extend(labels[:batch_size])
+    # Trim batch and label at the end
+    batch_data = batch_data[:batch_size]
+    label_data = label_data[:batch_size]
+
+    # Convert to numpy array
+    batch_data = np.array(batch_data)
+    label_data = np.transpose(np.array([label_data]))
+
+    return(batch_data, label_data)
+
+
+# Load the movie review data (this mirrors the download/caching logic in
+# word2vec_skipgram.py below, parameterized by the saving folder)
+def load_movie_data(data_folder_name):
+    pos_file = os.path.join(data_folder_name, 'rt-polarity.pos')
+    neg_file = os.path.join(data_folder_name, 'rt-polarity.neg')
+
+    # Check if files are already downloaded
+    if os.path.exists(pos_file) and os.path.exists(neg_file):
+        pos_data = []
+        with open(pos_file, 'r') as temp_pos_file:
+            for row in temp_pos_file:
+                pos_data.append(row)
+        neg_data = []
+        with open(neg_file, 'r') as temp_neg_file:
+            for row in temp_neg_file:
+                neg_data.append(row)
+    else: # If not downloaded, download and save
+        movie_data_url = 'http://www.cs.cornell.edu/people/pabo/movie-review-data/rt-polaritydata.tar.gz'
+        stream_data = urllib.request.urlopen(movie_data_url)
+        tmp = io.BytesIO()
+        while True:
+            s = stream_data.read(16384)
+            if not s:
+                break
+            tmp.write(s)
+        stream_data.close()
+        tmp.seek(0)
+
+        tar_file = tarfile.open(fileobj=tmp, mode="r:gz")
+        pos = tar_file.extractfile('rt-polaritydata/rt-polarity.pos')
+        neg = tar_file.extractfile('rt-polaritydata/rt-polarity.neg')
+        # Save pos/neg reviews
+        pos_data = []
+        for line in pos:
+            pos_data.append(line.decode('ISO-8859-1').encode('ascii',errors='ignore').decode())
+        neg_data = []
+        for line in neg:
+            neg_data.append(line.decode('ISO-8859-1').encode('ascii',errors='ignore').decode())
+        tar_file.close()
+        # Write to file
+        if not os.path.exists(data_folder_name):
+            os.makedirs(data_folder_name)
+        # Save files
+        with open(pos_file, "w") as pos_file_handler:
+            pos_file_handler.write(''.join(pos_data))
+        with open(neg_file, "w") as neg_file_handler:
+            neg_file_handler.write(''.join(neg_data))
+    texts = pos_data + neg_data
+    target = [1]*len(pos_data) + [0]*len(neg_data)
+    return(texts, target)
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/tfidf.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/tfidf.py
new file mode 100644
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/tfidf.py
+# Working with TF-IDF
+#---------------------------------------
+#
+# Here we use the same ham/spam text data as in bag_of_words.py, but
+# weight the word counts by TF-IDF (via scikit-learn's vectorizer)
+# before fitting a TensorFlow logistic regression model.
+
+import tensorflow as tf
+import matplotlib.pyplot as plt
+import csv
+import numpy as np
+import os
+import string
+import requests
+import io
+import nltk
+from zipfile import ZipFile
+from sklearn.feature_extraction.text import TfidfVectorizer
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Start a graph session
+sess = tf.Session()
+
+batch_size = 200
+max_features = 1000
+
+# Check if data was downloaded, otherwise download it and save for future use
+save_file_name = os.path.join('temp','temp_spam_data.csv')
+if os.path.isfile(save_file_name):
+    text_data = []
+    with open(save_file_name, 'r') as temp_output_file:
+        reader = csv.reader(temp_output_file)
+        for row in reader:
+            text_data.append(row)
+else:
+    zip_url = 'http://archive.ics.uci.edu/ml/machine-learning-databases/00228/smsspamcollection.zip'
+    r = requests.get(zip_url)
+    z = ZipFile(io.BytesIO(r.content))
+    file = z.read('SMSSpamCollection')
+    # Format Data
+    text_data = file.decode()
+    text_data = text_data.encode('ascii',errors='ignore')
+    text_data = text_data.decode().split('\n')
+    text_data = [x.split('\t') for x in text_data if len(x)>=1]
+
+ # And write to csv
+ with open(save_file_name, 'w') as temp_output_file:
+ writer = csv.writer(temp_output_file)
+ writer.writerows(text_data)
+
+
+texts = [x[1] for x in text_data]
+target = [x[0] for x in text_data]
+
+# Relabel 'spam' as 1, 'ham' as 0
+target = [1. if x=='spam' else 0. for x in target]
+
+# Normalize text
+# Lower case
+texts = [x.lower() for x in texts]
+
+# Remove punctuation
+texts = [''.join(c for c in x if c not in string.punctuation) for x in texts]
+
+# Remove numbers
+texts = [''.join(c for c in x if c not in '0123456789') for x in texts]
+
+# Trim extra whitespace
+texts = [' '.join(x.split()) for x in texts]
+
+# Define tokenizer
+def tokenizer(text):
+ words = nltk.word_tokenize(text)
+ return words
+
+# Create TF-IDF of texts
+tfidf = TfidfVectorizer(tokenizer=tokenizer, stop_words='english', max_features=max_features)
+sparse_tfidf_texts = tfidf.fit_transform(texts)
+
+# Split up data set into train/test
+train_indices = np.random.choice(sparse_tfidf_texts.shape[0], round(0.8*sparse_tfidf_texts.shape[0]), replace=False)
+test_indices = np.array(list(set(range(sparse_tfidf_texts.shape[0])) - set(train_indices)))
+texts_train = sparse_tfidf_texts[train_indices]
+texts_test = sparse_tfidf_texts[test_indices]
+target_train = np.array([x for ix, x in enumerate(target) if ix in train_indices])
+target_test = np.array([x for ix, x in enumerate(target) if ix in test_indices])
+
+# Create variables for logistic regression
+A = tf.Variable(tf.random_normal(shape=[max_features,1]))
+b = tf.Variable(tf.random_normal(shape=[1,1]))
+
+# Initialize placeholders
+x_data = tf.placeholder(shape=[None, max_features], dtype=tf.float32)
+y_target = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+
+# Declare logistic model (sigmoid in loss function)
+model_output = tf.add(tf.matmul(x_data, A), b)
+
+# Declare loss function (Cross Entropy loss)
+loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(model_output, y_target))
+
+# Actual Prediction
+prediction = tf.round(tf.sigmoid(model_output))
+predictions_correct = tf.cast(tf.equal(prediction, y_target), tf.float32)
+accuracy = tf.reduce_mean(predictions_correct)
+
+# Declare optimizer
+my_opt = tf.train.GradientDescentOptimizer(0.0025)
+train_step = my_opt.minimize(loss)
+
+# Initialize Variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Start Logistic Regression
+train_loss = []
+test_loss = []
+train_acc = []
+test_acc = []
+i_data = []
+for i in range(10000):
+ rand_index = np.random.choice(texts_train.shape[0], size=batch_size)
+ rand_x = texts_train[rand_index].todense()
+ rand_y = np.transpose([target_train[rand_index]])
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+
+ # Only record loss and accuracy every 100 generations
+ if (i+1)%100==0:
+ i_data.append(i+1)
+ train_loss_temp = sess.run(loss, feed_dict={x_data: rand_x, y_target: rand_y})
+ train_loss.append(train_loss_temp)
+
+ test_loss_temp = sess.run(loss, feed_dict={x_data: texts_test.todense(), y_target: np.transpose([target_test])})
+ test_loss.append(test_loss_temp)
+
+ train_acc_temp = sess.run(accuracy, feed_dict={x_data: rand_x, y_target: rand_y})
+ train_acc.append(train_acc_temp)
+
+ test_acc_temp = sess.run(accuracy, feed_dict={x_data: texts_test.todense(), y_target: np.transpose([target_test])})
+ test_acc.append(test_acc_temp)
+ if (i+1)%500==0:
+ acc_and_loss = [i+1, train_loss_temp, test_loss_temp, train_acc_temp, test_acc_temp]
+ acc_and_loss = [np.round(x,2) for x in acc_and_loss]
+ print('Generation # {}. Train Loss (Test Loss): {:.2f} ({:.2f}). Train Acc (Test Acc): {:.2f} ({:.2f})'.format(*acc_and_loss))
+
+
+# Plot loss over time
+plt.plot(i_data, train_loss, 'k-', label='Train Loss')
+plt.plot(i_data, test_loss, 'r--', label='Test Loss', linewidth=4)
+plt.title('Cross Entropy Loss per Generation')
+plt.xlabel('Generation')
+plt.ylabel('Cross Entropy Loss')
+plt.legend(loc='upper right')
+plt.show()
+
+# Plot train and test accuracy
+plt.plot(i_data, train_acc, 'k-', label='Train Set Accuracy')
+plt.plot(i_data, test_acc, 'r--', label='Test Set Accuracy', linewidth=4)
+plt.title('Train and Test Accuracy')
+plt.xlabel('Generation')
+plt.ylabel('Accuracy')
+plt.legend(loc='lower right')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/using_word2vec.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/using_word2vec.py
new file mode 100644
index 000000000..daec8cbe1
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/using_word2vec.py
@@ -0,0 +1,165 @@
+# Using Word2Vec for prediction
+#---------------------------------------
+#
+# In this example, we will load our prior CBOW trained embeddings
+# to perform logistic regression model for movie review predictions
+#
+# From this data set we will compute/fit the CBOW model of
+# the Word2Vec Algorithm
+import tensorflow as tf
+import matplotlib.pyplot as plt
+import numpy as np
+import random
+import os
+import pickle
+import string
+import requests
+import collections
+import io
+import tarfile
+import urllib.request
+import text_helpers
+from nltk.corpus import stopwords
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+os.chdir(os.path.dirname(os.path.realpath(__file__)))
+
+# Start a graph session
+sess = tf.Session()
+
+# Declare model parameters
+embedding_size = 200
+vocabulary_size = 2000
+batch_size = 100
+max_words = 100
+
+# Declare stop words
+stops = stopwords.words('english')
+
+# Load Data
+print('Loading Data')
+data_folder_name = 'temp'
+texts, target = text_helpers.load_movie_data(data_folder_name)
+
+# Normalize text
+print('Normalizing Text Data')
+texts = text_helpers.normalize_text(texts, stops)
+
+# Texts must contain at least 3 words
+target = [target[ix] for ix, x in enumerate(texts) if len(x.split()) > 2]
+texts = [x for x in texts if len(x.split()) > 2]
+
+# Split up data set into train/test
+train_indices = np.random.choice(len(target), round(0.8*len(target)), replace=False)
+test_indices = np.array(list(set(range(len(target))) - set(train_indices)))
+texts_train = [x for ix, x in enumerate(texts) if ix in train_indices]
+texts_test = [x for ix, x in enumerate(texts) if ix in test_indices]
+target_train = np.array([x for ix, x in enumerate(target) if ix in train_indices])
+target_test = np.array([x for ix, x in enumerate(target) if ix in test_indices])
+
+# Load dictionary and embedding matrix
+dict_file = os.path.join(data_folder_name, 'movie_vocab.pkl')
+word_dictionary = pickle.load(open(dict_file, 'rb'))
+
+# Convert texts to lists of indices
+text_data_train = np.array(text_helpers.text_to_numbers(texts_train, word_dictionary))
+text_data_test = np.array(text_helpers.text_to_numbers(texts_test, word_dictionary))
+
+# Pad/crop movie reviews to specific length
+text_data_train = np.array([x[0:max_words] for x in [y+[0]*max_words for y in text_data_train]])
+text_data_test = np.array([x[0:max_words] for x in [y+[0]*max_words for y in text_data_test]])
+
+print('Creating Model')
+# Define Embeddings:
+embeddings = tf.Variable(tf.random_uniform([vocabulary_size, embedding_size], -1.0, 1.0))
+
+# Define model:
+# Create variables for logistic regression
+A = tf.Variable(tf.random_normal(shape=[embedding_size,1]))
+b = tf.Variable(tf.random_normal(shape=[1,1]))
+
+# Initialize placeholders
+x_data = tf.placeholder(shape=[None, max_words], dtype=tf.int32)
+y_target = tf.placeholder(shape=[None, 1], dtype=tf.float32)
+
+# Lookup embeddings vectors
+embed = tf.nn.embedding_lookup(embeddings, x_data)
+# Take average of all word embeddings in documents
+embed_avg = tf.reduce_mean(embed, 1)
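+# Averaging the looked-up vectors yields one fixed-length feature vector per
+# review regardless of length; note that the padding index 0 contributes the
+# 'RARE' embedding row to this average.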
+
+# Declare logistic model (sigmoid in loss function)
+model_output = tf.add(tf.matmul(embed_avg, A), b)
+
+# Declare loss function (Cross Entropy loss)
+loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(model_output, y_target))
+
+# Actual Prediction
+prediction = tf.round(tf.sigmoid(model_output))
+predictions_correct = tf.cast(tf.equal(prediction, y_target), tf.float32)
+accuracy = tf.reduce_mean(predictions_correct)
+
+# Declare optimizer
+my_opt = tf.train.AdagradOptimizer(0.005)
+train_step = my_opt.minimize(loss)
+
+# Initialize Variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Load model embeddings
+model_checkpoint_path = os.path.join(data_folder_name,'cbow_movie_embeddings.ckpt')
+saver = tf.train.Saver({"embeddings": embeddings})
+saver.restore(sess, model_checkpoint_path)
+
+
+# Start Logistic Regression
+print('Starting Model Training')
+train_loss = []
+test_loss = []
+train_acc = []
+test_acc = []
+i_data = []
+for i in range(10000):
+ rand_index = np.random.choice(text_data_train.shape[0], size=batch_size)
+ rand_x = text_data_train[rand_index]
+ rand_y = np.transpose([target_train[rand_index]])
+ sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
+
+ # Only record loss and accuracy every 100 generations
+ if (i+1)%100==0:
+ i_data.append(i+1)
+ train_loss_temp = sess.run(loss, feed_dict={x_data: rand_x, y_target: rand_y})
+ train_loss.append(train_loss_temp)
+
+ test_loss_temp = sess.run(loss, feed_dict={x_data: text_data_test, y_target: np.transpose([target_test])})
+ test_loss.append(test_loss_temp)
+
+ train_acc_temp = sess.run(accuracy, feed_dict={x_data: rand_x, y_target: rand_y})
+ train_acc.append(train_acc_temp)
+
+ test_acc_temp = sess.run(accuracy, feed_dict={x_data: text_data_test, y_target: np.transpose([target_test])})
+ test_acc.append(test_acc_temp)
+ if (i+1)%500==0:
+ acc_and_loss = [i+1, train_loss_temp, test_loss_temp, train_acc_temp, test_acc_temp]
+ acc_and_loss = [np.round(x,2) for x in acc_and_loss]
+ print('Generation # {}. Train Loss (Test Loss): {:.2f} ({:.2f}). Train Acc (Test Acc): {:.2f} ({:.2f})'.format(*acc_and_loss))
+
+
+# Plot loss over time
+plt.plot(i_data, train_loss, 'k-', label='Train Loss')
+plt.plot(i_data, test_loss, 'r--', label='Test Loss', linewidth=4)
+plt.title('Cross Entropy Loss per Generation')
+plt.xlabel('Generation')
+plt.ylabel('Cross Entropy Loss')
+plt.legend(loc='upper right')
+plt.show()
+
+# Plot train and test accuracy
+plt.plot(i_data, train_acc, 'k-', label='Train Set Accuracy')
+plt.plot(i_data, test_acc, 'r--', label='Test Set Accuracy', linewidth=4)
+plt.title('Train and Test Accuracy')
+plt.xlabel('Generation')
+plt.ylabel('Accuracy')
+plt.legend(loc='lower right')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/word2vec_cbow.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/word2vec_cbow.py
new file mode 100644
index 000000000..4b0ff3f5b
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/word2vec_cbow.py
@@ -0,0 +1,160 @@
+# Word2Vec: CBOW Model (Continuous Bag of Words)
+#---------------------------------------
+#
+# In this example, we will download and preprocess the movie
+# review data.
+#
+# From this data set we will compute/fit the CBOW model of
+# the Word2Vec Algorithm
+import tensorflow as tf
+import matplotlib.pyplot as plt
+import numpy as np
+import random
+import os
+import pickle
+import string
+import requests
+import collections
+import io
+import tarfile
+import urllib.request
+import text_helpers
+from nltk.corpus import stopwords
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+os.chdir(os.path.dirname(os.path.realpath(__file__)))
+
+# Make a saving directory if it doesn't exist
+data_folder_name = 'temp'
+if not os.path.exists(data_folder_name):
+ os.makedirs(data_folder_name)
+
+# Start a graph session
+sess = tf.Session()
+
+# Declare model parameters
+batch_size = 500
+embedding_size = 200
+vocabulary_size = 2000
+generations = 50000
+model_learning_rate = 0.001
+
+num_sampled = int(batch_size/2) # Number of negative examples to sample.
+window_size = 3 # How many words to consider left and right.
+
+# Add checkpoints to training
+save_embeddings_every = 5000
+print_valid_every = 5000
+print_loss_every = 100
+
+# Declare stop words
+stops = stopwords.words('english')
+
+# We pick some test words. We are expecting synonyms to appear
+valid_words = ['love', 'hate', 'happy', 'sad', 'man', 'woman']
+# Later we will have to transform these into indices
+
+# Load the movie review data
+print('Loading Data')
+texts, target = text_helpers.load_movie_data(data_folder_name)
+
+# Normalize text
+print('Normalizing Text Data')
+texts = text_helpers.normalize_text(texts, stops)
+
+# Texts must contain at least 3 words
+target = [target[ix] for ix, x in enumerate(texts) if len(x.split()) > 2]
+texts = [x for x in texts if len(x.split()) > 2]
+
+# Build our data set and dictionaries
+print('Creating Dictionary')
+word_dictionary = text_helpers.build_dictionary(texts, vocabulary_size)
+word_dictionary_rev = dict(zip(word_dictionary.values(), word_dictionary.keys()))
+text_data = text_helpers.text_to_numbers(texts, word_dictionary)
+
+# Get validation word keys
+valid_examples = [word_dictionary[x] for x in valid_words]
+
+print('Creating Model')
+# Define Embeddings:
+embeddings = tf.Variable(tf.random_uniform([vocabulary_size, embedding_size], -1.0, 1.0))
+
+# NCE loss parameters
+nce_weights = tf.Variable(tf.truncated_normal([vocabulary_size, embedding_size],
+ stddev=1.0 / np.sqrt(embedding_size)))
+nce_biases = tf.Variable(tf.zeros([vocabulary_size]))
+
+# Create data/target placeholders
+x_inputs = tf.placeholder(tf.int32, shape=[batch_size, 2*window_size])
+y_target = tf.placeholder(tf.int32, shape=[batch_size, 1])
+valid_dataset = tf.constant(valid_examples, dtype=tf.int32)
+
+# Lookup the word embedding
+# Add together window embeddings:
+embed = tf.zeros([batch_size, embedding_size])
+for element in range(2*window_size):
+ embed += tf.nn.embedding_lookup(embeddings, x_inputs[:, element])
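+# Each CBOW example is the sum of the 2*window_size surrounding word
+# embeddings; the NCE loss below then tries to predict the center word from
+# this summed context vector.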
+
+# Get loss from prediction
+loss = tf.reduce_mean(tf.nn.nce_loss(nce_weights, nce_biases, embed, y_target,
+ num_sampled, vocabulary_size))
+
+# Create optimizer
+optimizer = tf.train.GradientDescentOptimizer(learning_rate=model_learning_rate).minimize(loss)
+
+# Cosine similarity between words
+norm = tf.sqrt(tf.reduce_sum(tf.square(embeddings), 1, keep_dims=True))
+normalized_embeddings = embeddings / norm
+valid_embeddings = tf.nn.embedding_lookup(normalized_embeddings, valid_dataset)
+similarity = tf.matmul(valid_embeddings, normalized_embeddings, transpose_b=True)
+
+# Create model saving operation
+saver = tf.train.Saver({"embeddings": embeddings})
+
+#Add variable initializer.
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Run the CBOW model.
+print('Starting Training')
+loss_vec = []
+loss_x_vec = []
+for i in range(generations):
+ batch_inputs, batch_labels = text_helpers.generate_batch_data(text_data, batch_size,
+ window_size, method='cbow')
+ feed_dict = {x_inputs : batch_inputs, y_target : batch_labels}
+
+ # Run the train step
+ sess.run(optimizer, feed_dict=feed_dict)
+
+ # Return the loss
+ if (i+1) % print_loss_every == 0:
+ loss_val = sess.run(loss, feed_dict=feed_dict)
+ loss_vec.append(loss_val)
+ loss_x_vec.append(i+1)
+ print('Loss at step {} : {}'.format(i+1, loss_val))
+
+ # Validation: Print some random words and top 5 related words
+ if (i+1) % print_valid_every == 0:
+ sim = sess.run(similarity, feed_dict=feed_dict)
+ for j in range(len(valid_words)):
+ valid_word = word_dictionary_rev[valid_examples[j]]
+ top_k = 5 # number of nearest neighbors
+ nearest = (-sim[j, :]).argsort()[1:top_k+1]
+ log_str = "Nearest to {}:".format(valid_word)
+ for k in range(top_k):
+ close_word = word_dictionary_rev[nearest[k]]
+                log_str = '{} {},'.format(log_str, close_word)
+ print(log_str)
+
+ # Save dictionary + embeddings
+ if (i+1) % save_embeddings_every == 0:
+ # Save vocabulary dictionary
+ with open(os.path.join(data_folder_name,'movie_vocab.pkl'), 'wb') as f:
+ pickle.dump(word_dictionary, f)
+
+ # Save embeddings
+ model_checkpoint_path = os.path.join(os.getcwd(),data_folder_name,'cbow_movie_embeddings.ckpt')
+ save_path = saver.save(sess, model_checkpoint_path)
+ print('Model saved in file: {}'.format(save_path))
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/word2vec_skipgram.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/word2vec_skipgram.py
new file mode 100644
index 000000000..82bf32e20
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 07/word2vec_skipgram.py
@@ -0,0 +1,282 @@
+# Word2Vec: Skipgram Model
+#---------------------------------------
+#
+# In this example, we will download and preprocess the movie
+# review data.
+#
+# From this data set we will compute/fit the skipgram model of
+# the Word2Vec Algorithm
+#
+# Skipgram: based on predicting the surrounding words from a single
+# word of interest.
+# Ex: sentence "the cat in the hat"
+# context word: ["hat"]
+# target words: ["the", "cat", "in", "the"]
+# context-target pairs:
+# ("hat", "the"), ("hat", "cat"), ("hat", "in"), ("hat", "the")
+
+
+import tensorflow as tf
+import matplotlib.pyplot as plt
+import numpy as np
+import random
+import os
+import string
+import requests
+import collections
+import io
+import tarfile
+import urllib.request
+from nltk.corpus import stopwords
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+os.chdir(os.path.dirname(os.path.realpath(__file__)))
+
+# Start a graph session
+sess = tf.Session()
+
+# Declare model parameters
+batch_size = 50
+embedding_size = 200
+vocabulary_size = 10000
+generations = 50000
+print_loss_every = 500
+
+num_sampled = int(batch_size/2) # Number of negative examples to sample.
+window_size = 2 # How many words to consider left and right.
+
+# Declare stop words
+stops = stopwords.words('english')
+
+# We pick five test words. We are expecting synonyms to appear
+print_valid_every = 2000
+valid_words = ['cliche', 'love', 'hate', 'silly', 'sad']
+# Later we will have to transform these into indices
+
+# Load the movie review data
+# Check if data was downloaded, otherwise download it and save for future use
+def load_movie_data():
+ save_folder_name = 'temp'
+ pos_file = os.path.join(save_folder_name, 'rt-polarity.pos')
+ neg_file = os.path.join(save_folder_name, 'rt-polarity.neg')
+
+ # Check if files are already downloaded
+ if os.path.exists(save_folder_name):
+ pos_data = []
+ with open(pos_file, 'r') as temp_pos_file:
+ for row in temp_pos_file:
+ pos_data.append(row)
+ neg_data = []
+ with open(neg_file, 'r') as temp_neg_file:
+ for row in temp_neg_file:
+ neg_data.append(row)
+ else: # If not downloaded, download and save
+ movie_data_url = 'http://www.cs.cornell.edu/people/pabo/movie-review-data/rt-polaritydata.tar.gz'
+ stream_data = urllib.request.urlopen(movie_data_url)
+ tmp = io.BytesIO()
+ while True:
+ s = stream_data.read(16384)
+ if not s:
+ break
+ tmp.write(s)
+ stream_data.close()
+ tmp.seek(0)
+
+ tar_file = tarfile.open(fileobj=tmp, mode="r:gz")
+ pos = tar_file.extractfile('rt-polaritydata/rt-polarity.pos')
+ neg = tar_file.extractfile('rt-polaritydata/rt-polarity.neg')
+ # Save pos/neg reviews
+ pos_data = []
+ for line in pos:
+ pos_data.append(line.decode('ISO-8859-1').encode('ascii',errors='ignore').decode())
+ neg_data = []
+ for line in neg:
+ neg_data.append(line.decode('ISO-8859-1').encode('ascii',errors='ignore').decode())
+ tar_file.close()
+ # Write to file
+ if not os.path.exists(save_folder_name):
+ os.makedirs(save_folder_name)
+ # Save files
+ with open(pos_file, "w") as pos_file_handler:
+ pos_file_handler.write(''.join(pos_data))
+ with open(neg_file, "w") as neg_file_handler:
+ neg_file_handler.write(''.join(neg_data))
+ texts = pos_data + neg_data
+ target = [1]*len(pos_data) + [0]*len(neg_data)
+ return(texts, target)
+
+texts, target = load_movie_data()
+
+# Normalize text
+def normalize_text(texts, stops):
+ # Lower case
+ texts = [x.lower() for x in texts]
+
+ # Remove punctuation
+ texts = [''.join(c for c in x if c not in string.punctuation) for x in texts]
+
+ # Remove numbers
+ texts = [''.join(c for c in x if c not in '0123456789') for x in texts]
+
+ # Remove stopwords
+ texts = [' '.join([word for word in x.split() if word not in (stops)]) for x in texts]
+
+ # Trim extra whitespace
+ texts = [' '.join(x.split()) for x in texts]
+
+ return(texts)
+
+texts = normalize_text(texts, stops)
+
+# Texts must contain at least 3 words
+target = [target[ix] for ix, x in enumerate(texts) if len(x.split()) > 2]
+texts = [x for x in texts if len(x.split()) > 2]
+
+# Build dictionary of words
+def build_dictionary(sentences, vocabulary_size):
+ # Turn sentences (list of strings) into lists of words
+ split_sentences = [s.split() for s in sentences]
+ words = [x for sublist in split_sentences for x in sublist]
+
+ # Initialize list of [word, word_count] for each word, starting with unknown
+ count = [['RARE', -1]]
+
+ # Now add most frequent words, limited to the N-most frequent (N=vocabulary size)
+ count.extend(collections.Counter(words).most_common(vocabulary_size-1))
+
+ # Now create the dictionary
+ word_dict = {}
+    # For each word that we want in the dictionary, add it and assign it
+    # the current dictionary length as its index
+ for word, word_count in count:
+ word_dict[word] = len(word_dict)
+
+ return(word_dict)
+
+
+# Turn text data into lists of integers from dictionary
+def text_to_numbers(sentences, word_dict):
+ # Initialize the returned data
+ data = []
+ for sentence in sentences:
+ sentence_data = []
+ # For each word, either use selected index or rare word index
+        for word in sentence.split():
+ if word in word_dict:
+ word_ix = word_dict[word]
+ else:
+ word_ix = 0
+ sentence_data.append(word_ix)
+ data.append(sentence_data)
+ return(data)
+
+# Build our data set and dictionaries
+word_dictionary = build_dictionary(texts, vocabulary_size)
+word_dictionary_rev = dict(zip(word_dictionary.values(), word_dictionary.keys()))
+text_data = text_to_numbers(texts, word_dictionary)
+
+# Get validation word keys
+valid_examples = [word_dictionary[x] for x in valid_words]
+
+# Generate data randomly (N words behind, target, N words ahead)
+def generate_batch_data(sentences, batch_size, window_size, method='skip_gram'):
+ # Fill up data batch
+ batch_data = []
+ label_data = []
+ while len(batch_data) < batch_size:
+ # select random sentence to start
+ rand_sentence = np.random.choice(sentences)
+ # Generate consecutive windows to look at
+ window_sequences = [rand_sentence[max((ix-window_size),0):(ix+window_size+1)] for ix, x in enumerate(rand_sentence)]
+ # Denote which element of each window is the center word of interest
+        label_indices = [ix if ix<window_size else window_size for ix,x in enumerate(window_sequences)]
+
+        # Pull out center word of interest for each window and create a tuple for each window
+        if method=='skip_gram':
+            batch_and_labels = [(x[y], x[:y] + x[(y+1):]) for x,y in zip(window_sequences, label_indices)]
+            # Make it into a big list of tuples (target word, surrounding word)
+            tuple_data = [(x, y_) for x,y in batch_and_labels for y_ in y]
+        else:
+            raise ValueError('Method {} not implemented yet.'.format(method))
+
+        # Extract batch and labels
+        batch, labels = [list(x) for x in zip(*tuple_data)]
+        batch_data.extend(batch[:batch_size])
+        label_data.extend(labels[:batch_size])
+    # Trim batch and label at the end
+    batch_data = batch_data[:batch_size]
+    label_data = label_data[:batch_size]
+
+    # Convert to numpy array
+    batch_data = np.array(batch_data)
+    label_data = np.transpose(np.array([label_data]))
+
+    return(batch_data, label_data)
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 08/cnn_cifar10.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 08/cnn_cifar10.py
new file mode 100644
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 08/cnn_cifar10.py
+    # Last Fully Connected Layer -> 10 categories for output (num_targets)
+ with tf.variable_scope('full3') as scope:
+ # Final fully connected layer has 10 (num_targets) outputs.
+ full_weight3 = truncated_normal_var(name='full_mult3', shape=[192, num_targets], dtype=tf.float32)
+ full_bias3 = zero_var(name='full_bias3', shape=[num_targets], dtype=tf.float32)
+ final_output = tf.add(tf.matmul(full_layer2, full_weight3), full_bias3)
+
+ return(final_output)
+
+
+# Loss function
+def cifar_loss(logits, targets):
+ # Get rid of extra dimensions and cast targets into integers
+ targets = tf.squeeze(tf.cast(targets, tf.int32))
+ # Calculate cross entropy from logits and targets
+ cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits, targets)
+ # Take the average loss across batch size
+ cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy')
+ return(cross_entropy_mean)
+
+
+# Train step
+def train_step(loss_value, generation_num):
+ # Our learning rate is an exponential decay after we wait a fair number of generations
+ model_learning_rate = tf.train.exponential_decay(learning_rate, generation_num,
+ num_gens_to_wait, lr_decay, staircase=True)
+ # Create optimizer
+ my_optimizer = tf.train.GradientDescentOptimizer(model_learning_rate)
+    # Initialize train step; passing generation_num as global_step makes the
+    # optimizer increment it, so the decay schedule above actually advances
+    train_step = my_optimizer.minimize(loss_value, global_step=generation_num)
+ return(train_step)
+
+
+# Accuracy function
+def accuracy_of_batch(logits, targets):
+ # Make sure targets are integers and drop extra dimensions
+ targets = tf.squeeze(tf.cast(targets, tf.int32))
+ # Get predicted values by finding which logit is the greatest
+ batch_predictions = tf.cast(tf.argmax(logits, 1), tf.int32)
+ # Check if they are equal across the batch
+ predicted_correctly = tf.equal(batch_predictions, targets)
+ # Average the 1's and 0's (True's and False's) across the batch size
+ accuracy = tf.reduce_mean(tf.cast(predicted_correctly, tf.float32))
+ return(accuracy)
+
+# Get data
+print('Getting/Transforming Data.')
+# Initialize the data pipeline
+images, targets = input_pipeline(batch_size, train_logical=True)
+# Get batch test images and targets from pipline
+test_images, test_targets = input_pipeline(batch_size, train_logical=False)
+
+# Declare Model
+print('Creating the CIFAR10 Model.')
+with tf.variable_scope('model_definition') as scope:
+ # Declare the training network model
+ model_output = cifar_cnn_model(images, batch_size)
+ # This is very important!!! We must set the scope to REUSE the variables,
+ # otherwise, when we set the test network model, it will create new random
+ # variables. Otherwise we get random evaluations on the test batches.
+ scope.reuse_variables()
+ test_output = cifar_cnn_model(test_images, batch_size)
+
+# Declare loss function
+print('Declare Loss Function.')
+loss = cifar_loss(model_output, targets)
+
+# Create accuracy function
+accuracy = accuracy_of_batch(test_output, test_targets)
+
+# Create training operations
+print('Creating the Training Operation.')
+generation_num = tf.Variable(0, trainable=False)
+train_op = train_step(loss, generation_num)
+
+# Initialize Variables
+print('Initializing the Variables.')
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Initialize queue (This queue will feed into the model, so no placeholders necessary)
+tf.train.start_queue_runners(sess=sess)
+
+# Train CIFAR Model
+print('Starting Training')
+train_loss = []
+test_accuracy = []
+for i in range(generations):
+ _, loss_value = sess.run([train_op, loss])
+
+ if (i+1) % output_every == 0:
+ train_loss.append(loss_value)
+ output = 'Generation {}: Loss = {:.5f}'.format((i+1), loss_value)
+ print(output)
+
+ if (i+1) % eval_every == 0:
+ [temp_accuracy] = sess.run([accuracy])
+ test_accuracy.append(temp_accuracy)
+ acc_output = ' --- Test Accuracy = {:.2f}%.'.format(100.*temp_accuracy)
+ print(acc_output)
+
+# Print loss and accuracy
+# Matplotlib code to plot the loss and accuracies
+eval_indices = range(0, generations, eval_every)
+output_indices = range(0, generations, output_every)
+
+# Plot loss over time
+plt.plot(output_indices, train_loss, 'k-')
+plt.title('Softmax Loss per Generation')
+plt.xlabel('Generation')
+plt.ylabel('Softmax Loss')
+plt.show()
+
+# Plot accuracy over time
+plt.plot(eval_indices, test_accuracy, 'k-')
+plt.title('Test Accuracy')
+plt.xlabel('Generation')
+plt.ylabel('Accuracy')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 08/deepdream.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 08/deepdream.py
new file mode 100644
index 000000000..cf4709551
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 08/deepdream.py
@@ -0,0 +1,192 @@
+# Using Tensorflow for Deep Dream
+#---------------------------------------
+# From: Alexander Mordvintsev
+# --https://github.com/tensorflow/tensorflow/tree/master/tensorflow/examples/tutorials/deepdream
+#
+# Make sure to download the deep dream model here:
+# https://storage.googleapis.com/download.tensorflow.org/models/inception5h.zip
+#
+# Run:
+# me@computer:~$ wget https://storage.googleapis.com/download.tensorflow.org/models/inception5h.zip
+# me@computer:~$ unzip inception5h.zip
+#
+# More comments added inline.
+
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import os
+import matplotlib.pyplot as plt
+import numpy as np
+import PIL.Image
+import tensorflow as tf
+from io import BytesIO
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Start a graph session
+graph = tf.Graph()
+sess = tf.InteractiveSession(graph=graph)
+
+os.chdir('/home/nick/Documents/tensorflow/inception-v1-model/')
+
+# Model location
+model_fn = 'tensorflow_inception_graph.pb'
+
+# Load graph parameters
+with tf.gfile.FastGFile(model_fn, 'rb') as f:
+ graph_def = tf.GraphDef()
+ graph_def.ParseFromString(f.read())
+
+# Create placeholder for input
+t_input = tf.placeholder(np.float32, name='input')
+
+# Imagenet average bias to subtract off images
+imagenet_mean = 117.0
+t_preprocessed = tf.expand_dims(t_input-imagenet_mean, 0)
+tf.import_graph_def(graph_def, {'input':t_preprocessed})
+
+# Create a list of layers that we can refer to later
+layers = [op.name for op in graph.get_operations() if op.type=='Conv2D' and 'import/' in op.name]
+
+# Count how many outputs for each layer
+feature_nums = [int(graph.get_tensor_by_name(name+':0').get_shape()[-1]) for name in layers]
+
+# Print count of layers and outputs (features nodes)
+print('Number of layers', len(layers))
+print('Total number of feature channels:', sum(feature_nums))
+
+# Picking some internal layer. Note that we use outputs before applying the ReLU nonlinearity
+# to have non-zero gradients for features with negative initial activations.
+layer = 'mixed4d_3x3_bottleneck_pre_relu'
+channel = 30 # picking some feature channel to visualize
+
+# start with a gray image with a little noise
+img_noise = np.random.uniform(size=(224,224,3)) + 100.0
+
+def showarray(a, fmt='jpeg'):
+    # First make sure everything is between 0 and 255
+    a = np.uint8(np.clip(a, 0, 1)*255)
+    # Encode the array as an in-memory image (kept for parity with the
+    # notebook version of this recipe)
+    f = BytesIO()
+    PIL.Image.fromarray(a).save(f, fmt)
+    # Show image
+    plt.imshow(a)
+    plt.show()
+
+
+def T(layer):
+ '''Helper for getting layer output tensor'''
+ return graph.get_tensor_by_name("import/%s:0"%layer)
+
+
+# The following function returns a function wrapper that will create the placeholder
+# inputs of a specified dtype
+def tffunc(*argtypes):
+ '''Helper that transforms TF-graph generating function into a regular one.
+ See "resize" function below.
+ '''
+ placeholders = list(map(tf.placeholder, argtypes))
+ def wrap(f):
+ out = f(*placeholders)
+ def wrapper(*args, **kw):
+ return out.eval(dict(zip(placeholders, args)), session=kw.get('session'))
+ return wrapper
+ return wrap
+
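+# Illustrative use (hypothetical, not from the recipe): wrapping a doubling
+# function so it evaluates through a float32 placeholder in the session:
+#   double = tffunc(np.float32)(lambda x: x * 2.0)
+#   double(np.ones(3))  # -> array([2., 2., 2.], dtype=float32)
+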
+
+# Helper function that uses TF to resize an image
+def resize(img, size):
+ img = tf.expand_dims(img, 0)
+ # Change 'img' size by linear interpolation
+ return tf.image.resize_bilinear(img, size)[0,:,:,:]
+
+
+def calc_grad_tiled(img, t_grad, tile_size=512):
+ '''Compute the value of tensor t_grad over the image in a tiled way.
+ Random shifts are applied to the image to blur tile boundaries over
+ multiple iterations.'''
+ # Pick a subregion square size
+ sz = tile_size
+ # Get the image height and width
+ h, w = img.shape[:2]
+ # Get a random shift amount in the x and y direction
+ sx, sy = np.random.randint(sz, size=2)
+ # Randomly shift the image (roll image) in the x and y directions
+ img_shift = np.roll(np.roll(img, sx, 1), sy, 0)
+    # Initialize the whole-image gradient as zeros
+ grad = np.zeros_like(img)
+ # Now we loop through all the sub-tiles in the image
+ for y in range(0, max(h-sz//2, sz),sz):
+ for x in range(0, max(w-sz//2, sz),sz):
+ # Select the sub image tile
+ sub = img_shift[y:y+sz,x:x+sz]
+ # Calculate the gradient for the tile
+ g = sess.run(t_grad, {t_input:sub})
+ # Apply the gradient of the tile to the whole image gradient
+ grad[y:y+sz,x:x+sz] = g
+ # Return the gradient, undoing the roll operation
+ return np.roll(np.roll(grad, -sx, 1), -sy, 0)
+
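+# Illustrative example (assumed sizes): for a 768x1024 RGB image and the
+# default tile_size=512, the tile loops visit y in {0} and x in {0, 512},
+# so each sess.run evaluates t_grad on at most a 512x512 crop; the random
+# roll moves the tile seams to different pixels on every call.
+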
+def render_deepdream(t_obj, img0=img_noise,
+ iter_n=10, step=1.5, octave_n=4, octave_scale=1.4):
+    # Define the optimization objective: the mean of the selected feature
+ t_score = tf.reduce_mean(t_obj)
+ # Our gradients will be defined as changing the t_input to get closer to
+ # the values of t_score. Here, t_score is the mean of the feature we select,
+ # and t_input will be the image octave (starting with the last)
+ t_grad = tf.gradients(t_score, t_input)[0] # behold the power of automatic differentiation!
+
+ # Store the image
+ img = img0
+ # Initialize the octave list
+ octaves = []
+    # Since we stored the image, we only need to calculate n-1 octaves
+ for i in range(octave_n-1):
+ # Extract the image shape
+ hw = img.shape[:2]
+ # Resize the image, scale by the octave_scale (resize by linear interpolation)
+ lo = resize(img, np.int32(np.float32(hw)/octave_scale))
+ # Residual is hi. Where residual = image - (Resize lo to be hw-shape)
+ hi = img-resize(lo, hw)
+ # Save the lo image for re-iterating
+ img = lo
+ # Save the extracted hi-image
+ octaves.append(hi)
+
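+    # (Illustrative: with a 224x224 input and octave_scale=1.4, the stored
+    # octave sizes step down 224 -> 160 -> 114 -> 81 via np.int32 truncation.)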
+ # generate details octave by octave
+ for octave in range(octave_n):
+ if octave>0:
+ # Start with the last octave
+ hi = octaves[-octave]
+            # Add the high-frequency detail back after upscaling
+ img = resize(img, hi.shape[:2])+hi
+ for i in range(iter_n):
+ # Calculate gradient of the image.
+ g = calc_grad_tiled(img, t_grad)
+            # Ideally we would just add the gradient g, but we scale it so
+            # each update moves the image by a fixed amount ('step'):
+            # divide by the average absolute gradient, adding 1e-7 to
+            # avoid division by zero.
+ img += g*(step / (np.abs(g).mean()+1e-7))
+ print('.',end = ' ')
+ showarray(img/255.0)
+
+# Run Deep Dream
+if __name__=="__main__":
+ # Create resize function that has a wrapper that creates specified placeholder types
+ resize = tffunc(np.float32, np.int32)(resize)
+
+ # Open image
+ img0 = PIL.Image.open('book_cover.jpg')
+ img0 = np.float32(img0)
+ # Show Original Image
+ showarray(img0/255.0)
+
+ # Create deep dream
+ render_deepdream(T(layer)[:,:,:,channel], img0, iter_n=15)
+
+ sess.close()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 08/download_cifar10.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 08/download_cifar10.py
new file mode 100644
index 000000000..dc765f568
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 08/download_cifar10.py
@@ -0,0 +1,92 @@
+# Download/Saving CIFAR-10 images in Inception format
+#---------------------------------------
+#
+# In this script, we download the CIFAR-10 images and
+# transform/save them in the Inception Retraining Format
+#
+# The end purpose of these files is to retrain the
+# Google Inception tensorflow model to work on CIFAR-10.
+
+import os
+import tarfile
+import _pickle as cPickle
+import numpy as np
+import urllib.request
+import scipy.misc
+
+cifar_link = 'https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz'
+data_dir = 'temp'
+if not os.path.isdir(data_dir):
+ os.makedirs(data_dir)
+
+# Download tar file
+target_file = os.path.join(data_dir, 'cifar-10-python.tar.gz')
+if not os.path.isfile(target_file):
+ print('CIFAR-10 file not found. Downloading CIFAR data (Size = 163MB)')
+ print('This may take a few minutes, please wait.')
+ filename, headers = urllib.request.urlretrieve(cifar_link, target_file)
+
+# Extract into memory
+tar = tarfile.open(target_file)
+tar.extractall(path=data_dir)
+tar.close()
+objects = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']
+
+# Create train image folders
+train_folder = 'train_dir'
+if not os.path.isdir(os.path.join(data_dir, train_folder)):
+ for i in range(10):
+ folder = os.path.join(data_dir, train_folder, objects[i])
+ os.makedirs(folder)
+# Create test image folders
+test_folder = 'validation_dir'
+if not os.path.isdir(os.path.join(data_dir, test_folder)):
+ for i in range(10):
+ folder = os.path.join(data_dir, test_folder, objects[i])
+ os.makedirs(folder)
+
+# Extract images accordingly
+data_location = os.path.join(data_dir, 'cifar-10-batches-py')
+train_names = ['data_batch_' + str(x) for x in range(1,6)]
+test_names = ['test_batch']
+
+
+def load_batch_from_file(file):
+ file_conn = open(file, 'rb')
+ image_dictionary = cPickle.load(file_conn, encoding='latin1')
+ file_conn.close()
+ return(image_dictionary)
+
+
+def save_images_from_dict(image_dict, folder='data_dir'):
+ # image_dict.keys() = 'labels', 'filenames', 'data', 'batch_label'
+ for ix, label in enumerate(image_dict['labels']):
+ folder_path = os.path.join(data_dir, folder, objects[label])
+ filename = image_dict['filenames'][ix]
+        # Transform the flat image data into a (3, 32, 32) array
+ image_array = image_dict['data'][ix]
+ image_array.resize([3, 32, 32])
+ # Save image
+ output_location = os.path.join(folder_path, filename)
+ scipy.misc.imsave(output_location,image_array.transpose())
+
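+# For intuition: each CIFAR-10 row is a flat 3072-vector in channel-major
+# order, so resize([3, 32, 32]) gives (channels, rows, cols) and the full
+# .transpose() reverses the axes to (32, 32, 3) for imsave (note this also
+# swaps rows and columns).
+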
+# Sort train images
+for file in train_names:
+ print('Saving images from file: {}'.format(file))
+ file_location = os.path.join(data_dir, 'cifar-10-batches-py', file)
+ image_dict = load_batch_from_file(file_location)
+ save_images_from_dict(image_dict, folder=train_folder)
+
+# Sort test images
+for file in test_names:
+ print('Saving images from file: {}'.format(file))
+ file_location = os.path.join(data_dir, 'cifar-10-batches-py', file)
+ image_dict = load_batch_from_file(file_location)
+ save_images_from_dict(image_dict, folder=test_folder)
+
+# Create labels file
+cifar_labels_file = os.path.join(data_dir,'cifar10_labels.txt')
+print('Writing labels file, {}'.format(cifar_labels_file))
+with open(cifar_labels_file, 'w') as labels_file:
+ for item in objects:
+ labels_file.write("{}\n".format(item))
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 08/introductory_cnn.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 08/introductory_cnn.py
new file mode 100644
index 000000000..c1bf1c246
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 08/introductory_cnn.py
@@ -0,0 +1,192 @@
+# Introductory CNN Model: MNIST Digits
+#---------------------------------------
+#
+# In this example, we will download the MNIST handwritten
+# digits and create a simple CNN network to predict the
+# digit category (0-9)
+
+import matplotlib.pyplot as plt
+import numpy as np
+import tensorflow as tf
+from tensorflow.contrib.learn.python.learn.datasets.mnist import read_data_sets
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Start a graph session
+sess = tf.Session()
+
+# Load data
+data_dir = 'temp'
+mnist = read_data_sets(data_dir)
+
+# Convert images into 28x28 (they are downloaded as 1x784)
+train_xdata = np.array([np.reshape(x, (28,28)) for x in mnist.train.images])
+test_xdata = np.array([np.reshape(x, (28,28)) for x in mnist.test.images])
+
+# Labels stay as integer class ids (0-9); the sparse softmax loss uses them directly
+train_labels = mnist.train.labels
+test_labels = mnist.test.labels
+
+# Set model parameters
+batch_size = 100
+learning_rate = 0.005
+evaluation_size = 500
+image_width = train_xdata[0].shape[0]
+image_height = train_xdata[0].shape[1]
+target_size = max(train_labels) + 1
+num_channels = 1 # greyscale = 1 channel
+generations = 500
+eval_every = 5
+conv1_features = 25
+conv2_features = 50
+max_pool_size1 = 2 # NxN window for 1st max pool layer
+max_pool_size2 = 2 # NxN window for 2nd max pool layer
+fully_connected_size1 = 100
+
+# Declare model placeholders
+x_input_shape = (batch_size, image_width, image_height, num_channels)
+x_input = tf.placeholder(tf.float32, shape=x_input_shape)
+y_target = tf.placeholder(tf.int32, shape=(batch_size))
+eval_input_shape = (evaluation_size, image_width, image_height, num_channels)
+eval_input = tf.placeholder(tf.float32, shape=eval_input_shape)
+eval_target = tf.placeholder(tf.int32, shape=(evaluation_size))
+
+# Declare model parameters
+conv1_weight = tf.Variable(tf.truncated_normal([4, 4, num_channels, conv1_features],
+ stddev=0.1, dtype=tf.float32))
+conv1_bias = tf.Variable(tf.zeros([conv1_features], dtype=tf.float32))
+
+conv2_weight = tf.Variable(tf.truncated_normal([4, 4, conv1_features, conv2_features],
+ stddev=0.1, dtype=tf.float32))
+conv2_bias = tf.Variable(tf.zeros([conv2_features], dtype=tf.float32))
+
+# fully connected variables
+resulting_width = image_width // (max_pool_size1 * max_pool_size2)
+resulting_height = image_height // (max_pool_size1 * max_pool_size2)
+full1_input_size = resulting_width * resulting_height * conv2_features
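+# Worked example with the values above: 28 // (2 * 2) = 7 on each side, so
+# the flattened conv output feeding the first fully connected layer has
+# 7 * 7 * 50 = 2450 features.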
+full1_weight = tf.Variable(tf.truncated_normal([full1_input_size, fully_connected_size1],
+ stddev=0.1, dtype=tf.float32))
+full1_bias = tf.Variable(tf.truncated_normal([fully_connected_size1], stddev=0.1, dtype=tf.float32))
+full2_weight = tf.Variable(tf.truncated_normal([fully_connected_size1, target_size],
+ stddev=0.1, dtype=tf.float32))
+full2_bias = tf.Variable(tf.truncated_normal([target_size], stddev=0.1, dtype=tf.float32))
+
+
+# Initialize Model Operations
+def my_conv_net(input_data):
+ # First Conv-ReLU-MaxPool Layer
+ conv1 = tf.nn.conv2d(input_data, conv1_weight, strides=[1, 1, 1, 1], padding='SAME')
+ relu1 = tf.nn.relu(tf.nn.bias_add(conv1, conv1_bias))
+ max_pool1 = tf.nn.max_pool(relu1, ksize=[1, max_pool_size1, max_pool_size1, 1],
+ strides=[1, max_pool_size1, max_pool_size1, 1], padding='SAME')
+
+ # Second Conv-ReLU-MaxPool Layer
+ conv2 = tf.nn.conv2d(max_pool1, conv2_weight, strides=[1, 1, 1, 1], padding='SAME')
+ relu2 = tf.nn.relu(tf.nn.bias_add(conv2, conv2_bias))
+ max_pool2 = tf.nn.max_pool(relu2, ksize=[1, max_pool_size2, max_pool_size2, 1],
+ strides=[1, max_pool_size2, max_pool_size2, 1], padding='SAME')
+
+ # Transform Output into a 1xN layer for next fully connected layer
+ final_conv_shape = max_pool2.get_shape().as_list()
+ final_shape = final_conv_shape[1] * final_conv_shape[2] * final_conv_shape[3]
+ flat_output = tf.reshape(max_pool2, [final_conv_shape[0], final_shape])
+
+ # First Fully Connected Layer
+ fully_connected1 = tf.nn.relu(tf.add(tf.matmul(flat_output, full1_weight), full1_bias))
+
+ # Second Fully Connected Layer
+ final_model_output = tf.add(tf.matmul(fully_connected1, full2_weight), full2_bias)
+
+ return(final_model_output)
+
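+# Shape trace for the training path (batch_size=100): input (100, 28, 28, 1)
+# -> pool1 (100, 14, 14, 25) -> pool2 (100, 7, 7, 50) -> flat (100, 2450)
+# -> fully connected (100, 100) -> logits (100, 10).
+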
+model_output = my_conv_net(x_input)
+test_model_output = my_conv_net(eval_input)
+
+# Declare Loss Function (softmax cross entropy)
+loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(model_output, y_target))
+
+# Create a prediction function
+prediction = tf.nn.softmax(model_output)
+test_prediction = tf.nn.softmax(test_model_output)
+
+# Create accuracy function
+def get_accuracy(logits, targets):
+ batch_predictions = np.argmax(logits, axis=1)
+ num_correct = np.sum(np.equal(batch_predictions, targets))
+ return(100. * num_correct/batch_predictions.shape[0])
+
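+# Example (illustrative): get_accuracy(np.array([[0.1, 0.9], [0.8, 0.2]]),
+# np.array([1, 0])) takes the row-wise argmax [1, 0], matches both targets,
+# and returns 100.0.
+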
+# Create an optimizer
+my_optimizer = tf.train.MomentumOptimizer(learning_rate, 0.9)
+train_step = my_optimizer.minimize(loss)
+
+# Initialize Variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Start training loop
+train_loss = []
+train_acc = []
+test_acc = []
+for i in range(generations):
+ rand_index = np.random.choice(len(train_xdata), size=batch_size)
+ rand_x = train_xdata[rand_index]
+ rand_x = np.expand_dims(rand_x, 3)
+ rand_y = train_labels[rand_index]
+ train_dict = {x_input: rand_x, y_target: rand_y}
+
+ sess.run(train_step, feed_dict=train_dict)
+ temp_train_loss, temp_train_preds = sess.run([loss, prediction], feed_dict=train_dict)
+ temp_train_acc = get_accuracy(temp_train_preds, rand_y)
+
+ if (i+1) % eval_every == 0:
+ eval_index = np.random.choice(len(test_xdata), size=evaluation_size)
+ eval_x = test_xdata[eval_index]
+ eval_x = np.expand_dims(eval_x, 3)
+ eval_y = test_labels[eval_index]
+ test_dict = {eval_input: eval_x, eval_target: eval_y}
+ test_preds = sess.run(test_prediction, feed_dict=test_dict)
+ temp_test_acc = get_accuracy(test_preds, eval_y)
+
+ # Record and print results
+ train_loss.append(temp_train_loss)
+ train_acc.append(temp_train_acc)
+ test_acc.append(temp_test_acc)
+ acc_and_loss = [(i+1), temp_train_loss, temp_train_acc, temp_test_acc]
+ acc_and_loss = [np.round(x,2) for x in acc_and_loss]
+ print('Generation # {}. Train Loss: {:.2f}. Train Acc (Test Acc): {:.2f} ({:.2f})'.format(*acc_and_loss))
+
+
+# Matplotlib code to plot the loss and accuracies
+eval_indices = range(0, generations, eval_every)
+# Plot loss over time
+plt.plot(eval_indices, train_loss, 'k-')
+plt.title('Softmax Loss per Generation')
+plt.xlabel('Generation')
+plt.ylabel('Softmax Loss')
+plt.show()
+
+# Plot train and test accuracy
+plt.plot(eval_indices, train_acc, 'k-', label='Train Set Accuracy')
+plt.plot(eval_indices, test_acc, 'r--', label='Test Set Accuracy')
+plt.title('Train and Test Accuracy')
+plt.xlabel('Generation')
+plt.ylabel('Accuracy')
+plt.legend(loc='lower right')
+plt.show()
+
+# Plot some samples
+# Plot six results from the last batch:
+actuals = rand_y[0:6]
+predictions = np.argmax(temp_train_preds,axis=1)[0:6]
+images = np.squeeze(rand_x[0:6])
+
+Nrows = 2
+Ncols = 3
+for i in range(6):
+ plt.subplot(Nrows, Ncols, i+1)
+ plt.imshow(np.reshape(images[i], [28,28]), cmap='Greys_r')
+ plt.title('Actual: ' + str(actuals[i]) + ' Pred: ' + str(predictions[i]),
+ fontsize=10)
+ frame = plt.gca()
+ frame.axes.get_xaxis().set_visible(False)
+ frame.axes.get_yaxis().set_visible(False)
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 08/stylenet.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 08/stylenet.py
new file mode 100644
index 000000000..59187474d
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 08/stylenet.py
@@ -0,0 +1,186 @@
+# Using Tensorflow for Stylenet/NeuralStyle
+#---------------------------------------
+#
+# We use two images, an original image and a style image,
+# and render the original image in the style of the style image.
+#
+# Reference paper:
+# https://arxiv.org/abs/1508.06576
+#
+# Need to download the model 'imagenet-vgg-verydeep-19.mat' from:
+# http://www.vlfeat.org/matconvnet/models/beta16/imagenet-vgg-verydeep-19.mat
+
+import os
+import scipy.io
+import scipy.misc
+import numpy as np
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Start a graph session
+sess = tf.Session()
+
+os.chdir('/home/nick/OneDrive/Documents/tensor_flow_book/Code/8_Convolutional_Neural_Networks')
+
+# Image Files
+original_image_file = 'temp/book_cover.jpg'
+style_image_file = 'temp/starry_night.jpg'
+
+# Saved VGG Network path
+vgg_path = '/home/nick/Documents/tensorflow/vgg_19_models/imagenet-vgg-verydeep-19.mat'
+
+# Default Arguments
+original_image_weight = 5.0
+style_image_weight = 200.0
+regularization_weight = 50.0
+learning_rate = 0.1
+generations = 10000
+output_generations = 500
+
+# Read in images
+original_image = scipy.misc.imread(original_image_file)
+style_image = scipy.misc.imread(style_image_file)
+
+# Get shape of target and make the style image the same
+target_shape = original_image.shape
+style_image = scipy.misc.imresize(style_image, target_shape[1] / style_image.shape[1])
+
+# VGG-19 Layer Setup
+# From paper
+vgg_layers = ['conv1_1', 'relu1_1',
+ 'conv1_2', 'relu1_2', 'pool1',
+ 'conv2_1', 'relu2_1',
+ 'conv2_2', 'relu2_2', 'pool2',
+ 'conv3_1', 'relu3_1',
+ 'conv3_2', 'relu3_2',
+ 'conv3_3', 'relu3_3',
+ 'conv3_4', 'relu3_4', 'pool3',
+ 'conv4_1', 'relu4_1',
+ 'conv4_2', 'relu4_2',
+ 'conv4_3', 'relu4_3',
+ 'conv4_4', 'relu4_4', 'pool4',
+ 'conv5_1', 'relu5_1',
+ 'conv5_2', 'relu5_2',
+ 'conv5_3', 'relu5_3',
+ 'conv5_4', 'relu5_4']
+
+# Extract weights and matrix means
+def extract_net_info(path_to_params):
+ vgg_data = scipy.io.loadmat(path_to_params)
+ normalization_matrix = vgg_data['normalization'][0][0][0]
+ mat_mean = np.mean(normalization_matrix, axis=(0,1))
+ network_weights = vgg_data['layers'][0]
+ return(mat_mean, network_weights)
+
+
+# Create the VGG-19 Network
+def vgg_network(network_weights, init_image):
+    network = {}
+    image = init_image
+
+    for i, layer in enumerate(vgg_layers):
+        if layer[0] == 'c':  # convolution layer
+            weights, bias = network_weights[i][0][0][0][0]
+            weights = np.transpose(weights, (1, 0, 2, 3))
+            bias = bias.reshape(-1)
+            conv_layer = tf.nn.conv2d(image, tf.constant(weights), (1, 1, 1, 1), 'SAME')
+            image = tf.nn.bias_add(conv_layer, bias)
+        elif layer[0] == 'r':  # relu layer
+            image = tf.nn.relu(image)
+        else:  # pooling layer
+            image = tf.nn.max_pool(image, (1, 2, 2, 1), (1, 2, 2, 1), 'SAME')
+        network[layer] = image
+    return(network)
+
+# Here we define which layers apply to the original or style image
+original_layer = 'relu4_2'
+style_layers = ['relu1_1', 'relu2_1', 'relu3_1', 'relu4_1', 'relu5_1']
+
+# Get network parameters
+normalization_mean, network_weights = extract_net_info(vgg_path)
+
+shape = (1,) + original_image.shape
+style_shape = (1,) + style_image.shape
+original_features = {}
+style_features = {}
+
+# Build the network for the original image and capture its features
+image = tf.placeholder('float', shape=shape)
+vgg_net = vgg_network(network_weights, image)
+
+# Normalize original image
+original_minus_mean = original_image - normalization_mean
+original_norm = np.array([original_minus_mean])
+original_features[original_layer] = sess.run(vgg_net[original_layer],
+ feed_dict={image: original_norm})
+
+# Get style image network
+image = tf.placeholder('float', shape=style_shape)
+vgg_net = vgg_network(network_weights, image)
+style_minus_mean = style_image - normalization_mean
+style_norm = np.array([style_minus_mean])
+
+for layer in style_layers:
+ layer_output = sess.run(vgg_net[layer], feed_dict={image: style_norm})
+ layer_output = np.reshape(layer_output, (-1, layer_output.shape[3]))
+ style_gram_matrix = np.matmul(layer_output.T, layer_output) / layer_output.size
+ style_features[layer] = style_gram_matrix
+
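+# In matrix terms (matching the loop above): flattening a layer's activations
+# to F with shape (H*W, C) gives the Gram matrix G = F^T F / (H*W*C), a
+# (C, C) summary of channel co-activations that discards spatial layout.
+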
+# Make Combined Image
+initial = tf.random_normal(shape) * 0.05
+image = tf.Variable(initial)
+vgg_net = vgg_network(network_weights, image)
+
+# Loss
+original_loss = original_image_weight * (2 * tf.nn.l2_loss(vgg_net[original_layer] - original_features[original_layer]) /
+ original_features[original_layer].size)
+
+# Loss from Style Image
+style_loss = 0
+style_losses = []
+for style_layer in style_layers:
+ layer = vgg_net[style_layer]
+ feats, height, width, channels = [x.value for x in layer.get_shape()]
+ size = height * width * channels
+ features = tf.reshape(layer, (-1, channels))
+ style_gram_matrix = tf.matmul(tf.transpose(features), features) / size
+ style_expected = style_features[style_layer]
+ style_losses.append(2 * tf.nn.l2_loss(style_gram_matrix - style_expected) / style_expected.size)
+style_loss += style_image_weight * tf.reduce_sum(style_losses)
+
+# To smooth the results, we add in a total variation loss
+total_var_x = sess.run(tf.reduce_prod(image[:,1:,:,:].get_shape()))
+total_var_y = sess.run(tf.reduce_prod(image[:,:,1:,:].get_shape()))
+first_term = regularization_weight * 2
+second_term_numerator = tf.nn.l2_loss(image[:,1:,:,:] - image[:,:shape[1]-1,:,:])
+second_term = second_term_numerator / total_var_y
+third_term = (tf.nn.l2_loss(image[:,:,1:,:] - image[:,:,:shape[2]-1,:]) / total_var_x)
+total_variation_loss = first_term * (second_term + third_term)
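+# Since tf.nn.l2_loss(t) is sum(t**2)/2, the factor of 2 in first_term
+# cancels it: the penalty is regularization_weight times the mean squared
+# difference between vertically and horizontally adjacent pixels.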
+
+# Combined Loss
+loss = original_loss + style_loss + total_variation_loss
+
+# Declare Optimization Algorithm
+optimizer = tf.train.GradientDescentOptimizer(learning_rate)
+train_step = optimizer.minimize(loss)
+
+# Initialize Variables and start Training
+sess.run(tf.initialize_all_variables())
+for i in range(generations):
+
+ sess.run(train_step)
+
+ # Print update and save temporary output
+ if (i+1) % output_generations == 0:
+ print('Generation {} out of {}'.format(i + 1, generations))
+ image_eval = sess.run(image)
+ best_image_add_mean = image_eval.reshape(shape[1:]) + normalization_mean
+ output_file = 'temp_output_{}.jpg'.format(i)
+ scipy.misc.imsave(output_file, best_image_add_mean)
+
+
+# Save final image
+image_eval = sess.run(image)
+best_image_add_mean = image_eval.reshape(shape[1:]) + normalization_mean
+output_file = 'final_output.jpg'
+scipy.misc.imsave(output_file, best_image_add_mean)
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 09/implementing_lstm.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 09/implementing_lstm.py
new file mode 100644
index 000000000..62ef2cd90
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 09/implementing_lstm.py
@@ -0,0 +1,283 @@
+# -*- coding: utf-8 -*-
+#
+# Implementing an LSTM RNN Model
+#------------------------------
+# Here we implement an LSTM model on a dataset of Shakespeare's works.
+#
+#
+#
+
+import os
+import re
+import string
+import requests
+import numpy as np
+import collections
+import random
+import pickle
+import matplotlib.pyplot as plt
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Start a session
+sess = tf.Session()
+
+# Set RNN Parameters
+min_word_freq = 5 # Trim the less frequent words off
+rnn_size = 128 # RNN Model size, has to equal embedding size
+epochs = 10 # Number of epochs to cycle through data
+batch_size = 100 # Train on this many examples at once
+learning_rate = 0.001 # Learning rate
+training_seq_len = 50  # Length of the word sequence to consider
+embedding_size = rnn_size
+save_every = 500 # How often to save model checkpoints
+eval_every = 50 # How often to evaluate the test sentences
+prime_texts = ['thou art more', 'to be or not to', 'wherefore art thou']
+
+# Download/store Shakespeare data
+data_dir = 'temp'
+data_file = 'shakespeare.txt'
+model_path = 'shakespeare_model'
+full_model_dir = os.path.join(data_dir, model_path)
+
+# Declare punctuation to remove, everything except hyphens and apostrophes
+punctuation = string.punctuation
+punctuation = ''.join([x for x in punctuation if x not in ['-', "'"]])
+
+# Make Model Directory
+if not os.path.exists(full_model_dir):
+ os.makedirs(full_model_dir)
+
+# Make data directory
+if not os.path.exists(data_dir):
+ os.makedirs(data_dir)
+
+print('Loading Shakespeare Data')
+# Check if file is downloaded.
+if not os.path.isfile(os.path.join(data_dir, data_file)):
+ print('Not found, downloading Shakespeare texts from www.gutenberg.org')
+ shakespeare_url = 'http://www.gutenberg.org/cache/epub/100/pg100.txt'
+ # Get Shakespeare text
+ response = requests.get(shakespeare_url)
+ shakespeare_file = response.content
+ # Decode binary into string
+ s_text = shakespeare_file.decode('utf-8')
+ # Drop first few descriptive paragraphs.
+ s_text = s_text[7675:]
+ # Remove newlines
+ s_text = s_text.replace('\r\n', '')
+ s_text = s_text.replace('\n', '')
+
+ # Write to file
+ with open(os.path.join(data_dir, data_file), 'w') as out_conn:
+ out_conn.write(s_text)
+else:
+ # If file has been saved, load from that file
+ with open(os.path.join(data_dir, data_file), 'r') as file_conn:
+ s_text = file_conn.read().replace('\n', '')
+
+# Clean text
+print('Cleaning Text')
+s_text = re.sub(r'[{}]'.format(punctuation), ' ', s_text)
+s_text = re.sub(r'\s+', ' ', s_text).strip().lower()
+
+# Build word vocabulary function
+def build_vocab(text, min_word_freq):
+ word_counts = collections.Counter(text.split(' '))
+ # limit word counts to those more frequent than cutoff
+ word_counts = {key:val for key, val in word_counts.items() if val>min_word_freq}
+ # Create vocab --> index mapping
+ words = word_counts.keys()
+ vocab_to_ix_dict = {key:(ix+1) for ix, key in enumerate(words)}
+ # Add unknown key --> 0 index
+ vocab_to_ix_dict['unknown']=0
+ # Create index --> vocab mapping
+ ix_to_vocab_dict = {val:key for key,val in vocab_to_ix_dict.items()}
+
+ return(ix_to_vocab_dict, vocab_to_ix_dict)
+
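+# Example (illustrative): build_vocab('to be or not to be to', 1) keeps only
+# words seen more than once ('to' x3, 'be' x2), assigns them indices 1 and 2
+# (in arbitrary dict order), and reserves index 0 for 'unknown'.
+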
+# Build Shakespeare vocabulary
+print('Building Shakespeare Vocab')
+ix2vocab, vocab2ix = build_vocab(s_text, min_word_freq)
+vocab_size = len(ix2vocab) + 1
+print('Vocabulary Length = {}'.format(vocab_size))
+# Sanity Check
+assert(len(ix2vocab) == len(vocab2ix))
+
+# Convert text to word index vectors
+s_text_words = s_text.split(' ')
+s_text_ix = []
+for x in s_text_words:
+    # Map each word to its index, defaulting to 0 ('unknown')
+    s_text_ix.append(vocab2ix.get(x, 0))
+s_text_ix = np.array(s_text_ix)
+
+
+
+# Define LSTM RNN Model
+class LSTM_Model():
+ def __init__(self, rnn_size, batch_size, learning_rate,
+ training_seq_len, vocab_size, infer_sample=False):
+ self.rnn_size = rnn_size
+ self.vocab_size = vocab_size
+ self.infer_sample = infer_sample
+ self.learning_rate = learning_rate
+
+ if infer_sample:
+ self.batch_size = 1
+ self.training_seq_len = 1
+ else:
+ self.batch_size = batch_size
+ self.training_seq_len = training_seq_len
+
+ self.lstm_cell = tf.nn.rnn_cell.BasicLSTMCell(rnn_size)
+ self.initial_state = self.lstm_cell.zero_state(self.batch_size, tf.float32)
+
+ self.x_data = tf.placeholder(tf.int32, [self.batch_size, self.training_seq_len])
+ self.y_output = tf.placeholder(tf.int32, [self.batch_size, self.training_seq_len])
+
+ with tf.variable_scope('lstm_vars'):
+ # Softmax Output Weights
+ W = tf.get_variable('W', [self.rnn_size, self.vocab_size], tf.float32, tf.random_normal_initializer())
+ b = tf.get_variable('b', [self.vocab_size], tf.float32, tf.constant_initializer(0.0))
+
+ # Define Embedding
+ embedding_mat = tf.get_variable('embedding_mat', [self.vocab_size, self.rnn_size],
+ tf.float32, tf.random_normal_initializer())
+
+ embedding_output = tf.nn.embedding_lookup(embedding_mat, self.x_data)
+ rnn_inputs = tf.split(1, self.training_seq_len, embedding_output)
+ rnn_inputs_trimmed = [tf.squeeze(x, [1]) for x in rnn_inputs]
+
+ # If we are inferring (generating text), we add a 'loop' function
+ # Define how to get the i+1 th input from the i th output
+ def inferred_loop(prev, count):
+ # Apply hidden layer
+ prev_transformed = tf.matmul(prev, W) + b
+ # Get the index of the output (also don't run the gradient)
+ prev_symbol = tf.stop_gradient(tf.argmax(prev_transformed, 1))
+ # Get embedded vector
+ output = tf.nn.embedding_lookup(embedding_mat, prev_symbol)
+ return(output)
+
+ decoder = tf.nn.seq2seq.rnn_decoder
+ outputs, last_state = decoder(rnn_inputs_trimmed,
+ self.initial_state,
+ self.lstm_cell,
+ loop_function=inferred_loop if infer_sample else None)
+ # Non inferred outputs
+ output = tf.reshape(tf.concat(1, outputs), [-1, self.rnn_size])
+ # Logits and output
+ self.logit_output = tf.matmul(output, W) + b
+ self.model_output = tf.nn.softmax(self.logit_output)
+
+ loss_fun = tf.nn.seq2seq.sequence_loss_by_example
+ loss = loss_fun([self.logit_output],[tf.reshape(self.y_output, [-1])],
+ [tf.ones([self.batch_size * self.training_seq_len])],
+ self.vocab_size)
+ self.cost = tf.reduce_sum(loss) / (self.batch_size * self.training_seq_len)
+ self.final_state = last_state
+ gradients, _ = tf.clip_by_global_norm(tf.gradients(self.cost, tf.trainable_variables()), 4.5)
+ optimizer = tf.train.AdamOptimizer(self.learning_rate)
+ self.train_op = optimizer.apply_gradients(zip(gradients, tf.trainable_variables()))
+
+ def sample(self, sess, words=ix2vocab, vocab=vocab2ix, num=10, prime_text='thou art'):
+ state = sess.run(self.lstm_cell.zero_state(1, tf.float32))
+ word_list = prime_text.split()
+ for word in word_list[:-1]:
+ x = np.zeros((1, 1))
+ x[0, 0] = vocab[word]
+ feed_dict = {self.x_data: x, self.initial_state:state}
+ [state] = sess.run([self.final_state], feed_dict=feed_dict)
+
+ out_sentence = prime_text
+ word = word_list[-1]
+ for n in range(num):
+ x = np.zeros((1, 1))
+ x[0, 0] = vocab[word]
+ feed_dict = {self.x_data: x, self.initial_state:state}
+ [model_output, state] = sess.run([self.model_output, self.final_state], feed_dict=feed_dict)
+ sample = np.argmax(model_output[0])
+ if sample == 0:
+ break
+ word = words[sample]
+ out_sentence = out_sentence + ' ' + word
+ return(out_sentence)
+
+with tf.variable_scope('lstm_model') as scope:
+ # Define LSTM Model
+ lstm_model = LSTM_Model(rnn_size, batch_size, learning_rate,
+ training_seq_len, vocab_size)
+ scope.reuse_variables()
+ test_lstm_model = LSTM_Model(rnn_size, batch_size, learning_rate,
+ training_seq_len, vocab_size, infer_sample=True)
+
+
+# Create model saver
+saver = tf.train.Saver(tf.all_variables())
+
+# Create batches for each epoch
+num_batches = int(len(s_text_ix)/(batch_size * training_seq_len)) + 1
+# Split up text indices into subarrays, of equal size
+batches = np.array_split(s_text_ix, num_batches)
+# Reshape each split into [batch_size, training_seq_len]
+batches = [np.resize(x, [batch_size, training_seq_len]) for x in batches]
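+# Worked example (hypothetical corpus size): if len(s_text_ix) were 900,000,
+# num_batches = 900000 // (100 * 50) + 1 = 181, and np.resize pads or
+# truncates each split so every batch is exactly (100, 50).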
+
+# Initialize all variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Train model
+train_loss = []
+iteration_count = 1
+for epoch in range(epochs):
+ # Shuffle word indices
+ random.shuffle(batches)
+ # Create targets from shuffled batches
+ targets = [np.roll(x, -1, axis=1) for x in batches]
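+    # e.g. np.roll([[1, 2, 3]], -1, axis=1) -> [[2, 3, 1]]: each target is
+    # the next word, with the first word wrapping to the last position.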
+    # Run through one epoch
+ print('Starting Epoch #{} of {}.'.format(epoch+1, epochs))
+ # Reset initial LSTM state every epoch
+ state = sess.run(lstm_model.initial_state)
+ for ix, batch in enumerate(batches):
+ training_dict = {lstm_model.x_data: batch, lstm_model.y_output: targets[ix]}
+ c, h = lstm_model.initial_state
+ training_dict[c] = state.c
+ training_dict[h] = state.h
+
+ temp_loss, state, _ = sess.run([lstm_model.cost, lstm_model.final_state, lstm_model.train_op],
+ feed_dict=training_dict)
+ train_loss.append(temp_loss)
+
+ # Print status every 10 gens
+ if iteration_count % 10 == 0:
+            summary_nums = (iteration_count, epoch+1, ix+1, num_batches, temp_loss)
+ print('Iteration: {}, Epoch: {}, Batch: {} out of {}, Loss: {:.2f}'.format(*summary_nums))
+
+ # Save the model and the vocab
+ if iteration_count % save_every == 0:
+ # Save model
+ model_file_name = os.path.join(full_model_dir, 'model')
+ saver.save(sess, model_file_name, global_step = iteration_count)
+ print('Model Saved To: {}'.format(model_file_name))
+ # Save vocabulary
+ dictionary_file = os.path.join(full_model_dir, 'vocab.pkl')
+ with open(dictionary_file, 'wb') as dict_file_conn:
+ pickle.dump([vocab2ix, ix2vocab], dict_file_conn)
+
+ if iteration_count % eval_every == 0:
+ for sample in prime_texts:
+ print(test_lstm_model.sample(sess, ix2vocab, vocab2ix, num=10, prime_text=sample))
+
+ iteration_count += 1
+
+
+# Plot loss over time
+plt.plot(train_loss, 'k-')
+plt.title('LSTM Training Loss')
+plt.xlabel('Generation')
+plt.ylabel('Loss')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 09/implementing_rnn.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 09/implementing_rnn.py
new file mode 100644
index 000000000..21cb95508
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 09/implementing_rnn.py
@@ -0,0 +1,183 @@
+# Implementing an RNN in Tensorflow
+#----------------------------------
+#
+# We implement an RNN in Tensorflow to predict spam/ham from texts
+#
+
+import os
+import re
+import io
+import requests
+import numpy as np
+import matplotlib.pyplot as plt
+import tensorflow as tf
+from zipfile import ZipFile
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Start a graph
+sess = tf.Session()
+
+# Set RNN parameters
+epochs = 20
+batch_size = 250
+max_sequence_length = 25
+rnn_size = 10
+embedding_size = 50
+min_word_frequency = 10
+learning_rate = 0.0005
+dropout_keep_prob = tf.placeholder(tf.float32)
+
+
+# Download or open data
+data_dir = 'temp'
+data_file = 'text_data.txt'
+if not os.path.exists(data_dir):
+ os.makedirs(data_dir)
+
+if not os.path.isfile(os.path.join(data_dir, data_file)):
+ zip_url = 'http://archive.ics.uci.edu/ml/machine-learning-databases/00228/smsspamcollection.zip'
+ r = requests.get(zip_url)
+ z = ZipFile(io.BytesIO(r.content))
+ file = z.read('SMSSpamCollection')
+ # Format Data
+ text_data = file.decode()
+ text_data = text_data.encode('ascii',errors='ignore')
+ text_data = text_data.decode().split('\n')
+
+ # Save data to text file
+ with open(os.path.join(data_dir, data_file), 'w') as file_conn:
+ for text in text_data:
+ file_conn.write("{}\n".format(text))
+else:
+ # Open data from text file
+ text_data = []
+ with open(os.path.join(data_dir, data_file), 'r') as file_conn:
+ for row in file_conn:
+ text_data.append(row)
+ text_data = text_data[:-1]
+
+text_data = [x.split('\t') for x in text_data if len(x)>=1]
+[text_data_target, text_data_train] = [list(x) for x in zip(*text_data)]
+
+
+# Create a text cleaning function
+def clean_text(text_string):
+ text_string = re.sub(r'([^\s\w]|_|[0-9])+', '', text_string)
+ text_string = " ".join(text_string.split())
+ text_string = text_string.lower()
+ return(text_string)
+
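+# Example: clean_text('Hello!!! 123 World') strips the punctuation and
+# digits, collapses whitespace, and lowercases, returning 'hello world'.
+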
+# Clean texts
+text_data_train = [clean_text(x) for x in text_data_train]
+
+# Change texts into numeric vectors
+vocab_processor = tf.contrib.learn.preprocessing.VocabularyProcessor(max_sequence_length,
+ min_frequency=min_word_frequency)
+text_processed = np.array(list(vocab_processor.fit_transform(text_data_train)))
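+# Each text becomes a fixed-length row of max_sequence_length (25) word ids,
+# zero-padded or truncated as needed; words below the min_word_frequency
+# cutoff map to the reserved id 0.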
+
+# Shuffle and split data
+text_processed = np.array(text_processed)
+text_data_target = np.array([1 if x=='ham' else 0 for x in text_data_target])
+shuffled_ix = np.random.permutation(np.arange(len(text_data_target)))
+x_shuffled = text_processed[shuffled_ix]
+y_shuffled = text_data_target[shuffled_ix]
+
+# Split train/test set
+ix_cutoff = int(len(y_shuffled)*0.80)
+x_train, x_test = x_shuffled[:ix_cutoff], x_shuffled[ix_cutoff:]
+y_train, y_test = y_shuffled[:ix_cutoff], y_shuffled[ix_cutoff:]
+vocab_size = len(vocab_processor.vocabulary_)
+print("Vocabulary Size: {:d}".format(vocab_size))
+print("80-20 Train Test split: {:d} -- {:d}".format(len(y_train), len(y_test)))
+
+# Create placeholders
+x_data = tf.placeholder(tf.int32, [None, max_sequence_length])
+y_output = tf.placeholder(tf.int32, [None])
+
+# Create embedding
+embedding_mat = tf.Variable(tf.random_uniform([vocab_size, embedding_size], -1.0, 1.0))
+embedding_output = tf.nn.embedding_lookup(embedding_mat, x_data)
+#embedding_output_expanded = tf.expand_dims(embedding_output, -1)
+
+
+# Define the RNN cell
+cell = tf.nn.rnn_cell.BasicRNNCell(num_units = rnn_size)
+output, state = tf.nn.dynamic_rnn(cell, embedding_output, dtype=tf.float32)
+output = tf.nn.dropout(output, dropout_keep_prob)
+
+# Get output of RNN sequence
+output = tf.transpose(output, [1, 0, 2])
+last = tf.gather(output, int(output.get_shape()[0]) - 1)
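+# With shapes: dynamic_rnn's output is (batch, 25, 10); the transpose makes
+# it (25, batch, 10), so gathering index 24 picks the final time step and
+# leaves 'last' with shape (batch, 10) for the output layer.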
+
+
+weight = tf.Variable(tf.truncated_normal([rnn_size, 2], stddev=0.1))
+bias = tf.Variable(tf.constant(0.1, shape=[2]))
+# Keep these as raw logits; the softmax is applied inside the loss below
+logits_out = tf.matmul(last, weight) + bias
+
+# Loss function
+losses = tf.nn.sparse_softmax_cross_entropy_with_logits(logits_out, y_output) # logits=float32, labels=int32
+loss = tf.reduce_mean(losses)
+
+accuracy = tf.reduce_mean(tf.cast(tf.equal(tf.argmax(logits_out, 1), tf.cast(y_output, tf.int64)), tf.float32))
+
+optimizer = tf.train.RMSPropOptimizer(learning_rate)
+train_step = optimizer.minimize(loss)
+
+init = tf.initialize_all_variables()
+sess.run(init)
+
+train_loss = []
+test_loss = []
+train_accuracy = []
+test_accuracy = []
+# Start training
+for epoch in range(epochs):
+
+ # Shuffle training data
+ shuffled_ix = np.random.permutation(np.arange(len(x_train)))
+ x_train = x_train[shuffled_ix]
+ y_train = y_train[shuffled_ix]
+ num_batches = int(len(x_train)/batch_size) + 1
+    # TODO: calculate the number of batches exactly (the +1 over-counts when the data divides evenly)
+ for i in range(num_batches):
+ # Select train data
+ min_ix = i * batch_size
+ max_ix = np.min([len(x_train), ((i+1) * batch_size)])
+ x_train_batch = x_train[min_ix:max_ix]
+ y_train_batch = y_train[min_ix:max_ix]
+
+ # Run train step
+ train_dict = {x_data: x_train_batch, y_output: y_train_batch, dropout_keep_prob:0.5}
+ sess.run(train_step, feed_dict=train_dict)
+
+ # Run loss and accuracy for training
+ temp_train_loss, temp_train_acc = sess.run([loss, accuracy], feed_dict=train_dict)
+ train_loss.append(temp_train_loss)
+ train_accuracy.append(temp_train_acc)
+
+ # Run Eval Step
+ test_dict = {x_data: x_test, y_output: y_test, dropout_keep_prob:1.0}
+ temp_test_loss, temp_test_acc = sess.run([loss, accuracy], feed_dict=test_dict)
+ test_loss.append(temp_test_loss)
+ test_accuracy.append(temp_test_acc)
+ print('Epoch: {}, Test Loss: {:.2}, Test Acc: {:.2}'.format(epoch+1, temp_test_loss, temp_test_acc))
+
+# Plot loss over time
+epoch_seq = np.arange(1, epochs+1)
+plt.plot(epoch_seq, train_loss, 'k--', label='Train Set')
+plt.plot(epoch_seq, test_loss, 'r-', label='Test Set')
+plt.title('Softmax Loss')
+plt.xlabel('Epochs')
+plt.ylabel('Softmax Loss')
+plt.legend(loc='upper left')
+plt.show()
+
+# Plot accuracy over time
+plt.plot(epoch_seq, train_accuracy, 'k--', label='Train Set')
+plt.plot(epoch_seq, test_accuracy, 'r-', label='Test Set')
+plt.title('Train and Test Accuracy')
+plt.xlabel('Epochs')
+plt.ylabel('Accuracy')
+plt.legend(loc='upper left')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 09/seq2seq_translation.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 09/seq2seq_translation.py
new file mode 100644
index 000000000..744d49a62
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 09/seq2seq_translation.py
@@ -0,0 +1,255 @@
+# -*- coding: utf-8 -*-
+#
+# Creating Sequence to Sequence Models
+#-------------------------------------
+# Here we show how to implement sequence to sequence models.
+# Specifically, we will build an English to German translation model.
+#
+
+import os
+import re
+import string
+import requests
+import io
+import numpy as np
+import collections
+import random
+import pickle
+import matplotlib.pyplot as plt
+import tensorflow as tf
+from zipfile import ZipFile
+from collections import Counter
+from tensorflow.models.rnn.translate import data_utils
+from tensorflow.models.rnn.translate import seq2seq_model
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Start a session
+sess = tf.Session()
+
+# Model Parameters
+learning_rate = 0.1
+lr_decay_rate = 0.99
+lr_decay_every = 100
+max_gradient = 5.0
+batch_size = 50
+num_layers = 3
+rnn_size = 500
+layer_size = 512
+generations = 10000
+vocab_size = 10000
+save_every = 1000
+eval_every = 500
+output_every = 50
+punct = string.punctuation
+
+# Data Parameters
+data_dir = 'temp'
+data_file = 'eng_ger.txt'
+model_path = 'seq2seq_model'
+full_model_dir = os.path.join(data_dir, model_path)
+
+# Test Translation from English (lowercase, no punct)
+test_english = ['hello where is my computer',
+ 'the quick brown fox jumped over the lazy dog',
+ 'is it going to rain tomorrow']
+
+# Make Model Directory
+if not os.path.exists(full_model_dir):
+ os.makedirs(full_model_dir)
+
+# Make data directory
+if not os.path.exists(data_dir):
+ os.makedirs(data_dir)
+
+print('Loading English-German Data')
+# Check for data, if it doesn't exist, download it and save it
+if not os.path.isfile(os.path.join(data_dir, data_file)):
+ print('Data not found, downloading Eng-Ger sentences from www.manythings.org')
+ sentence_url = 'http://www.manythings.org/anki/deu-eng.zip'
+ r = requests.get(sentence_url)
+ z = ZipFile(io.BytesIO(r.content))
+ file = z.read('deu.txt')
+ # Format Data
+ eng_ger_data = file.decode()
+ eng_ger_data = eng_ger_data.encode('ascii',errors='ignore')
+ eng_ger_data = eng_ger_data.decode().split('\n')
+ # Write to file
+ with open(os.path.join(data_dir, data_file), 'w') as out_conn:
+ for sentence in eng_ger_data:
+ out_conn.write(sentence + '\n')
+else:
+ eng_ger_data = []
+ with open(os.path.join(data_dir, data_file), 'r') as in_conn:
+ for row in in_conn:
+ eng_ger_data.append(row[:-1])
+
+# Remove punctuation
+eng_ger_data = [''.join(char for char in sent if char not in punct) for sent in eng_ger_data]
+# Split each sentence by tabs
+eng_ger_data = [x.split('\t') for x in eng_ger_data if len(x)>=1]
+[english_sentence, german_sentence] = [list(x) for x in zip(*eng_ger_data)]
+english_sentence = [x.lower().split() for x in english_sentence]
+german_sentence = [x.lower().split() for x in german_sentence]
+
+print('Processing the vocabularies.')
+# Process the English Vocabulary
+all_english_words = [word for sentence in english_sentence for word in sentence]
+all_english_counts = Counter(all_english_words)
+eng_word_keys = [x[0] for x in all_english_counts.most_common(vocab_size-1)] #-1 because 0=unknown is also in there
+eng_vocab2ix = dict(zip(eng_word_keys, range(1,vocab_size)))
+eng_ix2vocab = {val:key for key, val in eng_vocab2ix.items()}
+english_processed = []
+for sent in english_sentence:
+    # Map each word to its index, using 0 for out-of-vocabulary words
+    english_processed.append([eng_vocab2ix.get(word, 0) for word in sent])
+
+
+# Process the German Vocabulary
+all_german_words = [word for sentence in german_sentence for word in sentence]
+all_german_counts = Counter(all_german_words)
+ger_word_keys = [x[0] for x in all_german_counts.most_common(vocab_size-1)]
+ger_vocab2ix = dict(zip(ger_word_keys, range(1,vocab_size)))
+ger_ix2vocab = {val:key for key, val in ger_vocab2ix.items()}
+german_processed = []
+for sent in german_sentence:
+    # Map each word to its index, using 0 for out-of-vocabulary words
+    german_processed.append([ger_vocab2ix.get(word, 0) for word in sent])
+
+
+# Process the test english sentences, use '0' if word not in our vocab
+test_data = []
+for sentence in test_english:
+    # Use index 0 for any word not in our vocabulary
+    test_data.append([eng_vocab2ix.get(word, 0) for word in sentence.split(' ')])
+
+# Define Buckets for sequence lengths
+# We will split data into the corresponding buckets:
+# (x1, y1), (x2, y2), ...
+# Where all entries in bucket 1: len(x)<x1 and len(y)<y1, and so on.
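+# Illustrative example (hypothetical buckets, not taken from this recipe):
+# with buckets = [(5, 10), (7, 12), (11, 17), (50, 60)], an English sentence
+# of 6 tokens paired with a German sentence of 11 tokens is padded into the
+# (7, 12) bucket, so each bucket's graph trains on fixed-size tensors.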
+ # Get RNN model outputs
+ encoder_inputs, decoder_inputs, target_weights = test_model.get_batch(
+ {bucket_id: [(sentence, [])]}, bucket_id)
+ # Get logits
+ _, test_loss, output_logits = test_model.step(sess, encoder_inputs, decoder_inputs,
+ target_weights, bucket_id, True)
+ ix_output = [int(np.argmax(logit, axis=1)) for logit in output_logits]
+ # If there is a 0 symbol in outputs end the output there.
+ ix_output = ix_output[0:[ix for ix, x in enumerate(ix_output+[0]) if x==0][0]]
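+    # e.g. for ix_output = [5, 9, 0, 3], the first 0 (including the appended
+    # sentinel) is at position 2, so the list is trimmed to [5, 9].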
+ # Get german words from indices
+ test_german = [ger_ix2vocab[x] for x in ix_output]
+ print('English: {}'.format(test_english[ix]))
+ print('German: {}'.format(test_german))
+
+
+# Plot train loss
+loss_generations = [i for i in range(generations) if i%output_every==0]
+plt.plot(loss_generations, train_loss, 'k-')
+plt.title('Sequence to Sequence Loss')
+plt.xlabel('Generation')
+plt.ylabel('Loss')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 09/stacking_multiple_lstm.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 09/stacking_multiple_lstm.py
new file mode 100644
index 000000000..5099eaabe
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 09/stacking_multiple_lstm.py
@@ -0,0 +1,278 @@
+# -*- coding: utf-8 -*-
+#
+# Stacking LSTM Layers
+#---------------------
+# Here we implement an LSTM model on a dataset of Shakespeare's works.
+# We will stack multiple LSTM models for a more accurate representation
+# of Shakespearean language. We will also use characters instead of words.
+#
+
+import os
+import re
+import string
+import requests
+import numpy as np
+import collections
+import random
+import pickle
+import matplotlib.pyplot as plt
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Start a session
+sess = tf.Session()
+
+# Set RNN Parameters
+num_layers = 3 # Number of RNN layers stacked
+min_word_freq = 5 # Trim the less frequent words off
+rnn_size = 128 # RNN Model size, has to equal embedding size
+epochs = 10 # Number of epochs to cycle through data
+batch_size = 100 # Train on this many examples at once
+learning_rate = 0.0005 # Learning rate
+training_seq_len = 50  # Length of the character sequence to consider
+save_every = 500 # How often to save model checkpoints
+eval_every = 50 # How often to evaluate the test sentences
+prime_texts = ['thou art more', 'to be or not to', 'wherefore art thou']
+
+# Download/store Shakespeare data
+data_dir = 'temp'
+data_file = 'shakespeare.txt'
+model_path = 'shakespeare_model'
+full_model_dir = os.path.join(data_dir, model_path)
+
+# Declare punctuation to remove, everything except hyphens and apostrophes
+punctuation = string.punctuation
+punctuation = ''.join([x for x in punctuation if x not in ['-', "'"]])
+
+# Make Model Directory
+if not os.path.exists(full_model_dir):
+ os.makedirs(full_model_dir)
+
+# Make data directory
+if not os.path.exists(data_dir):
+ os.makedirs(data_dir)
+
+print('Loading Shakespeare Data')
+# Check if file is downloaded.
+if not os.path.isfile(os.path.join(data_dir, data_file)):
+ print('Not found, downloading Shakespeare texts from www.gutenberg.org')
+ shakespeare_url = 'http://www.gutenberg.org/cache/epub/100/pg100.txt'
+ # Get Shakespeare text
+ response = requests.get(shakespeare_url)
+ shakespeare_file = response.content
+ # Decode binary into string
+ s_text = shakespeare_file.decode('utf-8')
+ # Drop first few descriptive paragraphs.
+ s_text = s_text[7675:]
+ # Remove newlines
+ s_text = s_text.replace('\r\n', '')
+ s_text = s_text.replace('\n', '')
+
+ # Write to file
+ with open(os.path.join(data_dir, data_file), 'w') as out_conn:
+ out_conn.write(s_text)
+else:
+ # If file has been saved, load from that file
+ with open(os.path.join(data_dir, data_file), 'r') as file_conn:
+ s_text = file_conn.read().replace('\n', '')
+
+# Clean text
+print('Cleaning Text')
+s_text = re.sub(r'[{}]'.format(punctuation), ' ', s_text)
+s_text = re.sub(r'\s+', ' ', s_text).strip().lower()
+
+# Split up by characters
+char_list = list(s_text)
+
+# Build character vocabulary function
+def build_vocab(characters):
+ character_counts = collections.Counter(characters)
+ # Create vocab --> index mapping
+ chars = character_counts.keys()
+ vocab_to_ix_dict = {key:(ix+1) for ix, key in enumerate(chars)}
+ # Add unknown key --> 0 index
+ vocab_to_ix_dict['unknown']=0
+ # Create index --> vocab mapping
+ ix_to_vocab_dict = {val:key for key,val in vocab_to_ix_dict.items()}
+ return(ix_to_vocab_dict, vocab_to_ix_dict)
+
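+# Example (illustrative): build_vocab(list('to be')) sees five distinct
+# characters ('t', 'o', ' ', 'b', 'e'), assigns them indices 1-5 in
+# arbitrary dict order, and reserves index 0 for 'unknown'.
+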
+# Build Shakespeare vocabulary
+print('Building Shakespeare Vocab by Characters')
+ix2vocab, vocab2ix = build_vocab(char_list)
+vocab_size = len(ix2vocab)
+print('Vocabulary Length = {}'.format(vocab_size))
+# Sanity Check
+assert(len(ix2vocab) == len(vocab2ix))
+
+# Convert text to character index vectors
+s_text_ix = []
+for x in char_list:
+    # Map each character to its index, defaulting to 0 ('unknown')
+    s_text_ix.append(vocab2ix.get(x, 0))
+s_text_ix = np.array(s_text_ix)
+
+
+
+# Define LSTM RNN Model
+class LSTM_Model():
+ def __init__(self, rnn_size, num_layers, batch_size, learning_rate,
+ training_seq_len, vocab_size, infer_sample=False):
+ self.rnn_size = rnn_size
+ self.num_layers = num_layers
+ self.vocab_size = vocab_size
+ self.infer_sample = infer_sample
+ self.learning_rate = learning_rate
+
+ if infer_sample:
+ self.batch_size = 1
+ self.training_seq_len = 1
+ else:
+ self.batch_size = batch_size
+ self.training_seq_len = training_seq_len
+
+ self.lstm_cell = tf.nn.rnn_cell.BasicLSTMCell(rnn_size)
+ self.lstm_cell = tf.nn.rnn_cell.MultiRNNCell([self.lstm_cell] * self.num_layers)
+ self.initial_state = self.lstm_cell.zero_state(self.batch_size, tf.float32)
+
+ self.x_data = tf.placeholder(tf.int32, [self.batch_size, self.training_seq_len])
+ self.y_output = tf.placeholder(tf.int32, [self.batch_size, self.training_seq_len])
+
+ with tf.variable_scope('lstm_vars'):
+ # Softmax Output Weights
+ W = tf.get_variable('W', [self.rnn_size, self.vocab_size], tf.float32, tf.random_normal_initializer())
+ b = tf.get_variable('b', [self.vocab_size], tf.float32, tf.constant_initializer(0.0))
+
+ # Define Embedding
+ embedding_mat = tf.get_variable('embedding_mat', [self.vocab_size, self.rnn_size],
+ tf.float32, tf.random_normal_initializer())
+
+ embedding_output = tf.nn.embedding_lookup(embedding_mat, self.x_data)
+ rnn_inputs = tf.split(1, self.training_seq_len, embedding_output)
+ rnn_inputs_trimmed = [tf.squeeze(x, [1]) for x in rnn_inputs]
+
+ decoder = tf.nn.seq2seq.rnn_decoder
+ outputs, last_state = decoder(rnn_inputs_trimmed,
+ self.initial_state,
+ self.lstm_cell)
+
+ # RNN outputs
+ output = tf.reshape(tf.concat(1, outputs), [-1, rnn_size])
+ # Logits and output
+ self.logit_output = tf.matmul(output, W) + b
+ self.model_output = tf.nn.softmax(self.logit_output)
+
+ loss_fun = tf.nn.seq2seq.sequence_loss_by_example
+ loss = loss_fun([self.logit_output],[tf.reshape(self.y_output, [-1])],
+ [tf.ones([self.batch_size * self.training_seq_len])],
+ self.vocab_size)
+ self.cost = tf.reduce_sum(loss) / (self.batch_size * self.training_seq_len)
+ self.final_state = last_state
+ gradients, _ = tf.clip_by_global_norm(tf.gradients(self.cost, tf.trainable_variables()), 4.5)
+ optimizer = tf.train.AdamOptimizer(self.learning_rate)
+ self.train_op = optimizer.apply_gradients(zip(gradients, tf.trainable_variables()))
+
+ def sample(self, sess, words=ix2vocab, vocab=vocab2ix, num=20, prime_text='thou art'):
+ state = sess.run(self.lstm_cell.zero_state(1, tf.float32))
+ char_list = list(prime_text)
+ for char in char_list[:-1]:
+ x = np.zeros((1, 1))
+ x[0, 0] = vocab[char]
+ feed_dict = {self.x_data: x, self.initial_state:state}
+ [state] = sess.run([self.final_state], feed_dict=feed_dict)
+
+ out_sentence = prime_text
+ char = char_list[-1]
+ for n in range(num):
+ x = np.zeros((1, 1))
+ x[0, 0] = vocab[char]
+ feed_dict = {self.x_data: x, self.initial_state:state}
+ [model_output, state] = sess.run([self.model_output, self.final_state], feed_dict=feed_dict)
+ sample = np.argmax(model_output[0])
+ if sample == 0:
+ break
+ char = words[sample]
+ out_sentence = out_sentence + char
+ return(out_sentence)
+
+with tf.variable_scope('lstm_model') as scope:
+ # Define LSTM Model
+ lstm_model = LSTM_Model(rnn_size, num_layers, batch_size, learning_rate,
+ training_seq_len, vocab_size)
+ scope.reuse_variables()
+ test_lstm_model = LSTM_Model(rnn_size, num_layers, batch_size, learning_rate,
+ training_seq_len, vocab_size, infer_sample=True)
+
+
+# Create model saver
+saver = tf.train.Saver(tf.all_variables())
+
+# Create batches for each epoch
+num_batches = int(len(s_text_ix)/(batch_size * training_seq_len)) + 1
+# Split up text indices into subarrays, of equal size
+batches = np.array_split(s_text_ix, num_batches)
+# Reshape each split into [batch_size, training_seq_len]
+batches = [np.resize(x, [batch_size, training_seq_len]) for x in batches]
+
+# Initialize all variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Train model
+train_loss = []
+iteration_count = 1
+for epoch in range(epochs):
+ # Shuffle word indices
+ random.shuffle(batches)
+ # Create targets from shuffled batches
+ targets = [np.roll(x, -1, axis=1) for x in batches]
+    # Run through one epoch
+ print('Starting Epoch #{} of {}.'.format(epoch+1, epochs))
+ # Reset initial LSTM state every epoch
+ state = sess.run(lstm_model.initial_state)
+ for ix, batch in enumerate(batches):
+ training_dict = {lstm_model.x_data: batch, lstm_model.y_output: targets[ix]}
+ # We need to update initial state for each RNN cell:
+ for i, (c, h) in enumerate(lstm_model.initial_state):
+ training_dict[c] = state[i].c
+ training_dict[h] = state[i].h
+
+ temp_loss, state, _ = sess.run([lstm_model.cost, lstm_model.final_state, lstm_model.train_op],
+ feed_dict=training_dict)
+ train_loss.append(temp_loss)
+
+ # Print status every 10 gens
+ if iteration_count % 10 == 0:
+            summary_nums = (iteration_count, epoch+1, ix+1, num_batches, temp_loss)
+ print('Iteration: {}, Epoch: {}, Batch: {} out of {}, Loss: {:.2f}'.format(*summary_nums))
+
+ # Save the model and the vocab
+ if iteration_count % save_every == 0:
+ # Save model
+ model_file_name = os.path.join(full_model_dir, 'model')
+ saver.save(sess, model_file_name, global_step = iteration_count)
+ print('Model Saved To: {}'.format(model_file_name))
+ # Save vocabulary
+ dictionary_file = os.path.join(full_model_dir, 'vocab.pkl')
+ with open(dictionary_file, 'wb') as dict_file_conn:
+ pickle.dump([vocab2ix, ix2vocab], dict_file_conn)
+
+ if iteration_count % eval_every == 0:
+ for sample in prime_texts:
+ print(test_lstm_model.sample(sess, ix2vocab, vocab2ix, num=10, prime_text=sample))
+
+ iteration_count += 1
+
+
+
+
+# Plot loss over time
+plt.plot(train_loss, 'k-')
+plt.title('Stacked LSTM Training Loss')
+plt.xlabel('Generation')
+plt.ylabel('Loss')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 10/implementing_unit_tests.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 10/implementing_unit_tests.py
new file mode 100644
index 000000000..6a63037fc
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 10/implementing_unit_tests.py
@@ -0,0 +1,190 @@
+# -*- coding: utf-8 -*-
+# Implementing Unit Tests
+#----------------------------------
+#
+# Here, we will show how to implement different unit tests
+# on the MNIST example
+
+import numpy as np
+import tensorflow as tf
+from tensorflow.contrib.learn.python.learn.datasets.mnist import read_data_sets
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Start a graph session
+sess = tf.Session()
+
+# Load data
+data_dir = 'temp'
+mnist = read_data_sets(data_dir)
+
+# Convert images into 28x28 (they are downloaded as 1x784)
+train_xdata = np.array([np.reshape(x, (28,28)) for x in mnist.train.images])
+test_xdata = np.array([np.reshape(x, (28,28)) for x in mnist.test.images])
+
+# Labels stay as integer class ids (0-9); the sparse softmax loss uses them directly
+train_labels = mnist.train.labels
+test_labels = mnist.test.labels
+
+# Set model parameters
+batch_size = 100
+learning_rate = 0.005
+evaluation_size = 100
+image_width = train_xdata[0].shape[0]
+image_height = train_xdata[0].shape[1]
+target_size = max(train_labels) + 1
+num_channels = 1 # greyscale = 1 channel
+generations = 100
+eval_every = 5
+conv1_features = 25
+conv2_features = 50
+max_pool_size1 = 2 # NxN window for 1st max pool layer
+max_pool_size2 = 2 # NxN window for 2nd max pool layer
+fully_connected_size1 = 100
+dropout_prob = 0.75
+
+# Declare model placeholders
+x_input_shape = (batch_size, image_width, image_height, num_channels)
+x_input = tf.placeholder(tf.float32, shape=x_input_shape)
+y_target = tf.placeholder(tf.int32, shape=(batch_size))
+eval_input_shape = (evaluation_size, image_width, image_height, num_channels)
+eval_input = tf.placeholder(tf.float32, shape=eval_input_shape)
+eval_target = tf.placeholder(tf.int32, shape=(evaluation_size))
+
+# Dropout placeholder
+dropout = tf.placeholder(tf.float32, shape=())
+
+# Declare model parameters
+conv1_weight = tf.Variable(tf.truncated_normal([4, 4, num_channels, conv1_features],
+ stddev=0.1, dtype=tf.float32))
+conv1_bias = tf.Variable(tf.zeros([conv1_features], dtype=tf.float32))
+
+conv2_weight = tf.Variable(tf.truncated_normal([4, 4, conv1_features, conv2_features],
+ stddev=0.1, dtype=tf.float32))
+conv2_bias = tf.Variable(tf.zeros([conv2_features], dtype=tf.float32))
+
+# fully connected variables
+resulting_width = image_width // (max_pool_size1 * max_pool_size2)
+resulting_height = image_height // (max_pool_size1 * max_pool_size2)
+full1_input_size = resulting_width * resulting_height * conv2_features
+full1_weight = tf.Variable(tf.truncated_normal([full1_input_size, fully_connected_size1],
+ stddev=0.1, dtype=tf.float32))
+full1_bias = tf.Variable(tf.truncated_normal([fully_connected_size1], stddev=0.1, dtype=tf.float32))
+full2_weight = tf.Variable(tf.truncated_normal([fully_connected_size1, target_size],
+ stddev=0.1, dtype=tf.float32))
+full2_bias = tf.Variable(tf.truncated_normal([target_size], stddev=0.1, dtype=tf.float32))
+
+
+# Initialize Model Operations
+def my_conv_net(input_data):
+ # First Conv-ReLU-MaxPool Layer
+ conv1 = tf.nn.conv2d(input_data, conv1_weight, strides=[1, 1, 1, 1], padding='SAME')
+ relu1 = tf.nn.relu(tf.nn.bias_add(conv1, conv1_bias))
+ max_pool1 = tf.nn.max_pool(relu1, ksize=[1, max_pool_size1, max_pool_size1, 1],
+ strides=[1, max_pool_size1, max_pool_size1, 1], padding='SAME')
+
+ # Second Conv-ReLU-MaxPool Layer
+ conv2 = tf.nn.conv2d(max_pool1, conv2_weight, strides=[1, 1, 1, 1], padding='SAME')
+ relu2 = tf.nn.relu(tf.nn.bias_add(conv2, conv2_bias))
+ max_pool2 = tf.nn.max_pool(relu2, ksize=[1, max_pool_size2, max_pool_size2, 1],
+ strides=[1, max_pool_size2, max_pool_size2, 1], padding='SAME')
+
+ # Transform Output into a 1xN layer for next fully connected layer
+ final_conv_shape = max_pool2.get_shape().as_list()
+ final_shape = final_conv_shape[1] * final_conv_shape[2] * final_conv_shape[3]
+ flat_output = tf.reshape(max_pool2, [final_conv_shape[0], final_shape])
+
+ # First Fully Connected Layer
+ fully_connected1 = tf.nn.relu(tf.add(tf.matmul(flat_output, full1_weight), full1_bias))
+
+ # Second Fully Connected Layer
+ final_model_output = tf.add(tf.matmul(fully_connected1, full2_weight), full2_bias)
+
+ # Add dropout
+ final_model_output = tf.nn.dropout(final_model_output, dropout)
+
+ return(final_model_output)
+
+model_output = my_conv_net(x_input)
+test_model_output = my_conv_net(eval_input)
+
+# Declare Loss Function (softmax cross entropy)
+loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(model_output, y_target))
+
+# Create a prediction function
+prediction = tf.nn.softmax(model_output)
+test_prediction = tf.nn.softmax(test_model_output)
+
+# Create accuracy function
+def get_accuracy(logits, targets):
+ batch_predictions = np.argmax(logits, axis=1)
+ num_correct = np.sum(np.equal(batch_predictions, targets))
+ return(100. * num_correct/batch_predictions.shape[0])
+
+# Create an optimizer
+my_optimizer = tf.train.MomentumOptimizer(learning_rate, 0.9)
+train_step = my_optimizer.minimize(loss)
+
+# Initialize Variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Check values of tensors!
+class drop_out_test(tf.test.TestCase):
+    # Make sure that we don't drop too much
+    # (method names must start with 'test' for the runner to discover them)
+    def test_dropout_greaterthan(self):
+        with self.test_session():
+            # dropout is a placeholder, so a value has to be fed to evaluate it
+            self.assertGreater(dropout.eval(feed_dict={dropout: dropout_prob}), 0.25)
+
+# Test accuracy function
+class accuracy_test(tf.test.TestCase):
+    # Make sure accuracy function behaves correctly
+    def test_accuracy_exact(self):
+        with self.test_session():
+            test_preds = [[0.9, 0.1], [0.01, 0.99]]
+            test_targets = [0, 1]
+            # get_accuracy() returns a plain number, not a tensor
+            test_acc = get_accuracy(test_preds, test_targets)
+            self.assertEqual(test_acc, 100.)
+
+# Test tensorshape
+class shape_test(tf.test.TestCase):
+    # Make sure our model output is size [batch_size, num_classes]
+    def test_output_shape(self):
+        with self.test_session():
+            numpy_array = np.ones([batch_size, target_size])
+            self.assertShapeEqual(numpy_array, model_output)
+
+# Perform unit tests
+tf.test.main()
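+# Note: tf.test.main() hands control to the unittest runner, which exits the
+# process when the tests finish -- so the training loop below only runs if
+# this call is skipped (e.g., guarded behind a command-line flag, as in the
+# production recipes later in this chapter).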
+
+# Start training loop
+train_loss = []
+train_acc = []
+test_acc = []
+for i in range(generations):
+ rand_index = np.random.choice(len(train_xdata), size=batch_size)
+ rand_x = train_xdata[rand_index]
+ rand_x = np.expand_dims(rand_x, 3)
+ rand_y = train_labels[rand_index]
+ train_dict = {x_input: rand_x, y_target: rand_y, dropout: dropout_prob}
+
+ sess.run(train_step, feed_dict=train_dict)
+ temp_train_loss, temp_train_preds = sess.run([loss, prediction], feed_dict=train_dict)
+ temp_train_acc = get_accuracy(temp_train_preds, rand_y)
+
+ if (i+1) % eval_every == 0:
+ eval_index = np.random.choice(len(test_xdata), size=evaluation_size)
+ eval_x = test_xdata[eval_index]
+ eval_x = np.expand_dims(eval_x, 3)
+ eval_y = test_labels[eval_index]
+ test_dict = {eval_input: eval_x, eval_target: eval_y, dropout: 1.0}
+ test_preds = sess.run(test_prediction, feed_dict=test_dict)
+ temp_test_acc = get_accuracy(test_preds, eval_y)
+
+ # Record and print results
+ train_loss.append(temp_train_loss)
+ train_acc.append(temp_train_acc)
+ test_acc.append(temp_test_acc)
+ acc_and_loss = [(i+1), temp_train_loss, temp_train_acc, temp_test_acc]
+ acc_and_loss = [np.round(x,2) for x in acc_and_loss]
+ print('Generation # {}. Train Loss: {:.2f}. Train Acc (Test Acc): {:.2f} ({:.2f})'.format(*acc_and_loss))
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 10/parallelizing_tensorflow.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 10/parallelizing_tensorflow.py
new file mode 100644
index 000000000..1b0f3ea4d
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 10/parallelizing_tensorflow.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+# Parallelizing Tensorflow
+#----------------------------------
+#
+# We will show how to use Tensorflow distributed
+
+import tensorflow as tf
+
+# We will setup a local cluster (on localhost)
+
+# Cluster for 2 local workers (tasks 0 and 1):
+cluster = tf.train.ClusterSpec({'local': ['localhost:2222', 'localhost:2223']})
+# Server definitions (one per task; keep separate references so the first
+# server object is not lost when the second is created):
+server0 = tf.train.Server(cluster, job_name="local", task_index=0)
+server1 = tf.train.Server(cluster, job_name="local", task_index=1)
+# A dedicated server process would block here to serve requests:
+#server0.join()
+
+# Have each worker do a task
+# Worker 0 : create matrices
+# Worker 1 : calculate sum of all elements
+mat_dim = 25
+matrix_list = {}
+
+with tf.device('/job:local/task:0'):
+ for i in range(0, 2):
+ m_label = 'm_{}'.format(i)
+ matrix_list[m_label] = tf.random_normal([mat_dim, mat_dim])
+
+# Have worker 1 compute the sum of each matrix
+sum_outs = {}
+with tf.device('/job:local/task:1'):
+ for i in range(0, 2):
+ A = matrix_list['m_{}'.format(i)]
+ sum_outs['m_{}'.format(i)] = tf.reduce_sum(A)
+
+    # Add together the per-matrix sums
+ summed_out = tf.add_n(list(sum_outs.values()))
+
+with tf.Session(server1.target) as sess:
+ result = sess.run(summed_out)
+ print('Summed Values:{}'.format(result))
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 10/production_ex_eval.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 10/production_ex_eval.py
new file mode 100644
index 000000000..3e9d41ee4
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 10/production_ex_eval.py
@@ -0,0 +1,105 @@
+# -*- coding: utf-8 -*-
+# Tensorflow Production Example (Evaluating)
+#----------------------------------
+#
+# We pull together everything and create an example
+# of best tensorflow production tips
+#
+# The example we will productionalize is the spam/ham RNN
+# from the RNN Chapter.
+
+import os
+import re
+import numpy as np
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+tf.app.flags.DEFINE_string("storage_folder", "temp", "Where to store model and data.")
+tf.app.flags.DEFINE_string('model_file', 'model.ckpt', 'Model file location.')
+tf.app.flags.DEFINE_boolean('run_unit_tests', False, 'If true, run tests.')
+FLAGS = tf.app.flags.FLAGS
+
+
+# Create a text cleaning function
+def clean_text(text_string):
+ text_string = re.sub(r'([^\s\w]|_|[0-9])+', '', text_string)
+ text_string = " ".join(text_string.split())
+ text_string = text_string.lower()
+ return(text_string)
+
+
+# Load vocab processor
+def load_vocab():
+ vocab_path = os.path.join(FLAGS.storage_folder, "vocab")
+ vocab_processor = tf.contrib.learn.preprocessing.VocabularyProcessor.restore(vocab_path)
+ return(vocab_processor)
+
+
+# Process input data:
+def process_data(input_data, vocab_processor):
+ input_data = clean_text(input_data)
+ input_data = input_data.split()
+ processed_input = np.array(list(vocab_processor.transform(input_data)))
+ return(processed_input)
+
+
+# Get input function
+def get_input_data():
+ """
+ For this function, we just prompt the user for a text message to evaluate
+ But this function could also potentially read a file in as well.
+ """
+ input_text = input("Please enter a text message to evaluate: ")
+ vocab_processor = load_vocab()
+ return(process_data(input_text, vocab_processor))
+
+
+# Test clean_text function
+class clean_test(tf.test.TestCase):
+ # Make sure cleaning function behaves correctly
+    def test_clean_string(self):
+        with self.test_session():
+            test_input = '--Tensorflow\'s so Great! Don\'t you think so? '
+            test_expected = 'tensorflows so great dont you think so'
+            test_out = clean_text(test_input)
+            self.assertEqual(test_expected, test_out)
+
+
+# Main function
+def main(args):
+ # Get flags
+ storage_folder = FLAGS.storage_folder
+
+ # Get user input text
+ x_data = get_input_data()
+
+ # Load model
+ graph = tf.Graph()
+ with graph.as_default():
+ sess = tf.Session()
+ with sess.as_default():
+ # Load the saved meta graph and restore variables
+ saver = tf.train.import_meta_graph("{}.meta".format(os.path.join(storage_folder, "model.ckpt")))
+ saver.restore(sess, os.path.join(storage_folder, "model.ckpt"))
+
+ # Get the placeholders from the graph by name
+ x_data_ph = graph.get_operation_by_name("x_data_ph").outputs[0]
+ dropout_keep_prob = graph.get_operation_by_name("dropout_keep_prob").outputs[0]
+ probability_outputs = graph.get_operation_by_name("probability_outputs").outputs[0]
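+            # (These names match the name= arguments given to the placeholders
+            # and the prediction op in production_ex_train.py.)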
+
+ # Make the prediction
+ eval_feed_dict = {x_data_ph: x_data, dropout_keep_prob: 1.0}
+ probability_prediction = sess.run(tf.reduce_mean(probability_outputs, 0), eval_feed_dict)
+
+ # Print output (Or save to file or DB connection?)
+ print('Probability of Spam: {:.4}'.format(probability_prediction[1]))
+
+# Run main module/tf App
+if __name__ == "__main__":
+ if FLAGS.run_unit_tests:
+ # Perform unit tests
+ tf.test.main()
+ else:
+ # Run evaluation
+ tf.app.run()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 10/production_ex_train.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 10/production_ex_train.py
new file mode 100644
index 000000000..7e9b5a4c0
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 10/production_ex_train.py
@@ -0,0 +1,240 @@
+# -*- coding: utf-8 -*-
+# Tensorflow Production Example (Training)
+#----------------------------------
+#
+# We pull together everything and create an example
+# of best tensorflow production tips
+#
+# The example we will productionalize is the spam/ham RNN
+# from the RNN chapter.
+
+import os
+import re
+import io
+import requests
+import numpy as np
+import tensorflow as tf
+from zipfile import ZipFile
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Define App Flags
+tf.app.flags.DEFINE_string("storage_folder", "temp", "Where to store model and data.")
+tf.app.flags.DEFINE_float('learning_rate', 0.0005, 'Initial learning rate.')
+tf.app.flags.DEFINE_float('dropout_prob', 0.5, 'Keep probability for dropout.')
+tf.app.flags.DEFINE_integer('epochs', 20, 'Number of epochs for training.')
+tf.app.flags.DEFINE_integer('batch_size', 250, 'Batch Size for training.')
+tf.app.flags.DEFINE_integer('max_sequence_length', 20, 'Max sentence length in words.')
+tf.app.flags.DEFINE_integer('rnn_size', 15, 'RNN feature size.')
+tf.app.flags.DEFINE_integer('embedding_size', 25, 'Word embedding size.')
+tf.app.flags.DEFINE_integer('min_word_frequency', 20, 'Word frequency cutoff.')
+FLAGS = tf.app.flags.FLAGS
+
+# Define how to get data
+def get_data(storage_folder=FLAGS.storage_folder, data_file="text_data.txt"):
+ """
+ This function gets the spam/ham data. It will download it if it doesn't
+ already exist on disk (at specified folder/file location).
+ """
+ # Make a storage folder for models and data
+ if not os.path.exists(storage_folder):
+ os.makedirs(storage_folder)
+
+ if not os.path.isfile(os.path.join(storage_folder, data_file)):
+ zip_url = 'http://archive.ics.uci.edu/ml/machine-learning-databases/00228/smsspamcollection.zip'
+ r = requests.get(zip_url)
+ z = ZipFile(io.BytesIO(r.content))
+ file = z.read('SMSSpamCollection')
+ # Format Data
+ text_data = file.decode()
+ text_data = text_data.encode('ascii',errors='ignore')
+ text_data = text_data.decode().split('\n')
+
+ # Save data to text file
+ with open(os.path.join(storage_folder, data_file), 'w') as file_conn:
+ for text in text_data:
+ file_conn.write("{}\n".format(text))
+ else:
+ # Open data from text file
+ text_data = []
+ with open(os.path.join(storage_folder, data_file), 'r') as file_conn:
+ for row in file_conn:
+ text_data.append(row)
+ text_data = text_data[:-1]
+ text_data = [x.split('\t') for x in text_data if len(x)>=1]
+ [y_data, x_data] = [list(x) for x in zip(*text_data)]
+
+ return(x_data, y_data)
+
+
+# Create a text cleaning function
+def clean_text(text_string):
+ text_string = re.sub(r'([^\s\w]|_|[0-9])+', '', text_string)
+ text_string = " ".join(text_string.split())
+ text_string = text_string.lower()
+ return(text_string)
+
+
+# Define RNN Model
+def rnn_model(x_data_ph, max_sequence_length, vocab_size, embedding_size,
+ rnn_size, dropout_keep_prob):
+ # Create embedding
+ embedding_mat = tf.Variable(tf.random_uniform([vocab_size, embedding_size], -1.0, 1.0))
+ embedding_output = tf.nn.embedding_lookup(embedding_mat, x_data_ph)
+
+ # Define the RNN cell
+ cell = tf.nn.rnn_cell.BasicRNNCell(num_units = rnn_size)
+ output, state = tf.nn.dynamic_rnn(cell, embedding_output, dtype=tf.float32)
+ output = tf.nn.dropout(output, dropout_keep_prob)
+
+ # Get output of RNN sequence
+ output = tf.transpose(output, [1, 0, 2])
+ last = tf.gather(output, int(output.get_shape()[0]) - 1)
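+    # (The transpose reorders the outputs to [max_seq_len, batch, rnn_size],
+    # so gathering index max_seq_len-1 picks out the last time step's output
+    # for every element of the batch.)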
+
+
+    weight = tf.Variable(tf.truncated_normal([rnn_size, 2], stddev=0.1))
+    bias = tf.Variable(tf.constant(0.1, shape=[2]))
+    # Return raw logits: the softmax is applied by the loss function (and by
+    # the named 'probability_outputs' op), so applying it here would
+    # double-softmax the outputs.
+    logits_out = tf.matmul(last, weight) + bias
+
+ return(logits_out)
+
+
+# Define accuracy function
+def get_accuracy(logits, actuals):
+ # Calulate if each output is correct
+ batch_acc = tf.equal(tf.argmax(logits, 1), tf.cast(actuals, tf.int64))
+ # Convert logical to float
+ batch_acc = tf.cast(batch_acc, tf.float32)
+ return(batch_acc)
+
+# Define main program
+def main(args):
+ # Set verbosity to get more information from Tensorflow
+ tf.logging.set_verbosity(tf.logging.INFO)
+
+    # Create tensorboard folder if not exists
+    if not os.path.exists('tensorboard'):
+        os.makedirs('tensorboard')
+    # Create a visualizer object for Tensorboard viewing
+    summary_writer = tf.train.SummaryWriter('tensorboard', tf.get_default_graph())
+
+ # Set model parameters
+ storage_folder = FLAGS.storage_folder
+ learning_rate = FLAGS.learning_rate
+ epochs = FLAGS.epochs
+    dropout_prob = FLAGS.dropout_prob
+ batch_size = FLAGS.batch_size
+ max_sequence_length = FLAGS.max_sequence_length
+ rnn_size = FLAGS.rnn_size
+ embedding_size = FLAGS.embedding_size
+ min_word_frequency = FLAGS.min_word_frequency
+
+ # Get text->spam/ham data
+ x_data, y_data = get_data()
+
+ # Clean texts
+ x_data = [clean_text(x) for x in x_data]
+
+ # Change texts into numeric vectors
+ vocab_processor = tf.contrib.learn.preprocessing.VocabularyProcessor(max_sequence_length,
+ min_frequency=min_word_frequency)
+ text_processed = np.array(list(vocab_processor.fit_transform(x_data)))
+
+ # Save vocab processor (for loading and future evaluation)
+ vocab_processor.save(os.path.join(storage_folder, "vocab"))
+
+ # Shuffle and split data
+ text_processed = np.array(text_processed)
+    # Label spam as 1 so that class index 1 is P(spam), as read by the eval script
+    y_data = np.array([1 if x=='spam' else 0 for x in y_data])
+ shuffled_ix = np.random.permutation(np.arange(len(y_data)))
+ x_shuffled = text_processed[shuffled_ix]
+ y_shuffled = y_data[shuffled_ix]
+
+ # Split train/test set
+ ix_cutoff = int(len(y_shuffled)*0.80)
+ x_train, x_test = x_shuffled[:ix_cutoff], x_shuffled[ix_cutoff:]
+ y_train, y_test = y_shuffled[:ix_cutoff], y_shuffled[ix_cutoff:]
+ vocab_size = len(vocab_processor.vocabulary_)
+
+ with tf.Graph().as_default():
+ sess = tf.Session()
+ # Define placeholders
+ x_data_ph = tf.placeholder(tf.int32, [None, max_sequence_length], name='x_data_ph')
+ y_output_ph = tf.placeholder(tf.int32, [None], name='y_output_ph')
+ dropout_keep_prob = tf.placeholder(tf.float32, name='dropout_keep_prob')
+
+ # Define Model
+ rnn_model_outputs = rnn_model(x_data_ph, max_sequence_length, vocab_size,
+ embedding_size, rnn_size, dropout_keep_prob)
+
+ # Prediction
+ # Although we won't use the following operation, we declare and name
+ # the probability outputs so that we can recall them later for evaluation
+ rnn_prediction = tf.nn.softmax(rnn_model_outputs, name="probability_outputs")
+
+ # Loss function
+ losses = tf.nn.sparse_softmax_cross_entropy_with_logits(rnn_model_outputs, y_output_ph)
+ # Remember that for this loss function, logits=float32, labels=int32
+ loss = tf.reduce_mean(losses, name="loss")
+
+ # Model Accuracy Operation
+ accuracy = tf.reduce_mean(get_accuracy(rnn_model_outputs, y_output_ph), name="accuracy")
+
+ # Add scalar summaries for Tensorboard
+ with tf.name_scope('Scalar_Summaries'):
+ tf.scalar_summary('Loss', loss)
+ tf.scalar_summary('Accuracy', accuracy)
+
+ # Declare Optimizer/train step
+ optimizer = tf.train.GradientDescentOptimizer(learning_rate)
+ train_step = optimizer.minimize(loss)
+
+ # Declare summary merging operation
+ summary_op = tf.merge_all_summaries()
+
+ # Create a graph/Variable saving/loading operations
+ saver = tf.train.Saver()
+
+ init = tf.initialize_all_variables()
+ sess.run(init)
+
+ # Start training
+ for epoch in range(epochs):
+
+ # Shuffle training data
+ shuffled_ix = np.random.permutation(np.arange(len(x_train)))
+ x_train = x_train[shuffled_ix]
+ y_train = y_train[shuffled_ix]
+ num_batches = int(len(x_train)/batch_size) + 1
+ #
+ for i in range(num_batches):
+ # Select train data
+ min_ix = i * batch_size
+ max_ix = np.min([len(x_train), ((i+1) * batch_size)])
+ x_train_batch = x_train[min_ix:max_ix]
+ y_train_batch = y_train[min_ix:max_ix]
+
+ # Run train step
+                train_dict = {x_data_ph: x_train_batch,
+                              y_output_ph: y_train_batch,
+                              dropout_keep_prob: dropout_prob}
+ _, summary = sess.run([train_step, summary_op], feed_dict=train_dict)
+
+                # Reuse the writer created at the top of main() instead of
+                # opening a new SummaryWriter on every batch
+                summary_writer.add_summary(summary, epoch * num_batches + i)
+
+ # Run loss and accuracy for training
+ temp_train_loss, temp_train_acc = sess.run([loss, accuracy], feed_dict=train_dict)
+ test_dict = {x_data_ph: x_test, y_output_ph: y_test, dropout_keep_prob:1.0}
+ temp_test_loss, temp_test_acc = sess.run([loss, accuracy], feed_dict=test_dict)
+
+ # Print Epoch Summary
+ print('Epoch: {}, Test Loss: {:.2}, Test Acc: {:.2}'.format(epoch+1, temp_test_loss, temp_test_acc))
+
+ # Save model every epoch
+ saver.save(sess, os.path.join(storage_folder, "model.ckpt"))
+
+# Run main module/tf App
+if __name__ == "__main__":
+ tf.app.run()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 10/production_tips_for_tf.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 10/production_tips_for_tf.py
new file mode 100644
index 000000000..1d50701d8
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 10/production_tips_for_tf.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+# Tips for Tensorflow to Production
+#----------------------------------
+#
+# Various Tips for Taking Tensorflow to Production
+
+############################################
+#
+# THIS SCRIPT IS NOT RUNNABLE.
+# -it only contains tips for production code
+#
+############################################
+
+# Also you can clear the default graph from memory
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Saving Models
+# File types created from saving:
+# checkpoint file: Holds info on where the most recent models are
+# events file: Strictly for viewing graph in Tensorboard
+# pbtxt file: Textual protobufs file (uncompressed), used for debugging
+# chkp file: Holds data and model weights (large file)
+# meta chkp files: Model Graph and Meta-data (learning rate and operations)
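+#
+# Restoring later is the mirror image (a sketch, assuming a live session
+# 'sess' and checkpoints in the current directory):
+#   saver = tf.train.Saver()
+#   saver.restore(sess, tf.train.latest_checkpoint('.'))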
+
+
+# Saving data pipeline structures (e.g., the vocabulary)
+word_list = ['to', 'be', 'or', 'not', 'to', 'be']
+vocab_list = list(set(word_list))
+vocab2ix_dict = dict(zip(vocab_list, range(len(vocab_list))))
+ix2vocab_dict = {val:key for key,val in vocab2ix_dict.items()}
+
+# Save vocabulary
+import json
+with open('vocab2ix_dict.json', 'w') as file_conn:
+ json.dump(vocab2ix_dict, file_conn)
+
+# Load vocabulary
+with open('vocab2ix_dict.json', 'r') as file_conn:
+ vocab2ix_dict = json.load(file_conn)
+
+# After model declaration, add a saving operation
+saver = tf.train.Saver()
+# Then during training, save every so often, referencing the training generation
+for i in range(generations):
+ ...
+    if i % save_every == 0:
+        saver.save(sess, 'my_model', global_step=i)
+
+# Can also save only specific variables:
+saver = tf.train.Saver({"my_var": my_variable})
+
+
+# Other options for the saver are 'keep_checkpoint_every_n_hours'
+# and 'max_to_keep' (default 5).
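+# For example (a sketch; both are real tf.train.Saver arguments):
+saver = tf.train.Saver(max_to_keep=3, keep_checkpoint_every_n_hours=2)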
+
+# Be sure to name operations, and variables for easy loading for referencing later
+conv_weights = tf.Variable(tf.random_normal(), name='conv_weights')
+loss = tf.reduce_mean(... , name='loss')
+
+# Instead of trying argparse and main(), Tensorflow provides an 'app' function
+# to handle running and loading of arguments
+
+# At the beginning of the file, define the flags.
+tf.app.flags.DEFINE_string("worker_locations", "", "List of worker addresses.")
+tf.app.flags.DEFINE_float('learning_rate', 0.01, 'Initial learning rate.')
+tf.app.flags.DEFINE_integer('generations', 1000, 'Number of training generations.')
+tf.app.flags.DEFINE_boolean('run_unit_tests', False, 'If true, run tests.')
+FLAGS = tf.app.flags.FLAGS
+
+# Need to define a 'main' function for the app to run
+def main(_):
+ worker_ips = FLAGS.worker_locations.split(",")
+ learning_rate = FLAGS.learning_rate
+ generations = FLAGS.generations
+ run_unit_tests = FLAGS.run_unit_tests
+
+# Run the Tensorflow app
+if __name__ == "__main__":
+ tf.app.run()
+
+
+# Use of Tensorflow's built in logging:
+# Five levels: DEBUG, INFO, WARN, ERROR, and FATAL
+tf.logging.set_verbosity(tf.logging.WARN)
+# WARN is the default value, but to see more information, you can set it to
+# INFO or DEBUG
+tf.logging.set_verbosity(tf.logging.DEBUG)
+# Note: 'DEBUG' is quite verbose.
+
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 10/using_multiple_devices.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 10/using_multiple_devices.py
new file mode 100644
index 000000000..7a136dbe1
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 10/using_multiple_devices.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+# Using Multiple Devices
+#----------------------------------
+#
+# This function gives us the ways to use
+# multiple devices (executors) in Tensorflow.
+
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# To find out where placement occurs, set 'log_device_placement'
+sess = tf.Session(config=tf.ConfigProto(log_device_placement=True))
+
+a = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[2, 3], name='a')
+b = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[3, 2], name='b')
+c = tf.matmul(a, b)
+
+# Runs the op.
+print(sess.run(c))
+
+
+# If we load a graph whose recorded device placements are unavailable,
+# we can let TF fall back to a valid device by setting a session parameter:
+config = tf.ConfigProto()
+config.allow_soft_placement = True
+sess_soft = tf.Session(config=config)
+
+# GPUs
+#---------------------------------
+# Note that the GPU must have a compute capability >= 3.5 for TF to use it.
+# http://docs.nvidia.com/cuda/cuda-c-programming-guide/index.html#compute-capability
+
+
+# Be careful with GPU memory allocation: TF does not release memory once it
+# has been allocated, and by default it grabs almost all of the GPU memory up
+# front. Instead, we can let the allocation grow gradually toward that limit
+# with an option setting:
+
+config.gpu_options.allow_growth = True
+sess_grow = tf.Session(config=config)
+
+# Also, we can limit the size of GPU memory used, with the following option
+config.gpu_options.per_process_gpu_memory_fraction = 0.4
+sess_limited = tf.Session(config=config)
+
+
+# How to set placements on multiple devices.
+# Here, assume we have three devices: CPU:0, GPU:1, and GPU:2
+if tf.test.is_built_with_cuda():
+ with tf.device('/cpu:0'):
+ a = tf.constant([1.0, 3.0, 5.0], shape=[1, 3])
+ b = tf.constant([2.0, 4.0, 6.0], shape=[3, 1])
+
+ with tf.device('/gpu:1'):
+ c = tf.matmul(a,b)
+ c = tf.reshape(c, [-1])
+
+ with tf.device('/gpu:2'):
+ d = tf.matmul(b,a)
+ flat_d = tf.reshape(d, [-1])
+
+ combined = tf.mul(c, flat_d)
+ print(sess.run(combined))
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 11/genetic_algorithm.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 11/genetic_algorithm.py
new file mode 100644
index 000000000..dea6b6b7b
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 11/genetic_algorithm.py
@@ -0,0 +1,118 @@
+# -*- coding: utf-8 -*-
+# Implementing a Genetic Algorithm
+# -------------------------------
+#
+# Genetic Algorithm Optimization in Tensorflow
+#
+# We are going to implement a genetic algorithm
+# to optimize to a ground truth array. The ground
+# truth will be an array of 50 floating point
+# numbers which are generated by:
+# f(x)=sin(2*pi*x/50) where 0<=x<=50
+#
+# (The setup below is a reconstruction consistent with the
+# training loop that follows.)
+
+import numpy as np
+import matplotlib.pyplot as plt
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Genetic Algorithm parameters
+pop_size = 100            # number of individuals
+features = 50             # length of each individual
+selection = 0.2           # fraction of the population kept as parents
+mutation = 1. / pop_size  # per-element mutation probability
+generations = 200
+num_parents = int(pop_size * selection)
+num_children = pop_size - num_parents
+
+# Start a graph session
+sess = tf.Session()
+
+# Ground truth: one period of a sine wave over the 50 points
+truth = np.sin(2 * np.pi * np.arange(features, dtype=np.float32) / features)
+
+# Population variable, plus placeholders for the per-generation random inputs
+population = tf.Variable(np.random.randn(pop_size, features).astype(np.float32))
+truth_ph = tf.placeholder(tf.float32, [1, features])
+crossover_mat_ph = tf.placeholder(tf.float32, [num_children, features])
+mutation_val_ph = tf.placeholder(tf.float32, [num_children, features])
+
+# Fitness is the negative MSE against the truth (higher is better)
+fitness = -tf.reduce_mean(tf.square(tf.sub(population, truth_ph)), 1)
+top_vals, top_ind = tf.nn.top_k(fitness, k=pop_size)
+
+# Best individual (top_k sorts descending, so index 0 is the best)
+best_val = tf.reduce_max(top_vals)
+best_individual = tf.gather(population, tf.gather(top_ind, 0))
+
+# Keep the fittest individuals as parents
+population_sorted = tf.gather(population, top_ind)
+parents = tf.slice(population_sorted, [0, 0], [num_parents, features])
+
+# Create children by mixing two random parents through the crossover matrix
+rand_parent1_ix = np.random.choice(num_parents, num_children)
+rand_parent2_ix = np.random.choice(num_parents, num_children)
+rand_parent1 = tf.gather(parents, rand_parent1_ix)
+rand_parent2 = tf.gather(parents, rand_parent2_ix)
+rand_parent1_sel = tf.mul(rand_parent1, crossover_mat_ph)
+rand_parent2_sel = tf.mul(rand_parent2, tf.sub(np.float32(1.), crossover_mat_ph))
+children_after_sel = tf.add(rand_parent1_sel, rand_parent2_sel)
+
+# Mutate children by adding the (mostly zero) mutation values
+mutated_children = tf.add(children_after_sel, mutation_val_ph)
+
+# New population = parents followed by mutated children
+new_population = tf.concat(0, [parents, mutated_children])
+step = tf.group(population.assign(new_population))
+
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Run through generations
+for i in range(generations):
+    # Single-point crossover matrix: zeros up to a random split, ones after
+    crossover_mat = np.ones(shape=[num_children, features])
+    crossover_point = np.random.choice(np.arange(1, features-1, step=1), num_children)
+    for pop_ix in range(num_children):
+        crossover_mat[pop_ix, 0:crossover_point[pop_ix]] = 0.
+    # Mutation values: Gaussian noise, zeroed out except with probability 'mutation'
+    mutation_prob_mat = np.random.uniform(size=[num_children, features])
+    mutation_values = np.random.normal(size=[num_children, features])
+    mutation_values[mutation_prob_mat >= mutation] = 0
+
+ # Run GA step
+ feed_dict = {truth_ph: truth.reshape([1, features]),
+ crossover_mat_ph: crossover_mat,
+ mutation_val_ph: mutation_values}
+ step.run(feed_dict, session=sess)
+ best_individual_val = sess.run(best_individual, feed_dict=feed_dict)
+
+ if i % 5 == 0:
+ best_fit = sess.run(best_val, feed_dict = feed_dict)
+ print('Generation: {}, Best Fitness (lowest MSE): {:.2}'.format(i, -best_fit))
+
+plt.plot(truth, label="True Values")
+plt.plot(np.squeeze(best_individual_val), label="Best Individual")
+plt.axis((0, features, -1.25, 1.25))
+plt.legend(loc='upper right')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 11/k_means.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 11/k_means.py
new file mode 100644
index 000000000..3c2f6764c
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 11/k_means.py
@@ -0,0 +1,139 @@
+# -*- coding: utf-8 -*-
+# K-means with Tensorflow
+#----------------------------------
+#
+# This script shows how to do k-means with Tensorflow
+
+import numpy as np
+import matplotlib.pyplot as plt
+import tensorflow as tf
+from sklearn import datasets
+from scipy.spatial import cKDTree
+from sklearn.decomposition import PCA
+from sklearn.preprocessing import scale
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+sess = tf.Session()
+
+iris = datasets.load_iris()
+
+num_pts = len(iris.data)
+num_feats = len(iris.data[0])
+
+# Set k-means parameters
+# There are 3 types of iris flowers, see if we can predict them
+k=3
+generations = 25
+
+data_points = tf.Variable(iris.data)
+cluster_labels = tf.Variable(tf.zeros([num_pts], dtype=tf.int64))
+
+# Randomly choose starting points
+rand_starts = np.array([iris.data[np.random.choice(len(iris.data))] for _ in range(k)])
+
+centroids = tf.Variable(rand_starts)
+
+# In order to calculate the distance between every data point and every centroid, we
+# repeat the centroids into a (num_points) by k matrix.
+centroid_matrix = tf.reshape(tf.tile(centroids, [num_pts, 1]), [num_pts, k, num_feats])
+# Then we reshape the data points into k (3) repeats
+point_matrix = tf.reshape(tf.tile(data_points, [1, k]), [num_pts, k, num_feats])
+distances = tf.reduce_sum(tf.square(point_matrix - centroid_matrix), reduction_indices=2)
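+# (Illustration of the shapes: centroid_matrix and point_matrix are both
+# [num_pts, k, num_feats], so 'distances' collapses the feature axis to give
+# a [num_pts, k] matrix of squared distances, one per point/centroid pair.)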
+
+# Find the group it belongs to with tf.argmin()
+centroid_group = tf.argmin(distances, 1)
+
+# Find the group average
+def data_group_avg(group_ids, data):
+ # Sum each group
+ sum_total = tf.unsorted_segment_sum(data, group_ids, 3)
+ # Count each group
+ num_total = tf.unsorted_segment_sum(tf.ones_like(data), group_ids, 3)
+ # Calculate average
+ avg_by_group = sum_total/num_total
+ return(avg_by_group)
+
+means = data_group_avg(centroid_group, data_points)
+
+update = tf.group(centroids.assign(means), cluster_labels.assign(centroid_group))
+
+init = tf.initialize_all_variables()
+
+sess.run(init)
+
+for i in range(generations):
+ print('Calculating gen {}, out of {}.'.format(i, generations))
+ _, centroid_group_count = sess.run([update, centroid_group])
+ group_count = []
+ for ix in range(k):
+ group_count.append(np.sum(centroid_group_count==ix))
+ print('Group counts: {}'.format(group_count))
+
+
+[centers, assignments] = sess.run([centroids, cluster_labels])
+
+# Find which group assignments correspond to which group labels
+# First, need a most common element function
+def most_common(my_list):
+ return(max(set(my_list), key=my_list.count))
+
+label0 = most_common(list(assignments[0:50]))
+label1 = most_common(list(assignments[50:100]))
+label2 = most_common(list(assignments[100:150]))
+
+group0_count = np.sum(assignments[0:50]==label0)
+group1_count = np.sum(assignments[50:100]==label1)
+group2_count = np.sum(assignments[100:150]==label2)
+
+accuracy = (group0_count + group1_count + group2_count)/150.
+
+print('Accuracy: {:.2}'.format(accuracy))
+
+# Also plot the output
+# First use PCA to transform the 4-dimensional data into 2-dimensions
+pca_model = PCA(n_components=2)
+reduced_data = pca_model.fit_transform(iris.data)
+# Transform centers
+reduced_centers = pca_model.transform(centers)
+
+# Step size of mesh for plotting
+h = .02
+
+# Plot the decision boundary. For that, we will assign a color to each mesh point.
+x_min, x_max = reduced_data[:, 0].min() - 1, reduced_data[:, 0].max() + 1
+y_min, y_max = reduced_data[:, 1].min() - 1, reduced_data[:, 1].max() + 1
+xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
+
+# Get k-means classifications for the grid points
+xx_pt = list(xx.ravel())
+yy_pt = list(yy.ravel())
+xy_pts = np.array([[x,y] for x,y in zip(xx_pt, yy_pt)])
+mytree = cKDTree(reduced_centers)
+dist, indexes = mytree.query(xy_pts)
+
+# Put the result into a color plot
+indexes = indexes.reshape(xx.shape)
+plt.figure(1)
+plt.clf()
+plt.imshow(indexes, interpolation='nearest',
+ extent=(xx.min(), xx.max(), yy.min(), yy.max()),
+ cmap=plt.cm.Paired,
+ aspect='auto', origin='lower')
+
+# Plot each of the true iris data groups
+symbols = ['o', '^', 'D']
+label_name = ['Setosa', 'Versicolour', 'Virginica']
+for i in range(3):
+ temp_group = reduced_data[(i*50):(50)*(i+1)]
+ plt.plot(temp_group[:, 0], temp_group[:, 1], symbols[i], markersize=10, label=label_name[i])
+# Plot the centroids as a white X
+plt.scatter(reduced_centers[:, 0], reduced_centers[:, 1],
+ marker='x', s=169, linewidths=3,
+ color='w', zorder=10)
+plt.title('K-means clustering on Iris Dataset\n'
+ 'Centroids are marked with white cross')
+plt.xlim(x_min, x_max)
+plt.ylim(y_min, y_max)
+plt.legend(loc='lower right')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 11/solving_ode_system.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 11/solving_ode_system.py
new file mode 100644
index 000000000..108aeb880
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 11/solving_ode_system.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+# Solving a System of ODEs
+#----------------------------------
+#
+# In this script, we use Tensorflow to solve a system
+# of ODEs.
+#
+# The system of ODEs we will solve is the Lotka-Volterra
+# predator-prey system.
+
+
+# Declaring Operations
+import matplotlib.pyplot as plt
+import tensorflow as tf
+from tensorflow.python.framework import ops
+ops.reset_default_graph()
+
+# Open graph session
+sess = tf.Session()
+
+# Discrete Lotka-Volterra predator/prey equations
+#
+# X(t+1) = X(t) + (aX(t) + bX(t)Y(t)) * t_delta # Prey
+#
+# Y(t+1) = Y(t) + (cY(t) + dX(t)Y(t)) * t_delta # Predator
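+#
+# (These update rules are a forward-Euler discretization of the continuous
+# Lotka-Volterra equations, with step size t_delta.)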
+
+# Declare constants and variables
+x_initial = tf.constant(1.0)
+y_initial = tf.constant(1.0)
+X_t1 = tf.Variable(x_initial)
+Y_t1 = tf.Variable(y_initial)
+
+# Make the placeholders
+t_delta = tf.placeholder(tf.float32, shape=())
+a = tf.placeholder(tf.float32, shape=())
+b = tf.placeholder(tf.float32, shape=())
+c = tf.placeholder(tf.float32, shape=())
+d = tf.placeholder(tf.float32, shape=())
+
+# Discretized ODE update
+X_t2 = X_t1 + (a * X_t1 + b * X_t1 * Y_t1) * t_delta
+Y_t2 = Y_t1 + (c * Y_t1 + d * X_t1 * Y_t1) * t_delta
+
+# Update to New Population
+step = tf.group(
+ X_t1.assign(X_t2),
+ Y_t1.assign(Y_t2))
+
+init = tf.initialize_all_variables()
+sess.run(init)
+
+# Run the ODE
+prey_values = []
+predator_values = []
+for i in range(1000):
+ # Step simulation (using constants for a known cyclic solution)
+ step.run({a: (2./3.), b: (-4./3.), c: -1.0, d: 1.0, t_delta: 0.01}, session=sess)
+ # Store each outcome
+ temp_prey, temp_pred = sess.run([X_t1, Y_t1])
+ prey_values.append(temp_prey)
+ predator_values.append(temp_pred)
+
+# Visualize the output
+plt.plot(prey_values)
+plt.plot(predator_values)
+plt.legend(['Prey', 'Predator'], loc='upper right')
+plt.show()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/Chapter 11/using_tensorboard.py b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 11/using_tensorboard.py
new file mode 100644
index 000000000..121e672c1
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/Chapter 11/using_tensorboard.py
@@ -0,0 +1,124 @@
+# -*- coding: utf-8 -*-
+# Using Tensorboard
+#----------------------------------
+#
+# We illustrate the various ways to use
+# Tensorboard
+
+import os
+import io
+import time
+import numpy as np
+import matplotlib.pyplot as plt
+import tensorflow as tf
+
+# Initialize a graph session
+sess = tf.Session()
+
+# Create a visualizer object
+summary_writer = tf.train.SummaryWriter('tensorboard', tf.get_default_graph())
+
+# Create tensorboard folder if not exists
+if not os.path.exists('tensorboard'):
+ os.makedirs('tensorboard')
+print('Running a slowed down linear regression. '
+ 'Run the command: $tensorboard --logdir="tensorboard" '
+      ' Then navigate to http://127.0.0.1:6006')
+
+# You can also specify a port option with --port 6006
+
+# Wait a few seconds for user to run tensorboard commands
+time.sleep(3)
+
+# Some parameters
+batch_size = 50
+generations = 100
+
+# Create sample input data
+x_data = np.arange(1000)/10.
+true_slope = 2.
+y_data = x_data * true_slope + np.random.normal(loc=0.0, scale=25, size=1000)
+
+# Split into train/test
+train_ix = np.random.choice(len(x_data), size=int(len(x_data)*0.9), replace=False)
+test_ix = np.setdiff1d(np.arange(1000), train_ix)
+x_data_train, y_data_train = x_data[train_ix], y_data[train_ix]
+x_data_test, y_data_test = x_data[test_ix], y_data[test_ix]
+
+# Declare placeholders
+x_graph_input = tf.placeholder(tf.float32, [None])
+y_graph_input = tf.placeholder(tf.float32, [None])
+
+# Declare model variables
+m = tf.Variable(tf.random_normal([1], dtype=tf.float32), name='Slope')
+
+# Declare model
+output = tf.mul(m, x_graph_input, name='Batch_Multiplication')
+
+# Declare loss function (L1)
+residuals = output - y_graph_input
+l1_loss = tf.reduce_mean(tf.abs(residuals), name="L1_Loss")
+
+# Declare optimization function
+my_optim = tf.train.GradientDescentOptimizer(0.01)
+train_step = my_optim.minimize(l1_loss)
+
+# Visualize a scalar
+with tf.name_scope('Slope_Estimate'):
+ tf.scalar_summary('Slope_Estimate', tf.squeeze(m))
+
+# Visualize a histogram (errors)
+with tf.name_scope('Loss_and_Residuals'):
+    tf.histogram_summary('Histogram_Errors', l1_loss)
+ tf.histogram_summary('Histogram_Residuals', residuals)
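+    # (Note: l1_loss is a scalar, so its 'histogram' carries a single value
+    # per step; the residuals histogram is the more informative of the two.)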
+
+
+
+# Declare summary merging operation
+summary_op = tf.merge_all_summaries()
+
+# Initialize Variables
+init = tf.initialize_all_variables()
+sess.run(init)
+
+for i in range(generations):
+ batch_indices = np.random.choice(len(x_data_train), size=batch_size)
+ x_batch = x_data_train[batch_indices]
+ y_batch = y_data_train[batch_indices]
+    _, train_loss, summary = sess.run([train_step, l1_loss, summary_op],
+                                      feed_dict={x_graph_input: x_batch,
+                                                 y_graph_input: y_batch})
+
+    test_loss, test_resids = sess.run([l1_loss, residuals], feed_dict={x_graph_input: x_data_test,
+                                                                       y_graph_input: y_data_test})
+
+ if (i+1)%10==0:
+ print('Generation {} of {}. Train Loss: {:.3}, Test Loss: {:.3}.'.format(i+1, generations, train_loss, test_loss))
+
+    # Reuse the writer created above instead of opening a new one each generation
+    summary_writer.add_summary(summary, i)
+ time.sleep(0.5)
+
+# Create a function that returns the linear-fit plot as a PNG byte buffer
+def gen_linear_plot(slope):
+ linear_prediction = x_data * slope
+ plt.plot(x_data, y_data, 'b.', label='data')
+ plt.plot(x_data, linear_prediction, 'r-', linewidth=3, label='predicted line')
+ plt.legend(loc='upper left')
+ buf = io.BytesIO()
+ plt.savefig(buf, format='png')
+ buf.seek(0)
+ return(buf)
+
+# Add image to tensorboard (plot the linear fit!)
+slope = sess.run(m)
+plot_buf = gen_linear_plot(slope[0])
+# Convert PNG buffer to TF image
+image = tf.image.decode_png(plot_buf.getvalue(), channels=4)
+# Add the batch dimension
+image = tf.expand_dims(image, 0)
+# Add image summary
+image_summary_op = tf.image_summary("Linear Plot", image)
+image_summary = sess.run(image_summary_op)
+summary_writer.add_summary(image_summary, i)
+summary_writer.close()
\ No newline at end of file
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/LICENSE b/TensorFlow-Machine-Learning-Cookbook-master/LICENSE
new file mode 100644
index 000000000..117b3501c
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2017 Packt
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/TensorFlow-Machine-Learning-Cookbook-master/README.md b/TensorFlow-Machine-Learning-Cookbook-master/README.md
new file mode 100644
index 000000000..3f445e6ca
--- /dev/null
+++ b/TensorFlow-Machine-Learning-Cookbook-master/README.md
@@ -0,0 +1,38 @@
+# TensorFlow Machine Learning Cookbook
+This is the code repository for [TensorFlow Machine Learning Cookbook](https://www.packtpub.com/big-data-and-business-intelligence/tensorflow-machine-learning-cookbook?utm_source=github&utm_medium=repository&utm_content=9781786462169), published by Packt. It contains all the supporting project files necessary to work through the book from start to finish.
+
+# About the book
+TensorFlow is an open source software library for Machine Intelligence. The independent recipes in this book will teach you how to use TensorFlow for complex data computations and will let you dig deeper and gain more insights into your data than ever before. You’ll work through recipes on training models, model evaluation, sentiment analysis, regression analysis, clustering analysis, artificial neural networks, and deep learning – each using Google’s machine learning library TensorFlow.
+
+This guide starts with the fundamentals of the TensorFlow library which includes variables, matrices, and various data sources. Moving ahead, you will get hands-on experience with Linear Regression techniques with TensorFlow. The next chapters cover important high-level concepts such as neural networks, CNN, RNN, and NLP.
+
+Once you are familiar and comfortable with the TensorFlow ecosystem, the last chapter will show you how to take it to production.
+
+## Instructions and Navigations
+All of the code is organized into folders. Each folder starts with a number followed by the application name. For example, Chapter 03.
+
+The code will look like the following:
+
+ import matplotlib.pyplot as plt
+ import numpy as np
+ import tensorflow as tf
+ from sklearn import datasets
+ from tensorflow.python.framework import ops
+ ops.reset_default_graph()
+
+# Software requirements:
+Python 3, with the following installed Python libraries: TensorFlow, Numpy,
+Scikit-Learn, Requests, and Jupyter. It is compatible with all three major
+operating systems (Mac, Windows, and Linux) and requires no special hardware
+to run the scripts.
+
+## Related Products:
+* [Getting Started with TensorFlow](https://www.packtpub.com/big-data-and-business-intelligence/getting-started-tensorflow?utm_source=github&utm_medium=repository&utm_content=9781786468574)
+
+* [Deep Learning with TensorFlow [Video]](https://www.packtpub.com/big-data-and-business-intelligence/deep-learning-tensorflow-video?utm_source=github&utm_medium=repository&utm_content=9781786464491)
+
+* [Building Machine Learning Systems with TensorFlow [Video]](https://www.packtpub.com/big-data-and-business-intelligence/building-machine-learning-systems-tensorflow-video?utm_source=github&utm_medium=repository&utm_content=9781787281806)
+
+### Suggestions and Feedback
+[Click here](https://docs.google.com/forms/d/e/1FAIpQLSe5qwunkGf6PUvzPirPDtuy1Du5Rlzew23UBp2S-P3wB-GcwQ/viewform) if you have any feedback or suggestions.
+