From 483b543e6127010e87f1f3be1e13a31b6277c706 Mon Sep 17 00:00:00 2001 From: "Hong Jen Yee (PCMan)" Date: Sat, 6 Feb 2016 22:14:15 +0800 Subject: [PATCH] Code cleanup and add missing license files. Include Tornado web framework in the python distribution. --- COPYING.txt => LGPL-2.0.txt | 0 LICENSE.txt | 17 + PSF.txt | 41 + python/tornado/__init__.py | 29 + python/tornado/_locale_data.py | 94 + python/tornado/auth.py | 1139 ++++++ python/tornado/autoreload.py | 339 ++ python/tornado/concurrent.py | 510 +++ python/tornado/curl_httpclient.py | 500 +++ python/tornado/escape.py | 399 ++ python/tornado/gen.py | 1241 +++++++ python/tornado/http1connection.py | 722 ++++ python/tornado/httpclient.py | 659 ++++ python/tornado/httpserver.py | 304 ++ python/tornado/httputil.py | 897 +++++ python/tornado/ioloop.py | 1053 ++++++ python/tornado/iostream.py | 1550 ++++++++ python/tornado/locale.py | 521 +++ python/tornado/locks.py | 512 +++ python/tornado/log.py | 259 ++ python/tornado/netutil.py | 521 +++ python/tornado/options.py | 582 +++ python/tornado/platform/__init__.py | 0 .../__pycache__/__init__.cpython-35.pyc | Bin 0 -> 172 bytes .../__pycache__/asyncio.cpython-35.pyc | Bin 0 -> 7630 bytes .../platform/__pycache__/auto.cpython-35.pyc | Bin 0 -> 1231 bytes .../__pycache__/caresresolver.cpython-35.pyc | Bin 0 -> 3203 bytes .../__pycache__/common.cpython-35.pyc | Bin 0 -> 2515 bytes .../platform/__pycache__/epoll.cpython-35.pyc | Bin 0 -> 824 bytes .../__pycache__/interface.cpython-35.pyc | Bin 0 -> 2360 bytes .../__pycache__/kqueue.cpython-35.pyc | Bin 0 -> 3103 bytes .../platform/__pycache__/posix.cpython-35.pyc | Bin 0 -> 2130 bytes .../__pycache__/select.cpython-35.pyc | Bin 0 -> 2613 bytes .../__pycache__/twisted.cpython-35.pyc | Bin 0 -> 20225 bytes .../__pycache__/windows.cpython-35.pyc | Bin 0 -> 739 bytes python/tornado/platform/asyncio.py | 216 ++ python/tornado/platform/auto.py | 54 + python/tornado/platform/caresresolver.py | 79 + 
python/tornado/platform/common.py | 92 + python/tornado/platform/epoll.py | 26 + python/tornado/platform/interface.py | 63 + python/tornado/platform/kqueue.py | 91 + python/tornado/platform/posix.py | 70 + python/tornado/platform/select.py | 76 + python/tornado/platform/twisted.py | 585 +++ python/tornado/platform/windows.py | 20 + python/tornado/process.py | 357 ++ python/tornado/queues.py | 357 ++ python/tornado/simple_httpclient.py | 549 +++ python/tornado/speedups.cp35-win32.pyd | Bin 0 -> 8704 bytes python/tornado/stack_context.py | 388 ++ python/tornado/tcpclient.py | 183 + python/tornado/tcpserver.py | 273 ++ python/tornado/template.py | 975 +++++ python/tornado/test/__init__.py | 0 python/tornado/test/__main__.py | 14 + .../test/__pycache__/__init__.cpython-35.pyc | Bin 0 -> 168 bytes .../test/__pycache__/__main__.cpython-35.pyc | Bin 0 -> 474 bytes .../__pycache__/asyncio_test.cpython-35.pyc | Bin 0 -> 3648 bytes .../test/__pycache__/auth_test.cpython-35.pyc | Bin 0 -> 25603 bytes .../concurrent_test.cpython-35.pyc | Bin 0 -> 19184 bytes .../curl_httpclient_test.cpython-35.pyc | Bin 0 -> 5855 bytes .../__pycache__/escape_test.cpython-35.pyc | Bin 0 -> 10566 bytes .../test/__pycache__/gen_test.cpython-35.pyc | Bin 0 -> 56247 bytes .../httpclient_test.cpython-35.pyc | Bin 0 -> 27937 bytes .../httpserver_test.cpython-35.pyc | Bin 0 -> 48952 bytes .../__pycache__/httputil_test.cpython-35.pyc | Bin 0 -> 13605 bytes .../__pycache__/import_test.cpython-35.pyc | Bin 0 -> 1739 bytes .../__pycache__/ioloop_test.cpython-35.pyc | Bin 0 -> 28914 bytes .../__pycache__/iostream_test.cpython-35.pyc | Bin 0 -> 40329 bytes .../__pycache__/locale_test.cpython-35.pyc | Bin 0 -> 6213 bytes .../__pycache__/locks_test.cpython-35.pyc | Bin 0 -> 17993 bytes .../test/__pycache__/log_test.cpython-35.pyc | Bin 0 -> 9233 bytes .../__pycache__/netutil_test.cpython-35.pyc | Bin 0 -> 9483 bytes .../__pycache__/options_test.cpython-35.pyc | Bin 0 -> 9783 bytes 
.../__pycache__/process_test.cpython-35.pyc | Bin 0 -> 8794 bytes .../__pycache__/queues_test.cpython-35.pyc | Bin 0 -> 15982 bytes .../resolve_test_helper.cpython-35.pyc | Bin 0 -> 658 bytes .../test/__pycache__/runtests.cpython-35.pyc | Bin 0 -> 6226 bytes .../simple_httpclient_test.cpython-35.pyc | Bin 0 -> 35653 bytes .../stack_context_test.cpython-35.pyc | Bin 0 -> 12543 bytes .../__pycache__/tcpclient_test.cpython-35.pyc | Bin 0 -> 12038 bytes .../__pycache__/tcpserver_test.cpython-35.pyc | Bin 0 -> 1944 bytes .../__pycache__/template_test.cpython-35.pyc | Bin 0 -> 20730 bytes .../__pycache__/testing_test.cpython-35.pyc | Bin 0 -> 12773 bytes .../__pycache__/twisted_test.cpython-35.pyc | Bin 0 -> 28137 bytes .../test/__pycache__/util.cpython-35.pyc | Bin 0 -> 2308 bytes .../test/__pycache__/util_test.cpython-35.pyc | Bin 0 -> 9070 bytes .../test/__pycache__/web_test.cpython-35.pyc | Bin 0 -> 130569 bytes .../__pycache__/websocket_test.cpython-35.pyc | Bin 0 -> 17125 bytes .../test/__pycache__/wsgi_test.cpython-35.pyc | Bin 0 -> 5418 bytes python/tornado/test/asyncio_test.py | 113 + python/tornado/test/auth_test.py | 545 +++ python/tornado/test/concurrent_test.py | 415 +++ .../tornado/test/csv_translations/fr_FR.csv | 1 + python/tornado/test/curl_httpclient_test.py | 124 + python/tornado/test/escape_test.py | 245 ++ python/tornado/test/gen_test.py | 1359 +++++++ .../fr_FR/LC_MESSAGES/tornado_test.mo | Bin 0 -> 665 bytes .../fr_FR/LC_MESSAGES/tornado_test.po | 47 + python/tornado/test/httpclient_test.py | 661 ++++ python/tornado/test/httpserver_test.py | 1094 ++++++ python/tornado/test/httputil_test.py | 373 ++ python/tornado/test/import_test.py | 47 + python/tornado/test/ioloop_test.py | 658 ++++ python/tornado/test/iostream_test.py | 1096 ++++++ python/tornado/test/locale_test.py | 130 + python/tornado/test/locks_test.py | 518 +++ python/tornado/test/log_test.py | 241 ++ python/tornado/test/netutil_test.py | 213 ++ python/tornado/test/options_test.cfg | 5 + 
python/tornado/test/options_test.py | 265 ++ python/tornado/test/process_test.py | 243 ++ python/tornado/test/queues_test.py | 423 +++ python/tornado/test/resolve_test_helper.py | 12 + python/tornado/test/runtests.py | 179 + python/tornado/test/simple_httpclient_test.py | 759 ++++ python/tornado/test/stack_context_test.py | 288 ++ python/tornado/test/static/dir/index.html | 1 + python/tornado/test/static/robots.txt | 2 + python/tornado/test/static/sample.xml | 23 + python/tornado/test/static/sample.xml.bz2 | Bin 0 -> 285 bytes python/tornado/test/static/sample.xml.gz | Bin 0 -> 264 bytes python/tornado/test/static_foo.txt | 2 + python/tornado/test/tcpclient_test.py | 280 ++ python/tornado/test/tcpserver_test.py | 39 + python/tornado/test/template_test.py | 485 +++ python/tornado/test/templates/utf8.html | 1 + python/tornado/test/test.crt | 15 + python/tornado/test/test.key | 16 + python/tornado/test/testing_test.py | 278 ++ python/tornado/test/twisted_test.py | 732 ++++ python/tornado/test/util.py | 78 + python/tornado/test/util_test.py | 201 + python/tornado/test/web_test.py | 2765 ++++++++++++++ python/tornado/test/websocket_test.py | 419 +++ python/tornado/test/wsgi_test.py | 100 + python/tornado/testing.py | 735 ++++ python/tornado/util.py | 387 ++ python/tornado/web.py | 3279 +++++++++++++++++ python/tornado/websocket.py | 1063 ++++++ python/tornado/wsgi.py | 358 ++ .../input_methods/chewing/chewing_config.py | 6 + server/input_methods/chewing/chewing_ime.py | 5 +- 144 files changed, 37677 insertions(+), 4 deletions(-) rename COPYING.txt => LGPL-2.0.txt (100%) create mode 100644 LICENSE.txt create mode 100644 PSF.txt create mode 100644 python/tornado/__init__.py create mode 100644 python/tornado/_locale_data.py create mode 100644 python/tornado/auth.py create mode 100644 python/tornado/autoreload.py create mode 100644 python/tornado/concurrent.py create mode 100644 python/tornado/curl_httpclient.py create mode 100644 python/tornado/escape.py create mode 100644 
python/tornado/gen.py create mode 100644 python/tornado/http1connection.py create mode 100644 python/tornado/httpclient.py create mode 100644 python/tornado/httpserver.py create mode 100644 python/tornado/httputil.py create mode 100644 python/tornado/ioloop.py create mode 100644 python/tornado/iostream.py create mode 100644 python/tornado/locale.py create mode 100644 python/tornado/locks.py create mode 100644 python/tornado/log.py create mode 100644 python/tornado/netutil.py create mode 100644 python/tornado/options.py create mode 100644 python/tornado/platform/__init__.py create mode 100644 python/tornado/platform/__pycache__/__init__.cpython-35.pyc create mode 100644 python/tornado/platform/__pycache__/asyncio.cpython-35.pyc create mode 100644 python/tornado/platform/__pycache__/auto.cpython-35.pyc create mode 100644 python/tornado/platform/__pycache__/caresresolver.cpython-35.pyc create mode 100644 python/tornado/platform/__pycache__/common.cpython-35.pyc create mode 100644 python/tornado/platform/__pycache__/epoll.cpython-35.pyc create mode 100644 python/tornado/platform/__pycache__/interface.cpython-35.pyc create mode 100644 python/tornado/platform/__pycache__/kqueue.cpython-35.pyc create mode 100644 python/tornado/platform/__pycache__/posix.cpython-35.pyc create mode 100644 python/tornado/platform/__pycache__/select.cpython-35.pyc create mode 100644 python/tornado/platform/__pycache__/twisted.cpython-35.pyc create mode 100644 python/tornado/platform/__pycache__/windows.cpython-35.pyc create mode 100644 python/tornado/platform/asyncio.py create mode 100644 python/tornado/platform/auto.py create mode 100644 python/tornado/platform/caresresolver.py create mode 100644 python/tornado/platform/common.py create mode 100644 python/tornado/platform/epoll.py create mode 100644 python/tornado/platform/interface.py create mode 100644 python/tornado/platform/kqueue.py create mode 100644 python/tornado/platform/posix.py create mode 100644 python/tornado/platform/select.py 
create mode 100644 python/tornado/platform/twisted.py create mode 100644 python/tornado/platform/windows.py create mode 100644 python/tornado/process.py create mode 100644 python/tornado/queues.py create mode 100644 python/tornado/simple_httpclient.py create mode 100644 python/tornado/speedups.cp35-win32.pyd create mode 100644 python/tornado/stack_context.py create mode 100644 python/tornado/tcpclient.py create mode 100644 python/tornado/tcpserver.py create mode 100644 python/tornado/template.py create mode 100644 python/tornado/test/__init__.py create mode 100644 python/tornado/test/__main__.py create mode 100644 python/tornado/test/__pycache__/__init__.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/__main__.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/asyncio_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/auth_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/concurrent_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/curl_httpclient_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/escape_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/gen_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/httpclient_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/httpserver_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/httputil_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/import_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/ioloop_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/iostream_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/locale_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/locks_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/log_test.cpython-35.pyc create mode 100644 
python/tornado/test/__pycache__/netutil_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/options_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/process_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/queues_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/resolve_test_helper.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/runtests.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/simple_httpclient_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/stack_context_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/tcpclient_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/tcpserver_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/template_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/testing_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/twisted_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/util.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/util_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/web_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/websocket_test.cpython-35.pyc create mode 100644 python/tornado/test/__pycache__/wsgi_test.cpython-35.pyc create mode 100644 python/tornado/test/asyncio_test.py create mode 100644 python/tornado/test/auth_test.py create mode 100644 python/tornado/test/concurrent_test.py create mode 100644 python/tornado/test/csv_translations/fr_FR.csv create mode 100644 python/tornado/test/curl_httpclient_test.py create mode 100644 python/tornado/test/escape_test.py create mode 100644 python/tornado/test/gen_test.py create mode 100644 python/tornado/test/gettext_translations/fr_FR/LC_MESSAGES/tornado_test.mo create mode 100644 python/tornado/test/gettext_translations/fr_FR/LC_MESSAGES/tornado_test.po 
create mode 100644 python/tornado/test/httpclient_test.py create mode 100644 python/tornado/test/httpserver_test.py create mode 100644 python/tornado/test/httputil_test.py create mode 100644 python/tornado/test/import_test.py create mode 100644 python/tornado/test/ioloop_test.py create mode 100644 python/tornado/test/iostream_test.py create mode 100644 python/tornado/test/locale_test.py create mode 100644 python/tornado/test/locks_test.py create mode 100644 python/tornado/test/log_test.py create mode 100644 python/tornado/test/netutil_test.py create mode 100644 python/tornado/test/options_test.cfg create mode 100644 python/tornado/test/options_test.py create mode 100644 python/tornado/test/process_test.py create mode 100644 python/tornado/test/queues_test.py create mode 100644 python/tornado/test/resolve_test_helper.py create mode 100644 python/tornado/test/runtests.py create mode 100644 python/tornado/test/simple_httpclient_test.py create mode 100644 python/tornado/test/stack_context_test.py create mode 100644 python/tornado/test/static/dir/index.html create mode 100644 python/tornado/test/static/robots.txt create mode 100644 python/tornado/test/static/sample.xml create mode 100644 python/tornado/test/static/sample.xml.bz2 create mode 100644 python/tornado/test/static/sample.xml.gz create mode 100644 python/tornado/test/static_foo.txt create mode 100644 python/tornado/test/tcpclient_test.py create mode 100644 python/tornado/test/tcpserver_test.py create mode 100644 python/tornado/test/template_test.py create mode 100644 python/tornado/test/templates/utf8.html create mode 100644 python/tornado/test/test.crt create mode 100644 python/tornado/test/test.key create mode 100644 python/tornado/test/testing_test.py create mode 100644 python/tornado/test/twisted_test.py create mode 100644 python/tornado/test/util.py create mode 100644 python/tornado/test/util_test.py create mode 100644 python/tornado/test/web_test.py create mode 100644 python/tornado/test/websocket_test.py 
create mode 100644 python/tornado/test/wsgi_test.py create mode 100644 python/tornado/testing.py create mode 100644 python/tornado/util.py create mode 100644 python/tornado/web.py create mode 100644 python/tornado/websocket.py create mode 100644 python/tornado/wsgi.py diff --git a/COPYING.txt b/LGPL-2.0.txt similarity index 100% rename from COPYING.txt rename to LGPL-2.0.txt diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 000000000..2e8f2efe8 --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,17 @@ +PIME is composed of several parts with different licenses. + +Code in these directories are licensed under LGPL 2.0: + cmake + installer + libIME + libpipe + PIME + PIMELauncher + server + +The following components are from other projects: + python: Embeddable version of python 3.5 is licensed under PSF. + + python/tornado: Tornado web framework licensed under Apache 2.0. + + rapidjson: C++ library used to parse and write json data licensed under BSD. diff --git a/PSF.txt b/PSF.txt new file mode 100644 index 000000000..af7474234 --- /dev/null +++ b/PSF.txt @@ -0,0 +1,41 @@ +1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and + the Individual or Organization ("Licensee") accessing and otherwise using Python + 3.5.1 software in source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby + grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, + analyze, test, perform and/or display publicly, prepare derivative works, + distribute, and otherwise use Python 3.5.1 alone or in any derivative + version, provided, however, that PSF's License Agreement and PSF's notice of + copyright, i.e., "Copyright c 2001-2016 Python Software Foundation; All Rights + Reserved" are retained in Python 3.5.1 alone or in any derivative version + prepared by Licensee. + +3. 
In the event Licensee prepares a derivative work that is based on or + incorporates Python 3.5.1 or any part thereof, and wants to make the + derivative work available to others as provided herein, then Licensee hereby + agrees to include in any such work a brief summary of the changes made to Python + 3.5.1. + +4. PSF is making Python 3.5.1 available to Licensee on an "AS IS" basis. + PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF + EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR + WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE + USE OF PYTHON 3.5.1 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON 3.5.1 + FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF + MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 3.5.1, OR ANY DERIVATIVE + THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material breach of + its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any relationship + of agency, partnership, or joint venture between PSF and Licensee. This License + Agreement does not grant permission to use PSF trademarks or trade name in a + trademark sense to endorse or promote products or services of Licensee, or any + third party. + +8. By copying, installing or otherwise using Python 3.5.1, Licensee agrees + to be bound by the terms and conditions of this License Agreement. \ No newline at end of file diff --git a/python/tornado/__init__.py b/python/tornado/__init__.py new file mode 100644 index 000000000..85bacc7e9 --- /dev/null +++ b/python/tornado/__init__.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""The Tornado web server and tools.""" + +from __future__ import absolute_import, division, print_function, with_statement + +# version is a human-readable version number. + +# version_info is a four-tuple for programmatic comparison. The first +# three numbers are the components of the version number. The fourth +# is zero for an official release, positive for a development branch, +# or negative for a release candidate or beta (after the base version +# number has been incremented) +version = "4.3" +version_info = (4, 3, 0, 0) diff --git a/python/tornado/_locale_data.py b/python/tornado/_locale_data.py new file mode 100644 index 000000000..47c1df618 --- /dev/null +++ b/python/tornado/_locale_data.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python +# coding: utf-8 +# +# Copyright 2012 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +"""Data used by the tornado.locale module.""" + +from __future__ import absolute_import, division, print_function, with_statement + +# NOTE: This file is supposed to contain unicode strings, which is +# exactly what you'd get with e.g. u"Español" in most python versions. +# However, Python 3.2 doesn't support the u"" syntax, so we use a u() +# function instead. tornado.util.u cannot be used because it doesn't +# support non-ascii characters on python 2. +# When we drop support for Python 3.2, we can remove the parens +# and make these plain unicode strings. +from tornado.escape import to_unicode as u + +LOCALE_NAMES = { + "af_ZA": {"name_en": u("Afrikaans"), "name": u("Afrikaans")}, + "am_ET": {"name_en": u("Amharic"), "name": u("አማርኛ")}, + "ar_AR": {"name_en": u("Arabic"), "name": u("العربية")}, + "bg_BG": {"name_en": u("Bulgarian"), "name": u("Български")}, + "bn_IN": {"name_en": u("Bengali"), "name": u("বাংলা")}, + "bs_BA": {"name_en": u("Bosnian"), "name": u("Bosanski")}, + "ca_ES": {"name_en": u("Catalan"), "name": u("Català")}, + "cs_CZ": {"name_en": u("Czech"), "name": u("Čeština")}, + "cy_GB": {"name_en": u("Welsh"), "name": u("Cymraeg")}, + "da_DK": {"name_en": u("Danish"), "name": u("Dansk")}, + "de_DE": {"name_en": u("German"), "name": u("Deutsch")}, + "el_GR": {"name_en": u("Greek"), "name": u("Ελληνικά")}, + "en_GB": {"name_en": u("English (UK)"), "name": u("English (UK)")}, + "en_US": {"name_en": u("English (US)"), "name": u("English (US)")}, + "es_ES": {"name_en": u("Spanish (Spain)"), "name": u("Español (España)")}, + "es_LA": {"name_en": u("Spanish"), "name": u("Español")}, + "et_EE": {"name_en": u("Estonian"), "name": u("Eesti")}, + "eu_ES": {"name_en": u("Basque"), "name": u("Euskara")}, + "fa_IR": {"name_en": u("Persian"), "name": u("فارسی")}, + "fi_FI": {"name_en": u("Finnish"), "name": u("Suomi")}, + "fr_CA": {"name_en": u("French (Canada)"), "name": u("Français (Canada)")}, + "fr_FR": {"name_en": u("French"), "name": u("Français")}, + 
"ga_IE": {"name_en": u("Irish"), "name": u("Gaeilge")}, + "gl_ES": {"name_en": u("Galician"), "name": u("Galego")}, + "he_IL": {"name_en": u("Hebrew"), "name": u("עברית")}, + "hi_IN": {"name_en": u("Hindi"), "name": u("हिन्दी")}, + "hr_HR": {"name_en": u("Croatian"), "name": u("Hrvatski")}, + "hu_HU": {"name_en": u("Hungarian"), "name": u("Magyar")}, + "id_ID": {"name_en": u("Indonesian"), "name": u("Bahasa Indonesia")}, + "is_IS": {"name_en": u("Icelandic"), "name": u("Íslenska")}, + "it_IT": {"name_en": u("Italian"), "name": u("Italiano")}, + "ja_JP": {"name_en": u("Japanese"), "name": u("日本語")}, + "ko_KR": {"name_en": u("Korean"), "name": u("한국어")}, + "lt_LT": {"name_en": u("Lithuanian"), "name": u("Lietuvių")}, + "lv_LV": {"name_en": u("Latvian"), "name": u("Latviešu")}, + "mk_MK": {"name_en": u("Macedonian"), "name": u("Македонски")}, + "ml_IN": {"name_en": u("Malayalam"), "name": u("മലയാളം")}, + "ms_MY": {"name_en": u("Malay"), "name": u("Bahasa Melayu")}, + "nb_NO": {"name_en": u("Norwegian (bokmal)"), "name": u("Norsk (bokmål)")}, + "nl_NL": {"name_en": u("Dutch"), "name": u("Nederlands")}, + "nn_NO": {"name_en": u("Norwegian (nynorsk)"), "name": u("Norsk (nynorsk)")}, + "pa_IN": {"name_en": u("Punjabi"), "name": u("ਪੰਜਾਬੀ")}, + "pl_PL": {"name_en": u("Polish"), "name": u("Polski")}, + "pt_BR": {"name_en": u("Portuguese (Brazil)"), "name": u("Português (Brasil)")}, + "pt_PT": {"name_en": u("Portuguese (Portugal)"), "name": u("Português (Portugal)")}, + "ro_RO": {"name_en": u("Romanian"), "name": u("Română")}, + "ru_RU": {"name_en": u("Russian"), "name": u("Русский")}, + "sk_SK": {"name_en": u("Slovak"), "name": u("Slovenčina")}, + "sl_SI": {"name_en": u("Slovenian"), "name": u("Slovenščina")}, + "sq_AL": {"name_en": u("Albanian"), "name": u("Shqip")}, + "sr_RS": {"name_en": u("Serbian"), "name": u("Српски")}, + "sv_SE": {"name_en": u("Swedish"), "name": u("Svenska")}, + "sw_KE": {"name_en": u("Swahili"), "name": u("Kiswahili")}, + "ta_IN": {"name_en": 
u("Tamil"), "name": u("தமிழ்")}, + "te_IN": {"name_en": u("Telugu"), "name": u("తెలుగు")}, + "th_TH": {"name_en": u("Thai"), "name": u("ภาษาไทย")}, + "tl_PH": {"name_en": u("Filipino"), "name": u("Filipino")}, + "tr_TR": {"name_en": u("Turkish"), "name": u("Türkçe")}, + "uk_UA": {"name_en": u("Ukraini "), "name": u("Українська")}, + "vi_VN": {"name_en": u("Vietnamese"), "name": u("Tiếng Việt")}, + "zh_CN": {"name_en": u("Chinese (Simplified)"), "name": u("中文(简体)")}, + "zh_TW": {"name_en": u("Chinese (Traditional)"), "name": u("中文(繁體)")}, +} diff --git a/python/tornado/auth.py b/python/tornado/auth.py new file mode 100644 index 000000000..ff7172aa0 --- /dev/null +++ b/python/tornado/auth.py @@ -0,0 +1,1139 @@ +#!/usr/bin/env python +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""This module contains implementations of various third-party +authentication schemes. + +All the classes in this file are class mixins designed to be used with +the `tornado.web.RequestHandler` class. They are used in two ways: + +* On a login handler, use methods such as ``authenticate_redirect()``, + ``authorize_redirect()``, and ``get_authenticated_user()`` to + establish the user's identity and store authentication tokens to your + database and/or cookies. +* In non-login handlers, use methods such as ``facebook_request()`` + or ``twitter_request()`` to use the authentication tokens to make + requests to the respective services. 
+ +They all take slightly different arguments due to the fact all these +services implement authentication and authorization slightly differently. +See the individual service classes below for complete documentation. + +Example usage for Google OAuth: + +.. testcode:: + + class GoogleOAuth2LoginHandler(tornado.web.RequestHandler, + tornado.auth.GoogleOAuth2Mixin): + @tornado.gen.coroutine + def get(self): + if self.get_argument('code', False): + user = yield self.get_authenticated_user( + redirect_uri='http://your.site.com/auth/google', + code=self.get_argument('code')) + # Save the user with e.g. set_secure_cookie + else: + yield self.authorize_redirect( + redirect_uri='http://your.site.com/auth/google', + client_id=self.settings['google_oauth']['key'], + scope=['profile', 'email'], + response_type='code', + extra_params={'approval_prompt': 'auto'}) + +.. testoutput:: + :hide: + + +.. versionchanged:: 4.0 + All of the callback interfaces in this module are now guaranteed + to run their callback with an argument of ``None`` on error. + Previously some functions would do this while others would simply + terminate the request on their own. This change also ensures that + errors are more consistently reported through the ``Future`` interfaces. 
+""" + +from __future__ import absolute_import, division, print_function, with_statement + +import base64 +import binascii +import functools +import hashlib +import hmac +import time +import uuid + +from tornado.concurrent import TracebackFuture, return_future, chain_future +from tornado import gen +from tornado import httpclient +from tornado import escape +from tornado.httputil import url_concat +from tornado.log import gen_log +from tornado.stack_context import ExceptionStackContext +from tornado.util import u, unicode_type, ArgReplacer + +try: + import urlparse # py2 +except ImportError: + import urllib.parse as urlparse # py3 + +try: + import urllib.parse as urllib_parse # py3 +except ImportError: + import urllib as urllib_parse # py2 + +try: + long # py2 +except NameError: + long = int # py3 + + +class AuthError(Exception): + pass + + +def _auth_future_to_callback(callback, future): + try: + result = future.result() + except AuthError as e: + gen_log.warning(str(e)) + result = None + callback(result) + + +def _auth_return_future(f): + """Similar to tornado.concurrent.return_future, but uses the auth + module's legacy callback interface. + + Note that when using this decorator the ``callback`` parameter + inside the function will actually be a future. + """ + replacer = ArgReplacer(f, 'callback') + + @functools.wraps(f) + def wrapper(*args, **kwargs): + future = TracebackFuture() + callback, args, kwargs = replacer.replace(future, args, kwargs) + if callback is not None: + future.add_done_callback( + functools.partial(_auth_future_to_callback, callback)) + + def handle_exception(typ, value, tb): + if future.done(): + return False + else: + future.set_exc_info((typ, value, tb)) + return True + with ExceptionStackContext(handle_exception): + f(*args, **kwargs) + return future + return wrapper + + +class OpenIdMixin(object): + """Abstract implementation of OpenID and Attribute Exchange. 
+ + Class attributes: + + * ``_OPENID_ENDPOINT``: the identity provider's URI. + """ + @return_future + def authenticate_redirect(self, callback_uri=None, + ax_attrs=["name", "email", "language", "username"], + callback=None): + """Redirects to the authentication URL for this service. + + After authentication, the service will redirect back to the given + callback URI with additional parameters including ``openid.mode``. + + We request the given attributes for the authenticated user by + default (name, email, language, and username). If you don't need + all those attributes for your app, you can request fewer with + the ax_attrs keyword argument. + + .. versionchanged:: 3.1 + Returns a `.Future` and takes an optional callback. These are + not strictly necessary as this method is synchronous, + but they are supplied for consistency with + `OAuthMixin.authorize_redirect`. + """ + callback_uri = callback_uri or self.request.uri + args = self._openid_args(callback_uri, ax_attrs=ax_attrs) + self.redirect(self._OPENID_ENDPOINT + "?" + urllib_parse.urlencode(args)) + callback() + + @_auth_return_future + def get_authenticated_user(self, callback, http_client=None): + """Fetches the authenticated user data upon redirect. + + This method should be called by the handler that receives the + redirect from the `authenticate_redirect()` method (which is + often the same as the one that calls it; in that case you would + call `get_authenticated_user` if the ``openid.mode`` parameter + is present and `authenticate_redirect` if it is not). + + The result of this method will generally be used to set a cookie. 
+ """ + # Verify the OpenID response via direct request to the OP + args = dict((k, v[-1]) for k, v in self.request.arguments.items()) + args["openid.mode"] = u("check_authentication") + url = self._OPENID_ENDPOINT + if http_client is None: + http_client = self.get_auth_http_client() + http_client.fetch(url, functools.partial( + self._on_authentication_verified, callback), + method="POST", body=urllib_parse.urlencode(args)) + + def _openid_args(self, callback_uri, ax_attrs=[], oauth_scope=None): + url = urlparse.urljoin(self.request.full_url(), callback_uri) + args = { + "openid.ns": "http://specs.openid.net/auth/2.0", + "openid.claimed_id": + "http://specs.openid.net/auth/2.0/identifier_select", + "openid.identity": + "http://specs.openid.net/auth/2.0/identifier_select", + "openid.return_to": url, + "openid.realm": urlparse.urljoin(url, '/'), + "openid.mode": "checkid_setup", + } + if ax_attrs: + args.update({ + "openid.ns.ax": "http://openid.net/srv/ax/1.0", + "openid.ax.mode": "fetch_request", + }) + ax_attrs = set(ax_attrs) + required = [] + if "name" in ax_attrs: + ax_attrs -= set(["name", "firstname", "fullname", "lastname"]) + required += ["firstname", "fullname", "lastname"] + args.update({ + "openid.ax.type.firstname": + "http://axschema.org/namePerson/first", + "openid.ax.type.fullname": + "http://axschema.org/namePerson", + "openid.ax.type.lastname": + "http://axschema.org/namePerson/last", + }) + known_attrs = { + "email": "http://axschema.org/contact/email", + "language": "http://axschema.org/pref/language", + "username": "http://axschema.org/namePerson/friendly", + } + for name in ax_attrs: + args["openid.ax.type." 
+ name] = known_attrs[name] + required.append(name) + args["openid.ax.required"] = ",".join(required) + if oauth_scope: + args.update({ + "openid.ns.oauth": + "http://specs.openid.net/extensions/oauth/1.0", + "openid.oauth.consumer": self.request.host.split(":")[0], + "openid.oauth.scope": oauth_scope, + }) + return args + + def _on_authentication_verified(self, future, response): + if response.error or b"is_valid:true" not in response.body: + future.set_exception(AuthError( + "Invalid OpenID response: %s" % (response.error or + response.body))) + return + + # Make sure we got back at least an email from attribute exchange + ax_ns = None + for name in self.request.arguments: + if name.startswith("openid.ns.") and \ + self.get_argument(name) == u("http://openid.net/srv/ax/1.0"): + ax_ns = name[10:] + break + + def get_ax_arg(uri): + if not ax_ns: + return u("") + prefix = "openid." + ax_ns + ".type." + ax_name = None + for name in self.request.arguments.keys(): + if self.get_argument(name) == uri and name.startswith(prefix): + part = name[len(prefix):] + ax_name = "openid." + ax_ns + ".value." 
+ part + break + if not ax_name: + return u("") + return self.get_argument(ax_name, u("")) + + email = get_ax_arg("http://axschema.org/contact/email") + name = get_ax_arg("http://axschema.org/namePerson") + first_name = get_ax_arg("http://axschema.org/namePerson/first") + last_name = get_ax_arg("http://axschema.org/namePerson/last") + username = get_ax_arg("http://axschema.org/namePerson/friendly") + locale = get_ax_arg("http://axschema.org/pref/language").lower() + user = dict() + name_parts = [] + if first_name: + user["first_name"] = first_name + name_parts.append(first_name) + if last_name: + user["last_name"] = last_name + name_parts.append(last_name) + if name: + user["name"] = name + elif name_parts: + user["name"] = u(" ").join(name_parts) + elif email: + user["name"] = email.split("@")[0] + if email: + user["email"] = email + if locale: + user["locale"] = locale + if username: + user["username"] = username + claimed_id = self.get_argument("openid.claimed_id", None) + if claimed_id: + user["claimed_id"] = claimed_id + future.set_result(user) + + def get_auth_http_client(self): + """Returns the `.AsyncHTTPClient` instance to be used for auth requests. + + May be overridden by subclasses to use an HTTP client other than + the default. + """ + return httpclient.AsyncHTTPClient() + + +class OAuthMixin(object): + """Abstract implementation of OAuth 1.0 and 1.0a. + + See `TwitterMixin` below for an example implementation. + + Class attributes: + + * ``_OAUTH_AUTHORIZE_URL``: The service's OAuth authorization url. + * ``_OAUTH_ACCESS_TOKEN_URL``: The service's OAuth access token url. + * ``_OAUTH_VERSION``: May be either "1.0" or "1.0a". + * ``_OAUTH_NO_CALLBACKS``: Set this to True if the service requires + advance registration of callbacks. + + Subclasses must also override the `_oauth_get_user_future` and + `_oauth_consumer_token` methods. 
+ """ + @return_future + def authorize_redirect(self, callback_uri=None, extra_params=None, + http_client=None, callback=None): + """Redirects the user to obtain OAuth authorization for this service. + + The ``callback_uri`` may be omitted if you have previously + registered a callback URI with the third-party service. For + some services (including Friendfeed), you must use a + previously-registered callback URI and cannot specify a + callback via this method. + + This method sets a cookie called ``_oauth_request_token`` which is + subsequently used (and cleared) in `get_authenticated_user` for + security purposes. + + Note that this method is asynchronous, although it calls + `.RequestHandler.finish` for you so it may not be necessary + to pass a callback or use the `.Future` it returns. However, + if this method is called from a function decorated with + `.gen.coroutine`, you must call it with ``yield`` to keep the + response from being closed prematurely. + + .. versionchanged:: 3.1 + Now returns a `.Future` and takes an optional callback, for + compatibility with `.gen.coroutine`. + """ + if callback_uri and getattr(self, "_OAUTH_NO_CALLBACKS", False): + raise Exception("This service does not support oauth_callback") + if http_client is None: + http_client = self.get_auth_http_client() + if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a": + http_client.fetch( + self._oauth_request_token_url(callback_uri=callback_uri, + extra_params=extra_params), + functools.partial( + self._on_request_token, + self._OAUTH_AUTHORIZE_URL, + callback_uri, + callback)) + else: + http_client.fetch( + self._oauth_request_token_url(), + functools.partial( + self._on_request_token, self._OAUTH_AUTHORIZE_URL, + callback_uri, + callback)) + + @_auth_return_future + def get_authenticated_user(self, callback, http_client=None): + """Gets the OAuth authorized user and access token. 
+ + This method should be called from the handler for your + OAuth callback URL to complete the registration process. We run the + callback with the authenticated user dictionary. This dictionary + will contain an ``access_key`` which can be used to make authorized + requests to this service on behalf of the user. The dictionary will + also contain other fields such as ``name``, depending on the service + used. + """ + future = callback + request_key = escape.utf8(self.get_argument("oauth_token")) + oauth_verifier = self.get_argument("oauth_verifier", None) + request_cookie = self.get_cookie("_oauth_request_token") + if not request_cookie: + future.set_exception(AuthError( + "Missing OAuth request token cookie")) + return + self.clear_cookie("_oauth_request_token") + cookie_key, cookie_secret = [base64.b64decode(escape.utf8(i)) for i in request_cookie.split("|")] + if cookie_key != request_key: + future.set_exception(AuthError( + "Request token does not match cookie")) + return + token = dict(key=cookie_key, secret=cookie_secret) + if oauth_verifier: + token["verifier"] = oauth_verifier + if http_client is None: + http_client = self.get_auth_http_client() + http_client.fetch(self._oauth_access_token_url(token), + functools.partial(self._on_access_token, callback)) + + def _oauth_request_token_url(self, callback_uri=None, extra_params=None): + consumer_token = self._oauth_consumer_token() + url = self._OAUTH_REQUEST_TOKEN_URL + args = dict( + oauth_consumer_key=escape.to_basestring(consumer_token["key"]), + oauth_signature_method="HMAC-SHA1", + oauth_timestamp=str(int(time.time())), + oauth_nonce=escape.to_basestring(binascii.b2a_hex(uuid.uuid4().bytes)), + oauth_version="1.0", + ) + if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a": + if callback_uri == "oob": + args["oauth_callback"] = "oob" + elif callback_uri: + args["oauth_callback"] = urlparse.urljoin( + self.request.full_url(), callback_uri) + if extra_params: + args.update(extra_params) + signature = 
_oauth10a_signature(consumer_token, "GET", url, args) + else: + signature = _oauth_signature(consumer_token, "GET", url, args) + + args["oauth_signature"] = signature + return url + "?" + urllib_parse.urlencode(args) + + def _on_request_token(self, authorize_url, callback_uri, callback, + response): + if response.error: + raise Exception("Could not get request token: %s" % response.error) + request_token = _oauth_parse_response(response.body) + data = (base64.b64encode(escape.utf8(request_token["key"])) + b"|" + + base64.b64encode(escape.utf8(request_token["secret"]))) + self.set_cookie("_oauth_request_token", data) + args = dict(oauth_token=request_token["key"]) + if callback_uri == "oob": + self.finish(authorize_url + "?" + urllib_parse.urlencode(args)) + callback() + return + elif callback_uri: + args["oauth_callback"] = urlparse.urljoin( + self.request.full_url(), callback_uri) + self.redirect(authorize_url + "?" + urllib_parse.urlencode(args)) + callback() + + def _oauth_access_token_url(self, request_token): + consumer_token = self._oauth_consumer_token() + url = self._OAUTH_ACCESS_TOKEN_URL + args = dict( + oauth_consumer_key=escape.to_basestring(consumer_token["key"]), + oauth_token=escape.to_basestring(request_token["key"]), + oauth_signature_method="HMAC-SHA1", + oauth_timestamp=str(int(time.time())), + oauth_nonce=escape.to_basestring(binascii.b2a_hex(uuid.uuid4().bytes)), + oauth_version="1.0", + ) + if "verifier" in request_token: + args["oauth_verifier"] = request_token["verifier"] + + if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a": + signature = _oauth10a_signature(consumer_token, "GET", url, args, + request_token) + else: + signature = _oauth_signature(consumer_token, "GET", url, args, + request_token) + + args["oauth_signature"] = signature + return url + "?" 
+ urllib_parse.urlencode(args) + + def _on_access_token(self, future, response): + if response.error: + future.set_exception(AuthError("Could not fetch access token")) + return + + access_token = _oauth_parse_response(response.body) + self._oauth_get_user_future(access_token).add_done_callback( + functools.partial(self._on_oauth_get_user, access_token, future)) + + def _oauth_consumer_token(self): + """Subclasses must override this to return their OAuth consumer keys. + + The return value should be a `dict` with keys ``key`` and ``secret``. + """ + raise NotImplementedError() + + @return_future + def _oauth_get_user_future(self, access_token, callback): + """Subclasses must override this to get basic information about the + user. + + Should return a `.Future` whose result is a dictionary + containing information about the user, which may have been + retrieved by using ``access_token`` to make a request to the + service. + + The access token will be added to the returned dictionary to make + the result of `get_authenticated_user`. + + For backwards compatibility, the callback-based ``_oauth_get_user`` + method is also supported. + """ + # By default, call the old-style _oauth_get_user, but new code + # should override this method instead. + self._oauth_get_user(access_token, callback) + + def _oauth_get_user(self, access_token, callback): + raise NotImplementedError() + + def _on_oauth_get_user(self, access_token, future, user_future): + if user_future.exception() is not None: + future.set_exception(user_future.exception()) + return + user = user_future.result() + if not user: + future.set_exception(AuthError("Error getting user")) + return + user["access_token"] = access_token + future.set_result(user) + + def _oauth_request_parameters(self, url, access_token, parameters={}, + method="GET"): + """Returns the OAuth parameters as a dict for the given request. 
+ + parameters should include all POST arguments and query string arguments + that will be sent with the request. + """ + consumer_token = self._oauth_consumer_token() + base_args = dict( + oauth_consumer_key=escape.to_basestring(consumer_token["key"]), + oauth_token=escape.to_basestring(access_token["key"]), + oauth_signature_method="HMAC-SHA1", + oauth_timestamp=str(int(time.time())), + oauth_nonce=escape.to_basestring(binascii.b2a_hex(uuid.uuid4().bytes)), + oauth_version="1.0", + ) + args = {} + args.update(base_args) + args.update(parameters) + if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a": + signature = _oauth10a_signature(consumer_token, method, url, args, + access_token) + else: + signature = _oauth_signature(consumer_token, method, url, args, + access_token) + base_args["oauth_signature"] = escape.to_basestring(signature) + return base_args + + def get_auth_http_client(self): + """Returns the `.AsyncHTTPClient` instance to be used for auth requests. + + May be overridden by subclasses to use an HTTP client other than + the default. + """ + return httpclient.AsyncHTTPClient() + + +class OAuth2Mixin(object): + """Abstract implementation of OAuth 2.0. + + See `FacebookGraphMixin` or `GoogleOAuth2Mixin` below for example + implementations. + + Class attributes: + + * ``_OAUTH_AUTHORIZE_URL``: The service's authorization url. + * ``_OAUTH_ACCESS_TOKEN_URL``: The service's access token url. + """ + @return_future + def authorize_redirect(self, redirect_uri=None, client_id=None, + client_secret=None, extra_params=None, + callback=None, scope=None, response_type="code"): + """Redirects the user to obtain OAuth authorization for this service. + + Some providers require that you register a redirect URL with + your application instead of passing one via this method. You + should call this method to log the user in, and then call + ``get_authenticated_user`` in the handler for your + redirect URL to complete the authorization process. + + .. 
versionchanged:: 3.1 + Returns a `.Future` and takes an optional callback. These are + not strictly necessary as this method is synchronous, + but they are supplied for consistency with + `OAuthMixin.authorize_redirect`. + """ + args = { + "redirect_uri": redirect_uri, + "client_id": client_id, + "response_type": response_type + } + if extra_params: + args.update(extra_params) + if scope: + args['scope'] = ' '.join(scope) + self.redirect( + url_concat(self._OAUTH_AUTHORIZE_URL, args)) + callback() + + def _oauth_request_token_url(self, redirect_uri=None, client_id=None, + client_secret=None, code=None, + extra_params=None): + url = self._OAUTH_ACCESS_TOKEN_URL + args = dict( + redirect_uri=redirect_uri, + code=code, + client_id=client_id, + client_secret=client_secret, + ) + if extra_params: + args.update(extra_params) + return url_concat(url, args) + + @_auth_return_future + def oauth2_request(self, url, callback, access_token=None, + post_args=None, **args): + """Fetches the given URL auth an OAuth2 access token. + + If the request is a POST, ``post_args`` should be provided. Query + string arguments should be given as keyword arguments. + + Example usage: + + ..testcode:: + + class MainHandler(tornado.web.RequestHandler, + tornado.auth.FacebookGraphMixin): + @tornado.web.authenticated + @tornado.gen.coroutine + def get(self): + new_entry = yield self.oauth2_request( + "https://graph.facebook.com/me/feed", + post_args={"message": "I am posting from my Tornado application!"}, + access_token=self.current_user["access_token"]) + + if not new_entry: + # Call failed; perhaps missing permission? + yield self.authorize_redirect() + return + self.finish("Posted a message!") + + .. testoutput:: + :hide: + + .. versionadded:: 4.3 + """ + all_args = {} + if access_token: + all_args["access_token"] = access_token + all_args.update(args) + + if all_args: + url += "?" 
+ urllib_parse.urlencode(all_args) + callback = functools.partial(self._on_oauth2_request, callback) + http = self.get_auth_http_client() + if post_args is not None: + http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args), + callback=callback) + else: + http.fetch(url, callback=callback) + + def _on_oauth2_request(self, future, response): + if response.error: + future.set_exception(AuthError("Error response %s fetching %s" % + (response.error, response.request.url))) + return + + future.set_result(escape.json_decode(response.body)) + + def get_auth_http_client(self): + """Returns the `.AsyncHTTPClient` instance to be used for auth requests. + + May be overridden by subclasses to use an HTTP client other than + the default. + + .. versionadded:: 4.3 + """ + return httpclient.AsyncHTTPClient() + + +class TwitterMixin(OAuthMixin): + """Twitter OAuth authentication. + + To authenticate with Twitter, register your application with + Twitter at http://twitter.com/apps. Then copy your Consumer Key + and Consumer Secret to the application + `~tornado.web.Application.settings` ``twitter_consumer_key`` and + ``twitter_consumer_secret``. Use this mixin on the handler for the + URL you registered as your application's callback URL. + + When your application is set up, you can use this mixin like this + to authenticate the user with Twitter and get access to their stream: + + .. testcode:: + + class TwitterLoginHandler(tornado.web.RequestHandler, + tornado.auth.TwitterMixin): + @tornado.gen.coroutine + def get(self): + if self.get_argument("oauth_token", None): + user = yield self.get_authenticated_user() + # Save the user using e.g. set_secure_cookie() + else: + yield self.authorize_redirect() + + .. 
testoutput:: + :hide: + + The user object returned by `~OAuthMixin.get_authenticated_user` + includes the attributes ``username``, ``name``, ``access_token``, + and all of the custom Twitter user attributes described at + https://dev.twitter.com/docs/api/1.1/get/users/show + """ + _OAUTH_REQUEST_TOKEN_URL = "https://api.twitter.com/oauth/request_token" + _OAUTH_ACCESS_TOKEN_URL = "https://api.twitter.com/oauth/access_token" + _OAUTH_AUTHORIZE_URL = "https://api.twitter.com/oauth/authorize" + _OAUTH_AUTHENTICATE_URL = "https://api.twitter.com/oauth/authenticate" + _OAUTH_NO_CALLBACKS = False + _TWITTER_BASE_URL = "https://api.twitter.com/1.1" + + @return_future + def authenticate_redirect(self, callback_uri=None, callback=None): + """Just like `~OAuthMixin.authorize_redirect`, but + auto-redirects if authorized. + + This is generally the right interface to use if you are using + Twitter for single-sign on. + + .. versionchanged:: 3.1 + Now returns a `.Future` and takes an optional callback, for + compatibility with `.gen.coroutine`. + """ + http = self.get_auth_http_client() + http.fetch(self._oauth_request_token_url(callback_uri=callback_uri), + functools.partial( + self._on_request_token, self._OAUTH_AUTHENTICATE_URL, + None, callback)) + + @_auth_return_future + def twitter_request(self, path, callback=None, access_token=None, + post_args=None, **args): + """Fetches the given API path, e.g., ``statuses/user_timeline/btaylor`` + + The path should not include the format or API version number. + (we automatically use JSON format and API version 1). + + If the request is a POST, ``post_args`` should be provided. Query + string arguments should be given as keyword arguments. + + All the Twitter methods are documented at http://dev.twitter.com/ + + Many methods require an OAuth access token which you can + obtain through `~OAuthMixin.authorize_redirect` and + `~OAuthMixin.get_authenticated_user`. 
The user returned through that + process includes an 'access_token' attribute that can be used + to make authenticated requests via this method. Example + usage: + + .. testcode:: + + class MainHandler(tornado.web.RequestHandler, + tornado.auth.TwitterMixin): + @tornado.web.authenticated + @tornado.gen.coroutine + def get(self): + new_entry = yield self.twitter_request( + "/statuses/update", + post_args={"status": "Testing Tornado Web Server"}, + access_token=self.current_user["access_token"]) + if not new_entry: + # Call failed; perhaps missing permission? + yield self.authorize_redirect() + return + self.finish("Posted a message!") + + .. testoutput:: + :hide: + + """ + if path.startswith('http:') or path.startswith('https:'): + # Raw urls are useful for e.g. search which doesn't follow the + # usual pattern: http://search.twitter.com/search.json + url = path + else: + url = self._TWITTER_BASE_URL + path + ".json" + # Add the OAuth resource request signature if we have credentials + if access_token: + all_args = {} + all_args.update(args) + all_args.update(post_args or {}) + method = "POST" if post_args is not None else "GET" + oauth = self._oauth_request_parameters( + url, access_token, all_args, method=method) + args.update(oauth) + if args: + url += "?" 
+ urllib_parse.urlencode(args) + http = self.get_auth_http_client() + http_callback = functools.partial(self._on_twitter_request, callback) + if post_args is not None: + http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args), + callback=http_callback) + else: + http.fetch(url, callback=http_callback) + + def _on_twitter_request(self, future, response): + if response.error: + future.set_exception(AuthError( + "Error response %s fetching %s" % (response.error, + response.request.url))) + return + future.set_result(escape.json_decode(response.body)) + + def _oauth_consumer_token(self): + self.require_setting("twitter_consumer_key", "Twitter OAuth") + self.require_setting("twitter_consumer_secret", "Twitter OAuth") + return dict( + key=self.settings["twitter_consumer_key"], + secret=self.settings["twitter_consumer_secret"]) + + @gen.coroutine + def _oauth_get_user_future(self, access_token): + user = yield self.twitter_request( + "/account/verify_credentials", + access_token=access_token) + if user: + user["username"] = user["screen_name"] + raise gen.Return(user) + + +class GoogleOAuth2Mixin(OAuth2Mixin): + """Google authentication using OAuth2. + + In order to use, register your application with Google and copy the + relevant parameters to your application settings. + + * Go to the Google Dev Console at http://console.developers.google.com + * Select a project, or create a new one. + * In the sidebar on the left, select APIs & Auth. + * In the list of APIs, find the Google+ API service and set it to ON. + * In the sidebar on the left, select Credentials. + * In the OAuth section of the page, select Create New Client ID. + * Set the Redirect URI to point to your auth handler + * Copy the "Client secret" and "Client ID" to the application settings as + {"google_oauth": {"key": CLIENT_ID, "secret": CLIENT_SECRET}} + + .. 
versionadded:: 3.2 + """ + _OAUTH_AUTHORIZE_URL = "https://accounts.google.com/o/oauth2/auth" + _OAUTH_ACCESS_TOKEN_URL = "https://accounts.google.com/o/oauth2/token" + _OAUTH_USERINFO_URL = "https://www.googleapis.com/oauth2/v1/userinfo" + _OAUTH_NO_CALLBACKS = False + _OAUTH_SETTINGS_KEY = 'google_oauth' + + @_auth_return_future + def get_authenticated_user(self, redirect_uri, code, callback): + """Handles the login for the Google user, returning an access token. + + The result is a dictionary containing an ``access_token`` field + ([among others](https://developers.google.com/identity/protocols/OAuth2WebServer#handlingtheresponse)). + Unlike other ``get_authenticated_user`` methods in this package, + this method does not return any additional information about the user. + The returned access token can be used with `OAuth2Mixin.oauth2_request` + to request additional information (perhaps from + ``https://www.googleapis.com/oauth2/v2/userinfo``) + + Example usage: + + .. testcode:: + + class GoogleOAuth2LoginHandler(tornado.web.RequestHandler, + tornado.auth.GoogleOAuth2Mixin): + @tornado.gen.coroutine + def get(self): + if self.get_argument('code', False): + access = yield self.get_authenticated_user( + redirect_uri='http://your.site.com/auth/google', + code=self.get_argument('code')) + user = yield self.oauth2_request( + "https://www.googleapis.com/oauth2/v1/userinfo", + access_token=access["access_token"]) + # Save the user and access token with + # e.g. set_secure_cookie. + else: + yield self.authorize_redirect( + redirect_uri='http://your.site.com/auth/google', + client_id=self.settings['google_oauth']['key'], + scope=['profile', 'email'], + response_type='code', + extra_params={'approval_prompt': 'auto'}) + + .. 
testoutput:: + :hide: + + """ + http = self.get_auth_http_client() + body = urllib_parse.urlencode({ + "redirect_uri": redirect_uri, + "code": code, + "client_id": self.settings[self._OAUTH_SETTINGS_KEY]['key'], + "client_secret": self.settings[self._OAUTH_SETTINGS_KEY]['secret'], + "grant_type": "authorization_code", + }) + + http.fetch(self._OAUTH_ACCESS_TOKEN_URL, + functools.partial(self._on_access_token, callback), + method="POST", headers={'Content-Type': 'application/x-www-form-urlencoded'}, body=body) + + def _on_access_token(self, future, response): + """Callback function for the exchange to the access token.""" + if response.error: + future.set_exception(AuthError('Google auth error: %s' % str(response))) + return + + args = escape.json_decode(response.body) + future.set_result(args) + + +class FacebookGraphMixin(OAuth2Mixin): + """Facebook authentication using the new Graph API and OAuth2.""" + _OAUTH_ACCESS_TOKEN_URL = "https://graph.facebook.com/oauth/access_token?" + _OAUTH_AUTHORIZE_URL = "https://www.facebook.com/dialog/oauth?" + _OAUTH_NO_CALLBACKS = False + _FACEBOOK_BASE_URL = "https://graph.facebook.com" + + @_auth_return_future + def get_authenticated_user(self, redirect_uri, client_id, client_secret, + code, callback, extra_fields=None): + """Handles the login for the Facebook user, returning a user object. + + Example usage: + + .. testcode:: + + class FacebookGraphLoginHandler(tornado.web.RequestHandler, + tornado.auth.FacebookGraphMixin): + @tornado.gen.coroutine + def get(self): + if self.get_argument("code", False): + user = yield self.get_authenticated_user( + redirect_uri='/auth/facebookgraph/', + client_id=self.settings["facebook_api_key"], + client_secret=self.settings["facebook_secret"], + code=self.get_argument("code")) + # Save the user with e.g. 
set_secure_cookie + else: + yield self.authorize_redirect( + redirect_uri='/auth/facebookgraph/', + client_id=self.settings["facebook_api_key"], + extra_params={"scope": "read_stream,offline_access"}) + + .. testoutput:: + :hide: + + """ + http = self.get_auth_http_client() + args = { + "redirect_uri": redirect_uri, + "code": code, + "client_id": client_id, + "client_secret": client_secret, + } + + fields = set(['id', 'name', 'first_name', 'last_name', + 'locale', 'picture', 'link']) + if extra_fields: + fields.update(extra_fields) + + http.fetch(self._oauth_request_token_url(**args), + functools.partial(self._on_access_token, redirect_uri, client_id, + client_secret, callback, fields)) + + def _on_access_token(self, redirect_uri, client_id, client_secret, + future, fields, response): + if response.error: + future.set_exception(AuthError('Facebook auth error: %s' % str(response))) + return + + args = urlparse.parse_qs(escape.native_str(response.body)) + session = { + "access_token": args["access_token"][-1], + "expires": args.get("expires") + } + + self.facebook_request( + path="/me", + callback=functools.partial( + self._on_get_user_info, future, session, fields), + access_token=session["access_token"], + fields=",".join(fields) + ) + + def _on_get_user_info(self, future, session, fields, user): + if user is None: + future.set_result(None) + return + + fieldmap = {} + for field in fields: + fieldmap[field] = user.get(field) + + fieldmap.update({"access_token": session["access_token"], "session_expires": session.get("expires")}) + future.set_result(fieldmap) + + @_auth_return_future + def facebook_request(self, path, callback, access_token=None, + post_args=None, **args): + """Fetches the given relative API path, e.g., "/btaylor/picture" + + If the request is a POST, ``post_args`` should be provided. Query + string arguments should be given as keyword arguments. 
def _oauth_signature(consumer_token, method, url, parameters=None, token=None):
    """Calculates the HMAC-SHA1 OAuth signature for the given request.

    See http://oauth.net/core/1.0/#signing_process

    :arg dict consumer_token: consumer credentials; must contain a
        ``"secret"`` key.
    :arg method: HTTP method (e.g. ``"GET"``); upper-cased into the
        signature base string.
    :arg url: the request URL; scheme and host are lower-cased per the spec.
    :arg dict parameters: request/OAuth parameters included in the
        signature base string.
    :arg dict token: optional request/access token with a ``"secret"`` key.
    :returns: the base64-encoded signature as a byte string (no trailing
        newline).
    """
    # Use None instead of a mutable default dict to avoid the shared
    # mutable-default-argument pitfall.
    if parameters is None:
        parameters = {}
    parts = urlparse.urlparse(url)
    scheme, netloc, path = parts[:3]
    normalized_url = scheme.lower() + "://" + netloc.lower() + path

    # Signature base string: METHOD&url&sorted-parameters, each element
    # percent-escaped.
    base_elems = []
    base_elems.append(method.upper())
    base_elems.append(normalized_url)
    base_elems.append("&".join("%s=%s" % (k, _oauth_escape(str(v)))
                               for k, v in sorted(parameters.items())))
    base_string = "&".join(_oauth_escape(e) for e in base_elems)

    # Signing key: consumer secret joined with the (possibly empty)
    # token secret.
    key_elems = [escape.utf8(consumer_token["secret"])]
    key_elems.append(escape.utf8(token["secret"] if token else ""))
    key = b"&".join(key_elems)

    # Renamed from ``hash`` to avoid shadowing the builtin.
    hashed = hmac.new(key, escape.utf8(base_string), hashlib.sha1)
    # b2a_base64 appends b"\n"; strip it.
    return binascii.b2a_base64(hashed.digest())[:-1]


def _oauth10a_signature(consumer_token, method, url, parameters=None, token=None):
    """Calculates the HMAC-SHA1 OAuth 1.0a signature for the given request.

    See http://oauth.net/core/1.0a/#signing_process

    Identical to `_oauth_signature` except that, per OAuth 1.0a, the
    consumer and token secrets are percent-encoded before being joined
    into the signing key.
    """
    # Use None instead of a mutable default dict to avoid the shared
    # mutable-default-argument pitfall.
    if parameters is None:
        parameters = {}
    parts = urlparse.urlparse(url)
    scheme, netloc, path = parts[:3]
    normalized_url = scheme.lower() + "://" + netloc.lower() + path

    base_elems = []
    base_elems.append(method.upper())
    base_elems.append(normalized_url)
    base_elems.append("&".join("%s=%s" % (k, _oauth_escape(str(v)))
                               for k, v in sorted(parameters.items())))

    base_string = "&".join(_oauth_escape(e) for e in base_elems)
    # 1.0a difference: secrets are percent-encoded ("~" stays unreserved)
    # before being joined into the key.
    key_elems = [escape.utf8(urllib_parse.quote(consumer_token["secret"], safe='~'))]
    key_elems.append(escape.utf8(urllib_parse.quote(token["secret"], safe='~') if token else ""))
    key = b"&".join(key_elems)

    hashed = hmac.new(key, escape.utf8(base_string), hashlib.sha1)
    return binascii.b2a_base64(hashed.digest())[:-1]


def _oauth_escape(val):
    """Percent-encode ``val`` for OAuth, leaving only "~" unescaped."""
    if isinstance(val, unicode_type):
        val = val.encode("utf-8")
    return urllib_parse.quote(val, safe="~")


def _oauth_parse_response(body):
    """Parse an OAuth token response body into a token dict.

    Returns a dict with ``key`` and ``secret`` entries plus any extra
    parameters the provider included (e.g. ``screen_name``).

    :raises KeyError: if ``oauth_token``/``oauth_token_secret`` are missing.
    """
    # I can't find an officially-defined encoding for oauth responses and
    # have never seen anyone use non-ascii.  Leave the response in a byte
    # string for python 2, and use utf8 on python 3.
    body = escape.native_str(body)
    p = urlparse.parse_qs(body, keep_blank_values=False)
    token = dict(key=p["oauth_token"][0], secret=p["oauth_token_secret"][0])

    # Add the extra parameters the Provider included to the token
    special = ("oauth_token", "oauth_token_secret")
    token.update((k, p[k][0]) for k in p if k not in special)
    return token
See the `main` method for details. + +The command-line wrapper and Application debug modes can be used together. +This combination is encouraged as the wrapper catches syntax errors and +other import-time failures, while debug mode catches changes once +the server has started. + +This module depends on `.IOLoop`, so it will not work in WSGI applications +and Google App Engine. It also will not work correctly when `.HTTPServer`'s +multi-process mode is used. + +Reloading loses any Python interpreter command-line arguments (e.g. ``-u``) +because it re-executes Python using ``sys.executable`` and ``sys.argv``. +Additionally, modifying these variables will cause reloading to behave +incorrectly. + +""" + +from __future__ import absolute_import, division, print_function, with_statement + +import os +import sys + +# sys.path handling +# ----------------- +# +# If a module is run with "python -m", the current directory (i.e. "") +# is automatically prepended to sys.path, but not if it is run as +# "path/to/file.py". The processing for "-m" rewrites the former to +# the latter, so subsequent executions won't have the same path as the +# original. +# +# Conversely, when run as path/to/file.py, the directory containing +# file.py gets added to the path, which can cause confusion as imports +# may become relative in spite of the future import. +# +# We address the former problem by setting the $PYTHONPATH environment +# variable before re-execution so the new process will see the correct +# path. We attempt to address the latter problem when tornado.autoreload +# is run as __main__, although we can't fix the general case because +# we cannot reliably reconstruct the original command line +# (http://bugs.python.org/issue14208). 
# os.execv is broken on Windows and can't properly parse command line
# arguments and executable names that contain whitespace; subprocess is
# used there instead.  This also decides whether the IOLoop (and its
# file descriptors) must be closed before re-executing: only the execv
# path needs that, to guard against descriptors without CLOEXEC set.
_has_execv = sys.platform != 'win32'

# Module-wide watch state shared by the functions below.
_watched_files = set()
_reload_hooks = []
_reload_attempted = False
_io_loops = weakref.WeakKeyDictionary()


def start(io_loop=None, check_time=500):
    """Begins watching source files for changes.

    .. versionchanged:: 4.1
       The ``io_loop`` argument is deprecated.
    """
    io_loop = io_loop or ioloop.IOLoop.current()
    if io_loop in _io_loops:
        # Already watching on this loop; nothing to do.
        return
    _io_loops[io_loop] = True
    if len(_io_loops) > 1:
        gen_log.warning("tornado.autoreload started more than once in the same process")
    if _has_execv:
        # The exec path replaces this process, so close the loop (and all
        # of its fds) first to guard against non-CLOEXEC descriptors.
        add_reload_hook(functools.partial(io_loop.close, all_fds=True))
    mtimes = {}
    checker = functools.partial(_reload_on_update, mtimes)
    periodic = ioloop.PeriodicCallback(checker, check_time, io_loop=io_loop)
    periodic.start()


def wait():
    """Wait for a watched file to change, then restart the process.

    Intended to be used at the end of scripts like unit test runners,
    to run the tests again after any source file changes (but see also
    the command-line interface in `main`)
    """
    loop = ioloop.IOLoop()
    start(loop)
    loop.start()


def watch(filename):
    """Add a file to the watch list.

    All imported modules are watched by default.
    """
    _watched_files.add(filename)


def add_reload_hook(fn):
    """Add a function to be called before reloading the process.

    Note that for open file and socket handles it is generally
    preferable to set the ``FD_CLOEXEC`` flag (using `fcntl` or
    ``tornado.platform.auto.set_close_exec``) instead
    of using a reload hook to close them.
    """
    _reload_hooks.append(fn)


def _reload_on_update(modify_times):
    """Periodic-callback body: poll every known file and reload on change."""
    if _reload_attempted:
        # A previous reload failed; don't retry.
        return
    if process.task_id() is not None:
        # We're in a child created by fork_processes.  If children
        # restarted themselves they would all restart and then all call
        # fork_processes again, so reloading is left to the parent.
        return
    for mod in list(sys.modules.values()):
        # Some modules play games with sys.modules (e.g. email/__init__.py
        # in the standard library) and occasionally getattr on those
        # entries fails strangely; skip anything that isn't an ordinary
        # module object.
        if not isinstance(mod, types.ModuleType):
            continue
        filename = getattr(mod, "__file__", None)
        if not filename:
            continue
        if filename.endswith((".pyc", ".pyo")):
            # Watch the source file, not the compiled artifact.
            filename = filename[:-1]
        _check_file(modify_times, filename)
    for filename in _watched_files:
        _check_file(modify_times, filename)
def _check_file(modify_times, path):
    # Compare the current mtime of ``path`` against the last one recorded
    # in ``modify_times`` (a dict of path -> mtime) and trigger a reload
    # when it has changed.
    try:
        modified = os.stat(path).st_mtime
    except Exception:
        # The file may have been deleted or be momentarily unreadable;
        # treat that as "no change" rather than crashing the poll loop.
        return
    if path not in modify_times:
        # First sighting of this file: record a baseline, don't reload.
        modify_times[path] = modified
        return
    if modify_times[path] != modified:
        gen_log.info("%s modified; restarting server", path)
        _reload()


def _reload():
    # Re-execute this process with the same arguments after running all
    # registered reload hooks.  Does not return.
    global _reload_attempted
    _reload_attempted = True
    for fn in _reload_hooks:
        fn()
    if hasattr(signal, "setitimer"):
        # Clear the alarm signal set by
        # ioloop.set_blocking_log_threshold so it doesn't fire
        # after the exec.
        signal.setitimer(signal.ITIMER_REAL, 0, 0)
    # sys.path fixes: see comments at top of file. If sys.path[0] is an empty
    # string, we were (probably) invoked with -m and the effective path
    # is about to change on re-exec. Add the current directory to $PYTHONPATH
    # to ensure that the new process sees the same path we did.
    path_prefix = '.' + os.pathsep
    if (sys.path[0] == '' and
            not os.environ.get("PYTHONPATH", "").startswith(path_prefix)):
        os.environ["PYTHONPATH"] = (path_prefix +
                                    os.environ.get("PYTHONPATH", ""))
    if not _has_execv:
        # Windows: re-spawn via subprocess (execv can't handle whitespace
        # in arguments there) and exit this process normally.
        subprocess.Popen([sys.executable] + sys.argv)
        sys.exit(0)
    else:
        try:
            os.execv(sys.executable, [sys.executable] + sys.argv)
        except OSError:
            # Mac OS X versions prior to 10.6 do not support execv in
            # a process that contains multiple threads. Instead of
            # re-executing in the current process, start a new one
            # and cause the current process to exit. This isn't
            # ideal since the new process is detached from the parent
            # terminal and thus cannot easily be killed with ctrl-C,
            # but it's better than not being able to autoreload at
            # all.
            # Unfortunately the errno returned in this case does not
            # appear to be consistent, so we can't easily check for
            # this error specifically.
            os.spawnv(os.P_NOWAIT, sys.executable,
                      [sys.executable] + sys.argv)
            # At this point the IOLoop has been closed and finally
            # blocks will experience errors if we allow the stack to
            # unwind, so just exit uncleanly.
            os._exit(0)

_USAGE = """\
Usage:
  python -m tornado.autoreload -m module.to.run [args...]
  python -m tornado.autoreload path/to/script.py [args...]
"""


def main():
    """Command-line wrapper to re-run a script whenever its source changes.

    Scripts may be specified by filename or module name::

        python -m tornado.autoreload -m tornado.test.runtests
        python -m tornado.autoreload tornado/test/runtests.py

    Running a script with this wrapper is similar to calling
    `tornado.autoreload.wait` at the end of the script, but this wrapper
    can catch import-time problems like syntax errors that would otherwise
    prevent the script from reaching its call to `wait`.
    """
    # Keep the original argv so it can be restored below: re-executions
    # must go back through this wrapper, not straight to the target.
    original_argv = sys.argv
    sys.argv = sys.argv[:]
    if len(sys.argv) >= 3 and sys.argv[1] == "-m":
        mode = "module"
        module = sys.argv[2]
        # Strip "-m module" so the target sees its own args.
        del sys.argv[1:3]
    elif len(sys.argv) >= 2:
        mode = "script"
        script = sys.argv[1]
        # Shift argv so the script sees itself as argv[0].
        sys.argv = sys.argv[1:]
    else:
        print(_USAGE, file=sys.stderr)
        sys.exit(1)

    try:
        if mode == "module":
            import runpy
            runpy.run_module(module, run_name="__main__", alter_sys=True)
        elif mode == "script":
            with open(script) as f:
                # Execute the script in our namespace instead of creating
                # a new one so that something that tries to import __main__
                # (e.g. the unittest module) will see names defined in the
                # script instead of just those defined in this module.
                global __file__
                __file__ = script
                # If __package__ is defined, imports may be incorrectly
                # interpreted as relative to this module.
                global __package__
                del __package__
                exec_in(f.read(), globals(), globals())
    except SystemExit as e:
        logging.basicConfig()
        gen_log.info("Script exited with status %s", e.code)
    except Exception as e:
        logging.basicConfig()
        gen_log.warning("Script exited with uncaught exception", exc_info=True)
        # If an exception occurred at import time, the file with the error
        # never made it into sys.modules and so we won't know to watch it.
        # Just to make sure we've covered everything, walk the stack trace
        # from the exception and watch every file.
        for (filename, lineno, name, line) in traceback.extract_tb(sys.exc_info()[2]):
            watch(filename)
        if isinstance(e, SyntaxError):
            # SyntaxErrors are special: their innermost stack frame is fake
            # so extract_tb won't see it and we have to get the filename
            # from the exception object.
            watch(e.filename)
    else:
        logging.basicConfig()
        gen_log.info("Script exited normally")
    # restore sys.argv so subsequent executions will include autoreload
    sys.argv = original_argv

    if mode == 'module':
        # runpy did a fake import of the module as __main__, but now it's
        # no longer in sys.modules. Figure out where it is and watch it.
        loader = pkgutil.get_loader(module)
        if loader is not None:
            watch(loader.get_filename())

    wait()


if __name__ == "__main__":
    # See also the other __main__ block at the top of the file, which modifies
    # sys.path before our imports
    main()
# Can the garbage collector collect reference cycles that contain objects
# with __del__ methods?  True on CPython from 3.4 onwards (PEP 442).
_GC_CYCLE_FINALIZERS = (platform.python_implementation() == 'CPython' and
                        sys.version_info >= (3, 4))


class ReturnValueIgnoredError(Exception):
    pass

# This class and associated code in the future object is derived
# from the Trollius project, a backport of asyncio to Python 2.x - 3.x


class _TracebackLogger(object):
    """Logs a ``Future``'s unretrieved exception when it is destroyed.

    If nobody ever asks a failed Future for its exception, the error would
    pass silently.  Putting ``__del__`` on the Future itself would prevent
    pre-PEP-442 garbage collectors from breaking cycles involving it, so
    the Future instead holds a reference to this small helper, which never
    participates in cycles and whose ``__del__`` does the logging.

    Lifecycle: ``set_exception()`` attaches a logger holding the raw
    ``exc_info``; ``activate()`` (called once the Future's callbacks have
    run) formats the traceback and drops the exc_info so no frames are
    pinned; ``clear()`` disarms the logger when ``result()`` or
    ``exception()`` observes the error.  Formatting is deferred to
    ``activate()`` so no work is done when the exception is retrieved
    promptly — and because holding the traceback would itself create the
    very cycle this class exists to avoid.

    (Derived from Trollius; the idea originally came from a discussion
    about closing files when they are collected.)
    """

    __slots__ = ('exc_info', 'formatted_tb')

    def __init__(self, exc_info):
        self.exc_info = exc_info
        self.formatted_tb = None

    def activate(self):
        # Format the pending traceback (if still armed) and release the
        # raw exc_info so we don't pin stack frames.
        pending, self.exc_info = self.exc_info, None
        if pending is not None:
            self.formatted_tb = traceback.format_exception(*pending)

    def clear(self):
        # The exception was observed; make __del__ a no-op.
        self.exc_info = None
        self.formatted_tb = None

    def __del__(self):
        if self.formatted_tb:
            app_log.error('Future exception was never retrieved: %s',
                          ''.join(self.formatted_tb).rstrip())
Previously it would + be an alias for the thread-safe `concurrent.futures.Future` + if that package was available and fall back to the thread-unsafe + implementation if it was not. + + .. versionchanged:: 4.1 + If a `.Future` contains an error but that error is never observed + (by calling ``result()``, ``exception()``, or ``exc_info()``), + a stack trace will be logged when the `.Future` is garbage collected. + This normally indicates an error in the application, but in cases + where it results in undesired logging it may be necessary to + suppress the logging by ensuring that the exception is observed: + ``f.add_done_callback(lambda f: f.exception())``. + """ + def __init__(self): + self._done = False + self._result = None + self._exc_info = None + + self._log_traceback = False # Used for Python >= 3.4 + self._tb_logger = None # Used for Python <= 3.3 + + self._callbacks = [] + + # Implement the Python 3.5 Awaitable protocol if possible + # (we can't use return and yield together until py33). + if sys.version_info >= (3, 3): + exec(textwrap.dedent(""" + def __await__(self): + return (yield self) + """)) + else: + # Py2-compatible version for use with cython. + def __await__(self): + result = yield self + # StopIteration doesn't take args before py33, + # but Cython recognizes the args tuple. + e = StopIteration() + e.args = (result,) + raise e + + def cancel(self): + """Cancel the operation, if possible. + + Tornado ``Futures`` do not support cancellation, so this method always + returns False. + """ + return False + + def cancelled(self): + """Returns True if the operation has been cancelled. + + Tornado ``Futures`` do not support cancellation, so this method + always returns False. 
+ """ + return False + + def running(self): + """Returns True if this operation is currently running.""" + return not self._done + + def done(self): + """Returns True if the future has finished running.""" + return self._done + + def _clear_tb_log(self): + self._log_traceback = False + if self._tb_logger is not None: + self._tb_logger.clear() + self._tb_logger = None + + def result(self, timeout=None): + """If the operation succeeded, return its result. If it failed, + re-raise its exception. + + This method takes a ``timeout`` argument for compatibility with + `concurrent.futures.Future` but it is an error to call it + before the `Future` is done, so the ``timeout`` is never used. + """ + self._clear_tb_log() + if self._result is not None: + return self._result + if self._exc_info is not None: + raise_exc_info(self._exc_info) + self._check_done() + return self._result + + def exception(self, timeout=None): + """If the operation raised an exception, return the `Exception` + object. Otherwise returns None. + + This method takes a ``timeout`` argument for compatibility with + `concurrent.futures.Future` but it is an error to call it + before the `Future` is done, so the ``timeout`` is never used. + """ + self._clear_tb_log() + if self._exc_info is not None: + return self._exc_info[1] + else: + self._check_done() + return None + + def add_done_callback(self, fn): + """Attaches the given callback to the `Future`. + + It will be invoked with the `Future` as its argument when the Future + has finished running and its result is available. In Tornado + consider using `.IOLoop.add_future` instead of calling + `add_done_callback` directly. + """ + if self._done: + fn(self) + else: + self._callbacks.append(fn) + + def set_result(self, result): + """Sets the result of a ``Future``. + + It is undefined to call any of the ``set`` methods more than once + on the same object. 
+ """ + self._result = result + self._set_done() + + def set_exception(self, exception): + """Sets the exception of a ``Future.``""" + self.set_exc_info( + (exception.__class__, + exception, + getattr(exception, '__traceback__', None))) + + def exc_info(self): + """Returns a tuple in the same format as `sys.exc_info` or None. + + .. versionadded:: 4.0 + """ + self._clear_tb_log() + return self._exc_info + + def set_exc_info(self, exc_info): + """Sets the exception information of a ``Future.`` + + Preserves tracebacks on Python 2. + + .. versionadded:: 4.0 + """ + self._exc_info = exc_info + self._log_traceback = True + if not _GC_CYCLE_FINALIZERS: + self._tb_logger = _TracebackLogger(exc_info) + + try: + self._set_done() + finally: + # Activate the logger after all callbacks have had a + # chance to call result() or exception(). + if self._log_traceback and self._tb_logger is not None: + self._tb_logger.activate() + self._exc_info = exc_info + + def _check_done(self): + if not self._done: + raise Exception("DummyFuture does not support blocking for results") + + def _set_done(self): + self._done = True + for cb in self._callbacks: + try: + cb(self) + except Exception: + app_log.exception('Exception in callback %r for %r', + cb, self) + self._callbacks = None + + # On Python 3.3 or older, objects with a destructor part of a reference + # cycle are never destroyed. It's no longer the case on Python 3.4 thanks to + # the PEP 442. 
    if _GC_CYCLE_FINALIZERS:
        # Only defined where the GC can collect cycles containing
        # finalizers (CPython >= 3.4); elsewhere _TracebackLogger handles
        # never-retrieved logging.
        def __del__(self):
            if not self._log_traceback:
                # set_exception() was not called, or result() or exception()
                # has consumed the exception
                return

            tb = traceback.format_exception(*self._exc_info)

            app_log.error('Future %r exception was never retrieved: %s',
                          self, ''.join(tb).rstrip())

# Deprecated alias kept for backwards compatibility.
TracebackFuture = Future

# Tuple of types accepted by is_future; includes concurrent.futures.Future
# only when that package is importable.
if futures is None:
    FUTURES = Future
else:
    FUTURES = (futures.Future, Future)


def is_future(x):
    # True for both tornado Futures and (when available) the
    # concurrent.futures variety.
    return isinstance(x, FUTURES)


class DummyExecutor(object):
    # Minimal synchronous stand-in for a concurrent.futures Executor:
    # submit() runs the function immediately and returns an
    # already-completed Future.
    def submit(self, fn, *args, **kwargs):
        future = TracebackFuture()
        try:
            future.set_result(fn(*args, **kwargs))
        except Exception:
            future.set_exc_info(sys.exc_info())
        return future

    def shutdown(self, wait=True):
        # Nothing to shut down; accepted for interface compatibility.
        pass

dummy_executor = DummyExecutor()


def run_on_executor(*args, **kwargs):
    """Decorator to run a synchronous method asynchronously on an executor.

    The decorated method may be called with a ``callback`` keyword
    argument and returns a future.

    The `.IOLoop` and executor to be used are determined by the ``io_loop``
    and ``executor`` attributes of ``self``. To use different attributes,
    pass keyword arguments to the decorator::

        @run_on_executor(executor='_thread_pool')
        def foo(self):
            pass

    .. versionchanged:: 4.2
       Added keyword arguments to use alternative attributes.
    """
    def run_on_executor_decorator(fn):
        # Attribute names on ``self`` naming the executor and io_loop.
        executor = kwargs.get("executor", "executor")
        io_loop = kwargs.get("io_loop", "io_loop")

        @functools.wraps(fn)
        def wrapper(self, *args, **kwargs):
            callback = kwargs.pop("callback", None)
            future = getattr(self, executor).submit(fn, self, *args, **kwargs)
            if callback:
                # Deliver the unwrapped result to the callback on the loop.
                getattr(self, io_loop).add_future(
                    future, lambda future: callback(future.result()))
            return future
        return wrapper
    # Support both bare (@run_on_executor) and parameterized
    # (@run_on_executor(...)) application.
    if args and kwargs:
        raise ValueError("cannot combine positional and keyword args")
    if len(args) == 1:
        return run_on_executor_decorator(args[0])
    elif len(args) != 0:
        raise ValueError("expected 1 argument, got %d", len(args))
    return run_on_executor_decorator


# Sentinel distinguishing "callback invoked with no value" from None.
_NO_RESULT = object()


def return_future(f):
    """Decorator to make a function that returns via callback return a
    `Future`.

    The wrapped function should take a ``callback`` keyword argument
    and invoke it with one argument when it has finished.  To signal failure,
    the function can simply raise an exception (which will be
    captured by the `.StackContext` and passed along to the ``Future``).

    From the caller's perspective, the callback argument is optional.
    If one is given, it will be invoked when the function is complete
    with `Future.result()` as an argument.  If the function fails, the
    callback will not be run and an exception will be raised into the
    surrounding `.StackContext`.

    If no callback is given, the caller should use the ``Future`` to
    wait for the function to complete (perhaps by yielding it in a
    `.gen.engine` function, or passing it to `.IOLoop.add_future`).

    Usage:

    .. testcode::

        @return_future
        def future_func(arg1, arg2, callback):
            # Do stuff (possibly asynchronous)
            callback(result)

        @gen.engine
        def caller(callback):
            yield future_func(arg1, arg2)
            callback()

    ..
    Note that ``@return_future`` and ``@gen.engine`` can be applied to the
    same function, provided ``@return_future`` appears first. However,
    consider using ``@gen.coroutine`` instead of this combination.
    """
    replacer = ArgReplacer(f, 'callback')

    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        future = TracebackFuture()
        # Substitute our own callback (which resolves the future) for the
        # caller's ``callback`` argument; the caller's original callback,
        # if any, is returned so it can be chained onto the future below.
        callback, args, kwargs = replacer.replace(
            lambda value=_NO_RESULT: future.set_result(value),
            args, kwargs)

        def handle_error(typ, value, tb):
            # Route exceptions raised inside the stack context into the
            # future instead of letting them propagate.
            future.set_exc_info((typ, value, tb))
            return True
        exc_info = None
        with ExceptionStackContext(handle_error):
            try:
                result = f(*args, **kwargs)
                if result is not None:
                    raise ReturnValueIgnoredError(
                        "@return_future should not be used with functions "
                        "that return values")
            except:
                exc_info = sys.exc_info()
                raise
        if exc_info is not None:
            # If the initial synchronous part of f() raised an exception,
            # go ahead and raise it to the caller directly without waiting
            # for them to inspect the Future.
            future.result()

        # If the caller passed in a callback, schedule it to be called
        # when the future resolves.  It is important that this happens
        # just before we return the future, or else we risk confusing
        # stack contexts with multiple exceptions (one here with the
        # immediate exception, and again when the future resolves and
        # the callback triggers its exception by calling future.result()).
        if callback is not None:
            def run_callback(future):
                result = future.result()
                if result is _NO_RESULT:
                    # The wrapped function invoked its callback with no
                    # argument; call the caller's callback the same way.
                    callback()
                else:
                    callback(future.result())
            future.add_done_callback(wrap(run_callback))
        return future
    return wrapper


def chain_future(a, b):
    """Chain two futures together so that when one completes, so does the other.

    The result (success or failure) of ``a`` will be copied to ``b``, unless
    ``b`` has already been completed or cancelled by the time ``a`` finishes.
    """
    def copy(future):
        assert future is a
        if b.done():
            return
        # Prefer exc_info when both sides support it, so the traceback is
        # preserved on Python 2.
        if (isinstance(a, TracebackFuture) and isinstance(b, TracebackFuture)
                and a.exc_info() is not None):
            b.set_exc_info(a.exc_info())
        elif a.exception() is not None:
            b.set_exception(a.exception())
        else:
            b.set_result(a.result())
    a.add_done_callback(copy)
+ +"""Non-blocking HTTP client implementation using pycurl.""" + +from __future__ import absolute_import, division, print_function, with_statement + +import collections +import functools +import logging +import pycurl +import threading +import time +from io import BytesIO + +from tornado import httputil +from tornado import ioloop +from tornado import stack_context + +from tornado.escape import utf8, native_str +from tornado.httpclient import HTTPResponse, HTTPError, AsyncHTTPClient, main + +curl_log = logging.getLogger('tornado.curl_httpclient') + + +class CurlAsyncHTTPClient(AsyncHTTPClient): + def initialize(self, io_loop, max_clients=10, defaults=None): + super(CurlAsyncHTTPClient, self).initialize(io_loop, defaults=defaults) + self._multi = pycurl.CurlMulti() + self._multi.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout) + self._multi.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket) + self._curls = [self._curl_create() for i in range(max_clients)] + self._free_list = self._curls[:] + self._requests = collections.deque() + self._fds = {} + self._timeout = None + + # libcurl has bugs that sometimes cause it to not report all + # relevant file descriptors and timeouts to TIMERFUNCTION/ + # SOCKETFUNCTION. Mitigate the effects of such bugs by + # forcing a periodic scan of all active requests. + self._force_timeout_callback = ioloop.PeriodicCallback( + self._handle_force_timeout, 1000, io_loop=io_loop) + self._force_timeout_callback.start() + + # Work around a bug in libcurl 7.29.0: Some fields in the curl + # multi object are initialized lazily, and its destructor will + # segfault if it is destroyed without having been used. Add + # and remove a dummy handle to make sure everything is + # initialized. 
+ dummy_curl_handle = pycurl.Curl() + self._multi.add_handle(dummy_curl_handle) + self._multi.remove_handle(dummy_curl_handle) + + def close(self): + self._force_timeout_callback.stop() + if self._timeout is not None: + self.io_loop.remove_timeout(self._timeout) + for curl in self._curls: + curl.close() + self._multi.close() + super(CurlAsyncHTTPClient, self).close() + + def fetch_impl(self, request, callback): + self._requests.append((request, callback)) + self._process_queue() + self._set_timeout(0) + + def _handle_socket(self, event, fd, multi, data): + """Called by libcurl when it wants to change the file descriptors + it cares about. + """ + event_map = { + pycurl.POLL_NONE: ioloop.IOLoop.NONE, + pycurl.POLL_IN: ioloop.IOLoop.READ, + pycurl.POLL_OUT: ioloop.IOLoop.WRITE, + pycurl.POLL_INOUT: ioloop.IOLoop.READ | ioloop.IOLoop.WRITE + } + if event == pycurl.POLL_REMOVE: + if fd in self._fds: + self.io_loop.remove_handler(fd) + del self._fds[fd] + else: + ioloop_event = event_map[event] + # libcurl sometimes closes a socket and then opens a new + # one using the same FD without giving us a POLL_NONE in + # between. This is a problem with the epoll IOLoop, + # because the kernel can tell when a socket is closed and + # removes it from the epoll automatically, causing future + # update_handler calls to fail. Since we can't tell when + # this has happened, always use remove and re-add + # instead of update. + if fd in self._fds: + self.io_loop.remove_handler(fd) + self.io_loop.add_handler(fd, self._handle_events, + ioloop_event) + self._fds[fd] = ioloop_event + + def _set_timeout(self, msecs): + """Called by libcurl to schedule a timeout.""" + if self._timeout is not None: + self.io_loop.remove_timeout(self._timeout) + self._timeout = self.io_loop.add_timeout( + self.io_loop.time() + msecs / 1000.0, self._handle_timeout) + + def _handle_events(self, fd, events): + """Called by IOLoop when there is activity on one of our + file descriptors. 
+ """ + action = 0 + if events & ioloop.IOLoop.READ: + action |= pycurl.CSELECT_IN + if events & ioloop.IOLoop.WRITE: + action |= pycurl.CSELECT_OUT + while True: + try: + ret, num_handles = self._multi.socket_action(fd, action) + except pycurl.error as e: + ret = e.args[0] + if ret != pycurl.E_CALL_MULTI_PERFORM: + break + self._finish_pending_requests() + + def _handle_timeout(self): + """Called by IOLoop when the requested timeout has passed.""" + with stack_context.NullContext(): + self._timeout = None + while True: + try: + ret, num_handles = self._multi.socket_action( + pycurl.SOCKET_TIMEOUT, 0) + except pycurl.error as e: + ret = e.args[0] + if ret != pycurl.E_CALL_MULTI_PERFORM: + break + self._finish_pending_requests() + + # In theory, we shouldn't have to do this because curl will + # call _set_timeout whenever the timeout changes. However, + # sometimes after _handle_timeout we will need to reschedule + # immediately even though nothing has changed from curl's + # perspective. This is because when socket_action is + # called with SOCKET_TIMEOUT, libcurl decides internally which + # timeouts need to be processed by using a monotonic clock + # (where available) while tornado uses python's time.time() + # to decide when timeouts have occurred. When those clocks + # disagree on elapsed time (as they will whenever there is an + # NTP adjustment), tornado might call _handle_timeout before + # libcurl is ready. After each timeout, resync the scheduled + # timeout with libcurl's current state. + new_timeout = self._multi.timeout() + if new_timeout >= 0: + self._set_timeout(new_timeout) + + def _handle_force_timeout(self): + """Called by IOLoop periodically to ask libcurl to process any + events it may have forgotten about. 
+ """ + with stack_context.NullContext(): + while True: + try: + ret, num_handles = self._multi.socket_all() + except pycurl.error as e: + ret = e.args[0] + if ret != pycurl.E_CALL_MULTI_PERFORM: + break + self._finish_pending_requests() + + def _finish_pending_requests(self): + """Process any requests that were completed by the last + call to multi.socket_action. + """ + while True: + num_q, ok_list, err_list = self._multi.info_read() + for curl in ok_list: + self._finish(curl) + for curl, errnum, errmsg in err_list: + self._finish(curl, errnum, errmsg) + if num_q == 0: + break + self._process_queue() + + def _process_queue(self): + with stack_context.NullContext(): + while True: + started = 0 + while self._free_list and self._requests: + started += 1 + curl = self._free_list.pop() + (request, callback) = self._requests.popleft() + curl.info = { + "headers": httputil.HTTPHeaders(), + "buffer": BytesIO(), + "request": request, + "callback": callback, + "curl_start_time": time.time(), + } + try: + self._curl_setup_request( + curl, request, curl.info["buffer"], + curl.info["headers"]) + except Exception as e: + # If there was an error in setup, pass it on + # to the callback. Note that allowing the + # error to escape here will appear to work + # most of the time since we are still in the + # caller's original stack frame, but when + # _process_queue() is called from + # _finish_pending_requests the exceptions have + # nowhere to go. 
+ callback(HTTPResponse( + request=request, + code=599, + error=e)) + else: + self._multi.add_handle(curl) + + if not started: + break + + def _finish(self, curl, curl_error=None, curl_message=None): + info = curl.info + curl.info = None + self._multi.remove_handle(curl) + self._free_list.append(curl) + buffer = info["buffer"] + if curl_error: + error = CurlError(curl_error, curl_message) + code = error.code + effective_url = None + buffer.close() + buffer = None + else: + error = None + code = curl.getinfo(pycurl.HTTP_CODE) + effective_url = curl.getinfo(pycurl.EFFECTIVE_URL) + buffer.seek(0) + # the various curl timings are documented at + # http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html + time_info = dict( + queue=info["curl_start_time"] - info["request"].start_time, + namelookup=curl.getinfo(pycurl.NAMELOOKUP_TIME), + connect=curl.getinfo(pycurl.CONNECT_TIME), + pretransfer=curl.getinfo(pycurl.PRETRANSFER_TIME), + starttransfer=curl.getinfo(pycurl.STARTTRANSFER_TIME), + total=curl.getinfo(pycurl.TOTAL_TIME), + redirect=curl.getinfo(pycurl.REDIRECT_TIME), + ) + try: + info["callback"](HTTPResponse( + request=info["request"], code=code, headers=info["headers"], + buffer=buffer, effective_url=effective_url, error=error, + reason=info['headers'].get("X-Http-Reason", None), + request_time=time.time() - info["curl_start_time"], + time_info=time_info)) + except Exception: + self.handle_callback_exception(info["callback"]) + + def handle_callback_exception(self, callback): + self.io_loop.handle_callback_exception(callback) + + def _curl_create(self): + curl = pycurl.Curl() + if curl_log.isEnabledFor(logging.DEBUG): + curl.setopt(pycurl.VERBOSE, 1) + curl.setopt(pycurl.DEBUGFUNCTION, self._curl_debug) + return curl + + def _curl_setup_request(self, curl, request, buffer, headers): + curl.setopt(pycurl.URL, native_str(request.url)) + + # libcurl's magic "Expect: 100-continue" behavior causes delays + # with servers that don't support it (which include, among 
others, + # Google's OpenID endpoint). Additionally, this behavior has + # a bug in conjunction with the curl_multi_socket_action API + # (https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3039744&group_id=976), + # which increases the delays. It's more trouble than it's worth, + # so just turn off the feature (yes, setting Expect: to an empty + # value is the official way to disable this) + if "Expect" not in request.headers: + request.headers["Expect"] = "" + + # libcurl adds Pragma: no-cache by default; disable that too + if "Pragma" not in request.headers: + request.headers["Pragma"] = "" + + curl.setopt(pycurl.HTTPHEADER, + ["%s: %s" % (native_str(k), native_str(v)) + for k, v in request.headers.get_all()]) + + curl.setopt(pycurl.HEADERFUNCTION, + functools.partial(self._curl_header_callback, + headers, request.header_callback)) + if request.streaming_callback: + def write_function(chunk): + self.io_loop.add_callback(request.streaming_callback, chunk) + else: + write_function = buffer.write + if bytes is str: # py2 + curl.setopt(pycurl.WRITEFUNCTION, write_function) + else: # py3 + # Upstream pycurl doesn't support py3, but ubuntu 12.10 includes + # a fork/port. That version has a bug in which it passes unicode + # strings instead of bytes to the WRITEFUNCTION. This means that + # if you use a WRITEFUNCTION (which tornado always does), you cannot + # download arbitrary binary data. This needs to be fixed in the + # ported pycurl package, but in the meantime this lambda will + # make it work for downloading (utf8) text. 
+ curl.setopt(pycurl.WRITEFUNCTION, lambda s: write_function(utf8(s))) + curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects) + curl.setopt(pycurl.MAXREDIRS, request.max_redirects) + curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout)) + curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout)) + if request.user_agent: + curl.setopt(pycurl.USERAGENT, native_str(request.user_agent)) + else: + curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)") + if request.network_interface: + curl.setopt(pycurl.INTERFACE, request.network_interface) + if request.decompress_response: + curl.setopt(pycurl.ENCODING, "gzip,deflate") + else: + curl.setopt(pycurl.ENCODING, "none") + if request.proxy_host and request.proxy_port: + curl.setopt(pycurl.PROXY, request.proxy_host) + curl.setopt(pycurl.PROXYPORT, request.proxy_port) + if request.proxy_username: + credentials = '%s:%s' % (request.proxy_username, + request.proxy_password) + curl.setopt(pycurl.PROXYUSERPWD, credentials) + else: + curl.setopt(pycurl.PROXY, '') + curl.unsetopt(pycurl.PROXYUSERPWD) + if request.validate_cert: + curl.setopt(pycurl.SSL_VERIFYPEER, 1) + curl.setopt(pycurl.SSL_VERIFYHOST, 2) + else: + curl.setopt(pycurl.SSL_VERIFYPEER, 0) + curl.setopt(pycurl.SSL_VERIFYHOST, 0) + if request.ca_certs is not None: + curl.setopt(pycurl.CAINFO, request.ca_certs) + else: + # There is no way to restore pycurl.CAINFO to its default value + # (Using unsetopt makes it reject all certificates). + # I don't see any way to read the default value from python so it + # can be restored later. We'll have to just leave CAINFO untouched + # if no ca_certs file was specified, and require that if any + # request uses a custom ca_certs file, they all must. + pass + + if request.allow_ipv6 is False: + # Curl behaves reasonably when DNS resolution gives an ipv6 address + # that we can't reach, so allow ipv6 unless the user asks to disable. 
+ curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4) + else: + curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER) + + # Set the request method through curl's irritating interface which makes + # up names for almost every single method + curl_options = { + "GET": pycurl.HTTPGET, + "POST": pycurl.POST, + "PUT": pycurl.UPLOAD, + "HEAD": pycurl.NOBODY, + } + custom_methods = set(["DELETE", "OPTIONS", "PATCH"]) + for o in curl_options.values(): + curl.setopt(o, False) + if request.method in curl_options: + curl.unsetopt(pycurl.CUSTOMREQUEST) + curl.setopt(curl_options[request.method], True) + elif request.allow_nonstandard_methods or request.method in custom_methods: + curl.setopt(pycurl.CUSTOMREQUEST, request.method) + else: + raise KeyError('unknown method ' + request.method) + + body_expected = request.method in ("POST", "PATCH", "PUT") + body_present = request.body is not None + if not request.allow_nonstandard_methods: + # Some HTTP methods nearly always have bodies while others + # almost never do. Fail in this case unless the user has + # opted out of sanity checks with allow_nonstandard_methods. + if ((body_expected and not body_present) or + (body_present and not body_expected)): + raise ValueError( + 'Body must %sbe None for method %s (unless ' + 'allow_nonstandard_methods is true)' % + ('not ' if body_expected else '', request.method)) + + if body_expected or body_present: + if request.method == "GET": + # Even with `allow_nonstandard_methods` we disallow + # GET with a body (because libcurl doesn't allow it + # unless we use CUSTOMREQUEST). While the spec doesn't + # forbid clients from sending a body, it arguably + # disallows the server from doing anything with them. 
+ raise ValueError('Body must be None for GET request') + request_buffer = BytesIO(utf8(request.body or '')) + + def ioctl(cmd): + if cmd == curl.IOCMD_RESTARTREAD: + request_buffer.seek(0) + curl.setopt(pycurl.READFUNCTION, request_buffer.read) + curl.setopt(pycurl.IOCTLFUNCTION, ioctl) + if request.method == "POST": + curl.setopt(pycurl.POSTFIELDSIZE, len(request.body or '')) + else: + curl.setopt(pycurl.UPLOAD, True) + curl.setopt(pycurl.INFILESIZE, len(request.body or '')) + + if request.auth_username is not None: + userpwd = "%s:%s" % (request.auth_username, request.auth_password or '') + + if request.auth_mode is None or request.auth_mode == "basic": + curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC) + elif request.auth_mode == "digest": + curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST) + else: + raise ValueError("Unsupported auth_mode %s" % request.auth_mode) + + curl.setopt(pycurl.USERPWD, native_str(userpwd)) + curl_log.debug("%s %s (username: %r)", request.method, request.url, + request.auth_username) + else: + curl.unsetopt(pycurl.USERPWD) + curl_log.debug("%s %s", request.method, request.url) + + if request.client_cert is not None: + curl.setopt(pycurl.SSLCERT, request.client_cert) + + if request.client_key is not None: + curl.setopt(pycurl.SSLKEY, request.client_key) + + if request.ssl_options is not None: + raise ValueError("ssl_options not supported in curl_httpclient") + + if threading.activeCount() > 1: + # libcurl/pycurl is not thread-safe by default. When multiple threads + # are used, signals should be disabled. This has the side effect + # of disabling DNS timeouts in some environments (when libcurl is + # not linked against ares), so we don't do it when there is only one + # thread. Applications that use many short-lived threads may need + # to set NOSIGNAL manually in a prepare_curl_callback since + # there may not be any other threads running at the time we call + # threading.activeCount. 
+ curl.setopt(pycurl.NOSIGNAL, 1) + if request.prepare_curl_callback is not None: + request.prepare_curl_callback(curl) + + def _curl_header_callback(self, headers, header_callback, header_line): + header_line = native_str(header_line) + if header_callback is not None: + self.io_loop.add_callback(header_callback, header_line) + # header_line as returned by curl includes the end-of-line characters. + # whitespace at the start should be preserved to allow multi-line headers + header_line = header_line.rstrip() + if header_line.startswith("HTTP/"): + headers.clear() + try: + (__, __, reason) = httputil.parse_response_start_line(header_line) + header_line = "X-Http-Reason: %s" % reason + except httputil.HTTPInputError: + return + if not header_line: + return + headers.parse_line(header_line) + + def _curl_debug(self, debug_type, debug_msg): + debug_types = ('I', '<', '>', '<', '>') + if debug_type == 0: + curl_log.debug('%s', debug_msg.strip()) + elif debug_type in (1, 2): + for line in debug_msg.splitlines(): + curl_log.debug('%s %s', debug_types[debug_type], line) + elif debug_type == 4: + curl_log.debug('%s %r', debug_types[debug_type], debug_msg) + + +class CurlError(HTTPError): + def __init__(self, errno, message): + HTTPError.__init__(self, 599, message) + self.errno = errno + + +if __name__ == "__main__": + AsyncHTTPClient.configure(CurlAsyncHTTPClient) + main() diff --git a/python/tornado/escape.py b/python/tornado/escape.py new file mode 100644 index 000000000..2f04b4683 --- /dev/null +++ b/python/tornado/escape.py @@ -0,0 +1,399 @@ +#!/usr/bin/env python +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Escaping/unescaping methods for HTML, JSON, URLs, and others.
+
+Also includes a few other miscellaneous string manipulation functions that
+have crept in over time.
+"""
+
+from __future__ import absolute_import, division, print_function, with_statement
+
+import re
+import sys
+
+from tornado.util import unicode_type, basestring_type, u
+
+try:
+    from urllib.parse import parse_qs as _parse_qs  # py3
+except ImportError:
+    from urlparse import parse_qs as _parse_qs  # Python 2.6+
+
+try:
+    import htmlentitydefs  # py2
+except ImportError:
+    import html.entities as htmlentitydefs  # py3
+
+try:
+    import urllib.parse as urllib_parse  # py3
+except ImportError:
+    import urllib as urllib_parse  # py2
+
+import json
+
+try:
+    unichr
+except NameError:
+    unichr = chr
+
+_XHTML_ESCAPE_RE = re.compile('[&<>"\']')
+_XHTML_ESCAPE_DICT = {'&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;',
+                      '\'': '&#39;'}
+
+
+def xhtml_escape(value):
+    """Escapes a string so it is valid within HTML or XML.
+
+    Escapes the characters ``<``, ``>``, ``"``, ``'``, and ``&``.
+    When used in attribute values the escaped strings must be enclosed
+    in quotes.
+
+    .. versionchanged:: 3.2
+
+       Added the single quote to the list of escaped characters.
+    """
+    return _XHTML_ESCAPE_RE.sub(lambda match: _XHTML_ESCAPE_DICT[match.group(0)],
+                                to_basestring(value))
+
+
+def xhtml_unescape(value):
+    """Un-escapes an XML-escaped string."""
+    return re.sub(r"&(#?)(\w+?);", _convert_entity, _unicode(value))
+
+
+# The fact that json_encode wraps json.dumps is an implementation detail.
+# Please see https://github.com/tornadoweb/tornado/pull/706
+# before sending a pull request that adds **kwargs to this function.
+def json_encode(value):
+    """JSON-encodes the given Python object."""
+    # JSON permits but does not require forward slashes to be escaped.
+    # This is useful when json data is emitted in a <script> tag
+    # in HTML, as it prevents </script> tags from prematurely terminating
+    # the javascript.  Some json libraries do this escaping by default,
+    # although python's standard library does not, so we do it here.
+    # http://stackoverflow.com/questions/1580647/json-why-are-forward-slashes-escaped
+    return json.dumps(value).replace("</", "<\\/")
+
+
+# NOTE(review): the extraction that produced this chunk appears to have
+# dropped the helpers normally defined between json_encode and _URL_RE
+# (json_decode, squeeze, url_escape, url_unescape, parse_qs_bytes, utf8,
+# to_unicode, native_str, to_basestring, recursive_unicode). Recover them
+# from the upstream tornado/escape.py of this Tornado release rather than
+# re-typing them from memory.
+_URL_RE = re.compile(to_unicode(r"""\b((?:([\w-]+):(/{1,3})|www[.])(?:(?:(?:[^\s&()]|&amp;|&quot;)*(?:[^!"#$%&'()*+,.:;<=>?@\[\]^`{|}~\s]))|(?:\((?:[^\s&()]|&amp;|&quot;)*\)))+)"""))
+
+
+def linkify(text, shorten=False, extra_params="",
+            require_protocol=False, permitted_protocols=["http", "https"]):
+    """Converts plain text into HTML with links.
+
+    For example: ``linkify("Hello http://tornadoweb.org!")`` would return
+    ``Hello <a href="http://tornadoweb.org">http://tornadoweb.org</a>!``
+
+    Parameters:
+
+    * ``shorten``: Long urls will be shortened for display.
+
+    * ``extra_params``: Extra text to include in the link tag, or a callable
+      taking the link as an argument and returning the extra text
+      e.g. ``linkify(text, extra_params='rel="nofollow" class="external"')``,
+      or::
+
+          def extra_params_cb(url):
+              if url.startswith("http://example.com"):
+                  return 'class="internal"'
+              else:
+                  return 'class="external" rel="nofollow"'
+          linkify(text, extra_params=extra_params_cb)
+
+    * ``require_protocol``: Only linkify urls which include a protocol. If
+      this is False, urls such as www.facebook.com will also be linkified.
+
+    * ``permitted_protocols``: List (or set) of protocols which should be
+      linkified, e.g. ``linkify(text, permitted_protocols=["http", "ftp",
+      "mailto"])``. It is very unsafe to include protocols such as
+      ``javascript``.
+ """ + if extra_params and not callable(extra_params): + extra_params = " " + extra_params.strip() + + def make_link(m): + url = m.group(1) + proto = m.group(2) + if require_protocol and not proto: + return url # not protocol, no linkify + + if proto and proto not in permitted_protocols: + return url # bad protocol, no linkify + + href = m.group(1) + if not proto: + href = "http://" + href # no proto specified, use http + + if callable(extra_params): + params = " " + extra_params(href).strip() + else: + params = extra_params + + # clip long urls. max_len is just an approximation + max_len = 30 + if shorten and len(url) > max_len: + before_clip = url + if proto: + proto_len = len(proto) + 1 + len(m.group(3) or "") # +1 for : + else: + proto_len = 0 + + parts = url[proto_len:].split("/") + if len(parts) > 1: + # Grab the whole host part plus the first bit of the path + # The path is usually not that interesting once shortened + # (no more slug, etc), so it really just provides a little + # extra indication of shortening. + url = url[:proto_len] + parts[0] + "/" + \ + parts[1][:8].split('?')[0].split('.')[0] + + if len(url) > max_len * 1.5: # still too long + url = url[:max_len] + + if url != before_clip: + amp = url.rfind('&') + # avoid splitting html char entities + if amp > max_len - 5: + url = url[:amp] + url += "..." + + if len(url) >= len(before_clip): + url = before_clip + else: + # full url is visible on mouse-over (for those who don't + # have a status bar, such as Safari by default) + params += ' title="%s"' % href + + return u('%s') % (href, params, url) + + # First HTML-escape so that our strings are all safe. + # The regex is modified to avoid character entites other than & so + # that we won't pick up ", etc. 
+ text = _unicode(xhtml_escape(text)) + return _URL_RE.sub(make_link, text) + + +def _convert_entity(m): + if m.group(1) == "#": + try: + if m.group(2)[:1].lower() == 'x': + return unichr(int(m.group(2)[1:], 16)) + else: + return unichr(int(m.group(2))) + except ValueError: + return "&#%s;" % m.group(2) + try: + return _HTML_UNICODE_MAP[m.group(2)] + except KeyError: + return "&%s;" % m.group(2) + + +def _build_unicode_map(): + unicode_map = {} + for name, value in htmlentitydefs.name2codepoint.items(): + unicode_map[name] = unichr(value) + return unicode_map + +_HTML_UNICODE_MAP = _build_unicode_map() diff --git a/python/tornado/gen.py b/python/tornado/gen.py new file mode 100644 index 000000000..bf184e548 --- /dev/null +++ b/python/tornado/gen.py @@ -0,0 +1,1241 @@ +"""``tornado.gen`` is a generator-based interface to make it easier to +work in an asynchronous environment. Code using the ``gen`` module +is technically asynchronous, but it is written as a single generator +instead of a collection of separate functions. + +For example, the following asynchronous handler: + +.. testcode:: + + class AsyncHandler(RequestHandler): + @asynchronous + def get(self): + http_client = AsyncHTTPClient() + http_client.fetch("http://example.com", + callback=self.on_fetch) + + def on_fetch(self, response): + do_something_with_response(response) + self.render("template.html") + +.. testoutput:: + :hide: + +could be written with ``gen`` as: + +.. testcode:: + + class GenAsyncHandler(RequestHandler): + @gen.coroutine + def get(self): + http_client = AsyncHTTPClient() + response = yield http_client.fetch("http://example.com") + do_something_with_response(response) + self.render("template.html") + +.. testoutput:: + :hide: + +Most asynchronous functions in Tornado return a `.Future`; +yielding this object returns its `~.Future.result`. 
+ +You can also yield a list or dict of ``Futures``, which will be +started at the same time and run in parallel; a list or dict of results will +be returned when they are all finished: + +.. testcode:: + + @gen.coroutine + def get(self): + http_client = AsyncHTTPClient() + response1, response2 = yield [http_client.fetch(url1), + http_client.fetch(url2)] + response_dict = yield dict(response3=http_client.fetch(url3), + response4=http_client.fetch(url4)) + response3 = response_dict['response3'] + response4 = response_dict['response4'] + +.. testoutput:: + :hide: + +If the `~functools.singledispatch` library is available (standard in +Python 3.4, available via the `singledispatch +`_ package on older +versions), additional types of objects may be yielded. Tornado includes +support for ``asyncio.Future`` and Twisted's ``Deferred`` class when +``tornado.platform.asyncio`` and ``tornado.platform.twisted`` are imported. +See the `convert_yielded` function to extend this mechanism. + +.. versionchanged:: 3.2 + Dict support added. + +.. versionchanged:: 4.1 + Support added for yielding ``asyncio`` Futures and Twisted Deferreds + via ``singledispatch``. + +""" +from __future__ import absolute_import, division, print_function, with_statement + +import collections +import functools +import itertools +import os +import sys +import textwrap +import types + +from tornado.concurrent import Future, TracebackFuture, is_future, chain_future +from tornado.ioloop import IOLoop +from tornado.log import app_log +from tornado import stack_context +from tornado.util import raise_exc_info + +try: + try: + from functools import singledispatch # py34+ + except ImportError: + from singledispatch import singledispatch # backport +except ImportError: + # In most cases, singledispatch is required (to avoid + # difficult-to-diagnose problems in which the functionality + # available differs depending on which invisble packages are + # installed). 
However, in Google App Engine third-party + # dependencies are more trouble so we allow this module to be + # imported without it. + if 'APPENGINE_RUNTIME' not in os.environ: + raise + singledispatch = None + +try: + try: + from collections.abc import Generator as GeneratorType # py35+ + except ImportError: + from backports_abc import Generator as GeneratorType + + try: + from inspect import isawaitable # py35+ + except ImportError: + from backports_abc import isawaitable +except ImportError: + if 'APPENGINE_RUNTIME' not in os.environ: + raise + from types import GeneratorType + + def isawaitable(x): + return False + +try: + import builtins # py3 +except ImportError: + import __builtin__ as builtins + + +class KeyReuseError(Exception): + pass + + +class UnknownKeyError(Exception): + pass + + +class LeakedCallbackError(Exception): + pass + + +class BadYieldError(Exception): + pass + + +class ReturnValueIgnoredError(Exception): + pass + + +class TimeoutError(Exception): + """Exception raised by ``with_timeout``.""" + + +def _value_from_stopiteration(e): + try: + # StopIteration has a value attribute beginning in py33. + # So does our Return class. + return e.value + except AttributeError: + pass + try: + # Cython backports coroutine functionality by putting the value in + # e.args[0]. + return e.args[0] + except (AttributeError, IndexError): + return None + + +def engine(func): + """Callback-oriented decorator for asynchronous generators. + + This is an older interface; for new code that does not need to be + compatible with versions of Tornado older than 3.0 the + `coroutine` decorator is recommended instead. + + This decorator is similar to `coroutine`, except it does not + return a `.Future` and the ``callback`` argument is not treated + specially. + + In most cases, functions decorated with `engine` should take + a ``callback`` argument and invoke it with their result when + they are finished. 
One notable exception is the + `~tornado.web.RequestHandler` :ref:`HTTP verb methods `, + which use ``self.finish()`` in place of a callback argument. + """ + func = _make_coroutine_wrapper(func, replace_callback=False) + + @functools.wraps(func) + def wrapper(*args, **kwargs): + future = func(*args, **kwargs) + + def final_callback(future): + if future.result() is not None: + raise ReturnValueIgnoredError( + "@gen.engine functions cannot return values: %r" % + (future.result(),)) + # The engine interface doesn't give us any way to return + # errors but to raise them into the stack context. + # Save the stack context here to use when the Future has resolved. + future.add_done_callback(stack_context.wrap(final_callback)) + return wrapper + + +def coroutine(func, replace_callback=True): + """Decorator for asynchronous generators. + + Any generator that yields objects from this module must be wrapped + in either this decorator or `engine`. + + Coroutines may "return" by raising the special exception + `Return(value) `. In Python 3.3+, it is also possible for + the function to simply use the ``return value`` statement (prior to + Python 3.3 generators were not allowed to also return values). + In all versions of Python a coroutine that simply wishes to exit + early may use the ``return`` statement without a value. + + Functions with this decorator return a `.Future`. Additionally, + they may be called with a ``callback`` keyword argument, which + will be invoked with the future's result when it resolves. If the + coroutine fails, the callback will not be run and an exception + will be raised into the surrounding `.StackContext`. The + ``callback`` argument is not visible inside the decorated + function; it is handled by the decorator itself. + + From the caller's perspective, ``@gen.coroutine`` is similar to + the combination of ``@return_future`` and ``@gen.engine``. + + .. 
warning:: + + When exceptions occur inside a coroutine, the exception + information will be stored in the `.Future` object. You must + examine the result of the `.Future` object, or the exception + may go unnoticed by your code. This means yielding the function + if called from another coroutine, using something like + `.IOLoop.run_sync` for top-level calls, or passing the `.Future` + to `.IOLoop.add_future`. + + """ + return _make_coroutine_wrapper(func, replace_callback=True) + + +def _make_coroutine_wrapper(func, replace_callback): + """The inner workings of ``@gen.coroutine`` and ``@gen.engine``. + + The two decorators differ in their treatment of the ``callback`` + argument, so we cannot simply implement ``@engine`` in terms of + ``@coroutine``. + """ + # On Python 3.5, set the coroutine flag on our generator, to allow it + # to be used with 'await'. + if hasattr(types, 'coroutine'): + func = types.coroutine(func) + + @functools.wraps(func) + def wrapper(*args, **kwargs): + future = TracebackFuture() + + if replace_callback and 'callback' in kwargs: + callback = kwargs.pop('callback') + IOLoop.current().add_future( + future, lambda future: callback(future.result())) + + try: + result = func(*args, **kwargs) + except (Return, StopIteration) as e: + result = _value_from_stopiteration(e) + except Exception: + future.set_exc_info(sys.exc_info()) + return future + else: + if isinstance(result, GeneratorType): + # Inline the first iteration of Runner.run. This lets us + # avoid the cost of creating a Runner when the coroutine + # never actually yields, which in turn allows us to + # use "optional" coroutines in critical path code without + # performance penalty for the synchronous case. 
+ try: + orig_stack_contexts = stack_context._state.contexts + yielded = next(result) + if stack_context._state.contexts is not orig_stack_contexts: + yielded = TracebackFuture() + yielded.set_exception( + stack_context.StackContextInconsistentError( + 'stack_context inconsistency (probably caused ' + 'by yield within a "with StackContext" block)')) + except (StopIteration, Return) as e: + future.set_result(_value_from_stopiteration(e)) + except Exception: + future.set_exc_info(sys.exc_info()) + else: + Runner(result, future, yielded) + try: + return future + finally: + # Subtle memory optimization: if next() raised an exception, + # the future's exc_info contains a traceback which + # includes this stack frame. This creates a cycle, + # which will be collected at the next full GC but has + # been shown to greatly increase memory usage of + # benchmarks (relative to the refcount-based scheme + # used in the absence of cycles). We can avoid the + # cycle by clearing the local variable after we return it. + future = None + future.set_result(result) + return future + return wrapper + + +class Return(Exception): + """Special exception to return a value from a `coroutine`. + + If this exception is raised, its value argument is used as the + result of the coroutine:: + + @gen.coroutine + def fetch_json(url): + response = yield AsyncHTTPClient().fetch(url) + raise gen.Return(json_decode(response.body)) + + In Python 3.3, this exception is no longer necessary: the ``return`` + statement can be used directly to return a value (previously + ``yield`` and ``return`` with a value could not be combined in the + same function). + + By analogy with the return statement, the value argument is optional, + but it is never necessary to ``raise gen.Return()``. The ``return`` + statement can be used with no arguments instead. 
+ """ + def __init__(self, value=None): + super(Return, self).__init__() + self.value = value + # Cython recognizes subclasses of StopIteration with a .args tuple. + self.args = (value,) + + +class WaitIterator(object): + """Provides an iterator to yield the results of futures as they finish. + + Yielding a set of futures like this: + + ``results = yield [future1, future2]`` + + pauses the coroutine until both ``future1`` and ``future2`` + return, and then restarts the coroutine with the results of both + futures. If either future is an exception, the expression will + raise that exception and all the results will be lost. + + If you need to get the result of each future as soon as possible, + or if you need the result of some futures even if others produce + errors, you can use ``WaitIterator``:: + + wait_iterator = gen.WaitIterator(future1, future2) + while not wait_iterator.done(): + try: + result = yield wait_iterator.next() + except Exception as e: + print("Error {} from {}".format(e, wait_iterator.current_future)) + else: + print("Result {} received from {} at {}".format( + result, wait_iterator.current_future, + wait_iterator.current_index)) + + Because results are returned as soon as they are available the + output from the iterator *will not be in the same order as the + input arguments*. If you need to know which future produced the + current result, you can use the attributes + ``WaitIterator.current_future``, or ``WaitIterator.current_index`` + to get the index of the future from the input list. (if keyword + arguments were used in the construction of the `WaitIterator`, + ``current_index`` will use the corresponding keyword). 
+ + On Python 3.5, `WaitIterator` implements the async iterator + protocol, so it can be used with the ``async for`` statement (note + that in this version the entire iteration is aborted if any value + raises an exception, while the previous example can continue past + individual errors):: + + async for result in gen.WaitIterator(future1, future2): + print("Result {} received from {} at {}".format( + result, wait_iterator.current_future, + wait_iterator.current_index)) + + .. versionadded:: 4.1 + + .. versionchanged:: 4.3 + Added ``async for`` support in Python 3.5. + + """ + def __init__(self, *args, **kwargs): + if args and kwargs: + raise ValueError( + "You must provide args or kwargs, not both") + + if kwargs: + self._unfinished = dict((f, k) for (k, f) in kwargs.items()) + futures = list(kwargs.values()) + else: + self._unfinished = dict((f, i) for (i, f) in enumerate(args)) + futures = args + + self._finished = collections.deque() + self.current_index = self.current_future = None + self._running_future = None + + for future in futures: + future.add_done_callback(self._done_callback) + + def done(self): + """Returns True if this iterator has no more results.""" + if self._finished or self._unfinished: + return False + # Clear the 'current' values when iteration is done. + self.current_index = self.current_future = None + return True + + def next(self): + """Returns a `.Future` that will yield the next available result. + + Note that this `.Future` will not be the same object as any of + the inputs. 
+ """ + self._running_future = TracebackFuture() + + if self._finished: + self._return_result(self._finished.popleft()) + + return self._running_future + + def _done_callback(self, done): + if self._running_future and not self._running_future.done(): + self._return_result(done) + else: + self._finished.append(done) + + def _return_result(self, done): + """Called set the returned future's state that of the future + we yielded, and set the current future for the iterator. + """ + chain_future(done, self._running_future) + + self.current_future = done + self.current_index = self._unfinished.pop(done) + + @coroutine + def __aiter__(self): + raise Return(self) + + def __anext__(self): + if self.done(): + # Lookup by name to silence pyflakes on older versions. + raise getattr(builtins, 'StopAsyncIteration')() + return self.next() + + +class YieldPoint(object): + """Base class for objects that may be yielded from the generator. + + .. deprecated:: 4.0 + Use `Futures <.Future>` instead. + """ + def start(self, runner): + """Called by the runner after the generator has yielded. + + No other methods will be called on this object before ``start``. + """ + raise NotImplementedError() + + def is_ready(self): + """Called by the runner to determine whether to resume the generator. + + Returns a boolean; may be called more than once. + """ + raise NotImplementedError() + + def get_result(self): + """Returns the value to use as the result of the yield expression. + + This method will only be called once, and only after `is_ready` + has returned true. + """ + raise NotImplementedError() + + +class Callback(YieldPoint): + """Returns a callable object that will allow a matching `Wait` to proceed. + + The key may be any value suitable for use as a dictionary key, and is + used to match ``Callbacks`` to their corresponding ``Waits``. 
The key + must be unique among outstanding callbacks within a single run of the + generator function, but may be reused across different runs of the same + function (so constants generally work fine). + + The callback may be called with zero or one arguments; if an argument + is given it will be returned by `Wait`. + + .. deprecated:: 4.0 + Use `Futures <.Future>` instead. + """ + def __init__(self, key): + self.key = key + + def start(self, runner): + self.runner = runner + runner.register_callback(self.key) + + def is_ready(self): + return True + + def get_result(self): + return self.runner.result_callback(self.key) + + +class Wait(YieldPoint): + """Returns the argument passed to the result of a previous `Callback`. + + .. deprecated:: 4.0 + Use `Futures <.Future>` instead. + """ + def __init__(self, key): + self.key = key + + def start(self, runner): + self.runner = runner + + def is_ready(self): + return self.runner.is_ready(self.key) + + def get_result(self): + return self.runner.pop_result(self.key) + + +class WaitAll(YieldPoint): + """Returns the results of multiple previous `Callbacks `. + + The argument is a sequence of `Callback` keys, and the result is + a list of results in the same order. + + `WaitAll` is equivalent to yielding a list of `Wait` objects. + + .. deprecated:: 4.0 + Use `Futures <.Future>` instead. + """ + def __init__(self, keys): + self.keys = keys + + def start(self, runner): + self.runner = runner + + def is_ready(self): + return all(self.runner.is_ready(key) for key in self.keys) + + def get_result(self): + return [self.runner.pop_result(key) for key in self.keys] + + +def Task(func, *args, **kwargs): + """Adapts a callback-based asynchronous function for use in coroutines. + + Takes a function (and optional additional arguments) and runs it with + those arguments plus a ``callback`` keyword argument. The argument passed + to the callback is returned as the result of the yield expression. + + .. 
versionchanged:: 4.0 + ``gen.Task`` is now a function that returns a `.Future`, instead of + a subclass of `YieldPoint`. It still behaves the same way when + yielded. + """ + future = Future() + + def handle_exception(typ, value, tb): + if future.done(): + return False + future.set_exc_info((typ, value, tb)) + return True + + def set_result(result): + if future.done(): + return + future.set_result(result) + with stack_context.ExceptionStackContext(handle_exception): + func(*args, callback=_argument_adapter(set_result), **kwargs) + return future + + +class YieldFuture(YieldPoint): + def __init__(self, future, io_loop=None): + """Adapts a `.Future` to the `YieldPoint` interface. + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. + """ + self.future = future + self.io_loop = io_loop or IOLoop.current() + + def start(self, runner): + if not self.future.done(): + self.runner = runner + self.key = object() + runner.register_callback(self.key) + self.io_loop.add_future(self.future, runner.result_callback(self.key)) + else: + self.runner = None + self.result_fn = self.future.result + + def is_ready(self): + if self.runner is not None: + return self.runner.is_ready(self.key) + else: + return True + + def get_result(self): + if self.runner is not None: + return self.runner.pop_result(self.key).result() + else: + return self.result_fn() + + +def _contains_yieldpoint(children): + """Returns True if ``children`` contains any YieldPoints. + + ``children`` may be a dict or a list, as used by `MultiYieldPoint` + and `multi_future`. + """ + if isinstance(children, dict): + return any(isinstance(i, YieldPoint) for i in children.values()) + if isinstance(children, list): + return any(isinstance(i, YieldPoint) for i in children) + return False + + +def multi(children, quiet_exceptions=()): + """Runs multiple asynchronous operations in parallel. + + ``children`` may either be a list or a dict whose values are + yieldable objects. 
``multi()`` returns a new yieldable + object that resolves to a parallel structure containing their + results. If ``children`` is a list, the result is a list of + results in the same order; if it is a dict, the result is a dict + with the same keys. + + That is, ``results = yield multi(list_of_futures)`` is equivalent + to:: + + results = [] + for future in list_of_futures: + results.append(yield future) + + If any children raise exceptions, ``multi()`` will raise the first + one. All others will be logged, unless they are of types + contained in the ``quiet_exceptions`` argument. + + If any of the inputs are `YieldPoints `, the returned + yieldable object is a `YieldPoint`. Otherwise, returns a `.Future`. + This means that the result of `multi` can be used in a native + coroutine if and only if all of its children can be. + + In a ``yield``-based coroutine, it is not normally necessary to + call this function directly, since the coroutine runner will + do it automatically when a list or dict is yielded. However, + it is necessary in ``await``-based coroutines, or to pass + the ``quiet_exceptions`` argument. + + This function is available under the names ``multi()`` and ``Multi()`` + for historical reasons. + + .. versionchanged:: 4.2 + If multiple yieldables fail, any exceptions after the first + (which is raised) will be logged. Added the ``quiet_exceptions`` + argument to suppress this logging for selected exception types. + + .. versionchanged:: 4.3 + Replaced the class ``Multi`` and the function ``multi_future`` + with a unified function ``multi``. Added support for yieldables + other than `YieldPoint` and `.Future`. + + """ + if _contains_yieldpoint(children): + return MultiYieldPoint(children, quiet_exceptions=quiet_exceptions) + else: + return multi_future(children, quiet_exceptions=quiet_exceptions) + +Multi = multi + + +class MultiYieldPoint(YieldPoint): + """Runs multiple asynchronous operations in parallel. 
+ + This class is similar to `multi`, but it always creates a stack + context even when no children require it. It is not compatible with + native coroutines. + + .. versionchanged:: 4.2 + If multiple ``YieldPoints`` fail, any exceptions after the first + (which is raised) will be logged. Added the ``quiet_exceptions`` + argument to suppress this logging for selected exception types. + + .. versionchanged:: 4.3 + Renamed from ``Multi`` to ``MultiYieldPoint``. The name ``Multi`` + remains as an alias for the equivalent `multi` function. + + .. deprecated:: 4.3 + Use `multi` instead. + """ + def __init__(self, children, quiet_exceptions=()): + self.keys = None + if isinstance(children, dict): + self.keys = list(children.keys()) + children = children.values() + self.children = [] + for i in children: + if not isinstance(i, YieldPoint): + i = convert_yielded(i) + if is_future(i): + i = YieldFuture(i) + self.children.append(i) + assert all(isinstance(i, YieldPoint) for i in self.children) + self.unfinished_children = set(self.children) + self.quiet_exceptions = quiet_exceptions + + def start(self, runner): + for i in self.children: + i.start(runner) + + def is_ready(self): + finished = list(itertools.takewhile( + lambda i: i.is_ready(), self.unfinished_children)) + self.unfinished_children.difference_update(finished) + return not self.unfinished_children + + def get_result(self): + result_list = [] + exc_info = None + for f in self.children: + try: + result_list.append(f.get_result()) + except Exception as e: + if exc_info is None: + exc_info = sys.exc_info() + else: + if not isinstance(e, self.quiet_exceptions): + app_log.error("Multiple exceptions in yield list", + exc_info=True) + if exc_info is not None: + raise_exc_info(exc_info) + if self.keys is not None: + return dict(zip(self.keys, result_list)) + else: + return list(result_list) + + +def multi_future(children, quiet_exceptions=()): + """Wait for multiple asynchronous futures in parallel. 
+ + This function is similar to `multi`, but does not support + `YieldPoints `. + + .. versionadded:: 4.0 + + .. versionchanged:: 4.2 + If multiple ``Futures`` fail, any exceptions after the first (which is + raised) will be logged. Added the ``quiet_exceptions`` + argument to suppress this logging for selected exception types. + + .. deprecated:: 4.3 + Use `multi` instead. + """ + if isinstance(children, dict): + keys = list(children.keys()) + children = children.values() + else: + keys = None + children = list(map(convert_yielded, children)) + assert all(is_future(i) for i in children) + unfinished_children = set(children) + + future = Future() + if not children: + future.set_result({} if keys is not None else []) + + def callback(f): + unfinished_children.remove(f) + if not unfinished_children: + result_list = [] + for f in children: + try: + result_list.append(f.result()) + except Exception as e: + if future.done(): + if not isinstance(e, quiet_exceptions): + app_log.error("Multiple exceptions in yield list", + exc_info=True) + else: + future.set_exc_info(sys.exc_info()) + if not future.done(): + if keys is not None: + future.set_result(dict(zip(keys, result_list))) + else: + future.set_result(result_list) + + listening = set() + for f in children: + if f not in listening: + listening.add(f) + f.add_done_callback(callback) + return future + + +def maybe_future(x): + """Converts ``x`` into a `.Future`. + + If ``x`` is already a `.Future`, it is simply returned; otherwise + it is wrapped in a new `.Future`. This is suitable for use as + ``result = yield gen.maybe_future(f())`` when you don't know whether + ``f()`` returns a `.Future` or not. + + .. deprecated:: 4.3 + This function only handles ``Futures``, not other yieldable objects. + Instead of `maybe_future`, check for the non-future result types + you expect (often just ``None``), and ``yield`` anything unknown. 
+ """ + if is_future(x): + return x + else: + fut = Future() + fut.set_result(x) + return fut + + +def with_timeout(timeout, future, io_loop=None, quiet_exceptions=()): + """Wraps a `.Future` in a timeout. + + Raises `TimeoutError` if the input future does not complete before + ``timeout``, which may be specified in any form allowed by + `.IOLoop.add_timeout` (i.e. a `datetime.timedelta` or an absolute time + relative to `.IOLoop.time`) + + If the wrapped `.Future` fails after it has timed out, the exception + will be logged unless it is of a type contained in ``quiet_exceptions`` + (which may be an exception type or a sequence of types). + + Currently only supports Futures, not other `YieldPoint` classes. + + .. versionadded:: 4.0 + + .. versionchanged:: 4.1 + Added the ``quiet_exceptions`` argument and the logging of unhandled + exceptions. + """ + # TODO: allow yield points in addition to futures? + # Tricky to do with stack_context semantics. + # + # It's tempting to optimize this by cancelling the input future on timeout + # instead of creating a new one, but A) we can't know if we are the only + # one waiting on the input future, so cancelling it might disrupt other + # callers and B) concurrent futures can only be cancelled while they are + # in the queue, so cancellation cannot reliably bound our waiting time. + result = Future() + chain_future(future, result) + if io_loop is None: + io_loop = IOLoop.current() + + def error_callback(future): + try: + future.result() + except Exception as e: + if not isinstance(e, quiet_exceptions): + app_log.error("Exception in Future %r after timeout", + future, exc_info=True) + + def timeout_callback(): + result.set_exception(TimeoutError("Timeout")) + # In case the wrapped future goes on to fail, log it. 
+ future.add_done_callback(error_callback) + timeout_handle = io_loop.add_timeout( + timeout, timeout_callback) + if isinstance(future, Future): + # We know this future will resolve on the IOLoop, so we don't + # need the extra thread-safety of IOLoop.add_future (and we also + # don't care about StackContext here. + future.add_done_callback( + lambda future: io_loop.remove_timeout(timeout_handle)) + else: + # concurrent.futures.Futures may resolve on any thread, so we + # need to route them back to the IOLoop. + io_loop.add_future( + future, lambda future: io_loop.remove_timeout(timeout_handle)) + return result + + +def sleep(duration): + """Return a `.Future` that resolves after the given number of seconds. + + When used with ``yield`` in a coroutine, this is a non-blocking + analogue to `time.sleep` (which should not be used in coroutines + because it is blocking):: + + yield gen.sleep(0.5) + + Note that calling this function on its own does nothing; you must + wait on the `.Future` it returns (usually by yielding it). + + .. versionadded:: 4.1 + """ + f = Future() + IOLoop.current().call_later(duration, lambda: f.set_result(None)) + return f + + +_null_future = Future() +_null_future.set_result(None) + +moment = Future() +moment.__doc__ = \ + """A special object which may be yielded to allow the IOLoop to run for +one iteration. + +This is not needed in normal use but it can be helpful in long-running +coroutines that are likely to yield Futures that are ready instantly. + +Usage: ``yield gen.moment`` + +.. versionadded:: 4.0 +""" +moment.set_result(None) + + +class Runner(object): + """Internal implementation of `tornado.gen.engine`. + + Maintains information about pending callbacks and their results. 
+ + The results of the generator are stored in ``result_future`` (a + `.TracebackFuture`) + """ + def __init__(self, gen, result_future, first_yielded): + self.gen = gen + self.result_future = result_future + self.future = _null_future + self.yield_point = None + self.pending_callbacks = None + self.results = None + self.running = False + self.finished = False + self.had_exception = False + self.io_loop = IOLoop.current() + # For efficiency, we do not create a stack context until we + # reach a YieldPoint (stack contexts are required for the historical + # semantics of YieldPoints, but not for Futures). When we have + # done so, this field will be set and must be called at the end + # of the coroutine. + self.stack_context_deactivate = None + if self.handle_yield(first_yielded): + self.run() + + def register_callback(self, key): + """Adds ``key`` to the list of callbacks.""" + if self.pending_callbacks is None: + # Lazily initialize the old-style YieldPoint data structures. + self.pending_callbacks = set() + self.results = {} + if key in self.pending_callbacks: + raise KeyReuseError("key %r is already pending" % (key,)) + self.pending_callbacks.add(key) + + def is_ready(self, key): + """Returns true if a result is available for ``key``.""" + if self.pending_callbacks is None or key not in self.pending_callbacks: + raise UnknownKeyError("key %r is not pending" % (key,)) + return key in self.results + + def set_result(self, key, result): + """Sets the result for ``key`` and attempts to resume the generator.""" + self.results[key] = result + if self.yield_point is not None and self.yield_point.is_ready(): + try: + self.future.set_result(self.yield_point.get_result()) + except: + self.future.set_exc_info(sys.exc_info()) + self.yield_point = None + self.run() + + def pop_result(self, key): + """Returns the result for ``key`` and unregisters it.""" + self.pending_callbacks.remove(key) + return self.results.pop(key) + + def run(self): + """Starts or resumes the generator, 
running until it reaches a + yield point that is not ready. + """ + if self.running or self.finished: + return + try: + self.running = True + while True: + future = self.future + if not future.done(): + return + self.future = None + try: + orig_stack_contexts = stack_context._state.contexts + exc_info = None + + try: + value = future.result() + except Exception: + self.had_exception = True + exc_info = sys.exc_info() + + if exc_info is not None: + yielded = self.gen.throw(*exc_info) + exc_info = None + else: + yielded = self.gen.send(value) + + if stack_context._state.contexts is not orig_stack_contexts: + self.gen.throw( + stack_context.StackContextInconsistentError( + 'stack_context inconsistency (probably caused ' + 'by yield within a "with StackContext" block)')) + except (StopIteration, Return) as e: + self.finished = True + self.future = _null_future + if self.pending_callbacks and not self.had_exception: + # If we ran cleanly without waiting on all callbacks + # raise an error (really more of a warning). If we + # had an exception then some callbacks may have been + # orphaned, so skip the check in that case. + raise LeakedCallbackError( + "finished without waiting for callbacks %r" % + self.pending_callbacks) + self.result_future.set_result(_value_from_stopiteration(e)) + self.result_future = None + self._deactivate_stack_context() + return + except Exception: + self.finished = True + self.future = _null_future + self.result_future.set_exc_info(sys.exc_info()) + self.result_future = None + self._deactivate_stack_context() + return + if not self.handle_yield(yielded): + return + finally: + self.running = False + + def handle_yield(self, yielded): + # Lists containing YieldPoints require stack contexts; + # other lists are handled in convert_yielded. 
+ if _contains_yieldpoint(yielded): + yielded = multi(yielded) + + if isinstance(yielded, YieldPoint): + # YieldPoints are too closely coupled to the Runner to go + # through the generic convert_yielded mechanism. + self.future = TracebackFuture() + + def start_yield_point(): + try: + yielded.start(self) + if yielded.is_ready(): + self.future.set_result( + yielded.get_result()) + else: + self.yield_point = yielded + except Exception: + self.future = TracebackFuture() + self.future.set_exc_info(sys.exc_info()) + + if self.stack_context_deactivate is None: + # Start a stack context if this is the first + # YieldPoint we've seen. + with stack_context.ExceptionStackContext( + self.handle_exception) as deactivate: + self.stack_context_deactivate = deactivate + + def cb(): + start_yield_point() + self.run() + self.io_loop.add_callback(cb) + return False + else: + start_yield_point() + else: + try: + self.future = convert_yielded(yielded) + except BadYieldError: + self.future = TracebackFuture() + self.future.set_exc_info(sys.exc_info()) + + if not self.future.done() or self.future is moment: + self.io_loop.add_future( + self.future, lambda f: self.run()) + return False + return True + + def result_callback(self, key): + return stack_context.wrap(_argument_adapter( + functools.partial(self.set_result, key))) + + def handle_exception(self, typ, value, tb): + if not self.running and not self.finished: + self.future = TracebackFuture() + self.future.set_exc_info((typ, value, tb)) + self.run() + return True + else: + return False + + def _deactivate_stack_context(self): + if self.stack_context_deactivate is not None: + self.stack_context_deactivate() + self.stack_context_deactivate = None + +Arguments = collections.namedtuple('Arguments', ['args', 'kwargs']) + + +def _argument_adapter(callback): + """Returns a function that when invoked runs ``callback`` with one arg. 
+ + If the function returned by this function is called with exactly + one argument, that argument is passed to ``callback``. Otherwise + the args tuple and kwargs dict are wrapped in an `Arguments` object. + """ + def wrapper(*args, **kwargs): + if kwargs or len(args) > 1: + callback(Arguments(args, kwargs)) + elif args: + callback(args[0]) + else: + callback(None) + return wrapper + +# Convert Awaitables into Futures. It is unfortunately possible +# to have infinite recursion here if those Awaitables assume that +# we're using a different coroutine runner and yield objects +# we don't understand. If that happens, the solution is to +# register that runner's yieldable objects with convert_yielded. +if sys.version_info >= (3, 3): + exec(textwrap.dedent(""" + @coroutine + def _wrap_awaitable(x): + if hasattr(x, '__await__'): + x = x.__await__() + return (yield from x) + """)) +else: + # Py2-compatible version for use with Cython. + # Copied from PEP 380. + @coroutine + def _wrap_awaitable(x): + if hasattr(x, '__await__'): + _i = x.__await__() + else: + _i = iter(x) + try: + _y = next(_i) + except StopIteration as _e: + _r = _value_from_stopiteration(_e) + else: + while 1: + try: + _s = yield _y + except GeneratorExit as _e: + try: + _m = _i.close + except AttributeError: + pass + else: + _m() + raise _e + except BaseException as _e: + _x = sys.exc_info() + try: + _m = _i.throw + except AttributeError: + raise _e + else: + try: + _y = _m(*_x) + except StopIteration as _e: + _r = _value_from_stopiteration(_e) + break + else: + try: + if _s is None: + _y = next(_i) + else: + _y = _i.send(_s) + except StopIteration as _e: + _r = _value_from_stopiteration(_e) + break + raise Return(_r) + + +def convert_yielded(yielded): + """Convert a yielded object into a `.Future`. + + The default implementation accepts lists, dictionaries, and Futures. + + If the `~functools.singledispatch` library is available, this function + may be extended to support additional types. 
For example:: + + @convert_yielded.register(asyncio.Future) + def _(asyncio_future): + return tornado.platform.asyncio.to_tornado_future(asyncio_future) + + .. versionadded:: 4.1 + """ + # Lists and dicts containing YieldPoints were handled earlier. + if isinstance(yielded, (list, dict)): + return multi(yielded) + elif is_future(yielded): + return yielded + elif isawaitable(yielded): + return _wrap_awaitable(yielded) + else: + raise BadYieldError("yielded unknown object %r" % (yielded,)) + +if singledispatch is not None: + convert_yielded = singledispatch(convert_yielded) + + try: + # If we can import t.p.asyncio, do it for its side effect + # (registering asyncio.Future with convert_yielded). + # It's ugly to do this here, but it prevents a cryptic + # infinite recursion in _wrap_awaitable. + # Note that even with this, asyncio integration is unlikely + # to work unless the application also configures AsyncIOLoop, + # but at least the error messages in that case are more + # comprehensible than a stack overflow. + import tornado.platform.asyncio + except ImportError: + pass + else: + # Reference the imported module to make pyflakes happy. + tornado diff --git a/python/tornado/http1connection.py b/python/tornado/http1connection.py new file mode 100644 index 000000000..1c577063b --- /dev/null +++ b/python/tornado/http1connection.py @@ -0,0 +1,722 @@ +#!/usr/bin/env python +# +# Copyright 2014 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +"""Client and server implementations of HTTP/1.x. + +.. versionadded:: 4.0 +""" + +from __future__ import absolute_import, division, print_function, with_statement + +import re + +from tornado.concurrent import Future +from tornado.escape import native_str, utf8 +from tornado import gen +from tornado import httputil +from tornado import iostream +from tornado.log import gen_log, app_log +from tornado import stack_context +from tornado.util import GzipDecompressor + + +class _QuietException(Exception): + def __init__(self): + pass + + +class _ExceptionLoggingContext(object): + """Used with the ``with`` statement when calling delegate methods to + log any exceptions with the given logger. Any exceptions caught are + converted to _QuietException + """ + def __init__(self, logger): + self.logger = logger + + def __enter__(self): + pass + + def __exit__(self, typ, value, tb): + if value is not None: + self.logger.error("Uncaught exception", exc_info=(typ, value, tb)) + raise _QuietException + + +class HTTP1ConnectionParameters(object): + """Parameters for `.HTTP1Connection` and `.HTTP1ServerConnection`. + """ + def __init__(self, no_keep_alive=False, chunk_size=None, + max_header_size=None, header_timeout=None, max_body_size=None, + body_timeout=None, decompress=False): + """ + :arg bool no_keep_alive: If true, always close the connection after + one request. 
+ :arg int chunk_size: how much data to read into memory at once + :arg int max_header_size: maximum amount of data for HTTP headers + :arg float header_timeout: how long to wait for all headers (seconds) + :arg int max_body_size: maximum amount of data for body + :arg float body_timeout: how long to wait while reading body (seconds) + :arg bool decompress: if true, decode incoming + ``Content-Encoding: gzip`` + """ + self.no_keep_alive = no_keep_alive + self.chunk_size = chunk_size or 65536 + self.max_header_size = max_header_size or 65536 + self.header_timeout = header_timeout + self.max_body_size = max_body_size + self.body_timeout = body_timeout + self.decompress = decompress + + +class HTTP1Connection(httputil.HTTPConnection): + """Implements the HTTP/1.x protocol. + + This class can be on its own for clients, or via `HTTP1ServerConnection` + for servers. + """ + def __init__(self, stream, is_client, params=None, context=None): + """ + :arg stream: an `.IOStream` + :arg bool is_client: client or server + :arg params: a `.HTTP1ConnectionParameters` instance or ``None`` + :arg context: an opaque application-defined object that can be accessed + as ``connection.context``. + """ + self.is_client = is_client + self.stream = stream + if params is None: + params = HTTP1ConnectionParameters() + self.params = params + self.context = context + self.no_keep_alive = params.no_keep_alive + # The body limits can be altered by the delegate, so save them + # here instead of just referencing self.params later. + self._max_body_size = (self.params.max_body_size or + self.stream.max_buffer_size) + self._body_timeout = self.params.body_timeout + # _write_finished is set to True when finish() has been called, + # i.e. there will be no more data sent. Data may still be in the + # stream's write buffer. + self._write_finished = False + # True when we have read the entire incoming body. 
        self._read_finished = False
        # _finish_future resolves when all data has been written and flushed
        # to the IOStream.
        self._finish_future = Future()
        # If true, the connection should be closed after this request
        # (after the response has been written in the server side,
        # and after it has been read in the client)
        self._disconnect_on_finish = False
        self._clear_callbacks()
        # Save the start lines after we read or write them; they
        # affect later processing (e.g. 304 responses and HEAD methods
        # have content-length but no bodies)
        self._request_start_line = None
        self._response_start_line = None
        self._request_headers = None
        # True if we are writing output with chunked encoding.
        self._chunking_output = None
        # While reading a body with a content-length, this is the
        # amount left to read.
        self._expected_content_remaining = None
        # A Future for our outgoing writes, returned by IOStream.write.
        self._pending_write = None

    def read_response(self, delegate):
        """Read a single HTTP response.

        Typical client-mode usage is to write a request using `write_headers`,
        `write`, and `finish`, and then call ``read_response``.

        :arg delegate: a `.HTTPMessageDelegate`

        Returns a `.Future` that resolves to None after the full response has
        been read.
        """
        if self.params.decompress:
            # Transparently decode gzip-encoded responses before the
            # delegate sees them.
            delegate = _GzipMessageDelegate(delegate, self.params.chunk_size)
        return self._read_message(delegate)

    @gen.coroutine
    def _read_message(self, delegate):
        """Read and dispatch a single HTTP message (request or response).

        Resolves to True if the connection may be reused for another
        message, False if it must be closed.
        """
        need_delegate_close = False
        try:
            header_future = self.stream.read_until_regex(
                b"\r?\n\r?\n",
                max_bytes=self.params.max_header_size)
            if self.params.header_timeout is None:
                header_data = yield header_future
            else:
                try:
                    header_data = yield gen.with_timeout(
                        self.stream.io_loop.time() + self.params.header_timeout,
                        header_future,
                        io_loop=self.stream.io_loop,
                        quiet_exceptions=iostream.StreamClosedError)
                except gen.TimeoutError:
                    self.close()
                    raise gen.Return(False)
            start_line, headers = self._parse_headers(header_data)
            if self.is_client:
                start_line = httputil.parse_response_start_line(start_line)
                self._response_start_line = start_line
            else:
                start_line = httputil.parse_request_start_line(start_line)
                self._request_start_line = start_line
                self._request_headers = headers

            self._disconnect_on_finish = not self._can_keep_alive(
                start_line, headers)
            need_delegate_close = True
            with _ExceptionLoggingContext(app_log):
                header_future = delegate.headers_received(start_line, headers)
                if header_future is not None:
                    yield header_future
            if self.stream is None:
                # We've been detached.
                need_delegate_close = False
                raise gen.Return(False)
            skip_body = False
            if self.is_client:
                if (self._request_start_line is not None and
                        self._request_start_line.method == 'HEAD'):
                    skip_body = True
                code = start_line.code
                if code == 304:
                    # 304 responses may include the content-length header
                    # but do not actually have a body.
                    # http://tools.ietf.org/html/rfc7230#section-3.3
                    skip_body = True
                if code >= 100 and code < 200:
                    # 1xx responses should never indicate the presence of
                    # a body.
                    if ('Content-Length' in headers or
                            'Transfer-Encoding' in headers):
                        raise httputil.HTTPInputError(
                            "Response code %d cannot have body" % code)
                    # TODO: client delegates will get headers_received twice
                    # in the case of a 100-continue. Document or change?
                    yield self._read_message(delegate)
            else:
                if (headers.get("Expect") == "100-continue" and
                        not self._write_finished):
                    self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n")
            if not skip_body:
                body_future = self._read_body(
                    start_line.code if self.is_client else 0, headers, delegate)
                if body_future is not None:
                    if self._body_timeout is None:
                        yield body_future
                    else:
                        try:
                            yield gen.with_timeout(
                                self.stream.io_loop.time() + self._body_timeout,
                                body_future, self.stream.io_loop,
                                quiet_exceptions=iostream.StreamClosedError)
                        except gen.TimeoutError:
                            gen_log.info("Timeout reading body from %s",
                                         self.context)
                            self.stream.close()
                            raise gen.Return(False)
            self._read_finished = True
            if not self._write_finished or self.is_client:
                need_delegate_close = False
                with _ExceptionLoggingContext(app_log):
                    delegate.finish()
            # If we're waiting for the application to produce an asynchronous
            # response, and we're not detached, register a close callback
            # on the stream (we didn't need one while we were reading)
            if (not self._finish_future.done() and
                    self.stream is not None and
                    not self.stream.closed()):
                self.stream.set_close_callback(self._on_connection_close)
                yield self._finish_future
            if self.is_client and self._disconnect_on_finish:
                self.close()
            if self.stream is None:
                raise gen.Return(False)
        except httputil.HTTPInputError as e:
            gen_log.info("Malformed HTTP message from %s: %s",
                         self.context, e)
            self.close()
            raise gen.Return(False)
        finally:
            if need_delegate_close:
                with _ExceptionLoggingContext(app_log):
                    delegate.on_connection_close()
            self._clear_callbacks()
        raise gen.Return(True)

    def _clear_callbacks(self):
        """Clears the callback
attributes.

        This allows the request handler to be garbage collected more
        quickly in CPython by breaking up reference cycles.
        """
        self._write_callback = None
        self._write_future = None
        self._close_callback = None
        if self.stream is not None:
            self.stream.set_close_callback(None)

    def set_close_callback(self, callback):
        """Sets a callback that will be run when the connection is closed.

        .. deprecated:: 4.0
            Use `.HTTPMessageDelegate.on_connection_close` instead.
        """
        self._close_callback = stack_context.wrap(callback)

    def _on_connection_close(self):
        # Note that this callback is only registered on the IOStream
        # when we have finished reading the request and are waiting for
        # the application to produce its response.
        if self._close_callback is not None:
            callback = self._close_callback
            self._close_callback = None
            callback()
        if not self._finish_future.done():
            self._finish_future.set_result(None)
        self._clear_callbacks()

    def close(self):
        # Close the underlying stream (if still attached) and resolve
        # _finish_future so any coroutine waiting on it can proceed.
        if self.stream is not None:
            self.stream.close()
        self._clear_callbacks()
        if not self._finish_future.done():
            self._finish_future.set_result(None)

    def detach(self):
        """Take control of the underlying stream.

        Returns the underlying `.IOStream` object and stops all further
        HTTP processing.  May only be called during
        `.HTTPMessageDelegate.headers_received`.  Intended for implementing
        protocols like websockets that tunnel over an HTTP handshake.
        """
        self._clear_callbacks()
        stream = self.stream
        self.stream = None
        if not self._finish_future.done():
            self._finish_future.set_result(None)
        return stream

    def set_body_timeout(self, timeout):
        """Sets the body timeout for a single request.

        Overrides the value from `.HTTP1ConnectionParameters`.
        """
        self._body_timeout = timeout

    def set_max_body_size(self, max_body_size):
        """Sets the body size limit for a single request.

        Overrides the value from `.HTTP1ConnectionParameters`.
+ """ + self._max_body_size = max_body_size + + def write_headers(self, start_line, headers, chunk=None, callback=None): + """Implements `.HTTPConnection.write_headers`.""" + lines = [] + if self.is_client: + self._request_start_line = start_line + lines.append(utf8('%s %s HTTP/1.1' % (start_line[0], start_line[1]))) + # Client requests with a non-empty body must have either a + # Content-Length or a Transfer-Encoding. + self._chunking_output = ( + start_line.method in ('POST', 'PUT', 'PATCH') and + 'Content-Length' not in headers and + 'Transfer-Encoding' not in headers) + else: + self._response_start_line = start_line + lines.append(utf8('HTTP/1.1 %s %s' % (start_line[1], start_line[2]))) + self._chunking_output = ( + # TODO: should this use + # self._request_start_line.version or + # start_line.version? + self._request_start_line.version == 'HTTP/1.1' and + # 304 responses have no body (not even a zero-length body), and so + # should not have either Content-Length or Transfer-Encoding. + # headers. + start_line.code != 304 and + # No need to chunk the output if a Content-Length is specified. + 'Content-Length' not in headers and + # Applications are discouraged from touching Transfer-Encoding, + # but if they do, leave it alone. + 'Transfer-Encoding' not in headers) + # If a 1.0 client asked for keep-alive, add the header. 
+ if (self._request_start_line.version == 'HTTP/1.0' and + (self._request_headers.get('Connection', '').lower() + == 'keep-alive')): + headers['Connection'] = 'Keep-Alive' + if self._chunking_output: + headers['Transfer-Encoding'] = 'chunked' + if (not self.is_client and + (self._request_start_line.method == 'HEAD' or + start_line.code == 304)): + self._expected_content_remaining = 0 + elif 'Content-Length' in headers: + self._expected_content_remaining = int(headers['Content-Length']) + else: + self._expected_content_remaining = None + lines.extend([utf8(n) + b": " + utf8(v) for n, v in headers.get_all()]) + for line in lines: + if b'\n' in line: + raise ValueError('Newline in header: ' + repr(line)) + future = None + if self.stream.closed(): + future = self._write_future = Future() + future.set_exception(iostream.StreamClosedError()) + future.exception() + else: + if callback is not None: + self._write_callback = stack_context.wrap(callback) + else: + future = self._write_future = Future() + data = b"\r\n".join(lines) + b"\r\n\r\n" + if chunk: + data += self._format_chunk(chunk) + self._pending_write = self.stream.write(data) + self._pending_write.add_done_callback(self._on_write_complete) + return future + + def _format_chunk(self, chunk): + if self._expected_content_remaining is not None: + self._expected_content_remaining -= len(chunk) + if self._expected_content_remaining < 0: + # Close the stream now to stop further framing errors. + self.stream.close() + raise httputil.HTTPOutputError( + "Tried to write more data than Content-Length") + if self._chunking_output and chunk: + # Don't write out empty chunks because that means END-OF-STREAM + # with chunked encoding + return utf8("%x" % len(chunk)) + b"\r\n" + chunk + b"\r\n" + else: + return chunk + + def write(self, chunk, callback=None): + """Implements `.HTTPConnection.write`. 
+ + For backwards compatibility is is allowed but deprecated to + skip `write_headers` and instead call `write()` with a + pre-encoded header block. + """ + future = None + if self.stream.closed(): + future = self._write_future = Future() + self._write_future.set_exception(iostream.StreamClosedError()) + self._write_future.exception() + else: + if callback is not None: + self._write_callback = stack_context.wrap(callback) + else: + future = self._write_future = Future() + self._pending_write = self.stream.write(self._format_chunk(chunk)) + self._pending_write.add_done_callback(self._on_write_complete) + return future + + def finish(self): + """Implements `.HTTPConnection.finish`.""" + if (self._expected_content_remaining is not None and + self._expected_content_remaining != 0 and + not self.stream.closed()): + self.stream.close() + raise httputil.HTTPOutputError( + "Tried to write %d bytes less than Content-Length" % + self._expected_content_remaining) + if self._chunking_output: + if not self.stream.closed(): + self._pending_write = self.stream.write(b"0\r\n\r\n") + self._pending_write.add_done_callback(self._on_write_complete) + self._write_finished = True + # If the app finished the request while we're still reading, + # divert any remaining data away from the delegate and + # close the connection when we're done sending our response. + # Closing the connection is the only way to avoid reading the + # whole input body. + if not self._read_finished: + self._disconnect_on_finish = True + # No more data is coming, so instruct TCP to send any remaining + # data immediately instead of waiting for a full packet or ack. 
        self.stream.set_nodelay(True)
        if self._pending_write is None:
            self._finish_request(None)
        else:
            self._pending_write.add_done_callback(self._finish_request)

    def _on_write_complete(self, future):
        exc = future.exception()
        if exc is not None and not isinstance(exc, iostream.StreamClosedError):
            # Re-raise any unexpected write error (result() raises it).
            future.result()
        if self._write_callback is not None:
            callback = self._write_callback
            self._write_callback = None
            self.stream.io_loop.add_callback(callback)
        if self._write_future is not None:
            future = self._write_future
            self._write_future = None
            future.set_result(None)

    def _can_keep_alive(self, start_line, headers):
        """Return True if the connection may be reused after this message."""
        if self.params.no_keep_alive:
            return False
        connection_header = headers.get("Connection")
        if connection_header is not None:
            connection_header = connection_header.lower()
        if start_line.version == "HTTP/1.1":
            # HTTP/1.1 defaults to keep-alive unless explicitly closed.
            return connection_header != "close"
        elif ("Content-Length" in headers
                or headers.get("Transfer-Encoding", "").lower() == "chunked"
                or start_line.method in ("HEAD", "GET")):
            # HTTP/1.0 requires an explicit keep-alive and a
            # well-delimited message body.
            return connection_header == "keep-alive"
        return False

    def _finish_request(self, future):
        self._clear_callbacks()
        if not self.is_client and self._disconnect_on_finish:
            self.close()
            return
        # Turn Nagle's algorithm back on, leaving the stream in its
        # default state for the next request.
        self.stream.set_nodelay(False)
        if not self._finish_future.done():
            self._finish_future.set_result(None)

    def _parse_headers(self, data):
        # The lstrip removes newlines that some implementations sometimes
        # insert between messages of a reused connection.  Per RFC 7230,
        # we SHOULD ignore at least one empty line before the request.
        # http://tools.ietf.org/html/rfc7230#section-3.5
        data = native_str(data.decode('latin1')).lstrip("\r\n")
        # RFC 7230 section allows for both CRLF and bare LF.
        eol = data.find("\n")
        start_line = data[:eol].rstrip("\r")
        try:
            headers = httputil.HTTPHeaders.parse(data[eol:])
        except ValueError:
            # probably from split() if there was no ':' in the line
            raise httputil.HTTPInputError("Malformed HTTP headers: %r" %
                                          data[eol:100])
        return start_line, headers

    def _read_body(self, code, headers, delegate):
        if "Content-Length" in headers:
            if "Transfer-Encoding" in headers:
                # Response cannot contain both Content-Length and
                # Transfer-Encoding headers.
                # http://tools.ietf.org/html/rfc7230#section-3.3.3
                raise httputil.HTTPInputError(
                    "Response with both Transfer-Encoding and Content-Length")
            if "," in headers["Content-Length"]:
                # Proxies sometimes cause Content-Length headers to get
                # duplicated.  If all the values are identical then we can
                # use them but if they differ it's an error.
                pieces = re.split(r',\s*', headers["Content-Length"])
                if any(i != pieces[0] for i in pieces):
                    raise httputil.HTTPInputError(
                        "Multiple unequal Content-Lengths: %r" %
                        headers["Content-Length"])
                headers["Content-Length"] = pieces[0]
            content_length = int(headers["Content-Length"])

            if content_length > self._max_body_size:
                raise httputil.HTTPInputError("Content-Length too long")
        else:
            content_length = None

        if code == 204:
            # This response code is not allowed to have a non-empty body,
            # and has an implicit length of zero instead of read-until-close.
            # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3
            if ("Transfer-Encoding" in headers or
                    content_length not in (None, 0)):
                raise httputil.HTTPInputError(
                    "Response with code %d should not have body" % code)
            content_length = 0

        if content_length is not None:
            return self._read_fixed_body(content_length, delegate)
        if headers.get("Transfer-Encoding") == "chunked":
            return self._read_chunked_body(delegate)
        if self.is_client:
            # Clients may read until the server closes the connection.
            return self._read_body_until_close(delegate)
        return None

    @gen.coroutine
    def _read_fixed_body(self, content_length, delegate):
        # Read exactly content_length bytes, in chunks of at most
        # params.chunk_size, forwarding each chunk to the delegate.
        while content_length > 0:
            body = yield self.stream.read_bytes(
                min(self.params.chunk_size, content_length), partial=True)
            content_length -= len(body)
            if not self._write_finished or self.is_client:
                with _ExceptionLoggingContext(app_log):
                    ret = delegate.data_received(body)
                    if ret is not None:
                        yield ret

    @gen.coroutine
    def _read_chunked_body(self, delegate):
        # TODO: "chunk extensions" http://tools.ietf.org/html/rfc2616#section-3.6.1
        total_size = 0
        while True:
            chunk_len = yield self.stream.read_until(b"\r\n", max_bytes=64)
            chunk_len = int(chunk_len.strip(), 16)
            if chunk_len == 0:
                # Zero-length chunk marks the end of the body.
                return
            total_size += chunk_len
            if total_size > self._max_body_size:
                raise httputil.HTTPInputError("chunked body too large")
            bytes_to_read = chunk_len
            while bytes_to_read:
                chunk = yield self.stream.read_bytes(
                    min(bytes_to_read, self.params.chunk_size), partial=True)
                bytes_to_read -= len(chunk)
                if not self._write_finished or self.is_client:
                    with _ExceptionLoggingContext(app_log):
                        ret = delegate.data_received(chunk)
                        if ret is not None:
                            yield ret
            # chunk ends with \r\n
            crlf = yield self.stream.read_bytes(2)
            assert crlf == b"\r\n"

    @gen.coroutine
    def _read_body_until_close(self, delegate):
        body = yield self.stream.read_until_close()
        if not self._write_finished or self.is_client:
            with _ExceptionLoggingContext(app_log):
                delegate.data_received(body)


class _GzipMessageDelegate(httputil.HTTPMessageDelegate):
    """Wraps an `HTTPMessageDelegate` to decode ``Content-Encoding: gzip``.
    """
    def __init__(self, delegate, chunk_size):
        self._delegate = delegate
        self._chunk_size = chunk_size
        self._decompressor = None

    def headers_received(self, start_line, headers):
        if headers.get("Content-Encoding") == "gzip":
            self._decompressor = GzipDecompressor()
            # Downstream delegates will only see uncompressed data,
            # so rename the content-encoding header.
            # (but note that curl_httpclient doesn't do this).
            headers.add("X-Consumed-Content-Encoding",
                        headers["Content-Encoding"])
            del headers["Content-Encoding"]
        return self._delegate.headers_received(start_line, headers)

    @gen.coroutine
    def data_received(self, chunk):
        if self._decompressor:
            compressed_data = chunk
            # decompress() may not consume all input at once; loop over
            # unconsumed_tail until everything has been processed.
            while compressed_data:
                decompressed = self._decompressor.decompress(
                    compressed_data, self._chunk_size)
                if decompressed:
                    ret = self._delegate.data_received(decompressed)
                    if ret is not None:
                        yield ret
                compressed_data = self._decompressor.unconsumed_tail
        else:
            ret = self._delegate.data_received(chunk)
            if ret is not None:
                yield ret

    def finish(self):
        if self._decompressor is not None:
            tail = self._decompressor.flush()
            if tail:
                # I believe the tail will always be empty (i.e.
                # decompress will return all it can).  The purpose
                # of the flush call is to detect errors such
                # as truncated input. But in case it ever returns
                # anything, treat it as an extra chunk
                self._delegate.data_received(tail)
        return self._delegate.finish()

    def on_connection_close(self):
        return self._delegate.on_connection_close()


class HTTP1ServerConnection(object):
    """An HTTP/1.x server."""
    def __init__(self, stream, params=None, context=None):
        """
        :arg stream: an `.IOStream`
        :arg params: a `.HTTP1ConnectionParameters` or None
        :arg context: an opaque application-defined object that is accessible
            as ``connection.context``
        """
        self.stream = stream
        if params is None:
            params = HTTP1ConnectionParameters()
        self.params = params
        self.context = context
        self._serving_future = None

    @gen.coroutine
    def close(self):
        """Closes the connection.

        Returns a `.Future` that resolves after the serving loop has exited.
        """
        self.stream.close()
        # Block until the serving loop is done, but ignore any exceptions
        # (start_serving is already responsible for logging them).
        try:
            yield self._serving_future
        except Exception:
            pass

    def start_serving(self, delegate):
        """Starts serving requests on this connection.

        :arg delegate: a `.HTTPServerConnectionDelegate`
        """
        assert isinstance(delegate, httputil.HTTPServerConnectionDelegate)
        self._serving_future = self._server_request_loop(delegate)
        # Register the future on the IOLoop so its errors get logged.
        self.stream.io_loop.add_future(self._serving_future,
                                       lambda f: f.result())

    @gen.coroutine
    def _server_request_loop(self, delegate):
        # Serve requests one at a time until the client disconnects or
        # keep-alive is not possible.
        try:
            while True:
                conn = HTTP1Connection(self.stream, False,
                                       self.params, self.context)
                request_delegate = delegate.start_request(self, conn)
                try:
                    ret = yield conn.read_response(request_delegate)
                except (iostream.StreamClosedError,
                        iostream.UnsatisfiableReadError):
                    return
                except _QuietException:
                    # This exception was already logged.
                    conn.close()
                    return
                except Exception:
                    gen_log.error("Uncaught exception", exc_info=True)
                    conn.close()
                    return
                if not ret:
                    # read_response returned False: connection cannot be
                    # reused for another request.
                    return
                # Yield control to the IOLoop between requests so one
                # connection cannot starve others.
                yield gen.moment
        finally:
            delegate.on_close(self)
diff --git a/python/tornado/httpclient.py b/python/tornado/httpclient.py
new file mode 100644
index 000000000..9179227b1
--- /dev/null
+++ b/python/tornado/httpclient.py
@@ -0,0 +1,659 @@
"""Blocking and non-blocking HTTP client interfaces.

This module defines a common interface shared by two implementations,
``simple_httpclient`` and ``curl_httpclient``.  Applications may either
instantiate their chosen implementation class directly or use the
`AsyncHTTPClient` class from this module, which selects an implementation
that can be overridden with the `AsyncHTTPClient.configure` method.

The default implementation is ``simple_httpclient``, and this is expected
to be suitable for most users' needs.  However, some applications may wish
to switch to ``curl_httpclient`` for reasons such as the following:

* ``curl_httpclient`` has some features not found in ``simple_httpclient``,
  including support for HTTP proxies and the ability to use a specified
  network interface.

* ``curl_httpclient`` is more likely to be compatible with sites that are
  not-quite-compliant with the HTTP spec, or sites that use little-exercised
  features of HTTP.

* ``curl_httpclient`` is faster.

* ``curl_httpclient`` was the default prior to Tornado 2.0.

Note that if you are using ``curl_httpclient``, it is highly
recommended that you use a recent version of ``libcurl`` and
``pycurl``.  Currently the minimum supported version of libcurl is
7.21.1, and the minimum version of pycurl is 7.18.2.
It is highly
recommended that your ``libcurl`` installation is built with
asynchronous DNS resolver (threaded or c-ares), otherwise you may
encounter various problems with request timeouts (for more
information, see
http://curl.haxx.se/libcurl/c/curl_easy_setopt.html#CURLOPTCONNECTTIMEOUTMS
and comments in curl_httpclient.py).

To select ``curl_httpclient``, call `AsyncHTTPClient.configure` at startup::

    AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
"""

from __future__ import absolute_import, division, print_function, with_statement

import functools
import time
import weakref

from tornado.concurrent import TracebackFuture
from tornado.escape import utf8, native_str
from tornado import httputil, stack_context
from tornado.ioloop import IOLoop
from tornado.util import Configurable


class HTTPClient(object):
    """A blocking HTTP client.

    This interface is provided for convenience and testing; most applications
    that are running an IOLoop will want to use `AsyncHTTPClient` instead.
    Typical usage looks like this::

        http_client = httpclient.HTTPClient()
        try:
            response = http_client.fetch("http://www.google.com/")
            print response.body
        except httpclient.HTTPError as e:
            # HTTPError is raised for non-200 responses; the response
            # can be found in e.response.
            print("Error: " + str(e))
        except Exception as e:
            # Other errors are possible, such as IOError.
            print("Error: " + str(e))
        http_client.close()
    """
    def __init__(self, async_client_class=None, **kwargs):
        # Each blocking client owns a private (non-current) IOLoop that
        # run_sync drives for the duration of each fetch.
        self._io_loop = IOLoop(make_current=False)
        if async_client_class is None:
            async_client_class = AsyncHTTPClient
        self._async_client = async_client_class(self._io_loop, **kwargs)
        self._closed = False

    def __del__(self):
        self.close()

    def close(self):
        """Closes the HTTPClient, freeing any resources used."""
        if not self._closed:
            self._async_client.close()
            self._io_loop.close()
            self._closed = True

    def fetch(self, request, **kwargs):
        """Executes a request, returning an `HTTPResponse`.

        The request may be either a string URL or an `HTTPRequest` object.
        If it is a string, we construct an `HTTPRequest` using any additional
        kwargs: ``HTTPRequest(request, **kwargs)``

        If an error occurs during the fetch, we raise an `HTTPError` unless
        the ``raise_error`` keyword argument is set to False.
        """
        response = self._io_loop.run_sync(functools.partial(
            self._async_client.fetch, request, **kwargs))
        return response


class AsyncHTTPClient(Configurable):
    """A non-blocking HTTP client.

    Example usage::

        def handle_request(response):
            if response.error:
                print "Error:", response.error
            else:
                print response.body

        http_client = AsyncHTTPClient()
        http_client.fetch("http://www.google.com/", handle_request)

    The constructor for this class is magic in several respects: It
    actually creates an instance of an implementation-specific
    subclass, and instances are reused as a kind of pseudo-singleton
    (one per `.IOLoop`).  The keyword argument ``force_instance=True``
    can be used to suppress this singleton behavior.  Unless
    ``force_instance=True`` is used, no arguments other than
    ``io_loop`` should be passed to the `AsyncHTTPClient` constructor.
    The implementation subclass as well as arguments to its
    constructor can be set with the static method `configure()`

    All `AsyncHTTPClient` implementations support a ``defaults``
    keyword argument, which can be used to set default values for
    `HTTPRequest` attributes.  For example::

        AsyncHTTPClient.configure(
            None, defaults=dict(user_agent="MyUserAgent"))
        # or with force_instance:
        client = AsyncHTTPClient(force_instance=True,
                                 defaults=dict(user_agent="MyUserAgent"))

    .. versionchanged:: 4.1
       The ``io_loop`` argument is deprecated.
    """
    @classmethod
    def configurable_base(cls):
        return AsyncHTTPClient

    @classmethod
    def configurable_default(cls):
        from tornado.simple_httpclient import SimpleAsyncHTTPClient
        return SimpleAsyncHTTPClient

    @classmethod
    def _async_clients(cls):
        # Per-class WeakKeyDictionary mapping IOLoop -> client instance,
        # created lazily on the class the first time it is needed.
        attr_name = '_async_client_dict_' + cls.__name__
        if not hasattr(cls, attr_name):
            setattr(cls, attr_name, weakref.WeakKeyDictionary())
        return getattr(cls, attr_name)

    def __new__(cls, io_loop=None, force_instance=False, **kwargs):
        io_loop = io_loop or IOLoop.current()
        if force_instance:
            instance_cache = None
        else:
            instance_cache = cls._async_clients()
        if instance_cache is not None and io_loop in instance_cache:
            # Reuse the pseudo-singleton instance for this IOLoop.
            return instance_cache[io_loop]
        instance = super(AsyncHTTPClient, cls).__new__(cls, io_loop=io_loop,
                                                       **kwargs)
        # Make sure the instance knows which cache to remove itself from.
        # It can't simply call _async_clients() because we may be in
        # __new__(AsyncHTTPClient) but instance.__class__ may be
        # SimpleAsyncHTTPClient.
        instance._instance_cache = instance_cache
        if instance_cache is not None:
            instance_cache[instance.io_loop] = instance
        return instance

    def initialize(self, io_loop, defaults=None):
        self.io_loop = io_loop
        self.defaults = dict(HTTPRequest._DEFAULTS)
        if defaults is not None:
            self.defaults.update(defaults)
        self._closed = False

    def close(self):
        """Destroys this HTTP client, freeing any file descriptors used.

        This method is **not needed in normal use** due to the way
        that `AsyncHTTPClient` objects are transparently reused.
        ``close()`` is generally only necessary when either the
        `.IOLoop` is also being closed, or the ``force_instance=True``
        argument was used when creating the `AsyncHTTPClient`.

        No other methods may be called on the `AsyncHTTPClient` after
        ``close()``.

        """
        if self._closed:
            return
        self._closed = True
        if self._instance_cache is not None:
            if self._instance_cache.get(self.io_loop) is not self:
                raise RuntimeError("inconsistent AsyncHTTPClient cache")
            del self._instance_cache[self.io_loop]

    def fetch(self, request, callback=None, raise_error=True, **kwargs):
        """Executes a request, asynchronously returning an `HTTPResponse`.

        The request may be either a string URL or an `HTTPRequest` object.
        If it is a string, we construct an `HTTPRequest` using any additional
        kwargs: ``HTTPRequest(request, **kwargs)``

        This method returns a `.Future` whose result is an
        `HTTPResponse`.  By default, the ``Future`` will raise an `HTTPError`
        if the request returned a non-200 response code.  Instead, if
        ``raise_error`` is set to False, the response will always be
        returned regardless of the response code.

        If a ``callback`` is given, it will be invoked with the `HTTPResponse`.
        In the callback interface, `HTTPError` is not automatically raised.
        Instead, you must check the response's ``error`` attribute or
        call its `~HTTPResponse.rethrow` method.
        """
        if self._closed:
            raise RuntimeError("fetch() called on closed AsyncHTTPClient")
        if not isinstance(request, HTTPRequest):
            request = HTTPRequest(url=request, **kwargs)
        # We may modify this (to add Host, Accept-Encoding, etc),
        # so make sure we don't modify the caller's object.  This is also
        # where normal dicts get converted to HTTPHeaders objects.
        request.headers = httputil.HTTPHeaders(request.headers)
        request = _RequestProxy(request, self.defaults)
        future = TracebackFuture()
        if callback is not None:
            callback = stack_context.wrap(callback)

            def handle_future(future):
                # Bridge the Future result back to the legacy callback
                # interface: errors become HTTPResponse objects.
                exc = future.exception()
                if isinstance(exc, HTTPError) and exc.response is not None:
                    response = exc.response
                elif exc is not None:
                    response = HTTPResponse(
                        request, 599, error=exc,
                        request_time=time.time() - request.start_time)
                else:
                    response = future.result()
                self.io_loop.add_callback(callback, response)
            future.add_done_callback(handle_future)

        def handle_response(response):
            if raise_error and response.error:
                future.set_exception(response.error)
            else:
                future.set_result(response)
        self.fetch_impl(request, handle_response)
        return future

    def fetch_impl(self, request, callback):
        # Subclasses implement the actual request transport.
        raise NotImplementedError()

    @classmethod
    def configure(cls, impl, **kwargs):
        """Configures the `AsyncHTTPClient` subclass to use.

        ``AsyncHTTPClient()`` actually creates an instance of a subclass.
        This method may be called with either a class object or the
        fully-qualified name of such a class (or ``None`` to use the default,
        ``SimpleAsyncHTTPClient``)

        If additional keyword arguments are given, they will be passed
        to the constructor of each subclass instance created.  The
        keyword argument ``max_clients`` determines the maximum number
        of simultaneous `~AsyncHTTPClient.fetch()` operations that can
        execute in parallel on each `.IOLoop`.  Additional arguments
        may be supported depending on the implementation class in use.

        Example::

           AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
        """
        super(AsyncHTTPClient, cls).configure(impl, **kwargs)


class HTTPRequest(object):
    """HTTP client request object."""

    # Default values for HTTPRequest parameters.
    # Merged with the values on the request object by AsyncHTTPClient
    # implementations.
    _DEFAULTS = dict(
        connect_timeout=20.0,
        request_timeout=20.0,
        follow_redirects=True,
        max_redirects=5,
        decompress_response=True,
        proxy_password='',
        allow_nonstandard_methods=False,
        validate_cert=True)

    def __init__(self, url, method="GET", headers=None, body=None,
                 auth_username=None, auth_password=None, auth_mode=None,
                 connect_timeout=None, request_timeout=None,
                 if_modified_since=None, follow_redirects=None,
                 max_redirects=None, user_agent=None, use_gzip=None,
                 network_interface=None, streaming_callback=None,
                 header_callback=None, prepare_curl_callback=None,
                 proxy_host=None, proxy_port=None, proxy_username=None,
                 proxy_password=None, allow_nonstandard_methods=None,
                 validate_cert=None, ca_certs=None,
                 allow_ipv6=None,
                 client_key=None, client_cert=None, body_producer=None,
                 expect_100_continue=False, decompress_response=None,
                 ssl_options=None):
        r"""All parameters except ``url`` are optional.

        :arg string url: URL to fetch
        :arg string method: HTTP method, e.g. "GET" or "POST"
        :arg headers: Additional HTTP headers to pass on the request
        :type headers: `~tornado.httputil.HTTPHeaders` or `dict`
        :arg body: HTTP request body as a string (byte or unicode; if unicode
           the utf-8 encoding will be used)
        :arg body_producer: Callable used for lazy/asynchronous request bodies.
           It is called with one argument, a ``write`` function, and should
           return a `.Future`.  It should call the write function with new
           data as it becomes available.  The write function returns a
           `.Future` which can be used for flow control.
           Only one of ``body`` and ``body_producer`` may
           be specified.  ``body_producer`` is not supported on
           ``curl_httpclient``.  When using ``body_producer`` it is recommended
           to pass a ``Content-Length`` in the headers as otherwise chunked
           encoding will be used, and many servers do not support chunked
           encoding on requests.  New in Tornado 4.0
        :arg string auth_username: Username for HTTP authentication
        :arg string auth_password: Password for HTTP authentication
        :arg string auth_mode: Authentication mode; default is "basic".
           Allowed values are implementation-defined; ``curl_httpclient``
           supports "basic" and "digest"; ``simple_httpclient`` only supports
           "basic"
        :arg float connect_timeout: Timeout for initial connection in seconds
        :arg float request_timeout: Timeout for entire request in seconds
        :arg if_modified_since: Timestamp for ``If-Modified-Since`` header
        :type if_modified_since: `datetime` or `float`
        :arg bool follow_redirects: Should redirects be followed automatically
           or return the 3xx response?
        :arg int max_redirects: Limit for ``follow_redirects``
        :arg string user_agent: String to send as ``User-Agent`` header
        :arg bool decompress_response: Request a compressed response from
           the server and decompress it after downloading.  Default is True.
           New in Tornado 4.0.
        :arg bool use_gzip: Deprecated alias for ``decompress_response``
           since Tornado 4.0.
        :arg string network_interface: Network interface to use for request.
           ``curl_httpclient`` only; see note below.
        :arg callable streaming_callback: If set, ``streaming_callback`` will
           be run with each chunk of data as it is received, and
           ``HTTPResponse.body`` and ``HTTPResponse.buffer`` will be empty in
           the final response.
        :arg callable header_callback: If set, ``header_callback`` will
           be run with each header line as it is received (including the
           first line, e.g. ``HTTP/1.0 200 OK\r\n``, and a final line
           containing only ``\r\n``.  All lines include the trailing newline
           characters).  ``HTTPResponse.headers`` will be empty in the final
           response.  This is most useful in conjunction with
           ``streaming_callback``, because it's the only way to get access to
           header data while the request is in progress.
        :arg callable prepare_curl_callback: If set, will be called with
           a ``pycurl.Curl`` object to allow the application to make additional
           ``setopt`` calls.
        :arg string proxy_host: HTTP proxy hostname.  To use proxies,
           ``proxy_host`` and ``proxy_port`` must be set; ``proxy_username`` and
           ``proxy_password`` are optional.  Proxies are currently only supported
           with ``curl_httpclient``.
        :arg int proxy_port: HTTP proxy port
        :arg string proxy_username: HTTP proxy username
        :arg string proxy_password: HTTP proxy password
        :arg bool allow_nonstandard_methods: Allow unknown values for ``method``
           argument?
        :arg bool validate_cert: For HTTPS requests, validate the server's
           certificate?
        :arg string ca_certs: filename of CA certificates in PEM format,
           or None to use defaults.  See note below when used with
           ``curl_httpclient``.
        :arg string client_key: Filename for client SSL key, if any.  See
           note below when used with ``curl_httpclient``.
        :arg string client_cert: Filename for client SSL certificate, if any.
           See note below when used with ``curl_httpclient``.
        :arg ssl.SSLContext ssl_options: `ssl.SSLContext` object for use in
           ``simple_httpclient`` (unsupported by ``curl_httpclient``).
           Overrides ``validate_cert``, ``ca_certs``, ``client_key``,
           and ``client_cert``.
        :arg bool allow_ipv6: Use IPv6 when available? Default is true.
        :arg bool expect_100_continue: If true, send the
           ``Expect: 100-continue`` header and wait for a continue response
           before sending the request body.  Only supported with
           simple_httpclient.

        .. note::

            When using ``curl_httpclient`` certain options may be
            inherited by subsequent fetches because ``pycurl`` does
            not allow them to be cleanly reset.  This applies to the
            ``ca_certs``, ``client_key``, ``client_cert``, and
            ``network_interface`` arguments.  If you use these
            options, you should pass them on every request (you don't
            have to always use the same values, but it's not possible
            to mix requests that specify these options with ones that
            use the defaults).

        .. versionadded:: 3.1
           The ``auth_mode`` argument.

        .. versionadded:: 4.0
           The ``body_producer`` and ``expect_100_continue`` arguments.

        .. versionadded:: 4.2
           The ``ssl_options`` argument.
        """
        # Note that some of these attributes go through property setters
        # defined below.
        self.headers = headers
        if if_modified_since:
            self.headers["If-Modified-Since"] = httputil.format_timestamp(
                if_modified_since)
        self.proxy_host = proxy_host
        self.proxy_port = proxy_port
        self.proxy_username = proxy_username
        self.proxy_password = proxy_password
        self.url = url
        self.method = method
        self.body = body
        self.body_producer = body_producer
        self.auth_username = auth_username
        self.auth_password = auth_password
        self.auth_mode = auth_mode
        self.connect_timeout = connect_timeout
        self.request_timeout = request_timeout
        self.follow_redirects = follow_redirects
        self.max_redirects = max_redirects
        self.user_agent = user_agent
        if decompress_response is not None:
            self.decompress_response = decompress_response
        else:
            # Fall back to the deprecated use_gzip alias.
            self.decompress_response = use_gzip
        self.network_interface = network_interface
        self.streaming_callback = streaming_callback
        self.header_callback = header_callback
        self.prepare_curl_callback = prepare_curl_callback
        self.allow_nonstandard_methods = allow_nonstandard_methods
        self.validate_cert = validate_cert
        self.ca_certs = ca_certs
        self.allow_ipv6 = allow_ipv6
        self.client_key = client_key
        self.client_cert = client_cert
        self.ssl_options = ssl_options
        self.expect_100_continue = expect_100_continue
        self.start_time = time.time()

    @property
    def headers(self):
        return self._headers

    @headers.setter
    def headers(self, value):
        if value is None:
            self._headers = httputil.HTTPHeaders()
        else:
            self._headers = value

    @property
    def body(self):
        return self._body

    @body.setter
    def body(self, value):
        self._body = utf8(value)

    @property
    def body_producer(self):
        return self._body_producer

    @body_producer.setter
    def body_producer(self, value):
        self._body_producer = stack_context.wrap(value)

    @property
    def streaming_callback(self):
        return self._streaming_callback

    @streaming_callback.setter
    def streaming_callback(self, value):
        self._streaming_callback = stack_context.wrap(value)

    @property
    def header_callback(self):
        return self._header_callback

    @header_callback.setter
    def header_callback(self, value):
        self._header_callback = stack_context.wrap(value)

    @property
    def prepare_curl_callback(self):
        return self._prepare_curl_callback

    @prepare_curl_callback.setter
    def prepare_curl_callback(self, value):
        self._prepare_curl_callback = stack_context.wrap(value)


class HTTPResponse(object):
    """HTTP Response object.

    Attributes:

    * request: HTTPRequest object

    * code: numeric HTTP status code, e.g. 200 or 404

    * reason: human-readable reason phrase describing the status code

    * headers: `tornado.httputil.HTTPHeaders` object

    * effective_url: final location of the resource after following any
      redirects

    * buffer: ``cStringIO`` object for response body

    * body: response body as string (created on demand from ``self.buffer``)

    * error: Exception object, if any

    * request_time: seconds from request start to finish

    * time_info: dictionary of diagnostic timing information from the request.
+ Available data are subject to change, but currently uses timings + available from http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html, + plus ``queue``, which is the delay (if any) introduced by waiting for + a slot under `AsyncHTTPClient`'s ``max_clients`` setting. + """ + def __init__(self, request, code, headers=None, buffer=None, + effective_url=None, error=None, request_time=None, + time_info=None, reason=None): + if isinstance(request, _RequestProxy): + self.request = request.request + else: + self.request = request + self.code = code + self.reason = reason or httputil.responses.get(code, "Unknown") + if headers is not None: + self.headers = headers + else: + self.headers = httputil.HTTPHeaders() + self.buffer = buffer + self._body = None + if effective_url is None: + self.effective_url = request.url + else: + self.effective_url = effective_url + if error is None: + if self.code < 200 or self.code >= 300: + self.error = HTTPError(self.code, message=self.reason, + response=self) + else: + self.error = None + else: + self.error = error + self.request_time = request_time + self.time_info = time_info or {} + + def _get_body(self): + if self.buffer is None: + return None + elif self._body is None: + self._body = self.buffer.getvalue() + + return self._body + + body = property(_get_body) + + def rethrow(self): + """If there was an error on the request, raise an `HTTPError`.""" + if self.error: + raise self.error + + def __repr__(self): + args = ",".join("%s=%r" % i for i in sorted(self.__dict__.items())) + return "%s(%s)" % (self.__class__.__name__, args) + + +class HTTPError(Exception): + """Exception thrown for an unsuccessful HTTP request. + + Attributes: + + * ``code`` - HTTP error integer error code, e.g. 404. Error code 599 is + used when no HTTP response was received, e.g. for a timeout. + + * ``response`` - `HTTPResponse` object, if any. 
+ + Note that if ``follow_redirects`` is False, redirects become HTTPErrors, + and you can look at ``error.response.headers['Location']`` to see the + destination of the redirect. + """ + def __init__(self, code, message=None, response=None): + self.code = code + self.message = message or httputil.responses.get(code, "Unknown") + self.response = response + super(HTTPError, self).__init__(code, message, response) + + def __str__(self): + return "HTTP %d: %s" % (self.code, self.message) + + +class _RequestProxy(object): + """Combines an object with a dictionary of defaults. + + Used internally by AsyncHTTPClient implementations. + """ + def __init__(self, request, defaults): + self.request = request + self.defaults = defaults + + def __getattr__(self, name): + request_attr = getattr(self.request, name) + if request_attr is not None: + return request_attr + elif self.defaults is not None: + return self.defaults.get(name, None) + else: + return None + + +def main(): + from tornado.options import define, options, parse_command_line + define("print_headers", type=bool, default=False) + define("print_body", type=bool, default=True) + define("follow_redirects", type=bool, default=True) + define("validate_cert", type=bool, default=True) + args = parse_command_line() + client = HTTPClient() + for arg in args: + try: + response = client.fetch(arg, + follow_redirects=options.follow_redirects, + validate_cert=options.validate_cert, + ) + except HTTPError as e: + if e.response is not None: + response = e.response + else: + raise + if options.print_headers: + print(response.headers) + if options.print_body: + print(native_str(response.body)) + client.close() + +if __name__ == "__main__": + main() diff --git a/python/tornado/httpserver.py b/python/tornado/httpserver.py new file mode 100644 index 000000000..ff235fe46 --- /dev/null +++ b/python/tornado/httpserver.py @@ -0,0 +1,304 @@ +#!/usr/bin/env python +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""A non-blocking, single-threaded HTTP server. + +Typical applications have little direct interaction with the `HTTPServer` +class except to start a server at the beginning of the process +(and even that is often done indirectly via `tornado.web.Application.listen`). + +.. versionchanged:: 4.0 + + The ``HTTPRequest`` class that used to live in this module has been moved + to `tornado.httputil.HTTPServerRequest`. The old name remains as an alias. +""" + +from __future__ import absolute_import, division, print_function, with_statement + +import socket + +from tornado.escape import native_str +from tornado.http1connection import HTTP1ServerConnection, HTTP1ConnectionParameters +from tornado import gen +from tornado import httputil +from tornado import iostream +from tornado import netutil +from tornado.tcpserver import TCPServer +from tornado.util import Configurable + + +class HTTPServer(TCPServer, Configurable, + httputil.HTTPServerConnectionDelegate): + r"""A non-blocking, single-threaded HTTP server. + + A server is defined by a subclass of `.HTTPServerConnectionDelegate`, + or, for backwards compatibility, a callback that takes an + `.HTTPServerRequest` as an argument. The delegate is usually a + `tornado.web.Application`. + + `HTTPServer` supports keep-alive connections by default + (automatically for HTTP/1.1, or for HTTP/1.0 when the client + requests ``Connection: keep-alive``). 
+ + If ``xheaders`` is ``True``, we support the + ``X-Real-Ip``/``X-Forwarded-For`` and + ``X-Scheme``/``X-Forwarded-Proto`` headers, which override the + remote IP and URI scheme/protocol for all requests. These headers + are useful when running Tornado behind a reverse proxy or load + balancer. The ``protocol`` argument can also be set to ``https`` + if Tornado is run behind an SSL-decoding proxy that does not set one of + the supported ``xheaders``. + + To make this server serve SSL traffic, send the ``ssl_options`` keyword + argument with an `ssl.SSLContext` object. For compatibility with older + versions of Python ``ssl_options`` may also be a dictionary of keyword + arguments for the `ssl.wrap_socket` method.:: + + ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) + ssl_ctx.load_cert_chain(os.path.join(data_dir, "mydomain.crt"), + os.path.join(data_dir, "mydomain.key")) + HTTPServer(application, ssl_options=ssl_ctx) + + `HTTPServer` initialization follows one of three patterns (the + initialization methods are defined on `tornado.tcpserver.TCPServer`): + + 1. `~tornado.tcpserver.TCPServer.listen`: simple single-process:: + + server = HTTPServer(app) + server.listen(8888) + IOLoop.current().start() + + In many cases, `tornado.web.Application.listen` can be used to avoid + the need to explicitly create the `HTTPServer`. + + 2. `~tornado.tcpserver.TCPServer.bind`/`~tornado.tcpserver.TCPServer.start`: + simple multi-process:: + + server = HTTPServer(app) + server.bind(8888) + server.start(0) # Forks multiple sub-processes + IOLoop.current().start() + + When using this interface, an `.IOLoop` must *not* be passed + to the `HTTPServer` constructor. `~.TCPServer.start` will always start + the server on the default singleton `.IOLoop`. + + 3. 
`~tornado.tcpserver.TCPServer.add_sockets`: advanced multi-process:: + + sockets = tornado.netutil.bind_sockets(8888) + tornado.process.fork_processes(0) + server = HTTPServer(app) + server.add_sockets(sockets) + IOLoop.current().start() + + The `~.TCPServer.add_sockets` interface is more complicated, + but it can be used with `tornado.process.fork_processes` to + give you more flexibility in when the fork happens. + `~.TCPServer.add_sockets` can also be used in single-process + servers if you want to create your listening sockets in some + way other than `tornado.netutil.bind_sockets`. + + .. versionchanged:: 4.0 + Added ``decompress_request``, ``chunk_size``, ``max_header_size``, + ``idle_connection_timeout``, ``body_timeout``, ``max_body_size`` + arguments. Added support for `.HTTPServerConnectionDelegate` + instances as ``request_callback``. + + .. versionchanged:: 4.1 + `.HTTPServerConnectionDelegate.start_request` is now called with + two arguments ``(server_conn, request_conn)`` (in accordance with the + documentation) instead of one ``(request_conn)``. + + .. versionchanged:: 4.2 + `HTTPServer` is now a subclass of `tornado.util.Configurable`. + """ + def __init__(self, *args, **kwargs): + # Ignore args to __init__; real initialization belongs in + # initialize since we're Configurable. 
(there's something + # weird in initialization order between this class, + # Configurable, and TCPServer so we can't leave __init__ out + # completely) + pass + + def initialize(self, request_callback, no_keep_alive=False, io_loop=None, + xheaders=False, ssl_options=None, protocol=None, + decompress_request=False, + chunk_size=None, max_header_size=None, + idle_connection_timeout=None, body_timeout=None, + max_body_size=None, max_buffer_size=None): + self.request_callback = request_callback + self.no_keep_alive = no_keep_alive + self.xheaders = xheaders + self.protocol = protocol + self.conn_params = HTTP1ConnectionParameters( + decompress=decompress_request, + chunk_size=chunk_size, + max_header_size=max_header_size, + header_timeout=idle_connection_timeout or 3600, + max_body_size=max_body_size, + body_timeout=body_timeout) + TCPServer.__init__(self, io_loop=io_loop, ssl_options=ssl_options, + max_buffer_size=max_buffer_size, + read_chunk_size=chunk_size) + self._connections = set() + + @classmethod + def configurable_base(cls): + return HTTPServer + + @classmethod + def configurable_default(cls): + return HTTPServer + + @gen.coroutine + def close_all_connections(self): + while self._connections: + # Peek at an arbitrary element of the set + conn = next(iter(self._connections)) + yield conn.close() + + def handle_stream(self, stream, address): + context = _HTTPRequestContext(stream, address, + self.protocol) + conn = HTTP1ServerConnection( + stream, self.conn_params, context) + self._connections.add(conn) + conn.start_serving(self) + + def start_request(self, server_conn, request_conn): + return _ServerRequestAdapter(self, server_conn, request_conn) + + def on_close(self, server_conn): + self._connections.remove(server_conn) + + +class _HTTPRequestContext(object): + def __init__(self, stream, address, protocol): + self.address = address + # Save the socket's address family now so we know how to + # interpret self.address even after the stream is closed + # and 
its socket attribute replaced with None. + if stream.socket is not None: + self.address_family = stream.socket.family + else: + self.address_family = None + # In HTTPServerRequest we want an IP, not a full socket address. + if (self.address_family in (socket.AF_INET, socket.AF_INET6) and + address is not None): + self.remote_ip = address[0] + else: + # Unix (or other) socket; fake the remote address. + self.remote_ip = '0.0.0.0' + if protocol: + self.protocol = protocol + elif isinstance(stream, iostream.SSLIOStream): + self.protocol = "https" + else: + self.protocol = "http" + self._orig_remote_ip = self.remote_ip + self._orig_protocol = self.protocol + + def __str__(self): + if self.address_family in (socket.AF_INET, socket.AF_INET6): + return self.remote_ip + elif isinstance(self.address, bytes): + # Python 3 with the -bb option warns about str(bytes), + # so convert it explicitly. + # Unix socket addresses are str on mac but bytes on linux. + return native_str(self.address) + else: + return str(self.address) + + def _apply_xheaders(self, headers): + """Rewrite the ``remote_ip`` and ``protocol`` fields.""" + # Squid uses X-Forwarded-For, others use X-Real-Ip + ip = headers.get("X-Forwarded-For", self.remote_ip) + ip = ip.split(',')[-1].strip() + ip = headers.get("X-Real-Ip", ip) + if netutil.is_valid_ip(ip): + self.remote_ip = ip + # AWS uses X-Forwarded-Proto + proto_header = headers.get( + "X-Scheme", headers.get("X-Forwarded-Proto", + self.protocol)) + if proto_header in ("http", "https"): + self.protocol = proto_header + + def _unapply_xheaders(self): + """Undo changes from `_apply_xheaders`. + + Xheaders are per-request so they should not leak to the next + request on the same connection. + """ + self.remote_ip = self._orig_remote_ip + self.protocol = self._orig_protocol + + +class _ServerRequestAdapter(httputil.HTTPMessageDelegate): + """Adapts the `HTTPMessageDelegate` interface to the interface expected + by our clients. 
+ """ + def __init__(self, server, server_conn, request_conn): + self.server = server + self.connection = request_conn + self.request = None + if isinstance(server.request_callback, + httputil.HTTPServerConnectionDelegate): + self.delegate = server.request_callback.start_request( + server_conn, request_conn) + self._chunks = None + else: + self.delegate = None + self._chunks = [] + + def headers_received(self, start_line, headers): + if self.server.xheaders: + self.connection.context._apply_xheaders(headers) + if self.delegate is None: + self.request = httputil.HTTPServerRequest( + connection=self.connection, start_line=start_line, + headers=headers) + else: + return self.delegate.headers_received(start_line, headers) + + def data_received(self, chunk): + if self.delegate is None: + self._chunks.append(chunk) + else: + return self.delegate.data_received(chunk) + + def finish(self): + if self.delegate is None: + self.request.body = b''.join(self._chunks) + self.request._parse_body() + self.server.request_callback(self.request) + else: + self.delegate.finish() + self._cleanup() + + def on_connection_close(self): + if self.delegate is None: + self._chunks = None + else: + self.delegate.on_connection_close() + self._cleanup() + + def _cleanup(self): + if self.server.xheaders: + self.connection.context._unapply_xheaders() + + +HTTPRequest = httputil.HTTPServerRequest diff --git a/python/tornado/httputil.py b/python/tornado/httputil.py new file mode 100644 index 000000000..471df54f9 --- /dev/null +++ b/python/tornado/httputil.py @@ -0,0 +1,897 @@ +#!/usr/bin/env python +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""HTTP utility code shared by clients and servers. + +This module also defines the `HTTPServerRequest` class which is exposed +via `tornado.web.RequestHandler.request`. +""" + +from __future__ import absolute_import, division, print_function, with_statement + +import calendar +import collections +import copy +import datetime +import email.utils +import numbers +import re +import time + +from tornado.escape import native_str, parse_qs_bytes, utf8 +from tornado.log import gen_log +from tornado.util import ObjectDict + +try: + import Cookie # py2 +except ImportError: + import http.cookies as Cookie # py3 + +try: + from httplib import responses # py2 +except ImportError: + from http.client import responses # py3 + +# responses is unused in this file, but we re-export it to other files. +# Reference it so pyflakes doesn't complain. +responses + +try: + from urllib import urlencode # py2 +except ImportError: + from urllib.parse import urlencode # py3 + +try: + from ssl import SSLError +except ImportError: + # ssl is unavailable on app engine. + class SSLError(Exception): + pass + + +# RFC 7230 section 3.5: a recipient MAY recognize a single LF as a line +# terminator and ignore any preceding CR. +_CRLF_RE = re.compile(r'\r?\n') + + +class _NormalizedHeaderCache(dict): + """Dynamic cached mapping of header names to Http-Header-Case. + + Implemented as a dict subclass so that cache hits are as fast as a + normal dict lookup, without the overhead of a python function + call. 
+ + >>> normalized_headers = _NormalizedHeaderCache(10) + >>> normalized_headers["coNtent-TYPE"] + 'Content-Type' + """ + def __init__(self, size): + super(_NormalizedHeaderCache, self).__init__() + self.size = size + self.queue = collections.deque() + + def __missing__(self, key): + normalized = "-".join([w.capitalize() for w in key.split("-")]) + self[key] = normalized + self.queue.append(key) + if len(self.queue) > self.size: + # Limit the size of the cache. LRU would be better, but this + # simpler approach should be fine. In Python 2.7+ we could + # use OrderedDict (or in 3.2+, @functools.lru_cache). + old_key = self.queue.popleft() + del self[old_key] + return normalized + +_normalized_headers = _NormalizedHeaderCache(1000) + + +class HTTPHeaders(collections.MutableMapping): + """A dictionary that maintains ``Http-Header-Case`` for all keys. + + Supports multiple values per key via a pair of new methods, + `add()` and `get_list()`. The regular dictionary interface + returns a single value per key, with multiple values joined by a + comma. + + >>> h = HTTPHeaders({"content-type": "text/html"}) + >>> list(h.keys()) + ['Content-Type'] + >>> h["Content-Type"] + 'text/html' + + >>> h.add("Set-Cookie", "A=B") + >>> h.add("Set-Cookie", "C=D") + >>> h["set-cookie"] + 'A=B,C=D' + >>> h.get_list("set-cookie") + ['A=B', 'C=D'] + + >>> for (k,v) in sorted(h.get_all()): + ... print('%s: %s' % (k,v)) + ... 
+ Content-Type: text/html + Set-Cookie: A=B + Set-Cookie: C=D + """ + def __init__(self, *args, **kwargs): + self._dict = {} + self._as_list = {} + self._last_key = None + if (len(args) == 1 and len(kwargs) == 0 and + isinstance(args[0], HTTPHeaders)): + # Copy constructor + for k, v in args[0].get_all(): + self.add(k, v) + else: + # Dict-style initialization + self.update(*args, **kwargs) + + # new public methods + + def add(self, name, value): + """Adds a new value for the given key.""" + norm_name = _normalized_headers[name] + self._last_key = norm_name + if norm_name in self: + self._dict[norm_name] = (native_str(self[norm_name]) + ',' + + native_str(value)) + self._as_list[norm_name].append(value) + else: + self[norm_name] = value + + def get_list(self, name): + """Returns all values for the given header as a list.""" + norm_name = _normalized_headers[name] + return self._as_list.get(norm_name, []) + + def get_all(self): + """Returns an iterable of all (name, value) pairs. + + If a header has multiple values, multiple pairs will be + returned with the same name. + """ + for name, values in self._as_list.items(): + for value in values: + yield (name, value) + + def parse_line(self, line): + """Updates the dictionary with a single header line. + + >>> h = HTTPHeaders() + >>> h.parse_line("Content-Type: text/html") + >>> h.get('content-type') + 'text/html' + """ + if line[0].isspace(): + # continuation of a multi-line header + new_part = ' ' + line.lstrip() + self._as_list[self._last_key][-1] += new_part + self._dict[self._last_key] += new_part + else: + name, value = line.split(":", 1) + self.add(name, value.strip()) + + @classmethod + def parse(cls, headers): + """Returns a dictionary from HTTP header text. 
+ + >>> h = HTTPHeaders.parse("Content-Type: text/html\\r\\nContent-Length: 42\\r\\n") + >>> sorted(h.items()) + [('Content-Length', '42'), ('Content-Type', 'text/html')] + """ + h = cls() + for line in _CRLF_RE.split(headers): + if line: + h.parse_line(line) + return h + + # MutableMapping abstract method implementations. + + def __setitem__(self, name, value): + norm_name = _normalized_headers[name] + self._dict[norm_name] = value + self._as_list[norm_name] = [value] + + def __getitem__(self, name): + return self._dict[_normalized_headers[name]] + + def __delitem__(self, name): + norm_name = _normalized_headers[name] + del self._dict[norm_name] + del self._as_list[norm_name] + + def __len__(self): + return len(self._dict) + + def __iter__(self): + return iter(self._dict) + + def copy(self): + # defined in dict but not in MutableMapping. + return HTTPHeaders(self) + + # Use our overridden copy method for the copy.copy module. + # This makes shallow copies one level deeper, but preserves + # the appearance that HTTPHeaders is a single container. + __copy__ = copy + + +class HTTPServerRequest(object): + """A single HTTP request. + + All attributes are type `str` unless otherwise noted. + + .. attribute:: method + + HTTP request method, e.g. "GET" or "POST" + + .. attribute:: uri + + The requested uri. + + .. attribute:: path + + The path portion of `uri` + + .. attribute:: query + + The query portion of `uri` + + .. attribute:: version + + HTTP version specified in request, e.g. "HTTP/1.1" + + .. attribute:: headers + + `.HTTPHeaders` dictionary-like object for request headers. Acts like + a case-insensitive dictionary with additional methods for repeated + headers. + + .. attribute:: body + + Request body, if present, as a byte string. + + .. attribute:: remote_ip + + Client's IP address as a string. If ``HTTPServer.xheaders`` is set, + will pass along the real IP address provided by a load balancer + in the ``X-Real-Ip`` or ``X-Forwarded-For`` header. + + .. 
versionchanged:: 3.1 + The list format of ``X-Forwarded-For`` is now supported. + + .. attribute:: protocol + + The protocol used, either "http" or "https". If ``HTTPServer.xheaders`` + is set, will pass along the protocol used by a load balancer if + reported via an ``X-Scheme`` header. + + .. attribute:: host + + The requested hostname, usually taken from the ``Host`` header. + + .. attribute:: arguments + + GET/POST arguments are available in the arguments property, which + maps arguments names to lists of values (to support multiple values + for individual names). Names are of type `str`, while arguments + are byte strings. Note that this is different from + `.RequestHandler.get_argument`, which returns argument values as + unicode strings. + + .. attribute:: query_arguments + + Same format as ``arguments``, but contains only arguments extracted + from the query string. + + .. versionadded:: 3.2 + + .. attribute:: body_arguments + + Same format as ``arguments``, but contains only arguments extracted + from the request body. + + .. versionadded:: 3.2 + + .. attribute:: files + + File uploads are available in the files property, which maps file + names to lists of `.HTTPFile`. + + .. attribute:: connection + + An HTTP request is attached to a single HTTP connection, which can + be accessed through the "connection" attribute. Since connections + are typically kept open in HTTP/1.1, multiple requests can be handled + sequentially on a single connection. + + .. versionchanged:: 4.0 + Moved from ``tornado.httpserver.HTTPRequest``. 
+ """ + def __init__(self, method=None, uri=None, version="HTTP/1.0", headers=None, + body=None, host=None, files=None, connection=None, + start_line=None): + if start_line is not None: + method, uri, version = start_line + self.method = method + self.uri = uri + self.version = version + self.headers = headers or HTTPHeaders() + self.body = body or b"" + + # set remote IP and protocol + context = getattr(connection, 'context', None) + self.remote_ip = getattr(context, 'remote_ip', None) + self.protocol = getattr(context, 'protocol', "http") + + self.host = host or self.headers.get("Host") or "127.0.0.1" + self.files = files or {} + self.connection = connection + self._start_time = time.time() + self._finish_time = None + + self.path, sep, self.query = uri.partition('?') + self.arguments = parse_qs_bytes(self.query, keep_blank_values=True) + self.query_arguments = copy.deepcopy(self.arguments) + self.body_arguments = {} + + def supports_http_1_1(self): + """Returns True if this request supports HTTP/1.1 semantics. + + .. deprecated:: 4.0 + Applications are less likely to need this information with the + introduction of `.HTTPConnection`. If you still need it, access + the ``version`` attribute directly. + """ + return self.version == "HTTP/1.1" + + @property + def cookies(self): + """A dictionary of Cookie.Morsel objects.""" + if not hasattr(self, "_cookies"): + self._cookies = Cookie.SimpleCookie() + if "Cookie" in self.headers: + try: + self._cookies.load( + native_str(self.headers["Cookie"])) + except Exception: + self._cookies = {} + return self._cookies + + def write(self, chunk, callback=None): + """Writes the given chunk to the response stream. + + .. deprecated:: 4.0 + Use ``request.connection`` and the `.HTTPConnection` methods + to write the response. 
+ """ + assert isinstance(chunk, bytes) + assert self.version.startswith("HTTP/1."), \ + "deprecated interface only supported in HTTP/1.x" + self.connection.write(chunk, callback=callback) + + def finish(self): + """Finishes this HTTP request on the open connection. + + .. deprecated:: 4.0 + Use ``request.connection`` and the `.HTTPConnection` methods + to write the response. + """ + self.connection.finish() + self._finish_time = time.time() + + def full_url(self): + """Reconstructs the full URL for this request.""" + return self.protocol + "://" + self.host + self.uri + + def request_time(self): + """Returns the amount of time it took for this request to execute.""" + if self._finish_time is None: + return time.time() - self._start_time + else: + return self._finish_time - self._start_time + + def get_ssl_certificate(self, binary_form=False): + """Returns the client's SSL certificate, if any. + + To use client certificates, the HTTPServer's + `ssl.SSLContext.verify_mode` field must be set, e.g.:: + + ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) + ssl_ctx.load_cert_chain("foo.crt", "foo.key") + ssl_ctx.load_verify_locations("cacerts.pem") + ssl_ctx.verify_mode = ssl.CERT_REQUIRED + server = HTTPServer(app, ssl_options=ssl_ctx) + + By default, the return value is a dictionary (or None, if no + client certificate is present). If ``binary_form`` is true, a + DER-encoded form of the certificate is returned instead. See + SSLSocket.getpeercert() in the standard library for more + details. 
+ http://docs.python.org/library/ssl.html#sslsocket-objects + """ + try: + return self.connection.stream.socket.getpeercert( + binary_form=binary_form) + except SSLError: + return None + + def _parse_body(self): + parse_body_arguments( + self.headers.get("Content-Type", ""), self.body, + self.body_arguments, self.files, + self.headers) + + for k, v in self.body_arguments.items(): + self.arguments.setdefault(k, []).extend(v) + + def __repr__(self): + attrs = ("protocol", "host", "method", "uri", "version", "remote_ip") + args = ", ".join(["%s=%r" % (n, getattr(self, n)) for n in attrs]) + return "%s(%s, headers=%s)" % ( + self.__class__.__name__, args, dict(self.headers)) + + +class HTTPInputError(Exception): + """Exception class for malformed HTTP requests or responses + from remote sources. + + .. versionadded:: 4.0 + """ + pass + + +class HTTPOutputError(Exception): + """Exception class for errors in HTTP output. + + .. versionadded:: 4.0 + """ + pass + + +class HTTPServerConnectionDelegate(object): + """Implement this interface to handle requests from `.HTTPServer`. + + .. versionadded:: 4.0 + """ + def start_request(self, server_conn, request_conn): + """This method is called by the server when a new request has started. + + :arg server_conn: is an opaque object representing the long-lived + (e.g. tcp-level) connection. + :arg request_conn: is a `.HTTPConnection` object for a single + request/response exchange. + + This method should return a `.HTTPMessageDelegate`. + """ + raise NotImplementedError() + + def on_close(self, server_conn): + """This method is called when a connection has been closed. + + :arg server_conn: is a server connection that has previously been + passed to ``start_request``. + """ + pass + + +class HTTPMessageDelegate(object): + """Implement this interface to handle an HTTP request or response. + + .. 
class HTTPMessageDelegate(object):
    """Implement this interface to handle an HTTP request or response.

    .. versionadded:: 4.0
    """
    def headers_received(self, start_line, headers):
        """Called when the HTTP headers have been received and parsed.

        :arg start_line: a `.RequestStartLine` or `.ResponseStartLine`
            depending on whether this is a client or server message.
        :arg headers: a `.HTTPHeaders` instance.

        Some `.HTTPConnection` methods can only be called during
        ``headers_received``.

        May return a `.Future`; if it does the body will not be read
        until it is done.
        """
        pass

    def data_received(self, chunk):
        """Called when a chunk of data has been received.

        May return a `.Future` for flow control.
        """
        pass

    def finish(self):
        """Called after the last chunk of data has been received."""
        pass

    def on_connection_close(self):
        """Called if the connection is closed without finishing the request.

        If ``headers_received`` is called, either ``finish`` or
        ``on_connection_close`` will be called, but not both.
        """
        pass


class HTTPConnection(object):
    """Applications use this interface to write their responses.

    .. versionadded:: 4.0
    """
    def write_headers(self, start_line, headers, chunk=None, callback=None):
        """Write an HTTP header block.

        :arg start_line: a `.RequestStartLine` or `.ResponseStartLine`.
        :arg headers: a `.HTTPHeaders` instance.
        :arg chunk: the first (optional) chunk of data.  This is an optimization
            so that small responses can be written in the same call as their
            headers.
        :arg callback: a callback to be run when the write is complete.

        The ``version`` field of ``start_line`` is ignored.

        Returns a `.Future` if no callback is given.
        """
        raise NotImplementedError()

    def write(self, chunk, callback=None):
        """Writes a chunk of body data.

        The callback will be run when the write is complete.  If no callback
        is given, returns a Future.
        """
        raise NotImplementedError()

    def finish(self):
        """Indicates that the last body data has been written.
        """
        raise NotImplementedError()
+ """ + raise NotImplementedError() + + +def url_concat(url, args): + """Concatenate url and arguments regardless of whether + url has existing query parameters. + + ``args`` may be either a dictionary or a list of key-value pairs + (the latter allows for multiple values with the same key. + + >>> url_concat("http://example.com/foo", dict(c="d")) + 'http://example.com/foo?c=d' + >>> url_concat("http://example.com/foo?a=b", dict(c="d")) + 'http://example.com/foo?a=b&c=d' + >>> url_concat("http://example.com/foo?a=b", [("c", "d"), ("c", "d2")]) + 'http://example.com/foo?a=b&c=d&c=d2' + """ + if not args: + return url + if url[-1] not in ('?', '&'): + url += '&' if ('?' in url) else '?' + return url + urlencode(args) + + +class HTTPFile(ObjectDict): + """Represents a file uploaded via a form. + + For backwards compatibility, its instance attributes are also + accessible as dictionary keys. + + * ``filename`` + * ``body`` + * ``content_type`` + """ + pass + + +def _parse_request_range(range_header): + """Parses a Range header. + + Returns either ``None`` or tuple ``(start, end)``. + Note that while the HTTP headers use inclusive byte positions, + this method returns indexes suitable for use in slices. + + >>> start, end = _parse_request_range("bytes=1-2") + >>> start, end + (1, 3) + >>> [0, 1, 2, 3, 4][start:end] + [1, 2] + >>> _parse_request_range("bytes=6-") + (6, None) + >>> _parse_request_range("bytes=-6") + (-6, None) + >>> _parse_request_range("bytes=-0") + (None, 0) + >>> _parse_request_range("bytes=") + (None, None) + >>> _parse_request_range("foo=42") + >>> _parse_request_range("bytes=1-2,6-10") + + Note: only supports one range (ex, ``bytes=1-2,6-10`` is not allowed). + + See [0] for the details of the range header. 
+ + [0]: http://greenbytes.de/tech/webdav/draft-ietf-httpbis-p5-range-latest.html#byte.ranges + """ + unit, _, value = range_header.partition("=") + unit, value = unit.strip(), value.strip() + if unit != "bytes": + return None + start_b, _, end_b = value.partition("-") + try: + start = _int_or_none(start_b) + end = _int_or_none(end_b) + except ValueError: + return None + if end is not None: + if start is None: + if end != 0: + start = -end + end = None + else: + end += 1 + return (start, end) + + +def _get_content_range(start, end, total): + """Returns a suitable Content-Range header: + + >>> print(_get_content_range(None, 1, 4)) + bytes 0-0/4 + >>> print(_get_content_range(1, 3, 4)) + bytes 1-2/4 + >>> print(_get_content_range(None, None, 4)) + bytes 0-3/4 + """ + start = start or 0 + end = (end or total) - 1 + return "bytes %s-%s/%s" % (start, end, total) + + +def _int_or_none(val): + val = val.strip() + if val == "": + return None + return int(val) + + +def parse_body_arguments(content_type, body, arguments, files, headers=None): + """Parses a form request body. + + Supports ``application/x-www-form-urlencoded`` and + ``multipart/form-data``. The ``content_type`` parameter should be + a string and ``body`` should be a byte string. The ``arguments`` + and ``files`` parameters are dictionaries that will be updated + with the parsed contents. 
+ """ + if headers and 'Content-Encoding' in headers: + gen_log.warning("Unsupported Content-Encoding: %s", + headers['Content-Encoding']) + return + if content_type.startswith("application/x-www-form-urlencoded"): + try: + uri_arguments = parse_qs_bytes(native_str(body), keep_blank_values=True) + except Exception as e: + gen_log.warning('Invalid x-www-form-urlencoded body: %s', e) + uri_arguments = {} + for name, values in uri_arguments.items(): + if values: + arguments.setdefault(name, []).extend(values) + elif content_type.startswith("multipart/form-data"): + try: + fields = content_type.split(";") + for field in fields: + k, sep, v = field.strip().partition("=") + if k == "boundary" and v: + parse_multipart_form_data(utf8(v), body, arguments, files) + break + else: + raise ValueError("multipart boundary not found") + except Exception as e: + gen_log.warning("Invalid multipart/form-data: %s", e) + + +def parse_multipart_form_data(boundary, data, arguments, files): + """Parses a ``multipart/form-data`` body. + + The ``boundary`` and ``data`` parameters are both byte strings. + The dictionaries given in the arguments and files parameters + will be updated with the contents of the body. + """ + # The standard allows for the boundary to be quoted in the header, + # although it's rare (it happens at least for google app engine + # xmpp). I think we're also supposed to handle backslash-escapes + # here but I'll save that until we see a client that uses them + # in the wild. 
def parse_multipart_form_data(boundary, data, arguments, files):
    """Parses a ``multipart/form-data`` body.

    The ``boundary`` and ``data`` parameters are both byte strings.
    The dictionaries given in the arguments and files parameters
    will be updated with the contents of the body.
    """
    # The standard allows for the boundary to be quoted in the header,
    # although it's rare (it happens at least for google app engine
    # xmpp).  I think we're also supposed to handle backslash-escapes
    # here but I'll save that until we see a client that uses them
    # in the wild.
    if boundary.startswith(b'"') and boundary.endswith(b'"'):
        boundary = boundary[1:-1]
    # Everything after the final "--boundary--" marker is ignored.
    final_boundary_index = data.rfind(b"--" + boundary + b"--")
    if final_boundary_index == -1:
        gen_log.warning("Invalid multipart/form-data: no final boundary")
        return
    parts = data[:final_boundary_index].split(b"--" + boundary + b"\r\n")
    for part in parts:
        if not part:
            continue
        # Each part is "<headers>\r\n\r\n<value>\r\n".
        eoh = part.find(b"\r\n\r\n")
        if eoh == -1:
            gen_log.warning("multipart/form-data missing headers")
            continue
        headers = HTTPHeaders.parse(part[:eoh].decode("utf-8"))
        disp_header = headers.get("Content-Disposition", "")
        disposition, disp_params = _parse_header(disp_header)
        if disposition != "form-data" or not part.endswith(b"\r\n"):
            gen_log.warning("Invalid multipart/form-data")
            continue
        # Strip the header block + blank line (eoh + 4 bytes) and the
        # trailing "\r\n" that precedes the next boundary.
        value = part[eoh + 4:-2]
        if not disp_params.get("name"):
            gen_log.warning("multipart/form-data value missing name")
            continue
        name = disp_params["name"]
        if disp_params.get("filename"):
            # A filename parameter marks this part as a file upload.
            ctype = headers.get("Content-Type", "application/unknown")
            files.setdefault(name, []).append(HTTPFile(
                filename=disp_params["filename"], body=value,
                content_type=ctype))
        else:
            arguments.setdefault(name, []).append(value)
def format_timestamp(ts):
    """Formats a timestamp in the format used by HTTP.

    The argument may be a numeric timestamp as returned by `time.time`,
    a time tuple as returned by `time.gmtime`, or a `datetime.datetime`
    object.

    :raises TypeError: if ``ts`` is none of the supported types.

    >>> format_timestamp(1359312200)
    'Sun, 27 Jan 2013 18:43:20 GMT'
    """
    if isinstance(ts, numbers.Real):
        pass
    elif isinstance(ts, (tuple, time.struct_time)):
        ts = calendar.timegm(ts)
    elif isinstance(ts, datetime.datetime):
        # Naive datetimes are treated as UTC.
        ts = calendar.timegm(ts.utctimetuple())
    else:
        raise TypeError("unknown timestamp type: %r" % ts)
    return email.utils.formatdate(ts, usegmt=True)


RequestStartLine = collections.namedtuple(
    'RequestStartLine', ['method', 'path', 'version'])


def parse_request_start_line(line):
    """Returns a (method, path, version) tuple for an HTTP 1.x request line.

    The response is a `collections.namedtuple`.

    :raises HTTPInputError: if the line is malformed.

    >>> parse_request_start_line("GET /foo HTTP/1.1")
    RequestStartLine(method='GET', path='/foo', version='HTTP/1.1')
    """
    try:
        method, path, version = line.split(" ")
    except ValueError:
        raise HTTPInputError("Malformed HTTP request line")
    if not re.match(r"^HTTP/1\.[0-9]$", version):
        raise HTTPInputError(
            "Malformed HTTP version in HTTP Request-Line: %r" % version)
    return RequestStartLine(method, path, version)


ResponseStartLine = collections.namedtuple(
    'ResponseStartLine', ['version', 'code', 'reason'])


def parse_response_start_line(line):
    """Returns a (version, code, reason) tuple for an HTTP 1.x response line.

    The response is a `collections.namedtuple`.

    :raises HTTPInputError: if the line is malformed.

    >>> parse_response_start_line("HTTP/1.1 200 OK")
    ResponseStartLine(version='HTTP/1.1', code=200, reason='OK')
    """
    line = native_str(line)
    # The dot in the version is escaped here (the original pattern used an
    # unescaped '.', which accepted junk such as "HTTP/1x1 200 OK").
    match = re.match(r"(HTTP/1\.[0-9]) ([0-9]+) ([^\r]*)", line)
    if not match:
        raise HTTPInputError("Error parsing response start line")
    return ResponseStartLine(match.group(1), int(match.group(2)),
                             match.group(3))
+# It has also been modified to support valueless parameters as seen in +# websocket extension negotiations. + + +def _parseparam(s): + while s[:1] == ';': + s = s[1:] + end = s.find(';') + while end > 0 and (s.count('"', 0, end) - s.count('\\"', 0, end)) % 2: + end = s.find(';', end + 1) + if end < 0: + end = len(s) + f = s[:end] + yield f.strip() + s = s[end:] + + +def _parse_header(line): + """Parse a Content-type like header. + + Return the main content-type and a dictionary of options. + + """ + parts = _parseparam(';' + line) + key = next(parts) + pdict = {} + for p in parts: + i = p.find('=') + if i >= 0: + name = p[:i].strip().lower() + value = p[i + 1:].strip() + if len(value) >= 2 and value[0] == value[-1] == '"': + value = value[1:-1] + value = value.replace('\\\\', '\\').replace('\\"', '"') + pdict[name] = value + else: + pdict[p] = None + return key, pdict + + +def _encode_header(key, pdict): + """Inverse of _parse_header. + + >>> _encode_header('permessage-deflate', + ... {'client_max_window_bits': 15, 'client_no_context_takeover': None}) + 'permessage-deflate; client_max_window_bits=15; client_no_context_takeover' + """ + if not pdict: + return key + out = [key] + # Sort the parameters just to make it easy to test. + for k, v in sorted(pdict.items()): + if v is None: + out.append(k) + else: + # TODO: quote if necessary. + out.append('%s=%s' % (k, v)) + return '; '.join(out) + + +def doctests(): + import doctest + return doctest.DocTestSuite() + + +def split_host_and_port(netloc): + """Returns ``(host, port)`` tuple from ``netloc``. + + Returned ``port`` will be ``None`` if not present. + + .. 
versionadded:: 4.1 + """ + match = re.match(r'^(.+):(\d+)$', netloc) + if match: + host = match.group(1) + port = int(match.group(2)) + else: + host = netloc + port = None + return (host, port) diff --git a/python/tornado/ioloop.py b/python/tornado/ioloop.py new file mode 100644 index 000000000..c23cb33e4 --- /dev/null +++ b/python/tornado/ioloop.py @@ -0,0 +1,1053 @@ +#!/usr/bin/env python +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""An I/O event loop for non-blocking sockets. + +Typical applications will use a single `IOLoop` object, in the +`IOLoop.instance` singleton. The `IOLoop.start` method should usually +be called at the end of the ``main()`` function. Atypical applications may +use more than one `IOLoop`, such as one `IOLoop` per thread, or per `unittest` +case. + +In addition to I/O events, the `IOLoop` can also schedule time-based events. +`IOLoop.add_timeout` is a non-blocking alternative to `time.sleep`. 
+""" + +from __future__ import absolute_import, division, print_function, with_statement + +import datetime +import errno +import functools +import heapq +import itertools +import logging +import numbers +import os +import select +import sys +import threading +import time +import traceback +import math + +from tornado.concurrent import TracebackFuture, is_future +from tornado.log import app_log, gen_log +from tornado import stack_context +from tornado.util import Configurable, errno_from_exception, timedelta_to_seconds + +try: + import signal +except ImportError: + signal = None + +try: + import thread # py2 +except ImportError: + import _thread as thread # py3 + +from tornado.platform.auto import set_close_exec, Waker + + +_POLL_TIMEOUT = 3600.0 + + +class TimeoutError(Exception): + pass + + +class IOLoop(Configurable): + """A level-triggered I/O loop. + + We use ``epoll`` (Linux) or ``kqueue`` (BSD and Mac OS X) if they + are available, or else we fall back on select(). If you are + implementing a system that needs to handle thousands of + simultaneous connections, you should use a system that supports + either ``epoll`` or ``kqueue``. + + Example usage for a simple TCP server: + + .. testcode:: + + import errno + import functools + import tornado.ioloop + import socket + + def connection_ready(sock, fd, events): + while True: + try: + connection, address = sock.accept() + except socket.error as e: + if e.args[0] not in (errno.EWOULDBLOCK, errno.EAGAIN): + raise + return + connection.setblocking(0) + handle_connection(connection, address) + + if __name__ == '__main__': + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + sock.setblocking(0) + sock.bind(("", port)) + sock.listen(128) + + io_loop = tornado.ioloop.IOLoop.current() + callback = functools.partial(connection_ready, sock) + io_loop.add_handler(sock.fileno(), callback, io_loop.READ) + io_loop.start() + + .. 
testoutput:: + :hide: + + By default, a newly-constructed `IOLoop` becomes the thread's current + `IOLoop`, unless there already is a current `IOLoop`. This behavior + can be controlled with the ``make_current`` argument to the `IOLoop` + constructor: if ``make_current=True``, the new `IOLoop` will always + try to become current and it raises an error if there is already a + current instance. If ``make_current=False``, the new `IOLoop` will + not try to become current. + + .. versionchanged:: 4.2 + Added the ``make_current`` keyword argument to the `IOLoop` + constructor. + """ + # Constants from the epoll module + _EPOLLIN = 0x001 + _EPOLLPRI = 0x002 + _EPOLLOUT = 0x004 + _EPOLLERR = 0x008 + _EPOLLHUP = 0x010 + _EPOLLRDHUP = 0x2000 + _EPOLLONESHOT = (1 << 30) + _EPOLLET = (1 << 31) + + # Our events map exactly to the epoll events + NONE = 0 + READ = _EPOLLIN + WRITE = _EPOLLOUT + ERROR = _EPOLLERR | _EPOLLHUP + + # Global lock for creating global IOLoop instance + _instance_lock = threading.Lock() + + _current = threading.local() + + @staticmethod + def instance(): + """Returns a global `IOLoop` instance. + + Most applications have a single, global `IOLoop` running on the + main thread. Use this method to get this instance from + another thread. In most other cases, it is better to use `current()` + to get the current thread's `IOLoop`. + """ + if not hasattr(IOLoop, "_instance"): + with IOLoop._instance_lock: + if not hasattr(IOLoop, "_instance"): + # New instance after double check + IOLoop._instance = IOLoop() + return IOLoop._instance + + @staticmethod + def initialized(): + """Returns true if the singleton instance has been created.""" + return hasattr(IOLoop, "_instance") + + def install(self): + """Installs this `IOLoop` object as the singleton instance. + + This is normally not necessary as `instance()` will create + an `IOLoop` on demand, but you may want to call `install` to use + a custom subclass of `IOLoop`. 
+ """ + assert not IOLoop.initialized() + IOLoop._instance = self + + @staticmethod + def clear_instance(): + """Clear the global `IOLoop` instance. + + .. versionadded:: 4.0 + """ + if hasattr(IOLoop, "_instance"): + del IOLoop._instance + + @staticmethod + def current(instance=True): + """Returns the current thread's `IOLoop`. + + If an `IOLoop` is currently running or has been marked as + current by `make_current`, returns that instance. If there is + no current `IOLoop`, returns `IOLoop.instance()` (i.e. the + main thread's `IOLoop`, creating one if necessary) if ``instance`` + is true. + + In general you should use `IOLoop.current` as the default when + constructing an asynchronous object, and use `IOLoop.instance` + when you mean to communicate to the main thread from a different + one. + + .. versionchanged:: 4.1 + Added ``instance`` argument to control the fallback to + `IOLoop.instance()`. + """ + current = getattr(IOLoop._current, "instance", None) + if current is None and instance: + return IOLoop.instance() + return current + + def make_current(self): + """Makes this the `IOLoop` for the current thread. + + An `IOLoop` automatically becomes current for its thread + when it is started, but it is sometimes useful to call + `make_current` explicitly before starting the `IOLoop`, + so that code run at startup time can find the right + instance. + + .. versionchanged:: 4.1 + An `IOLoop` created while there is no current `IOLoop` + will automatically become current. 
+ """ + IOLoop._current.instance = self + + @staticmethod + def clear_current(): + IOLoop._current.instance = None + + @classmethod + def configurable_base(cls): + return IOLoop + + @classmethod + def configurable_default(cls): + if hasattr(select, "epoll"): + from tornado.platform.epoll import EPollIOLoop + return EPollIOLoop + if hasattr(select, "kqueue"): + # Python 2.6+ on BSD or Mac + from tornado.platform.kqueue import KQueueIOLoop + return KQueueIOLoop + from tornado.platform.select import SelectIOLoop + return SelectIOLoop + + def initialize(self, make_current=None): + if make_current is None: + if IOLoop.current(instance=False) is None: + self.make_current() + elif make_current: + if IOLoop.current(instance=False) is not None: + raise RuntimeError("current IOLoop already exists") + self.make_current() + + def close(self, all_fds=False): + """Closes the `IOLoop`, freeing any resources used. + + If ``all_fds`` is true, all file descriptors registered on the + IOLoop will be closed (not just the ones created by the + `IOLoop` itself). + + Many applications will only use a single `IOLoop` that runs for the + entire lifetime of the process. In that case closing the `IOLoop` + is not necessary since everything will be cleaned up when the + process exits. `IOLoop.close` is provided mainly for scenarios + such as unit tests, which create and destroy a large number of + ``IOLoops``. + + An `IOLoop` must be completely stopped before it can be closed. This + means that `IOLoop.stop()` must be called *and* `IOLoop.start()` must + be allowed to return before attempting to call `IOLoop.close()`. + Therefore the call to `close` will usually appear just after + the call to `start` rather than near the call to `stop`. + + .. versionchanged:: 3.1 + If the `IOLoop` implementation supports non-integer objects + for "file descriptors", those objects will have their + ``close`` method when ``all_fds`` is true. 
+ """ + raise NotImplementedError() + + def add_handler(self, fd, handler, events): + """Registers the given handler to receive the given events for ``fd``. + + The ``fd`` argument may either be an integer file descriptor or + a file-like object with a ``fileno()`` method (and optionally a + ``close()`` method, which may be called when the `IOLoop` is shut + down). + + The ``events`` argument is a bitwise or of the constants + ``IOLoop.READ``, ``IOLoop.WRITE``, and ``IOLoop.ERROR``. + + When an event occurs, ``handler(fd, events)`` will be run. + + .. versionchanged:: 4.0 + Added the ability to pass file-like objects in addition to + raw file descriptors. + """ + raise NotImplementedError() + + def update_handler(self, fd, events): + """Changes the events we listen for ``fd``. + + .. versionchanged:: 4.0 + Added the ability to pass file-like objects in addition to + raw file descriptors. + """ + raise NotImplementedError() + + def remove_handler(self, fd): + """Stop listening for events on ``fd``. + + .. versionchanged:: 4.0 + Added the ability to pass file-like objects in addition to + raw file descriptors. + """ + raise NotImplementedError() + + def set_blocking_signal_threshold(self, seconds, action): + """Sends a signal if the `IOLoop` is blocked for more than + ``s`` seconds. + + Pass ``seconds=None`` to disable. Requires Python 2.6 on a unixy + platform. + + The action parameter is a Python signal handler. Read the + documentation for the `signal` module for more information. + If ``action`` is None, the process will be killed if it is + blocked for too long. + """ + raise NotImplementedError() + + def set_blocking_log_threshold(self, seconds): + """Logs a stack trace if the `IOLoop` is blocked for more than + ``s`` seconds. 
+ + Equivalent to ``set_blocking_signal_threshold(seconds, + self.log_stack)`` + """ + self.set_blocking_signal_threshold(seconds, self.log_stack) + + def log_stack(self, signal, frame): + """Signal handler to log the stack trace of the current thread. + + For use with `set_blocking_signal_threshold`. + """ + gen_log.warning('IOLoop blocked for %f seconds in\n%s', + self._blocking_signal_threshold, + ''.join(traceback.format_stack(frame))) + + def start(self): + """Starts the I/O loop. + + The loop will run until one of the callbacks calls `stop()`, which + will make the loop stop after the current event iteration completes. + """ + raise NotImplementedError() + + def _setup_logging(self): + """The IOLoop catches and logs exceptions, so it's + important that log output be visible. However, python's + default behavior for non-root loggers (prior to python + 3.2) is to print an unhelpful "no handlers could be + found" message rather than the actual log entry, so we + must explicitly configure logging if we've made it this + far without anything. + + This method should be called from start() in subclasses. + """ + if not any([logging.getLogger().handlers, + logging.getLogger('tornado').handlers, + logging.getLogger('tornado.application').handlers]): + logging.basicConfig() + + def stop(self): + """Stop the I/O loop. + + If the event loop is not currently running, the next call to `start()` + will return immediately. + + To use asynchronous methods from otherwise-synchronous code (such as + unit tests), you can start and stop the event loop like this:: + + ioloop = IOLoop() + async_method(ioloop=ioloop, callback=ioloop.stop) + ioloop.start() + + ``ioloop.start()`` will return after ``async_method`` has run + its callback, whether that callback was invoked before or + after ``ioloop.start``. + + Note that even after `stop` has been called, the `IOLoop` is not + completely stopped until `IOLoop.start` has also returned. 
+ Some work that was scheduled before the call to `stop` may still + be run before the `IOLoop` shuts down. + """ + raise NotImplementedError() + + def run_sync(self, func, timeout=None): + """Starts the `IOLoop`, runs the given function, and stops the loop. + + The function must return either a yieldable object or + ``None``. If the function returns a yieldable object, the + `IOLoop` will run until the yieldable is resolved (and + `run_sync()` will return the yieldable's result). If it raises + an exception, the `IOLoop` will stop and the exception will be + re-raised to the caller. + + The keyword-only argument ``timeout`` may be used to set + a maximum duration for the function. If the timeout expires, + a `TimeoutError` is raised. + + This method is useful in conjunction with `tornado.gen.coroutine` + to allow asynchronous calls in a ``main()`` function:: + + @gen.coroutine + def main(): + # do stuff... + + if __name__ == '__main__': + IOLoop.current().run_sync(main) + + .. versionchanged:: 4.3 + Returning a non-``None``, non-yieldable value is now an error. + """ + future_cell = [None] + + def run(): + try: + result = func() + if result is not None: + from tornado.gen import convert_yielded + result = convert_yielded(result) + except Exception: + future_cell[0] = TracebackFuture() + future_cell[0].set_exc_info(sys.exc_info()) + else: + if is_future(result): + future_cell[0] = result + else: + future_cell[0] = TracebackFuture() + future_cell[0].set_result(result) + self.add_future(future_cell[0], lambda future: self.stop()) + self.add_callback(run) + if timeout is not None: + timeout_handle = self.add_timeout(self.time() + timeout, self.stop) + self.start() + if timeout is not None: + self.remove_timeout(timeout_handle) + if not future_cell[0].done(): + raise TimeoutError('Operation timed out after %s seconds' % timeout) + return future_cell[0].result() + + def time(self): + """Returns the current time according to the `IOLoop`'s clock. 
+ + The return value is a floating-point number relative to an + unspecified time in the past. + + By default, the `IOLoop`'s time function is `time.time`. However, + it may be configured to use e.g. `time.monotonic` instead. + Calls to `add_timeout` that pass a number instead of a + `datetime.timedelta` should use this function to compute the + appropriate time, so they can work no matter what time function + is chosen. + """ + return time.time() + + def add_timeout(self, deadline, callback, *args, **kwargs): + """Runs the ``callback`` at the time ``deadline`` from the I/O loop. + + Returns an opaque handle that may be passed to + `remove_timeout` to cancel. + + ``deadline`` may be a number denoting a time (on the same + scale as `IOLoop.time`, normally `time.time`), or a + `datetime.timedelta` object for a deadline relative to the + current time. Since Tornado 4.0, `call_later` is a more + convenient alternative for the relative case since it does not + require a timedelta object. + + Note that it is not safe to call `add_timeout` from other threads. + Instead, you must use `add_callback` to transfer control to the + `IOLoop`'s thread, and then call `add_timeout` from there. + + Subclasses of IOLoop must implement either `add_timeout` or + `call_at`; the default implementations of each will call + the other. `call_at` is usually easier to implement, but + subclasses that wish to maintain compatibility with Tornado + versions prior to 4.0 must use `add_timeout` instead. + + .. versionchanged:: 4.0 + Now passes through ``*args`` and ``**kwargs`` to the callback. 
+ """ + if isinstance(deadline, numbers.Real): + return self.call_at(deadline, callback, *args, **kwargs) + elif isinstance(deadline, datetime.timedelta): + return self.call_at(self.time() + timedelta_to_seconds(deadline), + callback, *args, **kwargs) + else: + raise TypeError("Unsupported deadline %r" % deadline) + + def call_later(self, delay, callback, *args, **kwargs): + """Runs the ``callback`` after ``delay`` seconds have passed. + + Returns an opaque handle that may be passed to `remove_timeout` + to cancel. Note that unlike the `asyncio` method of the same + name, the returned object does not have a ``cancel()`` method. + + See `add_timeout` for comments on thread-safety and subclassing. + + .. versionadded:: 4.0 + """ + return self.call_at(self.time() + delay, callback, *args, **kwargs) + + def call_at(self, when, callback, *args, **kwargs): + """Runs the ``callback`` at the absolute time designated by ``when``. + + ``when`` must be a number using the same reference point as + `IOLoop.time`. + + Returns an opaque handle that may be passed to `remove_timeout` + to cancel. Note that unlike the `asyncio` method of the same + name, the returned object does not have a ``cancel()`` method. + + See `add_timeout` for comments on thread-safety and subclassing. + + .. versionadded:: 4.0 + """ + return self.add_timeout(when, callback, *args, **kwargs) + + def remove_timeout(self, timeout): + """Cancels a pending timeout. + + The argument is a handle as returned by `add_timeout`. It is + safe to call `remove_timeout` even if the callback has already + been run. + """ + raise NotImplementedError() + + def add_callback(self, callback, *args, **kwargs): + """Calls the given callback on the next I/O loop iteration. + + It is safe to call this method from any thread at any time, + except from a signal handler. 
Note that this is the **only** + method in `IOLoop` that makes this thread-safety guarantee; all + other interaction with the `IOLoop` must be done from that + `IOLoop`'s thread. `add_callback()` may be used to transfer + control from other threads to the `IOLoop`'s thread. + + To add a callback from a signal handler, see + `add_callback_from_signal`. + """ + raise NotImplementedError() + + def add_callback_from_signal(self, callback, *args, **kwargs): + """Calls the given callback on the next I/O loop iteration. + + Safe for use from a Python signal handler; should not be used + otherwise. + + Callbacks added with this method will be run without any + `.stack_context`, to avoid picking up the context of the function + that was interrupted by the signal. + """ + raise NotImplementedError() + + def spawn_callback(self, callback, *args, **kwargs): + """Calls the given callback on the next IOLoop iteration. + + Unlike all other callback-related methods on IOLoop, + ``spawn_callback`` does not associate the callback with its caller's + ``stack_context``, so it is suitable for fire-and-forget callbacks + that should not interfere with the caller. + + .. versionadded:: 4.0 + """ + with stack_context.NullContext(): + self.add_callback(callback, *args, **kwargs) + + def add_future(self, future, callback): + """Schedules a callback on the ``IOLoop`` when the given + `.Future` is finished. + + The callback is invoked with one argument, the + `.Future`. + """ + assert is_future(future) + callback = stack_context.wrap(callback) + future.add_done_callback( + lambda future: self.add_callback(callback, future)) + + def _run_callback(self, callback): + """Runs a callback with error handling. + + For use in subclasses. + """ + try: + ret = callback() + if ret is not None: + from tornado import gen + # Functions that return Futures typically swallow all + # exceptions and store them in the Future. 
If a Future + # makes it out to the IOLoop, ensure its exception (if any) + # gets logged too. + try: + ret = gen.convert_yielded(ret) + except gen.BadYieldError: + # It's not unusual for add_callback to be used with + # methods returning a non-None and non-yieldable + # result, which should just be ignored. + pass + else: + self.add_future(ret, lambda f: f.result()) + except Exception: + self.handle_callback_exception(callback) + + def handle_callback_exception(self, callback): + """This method is called whenever a callback run by the `IOLoop` + throws an exception. + + By default simply logs the exception as an error. Subclasses + may override this method to customize reporting of exceptions. + + The exception itself is not passed explicitly, but is available + in `sys.exc_info`. + """ + app_log.error("Exception in callback %r", callback, exc_info=True) + + def split_fd(self, fd): + """Returns an (fd, obj) pair from an ``fd`` parameter. + + We accept both raw file descriptors and file-like objects as + input to `add_handler` and related methods. When a file-like + object is passed, we must retain the object itself so we can + close it correctly when the `IOLoop` shuts down, but the + poller interfaces favor file descriptors (they will accept + file-like objects and call ``fileno()`` for you, but they + always return the descriptor itself). + + This method is provided for use by `IOLoop` subclasses and should + not generally be used by application code. + + .. versionadded:: 4.0 + """ + try: + return fd.fileno(), fd + except AttributeError: + return fd, fd + + def close_fd(self, fd): + """Utility method to close an ``fd``. + + If ``fd`` is a file-like object, we close it directly; otherwise + we use `os.close`. + + This method is provided for use by `IOLoop` subclasses (in + implementations of ``IOLoop.close(all_fds=True)`` and should + not generally be used by application code. + + .. 
versionadded:: 4.0 + """ + try: + try: + fd.close() + except AttributeError: + os.close(fd) + except OSError: + pass + + +class PollIOLoop(IOLoop): + """Base class for IOLoops built around a select-like function. + + For concrete implementations, see `tornado.platform.epoll.EPollIOLoop` + (Linux), `tornado.platform.kqueue.KQueueIOLoop` (BSD and Mac), or + `tornado.platform.select.SelectIOLoop` (all platforms). + """ + def initialize(self, impl, time_func=None, **kwargs): + super(PollIOLoop, self).initialize(**kwargs) + self._impl = impl + if hasattr(self._impl, 'fileno'): + set_close_exec(self._impl.fileno()) + self.time_func = time_func or time.time + self._handlers = {} + self._events = {} + self._callbacks = [] + self._callback_lock = threading.Lock() + self._timeouts = [] + self._cancellations = 0 + self._running = False + self._stopped = False + self._closing = False + self._thread_ident = None + self._blocking_signal_threshold = None + self._timeout_counter = itertools.count() + + # Create a pipe that we send bogus data to when we want to wake + # the I/O loop when it is idle + self._waker = Waker() + self.add_handler(self._waker.fileno(), + lambda fd, events: self._waker.consume(), + self.READ) + + def close(self, all_fds=False): + with self._callback_lock: + self._closing = True + self.remove_handler(self._waker.fileno()) + if all_fds: + for fd, handler in self._handlers.values(): + self.close_fd(fd) + self._waker.close() + self._impl.close() + self._callbacks = None + self._timeouts = None + + def add_handler(self, fd, handler, events): + fd, obj = self.split_fd(fd) + self._handlers[fd] = (obj, stack_context.wrap(handler)) + self._impl.register(fd, events | self.ERROR) + + def update_handler(self, fd, events): + fd, obj = self.split_fd(fd) + self._impl.modify(fd, events | self.ERROR) + + def remove_handler(self, fd): + fd, obj = self.split_fd(fd) + self._handlers.pop(fd, None) + self._events.pop(fd, None) + try: + self._impl.unregister(fd) + except 
Exception: + gen_log.debug("Error deleting fd from IOLoop", exc_info=True) + + def set_blocking_signal_threshold(self, seconds, action): + if not hasattr(signal, "setitimer"): + gen_log.error("set_blocking_signal_threshold requires a signal module " + "with the setitimer method") + return + self._blocking_signal_threshold = seconds + if seconds is not None: + signal.signal(signal.SIGALRM, + action if action is not None else signal.SIG_DFL) + + def start(self): + if self._running: + raise RuntimeError("IOLoop is already running") + self._setup_logging() + if self._stopped: + self._stopped = False + return + old_current = getattr(IOLoop._current, "instance", None) + IOLoop._current.instance = self + self._thread_ident = thread.get_ident() + self._running = True + + # signal.set_wakeup_fd closes a race condition in event loops: + # a signal may arrive at the beginning of select/poll/etc + # before it goes into its interruptible sleep, so the signal + # will be consumed without waking the select. The solution is + # for the (C, synchronous) signal handler to write to a pipe, + # which will then be seen by select. + # + # In python's signal handling semantics, this only matters on the + # main thread (fortunately, set_wakeup_fd only works on the main + # thread and will raise a ValueError otherwise). + # + # If someone has already set a wakeup fd, we don't want to + # disturb it. This is an issue for twisted, which does its + # SIGCHLD processing in response to its own wakeup fd being + # written to. As long as the wakeup fd is registered on the IOLoop, + # the loop will still wake up and everything should work. + old_wakeup_fd = None + if hasattr(signal, 'set_wakeup_fd') and os.name == 'posix': + # requires python 2.6+, unix. set_wakeup_fd exists but crashes + # the python process on windows. + try: + old_wakeup_fd = signal.set_wakeup_fd(self._waker.write_fileno()) + if old_wakeup_fd != -1: + # Already set, restore previous value. 
This is a little racy, + # but there's no clean get_wakeup_fd and in real use the + # IOLoop is just started once at the beginning. + signal.set_wakeup_fd(old_wakeup_fd) + old_wakeup_fd = None + except ValueError: + # Non-main thread, or the previous value of wakeup_fd + # is no longer valid. + old_wakeup_fd = None + + try: + while True: + # Prevent IO event starvation by delaying new callbacks + # to the next iteration of the event loop. + with self._callback_lock: + callbacks = self._callbacks + self._callbacks = [] + + # Add any timeouts that have come due to the callback list. + # Do not run anything until we have determined which ones + # are ready, so timeouts that call add_timeout cannot + # schedule anything in this iteration. + due_timeouts = [] + if self._timeouts: + now = self.time() + while self._timeouts: + if self._timeouts[0].callback is None: + # The timeout was cancelled. Note that the + # cancellation check is repeated below for timeouts + # that are cancelled by another timeout or callback. + heapq.heappop(self._timeouts) + self._cancellations -= 1 + elif self._timeouts[0].deadline <= now: + due_timeouts.append(heapq.heappop(self._timeouts)) + else: + break + if (self._cancellations > 512 + and self._cancellations > (len(self._timeouts) >> 1)): + # Clean up the timeout queue when it gets large and it's + # more than half cancellations. + self._cancellations = 0 + self._timeouts = [x for x in self._timeouts + if x.callback is not None] + heapq.heapify(self._timeouts) + + for callback in callbacks: + self._run_callback(callback) + for timeout in due_timeouts: + if timeout.callback is not None: + self._run_callback(timeout.callback) + # Closures may be holding on to a lot of memory, so allow + # them to be freed before we go into our poll wait. + callbacks = callback = due_timeouts = timeout = None + + if self._callbacks: + # If any callbacks or timeouts called add_callback, + # we don't want to wait in poll() before we run them. 
+ poll_timeout = 0.0 + elif self._timeouts: + # If there are any timeouts, schedule the first one. + # Use self.time() instead of 'now' to account for time + # spent running callbacks. + poll_timeout = self._timeouts[0].deadline - self.time() + poll_timeout = max(0, min(poll_timeout, _POLL_TIMEOUT)) + else: + # No timeouts and no callbacks, so use the default. + poll_timeout = _POLL_TIMEOUT + + if not self._running: + break + + if self._blocking_signal_threshold is not None: + # clear alarm so it doesn't fire while poll is waiting for + # events. + signal.setitimer(signal.ITIMER_REAL, 0, 0) + + try: + event_pairs = self._impl.poll(poll_timeout) + except Exception as e: + # Depending on python version and IOLoop implementation, + # different exception types may be thrown and there are + # two ways EINTR might be signaled: + # * e.errno == errno.EINTR + # * e.args is like (errno.EINTR, 'Interrupted system call') + if errno_from_exception(e) == errno.EINTR: + continue + else: + raise + + if self._blocking_signal_threshold is not None: + signal.setitimer(signal.ITIMER_REAL, + self._blocking_signal_threshold, 0) + + # Pop one fd at a time from the set of pending fds and run + # its handler. 
Since that handler may perform actions on + # other file descriptors, there may be reentrant calls to + # this IOLoop that update self._events + self._events.update(event_pairs) + while self._events: + fd, events = self._events.popitem() + try: + fd_obj, handler_func = self._handlers[fd] + handler_func(fd_obj, events) + except (OSError, IOError) as e: + if errno_from_exception(e) == errno.EPIPE: + # Happens when the client closes the connection + pass + else: + self.handle_callback_exception(self._handlers.get(fd)) + except Exception: + self.handle_callback_exception(self._handlers.get(fd)) + fd_obj = handler_func = None + + finally: + # reset the stopped flag so another start/stop pair can be issued + self._stopped = False + if self._blocking_signal_threshold is not None: + signal.setitimer(signal.ITIMER_REAL, 0, 0) + IOLoop._current.instance = old_current + if old_wakeup_fd is not None: + signal.set_wakeup_fd(old_wakeup_fd) + + def stop(self): + self._running = False + self._stopped = True + self._waker.wake() + + def time(self): + return self.time_func() + + def call_at(self, deadline, callback, *args, **kwargs): + timeout = _Timeout( + deadline, + functools.partial(stack_context.wrap(callback), *args, **kwargs), + self) + heapq.heappush(self._timeouts, timeout) + return timeout + + def remove_timeout(self, timeout): + # Removing from a heap is complicated, so just leave the defunct + # timeout object in the queue (see discussion in + # http://docs.python.org/library/heapq.html). + # If this turns out to be a problem, we could add a garbage + # collection pass whenever there are too many dead timeouts. + timeout.callback = None + self._cancellations += 1 + + def add_callback(self, callback, *args, **kwargs): + if thread.get_ident() != self._thread_ident: + # If we're not on the IOLoop's thread, we need to synchronize + # with other threads, or waking logic will induce a race. 
+ with self._callback_lock: + if self._closing: + return + list_empty = not self._callbacks + self._callbacks.append(functools.partial( + stack_context.wrap(callback), *args, **kwargs)) + if list_empty: + # If we're not in the IOLoop's thread, and we added the + # first callback to an empty list, we may need to wake it + # up (it may wake up on its own, but an occasional extra + # wake is harmless). Waking up a polling IOLoop is + # relatively expensive, so we try to avoid it when we can. + self._waker.wake() + else: + if self._closing: + return + # If we're on the IOLoop's thread, we don't need the lock, + # since we don't need to wake anyone, just add the + # callback. Blindly insert into self._callbacks. This is + # safe even from signal handlers because the GIL makes + # list.append atomic. One subtlety is that if the signal + # is interrupting another thread holding the + # _callback_lock block in IOLoop.start, we may modify + # either the old or new version of self._callbacks, but + # either way will work. + self._callbacks.append(functools.partial( + stack_context.wrap(callback), *args, **kwargs)) + + def add_callback_from_signal(self, callback, *args, **kwargs): + with stack_context.NullContext(): + self.add_callback(callback, *args, **kwargs) + + +class _Timeout(object): + """An IOLoop timeout, a UNIX timestamp and a callback""" + + # Reduce memory overhead when there are lots of pending callbacks + __slots__ = ['deadline', 'callback', 'tiebreaker'] + + def __init__(self, deadline, callback, io_loop): + if not isinstance(deadline, numbers.Real): + raise TypeError("Unsupported deadline %r" % deadline) + self.deadline = deadline + self.callback = callback + self.tiebreaker = next(io_loop._timeout_counter) + + # Comparison methods to sort by deadline, with object id as a tiebreaker + # to guarantee a consistent ordering. The heapq module uses __le__ + # in python2.5, and __lt__ in 2.6+ (sort() and most other comparisons + # use __lt__). 
+ def __lt__(self, other): + return ((self.deadline, self.tiebreaker) < + (other.deadline, other.tiebreaker)) + + def __le__(self, other): + return ((self.deadline, self.tiebreaker) <= + (other.deadline, other.tiebreaker)) + + +class PeriodicCallback(object): + """Schedules the given callback to be called periodically. + + The callback is called every ``callback_time`` milliseconds. + Note that the timeout is given in milliseconds, while most other + time-related functions in Tornado use seconds. + + If the callback runs for longer than ``callback_time`` milliseconds, + subsequent invocations will be skipped to get back on schedule. + + `start` must be called after the `PeriodicCallback` is created. + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. + """ + def __init__(self, callback, callback_time, io_loop=None): + self.callback = callback + if callback_time <= 0: + raise ValueError("Periodic callback must have a positive callback_time") + self.callback_time = callback_time + self.io_loop = io_loop or IOLoop.current() + self._running = False + self._timeout = None + + def start(self): + """Starts the timer.""" + self._running = True + self._next_timeout = self.io_loop.time() + self._schedule_next() + + def stop(self): + """Stops the timer.""" + self._running = False + if self._timeout is not None: + self.io_loop.remove_timeout(self._timeout) + self._timeout = None + + def is_running(self): + """Return True if this `.PeriodicCallback` has been started. + + .. 
versionadded:: 4.1 + """ + return self._running + + def _run(self): + if not self._running: + return + try: + return self.callback() + except Exception: + self.io_loop.handle_callback_exception(self.callback) + finally: + self._schedule_next() + + def _schedule_next(self): + if self._running: + current_time = self.io_loop.time() + + if self._next_timeout <= current_time: + callback_time_sec = self.callback_time / 1000.0 + self._next_timeout += (math.floor((current_time - self._next_timeout) / callback_time_sec) + 1) * callback_time_sec + + self._timeout = self.io_loop.add_timeout(self._next_timeout, self._run) diff --git a/python/tornado/iostream.py b/python/tornado/iostream.py new file mode 100644 index 000000000..4e304f890 --- /dev/null +++ b/python/tornado/iostream.py @@ -0,0 +1,1550 @@ +#!/usr/bin/env python +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Utility classes to write to and read from non-blocking files and sockets. + +Contents: + +* `BaseIOStream`: Generic interface for reading and writing. +* `IOStream`: Implementation of BaseIOStream using non-blocking sockets. +* `SSLIOStream`: SSL-aware version of IOStream. +* `PipeIOStream`: Pipe-based IOStream implementation. 
+""" + +from __future__ import absolute_import, division, print_function, with_statement + +import collections +import errno +import numbers +import os +import socket +import sys +import re + +from tornado.concurrent import TracebackFuture +from tornado import ioloop +from tornado.log import gen_log, app_log +from tornado.netutil import ssl_wrap_socket, ssl_match_hostname, SSLCertificateError, _client_ssl_defaults, _server_ssl_defaults +from tornado import stack_context +from tornado.util import errno_from_exception + +try: + from tornado.platform.posix import _set_nonblocking +except ImportError: + _set_nonblocking = None + +try: + import ssl +except ImportError: + # ssl is not available on Google App Engine + ssl = None + +# These errnos indicate that a non-blocking operation must be retried +# at a later time. On most platforms they're the same value, but on +# some they differ. +_ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN) + +if hasattr(errno, "WSAEWOULDBLOCK"): + _ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,) + +# These errnos indicate that a connection has been abruptly terminated. +# They should be caught and handled less noisily than other errors. +_ERRNO_CONNRESET = (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE, + errno.ETIMEDOUT) + +if hasattr(errno, "WSAECONNRESET"): + _ERRNO_CONNRESET += (errno.WSAECONNRESET, errno.WSAECONNABORTED, errno.WSAETIMEDOUT) + +if sys.platform == 'darwin': + # OSX appears to have a race condition that causes send(2) to return + # EPROTOTYPE if called while a socket is being torn down: + # http://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/ + # Since the socket is being closed anyway, treat this as an ECONNRESET + # instead of an unexpected error. 
+ _ERRNO_CONNRESET += (errno.EPROTOTYPE,) + +# More non-portable errnos: +_ERRNO_INPROGRESS = (errno.EINPROGRESS,) + +if hasattr(errno, "WSAEINPROGRESS"): + _ERRNO_INPROGRESS += (errno.WSAEINPROGRESS,) + + +class StreamClosedError(IOError): + """Exception raised by `IOStream` methods when the stream is closed. + + Note that the close callback is scheduled to run *after* other + callbacks on the stream (to allow for buffered data to be processed), + so you may see this error before you see the close callback. + + The ``real_error`` attribute contains the underlying error that caused + the stream to close (if any). + + .. versionchanged:: 4.3 + Added the ``real_error`` attribute. + """ + def __init__(self, real_error=None): + super(StreamClosedError, self).__init__('Stream is closed') + self.real_error = real_error + + +class UnsatisfiableReadError(Exception): + """Exception raised when a read cannot be satisfied. + + Raised by ``read_until`` and ``read_until_regex`` with a ``max_bytes`` + argument. + """ + pass + + +class StreamBufferFullError(Exception): + """Exception raised by `IOStream` methods when the buffer is full. + """ + + +class BaseIOStream(object): + """A utility class to write to and read from a non-blocking file or socket. + + We support a non-blocking ``write()`` and a family of ``read_*()`` methods. + All of the methods take an optional ``callback`` argument and return a + `.Future` only if no callback is given. When the operation completes, + the callback will be run or the `.Future` will resolve with the data + read (or ``None`` for ``write()``). All outstanding ``Futures`` will + resolve with a `StreamClosedError` when the stream is closed; users + of the callback interface will be notified via + `.BaseIOStream.set_close_callback` instead. + + When a stream is closed due to an error, the IOStream's ``error`` + attribute contains the exception object. 
+ + Subclasses must implement `fileno`, `close_fd`, `write_to_fd`, + `read_from_fd`, and optionally `get_fd_error`. + """ + def __init__(self, io_loop=None, max_buffer_size=None, + read_chunk_size=None, max_write_buffer_size=None): + """`BaseIOStream` constructor. + + :arg io_loop: The `.IOLoop` to use; defaults to `.IOLoop.current`. + Deprecated since Tornado 4.1. + :arg max_buffer_size: Maximum amount of incoming data to buffer; + defaults to 100MB. + :arg read_chunk_size: Amount of data to read at one time from the + underlying transport; defaults to 64KB. + :arg max_write_buffer_size: Amount of outgoing data to buffer; + defaults to unlimited. + + .. versionchanged:: 4.0 + Add the ``max_write_buffer_size`` parameter. Changed default + ``read_chunk_size`` to 64KB. + """ + self.io_loop = io_loop or ioloop.IOLoop.current() + self.max_buffer_size = max_buffer_size or 104857600 + # A chunk size that is too close to max_buffer_size can cause + # spurious failures. + self.read_chunk_size = min(read_chunk_size or 65536, + self.max_buffer_size // 2) + self.max_write_buffer_size = max_write_buffer_size + self.error = None + self._read_buffer = collections.deque() + self._write_buffer = collections.deque() + self._read_buffer_size = 0 + self._write_buffer_size = 0 + self._write_buffer_frozen = False + self._read_delimiter = None + self._read_regex = None + self._read_max_bytes = None + self._read_bytes = None + self._read_partial = False + self._read_until_close = False + self._read_callback = None + self._read_future = None + self._streaming_callback = None + self._write_callback = None + self._write_future = None + self._close_callback = None + self._connect_callback = None + self._connect_future = None + # _ssl_connect_future should be defined in SSLIOStream + # but it's here so we can clean it up in maybe_run_close_callback. + # TODO: refactor that so subclasses can add additional futures + # to be cancelled. 
+ self._ssl_connect_future = None + self._connecting = False + self._state = None + self._pending_callbacks = 0 + self._closed = False + + def fileno(self): + """Returns the file descriptor for this stream.""" + raise NotImplementedError() + + def close_fd(self): + """Closes the file underlying this stream. + + ``close_fd`` is called by `BaseIOStream` and should not be called + elsewhere; other users should call `close` instead. + """ + raise NotImplementedError() + + def write_to_fd(self, data): + """Attempts to write ``data`` to the underlying file. + + Returns the number of bytes written. + """ + raise NotImplementedError() + + def read_from_fd(self): + """Attempts to read from the underlying file. + + Returns ``None`` if there was nothing to read (the socket + returned `~errno.EWOULDBLOCK` or equivalent), otherwise + returns the data. When possible, should return no more than + ``self.read_chunk_size`` bytes at a time. + """ + raise NotImplementedError() + + def get_fd_error(self): + """Returns information about any error on the underlying file. + + This method is called after the `.IOLoop` has signaled an error on the + file descriptor, and should return an Exception (such as `socket.error` + with additional information, or None if no such information is + available. + """ + return None + + def read_until_regex(self, regex, callback=None, max_bytes=None): + """Asynchronously read until we have matched the given regex. + + The result includes the data that matches the regex and anything + that came before it. If a callback is given, it will be run + with the data as an argument; if not, this method returns a + `.Future`. + + If ``max_bytes`` is not None, the connection will be closed + if more than ``max_bytes`` bytes have been read and the regex is + not satisfied. + + .. versionchanged:: 4.0 + Added the ``max_bytes`` argument. The ``callback`` argument is + now optional and a `.Future` will be returned if it is omitted. 
+ """ + future = self._set_read_callback(callback) + self._read_regex = re.compile(regex) + self._read_max_bytes = max_bytes + try: + self._try_inline_read() + except UnsatisfiableReadError as e: + # Handle this the same way as in _handle_events. + gen_log.info("Unsatisfiable read, closing connection: %s" % e) + self.close(exc_info=True) + return future + except: + if future is not None: + # Ensure that the future doesn't log an error because its + # failure was never examined. + future.add_done_callback(lambda f: f.exception()) + raise + return future + + def read_until(self, delimiter, callback=None, max_bytes=None): + """Asynchronously read until we have found the given delimiter. + + The result includes all the data read including the delimiter. + If a callback is given, it will be run with the data as an argument; + if not, this method returns a `.Future`. + + If ``max_bytes`` is not None, the connection will be closed + if more than ``max_bytes`` bytes have been read and the delimiter + is not found. + + .. versionchanged:: 4.0 + Added the ``max_bytes`` argument. The ``callback`` argument is + now optional and a `.Future` will be returned if it is omitted. + """ + future = self._set_read_callback(callback) + self._read_delimiter = delimiter + self._read_max_bytes = max_bytes + try: + self._try_inline_read() + except UnsatisfiableReadError as e: + # Handle this the same way as in _handle_events. + gen_log.info("Unsatisfiable read, closing connection: %s" % e) + self.close(exc_info=True) + return future + except: + if future is not None: + future.add_done_callback(lambda f: f.exception()) + raise + return future + + def read_bytes(self, num_bytes, callback=None, streaming_callback=None, + partial=False): + """Asynchronously read a number of bytes. + + If a ``streaming_callback`` is given, it will be called with chunks + of data as they become available, and the final result will be empty. + Otherwise, the result is all the data that was read. 
+ If a callback is given, it will be run with the data as an argument; + if not, this method returns a `.Future`. + + If ``partial`` is true, the callback is run as soon as we have + any bytes to return (but never more than ``num_bytes``) + + .. versionchanged:: 4.0 + Added the ``partial`` argument. The callback argument is now + optional and a `.Future` will be returned if it is omitted. + """ + future = self._set_read_callback(callback) + assert isinstance(num_bytes, numbers.Integral) + self._read_bytes = num_bytes + self._read_partial = partial + self._streaming_callback = stack_context.wrap(streaming_callback) + try: + self._try_inline_read() + except: + if future is not None: + future.add_done_callback(lambda f: f.exception()) + raise + return future + + def read_until_close(self, callback=None, streaming_callback=None): + """Asynchronously reads all data from the socket until it is closed. + + If a ``streaming_callback`` is given, it will be called with chunks + of data as they become available, and the final result will be empty. + Otherwise, the result is all the data that was read. + If a callback is given, it will be run with the data as an argument; + if not, this method returns a `.Future`. + + Note that if a ``streaming_callback`` is used, data will be + read from the socket as quickly as it becomes available; there + is no way to apply backpressure or cancel the reads. If flow + control or cancellation are desired, use a loop with + `read_bytes(partial=True) <.read_bytes>` instead. + + .. versionchanged:: 4.0 + The callback argument is now optional and a `.Future` will + be returned if it is omitted. 
+ + """ + future = self._set_read_callback(callback) + self._streaming_callback = stack_context.wrap(streaming_callback) + if self.closed(): + if self._streaming_callback is not None: + self._run_read_callback(self._read_buffer_size, True) + self._run_read_callback(self._read_buffer_size, False) + return future + self._read_until_close = True + try: + self._try_inline_read() + except: + if future is not None: + future.add_done_callback(lambda f: f.exception()) + raise + return future + + def write(self, data, callback=None): + """Asynchronously write the given data to this stream. + + If ``callback`` is given, we call it when all of the buffered write + data has been successfully written to the stream. If there was + previously buffered write data and an old write callback, that + callback is simply overwritten with this new callback. + + If no ``callback`` is given, this method returns a `.Future` that + resolves (with a result of ``None``) when the write has been + completed. If `write` is called again before that `.Future` has + resolved, the previous future will be orphaned and will never resolve. + + .. versionchanged:: 4.0 + Now returns a `.Future` if no callback is given. + """ + assert isinstance(data, bytes) + self._check_closed() + # We use bool(_write_buffer) as a proxy for write_buffer_size>0, + # so never put empty strings in the buffer. + if data: + if (self.max_write_buffer_size is not None and + self._write_buffer_size + len(data) > self.max_write_buffer_size): + raise StreamBufferFullError("Reached maximum write buffer size") + # Break up large contiguous strings before inserting them in the + # write buffer, so we don't have to recopy the entire thing + # as we slice off pieces to send to the socket. 
+ WRITE_BUFFER_CHUNK_SIZE = 128 * 1024 + for i in range(0, len(data), WRITE_BUFFER_CHUNK_SIZE): + self._write_buffer.append(data[i:i + WRITE_BUFFER_CHUNK_SIZE]) + self._write_buffer_size += len(data) + if callback is not None: + self._write_callback = stack_context.wrap(callback) + future = None + else: + future = self._write_future = TracebackFuture() + future.add_done_callback(lambda f: f.exception()) + if not self._connecting: + self._handle_write() + if self._write_buffer: + self._add_io_state(self.io_loop.WRITE) + self._maybe_add_error_listener() + return future + + def set_close_callback(self, callback): + """Call the given callback when the stream is closed. + + This is not necessary for applications that use the `.Future` + interface; all outstanding ``Futures`` will resolve with a + `StreamClosedError` when the stream is closed. + """ + self._close_callback = stack_context.wrap(callback) + self._maybe_add_error_listener() + + def close(self, exc_info=False): + """Close this stream. + + If ``exc_info`` is true, set the ``error`` attribute to the current + exception from `sys.exc_info` (or if ``exc_info`` is a tuple, + use that instead of `sys.exc_info`). 
+ """ + if not self.closed(): + if exc_info: + if not isinstance(exc_info, tuple): + exc_info = sys.exc_info() + if any(exc_info): + self.error = exc_info[1] + if self._read_until_close: + if (self._streaming_callback is not None and + self._read_buffer_size): + self._run_read_callback(self._read_buffer_size, True) + self._read_until_close = False + self._run_read_callback(self._read_buffer_size, False) + if self._state is not None: + self.io_loop.remove_handler(self.fileno()) + self._state = None + self.close_fd() + self._closed = True + self._maybe_run_close_callback() + + def _maybe_run_close_callback(self): + # If there are pending callbacks, don't run the close callback + # until they're done (see _maybe_add_error_handler) + if self.closed() and self._pending_callbacks == 0: + futures = [] + if self._read_future is not None: + futures.append(self._read_future) + self._read_future = None + if self._write_future is not None: + futures.append(self._write_future) + self._write_future = None + if self._connect_future is not None: + futures.append(self._connect_future) + self._connect_future = None + if self._ssl_connect_future is not None: + futures.append(self._ssl_connect_future) + self._ssl_connect_future = None + for future in futures: + future.set_exception(StreamClosedError(real_error=self.error)) + if self._close_callback is not None: + cb = self._close_callback + self._close_callback = None + self._run_callback(cb) + # Delete any unfinished callbacks to break up reference cycles. + self._read_callback = self._write_callback = None + # Clear the buffers so they can be cleared immediately even + # if the IOStream object is kept alive by a reference cycle. + # TODO: Clear the read buffer too; it currently breaks some tests. 
+ self._write_buffer = None + + def reading(self): + """Returns true if we are currently reading from the stream.""" + return self._read_callback is not None or self._read_future is not None + + def writing(self): + """Returns true if we are currently writing to the stream.""" + return bool(self._write_buffer) + + def closed(self): + """Returns true if the stream has been closed.""" + return self._closed + + def set_nodelay(self, value): + """Sets the no-delay flag for this stream. + + By default, data written to TCP streams may be held for a time + to make the most efficient use of bandwidth (according to + Nagle's algorithm). The no-delay flag requests that data be + written as soon as possible, even if doing so would consume + additional bandwidth. + + This flag is currently defined only for TCP-based ``IOStreams``. + + .. versionadded:: 3.1 + """ + pass + + def _handle_events(self, fd, events): + if self.closed(): + gen_log.warning("Got events for closed stream %s", fd) + return + try: + if self._connecting: + # Most IOLoops will report a write failed connect + # with the WRITE event, but SelectIOLoop reports a + # READ as well so we must check for connecting before + # either. + self._handle_connect() + if self.closed(): + return + if events & self.io_loop.READ: + self._handle_read() + if self.closed(): + return + if events & self.io_loop.WRITE: + self._handle_write() + if self.closed(): + return + if events & self.io_loop.ERROR: + self.error = self.get_fd_error() + # We may have queued up a user callback in _handle_read or + # _handle_write, so don't close the IOStream until those + # callbacks have had a chance to run. + self.io_loop.add_callback(self.close) + return + state = self.io_loop.ERROR + if self.reading(): + state |= self.io_loop.READ + if self.writing(): + state |= self.io_loop.WRITE + if state == self.io_loop.ERROR and self._read_buffer_size == 0: + # If the connection is idle, listen for reads too so + # we can tell if the connection is closed. 
If there is + # data in the read buffer we won't run the close callback + # yet anyway, so we don't need to listen in this case. + state |= self.io_loop.READ + if state != self._state: + assert self._state is not None, \ + "shouldn't happen: _handle_events without self._state" + self._state = state + self.io_loop.update_handler(self.fileno(), self._state) + except UnsatisfiableReadError as e: + gen_log.info("Unsatisfiable read, closing connection: %s" % e) + self.close(exc_info=True) + except Exception: + gen_log.error("Uncaught exception, closing connection.", + exc_info=True) + self.close(exc_info=True) + raise + + def _run_callback(self, callback, *args): + def wrapper(): + self._pending_callbacks -= 1 + try: + return callback(*args) + except Exception: + app_log.error("Uncaught exception, closing connection.", + exc_info=True) + # Close the socket on an uncaught exception from a user callback + # (It would eventually get closed when the socket object is + # gc'd, but we don't want to rely on gc happening before we + # run out of file descriptors) + self.close(exc_info=True) + # Re-raise the exception so that IOLoop.handle_callback_exception + # can see it and log the error + raise + finally: + self._maybe_add_error_listener() + # We schedule callbacks to be run on the next IOLoop iteration + # rather than running them directly for several reasons: + # * Prevents unbounded stack growth when a callback calls an + # IOLoop operation that immediately runs another callback + # * Provides a predictable execution context for e.g. + # non-reentrant mutexes + # * Ensures that the try/except in wrapper() is run outside + # of the application's StackContexts + with stack_context.NullContext(): + # stack_context was already captured in callback, we don't need to + # capture it again for IOStream's wrapper. 
This is especially + # important if the callback was pre-wrapped before entry to + # IOStream (as in HTTPConnection._header_callback), as we could + # capture and leak the wrong context here. + self._pending_callbacks += 1 + self.io_loop.add_callback(wrapper) + + def _read_to_buffer_loop(self): + # This method is called from _handle_read and _try_inline_read. + try: + if self._read_bytes is not None: + target_bytes = self._read_bytes + elif self._read_max_bytes is not None: + target_bytes = self._read_max_bytes + elif self.reading(): + # For read_until without max_bytes, or + # read_until_close, read as much as we can before + # scanning for the delimiter. + target_bytes = None + else: + target_bytes = 0 + next_find_pos = 0 + # Pretend to have a pending callback so that an EOF in + # _read_to_buffer doesn't trigger an immediate close + # callback. At the end of this method we'll either + # establish a real pending callback via + # _read_from_buffer or run the close callback. + # + # We need two try statements here so that + # pending_callbacks is decremented before the `except` + # clause below (which calls `close` and does need to + # trigger the callback) + self._pending_callbacks += 1 + while not self.closed(): + # Read from the socket until we get EWOULDBLOCK or equivalent. + # SSL sockets do some internal buffering, and if the data is + # sitting in the SSL object's buffer select() and friends + # can't see it; the only way to find out if it's there is to + # try to read it. + if self._read_to_buffer() == 0: + break + + self._run_streaming_callback() + + # If we've read all the bytes we can use, break out of + # this loop. We can't just call read_from_buffer here + # because of subtle interactions with the + # pending_callback and error_listener mechanisms. + # + # If we've reached target_bytes, we know we're done. 
+ if (target_bytes is not None and + self._read_buffer_size >= target_bytes): + break + + # Otherwise, we need to call the more expensive find_read_pos. + # It's inefficient to do this on every read, so instead + # do it on the first read and whenever the read buffer + # size has doubled. + if self._read_buffer_size >= next_find_pos: + pos = self._find_read_pos() + if pos is not None: + return pos + next_find_pos = self._read_buffer_size * 2 + return self._find_read_pos() + finally: + self._pending_callbacks -= 1 + + def _handle_read(self): + try: + pos = self._read_to_buffer_loop() + except UnsatisfiableReadError: + raise + except Exception as e: + gen_log.warning("error on read: %s" % e) + self.close(exc_info=True) + return + if pos is not None: + self._read_from_buffer(pos) + return + else: + self._maybe_run_close_callback() + + def _set_read_callback(self, callback): + assert self._read_callback is None, "Already reading" + assert self._read_future is None, "Already reading" + if callback is not None: + self._read_callback = stack_context.wrap(callback) + else: + self._read_future = TracebackFuture() + return self._read_future + + def _run_read_callback(self, size, streaming): + if streaming: + callback = self._streaming_callback + else: + callback = self._read_callback + self._read_callback = self._streaming_callback = None + if self._read_future is not None: + assert callback is None + future = self._read_future + self._read_future = None + future.set_result(self._consume(size)) + if callback is not None: + assert (self._read_future is None) or streaming + self._run_callback(callback, self._consume(size)) + else: + # If we scheduled a callback, we will add the error listener + # afterwards. If we didn't, we have to do it now. + self._maybe_add_error_listener() + + def _try_inline_read(self): + """Attempt to complete the current read operation from buffered data. 
+ + If the read can be completed without blocking, schedules the + read callback on the next IOLoop iteration; otherwise starts + listening for reads on the socket. + """ + # See if we've already got the data from a previous read + self._run_streaming_callback() + pos = self._find_read_pos() + if pos is not None: + self._read_from_buffer(pos) + return + self._check_closed() + try: + pos = self._read_to_buffer_loop() + except Exception: + # If there was an in _read_to_buffer, we called close() already, + # but couldn't run the close callback because of _pending_callbacks. + # Before we escape from this function, run the close callback if + # applicable. + self._maybe_run_close_callback() + raise + if pos is not None: + self._read_from_buffer(pos) + return + # We couldn't satisfy the read inline, so either close the stream + # or listen for new data. + if self.closed(): + self._maybe_run_close_callback() + else: + self._add_io_state(ioloop.IOLoop.READ) + + def _read_to_buffer(self): + """Reads from the socket and appends the result to the read buffer. + + Returns the number of bytes read. Returns 0 if there is nothing + to read (i.e. the read returns EWOULDBLOCK or equivalent). On + error closes the socket and raises an exception. + """ + while True: + try: + chunk = self.read_from_fd() + except (socket.error, IOError, OSError) as e: + if errno_from_exception(e) == errno.EINTR: + continue + # ssl.SSLError is a subclass of socket.error + if self._is_connreset(e): + # Treat ECONNRESET as a connection close rather than + # an error to minimize log spam (the exception will + # be available on self.error for apps that care). 
+ self.close(exc_info=True) + return + self.close(exc_info=True) + raise + break + if chunk is None: + return 0 + self._read_buffer.append(chunk) + self._read_buffer_size += len(chunk) + if self._read_buffer_size > self.max_buffer_size: + gen_log.error("Reached maximum read buffer size") + self.close() + raise StreamBufferFullError("Reached maximum read buffer size") + return len(chunk) + + def _run_streaming_callback(self): + if self._streaming_callback is not None and self._read_buffer_size: + bytes_to_consume = self._read_buffer_size + if self._read_bytes is not None: + bytes_to_consume = min(self._read_bytes, bytes_to_consume) + self._read_bytes -= bytes_to_consume + self._run_read_callback(bytes_to_consume, True) + + def _read_from_buffer(self, pos): + """Attempts to complete the currently-pending read from the buffer. + + The argument is either a position in the read buffer or None, + as returned by _find_read_pos. + """ + self._read_bytes = self._read_delimiter = self._read_regex = None + self._read_partial = False + self._run_read_callback(pos, False) + + def _find_read_pos(self): + """Attempts to find a position in the read buffer that satisfies + the currently-pending read. + + Returns a position in the buffer if the current read can be satisfied, + or None if it cannot. + """ + if (self._read_bytes is not None and + (self._read_buffer_size >= self._read_bytes or + (self._read_partial and self._read_buffer_size > 0))): + num_bytes = min(self._read_bytes, self._read_buffer_size) + return num_bytes + elif self._read_delimiter is not None: + # Multi-byte delimiters (e.g. '\r\n') may straddle two + # chunks in the read buffer, so we can't easily find them + # without collapsing the buffer. However, since protocols + # using delimited reads (as opposed to reads of a known + # length) tend to be "line" oriented, the delimiter is likely + # to be in the first few chunks. 
Merge the buffer gradually + # since large merges are relatively expensive and get undone in + # _consume(). + if self._read_buffer: + while True: + loc = self._read_buffer[0].find(self._read_delimiter) + if loc != -1: + delimiter_len = len(self._read_delimiter) + self._check_max_bytes(self._read_delimiter, + loc + delimiter_len) + return loc + delimiter_len + if len(self._read_buffer) == 1: + break + _double_prefix(self._read_buffer) + self._check_max_bytes(self._read_delimiter, + len(self._read_buffer[0])) + elif self._read_regex is not None: + if self._read_buffer: + while True: + m = self._read_regex.search(self._read_buffer[0]) + if m is not None: + self._check_max_bytes(self._read_regex, m.end()) + return m.end() + if len(self._read_buffer) == 1: + break + _double_prefix(self._read_buffer) + self._check_max_bytes(self._read_regex, + len(self._read_buffer[0])) + return None + + def _check_max_bytes(self, delimiter, size): + if (self._read_max_bytes is not None and + size > self._read_max_bytes): + raise UnsatisfiableReadError( + "delimiter %r not found within %d bytes" % ( + delimiter, self._read_max_bytes)) + + def _handle_write(self): + while self._write_buffer: + try: + if not self._write_buffer_frozen: + # On windows, socket.send blows up if given a + # write buffer that's too large, instead of just + # returning the number of bytes it was able to + # process. Therefore we must not call socket.send + # with more than 128KB at a time. + _merge_prefix(self._write_buffer, 128 * 1024) + num_bytes = self.write_to_fd(self._write_buffer[0]) + if num_bytes == 0: + # With OpenSSL, if we couldn't write the entire buffer, + # the very same string object must be used on the + # next call to send. Therefore we suppress + # merging the write buffer after an incomplete send. 
+ # A cleaner solution would be to set + # SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER, but this is + # not yet accessible from python + # (http://bugs.python.org/issue8240) + self._write_buffer_frozen = True + break + self._write_buffer_frozen = False + _merge_prefix(self._write_buffer, num_bytes) + self._write_buffer.popleft() + self._write_buffer_size -= num_bytes + except (socket.error, IOError, OSError) as e: + if e.args[0] in _ERRNO_WOULDBLOCK: + self._write_buffer_frozen = True + break + else: + if not self._is_connreset(e): + # Broken pipe errors are usually caused by connection + # reset, and its better to not log EPIPE errors to + # minimize log spam + gen_log.warning("Write error on %s: %s", + self.fileno(), e) + self.close(exc_info=True) + return + if not self._write_buffer: + if self._write_callback: + callback = self._write_callback + self._write_callback = None + self._run_callback(callback) + if self._write_future: + future = self._write_future + self._write_future = None + future.set_result(None) + + def _consume(self, loc): + if loc == 0: + return b"" + _merge_prefix(self._read_buffer, loc) + self._read_buffer_size -= loc + return self._read_buffer.popleft() + + def _check_closed(self): + if self.closed(): + raise StreamClosedError(real_error=self.error) + + def _maybe_add_error_listener(self): + # This method is part of an optimization: to detect a connection that + # is closed when we're not actively reading or writing, we must listen + # for read events. However, it is inefficient to do this when the + # connection is first established because we are going to read or write + # immediately anyway. Instead, we insert checks at various times to + # see if the connection is idle and add the read listener then. 
+ if self._pending_callbacks != 0: + return + if self._state is None or self._state == ioloop.IOLoop.ERROR: + if self.closed(): + self._maybe_run_close_callback() + elif (self._read_buffer_size == 0 and + self._close_callback is not None): + self._add_io_state(ioloop.IOLoop.READ) + + def _add_io_state(self, state): + """Adds `state` (IOLoop.{READ,WRITE} flags) to our event handler. + + Implementation notes: Reads and writes have a fast path and a + slow path. The fast path reads synchronously from socket + buffers, while the slow path uses `_add_io_state` to schedule + an IOLoop callback. Note that in both cases, the callback is + run asynchronously with `_run_callback`. + + To detect closed connections, we must have called + `_add_io_state` at some point, but we want to delay this as + much as possible so we don't have to set an `IOLoop.ERROR` + listener that will be overwritten by the next slow-path + operation. As long as there are callbacks scheduled for + fast-path ops, those callbacks may do more reads. + If a sequence of fast-path ops do not end in a slow-path op, + (e.g. for an @asynchronous long-poll request), we must add + the error handler. This is done in `_run_callback` and `write` + (since the write callback is optional so we can have a + fast-path write with no `_run_callback`) + """ + if self.closed(): + # connection has been closed, so there can be no future events + return + if self._state is None: + self._state = ioloop.IOLoop.ERROR | state + with stack_context.NullContext(): + self.io_loop.add_handler( + self.fileno(), self._handle_events, self._state) + elif not self._state & state: + self._state = self._state | state + self.io_loop.update_handler(self.fileno(), self._state) + + def _is_connreset(self, exc): + """Return true if exc is ECONNRESET or equivalent. + + May be overridden in subclasses. 
+ """ + return (isinstance(exc, (socket.error, IOError)) and + errno_from_exception(exc) in _ERRNO_CONNRESET) + + +class IOStream(BaseIOStream): + r"""Socket-based `IOStream` implementation. + + This class supports the read and write methods from `BaseIOStream` + plus a `connect` method. + + The ``socket`` parameter may either be connected or unconnected. + For server operations the socket is the result of calling + `socket.accept `. For client operations the + socket is created with `socket.socket`, and may either be + connected before passing it to the `IOStream` or connected with + `IOStream.connect`. + + A very simple (and broken) HTTP client using this class: + + .. testcode:: + + import tornado.ioloop + import tornado.iostream + import socket + + def send_request(): + stream.write(b"GET / HTTP/1.0\r\nHost: friendfeed.com\r\n\r\n") + stream.read_until(b"\r\n\r\n", on_headers) + + def on_headers(data): + headers = {} + for line in data.split(b"\r\n"): + parts = line.split(b":") + if len(parts) == 2: + headers[parts[0].strip()] = parts[1].strip() + stream.read_bytes(int(headers[b"Content-Length"]), on_body) + + def on_body(data): + print(data) + stream.close() + tornado.ioloop.IOLoop.current().stop() + + if __name__ == '__main__': + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) + stream = tornado.iostream.IOStream(s) + stream.connect(("friendfeed.com", 80), send_request) + tornado.ioloop.IOLoop.current().start() + + .. 
testoutput:: + :hide: + + """ + def __init__(self, socket, *args, **kwargs): + self.socket = socket + self.socket.setblocking(False) + super(IOStream, self).__init__(*args, **kwargs) + + def fileno(self): + return self.socket + + def close_fd(self): + self.socket.close() + self.socket = None + + def get_fd_error(self): + errno = self.socket.getsockopt(socket.SOL_SOCKET, + socket.SO_ERROR) + return socket.error(errno, os.strerror(errno)) + + def read_from_fd(self): + try: + chunk = self.socket.recv(self.read_chunk_size) + except socket.error as e: + if e.args[0] in _ERRNO_WOULDBLOCK: + return None + else: + raise + if not chunk: + self.close() + return None + return chunk + + def write_to_fd(self, data): + return self.socket.send(data) + + def connect(self, address, callback=None, server_hostname=None): + """Connects the socket to a remote address without blocking. + + May only be called if the socket passed to the constructor was + not previously connected. The address parameter is in the + same format as for `socket.connect ` for + the type of socket passed to the IOStream constructor, + e.g. an ``(ip, port)`` tuple. Hostnames are accepted here, + but will be resolved synchronously and block the IOLoop. + If you have a hostname instead of an IP address, the `.TCPClient` + class is recommended instead of calling this method directly. + `.TCPClient` will do asynchronous DNS resolution and handle + both IPv4 and IPv6. + + If ``callback`` is specified, it will be called with no + arguments when the connection is completed; if not this method + returns a `.Future` (whose result after a successful + connection will be the stream itself). + + In SSL mode, the ``server_hostname`` parameter will be used + for certificate validation (unless disabled in the + ``ssl_options``) and SNI (if supported; requires Python + 2.7.9+). 
+ + Note that it is safe to call `IOStream.write + ` while the connection is pending, in + which case the data will be written as soon as the connection + is ready. Calling `IOStream` read methods before the socket is + connected works on some platforms but is non-portable. + + .. versionchanged:: 4.0 + If no callback is given, returns a `.Future`. + + .. versionchanged:: 4.2 + SSL certificates are validated by default; pass + ``ssl_options=dict(cert_reqs=ssl.CERT_NONE)`` or a + suitably-configured `ssl.SSLContext` to the + `SSLIOStream` constructor to disable. + """ + self._connecting = True + if callback is not None: + self._connect_callback = stack_context.wrap(callback) + future = None + else: + future = self._connect_future = TracebackFuture() + try: + self.socket.connect(address) + except socket.error as e: + # In non-blocking mode we expect connect() to raise an + # exception with EINPROGRESS or EWOULDBLOCK. + # + # On freebsd, other errors such as ECONNREFUSED may be + # returned immediately when attempting to connect to + # localhost, so handle them the same way as an error + # reported later in _handle_connect. + if (errno_from_exception(e) not in _ERRNO_INPROGRESS and + errno_from_exception(e) not in _ERRNO_WOULDBLOCK): + if future is None: + gen_log.warning("Connect error on fd %s: %s", + self.socket.fileno(), e) + self.close(exc_info=True) + return future + self._add_io_state(self.io_loop.WRITE) + return future + + def start_tls(self, server_side, ssl_options=None, server_hostname=None): + """Convert this `IOStream` to an `SSLIOStream`. + + This enables protocols that begin in clear-text mode and + switch to SSL after some initial negotiation (such as the + ``STARTTLS`` extension to SMTP and IMAP). + + This method cannot be used if there are outstanding reads + or writes on the stream, or if there is any data in the + IOStream's buffer (data in the operating system's socket + buffer is allowed). 
This means it must generally be used + immediately after reading or writing the last clear-text + data. It can also be used immediately after connecting, + before any reads or writes. + + The ``ssl_options`` argument may be either an `ssl.SSLContext` + object or a dictionary of keyword arguments for the + `ssl.wrap_socket` function. The ``server_hostname`` argument + will be used for certificate validation unless disabled + in the ``ssl_options``. + + This method returns a `.Future` whose result is the new + `SSLIOStream`. After this method has been called, + any other operation on the original stream is undefined. + + If a close callback is defined on this stream, it will be + transferred to the new stream. + + .. versionadded:: 4.0 + + .. versionchanged:: 4.2 + SSL certificates are validated by default; pass + ``ssl_options=dict(cert_reqs=ssl.CERT_NONE)`` or a + suitably-configured `ssl.SSLContext` to disable. + """ + if (self._read_callback or self._read_future or + self._write_callback or self._write_future or + self._connect_callback or self._connect_future or + self._pending_callbacks or self._closed or + self._read_buffer or self._write_buffer): + raise ValueError("IOStream is not idle; cannot convert to SSL") + if ssl_options is None: + if server_side: + ssl_options = _server_ssl_defaults + else: + ssl_options = _client_ssl_defaults + + socket = self.socket + self.io_loop.remove_handler(socket) + self.socket = None + socket = ssl_wrap_socket(socket, ssl_options, + server_hostname=server_hostname, + server_side=server_side, + do_handshake_on_connect=False) + orig_close_callback = self._close_callback + self._close_callback = None + + future = TracebackFuture() + ssl_stream = SSLIOStream(socket, ssl_options=ssl_options, + io_loop=self.io_loop) + # Wrap the original close callback so we can fail our Future as well. 
+ # If we had an "unwrap" counterpart to this method we would need + # to restore the original callback after our Future resolves + # so that repeated wrap/unwrap calls don't build up layers. + + def close_callback(): + if not future.done(): + # Note that unlike most Futures returned by IOStream, + # this one passes the underlying error through directly + # instead of wrapping everything in a StreamClosedError + # with a real_error attribute. This is because once the + # connection is established it's more helpful to raise + # the SSLError directly than to hide it behind a + # StreamClosedError (and the client is expecting SSL + # issues rather than network issues since this method is + # named start_tls). + future.set_exception(ssl_stream.error or StreamClosedError()) + if orig_close_callback is not None: + orig_close_callback() + ssl_stream.set_close_callback(close_callback) + ssl_stream._ssl_connect_callback = lambda: future.set_result(ssl_stream) + ssl_stream.max_buffer_size = self.max_buffer_size + ssl_stream.read_chunk_size = self.read_chunk_size + return future + + def _handle_connect(self): + err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR) + if err != 0: + self.error = socket.error(err, os.strerror(err)) + # IOLoop implementations may vary: some of them return + # an error state before the socket becomes writable, so + # in that case a connection failure would be handled by the + # error path in _handle_events instead of here. 
+ if self._connect_future is None: + gen_log.warning("Connect error on fd %s: %s", + self.socket.fileno(), errno.errorcode[err]) + self.close() + return + if self._connect_callback is not None: + callback = self._connect_callback + self._connect_callback = None + self._run_callback(callback) + if self._connect_future is not None: + future = self._connect_future + self._connect_future = None + future.set_result(self) + self._connecting = False + + def set_nodelay(self, value): + if (self.socket is not None and + self.socket.family in (socket.AF_INET, socket.AF_INET6)): + try: + self.socket.setsockopt(socket.IPPROTO_TCP, + socket.TCP_NODELAY, 1 if value else 0) + except socket.error as e: + # Sometimes setsockopt will fail if the socket is closed + # at the wrong time. This can happen with HTTPServer + # resetting the value to false between requests. + if e.errno != errno.EINVAL and not self._is_connreset(e): + raise + + +class SSLIOStream(IOStream): + """A utility class to write to and read from a non-blocking SSL socket. + + If the socket passed to the constructor is already connected, + it should be wrapped with:: + + ssl.wrap_socket(sock, do_handshake_on_connect=False, **kwargs) + + before constructing the `SSLIOStream`. Unconnected sockets will be + wrapped when `IOStream.connect` is finished. + """ + def __init__(self, *args, **kwargs): + """The ``ssl_options`` keyword argument may either be an + `ssl.SSLContext` object or a dictionary of keywords arguments + for `ssl.wrap_socket` + """ + self._ssl_options = kwargs.pop('ssl_options', _client_ssl_defaults) + super(SSLIOStream, self).__init__(*args, **kwargs) + self._ssl_accepting = True + self._handshake_reading = False + self._handshake_writing = False + self._ssl_connect_callback = None + self._server_hostname = None + + # If the socket is already connected, attempt to start the handshake. 
+ try: + self.socket.getpeername() + except socket.error: + pass + else: + # Indirectly start the handshake, which will run on the next + # IOLoop iteration and then the real IO state will be set in + # _handle_events. + self._add_io_state(self.io_loop.WRITE) + + def reading(self): + return self._handshake_reading or super(SSLIOStream, self).reading() + + def writing(self): + return self._handshake_writing or super(SSLIOStream, self).writing() + + def _do_ssl_handshake(self): + # Based on code from test_ssl.py in the python stdlib + try: + self._handshake_reading = False + self._handshake_writing = False + self.socket.do_handshake() + except ssl.SSLError as err: + if err.args[0] == ssl.SSL_ERROR_WANT_READ: + self._handshake_reading = True + return + elif err.args[0] == ssl.SSL_ERROR_WANT_WRITE: + self._handshake_writing = True + return + elif err.args[0] in (ssl.SSL_ERROR_EOF, + ssl.SSL_ERROR_ZERO_RETURN): + return self.close(exc_info=True) + elif err.args[0] == ssl.SSL_ERROR_SSL: + try: + peer = self.socket.getpeername() + except Exception: + peer = '(not connected)' + gen_log.warning("SSL Error on %s %s: %s", + self.socket.fileno(), peer, err) + return self.close(exc_info=True) + raise + except socket.error as err: + # Some port scans (e.g. nmap in -sT mode) have been known + # to cause do_handshake to raise EBADF and ENOTCONN, so make + # those errors quiet as well. + # https://groups.google.com/forum/?fromgroups#!topic/python-tornado/ApucKJat1_0 + if (self._is_connreset(err) or + err.args[0] in (errno.EBADF, errno.ENOTCONN)): + return self.close(exc_info=True) + raise + except AttributeError: + # On Linux, if the connection was reset before the call to + # wrap_socket, do_handshake will fail with an + # AttributeError. 
+ return self.close(exc_info=True) + else: + self._ssl_accepting = False + if not self._verify_cert(self.socket.getpeercert()): + self.close() + return + self._run_ssl_connect_callback() + + def _run_ssl_connect_callback(self): + if self._ssl_connect_callback is not None: + callback = self._ssl_connect_callback + self._ssl_connect_callback = None + self._run_callback(callback) + if self._ssl_connect_future is not None: + future = self._ssl_connect_future + self._ssl_connect_future = None + future.set_result(self) + + def _verify_cert(self, peercert): + """Returns True if peercert is valid according to the configured + validation mode and hostname. + + The ssl handshake already tested the certificate for a valid + CA signature; the only thing that remains is to check + the hostname. + """ + if isinstance(self._ssl_options, dict): + verify_mode = self._ssl_options.get('cert_reqs', ssl.CERT_NONE) + elif isinstance(self._ssl_options, ssl.SSLContext): + verify_mode = self._ssl_options.verify_mode + assert verify_mode in (ssl.CERT_NONE, ssl.CERT_REQUIRED, ssl.CERT_OPTIONAL) + if verify_mode == ssl.CERT_NONE or self._server_hostname is None: + return True + cert = self.socket.getpeercert() + if cert is None and verify_mode == ssl.CERT_REQUIRED: + gen_log.warning("No SSL certificate given") + return False + try: + ssl_match_hostname(peercert, self._server_hostname) + except SSLCertificateError as e: + gen_log.warning("Invalid SSL certificate: %s" % e) + return False + else: + return True + + def _handle_read(self): + if self._ssl_accepting: + self._do_ssl_handshake() + return + super(SSLIOStream, self)._handle_read() + + def _handle_write(self): + if self._ssl_accepting: + self._do_ssl_handshake() + return + super(SSLIOStream, self)._handle_write() + + def connect(self, address, callback=None, server_hostname=None): + self._server_hostname = server_hostname + # Pass a dummy callback to super.connect(), which is slightly + # more efficient than letting it return a Future we 
ignore. + super(SSLIOStream, self).connect(address, callback=lambda: None) + return self.wait_for_handshake(callback) + + def _handle_connect(self): + # Call the superclass method to check for errors. + super(SSLIOStream, self)._handle_connect() + if self.closed(): + return + # When the connection is complete, wrap the socket for SSL + # traffic. Note that we do this by overriding _handle_connect + # instead of by passing a callback to super().connect because + # user callbacks are enqueued asynchronously on the IOLoop, + # but since _handle_events calls _handle_connect immediately + # followed by _handle_write we need this to be synchronous. + # + # The IOLoop will get confused if we swap out self.socket while the + # fd is registered, so remove it now and re-register after + # wrap_socket(). + self.io_loop.remove_handler(self.socket) + old_state = self._state + self._state = None + self.socket = ssl_wrap_socket(self.socket, self._ssl_options, + server_hostname=self._server_hostname, + do_handshake_on_connect=False) + self._add_io_state(old_state) + + def wait_for_handshake(self, callback=None): + """Wait for the initial SSL handshake to complete. + + If a ``callback`` is given, it will be called with no + arguments once the handshake is complete; otherwise this + method returns a `.Future` which will resolve to the + stream itself after the handshake is complete. + + Once the handshake is complete, information such as + the peer's certificate and NPN/ALPN selections may be + accessed on ``self.socket``. + + This method is intended for use on server-side streams + or after using `IOStream.start_tls`; it should not be used + with `IOStream.connect` (which already waits for the + handshake to complete). It may only be called once per stream. + + .. 
versionadded:: 4.2 + """ + if (self._ssl_connect_callback is not None or + self._ssl_connect_future is not None): + raise RuntimeError("Already waiting") + if callback is not None: + self._ssl_connect_callback = stack_context.wrap(callback) + future = None + else: + future = self._ssl_connect_future = TracebackFuture() + if not self._ssl_accepting: + self._run_ssl_connect_callback() + return future + + def write_to_fd(self, data): + try: + return self.socket.send(data) + except ssl.SSLError as e: + if e.args[0] == ssl.SSL_ERROR_WANT_WRITE: + # In Python 3.5+, SSLSocket.send raises a WANT_WRITE error if + # the socket is not writeable; we need to transform this into + # an EWOULDBLOCK socket.error or a zero return value, + # either of which will be recognized by the caller of this + # method. Prior to Python 3.5, an unwriteable socket would + # simply return 0 bytes written. + return 0 + raise + + def read_from_fd(self): + if self._ssl_accepting: + # If the handshake hasn't finished yet, there can't be anything + # to read (attempting to read may or may not raise an exception + # depending on the SSL version) + return None + try: + # SSLSocket objects have both a read() and recv() method, + # while regular sockets only have recv(). + # The recv() method blocks (at least in python 2.6) if it is + # called when there is nothing to read, so we have to use + # read() instead. + chunk = self.socket.read(self.read_chunk_size) + except ssl.SSLError as e: + # SSLError is a subclass of socket.error, so this except + # block must come first. 
class PipeIOStream(BaseIOStream):
    """`IOStream` variant backed by an OS pipe.

    Construct with an integer file descriptor (such as one half of
    `os.pipe`) rather than an open file object.  Because pipes are
    generally one-way, a single `PipeIOStream` should be used for either
    reading or writing, but not both.
    """
    def __init__(self, fd, *args, **kwargs):
        _set_nonblocking(fd)
        self.fd = fd
        super(PipeIOStream, self).__init__(*args, **kwargs)

    def fileno(self):
        # The pipe fd itself serves as the stream's file descriptor.
        return self.fd

    def close_fd(self):
        os.close(self.fd)

    def write_to_fd(self, data):
        # os.write reports the number of bytes actually written.
        return os.write(self.fd, data)

    def read_from_fd(self):
        try:
            data = os.read(self.fd, self.read_chunk_size)
        except (IOError, OSError) as exc:
            err = errno_from_exception(exc)
            if err in _ERRNO_WOULDBLOCK:
                return None
            if err == errno.EBADF:
                # When the writing half of a pipe is closed, select will
                # report it as readable but reads fail with EBADF.
                self.close(exc_info=True)
                return None
            raise
        if not data:
            self.close()
            return None
        return data
+ + >>> d = collections.deque(['abc', 'de', 'fghi', 'j']) + >>> _merge_prefix(d, 5); print(d) + deque(['abcde', 'fghi', 'j']) + + Strings will be split as necessary to reach the desired size. + >>> _merge_prefix(d, 7); print(d) + deque(['abcdefg', 'hi', 'j']) + + >>> _merge_prefix(d, 3); print(d) + deque(['abc', 'defg', 'hi', 'j']) + + >>> _merge_prefix(d, 100); print(d) + deque(['abcdefghij']) + """ + if len(deque) == 1 and len(deque[0]) <= size: + return + prefix = [] + remaining = size + while deque and remaining > 0: + chunk = deque.popleft() + if len(chunk) > remaining: + deque.appendleft(chunk[remaining:]) + chunk = chunk[:remaining] + prefix.append(chunk) + remaining -= len(chunk) + # This data structure normally just contains byte strings, but + # the unittest gets messy if it doesn't use the default str() type, + # so do the merge based on the type of data that's actually present. + if prefix: + deque.appendleft(type(prefix[0])().join(prefix)) + if not deque: + deque.appendleft(b"") + + +def doctests(): + import doctest + return doctest.DocTestSuite() diff --git a/python/tornado/locale.py b/python/tornado/locale.py new file mode 100644 index 000000000..8310c4d4c --- /dev/null +++ b/python/tornado/locale.py @@ -0,0 +1,521 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Translation methods for generating localized strings. 
+ +To load a locale and generate a translated string:: + + user_locale = tornado.locale.get("es_LA") + print user_locale.translate("Sign out") + +`tornado.locale.get()` returns the closest matching locale, not necessarily the +specific locale you requested. You can support pluralization with +additional arguments to `~Locale.translate()`, e.g.:: + + people = [...] + message = user_locale.translate( + "%(list)s is online", "%(list)s are online", len(people)) + print message % {"list": user_locale.list(people)} + +The first string is chosen if ``len(people) == 1``, otherwise the second +string is chosen. + +Applications should call one of `load_translations` (which uses a simple +CSV format) or `load_gettext_translations` (which uses the ``.mo`` format +supported by `gettext` and related tools). If neither method is called, +the `Locale.translate` method will simply return the original string. +""" + +from __future__ import absolute_import, division, print_function, with_statement + +import codecs +import csv +import datetime +from io import BytesIO +import numbers +import os +import re + +from tornado import escape +from tornado.log import gen_log +from tornado.util import u + +from tornado._locale_data import LOCALE_NAMES + +_default_locale = "en_US" +_translations = {} +_supported_locales = frozenset([_default_locale]) +_use_gettext = False +CONTEXT_SEPARATOR = "\x04" + + +def get(*locale_codes): + """Returns the closest match for the given locale codes. + + We iterate over all given locale codes in order. If we have a tight + or a loose match for the code (e.g., "en" for "en_US"), we return + the locale. Otherwise we move to the next code in the list. + + By default we return ``en_US`` if no translations are found for any of + the specified locales. You can change the default locale with + `set_default_locale()`. + """ + return Locale.get_closest(*locale_codes) + + +def set_default_locale(code): + """Sets the default locale. 
def load_translations(directory, encoding=None):
    """Loads translations from CSV files in a directory.

    Translations are strings with optional Python-style named placeholders
    (e.g., ``My name is %(name)s``) and their associated translations.

    The directory should have translation files of the form ``LOCALE.csv``,
    e.g. ``es_GT.csv``. The CSV files should have two or three columns: string,
    translation, and an optional plural indicator. Plural indicators should
    be one of "plural" or "singular". A given string can have both singular
    and plural forms. For example ``%(name)s liked this`` may have a
    different verb conjugation depending on whether %(name)s is one
    name or a list of names. There should be two rows in the CSV file for
    that string, one with plural indicator "singular", and one "plural".
    For strings with no verbs that would change on translation, simply
    use "unknown" or the empty string (or don't include the column at all).

    The file is read using the `csv` module in the default "excel" dialect.
    In this format there should not be spaces after the commas.

    If no ``encoding`` parameter is given, the encoding of each file will
    be detected automatically (among UTF-8 and UTF-16) if the file
    contains a byte-order marker (BOM), defaulting to UTF-8 if no BOM
    is present.

    Example translation ``es_LA.csv``::

        "I love you","Te amo"
        "%(name)s liked this","A %(name)s les gustó esto","plural"
        "%(name)s liked this","A %(name)s le gustó esto","singular"

    .. versionchanged:: 4.3
       Added ``encoding`` parameter. Added support for BOM-based encoding
       detection, UTF-16, and UTF-8-with-BOM.
    """
    global _translations
    global _supported_locales
    _translations = {}
    for path in os.listdir(directory):
        if not path.endswith(".csv"):
            continue
        locale, extension = path.split(".")
        if not re.match("[a-z]+(_[A-Z]+)?$", locale):
            gen_log.error("Unrecognized locale %r (path: %s)", locale,
                          os.path.join(directory, path))
            continue
        full_path = os.path.join(directory, path)
        # BUGFIX: detect the encoding into a per-file local instead of
        # assigning to the ``encoding`` parameter itself.  Previously the
        # value detected for the first file leaked into the ``encoding is
        # None`` check for every subsequent file, so e.g. one UTF-16 file
        # forced UTF-16 decoding on the rest of the directory.
        if encoding is None:
            # Try to autodetect encoding based on the BOM.
            with open(full_path, 'rb') as bf:
                data = bf.read(len(codecs.BOM_UTF16_LE))
            if data in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
                file_encoding = 'utf-16'
            else:
                # utf-8-sig is "utf-8 with optional BOM". It's discouraged
                # in most cases but is common with CSV files because Excel
                # cannot read utf-8 files without a BOM.
                file_encoding = 'utf-8-sig'
        else:
            file_encoding = encoding
        try:
            # python 3: csv.reader requires a file open in text mode.
            # Specify an encoding to avoid dependence on $LANG environment
            # variable.
            f = open(full_path, "r", encoding=file_encoding)
        except TypeError:
            # python 2: csv can only handle byte strings (in ascii-compatible
            # encodings), which we decode below. Transcode everything into
            # utf8 before passing it to csv.reader.
            f = BytesIO()
            with codecs.open(full_path, "r", encoding=file_encoding) as infile:
                f.write(escape.utf8(infile.read()))
            f.seek(0)
        _translations[locale] = {}
        for i, row in enumerate(csv.reader(f)):
            if not row or len(row) < 2:
                continue
            row = [escape.to_unicode(c).strip() for c in row]
            english, translation = row[:2]
            if len(row) > 2:
                plural = row[2] or "unknown"
            else:
                plural = "unknown"
            if plural not in ("plural", "singular", "unknown"):
                gen_log.error("Unrecognized plural indicator %r in %s line %d",
                              plural, path, i + 1)
                continue
            _translations[locale].setdefault(plural, {})[english] = translation
        f.close()
    _supported_locales = frozenset(list(_translations.keys()) + [_default_locale])
    gen_log.debug("Supported locales: %s", sorted(_supported_locales))
Compile:: + + msgfmt mydomain.po -o {directory}/pt_BR/LC_MESSAGES/mydomain.mo + """ + import gettext + global _translations + global _supported_locales + global _use_gettext + _translations = {} + for lang in os.listdir(directory): + if lang.startswith('.'): + continue # skip .svn, etc + if os.path.isfile(os.path.join(directory, lang)): + continue + try: + os.stat(os.path.join(directory, lang, "LC_MESSAGES", domain + ".mo")) + _translations[lang] = gettext.translation(domain, directory, + languages=[lang]) + except Exception as e: + gen_log.error("Cannot load translation for '%s': %s", lang, str(e)) + continue + _supported_locales = frozenset(list(_translations.keys()) + [_default_locale]) + _use_gettext = True + gen_log.debug("Supported locales: %s", sorted(_supported_locales)) + + +def get_supported_locales(): + """Returns a list of all the supported locale codes.""" + return _supported_locales + + +class Locale(object): + """Object representing a locale. + + After calling one of `load_translations` or `load_gettext_translations`, + call `get` or `get_closest` to get a Locale object. + """ + @classmethod + def get_closest(cls, *locale_codes): + """Returns the closest match for the given locale code.""" + for code in locale_codes: + if not code: + continue + code = code.replace("-", "_") + parts = code.split("_") + if len(parts) > 2: + continue + elif len(parts) == 2: + code = parts[0].lower() + "_" + parts[1].upper() + if code in _supported_locales: + return cls.get(code) + if parts[0].lower() in _supported_locales: + return cls.get(parts[0].lower()) + return cls.get(_default_locale) + + @classmethod + def get(cls, code): + """Returns the Locale for the given locale code. + + If it is not supported, we raise an exception. 
+ """ + if not hasattr(cls, "_cache"): + cls._cache = {} + if code not in cls._cache: + assert code in _supported_locales + translations = _translations.get(code, None) + if translations is None: + locale = CSVLocale(code, {}) + elif _use_gettext: + locale = GettextLocale(code, translations) + else: + locale = CSVLocale(code, translations) + cls._cache[code] = locale + return cls._cache[code] + + def __init__(self, code, translations): + self.code = code + self.name = LOCALE_NAMES.get(code, {}).get("name", u("Unknown")) + self.rtl = False + for prefix in ["fa", "ar", "he"]: + if self.code.startswith(prefix): + self.rtl = True + break + self.translations = translations + + # Initialize strings for date formatting + _ = self.translate + self._months = [ + _("January"), _("February"), _("March"), _("April"), + _("May"), _("June"), _("July"), _("August"), + _("September"), _("October"), _("November"), _("December")] + self._weekdays = [ + _("Monday"), _("Tuesday"), _("Wednesday"), _("Thursday"), + _("Friday"), _("Saturday"), _("Sunday")] + + def translate(self, message, plural_message=None, count=None): + """Returns the translation for the given message for this locale. + + If ``plural_message`` is given, you must also provide + ``count``. We return ``plural_message`` when ``count != 1``, + and we return the singular form for the given message when + ``count == 1``. + """ + raise NotImplementedError() + + def pgettext(self, context, message, plural_message=None, count=None): + raise NotImplementedError() + + def format_date(self, date, gmt_offset=0, relative=True, shorter=False, + full_format=False): + """Formats the given date (which should be GMT). + + By default, we return a relative time (e.g., "2 minutes ago"). You + can return an absolute date string with ``relative=False``. + + You can force a full format date ("July 10, 1980") with + ``full_format=True``. + + This method is primarily intended for dates in the past. 
+ For dates in the future, we fall back to full format. + """ + if isinstance(date, numbers.Real): + date = datetime.datetime.utcfromtimestamp(date) + now = datetime.datetime.utcnow() + if date > now: + if relative and (date - now).seconds < 60: + # Due to click skew, things are some things slightly + # in the future. Round timestamps in the immediate + # future down to now in relative mode. + date = now + else: + # Otherwise, future dates always use the full format. + full_format = True + local_date = date - datetime.timedelta(minutes=gmt_offset) + local_now = now - datetime.timedelta(minutes=gmt_offset) + local_yesterday = local_now - datetime.timedelta(hours=24) + difference = now - date + seconds = difference.seconds + days = difference.days + + _ = self.translate + format = None + if not full_format: + if relative and days == 0: + if seconds < 50: + return _("1 second ago", "%(seconds)d seconds ago", + seconds) % {"seconds": seconds} + + if seconds < 50 * 60: + minutes = round(seconds / 60.0) + return _("1 minute ago", "%(minutes)d minutes ago", + minutes) % {"minutes": minutes} + + hours = round(seconds / (60.0 * 60)) + return _("1 hour ago", "%(hours)d hours ago", + hours) % {"hours": hours} + + if days == 0: + format = _("%(time)s") + elif days == 1 and local_date.day == local_yesterday.day and \ + relative: + format = _("yesterday") if shorter else \ + _("yesterday at %(time)s") + elif days < 5: + format = _("%(weekday)s") if shorter else \ + _("%(weekday)s at %(time)s") + elif days < 334: # 11mo, since confusing for same month last year + format = _("%(month_name)s %(day)s") if shorter else \ + _("%(month_name)s %(day)s at %(time)s") + + if format is None: + format = _("%(month_name)s %(day)s, %(year)s") if shorter else \ + _("%(month_name)s %(day)s, %(year)s at %(time)s") + + tfhour_clock = self.code not in ("en", "en_US", "zh_CN") + if tfhour_clock: + str_time = "%d:%02d" % (local_date.hour, local_date.minute) + elif self.code == "zh_CN": + str_time = 
"%s%d:%02d" % ( + (u('\u4e0a\u5348'), u('\u4e0b\u5348'))[local_date.hour >= 12], + local_date.hour % 12 or 12, local_date.minute) + else: + str_time = "%d:%02d %s" % ( + local_date.hour % 12 or 12, local_date.minute, + ("am", "pm")[local_date.hour >= 12]) + + return format % { + "month_name": self._months[local_date.month - 1], + "weekday": self._weekdays[local_date.weekday()], + "day": str(local_date.day), + "year": str(local_date.year), + "time": str_time + } + + def format_day(self, date, gmt_offset=0, dow=True): + """Formats the given date as a day of week. + + Example: "Monday, January 22". You can remove the day of week with + ``dow=False``. + """ + local_date = date - datetime.timedelta(minutes=gmt_offset) + _ = self.translate + if dow: + return _("%(weekday)s, %(month_name)s %(day)s") % { + "month_name": self._months[local_date.month - 1], + "weekday": self._weekdays[local_date.weekday()], + "day": str(local_date.day), + } + else: + return _("%(month_name)s %(day)s") % { + "month_name": self._months[local_date.month - 1], + "day": str(local_date.day), + } + + def list(self, parts): + """Returns a comma-separated list for the given list of parts. + + The format is, e.g., "A, B and C", "A and B" or just "A" for lists + of size 1. 
+ """ + _ = self.translate + if len(parts) == 0: + return "" + if len(parts) == 1: + return parts[0] + comma = u(' \u0648 ') if self.code.startswith("fa") else u(", ") + return _("%(commas)s and %(last)s") % { + "commas": comma.join(parts[:-1]), + "last": parts[len(parts) - 1], + } + + def friendly_number(self, value): + """Returns a comma-separated number for the given integer.""" + if self.code not in ("en", "en_US"): + return str(value) + value = str(value) + parts = [] + while value: + parts.append(value[-3:]) + value = value[:-3] + return ",".join(reversed(parts)) + + +class CSVLocale(Locale): + """Locale implementation using tornado's CSV translation format.""" + def translate(self, message, plural_message=None, count=None): + if plural_message is not None: + assert count is not None + if count != 1: + message = plural_message + message_dict = self.translations.get("plural", {}) + else: + message_dict = self.translations.get("singular", {}) + else: + message_dict = self.translations.get("unknown", {}) + return message_dict.get(message, message) + + def pgettext(self, context, message, plural_message=None, count=None): + if self.translations: + gen_log.warning('pgettext is not supported by CSVLocale') + return self.translate(message, plural_message, count) + + +class GettextLocale(Locale): + """Locale implementation using the `gettext` module.""" + def __init__(self, code, translations): + try: + # python 2 + self.ngettext = translations.ungettext + self.gettext = translations.ugettext + except AttributeError: + # python 3 + self.ngettext = translations.ngettext + self.gettext = translations.gettext + # self.gettext must exist before __init__ is called, since it + # calls into self.translate + super(GettextLocale, self).__init__(code, translations) + + def translate(self, message, plural_message=None, count=None): + if plural_message is not None: + assert count is not None + return self.ngettext(message, plural_message, count) + else: + return 
self.gettext(message) + + def pgettext(self, context, message, plural_message=None, count=None): + """Allows to set context for translation, accepts plural forms. + + Usage example:: + + pgettext("law", "right") + pgettext("good", "right") + + Plural message example:: + + pgettext("organization", "club", "clubs", len(clubs)) + pgettext("stick", "club", "clubs", len(clubs)) + + To generate POT file with context, add following options to step 1 + of `load_gettext_translations` sequence:: + + xgettext [basic options] --keyword=pgettext:1c,2 --keyword=pgettext:1c,2,3 + + .. versionadded:: 4.2 + """ + if plural_message is not None: + assert count is not None + msgs_with_ctxt = ("%s%s%s" % (context, CONTEXT_SEPARATOR, message), + "%s%s%s" % (context, CONTEXT_SEPARATOR, plural_message), + count) + result = self.ngettext(*msgs_with_ctxt) + if CONTEXT_SEPARATOR in result: + # Translation not found + result = self.ngettext(message, plural_message, count) + return result + else: + msg_with_ctxt = "%s%s%s" % (context, CONTEXT_SEPARATOR, message) + result = self.gettext(msg_with_ctxt) + if CONTEXT_SEPARATOR in result: + # Translation not found + result = message + return result diff --git a/python/tornado/locks.py b/python/tornado/locks.py new file mode 100644 index 000000000..a18177286 --- /dev/null +++ b/python/tornado/locks.py @@ -0,0 +1,512 @@ +# Copyright 2015 The Tornado Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from __future__ import absolute_import, division, print_function, with_statement + +__all__ = ['Condition', 'Event', 'Semaphore', 'BoundedSemaphore', 'Lock'] + +import collections + +from tornado import gen, ioloop +from tornado.concurrent import Future + + +class _TimeoutGarbageCollector(object): + """Base class for objects that periodically clean up timed-out waiters. + + Avoids memory leak in a common pattern like: + + while True: + yield condition.wait(short_timeout) + print('looping....') + """ + def __init__(self): + self._waiters = collections.deque() # Futures. + self._timeouts = 0 + + def _garbage_collect(self): + # Occasionally clear timed-out waiters. + self._timeouts += 1 + if self._timeouts > 100: + self._timeouts = 0 + self._waiters = collections.deque( + w for w in self._waiters if not w.done()) + + +class Condition(_TimeoutGarbageCollector): + """A condition allows one or more coroutines to wait until notified. + + Like a standard `threading.Condition`, but does not need an underlying lock + that is acquired and released. + + With a `Condition`, coroutines can wait to be notified by other coroutines: + + .. testcode:: + + from tornado import gen + from tornado.ioloop import IOLoop + from tornado.locks import Condition + + condition = Condition() + + @gen.coroutine + def waiter(): + print("I'll wait right here") + yield condition.wait() # Yield a Future. + print("I'm done waiting") + + @gen.coroutine + def notifier(): + print("About to notify") + condition.notify() + print("Done notifying") + + @gen.coroutine + def runner(): + # Yield two Futures; wait for waiter() and notifier() to finish. + yield [waiter(), notifier()] + + IOLoop.current().run_sync(runner) + + .. testoutput:: + + I'll wait right here + About to notify + Done notifying + I'm done waiting + + `wait` takes an optional ``timeout`` argument, which is either an absolute + timestamp:: + + io_loop = IOLoop.current() + + # Wait up to 1 second for a notification. 
+ yield condition.wait(timeout=io_loop.time() + 1) + + ...or a `datetime.timedelta` for a timeout relative to the current time:: + + # Wait up to 1 second. + yield condition.wait(timeout=datetime.timedelta(seconds=1)) + + The method raises `tornado.gen.TimeoutError` if there's no notification + before the deadline. + """ + + def __init__(self): + super(Condition, self).__init__() + self.io_loop = ioloop.IOLoop.current() + + def __repr__(self): + result = '<%s' % (self.__class__.__name__, ) + if self._waiters: + result += ' waiters[%s]' % len(self._waiters) + return result + '>' + + def wait(self, timeout=None): + """Wait for `.notify`. + + Returns a `.Future` that resolves ``True`` if the condition is notified, + or ``False`` after a timeout. + """ + waiter = Future() + self._waiters.append(waiter) + if timeout: + def on_timeout(): + waiter.set_result(False) + self._garbage_collect() + io_loop = ioloop.IOLoop.current() + timeout_handle = io_loop.add_timeout(timeout, on_timeout) + waiter.add_done_callback( + lambda _: io_loop.remove_timeout(timeout_handle)) + return waiter + + def notify(self, n=1): + """Wake ``n`` waiters.""" + waiters = [] # Waiters we plan to run right now. + while n and self._waiters: + waiter = self._waiters.popleft() + if not waiter.done(): # Might have timed out. + n -= 1 + waiters.append(waiter) + + for waiter in waiters: + waiter.set_result(True) + + def notify_all(self): + """Wake all waiters.""" + self.notify(len(self._waiters)) + + +class Event(object): + """An event blocks coroutines until its internal flag is set to True. + + Similar to `threading.Event`. + + A coroutine can wait for an event to be set. Once it is set, calls to + ``yield event.wait()`` will not block unless the event has been cleared: + + .. 
class Event(object):
    """An event blocks coroutines until its internal flag is set to True.

    Similar to `threading.Event`.

    A coroutine may wait for the event to be set; once set, calls to
    ``yield event.wait()`` return immediately until the event is cleared
    again.  Internally the flag is represented by a single `.Future`:
    a resolved Future means "set", a pending one means "clear".
    """
    def __init__(self):
        self._future = Future()

    def __repr__(self):
        state = 'set' if self.is_set() else 'clear'
        return '<%s %s>' % (self.__class__.__name__, state)

    def is_set(self):
        """Return ``True`` if the internal flag is true."""
        return self._future.done()

    def set(self):
        """Set the internal flag to ``True``. All waiters are awakened.

        Calling `.wait` once the flag is set will not block.
        """
        if self._future.done():
            return
        self._future.set_result(None)

    def clear(self):
        """Reset the internal flag to ``False``.

        Calls to `.wait` will block until `.set` is called.
        """
        if self._future.done():
            # Waiters hold references to the old (resolved) Future, so
            # swapping in a fresh one does not disturb them.
            self._future = Future()

    def wait(self, timeout=None):
        """Block until the internal flag is true.

        Returns a Future, which raises `tornado.gen.TimeoutError` after a
        timeout.
        """
        if timeout is None:
            return self._future
        return gen.with_timeout(timeout, self._future)
+ + A Semaphore manages a counter representing the number of `.release` calls + minus the number of `.acquire` calls, plus an initial value. The `.acquire` + method blocks if necessary until it can return without making the counter + negative. + + Semaphores limit access to a shared resource. To allow access for two + workers at a time: + + .. testsetup:: semaphore + + from collections import deque + + from tornado import gen + from tornado.ioloop import IOLoop + from tornado.concurrent import Future + + # Ensure reliable doctest output: resolve Futures one at a time. + futures_q = deque([Future() for _ in range(3)]) + + @gen.coroutine + def simulator(futures): + for f in futures: + yield gen.moment + f.set_result(None) + + IOLoop.current().add_callback(simulator, list(futures_q)) + + def use_some_resource(): + return futures_q.popleft() + + .. testcode:: semaphore + + from tornado import gen + from tornado.ioloop import IOLoop + from tornado.locks import Semaphore + + sem = Semaphore(2) + + @gen.coroutine + def worker(worker_id): + yield sem.acquire() + try: + print("Worker %d is working" % worker_id) + yield use_some_resource() + finally: + print("Worker %d is done" % worker_id) + sem.release() + + @gen.coroutine + def runner(): + # Join all workers. + yield [worker(i) for i in range(3)] + + IOLoop.current().run_sync(runner) + + .. testoutput:: semaphore + + Worker 0 is working + Worker 1 is working + Worker 0 is done + Worker 2 is working + Worker 1 is done + Worker 2 is done + + Workers 0 and 1 are allowed to run concurrently, but worker 2 waits until + the semaphore has been released once, by worker 0. + + `.acquire` is a context manager, so ``worker`` could be written as:: + + @gen.coroutine + def worker(worker_id): + with (yield sem.acquire()): + print("Worker %d is working" % worker_id) + yield use_some_resource() + + # Now the semaphore has been released. 
+ print("Worker %d is done" % worker_id) + + In Python 3.5, the semaphore itself can be used as an async context + manager:: + + async def worker(worker_id): + async with sem: + print("Worker %d is working" % worker_id) + await use_some_resource() + + # Now the semaphore has been released. + print("Worker %d is done" % worker_id) + + .. versionchanged:: 4.3 + Added ``async with`` support in Python 3.5. + """ + def __init__(self, value=1): + super(Semaphore, self).__init__() + if value < 0: + raise ValueError('semaphore initial value must be >= 0') + + self._value = value + + def __repr__(self): + res = super(Semaphore, self).__repr__() + extra = 'locked' if self._value == 0 else 'unlocked,value:{0}'.format( + self._value) + if self._waiters: + extra = '{0},waiters:{1}'.format(extra, len(self._waiters)) + return '<{0} [{1}]>'.format(res[1:-1], extra) + + def release(self): + """Increment the counter and wake one waiter.""" + self._value += 1 + while self._waiters: + waiter = self._waiters.popleft() + if not waiter.done(): + self._value -= 1 + + # If the waiter is a coroutine paused at + # + # with (yield semaphore.acquire()): + # + # then the context manager's __exit__ calls release() at the end + # of the "with" block. + waiter.set_result(_ReleasingContextManager(self)) + break + + def acquire(self, timeout=None): + """Decrement the counter. Returns a Future. + + Block if the counter is zero and wait for a `.release`. The Future + raises `.TimeoutError` after the deadline. 
+ """ + waiter = Future() + if self._value > 0: + self._value -= 1 + waiter.set_result(_ReleasingContextManager(self)) + else: + self._waiters.append(waiter) + if timeout: + def on_timeout(): + waiter.set_exception(gen.TimeoutError()) + self._garbage_collect() + io_loop = ioloop.IOLoop.current() + timeout_handle = io_loop.add_timeout(timeout, on_timeout) + waiter.add_done_callback( + lambda _: io_loop.remove_timeout(timeout_handle)) + return waiter + + def __enter__(self): + raise RuntimeError( + "Use Semaphore like 'with (yield semaphore.acquire())', not like" + " 'with semaphore'") + + __exit__ = __enter__ + + @gen.coroutine + def __aenter__(self): + yield self.acquire() + + @gen.coroutine + def __aexit__(self, typ, value, tb): + self.release() + + +class BoundedSemaphore(Semaphore): + """A semaphore that prevents release() being called too many times. + + If `.release` would increment the semaphore's value past the initial + value, it raises `ValueError`. Semaphores are mostly used to guard + resources with limited capacity, so a semaphore released too many times + is a sign of a bug. + """ + def __init__(self, value=1): + super(BoundedSemaphore, self).__init__(value=value) + self._initial_value = value + + def release(self): + """Increment the counter and wake one waiter.""" + if self._value >= self._initial_value: + raise ValueError("Semaphore released too many times") + super(BoundedSemaphore, self).release() + + +class Lock(object): + """A lock for coroutines. + + A Lock begins unlocked, and `acquire` locks it immediately. While it is + locked, a coroutine that yields `acquire` waits until another coroutine + calls `release`. + + Releasing an unlocked lock raises `RuntimeError`. + + `acquire` supports the context manager protocol in all Python versions: + + >>> from tornado import gen, locks + >>> lock = locks.Lock() + >>> + >>> @gen.coroutine + ... def f(): + ... with (yield lock.acquire()): + ... # Do something holding the lock. + ... pass + ... + ... 
# Now the lock is released. + + In Python 3.5, `Lock` also supports the async context manager + protocol. Note that in this case there is no `acquire`, because + ``async with`` includes both the ``yield`` and the ``acquire`` + (just as it does with `threading.Lock`): + + >>> async def f(): # doctest: +SKIP + ... async with lock: + ... # Do something holding the lock. + ... pass + ... + ... # Now the lock is released. + + .. versionchanged:: 3.5 + Added ``async with`` support in Python 3.5. + + """ + def __init__(self): + self._block = BoundedSemaphore(value=1) + + def __repr__(self): + return "<%s _block=%s>" % ( + self.__class__.__name__, + self._block) + + def acquire(self, timeout=None): + """Attempt to lock. Returns a Future. + + Returns a Future, which raises `tornado.gen.TimeoutError` after a + timeout. + """ + return self._block.acquire(timeout) + + def release(self): + """Unlock. + + The first coroutine in line waiting for `acquire` gets the lock. + + If not locked, raise a `RuntimeError`. + """ + try: + self._block.release() + except ValueError: + raise RuntimeError('release unlocked lock') + + def __enter__(self): + raise RuntimeError( + "Use Lock like 'with (yield lock)', not like 'with lock'") + + __exit__ = __enter__ + + @gen.coroutine + def __aenter__(self): + yield self.acquire() + + @gen.coroutine + def __aexit__(self, typ, value, tb): + self.release() diff --git a/python/tornado/log.py b/python/tornado/log.py new file mode 100644 index 000000000..040889a98 --- /dev/null +++ b/python/tornado/log.py @@ -0,0 +1,259 @@ +#!/usr/bin/env python +# +# Copyright 2012 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +"""Logging support for Tornado. + +Tornado uses three logger streams: + +* ``tornado.access``: Per-request logging for Tornado's HTTP servers (and + potentially other servers in the future) +* ``tornado.application``: Logging of errors from application code (i.e. + uncaught exceptions from callbacks) +* ``tornado.general``: General-purpose logging, including any errors + or warnings from Tornado itself. + +These streams may be configured independently using the standard library's +`logging` module. For example, you may wish to send ``tornado.access`` logs +to a separate file for analysis. +""" +from __future__ import absolute_import, division, print_function, with_statement + +import logging +import logging.handlers +import sys + +from tornado.escape import _unicode +from tornado.util import unicode_type, basestring_type + +try: + import curses +except ImportError: + curses = None + +# Logger objects for internal tornado use +access_log = logging.getLogger("tornado.access") +app_log = logging.getLogger("tornado.application") +gen_log = logging.getLogger("tornado.general") + + +def _stderr_supports_color(): + color = False + if curses and hasattr(sys.stderr, 'isatty') and sys.stderr.isatty(): + try: + curses.setupterm() + if curses.tigetnum("colors") > 0: + color = True + except Exception: + pass + return color + + +def _safe_unicode(s): + try: + return _unicode(s) + except UnicodeDecodeError: + return repr(s) + + +class LogFormatter(logging.Formatter): + """Log formatter used in Tornado. 
+ + Key features of this formatter are: + + * Color support when logging to a terminal that supports it. + * Timestamps on every log line. + * Robust against str/bytes encoding problems. + + This formatter is enabled automatically by + `tornado.options.parse_command_line` (unless ``--logging=none`` is + used). + """ + DEFAULT_FORMAT = '%(color)s[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]%(end_color)s %(message)s' + DEFAULT_DATE_FORMAT = '%y%m%d %H:%M:%S' + DEFAULT_COLORS = { + logging.DEBUG: 4, # Blue + logging.INFO: 2, # Green + logging.WARNING: 3, # Yellow + logging.ERROR: 1, # Red + } + + def __init__(self, color=True, fmt=DEFAULT_FORMAT, + datefmt=DEFAULT_DATE_FORMAT, colors=DEFAULT_COLORS): + r""" + :arg bool color: Enables color support. + :arg string fmt: Log message format. + It will be applied to the attributes dict of log records. The + text between ``%(color)s`` and ``%(end_color)s`` will be colored + depending on the level if color support is on. + :arg dict colors: color mappings from logging level to terminal color + code + :arg string datefmt: Datetime format. + Used for formatting ``(asctime)`` placeholder in ``prefix_fmt``. + + .. versionchanged:: 3.2 + + Added ``fmt`` and ``datefmt`` arguments. + """ + logging.Formatter.__init__(self, datefmt=datefmt) + self._fmt = fmt + + self._colors = {} + if color and _stderr_supports_color(): + # The curses module has some str/bytes confusion in + # python3. Until version 3.2.3, most methods return + # bytes, but only accept strings. In addition, we want to + # output these strings with the logging module, which + # works with unicode strings. The explicit calls to + # unicode() below are harmless in python2 but will do the + # right conversion in python 3. 
+            fg_color = (curses.tigetstr("setaf") or
+                        curses.tigetstr("setf") or "")
+            if (3, 0) < sys.version_info < (3, 2, 3):
+                fg_color = unicode_type(fg_color, "ascii")
+
+            for levelno, code in colors.items():
+                self._colors[levelno] = unicode_type(curses.tparm(fg_color, code), "ascii")
+            self._normal = unicode_type(curses.tigetstr("sgr0"), "ascii")
+        else:
+            self._normal = ''
+
+    def format(self, record):
+        try:
+            message = record.getMessage()
+            assert isinstance(message, basestring_type)  # guaranteed by logging
+            # Encoding notes: The logging module prefers to work with character
+            # strings, but only enforces that log messages are instances of
+            # basestring. In python 2, non-ascii bytestrings will make
+            # their way through the logging framework until they blow up with
+            # an unhelpful decoding error (with this formatter it happens
+            # when we attach the prefix, but there are other opportunities for
+            # exceptions further along in the framework).
+            #
+            # If a byte string makes it this far, convert it to unicode to
+            # ensure it will make it out to the logs. Use repr() as a fallback
+            # to ensure that all byte strings can be converted successfully,
+            # but don't do it by default so we don't add extra quotes to ascii
+            # bytestrings. This is a bit of a hacky place to do this, but
+            # it's worth it since the encoding errors that would otherwise
+            # result are so useless (and tornado is fond of using utf8-encoded
+            # byte strings wherever possible).
+ record.message = _safe_unicode(message) + except Exception as e: + record.message = "Bad message (%r): %r" % (e, record.__dict__) + + record.asctime = self.formatTime(record, self.datefmt) + + if record.levelno in self._colors: + record.color = self._colors[record.levelno] + record.end_color = self._normal + else: + record.color = record.end_color = '' + + formatted = self._fmt % record.__dict__ + + if record.exc_info: + if not record.exc_text: + record.exc_text = self.formatException(record.exc_info) + if record.exc_text: + # exc_text contains multiple lines. We need to _safe_unicode + # each line separately so that non-utf8 bytes don't cause + # all the newlines to turn into '\n'. + lines = [formatted.rstrip()] + lines.extend(_safe_unicode(ln) for ln in record.exc_text.split('\n')) + formatted = '\n'.join(lines) + return formatted.replace("\n", "\n ") + + +def enable_pretty_logging(options=None, logger=None): + """Turns on formatted logging output as configured. + + This is called automatically by `tornado.options.parse_command_line` + and `tornado.options.parse_config_file`. + """ + if options is None: + from tornado.options import options + if options.logging is None or options.logging.lower() == 'none': + return + if logger is None: + logger = logging.getLogger() + logger.setLevel(getattr(logging, options.logging.upper())) + if options.log_file_prefix: + rotate_mode = options.log_rotate_mode + if rotate_mode == 'size': + channel = logging.handlers.RotatingFileHandler( + filename=options.log_file_prefix, + maxBytes=options.log_file_max_size, + backupCount=options.log_file_num_backups) + elif rotate_mode == 'time': + channel = logging.handlers.TimedRotatingFileHandler( + filename=options.log_file_prefix, + when=options.log_rotate_when, + interval=options.log_rotate_interval, + backupCount=options.log_file_num_backups) + else: + error_message = 'The value of log_rotate_mode option should be ' +\ + '"size" or "time", not "%s".' 
% rotate_mode + raise ValueError(error_message) + channel.setFormatter(LogFormatter(color=False)) + logger.addHandler(channel) + + if (options.log_to_stderr or + (options.log_to_stderr is None and not logger.handlers)): + # Set up color if we are in a tty and curses is installed + channel = logging.StreamHandler() + channel.setFormatter(LogFormatter()) + logger.addHandler(channel) + + +def define_logging_options(options=None): + """Add logging-related flags to ``options``. + + These options are present automatically on the default options instance; + this method is only necessary if you have created your own `.OptionParser`. + + .. versionadded:: 4.2 + This function existed in prior versions but was broken and undocumented until 4.2. + """ + if options is None: + # late import to prevent cycle + from tornado.options import options + options.define("logging", default="info", + help=("Set the Python log level. If 'none', tornado won't touch the " + "logging configuration."), + metavar="debug|info|warning|error|none") + options.define("log_to_stderr", type=bool, default=None, + help=("Send log output to stderr (colorized if possible). " + "By default use stderr if --log_file_prefix is not set and " + "no other logging is configured.")) + options.define("log_file_prefix", type=str, default=None, metavar="PATH", + help=("Path prefix for log files. " + "Note that if you are running multiple tornado processes, " + "log_file_prefix must be different for each of them (e.g. 
" + "include the port number)")) + options.define("log_file_max_size", type=int, default=100 * 1000 * 1000, + help="max size of log files before rollover") + options.define("log_file_num_backups", type=int, default=10, + help="number of log files to keep") + + options.define("log_rotate_when", type=str, default='midnight', + help=("specify the type of TimedRotatingFileHandler interval " + "other options:('S', 'M', 'H', 'D', 'W0'-'W6')")) + options.define("log_rotate_interval", type=int, default=1, + help="The interval value of timed rotating") + + options.define("log_rotate_mode", type=str, default='size', + help="The mode of rotating files(time or size)") + + options.add_parse_callback(lambda: enable_pretty_logging(options)) diff --git a/python/tornado/netutil.py b/python/tornado/netutil.py new file mode 100644 index 000000000..4fc8d04d9 --- /dev/null +++ b/python/tornado/netutil.py @@ -0,0 +1,521 @@ +#!/usr/bin/env python +# +# Copyright 2011 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+
+"""Miscellaneous network utility code."""
+
+from __future__ import absolute_import, division, print_function, with_statement
+
+import errno
+import os
+import sys
+import socket
+import stat
+
+from tornado.concurrent import dummy_executor, run_on_executor
+from tornado.ioloop import IOLoop
+from tornado.platform.auto import set_close_exec
+from tornado.util import u, Configurable, errno_from_exception
+
+try:
+    import ssl
+except ImportError:
+    # ssl is not available on Google App Engine
+    ssl = None
+
+try:
+    import certifi
+except ImportError:
+    # certifi is optional as long as we have ssl.create_default_context.
+    if ssl is None or hasattr(ssl, 'create_default_context'):
+        certifi = None
+    else:
+        raise
+
+try:
+    xrange  # py2
+except NameError:
+    xrange = range  # py3
+
+if hasattr(ssl, 'match_hostname') and hasattr(ssl, 'CertificateError'):  # python 3.2+
+    ssl_match_hostname = ssl.match_hostname
+    SSLCertificateError = ssl.CertificateError
+elif ssl is None:
+    ssl_match_hostname = SSLCertificateError = None
+else:
+    import backports.ssl_match_hostname
+    ssl_match_hostname = backports.ssl_match_hostname.match_hostname
+    SSLCertificateError = backports.ssl_match_hostname.CertificateError
+
+if hasattr(ssl, 'SSLContext'):
+    if hasattr(ssl, 'create_default_context'):
+        # Python 2.7.9+, 3.4+
+        # Note that the naming of ssl.Purpose is confusing; the purpose
+        # of a context is to authenticate the opposite side of the connection.
+        _client_ssl_defaults = ssl.create_default_context(
+            ssl.Purpose.SERVER_AUTH)
+        _server_ssl_defaults = ssl.create_default_context(
+            ssl.Purpose.CLIENT_AUTH)
+    else:
+        # Python 3.2-3.3
+        _client_ssl_defaults = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+        _client_ssl_defaults.verify_mode = ssl.CERT_REQUIRED
+        _client_ssl_defaults.load_verify_locations(certifi.where())
+        _server_ssl_defaults = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+    if hasattr(ssl, 'OP_NO_COMPRESSION'):
+        # Disable TLS compression to avoid CRIME and related attacks.
+ # This constant wasn't added until python 3.3. + _client_ssl_defaults.options |= ssl.OP_NO_COMPRESSION + _server_ssl_defaults.options |= ssl.OP_NO_COMPRESSION + +elif ssl: + # Python 2.6-2.7.8 + _client_ssl_defaults = dict(cert_reqs=ssl.CERT_REQUIRED, + ca_certs=certifi.where()) + _server_ssl_defaults = {} +else: + # Google App Engine + _client_ssl_defaults = dict(cert_reqs=None, + ca_certs=None) + _server_ssl_defaults = {} + +# ThreadedResolver runs getaddrinfo on a thread. If the hostname is unicode, +# getaddrinfo attempts to import encodings.idna. If this is done at +# module-import time, the import lock is already held by the main thread, +# leading to deadlock. Avoid it by caching the idna encoder on the main +# thread now. +u('foo').encode('idna') + +# These errnos indicate that a non-blocking operation must be retried +# at a later time. On most platforms they're the same value, but on +# some they differ. +_ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN) + +if hasattr(errno, "WSAEWOULDBLOCK"): + _ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,) + +# Default backlog used when calling sock.listen() +_DEFAULT_BACKLOG = 128 + + +def bind_sockets(port, address=None, family=socket.AF_UNSPEC, + backlog=_DEFAULT_BACKLOG, flags=None, reuse_port=False): + """Creates listening sockets bound to the given port and address. + + Returns a list of socket objects (multiple sockets are returned if + the given address maps to multiple IP addresses, which is most common + for mixed IPv4 and IPv6 use). + + Address may be either an IP address or hostname. If it's a hostname, + the server will listen on all IP addresses associated with the + name. Address may be an empty string or None to listen on all + available interfaces. Family may be set to either `socket.AF_INET` + or `socket.AF_INET6` to restrict to IPv4 or IPv6 addresses, otherwise + both will be used if available. + + The ``backlog`` argument has the same meaning as for + `socket.listen() `. 
+
+    ``flags`` is a bitmask of AI_* flags to `~socket.getaddrinfo`, like
+    ``socket.AI_PASSIVE | socket.AI_NUMERICHOST``.
+
+    ``reuse_port`` option sets ``SO_REUSEPORT`` option for every socket
+    in the list. If your platform doesn't support this option ValueError will
+    be raised.
+    """
+    if reuse_port and not hasattr(socket, "SO_REUSEPORT"):
+        raise ValueError("the platform doesn't support SO_REUSEPORT")
+
+    sockets = []
+    if address == "":
+        address = None
+    if not socket.has_ipv6 and family == socket.AF_UNSPEC:
+        # Python can be compiled with --disable-ipv6, which causes
+        # operations on AF_INET6 sockets to fail, but does not
+        # automatically exclude those results from getaddrinfo
+        # results.
+        # http://bugs.python.org/issue16208
+        family = socket.AF_INET
+    if flags is None:
+        flags = socket.AI_PASSIVE
+    bound_port = None
+    for res in set(socket.getaddrinfo(address, port, family, socket.SOCK_STREAM,
+                                      0, flags)):
+        af, socktype, proto, canonname, sockaddr = res
+        if (sys.platform == 'darwin' and address == 'localhost' and
+                af == socket.AF_INET6 and sockaddr[3] != 0):
+            # Mac OS X includes a link-local address fe80::1%lo0 in the
+            # getaddrinfo results for 'localhost'. However, the firewall
+            # doesn't understand that this is a local address and will
+            # prompt for access (often repeatedly, due to an apparent
+            # bug in its ability to remember granting access to an
+            # application). Skip these addresses.
+            continue
+        try:
+            sock = socket.socket(af, socktype, proto)
+        except socket.error as e:
+            if errno_from_exception(e) == errno.EAFNOSUPPORT:
+                continue
+            raise
+        set_close_exec(sock.fileno())
+        if os.name != 'nt':
+            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+        if reuse_port:
+            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
+        if af == socket.AF_INET6:
+            # On linux, ipv6 sockets accept ipv4 too by default,
+            # but this makes it impossible to bind to both
+            # 0.0.0.0 in ipv4 and :: in ipv6.
On other systems, + # separate sockets *must* be used to listen for both ipv4 + # and ipv6. For consistency, always disable ipv4 on our + # ipv6 sockets and use a separate ipv4 socket when needed. + # + # Python 2.x on windows doesn't have IPPROTO_IPV6. + if hasattr(socket, "IPPROTO_IPV6"): + sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1) + + # automatic port allocation with port=None + # should bind on the same port on IPv4 and IPv6 + host, requested_port = sockaddr[:2] + if requested_port == 0 and bound_port is not None: + sockaddr = tuple([host, bound_port] + list(sockaddr[2:])) + + sock.setblocking(0) + sock.bind(sockaddr) + bound_port = sock.getsockname()[1] + sock.listen(backlog) + sockets.append(sock) + return sockets + +if hasattr(socket, 'AF_UNIX'): + def bind_unix_socket(file, mode=0o600, backlog=_DEFAULT_BACKLOG): + """Creates a listening unix socket. + + If a socket with the given name already exists, it will be deleted. + If any other file with that name exists, an exception will be + raised. + + Returns a socket object (not a list of socket objects like + `bind_sockets`) + """ + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + set_close_exec(sock.fileno()) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + sock.setblocking(0) + try: + st = os.stat(file) + except OSError as err: + if errno_from_exception(err) != errno.ENOENT: + raise + else: + if stat.S_ISSOCK(st.st_mode): + os.remove(file) + else: + raise ValueError("File %s exists and is not a socket", file) + sock.bind(file) + os.chmod(file, mode) + sock.listen(backlog) + return sock + + +def add_accept_handler(sock, callback, io_loop=None): + """Adds an `.IOLoop` event handler to accept new connections on ``sock``. + + When a connection is accepted, ``callback(connection, address)`` will + be run (``connection`` is a socket object, and ``address`` is the + address of the other end of the connection). 
Note that this signature + is different from the ``callback(fd, events)`` signature used for + `.IOLoop` handlers. + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. + """ + if io_loop is None: + io_loop = IOLoop.current() + + def accept_handler(fd, events): + # More connections may come in while we're handling callbacks; + # to prevent starvation of other tasks we must limit the number + # of connections we accept at a time. Ideally we would accept + # up to the number of connections that were waiting when we + # entered this method, but this information is not available + # (and rearranging this method to call accept() as many times + # as possible before running any callbacks would have adverse + # effects on load balancing in multiprocess configurations). + # Instead, we use the (default) listen backlog as a rough + # heuristic for the number of connections we can reasonably + # accept at once. + for i in xrange(_DEFAULT_BACKLOG): + try: + connection, address = sock.accept() + except socket.error as e: + # _ERRNO_WOULDBLOCK indicate we have accepted every + # connection that is available. + if errno_from_exception(e) in _ERRNO_WOULDBLOCK: + return + # ECONNABORTED indicates that there was a connection + # but it was closed while still in the accept queue. + # (observed on FreeBSD). + if errno_from_exception(e) == errno.ECONNABORTED: + continue + raise + callback(connection, address) + io_loop.add_handler(sock, accept_handler, IOLoop.READ) + + +def is_valid_ip(ip): + """Returns true if the given string is a well-formed IP address. + + Supports IPv4 and IPv6. + """ + if not ip or '\x00' in ip: + # getaddrinfo resolves empty strings to localhost, and truncates + # on zero bytes. 
+ return False + try: + res = socket.getaddrinfo(ip, 0, socket.AF_UNSPEC, + socket.SOCK_STREAM, + 0, socket.AI_NUMERICHOST) + return bool(res) + except socket.gaierror as e: + if e.args[0] == socket.EAI_NONAME: + return False + raise + return True + + +class Resolver(Configurable): + """Configurable asynchronous DNS resolver interface. + + By default, a blocking implementation is used (which simply calls + `socket.getaddrinfo`). An alternative implementation can be + chosen with the `Resolver.configure <.Configurable.configure>` + class method:: + + Resolver.configure('tornado.netutil.ThreadedResolver') + + The implementations of this interface included with Tornado are + + * `tornado.netutil.BlockingResolver` + * `tornado.netutil.ThreadedResolver` + * `tornado.netutil.OverrideResolver` + * `tornado.platform.twisted.TwistedResolver` + * `tornado.platform.caresresolver.CaresResolver` + """ + @classmethod + def configurable_base(cls): + return Resolver + + @classmethod + def configurable_default(cls): + return BlockingResolver + + def resolve(self, host, port, family=socket.AF_UNSPEC, callback=None): + """Resolves an address. + + The ``host`` argument is a string which may be a hostname or a + literal IP address. + + Returns a `.Future` whose result is a list of (family, + address) pairs, where address is a tuple suitable to pass to + `socket.connect ` (i.e. a ``(host, + port)`` pair for IPv4; additional fields may be present for + IPv6). If a ``callback`` is passed, it will be run with the + result as an argument when it is complete. + """ + raise NotImplementedError() + + def close(self): + """Closes the `Resolver`, freeing any resources used. + + .. versionadded:: 3.1 + + """ + pass + + +class ExecutorResolver(Resolver): + """Resolver implementation using a `concurrent.futures.Executor`. + + Use this instead of `ThreadedResolver` when you require additional + control over the executor being used. 
+ + The executor will be shut down when the resolver is closed unless + ``close_resolver=False``; use this if you want to reuse the same + executor elsewhere. + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. + """ + def initialize(self, io_loop=None, executor=None, close_executor=True): + self.io_loop = io_loop or IOLoop.current() + if executor is not None: + self.executor = executor + self.close_executor = close_executor + else: + self.executor = dummy_executor + self.close_executor = False + + def close(self): + if self.close_executor: + self.executor.shutdown() + self.executor = None + + @run_on_executor + def resolve(self, host, port, family=socket.AF_UNSPEC): + # On Solaris, getaddrinfo fails if the given port is not found + # in /etc/services and no socket type is given, so we must pass + # one here. The socket type used here doesn't seem to actually + # matter (we discard the one we get back in the results), + # so the addresses we return should still be usable with SOCK_DGRAM. + addrinfo = socket.getaddrinfo(host, port, family, socket.SOCK_STREAM) + results = [] + for family, socktype, proto, canonname, address in addrinfo: + results.append((family, address)) + return results + + +class BlockingResolver(ExecutorResolver): + """Default `Resolver` implementation, using `socket.getaddrinfo`. + + The `.IOLoop` will be blocked during the resolution, although the + callback will not be run until the next `.IOLoop` iteration. + """ + def initialize(self, io_loop=None): + super(BlockingResolver, self).initialize(io_loop=io_loop) + + +class ThreadedResolver(ExecutorResolver): + """Multithreaded non-blocking `Resolver` implementation. + + Requires the `concurrent.futures` package to be installed + (available in the standard library since Python 3.2, + installable with ``pip install futures`` in older versions). + + The thread pool size can be configured with:: + + Resolver.configure('tornado.netutil.ThreadedResolver', + num_threads=10) + + .. 
versionchanged:: 3.1 + All ``ThreadedResolvers`` share a single thread pool, whose + size is set by the first one to be created. + """ + _threadpool = None + _threadpool_pid = None + + def initialize(self, io_loop=None, num_threads=10): + threadpool = ThreadedResolver._create_threadpool(num_threads) + super(ThreadedResolver, self).initialize( + io_loop=io_loop, executor=threadpool, close_executor=False) + + @classmethod + def _create_threadpool(cls, num_threads): + pid = os.getpid() + if cls._threadpool_pid != pid: + # Threads cannot survive after a fork, so if our pid isn't what it + # was when we created the pool then delete it. + cls._threadpool = None + if cls._threadpool is None: + from concurrent.futures import ThreadPoolExecutor + cls._threadpool = ThreadPoolExecutor(num_threads) + cls._threadpool_pid = pid + return cls._threadpool + + +class OverrideResolver(Resolver): + """Wraps a resolver with a mapping of overrides. + + This can be used to make local DNS changes (e.g. for testing) + without modifying system-wide settings. + + The mapping can contain either host strings or host-port pairs. + """ + def initialize(self, resolver, mapping): + self.resolver = resolver + self.mapping = mapping + + def close(self): + self.resolver.close() + + def resolve(self, host, port, *args, **kwargs): + if (host, port) in self.mapping: + host, port = self.mapping[(host, port)] + elif host in self.mapping: + host = self.mapping[host] + return self.resolver.resolve(host, port, *args, **kwargs) + + +# These are the keyword arguments to ssl.wrap_socket that must be translated +# to their SSLContext equivalents (the other arguments are still passed +# to SSLContext.wrap_socket). +_SSL_CONTEXT_KEYWORDS = frozenset(['ssl_version', 'certfile', 'keyfile', + 'cert_reqs', 'ca_certs', 'ciphers']) + + +def ssl_options_to_context(ssl_options): + """Try to convert an ``ssl_options`` dictionary to an + `~ssl.SSLContext` object. 
+ + The ``ssl_options`` dictionary contains keywords to be passed to + `ssl.wrap_socket`. In Python 2.7.9+, `ssl.SSLContext` objects can + be used instead. This function converts the dict form to its + `~ssl.SSLContext` equivalent, and may be used when a component which + accepts both forms needs to upgrade to the `~ssl.SSLContext` version + to use features like SNI or NPN. + """ + if isinstance(ssl_options, dict): + assert all(k in _SSL_CONTEXT_KEYWORDS for k in ssl_options), ssl_options + if (not hasattr(ssl, 'SSLContext') or + isinstance(ssl_options, ssl.SSLContext)): + return ssl_options + context = ssl.SSLContext( + ssl_options.get('ssl_version', ssl.PROTOCOL_SSLv23)) + if 'certfile' in ssl_options: + context.load_cert_chain(ssl_options['certfile'], ssl_options.get('keyfile', None)) + if 'cert_reqs' in ssl_options: + context.verify_mode = ssl_options['cert_reqs'] + if 'ca_certs' in ssl_options: + context.load_verify_locations(ssl_options['ca_certs']) + if 'ciphers' in ssl_options: + context.set_ciphers(ssl_options['ciphers']) + if hasattr(ssl, 'OP_NO_COMPRESSION'): + # Disable TLS compression to avoid CRIME and related attacks. + # This constant wasn't added until python 3.3. + context.options |= ssl.OP_NO_COMPRESSION + return context + + +def ssl_wrap_socket(socket, ssl_options, server_hostname=None, **kwargs): + """Returns an ``ssl.SSLSocket`` wrapping the given socket. + + ``ssl_options`` may be either an `ssl.SSLContext` object or a + dictionary (as accepted by `ssl_options_to_context`). Additional + keyword arguments are passed to ``wrap_socket`` (either the + `~ssl.SSLContext` method or the `ssl` module function as + appropriate). 
+ """ + context = ssl_options_to_context(ssl_options) + if hasattr(ssl, 'SSLContext') and isinstance(context, ssl.SSLContext): + if server_hostname is not None and getattr(ssl, 'HAS_SNI'): + # Python doesn't have server-side SNI support so we can't + # really unittest this, but it can be manually tested with + # python3.2 -m tornado.httpclient https://sni.velox.ch + return context.wrap_socket(socket, server_hostname=server_hostname, + **kwargs) + else: + return context.wrap_socket(socket, **kwargs) + else: + return ssl.wrap_socket(socket, **dict(context, **kwargs)) diff --git a/python/tornado/options.py b/python/tornado/options.py new file mode 100644 index 000000000..ba16b1a7f --- /dev/null +++ b/python/tornado/options.py @@ -0,0 +1,582 @@ +#!/usr/bin/env python +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""A command line parsing module that lets modules define their own options. + +Each module defines its own options which are added to the global +option namespace, e.g.:: + + from tornado.options import define, options + + define("mysql_host", default="127.0.0.1:3306", help="Main user DB") + define("memcache_hosts", default="127.0.0.1:11011", multiple=True, + help="Main user memcache servers") + + def connect(): + db = database.Connection(options.mysql_host) + ... 
+ +The ``main()`` method of your application does not need to be aware of all of +the options used throughout your program; they are all automatically loaded +when the modules are loaded. However, all modules that define options +must have been imported before the command line is parsed. + +Your ``main()`` method can parse the command line or parse a config file with +either:: + + tornado.options.parse_command_line() + # or + tornado.options.parse_config_file("/etc/server.conf") + +Command line formats are what you would expect (``--myoption=myvalue``). +Config files are just Python files. Global names become options, e.g.:: + + myoption = "myvalue" + myotheroption = "myothervalue" + +We support `datetimes `, `timedeltas +`, ints, and floats (just pass a ``type`` kwarg to +`define`). We also accept multi-value options. See the documentation for +`define()` below. + +`tornado.options.options` is a singleton instance of `OptionParser`, and +the top-level functions in this module (`define`, `parse_command_line`, etc) +simply call methods on it. You may create additional `OptionParser` +instances to define isolated sets of options, such as for subcommands. + +.. note:: + + By default, several options are defined that will configure the + standard `logging` module when `parse_command_line` or `parse_config_file` + are called. If you want Tornado to leave the logging configuration + alone so you can manage it yourself, either pass ``--logging=none`` + on the command line or do the following to disable it in code:: + + from tornado.options import options, parse_command_line + options.logging = None + parse_command_line() + +.. versionchanged:: 4.3 + Dashes and underscores are fully interchangeable in option names; + options can be defined, set, and read with any mix of the two. + Dashes are typical for command-line usage while config files require + underscores. 
+""" + +from __future__ import absolute_import, division, print_function, with_statement + +import datetime +import numbers +import re +import sys +import os +import textwrap + +from tornado.escape import _unicode, native_str +from tornado.log import define_logging_options +from tornado import stack_context +from tornado.util import basestring_type, exec_in + + +class Error(Exception): + """Exception raised by errors in the options module.""" + pass + + +class OptionParser(object): + """A collection of options, a dictionary with object-like access. + + Normally accessed via static functions in the `tornado.options` module, + which reference a global instance. + """ + def __init__(self): + # we have to use self.__dict__ because we override setattr. + self.__dict__['_options'] = {} + self.__dict__['_parse_callbacks'] = [] + self.define("help", type=bool, help="show this help information", + callback=self._help_callback) + + def _normalize_name(self, name): + return name.replace('_', '-') + + def __getattr__(self, name): + name = self._normalize_name(name) + if isinstance(self._options.get(name), _Option): + return self._options[name].value() + raise AttributeError("Unrecognized option %r" % name) + + def __setattr__(self, name, value): + name = self._normalize_name(name) + if isinstance(self._options.get(name), _Option): + return self._options[name].set(value) + raise AttributeError("Unrecognized option %r" % name) + + def __iter__(self): + return (opt.name for opt in self._options.values()) + + def __contains__(self, name): + name = self._normalize_name(name) + return name in self._options + + def __getitem__(self, name): + name = self._normalize_name(name) + return self._options[name].value() + + def items(self): + """A sequence of (name, value) pairs. + + .. versionadded:: 3.1 + """ + return [(opt.name, opt.value()) for name, opt in self._options.items()] + + def groups(self): + """The set of option-groups created by ``define``. + + .. 
versionadded:: 3.1 + """ + return set(opt.group_name for opt in self._options.values()) + + def group_dict(self, group): + """The names and values of options in a group. + + Useful for copying options into Application settings:: + + from tornado.options import define, parse_command_line, options + + define('template_path', group='application') + define('static_path', group='application') + + parse_command_line() + + application = Application( + handlers, **options.group_dict('application')) + + .. versionadded:: 3.1 + """ + return dict( + (opt.name, opt.value()) for name, opt in self._options.items() + if not group or group == opt.group_name) + + def as_dict(self): + """The names and values of all options. + + .. versionadded:: 3.1 + """ + return dict( + (opt.name, opt.value()) for name, opt in self._options.items()) + + def define(self, name, default=None, type=None, help=None, metavar=None, + multiple=False, group=None, callback=None): + """Defines a new command line option. + + If ``type`` is given (one of str, float, int, datetime, or timedelta) + or can be inferred from the ``default``, we parse the command line + arguments based on the given type. If ``multiple`` is True, we accept + comma-separated values, and the option value is always a list. + + For multi-value integers, we also accept the syntax ``x:y``, which + turns into ``range(x, y)`` - very useful for long integer ranges. + + ``help`` and ``metavar`` are used to construct the + automatically generated command line help string. The help + message is formatted like:: + + --name=METAVAR help string + + ``group`` is used to group the defined options in logical + groups. By default, command line options are grouped by the + file in which they are defined. + + Command line option names must be unique globally. They can be parsed + from the command line with `parse_command_line` or parsed from a + config file with `parse_config_file`. 
+ + If a ``callback`` is given, it will be run with the new value whenever + the option is changed. This can be used to combine command-line + and file-based options:: + + define("config", type=str, help="path to config file", + callback=lambda path: parse_config_file(path, final=False)) + + With this definition, options in the file specified by ``--config`` will + override options set earlier on the command line, but can be overridden + by later flags. + """ + if name in self._options: + raise Error("Option %r already defined in %s" % + (name, self._options[name].file_name)) + frame = sys._getframe(0) + options_file = frame.f_code.co_filename + + # Can be called directly, or through top level define() fn, in which + # case, step up above that frame to look for real caller. + if (frame.f_back.f_code.co_filename == options_file and + frame.f_back.f_code.co_name == 'define'): + frame = frame.f_back + + file_name = frame.f_back.f_code.co_filename + if file_name == options_file: + file_name = "" + if type is None: + if not multiple and default is not None: + type = default.__class__ + else: + type = str + if group: + group_name = group + else: + group_name = file_name + normalized = self._normalize_name(name) + option = _Option(name, file_name=file_name, + default=default, type=type, help=help, + metavar=metavar, multiple=multiple, + group_name=group_name, + callback=callback) + self._options[normalized] = option + + def parse_command_line(self, args=None, final=True): + """Parses all options given on the command line (defaults to + `sys.argv`). + + Note that ``args[0]`` is ignored since it is the program name + in `sys.argv`. + + We return a list of all arguments that are not parsed as options. + + If ``final`` is ``False``, parse callbacks will not be run. + This is useful for applications that wish to combine configurations + from multiple sources. 
+ """ + if args is None: + args = sys.argv + remaining = [] + for i in range(1, len(args)): + # All things after the last option are command line arguments + if not args[i].startswith("-"): + remaining = args[i:] + break + if args[i] == "--": + remaining = args[i + 1:] + break + arg = args[i].lstrip("-") + name, equals, value = arg.partition("=") + name = self._normalize_name(name) + if name not in self._options: + self.print_help() + raise Error('Unrecognized command line option: %r' % name) + option = self._options[name] + if not equals: + if option.type == bool: + value = "true" + else: + raise Error('Option %r requires a value' % name) + option.parse(value) + + if final: + self.run_parse_callbacks() + + return remaining + + def parse_config_file(self, path, final=True): + """Parses and loads the Python config file at the given path. + + If ``final`` is ``False``, parse callbacks will not be run. + This is useful for applications that wish to combine configurations + from multiple sources. + + .. versionchanged:: 4.1 + Config files are now always interpreted as utf-8 instead of + the system default encoding. 
+ """ + config = {} + with open(path, 'rb') as f: + exec_in(native_str(f.read()), config, config) + for name in config: + normalized = self._normalize_name(name) + if normalized in self._options: + self._options[normalized].set(config[name]) + + if final: + self.run_parse_callbacks() + + def print_help(self, file=None): + """Prints all the command line options to stderr (or another file).""" + if file is None: + file = sys.stderr + print("Usage: %s [OPTIONS]" % sys.argv[0], file=file) + print("\nOptions:\n", file=file) + by_group = {} + for option in self._options.values(): + by_group.setdefault(option.group_name, []).append(option) + + for filename, o in sorted(by_group.items()): + if filename: + print("\n%s options:\n" % os.path.normpath(filename), file=file) + o.sort(key=lambda option: option.name) + for option in o: + # Always print names with dashes in a CLI context. + prefix = self._normalize_name(option.name) + if option.metavar: + prefix += "=" + option.metavar + description = option.help or "" + if option.default is not None and option.default != '': + description += " (default %s)" % option.default + lines = textwrap.wrap(description, 79 - 35) + if len(prefix) > 30 or len(lines) == 0: + lines.insert(0, '') + print(" --%-30s %s" % (prefix, lines[0]), file=file) + for line in lines[1:]: + print("%-34s %s" % (' ', line), file=file) + print(file=file) + + def _help_callback(self, value): + if value: + self.print_help() + sys.exit(0) + + def add_parse_callback(self, callback): + """Adds a parse callback, to be invoked when option parsing is done.""" + self._parse_callbacks.append(stack_context.wrap(callback)) + + def run_parse_callbacks(self): + for callback in self._parse_callbacks: + callback() + + def mockable(self): + """Returns a wrapper around self that is compatible with + `mock.patch `. 
+ + The `mock.patch ` function (included in + the standard library `unittest.mock` package since Python 3.3, + or in the third-party ``mock`` package for older versions of + Python) is incompatible with objects like ``options`` that + override ``__getattr__`` and ``__setattr__``. This function + returns an object that can be used with `mock.patch.object + ` to modify option values:: + + with mock.patch.object(options.mockable(), 'name', value): + assert options.name == value + """ + return _Mockable(self) + + +class _Mockable(object): + """`mock.patch` compatible wrapper for `OptionParser`. + + As of ``mock`` version 1.0.1, when an object uses ``__getattr__`` + hooks instead of ``__dict__``, ``patch.__exit__`` tries to delete + the attribute it set instead of setting a new one (assuming that + the object does not catpure ``__setattr__``, so the patch + created a new attribute in ``__dict__``). + + _Mockable's getattr and setattr pass through to the underlying + OptionParser, and delattr undoes the effect of a previous setattr. 
+ """ + def __init__(self, options): + # Modify __dict__ directly to bypass __setattr__ + self.__dict__['_options'] = options + self.__dict__['_originals'] = {} + + def __getattr__(self, name): + return getattr(self._options, name) + + def __setattr__(self, name, value): + assert name not in self._originals, "don't reuse mockable objects" + self._originals[name] = getattr(self._options, name) + setattr(self._options, name, value) + + def __delattr__(self, name): + setattr(self._options, name, self._originals.pop(name)) + + +class _Option(object): + UNSET = object() + + def __init__(self, name, default=None, type=basestring_type, help=None, + metavar=None, multiple=False, file_name=None, group_name=None, + callback=None): + if default is None and multiple: + default = [] + self.name = name + self.type = type + self.help = help + self.metavar = metavar + self.multiple = multiple + self.file_name = file_name + self.group_name = group_name + self.callback = callback + self.default = default + self._value = _Option.UNSET + + def value(self): + return self.default if self._value is _Option.UNSET else self._value + + def parse(self, value): + _parse = { + datetime.datetime: self._parse_datetime, + datetime.timedelta: self._parse_timedelta, + bool: self._parse_bool, + basestring_type: self._parse_string, + }.get(self.type, self.type) + if self.multiple: + self._value = [] + for part in value.split(","): + if issubclass(self.type, numbers.Integral): + # allow ranges of the form X:Y (inclusive at both ends) + lo, _, hi = part.partition(":") + lo = _parse(lo) + hi = _parse(hi) if hi else lo + self._value.extend(range(lo, hi + 1)) + else: + self._value.append(_parse(part)) + else: + self._value = _parse(value) + if self.callback is not None: + self.callback(self._value) + return self.value() + + def set(self, value): + if self.multiple: + if not isinstance(value, list): + raise Error("Option %r is required to be a list of %s" % + (self.name, self.type.__name__)) + for item in 
value: + if item is not None and not isinstance(item, self.type): + raise Error("Option %r is required to be a list of %s" % + (self.name, self.type.__name__)) + else: + if value is not None and not isinstance(value, self.type): + raise Error("Option %r is required to be a %s (%s given)" % + (self.name, self.type.__name__, type(value))) + self._value = value + if self.callback is not None: + self.callback(self._value) + + # Supported date/time formats in our options + _DATETIME_FORMATS = [ + "%a %b %d %H:%M:%S %Y", + "%Y-%m-%d %H:%M:%S", + "%Y-%m-%d %H:%M", + "%Y-%m-%dT%H:%M", + "%Y%m%d %H:%M:%S", + "%Y%m%d %H:%M", + "%Y-%m-%d", + "%Y%m%d", + "%H:%M:%S", + "%H:%M", + ] + + def _parse_datetime(self, value): + for format in self._DATETIME_FORMATS: + try: + return datetime.datetime.strptime(value, format) + except ValueError: + pass + raise Error('Unrecognized date/time format: %r' % value) + + _TIMEDELTA_ABBREV_DICT = { + 'h': 'hours', + 'm': 'minutes', + 'min': 'minutes', + 's': 'seconds', + 'sec': 'seconds', + 'ms': 'milliseconds', + 'us': 'microseconds', + 'd': 'days', + 'w': 'weeks', + } + + _FLOAT_PATTERN = r'[-+]?(?:\d+(?:\.\d*)?|\.\d+)(?:[eE][-+]?\d+)?' + + _TIMEDELTA_PATTERN = re.compile( + r'\s*(%s)\s*(\w*)\s*' % _FLOAT_PATTERN, re.IGNORECASE) + + def _parse_timedelta(self, value): + try: + sum = datetime.timedelta() + start = 0 + while start < len(value): + m = self._TIMEDELTA_PATTERN.match(value, start) + if not m: + raise Exception() + num = float(m.group(1)) + units = m.group(2) or 'seconds' + units = self._TIMEDELTA_ABBREV_DICT.get(units, units) + sum += datetime.timedelta(**{units: num}) + start = m.end() + return sum + except Exception: + raise + + def _parse_bool(self, value): + return value.lower() not in ("false", "0", "f") + + def _parse_string(self, value): + return _unicode(value) + + +options = OptionParser() +"""Global options object. + +All defined options are available as attributes on this object. 
+""" + + +def define(name, default=None, type=None, help=None, metavar=None, + multiple=False, group=None, callback=None): + """Defines an option in the global namespace. + + See `OptionParser.define`. + """ + return options.define(name, default=default, type=type, help=help, + metavar=metavar, multiple=multiple, group=group, + callback=callback) + + +def parse_command_line(args=None, final=True): + """Parses global options from the command line. + + See `OptionParser.parse_command_line`. + """ + return options.parse_command_line(args, final=final) + + +def parse_config_file(path, final=True): + """Parses global options from a config file. + + See `OptionParser.parse_config_file`. + """ + return options.parse_config_file(path, final=final) + + +def print_help(file=None): + """Prints all the command line options to stderr (or another file). + + See `OptionParser.print_help`. + """ + return options.print_help(file) + + +def add_parse_callback(callback): + """Adds a parse callback, to be invoked when option parsing is done. 
+ + See `OptionParser.add_parse_callback` + """ + options.add_parse_callback(callback) + + +# Default options +define_logging_options(options) diff --git a/python/tornado/platform/__init__.py b/python/tornado/platform/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/python/tornado/platform/__pycache__/__init__.cpython-35.pyc b/python/tornado/platform/__pycache__/__init__.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..392226c4e55b6db732452fed10dfdbb7303fc400 GIT binary patch literal 172 zcmWgR<>fm0Yik$-5IhDEFu(|8H~?`m3y?@*UxT7FS(OniK1US>&ryk0@&Ee@O9{FKt1R6CHB#X!se0KQx=;Q#;t literal 0 HcmV?d00001 diff --git a/python/tornado/platform/__pycache__/asyncio.cpython-35.pyc b/python/tornado/platform/__pycache__/asyncio.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..66647f5cd9e3c2cdc830316d800f08912c1f3f51 GIT binary patch literal 7630 zcmb7J%X8bt8DEeTNl_&Auq8i|rfAY6Vw=!;xR1C=YR8sSr}1NKCw5q6KnPe$kU@ai z1+7TqbUJY+Ikd^4(?icaw9|Wg&b66oFFoRvnV$MDY32SYO(ls_rvdd z?uV6DtMS!0e}DJu?+Ec9G4VTz{&`&bmL-HQL=UYc{E`SQF)WE*Nes)PR~EyH=vBmU zM)YQcMHFQbRzH!n)7)1@SQo>V=(U8kyO2+B zi(XsYiEfCBoEP^RLj7Abiyg_DLbzV*8Yk%QR?#lUto7*D4F~h6?;msqGD!>_{2e z$(FSHuHKEjAnx14*dK?|b|c@u9jnOoWBbbWx8r!!Z8W-F`@U2@=ktOnk(dDCmxOem7Q|e19f;sn`3CJ&4u*@-KI~ zwr#j}?@8SV5_=SD9nelmYi#pBSo#~RFEAaJi-zv>1NA=#%*)Ho^-GRro6IdI2eV)rL-n%Kp* zFHCNbY`KXYN$KaSF=>MsVn&BpDj3llZ7J#ckdV83fSrOB56=N=KEW(u#2v+nJa?|q zuz^>m4N8764ivy(z4>*9arX^Z%Qb?8mhD169NJZ?zCCb`)wRL5Pol8{xmNEydC|RP1_5tcvCPB#1jf1U1lITI?O5e2|@6GL%zuf=DM|soVH3 z`J7HAop(2M9F7y|1jA9RlC!YE!k-bJ;1(S?xf&b&3^@_F4sr$^XdQNW$xYn#w`0!@ z*Kf<=Xnhonp5GV;q5pierCu6tdU_oiOzyP!I||(dZfUrlo6u-i5s1&>(rq-tTCrxW z+0tyOX*I3A(^Deo?!UfQB69p@aFH-B;?g5D&>zB3h!5Y&qymvVw8W+NUlAX%BuGRh zacaFJJ}QfkDn!1`QUVo8O`YN)JSfvd>w{C`4$;gA74nm+*shTPtxQa}J9V{$tv z=tR+*q-`aK@qOuJvgp-~fCfI}%!sEmJl2^}$1tN;bHmUX_&O~O{IoU*LK$ytr_F2u zwoJ7GLerd}-f1*H!KD|_2tc=D9kW_i)mlVX_GoZFVTR~724NZ4vYvjQSS{s$G2l*Kl%Vvq+0V2oShRlt|bi0!I4uYSji*Ti<6 
zp3p|K)G?1ARscJO5X0xP?X7HX6LbHO&utx=i;Na=$YZopVNj3J}d)m%q>Tvkcim*@1 zAV_EK+`MvoEoEY3Yibb>GXbl|h_IH)Gp%ORAIIpFT+=z)E+f2v=b92lVJ%zp)`Imo zButWC$o}pa)h>wNAUZ$8ho*u0sK7n?T<6_`76 zb1Z<(iv?JR^35@$*qoKdy4Go{CrBWq0P1_xka&%Qe3E)kQS*H?X)CjE<6hIMxm8aS z0f}8bGf8Fip;KX7XH`(EM0Aw++vAasxSMOhbX9bh{zkuH!9Oy2K3F|7*+ zmnz(58LN>CW#j(?I!CAjY`HLgXuzrv-5Z@mq*>mgM8|QD1JbwkPY)}hX zdlmn|~x+imlA!20BPm04-%#;7M5U?t4!&<8+%FW8Pysr<*%q9U@12uP)Rr zUWJnvD{9aK7%-9D_2zI!aF8LuHL70A3#F@af=mn^cdS%24)aVuY4I#sli3k;)#3KllLiZFO>P3wG2A3uw2y5^7v{BOB z1KLFHhRysEI>oj{u%!_O`Xsc8ZV+`jvL+^^a_mLOL^S>a+?F|cRP-*LS%Yi?d0OZo z|B@;#58V&bN)ilZ+HweSdbWY=J)PzBKaNA>h&Xsss>xAN*|e0&QxGAyWR%v6Nfk;M z6&ckO4#+cUQ#S#8LRZm<$}Gay-ic{EGP=S$5pIPf$n;6mkX}!S0cQ`7^eoI2BdyU3 z#p7g}i98X@9{(1r<#IXXEHiu0NDFoz;*M4Xu9gi`e0aRy4qapYuYTz4Rmm4TNZ zQS)PJD2*`!%Yw~MY2X!VjuKLnvV~ljUw}$C&&~G}<2V*xhPdLlxY*lrLkLRf&e{90(3)CZ{QRk?6jhdgM>7AV1${DD5=;&ZG zf>suZFidcn=s6#zLlQ*=&X=0j^4xr**={a3o6QQYa5H^)y9fiGA7O znFA_Lq@PWmQSr25j)*u;XUD_5%;9c8Y5RwYAAQ@@8<2`dcE4)`j*m9VQO(264KDc%B16wycu-T?;U{Ad_?@E)i#~F1oJVn0R!9fnL zHw;%h1x+^R5X;?(kdToV4x8Kr#bSzcniLDJfAS!1PA2dgChEb7e2$VOJM!{1K8Ia~ zQzLa(7pb9j(pttzuYC}`%+Ih5G#Py9oAh) zhU;$hBcR&1!)x^W#%grG z|1+A6>KBzgz?G6*Pm81{wMNYnbvGp`&T*A6x2H+|@}eW3zf3vv+t@@9)6Ti78@389 zHn&|=!3M$RSjp9A|DP#!Cm?@z-wg5vOwI&L;S>dm&s(gy-WIT)m6M;rwnBJzAJ%m;-@L%of3 zl5(S78!tKG(~R|A#uH<`vuhuEazqtO4mbIsfC68yLV`QmbpM9unnJ8V)GJ$0S*>!z z+FO|b`cdYq)-dr=rc3<&1(z}1CQO$`3MgT4`{#*gaATlEHUT)0sAso~CY%_h5BP%< z#oqfDO-Yum(=}f-x|hd@2ND4$wq3iQD#I6z$h3_1YN+*+^PS2K#}UBXqi$#7?$kM)f7(Qbi+X=5fGLSJ68f znKO6-C`hGHnqpVK4-p?i=^)9;;siwhDj+JH4cD*vD~quRMhZ@B3cyF=GX))xh&K5H z(b?|H{0zumi+9+%dI7-xuzcxtqf0(M$|)ugJgAIBZX%WCOKtBHdmRaF*B literal 0 HcmV?d00001 diff --git a/python/tornado/platform/__pycache__/auto.cpython-35.pyc b/python/tornado/platform/__pycache__/auto.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b2176dc1e698cc3aaed6e000a772c8479b3b0b7e GIT binary patch literal 1231 zcmZuw&2A($5H9!pcV;HLn-Fp|9FRsLbKn>dVwDXLiHst4!$NDMUdQ%iO#IWbJ)7jd 
zkHAat09?6q<=9tFyuzL+_aq}iu;kBn)#vh8WqWTl%Kv!(+Yi5Y0sex2wH=&)g*|*g z00cmRNI+zu378Cu3``4(7ML~^Z7>}uIzSSm1-c7G7epIm7k=)+RTun4v$+Gh4`u+x z0IA66!8;6`gZ;LlZh`2-RR;c-$7~Ed06hegrwzpJHIy9m=+1N(_I9^%polRPV~}GY zukABv%Wb&I!T$+_ke@7pAMc1;pr3$w3dOTT+}{w9MhNDD_J%HoNv;^dCE9jUmM(v$c`6xSZX6U1T(c0U zkb!%3Aq7>IzKPCTCfxKsbbC1q9RUF8*v@_)`%54Fh~6=Fz%<5C4` zoLFf&%}-s3lsh5O%p$c?ia;6X66Hj<=03=PhNW9+L4~7odO1Bj%yWuu;hn(^|540V z7z2ekA8C-W`Ho$x6`yVrhbGyv=Q(BIc&=#<&{^QwBmBAb8bIMef2(p2+GRRb4#~%>1JyBtrn=bHqBcak8FHvc}@^KPGhphR)HMK1>verm`H@>DLUo1))HL~->N{tdkLWAB}>+exn)9YV`(Lad o=a>7q3ZEc!vyZc3woRU7+X=}|wwJY9!>mt6q(fR{7ssvaAGsY*EC2ui literal 0 HcmV?d00001 diff --git a/python/tornado/platform/__pycache__/caresresolver.cpython-35.pyc b/python/tornado/platform/__pycache__/caresresolver.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e123bc8aaca78b98f371a0aa3bfe90b9fda7b31e GIT binary patch literal 3203 zcmZWrOLODK5pM7Sf)pR}K2&n$1*b|WGqIRQZKYBw$7NZoq*QjaTUza|3&jKkfuRHn z1h6xp)>5r5SznS{&iMhUk|E!-g+9KKSoo3Mi=34Q9`t^$g^lsBdUV z62k^X0K#S$7?Bo5j-hQ5Et)$NwJGXQw84vCV?BoKU03%G0x8tWnBb!t(Q&X4E||tV zcN!=WxmoHK=fVwld9|CwQyIvsQK#eLIX#b+i+|vzS-Lw-vhX5K&)gu545K)gB8VVk zC357tM}@lxt}tD=xy+(vD2yalp(^dmq<~0e)T%25DAxi$aHq?{4TIF33U|ro%`$mQ zxshh{E%`*;m*;{VU_%sdWX09q%8AOeSr8}CnXPoWDgziPY*M9=$z&ER;^b;FVV5V9 z{onXckDoj~JeW*AFj`gS#s$x1X>tWIVYWaJu>Wi%taanG+;?<*cnUG^$0IRvj~-t> z7y_9J%rzX5m<52sY@SStOr}AUjTB-q5&n5m48(lZh5?YR#|KR4UW2@7nCGe1G_!sI}nEjhB;%}C^M5}_WA%<|(9 zA!HtY*U=8p)E>H57Ucsfh~@}F42k?26?K{;dMzH#o8Vi_w<#_cAd(4zT08c73V~JL zDnFbK9od3N-3pgd!W7-A%+sy>DjS_kkY6^ZWqx3*$WHBp~M3U{%gL^6}eNc4A- zckp`ihl9QGQw2oEj}Lwqq~rZOe;5?O__HhwlJTin5MAF7zoI9a;CSZR>jb)`c8>voPd*z)us6X@l$E zK)kAvT;ZA`B>>Ah$@dJ>H7Yru+%)J_gVd)6XM;J81F3-F2JRUAI{lnq<@mxTd9zw; z8-9ac*RULNRy#gSf3#ZCT5WY$&FlKN7FLHSEtorOmq*t)IoyA!o6k>9z;nQk=@NDWpmjWK$sPoIgZ-1kC;pR1 z2cI6E`k(ASG>FRkn37xE4R}!pxiu6@`Ln13Msl=iy9CQ?F+!O13_t?Zw~^V#@}Hn7 zhKQ^-em!8r?f)<-53M=~bI|LE-N0rBCqz>d!~lRPqfc%#aRwk7l>XHOlST6yGvWI> 
zHy}=*aZE7-<;?`{vzsOrfCxRfEe=jKp~9g*VsDe?ZR)E0t#OCOHM+6s2BF#_`41{K zxCnwSx-{?6yw5Z=LHDdqTcB8yP4fX;cSdyM&=-H<19|xSKhX_JC{CwMd{h2I#U{<) zp_>k+?{Q78`2REfJBHsj^j)UeHLSl&7r3WCv-p%d70pKBHX6>Xwzk5d`8{3)xri4` z;Ln%$#lPq|yMLcvIn02?CQ|IK)Y=a693@%XCkB<_ zMwv7kY`$)ok2|~cR}SHZ?jh+LH+*A|4P_|F#*j>U4y%vTS*BZ+A-Z1?ekFx#$~XB& zn6_lJHw2SlY&kR#Dm6@I4}|*D1r!*(zbg5F+m+WvH(BL7ml|0yi~uF)9-jL;=Q~g7&0eql&maEw z?fo9nf9TTVf&UUi=O78u2vktwP$npMXyj1t(#WOUqmf5>gGLP!e8Q#7r;%?@gKyHP zX?c&bCCXbgYLWO|(xAkrzd)u>vPB1NdJbuAzP;OTo@_iQkEAxY<1)|7;&vj7GEa-h zlqyK`StfH?n8>7Mp@VV~%(BQ#N|gtbc@gt!l%?i)@B+Z8?^P?&o-VVwksB@dXA-0^L8On1c&f^xoag277asRSVK(d?<|` z1i_L2ty zkONMG4jZKYXF>E>l{mMgp47ro27vQDtOUAxWB3>eg&!3D*`x0OgvXE%EVGy~?1E!f z=&WIJuh79NW3Xn`zIr*okjlDo^2)-mS6Rhwe`)Il$3Zdqp>^kz&jugky;JpuyK9?k z8ygSrZGE%5`J#>OlWXfXD}p^lH83jO-1zIVC1xc>S08!eR{@2r0v72~zpY$Gz! z_(2&*+4zynXJgdW?Y()LCAW*I`Xt|v^%za8h?4T+yXaoyy4ek8$JJ68rbTMP@CwxE zyCCGPxbHZ=a7DY*6+OIN@T;OBu7keexo%gqvAW{kaCl`Evy)}pH3kb|F$b3){_Xdan_pnDVXDHFRkRJfkrSaeudgbKXK{6qlsfH{0<2r9v6bX zvvuKY{g*xUtlvj80jtw!_ zi=JoZl~CK#)AL;Z5tsBb2;1R`tHKv8wx(BGgOcG>7Y6+#%YKbp+%FRYAODV!hF!Sb6DOT>2weVK{Wcd6ca1Qt+F_WN$vk7e8-`Ej zQFgx548x?1!%)2sAvOWkP3CSf`3)0`<7M7k&L)2oyWDWBH@dzrx_7%SUccK?yw~rj zkMLpTlew8IC|4X`UmcfEV_Z>KZjP>rEpvzsFr?PTlEEdSu+-2 MyytX9*I9Lc1jR%txBvhE literal 0 HcmV?d00001 diff --git a/python/tornado/platform/__pycache__/epoll.cpython-35.pyc b/python/tornado/platform/__pycache__/epoll.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ad304648dd8b331fc3fc696b6daec29c07b0bce6 GIT binary patch literal 824 zcmYjP&2H2%5FY1m(`~!$se;?82c#Uj2gC^>1S$w2l~SqDLlj}TiMMsJ<6t|KR=pL$ ziPu2l0eA>r!BU2`^%h8I;-ZIsrKXl|z{WdC{cMWbhezGT=E}qWe;K zzF&;aA3WdFQf?hG%bV%T*E?EwQ=vM^6>lALLbuboHq#x^_MfKq%sQ^@>3MZHp=iFzOOJ2J&(0y=)4ChP1*JgFH*Qs7c9SjkDW|k$ic{*xlq%iy zGT1AWe(0H8+zf1H4T%OQh?xX?JwVWZ2j>uIg$(;*>_?O~x?YgZd)FHzH6a%hI%q=e sO&TC{pqyL_nh7oOCo@^N3CuS^7gh-A^GAy`ABUaI5i+tFtwihbU$Vl@{Qv*} literal 0 HcmV?d00001 diff --git 
a/python/tornado/platform/__pycache__/interface.cpython-35.pyc b/python/tornado/platform/__pycache__/interface.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..32d32bac37d55c772d5d5065c8d4d7035b5fba98 GIT binary patch literal 2360 zcmb7F%Z?jG6fJvfkK^%7c!+G+lvu!6!j=+=1qe_^PC$_|GbkAdvMhC%yUOEYy1P16 z)p2GPEC^r0hBY6+AF$yUyk*4~AZ5k5)$NH15gFR*+vR%Pd+OeEs&4G=_I`i<>o>o3 z8T*56JT0_u<8?7?yxUq zo5>FSfmPe=8K!rc+-464-HX5O1UKUR&^XT%BNcZGQ>K#ZtT5JdtmC36tU-9;RS(^%dJ%YTO~&Jw z{fRI`wCH#2D!jP zUwX%Q-7bu0>=|P}phykmxOs8#P$5YKH%XC-p~;8psY>`vd%hpXU)+ht_mA&>dv_ei z1ImctbB*Lm|k^>Xsq5oGXCCri%Qw2m=kgb3K^pdaGX7-xwNutjzZxHk ze0oq6cZ3(y`z8_T^h9OFw9v(HR_as^^M$>YohEKtXXx^~uD}$Vub43^&R2U*`6$4T z2>ZLV*Cq^W+8%5|&Gke5xAD3MFbW(*CSaSC4-gg91wDzIEg~t{N$tQ0W^QBHxE~E@)4ADPuJf=G320p;w z;6tO(aqT*e`97){$Eq-CihtqH+2c}`3Y7-FG$npNTBv-DF*xxR&ZWqSHm2G{S%lC8 z>O&6Vgw;Hz4aRVCoF|I97m(P}oT)r`Dyz8xJT2gHVsZy{5@0S3!k+V7LHthGb6zMr z3>iyOCGxVU!PTIUow!a%%y9+;HpmJL@!BCif>UMS-HgJrtE=Rv2g;W=uY^(+>YpkI zhl{7vC&Z|$MwL4(aYD>+K9t&v8KgrhDsj$`VlV4KrKTFF$EAKEP&m(VK0%fNDEUcA zD3?cIwE^mf{A>qYhTK}9%{<7hGS|?(>Qp=!5a)v^Iv4#%l!s9@aP=hI+)TpKYLC7A zcNSh?vwg&W8CpL$i7Fg7F99|$-l_m@4*w6BCsv=H;-ujMcf%8040S52(m)z7p{5h8 zPT?zIwy2~ZZNTf*@X&pASO;TpT;ZCTTx>N-x8NGt}Yr?+<=5{Tgk2jF+ zR!HlStAl-iT#1J61=_v+ch>gZ;B|>>2Niy{foG@2L*9=D*H+yq%0;H4Xw{3N>fT1X zA4QK#k=7&IQG{DNiY#4Uc8|<;GH;N1lMG!j8-{3acVIYPN5t!QJB@y~-=bd_L2UaD z1}N;jgtP&{k~-`bnHHHg8M;4Ko$BNcD>veVut8(pPb=)7-@tT7YV0)M+v>Of28?rg ARR910 literal 0 HcmV?d00001 diff --git a/python/tornado/platform/__pycache__/kqueue.cpython-35.pyc b/python/tornado/platform/__pycache__/kqueue.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f4ee35ff1abffef8a590b988d56016fe2f9d10c8 GIT binary patch literal 3103 zcmai0&2Jk;6o0#3uRmftX((w+8{vK-8AysMBoL|~Y3-t_Y0{>l5tggfW_Rswvi3SN z>$I_w3qf$=$REKKi3>+={0n>K0tfzqp7^~PJ5EXvo6PLV^O{Pw0`T6>s95x zOoFal{0lx&%}S@;Eg46HByb=UizLna;a(^qmR9;A>?KZn(DQhkwDK}cUN{n)s=Zfh 
zW@>ANY4LF!MI+kY*=37{=k>eVi<7Dx^m+5Z_gL-tF+nV0$URH}00wrLO$H9Q0%R?Xbr5+1I!pt(BKcE_#$t*5vZ8?+^Dq? zbo;G-*k9Nhgpt3{dm(OjJDzMMvFN#eeEjZ5ZUR5LEv;>_zn|tEC+vlZ6!o)v?e?b;cOZk;<$W#S++^sc{y<*D=N#%VUj7$jjgpKm2AGlz;a@92g^=J2r`T zDM5x}#h^J|uxyX1m`qACv~6*58V6WXxPbAFveH}~UTpi-o3iRgB5?ivst7tEDp3S} zEh|dUl;$3+HH3%-qTEbC&D%T1NSnnr%O|Rd&iZkWKUBecywq z?dno4iMK(EIi5Iu zCvi{hZtREc{SUD^%C_=};Omc`$_a3siUo^ktF$dJ>`o+$lO_DI6KaqiSr((3_mtT4nqOSr$&e9&>{e*Tf&m~lnyLT z8J#T152O>*=xT5zgLA-Cm}?1}mc;gdNTA=Hlz4T6`RdQF&|y{~m8b0j9b~Da4kQlT z?k;UAY^+sPpUA#6+m76h6kOA_qG0>e?B@D|jYev_{eIB%6=bI7d{rs0P8|`WQoU-I8G&Wus(V z(sKqmGwDzxCC6FAE^s(h2>u3foHcM=*DgvFxr45Jn4wp{adhg>e^qf(p<(<$WGKO$ zr5%%m$D!G_8BTx>ER!5<=jkq1v23b z3WRT-#audO-n~hiY}_=TH&1Js)$mUgjt=OI3WT^06w++y<1`bOL9398YrLB21c{2e z;IbE0o;W(=rqfuvpJp5D>uc*`o|mU|@u(GS$v25nc+*@Gc7u43h#S0#s(uw&74|p! zfYrx!VHte#9ZY0gR1q#1v&KBeOn%0kN6)%y3`^r;V}aFLiBGJa+YKBiEjkVw#vo$4 z5-l22ohH<2hrXR@A?buvRExz>OYLUfVD-hxqB-0|F?x7on!$28aGxY&t z@4R$HN8+AQa2zjkr36|N-)}<}m)+yT^1?|&7bknxEb#%@1UF}~z!OVO;KW(XMJ!ZF zvq4{Ds1vwkCG_=3cnc zO%jn-mL;x9d|A~esU2@^^EHcur4&wVo4A|i@90GOobyHUS!^5E%#t~0Ue3&Y@eeT* BcE|t# literal 0 HcmV?d00001 diff --git a/python/tornado/platform/__pycache__/posix.cpython-35.pyc b/python/tornado/platform/__pycache__/posix.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..565e0509bde55d978f0fe53e0c3dd3c9df3669f7 GIT binary patch literal 2130 zcmbtVOK%)S5U!rrK0Ld2EGL2>fe8XJ2Rl-vAQ2D>S=$l9tSvdjX0=jlJk#rpJ+E}n z;)jxx;m7a;_!0btzH;KsEhnmacGqjm91yeWnwswFs;{fQuD#dkwEuYb`^#Pf;7^!) 
z9MqrUP`{#x0R|`;#1^CsG7AP4WHtY)zFSN=8YvIvVE@Er)4Rov;6er+~oGi{YUv(s3m?Iw@qOn{o1U zqR>uvOPS;qpYGG$)1-RE6$UY}`T^4|G!SwWMj~S55FZPNi=&4_eSx9^I0krAD-13z zc+0L&TJWZxvEdmWk70KA0@g=#?kLKubkNx1&o^IeZIH5Ol=jvJ?``jF9&GjkOIxED zP1Ep5{RXm)G2gxHJsJL}gjB=b-Vb3u{I)DN!YUkY7g3lFUx=(6mPxsGI8M@dEq^5+ zWk-=3R)x&NxR|}CNgS4x=JoPica^Aklom>GaVDY<=#UNyV0W4H@%{hvV8gLwAIzS;XIx-ohH6cAR61k2nj2%N&2d*fLT%J)FX+LE!(S8A9-`F=H z?qHF-+T9ONgq&r~#%PpJ!=4-!Vh@rAU}$*r)`H5=<&ps-%>tv@M4_oRnZun9P1n~H ziG>{GuJMTTcoIa9ru94mCDu88ZOKDPWY*T{VbG8rQXM3Vkd!1-WbKXOq7=FIqzGdn zwRb8Lya<7<9VOC{W>T}$nZqe}HI^7vQvIY&g5XN}k1~DL-0;EDr1!5+d%pC@ jfFjiQo`T+nMmN8%5BAL+v@n(NtOpjtvX*S$_1%8}n$n?) literal 0 HcmV?d00001 diff --git a/python/tornado/platform/__pycache__/select.cpython-35.pyc b/python/tornado/platform/__pycache__/select.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ed404e5426db29953c13f542938ff4bf1c3aabd5 GIT binary patch literal 2613 zcmaJ@TW{P%6h6N0rQU70r3Hj!dZD$-1_>d7P>WiUsA?%`l@uba7A$A&O`O>4?Toi+ zR?Q1R@Wc~5@E7<2`~zP23-ihof1yu&XS|!;ma0zX?3pviGiSbY^Kz@z{QKFT-`;H! 
z{X+|n4gMyEoPfkcT~LE!lQM&Hle#A57IiJkZR*;TJJfZ^U=52hm%6UjwkfMoUZ-xI z@&(gcq1$nd;<}dBDQRe_$6IX$jmh3tk|q7JvlGcA_V3)k zR}>>Z%|}_1C&Mx-(_*;NY<|Owk@O>f5M|j;)IabCh4AH(ER$UN@mUI#E{>EBmfqeftm~$@C3rRH}Cej;)dn{Ta)55nuidWwKU(LxDD{ms#;i~H(9&oOAXqGzSMYL4PJ$)25(pl zJNMGz_{j{$z5$#0;a>}56+<#9iB5=~5{1SBZ!Yi_ukIU~nwnZP(83VcBv@-~1dg&~ z0>DKQ#o-{9s&Ob%I6||D5QSj2TN}h7^kiTt8@Cz!L81*Ko^7vw-a{Y=*?X}1bu{eV z8jaSXGV0wc`cc+?cn#%UIJhI``Ue77%qWg&)9T%7)nvZzGt$ldF6~(b~wm0uQTvyKe=H~s)z|!a`a}X;xd5k(B zUqliyRGsnxHj+yqWOR%rqjfswuA8a89NqWvD#xzISOZc8vWD%%MA@50uc?~j;cPGQ zHeQ?BDb9fx=b3PvF8JHci#Qz|eFCZE5Fyw%Y0q3dl~tTnQ%}J# zQzzMcZY|7bhT0l+Rf|*Ek3>9&KQODP)b>7as(1$neug30GXxZujLGXWOXv4KwYCN= z2SZ!?7Z}tC#;^iUTgp620F1S%XVI}q$DlT)M8^(2xyEt1_T5D~Ml3qCZ_`uVSAcnm zsY_o%3eojPpVBsKTd&Zr&9#$C+kS<1eXjj0ZS&#iO6+EHLvT(9P32bQ;l%*9c9T*E zuXqD`;w>i2Aj&}@%VkijBDYqic~Xq0wJzGpl5SP&m_6myk->~Wi3`P+fMtKs3su_j|t#QV@7lx0s6 z_zB(4B1pCZOL<$mZB08W`+y%#2hJNVAd*c(kd?>>v?C;fZmOttCpvY1A)`uFb3@tO z3(@Nxd<_8Wt9Vm2xx}YYmQE6F@H82edvF+uU2ZmwFzjcMl)z_x?`wGXD?XlWLBT$^ zXz0p(&%hR!pdcu1E?1Y8SKKx45-obPCTB&t!cBKiSM^MD zs=InmRdYDwl@gn4Ne*JkPHe+T9K(ihFklBU3?p!CKO2Vgl!pYUS0fJe9wdQ)Ao;%k zR8?Q5N0AiAP4?+i=YIb4pZ{L7IzK=ASKs}UFF)K=>TlG@r-=Lwe8b<%Ddj5VAhnd6 zQ@*9TIpyS3H?N$$>K2q!P~D<(ippY(yz)!RDM@SO%gQOI`HFHXX?{jIGiiQSIkRbg zPB~S{7nDD*x(mu#Fyj`Lzo@!P%2_hcCFLJe-DTx0tL|~-99P{F$~mFBE6Q0>-BsnR zns#O7pH$s7<*cdhDdn6pr4{9$R^2noIb)u&(zEHz$iFm_|JX?WT$-OzzOB0Fm2=+o zpH=?Ls{4v^UNO&e%Ky0PUQo^j^ITQ_Mb*8eoJ-k!mz8r-b~>;8b=7@UIj@?!1?7K2 zb+0Jrig{jC{#DhjDW|5ouPNs>Q@W)5%c}dja$Yyj$CQ6fb>C3V8_M#|EAO~^VX4o% z%gQ~jKEmN0SKcSpi#$u6Pbs!Ip}b}FB8NsNl)EC0R+P7tHu^LwR+W2FDo!fz)2whl z!xMJblzU2jgpuBqkBxc^B2wA5l+Vm+{_0-N%%B zPU_An?~0UuPI=drcS${cTe&tz*1wfAk5AuG-U2t_o>%V6?8OcJC|C7p{gvh|OQ~DB z<8FJQz2!xFp4YRZ9nap}>x7Z#+S+TjqCnfto@;Lgy4Q3A`~H_61VO(xJG;5l3GHs+ z4t&q{y(qNzgMqCFJ-gZO`<+%Z>IA(|y84|h-P9dSvZp&ygh|?go=qp|phq_PNZ*=m z-;eB8vxlYGgAmI>%V;m)oL+ck*7f>c&+YWK?VxAx?Q~i@wwY-EQfLo)uBZKd)_Tvp zUS#`Nw5)KiiKi&wh!dOGX%Y$qfhW7)>qI*lD4uyblnq_GR(rkny6mgx?RmO3`yCR3 
z^+LSZ+zJDK5P9`Zw;$*zuDG3N9rlVB`nuDL>g_?VMXErz3rTdQocyCy;o{P)vX2&0BFV6eHHs&m1@<1aqht>PCERT<+dxHqgkSh$L z_Gj^%-}ZX=DdVi_rf=n;hn)jN7tW$atWoz+R*|-hPl!5LI9=Fd2l@#s8w@i^>HNElVC55Y8FN zgPMX^&Pg7HAeQrz2Q^(#?xN(;UdZ%wW|DtIlm~uFu7lx;DrX&j^kMfgi@@ep5%CrXLpt@#1m| zeWgEvvhRN7_FGTh3vp?lynFk{o4qGD`~5r3sQKhUfYG09dfon$ey4wRYtZrCtGyll zMt8dvJ^`|l$Yj5LzZn7A-6xTuNVWbxmR0ZcI#IoTosg~~QPzyLX02Mw)}nQA=I|lb z(jG0)JNfW;7T??WhWC*Gw+K`m1Wv@9m(WUUe?f}C2{6s$9vGlyMFsT~2q%c>;`mVX zlRZm)MGmC6aqnO)0U@Z@V0$OBz2_~jFBiXpESBF6HrGq~0w&gHki^S4zi#jhH#ED` z`eUp;!DJE1x}`5NPvDy53UjX@xr1*8hb1d-E#P}_dKw(e$&P`6pb#*Wk>T!%RTe0c zwox#B+PJ*!MI<#3&?MxARnsR-nvG4!<}%?YkvRv z0wdxgh--5n0;&otNECLph?{Y+Hf3khbo$1A0$qn|7TPo1*pO^l6J!hfD@i}I%kts< zRi^Gwc4A}B?{kyd3%v+Z_HdKD{!=$8O~)|sXq4xTJPKh932bmqL51naVp?qoO|JKv zU9Vn`XX|w%Cm~<0*FP9E{iLQ`ue(93Ue{MKtQ@U=ow+xd@P6n|G5Iu;;mwzvT>sOk z_z`@=B_!2c$*NZHFJCQIOVyHQZ42M*L$tktZ#V-G9K~8e(I~45YXwvN2%Hwg4ve%S zd2miZRP!$YsZ>5qSe;E)6;aj{33*Z)w()6j8J=sSW;TTLf>M<%*lucB5)_?q2b#lGAHtSv zUk(RQYM_-wI{}V9(Sc-714uxkr-bWQY)Ee%N*CBb%lDeSLEr8+_o+5%?}I@HIt3Q< zsPDBpZRlmbzmF9WCNh$S2;PMf;$lKV1@)sSl7h62VIJ)R*4?|*r!HR#cLtFg?DZ}U z&pvJIc0!2tJ&4LTeIK|X^K5w#5g;UWS2$);RXPxw+Gaoc)Cyqu@Ri7Ta)G+8CN6CE8xrD@LNd0k;63=rE(s*no?hd z{Aa0eLIfhbp?NS3C@#4hM2_qR}a%3g(vb3*v+I1Y97luu}|834n*ikq-_~baMXF_%Dx=)!t ze8=3(u@Tl6;&O6z;!-D|vZ2>FTRFMUMtBqT1qyqJWz$5!T%q1}!+5?fGKx`iLrob~ zc%`!fq#wb@P2Jhv_H1MbSSE2Vz2+qt9#1C-m^sk~t$TcWvV)H{m?4eOhYgxlFND;Mfv*!sU z=Kg@C8<5?>4SBSl-^evS?@ z9enJ2PnJ8s_j7y!$*d#5E7;VFatB`FP=G20^gHE`x4m zr6FqLDw=0hoO+T|Uj>c9q2`tTD^`NuAP-`LV+Uj0g<1L$u^?f%Mb&$m$8U@&H}pV7 z#^we6AKk>@#UVhol-jK*{V&tjGwKE&Mn_5-&2VNeX_i9(?JSNor=qGb-6D&mw4k(4 z?#uDPkj)bFvc&&BHt=wX3z&4xN|uOSqT3>s+p#6SZ6zDx65qC_FVU9^u*4yf#XLQB zXo&>wdST-`M6B=pZ+!HZky~HZckmOJ+8y8P1!5R-7GP|ptc9!aAP6JFUkeZK-n^q1 z(AW_ye++x`gFskh$qj&AZd|;_^}PW#J)c8&}C9v>iKDsisu#wD@FhjEGXfM1skyN~m*K2KiHB^b({1-o9x$p0M{ z5>ho*FliL5+oPLl}}eldfhq8G>@?C}9OpnA5kgDgBrUk;#Z73xmE3K3<31 z?fN(i3TgT)Ox|Zg>T0aSX#)=>>qSFlx7eh?CRSay_%K24W+09k1pAMu6+sq9m@CME 
zE#e??lzBThi{B9=4tmi0RLeU5y`2$AkTn5dP_-ZAjBr4QhM0Oru#inVBVymg8R<65 zrW}lhAf)u@fyfy&SSm-e!`zf3F?aUApf)`c+$!Fo@8?KN=dmNnVf^$$w;vuc`7AU9LnFh}BeSOtiFEd1s~>23E=~84e2>`;n8TO8_v}fl zY<5B^kFRY0Gsqar3eJSmjS+!(K(Hs{=0jjIXCxfX(1O&fqd&*>l|{*ipE1IPJ&O`e zRY$+gleou~$?`bqbRcUsy3F+ykWo~VPfV;vtQQ$!AZYm&(2~AjIRv8UQ@H0; z-{UzqxDN$f!Y$xmKfVz}_cI5+E6P#oE)s{r1pU$&_6pa;iGb~?+c3qqkPxCUkum z*NOO#zQ{)^qoUU8hs@Dhpf{P2L2B6cl*U?>5N@GHy35CaNuLQ1P>xCyHqc9u41p#& zvC_PS3hY&duOg|QEm?~v#PdS$%N%lr>Y3{C>T-1n|MK`(LV2N@*X-BAH~SQmTu6F~ zRZavve8bog#oPRK)TUt%Nx6hQd>t`nP?Ye$qLOlP(y+Y5l1FezRqLAwY)KCG;13=R zwpxBO3~4$aM?(p45NAUVfi#TR38OHv(Mr@0JffE(dpnSb99U)90IqqPY6;BF@Op>v zX|xlw|J6W4)TfD-R_0y{fk0xTmG~O*bxedCaX4|epqnx0j-g&2VB$95!OXQc-einX z?OI%>{Sq-Y@l56`f;DpwBg=N;8CtKr-sbIh6BZm7B=QKsJrtey^-r-Wpu_z>XMVWF zuj278w^%_&!FLD@whmUuHku9JQ8uvl=zZqXedd3deaZ^s`EK+1yV$Q6J?b2I)Asq^ zophi2Yglt!>FXd0T7fSEK+2r9&!0x=gKVFo9Uiey4qrZ+#t>kdqYcSH3RNKpM6l1L z6IF{)O$%#fIj0PLi5MA0zl8@J%@k(@a2oQOa8R&IpbyZ*FRE^dUIf}062^%im`Iy< zFywvX283S)R&U~+Yc~h*T~2s3lN$|}is?(2ncW@m?!iw3(-=HL@W<@i zUK62o>0M7;3^qVdH^G%r=S~;)2Rp!()~O5dszx*dLkW$mF|66J4baKs(F>K5icTb> zFsujhN;rJuFU|Zz;;#bHp*3*0Hseu}o{buuOS|=COs3IqC_pyoKv-ytXWHcgy$9u(+}^>4Sc7uN!$;#XLO*r+nE2M;ujvlVU@4n zyVEKpv+|b{-{!>JG0CwkhSW|y+x3Dl#Ebj0F*5>Ul+oN^7|#QDg6q-;$jdy_4-pbgu>G#~+MVYgaXQ{TWgX0Os;p*XeP)tN zQ(iOnGGc)yto1)-^7BlliSIv)3fK$GovG#|Y^neOy{K8!!Z-V*#31}#6nUU6Vt^fu zNXDfc`uazLrx=lpN0G~uh-Ap?Swu48r?QA-#7|`r$&lRB`|&}effa# zQ|bE#jGr>^7+`G+M%+Ig&nN8O#G5q#D?n+oAsV3R*NIVsVtU-v7KBN>L(tsry3L&OOEZ}51v1dzk zk_n1=FHA(8B>pH_PukQ*98w~@A%bT!i&+xM24|Rv3uC(o8#pEtq!uFlFqJf}$jc8R z-Sw&THL2NsFkpy~s0uRF9z(Klqv1sw7*Pn7J3tG*D#0zlxPba@ABe*!D$(E?2~cW@ zOjCnmnPw5BS@)hJSexP`br}PVRPLkl*JwPt|M}ZrcyI^nPM9G93Xu_;Pb4f6pHkSQ zlA2jD-_bCOq}Vwb{KK0|VmgO!_%4!3B`l{vAQF!Xos4gZw5^m6U#4+H3X?M7L57#Z zguo2$v9Z7ysi|zVG6opMvqM4FS&JrO;c5xM<4PJGClXSeN4QJM?I|+FrQJOVsxK!a zq`JmXgZnm`{aQ-G80bq6Se1-#Z44!V)~R59FXJ=RZ$ z$rXO6PXZ8=Y5-+1{Y@rBSYunG0HlA83AUO!1{y2?lT^CNL zb_fOl=z*}|zeq%Q#_IDlOh~&N6$dn>glX^1@tztz%{fl?GzU8DIktvbtl@7*N1B6i 
zi|>aHXR8}Tz#6I}rH9ur2Yb0SzJ{!)L5<-W5{%fqx4s|&TvN|iZt27!)9Ga#!`VrT zDK({E4b~rd(rwiKDNmXfc7;>s882COT`&bfYL(8S=0(%v#5CaChe>`GJ_YS}BP5%dddw7&1I$ffDy8 zcSyV)LIdxie!6BOeucl`5)z$ec^tLH5tT=GeoFY3@g+|Ti4#~qz-Xuz)MZ^uISdv8 z%dt{xI5Jcq9I1CTZJjFDV672w(>UJIpvqtpp07BPxE+aRMidqEX}nYf1|n@};3)95 zMX)eNWQxFZaPqw#%v7{CgN?W_Nco+fXJ63ki;#aH37hkc%e_H&3-&&z0Be#T=V9a3 z_pu?o91FXkyoWf$6k)!IS3bp>na%yaVM2zle36CobsT&qMvgcJ$(Ygzqe{9C6DUZ5 z(*&^oGc+>q(@b!kmr?PzJQTr;!ZP$)1QY=)XhE6Aaky5z5}A=69%hCer}02+hvgRY ztx6tX8$%O$mxp3bs(hD|AtzDx8ru9fF0Fu!f?RjRm7ZJ?n-hNocV0s#!&hd+MFNz8 z_+bzNXN&{p>5f=oN(S-|(Odr}le0`-Vj}QP!YhA^1?x-%z!R|jJSzStH^9{>z_Ud6 z;vpWMssErk8rykxV57X@~y zBhwnrw{9YdyTuq;x8!Xd(d|Z!-a!u@2vJW>pAZg;)Ez)&kuODoozPH^*Z8Ho-|2Sn zf<3|_(U>|b-h-j-o=PW)liSp;-Dn`70(gP!||;t43gzmPZ!l9yXxl%ZlvBTWyh#fv~u zLl%ICcC*{@_mdZ)_zDrQ8SmKeZJ+y*yaL2`{JG}@z6OHBj!A^I*-InD>3C7?=DqrR z8;{<-dpmo9>Y;gyC``>I_O&-^*XUtM{s4mX|Nrvx4)rkN^KIhuZy*5?=r1Nh$_t?k zklLVZ87Ym*rXVmVM!hU5RwZF>WRGyeqA?I2g>w|76FC{mDI^`ElA&sziWl#Qkq4=| zT2$#w#{~xd8bSoFxDCNZ#Pm80cGPU)zny)}i(0Soq(UL12TM|5&VUBf_WH4*&|{n= zv~t6p311msch~q3?!L9A8D4J zz1|pURu+lqmxYB0DnufW;>?H*h_Mws_z;oj^DOx?M~B>lbWuIUA9sPM^{ZmQI3{eo z_r;M3MJz%m{ih=0CA z;__u=RAB`YOBI?G(%c%f)MeO^R-~kwn*olOtfkya4)IDxrpa(TnHP&ZQbr&^;+a;U zAxlDT)|`1{H6jp*5b(RGu(%{%km?d*H0Dc|W+^Z8Hp4%33)e+HXeWAb9={SOk4pq9 z2?gRC#z7%!-Pg_Qjt~arjmN}z1?lgbBw!KQ3;lZMSzl5<|AHJ`ZG=p|cP0rzag0~aXbQsTa zQiBo+7MjW7JS<*()-rif>vc_5nP zO_QVPUuSZb$*(c_H%$HviE}D4s01>}Kml;$^h(M#byg468X|FMz?!ABCS9$0A?0{lEL;n({*h6g zm?#uJC67f%y?P6_-QwkDfYqySB`FD=Y+h<}TrL68$(^ixl zjZ#}Oe>=#$YRNzX6OZ;0b3e=E1``VN=5ix2AZ$jFb`~)QLRO;se#i4+veo~dZHR-@ z=tW$)ZvNE7JF93BK0s2+y$pRe`G*{BM3-}`NYk<Wab6n^8sI3bB?K^y>eK^E<~hPBs7sig4*&U%aS#_GnDmUQpP$_CPg zxOaQfZ^8gl-R2>r4x}#p1PHfGqoaX$t#!e`Q2{;?C9Nfiv!W`wRDPO$%|uo*)vtKQ zWW1_bBAa-f$+K8UEJ;C_TwEhvA~9f=b^si`M|z8A8YIAlMr3BcwBb8~3j|^Zqy@PR zmjFNXC(v4~1vf>sn8!y%PBBXxSu z?g5c=EaZgqk`F9pty0wzb&?Pvu7LM+;hWE+7tsenxQO15-eDFEt7?oTj^<^8X>?4A zDyp(-_oU8fy35Y^esP+JNS2)8wEX|BD3(T4w9Z&suEkz;uKI$=IH9E=G5JQ4Ei=2P 
z0nj$`5DzpPY^zSJO_Vh!am`_ for Python 3.3). This makes +it possible to combine the two libraries on the same event loop. + +Most applications should use `AsyncIOMainLoop` to run Tornado on the +default ``asyncio`` event loop. Applications that need to run event +loops on multiple threads may use `AsyncIOLoop` to create multiple +loops. + +.. note:: + + Tornado requires the `~asyncio.BaseEventLoop.add_reader` family of methods, + so it is not compatible with the `~asyncio.ProactorEventLoop` on Windows. + Use the `~asyncio.SelectorEventLoop` instead. +""" + +from __future__ import absolute_import, division, print_function, with_statement +import functools + +import tornado.concurrent +from tornado.gen import convert_yielded +from tornado.ioloop import IOLoop +from tornado import stack_context + +try: + # Import the real asyncio module for py33+ first. Older versions of the + # trollius backport also use this name. + import asyncio +except ImportError as e: + # Asyncio itself isn't available; see if trollius is (backport to py26+). + try: + import trollius as asyncio + except ImportError: + # Re-raise the original asyncio error, not the trollius one. 
+ raise e + + +class BaseAsyncIOLoop(IOLoop): + def initialize(self, asyncio_loop, close_loop=False, **kwargs): + super(BaseAsyncIOLoop, self).initialize(**kwargs) + self.asyncio_loop = asyncio_loop + self.close_loop = close_loop + # Maps fd to (fileobj, handler function) pair (as in IOLoop.add_handler) + self.handlers = {} + # Set of fds listening for reads/writes + self.readers = set() + self.writers = set() + self.closing = False + + def close(self, all_fds=False): + self.closing = True + for fd in list(self.handlers): + fileobj, handler_func = self.handlers[fd] + self.remove_handler(fd) + if all_fds: + self.close_fd(fileobj) + if self.close_loop: + self.asyncio_loop.close() + + def add_handler(self, fd, handler, events): + fd, fileobj = self.split_fd(fd) + if fd in self.handlers: + raise ValueError("fd %s added twice" % fd) + self.handlers[fd] = (fileobj, stack_context.wrap(handler)) + if events & IOLoop.READ: + self.asyncio_loop.add_reader( + fd, self._handle_events, fd, IOLoop.READ) + self.readers.add(fd) + if events & IOLoop.WRITE: + self.asyncio_loop.add_writer( + fd, self._handle_events, fd, IOLoop.WRITE) + self.writers.add(fd) + + def update_handler(self, fd, events): + fd, fileobj = self.split_fd(fd) + if events & IOLoop.READ: + if fd not in self.readers: + self.asyncio_loop.add_reader( + fd, self._handle_events, fd, IOLoop.READ) + self.readers.add(fd) + else: + if fd in self.readers: + self.asyncio_loop.remove_reader(fd) + self.readers.remove(fd) + if events & IOLoop.WRITE: + if fd not in self.writers: + self.asyncio_loop.add_writer( + fd, self._handle_events, fd, IOLoop.WRITE) + self.writers.add(fd) + else: + if fd in self.writers: + self.asyncio_loop.remove_writer(fd) + self.writers.remove(fd) + + def remove_handler(self, fd): + fd, fileobj = self.split_fd(fd) + if fd not in self.handlers: + return + if fd in self.readers: + self.asyncio_loop.remove_reader(fd) + self.readers.remove(fd) + if fd in self.writers: + self.asyncio_loop.remove_writer(fd) + 
self.writers.remove(fd) + del self.handlers[fd] + + def _handle_events(self, fd, events): + fileobj, handler_func = self.handlers[fd] + handler_func(fileobj, events) + + def start(self): + old_current = IOLoop.current(instance=False) + try: + self._setup_logging() + self.make_current() + self.asyncio_loop.run_forever() + finally: + if old_current is None: + IOLoop.clear_current() + else: + old_current.make_current() + + def stop(self): + self.asyncio_loop.stop() + + def call_at(self, when, callback, *args, **kwargs): + # asyncio.call_at supports *args but not **kwargs, so bind them here. + # We do not synchronize self.time and asyncio_loop.time, so + # convert from absolute to relative. + return self.asyncio_loop.call_later( + max(0, when - self.time()), self._run_callback, + functools.partial(stack_context.wrap(callback), *args, **kwargs)) + + def remove_timeout(self, timeout): + timeout.cancel() + + def add_callback(self, callback, *args, **kwargs): + if self.closing: + raise RuntimeError("IOLoop is closing") + self.asyncio_loop.call_soon_threadsafe( + self._run_callback, + functools.partial(stack_context.wrap(callback), *args, **kwargs)) + + add_callback_from_signal = add_callback + + +class AsyncIOMainLoop(BaseAsyncIOLoop): + """``AsyncIOMainLoop`` creates an `.IOLoop` that corresponds to the + current ``asyncio`` event loop (i.e. the one returned by + ``asyncio.get_event_loop()``). Recommended usage:: + + from tornado.platform.asyncio import AsyncIOMainLoop + import asyncio + AsyncIOMainLoop().install() + asyncio.get_event_loop().run_forever() + """ + def initialize(self, **kwargs): + super(AsyncIOMainLoop, self).initialize(asyncio.get_event_loop(), + close_loop=False, **kwargs) + + +class AsyncIOLoop(BaseAsyncIOLoop): + """``AsyncIOLoop`` is an `.IOLoop` that runs on an ``asyncio`` event loop. + This class follows the usual Tornado semantics for creating new + ``IOLoops``; these loops are not necessarily related to the + ``asyncio`` default event loop. 
Recommended usage:: + + from tornado.ioloop import IOLoop + IOLoop.configure('tornado.platform.asyncio.AsyncIOLoop') + IOLoop.current().start() + + Each ``AsyncIOLoop`` creates a new ``asyncio.EventLoop``; this object + can be accessed with the ``asyncio_loop`` attribute. + """ + def initialize(self, **kwargs): + loop = asyncio.new_event_loop() + try: + super(AsyncIOLoop, self).initialize(loop, close_loop=True, **kwargs) + except Exception: + # If initialize() does not succeed (taking ownership of the loop), + # we have to close it. + loop.close() + raise + + +def to_tornado_future(asyncio_future): + """Convert an `asyncio.Future` to a `tornado.concurrent.Future`. + + .. versionadded:: 4.1 + """ + tf = tornado.concurrent.Future() + tornado.concurrent.chain_future(asyncio_future, tf) + return tf + + +def to_asyncio_future(tornado_future): + """Convert a Tornado yieldable object to an `asyncio.Future`. + + .. versionadded:: 4.1 + + .. versionchanged:: 4.3 + Now accepts any yieldable object, not just + `tornado.concurrent.Future`. + """ + tornado_future = convert_yielded(tornado_future) + af = asyncio.Future() + tornado.concurrent.chain_future(tornado_future, af) + return af + +if hasattr(convert_yielded, 'register'): + convert_yielded.register(asyncio.Future, to_tornado_future) diff --git a/python/tornado/platform/auto.py b/python/tornado/platform/auto.py new file mode 100644 index 000000000..fc40c9d97 --- /dev/null +++ b/python/tornado/platform/auto.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python +# +# Copyright 2011 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Implementation of platform-specific functionality. + +For each function or class described in `tornado.platform.interface`, +the appropriate platform-specific implementation exists in this module. +Most code that needs access to this functionality should do e.g.:: + + from tornado.platform.auto import set_close_exec +""" + +from __future__ import absolute_import, division, print_function, with_statement + +import os + +if 'APPENGINE_RUNTIME' in os.environ: + from tornado.platform.common import Waker + + def set_close_exec(fd): + pass +elif os.name == 'nt': + from tornado.platform.common import Waker + from tornado.platform.windows import set_close_exec +else: + from tornado.platform.posix import set_close_exec, Waker + +try: + # monotime monkey-patches the time module to have a monotonic function + # in versions of python before 3.3. + import monotime + # Silence pyflakes warning about this unused import + monotime +except ImportError: + pass +try: + from time import monotonic as monotonic_time +except ImportError: + monotonic_time = None + +__all__ = ['Waker', 'set_close_exec', 'monotonic_time'] diff --git a/python/tornado/platform/caresresolver.py b/python/tornado/platform/caresresolver.py new file mode 100644 index 000000000..5559614f5 --- /dev/null +++ b/python/tornado/platform/caresresolver.py @@ -0,0 +1,79 @@ +from __future__ import absolute_import, division, print_function, with_statement +import pycares +import socket + +from tornado import gen +from tornado.ioloop import IOLoop +from tornado.netutil import Resolver, is_valid_ip + + +class CaresResolver(Resolver): + """Name resolver based on the c-ares library. + + This is a non-blocking and non-threaded resolver. It may not produce + the same results as the system resolver, but can be used for non-blocking + resolution when threads cannot be used. 
+ + c-ares fails to resolve some names when ``family`` is ``AF_UNSPEC``, + so it is only recommended for use in ``AF_INET`` (i.e. IPv4). This is + the default for ``tornado.simple_httpclient``, but other libraries + may default to ``AF_UNSPEC``. + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. + """ + def initialize(self, io_loop=None): + self.io_loop = io_loop or IOLoop.current() + self.channel = pycares.Channel(sock_state_cb=self._sock_state_cb) + self.fds = {} + + def _sock_state_cb(self, fd, readable, writable): + state = ((IOLoop.READ if readable else 0) | + (IOLoop.WRITE if writable else 0)) + if not state: + self.io_loop.remove_handler(fd) + del self.fds[fd] + elif fd in self.fds: + self.io_loop.update_handler(fd, state) + self.fds[fd] = state + else: + self.io_loop.add_handler(fd, self._handle_events, state) + self.fds[fd] = state + + def _handle_events(self, fd, events): + read_fd = pycares.ARES_SOCKET_BAD + write_fd = pycares.ARES_SOCKET_BAD + if events & IOLoop.READ: + read_fd = fd + if events & IOLoop.WRITE: + write_fd = fd + self.channel.process_fd(read_fd, write_fd) + + @gen.coroutine + def resolve(self, host, port, family=0): + if is_valid_ip(host): + addresses = [host] + else: + # gethostbyname doesn't take callback as a kwarg + self.channel.gethostbyname(host, family, (yield gen.Callback(1))) + callback_args = yield gen.Wait(1) + assert isinstance(callback_args, gen.Arguments) + assert not callback_args.kwargs + result, error = callback_args.args + if error: + raise Exception('C-Ares returned error %s: %s while resolving %s' % + (error, pycares.errno.strerror(error), host)) + addresses = result.addresses + addrinfo = [] + for address in addresses: + if '.' 
in address: + address_family = socket.AF_INET + elif ':' in address: + address_family = socket.AF_INET6 + else: + address_family = socket.AF_UNSPEC + if family != socket.AF_UNSPEC and family != address_family: + raise Exception('Requested socket family %d but got %d' % + (family, address_family)) + addrinfo.append((address_family, (address, port))) + raise gen.Return(addrinfo) diff --git a/python/tornado/platform/common.py b/python/tornado/platform/common.py new file mode 100644 index 000000000..b409a903f --- /dev/null +++ b/python/tornado/platform/common.py @@ -0,0 +1,92 @@ +"""Lowest-common-denominator implementations of platform functionality.""" +from __future__ import absolute_import, division, print_function, with_statement + +import errno +import socket + +from tornado.platform import interface + + +class Waker(interface.Waker): + """Create an OS independent asynchronous pipe. + + For use on platforms that don't have os.pipe() (or where pipes cannot + be passed to select()), but do have sockets. This includes Windows + and Jython. + """ + def __init__(self): + # Based on Zope select_trigger.py: + # https://github.com/zopefoundation/Zope/blob/master/src/ZServer/medusa/thread/select_trigger.py + + self.writer = socket.socket() + # Disable buffering -- pulling the trigger sends 1 byte, + # and we want that sent immediately, to wake up ASAP. + self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + + count = 0 + while 1: + count += 1 + # Bind to a local port; for efficiency, let the OS pick + # a free port for us. + # Unfortunately, stress tests showed that we may not + # be able to connect to that port ("Address already in + # use") despite that the OS picked it. This appears + # to be a race bug in the Windows socket implementation. + # So we loop until a connect() succeeds (almost always + # on the first try). See the long thread at + # http://mail.zope.org/pipermail/zope/2005-July/160433.html + # for hideous details. 
+ a = socket.socket() + a.bind(("127.0.0.1", 0)) + a.listen(1) + connect_address = a.getsockname() # assigned (host, port) pair + try: + self.writer.connect(connect_address) + break # success + except socket.error as detail: + if (not hasattr(errno, 'WSAEADDRINUSE') or + detail[0] != errno.WSAEADDRINUSE): + # "Address already in use" is the only error + # I've seen on two WinXP Pro SP2 boxes, under + # Pythons 2.3.5 and 2.4.1. + raise + # (10048, 'Address already in use') + # assert count <= 2 # never triggered in Tim's tests + if count >= 10: # I've never seen it go above 2 + a.close() + self.writer.close() + raise socket.error("Cannot bind trigger!") + # Close `a` and try again. Note: I originally put a short + # sleep() here, but it didn't appear to help or hurt. + a.close() + + self.reader, addr = a.accept() + self.reader.setblocking(0) + self.writer.setblocking(0) + a.close() + self.reader_fd = self.reader.fileno() + + def fileno(self): + return self.reader.fileno() + + def write_fileno(self): + return self.writer.fileno() + + def wake(self): + try: + self.writer.send(b"x") + except (IOError, socket.error): + pass + + def consume(self): + try: + while True: + result = self.reader.recv(1024) + if not result: + break + except (IOError, socket.error): + pass + + def close(self): + self.reader.close() + self.writer.close() diff --git a/python/tornado/platform/epoll.py b/python/tornado/platform/epoll.py new file mode 100644 index 000000000..b08cc6281 --- /dev/null +++ b/python/tornado/platform/epoll.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python +# +# Copyright 2012 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +"""EPoll-based IOLoop implementation for Linux systems.""" +from __future__ import absolute_import, division, print_function, with_statement + +import select + +from tornado.ioloop import PollIOLoop + + +class EPollIOLoop(PollIOLoop): + def initialize(self, **kwargs): + super(EPollIOLoop, self).initialize(impl=select.epoll(), **kwargs) diff --git a/python/tornado/platform/interface.py b/python/tornado/platform/interface.py new file mode 100644 index 000000000..07da6babd --- /dev/null +++ b/python/tornado/platform/interface.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python +# +# Copyright 2011 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Interfaces for platform-specific functionality. + +This module exists primarily for documentation purposes and as base classes +for other tornado.platform modules. Most code should import the appropriate +implementation from `tornado.platform.auto`. 
+""" + +from __future__ import absolute_import, division, print_function, with_statement + + +def set_close_exec(fd): + """Sets the close-on-exec bit (``FD_CLOEXEC``)for a file descriptor.""" + raise NotImplementedError() + + +class Waker(object): + """A socket-like object that can wake another thread from ``select()``. + + The `~tornado.ioloop.IOLoop` will add the Waker's `fileno()` to + its ``select`` (or ``epoll`` or ``kqueue``) calls. When another + thread wants to wake up the loop, it calls `wake`. Once it has woken + up, it will call `consume` to do any necessary per-wake cleanup. When + the ``IOLoop`` is closed, it closes its waker too. + """ + def fileno(self): + """Returns the read file descriptor for this waker. + + Must be suitable for use with ``select()`` or equivalent on the + local platform. + """ + raise NotImplementedError() + + def write_fileno(self): + """Returns the write file descriptor for this waker.""" + raise NotImplementedError() + + def wake(self): + """Triggers activity on the waker's file descriptor.""" + raise NotImplementedError() + + def consume(self): + """Called after the listen has woken up to do any necessary cleanup.""" + raise NotImplementedError() + + def close(self): + """Closes the waker's file descriptor(s).""" + raise NotImplementedError() diff --git a/python/tornado/platform/kqueue.py b/python/tornado/platform/kqueue.py new file mode 100644 index 000000000..f8f3e4a61 --- /dev/null +++ b/python/tornado/platform/kqueue.py @@ -0,0 +1,91 @@ +#!/usr/bin/env python +# +# Copyright 2012 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +"""KQueue-based IOLoop implementation for BSD/Mac systems.""" +from __future__ import absolute_import, division, print_function, with_statement + +import select + +from tornado.ioloop import IOLoop, PollIOLoop + +assert hasattr(select, 'kqueue'), 'kqueue not supported' + + +class _KQueue(object): + """A kqueue-based event loop for BSD/Mac systems.""" + def __init__(self): + self._kqueue = select.kqueue() + self._active = {} + + def fileno(self): + return self._kqueue.fileno() + + def close(self): + self._kqueue.close() + + def register(self, fd, events): + if fd in self._active: + raise IOError("fd %s already registered" % fd) + self._control(fd, events, select.KQ_EV_ADD) + self._active[fd] = events + + def modify(self, fd, events): + self.unregister(fd) + self.register(fd, events) + + def unregister(self, fd): + events = self._active.pop(fd) + self._control(fd, events, select.KQ_EV_DELETE) + + def _control(self, fd, events, flags): + kevents = [] + if events & IOLoop.WRITE: + kevents.append(select.kevent( + fd, filter=select.KQ_FILTER_WRITE, flags=flags)) + if events & IOLoop.READ: + kevents.append(select.kevent( + fd, filter=select.KQ_FILTER_READ, flags=flags)) + # Even though control() takes a list, it seems to return EINVAL + # on Mac OS X (10.6) when there is more than one event in the list. 
+ for kevent in kevents: + self._kqueue.control([kevent], 0) + + def poll(self, timeout): + kevents = self._kqueue.control(None, 1000, timeout) + events = {} + for kevent in kevents: + fd = kevent.ident + if kevent.filter == select.KQ_FILTER_READ: + events[fd] = events.get(fd, 0) | IOLoop.READ + if kevent.filter == select.KQ_FILTER_WRITE: + if kevent.flags & select.KQ_EV_EOF: + # If an asynchronous connection is refused, kqueue + # returns a write event with the EOF flag set. + # Turn this into an error for consistency with the + # other IOLoop implementations. + # Note that for read events, EOF may be returned before + # all data has been consumed from the socket buffer, + # so we only check for EOF on write events. + events[fd] = IOLoop.ERROR + else: + events[fd] = events.get(fd, 0) | IOLoop.WRITE + if kevent.flags & select.KQ_EV_ERROR: + events[fd] = events.get(fd, 0) | IOLoop.ERROR + return events.items() + + +class KQueueIOLoop(PollIOLoop): + def initialize(self, **kwargs): + super(KQueueIOLoop, self).initialize(impl=_KQueue(), **kwargs) diff --git a/python/tornado/platform/posix.py b/python/tornado/platform/posix.py new file mode 100644 index 000000000..41a5794c6 --- /dev/null +++ b/python/tornado/platform/posix.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python +# +# Copyright 2011 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +"""Posix implementations of platform-specific functionality.""" + +from __future__ import absolute_import, division, print_function, with_statement + +import fcntl +import os + +from tornado.platform import interface + + +def set_close_exec(fd): + flags = fcntl.fcntl(fd, fcntl.F_GETFD) + fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC) + + +def _set_nonblocking(fd): + flags = fcntl.fcntl(fd, fcntl.F_GETFL) + fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK) + + +class Waker(interface.Waker): + def __init__(self): + r, w = os.pipe() + _set_nonblocking(r) + _set_nonblocking(w) + set_close_exec(r) + set_close_exec(w) + self.reader = os.fdopen(r, "rb", 0) + self.writer = os.fdopen(w, "wb", 0) + + def fileno(self): + return self.reader.fileno() + + def write_fileno(self): + return self.writer.fileno() + + def wake(self): + try: + self.writer.write(b"x") + except IOError: + pass + + def consume(self): + try: + while True: + result = self.reader.read() + if not result: + break + except IOError: + pass + + def close(self): + self.reader.close() + self.writer.close() diff --git a/python/tornado/platform/select.py b/python/tornado/platform/select.py new file mode 100644 index 000000000..db52ef910 --- /dev/null +++ b/python/tornado/platform/select.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python +# +# Copyright 2012 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +"""Select-based IOLoop implementation. 
+ +Used as a fallback for systems that don't support epoll or kqueue. +""" +from __future__ import absolute_import, division, print_function, with_statement + +import select + +from tornado.ioloop import IOLoop, PollIOLoop + + +class _Select(object): + """A simple, select()-based IOLoop implementation for non-Linux systems""" + def __init__(self): + self.read_fds = set() + self.write_fds = set() + self.error_fds = set() + self.fd_sets = (self.read_fds, self.write_fds, self.error_fds) + + def close(self): + pass + + def register(self, fd, events): + if fd in self.read_fds or fd in self.write_fds or fd in self.error_fds: + raise IOError("fd %s already registered" % fd) + if events & IOLoop.READ: + self.read_fds.add(fd) + if events & IOLoop.WRITE: + self.write_fds.add(fd) + if events & IOLoop.ERROR: + self.error_fds.add(fd) + # Closed connections are reported as errors by epoll and kqueue, + # but as zero-byte reads by select, so when errors are requested + # we need to listen for both read and error. 
+ # self.read_fds.add(fd) + + def modify(self, fd, events): + self.unregister(fd) + self.register(fd, events) + + def unregister(self, fd): + self.read_fds.discard(fd) + self.write_fds.discard(fd) + self.error_fds.discard(fd) + + def poll(self, timeout): + readable, writeable, errors = select.select( + self.read_fds, self.write_fds, self.error_fds, timeout) + events = {} + for fd in readable: + events[fd] = events.get(fd, 0) | IOLoop.READ + for fd in writeable: + events[fd] = events.get(fd, 0) | IOLoop.WRITE + for fd in errors: + events[fd] = events.get(fd, 0) | IOLoop.ERROR + return events.items() + + +class SelectIOLoop(PollIOLoop): + def initialize(self, **kwargs): + super(SelectIOLoop, self).initialize(impl=_Select(), **kwargs) diff --git a/python/tornado/platform/twisted.py b/python/tornado/platform/twisted.py new file mode 100644 index 000000000..d3a4e75d1 --- /dev/null +++ b/python/tornado/platform/twisted.py @@ -0,0 +1,585 @@ +# Author: Ovidiu Predescu +# Date: July 2011 +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +"""Bridges between the Twisted reactor and Tornado IOLoop. + +This module lets you run applications and libraries written for +Twisted in a Tornado application. It can be used in two modes, +depending on which library's underlying event loop you want to use. + +This module has been tested with Twisted versions 11.0.0 and newer. 
+""" + +from __future__ import absolute_import, division, print_function, with_statement + +import datetime +import functools +import numbers +import socket +import sys + +import twisted.internet.abstract +from twisted.internet.defer import Deferred +from twisted.internet.posixbase import PosixReactorBase +from twisted.internet.interfaces import \ + IReactorFDSet, IDelayedCall, IReactorTime, IReadDescriptor, IWriteDescriptor +from twisted.python import failure, log +from twisted.internet import error +import twisted.names.cache +import twisted.names.client +import twisted.names.hosts +import twisted.names.resolve + +from zope.interface import implementer + +from tornado.concurrent import Future +from tornado.escape import utf8 +from tornado import gen +import tornado.ioloop +from tornado.log import app_log +from tornado.netutil import Resolver +from tornado.stack_context import NullContext, wrap +from tornado.ioloop import IOLoop +from tornado.util import timedelta_to_seconds + + +@implementer(IDelayedCall) +class TornadoDelayedCall(object): + """DelayedCall object for Tornado.""" + def __init__(self, reactor, seconds, f, *args, **kw): + self._reactor = reactor + self._func = functools.partial(f, *args, **kw) + self._time = self._reactor.seconds() + seconds + self._timeout = self._reactor._io_loop.add_timeout(self._time, + self._called) + self._active = True + + def _called(self): + self._active = False + self._reactor._removeDelayedCall(self) + try: + self._func() + except: + app_log.error("_called caught exception", exc_info=True) + + def getTime(self): + return self._time + + def cancel(self): + self._active = False + self._reactor._io_loop.remove_timeout(self._timeout) + self._reactor._removeDelayedCall(self) + + def delay(self, seconds): + self._reactor._io_loop.remove_timeout(self._timeout) + self._time += seconds + self._timeout = self._reactor._io_loop.add_timeout(self._time, + self._called) + + def reset(self, seconds): + 
self._reactor._io_loop.remove_timeout(self._timeout) + self._time = self._reactor.seconds() + seconds + self._timeout = self._reactor._io_loop.add_timeout(self._time, + self._called) + + def active(self): + return self._active + + +@implementer(IReactorTime, IReactorFDSet) +class TornadoReactor(PosixReactorBase): + """Twisted reactor built on the Tornado IOLoop. + + `TornadoReactor` implements the Twisted reactor interface on top of + the Tornado IOLoop. To use it, simply call `install` at the beginning + of the application:: + + import tornado.platform.twisted + tornado.platform.twisted.install() + from twisted.internet import reactor + + When the app is ready to start, call ``IOLoop.current().start()`` + instead of ``reactor.run()``. + + It is also possible to create a non-global reactor by calling + ``tornado.platform.twisted.TornadoReactor(io_loop)``. However, if + the `.IOLoop` and reactor are to be short-lived (such as those used in + unit tests), additional cleanup may be required. Specifically, it is + recommended to call:: + + reactor.fireSystemEvent('shutdown') + reactor.disconnectAll() + + before closing the `.IOLoop`. + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. + """ + def __init__(self, io_loop=None): + if not io_loop: + io_loop = tornado.ioloop.IOLoop.current() + self._io_loop = io_loop + self._readers = {} # map of reader objects to fd + self._writers = {} # map of writer objects to fd + self._fds = {} # a map of fd to a (reader, writer) tuple + self._delayedCalls = {} + PosixReactorBase.__init__(self) + self.addSystemEventTrigger('during', 'shutdown', self.crash) + + # IOLoop.start() bypasses some of the reactor initialization. + # Fire off the necessary events if they weren't already triggered + # by reactor.run(). 
+ def start_if_necessary(): + if not self._started: + self.fireSystemEvent('startup') + self._io_loop.add_callback(start_if_necessary) + + # IReactorTime + def seconds(self): + return self._io_loop.time() + + def callLater(self, seconds, f, *args, **kw): + dc = TornadoDelayedCall(self, seconds, f, *args, **kw) + self._delayedCalls[dc] = True + return dc + + def getDelayedCalls(self): + return [x for x in self._delayedCalls if x._active] + + def _removeDelayedCall(self, dc): + if dc in self._delayedCalls: + del self._delayedCalls[dc] + + # IReactorThreads + def callFromThread(self, f, *args, **kw): + assert callable(f), "%s is not callable" % f + with NullContext(): + # This NullContext is mainly for an edge case when running + # TwistedIOLoop on top of a TornadoReactor. + # TwistedIOLoop.add_callback uses reactor.callFromThread and + # should not pick up additional StackContexts along the way. + self._io_loop.add_callback(f, *args, **kw) + + # We don't need the waker code from the super class, Tornado uses + # its own waker. 
+ def installWaker(self): + pass + + def wakeUp(self): + pass + + # IReactorFDSet + def _invoke_callback(self, fd, events): + if fd not in self._fds: + return + (reader, writer) = self._fds[fd] + if reader: + err = None + if reader.fileno() == -1: + err = error.ConnectionLost() + elif events & IOLoop.READ: + err = log.callWithLogger(reader, reader.doRead) + if err is None and events & IOLoop.ERROR: + err = error.ConnectionLost() + if err is not None: + self.removeReader(reader) + reader.readConnectionLost(failure.Failure(err)) + if writer: + err = None + if writer.fileno() == -1: + err = error.ConnectionLost() + elif events & IOLoop.WRITE: + err = log.callWithLogger(writer, writer.doWrite) + if err is None and events & IOLoop.ERROR: + err = error.ConnectionLost() + if err is not None: + self.removeWriter(writer) + writer.writeConnectionLost(failure.Failure(err)) + + def addReader(self, reader): + if reader in self._readers: + # Don't add the reader if it's already there + return + fd = reader.fileno() + self._readers[reader] = fd + if fd in self._fds: + (_, writer) = self._fds[fd] + self._fds[fd] = (reader, writer) + if writer: + # We already registered this fd for write events, + # update it for read events as well. + self._io_loop.update_handler(fd, IOLoop.READ | IOLoop.WRITE) + else: + with NullContext(): + self._fds[fd] = (reader, None) + self._io_loop.add_handler(fd, self._invoke_callback, + IOLoop.READ) + + def addWriter(self, writer): + if writer in self._writers: + return + fd = writer.fileno() + self._writers[writer] = fd + if fd in self._fds: + (reader, _) = self._fds[fd] + self._fds[fd] = (reader, writer) + if reader: + # We already registered this fd for read events, + # update it for write events as well. 
+ self._io_loop.update_handler(fd, IOLoop.READ | IOLoop.WRITE) + else: + with NullContext(): + self._fds[fd] = (None, writer) + self._io_loop.add_handler(fd, self._invoke_callback, + IOLoop.WRITE) + + def removeReader(self, reader): + if reader in self._readers: + fd = self._readers.pop(reader) + (_, writer) = self._fds[fd] + if writer: + # We have a writer so we need to update the IOLoop for + # write events only. + self._fds[fd] = (None, writer) + self._io_loop.update_handler(fd, IOLoop.WRITE) + else: + # Since we have no writer registered, we remove the + # entry from _fds and unregister the handler from the + # IOLoop + del self._fds[fd] + self._io_loop.remove_handler(fd) + + def removeWriter(self, writer): + if writer in self._writers: + fd = self._writers.pop(writer) + (reader, _) = self._fds[fd] + if reader: + # We have a reader so we need to update the IOLoop for + # read events only. + self._fds[fd] = (reader, None) + self._io_loop.update_handler(fd, IOLoop.READ) + else: + # Since we have no reader registered, we remove the + # entry from the _fds and unregister the handler from + # the IOLoop. + del self._fds[fd] + self._io_loop.remove_handler(fd) + + def removeAll(self): + return self._removeAll(self._readers, self._writers) + + def getReaders(self): + return self._readers.keys() + + def getWriters(self): + return self._writers.keys() + + # The following functions are mainly used in twisted-style test cases; + # it is expected that most users of the TornadoReactor will call + # IOLoop.start() instead of Reactor.run(). 
+ def stop(self): + PosixReactorBase.stop(self) + fire_shutdown = functools.partial(self.fireSystemEvent, "shutdown") + self._io_loop.add_callback(fire_shutdown) + + def crash(self): + PosixReactorBase.crash(self) + self._io_loop.stop() + + def doIteration(self, delay): + raise NotImplementedError("doIteration") + + def mainLoop(self): + # Since this class is intended to be used in applications + # where the top-level event loop is ``io_loop.start()`` rather + # than ``reactor.run()``, it is implemented a little + # differently than other Twisted reactors. We override + # ``mainLoop`` instead of ``doIteration`` and must implement + # timed call functionality on top of `.IOLoop.add_timeout` + # rather than using the implementation in + # ``PosixReactorBase``. + self._io_loop.start() + + +class _TestReactor(TornadoReactor): + """Subclass of TornadoReactor for use in unittests. + + This can't go in the test.py file because of import-order dependencies + with the Twisted reactor test builder. + """ + def __init__(self): + # always use a new ioloop + super(_TestReactor, self).__init__(IOLoop()) + + def listenTCP(self, port, factory, backlog=50, interface=''): + # default to localhost to avoid firewall prompts on the mac + if not interface: + interface = '127.0.0.1' + return super(_TestReactor, self).listenTCP( + port, factory, backlog=backlog, interface=interface) + + def listenUDP(self, port, protocol, interface='', maxPacketSize=8192): + if not interface: + interface = '127.0.0.1' + return super(_TestReactor, self).listenUDP( + port, protocol, interface=interface, maxPacketSize=maxPacketSize) + + +def install(io_loop=None): + """Install this package as the default Twisted reactor. + + ``install()`` must be called very early in the startup process, + before most other twisted-related imports. Conversely, because it + initializes the `.IOLoop`, it cannot be called before + `.fork_processes` or multi-process `~.TCPServer.start`. 
These + conflicting requirements make it difficult to use `.TornadoReactor` + in multi-process mode, and an external process manager such as + ``supervisord`` is recommended instead. + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. + + """ + if not io_loop: + io_loop = tornado.ioloop.IOLoop.current() + reactor = TornadoReactor(io_loop) + from twisted.internet.main import installReactor + installReactor(reactor) + return reactor + + +@implementer(IReadDescriptor, IWriteDescriptor) +class _FD(object): + def __init__(self, fd, fileobj, handler): + self.fd = fd + self.fileobj = fileobj + self.handler = handler + self.reading = False + self.writing = False + self.lost = False + + def fileno(self): + return self.fd + + def doRead(self): + if not self.lost: + self.handler(self.fileobj, tornado.ioloop.IOLoop.READ) + + def doWrite(self): + if not self.lost: + self.handler(self.fileobj, tornado.ioloop.IOLoop.WRITE) + + def connectionLost(self, reason): + if not self.lost: + self.handler(self.fileobj, tornado.ioloop.IOLoop.ERROR) + self.lost = True + + def logPrefix(self): + return '' + + +class TwistedIOLoop(tornado.ioloop.IOLoop): + """IOLoop implementation that runs on Twisted. + + `TwistedIOLoop` implements the Tornado IOLoop interface on top of + the Twisted reactor. Recommended usage:: + + from tornado.platform.twisted import TwistedIOLoop + from twisted.internet import reactor + TwistedIOLoop().install() + # Set up your tornado application as usual using `IOLoop.instance` + reactor.run() + + Uses the global Twisted reactor by default. To create multiple + ``TwistedIOLoops`` in the same process, you must pass a unique reactor + when constructing each one. + + Not compatible with `tornado.process.Subprocess.set_exit_callback` + because the ``SIGCHLD`` handlers used by Tornado and Twisted conflict + with each other. 
+ """ + def initialize(self, reactor=None, **kwargs): + super(TwistedIOLoop, self).initialize(**kwargs) + if reactor is None: + import twisted.internet.reactor + reactor = twisted.internet.reactor + self.reactor = reactor + self.fds = {} + + def close(self, all_fds=False): + fds = self.fds + self.reactor.removeAll() + for c in self.reactor.getDelayedCalls(): + c.cancel() + if all_fds: + for fd in fds.values(): + self.close_fd(fd.fileobj) + + def add_handler(self, fd, handler, events): + if fd in self.fds: + raise ValueError('fd %s added twice' % fd) + fd, fileobj = self.split_fd(fd) + self.fds[fd] = _FD(fd, fileobj, wrap(handler)) + if events & tornado.ioloop.IOLoop.READ: + self.fds[fd].reading = True + self.reactor.addReader(self.fds[fd]) + if events & tornado.ioloop.IOLoop.WRITE: + self.fds[fd].writing = True + self.reactor.addWriter(self.fds[fd]) + + def update_handler(self, fd, events): + fd, fileobj = self.split_fd(fd) + if events & tornado.ioloop.IOLoop.READ: + if not self.fds[fd].reading: + self.fds[fd].reading = True + self.reactor.addReader(self.fds[fd]) + else: + if self.fds[fd].reading: + self.fds[fd].reading = False + self.reactor.removeReader(self.fds[fd]) + if events & tornado.ioloop.IOLoop.WRITE: + if not self.fds[fd].writing: + self.fds[fd].writing = True + self.reactor.addWriter(self.fds[fd]) + else: + if self.fds[fd].writing: + self.fds[fd].writing = False + self.reactor.removeWriter(self.fds[fd]) + + def remove_handler(self, fd): + fd, fileobj = self.split_fd(fd) + if fd not in self.fds: + return + self.fds[fd].lost = True + if self.fds[fd].reading: + self.reactor.removeReader(self.fds[fd]) + if self.fds[fd].writing: + self.reactor.removeWriter(self.fds[fd]) + del self.fds[fd] + + def start(self): + old_current = IOLoop.current(instance=False) + try: + self._setup_logging() + self.make_current() + self.reactor.run() + finally: + if old_current is None: + IOLoop.clear_current() + else: + old_current.make_current() + + def stop(self): + 
self.reactor.crash() + + def add_timeout(self, deadline, callback, *args, **kwargs): + # This method could be simplified (since tornado 4.0) by + # overriding call_at instead of add_timeout, but we leave it + # for now as a test of backwards-compatibility. + if isinstance(deadline, numbers.Real): + delay = max(deadline - self.time(), 0) + elif isinstance(deadline, datetime.timedelta): + delay = timedelta_to_seconds(deadline) + else: + raise TypeError("Unsupported deadline %r" % deadline) + return self.reactor.callLater( + delay, self._run_callback, + functools.partial(wrap(callback), *args, **kwargs)) + + def remove_timeout(self, timeout): + if timeout.active(): + timeout.cancel() + + def add_callback(self, callback, *args, **kwargs): + self.reactor.callFromThread( + self._run_callback, + functools.partial(wrap(callback), *args, **kwargs)) + + def add_callback_from_signal(self, callback, *args, **kwargs): + self.add_callback(callback, *args, **kwargs) + + +class TwistedResolver(Resolver): + """Twisted-based asynchronous resolver. + + This is a non-blocking and non-threaded resolver. It is + recommended only when threads cannot be used, since it has + limitations compared to the standard ``getaddrinfo``-based + `~tornado.netutil.Resolver` and + `~tornado.netutil.ThreadedResolver`. Specifically, it returns at + most one result, and arguments other than ``host`` and ``family`` + are ignored. It may fail to resolve when ``family`` is not + ``socket.AF_UNSPEC``. + + Requires Twisted 12.1 or newer. + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. + """ + def initialize(self, io_loop=None): + self.io_loop = io_loop or IOLoop.current() + # partial copy of twisted.names.client.createResolver, which doesn't + # allow for a reactor to be passed in.
+ self.reactor = tornado.platform.twisted.TornadoReactor(io_loop) + + host_resolver = twisted.names.hosts.Resolver('/etc/hosts') + cache_resolver = twisted.names.cache.CacheResolver(reactor=self.reactor) + real_resolver = twisted.names.client.Resolver('/etc/resolv.conf', + reactor=self.reactor) + self.resolver = twisted.names.resolve.ResolverChain( + [host_resolver, cache_resolver, real_resolver]) + + @gen.coroutine + def resolve(self, host, port, family=0): + # getHostByName doesn't accept IP addresses, so if the input + # looks like an IP address just return it immediately. + if twisted.internet.abstract.isIPAddress(host): + resolved = host + resolved_family = socket.AF_INET + elif twisted.internet.abstract.isIPv6Address(host): + resolved = host + resolved_family = socket.AF_INET6 + else: + deferred = self.resolver.getHostByName(utf8(host)) + resolved = yield gen.Task(deferred.addBoth) + if isinstance(resolved, failure.Failure): + resolved.raiseException() + elif twisted.internet.abstract.isIPAddress(resolved): + resolved_family = socket.AF_INET + elif twisted.internet.abstract.isIPv6Address(resolved): + resolved_family = socket.AF_INET6 + else: + resolved_family = socket.AF_UNSPEC + if family != socket.AF_UNSPEC and family != resolved_family: + raise Exception('Requested socket family %d but got %d' % + (family, resolved_family)) + result = [ + (resolved_family, (resolved, port)), + ] + raise gen.Return(result) + +if hasattr(gen.convert_yielded, 'register'): + @gen.convert_yielded.register(Deferred) + def _(d): + f = Future() + + def errback(failure): + try: + failure.raiseException() + # Should never happen, but just in case + raise Exception("errback called without error") + except: + f.set_exc_info(sys.exc_info()) + d.addCallbacks(f.set_result, errback) + return f diff --git a/python/tornado/platform/windows.py b/python/tornado/platform/windows.py new file mode 100644 index 000000000..817bdca13 --- /dev/null +++ b/python/tornado/platform/windows.py @@ -0,0 
+1,20 @@ +# NOTE: win32 support is currently experimental, and not recommended +# for production use. + + +from __future__ import absolute_import, division, print_function, with_statement +import ctypes +import ctypes.wintypes + +# See: http://msdn.microsoft.com/en-us/library/ms724935(VS.85).aspx +SetHandleInformation = ctypes.windll.kernel32.SetHandleInformation +SetHandleInformation.argtypes = (ctypes.wintypes.HANDLE, ctypes.wintypes.DWORD, ctypes.wintypes.DWORD) +SetHandleInformation.restype = ctypes.wintypes.BOOL + +HANDLE_FLAG_INHERIT = 0x00000001 + + +def set_close_exec(fd): + success = SetHandleInformation(fd, HANDLE_FLAG_INHERIT, 0) + if not success: + raise ctypes.WinError() diff --git a/python/tornado/process.py b/python/tornado/process.py new file mode 100644 index 000000000..daa9677bb --- /dev/null +++ b/python/tornado/process.py @@ -0,0 +1,357 @@ +#!/usr/bin/env python +# +# Copyright 2011 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Utilities for working with multiple processes, including both forking +the server into multiple processes and managing subprocesses.
+""" + +from __future__ import absolute_import, division, print_function, with_statement + +import errno +import os +import signal +import subprocess +import sys +import time + +from binascii import hexlify + +from tornado.concurrent import Future +from tornado import ioloop +from tornado.iostream import PipeIOStream +from tornado.log import gen_log +from tornado.platform.auto import set_close_exec +from tornado import stack_context +from tornado.util import errno_from_exception + +try: + import multiprocessing +except ImportError: + # Multiprocessing is not available on Google App Engine. + multiprocessing = None + +try: + long # py2 +except NameError: + long = int # py3 + + +# Re-export this exception for convenience. +try: + CalledProcessError = subprocess.CalledProcessError +except AttributeError: + # The subprocess module exists in Google App Engine, but is empty. + # This module isn't very useful in that case, but it should + # at least be importable. + if 'APPENGINE_RUNTIME' not in os.environ: + raise + + +def cpu_count(): + """Returns the number of processors on this machine.""" + if multiprocessing is None: + return 1 + try: + return multiprocessing.cpu_count() + except NotImplementedError: + pass + try: + return os.sysconf("SC_NPROCESSORS_CONF") + except ValueError: + pass + gen_log.error("Could not detect number of processors; assuming 1") + return 1 + + +def _reseed_random(): + if 'random' not in sys.modules: + return + import random + # If os.urandom is available, this method does the same thing as + # random.seed (at least as of python 2.6). If os.urandom is not + # available, we mix in the pid in addition to a timestamp. 
+ try: + seed = long(hexlify(os.urandom(16)), 16) + except NotImplementedError: + seed = int(time.time() * 1000) ^ os.getpid() + random.seed(seed) + + +def _pipe_cloexec(): + r, w = os.pipe() + set_close_exec(r) + set_close_exec(w) + return r, w + + +_task_id = None + + +def fork_processes(num_processes, max_restarts=100): + """Starts multiple worker processes. + + If ``num_processes`` is None or <= 0, we detect the number of cores + available on this machine and fork that number of child + processes. If ``num_processes`` is given and > 0, we fork that + specific number of sub-processes. + + Since we use processes and not threads, there is no shared memory + between any server code. + + Note that multiple processes are not compatible with the autoreload + module (or the ``autoreload=True`` option to `tornado.web.Application` + which defaults to True when ``debug=True``). + When using multiple processes, no IOLoops can be created or + referenced until after the call to ``fork_processes``. + + In each child process, ``fork_processes`` returns its *task id*, a + number between 0 and ``num_processes``. Processes that exit + abnormally (due to a signal or non-zero exit status) are restarted + with the same id (up to ``max_restarts`` times). In the parent + process, ``fork_processes`` returns None if all child processes + have exited normally, but will otherwise only exit by throwing an + exception. + """ + global _task_id + assert _task_id is None + if num_processes is None or num_processes <= 0: + num_processes = cpu_count() + if ioloop.IOLoop.initialized(): + raise RuntimeError("Cannot run in multiple processes: IOLoop instance " + "has already been initialized. 
You cannot call " + "IOLoop.instance() before calling start_processes()") + gen_log.info("Starting %d processes", num_processes) + children = {} + + def start_child(i): + pid = os.fork() + if pid == 0: + # child process + _reseed_random() + global _task_id + _task_id = i + return i + else: + children[pid] = i + return None + for i in range(num_processes): + id = start_child(i) + if id is not None: + return id + num_restarts = 0 + while children: + try: + pid, status = os.wait() + except OSError as e: + if errno_from_exception(e) == errno.EINTR: + continue + raise + if pid not in children: + continue + id = children.pop(pid) + if os.WIFSIGNALED(status): + gen_log.warning("child %d (pid %d) killed by signal %d, restarting", + id, pid, os.WTERMSIG(status)) + elif os.WEXITSTATUS(status) != 0: + gen_log.warning("child %d (pid %d) exited with status %d, restarting", + id, pid, os.WEXITSTATUS(status)) + else: + gen_log.info("child %d (pid %d) exited normally", id, pid) + continue + num_restarts += 1 + if num_restarts > max_restarts: + raise RuntimeError("Too many child restarts, giving up") + new_id = start_child(id) + if new_id is not None: + return new_id + # All child processes exited cleanly, so exit the master process + # instead of just returning to right after the call to + # fork_processes (which will probably just start up another IOLoop + # unless the caller checks the return value). + sys.exit(0) + + +def task_id(): + """Returns the current task id, if any. + + Returns None if this process was not created by `fork_processes`. + """ + global _task_id + return _task_id + + +class Subprocess(object): + """Wraps ``subprocess.Popen`` with IOStream support. + + The constructor is the same as ``subprocess.Popen`` with the following + additions: + + * ``stdin``, ``stdout``, and ``stderr`` may have the value + ``tornado.process.Subprocess.STREAM``, which will make the corresponding + attribute of the resulting Subprocess a `.PipeIOStream`. 
+ * A new keyword argument ``io_loop`` may be used to pass in an IOLoop. + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. + """ + STREAM = object() + + _initialized = False + _waiting = {} + + def __init__(self, *args, **kwargs): + self.io_loop = kwargs.pop('io_loop', None) or ioloop.IOLoop.current() + # All FDs we create should be closed on error; those in to_close + # should be closed in the parent process on success. + pipe_fds = [] + to_close = [] + if kwargs.get('stdin') is Subprocess.STREAM: + in_r, in_w = _pipe_cloexec() + kwargs['stdin'] = in_r + pipe_fds.extend((in_r, in_w)) + to_close.append(in_r) + self.stdin = PipeIOStream(in_w, io_loop=self.io_loop) + if kwargs.get('stdout') is Subprocess.STREAM: + out_r, out_w = _pipe_cloexec() + kwargs['stdout'] = out_w + pipe_fds.extend((out_r, out_w)) + to_close.append(out_w) + self.stdout = PipeIOStream(out_r, io_loop=self.io_loop) + if kwargs.get('stderr') is Subprocess.STREAM: + err_r, err_w = _pipe_cloexec() + kwargs['stderr'] = err_w + pipe_fds.extend((err_r, err_w)) + to_close.append(err_w) + self.stderr = PipeIOStream(err_r, io_loop=self.io_loop) + try: + self.proc = subprocess.Popen(*args, **kwargs) + except: + for fd in pipe_fds: + os.close(fd) + raise + for fd in to_close: + os.close(fd) + for attr in ['stdin', 'stdout', 'stderr', 'pid']: + if not hasattr(self, attr): # don't clobber streams set above + setattr(self, attr, getattr(self.proc, attr)) + self._exit_callback = None + self.returncode = None + + def set_exit_callback(self, callback): + """Runs ``callback`` when this process exits. + + The callback takes one argument, the return code of the process. + + This method uses a ``SIGCHLD`` handler, which is a global setting + and may conflict if you have other libraries trying to handle the + same signal. If you are using more than one ``IOLoop`` it may + be necessary to call `Subprocess.initialize` first to designate + one ``IOLoop`` to run the signal handlers. 
+ + In many cases a close callback on the stdout or stderr streams + can be used as an alternative to an exit callback if the + signal handler is causing a problem. + """ + self._exit_callback = stack_context.wrap(callback) + Subprocess.initialize(self.io_loop) + Subprocess._waiting[self.pid] = self + Subprocess._try_cleanup_process(self.pid) + + def wait_for_exit(self, raise_error=True): + """Returns a `.Future` which resolves when the process exits. + + Usage:: + + ret = yield proc.wait_for_exit() + + This is a coroutine-friendly alternative to `set_exit_callback` + (and a replacement for the blocking `subprocess.Popen.wait`). + + By default, raises `subprocess.CalledProcessError` if the process + has a non-zero exit status. Use ``wait_for_exit(raise_error=False)`` + to suppress this behavior and return the exit status without raising. + + .. versionadded:: 4.2 + """ + future = Future() + + def callback(ret): + if ret != 0 and raise_error: + # Unfortunately we don't have the original args any more. + future.set_exception(CalledProcessError(ret, None)) + else: + future.set_result(ret) + self.set_exit_callback(callback) + return future + + @classmethod + def initialize(cls, io_loop=None): + """Initializes the ``SIGCHLD`` handler. + + The signal handler is run on an `.IOLoop` to avoid locking issues. + Note that the `.IOLoop` used for signal handling need not be the + same one used by individual Subprocess objects (as long as the + ``IOLoops`` are each running in separate threads). + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. 
+ """ + if cls._initialized: + return + if io_loop is None: + io_loop = ioloop.IOLoop.current() + cls._old_sigchld = signal.signal( + signal.SIGCHLD, + lambda sig, frame: io_loop.add_callback_from_signal(cls._cleanup)) + cls._initialized = True + + @classmethod + def uninitialize(cls): + """Removes the ``SIGCHLD`` handler.""" + if not cls._initialized: + return + signal.signal(signal.SIGCHLD, cls._old_sigchld) + cls._initialized = False + + @classmethod + def _cleanup(cls): + for pid in list(cls._waiting.keys()): # make a copy + cls._try_cleanup_process(pid) + + @classmethod + def _try_cleanup_process(cls, pid): + try: + ret_pid, status = os.waitpid(pid, os.WNOHANG) + except OSError as e: + if errno_from_exception(e) == errno.ECHILD: + return + if ret_pid == 0: + return + assert ret_pid == pid + subproc = cls._waiting.pop(pid) + subproc.io_loop.add_callback_from_signal( + subproc._set_returncode, status) + + def _set_returncode(self, status): + if os.WIFSIGNALED(status): + self.returncode = -os.WTERMSIG(status) + else: + assert os.WIFEXITED(status) + self.returncode = os.WEXITSTATUS(status) + if self._exit_callback: + callback = self._exit_callback + self._exit_callback = None + callback(self.returncode) diff --git a/python/tornado/queues.py b/python/tornado/queues.py new file mode 100644 index 000000000..129b204e3 --- /dev/null +++ b/python/tornado/queues.py @@ -0,0 +1,357 @@ +# Copyright 2015 The Tornado Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from __future__ import absolute_import, division, print_function, with_statement + +__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'QueueFull', 'QueueEmpty'] + +import collections +import heapq + +from tornado import gen, ioloop +from tornado.concurrent import Future +from tornado.locks import Event + + +class QueueEmpty(Exception): + """Raised by `.Queue.get_nowait` when the queue has no items.""" + pass + + +class QueueFull(Exception): + """Raised by `.Queue.put_nowait` when a queue is at its maximum size.""" + pass + + +def _set_timeout(future, timeout): + if timeout: + def on_timeout(): + future.set_exception(gen.TimeoutError()) + io_loop = ioloop.IOLoop.current() + timeout_handle = io_loop.add_timeout(timeout, on_timeout) + future.add_done_callback( + lambda _: io_loop.remove_timeout(timeout_handle)) + + +class _QueueIterator(object): + def __init__(self, q): + self.q = q + + def __anext__(self): + return self.q.get() + + +class Queue(object): + """Coordinate producer and consumer coroutines. + + If maxsize is 0 (the default) the queue size is unbounded. + + .. testcode:: + + from tornado import gen + from tornado.ioloop import IOLoop + from tornado.queues import Queue + + q = Queue(maxsize=2) + + @gen.coroutine + def consumer(): + while True: + item = yield q.get() + try: + print('Doing work on %s' % item) + yield gen.sleep(0.01) + finally: + q.task_done() + + @gen.coroutine + def producer(): + for item in range(5): + yield q.put(item) + print('Put %s' % item) + + @gen.coroutine + def main(): + # Start consumer without waiting (since it never finishes). + IOLoop.current().spawn_callback(consumer) + yield producer() # Wait for producer to put all tasks. + yield q.join() # Wait for consumer to finish all tasks. + print('Done') + + IOLoop.current().run_sync(main) + + .. 
testoutput:: + + Put 0 + Put 1 + Doing work on 0 + Put 2 + Doing work on 1 + Put 3 + Doing work on 2 + Put 4 + Doing work on 3 + Doing work on 4 + Done + + In Python 3.5, `Queue` implements the async iterator protocol, so + ``consumer()`` could be rewritten as:: + + async def consumer(): + async for item in q: + try: + print('Doing work on %s' % item) + yield gen.sleep(0.01) + finally: + q.task_done() + + .. versionchanged:: 4.3 + Added ``async for`` support in Python 3.5. + + """ + def __init__(self, maxsize=0): + if maxsize is None: + raise TypeError("maxsize can't be None") + + if maxsize < 0: + raise ValueError("maxsize can't be negative") + + self._maxsize = maxsize + self._init() + self._getters = collections.deque([]) # Futures. + self._putters = collections.deque([]) # Pairs of (item, Future). + self._unfinished_tasks = 0 + self._finished = Event() + self._finished.set() + + @property + def maxsize(self): + """Number of items allowed in the queue.""" + return self._maxsize + + def qsize(self): + """Number of items in the queue.""" + return len(self._queue) + + def empty(self): + return not self._queue + + def full(self): + if self.maxsize == 0: + return False + else: + return self.qsize() >= self.maxsize + + def put(self, item, timeout=None): + """Put an item into the queue, perhaps waiting until there is room. + + Returns a Future, which raises `tornado.gen.TimeoutError` after a + timeout. + """ + try: + self.put_nowait(item) + except QueueFull: + future = Future() + self._putters.append((item, future)) + _set_timeout(future, timeout) + return future + else: + return gen._null_future + + def put_nowait(self, item): + """Put an item into the queue without blocking. + + If no free slot is immediately available, raise `QueueFull`. + """ + self._consume_expired() + if self._getters: + assert self.empty(), "queue non-empty, why are getters waiting?" 
+ getter = self._getters.popleft() + self.__put_internal(item) + getter.set_result(self._get()) + elif self.full(): + raise QueueFull + else: + self.__put_internal(item) + + def get(self, timeout=None): + """Remove and return an item from the queue. + + Returns a Future which resolves once an item is available, or raises + `tornado.gen.TimeoutError` after a timeout. + """ + future = Future() + try: + future.set_result(self.get_nowait()) + except QueueEmpty: + self._getters.append(future) + _set_timeout(future, timeout) + return future + + def get_nowait(self): + """Remove and return an item from the queue without blocking. + + Return an item if one is immediately available, else raise + `QueueEmpty`. + """ + self._consume_expired() + if self._putters: + assert self.full(), "queue not full, why are putters waiting?" + item, putter = self._putters.popleft() + self.__put_internal(item) + putter.set_result(None) + return self._get() + elif self.qsize(): + return self._get() + else: + raise QueueEmpty + + def task_done(self): + """Indicate that a formerly enqueued task is complete. + + Used by queue consumers. For each `.get` used to fetch a task, a + subsequent call to `.task_done` tells the queue that the processing + on the task is complete. + + If a `.join` is blocking, it resumes when all items have been + processed; that is, when every `.put` is matched by a `.task_done`. + + Raises `ValueError` if called more times than `.put`. + """ + if self._unfinished_tasks <= 0: + raise ValueError('task_done() called too many times') + self._unfinished_tasks -= 1 + if self._unfinished_tasks == 0: + self._finished.set() + + def join(self, timeout=None): + """Block until all items in the queue are processed. + + Returns a Future, which raises `tornado.gen.TimeoutError` after a + timeout. + """ + return self._finished.wait(timeout) + + @gen.coroutine + def __aiter__(self): + return _QueueIterator(self) + + # These three are overridable in subclasses. 
+ def _init(self): + self._queue = collections.deque() + + def _get(self): + return self._queue.popleft() + + def _put(self, item): + self._queue.append(item) + # End of the overridable methods. + + def __put_internal(self, item): + self._unfinished_tasks += 1 + self._finished.clear() + self._put(item) + + def _consume_expired(self): + # Remove timed-out waiters. + while self._putters and self._putters[0][1].done(): + self._putters.popleft() + + while self._getters and self._getters[0].done(): + self._getters.popleft() + + def __repr__(self): + return '<%s at %s %s>' % ( + type(self).__name__, hex(id(self)), self._format()) + + def __str__(self): + return '<%s %s>' % (type(self).__name__, self._format()) + + def _format(self): + result = 'maxsize=%r' % (self.maxsize, ) + if getattr(self, '_queue', None): + result += ' queue=%r' % self._queue + if self._getters: + result += ' getters[%s]' % len(self._getters) + if self._putters: + result += ' putters[%s]' % len(self._putters) + if self._unfinished_tasks: + result += ' tasks=%s' % self._unfinished_tasks + return result + + +class PriorityQueue(Queue): + """A `.Queue` that retrieves entries in priority order, lowest first. + + Entries are typically tuples like ``(priority number, data)``. + + .. testcode:: + + from tornado.queues import PriorityQueue + + q = PriorityQueue() + q.put((1, 'medium-priority item')) + q.put((0, 'high-priority item')) + q.put((10, 'low-priority item')) + + print(q.get_nowait()) + print(q.get_nowait()) + print(q.get_nowait()) + + .. testoutput:: + + (0, 'high-priority item') + (1, 'medium-priority item') + (10, 'low-priority item') + """ + def _init(self): + self._queue = [] + + def _put(self, item): + heapq.heappush(self._queue, item) + + def _get(self): + return heapq.heappop(self._queue) + + +class LifoQueue(Queue): + """A `.Queue` that retrieves the most recently put items first. + + .. 
testcode:: + + from tornado.queues import LifoQueue + + q = LifoQueue() + q.put(3) + q.put(2) + q.put(1) + + print(q.get_nowait()) + print(q.get_nowait()) + print(q.get_nowait()) + + .. testoutput:: + + 1 + 2 + 3 + """ + def _init(self): + self._queue = [] + + def _put(self, item): + self._queue.append(item) + + def _get(self): + return self._queue.pop() diff --git a/python/tornado/simple_httpclient.py b/python/tornado/simple_httpclient.py new file mode 100644 index 000000000..37b0bc27f --- /dev/null +++ b/python/tornado/simple_httpclient.py @@ -0,0 +1,549 @@ +#!/usr/bin/env python +from __future__ import absolute_import, division, print_function, with_statement + +from tornado.escape import utf8, _unicode +from tornado import gen +from tornado.httpclient import HTTPResponse, HTTPError, AsyncHTTPClient, main, _RequestProxy +from tornado import httputil +from tornado.http1connection import HTTP1Connection, HTTP1ConnectionParameters +from tornado.iostream import StreamClosedError +from tornado.netutil import Resolver, OverrideResolver, _client_ssl_defaults +from tornado.log import gen_log +from tornado import stack_context +from tornado.tcpclient import TCPClient + +import base64 +import collections +import copy +import functools +import re +import socket +import sys +from io import BytesIO + + +try: + import urlparse # py2 +except ImportError: + import urllib.parse as urlparse # py3 + +try: + import ssl +except ImportError: + # ssl is not available on Google App Engine. + ssl = None + +try: + import certifi +except ImportError: + certifi = None + + +def _default_ca_certs(): + if certifi is None: + raise Exception("The 'certifi' package is required to use https " + "in simple_httpclient") + return certifi.where() + + +class SimpleAsyncHTTPClient(AsyncHTTPClient): + """Non-blocking HTTP client with no external dependencies. + + This class implements an HTTP 1.1 client on top of Tornado's IOStreams. 
+ Some features found in the curl-based AsyncHTTPClient are not yet + supported. In particular, proxies are not supported, connections + are not reused, and callers cannot select the network interface to be + used. + """ + def initialize(self, io_loop, max_clients=10, + hostname_mapping=None, max_buffer_size=104857600, + resolver=None, defaults=None, max_header_size=None, + max_body_size=None): + """Creates an AsyncHTTPClient. + + Only a single AsyncHTTPClient instance exists per IOLoop + in order to provide limitations on the number of pending connections. + ``force_instance=True`` may be used to suppress this behavior. + + Note that because of this implicit reuse, unless ``force_instance`` + is used, only the first call to the constructor actually uses + its arguments. It is recommended to use the ``configure`` method + instead of the constructor to ensure that arguments take effect. + + ``max_clients`` is the number of concurrent requests that can be + in progress; when this limit is reached additional requests will be + queued. Note that time spent waiting in this queue still counts + against the ``request_timeout``. + + ``hostname_mapping`` is a dictionary mapping hostnames to IP addresses. + It can be used to make local DNS changes when modifying system-wide + settings like ``/etc/hosts`` is not possible or desirable (e.g. in + unittests). + + ``max_buffer_size`` (default 100MB) is the number of bytes + that can be read into memory at once. ``max_body_size`` + (defaults to ``max_buffer_size``) is the largest response body + that the client will accept. Without a + ``streaming_callback``, the smaller of these two limits + applies; with a ``streaming_callback`` only ``max_body_size`` + does. + + .. versionchanged:: 4.2 + Added the ``max_body_size`` argument. 
+ """ + super(SimpleAsyncHTTPClient, self).initialize(io_loop, + defaults=defaults) + self.max_clients = max_clients + self.queue = collections.deque() + self.active = {} + self.waiting = {} + self.max_buffer_size = max_buffer_size + self.max_header_size = max_header_size + self.max_body_size = max_body_size + # TCPClient could create a Resolver for us, but we have to do it + # ourselves to support hostname_mapping. + if resolver: + self.resolver = resolver + self.own_resolver = False + else: + self.resolver = Resolver(io_loop=io_loop) + self.own_resolver = True + if hostname_mapping is not None: + self.resolver = OverrideResolver(resolver=self.resolver, + mapping=hostname_mapping) + self.tcp_client = TCPClient(resolver=self.resolver, io_loop=io_loop) + + def close(self): + super(SimpleAsyncHTTPClient, self).close() + if self.own_resolver: + self.resolver.close() + self.tcp_client.close() + + def fetch_impl(self, request, callback): + key = object() + self.queue.append((key, request, callback)) + if not len(self.active) < self.max_clients: + timeout_handle = self.io_loop.add_timeout( + self.io_loop.time() + min(request.connect_timeout, + request.request_timeout), + functools.partial(self._on_timeout, key)) + else: + timeout_handle = None + self.waiting[key] = (request, callback, timeout_handle) + self._process_queue() + if self.queue: + gen_log.debug("max_clients limit reached, request queued. " + "%d active, %d queued requests." 
% ( + len(self.active), len(self.queue))) + + def _process_queue(self): + with stack_context.NullContext(): + while self.queue and len(self.active) < self.max_clients: + key, request, callback = self.queue.popleft() + if key not in self.waiting: + continue + self._remove_timeout(key) + self.active[key] = (request, callback) + release_callback = functools.partial(self._release_fetch, key) + self._handle_request(request, release_callback, callback) + + def _connection_class(self): + return _HTTPConnection + + def _handle_request(self, request, release_callback, final_callback): + self._connection_class()( + self.io_loop, self, request, release_callback, + final_callback, self.max_buffer_size, self.tcp_client, + self.max_header_size, self.max_body_size) + + def _release_fetch(self, key): + del self.active[key] + self._process_queue() + + def _remove_timeout(self, key): + if key in self.waiting: + request, callback, timeout_handle = self.waiting[key] + if timeout_handle is not None: + self.io_loop.remove_timeout(timeout_handle) + del self.waiting[key] + + def _on_timeout(self, key): + request, callback, timeout_handle = self.waiting[key] + self.queue.remove((key, request, callback)) + timeout_response = HTTPResponse( + request, 599, error=HTTPError(599, "Timeout"), + request_time=self.io_loop.time() - request.start_time) + self.io_loop.add_callback(callback, timeout_response) + del self.waiting[key] + + +class _HTTPConnection(httputil.HTTPMessageDelegate): + _SUPPORTED_METHODS = set(["GET", "HEAD", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"]) + + def __init__(self, io_loop, client, request, release_callback, + final_callback, max_buffer_size, tcp_client, + max_header_size, max_body_size): + self.start_time = io_loop.time() + self.io_loop = io_loop + self.client = client + self.request = request + self.release_callback = release_callback + self.final_callback = final_callback + self.max_buffer_size = max_buffer_size + self.tcp_client = tcp_client + self.max_header_size 
= max_header_size + self.max_body_size = max_body_size + self.code = None + self.headers = None + self.chunks = [] + self._decompressor = None + # Timeout handle returned by IOLoop.add_timeout + self._timeout = None + self._sockaddr = None + with stack_context.ExceptionStackContext(self._handle_exception): + self.parsed = urlparse.urlsplit(_unicode(self.request.url)) + if self.parsed.scheme not in ("http", "https"): + raise ValueError("Unsupported url scheme: %s" % + self.request.url) + # urlsplit results have hostname and port results, but they + # didn't support ipv6 literals until python 2.7. + netloc = self.parsed.netloc + if "@" in netloc: + userpass, _, netloc = netloc.rpartition("@") + host, port = httputil.split_host_and_port(netloc) + if port is None: + port = 443 if self.parsed.scheme == "https" else 80 + if re.match(r'^\[.*\]$', host): + # raw ipv6 addresses in urls are enclosed in brackets + host = host[1:-1] + self.parsed_hostname = host # save final host for _on_connect + + if request.allow_ipv6 is False: + af = socket.AF_INET + else: + af = socket.AF_UNSPEC + + ssl_options = self._get_ssl_options(self.parsed.scheme) + + timeout = min(self.request.connect_timeout, self.request.request_timeout) + if timeout: + self._timeout = self.io_loop.add_timeout( + self.start_time + timeout, + stack_context.wrap(self._on_timeout)) + self.tcp_client.connect(host, port, af=af, + ssl_options=ssl_options, + max_buffer_size=self.max_buffer_size, + callback=self._on_connect) + + def _get_ssl_options(self, scheme): + if scheme == "https": + if self.request.ssl_options is not None: + return self.request.ssl_options + # If we are using the defaults, don't construct a + # new SSLContext. 
+ if (self.request.validate_cert and + self.request.ca_certs is None and + self.request.client_cert is None and + self.request.client_key is None): + return _client_ssl_defaults + ssl_options = {} + if self.request.validate_cert: + ssl_options["cert_reqs"] = ssl.CERT_REQUIRED + if self.request.ca_certs is not None: + ssl_options["ca_certs"] = self.request.ca_certs + elif not hasattr(ssl, 'create_default_context'): + # When create_default_context is present, + # we can omit the "ca_certs" parameter entirely, + # which avoids the dependency on "certifi" for py34. + ssl_options["ca_certs"] = _default_ca_certs() + if self.request.client_key is not None: + ssl_options["keyfile"] = self.request.client_key + if self.request.client_cert is not None: + ssl_options["certfile"] = self.request.client_cert + + # SSL interoperability is tricky. We want to disable + # SSLv2 for security reasons; it wasn't disabled by default + # until openssl 1.0. The best way to do this is to use + # the SSL_OP_NO_SSLv2, but that wasn't exposed to python + # until 3.2. Python 2.7 adds the ciphers argument, which + # can also be used to disable SSLv2. As a last resort + # on python 2.6, we set ssl_version to TLSv1. This is + # more narrow than we'd like since it also breaks + # compatibility with servers configured for SSLv3 only, + # but nearly all servers support both SSLv3 and TLSv1: + # http://blog.ivanristic.com/2011/09/ssl-survey-protocol-support.html + if sys.version_info >= (2, 7): + # In addition to disabling SSLv2, we also exclude certain + # classes of insecure ciphers. + ssl_options["ciphers"] = "DEFAULT:!SSLv2:!EXPORT:!DES" + else: + # This is really only necessary for pre-1.0 versions + # of openssl, but python 2.6 doesn't expose version + # information. 
+ ssl_options["ssl_version"] = ssl.PROTOCOL_TLSv1 + return ssl_options + return None + + def _on_timeout(self): + self._timeout = None + if self.final_callback is not None: + raise HTTPError(599, "Timeout") + + def _remove_timeout(self): + if self._timeout is not None: + self.io_loop.remove_timeout(self._timeout) + self._timeout = None + + def _on_connect(self, stream): + if self.final_callback is None: + # final_callback is cleared if we've hit our timeout. + stream.close() + return + self.stream = stream + self.stream.set_close_callback(self.on_connection_close) + self._remove_timeout() + if self.final_callback is None: + return + if self.request.request_timeout: + self._timeout = self.io_loop.add_timeout( + self.start_time + self.request.request_timeout, + stack_context.wrap(self._on_timeout)) + if (self.request.method not in self._SUPPORTED_METHODS and + not self.request.allow_nonstandard_methods): + raise KeyError("unknown method %s" % self.request.method) + for key in ('network_interface', + 'proxy_host', 'proxy_port', + 'proxy_username', 'proxy_password'): + if getattr(self.request, key, None): + raise NotImplementedError('%s not supported' % key) + if "Connection" not in self.request.headers: + self.request.headers["Connection"] = "close" + if "Host" not in self.request.headers: + if '@' in self.parsed.netloc: + self.request.headers["Host"] = self.parsed.netloc.rpartition('@')[-1] + else: + self.request.headers["Host"] = self.parsed.netloc + username, password = None, None + if self.parsed.username is not None: + username, password = self.parsed.username, self.parsed.password + elif self.request.auth_username is not None: + username = self.request.auth_username + password = self.request.auth_password or '' + if username is not None: + if self.request.auth_mode not in (None, "basic"): + raise ValueError("unsupported auth_mode %s", + self.request.auth_mode) + auth = utf8(username) + b":" + utf8(password) + self.request.headers["Authorization"] = (b"Basic " + 
+ base64.b64encode(auth)) + if self.request.user_agent: + self.request.headers["User-Agent"] = self.request.user_agent + if not self.request.allow_nonstandard_methods: + # Some HTTP methods nearly always have bodies while others + # almost never do. Fail in this case unless the user has + # opted out of sanity checks with allow_nonstandard_methods. + body_expected = self.request.method in ("POST", "PATCH", "PUT") + body_present = (self.request.body is not None or + self.request.body_producer is not None) + if ((body_expected and not body_present) or + (body_present and not body_expected)): + raise ValueError( + 'Body must %sbe None for method %s (unless ' + 'allow_nonstandard_methods is true)' % + ('not ' if body_expected else '', self.request.method)) + if self.request.expect_100_continue: + self.request.headers["Expect"] = "100-continue" + if self.request.body is not None: + # When body_producer is used the caller is responsible for + # setting Content-Length (or else chunked encoding will be used). + self.request.headers["Content-Length"] = str(len( + self.request.body)) + if (self.request.method == "POST" and + "Content-Type" not in self.request.headers): + self.request.headers["Content-Type"] = "application/x-www-form-urlencoded" + if self.request.decompress_response: + self.request.headers["Accept-Encoding"] = "gzip" + req_path = ((self.parsed.path or '/') + + (('?' 
+ self.parsed.query) if self.parsed.query else '')) + self.connection = self._create_connection(stream) + start_line = httputil.RequestStartLine(self.request.method, + req_path, '') + self.connection.write_headers(start_line, self.request.headers) + if self.request.expect_100_continue: + self._read_response() + else: + self._write_body(True) + + def _create_connection(self, stream): + stream.set_nodelay(True) + connection = HTTP1Connection( + stream, True, + HTTP1ConnectionParameters( + no_keep_alive=True, + max_header_size=self.max_header_size, + max_body_size=self.max_body_size, + decompress=self.request.decompress_response), + self._sockaddr) + return connection + + def _write_body(self, start_read): + if self.request.body is not None: + self.connection.write(self.request.body) + elif self.request.body_producer is not None: + fut = self.request.body_producer(self.connection.write) + if fut is not None: + fut = gen.convert_yielded(fut) + + def on_body_written(fut): + fut.result() + self.connection.finish() + if start_read: + self._read_response() + self.io_loop.add_future(fut, on_body_written) + return + self.connection.finish() + if start_read: + self._read_response() + + def _read_response(self): + # Ensure that any exception raised in read_response ends up in our + # stack context. 
+ self.io_loop.add_future( + self.connection.read_response(self), + lambda f: f.result()) + + def _release(self): + if self.release_callback is not None: + release_callback = self.release_callback + self.release_callback = None + release_callback() + + def _run_callback(self, response): + self._release() + if self.final_callback is not None: + final_callback = self.final_callback + self.final_callback = None + self.io_loop.add_callback(final_callback, response) + + def _handle_exception(self, typ, value, tb): + if self.final_callback: + self._remove_timeout() + if isinstance(value, StreamClosedError): + if value.real_error is None: + value = HTTPError(599, "Stream closed") + else: + value = value.real_error + self._run_callback(HTTPResponse(self.request, 599, error=value, + request_time=self.io_loop.time() - self.start_time, + )) + + if hasattr(self, "stream"): + # TODO: this may cause a StreamClosedError to be raised + # by the connection's Future. Should we cancel the + # connection more gracefully? + self.stream.close() + return True + else: + # If our callback has already been called, we are probably + # catching an exception that is not caused by us but rather + # some child of our callback. Rather than drop it on the floor, + # pass it along, unless it's just the stream being closed. + return isinstance(value, StreamClosedError) + + def on_connection_close(self): + if self.final_callback is not None: + message = "Connection closed" + if self.stream.error: + raise self.stream.error + try: + raise HTTPError(599, message) + except HTTPError: + self._handle_exception(*sys.exc_info()) + + def headers_received(self, first_line, headers): + if self.request.expect_100_continue and first_line.code == 100: + self._write_body(False) + return + self.code = first_line.code + self.reason = first_line.reason + self.headers = headers + + if self._should_follow_redirect(): + return + + if self.request.header_callback is not None: + # Reassemble the start line. 
+ self.request.header_callback('%s %s %s\r\n' % first_line) + for k, v in self.headers.get_all(): + self.request.header_callback("%s: %s\r\n" % (k, v)) + self.request.header_callback('\r\n') + + def _should_follow_redirect(self): + return (self.request.follow_redirects and + self.request.max_redirects > 0 and + self.code in (301, 302, 303, 307)) + + def finish(self): + data = b''.join(self.chunks) + self._remove_timeout() + original_request = getattr(self.request, "original_request", + self.request) + if self._should_follow_redirect(): + assert isinstance(self.request, _RequestProxy) + new_request = copy.copy(self.request.request) + new_request.url = urlparse.urljoin(self.request.url, + self.headers["Location"]) + new_request.max_redirects = self.request.max_redirects - 1 + del new_request.headers["Host"] + # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4 + # Client SHOULD make a GET request after a 303. + # According to the spec, 302 should be followed by the same + # method as the original request, but in practice browsers + # treat 302 the same as 303, and many servers use 302 for + # compatibility with pre-HTTP/1.1 user agents which don't + # understand the 303 status. + if self.code in (302, 303): + new_request.method = "GET" + new_request.body = None + for h in ["Content-Length", "Content-Type", + "Content-Encoding", "Transfer-Encoding"]: + try: + del self.request.headers[h] + except KeyError: + pass + new_request.original_request = original_request + final_callback = self.final_callback + self.final_callback = None + self._release() + self.client.fetch(new_request, final_callback) + self._on_end_request() + return + if self.request.streaming_callback: + buffer = BytesIO() + else: + buffer = BytesIO(data) # TODO: don't require one big string? 
+ response = HTTPResponse(original_request, + self.code, reason=getattr(self, 'reason', None), + headers=self.headers, + request_time=self.io_loop.time() - self.start_time, + buffer=buffer, + effective_url=self.request.url) + self._run_callback(response) + self._on_end_request() + + def _on_end_request(self): + self.stream.close() + + def data_received(self, chunk): + if self._should_follow_redirect(): + # We're going to follow a redirect so just discard the body. + return + if self.request.streaming_callback is not None: + self.request.streaming_callback(chunk) + else: + self.chunks.append(chunk) + + +if __name__ == "__main__": + AsyncHTTPClient.configure(SimpleAsyncHTTPClient) + main() diff --git a/python/tornado/speedups.cp35-win32.pyd b/python/tornado/speedups.cp35-win32.pyd new file mode 100644 index 0000000000000000000000000000000000000000..59834870252a84e3b47c797a20424b9b09293c8e GIT binary patch literal 8704 zcmeHMe{@vUoxhXJBmpL5poyX)4u|kVg>;gag#4IjkPHGQGMF$B1%~{1VJ2i|(wX-( zp|A}*q|Nw9MT=dwgD)+?%vIk8DmCNO=IjBAeD07{`JOUU~K9Qzn;pD zPI_hbF+=4mvuixwkhL`!*c`0$SsUv7{(xky7p+0rZ}s}E6|1VPzCfdxlb${~JFfbz zyEg6EFHC(sQTN=j|MhN;#&^(Z!gNbl6bgn=1{5Tr?*b3O0bn*9rocn*}(ol#{-o*b;yst)0OKT+RwO zu9P!&+c@-_?%&{n(sZMZSoCD>c)oF$vM~L7VzV zZ0Mj#Jq3{QRCMqLO}1!qnmPk>)ec(1+bt~P;V3vx^=fQY?S>c3tl8q}n8PgUfk})t zGj)y$^qo;TRfv8xfMMza=<0HO@E?Y*4IhTZF%~X6Dj)`>)aF$6lT^lfO@!0Nt1BV8 zGb)MdFyhbi<~7jzB#xZ=JrXl!yJpZdM0iLi?6oqBXAc^+eF|o+?TpGds-Iw)P-6A$ zChjicuHjsLB8t?!cVJ7eW9$n})6@zOcD|F*SxJZsik0E;Elir_QWxk;k-nwL=H?{$ zK`B&TReuiKv^O-_>Q$GbByMM!nn*4zprV+7bv*!SLOI)S7((5p6db|+!#*C~(=k?gH9qZH>(kQkj8Cg`zvY_kKvN8cS(4;$tvaun}SF6up2RfsY zsX3|kl^c8Z<}ynpeGj!+yQ#f?7qtal)Ryf;+q5g|2nXqpaBzJG2c~Tt%x&Xfkp!^s z0n;G>#ld>^acJy00NCwF>p4t#=kLZet=FU?*bL0T4idBH5n#Qhy=ckqUei9#rSo?v z-fK3l+Y)8H9fyI_G0|~_=a5LHX%zVJ0j8uQ<68*#9L1W|_uwVv_k8J!Bh95Ow z!r-6Ot=Ku7;^e$&tnnaa9oKWmkR24KanM+LTEc--UK-ALaw=qo^P(GLM+_Tds)G-b zQq`X#{E)rcfFCi$kc>`2!_H_ea#l?JKB&k`q?%7dIZFKwh=ZR%Slx`CTCb19E~>+b 
z56m)yM+T3as!S(^-OnjT)?{k(OiiAlreH*E?S_~d4VB9&>3-PJPjImeQa4F4U^;g9E#ac~NNmFCCeTaWA1;JNVHN19x+H$m~+& zG@K_>_zQ!)O|1YMk#-XTeV#u5cWD4f&@{N#(?=$GK)Tppdu{ZK=wQV`x)-|Tk+tE0 z8=fbNo2|7s_l5hkGkt=IcoD})#4#Fij6M43qtDZLoq!>r9Wj9F-blONY*H5>R$gvS z^9)eh`3dDOq@80`_}*zM#)dG9yy>7FV8<~+=o3s{_4E`Z{HEeAY_UaNg1C8_csCz+ zu2zpC5l||ZZn{{W4QhuxJASf{7zS@p9775dPVOjz=4i$qWI;Orae=(3N|zj+Le&NV zD9Py94lhpk+jmUu0 z!)25(;V{^*Wi3P>R~#ikPd{9sWf?(<@xJ;gJY5?*HUN6;m`bhs-&nA++!#(XMwT1Z z4-;4tu-Gw{4-rhH=f`RnGUPu)1@_3KRHeOgqi~^{=Pl36h$ojbQZNji>JKTNPM9N| zyAT$U?k;K{-bwBL4r>2%8@122p>5jr@FN^_@8w{>#6f2(2amRJ@Pr3o-vgcd02GHY z(zyqnZt5IBrdgbyuC+WQ5nRG=oe@#hj7C%_DB*$=L4~;t?L#Q<( zdQfWJFbBr3K2JNV$;gWF+u4ICvUn(ysIS3IxTp0KFn0tCX5pcQ)xtYVCtYuWK=WjE zH$*5;N+c={F(DdGCsvYh(zP~nJ&oz!ezrQ}XUgaIL{cO786&IGB5TcxN?@1K5O@evytRT$QZdJEm953`yZztIT55Y$EY+Zd@4y-=ic}@qtxWn9boJ= z(cG>l}^Vl(^memGM_(U3~C*n0cWa&0$JG(2g z&D3O4V{v+B&;TiWp_a9c51N4^g_CKCtp(LcA8Hcl$|%&~gVW?D76M$$>Ht}|5I$j4 z`irG(^FPd2-q`UzOz3Dgtqi|3cAHX}ZNa(7yx%-I&z#a_p0%tLV%y$AkoR&eaM<5d zIWdp1U!WeH%h+kuPf?SAUxk{7T8UbZ+J^cNDwRI~*34&YCF)J6X{b~dE?{gHY6tLL zs6RnHjQT3-$EYdbQMn&;+>5#%bvY`P(wT_qzp>C4Dt);o#1n5dlyK-%qhdBHz0aVm z*TMg(rQB3LTmR+Rg}9iY#GqqHm+bbXHI>Arw-f%3lI`E_OK9Bgi8OpBF}7y{Z3UOxJCVla_D!HUx!i$?G%okZ1X>N3 zJ2a8T<#5@vT@34rzx%)+y8Rr_4z^%8ogib21vYV?FKH9OnoTC`Q>;IBR;)lW!rLzwh6So zmucJvqF-6)fq`hJ<1m4Hr|I#Qz|UU6KlfMoFHUEc-=mV=#Q6Sk<6plDd{p97Nt~ZN z_`?7^Q5OJTh-yPUfXBv_fQ*G&MX^zC4dpbn3I&Vq^ZJFnoYwY6=4@Z#_e$;r8%XmC5<~7K zp=v4U^&|MllHJbsCBe;ZXI(HP*2t|bqPyC=Rjgs}8=UP`fkqjGr9rVy5)mJ^OB$91 z13q2H5`QBNV6E+vC*T(favED&7<<69#_y@~H@1k4j%e*a;7)0EgszvE9 zvL`3qE=o(~U{Lf+&S0QHL;(9uHDb``^}~t;iG3$|MJSF0g3Dl_90XGgVHgvPt7S3R z?i7PffuOI>-ykjx$oQxVvMFEIzM_$3jHA|gu-Zlt*;_=dZVySKug2>WmxQnYk$P;p zp~4%gs|Ut~E4}r>x?ua#x|Ws@Ta`>P=B;aiQ>rVyA!#|(u`RH+LadiJZx(|IEAMdJ zb*G~ep~*eAfVtiH>=S*W6l{0<>b!op*MDzdi|EEy(J*%7mnDKCmhHbJF&7dcsnIR9 zw~83u6mU0+At@MWcelV0WDe#NeIXIO0_GO^zPR;W3g){T0zO~B&s8Rh(VWEg8O`|_;TMZvL(zN5MKkG6&t zPLK%r#WtKsiJZyUEzBJhH^Wph=#F!mWPbw%4-zOW78_(qOtADpH!ycYP;w(Qpom=! 
zjCWXFt9OwvM2CM7_>1uW9SK{vs3mt%?xOg@zVftwL;SjV;K^&Aub>wOTo3#nO62~~ zoDgnW#_kjALxF}ZqJ&cu+HyJj6OUxCEVlS}8R=^s_l=g2vw{}Iw;qihqaS+Tl&_Ho z3~UChLG74ml!5=d4WB(u0Ikx#$(cchg z#0^CH(kI*NLLt#t-_mY{0)MD%K5}tMsKFy5 z4j1{n4Z%Pt&?GHFVlAl)`Eu^foo~fua4`2U)O6F3uxG0C>s z_G8;)wr6dx+TOLDv;EnYl$(*eKDRwLocmyIPwuhY3%RHAk_4wvD>MpQ1VwmI_$Oh% z&?7u991(scJSY5Ccv(0jye+&hd@P(7z7SIL&H2;wXXek#FU((-@5*=QpU9UA9xWIt zNG-g!a9&|y;q8UK!hbA`6h2URu<+jt&5N&DT)ueu;=08@UHsJI7Z$&^`0d3XF21U0 zR*_J2YtbD=tBckaH57S^b`?Ea{4ewzS8M

0: + last_ctx -= 1 + c = stack[last_ctx] + + try: + c.exit(*exc) + except: + exc = sys.exc_info() + top = c.old_contexts[1] + break + else: + top = None + + # If an exception happened while unrolling, take longer exception handler path + if top is not None: + exc = _handle_exception(top, exc) + + # If exception was not handled, raise it + if exc != (None, None, None): + raise_exc_info(exc) + finally: + _state.contexts = current_state + return ret + + wrapped._wrapped = True + return wrapped + + +def _handle_exception(tail, exc): + while tail is not None: + try: + if tail.exit(*exc): + exc = (None, None, None) + except: + exc = sys.exc_info() + + tail = tail.old_contexts[1] + + return exc + + +def run_with_stack_context(context, func): + """Run a coroutine ``func`` in the given `StackContext`. + + It is not safe to have a ``yield`` statement within a ``with StackContext`` + block, so it is difficult to use stack context with `.gen.coroutine`. + This helper function runs the function in the correct context while + keeping the ``yield`` and ``with`` statements syntactically separate. + + Example:: + + @gen.coroutine + def incorrect(): + with StackContext(ctx): + # ERROR: this will raise StackContextInconsistentError + yield other_coroutine() + + @gen.coroutine + def correct(): + yield run_with_stack_context(StackContext(ctx), other_coroutine) + + .. versionadded:: 3.1 + """ + with context: + return func() diff --git a/python/tornado/tcpclient.py b/python/tornado/tcpclient.py new file mode 100644 index 000000000..f594d91b8 --- /dev/null +++ b/python/tornado/tcpclient.py @@ -0,0 +1,183 @@ +#!/usr/bin/env python +# +# Copyright 2014 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""A non-blocking TCP connection factory. +""" +from __future__ import absolute_import, division, print_function, with_statement + +import functools +import socket + +from tornado.concurrent import Future +from tornado.ioloop import IOLoop +from tornado.iostream import IOStream +from tornado import gen +from tornado.netutil import Resolver + +_INITIAL_CONNECT_TIMEOUT = 0.3 + + +class _Connector(object): + """A stateless implementation of the "Happy Eyeballs" algorithm. + + "Happy Eyeballs" is documented in RFC6555 as the recommended practice + for when both IPv4 and IPv6 addresses are available. + + In this implementation, we partition the addresses by family, and + make the first connection attempt to whichever address was + returned first by ``getaddrinfo``. If that connection fails or + times out, we begin a connection in parallel to the first address + of the other family. If there are additional failures we retry + with other addresses, keeping one connection attempt per family + in flight at a time. + + http://tools.ietf.org/html/rfc6555 + + """ + def __init__(self, addrinfo, io_loop, connect): + self.io_loop = io_loop + self.connect = connect + + self.future = Future() + self.timeout = None + self.last_error = None + self.remaining = len(addrinfo) + self.primary_addrs, self.secondary_addrs = self.split(addrinfo) + + @staticmethod + def split(addrinfo): + """Partition the ``addrinfo`` list by address family. + + Returns two lists. 
The first list contains the first entry from + ``addrinfo`` and all others with the same family, and the + second list contains all other addresses (normally one list will + be AF_INET and the other AF_INET6, although non-standard resolvers + may return additional families). + """ + primary = [] + secondary = [] + primary_af = addrinfo[0][0] + for af, addr in addrinfo: + if af == primary_af: + primary.append((af, addr)) + else: + secondary.append((af, addr)) + return primary, secondary + + def start(self, timeout=_INITIAL_CONNECT_TIMEOUT): + self.try_connect(iter(self.primary_addrs)) + self.set_timout(timeout) + return self.future + + def try_connect(self, addrs): + try: + af, addr = next(addrs) + except StopIteration: + # We've reached the end of our queue, but the other queue + # might still be working. Send a final error on the future + # only when both queues are finished. + if self.remaining == 0 and not self.future.done(): + self.future.set_exception(self.last_error or + IOError("connection failed")) + return + future = self.connect(af, addr) + future.add_done_callback(functools.partial(self.on_connect_done, + addrs, af, addr)) + + def on_connect_done(self, addrs, af, addr, future): + self.remaining -= 1 + try: + stream = future.result() + except Exception as e: + if self.future.done(): + return + # Error: try again (but remember what happened so we have an + # error to raise in the end) + self.last_error = e + self.try_connect(addrs) + if self.timeout is not None: + # If the first attempt failed, don't wait for the + # timeout to try an address from the secondary queue. + self.io_loop.remove_timeout(self.timeout) + self.on_timeout() + return + self.clear_timeout() + if self.future.done(): + # This is a late arrival; just drop it. 
+ stream.close() + else: + self.future.set_result((af, addr, stream)) + + def set_timout(self, timeout): + self.timeout = self.io_loop.add_timeout(self.io_loop.time() + timeout, + self.on_timeout) + + def on_timeout(self): + self.timeout = None + self.try_connect(iter(self.secondary_addrs)) + + def clear_timeout(self): + if self.timeout is not None: + self.io_loop.remove_timeout(self.timeout) + + +class TCPClient(object): + """A non-blocking TCP connection factory. + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. + """ + def __init__(self, resolver=None, io_loop=None): + self.io_loop = io_loop or IOLoop.current() + if resolver is not None: + self.resolver = resolver + self._own_resolver = False + else: + self.resolver = Resolver(io_loop=io_loop) + self._own_resolver = True + + def close(self): + if self._own_resolver: + self.resolver.close() + + @gen.coroutine + def connect(self, host, port, af=socket.AF_UNSPEC, ssl_options=None, + max_buffer_size=None): + """Connect to the given host and port. + + Asynchronously returns an `.IOStream` (or `.SSLIOStream` if + ``ssl_options`` is not None). + """ + addrinfo = yield self.resolver.resolve(host, port, af) + connector = _Connector( + addrinfo, self.io_loop, + functools.partial(self._create_stream, max_buffer_size)) + af, addr, stream = yield connector.start() + # TODO: For better performance we could cache the (af, addr) + # information here and re-use it on subsequent connections to + # the same host. (http://tools.ietf.org/html/rfc6555#section-4.2) + if ssl_options is not None: + stream = yield stream.start_tls(False, ssl_options=ssl_options, + server_hostname=host) + raise gen.Return(stream) + + def _create_stream(self, max_buffer_size, af, addr): + # Always connect in plaintext; we'll convert to ssl if necessary + # after one connection has completed. 
+ stream = IOStream(socket.socket(af), + io_loop=self.io_loop, + max_buffer_size=max_buffer_size) + return stream.connect(addr) diff --git a/python/tornado/tcpserver.py b/python/tornado/tcpserver.py new file mode 100644 index 000000000..c9d148a80 --- /dev/null +++ b/python/tornado/tcpserver.py @@ -0,0 +1,273 @@ +#!/usr/bin/env python +# +# Copyright 2011 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""A non-blocking, single-threaded TCP server.""" +from __future__ import absolute_import, division, print_function, with_statement + +import errno +import os +import socket + +from tornado.log import app_log +from tornado.ioloop import IOLoop +from tornado.iostream import IOStream, SSLIOStream +from tornado.netutil import bind_sockets, add_accept_handler, ssl_wrap_socket +from tornado import process +from tornado.util import errno_from_exception + +try: + import ssl +except ImportError: + # ssl is not available on Google App Engine. + ssl = None + + +class TCPServer(object): + r"""A non-blocking, single-threaded TCP server. + + To use `TCPServer`, define a subclass which overrides the `handle_stream` + method. + + To make this server serve SSL traffic, send the ``ssl_options`` keyword + argument with an `ssl.SSLContext` object. 
For compatibility with older + versions of Python ``ssl_options`` may also be a dictionary of keyword + arguments for the `ssl.wrap_socket` method.:: + + ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) + ssl_ctx.load_cert_chain(os.path.join(data_dir, "mydomain.crt"), + os.path.join(data_dir, "mydomain.key")) + TCPServer(ssl_options=ssl_ctx) + + `TCPServer` initialization follows one of three patterns: + + 1. `listen`: simple single-process:: + + server = TCPServer() + server.listen(8888) + IOLoop.current().start() + + 2. `bind`/`start`: simple multi-process:: + + server = TCPServer() + server.bind(8888) + server.start(0) # Forks multiple sub-processes + IOLoop.current().start() + + When using this interface, an `.IOLoop` must *not* be passed + to the `TCPServer` constructor. `start` will always start + the server on the default singleton `.IOLoop`. + + 3. `add_sockets`: advanced multi-process:: + + sockets = bind_sockets(8888) + tornado.process.fork_processes(0) + server = TCPServer() + server.add_sockets(sockets) + IOLoop.current().start() + + The `add_sockets` interface is more complicated, but it can be + used with `tornado.process.fork_processes` to give you more + flexibility in when the fork happens. `add_sockets` can + also be used in single-process servers if you want to create + your listening sockets in some way other than + `~tornado.netutil.bind_sockets`. + + .. versionadded:: 3.1 + The ``max_buffer_size`` argument. + """ + def __init__(self, io_loop=None, ssl_options=None, max_buffer_size=None, + read_chunk_size=None): + self.io_loop = io_loop + self.ssl_options = ssl_options + self._sockets = {} # fd -> socket object + self._pending_sockets = [] + self._started = False + self.max_buffer_size = max_buffer_size + self.read_chunk_size = read_chunk_size + + # Verify the SSL options. Otherwise we don't get errors until clients + # connect. 
This doesn't verify that the keys are legitimate, but + # the SSL module doesn't do that until there is a connected socket + # which seems like too much work + if self.ssl_options is not None and isinstance(self.ssl_options, dict): + # Only certfile is required: it can contain both keys + if 'certfile' not in self.ssl_options: + raise KeyError('missing key "certfile" in ssl_options') + + if not os.path.exists(self.ssl_options['certfile']): + raise ValueError('certfile "%s" does not exist' % + self.ssl_options['certfile']) + if ('keyfile' in self.ssl_options and + not os.path.exists(self.ssl_options['keyfile'])): + raise ValueError('keyfile "%s" does not exist' % + self.ssl_options['keyfile']) + + def listen(self, port, address=""): + """Starts accepting connections on the given port. + + This method may be called more than once to listen on multiple ports. + `listen` takes effect immediately; it is not necessary to call + `TCPServer.start` afterwards. It is, however, necessary to start + the `.IOLoop`. + """ + sockets = bind_sockets(port, address=address) + self.add_sockets(sockets) + + def add_sockets(self, sockets): + """Makes this server start accepting connections on the given sockets. + + The ``sockets`` parameter is a list of socket objects such as + those returned by `~tornado.netutil.bind_sockets`. + `add_sockets` is typically used in combination with that + method and `tornado.process.fork_processes` to provide greater + control over the initialization of a multi-process server. + """ + if self.io_loop is None: + self.io_loop = IOLoop.current() + + for sock in sockets: + self._sockets[sock.fileno()] = sock + add_accept_handler(sock, self._handle_connection, + io_loop=self.io_loop) + + def add_socket(self, socket): + """Singular version of `add_sockets`. Takes a single socket object.""" + self.add_sockets([socket]) + + def bind(self, port, address=None, family=socket.AF_UNSPEC, backlog=128): + """Binds this server to the given port on the given address. 
+ + To start the server, call `start`. If you want to run this server + in a single process, you can call `listen` as a shortcut to the + sequence of `bind` and `start` calls. + + Address may be either an IP address or hostname. If it's a hostname, + the server will listen on all IP addresses associated with the + name. Address may be an empty string or None to listen on all + available interfaces. Family may be set to either `socket.AF_INET` + or `socket.AF_INET6` to restrict to IPv4 or IPv6 addresses, otherwise + both will be used if available. + + The ``backlog`` argument has the same meaning as for + `socket.listen `. + + This method may be called multiple times prior to `start` to listen + on multiple ports or interfaces. + """ + sockets = bind_sockets(port, address=address, family=family, + backlog=backlog) + if self._started: + self.add_sockets(sockets) + else: + self._pending_sockets.extend(sockets) + + def start(self, num_processes=1): + """Starts this server in the `.IOLoop`. + + By default, we run the server in this process and do not fork any + additional child process. + + If num_processes is ``None`` or <= 0, we detect the number of cores + available on this machine and fork that number of child + processes. If num_processes is given and > 1, we fork that + specific number of sub-processes. + + Since we use processes and not threads, there is no shared memory + between any server code. + + Note that multiple processes are not compatible with the autoreload + module (or the ``autoreload=True`` option to `tornado.web.Application` + which defaults to True when ``debug=True``). + When using multiple processes, no IOLoops can be created or + referenced until after the call to ``TCPServer.start(n)``. 
+ """ + assert not self._started + self._started = True + if num_processes != 1: + process.fork_processes(num_processes) + sockets = self._pending_sockets + self._pending_sockets = [] + self.add_sockets(sockets) + + def stop(self): + """Stops listening for new connections. + + Requests currently in progress may still continue after the + server is stopped. + """ + for fd, sock in self._sockets.items(): + self.io_loop.remove_handler(fd) + sock.close() + + def handle_stream(self, stream, address): + """Override to handle a new `.IOStream` from an incoming connection. + + This method may be a coroutine; if so any exceptions it raises + asynchronously will be logged. Accepting of incoming connections + will not be blocked by this coroutine. + + If this `TCPServer` is configured for SSL, ``handle_stream`` + may be called before the SSL handshake has completed. Use + `.SSLIOStream.wait_for_handshake` if you need to verify the client's + certificate or use NPN/ALPN. + + .. versionchanged:: 4.2 + Added the option for this method to be a coroutine. + """ + raise NotImplementedError() + + def _handle_connection(self, connection, address): + if self.ssl_options is not None: + assert ssl, "Python 2.6+ and OpenSSL required for SSL" + try: + connection = ssl_wrap_socket(connection, + self.ssl_options, + server_side=True, + do_handshake_on_connect=False) + except ssl.SSLError as err: + if err.args[0] == ssl.SSL_ERROR_EOF: + return connection.close() + else: + raise + except socket.error as err: + # If the connection is closed immediately after it is created + # (as in a port scan), we can get one of several errors. + # wrap_socket makes an internal call to getpeername, + # which may return either EINVAL (Mac OS X) or ENOTCONN + # (Linux). If it returns ENOTCONN, this error is + # silently swallowed by the ssl module, so we need to + # catch another error later on (AttributeError in + # SSLIOStream._do_ssl_handshake). + # To test this behavior, try nmap with the -sT flag. 
+ # https://github.com/tornadoweb/tornado/pull/750 + if errno_from_exception(err) in (errno.ECONNABORTED, errno.EINVAL): + return connection.close() + else: + raise + try: + if self.ssl_options is not None: + stream = SSLIOStream(connection, io_loop=self.io_loop, + max_buffer_size=self.max_buffer_size, + read_chunk_size=self.read_chunk_size) + else: + stream = IOStream(connection, io_loop=self.io_loop, + max_buffer_size=self.max_buffer_size, + read_chunk_size=self.read_chunk_size) + future = self.handle_stream(stream, address) + if future is not None: + self.io_loop.add_future(future, lambda f: f.result()) + except Exception: + app_log.error("Error in connection callback", exc_info=True) diff --git a/python/tornado/template.py b/python/tornado/template.py new file mode 100644 index 000000000..fa588991e --- /dev/null +++ b/python/tornado/template.py @@ -0,0 +1,975 @@ +#!/usr/bin/env python +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""A simple template system that compiles templates to Python code. + +Basic usage looks like:: + + t = template.Template("{{ myvalue }}") + print t.generate(myvalue="XXX") + +`Loader` is a class that loads templates from a root directory and caches +the compiled templates:: + + loader = template.Loader("/home/btaylor") + print loader.load("test.html").generate(myvalue="XXX") + +We compile all templates to raw Python. Error-reporting is currently... uh, +interesting. 
Syntax for the templates::

+    ### base.html
+    <html>
+      <head>
+        <title>{% block title %}Default title{% end %}</title>
+      </head>
+      <body>
+        <ul>
+          {% for student in students %}
+            {% block student %}
+              <li>{{ escape(student.name) }}</li>
+            {% end %}
+          {% end %}
+        </ul>
+      </body>
+    </html>
+
+ + + + ### bold.html + {% extends "base.html" %} + + {% block title %}A bolder title{% end %} + + {% block student %} +
+      <li><span style="bold">{{ escape(student.name) }}</span></li>
+    {% end %}
+
+Unlike most other template systems, we do not put any restrictions on the
+expressions you can include in your statements. ``if`` and ``for`` blocks get
+translated exactly into Python, so you can do complex expressions like::
+
+    {% for student in [p for p in people if p.student and p.age > 23] %}
+      <li>{{ escape(student.name) }}</li>
  • + {% end %} + +Translating directly to Python means you can apply functions to expressions +easily, like the ``escape()`` function in the examples above. You can pass +functions in to your template just like any other variable +(In a `.RequestHandler`, override `.RequestHandler.get_template_namespace`):: + + ### Python code + def add(x, y): + return x + y + template.execute(add=add) + + ### The template + {{ add(1, 2) }} + +We provide the functions `escape() <.xhtml_escape>`, `.url_escape()`, +`.json_encode()`, and `.squeeze()` to all templates by default. + +Typical applications do not create `Template` or `Loader` instances by +hand, but instead use the `~.RequestHandler.render` and +`~.RequestHandler.render_string` methods of +`tornado.web.RequestHandler`, which load templates automatically based +on the ``template_path`` `.Application` setting. + +Variable names beginning with ``_tt_`` are reserved by the template +system and should not be used by application code. + +Syntax Reference +---------------- + +Template expressions are surrounded by double curly braces: ``{{ ... }}``. +The contents may be any python expression, which will be escaped according +to the current autoescape setting and inserted into the output. Other +template directives use ``{% %}``. These tags may be escaped as ``{{!`` +and ``{%!`` if you need to include a literal ``{{`` or ``{%`` in the output. + +To comment out a section so that it is omitted from the output, surround it +with ``{# ... #}``. + +``{% apply *function* %}...{% end %}`` + Applies a function to the output of all template code between ``apply`` + and ``end``:: + + {% apply linkify %}{{name}} said: {{message}}{% end %} + + Note that as an implementation detail apply blocks are implemented + as nested functions and thus may interact strangely with variables + set via ``{% set %}``, or the use of ``{% break %}`` or ``{% continue %}`` + within loops. 
+ +``{% autoescape *function* %}`` + Sets the autoescape mode for the current file. This does not affect + other files, even those referenced by ``{% include %}``. Note that + autoescaping can also be configured globally, at the `.Application` + or `Loader`.:: + + {% autoescape xhtml_escape %} + {% autoescape None %} + +``{% block *name* %}...{% end %}`` + Indicates a named, replaceable block for use with ``{% extends %}``. + Blocks in the parent template will be replaced with the contents of + the same-named block in a child template.:: + + + {% block title %}Default title{% end %} + + + {% extends "base.html" %} + {% block title %}My page title{% end %} + +``{% comment ... %}`` + A comment which will be removed from the template output. Note that + there is no ``{% end %}`` tag; the comment goes from the word ``comment`` + to the closing ``%}`` tag. + +``{% extends *filename* %}`` + Inherit from another template. Templates that use ``extends`` should + contain one or more ``block`` tags to replace content from the parent + template. Anything in the child template not contained in a ``block`` + tag will be ignored. For an example, see the ``{% block %}`` tag. + +``{% for *var* in *expr* %}...{% end %}`` + Same as the python ``for`` statement. ``{% break %}`` and + ``{% continue %}`` may be used inside the loop. + +``{% from *x* import *y* %}`` + Same as the python ``import`` statement. + +``{% if *condition* %}...{% elif *condition* %}...{% else %}...{% end %}`` + Conditional statement - outputs the first section whose condition is + true. (The ``elif`` and ``else`` sections are optional) + +``{% import *module* %}`` + Same as the python ``import`` statement. + +``{% include *filename* %}`` + Includes another template file. The included file can see all the local + variables as if it were copied directly to the point of the ``include`` + directive (the ``{% autoescape %}`` directive is an exception). 
+ Alternately, ``{% module Template(filename, **kwargs) %}`` may be used + to include another template with an isolated namespace. + +``{% module *expr* %}`` + Renders a `~tornado.web.UIModule`. The output of the ``UIModule`` is + not escaped:: + + {% module Template("foo.html", arg=42) %} + + ``UIModules`` are a feature of the `tornado.web.RequestHandler` + class (and specifically its ``render`` method) and will not work + when the template system is used on its own in other contexts. + +``{% raw *expr* %}`` + Outputs the result of the given expression without autoescaping. + +``{% set *x* = *y* %}`` + Sets a local variable. + +``{% try %}...{% except %}...{% else %}...{% finally %}...{% end %}`` + Same as the python ``try`` statement. + +``{% while *condition* %}... {% end %}`` + Same as the python ``while`` statement. ``{% break %}`` and + ``{% continue %}`` may be used inside the loop. + +``{% whitespace *mode* %}`` + Sets the whitespace mode for the remainder of the current file + (or until the next ``{% whitespace %}`` directive). See + `filter_whitespace` for available options. New in Tornado 4.3. +""" + +from __future__ import absolute_import, division, print_function, with_statement + +import datetime +import linecache +import os.path +import posixpath +import re +import threading + +from tornado import escape +from tornado.log import app_log +from tornado.util import ObjectDict, exec_in, unicode_type + +try: + from cStringIO import StringIO # py2 +except ImportError: + from io import StringIO # py3 + +_DEFAULT_AUTOESCAPE = "xhtml_escape" +_UNSET = object() + + +def filter_whitespace(mode, text): + """Transform whitespace in ``text`` according to ``mode``. + + Available modes are: + + * ``all``: Return all whitespace unmodified. + * ``single``: Collapse consecutive whitespace with a single whitespace + character, preserving newlines. + * ``oneline``: Collapse all runs of whitespace into a single space + character, removing all newlines in the process. 
+ + .. versionadded:: 4.3 + """ + if mode == 'all': + return text + elif mode == 'single': + text = re.sub(r"([\t ]+)", " ", text) + text = re.sub(r"(\s*\n\s*)", "\n", text) + return text + elif mode == 'oneline': + return re.sub(r"(\s+)", " ", text) + else: + raise Exception("invalid whitespace mode %s" % mode) + + +class Template(object): + """A compiled template. + + We compile into Python from the given template_string. You can generate + the template from variables with generate(). + """ + # note that the constructor's signature is not extracted with + # autodoc because _UNSET looks like garbage. When changing + # this signature update website/sphinx/template.rst too. + def __init__(self, template_string, name="", loader=None, + compress_whitespace=_UNSET, autoescape=_UNSET, + whitespace=None): + """Construct a Template. + + :arg str template_string: the contents of the template file. + :arg str name: the filename from which the template was loaded + (used for error message). + :arg tornado.template.BaseLoader loader: the `~tornado.template.BaseLoader` responsible for this template, + used to resolve ``{% include %}`` and ``{% extend %}`` + directives. + :arg bool compress_whitespace: Deprecated since Tornado 4.3. + Equivalent to ``whitespace="single"`` if true and + ``whitespace="all"`` if false. + :arg str autoescape: The name of a function in the template + namespace, or ``None`` to disable escaping by default. + :arg str whitespace: A string specifying treatment of whitespace; + see `filter_whitespace` for options. + + .. versionchanged:: 4.3 + Added ``whitespace`` parameter; deprecated ``compress_whitespace``. + """ + self.name = escape.native_str(name) + + if compress_whitespace is not _UNSET: + # Convert deprecated compress_whitespace (bool) to whitespace (str). 
+ if whitespace is not None: + raise Exception("cannot set both whitespace and compress_whitespace") + whitespace = "single" if compress_whitespace else "all" + if whitespace is None: + if loader and loader.whitespace: + whitespace = loader.whitespace + else: + # Whitespace defaults by filename. + if name.endswith(".html") or name.endswith(".js"): + whitespace = "single" + else: + whitespace = "all" + # Validate the whitespace setting. + filter_whitespace(whitespace, '') + + if autoescape is not _UNSET: + self.autoescape = autoescape + elif loader: + self.autoescape = loader.autoescape + else: + self.autoescape = _DEFAULT_AUTOESCAPE + + self.namespace = loader.namespace if loader else {} + reader = _TemplateReader(name, escape.native_str(template_string), + whitespace) + self.file = _File(self, _parse(reader, self)) + self.code = self._generate_python(loader) + self.loader = loader + try: + # Under python2.5, the fake filename used here must match + # the module name used in __name__ below. + # The dont_inherit flag prevents template.py's future imports + # from being applied to the generated code. + self.compiled = compile( + escape.to_unicode(self.code), + "%s.generated.py" % self.name.replace('.', '_'), + "exec", dont_inherit=True) + except Exception: + formatted_code = _format_code(self.code).rstrip() + app_log.error("%s code:\n%s", self.name, formatted_code) + raise + + def generate(self, **kwargs): + """Generate this template with the given arguments.""" + namespace = { + "escape": escape.xhtml_escape, + "xhtml_escape": escape.xhtml_escape, + "url_escape": escape.url_escape, + "json_encode": escape.json_encode, + "squeeze": escape.squeeze, + "linkify": escape.linkify, + "datetime": datetime, + "_tt_utf8": escape.utf8, # for internal use + "_tt_string_types": (unicode_type, bytes), + # __name__ and __loader__ allow the traceback mechanism to find + # the generated source code. 
+ "__name__": self.name.replace('.', '_'), + "__loader__": ObjectDict(get_source=lambda name: self.code), + } + namespace.update(self.namespace) + namespace.update(kwargs) + exec_in(self.compiled, namespace) + execute = namespace["_tt_execute"] + # Clear the traceback module's cache of source data now that + # we've generated a new template (mainly for this module's + # unittests, where different tests reuse the same name). + linecache.clearcache() + return execute() + + def _generate_python(self, loader): + buffer = StringIO() + try: + # named_blocks maps from names to _NamedBlock objects + named_blocks = {} + ancestors = self._get_ancestors(loader) + ancestors.reverse() + for ancestor in ancestors: + ancestor.find_named_blocks(loader, named_blocks) + writer = _CodeWriter(buffer, named_blocks, loader, + ancestors[0].template) + ancestors[0].generate(writer) + return buffer.getvalue() + finally: + buffer.close() + + def _get_ancestors(self, loader): + ancestors = [self.file] + for chunk in self.file.body.chunks: + if isinstance(chunk, _ExtendsBlock): + if not loader: + raise ParseError("{% extends %} block found, but no " + "template loader") + template = loader.load(chunk.name, self.name) + ancestors.extend(template._get_ancestors(loader)) + return ancestors + + +class BaseLoader(object): + """Base class for template loaders. + + You must use a template loader to use template constructs like + ``{% extends %}`` and ``{% include %}``. The loader caches all + templates after they are loaded the first time. + """ + def __init__(self, autoescape=_DEFAULT_AUTOESCAPE, namespace=None, + whitespace=None): + """Construct a template loader. + + :arg str autoescape: The name of a function in the template + namespace, such as "xhtml_escape", or ``None`` to disable + autoescaping by default. + :arg dict namespace: A dictionary to be added to the default template + namespace, or ``None``. 
+ :arg str whitespace: A string specifying default behavior for + whitespace in templates; see `filter_whitespace` for options. + Default is "single" for files ending in ".html" and ".js" and + "all" for other files. + + .. versionchanged:: 4.3 + Added ``whitespace`` parameter. + """ + self.autoescape = autoescape + self.namespace = namespace or {} + self.whitespace = whitespace + self.templates = {} + # self.lock protects self.templates. It's a reentrant lock + # because templates may load other templates via `include` or + # `extends`. Note that thanks to the GIL this code would be safe + # even without the lock, but could lead to wasted work as multiple + # threads tried to compile the same template simultaneously. + self.lock = threading.RLock() + + def reset(self): + """Resets the cache of compiled templates.""" + with self.lock: + self.templates = {} + + def resolve_path(self, name, parent_path=None): + """Converts a possibly-relative path to absolute (used internally).""" + raise NotImplementedError() + + def load(self, name, parent_path=None): + """Loads a template.""" + name = self.resolve_path(name, parent_path=parent_path) + with self.lock: + if name not in self.templates: + self.templates[name] = self._create_template(name) + return self.templates[name] + + def _create_template(self, name): + raise NotImplementedError() + + +class Loader(BaseLoader): + """A template loader that loads from a single root directory. 
+ """ + def __init__(self, root_directory, **kwargs): + super(Loader, self).__init__(**kwargs) + self.root = os.path.abspath(root_directory) + + def resolve_path(self, name, parent_path=None): + if parent_path and not parent_path.startswith("<") and \ + not parent_path.startswith("/") and \ + not name.startswith("/"): + current_path = os.path.join(self.root, parent_path) + file_dir = os.path.dirname(os.path.abspath(current_path)) + relative_path = os.path.abspath(os.path.join(file_dir, name)) + if relative_path.startswith(self.root): + name = relative_path[len(self.root) + 1:] + return name + + def _create_template(self, name): + path = os.path.join(self.root, name) + with open(path, "rb") as f: + template = Template(f.read(), name=name, loader=self) + return template + + +class DictLoader(BaseLoader): + """A template loader that loads from a dictionary.""" + def __init__(self, dict, **kwargs): + super(DictLoader, self).__init__(**kwargs) + self.dict = dict + + def resolve_path(self, name, parent_path=None): + if parent_path and not parent_path.startswith("<") and \ + not parent_path.startswith("/") and \ + not name.startswith("/"): + file_dir = posixpath.dirname(parent_path) + name = posixpath.normpath(posixpath.join(file_dir, name)) + return name + + def _create_template(self, name): + return Template(self.dict[name], name=name, loader=self) + + +class _Node(object): + def each_child(self): + return () + + def generate(self, writer): + raise NotImplementedError() + + def find_named_blocks(self, loader, named_blocks): + for child in self.each_child(): + child.find_named_blocks(loader, named_blocks) + + +class _File(_Node): + def __init__(self, template, body): + self.template = template + self.body = body + self.line = 0 + + def generate(self, writer): + writer.write_line("def _tt_execute():", self.line) + with writer.indent(): + writer.write_line("_tt_buffer = []", self.line) + writer.write_line("_tt_append = _tt_buffer.append", self.line) + 
self.body.generate(writer) + writer.write_line("return _tt_utf8('').join(_tt_buffer)", self.line) + + def each_child(self): + return (self.body,) + + +class _ChunkList(_Node): + def __init__(self, chunks): + self.chunks = chunks + + def generate(self, writer): + for chunk in self.chunks: + chunk.generate(writer) + + def each_child(self): + return self.chunks + + +class _NamedBlock(_Node): + def __init__(self, name, body, template, line): + self.name = name + self.body = body + self.template = template + self.line = line + + def each_child(self): + return (self.body,) + + def generate(self, writer): + block = writer.named_blocks[self.name] + with writer.include(block.template, self.line): + block.body.generate(writer) + + def find_named_blocks(self, loader, named_blocks): + named_blocks[self.name] = self + _Node.find_named_blocks(self, loader, named_blocks) + + +class _ExtendsBlock(_Node): + def __init__(self, name): + self.name = name + + +class _IncludeBlock(_Node): + def __init__(self, name, reader, line): + self.name = name + self.template_name = reader.name + self.line = line + + def find_named_blocks(self, loader, named_blocks): + included = loader.load(self.name, self.template_name) + included.file.find_named_blocks(loader, named_blocks) + + def generate(self, writer): + included = writer.loader.load(self.name, self.template_name) + with writer.include(included, self.line): + included.file.body.generate(writer) + + +class _ApplyBlock(_Node): + def __init__(self, method, line, body=None): + self.method = method + self.line = line + self.body = body + + def each_child(self): + return (self.body,) + + def generate(self, writer): + method_name = "_tt_apply%d" % writer.apply_counter + writer.apply_counter += 1 + writer.write_line("def %s():" % method_name, self.line) + with writer.indent(): + writer.write_line("_tt_buffer = []", self.line) + writer.write_line("_tt_append = _tt_buffer.append", self.line) + self.body.generate(writer) + writer.write_line("return 
_tt_utf8('').join(_tt_buffer)", self.line) + writer.write_line("_tt_append(_tt_utf8(%s(%s())))" % ( + self.method, method_name), self.line) + + +class _ControlBlock(_Node): + def __init__(self, statement, line, body=None): + self.statement = statement + self.line = line + self.body = body + + def each_child(self): + return (self.body,) + + def generate(self, writer): + writer.write_line("%s:" % self.statement, self.line) + with writer.indent(): + self.body.generate(writer) + # Just in case the body was empty + writer.write_line("pass", self.line) + + +class _IntermediateControlBlock(_Node): + def __init__(self, statement, line): + self.statement = statement + self.line = line + + def generate(self, writer): + # In case the previous block was empty + writer.write_line("pass", self.line) + writer.write_line("%s:" % self.statement, self.line, writer.indent_size() - 1) + + +class _Statement(_Node): + def __init__(self, statement, line): + self.statement = statement + self.line = line + + def generate(self, writer): + writer.write_line(self.statement, self.line) + + +class _Expression(_Node): + def __init__(self, expression, line, raw=False): + self.expression = expression + self.line = line + self.raw = raw + + def generate(self, writer): + writer.write_line("_tt_tmp = %s" % self.expression, self.line) + writer.write_line("if isinstance(_tt_tmp, _tt_string_types):" + " _tt_tmp = _tt_utf8(_tt_tmp)", self.line) + writer.write_line("else: _tt_tmp = _tt_utf8(str(_tt_tmp))", self.line) + if not self.raw and writer.current_template.autoescape is not None: + # In python3 functions like xhtml_escape return unicode, + # so we have to convert to utf8 again. + writer.write_line("_tt_tmp = _tt_utf8(%s(_tt_tmp))" % + writer.current_template.autoescape, self.line) + writer.write_line("_tt_append(_tt_tmp)", self.line) + + +class _Module(_Expression): + def __init__(self, expression, line): + super(_Module, self).__init__("_tt_modules." 
+ expression, line, + raw=True) + + +class _Text(_Node): + def __init__(self, value, line, whitespace): + self.value = value + self.line = line + self.whitespace = whitespace + + def generate(self, writer): + value = self.value + + # Compress whitespace if requested, with a crude heuristic to avoid + # altering preformatted whitespace. + if "
    " not in value:
    +            value = filter_whitespace(self.whitespace, value)
    +
    +        if value:
    +            writer.write_line('_tt_append(%r)' % escape.utf8(value), self.line)
    +
    +
    +class ParseError(Exception):
    +    """Raised for template syntax errors.
    +
    +    ``ParseError`` instances have ``filename`` and ``lineno`` attributes
    +    indicating the position of the error.
    +
    +    .. versionchanged:: 4.3
    +       Added ``filename`` and ``lineno`` attributes.
    +    """
    +    def __init__(self, message, filename, lineno):
    +        self.message = message
    +        # The names "filename" and "lineno" are chosen for consistency
    +        # with python SyntaxError.
    +        self.filename = filename
    +        self.lineno = lineno
    +
    +    def __str__(self):
    +        return '%s at %s:%d' % (self.message, self.filename, self.lineno)
    +
    +
    +class _CodeWriter(object):
    +    def __init__(self, file, named_blocks, loader, current_template):
    +        self.file = file
    +        self.named_blocks = named_blocks
    +        self.loader = loader
    +        self.current_template = current_template
    +        self.apply_counter = 0
    +        self.include_stack = []
    +        self._indent = 0
    +
    +    def indent_size(self):
    +        return self._indent
    +
    +    def indent(self):
    +        class Indenter(object):
    +            def __enter__(_):
    +                self._indent += 1
    +                return self
    +
    +            def __exit__(_, *args):
    +                assert self._indent > 0
    +                self._indent -= 1
    +
    +        return Indenter()
    +
    +    def include(self, template, line):
    +        self.include_stack.append((self.current_template, line))
    +        self.current_template = template
    +
    +        class IncludeTemplate(object):
    +            def __enter__(_):
    +                return self
    +
    +            def __exit__(_, *args):
    +                self.current_template = self.include_stack.pop()[0]
    +
    +        return IncludeTemplate()
    +
    +    def write_line(self, line, line_number, indent=None):
    +        if indent is None:
    +            indent = self._indent
    +        line_comment = '  # %s:%d' % (self.current_template.name, line_number)
    +        if self.include_stack:
    +            ancestors = ["%s:%d" % (tmpl.name, lineno)
    +                         for (tmpl, lineno) in self.include_stack]
    +            line_comment += ' (via %s)' % ', '.join(reversed(ancestors))
    +        print("    " * indent + line + line_comment, file=self.file)
    +
    +
    +class _TemplateReader(object):
    +    def __init__(self, name, text, whitespace):
    +        self.name = name
    +        self.text = text
    +        self.whitespace = whitespace
    +        self.line = 1
    +        self.pos = 0
    +
    +    def find(self, needle, start=0, end=None):
    +        assert start >= 0, start
    +        pos = self.pos
    +        start += pos
    +        if end is None:
    +            index = self.text.find(needle, start)
    +        else:
    +            end += pos
    +            assert end >= start
    +            index = self.text.find(needle, start, end)
    +        if index != -1:
    +            index -= pos
    +        return index
    +
    +    def consume(self, count=None):
    +        if count is None:
    +            count = len(self.text) - self.pos
    +        newpos = self.pos + count
    +        self.line += self.text.count("\n", self.pos, newpos)
    +        s = self.text[self.pos:newpos]
    +        self.pos = newpos
    +        return s
    +
    +    def remaining(self):
    +        return len(self.text) - self.pos
    +
    +    def __len__(self):
    +        return self.remaining()
    +
    +    def __getitem__(self, key):
    +        if type(key) is slice:
    +            size = len(self)
    +            start, stop, step = key.indices(size)
    +            if start is None:
    +                start = self.pos
    +            else:
    +                start += self.pos
    +            if stop is not None:
    +                stop += self.pos
    +            return self.text[slice(start, stop, step)]
    +        elif key < 0:
    +            return self.text[key]
    +        else:
    +            return self.text[self.pos + key]
    +
    +    def __str__(self):
    +        return self.text[self.pos:]
    +
    +    def raise_parse_error(self, msg):
    +        raise ParseError(msg, self.name, self.line)
    +
    +
    +def _format_code(code):
    +    lines = code.splitlines()
    +    format = "%%%dd  %%s\n" % len(repr(len(lines) + 1))
    +    return "".join([format % (i + 1, line) for (i, line) in enumerate(lines)])
    +
    +
def _parse(reader, template, in_block=None, in_loop=None):
    """Parse template source from ``reader`` into a tree of node chunks.

    ``in_block`` is the operator of the enclosing ``{% ... %}`` block (or
    None at top level) and is used to validate intermediate tags
    (``else``/``elif``/``except``/``finally``) and ``{% end %}``.
    ``in_loop`` is the enclosing ``for``/``while`` operator, if any, used
    to validate ``{% break %}``/``{% continue %}``.  Returns a _ChunkList.
    Nested block structures are handled by recursing into this function.
    """
    body = _ChunkList([])
    while True:
        # Find next template directive
        curly = 0
        while True:
            curly = reader.find("{", curly)
            if curly == -1 or curly + 1 == reader.remaining():
                # EOF
                if in_block:
                    reader.raise_parse_error(
                        "Missing {%% end %%} block for %s" % in_block)
                body.chunks.append(_Text(reader.consume(), reader.line,
                                         reader.whitespace))
                return body
            # If the first curly brace is not the start of a special token,
            # start searching from the character after it
            if reader[curly + 1] not in ("{", "%", "#"):
                curly += 1
                continue
            # When there are more than 2 curlies in a row, use the
            # innermost ones.  This is useful when generating languages
            # like latex where curlies are also meaningful
            if (curly + 2 < reader.remaining() and
                    reader[curly + 1] == '{' and reader[curly + 2] == '{'):
                curly += 1
                continue
            break

        # Append any text before the special token
        if curly > 0:
            cons = reader.consume(curly)
            body.chunks.append(_Text(cons, reader.line,
                                     reader.whitespace))

        # One of "{{", "{%", or "{#" at this point.
        start_brace = reader.consume(2)
        line = reader.line

        # Template directives may be escaped as "{{!" or "{%!".
        # In this case output the braces and consume the "!".
        # This is especially useful in conjunction with jquery templates,
        # which also use double braces.
        if reader.remaining() and reader[0] == "!":
            reader.consume(1)
            body.chunks.append(_Text(start_brace, line,
                                     reader.whitespace))
            continue

        # Comment: consumed and discarded entirely.
        if start_brace == "{#":
            end = reader.find("#}")
            if end == -1:
                reader.raise_parse_error("Missing end comment #}")
            contents = reader.consume(end).strip()
            reader.consume(2)
            continue

        # Expression
        if start_brace == "{{":
            end = reader.find("}}")
            if end == -1:
                reader.raise_parse_error("Missing end expression }}")
            contents = reader.consume(end).strip()
            reader.consume(2)
            if not contents:
                reader.raise_parse_error("Empty expression")
            body.chunks.append(_Expression(contents, line))
            continue

        # Block
        assert start_brace == "{%", start_brace
        end = reader.find("%}")
        if end == -1:
            reader.raise_parse_error("Missing end block %}")
        contents = reader.consume(end).strip()
        reader.consume(2)
        if not contents:
            reader.raise_parse_error("Empty block tag ({% %})")

        # Split into the operator (first word) and its argument text.
        operator, space, suffix = contents.partition(" ")
        suffix = suffix.strip()

        # Intermediate ("else", "elif", etc) blocks: only legal directly
        # inside the parent operators listed for each.
        intermediate_blocks = {
            "else": set(["if", "for", "while", "try"]),
            "elif": set(["if"]),
            "except": set(["try"]),
            "finally": set(["try"]),
        }
        allowed_parents = intermediate_blocks.get(operator)
        if allowed_parents is not None:
            if not in_block:
                reader.raise_parse_error("%s outside %s block" %
                                         (operator, allowed_parents))
            if in_block not in allowed_parents:
                reader.raise_parse_error(
                    "%s block cannot be attached to %s block" %
                    (operator, in_block))
            body.chunks.append(_IntermediateControlBlock(contents, line))
            continue

        # End tag: closes the innermost open block and unwinds recursion.
        elif operator == "end":
            if not in_block:
                reader.raise_parse_error("Extra {% end %} block")
            return body

        # Leaf directives: no body, no recursion needed.
        elif operator in ("extends", "include", "set", "import", "from",
                          "comment", "autoescape", "whitespace", "raw",
                          "module"):
            if operator == "comment":
                continue
            if operator == "extends":
                suffix = suffix.strip('"').strip("'")
                if not suffix:
                    reader.raise_parse_error("extends missing file path")
                block = _ExtendsBlock(suffix)
            elif operator in ("import", "from"):
                if not suffix:
                    reader.raise_parse_error("import missing statement")
                block = _Statement(contents, line)
            elif operator == "include":
                suffix = suffix.strip('"').strip("'")
                if not suffix:
                    reader.raise_parse_error("include missing file path")
                block = _IncludeBlock(suffix, reader, line)
            elif operator == "set":
                if not suffix:
                    reader.raise_parse_error("set missing statement")
                block = _Statement(suffix, line)
            elif operator == "autoescape":
                fn = suffix.strip()
                if fn == "None":
                    fn = None
                template.autoescape = fn
                continue
            elif operator == "whitespace":
                mode = suffix.strip()
                # Validate the selected mode
                filter_whitespace(mode, '')
                reader.whitespace = mode
                continue
            elif operator == "raw":
                block = _Expression(suffix, line, raw=True)
            elif operator == "module":
                block = _Module(suffix, line)
            body.chunks.append(block)
            continue

        # Compound directives: parse the nested body up to {% end %}.
        elif operator in ("apply", "block", "try", "if", "for", "while"):
            # parse inner body recursively
            if operator in ("for", "while"):
                block_body = _parse(reader, template, operator, operator)
            elif operator == "apply":
                # apply creates a nested function so syntactically it's not
                # in the loop.
                block_body = _parse(reader, template, operator, None)
            else:
                block_body = _parse(reader, template, operator, in_loop)

            if operator == "apply":
                if not suffix:
                    reader.raise_parse_error("apply missing method name")
                block = _ApplyBlock(suffix, line, block_body)
            elif operator == "block":
                if not suffix:
                    reader.raise_parse_error("block missing name")
                block = _NamedBlock(suffix, block_body, template, line)
            else:
                block = _ControlBlock(contents, line, block_body)
            body.chunks.append(block)
            continue

        # Loop control: only legal somewhere inside a for/while body.
        elif operator in ("break", "continue"):
            if not in_loop:
                reader.raise_parse_error("%s outside %s block" %
                                         (operator, set(["for", "while"])))
            body.chunks.append(_Statement(contents, line))
            continue

        else:
            reader.raise_parse_error("unknown operator: %r" % operator)
    diff --git a/python/tornado/test/__init__.py b/python/tornado/test/__init__.py
    new file mode 100644
    index 000000000..e69de29bb
    diff --git a/python/tornado/test/__main__.py b/python/tornado/test/__main__.py
    new file mode 100644
    index 000000000..5953443b1
    --- /dev/null
    +++ b/python/tornado/test/__main__.py
    @@ -0,0 +1,14 @@
"""Shim to allow python -m tornado.test.

This only works in python 2.7+.
"""
from __future__ import absolute_import, division, print_function, with_statement

from tornado.test.runtests import all, main

# tornado.testing.main autodiscovery relies on 'all' being present in
# the main module, so import it here even though it is not used directly.
# The following line prevents a pyflakes warning.
all = all

# Delegate to the runtests entry point (parses args, runs the suite).
main()
    diff --git a/python/tornado/test/__pycache__/__init__.cpython-35.pyc b/python/tornado/test/__pycache__/__init__.cpython-35.pyc
    new file mode 100644
    index 0000000000000000000000000000000000000000..1a008e9cd78c7eda88d54f8c85b358cbf2df20a0
    GIT binary patch
    literal 168
    zcmWgR<>fm0Yik$-5IhDEFu(|8H~?`m3y?@*U+Se0*kJW=VX!UP0w84x8Nkl+v73JCJq7K+FID@W(DV
    
    literal 0
    HcmV?d00001
    
    diff --git a/python/tornado/test/__pycache__/__main__.cpython-35.pyc b/python/tornado/test/__pycache__/__main__.cpython-35.pyc
    new file mode 100644
    index 0000000000000000000000000000000000000000..a6f24bd0f4893438884c2590969118512e43a6a2
    GIT binary patch
    literal 474
    zcmY+A%Sr<=6o${-+Hs(|vg^TJkyx1<)pCJQmm&AawwfTM{defd
    zBlbuf%FTbSx)t8p=$;Ss9iBmFzV`UIn(S3qhGA?>OCLFJ@-l
    zCRY1G5%7S-D}R9>z&j7T@E7(K@zB4}C(fDmy`c@3=Xhq$%$ak(bD8mCrBeLs#qYa+
    zo&oq9#y%D7Z(x)EAaU`_zy}BzSPTLUEDeGTSQ!X(uykOwLIXbwK@O~3I?jNfhrj^K
    zfS>?YA)VF1FG5fPs{}zAtTF@@uqqJDfHec`cwQZr1-}YG4Xhfld*NBo`58DYg7_5}
    zV=f+1g9qVxxKGFC)!JFGW+9jZYYrHvR)T|h;5ES2hX%L?Jk#KFw*YPtzQ1&!6Qk=%
    zfgkZ!x`icqhopJY7D_HD$+y3}6GffmOdQ#L9x4@g1@E2fkLy9?c70wCqqyD?T%vj{
    zGH~$Gv9p0qB8b3&9E&}4EV>$fi6EhSUHs~*uLIA5i{Bg#3AP;Yyqe4tyy+y(8^96f
    z0n#*61h<3$my_z4?u1Q?;o0=j`fU?`i7__+%8QMd(WU;ld$#o>DJw!ES6-JvXHEH}|#$xpBVi1t6It77?7HfSw^(~S!{18|>
    z*pzk{!#k$7_52;2NNpmQkM)#^#=7qWd#-b%LHn>l@A@k(N>i+{?o)`!8`w$ri*vZw
    z?4zDl(I6Ia%BqTPXnUbe$=t=zNlK2ykc)49+jV?XQ@|(eXj-wk2^`Z?O_Y}gk)C7A
    ztoQaQG#w(J!Oi3~h5Q&jLX@^|a0JJ#y&uTIJl97|(ZKN<>8SUdHZF3N~TlHd2EJ#=Z3OI<6d2AP@EKZVc<
    zxa>GBp5(BQcKvwTUvEtKOKEJA#_ypNX&`}YHCJFWtas&=UK{NkdXDm?h>bibKg1wU
    zbLK~YHo8iA>jzbjsa9$bqoZ(*DhK2=Rlk?emD9Ye;^koJY33zfUhxd%>F^V#URKc@
    z9OMCG0do?oJEh??56}Bg9%?kfGayDG41PHeEH1)9iS#}ws{@oh>>k#Fuea}~hfFL{
    zHcpDm!o&(?|7ZnD+A)ih^@57gH3Ibtk`z=rO6ynET}E>{L5y;~L)}S+wlzp~D(9Ad
    z*O7-}mbzufb?rfwOmf>Q;#8bW)Yo}jOSiVVMC@VWe1_BgvO3RtAHOn49M|c@T#TyF
    zOVWD&Fwj++)hQiAC-xrk(UCSiT`+C!zP#yd?dOa`S491#BgPUnd9CQ)lhf*RN@a%E
    zQd$~UPEc=D0FnzQnm#zH_w)Z%ZuK=#O>1}J@e|08DSPq6)dOW$)9|YZ3csW0l)-0_jmXDzlbbKCj
    zyx7zOff7Zm5TUzB4DZ%KfUDS^pD2a>*xJDL@&hE0JFizW6%AX4T8>p&o{dqx#>(sr
    z&Xn<~L>&6Vww)AhJAD9Sziiud-=-X~fNF>(A_ft9oC&J1;xdsE
    zk*h>bcrl0?G6-8qw?
    xvU)IAX;+T^c-(F)rBAXmG1gP?ZqWN$(o<$xtFcA4tYz3dtFV%`sOgzy_Ag#ngoXeB
    
    literal 0
    HcmV?d00001
    
    diff --git a/python/tornado/test/__pycache__/auth_test.cpython-35.pyc b/python/tornado/test/__pycache__/auth_test.cpython-35.pyc
    new file mode 100644
    index 0000000000000000000000000000000000000000..b3243025e6e2a81f801a81db8a6b2bd28c11581c
    GIT binary patch
    literal 25603
    zcmd5^Yiu0HeV^kUkFVoH6h(=8o}Sj(7O98rIErOcre#}*O*tYhJ6E-r)7?@$>mJPP
    z(ULB9nnZ4srjPUyq)FSfO`5b#n-nOJ0!0f1{ZOE3(GMwFpum1jzvN2`v_T*F)Zgzv
    zJA1p!JyKL0BuC!d?9A-U?0^36`Oi*|jt>9*5B}oX!0SqVSA{bw<_4RbxUGC(L!fs_#&ZNmZOQ*I8BHsTxzNIAyK}
    zRDG9fOsnFwxgJ#Y-Kw!i75Avdj4IBU(jisfs~Y=Mai6&!R`va=aX=LhsK!B6Jg6Fn
    zRPm5%(IJPD$xhsQGbJ>{8WfshGCCpFrtuRox?{du-`LD4kK&
    zy;8bYIvJEPPoiRc
    z3`r-CqT;Bk9+Qe=%6U|t=oCtitLh0UJz+~{QTmXoo|Mv)wsa1q1yy}mN*}hRr&0Qd
    zsy-^EkJ{2RC_SaBvr;;1CwCU5bEAI;b0|Hds%NG2tSx;ErRP-jF)4jaIeTQj
    zkE8T)ReeHApOD8Jk;i)i74K2iC#B*^+sS)S`d(E%FQw;g>60jZpQ=73rBB(?_oDQI
    zsy;2HPul^{tLigqsWAUNQdc44jhAn@t@?V~Db*TlE#3A8tFCPm#Z(=?$nyz@Rf_}?NyT>k^EV6kz2-%ZAY7{
    zA-1}tbxV6YUTC#$)}3(M$>+-z=SHh_>jhn2Go2MuUiSI*_PTbE8+pTRHA_{e(yH<@
    zebZ^;XLw!LOO;l$Qf}jF0EMM`>t>VWZ2Vr
    zWE@4V1QrCY1U>~u1lk2}ax$u#R@DqI?}2mmt1AApQkzxP0j$cPx5q$qpvT6NBK^qY>JD+BWAmXGED
    zIDk{T1jrR9N>^UFw0QZs(xt`cUb%94aT%2v*Qu{~nKt0^HKh+=oF80UIDh?Bbmd-u
    zW#Q#=^E$xvT)ADo{!*(_u3ukv8f(|rYHO!%tk>$*Q_WRm}!?k9uT`SjX9cPLUB~O~_9PI^)*>3wv3ODe>dnhCXDgXx|VW5JAxYwuj
    zIVlkcy@7SO4g@s^Cbr_|di)$S);+u*f*%3W6N0rd&o3rK^tVTvn$
    z32!T;7ds;c(6bGor8lzLZm*RpvV?^pFGnz!_04q>jW;b*zj7?u3#dwGDb+bqmrkzW5@;#^R!*vnt
    zYOqvlmK#o~Q%7PEEavooIKayU20Zd^DTammH}8)=Px^6V8>^X1gH)e1D|iD
    zl|HQ6$fwC?{6zrH1GnuW^hZ&)I8|A7Dz{1@gabBp_SBk{mL}5WL-?IPt~;YOxAZ0`
    zwtBv;*PTKd0|F)wBk}rg>l)ZgO3*I^I>EfmTFY&p=
    zStfH#`0Swpx@&V96%XMnoFtdhypF6(8U^!ga23fW>tdKSG)gDLtN}#ml;GFZLE+uV
    z45{j{uEk|MW0}4-!mZS9-Ey#`v0655X>eW&i^zTDO2u(qb6cU`8!KJ8
    z`0DbDrE8b2E?vH|sP|&V-X6d3>ZK37dTD97w0!0LmljK}UVX`%@@sg{m8+MFm!x#J
    zEnQf+w6xT14g?~z(j*ga0HTWeM`&q<;z*fXcXtqpN)4qZ@dqi<1JFrQf}|5j2~w|N
    zRUjJB3h6`jEfx9~z4R^Tu9qph)fF%6Iu-4-z08MiI~p28rdq2&$w6+vpUt99pC-HT2NV*bfhVrP_`u@n|k?=A7
    z2qt2u{3!DADIZ1IMSP=DsMlnDDO3qlKUp9N-~AAhfQkDHdQdocpXK1tMJ$pUtr4h7
    zMt@nF4Uiv9gE^4d_eoQ5BG^ePg+e2DhZlq2ZmiXv*-EPs77O|U
    zdh*E}I(3M*c!SU$>o>}kTQLZG<33H;i9L@l++idNgr7_8OHHIYCwrnt?B>P7P+We9
    zsDB#W#pOK*3QeXfSk9q>@<(O?p`V4=AIkMZx$Grih((~+ut8u?Ak8W!r~buItG{p0-=QoQFI&ea%LA%Y^>`
    z?ps{58diEmI0)FVG!T877{@Ne-0|@;_1Xu4*Z2p!
    z`DjYLjviibj>tXzrc=~WeCcN}f!hCo?{#+EY_;=_)eO()kGq9zi1moFF$`r{EYfvX
    zpTV7a9*I>RQ%-0RMTC&eyn?S)eADUFIB;n+^-y2u_?EDo1tg(H#?Z(cD3z+MN~xsB
    z(X%%cK-egdf;`@!m2clZsAn=DdaFs;WddOT9e#wgl2gDzGk?!a2`OP0y&C=fPkok
    zhuAiFHtc;Citt`MJ5&{Ox=mDD
    zMbTGld(P4Tbc0FM6O11~-woS8Od?)CFbEGCqVY{tk>&3e1gUi-L69oQ)(~{<6RK3_
    z$mWyWE&ypMYTG>k$m?VPQor2<2O;Lky@g{h6|%mics9XOB=YgY-vALe_xSnd-C1k=
    z1xh)Hn--gX0*U3|B$gx&eh1}IJWQBf64&M=)A%m1gZCgY6m!pb)WGLNi={^m{)x2p
    zsENzXJ?EELVbd?7JZ3^@TJ?7Nz3hmOaCCADx`xD~MS@Ry1d-?ciA<>{AuXZb+Z-W-
    zg#1XeB!US9!92b$>Ae>wz#B{z&`NNy6gn09p#){Uz19hSzeqs|2;drrC%OXM;Rg0g
    z#D-vr@gqQ<2VMh=?t2aP&aL>4_YJ7iIk2%Vq`!A2;??lf1h}S&2F1zJ^A{H`J$vQK
    z`%BMWToPY^UBzBV_z42gtfJWY$9u!17b?iHzTvOhs{eh7N@DE@Y)Ci+aib8wRdO;ogvwm>CYzn@7@5?w~^$2q!BqWzjB5fUY*
    znrD;!NP3Yd=!6PG-cIygl6O18*6#(Xa$5_w&vUMvGz8nRt>&{`(MFB^H|+orZM=S9
    zXzh;9?qzGVR<@^B^?l^+t)cQoJ83{=UypIO3ycq;&)>2A0vNnb2F5n!{FJ{7YIwR;
    zHf2pQ1b_Kmz}KaI`(W(cze_y*U|rTN40r$LZsGlNZL>!kaQ`re@9i|s1^0m|75WX|
    zr^8eDzGxf89i`>hE-x=%x_aNd|H}lzAQC!yg!>;#0$vi|k0|XU=z$Dec=impM0FlT
    zKrFH{{D{-JYjx1z@TS0gWFw>$+Q$21)L8l}B+e!2u0=p3wr4>LS>IsN9ky%ilu_{|
    z4&`&MtY%3B{n99yXA}CN!ko*2Fc)$5j&Uf7`^8r_W%q)b#Y|-Gk^N;tjvxMV5Wz$m
    z`~d{3Msl9;B`3>8w+J>*MS|L0KJny06*!D8M5GvJ@jNb*AukO58>lu*52-PMP07xI
    zo>L}i|7dn3@81LbP0rUUo7l@b+luqV1}=4hVa8zIE{!qvT@cgIL%8}9@B!{i$M|c`K^R4vUc8A+=fu`eVkAP`jc0to;=;>1^5DG{3=o3)D-LZI
    zBc0MLi7W;oG|wi-k;KT9+GS`Z7zkpP;dtWYE4#;v-)mGkC}W~26fJV0kJ0xMcNzoJ`>eA@{Bysz`a5Fq@EndP#3%NzG=v9Bj#
    zQih|G^7V^Mh$6l<=M5s-WDUEYiJtd=K||wt@6W*!n1Oi?MCt3~Hv{%o)T%*To7e(3
    zQTxvvm&lNV&r-AG-9nQn-?V6Ag4N>4LXI%ukL`XHnY&MP36_*AECK5(m;(F(vH%aT
    z`UX={f+?>xFJtc|EqgB?$KFdHRNsR1gXUeF>1XH?b^_6z#gt$NrcZse54$heg#jKB
    z?S_>5SsUSmc?9OH;1@j6G5pf2n0yz#bd8tXDAZ82lpq)Oh;Ok#WWQhvZSgL;cxf1*
    z`UJ}C&VQZtmzi{>AJs8o!vBVhJB37H!zK&VIUt)hI}x=62XvI>CoaMdi9~dqewT9y
    zkccG_BqA%AXOsO%!u&qq^w{?E*}PR|-z9tU$&kbO0!cl|e@ePP;Uba&p^;<{7qB_7
    zwSo10J`o=cSa#H>(q6MRFus3}bPK)ym+c?yo7WEvQp>N+$+JDHpH$hIIOGFW(
    z&jAkPek58YBn$q|s?SBMhUKmisrvJ16~d^|;iy#oW2j2V9x>s3E^OIiKI9!DXso=L
    zSj2WcIlmC94ew|ocGmI6eN3t~5SfW>d|TkwZZ^vd!1X3giy**-Zm1c?ah9x*h*V9R
    z=E40XY(1{Fs-2-1U%t3-YU#y`XS`9<#E<#uq|eNr7A-{BiZ>EMS>Ze2EPJqU`lpe2
    zLpG|?6<%W4jX0>tbG+dSjMBEmm0E8jx3`lx%>SX>a2N=sTxyT}?L&wG{{jJ(h+W&o
    z`!x*pzZ{O(2)yU@1LM8b7|qPCIl8uTcL5$m#txS2!5*(wSoj+d0L8Jdq=@t(qJs?j
    z=$H2$m_a!Rz?2)p2+BbKQch|Ce-pn0q!zKc60Hi;z(=E1VH)^Cv?@#k-wah{7@&*u
    z5atL3aGknT7sWM|G3_#GGg)WyCKI~uL+Gcu*AYWHEl9%sdBHd@{XAG@RxnO?SW3AS
    z!?jH;+5rIFrQla_w6Y5F=8Lyo{K8VGEXVZmi`U8cl|!pdM3wJ5uxiLVPzV9@AnZ}p
    zQa|hwh4zpNj@Sg}QgF~FIE{kiHo*ZD9J&dPo#5zAaMGlR11f}=UXfsi7Ov^?Y=1HloU;P6KggFXUX`cZF?
    zTSYu7P$8-C!(ZCxA~QaRD~Y3b8)L-^CsZ3_
    z#R_+*HpYq-POA2(T0!AX)gF_~lxmMlW|wMDNM>5KcSvTpYEMcA{AZ_RW>kBMnVVR$
    zy&`mx5gah9Z`R$c_`OvBQsL-gXUZOTF#95NbsnFPn9|NJKe+YuT%_(vEUZv}=U}{n
    zZ|wNHC!J|qUyk(9$v|F}JNe{V1cLg}R-Ho|TEqsx=}mbI?{GXiHG1t)w;0#l2jd
    zO45Zy{1kU@s4{zX#r?@jl&RIP15b;)q{&Kb7f=^jHx;cjw-u%}qXpzxf(i3E$4l-m
    zNWhuwT+7~=I1|N*C0-PWQ#zFzhN^UMU-O1s{zl?bKXgO0yl_1V7yU-qguQkJ7Umv|sWzlxcA!=_{z$1J>o
    zRqWlu(_^(+F`A?HvS%>C3>(a6o}d@!9DuqL;OiU+fG3y}Yac)|jVbUekeo**fFyuI
    z=PsbfZETeS|K|mPtKgMjI~YqM6lMr=K)3408hqVCCCt*EF;kEeR2Od4%d02+f~GrL
    zzH`A3hO=c(d(PEw&Xw=Xf&bc;tR
    zq1$kjW!>Hv3u4xM^o^m-{rVFq=Pr30X9I8RFFvcnqj>oodYPh^`b-
    zm|nc+yDK%G8U}InSSi~+Sg8oklXQcXx-XpP+KHw&!>AO&c`T$Q2#GIe{h%U2_>FN9
    zEGyhZJ!I_1Fc(7(`s2uU3ce%^b{g#olxK`3C{g7SN!K|Ui&`{QN1W#B8`YZ^F1+>f
    z^5xFr#S0gXbcUCl_NfIN8?8A|)wwjX3_?O0nyb=3hr}CnnpKyP(3??^;j=QCGp7R*
    zdWj3eRaQbmJ5$4bSq7jVj1dxrc<8oZebTsd0AHe$OJ7H9eZYYj5Mnsf)>PGxfE28f
    z&$A4r4X7DfMH2ZB#T+WRc$j^{%Pn?JSS2K)HEe*pfmPOkvvVSG5jLzrGM$9ngctG5
    z-AGltN}psY!Ky#SgtQYrDxihM{Q^s4d@j05mK?5iSBX->kkC}mRkF85R*5Smt3+cj
    zSS3)ycKKbG2bx$SyIMkEV2!?LVKXtTx<$e%2N$=}N{wB6p3vdOjTR7#=}W73GZ2#`
    zBCgjY5Faw4A;4(y{u_l@BFGV9W=X;VdJ58|bFvpn#5)MU{^Ms}J@kA1F#zf(gnQ{<
    z!rV~hCS7756CD~N-r
    z-n1F3c{mDQi;Ug5mV$#@?-0<&_ArRuIoAWkoAnX_cE$qxUAOJZ8(c6U3DB7_E3`4X
    z{uxni5$zR)U{~|p43@sdCPN2I`_qJ47SCza%?(ySv~@Ccnj2B^0W4o1nc?E?Wjn(S
    zY0(&FKo6Hy7>@fk_-gn#atz%-5)L)%=ZLDR43U=8G~fF+9_|4FzEhCb4!}3X)S90m
    zytzqj;Jp`0erNwh%xW3V;Ms&5Dx`XT$a4dP>>6IMk>Dl<%m{L04}mDs~`3fI-=aYqjima#Rg
    z>(0*iOPsG@!3O&x-gKLD+~8yt6b-p?H=nMNeepSjZvRSX<}rD<$z9ugi2Lz0Z~h!d
    zh3pb%6!@zDDD)_gM(K6)8-u4I{f^)(X!AG71Za~Wy|*;XX@Cn6Gb3&kX0Qv7{UT&!
    ztCJat=we28IGKU9o>DdJ2q7=~`^=2?zH!V}Nmn)Fz!bxAoR+@-^ilR1_poeVRR|gM
    z68F>HD-9xmOnlJWfTTA_mlnfKHnT;i&~i&#G)jV2Q}`3E{vO<~#3gJLxgl)XPlA6-
    z;CPpLM3BjL5MTl6nBrEj;8|n>EWoNn41ZSO6${|p;xa>ZfG1e_8W_G6hS2FAimyYk
    zUGj`X;kc~^-gXA3;VOpo2AAw5!Rig#S}W(oS#l3mU609TiiNQ>EExnN|0RKD6fyz~
    z5mYf7W5XR0C{aiXlwLsweg+t>9&Di~uZ+=?O-@7a1a!Z1sRJNw>HR>@7o^2K9i4J^@>D#k-Rp
    zxj#6C5RIV$mn4vO1HM{_3BhtNr#NW#axOQ;*|`~(MMYm6YzEo>fxt5+!Z=Z_hxkjr
    zd9ko3el+E0F-86JOg_!zmzewtlg}{uRVKg2{>G#}
    zt9f4q-{6x*ZXVwUK72cAUP>wP>PZ2FloSgbfC*3t=v0yn@QjSml$hNx-vE#{2H#L3
    zTV%WoYQMiDHX0GrcuChyc`>FnH_O&KLWo1OMs`GiX!|i2^GdFr!7I6*yL|Q1!ZO~w
    zby?0~$%!nBbJD)M4zdJ?(8oV@33A8?&;uL6k#z2g77*R|TnqO*NbrD$4)Kf-4n~9`
    zk7#2AU9r{>rvM5KuK7k1z=S$;I9kFnNvL8o@jZ>#-VUzbl8~FH*cyhB05#QImb+oA
    znm|q5XCgDPC&DAVFr1)*@fN>e2nDiJ0+s6~S|b1nD}>M^d5Nx^(9Qa%BX%sxZc=A=
    zYze61)S4?TqZwVpqwCj@;I)skhGzG~ysYDctQ0-zI&#HYrjCS+r=ZwTokT87JPt$8)-jKftSu4zsHxOuovS$4u}D+y~;N>f%H9yskcmhB(`?
    zk2lHIa@Isc^gO})uSAACn)3v2t}h2=Aq%7!>l>3(yhEuS_~YwqI@1YA0u^iH%X04f
    z4DVMk>UTJP1{v^!tY!(ZEBKHQ=kdLtkn8~HU0Dc^EVS)!F4{!?^ZXh9Bw9;b?NQo_8?Q!_Tu
    ztmg$*`3`+_w*$ec~q4&-XQ!7VXPYLTR
    z4m)?&@LE+DZ$;~jHXQ`^obpRxk{rfOCfZZZxFntzmA6pxeO10ny${R6_=;W+zwMV<
    z{wXH$-=Q_(^#iZTzVNhO^*L9Z3@}*NpZ&uwOAo+zjl|@QCXcyY
    zn)o_KpkK+-1h13fe-bqgm@y;>QZNb;|L-6#yaI^Jyr3`7;!cDoxg3u<4L00fLa#8h
    z&{?mTQ$|@Lyfh<$lZYGbm%vGkBWH(#cmSc7)KXz&u@ludOo-$*m5$zW@DDLzk$1&i
    zq_@&dBL_{e58h-U60kUvQ0IrOWlh-HSJ)3(qOc)Nyw?PNvXf3zeI4OCQ4Nt0GYK6V
    zPlFnxtcV_6IsoD!W3-7E@O5t>3A3WWjxa&4;5y8R%E638y)$DRKgbxz4>IETp;O0b
    zf6@}t!iWlN@W&Qn6$9(HnQ$H(qK-txNZ?|=rBNI=_Qi8}L#c@%qif~QGP%N#mT5~4?M3dGHkIMN#5c=*PIQ}1G0{23?H)j1_6V(lBK80KghWFYG&G)oo>
    z@8fHljlGAr>!E?tX(nu@DL0Ba|K;Md6toS5gl#8D=uVgpX33v^O9Utg?<*
    z8#mkXg3{uct#DkN8gT?@#R0$2oQ9mRO&E)Y_c7K*au-KzF`y2c^zSnlWYbnn0I+|aw&a+(PZOLNp^hPl&B&M|p`37JADEOUvM%S>R3C=;MrW{!+1
    zF|LylNTKjeno*)*#b`}dB5BdQ!Kcg%w4WxxC9LyE>M;aF*}rMzj-+R%rUz#7(-Sk}
    m)BV%^Gs82}@;fp;Ha$9>o1Va5Kk~WhZ_ON>9-JPY9{zukrsEd?
    
    literal 0
    HcmV?d00001
    
    diff --git a/python/tornado/test/__pycache__/concurrent_test.cpython-35.pyc b/python/tornado/test/__pycache__/concurrent_test.cpython-35.pyc
    new file mode 100644
    index 0000000000000000000000000000000000000000..77f41e010a7c499cbdd5b13d79d37408aaf0c712
    GIT binary patch
    literal 19184
    zcmc&+TXP)8b?(_4b{C5~0bW3mAVo@CiGr+~b+JrA25E^74ap)+N^2-v%iRIE#NOO9
    z5XH6R*dl#Tti-mQ$f^7Qzb2KcR9^Ftq$-ce%RHnim8w+fDS3?_Qu)5qbJ?BU1t^G;
    z7nt6jp6Q-G=X~eVrw3S=n8^S0@Ba4c`cq2%r|S7kqP~j5X(p7a;nz|PrJ9y1Sqe?1
    zglZ;LDXE$%RZ1y~cO+CJtx9RR8}*DTWrF&cDvbs8tSV)LdQO#cQctQzUNy&6Xx{BbOY4JZomI6tX`NH{L(+N%t(eEWw9YI0jI7#soG&_Jsh+?g4QFdwkWNO!MJD9dQ{bxq;*NzXE|kiQQd)1AH}s}
    zs&-ti9q+%kf@{mFc0#V5=)d-uvX85qUr_c5!ebk9KdJ5{(BT19J0%@X`HZ;(WIfKC
    zE*DRKOdJ+d-c;qf({AjzcDdf{w6*JHYxTG5PQBgoCOf*`a?6`Lt*R^Mx9aYVvg1}<
    zyJ@%Fb@a}Bb;sS&wl|?|e7A6~oF05#POnxPJNCt`R$JS(3tG3eH>G!4<#wxVzhhT-
    z+_o+zywr}n`K*_B+vS~Bz1ptXMaxTW*)4o!FTQ%&)pn(c`UIX^-7Z(#E!Td>MMLh&
    z+H03>{kE+!kBOI@+pX#q+i}+_j_pldYHwZ9l}^|&eZAhQ;Q>32T`LdRvY_-y8Tn8@nAzYB+cN=TZZ+z3SwX^Zs+Se-O(CY7o@CoZhPaznGf?K
    z=P8p|V2(=zbL_pdZ<3)H3g~YTzG#GyT>lt4ptKZx@@`X^sW+Q;
    ztqy}x-mKIcuoRE?%;)65djjM3PXjL*%um>=Ji_T+%p^ihw45tFzcG#sN9BK@kv~Hp+TrQ=TI@7!J;KT>lljN
    z*fkX!>1o^_2yc0>$M&&`(h-oQPyx+U9zm2)x!W8H+F0_pX}g~bCy?$AqNn%H;ZGk&
    ze<<-C<-^)q-}Cv?t#(=4&Q61lht6a*oI|2}Vt@(pyQ`#}A4)xsL%7gH#Q???d-4Gy
    z6P_o?8&;B-qh(!#nfb=+Z?GYyZl>i5Dj5<08-LKh6H3b37@OZS{j;LLuyla}171Yw
    z)^9%I=eI8wbJRUav8u~0+ib)~Y%NWEE
    zC;-M#J}D9>-hUYwBiKJ1z~V+ogEckM{0;O9-IpUdZo1|}v(VmIC?oHa$B!?DIuIQ4MW;iO?Xd>JFC
    z@NK0y2w%nsD*%B=#u?=zlCabVQ$PuvNtV(MLJ`qkYrUA$58z=YiexAnc@rZ~@u3g#
    z0y*LP6JueYWoyhX9T2sSMVq&AV<=N2Q8<;;!?iosw~s+d47A3<7{7wwG*ZC|nQVel
    zg_;<|3oCqMYxpH3r`D_JD<7H-g5_H{oLLlLIYvB8e;cd}d;P7@ibZ{9`&LD7IdRgh
    z&G$JFM^uUK)8*__*K4?M?=TvSyRH
    zk#LN63%O0+5X-`CF7*r?4Z~|mS$iX{Wviv!AcP5n)RYNQ`-UT9?qRir2yF}G2;*)`-F4t6ATy64{*poLIE?1r4=~t3r1u^!9X5@#oLE{
    z!+O;sYmtJ4G+-aEwa%%Oo08ml7q-B1(_GQQ=NL~@OR2*gAtAEQNtqA)lVsS%Ag9->
    z6dynQ=G>md;ao?7xd9di5L`)H>Z)}cxk#DgB4+TVa0YJ;n?VxBJjL7ZNM`>qMk?pj
    z`^qHv{m1KNJV~-OAJas6=|;U-cZ+#l;92TQ#o3OMESj#+%elHzwV59D^2izE1bMvc
    zUedYkcv<_MDooI3+ncoC>DX15`5RqnBKvICrM)XqK_N~1U*V9U^CR$08n;u4D}6q1_Ab^#a8;BaUX
    z3f64O{~4Q26>^1CAyderOykJTrcob53l7bGRUF}m&|Jmga5Nk8fDWLPJd=rmu$>3Z
    z1m5qW9jH&k0A{@0TBTz)Gpb4S2z<)uUgGUl?E$6gC}N!)tF8~1y%DS*)xboa)vszACa
    z9$0DSBmM@5AJ0r!h-VIlWUcf~q)I;V!|xOhA+n81ACZh$0{eyvT`J6rQrij2m!4SC
    zbbch*T8^4r?24M)jC5FlO~S7PLg*ryJL8w(r>^3$4wH2f3*?ZK0m-+QtCddOtu*Rg
    zyO!^!+uOO*PBE)r$9?)T3z|SLz0>KSn=mGQOV?dn=cT>cXghY0o=erR0nr0I!5km^
    zcR2izdji&E)|yK$Tiy8qu5m^TAeipb>nvVnL6*z(d&Z7H{TiCpzp`Uj3jxfU$V^S%eNeHF(i)sKmj>gzM804!Oo5sfSmI;=Z5whyc%jh#tmiM8r@wG+lLU36{(M
    z#DNJ=MK~|?&0;0EDMT}G4AFcZCn1_cBttn3hBp%Qx$l{f5!<$a^i%5Hq`C}WjW3|}
    zeSh-F^=|&m%EQGRi@cX;Z$pC<*mO-dD{l3M0b*uLw|6>@a2z>R*&O;7X5c}$EAEct
    zjWwaOl`XpmDnjaXN4KlE#F(bjZaMb9pqC%ToB}3OiSDs}vf~|w022aGdXq)O*a(SUKV4C=&Mjf0M{buV8Ht&sq>
    zn|u6;XI8#|pT{L!sZx{q;Wssc-cYm}PvvwN!V^?}w0#zr5$Vva_yLcRlvt*dX#IEg
    z$Dz7sV?11mrV_<7;ks=qvXCCOlD@se9SAXh6BrC=Y*)8!qlLz5j$xCvh4@QT8NEs0
    zO$3b@G|DW!878xs+X};NJ;@%aTiE9h6uQpaY08C~fHEQK23r3OhjRdhLNJp5N{iY714tZkN2wcanND~*uG5Hu_-UlHs*FfoY
    z-N103l{Ze>1fjkD<_a$BH(AibXd(5RtkIExR8jdz-Gpck`5!n!pS)yskHyLCXL)GM
    zs2S5Zgj7OdIH`b@b$dz36=zJkp&3Fm$-dTl8qxoltUp@VQ$cXjoeyqDe$!-euDuB^
    z;1`#_n^14?#U5b`b}faxTm&nDjV@tA1{itAcKrpJF*zH!gx5vT8ivyWKEB0ER@?^4
    zBmN7QLRUP2;C>12xVt!lSrQdx;x3UG6ETi<%3HYQU->8^yI@Tf;Fj4|#SwnU`c)jx
    z^C)_)-{9*eusi_MT^K-sP3#BjVhL#gXV2T!wyxmqhR_D~nZOhtAOhy|`vrWzOrY^BLX|TrZ)Ssjm??i>d0t>
    zL-SeA-=pYbIRZ$;A|`Z%Q!z}3_do#Y-zPDnpWQ(K$*9DLMWPbKomVY3=M-WR$O*lp>H+wani1)@QFkCRuFXgnne1eAJWDjOyW}twc`aFxs7DSFwe4+qMu$4BBF)|GT
    zz+q4$KGKMvOoWh}uogva4x(JZk+-@_gW~Hk7VE{~xRdFAjnfqbMlL#vh#>)$;-d+G
    z0-TR#OCRczXyC_z==)IjBa^8W$o&xPP6pAWcm}pemchJ#NG}n*c}Q3W?;P^Y;oxmU
    zzBwGeYKR-cR}J$V`Eqf_n=;F=D~RpBUVo?F3e6-Z8AjDK&maS7OH7&T9!Zs3fH9(T
    zAHZTh%4vo5I`G6bk)$Gd6=_P@4&92vCrm8pNgd#L3EYZgp`Z@CKm?pP2P~A*&JNe+
    zZ=(ry+x=!o7Qk{30#SK&RL|n7dC|s1rjA#r${Mp=ZD3o~%a}7ifWX^1h+oYE@CItx
    z+*B_2xSDq`;5;SRI}xvA8;m%d>)8f|glUiy^_moB}E499mcs?n1(
    ztUb))F%*H1ILn4eh3F%^2}#QbuUIUTs0`W_@|AeWVrh^pEJ=d9L)s{SkhUjrvIlJl
    z@j(x7!h$dv?RtClICF%zTEn>CyF=B=>T?JeYjWf3o=4-f0Txjur+lD9e?
    z$r5SW2X-ae^ax4sbM}{Fq~Gb;>4N)#7)i6qGWyih5uiBkVevFbH?hGhS)JqMyDIZWL1R{X*Wh&{gwb7Xo0IzC)@E=D7GjkgClsUEHgNX}qP
    zsE6>VckxS$puddbi{ypafy5l&jw=r31l|ZS0a=vG*o3h!aFnmHpalxR$YBMHfnJLF
    znG`sN13eu+*IG=EAzI!2xdV{AFYL!eB
    zk3?3CDR!34X6JIN?JjqaR_BfdZVf+4)K+mgH&MjoXaXrf2pS2QGT|d}yx+3lza~O(
    zf4NxjW?tWEz1q6q|MAro#P6Z`NntKLI8)~$ikHKwz&?O62y)EJTYlI>55ROD!f@jQ
    zu5vR&OgI*ZSbK&M0Nx$Ic;P1`pPRb9~|i`Y+95=@?mx
    zSFhhhX1Pij^TTf*mvJ~JQJ7!`n-%8O3YtQ3PN1TU=hLMjg|Un~5t%OGOV6Y5GVTp7
    zTfNyU^8H%7-3VC9_X>i)Y%>c_=@x03=smYb-XxI7aD`-4_xVpuRkZtrf#kz)9zq^R
    z1~EdwLDUD$ais6G$!~Ew!l*>|$xl9+&2>b!G21AcKsi2}4;W@?yo{018+e|x-tJj#FH>nbw-5#O^V*w=q2nr_l|>(wSjf$C-E(43
    za(|yn!q{mSCAkAJ_ZtXbPo5n?6&F5b`9n@<3}2A^tkp&TF(}Y~!s6#Bf>=rf+%GFl
    z#M}t5AbRoAeyO<6>e1*?k5Rh1CudjorzM&QM;2rk714i6_6kdf>WBU_UjGG)ewKUY
    z-N}Ke+XGs1=3XW7`&1ULCU;R5qbYD(;Z6y?tfa)SS<>BGW&FRz{A^SPZyosKi<|j`
    zdUGoP$UW-K2vS15F=+Y_3=IW}C+y$Be24S%9n9VuG-`IALWAB87&O$$+&>NSr;d;!
    z=4bZSAirzKaJobupJPn+=``rMJ&i;V9(^_{WI)RxEz%xurfi~&a?gJq*gZD;VE`4;
    zrLXLps7RUU`ShPtlo(n*{N}N*Xziz*ps!3{+W+)Eg*N?wGZ-1N?O&S?O1KvJqW~{3
    z)G>7`-ZR}H&p$tXN<*@%I2^j;Jw1|fN7&2=_hvvY{ZbAAe7^hio|Im1yEi@~jiLvL
    z`y&E*uQ=|*03f16=k`THq(2hc$W-pLCVkYSNzd$WlAl70eibn3KKrC-gTRFP#K$iN
    zH0bm_(aS$Ri>>VDh>2ga_!Wy^v-k@ZAF+rcOsUC=_h-0pg%il5Qb^2BnIFye;uCy9
    z+)rbdHO-o4jzCjgH8sg>1y@*O%EY|xegU=8tp9&IR-!D7X3p2VSk7gh<_fel8Qf<(
    zczw;hq$@8@md5-mlJYH02lv(6=ACFg!7(N|#!PU{t#au>os4cOqI67avp
    zn&k2v)+BqyoT6mxG*e}oIjT~|*{*jkZVI=DiO0*nU{Y4+ICbX*6ekmWFHU|EC$X_I
    o?f;y#rjtqQ0M^ap$%UUT%q*l9CKeVK<`?H+`XvgQ6t>7}W&JV*QxvwzYG?fl1=AGH
    zkTsL_s}#&qc#f=d6wZ+~M`E|Fw_2m1Lsp0G4i>23&C^be
    zJ5`eZ(sSw*EKs;e)*=aShVDN{<0mTQHt4{hpSVqOTl7l^+9Iz__s`RTNe58K1&mFR
    z+ty=k^5%HJdLGnia%VJkCZ}Emb(Y+7ntCp$euut-p%1pKN_f3Ed4=h_lJXCs{4=pblXl+C-p*akamSKL-z;x
    zD-6^ATllZPx3Tfz9q+-=OL%nd=Hs3h^VId!>FumWgVcM>1Jl=&-9e8Z-oTTqPU3+$
    zwVkGMFYvLlon{MEh66vdrq8YpWpK3as$OTb81ME#
    zUJ4_37CHWDmR3CypqWgNu6l-J20v(VzAY)$;51Et}XixCN@ELfA
    zKxe&MTvKH~vV$m!VI8mU41+XLH75w7N46h7{Nb%`QBHx0#5p8YaQ$9t)tn^pWV#`T
    zo~pr~1K4fQ^SXvABwo;0rA*a-Cb@t~htBHTn;&CBviafa2hLy<+P>kW&gT13&j~g+
    zyfEI3{rKgrp&z&}54Pp&;k{n6nMQKpxY6-H?6%1ow@(>!CEit2_q*@%1&477|B6|?aH{x^1u=^Oq%2!DN;ZjYVv$QPJT2}S$D3YfvGU~o9rzV?EB?Fjn?a*MQI)Q+It
    zM0*uI
    zrN(Y`<7c1KevS5ObiW7>)C2X2wd%CrpuGlT*CK!m>&buU{`pgVz}p4-l|hTy^Ltg=
    ztJBp7U(j7Nfpm-TU*65(csER)0@cvew#fF#)HbN5roK2qod&g_saGecGoWJA_?7h7
    zr{l4-OeuCWOPB-3dqwJ-)PcPC-LJBs`%M}@bS~F3)R3>$H%>j>%AdxPO~U^6T2C)I
    zMSJ)`ggIJdP5oQ&f>38ipWp|Vjd6co_Xn3XLnwDa_jwSpVUgVD^rpeY$e0*9P_1{L
    zUsDAJL95^-ZeJB%d;N_!mD!IXWo|iAHQDDP>5nut9*U(W3_?~ZbI3z+=f(-Me>RX8u4_#%6DHIb*w|`Mk4yDOtV(*aeeuJ7cUnPyU0Cd=)$Ewp3YWVMCR-
    zJ;wzODHG>i6%&Ads#@1Im?4tflroYar_zsAA$HPj$!AM4*sAKb_t@3vP?bUtT5(nJ
    zk@Qm_U;^yn$TdtX-Ojjl+p@#6yIfu7>ib+>L8U4&5cE+bT~&7=p`Fc8RowyBc&ZdT
    z(h0Gk{OG%GiDl<#koc_H-J5-T;?nl;j#J5+zYGP*5+3hFB0YXm`X#
    zu_W4}g;w^xATD8q>$YecZKElw!W5&qOzmunG5j!G0wU#ktSv8a^*mS1bYdfn{Zgah
    zMYLW($i!wqCUc)j)cF?
    zQ!J!JoNno0i0H@d;yBQ88!-?_ThnyJI-qPrw=0Cz*1JvBIhjH{m6);fF<|hnlDAMH
    z*GhHv1;b75xa%m*iJD+&w{;vwD!3+RFkjB1y45WMlZLS;CHo)R
    zUixu-oRdj*b5-Z0Xq^0vJs62=)dmSrW82xe)Ti;Mczy#V;k}ZW1tN@2WuCccE1K6)
    zw!s1xQC3lsOQ;|q%OMEi4Wc0PM~Idya5_%q&91r-6as`!pBo^39j>tVu?RBWyWiZgBL7Gz6oqca60TWb7rV!W+2A+|wxTn#xRfV^w_U1PI|qx`ihy-s7I_
    z5BkvrF0v!3CR{R-_I7qr>M`@or8=S}MxT6z`Z?XOaZ^HI^Xwky^SA{BTe!q=5Jla~
    z?s6+v1HK<5*H%ujITWv0b7xLJGAO$#=iTePBX)@l2hKC_OLkkGvgi}h6;^@wFcG%2
    zXqBsh6K=WAHK>si6dHZ>f0HL)TUBz&k9XCr%PQ}Xx1ISm@9ur>75iS=+m;`4yYR?C
    z>Qc|{VK@0kqRhY>Se1-&J5ke?C!-tV>$c8px@Km;k9qyqxZ>#6H6(*UHl$_$-v4T$
    zpJQIaIE4hmm@77sJaoh&Y>(^KkH4k1j*Q8MWM!Y(sJJ6xUQ#Vnm!dB$&LVr44
    zSYfhG7j&gH+18rOkk4j5F&-M9kr`Y}M=w1~OB3@>XoF9?*2ary9BBgw{Iu*RGM=Z9
    z_f&wN6s$B(h@K(Zj27(#mJeAth9SOlkGjti?ZmvZh2uQ=OyLk@*tR;hoJ{J@3?`#_
    zexb7T=Buy1y_BUTdAe7aFr|s4UlldbJj?q5zIXAe2l5i+%7&FC2xzTPE8wH&;bYI}N82w>71nAn5
    zomsLUNw$|PqQd3KNwx!jOW%2{*7)_yikI{p78RAZ7V>r9%SB&`fnKaeboRnO^
    zMUhUItWvV$$G7{|!dJax)9-0cPADf&1ErF?Yz_}c8bg5zkKmVHUIzs
    
    literal 0
    HcmV?d00001
    
    diff --git a/python/tornado/test/__pycache__/escape_test.cpython-35.pyc b/python/tornado/test/__pycache__/escape_test.cpython-35.pyc
    new file mode 100644
    index 0000000000000000000000000000000000000000..4e2573f8b1a436c1eb2400934c0d188df4bdcb88
    GIT binary patch
    literal 10566
    zcmcIqO>Eo96&^~mWXtkzV#oR0D9YNg@*_F6lQ^;C#QELLCThA#b_2o-l@=*m2`MQg
    zwYG`f9-0E}WpBL{MHgr2MM+kIZDn)x
    z_~yMgGv9k}W;oRNz(DekJHPtq^=Zcb!2+Ktps&GY{W!{)3jY$*88aj%OU#HcIl_!6
    zlcUUtF*(L0l!!3Bhskl0jv{R&n4DlnFOz$j(Z}RIW+a)MWJW)e`UBQ_YoGHOMDLB(i%`){AQ%|$?2&+hh-WgGQmTI9&
    z5>+`-l^3RF1m~RK%nHt&;LHoodBM3LI2T0+76Ki(u>amJ2cdgE0M1=e^#N1wG4;M^
    zx*+)9BK`xWew%%tiP{fEE!TS3Tq|_fPcV6$-BBMg^)V_#sg1Fx5Ll3T6PRxgvmbqp
    zIh&2SL&};}(;K!{tQhs0X}i5@<>2(#d4!svgv-aVs8{JTd_4mtJ;NZ
    z(v3Cj@`9V%+OQ41s97bYuDOH!u2J>wlMU0aOCh~mW7puOQmUz%+yB(6Rg0UQqUK91V4d!C`zUV0VS**9w>=PY^=;E&X^G2Qi}WF6%jb+Ky;)6m3(*S~VDMowPS
    zm5m}6&ed?^Eh0WZEj1KB2RoV0J1bpn=Q|tEllAQM!TCF?v0
    zi*{|jbK}@{)_J$n(2CQWm1mW@rd9ykkGr-L6z|+-P>RNWx?S_r=(TK6mitlLvlkt-
    z+IOadF!kwt802<+r05Djlyx@3YJ2uJtlz$yVJYsjj^UVdu61p+b3LVQz33e7*hqfadH;=C={6E-r=us%d}B|Xai%<=
    zHa9nO+7_&Wy2e+px@B+cTK;PN%G`zdoFj)3+grhhZ*MUk4gIF7n6OzmH#;{k@2hL3
    zfqMzHST}2StyI%3H-^2GV<_iL`aKD#WtIaslwe8DK%35qkP0$|2GXQ((nF$QX=X*e
    zTvsd$b}#C+04kRkW)^1JEYw{zd@#D1rhW)_bKzj~t?NLmOsK7(I$3|GX_UcgKDZBj
    zsP-1zqa89tjfpMZ9^-Z%wr{X4{-H+O%230%4b8HYbq#`Qa;`KtH=|uFFU-$ex;T5m
    zyr9glDHj*iXXSHc_2Rkt*_qP0wep2>`I@0r^xQg?uw$K^)7*q-$N7oh%m0n#%<})n
    z4vw}l0*@7*xBmX+&vKhq{-3}6P2(JJ{a&|J`%vy`5OPPCY-L^S6$7Ealt;dE7pFC#
    zaAFD@4@mpg13Wa3+GvR8wIg>MmYr78p(z5kC)H;;XT_@xw-S=hEQc7_3^ySUo7(e6
    z#nk+XUL1@)=y#kX_4xhE$gtY+=
    zYg*O$#Y#HdtGBk6N!t}$*ZilTw>Kc1)J79_+vLm(ZAPosAi%3t1vi6=y;;%qHO;Qo
    zd>}R|$DN<=HNfr$I*;uQbX$kjq~s0lxRYN=mn%AEcwS7^R?gmnfU2s($D7(3I+w!1
    zxwn@>*q@%JAIILkA6#w>8+RTCfNI;D%yL^c+3D2LF&@#jW>yvbEM)@>LQZL0wy6~9
    zao2K=nVNnjQ>~S2@U*>|Ntbk3udZZ3hSW07cUp!-D}2amCf&u3mtG+?imt`B{UrRc
    z2F@{>N049BYwK2;hEbAWcES&SN8q{vmvsP&&lr13N0@M~WbUJr6EzB_C$WNa#GA$1
    zT6cy$2t>P?t{B0&MG}ryOxwbzd$^0?!Nf~u9)y1EJh^ds^)amM*6M>B_m%4EO1*wl
    zv6a<(wUVN*KGKZ(YQ0iFyVj`a>e=dsIcuz!tW~=PUQ=tk-*6_f%1>pAcs`P=Z@axq
    zx?-%U%JO${&~P#HFci)o{@*Sm-?VE)VCF{~_mv-3qMpyT;w~gjLEY+I^e}lh1_T^U
    zKg>o7&SV-sT|y58&=Fb5oPaN)d2tAD5wY*VMfjC~@IM$%*Wj{#1ck=%6daDc;S?Mi
    zhl4mcorAM*h$%Hjbd0Gz49=#J4@3e_rICii@jj*|i3VmrQ&U9uGIfBdgG7VC5L1VV
    zPBL|bsiQ=LFof=YqEk#AW9m53pm+cPm~aPg(ZSs#h=?1CrTOX516_{6MSx+kwZv@3
    zZhkV)R-^25iG3bnpGTQ3v8NICG|E1c*k=*;1@L2(IZ(oXl)Z>B>3M>EtTJd$Dr9@y
    z0h~xGW%T0KJ>c)9+$_cOD?T3j(GwoZ?8;TBkF-z0SZp;S9y76b5in&hPaYL8m
    zsG+GL$G9mB^P*?%hcE;y4h56Or7_7l+&u1FP}TY?P2uLJF?Fd^>AZA-n5T~1;JYcJU9s!864A_b=8!#kuXgWgBK{J4!FTtx(
    zlp{;MAbf9o_>LBKdY0fIWqGF`H#k7y{^m9MlGv~NfeO@s^zu~Z(V}ZUi6Ih@}vN7{Ge7Qa7H_Po4$I7ak;9=Af4I|{?;9>O_
    zV8GfB1&c%>B%)A8BV$ruWS^9goOHVY397e`k|$se9M7{9@)_eGF5-aF6%y5phEqHm
    zGT~@|w804A(SZ4r^ZYwp*y%^7o|v0mTvN=Q1l**NND+5yc81ve?g_th26&yaR>I0g
    zA9N5nfBWjU0mW^H6#Gb&$RyW78r(AvO6GYefW8o*F9zrZq_F^*OGsk@^kt;6$VLP1
    z3^>u8L5W!`@KGV0#)kur6K*b68wNZamwpQJ)+iL{Li`(|5?$z=>g>R0aAXCDB
    z3Iw{3@(hxcXFJT-Wk|swKZgDVUkCYIY@Mdct@e%%jtWQ|*v!RrOKC`3BQX
    zP$#A()Z5h{#CJ8qblIj4PPcE1osuXPNW8Ag`7fZFZxwJ{e*Jlknbvu
    zHc3TX7~vrv^j?7lYa9wD#p!QcIszMuBe0GPNQWgS+sSJ!mVCG2_7pRR+;$xZ-${Tv
    zpM)TS0hj=Ea1Q|vpm9v8w0lT`K#LUIC`{zpIH3WXCwNIpi>Mp5Y?DvNBhVCVZ7GOD
    z089`7UYAYHZkSbi&uaY%bS#WB7Cj<4C)*9SjaK`iLLlxAl@A05?5}=JFu>^fAlpmO
    z!U=)Mg%B=~13W+nqLmSc!J01-c!jvOY6pY|{f6HI}3
    z*na^nzS#tLGEwJPJAeZ6CGaG%&xzuXAuNCK`UMgAbMPcnI?=
    z{BS3RD}fIPOvRve5JLjw=*Lh<^kTw*m8OXXWn&#=p_LZdo({6GNotXeH_Q6Dnk=|6
    zwNkQQM`3)Bu=EW(Qa7eoEE_ie4R}jrz6X_*$!?VIR_`OD0EO_x5HiRUk-%adn(W3Y
    zi(7tX@^8?!P?tqg5Q%Xq6*(i}pEKDmt6TI7*+I9rSj2PAV$n?&i$+at=t!rEu%IY9
    zuQ4aFk8@ZoVsRCV6)bQmFmGdV2aCH{e1OG=SnLjhC_0fi0u?`iixw#8bWdtHHIPcd
    zHIRy@`rt2WqD=N6_NUx{c^&#=VnmsUw2A3s;w&i-dQay$KAoHIU=61F?xR>>jm7nkWS
    z;Gf&D5)b#Fr{Icu7t%40hx8j=_&||@M+*S4MV{9MYobygNpl^fu^q16TsI_*#{LVK
    CZtQvh
    
    literal 0
    HcmV?d00001
    
    diff --git a/python/tornado/test/__pycache__/gen_test.cpython-35.pyc b/python/tornado/test/__pycache__/gen_test.cpython-35.pyc
    new file mode 100644
    index 0000000000000000000000000000000000000000..77eec60e1292ef5dcf622a27d24b5895b55e7099
    GIT binary patch
    literal 56247
    zcmd^o37lLPA$}a+;7@W&x%ls>
    zhEns6syJ#srz$x$pI4Q66@u7geQblEb+W*l+^r)s*I@l4XUz1*HloAjcR_As%+A~`&47Injck_QT@B98e7!-
    zR#n-mfA_1#HZ?z{Dr5ThfNE@4^E*^!hyFdN8avheE>+p3=EqfKT+Q!RmECH7kE-m^
    zIfhhYubRJ3RjyO>WmPGw`F*OgPtEUFmHlddLRBWz{G_T(>fFPsaX`&quPWE8`5RQ_
    z1~q?BRSv58DOH)$IZ6V-jjD2^axQI^rjDq_O=|vTRk>L?XPY}z!MjCW7*hU+9DIOW
    zq`%N~)bl6Z4a(i9-T=loDsNm}1|1HmlkO(vZkE)|HuW%4N0qxpQnx7Y2FZIXQc>eJ
    zN!?~sk05nSx!WamyG=cc)E&y*DXBYc>M^A5Qtr5YYfPRPF&uJz!JsLhAL(y+KlMP~I+S+ucY#
    zsN5+@ow9hi2dOtI_a;fb$)?_`y!(`Uv$~K|{vYPN>(op4tIG=H$*EIQx4cbRm@0%D
    ztLK7NW3lbc)aMskemfj+>#x*Jv(}d1uh!e=XM%RM?ah15_ViRP
    z9Pz#OqTfUw{PDu#qw;sk35OQ_#*7!#stX=|ZG14e)T}*r`t&m=8g;yiq~haGKiO(6
    zAl6@9SeR+F<`5l0;k64hwN|t3U2Fro&C!dey`X)f8hGJ|Bt<_5AGx^T)!I+C=E8wF
    zuQ}63zNun3u-L4(84pXrh5EwN&C`Ar&CulA)QEQbDVRv`bY3Y>QT0I2K=r(-)GXropHm+|N?y76Um$&IK}r~h42~EB983qtYYFJt}d?<1yvlCUF38
    zyK>(laY*DH%Dq$K0Ol^`-Ys#+a-9m<_i?t3H-q*axBPU3e;2{q-q{K*#GB`KbAXKl*e%AHg0dC3pN)|LB`
    z#P3z^1?4s*4kXVjw<+=amD^J8g2aLFmzC>F{J3%h<+ddb
    z&ME$$$x|LrO*zw0MSa2Ig6I2t@B-pq`?-a1L$f;XxmH~T5c5LdHD>X5aAu~~s0P8z
    z%=fC#J$K^n7oQ6}KX~z(6Hir}FG7<%Ty0lhe6m%mHeNjK%`d#TP+z#^++w}q-qJkp
    zADy481uwQ+ezWSf;=gFai&n!OTDat22T1nf8x#;IXN$A6CDq_V0%Q&5jeOW-=H%aC
    zKLRxSC5O0r(^0dzSla-HujkZj>?QKkHDruFw3&|K8{CcH%N_NSlc9R%|P0|-=ZlaqG_b4xd;F_5Cqp?e!dcyRwA+rQ+(Aro!X&xIRIe7@SO
    z&UyY+URQe%#rsnXG)TTA2t)$RNU9Pd)(~WG|I+v};6#KWfyk35`08c^uR&8F2@D7X
    zBPIv~Z@pTrx2JLfRp`uWd=Mss6o0Wf1OB*~#S{=Ir{7uHmWIIOsmXI>;ol;@Qum{X
    zpl)exuXH~`d77e?I$GlEh
    zHtI^ei#2b7hS0q%mOP|#1J2T(EP$fCk^Uw4e24f7$gPngO4>ZsLJrji6bTUsE)-}x
    zR1z6w3k%wSfVo9tB3x4=x2-mDI`oKt2=6?^mkma*Pd9+(<4m(PbE)n%+*o_ef@&IF
    zo^&q$VH9C-CPfO?rc7;ePih7V$3f7$Qe5Z~`<-GbhBff}uQR8g8Mm}U77(R0u
    zN`nc~8!+K@jZ~3$p#Q>v5Q2Dyup_gOBsU%AE*zQa(^wfeVWqTi778mI(yINmzv%h*
    zp+fYId9b_1&jgZ0feGp!!!IL$o1IPU({9M1Nxj)Xr#@3V*G&}vewMc~MK%bYqk>Md
    zUPF>%r_86bUVWON?wBT20h*7|Icr^Oi31T}0tNt@S`PE|CF$TF$J$N%=&s;Q$kZd$0H;_MZ5-eirHtE*K6?e}mlFC=5SPZ@3w+zyX
    zh4UE69!YYq79QbVYJ3_)&sOV!7lfNDp5J=djLBtGna$oBwn@McZWE5}tnPV&r_sDE
    zzcjH50K2fAXA$ftS5(=+xz-ZY-92#5(nJs4hRAAp9)y8~VQ5pB7LJbz1SJ%8uARJ3
    zVr6Tc89Yg{{VIrevbnoXyBP>$wg$ioK{G&A--
    z1~&zI7cT3bR!sI}Pun*gmCIgZ4H5z@XIV0Xv(=a6od%qWd&)Lb|k!
    za??wD-MU+Dw%X-6uU+=U)#SN|7z4<5dWF*fGK4S6q-zdfcc-;&&VPdOeKx_LWQ_I)
    z3>Fm(AyBzIRPH8c#94aB`cPu*lI6Oiw^o4-vB4Ln@XHDXi1w{%$|EI*s
    ze-aJypJE`IbTx8xkvLet93^pa)#d0Y8N_rjqid9-9?Jbn#!b}b6dBQ!4ae
    zxXD?{8j#5(1cvXiyx0NRa*D5eG-k{BTdbA(Njc-i%+Z+5nXS>T1tnnAlM=-wCHf^T
    z8j!gIJt#4#_1D|f2XB+iZ?kdGC1S;%((%*y<3G1tx-m$pul3Mjbu|1+zngFiB8?bcLk1$dvX!yW0E^nX>9gND-^TOcDB8HeNdH+=gBQxC`E;8Q)tBQg}~{
    zKg5I$N1pI5J
    z2T`bW`dVrB{T(#Bbp;%)LXy6iz^puC-`+(nUH118qDb2vSQR|io^p{q?FM*PyQ}$;
    zsLAMRma;Ti!>eB4^r-O%FDp4)o|Meicf4BZ(VDHJq92YRbH)oxS(>fmg=Q
    zO`I#Wx+$y22+&s}xOj-tp>48<-&Iz|$QF+nd_)-Myrx}9moYGUc|@IMVqdB^O5`=R
    z9rK!e*3{5%8RxxumM}bE?%OF97#`G^2~UKWApK#VUu|M?I`Ru-^>&zHaV}WfY=Xs1
    zGN8BVXYma-AyBz3&R$rEbn}a)csPR|m|BJGdW}pGHvG8C`Ia%^i>?BHhVU4@IkYsn
    z8W_6x;RMUzY-*(8U?#j_{hFX0rvMPHViv$*QTz@f03FY0`yw~(zY_^Py+MN7U*UVy
    zxGJ|amC=xMt+~aZdoHodh8TIM>b}4k$7OV2!q>k^5Su9cmkEbPDct;fmxtt9B9pj?
    zk(sy1?4{tA$L_DQFoWGJR!tya70S#-Y1zswfmzM6T?KzKh2Jce?RupP%j^z&$TI%A>nr^B#dfv
    z#{1wl-;u%Vk}(_qSJH)`W|SUTToFPeCS8K!QpkS0`d^)O$J@obrbTZe~$0oUs8cVQw4iSw_(~%||l5mx1
    z++yYmD2|F4T&2OAAP
    z;rI&s^+KHEOq4u>o^|~+(+f(tAdKaFhV^QdKi;+IOsH#f
    zUVYl3Y8A~BJd7XFw3{KHOuGqfWZG>Trro47=FHpeoOzS^wrqFuDyQGHgHq-TWE{7G
    zK>azmwsci}fifAth=1tVqXCj7X-9-s7lwC8CSC`Y$2pRKGTVA3|JD7YX?(zOmFwkDhG}
    zbJ?Y_hV|Sl8DT4I;-&jCbRR{3-kXs{j5FOdS-&t(nl5eG;y;f@SvDD3I=XiFQanS6
    z6(fYhI<0LpjgU)wWIi=of`>tiCT>!e3o?uTbV`;J%aRJ|MZ`4XMA37Zq^{a%3NXqH
    zG`4Yla}=C(eTSP{KGw}N6CQYi)$0`6$fI%%~8JJx!r)W!EBDB2{0|(H=UGfF|
    zNKtAvUGiltUxVa>pd$0=p5tjm49ZZ%N`13mN4*JYH`ddGOcjw&^vphC%$D*U#Ox3r
    zE7(t4o$J^uXBnpRSb50!;shph@PDp;gS)P@qW0;?9wZi#
    zNC|`J<&2eV($PB$>5!hbn1!r(0c7O^(>*7s34i`+)aBp7Ku9W%O6iETMhyF&q73Qf
    z8@DrCLPEnqbf62ZCe}1&%jw~Wp82A)!`YvMoMx?r(o_&@Y@x*sin1o(%UGcuYv|=w
    zvR(w$v)lx&2Q!yI^?LK#P@bwx1e{BcNOM^y45`)fN;+#IgO-rYoD)g5c?5~YBdjOL
    zIh0!rRn4HOkA6|}_5X)#wA^Neu%08V8yhW{!aWwXRR
    zF-w%3aj*nL{GJTcr<(wf_ABR${Rv4<@>`7eZ
    zAy|?nV5lgIlP%YirlWy2&iw*kN
    zhgnOkQHl$x|0)z5;N|C!ADGIAg?1gAM638&F}sbP^nw8P`#vOLvu+z(1j9V`nf4lu
    zY0@OBeK+A8U~(4wns}1^)!f35RetLlZ3mZo&fbDm|-ht8+1QE=enOFv(Z+6p;oNn~mF-
    z>QU?R{TK~ln`qWFQR0QDT_hd-`_eA%chRh32JnetA#6+;M)0|vc%#)JW`(vrEk0m%
    zfS1K6HHN1&8Rq73RG=-*(~ODXnJ_mKi+qF^Ba72odo(OMk)em*x^46Ii|{72FD-b%
    z%MnTnTS0Ev8Ce7GuGdnC$)h?!46&|>X%J&5FJxyDjB9BlCKkobk@GnjB-XNpoeK~<
    zVILM1q_x@-0WKgD_dYM}%#bCWF9jj<3xupL389G#JIY91@13#$gE~pl*d_9(PzvDf
    zf}LaWPT2K2CyhW2*_XsE0J^i*IVE#P?I4yT79?LJ+YyPFW<@ZTc`B+BKj#XOsNory
    zP8gM(DbFIxV3v}t)5a`oNww2#4d;xyI37A1JD$;3V5KXM5Aoh$v)KTJ?ZpS2%(>!Z
    z!)ILkW`qJtAyl9yAh0J+v!LN$eNg3m~+w4mIikRRBmy@r=E2$>$vrXQNg3tnXfg((AJ)?KaYqja9
    zx9*Z&tU$jru>e;Fnkmc;)Wj*m6i6X&vlHdf1@Cj)3{D~lOW4uRj#jTr5D%z~?Cxa_
    z4)nu3dZQ4h=GaY8WEP{(!dz{>Q%dyqH_hwOOTk;@k{x)Hift5*fP>D?{3iSzmvF~2
    z&btKoHIyPB%1XpIbfTvi=VS*pZzbc9IE5~j(J2WsMA%R83n)_*ev#GsS^RQP*VtV?
    z&J44h5|!M=CUTz7D&e~ZY~ufIAh3%w{I`&bU37*G{0}q6byd1&5Kf`VZ@ad?!$%WQ
    zFo{^WyIW(%Eb;ARiCxI1Sz;7wlPm$|Kv)zeS;|_sn^L23aVg#?pj37#S9B5qnXC`r
    zV=U-f0WFv;Ue|vaK7DOWGg-PCsv8I`wTa29RggC6)4Ag(ac<$1GxX4aAu;h{3f9TS
    zn*$cp4johq*NqfoN}dIT{o>$}*Kzs_c#6OnFcZ(nY}oZ5SB;_wJeoldl7=5Ybn?no
    z<0LAWyPY-SOe$6cEsGMdwFT!`y&Hf)Fz#Y$IE`UYv3oOuPjq^lVQs*K^aak^4$
    zwy=vRs1%OyED)~m3X2zBWm4GpO0}`*Rr-$d*V{MYQN%J-D`Xk=iNRI`02_ovKP3GN
    z4zi=Uo}Ui;M9=E3l)R2%5trt4^8FxJRG@!s)_#dhPDWDB(%l*69ALrpW3wG3hOluo
    zD#ri2tc5)|#7$PMeM4s)3sFjmb%Rvh+V6gXG2k7C7CvZe)7}2pAu%iv9olnqAAX~J
    zF;c&g@yy*QKh7qxc$M3XSyjxe=C)&2YbVU>-6l5XY{`|N=#iGoJnJE=*=wAc(K}P3
    zf_{upk%pLQ+m(6TK$JBiyA|*}-U@J1n;|%G^+7A_Z{}s65JbW5s{Wu}cWBR8dK2fm
    zA&+*f7S#tJads~e`0|4|>;&sW#X3W}=A-_2>XZ=hD~$IrB3f^7e$7!kAsOI5^j#Nk
    zFdTVdQ27tY$jVW7m`+Vp5A(jfQ=NrOr8m1`>jW1?ghhSCA5P2B$M6M}Qmc_TG_|E*oDilZp)C&X&P+FRP8wBrpgzsP6zB$s`z1i@v!a
    z?>S6@=C`^nKm9g6f@0TUMN^e=>4T-&C-~Ed@B0wPsfR+Ydb8$xIF$l8`uj}YQpW*`
    zI9Dl&d;i<`@;(L=3?`YeuZ6=S{r^lXv0884US!ivkWWP55NOi~dKuEHABwO8?
    z0fLv%3!owoqgDcRpYNQ-f_r;X!(Fn*1PRZ}
    zDtxhUfHe%$fJ9(J1O$kbF&h-36%ZuBw1@@7|8`Wa1!SmGKpw-RFb~}(+R*=I=KK~0
    zCm19o$Nvr_fL%NIaDv&$j6ON`FNw%ah)^!_FjYsKLAcx?9qUJ`mJ>6%CaV*^o7EAs
    z32+bNr?t3a+35n8kcRuj`d*Htf6R6aAg{`e_w5@Rbuw4gELTE}0>n=q-$C;?cpQ;8
    zyN)1)Gt_8vQHBs6&=>#T+6EI9Ng_J%-XO^i+FjaR9ZQW^(-wS#8xin59g5pv4h7QS
    zl?qTGxQC@lF*wl5sX}VRGAiTU#A02N7ibk*ZXX{-^roZPTR1v^b{g{n6WAS2ZhMyQ
    zjO;;9p(_>|DaQX^R@5g{t&~ntN1xFBv43+4BIEki5dA|7Q3^z%@%S=z7YK6^@2bMK
    z!p<(A)G}|@z!&l%b&2m9zQJ<{Gy!$jW?j9Jbwgv{x`pR}c{7f{c*aP5Mg~I~|2PUy
    zt{m~d4+#lcJ;djF2^PE#=Q{0^vhcbrh;3stO)%^ErjIlB&PF0by0QXKW7)!e)LAD2
    zEmK5<6=EmC%0c|lSYhi-2j8oq9jnFDZA{rJF$3zR$xtA>X*=1p0>Gd_p4q5nUXB_?
    zQbz)OB5D-GxT7bexgsqUca?ezrY{z~E)*+%hh6rCWhMd?vS$&+d
    z(%EMF;pOq8AwWXFhD!e%+1U7ACEzaa&+B3TS`h4*OC1gx%EgB&QJ30}6s|m#|AI+aw
    zS(!vOv-q;B{HY{8aOh{&+)vj_Ur2(8#dHKMFx{d^6-Wsw(!1d?suayBo*-GYg%!FY
    zSe8g;(W(prSdtt-stNZ){5gTZ;)-!?!w{FZN7#We2$E&3a?uBaUK5&Wo4&h(+aj=V
    za0`f*wRFDwt%z!PTeR2-{0)vHh&n77j&F!HV1~`w7!hPsg;8U#R)w8n;nZVM;hcG5-&Y-!VLmm&-c8A~rqtdj|1Wi40h(fn0x
    zT>N3cz2e42y=-mSN*?VXQkFFc@
    zE?=*AztEH;W2}a9s}sL)OxaYSH#|Ny`eVIx=V(Bw&NVZc9x0O
    z{ZT!wxSXe-5>_BerYl!GNV;OwfICCW$|EG~Ci}a?3!eZ;(OSqOP?0*$AIe>#_
    zzR%*SCw~rS&&?+0Bch>5IACGcPS$)}UZucCR-I0;8Ui5fevtrdz%P{>%MHT8I*u>)
    z&1HFPYy}xeu@0qZL>Ug~Q@9@w{GVZ6Ka0S4m84$5hy_W|p_j9C)&Wnyahw*$hevwt
    zJ?ngH>QVn=to7pzeu@ESxU}a_#=ElCksgG2jYT+EYx(dE<7j-n7~=sxeuM$-GM}zi
    z|BDz*GkAue+!$t3
    z?7S214fEh$#ecav>@qG&!i{10iM1Ybff!TzB?TAxr0)=8w&C~=G3FfN-C|6~y8
    zlnHZ38B=g$Ao3hAHA18Hi317Gg
    z7TyUfwWo34NR7&#AO6joZ%{@A${zd(kwhN?rGhv-cpUxMX|3G!qqwGOU=|a%!FkX1
    z_u?i0=NV9U7*%B%olLK}oe!~6K^tAAk+_
    zOrbNh!!dDbM^HiEdsY8ooR;YS0?G(CJSevw;8qz~VAvDhUqt2)NAPkcjfr6@tQg>@t#|Zapb$MTLIh`fM-xVzK1;8o@UJB}
    z0|L&r{16s#h?{L?pi-Ep)|$XKcnATQ1(-M5Ni3@mfR$w>49z5sfAk8l9aH#6(lYp8
    z%?LA-ChHndek-vex|q}B9HwPpM342Z5}k}Vh{&4zJvz((mY7uI1tArW&t3&!DO%-0
    zB69atmlA4^bqpB4m#xB=^1xkH8ST-IDw%e;TC$+G%p?l_?ijg74)(1GObV~$7QyQY
    zM6|-ZjEHDUsCEu2ph86+cH)8E50sj%(MkwjE_NP$(@U=pmCffk*bLX&8S^hncD>p1
    z?7^wKqXcHxi^2KAX;{?;wc#>yNY9wF2<}_(UG!a%PXR?2l^Y-Oy!T#&QwAM75
    zbp^CvPM{dpm@s?~U6Xh#!Y%a-za(hDfmttIqt~CAepn10(+|rc4J=}%8bS|@aE@oU
    z=!5RDq8uX%n7PGhM_4~+iDz)A0JYM<^wNA56^?Xjp{qrP8k!h=fVx%rnma84St7kw
    zOCZe69{qh1i|Cd~f|JWhf;4zWZ#mQ(6Pmd4@)gf)j!N`gM^W;TcX--4~qD>M8Ckl$gz>
    z9Qdz?OTo0VjfO|@!lQ?&aBhWQA6gXVC5MA8yr5|LjhZw~D)hd}A13!$9
    z5^6C^-eOq)h%MiMnqmguLl%GF3ULszd7ee%OC_~7gIY%Sjw@^A?64=pEgT!v|m2gY794fG*)vl*9snQI3R5G|3c^za85US21K{NE%n
    z#9T^Dl6JZ*bDM}MG!8pmD737dE^nnsgc7t+#2TwZC`dQ4#~~xDaS7B=V)P*!i#%*W
    zB$7Flv>w0Yt!%Cl49=}(Xn_UHm}OlRV7!t^0TF`7-Fxypg{k
    zUBG4$?MhIk&`+H-fp2gQL4=>kA&g0>l-Ssr6d-Wb>Pj}?TCpkia2u~vU40Tz-;8hRbkDIcJ;Wa(*)pZ=T|2
    z9Qgh501Or1nu;zkC(17zi0(m}Krs*}f~+;4^nRbf&j<1m9rI9fSYCQ%?KqmDiGQ2b
    zCN(iDPRY3dCW=^Zg;@@1cMxLIjGHCK`;F-HFtRmbKgEnxYQ$1-7J=rwq%K$uGg<|q
    z9p@PKa4$9Xl*Nwo#C}rLB3sH%vCb>tz^91~qZfKmE_fAZl($}8CkNgJ08<>eFXF(%
    zh6AA@fh(=pVm`F9E_vM(OYfXq^OwPlr&i01t8CKI)lA3;Ge%m^N=bqlm#gdvLM(QI
    zFr&Erd$8hi#iUp<(u>#Ja){K-mw?C-(}R#0$t!bisiv&G^Nnjf#aZ=+Od2_t4$6Cd
    zV!gNX_GS^FdrcVM;Wk#4TtH@=aDT~
    z72?02$&WD*bx2^Xfg^yE;lv++-KuuW;%2xX$FOa2BlhP*Gxu=)SY^3v8`hE0SI@I
    z$iia6`%46vi<7uPZZ*8Wss^m97wF{uo;AHfn5mx>R+vc#(1Y`PmXOW)e}xj~k_TL5
    z?eEhQm>jjsViZ?vW%^)c<`T-6aZFr_rLw;;H9J*8H6O%K0o2i+EE
    z3Or6+1cRN8sQHYX-2jAeW%S#9cnM~46@UB#jOYD>_){4aZ)Ejc!}H(AWGC$7eThM$
    zAE62URitqd5ru8!(8N4&Hbb|~w|GKKLUv>qap;^2c`_zH=lDVVoc(v2KltI(seyzO
    zqaU?}Ljg_&)0f_eJ2ou5gfqAOyYVb#Q4`3@V10hxbL%+dPQXX^@_WoT$%6FRg&M#Q
    zvNifVr(%v4^oX+uCps0K3G_3AxuR2c29h%Z
    z5N{|hd186N|3HwjnwYk|tP6fqOIUiKd1<4Fke=Yc0PG1>0IP8T_Em~Yin05HjfACv
    zrB-C_NS>IA0Vo7h4rg-Cz5q1fKVEuYPCegr)bplrIX{6`Qe60FdKRuvgIY!5IK+wA
    z$4rY6Cp16$kaEZI?X{@+@oelaTskI)!^DQtjOjaKu76Ul5`DxTLx&~Kor`IOx^XZG
    zIKqt`uYDg#Cj%R-Z2oV^5RugK+!hNx$bohO@7Awk{O}Ow!8&9C(|oNagn^|b}Q;qku{e%&P4?r)M5Q_|7ZcLeuqW}u>!b+?$
    zQ9KzQ9l>{SbSqW_@4$E0XkXruQfeq5`Vo2pFo6?Eq$4u+n@-G*PWv(d*X^2
    zl%Wy_hs-7H=!OyVH0wN`h&?KdXAJGR-HM`K<=wKJUQ!?^&Rxp9C870-H
    zb%6XghGYCc1I940_$>bTf6m}97{r6?G?(~)$*1hwvl&FF_z4n=>Id%XZU*5lw8+@$
    zK)dFYVkf5v45HL~GRfyq+pn=+=przQ&)+E)A@dx^cO4pqJGml=pKIDh=N+*Bf59?P
    zXJULUr(`tC46cb~mf@C9N8Hk_PW0`;p@d-4A@+Fmkc32i=zKeCYPWnl-FeGe>{%AkLuAQ%xpw?&X
    z@sSmDIyfJ(^{Oa`uyC&BUc!v$KE5e67Hf?<#%tk#tu3MJCz*?){pTctxVG}6u$->L
    z5sFLf&lBidk<`m~CV!1uKAR#nQ?3@Nw*c127Vo4tH50!=Z;q}>rG)XJ>CI6L21xpd
    zS*LBS4Qp=WxUNF0wGhDG&7zcON)kzXM(NbH8~H?~Pw!&atEcc+{Ea&In&v}x5$!B!>A_xzB3b+14bn{AMV
    z{R^0Os`}m}(WHH_gU)sw61H4x4w;uCv1hA@NomyuL?W>_P{Wh1eRyn^G$+|O5lkfT
    zHtk;7&@Tct;sS>(BQx{6^u58v;ZRttRR8x81K$n+B5_4`AjTi$1DQmWfz5wq_Dt8P
    zm?t+9D?}*I=I80&z)X2r%w&&z+dBrLv)`>3|c3aVB3b7h}i00$?1{AFo
    z88%|om``^+$TfA(XtK+0)Ma_}22|4q#bjSDh(qd$?!~oS7+DJ)Vq2yDHGtP1Hy~yS
    zv6sO5F8u6)aH8fm+jxZ|)R)S=K+R-8_B|)?4K^dt4A>zvMFQunG@8AOFt9MF)!jWI
    zxH3Wy1CnF5tsr!t&y;LJlUp+g-KQ=TxW*RSvORkghZT#0Xr!@e*@F=&BSU9>ujQvv
    zYGRL_EGv{fMQE68o9ozECz~J2b9Q6h<4->ctMrgjGY6-Z*<2@k@Wj`2aDVH<{&?pd
    zmr|Vu7(QWvq4hDwKhEH%5a7awA#q$HWvv5cKY%45y7rh$yR$pwt>Q@)SqU*WT
    zaua+h4kr1NcTV3&6jQ5}Ft`}R5%4{c{1IeIA{m^cjRhPw5DO7>zw8cr1FlI#pp)cu
    z0ScErR{+NhM8^qKV5it41S-g2j}5>91Fk(P0EY^=_IQ8_P8UG_^J=>fBRP!_!NHhXdnqUD|ROeV#s;V{){tD%{5aS7vrLn_{aX$;kzNs7U3k%Pfu4zVYs94&3*z7}!
    zGU9?4nuOv~6sM8Tp}+sv3^+&;gW0E@Ai7uNQuroYYhhO?2gM5uC&_9%P>O=XauEOW
    zxV(jb?LQgU8Vb^`;Y&4CBOj`pJibE7>k&!nFD}Rw4a{+(5SA0lpf_jz60`_d!jYrL
    zFeUWA5R)lQ|MQ615uw9YP4JNflZct^)|PTJ=d}a<3|oUQT7$zUIaINb%fv`$_N&p3
    zEJj*ai}c)0MDU2~Y};^HNTO}rkdbOz?-#E|D_4+=bv0P;dW+%CkT~1+ccxISOV>qe6$6`iDd|a4bDv3^b0Dh|b4g^WA
    zW_O5G>;G9p_W0sxH^v4+pwvCELP6NSlO>fBPpyTv!)tP+eR9HA0p7bXf`d5
    zoO)}kJ|sj7tF-*AE7z|PMbe{a93&C2@405Jx;S^f9p7(OKKaP$azaTa6lVRi5N2Ru
    zBdXT-m_;%l7HTcmTTbZp9d?nLwRphwEp`yFiCe|?b@E5o8`lsWH}yhC%;*s~*}cay
    zDDrm_duG6zr55#R;~7<1Ts1my#r-rXu36^Y6xQ;ViPom}yLv`3Suto|i}KfJ-F9@b
    zwuOEy73c%FC@0mv9&cTZw)N=a*QFfwZIzieL-0zfg*;Fs)xzwTu0{hhjTJq1N|JAN
    zOqyZ$SQ0L!8ra36%e!RcTSyo#Z2XGW
    zi#|!V)5f#Se_t%48+n!z4|7OSxYvu*QyV)*BmNeGvz5U%24f6%FxbUloWX7edl>9x
    zu$7v*%$QhPaWKgTqj>Nr;TyxRl2h7+K^a7J#mS99f8f0Tm
    zoP^dZrycN^oFx`@YHDM+@oc?){yq0417F1dqJ+r(ji08d=g4dy=bjr{!N16Yi362*%*C@dK$#Q71HOxGGRCa}A0}q-
    zS1mn|i4)$R-td6SnR6a)NB8W){%*&3eG?qW+OK>NFvD#?>ga?|F?Il{{`CyR>ozGr
    z@xb=>cQO-kZ6PI)p;jcl!1xfs=SW-O6wVz`CEKV@Zdp@PB6j<$-R)FS%7zS^F{})0%wQBey
    zisu0fCA@}j@GJsNagOVu*Yu#F!}|^Ijhm+oQ&i<>EY1*9hGo?+*8?C2#oBlRu)&A&
    zAc7prh!qbJ&j=d{8IcJZTAr{Q_u-Eg-bhAt*ENX;_FT3G7L~a?F)3&@Uh&*-C6=^@
    zoGYD0E)Ha3sM_)7^wKW9TRbT1V|mL>EKPv>CV)pTLY>tq;M
    zH7DqNH}O1xf)r+taS0*L0M5j6&jL~yrTP(!K8{EVqsNHRkb5hO9_
    zQl*$(HZc#fj_5svaHGYJR=KD(zWj5BTYq`HOjj!&G1113J=-)z0
    zWr@NeJg}X_VP@)}kR1~0o+_ixA0)Wa{ewo(6t}Jw=&6xTt5cT!-aufc7~I6*W(IVM
    zREoic`oiP0x?Q(2`3Qp;Zi!8@)%)<^%LpG!#IoI@EZQB|=4|WVhWRf%P0o%;|KS~HV&WWL`p8GXQyaofXK|+4+i%-t{gn
    zjddn#)rek>oEnyrD3H4v!-9*o?fUNeA-Uv6tXc7Zg(FrLr0{Tq{b>r#iKY*oxdf+7
    zvYIVo%t{MpG~6x!Cknu=HhoIY7^XpY=inKdOaerjXwx?RYNT?woGj}9c&0h9u8CwK
    zP~SR5`v#*Pkt^Tu!z{RlQpXSUicwz)t&S{M1_u(n4$V|hulK2of2Ll;auS(E51w@x
    zmr3U{uY+tD02S1QobvyPSl39wV~l~28$Btx15?aM!tM^;xiSg4^!z)rG~bj$F#Co=
    z^72Mh2&(BUM7kQA3x(+4&4;9rq7?FoeXJp2H{SsX+Gmn9B<45T(ueDh`_#+t!Hvfn
    zPF)7r66Sr(tkNJdE4H^FRqJ<%AdL%LKn|c(?)83k@uTWB^pF~JCv%J&%mFF*0X$(?
    zh<1NQK*pQWpQ02bwm#*&28ob31ep*wpRunE$!q&^gfe;!SD50p?K!?Car@d%p^Yem
    zmrAAx5S?@<0ZpSn;@C!O&^j9}u*Ho=?9|lA^xK@Hkv)rVpJoi2Ps!#I*;%qi`P64V
    zd;&!*?N3TX*{#>wW!!+-UJM}Na0U8UmA|xyZfr_=Dz6A>`Ms}0`-@2%+iJ(C&2K{x
    z7Mhsy)b8zfF!ARI^&N~+_f-lsV6l?F<&9>TO~Jp%)gVFsUt(jujJf0
    zeEm-OHwTm&LyZdN=P3Qtc#8uc{u|*xtnx0xARft=V2bcx`tK%Z19Ze#ZU?T!9znjn
    zSk)kyY37tQH^s1sGb(|2;&=wZdk8S*+lk+Q~sFqyh}me5X9u
    zLViM?_u;u1HJ8O~ahiuDi090^&g)6U$o4#EwZOphDyZ0zV?0DQk4d%CdA5
    zqPCqPWP5?3X+5ib3ZVLT6YZa73=#D?_}&ABx1FMQ2a*blu?UVkeu}oH|wS6
    z@ib>(&GbLy>$@3DM8PCu52BdAAk{|kOMx<5@!gK^W?W8=bCyzkDwyx8qef8qq0Wv?
    zMWkvi-5t(B@!CmBkOugo_-i$TA0Lu!d3b;|%NIXSgc}|6uybj%{4{E&faQKSW4RxY
    zjV*v*ZI}DWV26rX2)~m`6v%#|`CaOH9@wdQ;g^Io&}lNWl<2IHgbHC!i1UkYHs{Sj
    zpVVbJ(PjEQ-4P%Mi5tBK^#rBN_imT>v<|$(WV5hkrlcNi$qHB1VISGY6gt4&qgn
    z+nd{z+lc+H8!;8W8HxaE0t~nXyI|uGs$g$Y8OEx>78o&O9%WJu)bI^j2)fx&r(`oD
    zmy5;k?#5rn%PZ5w(&K1%c?75nf1t%}MA;f?(N#_t9geI>2d`m)
    zJ+^va?_3AttPJe2RRG(IVY-;bt-H+|II&KiNifL)cYpanx!%p(8H5&)?OrelGS51X
    z*a!>ppf&pw>h>3q(0LB*31%{mHc_p=!b}rUFo{^GU6Pjg$pc^aP`QdrvbYz{$idvk
    zor6%OOE)I>
    zbsB5C(rXCyw&}arYdyf=K?V;o*h&yiF!nHmm;@3d6#h{>_;J)M7>CPbc=TAQG&+ub
    z&DYB_UM)6Xtv6?8rXJwsVp@%3k2&2Eex3p0_K9WxMg}r9ag4DMR=SD7`}y;qBB+eQ
    zUC^w$EnIbAoqs;PNy1Z=4fd%Q)T#@fPfId><$Sxnp!Zw(9BjwA>#YXXY09GT%CIe>
    z(VCMbBb9!W;?v5jY_!kWqvKjNpEEu_`Ax4fq3fu!IhpAYF63&6FG}BhwS{>Fwc8lu
    z)Mi}ktKK=i^iD=#JDGv2frU*f+_`Mr6k>FQ!;jA|wEVXAYVg7;|K}O}DTB{3_$vl~
    z&0yJOdoqbC4lW^@Fi2P+-Li-t4&1Aq(X@25uYNy_mc&1dUU(p*{zxkc7zwsL@N_|Vkep0BP#o@l4
    zRH};qmZ~e&u#|17Mnc&M)krEksTwI|rx0T34C@7D7sC3GvWKLeQuS@BF|6!i(^p#6M^s}}*`ww>qv~U-v0d5Q
    z&G~?;?@*0#Wsj@Igt8}8W2drrs>Uv5?^2CPWlx&>vZ}sYHTEcbk2%k&`d-!8r|f;^
    zJg@4Hsm9~Ve%zc7s(Mj1_A7h8IWNe24=DS9IUiE>CsgC0vJa}pA!Q#jt=m-nN!55t
    z*-xp)VPzjyjVWbMsm9aFe%jnItm@CG#t~&7QH`U@K5AM=RDD`CW|Tc+&PP>!RyB?(
    z`pswUqb64Ree%gpA1{iqV*|NJuIz&OY5|9j;MFe%Y_TLFr%uoa$z=h;UX>^
    zQ`I@SFc-V<3N9R1)e~~zMC`&PTzFPhPs)Xpu?v^UGmBHteMr8WO8LX(Ww%vdZ#$)0
    zW38pzezsb>U2|)#roU}X*P89p%6hZXmh(Hc_G-y(m)lOmX|@-pEPtRtLZxaP$_7?rdxOK`8i&^
    zpmj_8c~KOr_hyGJFB%*!FW5?yfnN_G@@q!q2WZYi(&RxVLI+SDQ<^3j8>($_e6$?-ChuuKPs(Bplb0|Kg)H@<^(aHUU
    z`m`l2=o7uA*kbVxye6$aWvPk#mT>;!RA#|XHOmdhPT%2syn+C=**>~-Z_V-Y1oq5Y
    zypHDcp40rLw-cPD=9gF3whEwti-%)~va!fJ(}B>MQyQ4DRQYK8#=A
    zxiNqG#_KLXa^u?km2&e2KzqL2F5h^qRVmkREIEy}8*8<-qs!~Hdi7{?RnIkUR@@uy
    zmTs1-t9D$3YMHdGMWdkwcK_uf{Exf4ngoy
    zR3ZekBD4Y=2KA&ueI*e?YihyU$!?3l(XrD&+7Z^~ShG`XlX7kVm`&C_#?2lX4SULYV^S-M1_na*I>x1#h1YUgj1ng5)Eg0!yy!L(d{JFJE@KU2UKX$8W~=gUtMqB>IZ$khW)!IQS^cS
    zeK1=)q22+o1p{vcw2O`-s!c(9m)+{h%}?(5g;)Ng_ufl(>fJlgu>oMyEA@4ERp;;m
    zKke2X2RgSOlZ&7`&9P)rsKk(!PizC9$H#8uW~P_VrSDv&rCU(gO=okF$?ggJ5WGC^
    z0x#ilmN)E05eIJ{%{X{&y}jBGsKn7cJK2N60F|XBs?SoB5#$mJ{t!E9>6$0HeJW{0
    zAxXd=Fv<)@z^33j&$n>Q%JPO|!wK|DFPt`2CFDIBG5dll;rS!OWn{j!-fUM}cbXw^
    zWQ6FFzaxEe87FPPhwLk+0`!TcZl#nyB|wX@KJEd*?ScV8&^3qTy5axOgh(%FYAYX9
    z5WM_Dp4%zg(JheTDV%89DE6RsU;(;L31*Y9KJ*V&i=UQ9V{9p^f-vJy4OLnN{3|BB
    z9ldk>s1Sb{U0!0h0mOO!$PgdCP+4uo^kF5m(D<7#5ZvxWrH2|LO0*V0dX$5jq^Bv`
    zHX%Fe)ay7xvfI(~?vJdCFQT)LhaH0Uc>c(c7@=i%4mNgQT|?tRK=A}BJ)oe7GZsxu
    zAt#}b+2c{^(i>yFSZ4RdW~*I<#TiIB1GaPMR)o(cup=$Ki6eqt?`S_N6}^Ja-UvGc
    z?0DWEcC>xAkY_N@-y^rsG>1Lz?+h&P*PQ0f_UiH36Co?dDJzLS1)Td)G2jFUNmEAL
    zL1j=+!0Z?0oLZ>O;&E?VXx*F6KO|J961LdyMltI{Y}#Dx-N5CJnX_k;u@abFoWQZm
    zcu5B`Bx^y~z5lEHiBhHRly%@#xFtAPC4k}w`F3)ZN_x|qy}SRT3sdR7GWq+^SxoSB
    z3{{GVPvDT*LiVIITl%ATFRpBXVmfn$Yvp#Oug-d)l(0Y-3Gd8xIK{eIY_B@Sz$h)Q
    zmG9MCh91
    z<3KDB@xPSt(dfB){fYw(Uv&cx^3zwBE?v0ZMVJo+K5g4>5$$Ih(rtgz)mJf#wQ#=5
    z@^)<)$FzAK0T|DWXlkgJG#^6;_UPj4*REZ?zI5Sy>B@zrOIOb?O6SJL5(kS;grBK8
    zb%(A>OX4K1CMx`N3xuNkV&n?0H*kmrU9g4{g`{R%7gmyJILn*92;PljrNT$Wb=)wjs$)_ILc>@j
    z1G-@Z(%z<`xFaIFQMAz>QBjN$5#A`WXpgBVo`^_q6h*Xms3>-bh;I}zMBF!u6C(N>
    zMF$ZBj$(p{1UtJ`6b$67_ee_^1!N22oVX_p|FLDCw1ja#wmhbyXdj}*)#p_8l+3-T
    zs?V$HX{qm5)fYgEFZ#PWp|$x|qtR-HF|P`xEPwcW7{@#g_h(VS*lUZ82ZsVmfpwIk
    zAXtZQCov+{Q>vW~?@6n6Cah;vdmyY2s5Y>O|2ec+d@8TntV_>>s+|w(1=Su5>qDwt
    z23!%`99eyUH3ly<9du1^QPZap175$(8qF!O3q*4Wi;48>
    zFE|(gMNDmN?UQ&+oFHW-#ptEs%YS3h*_h=OH=_dTUUm~v135W^L#Tn$7aSbK2T>2*
    zc@r4CZzVpR5Q73f0Gv-;6$%K<3N|4M081mZECv=_QkvUpIM7DW9K|XkdLiIMUis;3
    z=a%L#iDmoYui|EC!}JOagjIjA>;ft63yit>sY(msQn7!N#_eLA(E$@}j>$5*2mT%g
    zZAuBCZLTQT3cwIz
    z!eb%C&;;`~sV2c$gqpF>c&Au=ohGmyTLEtHQwQik5%?~`7|>!K&315*i`Yif#N9lG
    z>lSd?gmER>yI&kPZ`knMON@G+dD&^Y`RF#B?VlRZZ=pedKMR}1ITl2Y_;z+7Fh?Z*
    z9k_Iwrc*D*Mg-Kt;3XNrkK!hG8w!;e29kE-m_*H++8SB09wH18J2Wl@Ll;oFsbW+P
    zkb7WD-_>
    z1`Oyls{*r-$}g7dAodu2`x%jCZeLjK6j*J1{raSJ@X9tspFn&3fS=;aJkqr*Q%co|7BhTt`zbzo~~QCmdt
    z>7uH4;_v~5^uomOJ?YMfP?5BtFC#Uf;W`fhg2MhcJF+4r^ojG3^fDwpLm?bL<9rP6
    z=^x;XNa}2nip2a3&+^{BiG+Txi~ejP$94IhrbE_?KoV9>o9Ii<<@}G
    zlQJyPszjra^_T&*LMZO>N+W5a&^dJI0Jll};A}9OU!Ioq~=k73-Upo>+L&NdStWH&$tQ};%hrQCLF%}?Da*V@d7
    zwO4iPPDjr8+qBbY-F8YrFU$bLm~H`@lsZTQw<@=oM$~QXN&_iHpwk5|fz()B5*dp4
    zI-YTNpis$S(9_uPcxJ-d4cf|6m_PFoKo`j-(oiIuei|n?A#+R#$pf(ylSPo(kVR~X
    z1QDnTG-rg@u87OU_a&7fBNj*w@_B+H0lE?HQcT!G9^~kNfiNK`LL*QJ6~5FY595Xz
    zY8_mtaLoMS?@0RW#tF$@SO6o!>sR2_QG-a(XkiKLN<9E_O}6i?`RUu``nuyM+RI#h
    z;oPm1YE8t}I;1)T1%p^yh)X;w?4hUuUfUYh9Ww)=#LBIbcvg4Y9ggnmIzk5J6Gf^p
    z^dVHD32`2VwQnVq-XHiXd%}=rr26!EJSC=VI9m%sjBX;%JNixttk4-{W%=LXxhRcT
    zu)J5l%L&H&4t4h?>N+;&j3#w=EF3=x_VKgEHM;Ejqs*h64_5VDAbLWDexJ{Mjs+12
    zZj#A>N!
    z8FH54CTMKfmK`rsMDqZRM|$`E5X%z%cDg41z_~hvR;?MduK_~vBC6hC3$`{g0xoWH
    z58!n#m)%;W`0+RIuG+6$Ut4}>?RMprH`Xs-TBuj%F1GEDpE&~viaR4_<#9aHrPey^
    zFf0&UW`89P<~KPb3Q=Y4qU1ijHJ~~VcEKix=LI&*_-p|im9DD*$SOl7KG_
    zbK`qsxPkDFTYrNK-WZnAmDg{swu_LOWJbddY2Dv;qB}eOsqHE^);$lEO?7h|7|=fUaCrwv&Rf7
    zj&F^MzCCr}ff`8g@Ksa{1$Xceb94Z30#D=LJdL31mTh
    zN8T=E6=KH&^teQlzg>#S=*m>y
    z1W37bU=d33S??!aWDkHKZ6=2T%vqL!436i_{8&2ogMu
    zaNFBKDY%Wfrl-_@;g
    zsshH2R60G}?hcXV;WW1yj!pF&^QN7PO)$LhZHoV2`8n}toabYaeUlTb;Hn8po$-|HVNz21f@@5
    z7Mkj4D@}<7&Bz(BEB9$a*s%4mHEHd`;XSdnFqP*b7*d4N6y$F=^dc<<%j644Y>7t+
    zU-EspWMBr7sElHn&{jgdpQNj1c!urp6T!e>5>bkV&&gg^!r}!1tML}0_*h}&3m3$P
    z1l!VQB&f~JpGQZMWet2zf%DKp*Vs93ak^Tl#PJWqr1||}i^w8Gh|nVjf!lyQt9hC=
    z4E|P9>05Fu<`_n}ZiF#DjGx2-n!JCCL$7)9`(l_h%N_8wj^sQUIbsOQU{7Ov&
    z*=#czi^G^_5ReJ(52IlxNehyPu!a*!@YM*oYnSC++?pPGdg*w&Jr^Rd<26Dq9bXp^
    z67YaPCMoxc!8oKt1}Ox2%A=$~ZV%P&ReEeeq4ta_%X#tJDt5yZneV?l_qtOd|*QFY%oWT6cV2Z&I)=3ELaf
    z@{3{3>8FSCGL-Jfc0BqIR>4NCz-D%C!sWQatfS+GK5^%5L^s5-O#UzO^&JRI;nX)~
    zRu8I<5gPQz#VdwiJf7b*FlG%vIsiHkFt&R7iqK9@i{ee6#z`O@x})9@4MrRQYYdQs
    zDMe*}@DMR3hLVIe=`VtY$j*o+fk(5TD;)7?QtrEtCg~r-Ec9ns{6Q4(op)Ex>8mG{#RPbJ=sY&Y>H{h!Hg!BY)O4rg7P5@7UrHrAR6+>B8Bau<)
    z<+BGUPq4hn^$afPpTUUi9ZN74&O0mR^?Ey6E^6nI`-Kl#Tf2SoqP-2l(+c+9H9@Y;
    zinE06cjkWAi!V6QE1V|9O200r2K`jER%we%Hj(=+Je*c@rFL^&JEd@}Zxhoacv@NG3C6XO*5gosI};D|b2fApA(%Ou
    z91cM+$@A_HbzB{w8HUOEw{OuNy1Puj2PUO7ElKtin$TbjhJ#p1F9sAKHU^rQO*L3Y
    ziPW8#qI5i@4#5ee!oHfIne^_H>dkkzE25f-JbUoUy$GEy<#O!2YteBR3eVw2?};$W
    zb>@&hQj}o2)F?@?NzTzY19$Glz4{kf>}Mffs_12L{Y_#-q$d~62TvOHMD5TcZ2ZwU
    zuZj%{*Dr|OrY8{$$RQUpE|Hz2mcdDoL%z*s@TS1xq0jsPD!zU!$2+LcfdXG`ZwF^f^3ssx*!zE+s^<(kwXwHdWnS#Ab8S0KmMSy~@*A
    zAfGZ0deXh;8ef3c3QSH~J^;J82i=0opEOzKXjugEK0#*XX9?~Uo>T<(+gpn+Y&b91
    zJDqIeIBfc^F!r=z{^CqfhMM5bp2ex;LdS&_C5`#@Bc<$%^>SmmTK?Fd44F^Y&ApCCv$$nA%5dlynfPV5&uW2>S3BMukjn%HZ3(+`+_&%<3)&SR!BWwMHyGVCo(^Mn1=
    z<7|;Q8Y?h=BOru{O16hEi4KshF=Eii$e>A(bgRhf}3$#^ds2&H>DOvs1IWh=e%cz+m_nyr$&
    z;+HyvbindXeMCS+-9!)}G$M%nfe0c!+7~2cEc!R1Jd<&x4Jm~&B$GK358G%9kYy0&
    zXAg1#O>YFwH5d{hX*kyp$&h(1lBz>)0#O>zCQbL_sQGz(&L;4$HF-rGU+5drCXcg$
    zq-f_t*KDqr3X6gP?I62@)USNUrnI74jcCc@H;94)I#C&F_X!01K$FjJjfyS$i*S)d
    zmjgKFak#&Y;_ih&n?PaTOu_>`5wbYcjPY&B-&TMWa6dpSs~HToh?jRq)SGu-W$N7I
    z?4+O5efmKhgh_!7TRQk52O~w;+gs{Q%8Ky)Odf7{e~??~x(_>dWbf9-{;{Kn;}oVE
    zS@4suTv+lm=P$f=Vd;WDaP`{K<*N&e-5YH>IXpjk?e!)720$ff{vA}Xgz(aZbLaH}
    ztCJ@B(!}S75OJxNbv4|qv*gl?l^uRv#vc`lrmzVk#8=tSPi>p+39HlQ7jyz1Q
    z2+}OFn&f@%V_3l=6(R=I18$Wi&}I`!$iuP64Z#p&=mgJTc?G%(HgbU$20&SVo_p`k
    zzx}lh<2ll2JRK6)d{o05M+H+gFS(G?`Nz0(N!EGjZ}Q2%&*C4jAlXE@2(KVd22V{TJ=xLV
    zkybsky9sTib1$HCFQoHOt$vw(ibq1fE?V8!)L-L;Ut!VFCV^}dLj60u6ghWBw8gwS
    zN$2X15qMdXSg5vHOUUX{}{&
    z2qnLW3egi9%MfotI?!w#f$IT@1A^sEqIww4jpcx`=x<-N%kL%dHa-$_=J@P$PhtVa
    z8EyhJB0|tBs3Eh;TgP
    zHa`T6KVk9DSp0Jqhr0_Bm0m=_AcK1v
    z1$GyW6f$Fjg2C8`vS496Q?`@1j={jnM0}s380TNo$-t&?~eaV$h&b$jx5=0#`$Yh7DocgH=n;U3}`T?o3x&
    zjTy-p=s%8b5uc$cVN8`@WK9_RXL!kY8Q5db&*jAsAPjBPPiUYZrMnG4i^!0hy}bRs
    zvzrb~8)MYqnTMF}eTgFkCb6;}X}bO>KGBE8x3@YD(N>fHSDnf0e@T#ZXfHPPARSS{
    zG=b5*#jLzdmO^*^P9}{jvT?4x0;qA=ca=Nkl1&PnLQrI>E^^7d!lPD7n8X(~3`heD^BSeP+{+u3xU=EPDM5z>a6PO9sP1(-w6;C{_{
    zm=6X18f*WW#jmsYw=B3XL;rgg9XTO2(NX^b7l!z1idpEWjA$ijCe14q9MKQ%QfF~I
    z%s0h$(A~QpY+|G4n8z~G2n)nOwIT?i^B;aOa@Uxb1=e
    z#T+jTNrzSWv9Jq4+=K)A7C
    zVZXv4xs_4+ATdnGOQP?e1;W93VhESar!!QLLj4q?_vWJ+SR$R{g20=Ce=hUER#bEd
    zzYYD9E=UulK{*`^GMuCod@ti2eDcI>9>sQ7_}D@|r4hMx#I)eP(QpX(;0dLA7GJts
    zx>IwpEx>$t*B_Rz7a4R#VMg=2IooREg@dVTGA5-Ck<2qF@DflB
    za;GSCJ<<1Z0?cTKp^P`%2RLnf;p;rURoKZi#~!x_7(Q94x=UD3!2#&sX7S%Rl+-(|
    zTGIq$Yd8
    z;4_e0!Rh9+yJ9+x2O>MZ{q_*QAW6~O2|ypbd7MEXr^Kf87I!$fGP
    z_XAAYIkN^8>o|JQ>6@fS3=SrV;1C+((qI*T5y8WUfQNZ<0s9botft?>Nc8Wp_z4!@
    zX7N8*bVy5T+MPt-`41sNxF!E23D$_lN>moFyFoH$a|4d
    zM)z!Z@#hFN!dhW(3qK=gR*CkNSQ{Sh()i{E9@8I#mR2mMc6
    zz~2Choc0u)_H+TI=9T_hh)+MH$Hx!nj*=84N)w9UF1$yQ=mNTp(>08W$leYdb)j@1
    zox+Gx7!)9;lc9r{h^l?r0&dc>YJwkxS-WQk-?Y2zF0|UQoR0YhlW*bU8_tFdwP9O6
    z{-qA4xW5m$pQb
    z-2D8*Vebt2Xtqp!AOCj8idkDzqPkL=Sk@eaCZFp+#^O()@bkeEm3nR2d_{$^Hj$0{X^I?{a13{yFRo@;9Zy{R-EANGSCH}$ym4P9>
    z&F-6xlO&F8=S6|XYkcLf>l%a1_zshNoX8#woAIY|4wIUjpGZ0=hAX1**k&X*5|
    zH&7?SJ=PN(+)ftsYUPUr_GtX3Y5oZwy_*;Jps)+!MQPv5rf4vCoMq#kKgQd+D8n8J
    zZzpKvks`0{XK{eVG>cgl&$2kp;w2U@v$)9O6&9CRyvpJVi$xT6He_*$`AO7B%vq5p
    zLLB|*$j`nAfsDWO@-dQ$yM*FE0^}sy{rFqzi4mM1#u5ITunr*THDc|xCXy2a6B+y)
    en8;70CvrH_6N5M#n)qGymB|B>xyffIAOHU;4vG{2
    
    literal 0
    HcmV?d00001
    
    diff --git a/python/tornado/test/__pycache__/httpserver_test.cpython-35.pyc b/python/tornado/test/__pycache__/httpserver_test.cpython-35.pyc
    new file mode 100644
    index 0000000000000000000000000000000000000000..fb1ada9851363b0588d6cde6598ea019efc45210
    GIT binary patch
    literal 48952
    zcmc(I33MDsdS1LLxx$&_!J!!BZkZiiAY%EVZ-4nINNH^k*@f!rs_MV~tN!}y=&_-p+%JChCtv=`cPsU_3jKN!pT^~UC96~w
    z|5K{2R3oK|Db+};Vp=sas+duYtSV+ziaFA%-lH15s@SXVGpgRF8vUx+uNniYIG`Fi
    zRm`czpehckMqU;3sxhRBLppa>O0QAHHTu3s)rVDML={K$eXpvoRgF$7lWJ^M#m)MDK-KS4jV-FUMc?OC{eIQhs)}3n
    z{h+Eppc>m$ahtx+t9n5-wyWZH)!3nmJ5*z*D(+N`U8=ZCH6B#O2X($7Ro|@|52@lq
    zsA*CRA}kH6~SYQZ@Fe;y#^YMAfHMW4|iy
    z*Y|5x{eUVS5bPsaL=Dn5nxty9(Y>W)&yr`5^o22~y74MJe_S4
    znpPIxwOLhPVtVyubsdGbsM}zaBPe;ls&18%Ta~p#N*+b(1FE`BQnx813bQq||w^|Y!^Yb+g6)uU1x
    zB-zQA5$!Ld+8I?nCJ&Ao*e)UUxT>CzR8U5f_7$X_RMk_G3Oc+Z*#X=8@ZhwnJ}(cR
    z4?QU2!3(N-Mjo6owZ9*!XH|7pQfE!-Wo7NbgLA5SULKtH9uUe`@!&;Oy&w-Rm=YgA
    z>P1zZlT=_rvw2BX*Q$%-m)@Z!9M8JLx0ZoB=}+Uqr^)@r(I7VTQI
    zUAnr|thD9+My_=B+0F44;~tJHLOX)ofbwekA3XbkVY3
    zw`_nu$h%WkxoX*tyN)+!EypR(TgR-rH4iA=`-A*JJ^s_R7c6J7)pV?j?XulIQ)^my
    zMgQq@7u&W~ZXn(VKuY!2JfdTcQ!lj^i3g|DZj~ynX4|?+9BjA<#MP}M&aGyJubZjY
    zh@2jGt)Db!IqjLUW4WV#(nUXQNYebvZ+U*DS+Kjt-=X!1N
    zTyxGYgG_jTYKf>gvbb2URe(j(?Sl2%60)8uH>-8acJpPzwqUoKttB7|NJjHYw$4<#
    z(yHEKTPyXJQ)|x8GJf>NKV
    zU1VEPdfrH@c1nE+DUhVJdLyGglu_5x>O*OD8yQki$XW1VPv@lzprBJIHw(mG!S-lU
    zxN5f=g%4_AKZwWGg1yvq3YBubex+QwULZ3TYHcT1z;8iwfn;NYCMy8a{}F8doEZcb
    zdH_I+(16W8AtMbzI&^!M4MCS5w5~^0dnFN(K0w{y*#R6@$*)AEyz%LJ3$QuQP8$BI
    znAh?zF?o?73?TpxfS3|+gT(QapxkT0c*e~-R{g4*gPhszQXO1r??#!=DSI4$zIu7)
    z@a2~raQx--GiS@q%OL!*a=U!_4Bviv&T1@PUaT!nTv@8ss}s!y`#@v9;#_XG>}I*z
    z3jTsUE-$p(i>L|0BwRMRc*`B}zyNntEJ!4J8^A1Vkm?*R
    z8Yx^!p)`N|OJ)}+3y1(pkP3i;!>^^(M+ICkyf83}pTqX$bhm$S$G7g2I}#3sfe#LF+Wt+PsjPME+`OACOQ$E_a|*s?^Jl
    z19bO!^?L-@^re_;4n_abE`gtkaYN-T2_Q{+`FzmGf&}dx3$i}VgAf*|j*ly&O#vNP
    zwhe6p)e1G6(%K~J_FT2vl?AuYw%SnpH|#!C;bukCXSG~q%l5qE_FlgsfBOwJ<1NTZ
    zMJ4v_j}jJghDwd5a;eS(Nev0I&yM%lhmhH3Ir}jN(+o~B5EKOg`NDo0!G2s08&B?&
    zlnQSAA5k)m%OR63X9F<69aux6!NC9?C-hQfGY;L|4+IUkn@(Bvdh1flu2*9{WtJ*&
    znaY~sD@AV$cSQ8|T1&cXt6EyLTkTe*RX3We-z0=uD*}*%J
    zKQLp!L}E?YFW@!y^9WqnC>2X=AyF({xF2R*`#EIym}WyNpX%J-wUNjiX&t#hTDKh$
    z-8!^In-plEX7@3$JO}2K(9Zgkt#;wTYT*ivmqM*sIDhWq+;~<%bTf6UX`e+^HY@D{
    zadxrgw2$Grp~=>+;4&xFaGb@@@C}q&`$Yx;i%U%S{1hHg!FgJCIFs+OndYuPcmDY7
    z#fxW3FCV{f@$|Xb(&^a~=iVWuJ9}F7Y61N;&GJI)Y5~S*LF;^4Jo{R9p@mKpsTJh*
    zf4F8
    zNJQjQHt&-W^a`G=fFM|eM3!_bFa#MFLlF9eacGzURn1AfPgMt1H81ggRUHCy*SL8P
    zv1e;HYt0CDvv{#!mn72IC9jKO2ad_*N_~q;k-^zaCs;nhJ1!06`Zqfua*epIQ_@a32U}&fpRtXnXle;&w{Cp%orN>B_10(w?TsXdZP2Q6#MC
    zGWHRqxP$1~ExUc3W|Yx52o_DA2;}UM1#xba@C@;+~v2~mHYUeSDq
    zJE8#(yTfLv-L5sP7W^e6u%q2!+Q=B`mrUv*9N$w!4fvrh@f@V%JD3|Oy
    zJLScCxng0k*lsPl*&F3rTS!UMyJ~qhNFvX!7vePGtf6Ropk2fJoKXZSJCYiM^4)~1
    zH`SNAFV)$;oSS2%efnll0p&{QdjpXOeP1+U8K$KeB&4O7Z9XkY5>iRiPg58aLrGAq
    zM<^DeWlm@bLK<_RceXR&(=mxCHNkd1z_tzjuLR5XRmSESEHJ1sxXgf9DtZdy1jhPS
    zF%R$1Jd8gK)+^W1)jh-hY16dN8he59s-h;rI*lghv)un8`KXiI!bc=lt09E!3^lEWob{vCV)
    zu>eZ&J{iM%@MHx|1S{eWbh20tbP26JTCM`jbJ~@^4B@_Awind`Gq=ThU{5529*u3-
    zE80@4feM+FwjHvbf$lZd?YnN>x>~DSosoTT(A69|*Gr|zirt3oUO?^i%hS}nP_8+a
    zTVxBx`#?)x>qG;f|Cn_FBp6^)CjDHE6~qAl$B>GP((aEoyU$8JoFSm^8zSRa28ri
    z7)-;kb1Y6e)EW(|TEk>fVB}49#I7f)yci1F9pC}}To6pCI;^Sc6V?7YJde>D#b#mA
    z6<9Lea@_1r0mKf!lZbT!4h`Zx7A3M7Ot_f_D;T+V
    z`4e#7W)!N4d}z%uFzfc}d6CZ8D-)+=s%qjmCr3fG2?FZ&8|iiX$r8dIJ&ZXvf!3+?
    z-|C@SpLXb<#UqQIdu|p%|IEG^!&X+uW
    zE{r3?8uWm}?x;-4`m@MHcZ4Ab$_znaKM4&6T?g$1K7bPA6hIoaz*U;t*D$Qr>D{+L
    z4>EK#8z!!;WF%|4Ea*uH=a=~w^0|1owk?NCI!u#00wnP$K#m{+2sK0A7CS&2&!T1Q
    zZ^h$eQ0!?u`c+)M!QAEj=x`*#v0j!*F=L<1T*ycq+MGDYNKQtC^z35xWh4GTw{MgyEw`!mAYYPD
    zk(i&
    zt5vyfwJ{^EnZ_NWaLHJl3nYR;q1$uAuC*<#y$4wSJcFkYVDf`QK@m>vk-<`l8#0;u
    zB+~;`C<=MNF5f80+{tg?sY4>G)RxpX%(L`n4yH!fjmugFfbOk^ARkZq#e8PTpd^QJ
    zGq{}JM?gFYUu%nHUWVB)X3*tApyYPKYVIIB~
    zao^Au1zEIRYDmti)-VIDgJEb!Z&=?GyqQl%;!oC^GNbV)0t3se(@%uJ>t~%!Xe<3+
    zg+NYaLBYQLlT!|zY=-J{w-&9#1z(u=U2WNo3C;#TRk+exYF5kkt!MThIC$vM#~y#;
    z$tjr_^p&=(Xra<>1N(;y?|6a&qdTboSPhFx9L(^vnuiOa%Bj2nPLW%!Pss6%e
    zIz3n*Gx3Csz9%MvS1*x~4Ad*uxT}>P_pvqcW1+J^D1-{SoO6&aU5d7$LybySVOr#W
    zgW04b@EJ|WYRkBkc1GB}oVLFcDfV|EK))e)z0A1w8A+K$XVH)})KebLpQDKGu+u}p
    z5++IH-7&QgDdJ;=LmPj1gKpH*;%M+~f8v|EHw8%sMyL@qMqtY5K2h66P~@<%i7HQ#
    zBQJVhsB`qWqE&!vcF7
    z@Q|Y?K>{jD>=U@|2-Tw^Op&9=kSLanUr|>8vZ|d|*M?MkjdW?lo*u*d@c#|rnQJ4e
    zy;gk)O{Qi8(h}OCEyPjvA=oJ+{#(}(quMxJJL~q!pjeKLIf8&oKB03_i(V
    zfkBPITMVdl+?=cxSpx8NH>XF-bM}(uEsk=y1k7e<>Gr_n$5@&@pWEv#Lezz2HrUNz
    zA*1$>_ehh+`*i;-o$;sTRaa{^6cxMZ;-I$HOQO%Hr=$yyYqqr_X26mx1|)j9e8`zZ
    zewE#l+JVk{ShQOP9`#&m9Xz_5Q)|7yyn7(E72!y#v#Xna3s;`t>it%NHqAiv$vaf!E9B}O&gVDh`{YRlPy8SC-88OyU2`*dyt2?r8hK?<*$gY(
    zih!~rMkFILAD|^m3k8q3j$jzABmG`vzMsAH&4cWOj=lPbx(u^kFCZ0GK`%fr9O6*;(j!2%-_D@3{(!oq
    zxrsI`G_+(vC#5Hmz+dP_HsfW?>}4E`W`tcUqnYIy^P!9bE7gb|!+$k0hHFGG6n*=e
    zP~O3nDjkky3|A_k!YD?YsvPT#%-SCSdECsBU32@q84!2Cwi+$2@m|zxGqMYq#kIeY
    z8GEq0$G%m}c{-2oA_|C`_Yz*GL#mjov|86|Rtb-bc`vS?^g%8qnF3mpwDBQWik#kZ
    zFs0MB-CTv7RBD5#xTYEl$lJCS>TDgpEA7I_qH3&2lL{OTzW6@J?U4i#IJZZXIaCA3
    zRxXxMTJ*ZIt$bq&W{L2s;WytvF|(BES?$j0wSO9AgXKx~=XopL
    z;6Fob1x53mQ40!vOJ~D!fI;R6wA76PEo}!4Ekm(teBGN~_1#ohGkmwFYu7J(GZijT
    z!yo+z8C>$>gNU^8ZW-p&VE|#y%7V3h;x>?5_zcE0Tu=f}3u9x2*E&Pu!h_Duj*x`tgq_fV1TBDFLt)m
    zv(JJl0M8leBk1c4?b}yvEnR_M6Dw#J%-S$^XM%(-oIZ``v&0$)btDnSJ!4UD#dQYP
    zmCx)M*xKd5~9(2qlpbX$#y?oSeY$1k{j*jYz|uyV`0!
    z^We;rmd}%)#n1$#O
    z2N@&cTq6gGrJ^}#z=($Q)HX~fJ;3M;5M{fslx>YS``;3I8Qi~juF{C0`B_3Y2v-#R
    zmd*zP6mIFtRf#2iEM@^B6{lzi=o6|2;12>rFM{q3#pMV+PS=8%h7dD4j*&+jatJfD
    zBaL?uwS@f$aQ2~_VEN4I%2rP{RVfO3~
    z?c41t%sF9OYCea^(u;ebx;6mOBkAo#_vR!a_DcYldW|iJN>gup41WERT9tY8MN@Td-c=NC|`;jhbx%POZpf5BNrYb((=rANS8p_fFXWB0VD
    zrL)5r>1yv(Tqwf@d`4oIH;hYK%H>e#W*~Og`xQp`1RbtqGz#p6M!vaMTksGtFo*D$
    z&oN*|aGF2G!TSG2UR2JTcUNLz_NUa@74B3bj=X`d|
    z_L(MRzr=u^1^eGK_!S1f#^Bc(1Rb5k*xT9UyR!sl4I-TR&G*Q^T)tnPRgmoeGG5GC
    zM8tyG0wN;>@rC>EAf8blftM1u!VVusitONHiYODNEF0^tY8;_XP9m-iFms`NV=itV
    z5j+w@{K8@T2$~;)D%%sZH3V`MkkO{evx2qrVIn4rJPID9u%htZzRcV~elf8LiNnQw
    zT71Cc;A?6I)CBQCmqeW)gIqi->7i63nQ+DXZJcWWu9#lXv(J3xD5T1
    z@+MzhN$O&G&~oJ+^@OLnvbZ2L{p(tp6z6$67c1oqWsS{4M1D
    zL_j30fWB`zjHZ?dn560qF6S8p5iofpt}s#wtSYR1TWh*NWeI^a{2BO;9qhPnR-O63
    z{M;Y^+Sx1TpM&NITJ#yb0xR09RkO$#ySuSEge#8S0ftqpjwzc3WE0nGGDo`Jf>klW
    zyamJG#2nY@K}gtWjYyS%NLbvZ1L!U$5|4*{NQo|J(j;fHMDe
    z>wom*mpXTz3$Ml2IDHF`wcGl47;_o?M+U1QaI@ed(4>K{`BTJW4s|G~;*rcI?yP$x
    ziIX^qW}1OzL&jJAw}R45%TTI)MUKqE%Dk9AB{l@V{a-V7y>)Otc06FdH-cKMPXucB
    ztcqH%u#eXaNaK$q+57k+3vPwDY6Ap%l59$=OB4@7C>l?DVi=*sjR~7g4>e_pn|6U<
    zH5|-alx9Tc%VGo0-dXAO^b8P3)CRZvb2tQ`>0Gt!iR0dCBFyM;zqD1&h3$~Tg;nj4
    zT<%bgmU8Z34u)}VDwh+ZB0DP@C-tVyXO~PruB-;5u{#SOn2cG?L19u~gW@*i_&4?=tvL4E{5N-(%ogDyD0a
    zUWuK;HxV4qR_Ph-9nHd88NkE|pH*=AKPvcXT+Z_d!nVpu-&g_DSnw_2)EQW?qRZ}R
    z>qIOS#$}-+>{!O3@3dEb@Rd`tcuedMRPO(Lf6@I&3cu2SqrcOyvo2K0WZb)tHU=~2
    zdvT-L8HPA&VqE$sfbYfGnWoA4RhkCaTJ;ks!woOuZ8lp%Z0Fj_*s27;$Hq+*0D
    zKtBk=Rg2U&+HDaYf0I;_5vz4%6rGanP&Ldhb3e?kX{u}Yvs0}_vV&g-WON_abbfY#
    z;AQ6yEM0p}vTNUWtahqBUAvbZwGT>mZI{NfQ#tDFUQ+>kUb5?1_E>g0W+K^9`;cVE
    zvNK;x`>@kf6Uk1FUgK?OCBKHu-p;@)6Bn#>%7XxZ3RU$v@?
    zbw>S#XL}1?&d#O-j~$pgJau?~<*Ie~DzQJ|6aJc-pDh
    zwZ-Lh!=A*#>Je_>xYWN
    zK^$}7PCNAQvCzZE^uwW=9v_z~3z&B=jz(^Emb^)mnWjJ*sZU+KpRl%A@&xMNP&5wX
    ziArt7vY%c|nD^b|-_Dk>NQu1_)bGm@)rl`l9D~Ie;XTWeEM&h+S@K116-
    zDcpe&_0a8balFsvHl9$Me3kl-|05Is*4mk+mEn_c_Fs+`(A9i|k_J2WM040OtFC#9<
    z_q-|h{9}v8inl~r^G*h#W_
    zzZVidZ@;zuC1Qcn77{+nq6nY;UlEH)nYSxc@88}h`;j_z(_Sj6T
    zFx~v9W2bRBWQnjy-Lw6Rb(iia4!OeAt!OcvuJWEt-}{!=m1qI81v6|x0fFYam<0@z
    z5M70Fh-pjKTd@=vxi6q}7wM&mx{-*)1xj`v=nmPtF>=RD)%R@VN>hZn0eN>DS2*Q9
    z^A`=Aj92R;^`8u-tXQJF7glQ*%|`
    z^k9!-iVQuW6WOU@^a*FgfH(S-4ld`ec?qChl)Vv@!ZHWMfjfmK^EKg8bJ`Y;sA(3M
    z*>6X?)tb!7L4B7NZsBAh?!dd%S}NQqH`|4Ft59u8%bqI~X0aRorc?!mck>n>^0Ysk
    z2CkyX1&zJ+uKVx#?q?a+Kcj*Z9PQ#
    zp^4H62P+_IS8oB35YRlN2nlwbFbOz#Em4Yfxjgh?FA!L`VHFn2ucKMO5zsD(!m3&%
    zHmj3TDAHq6caawyx;U2i@pMt?830Hd-2d92%EK@)A`+i7#%+9rC3HEim}a_c@ql%J
    z711ya+Czca0!x<*q=l`RbC`oD79Wv+Rn}z%yDJi`*FD_BF0o
    zdE@rB8;jUFX@3JVeIo;|HZJzFHs-V`rp1vfwPuw^l-?|9F3~eLr<5b8nwgho=
    z<|ep`cX7Sf`j;XSxze1-5mZ5R;y=iN>Ubm09Ny>&2PVmChLqlai+eBwsc-O(b-B6F
    z^unK@Qt|u&3ky64*6p<$I17rmZvc{};=Nw$-gI;RtcxF&jItZmEY
    zQpo6jNBvK6<2Mf)kDar3FD_cfwI#t
    z>TN0xiOX}O=_np?YGVwu%nTn$6%gxeTSi^XZ-mNL(6`8s&ClWD8=WEot}g{bat(+J&U$(CxL6ANL
    za4<>@rssXfxT_Djl#L5Gzz2IQVz~CkPl+D!`^ESOA)~~>f;S*|o=7D^R0i`!Pzso~
    ze`FP8hRZJ-Rl9wqQngjVCUoi>?HK<*jQw8*K8=molGHY3db+>X=w#bDay4!vMmi%04>Mwj+d=ldBth1bh=4d*ZyoC>yiBkG7j
    zBE3F$-CReR39LOqkMCK2o{2HHfMZ6)1aM$4H6Lmhc!Gg{ptL@A8a9kR*#K69F|IH_
    z+_qbFZ7TTtM==YkPe8y~2>Rsbm@$zB$$D^!;3Ozml`}RFdZ|2g
    z-ppixeqO3<27u;+)WPim0~2NgBiI0z5sLx}lTL@+Y=noX0j6=?<35zmBN6Nyf5@bN
    zOtWalr1=lN@FHKh7J=>(ydI@WErIn9krOggvKQa$-2#CT@0iSET&AS3QkClLy9b~M
    zvabiJ55Jaxy@>isA;6~Y1+aIsi^a`{5&nL@njA(ag8dhWB-8wUJc;QIC=E&_xK2*4
    zT$Rpeur0w#qBEIW=p0$NllmbiPFa}??rF8yB*`Fmz%ZKh8hQiiQ@cwFUK$k-W-r;^R*;~gGz%w|Z9gEr1lT&W07UQF@4m^#s;IaDM
    zeijm=xv5umj;=zqaMQ_60rUIS)yP$F@4W?}^`5&P%qh>~Bz(Q>&+h;Ney$h@RAyCv
    zjze${vvqDhT=~r+M=4eZdnzK`dHh{~bMLAG4hJ@;
    z3ZM=G>SWm5bvPvu-G{ebR9iCmG>!*rwsZ2`z?x8J!b!S20F~ZM(F6R3y2DPcn*K72JnMj@~Aata8b9b!KC6!;Y^R08Ab&PGN>#_|VzGN}40
    z%IeXr%-#5h!fFl!F=O6%nszq~VmOrStcyard^H_WC~%e#(M%212#v@^St3*LfifKm
    z(i=L#+6LR}Lvf&-k&L5Jn5EC#2gf6uI84&xN}McVqy&CFS$bs_vB^LMhjM&|?d-PJ
    z%I#YyJF=Bb?Cu|8-T*tR4mrP$_|TR#w(8=fBdp;Yi(+Njn3Dt~VrFnTvj{W+^^jUq
    z4ufjX1)|4$;%Nu$JBynp%G@1fK_)70U@j-8|qhVo<;UI6C$Ou0beB|Y%gS`9jyfNSw$fjB)YE|16>$o=#dU=0*SG30_F0tNWwcDZ_YMf*m}jQySOWGg8M3e%}0
    z-qqO_MY(tn!lnF{v(*_~(()^a+(XMTO%Uj?(CHsAaaoIu>hcC>vOihlBQxtfh&RRY
    zfz^r8GbqHNasW9Y$w_$nGdv10rwBF)Z#eYRd>#{9y=>XcqzV^&mKfB+t1(CsUW0uq
    z2{?=tOh)22;H_SVt+5Hd(HsmWimzS{J%N(c>X4GGu_?>CJ#5N>SRrgeEfUEC>N*!G
    zX}-jXD2C^tBq@WQcAQ$yfKxtpU9JoDFh4$M_fZYpExN_?w;Ij|h5b0Izklk-iR@88
    zrcwhQvubpYVM{{-!gVPh)6@_dTr;>FH1^&xtyhu3Z7PMR(Sgf?d67sWMI
    zVT8GX>j1@vL+%`*!4MkN$+7Jd;uu2r-jxQIQMK05g7n1h-Q5tvH+JD;k2_sQ|9m;-
    zH;Tf*34c4gVm>JUH${WMKdV(JxqNjRU_o?*-}l}V4Y15VP6g-QsQ8|VCSE*QA-aDm
    zKzfPi?3POvyMFawuRy=ayP#jFst74jtrn-J5h1c+iF^90d+!$XfYE+#R~+rq&qajc
    zUG{u>%P&4-W^s##@B4Osm1xP~wWj~$qM`(JEH9ai1@P`6_BYadSa%
    zVx5iHp<;b504ja_(+b*di)cHIl2R2XoM4N56{{NSt#bAEh@!A@#6y=l;|c7MTt0#<
    zUr^pXi)=@OL>OZJh6q}gTZMv=6;QBAP~g>hV*#_(+Ko~Da)d?L_T33>Pbe5^xX&eD
    z`wsVOqwJptTJ=^KgUA(n-0u5h$df4ex84=#y-U|k$BhNmvVG@|NX$_*)ia$kIULSQ
    ziys~Mt^~3!t6+RXeD&;~qcT}F8k*=6X%vq02P5_%ZNfZ*H4KIsj4)WsV3ff+2J0DY
    zVBnkG#_`1N)TWPWj?G{{k?+Aaq^9|4|C3
    zaXE(&g#EhO0d>br1HiUpT$VCnzKL;I*z%!HH-EBITRiH0|1X)<*P(Vea!@Ipad6MX
    zSLEK^mfgQ(TM4nk!i?}s>73lVn93Px@vp7NA
    za#J_lJ}!9SK{@o2hIX?PII%uEBd}+&sH^R7GIO)@z$uhDM|SpoU;($E4L-r8GjB1C
    zt&^cGzhT?fCpaBJ8r0oGe8kG#K5@9zTEziYCr6~*ISM$}xC?)H2|iDt
    zoy$kSAiySYts^oR1G5mV=ozc@SeP}!A3)8alReBS6rp`|`k)x|5dit#loK4gpf}I0
    zdiDa=rTGZ!&j2G?yq8A@cOHn~K{}0a<^+%XG(qM8dUhcq)tUw((_?t>7eqWU$f>*h
    zfX%cDZv7uIJdNufCu}tj&0&t*rxj`rMu0=O(NanWqtaWlMtRnw<{0B8S$T7oEL>ef
    zOc6|o>NetuZxu0JVL3YUQ*woIjv(SQ4%opPxQl~4!YtwoCD#c1#Fz!k*b*5-h_DT<
    zzXyJh^>D)KYXd&OAtR%H`-BH53kjd*d>ietck+$CG7piR#~BWpT4lzTDHW9Qe?HOg
    zN74$anuqkSBS>WD)la&ge
    zXr)310*@*A)S_NWnzlKh(eY&Oi+p{NIs-O17wkT0NPwyF-N|k6)r?mp0++8}#$s@3
    zdjQ4&*TC~UMPQ}m04??oPpKBOHA!FuQ}VdZJd0ibxD{D{at
    zTmonV5#1jUTMt*By@4
    zI6-jOa4`onSj{zsnkr%PW${$Vw-u4(RfMFCp1iCekN+d_r*YlI=LS2EbYU}Z^cAdq
    zI)b4XfR-Qs@+B8>nN9!b1~~BIQbcfm+TKmfuu-0r76(10NZnGz+_0P`BkN@1$Qr^n
    z8M|#P(<6ABF*~2U2WTfFwIrAY`w3y7F?_SY&7iH*=B+Rb4zLv|n~nt|!AuI36$zfk
    z^XFKMl*2T4248mT-c4|5i%R6j4vfH3MQJFEd8r
    zcC$y%9eW>6KwfGThxOt0XV8_&NY8$lXr~@ghS~68oH@^92RnPZ&~RBbekVa{@|wZr)Db}U-o6V*
    zh1RsK#X+uJ>?i7i0zjkx)TZ3x0&fbbg#oqaj
    zY!rgkV7w49(ZC1byhkq**EAm$l<+3tS^FQjx1kH3P00#gB|j=2AH9#_#upYwC>`=C
    z4(aJ=0P6qvZi)5-8J7bVPe~IVLcQ;>PUxw}*wTXx$c{$VH7g-Ow~DgvtunB!`6W2*
    zrPFyoTc#y{L%J*1$Y?TL%x@N(#{mz4a%eHcn#EhuuiB^u(b*xZw3%Slt=Wx&%Uw_6
    zZ8Gh9!Zb43*7bIN8nk;Jr4I(JgO{XRry;G2mGm2k8ia2A>l{Py5xrAER>gf-)UBc%Bad@*KJsi!?bzx+U9POwUjtTnK
    zNNx5rB#^KrTk{URbznu|O<_y#+c%QU=S>4+h&x3Id-S_X5XoLDHJTY+g+Iduee6<1
    zSb>m_TZ%M)G6bSH*-!Ys{oKDTaiO3Ml2BAO}NYBYgWB!Omt+%
    zMsciZ9upls_}eeJD#+mf$codr90nG8Afb8eSW{>baM&F@U2O>Mu0316c}k85GcJ<|
    z=K(;im8n?JmxIMtobfn?Y(^Yo4W7oM4jarSD1J9G{$_vL12~f(|N0Old2@@yjq*)z
    zxFiORo-|(d90FL4J{)_J;aJ75OLZ`VOX#500mV-wU90T%GH5iP#-dU;o=9kTj2SsA
    z5NL8enIf`+^La2Yf{76~RelF@u4Efwcw44-$n;S>(`Pgh5+gAum>R{scO0jK;~?v1
    zaQSzM0=7cQM*};%mZL$mcBGKc8>Ar#A}HWc2)v)xLt)ok+r$Jp+cR{dY9%ogYK&olAfc
    zSUG{XeG&o8x4>0<1ho|VwIzMJDMwhZjzL+aU@-Vi0S3oXyR%&}7%7x6j-Cc2TB#f-
    zq^B4J^pluqjH7sPi|_150dMTwV>7ja1pkK|@^cpP=WaBF*Ga(*(yKlUq{#V1+(#)c
    zT&9S_?QRb~9zI>ITXQ%m7KhNxb)B2}+US#SB|6B9&?g&X?|m2AWMd7Whqtfrz*{t9
    z{!`z<-%mdW)kd#r7Y+9|9(^(d(e5OOR?sV;q^LVm&?z9$LIv#tk)Q#LX3mSLVK#ff
    z+u-f*I8CnMrVX6$D+6!y%`3o~u(jjc2_{V}+1jxo4rp|92N6t|1TKy`65zVIMT{a2
    zv`56fCeloZU)$<^5fQ-l=~EnW`+0)Rk(QA?E-qgVGV^|lAZvxSj{d7naiEv&
    zy)bakptQsDb2Qwv*~CbD1H(pu37xH%B*SF@vs^D)#<&t{h>G2tGDl2rI}nSHUl5ER
    zY#rWrAb#JV+&I!3uDDUzT5LVux@wBtf5LQ#8@!
    zyLHP%Li(6x#r#&0MR}nb<@_W9n7P-qOF?+oGvaT`QK>8-qffl{1e@D{S#-^W0}
    zuQB#H2A2_-Jv6d`_5boa*m51^VytP2lojVaPXsu>oahn
    z?7WNicRAkoZ302nsgE)c(LKY)oMRyUuLq(iQi5nxgMFMJafF&rV_8E6OO<-CEU8DH
    zRgmoekWWqHa()=W-So)*R`})9UaR)P!Tc%aRsH!>%&YqIr+H;zI@L^}g6nXO0aG^e
    zG=n>$89A@I8QgfEyXGiH`)6v6THA?q=4_MiiJHYtvX>q<;mN8Qjld3s28Npz@8r@Y
    zC|)>i_1=DX_Vn3?qM+zvji8VcGmYzR-1_^XAR9X%;QpgaS8)QiRZZHNOJ#GBFWQMf
    zt5CFoGh7f;nJ)tH`oFI`y|apgIhwO5D?Ta1);<&IHAxv9fcSY}(3ma;~TCH*Z_&kXoB)rgrqYd3pn^{q)
    zO27`);pSvr7$!~1+@1Uo5(BXnjN50B_ODEoOu2VaChM7K0BMa$?`j
    zU!iBtvsvNQ7B>7bz-4oCvAC9fJIBkCO_l=uARm4lL2-l+!{xaJy;bHpok{rod6fAD
    z;xms3oh>+`PQuO;_l#NdiA0i`KmPU6x*0dXTK1ZZFQF`eg2&T_1!T7EA{r--BV(U@
    z8fD@Pz$xZm2{FL4_Y7w8WHWgH)oLES4-rjq=nO^-(R@|m9h)Gkf^c_1FD3ccL(m?&
    zXJ|!3Ec@sn#lR-*^ptH^g)tJB$uAI?eiW3xt{-!@F9XERz`iSBqbTa@>CVw=9WDX8P?q%(eP4DCH
    z`k-KnwF@ZmbwU)VK3oC=?J~`yVMJ+*1K&nZbF?79c|#*@<$5Ksge&CXf{on0L{;0p
    zAr_IP!fxjLOLKa@=6e8%5I`)=LsL&a7Gt6#$Z&W5Eqyw;c2!-068sXeM0TZfYaOm-
    zxDxX?DByDxaYdU>a76_QdO?_hD;WDBmsS)F7z=oqNOEaK1_GVUlMkE4glA+e56)?9
    z73!_#d>mKyB@jG}t35!NIJM%AER7}TNPld30`ZuXxq2grc1ICQB_q6FCc5+}Z6vij
    z)p;n!Sc0sCa>-Z=M%o_McKeRNWmvmIbCW!`I^OUJ8PDM_UsKaqlLI;eEu>eY3)-Fm
    z?tJ(V541GN3x3w@53<3|#b+oqHv+?gHRy!5mSfO9i;BKNWZ{|W9Jxz0nJpTD1!~4$
    z?}eIrOJD+3%lPY7feRn-?(5TWA3j_GQR3d#?iY)l;PE{t!QkID=eQd=6!#*Og7@flvS&U(Ae)@
    z9|K$7d-p1xW&dpFfxw7*&V0FA=Cz*LKXu^HR9E(QXaq?Xm|90D8J8JQMvPIM)FJHX
    z-bk)zvTIfA{{7vaK91dD4FwgX(ZAih#B!)X+<6X1mfTWCi;lbKJSJnVnT
    zFZk@F%+td3EQBQNN@%$J%oI-2H0NQ+fef;NOaz`h8iQdU<*tG}D7RxtZD{|t5KAG*GZ*}1ZFU4J=GbbE}cgrB^rzxkSS>x2UW;R^M)22TxxGm&*R8sQ
    zB{kE8>1_tGq~;aa)iyuRZeu-*vft0(GJ{tce1O6I%!~uCl>I>lL3}4R=Q5h$>KPrDToq*Ze>85Vaoy|g!*yq@`EupfX>4`1`AM$1@MJeD1t@aK
    zyv0YBt@wi|aOB}jD2q@^>Bn&1L_pb*9+2ax6lb1r{whMam7EvlslS2246NW>hwi+M
    zQI$7)i5#Lk#RG*={u_FD->-169>(un;4GE%uu%#!-wAmQZi3SA$#?oZ6)A_A%I*ex
    z5?GJV4s{+fJ0N)Qnw&fy7qSPdlrZdr_$O$ooVe!p(2ufToR#)I5}BCL`yUQ6Cdzbx
    zvGWLE*l>J~Gy3&zpNK1s{NS!xzy}X70p_i@-|urZ);14hxDg@;lO`+QvtCQ%=3oLv
    zmrox=Yt_R=Z>JCl<&Pi5Z7O^@4PYkta$Im_jt+OWtni`;KahoHa5+yS(EOm8!J}hJ
    z;jk>Gg8_8!$s1WjsueW(ph{Pa_aT(dT4JLHF*jQ4W}JAKr9XnV*;mjQeBt1_-t2mJ
    z7NtImq%MMi4N{(D9A)GCQKtgV2Z-tMBm`&^CRR3K>*+M1vka<3sz9(^#)G{E+gK*E
    z!Db4$1tDfB5g>likAKk3l}cBa+Do>D6U`}uHj!?VRyM`4*w=2^&2qJ+zwxlfyPLF}
    zN*SNArsc1<@d#(;c*_Y>o1Sr7RQE5Q;VBTs#9w$LRJW(-kB&{
    z8G}+J4lEv<-GWW8yiISnUQl&EYg%h+V$Ml<19$J(5Ab2$Je2fpO!6VTVO_Bc%(bWIV9oJWLK_Pm9~9}B^nGC8Q2W^`PwDKiv85{GdLc?
    zzQqU7$I7O*mJhVsKhD@MF!&7yUu5uQ27iFS?KdCdk%RB$_zKw#Z*OPyyBNscAuj5Y
    zd1UFWgn>hB?)P|D2!KNwmg?5CiwuXbRu$RsZQV_g5+1MiQ%hd@OBjb!qpW4W=>
    zUeSp-mUe1$&q!)muHC8O{>=qEeQ~URY|q#eV^5A9-MnEezk}26`^SD@Y;^O$=G@r(
    M#x}rRKRMR=|96FmP5=M^
    
    literal 0
    HcmV?d00001
    
    diff --git a/python/tornado/test/__pycache__/httputil_test.cpython-35.pyc b/python/tornado/test/__pycache__/httputil_test.cpython-35.pyc
    new file mode 100644
    index 0000000000000000000000000000000000000000..0c98955d6940e0fae370431fe2ad4f6143920f5a
    GIT binary patch
    literal 13605
    zcmd^GTWlLwdOpKDY3gEGmMvdmMvfITjqTWRldK$V;+5stt<7~TWt|X8xg&BU(dNx(
    zhPK4oXxi-dp)I=YqD8SSioUc!7e#>rMS;HbrEh)d1?bbDPc8b8hdvf4^wjVB4>=rB
    zlwwsf`OmDbvgvM2YCtKLS*
    z^=!|nI}IMj2uo0lB@$kBJ5hi%mQN6lo~-13>2uvTq2`G_BDdu#9bnTG9EA312!
    zdCO0(JB?DUxsGCbakJ%=y<2jVYB#E$hZ)gewF_mwbng>E;eA{#*i1Q!Jctr_5L^ki
    z1J%HZ75v4d98;CJswAY0LXvo7pM(mi4DJjF3my$<2~Gw1mh$8N$THUQk*+NW&dQJi
    z`EV(5Tno6|Jb)*%VIH_1QJ+WDQ%gNj>OLS=7h?^@V=133bfz{uujS5NxKL}B?b=4u
    z^)9q*Z^O5?e5=!WX?W{Ske*6Qr>pI$7eAu>YM850P
    z*8C{w{!^uoqQiD^VQ%Fkba7YiEZnvmD_2{sYasf{EiPtd*{QcyTGiIM)poU3IoH_G
    zm+I?fcg1V!hFxj)e}R`3uC(n{Yb8?bd~3@;#X_miWTi&41P0MtCGdw`cezkE1)!`b
    zzGIQjxg7{T|BUg^Bmjt;R6__5?-L-kCjkVf+mQ=fg3!*(dlKTzrRzIraU3r+gcka+
    zsh`4+K0-hO7%+{Je|+$DokqD?aZ0vccRA-yROR4R@^l)Yue$MWXpZ?Gngr1{&pl36LpWI%3W}i=-pNXN_K@2@C
    ziPneU1uqPpO&zt9n=bEM$WAk0ngiIuMtbteW<7e?3{yAbx;{s;>XenDv3%M#f0
    z7iU-JFNJV5y9YwMJzo^F?b26t9=fXi?Oh0ahQ11o-GQ$tCK9`P8U;`6>blxxQLVm0
    zj}-P)LmHU!<}Y<^<1>Y6dXOU^MaFP_d*|DwBb`_GXNO>9A)oP6rBcJLJEfAJDV6HY
    zO1s8#wglT~*G!8(h9~F~1SbhzA~;2mBY2shZ#|?$6QU;o=nuG~09h-W?0==LjOySM
    zcu??6z77w{IKc4u1D_XuKiu4<{G@;QcF*y=4*!xK2oV}^0==ui!+L-lIC=L0;q67c
    z%urS8X+#l5l_yRXT&*V&^#mOdjNk|U*fc!A>8COA#n1$&Fp0X9a1Z@6Qj;UJZmuoCzl
    zvPP{*t1~;4NXAPG4gWtwwL5r^FGDrHgBzlm!T0k}Ot35T(y!v7JyPkj#AHAzO}o2`
    zO8Pa{{u}{mq+ciK3%8KykWodAhwY~4R78EaAEBS2QgR2C^7~M!y<>)eP-Gll3!L(w
    z089*Jq9+r^(2-6ujM@B4#xU1D{WFRi6f$BO2oMBKVwyBd5F^MBVkuOyoMkM9@)Sig
    zN{M9cD5EOWer{1@DY!9#apgfS$Jq@6`UJi@#TP#6L5Sr9#5W=08&^-@-XBua-A7Y=
    zP2D8=9hT+5ExadT)>NUBw6BN+qA+6$DSM_{m9npN>(Z#3xk5?pr0kV$Wzt?TBPKk(
    z1$Q%$>f9OKx$|W>Y=~Ys;|}PAZEK|ySy|b(X7DrPTbWKIq#Fh}Z6}8%{3N32bu7`3
    zSDe-My5YDiC>mnBq)`rOvO9H?D?Ix=Y&a!s=psO`+O<+J9w=5WJ#Z7pF_z;u9T~C4
    z6EoJ)unu{BAMV^WaloMeKlDE%_8A(0nE@)laD9>&LasL3jf#zMToN>H$!=6iPNU+Q
    zm_c-n`;@e$H!)xf-rI+c&ogg8S{gkvsOX**6rO3?Jtz+eO@DM$S4z}PcK#*kL+5t
    z@>RU0&heXOlH3qVy*SF`gfiXrGFeLJuT<$fe}52Asp~8WmEPKiO8ZRq%~5RW859eI
    z;Q44~h(F-qeoZ7Z4k^>krApIrk#+Q_){IswH?>B7S+dckRqfah{xl$9Sj?{NL%`>r
    z`kN!?_2(yN&y3H{JUc|`%nlK@9Avre1jnoUD+~YC5j^G{Gwb=LpUd{5-*11k7gXcL=^kFh_8O
    zpl`;dBsrKfXrTDHOvWGwGnyUCP6KAMBfK=*%eaD1kcasgRnE_5Cu=D`>W_u?@s`8c
    zl_;J|-&0($zeZuV3x1R>fm+JKSNv?SL-G-7ql_|
    zf>3f-f23g0U-nzgMnrcy578jvJ;ae1guI_Dzyy>CAlD`NV|LQU3Y~8wA2OF)wRHxd
    zley|v*5+~#A3Hje?rfGH(c7ELIggx1CdW^`sU1_ILU%LzB3_-rRxxH{=ysYza~$bM
    zF|>08GjrnCIfabQ$v{84)@*JkyRF+}gUvB61WCv5V0t|dfHE_P`u$M(D$6P3MsiEc
    zkINpaq;ve#x0Lk`=iTDPVvLVa%W4J=Ub>XNm@{f1qtt20HsWg
    za0Y9@HKr!fD}cmSFU#;>Cd=@jyYpTl6*2ZvIqAX
    zhgA&G!Gny_KUS7yJuazF8kYJP*$AW(jvym(aZi5m9L
    z8y6)!SZFpMR2?QEoAZ)v0w?+jQ`?EL){mm9b7Bp
    zHxM;}yCo@^YB-OL+$6gtKP3-k+9GKlUor904bGQxbN8i}N=(P#pRiKulN$%$ju2zq9$&Y7W1Klp@lkM&8QKyT7K
    zfUTJhGg{K@_#;OKMG5T*z8C#vI2jxnR$d1Ro!N!Ex316S%DT2Tl3}T~mRY#Vijmt;
    zRCA(mMlyb6L2r-jsPtnC805z|)Q{Z4RinE36bsV71b}jhzVrHbnO@459!ath6Z)PpNyXgWACmh`N+f1M!Az^H$6j
    zZ^r@&m?21s
    zaxS@tjxe0T1@_7^*WY&KKV9&xtG;#7w=VhCcYLc1Iv|&!G@Js`M9OQXyw3U!mNP;|
    zKWbNC7RpVWyGT_Wr^Ql8Gy~ceyGKH9x`n2vuW^R7P`j9hCBX()6{mb68(72x-+F}7
    zC0uXfx~zw>Lt2E84?96Vpc|xMP14vin6$Dejak#tS!*gXZgr-I@&v~Q%n-wHUcwB2
    zfkK!1ATne~WQ#V1?nZDAjoQD5?_u1LF0G9$LJEO&;TMt~JOHW~*8RjWk5_Se%guL_N+d`zX~tEG>2{-A9F$q3
    zDNSQZGwbJR9O@y##xaEgxyI#7a-H2v4-SqGh(kaWE^G)xcHajT$5)R3oXCXS(h-}{8F7^Z8x4yBMNQ_vq`f%-jwBCKMk7<<@m
    zZ~F1gtxji4+%iA*A!_oGjyQd^oDjV&4%!6hX-z6jKjS)H#aXkFDttcWX?(E$sJBT1
    z31(=3BJjX?S$lY2{c#0w&J34yn&1_Jn*=KaWdcp`nBbQQD3>81fy%q6`)6FDyb-7z
    z&Ay5Ng2j?QVz`9rR6Jd#m6mPg$MQd%kFYOz@r<+n2i>ZUW8$cRzzJDWq#0&gqZ0z4DrF3W4OX@qSbq
    z5w_}${Uk)9m-k$GuXiDTU{IfahDGbA1Qb90s|24D{5k>mX7q0p^rcWrqFM2>YJm%&
    zbd4_V
    zU~mUsGc$gU;22L>%0~-43|!r|E2l0eD0@F3VXAD!(x$V5M&zlY%UVz&$
    zgTWzt{bSz#9>FjU`VksRy=THUCp2q@a}AS~16&if$wioAaiGXY2(1#Jd(xl2!}I>#
    z1N%!npnnTVF!GNd{a|sq6T`M=vyT_Vch8!#9F_1E
    zt~L}H+-^}=-Mk_68GLbQF&Wa+YNpmCR=N&8+D)wq$U>BlAW@Rbr~TOdX0_obZZ9t1
    z`0h17cIWEy4L|w4#k)&4zgy4;usEas5_QMhXd*h2bM;C5qXPdyqMI};W@PtUQewq}
    zl60$t{$n9T9pRCaA_>y1wEO=XS|l0eNLeo<`kA8Me0xt!8`0Qnu0lRJ$TL6TEBztC
    z9})Zs!7%RmeKb7aQW#xi<@iM+qFI|i%|nN4ZLh6y=ulJiG)bySQ%xtnINm+nf1amE
    zd7fYXlfdj~b3RRbBhn@)E{^v4J8s!-Ig+1}1X1xouLXxvBo|a1>(vm85G&0c^Wwp9
    z?Rj}56=yNqtCuQjFb!5y`nL#};?ZP4v3SK4S$Dz4e=EwrTv4pbcQp%ql(f)FYy2q8g1qWoErEZ5#9rncAYu3J)x
    z6TA!$z(eo~zH;If%7q!biC3ZsCp-Cecl_A$7oWfRTpJCrQm@07`{pmaZ%gDrf%aX
    zaR42}X9P872DS`r8`v>$%fM{|&l$L5;CTag4ZL9Bo`L%YUNrEMfd>X&Ht>ppuNe5M
    zfmaQ@X5e)LU#l=+q^*5Phv{y3JuPGwL}EKA^w?W5dP4F<1W_{F1h1|~tFW`x@|LEA8P^rxY(V6c#j5ehr3krF-p2G5g(Z-1-;lgIo697v}+6Wd$o?wYI>rjEj*{C`HO4sZhPx)a(h#e)uW3C(TQ#FcRt8^4kj>@@<
    zmA`@PVpPxH?cE!`#QuZ+{GLf;tm>cdJI2SWeNTFb9SRC@a@alXLWDm
    z0t?uxy1$EHGMJWoRr+tU6>v0z<1WaXz!-a<_2J#cg@Bg;UN&a(+hseSU`8k9LapJk
    zltMP?OH)SNU0Zf=P1z?iPlgV=Hl(UtG3D3HE{-d517@=U>$8@1cKsh4uHV>qTV==h
    zvmoWZUv_;z6;Xlt=a)U-|5yY`JtKR_DVNAJDk?-$Bn706O*v>V2%FW&zB^BhM};mV
    yGD^BbB^|0HH;Ml>&?H?Ia!N``L|*3rBfl45RuT7QizV~(SzS-I7U-_f2eDoJSZp^n#>NAV-SsdZV
    zbH+6A-!m;^+McO+rkyjDoN4DxC2!gVQz;maHFBm^G?k*%M!94vrMNs{Did*e(o`nn
    z@|3Ae#pP*JnU2ddrZOYtylIt9d)8EDwU2^n&6)NtQ`u$O^QJOy+Ph6#u
    zV@}S_nP9h^y&GqDnZ~@Foj1V|Ir}c0-EA5Ra(2N4$IRt>;YdzY$Vuht9NL8rI6>_z#_po2zA&wTjVZg(px
    zUud?2?so49Yr8g@ey-bUxhwO}1-993H0$SUt=4+2eg!vA)V8*&t?nj^)wQ4%)O%=a
    z?-SSR!4^AQ>!IHHZl@Pq>qX_vufkPZ+lppZw_7b&VL2boo(r#c>KB5rcfJ+|(foR|
    z)2MEDw!@%Nm8nfVacv8oJc}-i(d2fg+3TTlG#g%NZauTH+U>m1X?SJFe--ITY$TR!3qgD9;#PC(-u3Nf
    zt8s7Vl0DtttcMqSUE8TOx`S_E@nW;v0^S^GVMPVU+
    zsi7WT1(6}<#|hVDWO+BTyxCNOTOw(>U=QO<>0H=?d@6?WI@}3P8S)#If>$8nAuxB8
    z)qMgY&E5fUCysC$z<39|l2`C{jpB|?FHew_I<Tqa=x6GxzuqY`4C-TJPb2G5MqdqZ3KSrQmyAVJFj$Gub|l;dfI}h);0sbyCDtA{TyGX
    z>qD6a5X2V7x)pR9L8smfLhT_VQ$-+;m<-j>Mel;o(XO$^VOl-+=N!c&I(F>Iu--RBcY3iJ2t<~%Y!VOMpV+Qb1M_F(M
    zB?>}V0x(|LJLL73Vi{60tqj^q1c?I7<2;UV4*&-%ye
    zItfTLi%k0K*B$YAifC2>uP*28y=YMQ7iCd4xATOb0O4Qq_d_^T@^k$}^4+MV7og`P
    zZ(Am~8vw?!+UVA+Rm+*HkoYVS#ORUsiAUBE%#=$dZ=q;;u8t%5g6erD`P(a(&%rwaTp
    z)QNp2_A+M@X9+GRoFX_x>}8&A5quz)ar+?yw}<5gIzRApSVlWXO~aQmT*70faa_vq
    zzHT>-6H?x18h4n+ol-{Q@V-t;8E)6IY1}Pkbo4IMxJSzHy6!cN6)D5P)!Sq)XH4Tg@l_1wesI-$qiMP&O6EH7E+2AZB4yu$AFq2E7vZxwqbEgI
    zH40Q$uX^S)5HVp~SICKMf{Am+<0-ZCEEQ<|FUxiQjOlkH=SjtqG&P~zPidW4&9Djw
    zxQHX%4*=fyjQA9Q5b_%ky@pzr3(L8v1k2wF8t@l--L0522UeHt8BMYdeZR-qW&uDf
    z{QU#etWd30YmG)Vu3Ehc)w;d4@-QT^)(Rh4i7$*F=sjqY1NG?qaLV|f~c%$@U(*qD)6ivhHwLNi_baFX+2KS`=3Yp>Vuzo2JBNq?z
    zY1DFxYRa3>^&fd#lO1WUQrc?OU_0ID~faqh9~^TbN}c
    z24n;(B_5##a0WW|l)>>ZKQu7n)7&r~yd5&3xU_J1z~&?`Pto##&?R%Z#1cOkCW7@kwOYxf%)n05?g~RUHPnsvzy7bD9xltIhA)^W~@hUH`R5
    zq0f(_717(#6vac?h@`W$WA-$G58zCml*QIXC9vIQ(7S+(%0;^g7|4${Xdgj~o1gy?etz-M<>_cj>%p)^r3-S4@Rke^ivx1MrRmbSd;q12W20w-
    zFpLV9yG<+vRUX(jMlP*H#YWJ=-9FC6(-V%mV<(QvjQe@q7R~{f+(L1|TLgP8dh=fY
    zzPF2jgxfr{MUFd>bMuU%Xiqr=agg8+aHj#V36E&T_Jnr@H7&rHkdy)y~
    zaWe%}Cj2=Rz{23)i#eJSNK*erL4hbrctWZn6Gxy|5TVK|7D}c!F|IbymZQmYwTp

    iH79 z?^a-=(nhltbh>teR|?NRaqe-L8&T8G*P3AvMu<_k@JBRfgLe0opc>y>gu59A%Wkb! z2;rMXSi?aweRd{5@UdH1DLrE#(u5q6f%?e9W1h}0cnjWKZlBjbd^1#K*k~H}3md&3 zg<&>27}Fa|(KNk<6L12ugx;0rSeh3H7_yXdyGpPI;3jA<@WcxQegb5A`(=6#Ib4BZ z#Y(bTRFkEl$#aqibN%DPQyaZ0nIIXDR#hg*eiFsN0(>a)E=uZMInIR26B-{#yA0J0 z+ih3F<|bBIs@!iP_D5tgJKg?WoKgY6HhjbXzj1JnzIIlX?yXr5a5tNFvdL zm{_!`w2C@Eo-GI=)vED|m{gHpxQw8s;2^guwgkQiS=v5IaF*a4!Ak_E2_7RzkvA%< zebNAspo+l2Pq}v4z*T`?58itGV;2qllw!;07Krj;&=bN7d9hTO zpc;dtVJo<-G(W}?-G)DlLq%992pT?gQ=Y$i3F;J)NojDP)>BT7#5wC|Hu-oct>GB4 zFgg|=mr|pmUjI?n#$oyozmw6#My9_z4kCq&91sV~Ga@lj@d?J;RD{l=Bb6Siji_jA z9R!Am)u{aZc88W;BM(*bX2@D z==^?E3J(C7d^xwbI0bt#E&f;D+mA9V$KAJ%jYO!ZVrfT2sMb(;JsTH?YJ%>ej9{@T z!KNF=yn$^ZJw*=<|0QfLk^#2<$n!7pqDq&rIox?tnIC>9YjD{Xd0f*5*CJ( zoS{ab*?2Rz3KY@LguwFkFq#am)e#}z=tjFMf$cu-_Dh5)qU_+|o#CD7xU-;qNlp1+ zb&HmCuj31-?_9itUjNx)#O`?CfaiCtQb*Ithv+BPMab04JH#lReF`UKzQQfTfTI@| z=S#d7hv~on9gH!S1TZSiroBQiPasM|%%qE-$t53`Vk`;*Zxf!~ajHLo`Wl(eFXYQ` za*lZYvVN9^YGGqzc~?}T&l~g*j*;)JTGJ{ceTah+4^q4k884S& z8V;F~EzEAW6-EUv&0G_f}En!~LpU1%sOIm-GNfM7xwUMh~xsg9SRwGC`A zR5zM7_&UcdQTp65`eP%axZ|D4^vSnLjPyEwTzi(%2+f5FJ^ryrBp!+hi?2eyMnU)( z4$6MWma`N{Ag;99{B>|wbAE0pqV8c> zWS?7iD1ME#ahU%5Mwvm@d@(ZXt)SLP$3J5gTIcQYhfK1FBP3Ai1|nFHnaajwUL|GV zo%yI6Gc?P(yF!2GH*m=CmM83Y@`@^fB5WB3zKF)E6O#Gnih^T zRKD}9HB!k6h{9vIaO7pU>`~ZldiBW+jX5)+FRyX2H~ zLNFiqAxomj&~zw{BEnOr8r+ynhoC1f~KJXT_kcE_E*nuAJok~>r_<2B# zQu9d0{S*h{#{g{9kIV7!PX8{>gyeZFgWlobPP@VV(4(7Qz3o$f_>r&v*rOM~?ZW8C z*`aW|=*1KKN#!U8F}4x;PWP&Xf;IM40v`Z@3mL2U;9R-ufMu_?I;85-IQuPB4aHn| z<>Ic~f&Bj5LT<@B=*?1TzUysZKM@Bahl=++oBk%irtBO!gP=RMsvTfEgh|0s$bymx z2QI)hkiegc2UdEn670&LQYp4Y;Jl<6@b#buHU?!0Os^f9boc@~f3)qRBM1m;y@ud!;sprq8-}@MR6FiA3G47TuF&wNmFf)CLu|+5r9fg`5HFQ@=#$v)gZqVsofBfOqC89kjRlT& z^xsieU61?;Y#EG30!MH8{{-Lvn=`{&|AXlz5!O#P3Jfa@tF~&2jd=l%fTeLTlp{0|^$#K~;vO99p1tLGd-R?zq}pxq_K^L<~ynLY+^j5SDqVT zFE(-uqun_XQ-(duVS_R7U)W8SAUP|8;yOKE9{4EhD37tkOM5s#!G+g<^c@dR_t_HJ zK$`=c9>foaN*yIUAmMy9iMiDaaOHl0 zXTFI|y}gJ#FRU(4MkN&l-2$AzSt|h?3EOaBmK%&h0iZ+NY0g2Z@d8SfISl|+nFh|h 
zWpVd>))62`aSC>LToarFV~%EW-^?zg4^By78%YJDs&@F^gqk`LB-V+t-q3X( z*L~_TzX(5>A#j|)|LJ%k=zgO&i7TSH|EFD*WdT|=OI&v{fQY_l!&#G_&pcPLCo2hqlQh&G zi#S52G{G7}4M7df zV5~VAX}Ldj>huFE_u=0u^^qjZzq~88_b!4bB1q}OYDk~AXf@Y$zo^5r6PzLjn~Il; zLIRuXsyH1!)jj(^k4u#q4WtfG0KNJ{yhmjE&#@$;daOqe^JbGgP8=P_4JNnCE9DLp zm$0T<^mb#LklVdl-h(wWwx#ZQJJ`#JG=u)B^fQ*GPN6Zi=b z+V2(G4|n~3j&lw*k+700BN{TFzmv4}hiN-}V?s|(GVhKM3V;OM^b9uVT|u@Uk#aj6 z2$WEoIxZk@tTdd8fU=N%9w@D)_!6@J(5O3shy|V>=s$EG8jNY;k~(*C+3#I#)&t-9 zua>z!cwoSX>4K+=wEP~@@oNovULYIxBTI@Zdm4r7heVslP8Czu*)_s|FLn+iLjaGF z;c2%A;griI%o&&Di8rd-lx|A`gl^e#4vi}bCFJGfCm*QV?Dne37G<=k>q@eSB)CXY zphz<7izLgw=Z8qUGENz6e$0AvIFPeACXU4|?^>rfV6#~5OHhY7LSonNXX_6Uh+Dwb zRqW)R;aQ(2^7cnr{)+@MCSk5QVJ;iEo}Ny)DmUd#O(ki;a;B2`25{p>u-}^$R zUfbTh)Wb6yZksstkqzqOnYm3^JuH2=G>Vr$7*p~g^RMA}r`x_+Ua4b<$%lg{?!u!0>=XZWaOxr| zJ06PTe1k212+I%&e6)~3{4tymVdhgJx?Id58ZnZMlMFlN20Y@!hav||5mSFhrkGTw z;DRNZs&_52`GAqs2#5H?B?z412z`Kf+#GVG>2y1digZ_Q!A9(%!aP%nN0RDpb~CzA z^P8xGb({dxXi!DtC&0Ulw16X!yz-Drx&+k0G0@3kI>0jU^aor+g&`>cNS+msKC2Br za4TF>W8y#e)A4i}9`_nBPXBTePO0%JS(uxRL@-Y-Ka^Z$OoY#TuDGWmRv0LaX?Rku zFo%6PbHgeB-Tr;a8R459$vtJ6Qxr{Xwz}&G@kS+mvSU~?j3s(B#b*Y?Ej$7tq*ow> zKQ5Ynly)6FsD~U{@)e%^c6KSht1t!4zlZd{cUbq|w2ee_psj&DVAPM~69F8t0rn=hrjb^c7h5Q0`i~sPqp+|v?&V1^WRu`Vf!|aTEW4VE`AgdlRgIAOtxD-jp zwD7!pseWm@b0xGPE~%pkDx@ht<5tnk%K1T3d=l3Ue2cQTm>Yfa#_j9IYgh4%-H(Ax zq?$~&o3TmRVb5IxOCUCR5D1g41aXkW_oboHWtB64KgAP104Dy|SRR^^@Uib4;Pit8 z>b|={VgY1A21E^>2%x(@n=j=Kl@5D{G39A&Mc-r#dfcQ}mwkJZqgf`nn}8u~`!K;1 z1m8jMQGyoC#iv&MS;Aa65W$y!c6o-WV@of3R!W@F9bLFXWsXSBOTb^B5L}?DCa(SX$ z#J@x3rSfcfcX_%zvrsP2N(*(gkbKDUXK{?Y3L>!(H{VTYOq$maM1fc$IYghXg}*R^ z^<_EQJ+NBmx2?tcCQ&mWs7Tv+cFpNSNFY{KaZ(^q5RxRopxN2RBgplEq*Yz=5# zhH_IFE@}p)$Tk!>V`+-uv@BO9ows#5#hYZyXphJ*oV6M<5MtW=Eay9o%Xs=dw}hqa zQYK-_F@!!$J9KM;fjsUm;^24q1prJ=`SJ$zx!?g1vs4T;BvlHRZa@)VvrG@6E5aPS z2A4z-%uJFL!p?#}I5xVwDA{ztj;?okQS)FYT}81tJ9YSp_qpd6Civ_fo(PUH@nkf~ zy+poG!G13$QOA*dNaRHvp%1`mz-EZuNk6!WGYQK%T74PSqg{?nRYw`s&Z3UlT!tk{ zDT!_U5(%Sn0Y*%DmKl%mA=4#ba_MB<3_db9VoPh-=KTzN`|AWhOYjQ>e}ll! 
z0*=Lg3E+!y=dx*6fZdsw^BIoLqar^c?PUO+jnyRJTfJN;kQDUv6GePw1OBUl!KxxL zFjJJ|wdI*;??8*5Yc$Tg{g@2xIT{|Zd|?d?vO`gMs-OKw?q(L zK7|bsS)IvCp7_2Q@-n`TvZ7BQ7-H?;B!&to80JMB8aqB`cqsY&ur?2FZHSXYwi87F zMGd`#VEpHx&*JeplS3hkW7lU;#@>$I#6tDTReq-^C_`iIf6k#vrky_~D$H@e*@zh1L3MSNZ)S?t#U{@W14Ja`KXQ;kzk!||-6f!}|~E|@0^#qd;8 zp1cLFO6NVxAnn(2e^k=6*irVoEw=KkHt7fJCZAB{G=UOZO{sKB2RMo zgY~7@T9JVt=D6mZwrbZpcmF68is1SEl&RPEMA9U|Y^z{XBvfByn zzynyl@u+2QH$g2H9rgBKL}V_ZFCxen_IU?3f>ZqeV5FvnX&X;*bCl z;=OsQ!aTggLbiBD6&Q!LD;^fsGfVg#E$HiHp2OpJPvGU5+eqz{H4a`wl#CBg+HJ45 zn=ZfP{k*Nv;L7KhQFKO)ue#{A?62UA;3V#zd>0MUz_SxBb2MZCEOaEF7D2qsb7*gY zu!NC{Xd+4Y`fdDBMgmJA8s+#T<^3S4Mt=yFM_<|n4_(iso+4sKEh>O_z>4SixPA|Q z5quE*JO+mZi)HcKHM5p3pZKig!j!FxcZ~EgXxyVoVH%qyoH3WnG=NKR@`ZzO4oPBw zv%H8I#McqQjh)Jw>{9^=V^HvwF;s-ysveDmcQAVx(t}YVT91?%x_nX=rbm+YQ@FXW zZ^ZHCC6iaX_DW!(Ug*0-#b&sMXFKQ4N%}wXqMl%Kgk~l9sDQVS78#Vit|B3HFZ62R zmHi~LII+c9WCHa+Fi^WYkryF@`W0*n$+BZ$HQ8EytVEENVev6{jic%GF;STu9u+^s z5$2RVnF|BZ7c(%ZffnTtLBih@YK%$K#FnugWWU-b=LL0BzDCYuDqLkTn&7 z^h?E$9$^k+5pn^Iw~SFs24ROW0^Ssc^3lUsUZOdFgk#5d=IO4%$gyl?HUS=s;S+KLMH;&oVBhE1 zJhgLW_OV*yheLTzosp^C!Prcx2r$J>{DM9Ngbgm=? 
zT2nYxn|zBnB{Qwz2qyuQ5O3>c8jmH&ePEz1N9U2~*fIDmD1O13PVvz<*#VbsPn+~g znmju|CvqWDua>*XFcOS%{v-u8N=UB_!U+i(a)*Fj?oH#8y3DD}DY^);%Ypa6p@rnb zw$I=Q?*h3MlDPMAG|BT=1< zTRf4VBe%y(zeFyB4#}Vy;rR`l>f8P}T2qTdKDR;^w{gpkGGk!(k-oGo-Sp$hV34bn z4#?~{m<(m6(5g`8Nfh3aGJFRPRLfqCgvGf^ICV3nk`T#Hy2ujA2}K-J8<=u9eI$|n z9Rk4+DU`vA&l3q{o=uQJ%ADFkUKD)^cMk++1KR}B;g7Rb(Mdxxb5r8zHr!C_ALNkO zsF7T<^mDyqD3lB;LnIPNh;%0kLxSU^Mr2Ce{}_)MN#X5_IKm?UL_eqv?_M@~`70XIRbD`#Uld9J86_EaO|VeYqzOm+E%dT0&&(OZ8KE zHRHO=;E`kyJU)WLtuQ+=6LUG#5Y}lo^2nOj^?3%r)Np5TgeQ}xd9zpwSCB1V@cMpe zvyW;cSQ0#i`c4(4`20bfwZ{p3ULSsmj_f18oFF~7Bhp7+xUgZ#L&-}QMj3vRoeH^c zl`O80mj+}xl;4pou5yMf#NBxu;e7xg3ym$H@FrEaJdm3BGGKD}GLjrAgvhVK{kqGUZHGrF@vg|A79h9V>Lk-`zx;#7V;nP{wy~BUQ(!VA6U4m~B{2sx-C*bvbH-2X)O^93jECxh`14D~0zE1H*sz zCE*Saov_FAI7D-SJ8r~|Jvky)VJc40TzPqyz0&CCa92ije4H4#`jL9Z zr34A{x(g|SbR4x+$>mZnM)7%81SwZvw<%P~10rKkg*C={Eo{~Y#=O4XoWBe2`rwvI z^S=OR&HqVI0>zv21P1}2|EAg1IRY8ar&!`Wx}o9nCVn3$qTE(}bfhxKX|Yc-fG!6s z`xkfuZ7-ffn?{zvtk;(?|4jy-@H{z^Ij2J9pG1M~k?1_NK%g6mA~8a&xHqQ_o+i~5 zx9-sEpH|TIwXUr3hZHtv=wGK9#y!!5Ka8*_JR?aw;1m2^Dv+4ZufYc&a6^WHWY>(| zM$8L)SKp*3`6iM1CP)9~9)jBeF$*9L1DF4a5vHC2E1)yc5U|E6Kld4i8S2 z$hKfwc}V7_!{y^TC)xZU_B60ef5;2}mB7ye!O?%jxlImX5(RwNTENfFYcJ`OvY x`iEqZVgd0x-%-fM5~{Qy8bcmzQJ2a-nsd_g2g9e4yth~R<39SLd!X|>p$J1nq| zv=oc3g2qj$_A;?TM{WRuT(kOG)f< z;xKWP@B9CG?cVO;08mP*q6Ma}r>CcJ*GTjDx)SlYARzU zJEo<*raW#c6DB*M=Y6I;X)2pcc9Wi`O?k7aY%$p_dfso!x0%XTlig}6+e~(wscbjd z?WVHBWOta#?IwG>sZ5#dl-ACe@*SqK(`0v=%AF>Ar>X2R*>e#0H08Y}yH{on`Dv4#4)XgBBKsc^cwIlTTwUN2%&?=v5$TINMbil{$gt{Rg) zV4fN+(Tmlay`>N*_e&W>efErCW^sfU&ol zE5M6~aN#yn+$tBg%7txm;bC0ZW{TV8!gganXzcBB;SetDFvZ*D!tJpO?=kjHbN&%y zKWc2u&Xl>5Fqz^V{PR?Crzzekcib7>aRr!q7}s{0;$3p>E_nuq_83Zco8lfR-4m2P zZtOd80Z^Nk3)B7uw)0+t>06lIca8Kho$`kAXWd$PwQlE1m6e)P_j-$^mrHJ`R`mu~ zoKm%(TUxCa>T-UuR6mz<>-oA}v8(lI%j-V2T3>Z+6o8__Cr<(g|3k2+4xK_mT7FD^d!w4E>7jys+3deC&PTwC^f z@+&LMpvsWzmU9=K{7TNP6)xE9OaJ_8xja{^*6m9??OU9CZozh5ww>v2Z{&!3xmsYe zi?&;z%e%HW8Wt^tWdl+co(`WaRg1aR>MHt{ley_TdTGTj)K8!@z3f#TtxYGr-qmWU 
z&b&9^UMQ^`Uz)E~pRblKc>|8UwCa|s%W^x$GI+{{3${~VDwXXmeXY`bZV3L?pL&#YYb#&T#Fkj)VbYAc*P_jb-v zHxgrYTX$H_G;&Q7nLxoT4tE<88_WU|h7=PfGe`E3?cQa4h<#NpjQo+Iv{P8ba z5pawk^NKO&1tOSyN{xl-#C*0(?gcEnE;^+;X(Q#@<)vt9oV~b}1XJg1xopu`jp6ts zXK-)L zF3z&TcOhA4Mv5tN5lF^jmnn8j9+@6f?3H}CDfXFSTJk9DH^q$PdyS3q0rDH%PTrH2 zk|6_z?U(dR$%p~h_DeEk+zZp=-Z<$nU=*kAv(J_;m8wm!AUp&X1IU0ht{C%C0qVlE zMPeG8uhoxNR?5PyLiXa^gW9nP6KG82D)|d`u23$4vE>4UW6c0Q``UG9rtNJ_o#Z4- zC&#ptz&4QWQo<}-oD-tVJJx;E31>UIMVL)nZMz6ISkHk2-iIndr?$22lwUFI1~IG) zhjfE<5#lX41~9&uG9`A#`iNy-0dGMb7+EAfcg;Hw;0CYfoU&st4Gvoc3#2^gDv)i1 zt932e^}@Mqk7L)*Ikk%z3}?>kb!>M9q-nR#_9XdDt`N3_c_;rrh@0FL64SHQYTVs+ zZ0+w1hsxPz3o_L2Mn;E<@q}Xo09Z&qNHAauc$(z+E#|-@F?!AeketPO(j8XAc~I>m zdUKC?1^l>|6N&0_2Wn_3NI;k~OJz#C=No+o4n91yAO8-33Fph@v-!dWcND#S>geLs zeN&WE_Z^toKad_syBQSw1qY@M?%(eclFDMz&i%-HJ^n;`0|flJdVNKORm!c`R=m{3 ze5vl-&5Fa2#*}A}&pP8?I`0BH^+jjZ_RJWQF7Bs1!cE#V2A?n@K$h6 z2vAxZKa5AZ!$^#kv36Q%YalU<+@#fLPfBeYDFnQiI2;1@&v8N+2+1RpQ;>>S^uY_j z#lcI!D0l%3f){tAI#`zvVLb=?N=eHiD5aLvC-R_1FUf=8OoT1q4#kZ1?&xW`1MQ%- z_3r44-+_~W9@BB;aSmf1I^fN)XyhElsq+|<$C}5d7>sPEfN+IMNLKUCjonx#XssJkI#v`~kY^Ro!CZ0k?cLx$<4Ov@( z8)<7eISK4&N)wIg#Wpfvrd^96qL4gDGde^J`3=BX5CuR4a)9l=Y0T^RFI-qT;K#uq zK+33zIw`p&|??l4yQ^vRdRk&a0UWNuz`>Tx9{`$ja`h%DUqV1 z;gwv#*7#WE>40(LL}z%4vXr}cj+$g38LG=Ohs)F#-N$FvHXUQ5LJ8}S5P;y1!ZUJrXzx2uZKGQL6WWH7dJ)7YNA`LQ)O>)wTSTP49A zmePb!wak{<2vLVQDjRZ7M5G|yc~o_tVcVO%F!`hF%M6N2oL zp-~sn@yXD(3j1h}ZaGwScOs$8>Pt){H(NbP{>@lD)@U+mO(n)viX|HRZ*)Eifh_Aq z`;=uu8^4JRr9r@=d?wVaq=2Mf@O2Gv8sStTgM`5VuX(*mWqE-d39UjliGCNPB#9VkF%` zu8HOgwTR|dDA^#zrcOSCQqHOn1|{RX*IkBTT|Lkk1oeQD>-V0ptIPFsF*F=#LBnjX z%C_TfT!{CE+D;p5awN%iahrvx7d{%0U%gE!VyD zQpthpTP{`YY`^N%IZ>$|!tGL<{mhl6pI+*0t#~=2RTZeJZ7qr8C5I>5&QiW$zl!_a z+mIOGYB#j3VQUb_aAMNh%)j`zp5E2kT!^b=C}|vmtINo22v-Fcfp`IC0#^bb#9MH5 z5|E0GVD)=%A%~Sc3Yr78Bf*73C zN>RN>IA6s6ZWkWmPl~WxUoue(0#~AC;j%fC$XK_@Oix&i2Rd_v&P|0gokVNY*JP$& zN9LwxS~-F!wE;)Klm^TLWDJWZQapQ1T}b||Q3Xn1tOPItWEQ5I43>=xr;r#Ua=S(j z80edkswu4hG%hKZ7drb8PeXwDW{AS-3@^0+Y+D>)t(M3S;0CosKpTYCpfz9(r$7-0 
zt_!i&w&te?oEdh2X1KGT$^A?oWb!DJrT2XOrp+9j$+9deDWzs10d&OC11YT!6$ z9xnUs2EKCU(a7!ei8DV=?@*gQ++pA*M+LAC#7W>Uhi9Di;m*Iyz#q;$9=gZC1OT^DIStMdi5Std_?j)P4Q7vJS=&1?=e$+T=MYX!vQ}ld35!NDLx_j-JIlw={avC zj*;r}A`9k^04D)a(zxwiUJhL-fz6)wWe;67-4`zAon<$Md7=yXZZF@P8o0fFk`Iul zT8%wx`s4e&TJ9;_WG@}!v{PgfQH2l$$?B&!(vudaUC2xKD={@!ELSn9izPZ$n?_4`3M{FC0edlEAb{`PPGYX5>lkM9Fq~7U{ss0N zhiN=?W4#WHwdsCm61O|ONW4)XLe2-wIgMR87kHIa>UC=nPOZ6Flw=1TyHb1E&iQq{ zZbpczHC2$ib?%8#UT?YNLghs$EX4?$a}I?Lj3I@}bjpEx=ko_5pN>>uHM$mueg-vE z35HuKlh~ZvoE(DNVY4*~^KBcHVz?l7ZDck=9zy=XgLvnD1_?l*rW)A@3?pFz^cqEe z7Z6!O>X9RWNJaU*B2FUa#XVHiYp=sciF<{Izy}0#%vX)UCr~JUATkz{=+F$bH~J#M zjZ}3eZ-^<;ee*6+Cx5n397x+s%jW{~iM&xu!=X~N@DPL>L?#@n`bs3El3L0mxQp0U z;oNHVLQ|w-FK+RM)RP}Ff?q>nAWB|XJM(qM0hAsYBn_x8CxK?#MKEx3XIGb&>==J& zd4y8X=-O6_5FtM`@tGt2sLs$}BQ#j^uxbH&}+7c1k0%rtm zfvmi$K2GL{Z;Dee6qo}}X$};-FrA!tj-y4H_UW$J%tn(cQ>l>y=Mw8)X2Pk9@mjTG z)ejSShJOc@1ZPchR0tN6(nbf7Nol<|96qs|QlHb{P6MeU$uA^J%!Y$SNw%IabqHA* zH7|&DMD;-2qB-%^l1M?;Nna~~PX+cXE7Of)EErLB$Za8GGJ=Hl)?Ju4)Phe|Y%Yj= z5Olx})HCbby=*2UjZq;`elcL`gY?{-ACkTe@lHKu6ZK(p9_n+CxsWu@T~>Y6oF5|} zq1R^I;It@g!RfW2WAN$(uRe^+eAa;9(s^jh@a~|>1##*qm8V-g#i0n=gt_#mL|H?V z2C>Ms4RLt~=dj>IoL&S0_M#nO@Ye_&BB`&d;AZdhGdyqfe@Pdy}&jy~w5^ zD!kV@IyHYX7awgL7=(kjN$6%|E61(I>%=b6EazP5kqR(b{yP5fwY1V9Bsxs@HSmSnl}z(M7Dc zIyv`DZej7%(Id|)CJPe3hPsNdjJ(l_iJ(2!G3870G^Y(3XNLoK=y4x)9Y}=G!nEAB z-m?M0s-WF>aDzBC7}cKu_GBRVhE*B6y#v(;_ckHM7BnP?@sE%ph(+TM1zO`{qCAC; zbMj@ZfC5t{M>cOA!?^^_-rAwk4IW70>B_gSQn$7OSQ5k%+9gNkv%pcosC*E!pieyH z5TgC24!^f}I^{Z*F&|sdh~{1tr0AJ%?A!Z%wUA$3K3AWzFVSRz%KysIAU9R2P6e!G zT1*%XY*8)akAKOK7jU?}NH8>vVXFw`592%v?m$O5#)4*{E0)0onxQ^q8izYfzc{^u zj^qS8BCZ?Q2Y<%i9GD(d4d6AL#JJ%QPe+(M!{lxzvrL{~@=+#4ByS{(nDCSSg2SU^ zimHcQPkGoZDAuN*{T!UAScy?92|P<$1Ie^C1uiyXZBIa(Xj**`<5+DP3(=1X1)Y&e z9PU;;f>wJA`hCqT8}lX{3P=$kMu1rZyaJ++i~1i0jzd?1YW%1pVHs8WA?dGJ3DLWN zpyHqvHvw+yW5d)&!mu&i1VmO;l;CfyBI4Vq2)~@hmwkVN2BgKE2iFJL7ZDZ};i!X7 zgQgDj-6}BjTI(P}=(X3{_n#6o zbf`%GF&Io7hTsnd^NDT<{`QcIbl-T_ZwD{^3K@J<7dWn3NcW 
zqP~hH9pv_k;LI9@w7G^>$iQ51q@z>lQPb58t+<;4 zG!N7d+!W+k@KKN_Y>Z=RMVLLpS;&Q$6MiGDh@ieMFB2OK3=*u*=VbwB()-DoG^d z+uZ1exKjweVax!A4hDsxW84=K;8dX#*h|9oC`_t7!#cqMpas)tPVZf(`d_F~hr@^akBbU18MgSkuA*UyeLl_m~22`#C)e4aru%*W9 zKBLm83soy!y9Z!PpaxKX6J8u)Dt;IxP_TFjrk`HI(!G?d+Y@CA>R3f z=LJir6LT+T{xvGCxW+T(Gxe#6otni`Oq!J~ZA3;Uu zN0~%Dx0axahw!=~HnRp^OW}k@^XRKFGLKlE>D2HJ7Gf$F(2H!Zvs%sZ^(Xap6Pmy; zq5i)g^b(9CLO2L3mo7$8uU3uVn=Fg&yM7<@`n3wgg5vt7SN*n znpnhVQPj#J5~gugREfCpPg(^RQ0;sXo6-E z5ZhNPkb~+0?$eGxX!wP9Yfe=UL}ECM0|&!Vadb%>p$u0xfyaQ3MD+<|xTr@MO_O<{ z5zeG)CC{K8HJsJ#$E!nqf&G3k2HF;a3hNwb5PB-au{6eN%yxiR(JtpN<88QRX&qxG zegODD(=a)5874gf$}aq23Xfo9NyZTvNNenhAIE*KvWslV+(@`$?;DJezT+=~7R{0E~dGU87iE znpUE@_lAHF&Uh2KoJRXi)9F$7%_UOkLtd!cyG|WQ*TL=wy&F3BbkL1ek2H8t?4@5r27Kdq79j zu>=Lo_JOoH}j;xMr`lB>YmRIsVk5X?wng{*w^TI6+UsKM4K^GWU!P` zz$v&UXeRpjEM_ZW7%GMPxqzZxs@M?CUWys;4H3eEe4^C*!v$IWu?IHhZJU}6T#h3`@F9$GJ@k-KG z&OpcuTzQj5Jed>0*|a9z2c(Eeziy~{<55j@wVeQN(StOn%m}Q2C@X%f4a3D^GzOd_p&beQT@Cf#sAnfA7Gfp@b zX2<>z&Z^b0Zd|`f%{ONJm$zqvbzL)f10a0WeP}vZL_@+^6hi`_hWqN}=HkmfAu_EfgegAjlKruRiJ$6A5j6k)War z!AQt}D>25bour1V*h8 z)8dnx3DAE>&1cI9gmp2X#T5!*@8FBLus*f*w4*jLW@T|mNOy4txh~zQ9=i+kwAP71 zzL*`1oSp67L2X@zvj#*l+WcJtT;4u}!&PWg80IkKJeP|R0VGm%{lz%zJtf41?l>lR z?J?n}NUNYOEJnfPBdQ9*B>NPb8}2NY07eqB1dQn3Ll8V^F5Sf!cqCwCp8Vh@SwRLg zfJ;;?84<=Wy=YF8Tu}(nlF5RK5eSE61H|y(-a$@Qr6AhF(u}ENod}Qyy#nLtGUKpy zHpTG*tHGg(vt7+87w(<+QqNs}?s8+-%H@^IQ_BU!GqGsHMWnxW7BAwvQzVtC8e%-y zyPKA^`po;q$B5JNW6)a%k^`$D)%bU*ZO7W3}*|j zWqTK}_Xa7}>vHndWm{E1D!{7I&oXy{$=_o_bo5tJM2nQN-Sf{sn|tO7&niW}{YW+R zbkR~7t1y&_DNFfU6GFi|HVRbg!!d?{x32RNe$r_nQ)_!6>NGKtUqXk$%YBDc;3Ctt z%m5N}YeEhP1Q0h~l~oF|L_$C==md4kui{g_as}`T+$g?$poS=8!b$|d!HvK-;M7$- zw#~hGp&9sQ_?iHpCcLAmiMXinCAGY;MY~}j9gY|CC_(N8{1tJIj)gHNEIYI=wdh9@ zaT-G_)Wb~Ucn5~vY_2NrXPA+T4oSn2iXQlKr~4HDUPH4ngHW((mSuuS!qT3=sn4{$ z#K~tOWb%?&-qO-x?=Dg2uh*e24rv=2HmPo4lZ3IANkJ}6rZUzh%BP2Ka=77qC-DH< zdUKaEXYC0DSS0rA2>eufC;p z$#&SYHJAYGieW-K;uKuCJH&-YI!sPX1?jZ3Ay7zmVn&2d=u)s1p2~|!pbS+Zud99N 
zRu^lYxt-nQk};E}2$}?J?;u3?ZnV?kL=l8sZNa}T8Cx8LaASd8&~9&H4h1O!LAOAP zAO;`myGQY@;Ue!Q*alE&xrL8t4ogn0l3OYYi-7No1Q?SwBTL+pCSLJT4miaQ z0%IEQ>wqiQAM~1r8v`;uQ?)AI$jui6yY@A9NHlFRm&AJdMV4)1A}bzenG;d!>)~R_ z{t`>6hX z+E?Ec@bOIG!vG;L0uGm0=oJfn!y-dTKVm&ujW2lC|$4t$WoT3Ni9&w_^!3+RJuYqyg@3-BH^G?pq^_*s$rT_3hQZE#mk|O z;cz!2!C?8cjqdlLVM(#E2vK#BeG<775jFxmox;Apx2aj+MfBHP?)(Cu@L?uo3V7F2 z{41}qK(J5n@B&V;bqyRU+PI>q2;^B_>eiR7b@dH#Q&yd=wJSa;mHjpZTNAOt3d(9| zwCxyk`fTR)pphebe*T4r0h&58kt{m-6LBd4>2K_hfV4#|a@xU#vOhq8OUU z>B7h3C=LSUf+@ng?nbFI%Oup|6j%f_3+T20*DT3YkI-jEbxK@IQ!vTa7OVoGB@{35>~@e0+dpS7@?L z)%>+0G=J2^HEQ<22f-p%(iRA2#i$Ln0L=jJLjWJB96a2%MdCo%A`DPpk_LV96-M?z zy9U-4!x>+xKu0yFIFo4(zyn0e@%W;wP9i1?Pm+ZpKa4_%CP5D&{P>n@dvQ^8$xDPY zPE5zwas-m<0j%UN>E1>WZwqV*LDHhVoL@z=5^&>NSoV_hclemm=c`!jUvig9 zc^JzgimwsXr6N6>4?ZLes!2Xa25t+YM?zZUb7yxmWD3ZFLOF}p$;Rw zTO!0y28ch5Qd+y>-fY^wEf#%f*K&LG|1_SYkOPl66Nu{0=>I|SrYQ>qM!_XwdUd!5#cLVjlCq%tYa<$z&`tH>> z7)T$KB2YUd3lP-#BC0ujXKyW<335d!6=B^ht8av}Hlf)XuWU#f{vf`aGcb;}eQfJu zTzX4@T^sPswl?@}B#Rjr!rwj}BH1$?bnEs=wu~2fZ8wK~cAb0&TvM+0{|K(_>B!P@ zrTADr)q--VCX{>6P0vw#%=@-<|Gz8Ue>TMaPVXMZu-{?m37frMSH>#~-KcaIUb~)j zZ`!j{IuUb5Mw~a|%QqtGb(f>gqx>vqXu|@(X)m4j0>5Pz+tbDW&tm`AL!9j7-02Lh zZ=vx24T^kGo+A{z5yacw%8*+S!yn48weaI+72weI`EmPZzm0aSion|;+P(iKCN(ZX zzZL4htzzH3*Tug0h`!WxI!4<@oXk)+h zm^mG|7R4qKhk)h+*P=Hha!_{V(DhMi^j9Ld;=j9Alv7xfK^#SFho3`f+hsSxIo7#!z*M)Yl0!x=~DuYojJ$HE{9GHoB2Jt%_T>!6qog|$A=C>(8 zJ90gs;dOLwfaUyKBoUxW-$-Jf=x9wlzr)Kk|0E_~GHs#P(dgSa^!5tzMU$-o^foa3 z%VrqT^4kI!P7#KkLVFu%wh5BNTY=;+BZD~)L+k2Dg%fq$hbg=dNz?c^T>^BMVyz6X z`ObPo)zt$_-zyvqN^SIO3hk#Ds)vh3`8Sp2t{ z{9PuWV)74|{1lVVF!>oKeEq=rStg%j@=uujJQI$>`DaXsOwPYx@-LbED<;3rsLHE~01}h&TerokSJ( z6^mP%9P55J+JA}zA5m$=jQiPQ^nhrde;*g_2%duPrSQCfBmCXSOpUjhsfDJg>P5SA zINZCDw4Ew&AzhydKc+}+3)=z!4D+(27_UP`U%$cY9{7#}xEmN>_0&ByJM5h^Y)WX3|I#(p}brL z(n4l-!MES-W#SWG5QISkc#<5?;BT;ZCYg4E+ahvZ3Fd>S7_8@DSK97MNxpJ#&~KpP z&iLatur($-a?70{3h%#8C;Zp?g0;Bw2!GCSQ1>y}&m>}z?Wo}oQRN;!gP1_B&E;2A*T1vM_+IA 
zbNFbR^B>U3Mdv>v^)Wo7!2ctZctg*fI=Ogq?&OIa=Je%*_j^O-8a@mp5~Of0U#jZr zr8`mA*~R28CcBvoGYPRO2%{HRT0|n>A|l9Q0+vWDyyhtanGF!1x~(19MvPQ~&EJ}q zNJMbH4yiOjg}iMoRDAZJ+BxPEnl(U0CIN!MMutKfBp9CqIGi8n8k5e0bpDvv_k@WI zkOcF74lp2?(d!NCR_W%ec;Zn$k@JvAWYP{x2#WB>EBv{Xu3-0}YWI5c>H&QC&My%L z`|&`Y!fd&Ue3!B1qclK17)dCB;e+p>`h~J#v0sQhwo}7@Y@#+`uHw_GvQH7V4#)OF zS8*4<_l8Ts_d40mfRqFu?PSTIv4_n0VPlUN8|C99jtn*w#~g{M5J+%H4yhOVaEMSB$t1g_`+59vZR@W48#P&;18TFD9wqB1j`+ZV&>;$ z(Ic+%l5F^hWm{y*>PgWx3fKw^3+Hv$FZ7O8@jfLg$%vR4l)W1jBfGd6fmS;|%JGTC zI3_)5x=9z;v{pG?J!n$c@sS@+{R_jdw^@x*Q=cHCK&#eAjXC`RbRPe4?XTiH~)PJ1+udszHI4#a#23T=n&2E6;3VFje)UK zNH42bd8u2=V)}iF*M#8yIdhLO3GGV{U)J=uL&L^&{OqXzj=E>rP|d!;%0h8MY+uLe zi-4s638^GZ#R>2ceq3xq)+VIkD;Pu$=HqQmyxm8O85v@M77Ks;i{=zL&LG3dmwCI; zXPj;RsTwi@B1m8?cE`0tat-FV7&&rHJOn@!3+x{6Y@jIs7EGT-GWSq6dz}A_r|8mR znHQPoudz%(&|A0j&Xcu^RbNeCI|YA^x+P9Q1{r=uYd{X{54FARfP)4L94CMLOQM*= z;SvsQ#w>alF$pS%;uI?7w5q5d7!NN8D7C{qS6eL?v5d5gcRy@=_oZr2`I;8?m;sZO zVT10BBF29~4j-PwzM8sZuSqXiVl@GJ1o{7pvX?k)Vp=fl#@4n&Rtzj~ko$d_cxN8ltWHdX`4SzJk zUHs7qYthkwjc_Q~YFCl%f8vgSU_|K`IJNdUn9Shm*cKyDQr78n>s=wM#N7gc0{*&x z6!w`4u6%i2wAV#Zv zD*5Q?;o)zaE8`zjjws`RMd`CeSFo%jkY#ntZs=cwgZlL8;OwO&;S3W@$L{&{rsuO4Y>Vi6O*!?f`#`zma8hx;D zrjE*&&eO3;7m%>?`dY<@n0p(E*Ngg6T^)KDNdO3kdW-W7)@&hqopgSCLH!-vwEt6> z9l1fa?pjt^+UBLQKsEX$Kg|7gNKp^R5f5w>j}Cf}jO# z36}jSt_IBcI&dnSvkMn;f#mrN;Uuxk9$Lxx3?X5Kun0EWZhAYcnIxJUnI3O-_EBYa zs5ClMp`0EjKgL8Dg0KcbL16=%dBNf^l1%8!cm5rd*O|P{q>C3qvi5RF&CTpM6iQB7Uoi0pgyz$fcO7gLq^K?Kn)F?wiO?`}dHv8-TJQ9#~ z3cQ0c!PjU;UNN#jImIpbA&??l$d54ZzX(Jpl9qm6?t-L)!mQRG7}A$83XvUg zJ8(daK1u=E4L|39*M;0)+0Px!z0KsyOlZV;L%|;5_}uzDG=A*XTUt2>Ym!!l*f_z4 z_0aU4mt}_xHwqPcDLzHFuMZJQcTXMhZHHXFmeX5*F$oAT)MgMGOv2>BcQo+*j0tNT zTP9^-@4@4`yB#cp@5e?>g<>Ot)>1g`#Ni%A0)B*D2NkSu;!~`^`j!m`2{K3(*@vFo zD=?2_`#WD@!0t$BEBvA3@)Oe|j^6&xV}jlgTI!4OpV#b;fnU3EyzSsb$nrzqIebVU z=I9Rg0E?F85U&>m3WcCY{L48Q!{O48^@i|c;4xG=^^1UO@|7YCH3y_bsro!bc?*qN z#9ksy!0vN3G=*ChI#(Nbt&cierl@6a%~k-w4+2AUt&5 zH^^YhOrhA`LxoW9M06Kf6LKWe=A?haeE212Aw*l38IVHc!FFh+)% zZ|F&8&z%vWKT`XvB(M*JTcv 
zS!E9Uk!jBySSq54+)!j7)ai&bINX*$hVXO)Vn>&i{6$3K{4E2$5uf5>4kJHI72^jj zh~^L*zaC(iycYfZyEFwY41mbMAOG?oqZA{C>`*iS-TM@fX(suEli}cn#$%tHg~}v^ z6Or{8s{Ls=ltjPP_`th7lxSw@f}Ft!V&Pz+>mHW<7!&qpEiFd9C9Qrf=q*S)bbq7eYd5or0iq;|sK7?Dn%qhMIS+Ui& z-5BLk?@}oSBd1aP<_x!mkb$@I<6jc841NR|p-@@#i5}B3yLp<`HPpnk7LBtl628+y!o48jCvJVF+EesljIBv`kH05P5OLD!WQ!=H;8Qe5Azgsn8?S1MRQ3aq6aq>@)57htE2YGI4MLhr1mKD34V9ghSlLlW(03Z9*%PvHqX^X7*oaavnx~ zI}Jhv0{f%N!x}Azdcz>QN)rI<%2HTawn3x7Z%gcf=}t;Rz<@%sN%LX7c$i7_CARg0 z^*nxSvont`4fU}UD8!$<{hWh;^?-HQaE<6b1fI?BcAjI$F3_?j^$-04SF*E~r0^fXV! zD>lm<1I7AA%pvBEAn`qAp5=JfMK9sFUgDzL_BEJMApkxl7p(ETotQp_Rzz^~JqmsO;#5Vl>7zA5@wQ6ws>!T6I8rEHav0VmX;_$<5tsr@Z7g|6< zXuv+YhnKeTp6yI_Fu5H`wvT&6KJkIYqYG^zvu!P8a%W4+Ir&`eA;M@JEe9~_PV$`x zf5#Zv@%=h*iP~!8oS9-L?_d&f8OaH&*@6q(*#UwROUAZ1ESaA5(z#%R@ti|8;}EGG z)^fU#WJiOoeZf@=tB!+jc8Uup+v8u*fC9N}b~vaTprtPLparLsPfvPO>s-CQg0EGS zodFj22RCD*HjRW0v3MvbuG)2}%%xn}iJ%nmKtxc`H(7c}T|PS!T&ovW)O9TGrl=(X z309Ir_1_^toCzkAOeCPg=tZmKtInSth^F{X=p*Rt+w!;9g_i&|FTrJy=qn2?Ok?e%!#ee)e>E~w<7$o0_{Yxa_V&@K5!$^md knV!k6$yX=8VZLE5Z|U07y`^`{$Zl(BWb*vvnPKby036dtw*UYD literal 0 HcmV?d00001 diff --git a/python/tornado/test/__pycache__/locale_test.cpython-35.pyc b/python/tornado/test/__pycache__/locale_test.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..55d7358e4d429be0e09fff2056e79f52672f9e9a GIT binary patch literal 6213 zcmb7IyK~&e8DHRld-2FS@g-4~tutjOK8Io?Inii59$OSeDM}Y&p$7pEAP*EC zr(K*V-biWaGBc^urgNDhT{>6EpRhBjQn5;t%2j^f0=S2Gk`EK_l# zU1U}g-^i9piKh?5>NgDr%rIGO&mN>^r3frbY(Nk`!C5 z+6jbaEko9wT@eSp#Iyad8}me#UH_Rc{5X;`UG7JTz1NG{iF!Zula4JCC-FirN;X@% zGaSi60BpK{7&NtSL;p#jbv)ujkOY4kJJ8udfdo3%8GnD+^LvQGa1eb{7VWmv?szRzmO74b zl7!1r67$G$V_8f$wTk!E=6nVVw=`)8FWCEz@j1Ny@b&tqJCB6N#m;ZnA2`tt^ncq) zoEEGe+WU1QQK^lq$!+dp37@kR93du9&gx>*B)-k5-deZT3+%#6BfrDwv0_aK*zy>wIk(%7xl#RA{HWseZ+~fA0 zhq5Tzofuh;g#7-yb{u%E`q2FHOn`VFpQy(|8eJ#pNaG;(BUy5N1YqdNvTg7AfoI#Y z06&sdMpxN;}U&B=Hg+<$UMS3)K#?P}VS50^d^BA!{HzLB%@z 
z(~^q1qS8Cer*bD;^eaTz?;d{r{GVh2nAm`_Q4FpRO-&&>r?>o{t|frUeRDL;AJW68RZPr zJ4?L>b5sz(n#@y$M1y>VR!k%tNQ zyjC&G1AA7b-9ZL-Q4eHUZR1D#Rxt^?aPWz4Sj*ke-i<>#f;bPt^!MP1OXi$b(+b-A zNQMo(FKU&?BH zZ?GP~;z*`k*tF&|v@(|TtXhTvRK1_*;8r1U4yDQc{Z7&^2VT?Vv7cDRejK~~65e-x zFS2Se-*+OvudtM5w1eKRG>M86GQ47>*Ap^-zXA-^RHK$D62JYVS3x(e_5ELxIODO0B*k}-acMa4M~ z%GU2`C6%u)B5yBiAF1Cr21vHZW^0Nn4OJXenyPJwvD;Hsr)Jw!NYW9GJ57pSI0YrY zNaW{4NQL|}B5Op3rA4*osgH0G!AC))rq}hFQ7hKWT6uOsjkVD|_#sQ(#3L?%oQCG# zLXoO&Y~e~Vld6(1CqZZmYxP5G7=94HKzzqBeN?o_+~<@(zF4SRF# z!N%6mM>x4)nqn!ta>`h_Kir{?p?XQPa!#epD2{ALw0&QhaT5a>c$`O`T-5r@<7!%+ zG^u6qTeJbGjNc}5m&kCxp*c4&@Boh>HPG-lxYPx{=#1Hz>`OIH+4wq+GHPSk2o@$x2wze6Iqa40a%>X%rF5sDFQI%e zs7;SGjmmeHq4VK%l@#++8?5Tacg(bNG_MAa&~W8@i+M=QK$>Io>Tffzae7_~K}OF8 zSTrUTd^D5T6>!971Fg`x(1)$l%^NikGA}O!JSUX ztxhQQoX{QZL*PnX`}$j;6C81AO@rK7R=6&zh>=?8H4T`cv1Ms-gM}*EtgW3);OTI`kEQ zTa_c^IFDn*UqPnpn;(kihpzu$YFAWy1@EnyVVY4u4&Pbu9VPKYPbC&eqXZ#j1_0Pg z+EILn+XBX2FGw7#Vjp_m6WU*vvX(5NEJR61NK^fn1q@5$2-iB~4N{Z>bP}h=nguL4 zk~mbNYN(tuvh!n15(Fq2%GNvDIjy8$MG4Cuy{@gIXAxy>UeRSm`xizsL7K)!mK;m} z1+1)60~M*$sevK|@d7tSUQn6e1+dF=y0OXcVVZyQRvoInx^m^p6?$6cE2>QuTHoWq zB5rVGN4;>@<9WhX7ye-aReG3D^)Nm`V6W`U#;n$Fo|bW(eN0X>m-CN5D(AksS7m^9 zOab`ZmDaUX2d|7Fb>iklWlvQ}1z!icLw-uD+N&^W$d6On9N|VHONa{;(t+*)Q&U#VnFPu^$aSk*`-Z8ih z^d@R9O|3s#JRmXE5A{9pnCU1DuhxMf2PIdX>P7Bn)0v-9=Hb7_G5BwY+$Ta6mJ@cd z3T}M3-EVC5B1GLvGfzKv`sKWdTi5UtlR}*jX*MBDt2&5~qgMKnCcS*o(yvBoeLPB! 
z{s5i~N>Gq4<2%$Ty$+Yhm#|kc#13up4UskxB;3(Ghmhh|7-5~M{5Njs8V5|n|*Gp2*u+tN5r|T(nrS8er znVg*zuu2zNPB>11+rece?N_*rV479l2J^auuQCL16%0~G9?|MRLRtPS1=gPvMiul$ UZCQt)^o#!l(-sR$Uo4gX2TpD;>Hq)$ literal 0 HcmV?d00001 diff --git a/python/tornado/test/__pycache__/locks_test.cpython-35.pyc b/python/tornado/test/__pycache__/locks_test.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a4c92e12b77bdd60db9bcf0d8611f1f74cf5b95f GIT binary patch literal 17993 zcmcg!TZ~*sTCUUg?&-OB#vW&E$DSmc#GOqhv6Ha7A!|FalWamVY~sY(?X_vz-Dk#a z_sw;BvUam!+1Q(l1nUKHk+|&g2rtV63tA*V;sFU39uNo#9U&oDgk&KhUU-Rw;QRhM zRp<1X_Bh@wbbM+~*Qu&g_5Jt1sItfE2=l6 z>NDnhUUjRgH>>KisyCN;LmQvRH} zhDj``7usdju1Iag*6v2_jA~bQ`jk2*cS?RQ7bS|-eH`u#B99keb5 zcvO8Ij|N+#=d>PZJT2jAW8?>;aQ2ztYQOb5ZcjD?zm^S4Tm8;R9#w-&o#C_o#z6az zKN*(&D}GD*v^ZJ&$woK9KjOw$q%(*LI=`pX2JZ1MquxbLR<-djM^r%hc3!m$a-Uaz zK{@hV1E+vgmM-7{M-6a-qd^_?LN5zGd(B9V1)epa9@;D^K#Z5NW>#wUsP?k00WHyH zuWH|AYv!c}wAyED7Pywvwf*7j$w9x}A^OCt1rC=F{}KjX!77Ro{(0&vcxOgk%m|q` z6~8xt0EhC{LF;_5HT1QfM+e;dqc?_O@j@pU4fIt|EEo9Q4HC7{Xmy)G&}jUw`tZ$@ zPn~@u@O5zZ^Cw?v_Rl^u9G+^9nrB}gw3^+sulv2>*Kl_^B8tC@w=z_nhePMSXx;ofcZLU7QHQLgw#-G05 zUOj(x)y-q=%gxScZ36WSUcH1XcpQa!*EM`Qq}~Vc9LVoSZ31_(C?mawk{pSx1fMR zdDYBPX8cJvc{0qJmW4OkgTBA^Gzie_2FKQH-*vocSF_!g#|>Ls%bAz$$C&y6i@RCy z-LMi2_UmZ3;YmNK)tH1acw8C$f8S&osXc{ zj>Sdcc3=Sn1PixIC z8&~!HcvZA0nRt3xGcr}v_z})c7%szwrAeGYxMBvj8@s47wS5R24dG7L7v%3?tvkAZ&N2pW+4q~}x)?4s+ z?eWI@r*My%TjPj>n%v{0>Cf;b$w5~hQX{LMPMtfT-Uh;eKtzk|1hr{!zPzH|B%H9A zsEFVzs8Lb+OBHI_x8&%8wlQ5?TOrM~wlP^;TO;4eaU-(aB)i?@()7c;;{tC|Z`8^$ zDZ;#N_BZ`BMaZsUt_ve4#DNAAy+!d1y-u3ZC#cbfrx{aY$=jEi%^m_Zc<3ux7DgI3(z;YJ-p z!zFyT8|Gj;Zsb0~BW_>hn^?m&GPcH|N2W`)U_WDlD5|y}Ya?zo2ChgYY`9f|O33!4 z@cF=qAZ9;lt6DlI)7{DkT(Lc1UI~mb^NrlsINSGSQ%_VdV_MuP(x-0kM zTF%aS%b8j5!~y&VzwGc7&WKdx zCJN{jiNu@&#*8||OT*Gy7PDIOBJVuRtUad5$f%+BVvH{1uMaCv z)4ypAdc$MChVKE@R#}aomDFqHu0kL^^}v8SiqO`9N-i z5inTu1R96lM#^LPu>3T#Q~s5qKK5H2l4BRt`0+cKr|n-WrX4Cq=mJq#F_Gl!ddm;9 
z;M_=5!lDWE150oruSC|1MqrXe`z9wyDyUpFdnk7|ozXaB@j8S4?VTKQ6bWb1{s>>3 zSbzbi#Rp>EQija4@c}KHLe7Pn2r`mWkOUe;5f0kAQHh4c*d}AYM3mCa*uTP@&BYmx zcrA+Ns{A4{Haq!HG+A-|rjG?onw^^)sEU;s!-d*hnj*2cA<;p$i@as2LC*=f0392k zALsBVSv-g0-H^JzPaf)T7z!amkjE+(IME4LS>C3T=iC< zTEgn#I#BKFfm9jy}%fMHcHU+`1s%oVO&OB!5UVE_Su*E#|6~YH_hpovF@MXBSJ0 zmBqb_bBpuRu7!5)lf~T$T)}H7Ks=YZHLg3gO#&%K4rG^N0?JcT)ji-Zh*7DbZf$$1hIyHwI8v9!WzJ<95_z9Xi%DW=LF#^& zE32Yle6@oAD!9f=4ho|Xn3Zy%PzMkg0p)ABnE(Y#L~sewPI#n}Uoo|bgh8U#=2H+& zG3boMSTqAAd!nkSzJk!qRS7Q0P)C`SH!vR)7IFRhGz$u!kwpSi%SRbFzg{xRv+PCmkGi#BBywiz5KHN$iC7;3lx^jRQ6- z1t-q1xQV0AcyD6KufFyLvY!O=$p+M*ab5*V)pVw^WoR1(T)Fj0%_lb7CPau}ST*M6 zwPq*q16@Xy0ky!p!zk)AO#ViWvL@EndHVvJIN>)bXJd;53ft!|(9GB(xNHmFL8M2< z2c}_XJ4|5c_>XSmRQ`*(OXM-Zcfi3oSKykw(eJ`F!Z75Ou+Rw_lrNL3)zemF)U;U< znl^gK#CL{W|8Kdh5(>&wg{t|Usf$YXaVw*$aVtBIFH=|cQ6$TV&TXX0icaKq#G)fo z4JAR<;+ASj2X;vzW+>9-Ds{7LlE zpJ&0$gBEkdY=*H(U*g?47C+4*<~g~e6QpS@ejk@`8eU(jRu?PPg49z6-6xB?6S#u6 zP<#~g;%rNO+Lj=<>8ynTzoj+?`PA;u@@a*8gQsf?;p}O@*BoBJ7UqN*q6bZ_dy*5p z8wF(_+yV1pvoCFkc*Zk)or9HltJ&T1MXqoD4?ZAY9sR<}8P?0bs&IZ-c5j54UP6-u zjN(Qv2~qq0W^>ee%WufIKj1vbs0!@u2H=#f|Cns;*qc4b;qVv6JVW&KXO~y!KXRQXEXeb!d6i@V>Xusn_JIzFY3@Okcb^ibVqj)Yvvd zUlopzNco>=B+<6fUQ6P9Q;X5qqs&w?@R#;Yp;$D4<9-s1hP1^IzmLKY4IGbEn#_Um zoDl3-Z0s%gIF(DK1~3_FM<>QiCPOXxJQFh^w~xAtxD*vopGjBiK-jm$?~8b1MnWbD zp)AY9bT#pmdnmLitebRQB71xNxC^B=AD7SH9k=0BOgraKn9Mhnno0ywR9VLPh{Guw zXlsmwmcj$4*fKT%qPcb_>1e%(9CWG4>1F1<8T@DR;Y-*7ryD21)V;T+rX9vnHH#OM zvO-mI)DBb?VFcAcN>VsV-U=0%1_F&KO3@ZXij*!{4yYNKM&Ke@n!r4{$q;4CW2g_K zR{~{0-o>jnV;&=#0`uTBFyplM9>OYN1Yxyl^M`K_WITj}lxBk0NnFAED5h$+m}#wc z0+6DLOo%0nD~J>8J9%L85UC!*=5QDzUrAsUZY4iz;X%$iuon^=Sh~3{i>|6lxflFWtx4jE+VLnA)Lw5nq-2YX3djU?97Yl~Lba;cCin2lJrs;n z=T__<+^ibSNt1c8;FrIvsJl+rux)UjXah9m)=r)wSJR6L4x`vK$-RSdf)pMT`cvhK zm_^A_&?_NCIQ)3sK^c=&+{BjSGO;Ee{0lCT z6u9$QWRG?)?pmCwRyCisaJdiF@(EnQZ=(QyE@R|__rVAmrh>rGINyg{5i3O|OeO0J zQkl#l6{W_ek=O?3Gtn#Q>X)U)9?E1Joc=?bq{EM=_AnsM{o(xO>Dpqr-!Tu8c_aVI z2xsPy1Jv}ZTcCz~DBy&QD-BMgn5rDl+}LMDNZda#{UxLY;Qm?LFH-LO45J_U94#d? 
z9|gv-`FI}hGLc}k0Zn!Zi&r~-x9ztR)FPomV|!a>9gUt@$SJZ^36GV8{QI)w$97n< z2_;fPOyH2iNP+|XyCe?KY4Dh?p;ELs(+{u07?d`P4k(R@7?X_waQ!0%WoooAQL@TAal{95r0+1gq=ZU~uE= zQ&^78X9)pl7sBFZcW@pj03;B>U*q?RHaN)+NwSog$qk0hmY*ifs@U-qsts92qsid= z6%vUYhCPDJe69>38$W%gWJ(U`NJbsGfUAWoVA!4{8BK+u8ggIv+T0f@bz>G>DdM`{ za&Ll(^WsVwV=aVqOpFsNbdCV`zEeY?afZ?$ z`d3>1kjHJ$IS7T%v16PaKXu0ty5S}OEL#9yN&{&8NGKY?D$4iW$WMs8i4`Oh8+%C+ z!>4RubgVH}0JMg2thJMmW43D3@I<*nY(B?IV4qjajz4|J&_qK`0OL68q65Yu!RX5J z7gZft>;c1Kd2Txyy}uo?1e+o0416Vb&-`jaQy^h*p2%QXsr@9IP!$Np28+8*3da70 z$<>Ph@4#^PPL0xytq|0hk?tB+nAa z46QT#BznMMJcU1rnLmTC?{elG9nSFjk7CY=(*;K{>;&o)U?&4J%2`;(v3(qr@AQ2+ zoER41QQ?HVxXzKWtS?~S9^0!p)e!xZAwH9cCbGVmM&j5^SN#s`G1pI3L>H%!IaoZH zDL@(zWqt?);0ASM!{D(6C0A1dAdRB>K`?lI9vmOz&7Dc;{v_+lu7^`c;q0@`_7~~I z#YthwfZie3qy^uEFM(lV&r68ZF@+tS>1-L@(!!Qj#4s<@;TE0+-4Tdyce zZ&zR2>QgIzw6&O1fN49`>jYbY|Eq3cS)QUEf9kdin?BBZZ8jYMQ6*{3VwS~QEUvJ) z%3{odTh03CSiHkRv_o8w5%mzoNluasijDMg*4s0W?ej9O-HRo8-ohjIp;Vs0CD}~a zBo_`DU40!ja317GFX8TI2V2w~ZqeIfkSsgtwSgSSOH+T}AN(QHe~W zA-tp~%^Tz=3&=`>TfxpiFL7#;@0NU`+3MhgX4I$}kAt^Nx{EjI*}CPCL(90tXM6xR z37-+a%zYo@iNlCtY&2>q7G(|T?!n2+cG}Lx6Z*vRdF+pX1!a|#X1D5@(#!g6{^$FBPq857V`g3edYEHO_LSw3#(EI?y z$`48Sr=0q3Jfl~*;uYu_<_~$RNc89NbD$j-<1i(Yl3hgXcv9DiowK>Lo>Rn5A~bC5 zLF^FgUK5c*i9{HZVzI!%_U9;t+NCeI#45IV=xhQQ`Q6tEdZ3juuH?)I?EJt`GpjpKYR1LXqIm-RF zm0B@+#OC}H;Y%v4*+TbNhw5kW^`9*7K`fryfm~7Ei}}D}JR3=D60@VHW$MC&CI^#b!o=ME#_?m`@*^{ShYUaNq>K`SOkhuQ z(!v!0_es_?u}S38feU5R8a#93kg=1JD_{~v%m9-x%8R$Cp>9p;1uTjC$nP?{>yT#*sG_2!5PzYE2meZ_=&@6Dx$A#Vc6=*SIV~-f zMbDGCf|pQimsxW_WvZVnIZbhdZ{h1k89G_0tnq7jMeN>2uF$lVh2}h-oLRG8!GE}g zMjioNqXQ{cpXlJ|kB&d;s8oNIODVD7@@^4}n%@eVfRei7zqkaFHws2qQqzxHZkxN1 z=wrp6TF}4ElqH<|Z#PX7THc?U=0^@BA0$5Svmk@&UtlrIf?tZ?brYMaqba1wzsdzP+TvPpO znkoEWK=2E33WNNxv0jY+j}9gWMuh4!QPmnOv`A%r-qtX`(CKfQCGn_ieOL0)8ZtUv zO?6|IDr$3{UteRsf-a=*>XHu8F1KH)@ STz+)9x%{Q&h2=M^-v0tPcl;m# literal 0 HcmV?d00001 diff --git a/python/tornado/test/__pycache__/log_test.cpython-35.pyc b/python/tornado/test/__pycache__/log_test.cpython-35.pyc new file 
mode 100644 index 0000000000000000000000000000000000000000..38538330cc69f3b4f9a83ef6ad15884a81c62abc GIT binary patch literal 9233 zcmcIqOOqSdb-vwrV|v~%ks@VqL`np$87nHqw%(M;q09;eQk06iFhjc)LE zL*l@iI2Kh&cB*Xh2eR@HNUFSc*{N)^N!6`NRUVav7Fl}3mA`XtHwM69C`whvun%tE zN8_Axzw@}KxiURn{Ld(SBAmz-CE&CB!30eMY?B0DDt(59+Oo_F3@=DtHb9;BT4- zriJ$$_x=FAGomrey|Y8pD;Ss)jd>oJ9~wAE3f!tLd_$X1&8k^^ZGYH#xqE;BFnXYUFv9s3;>M`Gc-ir5~D0XA7;{|c8YN%{4-ur1aqpTZY^I9l7ZXA13 ztu(y7R^T~)*lf0frW1B)eWaGXz}@vdrz^cUK1hf080#j_xaG>olejPTf)<_>#Z^-o zJvF=QMz{etO(#C+dUfJG{Y;@j9ns4mu@C^#BO$&%D-60Z2~m(Z&Op5e;pZdcF6f81 zKL*{ZW@;+mY6hY7RIv`hnxQ;^R*IjyGQiJ8Y6iXaeP?iQMbMKE`FxVpdhY;i-qn+T zBjgfzVDk9RmDhLfK$>Xh)|C(3VCUU#_o^GaJ2%3*>+fuPo$gMz)m`81wfx3xq{Y+j!bl1#ufbv z#1mv(kRHb6WkfqmDkAK`q@b=0L555U{W)&Kq;Ol{Hn5t)l}svyDw$M@PBN(!lVnmU z49TPxpz=jEH;k5TNETfMUl-qrs>aSIo0U&qUVrW0FMoWPG+sG;UH1$%DydHN&<}U+ znU#whm#TGm5&qD32GCFB-oO@V zh>TF>HgDg)d0Sc-w+jwGBSHhv@3_9oV@sLQYX=-hA6 zjVcgrw4^e_NM*ZjysxtDuob8x%?sR)r>u^9-*fiepyA_jg|uNsUVP*z%5J0aGd+cz zN#yx^M{G8u#8#bSz{S^@2`OQ!9xj88@y-)3}gZf zhQC0Aw99*qEi*I@sEfCp?{ho_`y{(LZ%efOKY^_@)ni4`i6e4S6>Jl;N^f1%IYI zgKjl-4O@^Z)zo|te`CzZi|BhFSH!=N>z_Tw!|)CcbeYAHGA`C-6Akb|x`RgnE2j9m zCJvb`xL#-%76y6<2b>jcguA@>0{w|%vgM+{Y zaA5nt{t5eBU_{4bY%WWNt=3kDiEz3IGK^*nla#}oZ`LBx)i*Fg3T`Z_yiBckiI9Mj z0-(ZfcacF%G`~bvj-0;sil1OjVu)q)3DOR)8Ig7d4`6Bk5sgIJ^Fz`i@6bX~m}QWY z@QG+z=(s@Q$_6W=$h1USLr@v4PE-pt+Ka-3yI@-}07mL&5DYMy6+P-2)O%Ejb}oQ5#U#tX)5=0dK&HW^pL z%La-j?kUYK~L+>?qq&q=1FW+oFa$HYu_^NcFt(yV>k{TovJy&!iWGVj0{Y7 z+E~t5#+=z-pR8(~LLPfctvaKMjzdA*aa7rHI$@)Sq}@@L8@-g-npJIU{O0TyOMhE_iwuye4u83p; zgkp9&(7n!!3_6f96r3ytzhtwpc?g4)5U~qK@7PJBp%1P7_2i_uL8{#F(-YzeoL#Gy zkpcC($PGRMJK9poA*(`iHkU=}KSSg>B8-R%TFLGsJuf*9hXu#c+rfAyoBw-UiF;o} zT0V^wsDJhZPR25GirMqHuFxaig5I>TK{H4q`+r~$ZHQIUjwu{{TGROupH!(IM>1hD zOAGk;4Qz8x#!mivQ^3?d0 zzo6+Zktf6Q+z5yquAY+24Tt$d)c7YnBzgfPg{lRx9ziv4l#GR8xW4vOaGmr^0|--k zLkayAT+suN{@WC62##D8OCXDOdhU#eu?zP$z^-YslS{lChb@MWKK{cCLM-fmTLbAq zP2AO9Dp#{hQHb=C3P*PNa~Yzp%X>BOSkt7xMi}M*h_P3b989wFFNGwojTIF5nzxqV|pYS-G 
zS;z7kPLy|vBjhAPQWfPW!$H9PIdyP&rD%<;LqrswEn}fzEmSh|iRh(3Wm}iNUyP`> zODR=lKMe0j8?`z~M<0qf7X{R* zGAPHC1e*8r&;UH#PxfTkfs9G{ihj9LNp4@Nbm>(Gs<537bl^~tsed7vlZgLD zs)vnMuoqrj@J*Fxn z_w7lp7*tC-YL^BthE&#-&B)GQ|KQfm+uN#mG_<=T z%BKJdo=CmnXtt_mjVjtvgjY|=!Tg%*qn7p%%T727S?uWX(#!4n@qb{RuBlGsZ+PnW zV&AXZ2Rl7*j7Sv!qGV1TFGQVI0Q-nqaqBbhzehxU^E-%o>`^1KKfTfPQ&&e8$nN{C${KCq z+8Yy`9l`ZmclMbv`5PWyKGRF!|2vY_GWwUkRocPA?hnLQZN@L9n$(Qev7c_ zI#jPmlPOhjoJLqjQGy(_q=Jw0xuh_w%Jnday@#>i+SSL4GcfY$ZEniZ}+_utw z?&9(urT_SU4VqQtA1+?_@;d>$xQ)eE;9I!f%Jno|Fe3d-Qk*KjsmsG}lPr9cS;K~o X{o&0DoxtaemCSPC)Q2n9ski?G0C*)s literal 0 HcmV?d00001 diff --git a/python/tornado/test/__pycache__/netutil_test.cpython-35.pyc b/python/tornado/test/__pycache__/netutil_test.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..96b92bd17d351893451f99cbd9168a64b1e5df87 GIT binary patch literal 9483 zcmc&)O>i97RzBVHtI^0B{aUgudo0CCCXV%N=jT0yapWiK<%xPx{4Ci{8ijtX>6SEB zPxttCk7Z9KRDlBz*sy~Fs@Smz8`!Xh6;)JGRI%+XtL#{N#dl73&x}T+#F$XD)pu^+ zpL5SW|My(aj*XT6_2oZ4{PzW-|D?X>GU}^%gI{HcZ2W2D5;Zk4G-~Q(=+w-Rk)dXm zj4Ww9LMJyzMovbfo+l%p)(d15q@E$SNX-#4M!IuKWR%izqhyRqJxgwxnqy>)QS%HL zXQ(+&#yB-AWK`mrIdUhcIZ4K3{GBIvikj18OjC1)j2UXqk}(_i705kH%{emWs5wu@ zJZaCTWtk$m=csv}jPs;D^5!V(T%f}eDbny@Lumip)9AD3l@!ATG8U+R=+$)S0)|e| zXH1C^axYTz5*e4U(uH(tm5gdyzs&W1Fe(L8?SUzgyGYF?GL}gD*al(B^fjQD$$5ib zfcjVHV|$G3Gtzn{ZGDsMak4AiXuL(vJO)jWJt>1G$(bhS3cbkE3$XHS^i7dHEq&;~ z-<#ZL)OemVM~~kj=R9-vut@d{z0fgXmh7`K;p{PB4DTws=g6LyZUE@Qd-qg$$s#3s ze2wgLbf}YkK3)fOTM$4Cg2^&(8H>b!#*g|SE~FS3$h9snOeC%q;6u++ho^d5NHSj3fjhd=y-?h ztI~0`uj75jVXOA;x2$@#Y&34|2EN-49kbDF`6`TxcH?OyX!u@qrllHQXzsPWdMMvV zjqtz>LMwEdju&p$v?zDaZ#f>oRDSCE^}~j@f8PPPr;ds$+Xu?AY{%}lj5Y%EspU3o zv(bvmyA97a159$l0FY+ZpVgffud@|e^}{va3!P_SEfbC137&iPZ6^rVtiXwi`;KRZ z_=+k5q3yK;$2J-DnjRI~UL%y=@!+u0+SuFlH=XdvSBF@lu-6XT%Hh?<)+|&~%+N5t zebflT9XQPb5N`qV6nS|KWeBr)#_ zNiE6-q2G$KM^*!ZVFiJs!VNFV$83uVcYbDWY_4yE@Y%p|_o6)JwcYR=Qi}lj^7FNo 
zod=i`?A%-X#PW9Tv|4wq(AxP0qqei{G+R5ZM(f&cyW!f`yaRQ!xnB=WM=$gGUwGi*Uxcr5k)itB0kvMy0^Zj;1Wq)LIK89 z@naM%+lCJA`9jaQSVBiMBb$lk*@TihYR{@W@1QFvq9A=-8`CDW&fBNaVQ}gu1hr_I zp4D_rGb)*8)3@6$*UP5)q;0uzk9q^Z)fg9ValvfvD;PI5je>^-Ox3bh&PfZCxBFyJ zTg4l^mO`EFZfadAU!Bw{24BX@zrikwUlZ-6D)mRAKOy)mrv1+@llI4=anQ0@fuT>_ z0#$?3J+g^@dH*wRfD6N1T{?60rA}eKU*+n&96$W43SLzd#k2zZr3z* z9(}cpQE;Bs&G30Ek_s$jl$VxW45?yBAKwcHmS?+;`Vh@RUjm;yAD%*xSmbvt+hjYi zd`r|NxV@YJ=}cH&wc&|LuG)UnYIq`}>MY|`P@}wdPg)sUbpeG@Ni0FI;lUetbtf9> zTJKsW#+f;*IHnWq)154#AsQ3mP0{)QO+g6-X_MJ8ZApjvUU{Vrbyb+HDI;6Q8!)_B zg~OP^2#NSx)Ht@PVV3n5-u7Y2UNCv9mf1XwBDK6xj`w`uY6MQ8euPE(NW+S(-b9h^ zv!@rb>!Bqz#!X#C z@jl*ET}P#bNhuH0mWBCaya7uzG$(dXaB_bjP70LkynnadY(B3dhHxb~u-KDV{k`hG z6Q)M23OiPXw}#ABn{fO=XNrGTs;=)Jwp-ObtKmBKCXAmL#^S!!a9F|Np0k$P5Bks- zs!p3c12Ko$w}{}$2$QaOTiuOtQ-%vX0dL=dBm;kx`k;Y;JBP`@zM|>DxQ4CGF~MV) z{08r$0C6C75I4AD_Sz8Y5J5reuV08LnsQ=cBM=oDKAf1}g7wV>?H0Dn;)}WyK4_@| z>Z0l*YKd%t8Y8AzcOj3a*)t95Cm6hmH!%xSTIbv_+?NG(EjuVFJiSL?ti`TSEpfpp z{Q_^mWG(CRT}QS1@b>u$nBd<_R<^_vD8UlrBqxNBj}; zidm#^gw3`oheKw8(6?4nKL$Lt!i7-cIO$`79PJacsDj|u?H4?ewl0ri^0Ao3G-RNU z@W$?0o6=@DOu2OGreih@-#Rm;YvD||p$x&IB|-hT{18Lc%!grYn>) zlUc-30a0SYcaZL{c?5$IBsPIY}tU`Q1ElT!cDNsYmV;bd|Fc2d?n(Fj~MHi*ME7(sZOB8vCB* z8ad0gR8zbPf}+Cq#wY8aKG=@E42x`3yyaTWUE8|-r}zr+Qs*awQq|)_d|HZ;@12!W z5Q+og{lEH4e!lsrQ%N`3XgYS)Z-=#FlnI^(QHcep9kSF!*$};oSsGXZ<%A+6%(Zo3Ewb1sZimP5!JjhZHMu$;v;%}xf%zE1YW z&E*@NS>Ls*Ne`dS5)jAKk15B`s~?i429l@$cMRDAUNJ-1K)fPD{*E^j`xQh_^Fxq* z4VEw_f6IV{B{{|4$H>U-t32Qpyu;W=@Q@SZ8~28iZjHhG7mBVEhG&31WP99ef=3u^ zLJDiJN$z2ePm~LV^6@rgwlo@OD-3B9r!X*1{K7z6X~^8sA#LSuoA{Nn%}y^}-6_1k zEdM&0m6aQvxtkx}yuNaMg2O%I@{+^_7+PI%kfKj+Xa*f8TYMVNfM0+bJfK zZ*@x5ZU#J}ms&N_N|A<~YgCHO^R{X`Y6Rp?f~(lhEutZ|x0Tr5_AWF2f%!9sf^e{l z|4nQC|2h|sF*xxwJpe;Yt-;I5n!RF)+qO~sJ>FRGH2FRzcr59wJm4;heskI%mf*Gm zDKIBb4jeheM5i)h2>iqW#Mkj12JYYr@VdollI8Lq?pW@)oJ8d0PMs|+KN-B;Z0n!n z1M9?I`!LT5LzYm$74ej82u8??h-5lHPokIkIJ=6~=MnPolpLSa6AyssqrgE8M>>)` z`9q!N??OW3Kgr@fo8#`gUsO0ni&zH{$_p^$$yfA^|@NTQ0g3(DA`kcn@!I 
z1qJZ`;=EkbXq@>49>8FpgD)L9$0w2zLh?cFU@Tvwab|(@LT$ zSeP@$%(dY6Q9xj0^+1YlM`zYS3_BljHV963iPHQy4AR3X#!oE=!P~9PpH4r8Wvl zU}=mcm1Q<`O1F-nbW|^ literal 0 HcmV?d00001 diff --git a/python/tornado/test/__pycache__/options_test.cpython-35.pyc b/python/tornado/test/__pycache__/options_test.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d3551c6d5ca4eadeea7c073184ea7cf6d4031dac GIT binary patch literal 9783 zcmb_iOLH98b-q3C84TVa1dx;n!xXKdIWR?$elUVenGzj37A#AYNR1)48`F)!fb-(* z9*LZ?s^ZY)WRs{Wi|kUBMK;+daaH^ayvQo6bX69qR9uCP*Dh_G?>o1RPH{scuF!GOC+Z zjjZbCR3oP>Vq{b&uNry523$~$LJSvGqbP7zbxNvRR*kaiR#c;6M&?v!Ms=&IQ8mwb z)tOb@In|g`){_+(SWumL)m>1H1!djuZK|BNs2*07{%2)bvN{l!VuX)+mU<8kEUM0& z>MpCsG6!xa14|fZJz0o%ChDSe&7UN z*Xsp4wMF{AAGP@VSkT ze+Nx~ohbF0rG9FuLl9PI3#vYaD6tDW7Rt6V+_Cjjr4F-#i{1hcL`iwH@0D8OUg-Zj9j@|5cyH3xw zJM5crd`MI9_M@TG(X+sYpyhn8vmX}wB5wXaDSZb0Uwm}?=I#f;^>^RB{f^VyeRD8) z%L$y_@AsQdXZN1h9qbO;gZkdE-Er%^1AVRAYWllDU-ulhKmHFw?M5@(6j5wEe1U*{ za(Cyju3tiH}FsBalDgp{BD3VQv zGcmtP!Y{};-hp3aVMV$i-lZ`M%R543J%`=sc{I>f(6r}ty)e7q?~fKHXw%$p2`7i- z<8S`{Z~ph+ep#yuQ}qG|FUBPuWx^|L$amAM0t#0r2%aqYCA{-_pqNW9SrzhX^Hg3XLlaif4pC+J z-~UH639Fzif2EXdslQZeKUSPyK$|i`UbK_B zr`{*4Jr|J*S=hPL#9oshDn6;_Q<0*n01!2^a}Nf%09EODuF$k*8A=P0g}x=e-E*1` zw>SqUQ`YF(X;h4G{jJwK)Ik4@t$2815yCw5-s#i|Mr6pS#**W@v8qK==taJWEWsU2 z<%Ja^BKP#rt7TIXu?ftH-OI*T(t;81>oQP%zNT1|GfSD-?5eepU9c8%WoyA2Z9MN_ ziEvObDDJoM@u68`;TXNA0hJKp~ z!~Bqo9aqk>@PNCyMKV#LN%$;2Rgn9vwVbI~qcbOn&JhWvX=V~i{~iyC8cy+C(L2#V zdNpxj0kzS1HE1JfE-?b4gI12r0ErE6KvCF5_G3U~F!Q3(aClVIjQBwMNcCAR9+Xop zxY7t6y+FeJ73mt?RzXP}mepr4)#$?JOUf?G&>0m}Ig`kO%*@4$vN#rtWI>pZ!7}WD zsO`*7Sp3A(dwm#bnz%c99c`FnpfwhQa=jiQg%y9e7dhAwx?$3{_Z%H&(bQ&BmOG1x zO?a=x^j^fQ`fF@BzrMr0vg2afpWGonL2$-D+hq2O7-&pAiJBBdi!4VhIR|#>cD9SMoF^eGDV3MrQa%NRS z1`iQ-p!a~B1RCp5nbF$GlN!{CW0o;aj(G>b^N&f<&#FK);dA6L(XsPlc3_M|^n-q^rr-{ep(1cJF z)EquSsMi3TCM>b>H1;$diZa{?Eysk8l#9^XGZe^-?aH%cVBYqf?x2&{jZZO0BJ>hn zPpJ1X?1`h~iu07`X|-~a~yvG17YEivKffq#aMKQ(``Ugvi7w=spjg(l3=7s2nn 
z$sX!&n1$?xxpv^`XC2JAK4N`_K|blMtZHV8vm?&LK@y%^#F&!@!6~q3WCZJpJZ5){ z6mr|xRI=A+ZP3^n_;{?}{R<3?_c+bYL}FX2_tc=X4b4FYqWYo$pkT>46RS%Xt=2~0}}x>afyjv z+Cm(nNz}qlL!y`wphS@CB)rBWr}-2xQmTe?3S zG>Ut+07eEazCl*f>-W6JZG_9cpjH_hIth`&vJGw7*UX*^`zUONUfq$mm1Zk{GZFcPT<1m?oz)lo(l2PG20nj{Zhurt6Km(0)w!+-B5j+_-w<=Br;H&DZP6LfhAG_>CNAZ{#@P(Piux!8w0^_wDw7 zA(_+<03V%4qdy|FfF>;ZUbEkG{i8abYBMQ&D+i_knbgB3^bCS z_{^XEe+8<~jVY04k#OXlt9Z&Lzbqx1->P+fT1+5QO^A8oD?h;0Q;j9W+=Xy65zE9i zL2;KUlL_+#ABNtff{_ZGDv|BDiWS0A7q$2{BI|o&{*rCT{hcd&SDL91*J@7WD?-#U z?PxxpMLSbxi&sNEh?hS9jzLKnzYbbWsjM9vGC?iY6~wjt2*4kYT1KN=sT{tVqM2sm znIhM$A=j%{Ub}MR<~K}ia33flnz{k#i2Ix%o}|EkIPn%G)J!M=_4*IYI7zt|Pehz) z{Df?D2o-#UY*e0;WQ)yLWIxsCD{;jd=23E>v`a-H1R*>EG>NWYB@P{-_RTf>O)BI!S|f?q{Ly`4p*zQSnz!U@_pHMKIaCLC}b|G*yhP*L`qeT_@JPW!(xnNO>&GBl86#d2mjvto@do?yk4 z;R!z|%?UqnnXevuX@1xkO3PBgh><YH?pL5@MXBFR+n6h6*kB6vh}H?MhV7f5#DxTf=)=8z~P=!+_W0#`qU)!mNktI0i9klnjs6geX!&Kn5m5A7^CRC>umC z!l`tx%loj(&Zx{P8{!%NK<|Luq19^4OW|!S=8||fK{@%!qUFcuhwKMMK3-J28A_A2l|cQb4KP_1vPr}xlCq_x$m>E zfv;nXuiui{Xcv;RF}jn7@u|{RxJFvPi>39SvSBqo%sup;`1&57>1M);`Em%$WBRM? zWzdu^wBYit(@CxXvU|h*V`mLFr;7T-Nq&b(eX5qqm~;c#t5vf$QH+^I0y2xI(boTu zIJNU3uAOP%Y&)#jcDL^iQGm3=s%`Td2j-1_0c+|FHZQZ;VsnGdx7e`8sNZ1oCY!g} z++p))Y~Eq>AsZIs8wLMidvJGOGYQilvuUw8WJBAg`)nvd8cthD#V|!HF%YYKLH|Z# z_*c+Wt!klKuC7+A_!O$8Y7uR|n#2Emb?)M7^#we$f3{kYk+oGGcYhcRVXJLT(b5!M zO`D);ZS_ysh!ML4s4*XXQ(~)cZgq@DXVBBA#;=A!yQ3L{Hx}bAvUN-PdHk^4jKA(< zBxmNXe9=h0D->qheW6REbPuC$;|fXkI_>yj>2<@aH@J2GBAU$%sDQF6FNPK}SvdV! O>rC`Ni@sT0?*1QQh2c*C literal 0 HcmV?d00001 diff --git a/python/tornado/test/__pycache__/process_test.cpython-35.pyc b/python/tornado/test/__pycache__/process_test.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e6a53fac57d2d1a26db65465251d69dcb137b1c4 GIT binary patch literal 8794 zcmc&(%X1v(aqrnT_5myw1o#5Q6)Bor%LE@v;)IfE$si@MN`hnpBB?Q)(O_o)%wlF| z_06nEY=J%qF3YM^#n=1?Ne({x;!Dn{oO79Da#EFVIi$)b|GK}~UF;HsY?rGHU>e`E z=llBIJxjG(`9FXEn@0~?ME^@ie)4F)jVJO9A_sp4`9y(1ra?i5%nSutGP4xq$jp(! 
zD>CHg$;``Yv$ULL_GvuG8;2fFf zC^%2%c?y=vT&CaxnHThmGWoAiaFNW56g0_f>d^}MuTpS{%u5tpCi5~ztK`(^=S1df z#fE&%!5`gnX|W|(C^2t z?X54<#l6gyWefc(tqm3yH*t2u^YJEc&(L5%I|M%e7x#==q5AV zzOPiMT85rjchwgz_X-a_`XCH@Xcc$duH}b2t!y&89jZO6r^2=yMQ)T7V>{ZjJSQoy z_qWnv>{wjK!F>0fXusPYzYQm^Z+TtE>UaB*>sWI1nfuRrZae-U+(~M(e#4F8RXcK% z62CS^4`(j*yI#!gq!#UYy$9QCVfT}+_Y70B58coEn0epsI=-ut$~(QD@3n2dewzt4 z{K=xRiYIy{jsWSkrf||j< zq}~p@+ulxJfm&_fj-sSwSzSADEh{OxT_)l;-JJ@MQn{lYRwiAy31MOzY zUE#<|Z$Fu1Mp)jq6>HKg;(bv8fQ&g~#!wg0dV&5t9o+$Np%Fq}7z>yW$T#aqmcAm| zMproU3ykE*0XqsZo9Ef}Rxzoh)aL89VS?l*1DL}jEQZB`t_|@5JNOMndu~uH*bfZa z&d?4dzyQYF+QdN_)7yBWWq_9*6i39y!Rtqkva;jGmfh>EyyY`_qq{5VWGb4YqwD8{R&I!!3Opmu^(mg;b+X@sH z$LmsjK(pS;t|dA5nHML8$lK}Keye0wJ{8E$ztgxzHt?Rj7E0ZXh2k!jKPgRfrw1xwN_4@W0Ymf z3PPvvvp!TU>+`2co#e@@SQjzFs|KhJjWKRO5m}bi5&+uY}(i>M}2VjRA8p$=klIf+QdHdsx}zq0$g- z%Mh|b5@sxIxGYPPk140V4{#k%DhYGNno%}tnVK;;b(DHTY$NiOu&0Sf$kzwgH2IiP zv6vB1;a5*GBI()Gl!Jak%mNckNIB5i^5LS%R7Q8VHKiLFgfVz}WvgCabnQ zfQN?hJR_P|rUOwsuuHHW-SoJ-Nx8T}du39;Bjb6NjKN#7t4ar1>eR?7GN0slWr}C{ zM9lh<;DSl&%#Aqs_*2Z_4_JPdr6*m3p6q{Q@XMEIuR!XaU;$zn2|>h&pPLn;Et&Q#i-RiWezfV!U7DWM#I+Ux^iU zDugKK7kFQu4q&WtPYH5BHG}&IX-56{v@F31aoaQc-dNH&amn+7Y+5*ZQBm%GmJYBw zrSmlAbJRI6On`Ph?!FFgEep5242~LKpw27goDza8a(s4(;V+3kW#T^uFMj;+3E+C` z^xELgwe{H6+O_iCp=!C&fmSsPv7L0C1B79Ae`hyFzwP!oBx$u-+xUn7XYsK6j%ER% z_rDZh0N1p%-pZ^E7I)*gcjv~9TerWzaudI|?p$%MM50M-Wf5QO%6};OSiLx2i>6U#x074-es8-{XYmzD1QhW*nwd_1!n*v~A5Ex2$bwnE+FNyhwcTkxaB zOY3;#Ha|gQ|1{qQZta%_a|R5@-}>Y34?uQ9$o>dQ0;wHbU;z#9GU4I#Km`b6bdNy# zkrrg=XBn*p!hy`1_ZTlmK$_onSsSkGeNU zt-l#u7zsqv>rPqdD-MFYXKl;tZijF{EGI(o)_gStyjf%8-JrQN>6@)^MwwuV&7a>P9`gP^o7YF^(r& z&&(S&4q#q6SzZv%bGV|u$ABH5kW$>B`Z0G$0XrP$-}8XD$tGIdkf^$vsn!~E;%D0! 
z8~w1}y^TlW$P>`{UmLPcuHz;t)NC7^NI%0=+(}!=7k<%{=viz3(DFoJcPR|1jpk z2`TBZTQk@DgUof0Rod~oP&`C`eNGEv2l|&3y%4p4u=j!xis9#&S+kX|F9}QsAr)6I zjI~x?$B^H_BK0u?woOTK{lR-1_aA4pC;O6*- zEM+UA?CDO_CdLkJxK%>*yLaz4BQ}dtCb@QX*Y*AIYO5Tvku8g&$xu0($AGfE$Zd{| zWcmLvNUbF?wdyRd5WPOnExx~6W>5n_CbSws0Go?)}d2D1udcQWPM zOiKRA;2G)rZ_t5Y=(JWR=B&tw(jh0J*J%T)txGTn<1mG=eWb|6p(2abI-XPD2YL#Gv&b|ETBjI47 z$l+PV6a6y)gaF)=x{9P3EDI>35YkkU3N}uh?8}n0%z}lGAs8slb1X+K0BtPQvY`tB zj6ljSX`KK;1$K((6XfV)hosgcamlZVqQSbfIhBiaZq=&kAYYtFc^u7;l`^TK7(R;f z$MjF;BjkOYh*+pT`dfB;?}*llt0*fs?yIiOlXPW^{X&w(xOTTAqm$G$BfK5SY5#+X z*Z`nR5lYJOFWiDWgXLqHk#2l}9c{)G08Frh8Ke2ZT+me*wBkIwfRVodZ&KnT!zX%_ zQX8BQJ`J@LeIS89KnOWhPE^D|XyxI>y53bD(^++R^fLx~3`T}AsjHo7Br;FkLjqTcD8Cv?Q z2$ER-IXGJQHU?@O6Aa|Ihz}MG045lC_@^iWD2>EN!XK!{uNuT00WZW9Ju=dw6*EK( zk`9Dm(2lT#wPJ93+Fh{~xpu3i%MP>C?D!0ap3)Svs%=awOp+>%BYl-Wwv#VKM`r;i!TtIk*F*K=Ck4wr!G8Q=GG+!5(KmT zHyi0!&=s!1rsYG9HKo=#Ih)p|O1y1$xXlYWO-`$F!$Evp(UrTo;Yd$^mXYF|l-wk$ zGS3d@Bl(hU9XSNo*UZs=W*|P-=vp7gN=hj*|jswWc_2;*{Oi#%nFTo%Vf0tk&`F9!5C78;kY;~q?%ysI< F9{|n`Xh8q~ literal 0 HcmV?d00001 diff --git a/python/tornado/test/__pycache__/queues_test.cpython-35.pyc b/python/tornado/test/__pycache__/queues_test.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6eec6c539fe170c3eb782ef7f852daacda533b4d GIT binary patch literal 15982 zcmd5@OOPAKdG5g*SYY2=YDJ0yDOnaQYfEp(3Mo<|C0eq)p)7{9AuLgV)sX8fDO(};p5~`h6os6nx%yUw;v#OI-)vS3=sdi3v@~WCwor0MW@0g2~OQ_M+-6sp^vIEUW6W>a3{higGSCbE=tFw=sNPxeIC^ zlUh|5n+4UJliay5_Xu)}s#%iUQkc7j+J#)g9EfhTX2j#O2SWWyInuCGjxZphl1i|)anffuWH@XD9)j{HgLUxUwYZw>ozXq_I%xQ zD@i{$?6w9{RP=7O`meZKJ?%dGyq|Y>-G;PjaNgl3iJNn{ycdzUiogI001Q9{SO7}^ zk0!n;xlbuKt(y2|__z-sWmGc@YUKR+w>aBZ>RwBx(*WY+!x2)r&g1f~B6pzFO>{2( z?B(Af-NV*e$+;baYORq>`9q7(1h~9FPQAhdg%HE;9n0sZUsMyp#a*H=eD$mz(-Wayp z%_qC>>SsFJ4e#2Zr@QrLZ~Pyd7O32~!s2qp-##Coh;O8drdPofT_m%oosoh6|T>uzvCxOouh0XAZ zYzDIpoSoz9>c>iLVY8%&#*fegHX|_Ss0+9PJflpulj$e`ihv@ssh2VA9Zuj*0*Rd( 
zADFv>%R7!_TXAn(Uk)b$94qz1gxbldw}l@NM8r?6+k3y>8bAysHlFgGTE)>1qq$Vk zCvo?A;dHy@4H~^p|E1^IJBP}v(UY^t*3(T_Z=7zErQS=p30qduf;fEyjeSBg7}rlj zN^ZM;fV5Y4hV^zOsZX$))AG|T$W||yubCL<^ClviecpQIOsFDL$2Xzi5QWH|s6W_5A$hy}m07F^ZEb z_4Y8xJ%*-!8bV^Q7lP$AV9fPXTf=r+Kf&&0w1-$R)$Da${UB}wFbQ_5DFE*o1MM;! z2$qV1r9i6w3bH+xsl>Xoo>+EPdX0Jt}vjW3w<8^s-sW`ng)ITkp8FnqR2Z zI=$ww&HG}lMu?jdO$ntpn9MQx3X^g?;S}{Ek_T{k86-ugc(j<4Jc8GLQn)*Z%lk_t zQRVL%G>iTR9AFy)d*JnLm;(|059QCAS!yd3v1V3s0E|MIlam~Pv>4{V#zU);g3T7R zDoDi|*huWP5|a;U^Sx zvTL71hLylHJOFbsbq0_n}WosOwtu}W>6%kNr(uFh{7_<1usV1 z+<=U`)#`57AVW7%Cah73qEm84C!%vV`XpL?YBlHC{#hjVu^MjAdsxgD&}-$8#jFbW zyS;97f|u=jAX2fOlhf;&Xfb<9k4-MgScXWA>;qfERXCe5ZcwOK3o`7h-17Qv>o_rM z9W{5i;r3~(f14{xA)%5OJ#p9dj9c23CDE8y1D8h%8D;@tfZG^-BA~@y6!fvoSgF`` zxB*0=JR;&%Y|tyHX1JL&8s6YDJC=n8pOkH5kX8c``ntCXX%`iAS}K-VTN1(T5g`#( zzW6PHS$G|ljQUJ0fY%EUZ=+Mw^qFjC$-*6(#U*6<32yF!;hUBY*s9P-$#AQ(z~~>b z)WZS@D6@yfUWW%j0flzT!8f9$kVoPkSWEUw&TRbc@2vjO#eW^$J}aszc>2*<(NTVu zJ{-I=KkK=TUbpF0auGI&cFDGSwRW%9_X{v{+O;+uKs{!QrjR~t&#eg~q8qM86AU6- zc|{~fia`B1k2#}9rg<^d(lUbxrzs@N*ul+AW*D2(5|Q{6$q86~2r6QG!qx#w9rYOk z0?__f#3y;{o8Jm3ECxzo*^p;RV=V}Y)0_Gn2G%bl8Pl2aJ4QzeSs|3>R@a>LSOiy6 zW9eI+BlH5Eh(?cSNWt}@#dT1ti_W}xBqZc9Z1_c|2$O&G&@?F{eOn?X(J+lmhe#D|s@k)eLAf}ahu;$O9`x&INkUW59vb}R?>!-c83%`fvPfP#^eaq}! 
z5+T%slZ2}EwM&c2ZSJ9FAO%RWk~8m&zIZ6P#%(R(h(2DAu(O{ciAqR#NP+7k`<|{3 zZUjl<`WROUGE=g~GE<;PFA4@OL@o3MWB_dGC~1Z2pm?kraU4blfNU~_ld!Chufv71|{x-oAWZkoqtxwhljYKyJiU# z%E}cIh%5`7w3A~e4MV^M!4T9dka3C7XM;eAZ7Dnp3Ibt6Rja>@WXgZjk0bZ{xI)M6 zmDzl4wB5S)WT!Q2kbkv6LCo^z?r^_aklqpFRkrv-L{xqWb<7;OR6p595G)t=s&g!T zjmdLNUSJ}iwekP~G}jxp2ca||)ucZ^W2Pq14%aIPKWPzu&}x1Mop>~*Rbm02;sS*1 zJgyZ${wV%K`EA@Ec`S2d=>JCyV8zB5`rvFryndtm0;+@-?yRe;jFE_SJCJAu16xUvkr28`wN4THA%{!dN=+6t~;8SNm0R8NPP|#9 z=|;WZce~BUTa{RQ)@W^^@tHH_=cXH*4hBQr4X0pR`_5J+KM5WY)(cn`BD3w@4TLWI zj0uy8^7c~AWp>9 z7trfr4#`AB1KRaZI2p=z|u`%T&i#f&`#_tnX zkA9y>ftV+piEJlX7sace7ghZl<9`g_9Ja*i_yDFo_VWJ%&CPmY5(E;pY*!OT>=2bB4ceaL~qpP0;L>`CbniQ ziXsZj!XVT^4Db6X2>+)feLAvTMV^>A0hcIofXj z1mfH`fgETU#7_(%;9g-X%%Ia`t;5I_4Y3Hh2qaPHex}poYhB}8MLbgd8>~`eQnrau z&X}K*K#5ZEuhGDxPr!Y}zfvLxI+mPVsJ3I2Jgh5ARYH!?6WD!*o4fBh##&6g1nQb7 zlbtw@zy!I#9*8X1BMyb&fa!K3v{SRx;a1dw7j)&!*TfBECubuuEgKTB#+F?NWd|>h z7sLIS)%B$?v<43j=7Y0?%|IIg@j^$OhR|Mo8x1D-@p=9<-mJU3ef`ouu}^lMQKJ_` zYQ*SlBPs%2#9|_bV%SAYr3*@4;gIK97-X;0(%TjIQ?t*pNk114^qzrG!69%R2U|#bP(Oftt|x!WL}T3yMIR#rQoN(v|*(w5BuQw(WI~a zSHE=eqyHi8DL?Vz-^7om7`B_c}gX#nxLEgHr7KEUU;){FSu`s&WhD2C@L(NTl7KS_qe+Y+g)z}} zF;0Vs(%n%WbZNPoM6VRx4P4$+7|4olT9x+5Oc+W6jQ4|A_n}7^W!%~1wdli-b>#X%2&y)vWI8I!{HKu6-bq1$k?*f1_s zbNuMctV1?b-<-$9oI{PyQ!U$n|9AZS*V(bBCjwJHg2Gs&l7_Q>mdSHWcw)=ESQGqQ zSpPV%ve zBM?!=Q5ob!<4`y%gB*PA0UI2Z!DIA*4InfJ|EB1dgj#R(THVXDPQaY!yvThb2LmFo z0B;!ga)6B($9@2m1x4_dQvTcykMu&*;oy}qn4dv&oLRCHQ! 
zqB*_Rz>R#k0?wXW&3g^W?pb-0D3ibPp{yhYW`HGMNz}rba}s#{4)B;jITp7n(gS|2 zd1si|3{=PMvz3f_RWMg?G=`lajv4fDVjM$7ltdPfILVpfIZ5!3u^kgaF7uAS_cQPJ z^etDPA*1LJszefdbipY&qc0!EuCanM9l!zRn|1vqCgL<`-ce!s`9@FUonfns^LRG_ zSrgMvdc#gAR2a?H5Ws$iIcIeIuvy2TMXQ1w z^L`IWY@>j~A+$uF2rvS%E$=kIA}|ZQkz!FKMoLeV8dq#)5jUYZ#Ll73yJ*|1?8`80 zTQHOrqVmN{ME8%~u{sNc{gm+`U4`S)rp$6lgrMeRmnCQoKTD|ZeVt^b091H6P(u7^ z1q59|^l61~FtNN)A;egq(x_jypQ2#n!KPN=pvMsTUjzhQNpa2lRix3m^dWS zRFh+$I5-LU^=+;ogQhfB00whDCO+rMHnB<5IulIr&BgSjjkpQ)F}5lY@Y%x#1Tb>( zy9Nk!ku8jl-~>{5G)5MO!!M-C4A&cbSvJpvDJP8MY7QfW3D3)-g6IY0iSoGdOn4rL z*o|(TL585kCXW11=+-=E%JbM}W_{kxG1+CZ$7IChJ50*)EEe+p_C$^b};qyFmZQCEe=j)?hET>tifs@50i* z#f@R9QOtpfX*N4fS_iusJ0Ax=0!wq!_(Wwe8t0vZ%6Pyl@Srl7KOY#7`$1gH^lPuordo;fgMUp4W&aI z2?Br1+&{wn-!}Ks%XiIvv`(~bw9dF~AoxW*iG3+nr9gRXONyYPHarB%dIRMUyrE0I z3>idok?9(YL|&uvOokh2Bq#ni5x$y%Wxo__XizPH=BP6f46LYaPA7#MB3Lu0fy;i9 zxI2gIXF%@Ko2{+h{}r_&fdVNCizWo(Y}AS?B9%o}g_VI%^NRp6dLpPby-cCj_&Jl6 z%?pEgl%P_gEwQOu;v0IFlBcXqD5ElT$7v{9}G;riin9s>I zaM@1+ca#nudB1xqu$5_P%V2T>noq_;x8^X+g`}S`Cp64!7O{ZJ=mbAm%D#(36Ft2b zIj4UTC4LSEcZG02CTonfkaSj^(V1D4HK6DRG_Qqi$M$cSV;6*8$HsrKAaKFh4i~Pk zD%$_T7044%2=A>**1nhECxoQf(G;Qx*m&lj7;|OZ8_PWx&gmy1!c=1AXI(_tml-~K zrkRF>9x@SgL`3=(K5~Ykoe3BzjWt>O{UhGXgoWhhRaQ8NCF1XL=8LGR@LQTp=0xUL zItz0XhT9XDW`O(vR`ACG6B1tp`={Z2BGR3+(b!Sy2OOHPD?0MbU*vS^uqJDpm52DF z9P?7HR@1DclT5g;bQ%eM_KQ>9G@i^UW%^mwZ17`5ukvg2s%wDi2Z3=hCV z@Cv?i>MQibcz;x!%;%XelNo>GlleUR^Yr`6GywPu|E-Yx*JS1gK}#qdyY(l&6o>9rDO6>$TBxw7e2(&}d!SiaTM1?%iTQ(c9|V*f(|?-wWGS#@3cA z-rJnH_}zy~tq1asZ#v|4%iE8f%H44!j*X3Pa#&vYip-o4SUML28+iG2D3Z7d=vd*E zn@Zi(veU*sUQ=Qox)k>{U2^bJ`tl2KPTLlqzOG)CpAAP-Ua#KQefee>-qyA*FMYA{ zhO40*#BjPEg=|mz4qvM8jVZ0hzHasN&W$biJt|8<)p0p+JR2TddM<0VZtIH~H8o^_ zOM;j)T`WJlyxg>>D3UMfZ&CIrP|-{(wdN#nxUlgk6yb30lgG4br|z-}oo6ORa7 o(u{~S^diNvugpW=Jd59X3!j#rE8UKgUoeVHM(~0i`zM*Qe~Cq>I{*Lx literal 0 HcmV?d00001 diff --git a/python/tornado/test/__pycache__/runtests.cpython-35.pyc b/python/tornado/test/__pycache__/runtests.cpython-35.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..cf8a52ef66b1d6830192d3010e11633d2e026c2a GIT binary patch literal 6226 zcmbtYTXz%J72Zc0U6JqDU?4;gAc%lMleSqaWl@YVDXC*vwh779S+mA7$MV>t8Rg6f zW3AAKIIFkS0^au1uW@GD z>+}DdC;Bg)_%zVJhbQ^BAoB4O6cV)s*@D_RvUAkVlbxrwMYctPA#xNJ$S!DT^owK{ z2mKP+r9r<;c6rdRkX;${t7KPoKTqK)YS+lFnR6`)C#XG1_M{mXD4e49G}+T;T%>S@ z+OuTOnsJH3IclFK`!u!Z$)2b70@({@txVw=YM&+hEQ!N1$wd;+qRV8-bF^P2l@s_d zEfosSQ+ttY0DKt<`ohQEPLW?De}eo;@~6n3CVz(fS@P$|KTZBT`3vNqA^$A-i{zgp z|2+8@$bXCci{!seex3YF!+_17KcRPv0ZYrIi-HBD2mHps(kOXm*O?FferOr+_@=`rM2-00AN!?Vo zWt6TpL^idQ97f*nHaDLvhXLj=Rprr!RL`ZtsQBpdY8-daE3Zo+;@Nyw^yN+v$*dH2 z_~ay;b$#FIxGIs3=Z4|7>+SQFZWN@cO!(NMl782dy~Q+Ek?Y6J-8AiV(;#g6vBxmU z@lp^csgiE{cxisH>kT{jfTqi|1hK<|-ojvqS?YpvJ%3?j-isrzs}v|VtCvPsyDD_} zo}Olo0XaJYA`{Q;NV77$q(<42(Kw8+A2)g*UQIOb&E9h($ObGMcM&7~u`6*13648& zz~^zh!We?=6Lx0g7FgL&%$A`rHN}xkSv+QGlo?^%8n>gbKI4`;D)wa3$1}?Fue-7< z`z!B^TrB|?aOu~1{PrkN3hi_t`Bc8>i~alvKzS%XZxNlOy!x+w2@>dH9>(@co$(5^5Ih2!VN?$0xarMuHN-5emWY&qO=u$rl}UcbQLSMii-cb1a*h5Ki-sb|1;z=5S;0osGCH>soB}1F2*^*$+A$>DRZV z=XMkMas6u2%iXMJqS`o>6_Rd8Dm4d8Sso|ar9OC;PbBVX0z_6gP(doQGMn;&ayyx| z6NF)A?ZrU^AjCuIBS>0_40qH#9|j5&Xt7=6^BhBNg7{Q%94~a?a2!KC)2+^-xrayF z28pwxA})w2kr(I0wCF9qk$#{oJ5B_HaGb2_IPKW)!Y?^l&2hf&x}jOo#~-u0A=DK{ zsxdR+zh%tK>zdGv@X#H8c>W%qq>1LFRzshm$xvx%64VWe9rOwM!hPg*MM%DsRafKI za?DnwdLQaeQcrhfJ?gf%rK-m}^=C`#Yme4GsmH3m8?<&Y2_Ifp(u0hvV#RF~vJ%d2!8vBdhe3!tG4}%G4&d0t zfvY0+n;u^`I|ZePl^Hbh>N56cmaAGxR@?_>$;VdQ&&_zX|Nx1`*H=*=C|-W;GA z+BUP5ElBJ6BXmH+o;Gh7_I`mNBG6&MtU=O8IS>?GIss#8m=yy9Zfh9)vz_MGUNO0w zjUwok&t(|JiZk@Au(H1XcwOD%eyPuQbrYjT&JYNlWwyUYT2_RvgIVisa_bo`ii~O%z3~pm=Vm zj?7sO5zM#bTW}C$L%=9C;2gm%L=-SU`ztfhE-vLMp#N^Ic7YHva5YCvx-_uFhZ1S*Yth9z&RU@>&CZa1??}gV$g!MAy9b8&RVfL z%!)eZKDL;AZ3|m^ll}co z*W-NEt{x|!h^dDi6iNNWiIJPq$Q#6K{%5P&V9U9H#&}QI42Xt-7%zTA0&1|)u+~uA znIsH&a8`Wa1fA!2 z4>umijUZ_4_LLCIN=S%J<8pKGL~0#|`=x*@n)-5jO7d zWeb!%;hQQ?`|OgYpujWVv)^JjQRt^2BAXm)vR3Tg;&5E|K-Yd9dS2f_4&vJtt@->~ zrBPO=a9g#4#vWF@TmtUIs@-(E5CiistD%J8T`Qcus0X~K%*{PCP>%Px&w#J7E1Mo# 
zv}3I92LSc6(IT?ZX;H(sik!4}aa40&-{@iE@WcFOktUqr8o3^6xxyG0k=wwvf}zJ5 znq?A|3RD`2MT--(T{rSW6mwYxuYOF{rY{evpF%cgGBh|}ol$62Uk0v!Fj749ipBkk z?~#@J86JEZR|lrp>T$Z+f_J#;*9Yr$pyaW3>500H&ETDlKlKOhpI2La-lR!*2D`F} zNBYIsGm}SVvkznk`IO5qt_E|9Ph{jH$sDs(yMKsV^#L^-)aQ()41HhaEG{cAuRq#+ zw7j&cI0-|2k}6d~PYYL7xL4!mIvRUw5H6Th&Uo|5A&O?aR^wN8NP3|-EE&5r0I*oD zIvvy*A~YUIEcqfxQJFKr>ZyS@HLKAj<~r($FL_xrpN)z+m2g8=&6>#1<>!ztmV|CC z^zwLe{4D0?#RQ(JSb&D{|GX&XcxED3t5kCpWHxg-P6elmRjlU4>B2>=jf(Mb_Rg1f zoW>Qdes;PkLK5;!7Pn%-EB2;}y|VUU+JhM0uJRnaSS8WevjcV=M^zo_Mi)$g(oDC9 z@z~glHnU@{_g_(IsT|MtUqfm68G~kl*)s#=EQ%(-iP3LDjGL|S>ke(sSw+8LjpHy; zQ+#uN17S~&zC6%ERX2E<^+5B<^*y67g(GG7ALVd77KtU1QN#rAVGl(61zM!0zDqg^a2CSv8?5Xo^O=VaoS zeBWPPJ>9cCyI7KTjsd2&y1KfK|E|CO>KPv#?Ej^=f9}Q4EGhLnD)Q+<{xrVcU`DAD z{yVCoRMk;AM^#fQmr~WV%B58`qjDMLutrK%x>T-9YNu7DTUC2ht|z?Lt8%?^5B2&~ zuFvNCRj%LW2UKpr=Cdl7wfRAn8?^Z$l^e48VU-(}d`4A9RCS%otuy^~smiFTj;Y+3 zx$ah#^{To-9#t7v)r~5*QB^mo+$K}nt16pSb&JYvG1q;nvQ<^LsoXYm-LEPS zsOol=+pelRRBneU9Z;1CRo$s_JI!@gRd%WBZk5}us(Vy!kE-rfxxK2oPv!QR8iT6x zpsMayx&7vPNL40P^?=G9P}PGfcTiOisoWt|J*;wvO^snynNrmwDtAOxr&VrRRgbFN zQB^&ra>rElxXK+@)rVB>Aya!qRUTH=M^x?+ReecIs(kD>5L6yd(bljFciPDX#v`I=g z+0s)e-Ks(N< z$~`3QJd4ufs`QYQJ|v9*1G6Z7Sd|`;(noCRWt2XuN{>nDW481;ls>LXC#3X*EqxxP z?^C6dQhL&sUP0*-s`R9kK50wmP%C+mr_BZPJrCPaIFS)o%Uw3QxGknH-y;gjBe*U?cN*T{gru+e3J?$1suJ-(O zS6pwgUh~|!MnN|&m1}&q`{LzG_4*=mJ!mCgsb9xqy{K8Qyyj{@d#zk6<-L0GmfP@9 zX~P^wUUA!d%J|ut8%woYPZw&X3LYDosV~(UrTXofEm`-pTdCAvs_RP077br#>^pyF5iMWBbh`blcdNX3 zxi+s00JgchxG-D4_}puc_=DPASRz#O1|fiIgFTySXWfOmc8@>o_qli6q8U<`KXk=? 
zbqRIsL_CLf*>wiAQo=udab_X`ROHH@yEP|b_>-U48gGwN!jOSJs{BDEOzoPUZRDZiTbK>d?c)+{*+|09u z+Eu{sT%l38dZ}J4RIbju)y1oe<;BC-mdcgV;o1#-ta`oZU2W8Ltx&4B{sIM8J)y^Z z8{JGTzU~iScNk&6uN3LUZkVg4wR$vpI=)ld86iOMA1w}6G zhc4i%+aH?O~RKPZyX753#8|fNl8YiZEn(;1-`mK*+Amp?ZPcL z4{TJf6^gee)A|6O)5n=S#Dr)x31^w}-UM|blL?D3X>bQfSkhZ5rO!%@ZVfP|Oq=JIJzC`9G}>SQ zDcm5G42`7*oiS&V(;RIB04mK+cIzXksHd46W%4kST})0gX|*glAs(_Y2}od@b+Q@F zk|Mt0hfq9?ug8|6e1KE`GVGsqoYzhhVq(40sD zVH&Lp2-hPMf)H#<$O*90%Yo%Q3Q(NE1IZBaGlbnK+&6^fG)E%cPw^(CayuZ-qkbX} zad@1(1q82)_=cag5Y^D-MJjx6MOf@a)9tX}HMkMND?&wE&lHQU=N%@#b-i-<46LQw zhc82Jm219pK%YiKlPLieL|sRC_0-Lv&Y?;`MI%mgJU-N^pi%;^Q% z>vs&gk=c4c*DC_=H2P=(PfQb_Isl&)|CT9Rx&gwjs|DNcS-;EGx~`Wos`GHJ>jtgT zd%uj1Kt^QH7K9D&(gI(C<=c^8pTR?cSuyI2ID<}eef#Xz%C8sEB)^tSnSaNy9-DLB z%Z(eZey$!`tfB5DGSm19W?w)Cy4S!Lat`E4^AM!kt!{NI?;#qAAltCQIBI;aF(9E) z@E>3gq+}PL3+cOO_TCgU%Dp2IiDrYqXIVcbNXx;*H|vZ%*Gm!V7SkoO z2)JB9^$>}>fX3#g1h7r9Y68=|$Pp&#>{;A+o=pc#!|Qj9$Wc3@r(C=82=NHGw(v+j zY}f{3M(2AR z$;r5z04cBEF-V7>213slZ`4;5LtD{>foDkhfam6rx>K`$h7G|= zK8vaXDZhuh2`iixBuBwcLpjZ{wvntAECk7S+V-I0c^v~w8U?3WVHc99xzy1%91q1< zve`rg@H@j+$v`2^(MA&P2QLPNX))eaAW&|K`Az^rO58Lydu>qa_OvU;Zmx8X9Q8J# z2a}zeo;Wr=J#qQ{gZ+d3BEVs8N~H-(2DlVTlf3~J;KLC2N(C^wJ*VBpf_9U5KZFRK1_mOA zqQ%;frBulN>DlrR0*Zbg93B zGtn=36k<0bD|u{PUc4I%o-K1{9|85=K9KaS8y0>$1 z!ds6vwlzXOb`EZM>$=AFh713@by%~72`RD8YL@I&p_e+hONG8^Y5JK4?Q<| z(%;s4c%`RgDDA(Oj~GdH<$w%L0m z@bk1BRn6`rMLMRNV@HbCC4FT72d57|{>s5gKSPgQt}ndP>^gD-4iP*Qs1-aUj)Qzf zJO{iFwT_O06;yNJNUaXDQYtlvBep9|#dc%1*?;5)it{dHadR~Cq;0 zXmQt<;Bw6e{NL0L(^*2w6nyj&{vr&GNDM(>yacY0`4fl zb+)+pJ*eXCN1{^0P8wb@xUZa1NttW^H|jKZb|inQQnneg0Y&uyz8tjo77`<_8^+=l z1s2WU0-@9H5G@zI5$28d1Wn!Qld4$sfD7)5v-Pb>v0xG(7KIX5bd3eAGgO=A^(=ygsBzC3J8#vpKMI^c`QiixCk>@r=#ckAUe-_VpBS=(g)EUD! 
z3-e$l@H{=8q}$F-I-HJ``Q@QNRpsyn05x#HOJ> zdgMMjhWhAM9|h2rSn)L^&g*(9> zhj)+ca4&xH>Yg2 zb=+FSCn8kZd$LlfUMm%z{46`bS2fS9LXRy|JYOkS%Z;g%7489fPaNgfB8-b^;$C5MiQ zNtbLKdFd_Q(HC$>CV4N-I)fsC{J|o2C$T4Ow#obfvzK?CJ6ifx)bqPEl%nf*Ex3*1 z4I|aLt%=Q)a>I0a9Mxp1Fl_7CA6{tqX?VWvzR^M6=90i5APNZ~oDCwI+G*i0;Q?@#2(uxE!KLkY<-x++p??3|hQauQg zDQJuUJZ0Ce#*PwC7;wOvVnK=X%Mrx_HyBpBsF$*f+bG9&&k9s^6J0g45wqmDKS)lt z8s5`f98C5YHGzJV{%$58VM4Cd--CqSum*OUZ^vMbDMVKV0_5!iWZOE8mS7QVIPVwm zNU)sU3V8QGr!*g11*RPvivhO>Eemizg$&^(dn=T70HRo&kd0;XuHB9XSRsv2p+{}T zJ<2>jCtK8nwiv6yQViBDD~r^`N`!S`?NXPBEQjRfZ9IX^Zi70s4#c1;#$j3#Jv}&U zjm2E=lIP}WUtQ22#F(4Ibj~j}4xg{Vg9s(8%Pd8ZgV=pxv3L**UTNXg+pfO13w8Vq z_J|=ewcoQ)uDJDUHz)fI7C(%;(s*vj@Y`&?VOVaScZ8AIi{JYc$AO3a?rTd63ka^r zfL%)#-Ndx-EVLU8eS&OG;UA)cM+L9CMzQg|(HTo`27a5nSHUfx4bezpOQj-c{4QkL z&1dAKJf`V_jTIf8>}z4c&xl)iKU$~a_+t$et*naS;b}CB5g0Q~_{SQjK!UKAXm%r$ zY=j3HMXVjjtLuO$*hL`PJ^OY)LoyDZK^PFE_l0`hE`#ip|1^Q~XOO^>rQa(G!b%w- zGsfPpR4jPNJMDzIPQYWFqY!mqw6CI;aT2Ft%sS13t3W3DYzUY%8tKJX2=q@ObLWp! z0GxZ}u7wEz6%jX}CjbYo@Rm@3ETCL~z;B5gTxV23w_@Bz$P@Pr-HJtOZvgZVNai^s zTwd)_6#sLR1JPZO_O+tlU4tL5#480$h) zp-A~zT!4-tLm;oOrl>qWjAfV6!Wr6gSkeQqtT5QBKq&+*0V&Z(bW#dI_!vzAyg^eK zgPBEwl{Zk@tupCbO4UE4-bgECYAKa+%#9Z28=w!=1qRWhcsxX0^>MvH{s9;Sr$e*X zKm93wK7Z=#-~Q6G*G67O=D2*mc*;-J8dx9k^VL&dH}!tg-u|6aSmFoE^ivE=d13)} zuj>!nD6vMl>eiPU<(I(p&2@BePdxni<0mHOO*x3*$I-C<2_(q>Mdp8!`2mC;c=`sf z#a<9i6Hb7XpfQf=T=b zv@4-UD%B0cV%R~Nf-H1)IsK%gWsz9(*4;Ig)LckJTtScFD{Q|GQu1{yL_tbcDq>MD z4DDRCdzmmIfINtT&4D!_{4KSNi`l&rYsUcTD&cbLTa{@k>!M3ZnA&2nLcsiDB_t4)pF%^F*{|I(Hz_Boj7vj zgSh$1iM=J`^2+ri#1h>0y8zTO$$`>i3?xkiR&4 ze*O`E0KOR(idSubA$fUd6l}qu)sl@O^?R=u%CaHvXAoB9<%Z6mxtKq5?%b6NXP&)y z=>s_lxLVZpMjehV-!x;ECWf9v(^t1BdmaNqR2A1&aBg7;KDKocU+cKW_XV0*riqM+ z#EH_C>e17$QfZh*BhGs0>G5<1TbAR{)>*h%#%T_(W-_s@kTk)ytu5IQbdq#wyb~UIX`Oz93e* zTiQr`ihB%_W{h9>9r?F;y0gBzzouj8&0x$Td1y9<=rs$Qs6`wg8CVM}3$wlvj6~Nr z#&8xh*k~7E$#|SacVn0N{F!t5$Iw}G2+KD{v>i5%X?+fbV1O1pMc~r_v+MZ_)GlV* z&csC|O7E6=W}}sN5tuZs6pdG`AMVRQvri|(tBTg%19*jCK#OdNSBYu;AKH 
zVAHm?zy`Qi(~WIS#30V5ghJ^I3L)Mu8e#${1S(V{{kB|BV}A(#S`jdBt+&qeP{st# z4K{BcmW4YX6j6h$FeP_Bk{%oOwEz~N)g2dRV0{DgNF07>17RxD7 z98Vk-`I1cUdNbPBgGg4AB*P*|S`9NYDKKv#D!Mnq!-RO1$&*mj)bs*K?lK9jDo|e z09b@AnR4DbXgER)Njg7Z>=79)zZJJyiMR-(V95eI4_2i?9yVC9ivfz>~WifT|^=+B@X ziSew}bJ=JgvamrUR>3PYu*~^I(i90){}i9-WoQp01_Q$}mo1uc$RRBKw^)$`nG;=G zz?qgjs-uq)8@`wiL{vMqnl*I9!768r+|tt|TF#m$PzFJ;TcA*3o20fQza5^TQJxrB zZB4zr-6GnRPa0_!-x++puOY$sNs6FWA=to`{Y9}q#q#7`kTVDw6plQ!CjL{>M_kzw zrDbK65&J=12qx;?NZmmCbc-~@?Uryd?iK96rMT**G-}=JW{3fcu++v#TdVf0x+x1B zFpw<3F*4)=MiOcqh`Q5T5dhpgxJUJ8y;%Nm0JwaxoDCQ5##l{QVGphBdXKC>gsT2B zdJ|%7Ur+}=mxH`7BCbuk8JD3Pys?A3N>X9S8}~lojZgJ6#iCgpFAPv!&4CzeE~Jpy zs^mPHCnBS(5>SPZ3SBN!a@WW?h`!pGT#F{6#)^0kyJnN(89c5N+amLt>jJUiiIzWd zbn1>rSC9l;*cHJ%3c#N@wKoqxKn_RhV7eOjxVMs4A8*sj!NQ?Njm;h;5@`ne=f+a`Nl*i=2WPHxIIcMr+l^WoZ z<8WtuB|IIYcD5qOI)zL~?M8c3Tg&W&ttSzQ6fi6ecbmlSH9+W|g>@9h=*U7s|2eel z4>A&qP9FpXV7h|C;Qktof%cDt76qJ%VEZ!ReE$?UAu~+i zFH3@dY#9ljScL>vXe4|WD;j0}Gufecp*qe+Botkn>_X%ImV zzQW=!BLm?Br#7S>Q~**cv$SH%1|qMi&o0$fg+-22@2LEM<3v`?O}&Bm55U z)o1%Bf{%=FqU0{fnV(Qn?l;FFPlWYYy}-bd3AwZX+Jj{N2PYFMV54%1if6xhg~3#y zdG=3E#Z*tileTG!=Sb7ZvOR&Hdh`we4*Q*z`Zc)K{B9GpWR}2f3}%c!zmHCFZ;^}O zBx%1>h@*xxP}pXU=@QFqKTNWqagQL%u0BM<3{pP5Zxx1Hsl^ycgd7Ldt(_!{F2GtK zPzP8nB$J0R`Gq{NG4cNmc_eDD1j%6tNB>K}SpO>~vc^rM82wwkpF}JBSD=*`&;KMz zMw}!8$@CkgcJ>_+O|0pVc4!MzIEpxV44LH?Luf19b;M8*i_4e`veIn%2@`EA@gs$% zv_EE6-}$4{)8hXx*Ou@yigd&WJC9MUWLX&>cl7zc8C7K6(EY3WxT8;put-N?aSj(| z)%_-%KW24}5?MVNh)|rG1vKDdme;5TzJbV%@k!VLhAE2=?l;H=2gt)LK5z_Em?|~f zT<_heFI7qtwR&UXnma)g90#HtO<9jW2`0EmxX%*?#HMnZ&#yut(b?zSg08#{2ex^) zOe=shCdCrN*<1FrF#*4d&gbPYO$?vh%CK<@Vk#yvg)vrE7})1HKRN_(77?-1g5}lB zl@*!`agsrcp0Tgq^h|?AJlm?36+r1mQ8%3Q$=>`!x zJiZ|YH%JN1oI^#{HSS;WBN{hg=x|>jo)b8Qn58Fzz1`s;bjGgAa--&S8zS1mm0B|! 
z-Rli!Ny6Yvh4S{AS%TAF*=k)$n5Bdj!DWIGeln$A?PeHpvrE?hpgTrZA{I9_HSIe$ z+Lpa<_n%|{5k1LI{)(l;KGl476*_DUe*tG8ux)LlRlF7%wpy00Z^ToI%zYfg(e%&z z>1tt7(;_nKGDhA7j%ktV@L=))iI*F$EmiqKCfz{5WX<*3=HLelHhI+#A=*BqU*N>( za#uawjDLWb>F1VO3ejMzF5Nn|Iy*q6`8<;^F!{SozR2X4nfwZqUu7bTS;~EeXKRM3)Li{#-udTDeuK$xGVz&k zZ(VLd7iCy?NP$j0noc5AjbJDlyy_GJ4;M@Bbh z2eRGB4`=(c>$4-sWj1t62}(x?M~9H_m3E3~BK%NB^Rw&~I{*^{_3=NYppebWL%@Q7 zAs+D zC~6+QG>&yjS`J4{)}{xJ%YBK_!HCf-Wt9K}?EDEHunNzyq*_N<0yXP*-@46MjtDAc zP#pYkv<8I@_6=SpoH-XT)ZX zIR-hf!ZY@KBxE_?+!!8_95m#%E_w=ZZbsgUW^ou|@7>Uw{w4sSe~(EEyg!4>kk$-< z1>zx0DL;8OUTYPcp2Dm0+=T)H zKRkaBfhy&aoLInO>H6Wj4fr zdy8!o^UV%vq`j}V(6k}8FWQbKSxrP9^!w-LE{PxRPNTU6H>O(GVpQTVq6ZIe3>*|7 z0*>}20ORFg%H zkgpv(zTCI^`>56#SlAP4xOW=g3`ruLg-(ucBX@MsJSgBA=lwvFUBLR4nyyoJ}9 zl2$C8b0-;FU$S#7dvlDUkL(hgdlIV5P*h7naI9>@bSKVp_wtPwYQ@6R^&5=|H+(I} z-$(BkER_i;p{Btr3^VZ%F}#^D;aH6=M5fXI1&Kd6bNu61U$}VX{5e0^c)3z2 zdoFfd(Wi7NK19o9N$?F!p%!uHajdrF-QYXr77FFcw+ZfkbjbID^urk16*GZ4g4~9> z#9Iq7kVb<8_zEFHw6`z-(7pATY~F(mxz!+{}D@MD*uhSKVZTLSDj)K z>cg1zL>u6@NElq7(0a;tWiw+A4C)@Fy<#I2aVz|g)%fvFBU#y&3Jhy;e9*8KJVWfg ziF}u_bb9;;X5)y4zSgX#(L}7__FxGdGs^3h=ms zxEIRTmo)sZcqvR5%jhyWfGQSF0=UC`kWUX0rx|3#2->~k=-Uh-44}0_cn}xh6d2oN z1aSd4m;h7)uE9AB2tyyONyw2AU6P}>HsGn||lb(|*N0s!C4IlP3o<5ap(ZL~EN zgca;tc`CNWknd>=ahW4BcNWc=*+5c~J2@3&9s%pJngG}1##>vgKoH@}jRRu_1Y90_ z&JaxSbSn92vqx*67Jy<1v?dt|?W|=WW(UJI6`cTDgML_fAhIa10V?g&K-AaDPa7p4 z^ess53+!M0PXJ^&IC_sN?p~G&RrozRc;VF`VaQo&A!OIs#I$maVJyP$RaOnG*FgU_?falaG@&cA+Lu{d6NbX$#Wr&KAgq$d!~er&gmJyul#SG@`DOmUyH{J-{R! 
zo8>@ZzBFzPa#WleB^6eBEQ?7e66I-pmq{lB{x`w+5Jw`Bm^TDwB@%HQd9@8rQ1}}u zlHd%Sfwd_9=3pgVWJmn)Z)gHHaY5Tws1QAZVkwG@H!!8V=jEHJR0~VASHI^Sc<$cU0$5fcF6Nb)~@@Og&A~GiU$kmodjBuQ7L) zl?RLyxRl51xh$E{Vc0^YEW%p#FpMdH&RdU4jCk$?h4q8dGRW~wAvR(a)A49Q&XG;J`lA5fcdw zB)n9e&^JuHiMbXcFx_0Zg8A8ZPo zxnNMpvv`{oR*4}`qkH-AFU1s6!_~NQ`m@W_JzUxZx1ij>(P7nPVWE(q;OCH zVBLxYQuLb`px(@aZ!s5oWc@)Z7FrbY0~_(Jj3oCm0*@^vj6qHI;lOb7fex@rSXpJz z`wYHzCj$@zY`7f33**EV<12csPX=;hCx^S6$rzFqxD{=L+`6OFfL7F^&!Pij2LEML z%wu56oEI_Ll4amU6EYwUQshOg%Eqd3P=$62-l)DKID65Czwkt_mqo8JzjANnc*Qni ztaZlXqlAMj73K+y6%GoTSN61%(d`!U`VhA(VX22Z!AiKey@gP+qNSEKH-n1=$iaML zDIgcZOJFCZ;mT5QZ&VZDwKITM8%>{hY7!hm?{sXc>`Uc-T zl}O~v4g$;_kU(6_WoBbjVn?Uh$u=a>DTV!ozZcOwnmlMDZd#!QXA7mEq7KQH^8k$b z>ng@UBqG=Tct&s%IW(Zz1*?McpI4+D*e2+vt!d61CZ|^XkBnclX9Zr)(X*UB)5KWO+Bhe}s zuT&_&ad0Ns4AlP=&&JOr#dZK`TCF6I?Rz%_^Y$3g4{<^q9aq=QgB_+Mf;gZV`(y|A zj0liAAwsmo)Kx?9>=kqXRIgSeDF+r}6j-tO%p@&HHiBtb4X_=V^J}`W$|M zh$1GW7qVlBozCLrDIm)9hBOd5vTI5Aig{tBoM?kNWnKz(HDHIANhOOQk-*>Olj5lNJdPW?yNHNO1Wng=`5(S3;Y`+@l- z#IU%32r$=sG4k>^K#OaE`cJGG>e2D98S2r8e>?(pzLEek$|F7EZ-p=d-AoXDJCWF> zH?gBxIf7IX2^1NhW`lvb(>30OgW8)rQc;ZByYTP~zSf+FpbzL<+@^S`A2(q2K2*sK zSMm15AjHvwASjdbG1PD^(t*UM@bG8sjG=IMCJ#;4Fy=)x;Slju%SfHZ1qUa>AIwK5 z83ewVCc>34U~mmd6SqhM;0l@JN27wbqTIhJZnhi+LDo_G*Ek)DU@oU&T<%#uB@z0u zhhnp$b{4=tflR1mAz$nZAgs#|vkdktICtYuzKNtOtFq2aB0i9Rz?#Sn~TlLK%#H*8DkuKkWkPB+5Qdlo2W5_o@3I z-CCt;pXf5q>vOh4D6+#Hsbd=kw1PndUTUrmOOfR71uXSG40Lr^{vA8D`+{XSx;4Wx zeDVvnL%>pA_jw=ci7=PS?bJX<|J!UZP*0ih4Aj%c4m1%XPYT$M%8Bkb6Te91WM1ui zjfaFo)uaz$Ol45u{VJ(1+Ia)eJCX+2NkxG#%s^w&dElRwR1{eZR;1XJBFAN|A{s;X zYzm@ZWfE)GS?2vaM*R>fzsRXY6xOooMbs7Rhj_qq>b=!fk0-x}XktOvtJtroxhE## z+kz8~dVQi&m*0&LweoIMngNV7Bj*7BaSJe_&yoSts=uNtJ4z-v#w5Y;)(3Ip=h

    MdXh^Tz#qH=20)d*fnf2?w3y;sC zWdmBSqeA%;5+6YGPIT0L%GF;dE(i{=`}A7Iw?@e-V)4tivzAO1qKcXNfn$dZ5z{Z-o+?B8B&g0t*B7a7~+2j<<nJjjF`U%Bl$ldJua*?HcRy_ z-%h96F}_G+CSv>UL?&wcMtnThU%fo?UZGP5f#v4#=4| zxZbDN=NrVT;bz1IKGSK?1poa{agCzM-_)twuXA+7HssYKpf|qXopa?c{1y2I@(J8C z4vIi(u~w4Q`nRolj(0crbm}lV&lsZ6tPx-g%c1Wu2sm}b@vwr=;rHwmAm0zI+HEK4 z^WGXiA6VV^ffIQVKYQ#>5Y%{^1CnKgH-T{fHY+KPpm$>;nWMgfZA52t6m2hp{WbwB zvM)7)H}^K5SP{4)23Ke;A+n$)7axriRm(5%5N`Y%I}XUjemKHeHu<98pT`^K8cT@Y z&1=+9nyjwLX`1dXeG`fNNF`6ERLa-PTgZNix1iCKew4{iG5LE;{yGzh_x=aS<%SR! zTq~5YVeb_SI88{Z%=H9?=5lz{{V1ijNmm)mCV6C+ctvE_Lk`L(h!RF(36l`7kzF@hPj0|#5k~l3Bxjs{`R_nESj9~^RF7;m4ImU$Uw6*`rn%`Z&b`w8BA?I1;WTQE@ zmnQi!=1ihiO#Fpx%h3TP2E8a_p}dIf3(3^)J&A)ROBI*z(^Osw$*xouua^VGM*pSw zgF9S**p$DKyvvz#_NO=Zjt`D^<6jrPz4+II-0*l8+UOqtUuu4H|K{oOiSY;U-8#M= Jf47d0{co12mRJA) literal 0 HcmV?d00001 diff --git a/python/tornado/test/__pycache__/stack_context_test.cpython-35.pyc b/python/tornado/test/__pycache__/stack_context_test.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..27a27f7534e98942cfe6c0edeeaf46a34dc05b21 GIT binary patch literal 12543 zcmd5?%X1q?dhfyDMFM<*Bt)8&3_sRuIBRZDXH zr8>5%*{YLPwY0L>Bc<9I)yb+_*8H|rJEuB%Rm+>-Y1N)lomo|zRh@#W6;!9FYDLwV zQ?)tOnOC)W)mc!r1=U$pwM8=~quNWVQ&P2(>MX0;vg)j;+KTF|s@kgQkyY)o>YP%w zQ|5P0wbxWdnol)1F8Rg8X&%w;B zayQgNu=qS$3(6@)W1*4>uZ>UD$pzHW8>`re@1^yT-xmVZ$9{D$v# z+^%1>0(;->;w#ta_v`K6el-^qZ~Beq;gw$3cklZ_`krp|gTnTp-Hw`0Cp)~`ZT7le z%kweaRjqqESiO3`>Gru?vR_cvgKj-qzIj;3Fj*jL$N?Wqm%Rtw<~z4;{ol{B!2&{l44u-|y`QdC*<=(GSGu2i=x0bvr0FILU$Tb$bIZC|vIM+pT89Fn`1S zWPlOxG`ddP)!R+VFZ$SF#rp^qSJ5nCgD}f4l-ffb|0(rHXt9->R*tQlGR}y~Nc}Kb%yY9^2ufsToRDTZT$|sdgO~RT3LpQL`V`Bg)SZ<26!jGA z)~7aG>!5YBm%d-v?S?tI|6+S~cXm7h1dI}qX94ZpGTey`bR@7!`b{hfZR|N8Er z)plO*9_VwO{ie6$_jI@6^hV!ca|ebt(zIx6tN%c+W6>pCp73q0Tf>c`LfMK~XaX1LOVJ=JK3`! 
zTSy_{xSKP?^tM-WK0G=Cy!ZGB2537->h+OMfa?dwu1^h(5^4q9;*HOSr z;UI@urGHG)QAf)r*w7W^(hUNVp zStS~|&WPt>Y0!;Z_ZqGCK)cmcV0*p}GWT@LcOezpRZxI$_yZ3x=(s`l@Logjdy~di zkP&rwbx#uxyetZ3y<#n|LlEdTIxaw9wqEb_oI#uQV!i&!pwTvW#1-`l zCe?H(u^CNG=+{}qwhx`r8Z=!%5M~h-Zx%(-Dy2)gVz!u;_9mL*57l%Dmv;e05>gGA zelB1dBPpW(2u6XYIXG`VSYbFw#u2hTX0McqKmx4?O2_2DOk=Cmw2g>q?b&MIqJC+# zUt4b&;7e<|vVqLB(`x!59!~ZA9Gtoi@Z7?X&`4*j;pQ~@w!*GsbW&PEr)TH{ksuXd z{%Gq6dKot(^)O1pJ!)C-p{&~gwO`)&hz&V0+`PJjAOj}DZnHFO-o}jrT>(rRB+P6N z*FNYr8-x9WgqK#XU%gd%L!Sdtt$%~5du7z78+Wy+3EMh{A}AdAejibzg}5JN_5gSX zfrx0J+rh$tgxI=vy*{!77g9CY=>fX$_M8XPfg&go zWtcOJA`6KZYgE>v^}MxYm963Trz+;?*ceP`t-M4>jG~OTh_R?Zn$Q&Ut}xoB^v}=% z5!FXzO&N1x%gAGltNa9Qzlm#PFE39eYeHPx)vTtO={H#XkOf^xUu3~K6YGeNieH|^ z4Z5wMMA0gyi#g4aL2iZ-W>Esic4y-Xs zZ;_bCNiEWIB(lyTr$UnMBH3M1PAQssUOCIkS&_R7a(7iZ<>>CBw474TTGX;+@~QP; z@rmF-c6oUf6aa^0HuVq+-52d9^C-QBp866BWK^#IK_BTga_75ll(2fCT}GHVQov;l z`Yl|cwwJA8d6Kv-nR3kLNM4hpXvH)&u{IcA=(rz-sZDr`a7NRlzenbn)^A~KkOexq zT}S_vKkR;Nnn9XqpRgI4OOOi_@GJOX0!wPq${_3>p*idrqZuil!NqS6LZvFcu+*MJ zTazV!W=V?#T_?@F`76^0En-1v`79M`1bN$+7J(WD1-8KVqXl4|Np^u=A?C3UVs_+) z`R$k>M(6^rWfJr|C_og%g^4~Bl_MBnb~D~!F4uTLS!~J>TEiY@n2->yDl$%OBXMl) z>V|$$&1v$k^C*HD*;w^@Z7;|%lWH~E`Wo-XIU08-QD%umpd8ME%%bb+IOj|R9MR2U z)GgY$y^l#fE~!#QYtusDdHaZ(1l!#ENc*GFq2jjQY%_KDE^b9PqG`t@n8xfPK|;@$ zVu&myQ-T!!5LpzI3q05l_aKmPq>Rk=V`3)|ezf1x*U|F$zrOkA8$cv&Qmhni^B(GB z;4bjRB0G?N<=9snU|#|GTAOB_P`SEk*L+9NpmO1A1>q@#8TPV8@b!i;4z&X%G~SYID(12 zvrP<^&;Kg>=T6H0ZO{TczJ)Jvdwzmj{Q=q!aXl@&=l*ZlJ%3Vme|L=C3zO^;Li*3Z z?)jKqOrkQl#JDMBTC6GIFZefjZ*~_jZG`gmhk$`6cEc3n8KKdOwP&`)gf-XDWKY1f znodAy95M|)#oGSSB>OcCAK)_a5Yez;Z+mb zRMkl$f&M+r4~U5JL!5hk7oYqQ9AG3PLH2Ox@ge3kLd%Mah(BmriKeV%T_Z#R`UL^{ zk>67o9yj#L%BI#b@|Q2!i`KC6ZIBTIJ7q{ivT=F;ih}uIBtb%M1oBK0BP0jHGm%wl zC&gs&Gn1kr2NOA)JP|43zA?Xvt~dd8@}V^%i;G9e0c@0^jL*&?GgF2jf?yRa$}RNK zA+ytmWTPQs;E9Mq8N@b4SVF`=7DXl{sv+iq+xxGPd5k@rQTj4@hIw&y#0h*c8^+NU zMD)p;x=rItD?g+BY?#=h8^r{Q(aPIMoq4*EGHFURujjFHEgzFA=7Brfn|si{@<@!b z7p$zcfLOEyryXAWHsh-IwBzbgR2rYQ$r<^fs4KVx!~P5vh#NNb=#L>9$Wf&3%dzm} 
z2EbvCF?LHN3OPP{OLiy8QDTI=RiU98p;HN>2Ur%Y6t;cyHXst`GgH5C>t;1G{MBtd zQSmB`N|c3HaM0@?XxFWHJk_W~3i?3>rzjQe`UBndW-E!0f8dTrH`=}KeuYm~FpFD> z9UBi|;!fVy*!UpT{NV_YdE77y!6mZCJbyCVkg+jw6Nqm~|d!3jD~K zN3hyy9J+NrHR`)g{N(9R(8-e=GCPay%ZB_7uYOk>m^!x}r%N+uG0FPRQG{rsxe;hRN{9Xii|{>I6m_TZCCDj~J}%Qeky|!Po^Rgp=S&L`4fABu;>>Fnj}SLLgzB7BDqO1-bFGmY6SI^ZTfb(%J}o!maoc z{0Iqiw4Dxf*vam(=M({7W@DJlcB619iQM$P3X^Q?mUE%yz`B^$O<0#^tot>tIQTCU zOD;?odJB(5<+?40X~yakgeNXQ15z zAVm9&UenzZhAf^L#sRwGw0$X{6i3E9J2=iY{|>i9*PMmo;hJZ^O~6m~OTwcV^a?6u zM(ozG8=rrsPVR7jPXalqdJ58x@wFF78t9tpYq4WRGPX4mK;u*2=0^X3jw5vV&XRn3 z4_cY{1%;^Su*kcg%NaT!bl)76&5+G9lz8Hw&;+BxaPTSTW7E9A)Nm zEN4!E!}!8}cuEF^qV_Rt-Jn<#v&SEKQB-4HD2*=(ItcHmId7Z_~LHPce@7iuw~w~aHxAsOgTFAh+M=ku46(YmuK>eC;9=OHDL`%4I=(r zg2$uvXv0J&kl;ZFiNqXDM-utrH*;7fK!Q=2i`&m@#+K~B@&oH`kn)cRGXG|~(b;tx z7ypw(ajD@)CorP=Xz%GIAO0rHe^iZ@z_~a#9mXrtw;JAILg7iHkM*qS5C4J7059Bd zcKhCi9Ew&^d$j%;pXo0aRKbgG9mSCBToAhPfT6ub=&n07b zFw^X5_zDu51YyVNCW6w@p?FeHlG|*E4^Ef-3^|a~%G8G6_VI$>HH!EKiyanFiqZ!xS}gdqN0(R-)bs-uLl(%168MMx z*=aVAOMMB&MO@w*3cjYdnlG*uE5)s1xwukXDdmfWQcimDO~DS1ckA`)%Y0XGZ-5g= zyep`=PtrtXO}o>?idrsw(N|(wtq}g&!fUniURZ54YVndm+ISJuaC6cqkx!sg^hkl% z7NU_<4_?vL3+&EAD)ZVI`L8V{J8i)z+IpGoF(LQdUGqZYDhHNX;MhGp+pMs5ip5zL zmso6}2-4&~5lG@_66D`BuTEa1FuY9^FXBC!Me9Q9MU%RfN CgK``I literal 0 HcmV?d00001 diff --git a/python/tornado/test/__pycache__/tcpclient_test.cpython-35.pyc b/python/tornado/test/__pycache__/tcpclient_test.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b9aa49d6c8eb81f7c1ec857e7f96043904d24ef1 GIT binary patch literal 12038 zcmcgyTWlOzT0Yg??wh;gYaBbbak8^B+p~a|Ns8W`A;oXtL1SwpFd_$@KP$NG%++V@ve`qOXh9f`<(Jhs$Ev~vgtRaylK_0 zsCq@UtEyf#wRzi$`2cnb~l%3hF$g}D2wva@QdcKijoS<8hp&0W9i^#iBjwtHP2 zghktZ==yHA6CUenw-Yq>`khuF&yU>TUc(QXfzx(6LCp#aEwAglox7X3&u{dDzIJMv zu(IoR?1tZM?K^=V7H>It!9z!*rL?_%bKP?>I4n2TyPb~H3c4DD*|6nk)2n*bf6{4f zJASa<^qsJH*Xc9@Jk_#c#ou>(H}*EWH*P+>6c+m(H(*Ox)y`g@lr*@87XXENkwMYo zD&wDQbmL#ajiZPa00UV0Q>FHBkH3ui0cx`3k)0#sfS#yzO4)g7onp;atq@j8VnTR} 
z@8b_&lZ*cQw{a8T&r(m7dY)1DGwQig)?LNtMpgy62)<7(3hh~j_4lXLQ}nZxg@2%F z<$j){K-3CH31+uyxy`WTbz4pEUe^yam#pVe)QaJh-|snEFXAaIHX1It)M$hS^m3YQ zUl&ndpKjVi8BO1Xq#&T-}UVGJNNX(_T84h1F3hK zc6az6oZJaoy_SGOq`lUAqECR*w{iJp6v|q#O4htpwFXNm&acHwY{e4he~M!rmrpj3 zsrZ8wz-I6ja^I?1o3)HyMr)XF_Iggou4VNpc{H2~3*eDouGpn8n-S=|>od;*s7cu!$ zdI`ggpmToE?V06FQ7xf~vKoC3U22v-!=ADbu0fNnYq){9bt_1;IY@%8E5#sAPro7V458GKD|> zzoR(1lG>u4Ujo>oo>XV3Nl_Fwuk4bt%c9N&<-p8OE0|Yw1{CUOsw&u3jxE!=!kY4a zS~)Pma|$*z>Ip-Pngsj`Bq$E~o+E6P48tuV}Z?8eO&YkowB^8u92310?_m^aodEM47be7?DL^ZI(2>H1;8={$5bTwRfV zh3pqVf;b@gB#KyvPP1an3RFOd4jP7#kI})WVO7>ySg09maB_@QBb`$+PB=zE89N0D z?w6GQdh`eeLb$Y9WDfj5V!VXTw=pgf;y8)ZRN;uiE9e%9a)vf^Wt_gGXNo++kC#tl zVaSW<;e+vJf%q{z)4@%}#EOBMuD#Z44p5pK`h1ws>wC}2|M^IAcHx89kFj5=hw za@%@Qmv>czx|xkpVm z2(z#N`hB(*p;8_!fZ+fj))oGwSxUoC1QM!o6DRhnEp_<-{6#1yytjcS7iEvkF}D%h<~CIh@DOR zt4VP<^+>ycV+;>Nl6o1Th27YF5;%T1jbO{s!F4*^THbi|(;StP(2s^nynL9m5qgb* zC7iLl5>Pdw7jB`uUqPWVGu9GJLDf2G4bG$)JT@i)bL^GYISIovazWx-ury+Xa3Hfe zC|$Vt>uVq2_X6PXDf$|u6e6S?VzZX8;vlBA0RVm^#SULcpet+ecAD171KxyW(c@8K z^>5>aM<?>GohD@e>15*qs$Ii!i2^dx=h7_<5;;{^9EHmtVU z%;bD19Zl{`Hh4R54rnlAeg5G<#Zrr+^y3HZo)~41PfXzX9!+9+36~!bhY2(q!si&hUeF;&sWTs2{!_t5fJ;K#{n) z-@$_kb9OR}E21nyvfHILB90Ny^+P0@_zWK>o(pb}J}~9I(zZLV=P-Z03~gHXoMxxr z3oFJU-Dv7_2tLM4H4I?3uM80?M07xKI$Ce9|?m_;N zOTx`vF}f+JApAM3Cu}kCf*u$tA-=KA+0D>uhSn~8&oo|!Ehec6vsX7R=xeBmL%;&E zi0!6+5|(@iM%Tveke`~23}-~7-Hy{R$)-O+WfY<0trCpqJYjHt8-YQ3?Fj!7D+~uF zx=da$u3G0ze~e{&O_@1h%rsE&eWB z=NZu*!Y>)MiMP|`jcCpH_{i{OOM)^Ab~&3*&u%t zCv1?&A7*QixrtLXb!33fDEqAFB+}_`DSK7!7fk-o zwj|e>0NetHIlBP7iW_hkY6|HI*fN_NOvy_g(4HIuNY3s@7#Vx}bBDcjm|x{v`SZ%R zZ{wX!bUftMUq{oAamBAbd+4iW1rxAJ|7@}fL&Zq28kUC~`%AutzyxHodKWh-9@1cD z*@bEeBQc(+cO{lZqCUF8R#Dt0?Wu&Xya7y&4Z%&0#soHm!T@Bs864Goy@U54ivZRX zO)lj7tf0dRXIjRv!&~ws#g}BR7zX|O?DPqWh@to=ekfV8%di~+9zx`hHRP0j%_%{~1ac*Y@!NMyt=ir=cX zut~?snK=jeJM=VE!U8YgEFeu~s&9fpN`Ry?Ch-|2kh~a%2--|~3QQ+3W$%=1j&OXD z6KBEfrp6XYXb*Gy&J#c8DW)2==0f!`6TCHe;(-&hpYY8jLs^5$(V10tl;|v)m1TQ4 
z;P^h?Cu(d!RtBIO&=qX?!Yc2b$1SJF<4S!CJ>ldiNxjEHL|O3rtrpL85X&53%}kWy ziqj82MW0Bamf>6%tP`2RnKX~aMzn;J7)ECC5+*Sv1`Lut27M?I8kqAw5i-m-c_vX7 z*al!C8@TDt!mmdGR=wbM8(tSIxQE#gZZV-8AyrI6m}|j)Ok7vhq^jAD6W71T_z0-W zh_T0kn(Et%O+^-i^0;I@&v0XeZq@_RQvs92LPp#sAB+yPk`jmBHR&=o7ou831z?86 znUa`88UPS5BiIj)-%=}kus(5SAZwzj?OIv;AVwHqlp*IKUm$?df_?^8x`!gn8=MGm zqUcm>woZbANkh5qwqv`Z;|4bWi2ey8mJyah(cexZLL##R15=DlfWeT93lxnXvgaT> zu`Lkhfss)-4ugQI5zZNd3^c(SkFjZ^RxoT5X6$p#@{5vj_RK< zHrlbB1wUTNk!V4rPtRnGvXxInoTomI&c~1U<1u2Ct?& z=>TeR06z-7QFp$veF$f-#Y{aK+D-~+h8r22*+5zCMxdYk3%M)lTI*fmu5kDp6EHV% zc#6vj?*8nT&E=siCc!i}vN!ra!s_|MIf`AT;Pzi9a`uCxayFhO#o+|UKmWxzY+|h_ z^E(0L;G)t0A!AK1QIr_urdJB&aJ$At7?WeZH$kBKa*!9t%tu)6;+q_|?R5KrF(t1F z_?L+%n)lx*o>>%%z|WS4;6J*EqNyf}=x-AdQPIT#AM|2rvkQh|yP7h);_IULA3Fkx z|7m|Yr|FInL;r><>4k(&YO66;!G{K{;NyOl^4qt`E9x&3H|Dsq-x?x#Fa$3s4g$a7|@|h+}g<&)qV9HlZ z1)5?$jNu?74Dpo7_AC|#4j5#|23f_NFNiXjU_;tC2ZD{EGOz*zjnId#0`deb?a|%c z4(Q(ANW~*Vi44$aBqDzIHN~=WP%II!|C_=vIznNTEehkE3|#QLuM)>gRpTH2ABA9e zIavtO771Y(@kK%ymtI+LQd0NvIS&K^AXJ9ihS;TuSZdXYdh$}KAmJ^iuG_lrXpi#a z-u0C+lJVrxCZo~B*C{$Sl>c+6uqJu)iBqIhCWrYX94{eACy#y^Dl?{GDRXBu42OI} z;f{2NfW#;!>NMVKirb#J*NAGyRc0ViFdSG?0Z;&kHa(1aBReS;azj!rVliu#qhP=R zPjQTnQ0jGqVxf zeGe0Brbc8lsKwBTWf6D4gC+fl#bXvcQPF?E0w7VEn>YQ3EdGdvq>xy|xfdo0WSjjL zxRLZp#ab%N5Y`bJV!2Ys&o8S&?HS74b45V&axn6 zHS-5yxz+6i&f~yycjXHR90{X|pOVz)qpw5P(52Pa8lMYm=4R?MaYM%m`hn}om#pSI zcP?&9J|dNm+$6zXA8jLq&sG-y E7g3UO2mk;8 literal 0 HcmV?d00001 diff --git a/python/tornado/test/__pycache__/tcpserver_test.cpython-35.pyc b/python/tornado/test/__pycache__/tcpserver_test.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..47355eb15341f663494da0b272f856149c5bc133 GIT binary patch literal 1944 zcmb_dOK%)S5U!rb?#@2E>n9Hjuq;3tiR>Xq6h)3qh>*gPV3S0{N~7^idp+x(2i-lv zvFwvMamnxC2XNuaiJ!n6xNyo}$cd^Rdt>JW5@Yw*Q#IYyU)NXNyD=Da|9JiDlgBQ= z-*D}5@O=lH`VAq$e+B{|V-PUNEC?*fYzS<~90(j>G{OSmLg1RwHi#BvZ3x=UHwQ!q zG7kc;v0V^d$a)a;8oLFe57_{ML1VW;EI~GeVA$9l5F^NzAy|fN1%egGRv}o0Yz=}n 
zU=I=xk}mv&B)Y(t;R30x6QMo7_X_!g@6^NSKoz3WJWR8)kh<<9>GM>jMP4tJGR<{( zSmm)c_K&GP3YCsD&v>pUKCA5+&#~)#_067^Jj(FB9hGG$iW$E4CY2DoMXvdYM$_Bh zeYD5rb1re+!9DdNkN3IKyOH9xcmJg1v3^+0>d`@(Ct;OWiYH-130Da<4vmTQ)nnt! z4mP!oz=0INbfW^?CRuQsxbX{A#(~7fE#sOC0<><-a2`D^Z0}&B=;|+o2aD+#L}L&e z&P{OV+=6o(PJaf?-~wQZrUU0LXp793o50S@2OM{1!Q6&3hbXY8$n5dA&$I*Dg}FTn&zV-Mu-X3aj0 zq&b(~KY?NnjQ1J(_I!5|<0co6jc+^H)F%jU<#CR=yPis{Vw1|#FK?uBOyYz`QJx5n z1wxe&#)T{@o#vd1VV35z@f}gbkx+NXl>MdMd-F+J^|3_PN8hjptVgoT+Tk}LHK$PFC+`up9_r{5_q z)%4Ns*HJ#bSC(HyI+{KtmeYNnmD4gUZyi*rNN(jv@^&_h)l?TUkCNg|k9(QwxWqtM z*rrOy<%@c7BN;2Wo+6H5by%03?*Bj88{b%D;JircF#Ms4 zMDwM|mA8;bzKgU>My>%Jhr0Vlr)9Ji>CQ*dLsse|i*A87EZ*T15e^ahRTx}tMv(fG{n+;JpkFBv4 zHbU&NRjZGlK0Cer-_@M>u6z$e%l8R336=;xC7|lO2Bd`?B2b>LL?GMWAe9d*T}f0~ zl01^mk#q>~mkZ=a2toU@W=#%*(WNy`Q6)O5MvS0$<;K%$#0b`}Jb2^B$9SI2J>F=! zeD!Tn`9|$vxbWk#z%ws<#KH({3c+lWxL@0!*{&(L&Yh-Y@6xGOBZO_%w?>%n=5O2n E2fE1KkN^Mx literal 0 HcmV?d00001 diff --git a/python/tornado/test/__pycache__/template_test.cpython-35.pyc b/python/tornado/test/__pycache__/template_test.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3324f67873fa958a3bec78fc42bb673e0d736f71 GIT binary patch literal 20730 zcmd5^OK=>=dG6U=EOr-*2SM;nf<&r9&l3O0CTvC;CuBk*PSLNVq=8&Y4%0b7JD;J;g{r^AHGqbw@ zLCJOuavD9;@BiI@|BZvw)0Ln7@~7|r{##1@mrDHN(ZPt9o8_3aVaE2Gz)^_JpcWaBbv^s$Puqld3++`MheERHv-!W!0&udPQ}nRDDX? 
z7gW2dI@78?tvWNRKBGFbsy?eabE-ZkH6~PhpX$u3`nKst@1FCaS z)ekD;O$(!y)JK?9N!jyi^N@Oi*&bGJT4iNbxS*0MID&#HWmUPLnkqPof@x*VaKTKf z;1~*Kl{Lo&bE$&k1c%$TeV-C6YlU#Ox$gDa1K(~qoqo^t!;`_YWQae#7&dzTL6A{#q>`76$&tOJSwk^qq$`irlc`_Zow))9P6^dN19vJN-5q zgq2%P%l}%hY1ydrZqxPbH(a;phDG^SGr|&@`M&M>=+_v8m3P)R?Uql?!fMoG!{6%L zEt+}!lSk%NJl-2116vXHfofnM^nir|!k_^Q{|lTiC=2*6avqr&3q%Mp;E51oSl3HR zgBKK`Aqu*%_9<(g`$6UX%39!jRapm=b&&JG%OPbQ=KPGZjwtIW=Yf`E$~w;ZIc1$t z)*|Q8;YnqgoS#?LDP=8j9-W_7))~$(DC?}U&T$?CEi3Ch=MO5YrmW{Ue@NMf)#edp zA61*jlzm)no={JedVuvlubu!h7nF68YCZw8EGlb7S(msWM!yWwSPiQ&*6sisEdZ8( za!3n!ZsPGSAn}>&(92^({hm^rNDZ}#0q{SMAq}fQ;}P%Ou1%~3``)tK?Vh>4ZFZX- z+kE`^R9K)~n19{wdKL6274UYdmUqke5Kc8c&vyMcJ{UCHVdi@ES^V;2SpoMq$-J5SWyLVsf_ir`*=H0LLTFv%dTG!pa)4#YraN5?z z?tS-4=U&UZ>-XGl)9NL^vF^KZ*A1e{mHt*ZMVW>NK55%C7|bgoQAX7`U<8NLAX`aQ z%z$qK^%;Cukl6)%KGQ59yB8q3^XSaok7O8z1%{!!K;;K0p$XS=?m@~QqJ&^3$r0qh zytKmgE&SsC;sqWvfw%l8j(Y?l-*V_$E&a(^fnOo|s=V-4uS4SbI4wGbz$2r}j%toy^2=y1C=`AMGdnqw8N=KTDh1mU z!;^5;crC646{l<2k5=ycopx|``>g47TkU~mn@jg?(gR$uWS)IoS?^g}rf)y;gQ-+e z{K6$35wH~Ud(x&>a7lJBZG?q3R16b)L?L03zS!c#IpGCJ?EtUxdegH7T$|T@$8Xy=0JZgYul2y>SHQ}$H=2XCFYhR|yB5A)za-Tw z*9nwKD+X-Rh+b1|xNapV_nY^kVb`hjk&jM2b4kyJaWpa$YBe;Cimnt5%ylxOR=ib! 
zIa*!P#n1wDyX%)^l4&Hni|8SZWA`L|7?Ndz;Ak;5RPab<6N2-3Jca5GNRC=$kUWWo zEZ1H`W{4@opl}$gyRpV(!WyNupwzsyDwsY9n8l2-e1*R)1EW+!-hyN48Y?T8)-Sz~ zsusV6x!wbhbad~*vjc0MC5LOnR3k%aX;vWLn zZ9L5TT3_9+<<^4S_IAw>{h1IJ=U7vguD6?=b*p*fb!txi1s6s^V|%S;-|604xegv~ zw!It3NVC09D@?Ve0~M(Bl`l1V`+3R3`>QVQoO#4rH3vqr+014D;C1 zxpT;WCTN|^K&$QeFy7mZ#MpWp9eX7tYNDDyM)G(J55=$>ZJvQ5A(A=aO=N_XAx|90 z8kQn@+lMbt3}f$tx=d>jNnHhiAgRxPp$3SBW!k!U{bmdM7kt%|A;G8R3M~#o2?#ty zQsq-BWJQJjuJLF{WidB@2Q@^VTpgvO*1+?7orWHHRIdDjMtJer6@U=-aQ}6hj2O;m z#uD~$0}X{dT59o6jH{(SKrciN#1Kt_q;sqskmksagv`f2l6F2BYf^VjE2KvYZqXqi z_uDmN4S<~(^!v6;5cY;4Otwb@4@*g91m{L!-0b(;TM1yfFG$d{CKs6e4*k5*>*1SN zUoLg)(UQOX&O3lTOGlP03-~5-GB0wHYVNZ2;xWEQy+xLLZhq|6i|c8K(8yX!zKfSp zV3Wj*;hlgS5)&YYfQdmK$xQ&BF{1@WaZL|Kpx5+Gq5;?h2G9TwfEWommnfg%0`*0^ z-Eo*8ffr+9WfT)J=a0k4z(m_|NHIV8IE$A7K@xZ|Kl#yaLjP&_5eq3ZC5xsX;|aoR zjBS*uuR-`5#}^IA>_R(#;l2oEP#=stW}Q4c2=;`!2eft~12EATq0O#=#%~4U#2b*B z5UkJ)+Q#?;QnfYGXt7$Nt>H!BsC9FsQXs{6cQ@<=BO#S81B*ezK}*Lvd2%f{2g-Fe z%tz)8^D0ElmhBNKw>=v#-ITal!RVh7!?VmmwNDAbjE-(^2l=y$*JqN|w6oEtPK2K4 zVWI^Kqb%ucWZ*%=-lSHMJ5a29hywf{f(OV9AknLZ*THX5+b{H+G<9|tr?(PNOOWB4>vJF~=JGsJ_j8mA%+bzPT) zI(t~CG#*D6KRXWKmxUt{eQi<4Y~mTki7HXmfl;bLy$sAh7g-$zI3mXuTqn)@lCZ*N zc7fu4_Vpa(^+eq^EXT#k1UAYb>J(j_9$nc;3%yQSqh4oZvktQl1PRM$7)an9m`0+l zF)HzwSWU3@MYYFrq_7tJIZ;HEpZ8^O|7xFgScF>_9lE zma>gBiijy_Uv!gtp-FF7ZBu)IsBV8f-^gTAl>lS1cp}t22Gpa`7Y=LAJ|a9=bpUAVA3R`GtUAz z_cbhiH;|<%#_5W+@)sIha1ut~+L2=7M#0f;4^C;EjIOtBb7SBST-%i6R8NDE><7*8 zFXBbEEieJdFbDS%_p`ez(wwLf3xM=aXmX8Z`KQ$1Jv{jNk1v6`w#`-Z5;z)LQ{%EO z0Oyd<7Y-xl+U?*(q+GYxaJu(x*P)|dTFPIenb-w|xf~8q#>lymHkoB>9ZDEUn@wOH z-j5F(3u!?ZIsXJk%uJE@!vEysXlGuZxKAgG9+^a(gP~6(pK|Oh(rbGC<6Oo<5ZxYV z!D~4V@3a374HMta7*fbqU#pp6slhE9jj+;abb8jHZ6jZ8H0ThM_U?^_dz9vLjFRJ& zoS?*{Kef0L3#)NqbGeC|FZ z)VD|Tf@qz<8KVIG!!0?#_& z6jnejou&`}T`LwMDrC%ptEkfDYxH05T42Bs*Ga3iUXPb#y(O@ zm$UegQ}o?B`mMv*a*VOFFphvUvg!$G+h<_}rA)2Qi#zun0MC7wlCM+p9woHF-wiQL z4UJKFYHRkN(7`yoT-qC6bU!0#AsOTSEfUR0;1Mta&XdJ2OY(kF{XnS=WDs*xR)4G_ z%^?2TAHn!KqkhPHXxVYgfip7|{8yETW%C(y^>gYIj|y(=4Slk@ 
z#PJ!QRPBXUHJq-k2G!GM42Wy1wX#T^Sxh8ZL5h4eIL?yX7qxS(hJ31 z^a6ZCDOx-XTGS`TF|6gxIDXS4_*CpIrqN}df9N#L6gF!kY9ocay^Eg39dH({;$I1A z6vY&WU&Jb*Yy=nghOnq7jWH5A8e_oL9x%pgTQ=OWCXX?;s5SXr7Aq5Klh4<%2WNpn zf&^iMMcRu;vV|?xEZ$TSKg^4lt*_iG)FeQ;V z%lwOQM95z7St3O>}_pBTQ@o9+hO(=w~zxOSw1$z~78SJLrcdrQA(|K5FJc9;X?7NQ ziR|I6A#tx;LlTk^hJy!q3c}Kc)3!NOyGUC)95SVpNSJGNQn+FrBEfeS1Jau#@|i=I zBo$_HqP<|0;afXs1gFLUUG{xZ+cmgChbV_ooEk52Nqi3lvoscIj;i6(`*?czGnDK? z57?S~1Zn}EuwVZTUKY4rI~>luHt>6IP~cS7DS>q<+9KS<GEceAAQu_r z3WHo;kZTKaVL`4c$Rz~_dm#>oU|Z$TT2SS9HQN%JT82t$Vlwj4d_-ZqjpzlsPe<_w zGlOoX;mK^nHEfDWStbptQ4p()1Impr1O(Mc;MEnLyVXY1+D z+!@!5#?97CPljs+^U)9u*48qYFJ~7u42@W`6O&+0WFuh|y)ZP6EQJLk=A$ss1x;S3&aL*@=i=g|t2SEp7 zAVpx5S=rWjP%i56;ap;aD?$H`(dOSL{@*Uz@V{ciKa zl;3PGs3fVMhE@e#FuCKmaM{_%h>U`lQSQx<_khGoN}#d;pTIfvH~J??nvx${<%)=l zFXr-vd=CE%Jd~4AYWbbZ=Yxf;@LJ(cwE0F#w19_2*xw@qB`R8zWEmZ5Qd}O+1d@O- z(V$}1OPCTJh{lm&)L6$*(?=w-`|yl7I!6~ z?sy6!i%<(5Fgl4G9ZjLhg2*be)FavfH4W1ayN3wl9GNj1&sDnawOps~-xz06B*HvY)8@?Uxzpr7BGFy2<;XY#Xt0$ zX0;!WM{;Y(=PqV7a7jTMjH5uK5$@?k92#60yQcLngu+vB>&@*G6bTH<2(FBST^4eQ z-(8vqPqFC6P{PwKG`lg7%#N7zN;l6*YVz9By%|gZCiGBx5PlvmviwwomX?j{lJOjn@UTPv6F8z%3 zR=9rS+|piX%_Ttn!P5wB$cWTeks?AHXJmS((Xp74L?k4T&NVyzm(P7L==m>;P05q| zEEvqr2ZmW3ASf6dkF{*04A#h`WI6LEf;Hh_IjSeb<@i*Q4;&$Ceq@mF`GKJ};=@Og zKR<+ng)gvEb2bViv63m^t7MU6$rMbfd+f)-VWKt_ae`PRr%Qh;sAcyH{jFpp?a94o z-tpj&UoQ=CNdy(y;3BASR0`ANiFlfzkmQPwe8N|bCloJzXF2_S`MXOM zETTdSB@=m6^yO14WJEEJB)B;P{6C@viI67>poP8WlaAWsHnkFIX@&yxq@GBS07cQ7rV|$1I zl4$F2g2P~AC@1ITns!3Dtk!9E5v0}VI9;dX1a{-Y`xuOGdHsaITtcs^fJo*X?nAAN z1MSWoWZ%o#Uhn5#bUfl@w7I! 
zbZglcN%72RvFwPXcs3Dvb7`maEKaa0<%-$#tc@~7EEU(A?#iYY%$k)-rEGriq3zNk zml;g1Hv^`TDXM1%k6I5aV5E#Q@Tf#CM_$0F0Vu|-n88$Blj!VtxeOYLzG$yE)x;0S zF(Sap+K93|36Ic`8k)()IY0bO|o$W?o$9Qj(5ETB-MkDsC|0NNZ ziAg9pQ`pslbhdC9Jd6i7BPXML@Z30}8{erWxIFqLEy;{s#8AT3LTw1b0bzjzp^1g^ zXhRKgEVKl*2t^sQI?)mPOKOtt8PP1uhoh+9jsX%rhMwf{ZUy!X-!b$?)Fpql{9;wl&dX(pih_i&qZ`&M* zN|aZT92M3l;RdI}6J2QP!kvgII%Hw{c$~;OGKQk$uU-5j(GzJ6a@;{D7$=RBsWdqM zbSjdLG!4b}G1CwQpK75cmey{<3FwU7XSkh^oLU9o(*(3;1R!uKpMj^$Ff${}462ipM*T1oXMn$ttwjNAXG8?OHLM zk8d7)jsGaio%9(i=8#CK6}*7LVC_Quz{ZF&2%Z&302|v7IuIKT{wr>Dz1k&CsZF}% zyuhEs@Z9&2abKagNkEeiFcF+hLKh*}#8KnD-!6go-w7tPPHf=yUw2BTy_;|Q78;Q} zV+ksywWW>5etBJkZ_< z3GyfF1lML@c=kE;J#yJ%BLHW=`z4g7D1gn$eJnv}2psk^VE&VqT|`v{3lhQp`mRW7 z=_8HSV0FDk-G7CWuTsJk`hCicBf4!A{2Pr_Kn6sYcj-&*1Vzltr2`;{fY0&iF4b~r z6X61^%KcqR-a}HKiT-leitIz={whglzQkIei;Lsq4!H+Gj3`ImL@D)sNiAF`82Cj;tecbu|H-BS1Rx? WcidRQq4@Fqk-s@oKQevfpZ^bhH8|V= literal 0 HcmV?d00001 diff --git a/python/tornado/test/__pycache__/testing_test.cpython-35.pyc b/python/tornado/test/__pycache__/testing_test.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cab877143c3f07a3961924f828d2df78ae9bdb2d GIT binary patch literal 12773 zcmcIqNst^@T7FquS65fBYHgNem1TRZvNer8wqfkiYDuH85w zs%KRvr|LOnu}4O=^Qu!&^@8fmsQQel&8l`$b!Jt4)_mtwyQDgEsy?SWWmPY$&b+G6 zo3^}aFR0F4FoA5`v&x_wAF zGs-Ed2TJV%>ci@qGpn2uzfrn#Y8UVyLG7G!%2Hca?xMPVR2rT_!@P19q+ub^a10HL z%2|?zrD&ex$~~c+WpyW`^cxnZd0Oo%0A5h*)s-)a*J>_Uv^Tw8d*Hi`R;S<7eo%B; zcUxYo*9{i>y4Cd?TZ3-Xm+yNmf2ZO3w(oY_u3yar*=@HQ6k5G@uh*|y!HnJSH`=}J zYBrd^i^(U%~9v`+c|RzbCzlgKo>0>ausI)ql&~>S_1Y*MnL2 zzT1@kwPuEZ;+Mr!#?!>(eTc$W>b9kBXVhnc&Cl^-g_Wo|I|OEx`p~+G^0N%z?-Z53 zGU{=zJI@9)4M%*0KI>Jh2JkaIFPL$=cUu~Gz%G5%<+^sq4f1#G_P`Bhdu^v7uRm4# zAfWlh$Cux{b;EPDck2h2-?zKBF7^9YY~Q~1Uax7lZ*91p{;hti|IN)otL=QVyQ9C= z*=~Bb{GRUGPH*%VIJ!lQw7T1ZfVKXo!JOy%jS!keZhrxV%B)!PnXEONDdVp`j2faQ z{t<5%@pvp;MH#{VAR~|(h{XfEXO)BhIeE`1C$GR7tj9~lrjQ%3=z6sjETvOexw%8(;h z+q0{6E>lEdWx3H9Ws(Q)@(i!@b{h 
z`(*k?tJ?tm+M9Os&f3{FY3ZHEi{dHiuDiN-C6v0WTK6<$Dd)NEt??Z^H66I8xhKkpvJP2Ut7I)( z^VaapKOvQWd|~@%Pygt@KKR+6T-bh9et&zR zR?Q1R_4DXwXhBp98LO6|&=_%y5PE}6&;4Y8bFd*;CaB)GF;p~@%0COrJUd+gCk~HE zL;+mJljVGWj3U;fzmp)>Zz;6}T`ns8H{}c*(IsqyCeNv`&Bs}XzK}Rb%jrlod{74x zxU53aM)MwFA(3a4U*rLMdzSriK2Y36ii))uZn73(7Y)6H7aXn_i~+wPr;nVj7Hxn? zLBeS0xSnTkyJIq-sn9Ot@i?KfhObTG@y?*_xB6|j;Zk#YBPC6#YjSM#<3tDWcqJ6S zEl34q>(xwcI4grIy>0>wke@IZxG0wTDHK=nczD$C+m8!SbZR^%Q2xE%2se{~`fd!= zi3lB)L3hXQI_>0foNSn7Xud!XMa^|>oJhNo2SVyekHy#a>wdzbzY&2+wH5x&w z(dhJ?0SMm+%8dpgG%flWf`6JtmBq^}MlvjKLe3*-;4VdC%T~EqUMZI}Yf}`;qT(XE z5zEsIDHp0v1o45eJ58wAoPr4zn^Sl(=G2kYo^NWH&aU1_T2(nL$LBKFCd*xmwE(m# zBn!?0g;rDm7#RkzFZS@RQlDpO+QPv44Eh=d79}{RAnW+PEoUzDLUdpQrl; z#=Z~;WDS98Nl({^Y~tU=d-lGku1oCdrD7jHd1lHL?5s8{$93*IaW zD4q|E_OWUa^HorvptG=HQP>dEf54HUCVA(H0W&+mw6$tZzruCD#o|>Kud|rIZb6P) z`T<%6jHV5~tY25`S&T`%Lp!I^R9(45?)@Ivu z#Eb?EibiPP?gF?92XY};EEpIRvTgyBfO#|fhC|X8hP^=AiV&kIO27)W3!|4)H<_1KXGU@KMdCzz$8_4;*nbLl5g7ycxbDa3ZtU93Vs1850)LgW z^Wd@7%;RxcJY&pdDgF5g*7>^hXRKIZ6vZ3`4Mhi|iFM;&7H=2vcrT$yYEM@`tXt1L5%ywgrud&H=hwymUz7ciwy*cFd=KuQZfMsZ=x&<7V+=(6 zyiPO|9gr}Z`(KKIOC>eM#WVhQa)lwMcr^PVIV=sBVdZ4|1pDC~tdvloqDrYng^@pv zz=#-GdW-q+{B=P^MS_x*E#2#c&b90B^qh)0L!3PZ#Z-Pb8b4t{OyzY=Anp^gkd;Sf z!i)Q}avBxIaq&LSZO^ly>8-!VVk&xGLu(CBWX+dWR?BmmwJ9A;8eU|#hg-10^1AEa z=x-p{aHV&z%L6}VEt)WVsM0++*a{H`_~tNV5R=}80dA>lp${%99bJskj))j-n@rps zx$iJrVA#OU+0BXhj(Ty;Vd}ud--Sg3^8{6}O|~%sYOxmV_8eVVvDb@W?pgaFxIVTe zrpH)PgpoiCBQWqmDji`MuYy>7JP!iUH~JO$li(?XjkQwT_;weP*z(^3H7f||*qCS2 z*hsyt$|h^mi(#60Yzo6?Dm<|yuG7;F=Im})yq<{(G1z0^>utrO)AQbIAzB3MIW^}f zCX`|W(v3fl43a#G$B2d)c~}$C37I8O0cZR0d}<-GhY@fft1;*!dT;B71I8ICfz*JQ zm#C)5mInwA!#*JBZDR$K>NF%WwIGjaZ!pVJpsUz*Le<3z_b$Rvo>3a2D&E5+zsK1^ zjddlp@MA-|CQCmiyl}k?rvT9R0ZR<8rj#atrGxR~1hChh5ZGv4hW5sMGvY;aJy{+Q zr9lbiQ%nCYf#R`IwpOy5p4(&=I)#?~S;#RS@!8b$k3xooXgwbKMOi-#^EBB108hjw zm!1SZ;r$8cnRo@^Sv!XAxy1HcitOT=9Q5gCd1Dg$fWR!EP_qSus!qFugh@c5**dR0`zi4!< zzl(-lJVFF)p5K%C?C;RBKcDSYRc;~>g);={Jl4Qsi<8iJk8;rUqKY%mNR7wI=6XrR 
z`DLWYkyCb76mWz@xr2oFs=UuDcR@J^m2-%nTx(Hk4lCzKRI@~7x?VjR%v^KR=P7j{ zbuu5Xf&wQgLIODBaJ2EvL;wiZ8@?M*#6~547wPM~oC*2{MrbliP~2*DTiy=LbQ8UY zew=MIQq0V4ndv@=U{EStD`2&qYa34ll>=j$aC zi`mcvSKq{7{UHkhmwG4$Qh&ty5m&JB&+vrcEm^~*2)szY?NJDq*`MZ8h$y@mL~URh zE*`FyU_+4s6KY_z;-{QJ&Wy}(d9)rS1g(1(Y_j_^C_pSAgD8=pBZ!aaoS_y@%Ls_P zE}$>sV77NB5q#bUG80{}x%i)RtsDxKS+#~MDd40-3?1Q*qf4LQ1JD5&m|w_TK@&TY z@CJ;q(U_7pTowjNjsK31o86a?l9uf-E8S#Pnj6G^3Jj#0s1xljldGN)-h?o5x5b*~ zp|Jf&f5_k8T!>*Kn{yL%$2^9p$1oS7P`C&%AIAJ)oHJ81uBt(zu4C+2;%YK=MUVa! zmka57Duucza~7wzsSh8P)*gOm+wHEAj1tgV3sc-{iEQW@r108XaSN9@U9G%Sd8N7m zZ@Wb3CAl(#x1hXX(ymvT!4bbBnDsTnOU$|lB^aU|TXOP-i^Ig}SxzDn6N^ny7IhwO z>MyzL6%;CStYA{&OW6XF<13lr>Le`0WFo34^CD3U?e4=uBmRL|2>La^Lgayh;Q^sq zYT}swYnYe>7vTXj8g2_~jMXoko8p3)r=rB=bC^X0%kaeW_}nM8EbQ(ukxRtXl5$CCl=`=kT$12VNEew>{rtjLqf1~y4(%SV{TONZC(Jma z(v=k2!YWDIfvPo*IwDEW(_$Hqb4O*HY>)&>Y1ci zO<2+&lo)3ZJUocS8?}kkdSVuC$Gmo>86m_zgO?c%DUSeK(Al9eJ zj6#{>#FyxZ1WY33oFCm;h;;oug2p4+C}3z3S`0YfO;VM$ChsK14LK)U3%#tN2sw^=$r6!a=@MNRJO+Pa#J-nfOeDeaFZe&mnM4 z)Yw~?cbsy@2O6LIOTr;~+gi!vD%pvBAxY7N&+lGSkP*x*|4nEja)EJ9J_Mf01bY#)fMEM2V?nCjC@ht& z4eN0*?&5ZM$2YDKyFAW-PD*}?kr;Vwm2(yPN%3$)ysV@(^+n?15{k&Uxyp*MbINdX z*Y;a?UCEtFj`4rdGqgaMV;tk;@!l!K#D2sr%rOlypPrOU7)VryxWS*~-PWh&ZYyH7 zbR=%lkq9}yphovx^R>j)xEPhE!-M`Dj_lF>w}<1^=AE<(2(sq2UAKQaF<7wrFbjX9 zIx~j$k-5J2IG99=6T+wC$c=29JRBhtV9AmsS!)6JRl&7mw&)lkqtVsn>4815QJGcN zzsjP);@4ShvT#^zv$)Mdatn9()@BiN6I?0r$0eA015H1{;~hbPP+(5}<;$z(Qn_57 zFVB`|S4!g2^LA3l#*N`d^(5~nZ4Ho|!u=%8Ge+~Y($p6^kD@*kUi~(zi05p5F05L^ z9eTNrRbPy1h)smUO}tC?NY<<`C)?M+?QIh!lZ`Qe6_jw1(s%Fs?bfDVM1}q}7H_d> zA#Cc{UEaTwTd8vWNndB5*I2yEf*25t2RUwUgeUVRL4@L2a~tbCx9QQhJC!*ElH!^d WbNZ6kKe%w{m7}j8J#lDIw*C+HpXew6 literal 0 HcmV?d00001 diff --git a/python/tornado/test/__pycache__/twisted_test.cpython-35.pyc b/python/tornado/test/__pycache__/twisted_test.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..98e9af26f9027744f1b6bbe2911426069bdff162 GIT binary patch literal 28137 
zcmd6QdypK*d0)@I?zQ*89S#oy1eQB(MgC~Ip5O)B`5vax9&fH;v+1+E$ zE`VE$kttA?TxCm%rNntUPHftd%TAn%<;1BZcH$^5mt+57J5Dr7Y$s01#YB}5OsUw{3*`s)X_jgOE1@_S!7_n9k7{hmsEo<#f*zW!uR zDMu+Ap{1OZ@+?(NDLbXAX=SHXHKXi|s%Dj)RTfL6m6ua?PD&#_r0k(6KCJBFC_bX> zktjZ@?9nJbrtGmOo>z80ijOOMTy1_ndYn-9gv2w-n^e^)WlyQ<7G-Zy)oEo<8&I;! zn^D!R%HFD~v&x<|sX66sQ`POt-fo_Ul($1wcPe|Qc^+2Y9jdxZ*}GJAx3YJe)Dh*~ zsj7RFy$8<#c&|EN+o3XUL0uVD`Vk8s2e9u|uhlH|>gJ*Vmsj3hs(QDw?^e}&lzoq> zYFv5us_K2pzRx^QDDQq%-KXq*%5q25A6E9m=6PCqkErSaWgjrlGs-)ts*fuB zQS-c2d5@{;AN$@_?M9+JF=5_wA|Z@+TpC2u~FcM5ls+S+k72?tj}w(AoF|m?q?CQKjW0A$%&f>8$5H4h|xa&JpD-sP(jRjw9)nq>_^ozAWLAgr_7tE#WK5Iis9q<*X>@ta8pN=e$~n9D0?BZ0|4D_|<@4%>Qb%9^A$0-d_Nua9Q=3!DeNnluO80HB+Ad)=_v30U zCH%iX3y!!jm0Kb>msgd0QC%)Gjxn&__*Uk;yHj0u)Mb}x*5+1`2A@#gnyRiV zdtF)QYtykr_!R>9T5UqT>aMHHmxu{gL*n|w>0H4cBiJ*6O;@xa1ZZ*D8L)b@uy>P0uZ8w^VM__5A3&R4Q|s zV6wF8*S%)LEmo=mU-5%6?KYaacCO?#-MLgSxp>AcIY(W;tScLUAedS_ zuPY6=oib+y!>4tgk-D?Vde69iy{XGC;+bW%`fi%N$3wHR_7r~8cm+R`hyBf3`K6VW z(+ge&l^|)9Ps^^p>S{dY;$|m&w^1y6b>A(z*WEJm^2@I0mK$-2sntr&DK=|O-*t*a zEy@fx!{I@E+#tMIU%iai*wr)c8%+$|OQo9QxjGm-ys_a`$|WMdbw@akg=*btVjyev zMqxv{KA?Ozh0eS?grN0ERH{-d*Q*Sf0GV47rX);LDja=%r+U+*X4EG|0DV$1^4uFx!BA<9tU>~a z`ASadx@zQ9V@O>dCb2d~$aOvvVP2$I=F=7nFVCfy?4hs+t+}Wxer3H@@(Py>R{TQA z=s;dkZOnU`%w7G64eG#FWT8)2c0b~kh|TJinn zYS}CKzTS!c+Ee9vZLPB21T|w|nLL5yVC2~Kvb(`CAZ_`jt049?YWlL$4O^QZNLrD6TpF%3MZ2hWCQ3$SFa) zeiR9FmVN?1-B1oU+>$<8zgB}5ibiY3YVGI-Gb*+;mqpi$wNlkB7K71Z(TFC*^TpyD z&5~zwH0Lb(na&2uI+`S-49 zm)zQ!W~~NkNUG_;c1$tn&<>DHf`}vXcsWH{A<8qNs2M|2mIP(-As3F)4QTQ9v&cs? zKB=YSb3?&&@md=W>P!fgoy8J_`XyHVN1&_`{LNV7)|A!S*B@*1-Ea3tBgZ#JBe4NllO)(M38+M? 
zLcOV_mAR~b5${Q#^m zO8@hz{4jWnY5jTYeTv?#VJAqTCt(rl4sK#k)A%m1`L7^|Q5KT{#vJh?gu0MK(FkHt zh!_swFP6+vCqYp$J|rOuh!Fveg~CXw zVw&KnH58`D*Z<_p|NGZpUOoNdTpnW0V^5PzIRvFsuQ`5@Dc0)OAPyUqs#|Y1f)R47 zAST!nNBO3Z?U-TEW0aD?ButZ|u2@_*8P29j=^0BF$!JO%^}Ndja4Qv)@F{Eknt(Dl|caJ7}HEC;~RfS z#E0cgz@0u)>2h&Mbq~7pa{uX>^bYHur zaj7(8_%NH~L&K*N$^7AD{$vL@Nj}XiMSObb#WurE1>=Tc4-oaxu;JrD^Ozvaj{*7i zNW{Ab^vie-rpwWQu{_!KK)cMrKG_F_XZ2~kEG!IZz$#qlKpZ=$X}-ghru}87o$>(& z2NM)Zp4KGYFX8L&K|n^1Sefj6SIE#)Rsnx8gLG5aZSnP9R38&u$i)fkScD*Taz>v; z$ppq;!{Y!hK98(-@D1(#k+#z90kIduf2QJxW?TYh_{wi8eqKfXmkGGB(TVfeUbr>r z2m2^#5kbU6x!O=e|ajXQuxu2K#Fl$(z8xM9S1n+TOudZCun09w)c}OeK z@&wPnA3+2EL|#FI!qS60u^MTym5t;|%&st%Zf?MJ^l`in(v5rZHW>l`Ai3B9%#lTu zvQG&Ohxu_9`Z)wTbGcrr=?i$-^oY})7-D^q>FqIRCXJ#H)^V$K_h49~*;WT<5Yz?M z@CgJ4T*JM9mXldy!-SJtU|lRFs9qL|lm!f`j?&Bx zh72%w`#DtU4@PUQ4T)Kep9PpYjWyHtW!PyV!LlUtk(QZ`gYs1tqyKvZy;2{ znYDBcgl_tULCKf_ILsbY87v?e0RJ?N=1yw)r8e{s9uEzMzW3F+k#5_w?eY+Wa^ww6 zoMyN60HC|b1d(8rg#Q3vKaU`vn$ApT^0|Bl`hYp)qxh2!WBzeOk`9s2_UHqr(`(jO z6X31oxshNd(dWfb>h{nan|{Bt-pIu|-ZZ|TSYU|!2!|-bMO#b~iND6?G6+PZCv{Q; zVV5u`d@MhSum3cH&w)@xEg6NxM+ivF@E8-$?m`*~Oc*`A)PZzryBXFx9@lDAgz=TO z*5esHb z3%cw)`sGFa)I>@L-rBTKMb>YzUN)et)&qS(`*W8o;#Q+viTd<4!W`4@z5Sq$LgW}30%<=;hLFepEt?!hKp6u0k;d{%m;8ulwxpt@{4kc}+hR%Hj^r3*LptNi znGkytM&=}>%T$m<-yK(%$)No3H>Q?tc;M7i9IjZ2U<3*+bj7EFH2m4H`$#R+G!cEL zkUAVCw#BN*Q@W-rSZym7e+QL^V}QvB*9-v1e3aWkzT|NsUk(|Nu{@Vr(oMuWm`BJm zrp;gkE0w3UyH>gWdu%DB(5bMUIJ194TkuNECpQx6ETylqU5c;)Ji)B5;~TY@4_g#( zfB)7fx{|GhqKhpBxwVSt*6ITq!&~azL}TM&W9Fr@bQ;mXO zHw2-xLCL)GInW3*Et~VL95kPIIiLyV&Hz6ua!|y{>ycN~ke<)={{*-MHcTi(fVGG}0 zAwiCeTFi$nelQ^+gfyUUq_K3OrSY((4=Ew~@ug64F45L()T<75{dO?pkW-hoMiQ-0 zgsq2JOTF`fe(Wfm_-P<5L5OWmd*f@$y;4k}f@vS!;hW_Q2lyI+gxj2-3c{PlsIn>xhe zEf~}Z@ZB4=5I&fBE_Tg?$&7_*n9yUzD)RaPId}zbemdHajy)5yq!y;b{Gt(+Sqf5m zRik?zQKUgEd3egWUx`i)r4E$!ROqXQ{QlFu)a19FgO_5U6u~4YY+n$ zS+q^FtZ9}goI->XwrQ+9ja#GmgG1m>;E|godeGT_+k@cn0O|o|S;uhX}g>=UibB;61K6!W@15FW~_uB&L}~6}e?) 
zbiiy_(hS*8{Ig7vtvH;M$}$TksC-d2s4x-qf#ElaIhoVSZYpMBjTAFh#*j23d9t0Q zUlLe-d2Xon>Cr;?S*&4`V8t(R5+d6LuW?hJF31n|TGa~suJ0=}>V-b@0(gfrkp^OK$m!RCATv{_wg}aIrdSHX@>l`S_)km}y9qM4HU4>5F4mjNvFm zj^1&XSU6*k{F~7f{awEP2Mn&yW$b)nT@`DxgseR;$7oaehhyKS+h9WA#wa_BwT))u zBOHGt1>mE50qK3oR<>#JC6RcTn}?J1&dNHvNv$v4yay4}O^vP55s{FRO=sIKh~xS% z5$04d=3lBeJ=0i_mGK$ap%KYGRV%v>voD60xr)?!`u+AMu8Bq_N!m<@$WHKnYjht@ z{I{4+zCx0`patu}NXan(Yl#wB*hXnqzw{UR(qCaf2@;Kp2N|VVgziMBwqAb?5n3pu;M85ewyzOa!ej`OO^B-6dRvCXXab65<|(|z z88smj>C4VI)PSj_*38RI2sv(?U3Cle8o18E4?k^jJN{@30LyP@+jND*x zuw;xwJy03|w)jJ+1>!x37>H>MLGAFNQ_zWj`>&#$(P850JHf{!@3cv9nA@Vr2#YcG zrJ>0pe`62z3A^+HAJx)>=w&o5eo(_KkYGSGksMgpfy%#(>=2_y%*K#u+NiG(DEzh^ zptlQ9<%>sMVPCG=ls*_RP?$fLi-)Fe&~ zM|a@oFu^zCXgWQeolWmb&sY#g>A~FH@qQ0BKa@23+l2Tg(kM^K5)JJqVshXLY2=RV z!G7|UUf+Tkh+;L0WhLV9ZxRXt<%c5-Mrgdq1fo&Q%}GK)#}L-KGt;Bf+5Ap;8Sa$4 z*7j)6_ceDlxx>AT^6@8)*dcuVe}Q1obmz~|d5ObWZ-aUu<+dniTH;tOpHa?MiQ_2T zta7$V9A3}u%Gn`toP*n`oI4~wsa%{2+ohb{d<3iDK%8?Y7-ml}y%6rxuXvTlCRx_+ zPan^sw=jioh|cQT#Y`@Y?P0U?o{JpDp^BCM(2qJ~wlt6T+0i@{ZfFkjRS4u_xrsv^ zwT9W=Jfx%D&HdH_W&Tj%s15@{v$YwbW=C(ox>joNir9UmUH=6Hq2e_nkPop43?HDp zjCsVJ&t|uiF@B|k=_UOXYFUB*{2lz-<2dUOI_D01p8gR@D|^`E9CuIV1Ds>%zl)&1 zI+vZx^>+U_qXF;i>eIiT`9KD>#uMLjcsOA?03v`%%h62BDGH;ig)+ijS<-b7G3Rrl zvu~uVH}M#ave_kB497Otxl%<{bqHVoZUkK%f-NFw8Z`q3ZVt(S3bU;j`t(NDe~ZzX z1neiq5tz1Kd&%`Y9Lh1rYF5f@k{|wZU`0W&`{=MyzVbq04LeDqBT07|2@yJiL1l9K zrAuK1qaZ|BjhyXoaulQ`tF`#yAcp5Ss-UnN}Z zY~ISH^xxx4#DP76<5R9)E0x{W$bo}T&Od^`10YXs&(zfXhEPX5p3thTES$E7urU7; zeBGFe^DI_A!oD0I${6-z{Wxa8X&QGcCEycXwYA^4H3^~N&u!UBY$9vzyG{JX!iaPn z)3V0HKw2i*T0DZs(|t*U)3Rutife}!qX?}VNR(MF1ltAd^_6|UIEFz=htDG;yL=-f zZg>El?*=$sbPggXg@f98GA`44esF)yQ5-mR`cQay7AwXA$CVT)^i4~Xz38WbDCJDA7GM5kqy5$*0(b zJStGuw)C_$34bAW3*LmOI^OQUAV*wu;3J3_a^l=gW^Li4nTzs4HY`A<-iTNNCik#^ zso8LFZU@@}r2m{41S2%w%^@WZ&qmZtu2r6A0T5#rP>W7seLe~D zq}AFp0OZiDDW_2_Ka_5i;(Qig`npozD@NDnVqcE{=}2 zZGd>?owew{$N*I-kl}X5e*^(M?|eu0%i&0`TeG3(vF)sWE#$%=-PB%?!|>H`sjDR`lC=2QXhG>@pX_X~@acEM{Uu);4PbUfWysBOdo5G!TzFwJXP6 
z&bEFIW|;6=XnMhLLb|9oi~M!g$_acZvhct0eX1K7nbVmMqbH>*t?kg4kqYcST9;{XPB0RWs6+MG&t8perNMBu7jmT$oN>!;1&4%xA_4A^4}`|~WOe;gY6^dmB`{kk;}M4-u06$Z z#ZX=uQ8*8r$mBM>2pkTP*2gxFnIjR-8NdlML$oOZML=Uv~xY6l#I6ZMC1bnUB z)(bakf!aM7Mn1Mlt<`z%5~4!MYxaaLReU!-`5p|x{#)lh;ViuUJm+i=(Q68~ZHGP< zr2PzP`B@^3ULFP0XdL@U(h$k%0m`L6@*<-qP*_floms3T1iBGaBCkw=K6N7+gZ0jVvvWQ^d5G#WG=YJ)~Q2Nr_TT@*7Ma#XSx#ieQ7-dGm^<3^x`~ zN`_jv8yA&iR&f(Zf)?6yA8YNGIj&;MV`{w98ix&m@tM(xA1t(i-W z#>O)bKO8$hp1IFy%_JwshCS|r$@r6KQ2zphpJ4E_3{Ej1Dk23YvRcMY0zu2Y^j5E! zik4*_w_+w+<_;HgA4@uo;PTe* z_TcNO_l>Wxs{RP0Y5G3t>wOcKA%nZ1+5QpH^D7K~nn6rlx27voM@UzO$dYvR^&UJ8 zq^n!h^c{$%n5b@sCPPmHI~NkcH+t|ike+T;)prb{e(U|w(|%#Re!YvdM3S(+86zwe z3amqe81QzhT$-Ei_6W9D@kIOzn+S8r(EkEEC|o05{r7zQ4-CG>AV3g|`d2C&i)*KG zm6ZOEOl=G2SVHT+gAB}rmA@Mi>?h9QZxnx7{Eg@D&u^6!q)N#$3dJAlq(k_=-TC4p(B8y#Dxzp8FbNw3s5kzy(A*Rwv+Ix+ zktVp$BVxvMSF}~E?V;+%dz(~=i4jy{u%EdJ_QUy468o@XV#Mp;MNQFuv@@Jd`N0<3 z)%By{^?JuRk<$MKrEYT+!Ul&4ov~!5v(vn!XYe?Luk@~P>Hms$^?zf~k755aGT!58 zXAl9yj%lW}o(Wwn^Rlj0a~m?+X2@Ywy7Q;xD!Z5Km|!o~VCZYH&B=SMCvWv`!Yj=e z{3ER`*k%_6*ohu4s@piBOx*3&ns8rNU}%uop(Kf3HTIx64ZKvQs% z`;Fr`{L;RQu=T`E>fF$E;X7Pb$ls0%C#1qMu18yUPb?m_lh0@Iyd%s<-@rYIqb04! zZqj0-yfKgEL8Bi!+MAU6R^_Zr!@b^U9lUilr?DW?QHfa4MY*9ax`1)<^wlR0I}XHz zzg)&9V1Rz7G{x1F6K0X}RE?#g?7fn07E*4fz-iaj9W}I`x;5x+oFzNZI&h0OWy{Yp z*O##+Qr^7j02AGZSJBbV<`!_nT+@%q7GF)iVVl7(8)96kpRClH*AK7y4UKug60UZ`eUW9? 
zzNaCKA`8{0Y6A)xZo-w6PA50nV)|f#wL4=)SJu~Yb>is~F3AMxIn6TW{b6HHd{-zg zN20&Zb&iM&>&Pa6v1QA0v2?Xwaj;g;-TR%5`ZuMg7#F;&5pt@U#K}#0F@Cd~jbd#F z!6|znMB7nW zvgklBjJOqx67p8fGTw?mtcM>T7ESl+0Gv>~L&WS(NB!dUf$s^c1MS3UIRhs~+KJI3 z2Cf(nmo;#w_=p@V8H!S{g=$Pv!b?W-t$niFNN6j%jI*s!!)ZK6rgb0Yq`WY5egi8E z@MBizBR(uLzk905Az(je*w14KU~m}EDmra;!x}S=k6FpZvJ1+Xg&Fv{;Mu}6OgwYl zSiBK<#2l5#VTHYX&A~+o2`F(dNmDx&9pEO#>IS@2ZZK56;_xAu?JUSNmdutf;l~_S zpfMhj2lsm?D%KZ~)i%py%T+zL12U8FM#2u*f1Ai0un$XcMHqmqcJIXk<|Qcb_Ygs1 zj&b)D*QxMKk6`v(Dtx;#^JIPqLXZyTA{BW8v@^9@qux4(EGtU(%N;^FJeB z<)YOj^@}`@w;N`IiDoU?kX*WI_GuqL?fRPtA`i+tj9dC!OmFw_5Apa#_RvuJR1O;q zpSayVb~fH~_7HcLY2(xL%~3C5PEv~|Vl~?~nkl3!!Uhm8GM$q^p*D}uary>fKUdlXjy0+45ctTL}_@N^MwKqYxwmlj$)u?9{xf-I1AR* zI{?YMDuJsI2XR$lW7jU#ap^eL8OvAtoYhEU^f-!KnetZY-0rV0ufaROjaA)>*i-;ae*mg~_)SpWP{)P3A+h!U$tz!=&V0t z<|uEBsFb`)%hmr52~tTkKc_5~MaYOd+6??2vi=zfdKeLvnTGJiwN!ii(G+F?Q4_~w zkwm0KU%BKBNP!C~?64SmluhXE^#g(!S^GWu`aKl+L;#-65kW3@14ptkn40;G`=OUm z0A~Y})rbhg-4;)w;ki(269YzU5nuKc)`dYWxO*LRjhVO~9iT9eW*PsF`F?nyi*-T0 zOW|G=J&oM25=Enk;O4Fr^*MCY9et71c|0Ajwb64w9zvFmmPWB=X=FGEhIooBk}k`a z)rggI7b5WsHvkPE%#ENTTc)F-O%X<3Cil*Xp!^L~`|oAEHo(|^LE zxD|Y@iT?)zH-u|crZlNs6F)l3;FlPDhQT)&+=BqO>u>t{_xT8|tn{xjcpHJeEwcGz z|C1x`U209^c_Xa$uAX9Tv8U;V)FeTBXHPLWcI1DbFk1^IqWle&%NJzk}u|Y@GaU+AB-)^f3_h$K=`zb&Y8!lm-({U04EzBw(!NR z$z1qjx%aMnxs8U8jFgu@3jAQfyuIVakl2c$7N5Nb&RF@iWv(# zXA<;(V)1zfwB_t!&Nk@?l3VKKI&HZ)SnljJS3lNKF36ex)x(@kdyB6_%qUsk5MQc2 z9uBLV4D&VT_4bIEzq~cy9$EFEK6x-eY{g*WFs@asz(#Nl&6uM6uUTZ*PJnx;@&B^m z=0%!Os2otjZEya2i(tsuRJaMOxq&@pu02CDBI?Nk|E~);R3^bpB`gY@S`EjN!T|&l z5+`>6Ve`-bmBdxrX0Xw?CupTEQbD9mkfC5`OB53m6L9!B^WOu!FpV1gjD@w0)ZHoU z?MnWo(sd>(k2BPGw_5mQ~2GIVwvv0Z7CdcPT}6`G**_fJIAK1 z{B-!;jbDDZY}*E5mfkkQ5YOk*)}7Y$53tth?Kw=G(pEZ^Lo4{E5zpju5=RREa@hP6 y{-se?N<=x#&8H@ZhOmu<_03qrfOIl_M=JkV-nt_f7xA literal 0 HcmV?d00001 diff --git a/python/tornado/test/__pycache__/util.cpython-35.pyc b/python/tornado/test/__pycache__/util.cpython-35.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..cdf5c02db73ff17687240121cfce867c3a5609f1 GIT binary patch literal 2308 zcmZ8iOK;mo5FS$0(~@O7vExS)Y|={vG;ZSb-J)pX8iCP9Q5`1*2mv%juI!b`C9%8o zL$FWovHc1C0ll}!qUWA^=d~yQg`7IO5*@dsxtg7CX7(}PjF(!i#$RuLfAjYYz&~*9 zaZvsmKlzIV5aOQ&5kPE#Ye8&-YeQTDw*;{Rt^<~~upuggTQ=4xSHP_lauwWaA=kjI z6>=Tix{*r|H6We=cLpq4gHaR0H8=ziKNY4eaA$$fnQ6sX&uWcDXJOO^T80r~RX8j` z`UmI`xOfc?Z3yddfg7HOiDlR@#tU$W1~Ue_Q9#XWP|H9!3uyKl)HcxK6rxoaEkQG! zGum5)c5;Z@`VirQftLRZ-7(Nj1FaNL1xBlIfi8>sB;k@l*C4zF;WGRP;NFIC1;SM$ z-_dves&n^J=dI)9ZGRw>C{xs9aheE~*Ftv0BujWcn+nF2H_UjT%>0-cCgzm_-p0bcmE7K8iuYIW+{5>x~A% zXA*)O`B6p*ljK1VQJ-h2VcPp9V&@ACM#&T)1qF?JoJmCn2x4>^5|v;UC^hzGGpImR zP|eA8<8g`Y2*<8=tH{!uN5fA}XlKXCzevJjNjsF0V)wsLHF;4d)fjjTE^XM2V!G zvfGkW98uv-4IRCj+X^Cv^te)L?|f#e^utj63St=-d;9(7=l$0hLH2u_FMQrdW`5%< zzyF=S#{Mpi(|*d*hl7ko;X{5PHsbw2_EjRdA13d9u`K$TV$phfnzzg~Y?k z4hLwot(MiaZdeOe17ERzfU-bysHEA0 zj6r%J4=zeb4+nW*jsM&_p#gRcvbQJq_6Xxhh<$cMxz4wuCs>U<#3X34nABhuWf4}C zA5)q70iALIZ^Wc9q(iLHt3*qJO<~#tFm~z-#eyvI1xZcCn6zRpbf1z_-X$3pBxWA# zP5CE;3Xuq@ca}sUlC!X$Py#!j6S6?%4#k7yHNE9y;iq{eq#@=L(>-elCcOP98TgUJ zOl!gk&+uO6mUQPPkau+`w{5_83X}TDu!2{e*Y#?i?rqQX_1AEKdfTd4E0!QAcWj}1 zr_cb(O)cx8kXu^5n>ReH6}(#Nc|tc_p_40gR)tP)ZYMIY6|GgwYgI{}N^u7zw>{<5 zy50toXHpBz>HT<&4->aubnXpN*FF__$kOd$?&zapA(&Y=;jgiW=T*v&m`J$W)cTiv zchaWYlNs`S+JKE>d)Q67ljfV(4t(k9hCA`p&~~qwJ)=kg`sB$3o6bJU7bgVJ|E;cd zNq1H+hFx^pL=EN5jb6)uE<<6`Q2R1Y!Yra+>Bb-*;c(w>V#S!(o^#rE+nKZPS#@W@ Us@wRP-=)%;wPr0^^-}ZZzfcx+2mk;8 literal 0 HcmV?d00001 diff --git a/python/tornado/test/__pycache__/util_test.cpython-35.pyc b/python/tornado/test/__pycache__/util_test.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..61cf41666f61522536862e5df85da5eab3fbc595 GIT binary patch literal 9070 zcmbta-*X#R6~3#LWLcJfJ8qmGt?N>PXlvR~pzW04l4deyid$l*X@rMG)~+2TE6Lnl zC9$UEfn*8{GrR*2ymh7nJn&FnV1^kw%)l#;`^r=P0ABjU_no`DTFJ8GX%+9)>i*hu zzVn@P&%K+)$;tA+Kl|j}r87$XS7kmG+^^vZ{$(lU;?GjPQY}l>EY-@XT28fWRkKwq 
zuWET^u}4n%1yw6ZZ`>DEt(e@8sai?yZRL-t*0`#Tn=yIimsM**)h1M{qG}b@npCw( z)tXYZDb<=*wQ1FwQMDP>npL$~Gp3;YIn|n1wRvS7EXcG)6a zowCyZQI;jLp~DkNhnsCny?50eQ*KE;gxn?N&8yp|)dMKBq^`Q-$}LMnS$P$yJ%ic_ zMb=%D_^a8ox3?bFt`r8dEkbH8uS;G~aL*XfM6GQ|z!B*%C zMbdDDX8~z9D*k;;W^fgn{+YEnNvT_srN`>{MxJ*HwF-)4*Ueq zqR1lhb4yXH1R7`sXi&3RwR>yB(5w=5C%o5j>Uxjn_4Y}H)z|$_qwWWnR=4hTU{HW* z8XC!C`ORydB77AEa04_@2s+#b015zDR4s=(2O6RTmXyQVfRzeCcOMKXIUr3lbezbl zN7hc&j`D%$?`qDg7f@7l(e#iI(YWI@{CW^L&d2JrcQ3!TeLe7Wu>H>E@7LSgZ|v{C zSr6;mZ&8EoEw8n|z2Dq_VW->l-51)o^u^X*BiIf*x?OiW{cqr~-3^<*Lqt~h5A+-! z{sgWdk3w1VR&V`_qPLnT3;m&ZyWaAkc-e7U9k=WAzT!AP>DGPIA|hxKwrc4`^F@jD z5(`%F3g{aZ)8P@B{)iR5^KR2^UkI1?yl~mOzYjAr%^PPxG@mE2(DO(Uc?xT2!$I2E!<#^*lszAMbX%PCt#3F9D$&iDAgw+rH{&t zXU)3b?0GNYcN{*(t=?iEx|de_oeop`c~0G*wGRw+c#(5bbhtc!e=%6HoVfTo;h}eH z!Z84i)b^DS!&0$t;Tnm_#m|Sy3z+YloHNFRzXvd3{Te&KpogfENgc4vKV;mai@Qa+ z$4!zuGQ&;M#TqJ;B$3%}k{Tk_MMAh)orz|$p@C)}a8F7k#N#4|@L-lpvJQ)O%sN{* zUz>M=`d!aSy;AbD0_$9OutelRjIpkI;r0D*Q(-;_X~yEPFsssKv`93Y5ef1zxj`Z` zB-5g!p;1QEFZu~Fv(z)7mY!VBQY8tQA|b{!V#mf0gM(PfT2@KFp9<-aSq@K zj|^r4mvlW5T(hFMnaJeGH}RN&Igmo?HGta6XwabutZ95Gk88_JSQ zwlvraB_5gr82g{uS;0@?ZC2u>M-DPBNes|vr2@9FRMvU2RMQm4QFSJZJu-y=d5H>`TyXbbpr0dcLD_N+3Bl9H z8h|rA4u3H82>gfXcq06d9CQ-=Vc5svPaZ!ipwO@Ju83j)e}+$E1i@cue{%fI4kSxo z=4(M0lOAV<3&&c)>@zvfMzuVY(`x2onn|97G0ikYzrx~m7T;mf4`*^GY0xSf*h4~D z#j4o&H-UeZ%8X`P16TUt61axzBqtwp*W1lT$MxQ7hMunbTf>P*>cPvHklrLqsUZBq8QfBG@fJ24B*rpHknvDhz&AK*xbqBvgAsPB6L4j|Hn|H5H0 z%{j!-m=U|a@ABEBFkM5#hsiUL8uRx6j&yrg2272zI|l6W5LLVa*D!BP-7b;GcM$3& zy#OExH4hO-u>C`Rmq&hAh{{)R+O^&$z`Zl=&N4OTN!apkH|%O}*&J{Ux-WTunF6e% zNMmK1s$fM$Ahc)E3#Hl1OxyXU~2ek zvLnrJG&qnSe)Fhd6PGB3NqE#C+T)1Hx*u5wQ3=Oj{!YDdC$f4OT*QW@@VHW0=YBap)+k7xwYM3qWMt2A$9hk4}5Qy)aji7{iprSXsM*v~jAj z4J|1@v;D|!d-oDu(^{z6zc1>m=qL$-zQ!SwAg0stT}QmgIMrB;P`pcy@?2D+;+=c^ z9zrTIX>X^+V{!NQm@=S`DyxV*;wkH_)jKx~oCMu?z-auq5rEDo!p~qa0I!f8j3mOq z4}U^d?w0dvFCT}rqfrDL`c;UaNmm?tr116Zq^VjkK*};hAoUiD>l_fDKfcR~8!Y;( zfKYi-0Kg#)4qrO$kI-WnvhyP$OGXS}MoJ6k9^wZ3euJ4=3rHB~j1-iKr;TI!0hm&b 
zfrTba4>V)2B&RWon5Zl)l(5gKah{M&oyBq@X5nZBfw}>O>}yw`TvyDDoJSNdEx^Qj@*d83GMxn2UY>PC^r!<`4C|(U( z#k~59t@6)QnSAdtRv8#KBG~RwiUtcNPMSe!q;J>J@K;<(u;p&l)~u~Q!*`P=t#=$v z-`BLbCThCCVwuH{P}GKuYe1lL)hdbE`Fd4eE^6)%3|35QG?!b=cqm+`%_alLUU)1< zaXWwmvgH2;nBB{Sq3so|J37iWJGBxc#bs^+W;ofiEYfpX*;>oKQ1X6RP9%_tuA`ud zO0Sz2vzMs4r0FXbmZMw+r#LTTb7NZzI8-}jFP&N}FBTVzOW#{6EKMz)TPpq^%?S$R literal 0 HcmV?d00001 diff --git a/python/tornado/test/__pycache__/web_test.cpython-35.pyc b/python/tornado/test/__pycache__/web_test.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6bfd740cfb130dde3844c8b4df270575977b6f10 GIT binary patch literal 130569 zcmd?S33yz`btYPkZtMU75+t}&O^B2TN(8uxlth381umi%P!_4hEy}G%-wUwGZZvdn zLnN=II5FvL-m*A$cFVGr*m07X$)3!cEaS|)>@SNancQ)f`I1Z`XEOPcOeTpYZ|42~ zsk?UHZU7XM_r32m*@fz=x^=70R;Ny#sv7R^@A<{IfArbW&!^NMsmNyy;#2tAM|zYp z@IR%>N>x%Smr|8Bm1|R#w92JbC8Kf~m12oDRc=?64wdWBztgJRsVZG6*QI}FRJmJK zdQ`4QReDvfS5^8{u1{6^RjyxE)~MVXRT)sZ0bROXl?PR2t;(&{zdKZUovI9}+>rj= zsmkkBWrNCX(7(InZNn-ztbcc_@++J0=Pv!2@B?eUaepT70a{E+eOy$N@WnAUPRb{`*?N^lp zDtACt4yxQiRXL<`hg9XT${kjf2UP9>RXL(^M^xpg${kge2UYGtRe4C|9#WNuRqkO` zIi_;QRAoZtCRAlo!jEa;Nd=q~?&i zO&QxIb-SPX0#bJ<<1R_POPRaWl}l3mMPy`^u~Ra3Ml#-mj1gtrEg5(F@6020moj!s z>TbXGmymjoGVYbsd;PKnr0!A1sHBeiWoMDPR~h$7>V1CMB2w>H#y&~i=a)5*I;MNRj02K-z%M(8)Pu@6B&mn|)On;HR>lL8`hcH$8L3Bm<&J?5ugMe2kyCM9*!Pc0+$xH3*i>IpyfI8vvSaZ*xG`l%J9f{v#p zb=pr|Kq{#Cw4|Q)Q>#clql~kXde%=}R3^y<1bt32&Upa32{HFFG9FXL?Oe z0n&a_GM%!1d#k1omW%FlH>u)LEhJ*6|X4cnq&Ztl9Bs5{ls-;+>i|5DR|=R zb)KV)_evgcAtH=8`=HKyRT-~I9^f53_aVILE0pm*$pAq0n?9_c_)2Abm1F>7!4n_B z6Yp2XP06_FKk-rh#4TlfKr#T8;E8Y0d0(xJuaP|TJZRfD>b$R2#@9(6`WWPWlg|5k zWqeTb(2pSRn|0oYl<{H7LyLpFkD-MhQN~9l0}Ydm+_xz6VPt%RGQLqV@MaO`gwV&4 z@lDG3X30R+pAf8nD^fqEjBk-tJpQdx74(^JQ|4~ueOwuzki1WXd93Z*b=$sG8Q&(^ zC?3G#JMi4ME8{yP<2!uleJ4`CQyJeSso$l{VC+Dg0_eMy@kz=1WTdL^)_Jch<5QCN zsYo8m@wqpY@oCBXbR_R}o%b1KyeWBaMnH!;_}up><9j9Vdn0*o=)BJ=<8zYtxk#O# z#zWtyjL%EP=OZ2X3^Kl789yKyKcLL4K=w^#45|yGKlm0E%F&EFP?)u=<)xaLFI5(+ 
zR?Y1)O0SgcQgy*yvuKqTYWcaPg(CF+Gxt{II~n+vP{I%^X9^6 zhueF_t}f&avsg7uH?vfmd(iEvRr5;=B}rLpnZ+f`F1=y~Nj(dN8h_h0%k4+e*@A5% zURsz(t%Gw_t5T@tYo!Vj3zbE_^W6C-s#V6i&Yizdv&=$;spV?1P&P-~+|I({V!m9R zcRSG}Mv%MV0yM7zyH54hdtb+f1oW!%(~+jD;Q3LfW6 z-98T}`Pz*|(_POl8fLjx$O8yA0Jvb-qwC!@Pn$0M42rL+G z@5#l*GULQgUxEF*Y*iPkOSYR{vdV5Zdwbfls+PO<0uWa!o+*`0zq0lDBESdK+ogG+ z0hr9ca=;ymB$`J4l|p&RbcZxfrDTxUCpG)Ebv<+L$*QqbMrWQZ*)}kE(wbi)rRW9> z0j)q*-qJnF&sL2aprF!1$-eCNT4tsCifNY%_|a7`3>_PI+B8ZanxOxi=gnHAxmV2_ z`BzL(6uLijHdk7lt}fL0#s$zDS}+6TcRq3hs5y7ONQ&ntg>M?)v-sL~A*d;JC8e&k z`H{4$WjN%%s?>*4V3yR(XuI21wcX5Op?2BLT&b28+)gxXp-?g1u6%wDK+Wgzu2kW3 z%9=oxZ$C5r@TF&LH0{z;(@z!_E&)-e3bn$eCrGE4E}E6aON*t&eX~oYvaxUBvURXB zU$igPs;JYbhJS(jOV`X{+1lkoEn6tt)vUd=NC9SNpSy7OT($xUG}hCT z#pgUI#3~tPmb7zOGm@RfUrC_2xO%N%&D+`1f?YEUMz%WV=Zr}Oz#Tx7tyQxD82ICI z;T1Dms%5RE1w5+r*r;s5Mgz-b{csF5Bs)P3(FfE3A)vgQI?~3-hlT9&X~{}!bY-l& zQQYl|w%=XDUt#0jG~&Qs#x~1y_2`2rl(L6VjfcD)DQ74G;W4j7thFR2X>Gr5Et>a! z!KV;W${m`i*3MC|lF3a&*nBi)k+14nV4XtRQG9I@h)Ow|>fi0-CAk44xRNGbskzBp zPNNtJ&wtC7`nQDeni=gR=aPf-dABE@2f;vgA>NnIzr0i^dpVu?yiqOY^VXxNSGr;G zEs_3_-7Rv7MOf{_*X}~l*WQup%k-rswTOh^6Y$6gl9uI>3y?kD_~l93fGfUAhPBdM z$uzPYHzN!ZVUBB6>uQ!mceaqFR4$vKTp3Mtd5=pjRTm=hb_)^= z(v19>ar+(xvsb+q#^^VI2qm!w2jFrOlGq?Ix6Qcim-#Hf(tgb<)u2fwaSyet8tKoc z>%r(735%K0Og#uLqI_cS--Qer`q;X@l*Qk%zGsjyg|AJbZtOdH4bgoIhDJn0bQ2m7 z=8)~3l-{=>>XF`t9(KE%rmNMfC6hE;r^@u~KXCBS;RlW!eej`~Vj8dFhkrZqoyOOG z9s#%>fXgF9^-5jsQr00=gA_`s8dBQStCXK9{AaPWTG&~ii}vE#o2o4+Bf!thxLvkc ztAQhIx4$T@q~c|nHMcjSR{+6Xd7W)rJJ2FGV^DwVg)H$^uhFzd8yjrBo54g7$oL6K z13jmf&y&z>m~AmqcBlGMJKENzoSn@;H0D25B&zt~Uw#EVWHU`v7()mKPgiNPa>%hr z0HbZvFgHW~t#4EhWxnp&b13T**WFE<6P7QMM)=`h;!GMOR1S;?-=jej0%&MYDQOXC zRECE4puF3z^~yf;`XUrOJ8u+jG-w??z>n8J_GtMAhUG^)>iOYC)L?;*lr_sBM3BTp zem{c@iVwR3L0{XtZb)yY74Z!|1pgGi_F)8#+;Pj74CE2b7XgnT(g~*Ma@SqpJjm35 ze)a&FBaj=cza3wJ=p%?gO}MGlYl31R8iY?y6DGU~6D*-ETyOeB`CoSoD*mg0m0kQXKSZ9IJ#>| z3$c8k)H*$}9D%T~zyOD&yj5LVF!Gq+Exv@;$Uv!5-KpW!rj#?%2$iwAhXN=GhERh* 
zX$;+Hgc8I9K>{!UDj~R_=tEYe)aKWq@3*TfP&nH~K(wjXwAR(BUQLUT#-A%)8l392 z3{&~DgL$vv834Xp)q2Q!S9;ZzK4Nz>QP8Dc1q$#$yP%+X!ER;X@r>FS7OwRxqgU0| zNEMKAh@h4fhtXmD;k%^y6SV!2-w?mV-y@`{AIQ@-VbW*;Yl+OGgJ%Vl zdr=?Aylqpe6SJmu_zvJ-M`{p5KqvH%LHz4%>qy<*Hi%T`p=OM*?AwZDXny`_q==>& zzT>TsP&C%G+z_m3_zQ6L@M96U_3&FS@GDrerJ7Z+ZscbRWFCxYfLCgD8Uq|0|i)s7@pA2GC_}Uz{8s)u~ETu+mNUK8N z3CuiBEfP+I`t3@Qu&{C@nX2OPAnrrZiv zWws(*0Yz}K*3rB=WGYD23>9d`hJuz>d99{(i0%e~t63ciwXYek1oB;Vkasi1DonBI z#p zd?>4@@U>aH3HU%lBLw0^RNq1zO1kLjjEkrSLK&BN6AcT9V+0LGQy?@LO)-K7qbUju zaG8tcdz1lvc%8&icnIQsz1yR8(2LP|7ZpT)h5i2#k+VU=Foyt{-Ar>5b~b3Dk?~X8 zkcu`+V-b2~(@6D|05sl9%^=n90F-|(wH>K`SD+qvsU1l5I|W6-OYKCe-#v8FOGW?J zazMj80lfu_Ffv5CXJkF66O3%&bb^s#^=gMiHmX-UC9+Aq+QrB`?eCjsl99KqHS%B% za}I>SjhX4jCS;AyK0bEAtm!gIHnR-gZ~)2lxM@U?XT9H8paIbcGe=Sd1rg4M z+KxrV43#Fg(_ezjynGGIXzikH$|S|PUVH^}*IB&M+KwRE1Si&%f6Zob=Bd(y)J`8* zbx&7mt8)kILG@E?0$B-$yRXBae?lysT@jWDGL036z>07)uyE72a;EiE6ANbMTw$qP z1F(r-vBk6i_7_mbxA-g%==#R48qoC>{9FI^0Q3hDzimJtT{)4Qs#Xf6g(Rk+PAE)q zKO#}4fHVqN0Wyeju}|D=YtuI7&Y4`W(#%D!TxPahxa@Wq@-C+nW}&fSwPJB`#>tH1 z=L(tO|HK1&Eh)V<_MJjQDM1Yhva7xByRpQlUJ3xnL!s zERALe$_^tEMOj2*c~+@Ft~E%o4y=`9annf`CTDZqwpoE6MY+0w-^XU%_LrBcH52iX zS@01BSPORgHDr6!ELt0!sdv)R)naXHfVD=T{VE&gq3#QB`t!&Fiogl}$38n<}~~!%cD7 zXBqT;5zl()8%_BSr<)Q9d5Wj8ItwKgVS-5_3-uGY$rWTx} z2YB0pAIW$#fZ5S@Z#+uFf|2X26t3qB^X6MUcxZVvd}58sCPZR6qa%31mh#)KPZ%7uA5nFB7N9myQvr9yIH4uIhY%7V9s1bK$p z3Q{qXfMy-0Vz#1Fby?w|9$%^*a64>SCAw&RBVKyOqB+m)x?HO*Ru{@QtZ&B81f9q>YY4I6<@7QRe-Nr^PXD;AHGq5( zDuA;_^5Da!M;50~etY7@Su4Myi=ED(B+U_(`8nbeaN0?5b?X%gs`sLLTGVJPj*S+j ztZ=)Kn#Bf2X6BRpc?!W#;%jFSsMK)RaAvr3xI_LT)}4y}b2c`Cb3j)FMPRf!jvoPC zLBXeGRkVJrwb7W01v1RrMfXRp9vLx8&cjsW8(cbOJc#xH`1|zRopmxDo8(mNSoSIS z*iDW*gP6Qca_$zStoP<{lbpi&sql_zpEI!#B*sQiBV54X7}GX@APAIdnCpJDb$q6=I;97ywkP*V*e14+WeEOkzaz*NB~N zCX66u1f)D^UN4%9^fEnWcj7UBDwnpt8^1K@Gj!trb$kij%D?H zW73;1`Q*p>QYHj~z|{H_TGR>30YGS!;8y_h zcZ~qSys!=$1QeJSaDy33FMtcj`m+-;R%0O893e}c)`2vw;PPphR%LY9j165^gC7Fr zP2AL_H*sOp)_gnH*RMZMlhmmfpHi2w-k#w)daYBI)nTjRuoKLM*3(!;GgTeCpb=mf 
zz3?t8<9z1jPb#i%Ul`p!Q!kezczugBBg<*ddOfk49KpnD#7CwW+MV?>twPSFkw_L- zx!&Fw%)pR+EX;j-OElxs{-cK?dBRzE&D)&By{*}bZ}p?72U*8v-|iERE9l4H?(u%1 z5y&2yb-HWT4XiL{cUuwuwLXJ(f!039o_(K=f1dI0*YO{S#D9?Sf1%U=WhDNu82=%r z_i9T1Bokg|KuXr!oi3XTmW`ioCw%JX(Ga)YFlU$Mtv6YQmQ#NzU3mX>7R|tlU2r?J zEOxsv{K1vmH0lMzCgBGy6!OtQwU-<*f|_Xf-UPj1eQJGMKbFXQ+bE9H2)o;cFr9@3 zBI35Bxh0{#fL}-m8GMCbP9jn-KtN(zD9d2xaf{_?F(nH@PdM3dU2qPebGpQ4?PW6E{wHY=C&sC|PzC>^uf;`0%z$TPp!FGkU zD!x#pXV*-Uq8dfZo&YVLE<3sB#8VSq{P@^?qdtzv`|#-d1YqHIdiie=OrwJl+P;Vc z>tzNOg4}@iy2+uN>JZ{_Qms_B-R?yw zFDA>o-QosBFHJ46(%9oBcK3j*E0a6R&5= zsdaSlX}b$jY7`DEEr=*uLlHH|hYZH=G`==xPj!^kdL-vqoK~B$$V_4g$5TCWw+A_# z%IT3-=fH8zdqyjYe5uaq4sv=zS=T$$ID2s-WTMB|p~B>WV_9yQnH)cs)zd^h#P-gH zITlA&XG4)G{BuhQ0s?Q*1}`b>d$}!ANgq z_SHlGXqeMWj(M6$kT;~9-OU>@7CsSh54nh6X%8nVntOChEWQ_$?6de&|N4HJCi)G1 zG}3}SEm{zJDAtGoDB=?xnVtBa|E2$qcK|x@afU-IHYioCa=iD<+4Hz%tmVWJF*NQ5#**Da@e+%n6usZTIH*d3bz0 zqI&51YLPZZ{ zRAwBV)E)K=aggQO2^@LQ5};~VVp7B8mr}bU}Enk$avm%7K!?s4g#-_7cbP0+OG3F00+q*6QJHQSdON;&s809izNas>fN z#b8b~L(7DB4)ab`>*7olFeg6XX+rHwTf%}L{wi#Q{H$L`BQ)flbImC6l12fucONE6I3NvlHk2dF0+1z!GC0sB(450WW0;7_oP)vU#G>in6yeGVG7@t z!=WkOWZ))m2#kz~;cz&Nt;GwNzJg1p%T+sAf+BhG!@vBB6i@n zl++;1VEDt@;pLh#gr*dOIw4;G+a_)K66$?9XiLi3+oUZCH8vX+ccDr>AhPlZ zI)vf`C%=mfm|z1sORvjVOpDPOF+YW`&A^0`C6ln*^KQE^+T#w@b**8L3`UX7cwuyq zd=+Y(43e_z63#d0Q9!T`ETM)Q#^gW84suONZ}8h;qd2<{DT;zLJi zDdQX3ifIPu6K-cWIqMZkYUph^^kb1tec084a@tCW(kRE@-636LMVBP!bRRf)bZkHV z9dJ8KRh<1$T@=0f?CFcy@hp!*7(Xz!zrUxy2N1?eLGdc~`gNW>lRr0e`l8!=;r#UD z`3o1HK7H~@x6>m@R}@1T`M9~<8YBRJ9|4}1;|#Pf69uft*KVp|AKL~Qro0*7mH<}G+-j3&W5@#z?|1#)Kx+Mz+NH3@Hb7@!>pfwmcEz+?Ta&-xkbN7yeK zF04OB;I_A7L#{l3B^ytC^B4e5kYYlC)H>EEVv^gu6_q5kIoja0A?&ap83Mw1tZR z83kusP+s`y+*O8M{nw|$NAbYsQ!kFuUe9769+Zcr2#-G@kLzkB-Fy7yC)9J-Pe>KA z2!JOVYUq?2KGLj)kErKf?nf<0s7=Ykuyf({!tz)o;Ol=`9@fp0bpQ3gte&G#fSWD! 
zF<34T$A4&HTB5)y7gjQzC@S_%l|~Z-8w?kSCj0 zCHJm`T$((xSUO?>ajc_p3c4*4>9R@|(w zO~BeW0~H*#b?s!g7uQb@MMHkg-S0XA(*X( zQyUR>z|1}@e>cf)wJnfMnBojXNN|-a?be^+4PVE1k>=qj!QaWe@H}!_@Pby-ZqGaX z!sr@z$TMX42gEdtt7*J{8ee-DfgV@sE-fD5bdKgEGtH$#u`!PC8QVMRY#Nu-YD{~4 z&kOGve{t`q{})1uJ#8YJC{8|2plCVTopQ1Xt&0!0gdacr8*3^gDCrv$Nuh5Hx1eva zd!(%{6xLIMHxcvo&!bG82m;cC+apWqd7k3*Ssz_0P+D zozZMTR(Lyg!um4)I_tvR6DTo>FNPAlUs->{fZUXfE?2Ko3EFW$2u3)Lx`Y?7Hz*p}aC8%TgYI1!e}Afczxl3&4g#u&aPtu?ExK@QjdM zkJ!U`b`@j*R!GLNa1|1tY?9N?ZbF0W194Hn{C#%3h|qu=@A#~fE{;>RSrI+<2_!wq z^OFQ2l~a{o2V+TJst0t)zsWKs`e-r?_n<1s6dK;ZV%TE7POII3hSt@iyU8IC*N*Ky zw;;QWvYzuqz-yYG4nDBkdku&wD7 z-zU8Nw~YNAgTH6+4-6*afeeg)jkGuTRxShphV3wfcH&>3#hfC(!G~Nhh3}o3_6=Ty z0x*5qEM9HqUnv9*7LOc^B4Qs{BFO_cKpMC^t+g#AuBFV-%t(qRi!?1?^kVyyroR=i&_)^X3ZA@VQYODv7zO+%6A zgtrH0IMu`Hc|TN;ReL>TC0U>vE3diA!iKcNw&B#~w!7M#P4x^i<~?2{EAqp?6c^L@+V>(LRz;PAW(0BO)3s&` z(1AYY|M5Lx5m)7z!9+w|#$hzFN!fwgJn(K#i8b)tkD!2MKPvI(sG8??0jG zx3^HNVK3Y!_$&U) zFhFd7KCl1_JRrfu|l+T|9=hBu{qxzPp>zF8Yw*{{zVY;>cJ1_12SEG ziMd349bVl5@7z&eO8o=D&LYT<%K^bV~}kC-_h#PvL8CL(s^rppo#zzco*k7Os}&ZX_vbBm$wYjflMJ zk3FC=L8IK5ApF!<)^&`@(B6pyj&1Bia@I922*OLC!-AIpd|MKSQ0evn!Nxvdy1p*n zyww_Oh5@KGeswU`@b_(lYNLND@YAMUU9yV7x)~~!5C3wA5twa9qz-1B=ac0;$idAL z#G~| z{1l9aK8Xox0UiEL`{0U^7-SZ$A--&Cn-N|FAjp5d(sC5>2!{kp%Vxjv0LsRM_pcTp zTLFMgew_daf5!mAK1|`;urO<4rtM2*bcUuru@sR36BlOJVO{(hjQTaJTK1uZx5#QA!>$V+g(zd%$Vt3FsuH0-&Xq)sd);c_* z`IBok2_U=)sbf%2fDWBHDm7c z0u=YV$ZwdN;`?=aukQ=ov4u39-skt7_qUp956aH#`zmaj_L=N_exEedO&!LW_;Qe@ z=Y`*c!_}b<$dUKWhm&!_OzYCzE)L7faAU_;D-?DC@MEl)0EbZE@7qO$br5yLh?E$f z#@GIO1R><1gJJB&A&A{138o9nYh^4b25}(&Q>le>{=*PE(TAPL!+Hfu5ALF!`48jl z7Liu??JI2UGg$sHagPC~lzKJ(f1-uX9;0L|+*`{+6l8<^EDkr182I@~Azz z*tw?NwyX?(${f52Z&43ANe@p7Em?WFl&P8d9w4rmjNE{c> zLe#%i$J*3dNZDTebGA3_pTB@TX;-mF!kZBVP(aAkMP~$ zvITfW&$KuneNO6-xiFEnkXG+k>bZA=2k{@zdB8!;5ge)g?!~35VFDkMQ7-l1zvlQ)gsU{%CSdP;`<(-v)rC%dGW<*I_8Q2 z%o4172U>m%~- z;M_XGemJcd&N-YbSR06rHwkRKS6P(oo;1v@gWTjg9F7k4C1p-dCFVBNASsG9x1(a# 
zE?4WfpIJ2fTq0D3XCW#3_{I6$&Xz_H`S!+y6W7IjG{JU;fl$VNyx@fh+W7mn(KZxm zQ{pKM!7|uMG)3c$%;?KWE$G0T%z|hXz|+!!pS~E3dPl&yieT3W2=D<`AwS4^sr8r0 z&kZ%=!`4WOA;fMXI0-z=^Zcp*R&Gz*n$XqTf&U@Y4Vz@ZHvAofe;OU&$L7?rZtl-n zzf9h5p_3W)HK+>E6rzO*hEo#RF~M9!WQ~w<=y32p=~av)5i2155{G*VmLCQrjsiHr zd9B28`Wsdzh9r){vip_KVpU|=yD|}%a0VA8;wsMIdc+(m+2&n}h$}gRs}OUjFzelZ zh^sk+3lDRse3y64A+G2QE;q#WoWYfbxs58g$Pm|bqSqHj?{^1ywaNuZPw!q0XM-0R zn?8U3@pGs17fw$~Hu-1IJ7%eh{WGY8kYcTR`Y(2EA z^o3FEIf|J~obB)3&0Ah@X-C#GrR?#HD5KfkuSKi`&MOnNu^gYE$Ui7!QJ#rb5J^8{27NKn%Gi2LHAK|LB#9zEW8$Ruo z9T~@xN2R(jc7&P~I8ZGH4-+EGW$9E>9u>I6hg zVy1xL*WQHyL<~v=X=2dQ3$7_`)YV()0$=M6Ovaa>*;|KEh1-Rzg{)Js)yg3s?poZt z6oZ3Jr%M#^85l9g(2xZQ#c$;3Ko84;>uj?b^xD>9~9UVPJ~_@JMm@sW*MAkDLV@p$J#KK zY;}3eiXa2!C`+2CFndA)oL}!JYB5O5y3Se`W`*U4WU*OSxSr?kdzEjXZy)0GYveo< zdOZ!p$90^G6tdhSPFi|(AqD7R>teV*AbD<2nBZ)Q>XcS{$7`Bb0tx>`e7_=+kL$rqZa%;IQ92Bd!b&TVED4cetPg)0odpGmc`3o0CftQmeSrV&Mv`SdSjdg@n^4*NF?^@HR^OFzm|C_0A zF!(VBA_fClh^r;(^uPw$pCrh&pvUqKNM;D(AP5SUs|N6gj|F7Yj_L!&nJeponu5cN zFm<{9e!}?Fi;jP43P?&c=ce?;#c@%ou^k>I zc*x&4K1GU)6PrHv*YiF&fqe1Jrjt!`($lwBgu{l+qCZa}+=jL(Ii92&#{U7Voere# zO^v4gn6s;18VEVoRaHbYM?+X-?+Vp2wHR@Ze=3 zz~mB9Up_3b4K?`kjqSy76mot^d;0A@bEfnS#KM-wJdN=)XE0HWd;Ep_5dAeN;cww; z2{>hTcO?M@0~j0*N``-V?9US36ebGyrVI$nJBL>S-TI0GXvf6Bx1eGz2Ee3uLq^fhPmB$8J)tF9U|WQ;y)~d?ZTv2X-Iazx&`L2oDI{BfmqA)10}#{<-_Yrx1PQ zz5{hngly-uAIRR9Js^y?8g{Zyk>>=H14C-SL!&Fg zGV2IXONJtPWq{?7PEOGw++cz+77<{{-9CipCJNchmN_>$(&%V0a=bBZ zV!Uu1b5PMy1PmmdR+38}Fci!V;58*vTjLK22(1)SKXL9&B;m&A?gRvqW-Ip30F^Pr z2f3-*kU zPwaeQ`qasbCtv6ZjW$Sr@kKn`P)(wJrOv|iP6#uYw*ElG44hF-F+Zhj3~)1#9o9OJWZ07n@1Z_uJv zB5R$9tk|>viy)zrNH0S=z&Rs{+!}i*fCoiY245Lzov`ae5~@g0_%C zL0}1i1IGS3i$cshAPa0bfiJ)a#r19k5aOW-dmU|B_K?c80+y)hB9o3tu#GsMG7%=x zCz3$|aD*W7EKrT7p~_(E4YP!TM>q|qrpl=6m&1&LsWR&}H@4*`5ew z|MkfLe*Y=~tf>wp7akttk?gYY)J)D{H}<$tDh`@$YT^2IH1SM{fR=MhR%YxH;KwML z5Ej!zD==$Hj#|yu#zDPNfZc?iqt7LSn|vOycvJcd={W)1K4-VG<3)(G70gx-pB+Mm z(SFA4hnqvTCrvtIi4@uN@fgfdl@^Pu)8HB{n6d(n75gZ=EvK33V;Jgfp*z?;ifbPE 
z=Il=FT>UdapV*(kY(yuWfDUdp(@MP(G6;IJabN0Fh7!;9Be zA%*r-*}yB5ymkf*9W^P-tQ5w-;B;y1!{us=XMmP$j?=2nG95i^TCYGqjY48uv>u`G zAkJ+nA99bnGaRed_Ft=4us_a6X*GfgG8_z9gWCsVPq{P;vpu#~x!w3B1}#&sZ&MQh zrHKLf4OYTu>Wz_F%0zoLLdx2KqNFU^BjuafB&S`|N=b7!ybO}Z(P&LYG+I8SH_joZ z@wLAP0jxoo_E4*m#T<_kKu}1BL>4td>kz=?&6HeAh%Dbi9GR`6D3)U)2IYj(;j|T{ zL0_#)$7iu13}q2}&PWVNC$y`_q_}e2?{Ok0ytpmYZ0a3?$Y{h);|iTC2q(gAfRL)VVtme_VbR`VdyH zmjNDyxS%nh^Nylf&*+ICHX#sN_Gx07#!Z?w1i!f8S^woS7DkigIeueJ0iro|pfsDt z*M1iQZ?uS8IiO=`xg4-Kh7Et@fE>{0(dp~Y;gcexN__qh^tr4;Bqt1 zViT$L9N_3?=AF`_Gf4M(cqar!JSD@w6CTI)o?FmMKo`;IR?3*(wZ2fzv*Kq6LEk2F zSE_M|Pc##+@aX3Hb1h}{=rmqvaaCLv6ZKB-Y5qLT;0%Ma49+oljKSj!o?!4KgBb?r z89c>+N6J~xFnE>$&mpufF$kC+EA$gg5Z^AY?Jodr_WfuF?!av8Te~jPH-t^&eSI5n zSSvs6eVegEupPg;5N43Rrf*QZ0*ZJj_z?M1_`Z{qH3x8PGR4bt=Ili*j>XNNfVQTr zVMH)sfOEaGP2}E44C`{XN&om94~fKqNZe9^+aKVlP~#mXaLom3x3TYg5j64SI)%zF z5wRIW6bHC%b&V2Fi-wh3)EHX$viPBCpCM>kIDlr_$W$1%y?9$iy}*6*{?uROImLH! zQ(Zk~M|)df!-)umak;+PLXFYVF}@9;yE@%A2Hx9IYqiNWf+LLjKJX!fvWdNj6mNF= z;(LhaJcEGp!?DkL5g9M=MPy&B)#H!^Nh=a|{wJX5DWsV2W& zD%G^ZcuOpC*a)!UVd;RRG(l8q0gCyGF!gIIz!YVqH|FBBqBdvpw!24U6-8F!wWw7H zk$SZK6oRmep2WtgG`z;)=zq-XEI1G;AVA(Rvl+6T8`V<_xW-tEe3^Yp8lWU+JN5Z` z$VJ%L9k_pBQ3BqXr3KzCQpBq6T-E6h4o<_qblV~!<7j$0zyk?9T}OQU$uOc%cv`;O zt1WpL_TWo+l5{`8A`*zU-zzsp>Z7`E5Qy49JP1_}U_5>MV2so|gXQwa_}a%A&~~gh zeSn{%?Fk#Twt?s@HjN?yLK)kJ?-UoDSZV*x;7$h9XlPa-sEH5~jYuq-JGGqlgb=n~ z&LK_ECa`ZIbH0i_75Ym#)9%n&#U;Bdy zfISY`Kp;G3mR9O9BYqg1W_3_#0rz4rgS?8`ca#350(B7V3pY@ zempGYOGPvL%mQ4+OXUK*L1Fb0PaJy5MeJW%dzPTKcoxQ%9x zBrJ*A>^+u@r4QiEF)URBXk+k**c1p?Xo;eF2|l1cViZ9tu;Q6cmq!sE61~oQXa)F* zQM$&_Hncf1)Z|R7daCqF$;N&;jU}voP$AF*MgyjL@wh)K0b{U5HAT*Hc+nGp$jd%n zkDDQln~_Ei65K={O~wz$^B8^}zD@i@LCpo-1Xf5ABLZ#sYTQfd{9D7;cNeU zls7RO64hSMp)h;WyJ{h%yhx`q#mh*~lXI}_3ya*sgD}`aICUt-VYLsYSJ-YO3jj>V zVgR!Gqzugc;07kM%%etiE$LZi@TK=$Gw3&xm$^GJa{*ED>1%>dzP?UiE&hNFLx_3j=fK# z+^xhoD!fk__e*?-GWIEBOyZ~<2UP8scvcw)lyOkvc+nwc9G3WqG9FOI5sBlaN0srQ z#CIvK7f;lf_C1=9T=Db=k(kdjgT}EW8%LsON=%ogs5G*zD(v=Ll 
z(|`N(-v{DwXfpZLg`Tm3^F2iCLx;&6?f8l^`5+>YQm^_3Bh3OFKJ@fghcaYC8IYAR zuAAxEZ+a#-fTNttONN=}A$t(xqKQIl$&iC(Cd}91Fa<%KTxjI!csx}l*Kw_cGoD|m zS7I82T!=u#2U;Dg^<%Fr~}C}jXjm-PCD=d~sg(1kOcP0gzNkw|~4_5EQ#r|`8oOxN`XouQ}z zgUZPRE9@1I+ptPTL$87vFm+CHq|oBuBNC-W(3wYwZK@9xnvW18GeDIf+c<5~lOO-} zHtJa^ptll;PK2i}@EN%!5Ixz*t5m_XT0NBwAGjj{KMm9Y^%P7Gr3Ys))Fld^TfeAY zmDNA&*sgv-y^65`YA?K=yVb%b$-?>T$ohTB!hwI1#n(%VUX|SgXp!cnt{Dp(^|37} zjXRd~RF0FnxBfRi)*^CX%EN8J)w1R)mbJt&XX}l zd-N&letj08_gHR? z{^>u+Be!gvrxKw_vG6wY8A(gNc7Ezac_?PTAjjug~ z0PF`b&zYa*7nS1bhPLyRin2q`x{9YT7&dYc1dB^FT9sPq7bDWoGZGjy58*uU4L)q`0=D^@W7jL?v3aKsyn`T^ za!FskHZCiz!wHYgItQIi(UMhMqFmdj@1F^g7{2Ij>@|jE6t8@HAo?5U5Ba#2@lMLw8XV68o6I0a%pBDayP)!UqX?S&V4PA zTwhNN-vMv$L%l8VO;(bjbYs8=SSR%1%JCq|65LzvgFNJd6R%8qE8N13}GVIep|oA82F>5@ksm11oyv% zp!o}eM$f(h8l3a&+?xcPgePNALM>Vx+|wFL1dcBsK^J>GI9w9aY>VR1sWP=+%uIhoQl6!tu8R1?ikr;*2GMq4~oxXaqD9TN1n( z8BDaqtD_kb36{2qu~``z5!nUP{&n?5NrN; z05OoREJ;k`uQ=>Qxy&Dng{l0x;+cy$pJ8hrXOGU$o0hMG<~fPUiyoP*M7Udo0(|0< z;=Ypgze42WT*lEPRrMB^5a0C+f7@Kk`52M*-wbcj-&4!8hjI5wbNU7z4O zI_GErnM}43 zE=cS4;Gp=hAN)1itp&<^K_zZCtn5||(@7qoav7_^qLhlA#;V!0%D8D9dZ~wU&fl;S zA|v;g5tpU8Tt7X$c`%PaA=gL$xZu|SJ)z+J=AGbSyQ$0dl3gAEq@_qF$(Bnvs$&y8 z1G{l2(14r+xB-9D+CL&`O5;Bt&@LoG=q`DWB+?wi7n8m$Ql zjZ+H_m0IWspeztGjwY9r2Smsr6=WSvHK@#D!%@OSL165xJvX=S$trMGGL3x~AfHU9 z|A~TK3ld8_2gF(9qxcD#dSf{FAd>amK_o=52<5NFcl*#k;0Doy^-$j#)v^xlKb$Yk z<#8yBv?gz}HCu@ay{CqH*DzM7pkVB;lOxMklZ=r^?+``=%9yJ)g>pZj3@kbjPSaDd3c472e9I} z6pTj)qp;HC3S#r|=$6EDY)Uvsgrx|Jk!o zUVQ5p&={tcC`tGVnRT(yO{MjxN{I}@+cL+fDf%?Y>>Roctr8$q0!kh`oS=q4``FVw7Sq*=idjX`b~l&VUtR57&#r>yWv z{F_-aL49zS!5nMLJ0ST3#GII%+Be`>ux;Y0zb)mg4ue5;0R~C@s9Ixi3X!Nv8JHdD zmj{%I8o-H(V3v|xPudWRP27@L;iG`S-{7l8pm&9oO_fK>R}Of~&d&4_GJy}v7|wi~ zKM_zg#w_^r-PMAkSw7bBfa||6+VK@}J$tSN0mw6SssQc#)D5i)i&P${pex~lPPGxs zRRXH;dC|YLX-1!_AVjb0)_K^Ug5gr{g%YanZ!Uw~_NNOAxSpYV_6B6adK<7@q@)0+irO zI9-Fz6B`|{%sfFhY;;Nd^g6m^eJe58<)hl7m~wi;DNiGE4<4yQ?r48KIw{}uZgs9x z2CInJ`|D<^i{^s1sS@9A#)38)a<4yOqMUOFJuB24R*uDn?WX4H$M0@tm*%UbTR1D#ex0$vn+jB-$??BbN_M#Vtp5$ 
zvA&x@C!h#+yN__yASwpV05F6TGMsH@T7b$X9TKqho zWyFR@9tZXXqG^?o%&Aa%~46s3Q@lUk?xm_8okHBg8Ty<&IHIOm~V<3^vEs zO1OW`M>9N*B2vU7n*Ed_t+b~W6$W$@n>v7CR!KJ@618R}Mi`C`p62T-7#N8-T0jZV z$OpB^+D+?m!BYVa9^5CkS}dgX)FV@l+^EOq8jVdnr_t$6f@LBiEdDw=su_D(8*(E% zgNgH|F>h9h6g}hBL%j_0iI&mkmHdAL5#9j3@9_T{c+iI?G=gx!oTos&mWYOZD-)(( z9m!a@^D}8tck~&b)O1!gL)M44d0_C5=M$ateT0Bs@{&36o)&$NJ(%3>nUzZc&(;OD zj!qyr(E`86C$X(L-6yaH&Amsn@?J>g5(i%2pe4Kc zn#_L=f}E{e3@8iqc?xyrTTy4jsc!5vjd12F$dTzQy47-NIA(mHA^w|)1U-h~#F@6C z)J9q%TbUsER1DIA_3Y~#Aq}_!jt~xF=4v<`rBP34m{OvGZtPmq=ND*{g;4L+P}kQ` zcfd5!7lC>c6GI4f0h=&}r)M14ci5r^nT(3v%n7Ux?-&&^`+;5)q5a@Hh!C-o!D>Jc zc@HJ6YzN`fF~rcYEYpFHwLpvp=|-SvBac1}IKwxWMd)280U_bEUp#Wi0&rmzlYp?0 zwlPR55XUJ2s*k)VFbv?LiPjP;M2r)+%!}nm?3u~PBA$FZ51AsgXX-hgxK;@PkZ)QE zHN7&f#i}<}z^)t`;Xgx!`4jw()D}V6FuACeCa4FUSVb}1rG4D3&c~wPm;bYH&zvuP zj8IsO6+Y%nb9n#ZA7tn7)ov24Txn1^)&Y*6LVV96LfnhAy@`roiNzlz;fQ6}s9Y9g3rW`vAqkp-WuBi=_Z-gXu`^-h3mAc z)(vN~50J4jFP7)AnQ$yB(Vz_y`AxLX`cnq$8OYY4A;v-r?{dn8R#55Ch-LBueJ40| zgF~&p)9Z%_MA0YG{V)OzriR;{MD?_34Q~5l)h=Ck`apL3B$_>cApkdNg*Xy$zl4Zh z!od2D7m=GDFxEjSgX{!5;=hKuHw?l$C_KvramnS%gtw-NAr0%4*w7LOe8QSUAGD6Z zRaM5c(Oy|=2yJl|on+ijIEt{Z7VoLiaK1nu6&sQ$vkQKlv>Jm^ElBJUI<-Omu( zvZmIt3k$-SBJWC4D(WAb8NI)LJFv)&BYe9wfwn|xzi#$-vBjsDcACL;{si!ph0D{F z^*#n)$>6IP0GvwvEOc)_!k;|O+xjL3-@@P%48DWG8w@_p;4=)~Wbi!Z=LpEe4@;tAvpDB@tTO%mrnc)j?e7l@mCG^hRLSa4uyGkiIvVOpOIhftG_q8x{=l z&sjCp{PRzL_nkq_uO?v^r^wG!fSdKBWWgV25Q-^@iLCk|Wc)H)LFtZNoEgZec8jUW z66Y|A-G;=$MA{t=hm7ao)btbvtBY{x3Q&%Bz9SK1{V+TEBM5Foiu`B70ADRdA`^pX zQ(K9Uc04NVybTdbMuZsuA&|+=oU3uWA#ZnrhSkl}@57BJJPJf#re%+VmhMFjt)w2& z`X^y~aTnO#$)Ig6eA0evSEmZEk#^%Ih6hDL%Q1D2e&KW2kkJFpB9td@qWV@$p>Mvy zy7MdMm&1<0X?SOoj%b;|@(IU1g|z;Yq<|zc{|RLL0pCOsC1l=iF)f+Mr%>oNh`fGS zT|d2K;|9F2OEGdM3eX}=G>;&%g59qmr<=0-72^L@*@efUppD0%32Al2ce0Wur|`8u zf*{%FSG2ezlI8tt zBKa=tzNOZ&&N-e0o*2o7$45PRBZQ)VIuQZL57iO^@&dkctui2knGr#Rwqd6qh6C{m zM3>P3ZcBuwgaR2^T!U*Ye&Yw&bt96w;0AwK00jQ7Sh&Y^y_5z>~m(Sb_gDsD*&wrTtgz@QCJxH$^Q2IsN5mlQcP 
z^Fx@E!UXPT5TQ;YvX4;Kh7I`%z!DQ6T#$l{&4hkhAg!_FfgNNlaJK$6`50g2WOm!| zmrYs!2EV)o<6IZEnCSg)KaJE}zYkIYVLgo2$eFm@A0dzYBS9oH831Sluh)9=9fBub zf6=E({w1UYI(#9h4^Y@Bhwp4idgsH^bjZ<@C>@eaeTU?d;9yq(r+jU|g4RUaoTrj% zUO7|g5zgBHEdYykh$xW`5%=g#s3+^4V(gXcNXm)&hae5^T%4QN=P_KsiCXE1q34pDO8 zGEN%!-87WWdWeeX@w~jd2}U9CwjD5xWKzy!Nv&EXNqChPNdWxtZveo(_@QO42PK@I z5kg6X@oE5NeHQgO>yn^keE~1lph;;T_;9cni8x3h>@^lH)Q&kwaOio-0D*D-&H+Q} zk3l5H9zql?r;)C(a^UezFU$=QCb$!<9IL!JD+6zoOK#g??WDe(5VHOcbahkE*0wuB zW1bkt?VXQ-0Ky~W++Mjx*1PP%i!l2{5*qSVDO6!eu)7a7hw@05uuKu1U2HX zGawN97WSpqb@z4P@Y{4cwSmXormbH^MMY!<9}4s-eC=%rOpHCk`;CSdNCW5CDkx1W zH_tR45W*J*d`1(L<}-#OmLPxdU(ah(Gf=rH(c~}!U;P#{n}9$9ne<&mq3r?oQqGBY z&{PVN#)C2RR^0Hl;k&twew+Vp?dkP?P`pp(NL|R4e&B3s#3XFdaT9TkA<;R-~hT%XO^4AdAa~)fNvHGA%j)> zaP?xv6I`)Z-%r@+p=TJ%rn9RBbOIGV!IVQn*7k6j-ij$aiL-tR`1nho;4#NN1Y=Id zVrmgJ1Rv7d6u$Ox1ZG5r#pa)wwcSkDPYi=uA&)ff1jxJG!AGlhP4@Ma7UpGsi&}#Z z@jQjEy$3<`T^iH3@OGIJcvDY7D>ngds;_m-pY*Xte)yMPp&ADUctGv}0S-c8IiXIAoRmXfASj{M}*JPSoQp%CyClSU;E}pxlfIkOO|i0HZ}+ z+;o*8!&vM~uIpNIEE!?B&d?MWFNgZztPewhrpWHba$+L5iMg$i8aDx8!V69a{U}gE zY|XI}SZ~o+iLgy5AugLOOJIb-b^sh+7i?v!d*hnG7gXN3OzDAWX&jT(y6(YnEg1LU z3|zZ*ZOpu`4*|ge7fy(OC;h_y7oD{zaLF%mX-pU4T_@=m4!nrcvjqpygD=W3>=TSE zqFb*6v2KQhAaf(srvD!B{vLc0OUY+gs1N&J1L6p2t z?!r4b1aI@da^55zMeuFgiog1sARlCKi`AGQ!FQs1VGg~v`5(wD6HE&OQCY}D2iYK! zJkFBn1gkPFCci1Frtn>*Om&B6u)bzI^YjxJF!wxbRhJg+m_3s924a9-nz8W!0s-Rz zs8${AXzM^ZtvW8uPca7CMPFzURbK6SNf^ zAKN>Mvw4;lN<8N~mW(b5oo&Zce)m{IJ&{ooKj#yfN>n#NaT@}r1o zI0L7lY?R3fH^Yhr)|18{orDtFOVHq`jU!dNq`MemL=@mJ);0a!MlRr5jiTymC*K8< zKka}v=s!O%43H-ry3hrlSi)0go&8N9A$*=xMZ!_lgs}BH&6xbRk?{=QOJ;+b)@3mz z#^TJG!q+Anni0NYaruDUri92r>*J_0*Gt#1>m{4V7=X6#!Ao;}MRw6U z)S!vhLA?s&w2$LK*n4p}$<`))S}ng)-6^qlas>D_rd7qbWC{fWxI}pXFrpHHgVRD! 
zKRv~fS>|RBW*&FQ`83w=qW6}|fUxn`@omIqL=|TrUjV$p)QKSADC6B46NTO)JE3PP za+2B3dH@OaN1sSoh1rvzDtw-aHqwTJ52ihn(A?$!aE}T z4Y6uklRdj0pf^JHGFk?d4Fhp-5!DlLOI^aATesa@$Uk#IE|&8MTaQo4Mg!9G4IhAt zs~4LSn|V8we7zYZle+^Q{3oJajZ(5EV{|_0K46eBFUj!PU)xb+4(UAxS8u5AbbA3q z$lL=Kd1~U}wl>kenHpmjr0PRI>Qj4Y(+|rE2jP}Bo}*L0-N{Wo(1;o9_ejOBvv{4C zEZt!BxgcvV!sXP)@i5`VV$qYL_^{}t7MasGST#M?Li>^P9FWT*w{5IU`rDod;6uI< z8~pVNAj56WjyT?hK_id05s2UAJvDJi;rfj+%?nYhrB5HWj{1#XZ!%79v}X@*c`HZRCJ z5olKHmRKIfnXBxG#yKFzBMNyEto2_Rka@9h8q-`_pO)Ag3BF{*?q0H zm@k$~7{tHXf2nHI=VnHUfW63H4ht9A5`Ngqz-;&>{9s$8QHhes7dKiNm1(`7gomP5 z#RPRVcqk^eqO<$pp{6@7YWi3*kzu|kHr_0oazSXG@lpd6L`6D`T{`OYjGv&MkL$S) zYl+i`s|vIJiA9(Q1ljj!(wjU}t-5IhLEe!E5BhnJ6s*$s0fufGvBb*@{P*D@xwD|r z`%OqxETXiR`h@!i;t`1=YDTZfBu(m{AiDXK=-GN+9fa`RP9o{Z<1MGue?+GFFLBP9 zV0(mp{ty`-^w|d@tpg@erX>rt+t#<&oWgpx7Rm2Oj19-@X%J#)d1!MC=eV9WErT4G z@@bTUM$CzD3&r?z0S~5}Ey?e_9eMIuzuJ0@n6dg)GP=GMj7C6|+$PglZ zWv>g^Mmvmr2#48e9YM|n(hp$|!%Zwm@`ry3Fa#9d0lA>1GklRb^t%~$)cR>gJ8csy z#fEKF@XJ4u?Pn0T{w;$qF!*@}uQL$+`ezyYIRyS-PQuUi>hU^nuxcT_fc{)Ps58eK z)xV$Mp%5c0bl}tqZVju0zL0+8nUceQBC?Xb&_`E}Q`3tD!khF^;^@~8Y4VaUBZsS~Y6-832_ zfPDgyI=I3YU39)khs;Vg!+LJK!$Wf;L^xo()Y2w*^!!=#|#b2Uy0g zgr;4hauqx>11mfEKlHfJhD4dmlX9%TVDR4Sn%6;O)H+&8zu!KdBWkh*g1-J#mUdE#O?{FhK~fGCe_`6{$<9o7qjlp zcHO&=ky=*MyMAdZs*}F`PONYGhO;`F9*aGYpbMWuAGL$PUlII&%^>7ai3y|rB{F`2 z@1x*@<+%fUJu((kV~k2jPvL9NA~5fiQh-?x#2Nj><@zPY;X>XItN$9e?<_7;C;Ltp zpp)UP!H4LZ!q?u0Alep9psYpabzDJOAFZXedO**ikkDn~4}NEg$cG^7CTvpP*d?MI z+@Mc@ThGm${s87?Ug3rF?*C^byV+aDlgmDbA6Ejp8 z<-CZ$@l}s(vg5CP#)3J!vHRxP)AT%(w5IA-9L@ue-d`T1Ddk4HP(G!nAtkAyb+u!x7zs9%jkheDB&C7AaFPHX1k*6(*fkF550w(HMPjV!i4m??1lBDd@z=IxsJ+W>TF&c*`JZBGWB_f!mm z6qmv;3<7EkRKI}cKvkNSe2EgQ^CZ(2wmCp&Wn0fQf~++Wy+COrL_~6GHPq@mB?TaG zn!}mla>lx8aLn{Xk8J1M)dU`~hhkt!`(W7{1q&D~0Eov~;fjI5`2bc)hR07(piX)$ z;;)tHK4!o47Eng_Zgl62}smL~%wmpOZchWfbG(sThu^C+O z)l`{2jn-KIgUISjJ7s;T8vF3@HEA+%55d0Z`?%X z0GL?Q@t5TK??@AOBk)Q0DAMcc<08`5vV9puWbgaV)-A+{nc%FzZnkzE?Dt=i>^c}Q z`ZhGf!wD_}4bUQS@eaam9NptMX+z)IHaLFuTHJv}ub?+U$0^jl!EcH9OOGUXSIijo 
zGcGm~4{K~Zj)DrC-Y_T%Aa+@Tjv-Zfu8*$_h}#PL z1$Pxqbi|{kJa0X4icqH7Ed~p{MDIDUzroZWV=xf}6aX%6!Gfb2>fOw4`^JhMx)Se5 z?z;a#5pRCQBcMrs_l`%4|_A!8cE;BjN%LbgL#^X4sgIomz=+;h*B zX^VFvm+*`QXQ7RCDZm9HcGO~8h4g&{J5>=HVKy{u#M)wRX%vTC;l!#8>P}+(AJi~V zkzrO{*zuomW0$2Tuw$dgDdUA}g5enqm*nQ+G?J3YTLcf{aFG^3N+&xLxc$B<3$sKv|V2fOS ztC-3$L+&r6Z5zcR*BjYNnpWze*sux;Try%CLEpmmg0vvZ%0uFvxfttl0*| zW%vqWlPK;Hn$H2#YJNezJtEL5R1G)}(3>Ef3f03zxEV^r8N3_EDjelZJy=nSIlT`V zh#NJPj1uXEDiHwGl<*!C!H`UBe5)*lM<4=t_&^mz>;XUyFyj+|uBc8{z^L1Tk;-r= zfSkcI03b4bkJUi11nnZ=B;nVzolTXt0=5vs^gFf@l1ttf@r3HZTTv|!^`d_YO<`j9 z7Ja3Xn!9<`dmh&+4RaTx5aKImd!-NPnV6JnpuO3#22`0eC1 z$%~*6zjA`2-^>In__%I@fzZ+5*&YU5!@3Ix1}xaVopx$}c~@4#Zh&AeU4lg>gMbQ| zhzK~A*;x|GL4bxdNrANC(W9Z#jQwwC@tP`fgjA&iIdPXmWl0l-z-#0y&*Q$=6hllc zxb`GLW%ah!zWbiG+yPizDNq12({`(TS1D|a(@*l zM4-mJ7t=wpx6~y79l%*^2i1-nGqyxpH9cObViaVgv792hEoS@9fb^hc-Vv(=_VBok zWy;WL#XNzFa7TI-CMQs+sGG0giUhobUlQ-O@Gf@AsH}VX?Bn^l%f%eNi@Et}F>IEf zCtG1N(8;;X(`PdpasoS%Vh>V8(!fJF_wt!6EX9o8k}Z0Si93ChPqbz5fnQ27TacUd zHj!;bFRL+1w-61rl!QIPmM)QH`{L?epKaHm80wZ*bGL*1atp?f8g4V`Z2+golaE*k zZxe5TAy&`lkt2$wre33TKJiMR^lf!ZNA#b0#EFY% zR%R7TnN*!#c9dDQN}4Q8T~3(wEjUpj2%3VHbVi3g_yIZu7lS6@q)5FAlxYIBGx`WTJ|-kI*fk-;cYRkB4Sp< zXnD`s+04w%iTT2W60PhxD&G8WIMR7q0s?8U+Y6aph z0MTYp4}c0e3q=`0bplfnI0`$xHw3=nWwm9%L(HeV7ZYWL3>zrbjRPtzwi(}Hxdzs< zHK?P{@8!X_zdcMchYt2_MJx9`LGx~IMkon`I7el${TNUL{{>;kYzb!!q+e_IY7&^ zANFy)wxN7aY<=fo13d|4-qjQZ`F53E_<=e)>1+mG!t2HyV2AqX`rA1_3K93vWGOc# z5f7uxPfW%zKc64S1c_V_TkA<6^ZN+}P9iVpXt`SS1V|&O|Ul>8w6Zf}OR*lVE{~MT^i9QHc1B>SWF6+gL`wrn^y48xlfkyg7nn z`3m{EGx<48j>6Ytc22y~s6B+rAeNP*Zf*&Yjxpuq z&TSZtF3g@xq|xhKX@lhe`^4i?d51hNDm=Opb$sCo%;QGWd<(b&|9Bd~W}cnJA#4IG z6TUGqh1lH_Tnk-UMNM)hUn*fKqjhzXnbv)H&RdVxdp$_3)$N14>aeC3i!ogL1kt6o za;6?UsA9wjMCKMKLCO-UApQIhiy=m$GY7mzS{0Zc!O5*SUJT9*st$^7 zn{#4-n&nQj%s`t?U`hMS>mH~58MY`uPN)S&sba|GhpNSbJf=h5EO)t9qrgwGMSPW9 z)=ej=ov}a@J`228S9k9v+I%eeFkFWlA7_ba6bRkD)#H^Az4Oipj^*>z`p4&|=L%P+ z^KR^3#Z)O<$RV6WmNky% zjF`ZanVrlP2MWdMV)3f9 zyV9JU$1rMkPNTeJVYqPqY;izG?&3_;)&<)cGCVtc_8^Bj 
zuzNC*W>1?;9B!DvH}Qj&R8zRi5jOB41KUa~Y<-wHn@1diMVlHv!>SUa5We@lU$BOJgPU^Bz*Yfc57DDaGriSjOYCEul!P}5wtjJUP1@Te9E$#_fIg3dO21ZhQ zvA`022iISTQB)UPH3yUn+CvfRSQd4hk-@d+!`x$|Ejhf#w)?_~^|})B9%8|-gq#Usl1_e(1*cIiR%^FQD?;qVJ0m#mLPBDC z__4|I%_lv?xI@HPED)%vcCe&~gakJ40NOIXQc0ygSrCl8F4Kc%H5X7Q8-$eai7nDf zOD8BP%>V%vl=tF5nb26G?=z*;%)m&P9vW^iWG*N+yWyKB9B;2Ss3gJ!T=4fJ6IE`S0X7ZG(hc{-4gSo ze@VluwIZZSpd0LgW!V0&SZSgr0q5m%e>{w0-m5Fywh>l7rOTPfwk^2vhqgp$f2%!S z$+CF+A&B@X-9%DL>=^5;c$B>Yfg?J!6F&JS*^o@!Zq!J2M^q^;9D$t-`wid~EF6&- zc09+$)rnlETwkTe-L6VEwBQrU2Ty}RMLt@tC%{^92t5~Z5(=sWRe;931xbRn*iaFWmjt%;9X7d z`VIRBTMxi?C#sB4k4jR~waA=-EP*Sp=5_yKxf_PXU{?a8VtGUG-3gscku1sqdn3ya zk?d|-9n0%o{-GD|@eP%H(XY6?CALbho1m^`lO(}N%WB#r|Art~2b?iZUQarO5jOv5 znZ#Io>w?G$b)uAxJeFgZ+|H^l)hgm8ptqsVN) zvohNO8izPHS2@I*QeOnMNF_U_t<%xn9qqJ1It1;AQt0xok_o2dB zLeqW-f)N~x=`C!^ai)?At%aTVifIxzbxcC|!?k)=bY zix>@ner(&>se`@~_3%dJc(nygfXTfvZQvf z6In?FMY=`|f_9@Zi^s{@5Kd~Tf7SeMJ4Dl#r6Qy=@_u&!8++~s8;>p;8v({u^)(JH zPK!nT4Bwd8?+x>Xco9|Uxdc&e_uG?c4D6em&2O+xOr*=t-gPZSa#a@+Gh?xr^bjYy zHq?SZudOLqW8eQ9WCZxLHZqng#A>RBOXik)37IrVK_u9zMPsp6^Onjs8){{=C4jPl zyFuB*OKOv^62~#ma`6?fjxQNmJ3`1hes>L9DyG(30T&|Y?Ey6H2b#(q`>zLvJn-)2 zPZ{Zy*M)AGfeE5@yC=*gNTIDDNI06G!IwvULpn5%a#Z-&BSm;DFCYiRkq**63 z1ZnAM#9uShR@ghCUER9?*vhT5`FyUFnJV1KQdshXf|hB_iom1Ph@4JOPM7jaGG*N( zCaFu1!a>z;q5w@8R_%R`ENIj^5YxW8udhTa185d$GUm-Mt%aN8tB9Z=S4u;!ee~iU zSakd3<&|h%Ea5H_m3!kGI^`8-#1&1Pf-Ghahgu$|UoIuLl>kLGw=NIgdlpr3!QdN0 zKzL};=#PL@@~YY?E#M$S(II=*SJK?G-aA0?GRWj0lOZNf>wt_J<|sjg4owYTrQL(R zScnp|7F(kAZ${iq#0uiQ&}fOB?Ff$L&t~;KJzK!u$9ekDlUkHtQdSpE zBExwMsLMO*$`PmY~^V)VRBNJu2B!iV_KaXyrK^64>MnSKsGL(*Jk zY0OgfgM8sNG}tE{p_$5dVF2vY;A4`ZQf7159LPANmQE3z>g2N#JFoV_CWUBTBXyCs zh-~XlW*i0j0(BLF7b1TEH%2TgVlpG;@k%^t)1M+pZXuIctxXurgogN_bDym3p>~cI zXU`k()S`A&CozJf)E6lpcBJ4GE{)gsFCufx08VZKmkAbff`xE`>Q>P1kW(X7rLj1N z6_*eYG*85jsJu5In{$>~5~=dw5~?#G`Nxclm}xBP$eK^t(l7+#Z`UX`2(n8Xwd`bS5aX=!8j227nxtkHUd)Gh<0@UaE?D|Yq`(Nnivd~;KS)+z z$M!!l_Qq4^DjQ`%?F6>rh_dzA%0NWO;5kBs=wmn&Xt&FSiJ__Hs5|LaqKEY2Jw)mn 
zsYPPPaAVZgr9Fk9(M=w&#mfhh*^=c0H)-icP=tCN9!gA%;Hc_cgCYO|C=Jtf8jc%j ztbIviwF~D;byjOrbqV$503GC24lq@>HfwK$KSWCB@V|1=41tTg5F)A_l4rMW0;;!9o!@~@<>&d&vSZJ2cu*-ixKEpJt?x(fB) z4EOBh9>gl(#!gzj8jz~n3Z^IE|8(P&>B-UGUP@^nt#(hXwc1YRxg?E-t20j(J7_lXD2SbzxN<$$UDr0{k(UCXRo8p z7_En0yFE^c1HWhRrCq$hU!dwhTUjpxC@KQ5cTJ+yg%=a(*ViJT_4?#J+Pm+LL40f$ zb$jtsthHYgR~_$B+{ijiBe=U>VNLAEDX1&bi+3XKPWG~P*<&@Gt{y<)L4g1k<@)vJ zL@(!4pNAj*pCET6qjd$OY;&huv_6$)>d^&ty-9o94O+N^ ze*^03(udQ$gzxFh?jV)bX`Pg}zV=}g!>hQccA(75uYc(+G zc069AJLeSZdk8H+h%#b(oziPgE6Xs0oy44-+kDwLfnxPyUkQF6X5(hK|7N*xwoo$b zZ&9ni#d|d>M-Ab9u5#(}z4a;wqhqZpWabk~D-;p$W&!LE(1%^>k zMM52^a~xa2m!A&NlB=L#Dg3W7nx=%hCJM8RS;^po-({=G>W+#o_(( zdPt?C)kYP)I$~D?45GXrATaDQqP1ME*Bb$ekPV)U;T}TM8wGe`Y2x9OkHH?GF0*u)ip@DwNZX& z&|GiZv0!Rcws0=5H6dV-Wd?C`ufjf%$5IU!YN`eYQrP~ar3#s>J+{0CEOp+m%N1iF z@DmoXv?KunmGwE5Jl5L_8E0$EK#rhg3CVvy@~gq0MAfHRbILFYw!3Gc zx-Uf3OIM9FI8;zAlDUXQbZA4r?$;fP(2ja_H&i0>{?7hVBmWGBAlXdBFj`v!3gdv4-bK4o+b#ML4xzAP@uOHNe%KL z;C|aeCDFjB`q;2k!{GRhi6?oK;OklNgADaV@%psstI*zBi*d{ox;eOHBKCh>op@PWKXLJ#kKia>MUsaQk-^zGj8!6Q?%;|ju^=p- zrxSUYuVh{mKLK}Guz1C~dh<%NT(NH6ywW09?EWvj;w;7(wC^DmZ{a?Ob7Ux=_w%DdqU#x2vl^MWOQ(s4np80=cB5rj-^mB+luMyI@R= zCToZ*+Bd!wXvUjYq8YXJ0}yye+aSIH(KcvM_-{hPEe<{jI7Dzz>A*gbli0W;n=ibW z&;8dxGoD(BW`uH&N5~G;3X}s`9YP2%r5Gwdh@G^-j#?n{Igib&H@7^7%)i1$rE!v= z)_GnYZ)LSub^zSN{HY?{RUyDxf5BRE5G`r|myo^-`GmEH+YEM5QaIs4Sq_4NNU0S)nwvTu_CELjUE9#9Il-2Z!Di& zVlz)n7fbn*n6JzR0(h^u*!`cj&530BiIu2fExAkhaTHnFg9KeCy1rZBX*eFCO9e;b z#|yIjn^Rn2W^dG(TA#y<%J%J9!e(M>9#ona);FYvDDCTG;T8?I#k~GEo9@KHvn$g0 z;w*xC%+4y#Fyu!ytK$cXc#zf%qZ+9CB!~@P!(bg94)DT%x7DK@kVg=(t0daZ1nqSK z3V6~YAlURx2ySvLBI7D(a+YqOV-65*TbW28H8VF$p(0c+lF3v;OFxNc|D2^W+HeA! z7PYU&(bSV{Z&{y&JH^gLfhub`rV*JC%qMV^UPc1!L&!4~1q*10atO5wfeo=NkFjrb zIvi@YFcSqoLUrRF*yE6ggwW~KJ3^GwG91UFO;5MJ_+T@yJm06KI=*IoGb?C z4_Nv%WKOwt@ZjNr1Na-1UFU@y&YT*{lvbgh89G($9~>C$Xe*I)u??HS?6Hi18P{p! 
z8uKP+OH+Bz?azkWmk~TUpX+GrXv40kpT|Sq7nuAdlP@y)5)%qhDOkI%Mpcn^;%cQU zN=M#*;lqE$gk+ObMXZ2RK%=d!Q%ixA2SvG=gp{<0Zr4&lsme*>xK@g9J$WW`=*@7bqm2RYfNTOsV&$33*RJP1bD5{56z(6u1-zPg`%C?+J&xu* z41UbUJkRm$i@ZkdP2%B=w`;VTQ`hF<0Plh8;LHy>Q0c-jCO3ijKn3iVLOG&aaD0+7 zg1E6h7D5nl3Z04cV248@GulZd+wgWmVj{LTtWUOJvbv2t4r<|d2d=Dbz-HomYtoI3 zB~=n(oFy5@)6j7gM1I9sAWR<^MuF>zLm5n2L`jeXVqXY`h{{RdIVw?ALXyrSeohP8 zESF{u9T-IXMS-t_qsJbzdiRo=P+KF73(C~(gc%W;0JONlbA7l3UyX+fr|V*&LU;g9 z$}Jcd0yw-Rzb%L$25|#!9(J=C1yNIN5QRyYwt^7Angay6Y^do%g<#WllWc1J{e&M5 zfnNt9cxdqu^s{lW1U`K!br?T<>7f=G=}y!=#z-TjGoxfAAt;2b(xW0Uqdlfl7G}YO zd7mfw2uTUknL(_OPgn{Y|4h^U&U4A^#QasR$v)r%pdDfvTNw@32S7|7G1~#2MANlq z+?LuygA-n!2_`WU9-O~}{e;w2I*Yg9#^-=&VI53E?`U7u-P_&N-R$vB7FoFu#n}js z67_~!Dlekskz=im1WZd4PIKY~I1h>(zaz>VEHvg~(6j^pGzkn<={b%adrxDp74)%l zUUAwEJy0e2@SDd893@7C0%L%O)XKnK@4Xmos=AyR14Plv_n%-p z(riZ>vFW$esJk__u*UrG+hO$0_@On|YQMJ+dW+yw;{dbfB9j$YPn~;~Ps1JvdRKK8 zx(X&(Y^Mgl4Z5F7yWq46x=7S)nW_si*+MYXXehIL#?ddL8E!$aj#^DEmf*p%DmuSL z7O1pAFPzKYn42zKtgJTYGn2Cj&FcBpth6F%Jz}gBX7g(4kj4z-{V4n|qsGIm77p4p z8rQe4Z}xbV6yZMjofyGUdJIV|VdKP1_3S-Ntam`>=ozi0nDP2oLDz4uY$p_=GB+S? 
zLiS-|y%EO=R)%Un`WL6ZuybbYrN&W!i8}s72?j>*yYbj~cP^hT&VYzYCA$Ic7*HjH zuGsKox`dp2%s{()5%J%QU-H`sEy4q9AdW4f>S`6O$|l5tDdZ+C^6fUHPmaBl^mma_ z2!z5qJg4WeRrbCF@MK;68p=aUsUH;C0wk8J7~4qoL@G9p6USiMeFNH}=ZOC@c#XBS zmIC@P-et$*uj+(`03PP6 z+ioTd{g!|jKqrVfl#=yhqNZB3$X4#EpyEx6K8dx)C~NxP5k<@DQ5{tQA96yZ$O$py z>P|voy@y!$43Zlg6YosGb$*Rm-lo0fGs$bs#+f}iDsl)E&7oE$i6{=0CgIqU3MerG zb`bQ8gqNGg?q-xw+DkMbaLHSi2YEB{7o_@AD4}w6Sc&i1<*90jU3-QA+I^t)vleEMG}puQX3V$tqrap|lBYs3S`_)KlRgdX`9) zkKtO(M(mg-Qzj{kM}Dr+&ug=%2tiw%eYZ#!5oBy@n3*M)VMU>eDNkI zp->!emEGFeknv92Zy8|}yEb!c3;~($&b;=`q#C`f5DO^s> zMq5`&=U*ZSJ)XVD1k@5#MnpG}ABNO9f6j(1^HW&teq~=I#`Y{rC}{g@*7I-@cr5bl?v#0s2RsL>4dA#_%+83R#Tc zDD6ZNtgTr$f*cwW5d4tLoEO%WUE_q|UBiz!i+h4tth$8*q7 z-U@)B|E;Sj+hI+NK}_C%zc1o35V-7WzW~mb-uQvgglCLXP&0txiDYNHv`-Qeb#z*R zhnXR%YT&+)4Iu@=-|(B6fruK8^Z)`X*C7`}c_N&iQl6YbDt&jr>M%l=dQtup74%!$s+#I9!w| zjg1_fDA$cGs_HeUPT#F2iSj4gxOJHQOqOoWW;4)oGa*vkUCK{S?Uif&$3idgMMj;; z@CjHr%s_tkeWCa7i+PK0PF|lZ%w;a+X9vb6OILQEec~|$<{vPTS$6jiOy&@paMHx& z50xv7854K84{)tCUz#pngk`+aB*C#u@^Bn&RW{Uu%lBC#Z%=lny_d-wb(uw;h+&qG z*nAMgv#ES==_p(Zr~C_4^Lrepq&@zQ=afSc^3$>E9Slfiz6`wv&qbk!2G4T{fAx0X zbavKkJx8+}MrM!YXD`iN9)tg$V;lA=Bz1n5o53nj zmRb~s4=<+$j}@(9pG^$p8}%F@x=1hfL2pA2C26;!<3 zxc@TP2n(lR;Xo_c7+pPO2B$>5$0tMNBIJ4!;P@fGSxw2-R9|afE6DY}MK&vhTt*=| zL7JLDGKYV#Dr6EiroLY$o9S~Y?z-|6Ew>91P4liFET7+fVse&S8<(&(Uq%?)t$)c(PQkd3jp`BB z*MngnTc1Nl-#QIDJ}mxo#mv<7eCcuq3-;*e=w9nLh$vhjQVxuH1$|n!PRAT+Mh1Wx z>_J-M2Bou^umFO)g1-+eMPOioKmF!41ZRf#Fn$v<-UGs*LmO= zG(jZ%P7SO-Sck)FVIA4U45=tZyjLk;-f5*wgf2bsY2xFo2q$+j8LHfOJ>o zH6A?b3m`S?7j*;Ea$iM}9vT6dqhFJ`BjY2bu1YjfiLt82q!rbN&l4dyps1{GTAS=j zl{eRD_ClRi6x~lE&%a>qgn3+yCu$$kQyj;Mf{s0fTD<{7(J!ySqwIdUi+)2@K0azE zAfVMc@!x1xW3vaU0#^(Kg;Oz4E7slD@zQt;!|ah}bb5gz*Qx}sG1Fm}h@e$}6`fa6 zKcT<&q1g%B8!XvdqtH~g5)##SVwhYL8b! 
ztVA{&!BP4Yk_fF+XBU71fNH(Pu%w_YB{K|$rDO_$=O9nFT?>r7Zo3vd+?B|!k+BWT zzTJu3S~k_&3at;G;Pa@YV=g4G3BDYb_Ve;LS$ZeK$WZ0L;-6Q9niQJY%?p`@Z z<{7Z{)=KC3!ll`K?ip^HN)(khot?W6TY01R##Y|g@4J}EV;=9OxMzYLur7t}Vi%dn z7I5P&*eF&0W$&`_=7U3r`0si!zT{|HArYhuWwzTX8^%Z!G7KBZbusUNG%&Ul2IV`} zmlT6cOkBmi<3F9G3o7132{_ReU6@>SlM@)zae7(k)h|KpvU~^EF`d9#1>ZrrcqdXT zEfeoxFs|?9Xy!x=roRVByyuySSe?RIr4cSc*(;@CoabgvmvR@Le(2WV&;j$gH8hw# zI&|phgGZ+x7|I_yGWg)3>;nf5KX~Ngp##~Axx@K`xk-EuO$pJJnC-z^gU1fs8a!|; zn9H#%`J1c52eS`m z4@@0Bgj+`rAIJ|*9?fTShmIV~4?b|vyM&Tt0i4WFz;@e@3*NgyD3zx3r%s$ZH6~P} zl5P*~;gK`=8gDHLqfNXx=${@OUu~|!rp1^J<83+EN4ez%9`nx+24q!0L|&Q2uE`Tp zkM{rV2q?ULX zBFw)F`VoQX--R(nY$jw15DUmp2^76h$`UI`36d0ceG;B~8vap{yp&AwG>Xxuh2-GM zkOtxhmFC_@KpgXHV%xVE4 zHvpa}U7x(V5px74LN%uzaR4Xnuw5`MZ;6G!d)Wa$9R&V2FAV%3z#`yP$pL@~@T3*q%jWR&Hz&U^8DwMa}E<%~+Ez&+FzmU@T7sIc7m$5m98K` z|L`tZ_JYMPH0KG`y&}6lzZ0SqT^Il*ZW_0tS@IG{6PGY(7ELQ~Nj$&8PssqAmjswC zqm_}nl}cImZv61O8Hd>UcHsmq@p(~@51qCb9LGuv$3YWhAMB%0r8UtC6ba%e-F$YT z*8#d`_<2|*?m7xm1c`8JfkY}`P?kSLyijT+AdNupM`E#Yb@C=%N+;$fXRvu6d`x{8 z1DjEK=c0tWJ~p^eA5h*=o!hOUXirhzRsGZfc`V$c00^7}tgKS`p)uc$%gpx@aO~ki zkP;win4+q-A0WuYz}e8$us*%Efq&&)ivye^0}?cah7k0Ja6*DkBwodTH9=!Y5G80C zB`85dCs_wtCAorf44pd)_Akk*h452uX@dWhcqZ^EA^laOi$rX8vGfh9Mn@y`jmj>U zzC98&Q1)S>k3z~PZjUHE5uxj2vFItGzn|o4u6mU1{Y&ImNgnXtgYWT?LwBj3jNh#& z85)1&E``zfqj#CX&=JE@-bVlv%Q-<-^6?_9v_blU7;ujePjqKVV3+6h=~kHKdJ%D< zEpU`~FBW%>2FX}X6?z6gz+54f2V&OsqpH*dRUX`pC^4%3pif38g6FN-6*8G*|CN9| z4rrUcJoY!c18M|Pq0)D!zs2p}z}spy7p zT@WBT)X-s!uiGseELav20gJ~8Esqz2`dwv}s^b$U&z>+kxzj?b0OfrQ(24>{w=RI* zIDr8$+>2(p{y~;7iQRd^=v6gXXg<0qK~oeduE|0hQ6W% zCOMfR#-J#->yQOQXLMBoD_dQ%4l30?+~0x9df&pt$ADRG`8=;u0*p7$=C1=NrC18T zHx3nrSy8)4){J~?R?J$mruD6fq`iMJ@DJqW08aVo0RATYSOUNSJhAQ?n}$F;C^Kt8 zu@3bP5mW^v)j~n!3e`8i|F3L{f{e?QF&_n(y+ml~^@0pi z1?4i15`Ozma^80_`5q?U$K<0-et^lpV)BDbUT5-gCVm^_jJ>9qEdCLADD~kNy<#^o zuvn_Suf3zat-Y&#LwgH62wU4%$&*<;GEjnE`3l~I z{S&N!f3=P_FfsGmyd%>I|fK(HTf#2z3Th zWa12@YXSyRWSB|hwkGbNl4}#WZHe4=IbTOD`h5S6O3Sn7&z&BlTZzEJB!G5e1jiDC 
zdX~FvZdJ1(%#i?>-t{O>FrK?5BTn!QMyAjgZd=sI zT3O0m@k)Ny6ZGoR7}&vwQFUM5c|U}AzRQ9_g3PRT6_uPuv#bz5>`5J^QsPn#)}?}5 z41#e52!`~Y1>-wUceU%`$rhFjNX|k6h(g#y+aMa`LL+E1xljgv?qY$V@j{&65K7_^ zqo_T!Gv|E_73go|YA>O1<8lyiD5s>>IIzax*IJy06+%EFRL&gY+{V36045(MOw{HC zb-96CU-v=h!%En02OkIGtdJ+N<2S)|0!L{(5(6SMe7yk?h7w9kxYY|!f1#%z#G~b= z39{@%z)^r9D~(#?2nGKzN_w4DRF-T<)yh7Fg<;|-jI|>f)7zH z@OfJlJ`cpfM;B7+VlFZ#z$FVNtAHk9)wsYo479BSdsw}UVl0y@Vyc?HH8b3bAoJuC z=f~n;S7{VLEA(2U&^u2KA7diRFG);`o0!$z34Msl3Lt=0EB6zGYa247Z(b7ykvkE! zuNG{>uZAGpi5vvNPvay+fqUa13BhZfqG+8;apqLr^3l1d6m!5IVMtfhvF|Q)j7_C=DFJ zxu8Qsse~q97xM(YDPYyJ77DxmFv7S$22gN@WHNU#pPv$-c|St%NF1$Z6~PfATo(9B z^Vuv{rXoVVXF9WA_Q6LI+`y4a91+I6`V9rX^o`<;;!1-~hkL@mw*4FxU(Vl9$@ptA zz}p?e!Lcugz=w>K`pz#%eIzb)Y28 zwZzhz2)xA;oH~Gophl%qxb?Tf5Zla1r90|C%)JTQ{%o$js>3Z6ghMKO;3_vy+=J~8&hn@^3NuQUnxN*+gXQ7ukG1m_ARgk{JYAz%8u zqaG;Ut?;z%#qLaDh(z9S(4aod0jEG?HR`7{Lr^3WH>TvLKm+r>C!c%T?mz~WS4P{W z_Y|)~YIO>WpzwmUk`XxxOGn@`f9|VxgyfCy3>5@OvKlfsC*^vHw>BiO!1pCM-Lxj= zVC)c1e($4B1FBUUN&-gYFrpN-G4s@{O+YPjmD2wF+6T=d^E!RHFCxkdtEH+Yty1iZ z#QlCi-K*Q}`#P0%no-I)sOrOO$e_}0bRHp@N}K4BaU-Bgr)cw8uLuQU9;-~gOq$t( zED*CDvA|S2=uY+`3QV6-ST`WbTzNy7j{FQ932~h3#Lmy+hmsI^N+<^lS%Cm@*2@wK z*>KjJUDz$-BR+wr#eA&1Cxds8RA6)kBQp9ugn%>HM6amGRbeJ^;gAtYP0h+}GA@Qa zOa=e7oyxRriMe(|mSO$vD{M=wh?9QeW4ueL=6woh{T(v!6b|?PEqRp#VP*0~AXm8G z*r%xuj;~U-pD5*Fgb>~*hNA+YTkr^f12}?QRz2lwL}d@2O@R438`mZ~5!`?X-3-wP zQLs6T!qB@8i_Li5VR45O1j>iX>;QJ;C*@t_62-=l;F)#`$c&vIH+WP}81WkUBL zaJQ2ZAR#=7D^5y4Vo3LR3$zD=t2**s`?L`caOTSsNi^?xzew~iLJeyhU#-p&*t$o8 z0xIDM2^E-8P3jR>;b!&6YI$+cSZ7Ep<1H^9G(Ufi)%bZPuQ9V>m&oj}$^m}{uN_EN zIl$O=sc!Th+f$uzirbiK2mf2S5S0Ud2L0=!#>A~Gk@|;jGES43wotnMALY*?sWI6PA>Xy z4*WI2guk}E+vD?DJnBAF2}W>~9!3IuF2R7W^mGUB2vu=|Au;uy5Hs7GuX8_*v6@!# zzDRk>7t9Sht*#^PBlAXctFnrNUws4eI{YBfYAWJZ4c>^((;Gl9c5rLG5Xq5wyADZU`J0DsX*Adg;OpoU&3QsA!v6jd~S1BKMH?`g*%u&_BE@Y`p{ z6IF=xjePd4n049_j{X(2K0r3ZsF~8vr*LBz3#D2KPE2DHFJ6o1HpYoqGPiNG zEx5ZY1~VHZhBAxguN0~)=zbO_L3G*LF?=J&|kxGPtZ zxOY2|zzrdb=23~SUhcYZ;ljR0=dq&ityJb_s{Ecj0_P>7AM;x?5bnP4*mitf3ZjfQbi5!T& 
zUw6^ar-7?LHC$c1U&W0FSTY5qZ1%`2S^RJx{7#JEC>=r)lmU?*5a_yt-cWD_cHub` zIJ1%8NsXOS^o0LG*76~>uwbE<<2VV;!lN6xEire_fGjk3q~9wFiH7pA<<&u1xoXCkeO~QtFS@JS^0=)E*!$l_>48Wa!LfLV z4lSlNkLGeK3eNMk4FKoiWmKRBEC=htvNO_(bG8xy=C#yMb9tm&nm)wwLk4^@}=g55|f& znh-`-W;iFKJ&0m3-A49><}wsLneV3D#zeQ(rRu*Pw(N!>;{7fvcDk?+mG*uQrq*q=Sylk&pwJa>B>#1d3B3J|EeG>t#ohgHj_iD)TEn#3sU;0J| zxKDFcz;1KoxWBx{om+sGyWJ%R`lSQ8L*Qk}9)t842-{Fw+FhvErF|o&pCeCrzfO+% z_e}im*6-&&gBx#RS*q=}ceb~BycXZneGnNgqo;%EioKr9G@@ZV2X{y?L!QGHCLs-Z z4)X+Dxw_;Tm`LHi)h*|E7%p4u*)r@=ka1MdXWv4))UxAJG^Tkn&C6{cKnF% zMZu%sOLmj=%&4_1?e4`l9qN7gepj6u9p$OH$xF6}WJUSmHxKC{eHSX#2;K14cu(ni zlu8GKBF-b8-$Wic25>A=k^QGf^hy!%@LWQ*SaHX*6qc@Pj5h6rEaiO`uaw*N6PWvV5A5l;{f}Ax!`p!zUDtEfW`d|&E!Zm75@QJa4w_UISmFsB z;xhz|3!Mb|8~iS^&U>$W-otm0Z#i+q*YrYt%^LpQq4HHt=!``V{gWuFOk>O$IU@LQrQnj+IC;0ZMEK8!A1sf zwIqU6=K{YyjtV~-Y16hyn_^yBspbsT#MzRXreU08VDH-agON4{G_Ek8#x<6mFF!QN z@sN)AL7Zski|!cxZ?BI3&<(@E#P}N+QJOYnBwf1-ff1FWFWbP-}R2|nM4jHCK%jwmhwAHewWGbG5LKaf57Aqnfzxaf5e0xazMdxs)P3*aATGg;Vdm;QngD! zYR*r&KY1pL+wMa+kKkB-j4D`R;3!m@J3l`&GwJCb$u4`GpjKK!A5Q9wO=!W|RGn9> zm7IC{s+lDUY~DlaQ1u3S8U5J|`Y|b$mKamxtsEuKwvD@R!gk3#6_!?+?-U>v+N%6E2{n05OU7ugaY9630R*1Fm0mkf7gKe+(p)tJS;Q ze=la(yA?Iix!XSpmoG39@cc1u{FE&b=G_*LS3;zV--!_%r9LF}39Hh<0A!~;ubvrq z64}fJCIi(jM2|obHiW#A7E;mutu0B0r!e(%Ccrm5=~eMnGS+@0h{Q|B`%BPWx%o_y z0grFO^@dpX-hpJ?1T{EY8ZDaOT_Stp#ni03K|`Km}Lt%8DV@#@V_6LvT-Xy^vM zy6Do}8v=qFC-`hkG}b=@a07JVMi##%~6o z%isMU#}Lx&Q9Ci=;Yz|Id^`fkW8XO5Jy-N*Cv(LCL`*c1vSrt~@n&;F_Wu}PZ6C{* zvXcn=!48Y7O~n9#jiz}LR*P9#N-f@=a@Xt1hN-SJ4}9# z$uBVZJtlv|#98e3{8tnfx7-e`NACCjZ3bZ&@L#BVLk8JCoH+)-l<@WDAq+ zOdeozjLD-+PBVEElgF7n!Q>p1F(z+eav4db37ffK(`MNMM0O>Rfa?+(RDv4Hx?b@O z5|;&;#T0X{XlByKiAeRx`2h*!1aPHg7@Nn;Pv=QPi4x(q4br_UIUp&|dgN>yJS?{* zUpC>#7@&zEu=E>gLkiOZ9mzF_c!TxFjXi66S}`NDcH_Q{dwbs9)6uh`r@d!g&&HlT zJ$rk4I9G$eH9ajoZ9Sd1v#O^BdHr<{_j7&-$Nx#}K)M!3CywJdw&7TVgHNo(u?a^9 z4nD(U00*yi^&oa{`ydX!xf{oto<5{KIL2^v^sGnP-NQV(_?y4X_nMyPahB=X-+@p^ gt!-UsQ~N4hd9bHv;|K8lQ5?4X?Ko$>R&Q+mzXwW6w*UYD literal 0 HcmV?d00001 diff --git 
a/python/tornado/test/__pycache__/websocket_test.cpython-35.pyc b/python/tornado/test/__pycache__/websocket_test.cpython-35.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c0fafea55371af65b627f91150e8679625dc07d8 GIT binary patch literal 17125 zcmdU0ON<=Hd9I%K&d$DO$yri-4M|bdNaC(Y$&#$d3gs0gF_2a?NmI(0_GosxmpkN4 z&!~DvD~`>?&?=4}No>GQd~kpSxdcXX$R)W1IRyxiTYww_^ff0Q10Mp&iIngAtGnm1 zJrYR^PBLWIc2`$dSAGBcujhUw`rB*&@I|HmU5)%oI6sfWJC;<+#(zt-lxkb5 zYN>WYRTHY6RMn(vr&KkiEVf9fR$5il(mJVH8P(3JYS!FKsa8(4XH<2@T&GnluiCS! zI;+|RRV|p>jB3rP_PnajtM-DbE|}V^YAvevlBzDLc2QM}s=chL%c{LkRri?|In~;) z+9g#jnd=$VT2bu-s(L`R531@xQ=3<t zkExV%RNcxey=>vIw9|PkNk_ zNuN~JlkD+8)Z;hUBj7^4boHt|uj~c&A$nX;PFd{&*@xAu_M)PS)E1S! zEVavF?IWng8~01?{;>8mYD>yqk=hmI9F!g&MePA)AC%gIVcQC7A5!)qsXY|dK8D)E z%042sN0hT9Jv@%uqsl%ewa1k6xYRy@T3~uyYL6@DjMT28_Jp!eO6^JItV-<~Y9Chi zDXBfBoHgkLjPxXKJfiH=a^v*KjkCD%sIn_^qcU>i9Bw?O?8oKC<9q?fgRY*!jVF|S zMsA!5$MZC5SCzdcwRr8*$|CCR z9GhOLJaw0nQ%U&?^-ZtS>i3*lv)%3Jo}aUu?=`(<$MxsCy6N_6TYb0Dlk1&kZ@cF8 z>OH6JxV?>vbv$eNxxU-% zu_5}NeW}}RH5+v^%;24`)m^*gXf(C@C`z>*2UBg_a(cCP-Mdxm-tKL8+)C14e9PIq zA~%9Z{H0;jM#pvWPJcdXB(viHysqQe{jS#_l;ID$$7rE1;lxo?K9D;oAE+ZJAAoP8 zOsNlXnNl`%PllAD63Qw&C+8VMqZxl6VToQGjBfD#F9WD@qgD63@>WNe2}0AoQQkHa z_R78Odavxf-|ct~FPw76tu

    Dm(9iaI5)z8NaLBO|M*UwK_Yd8{6qQy4`e}=&gb7 zox0oamOES01K!nXG(ma0Y%pAvJ~kShFMs@-z_E_QqbmNErM{(94JQdX5ge{mtPRks zAfo|ik$L>(MyK7?jz^}eb-E;?2ePC*r?sUYLRb0_ik~Ze1b;sI+WPa?zv?;KyMAf? zjk)mGe%x1sYvd_5N`ds@)!@J(=Xt!>6hJS(C^&yoB z;cEA`pU3oj&3daja2~B-;eyiu_4b>VQ+X*SCow^4Zil0#~WV#g8UwpOz{jap3~L(iI1(hsvBgNzi!RUbrg9EX=fQAlO1 zg;XIawGC86KO%e{hez5>$Ss7HA_}=p0-TgTd!e!238b+hq`@=%o5mq1b07v~h(QlP zqXfS4KY}>v&+X_YG;bTcU%%mi`@E(L{pvOxKfQSyOj$_?ulU)Z$;9ORJb(*sox>|b zkV_*2Tpc#qsHF5MG}Gj5eVWB8i#XUSxN#PTbXl-8uOAq&715a&-8bDg%v9sRBY*;4 z0VmLwK>d)xD!Fl&BBiC^U;|h*8Ea(pLT}FKSsWo2C#Jmn0dZMF*UyCA0~cODFfMbi zId$97AsULxrvm=rNPWWi9%(;LnI#H$8J?WSFJJH0X#3C#w~9n`$@e!~%6JjlF#hC=JBO13Kc zfG$Aa^I?vICVt^^-|aQqj$o~+viz(zrr*zQ%OpLz2)!w)BGu_S?hAO#9xGkaVO1jW0YzU4=>?0MgasQ z6TO<)@DrU|@Ey`3`0-$9a_?*5kfHTgV&1w6EMs8cqp>g`SW>TseS&Ry{lG93*Xc}L z7Tq7myGe8|yh}1*{s4l009aD^7Mut+G@_l_c%^w4?cpAFI`-ymr;?~Hgw~)2i{5f< zKP`r53R+Kt(=7*>WAi-)8Xeu~ z!^d^>d0bC7TEB_AmvIO$6s-Lz&6+W~rBHF6O}>O;8r?vy4`r?(T|iIq)dPBhhL+H9 zf20iPl2b`Ch8a4=d>IcRBdrF=XygHsunUo}=ysK$%i$#NTGq8YN%{%Y+>jAb?>7Ch zdl@BaS+77=YP-9Wv^*FK?K7n=HeC}FWVjW zbk8*3cC_;ZV;1d{!TShT+73;VS4Kp03vINEV96^tT{e;^hW)_V>Uk!p;U%IJ{UT5h zFF0iJS6CsYWoTd6NXSb%t1O^86c3frVX+LZUt-5p&*q)OR}A0tW!(K54ljeEU}dag zMzfCZj($m;oyXyAfdZq<9jPPZ#4`Sb_DCrtiJ}mcnhKm9MWI=_JEQD88}n{XYG#>5 z3TkGgW=qxrpG@01YxffBZX(V;zV0Qxp*XT`_|-i3UKE z9>g7r#e`tg2-cPa8ND?1f}K*mOk9Js>Sg2V(Kr`ZkH#}G^*1u4>Xk}vV~}2R5Yh;< z53*}6RPv686XUrJ=GKIH5eG8b8|+(inBb{35v@nz$6$V~&SXw4uqBneCZ|f5n%X?L}i^QiQ zf+X-bB}53q5QzFn6f$7AW1?XR)esZYy$*d8XkUX*^GBd5G85@7r`Oo_XOR?fbngNK zQ$NM9I8YRR4iR$~Sqx`FTlh;1_uoqDtDRC-LkH-~&4h zJ2kx|QCpRy$!my~++gickBNaP%5)-SUqhzv0}elrLM0Zhc{u!wqozivxc8wk5XfW% z!uu-}#=L%4%nT+=!^#WEx$%TXHDU2AMKBN;p$x!+>O&=BBVv`6=fno@X>R(=(PZ-p za0xq{Q8)AI_mzU?H5&ChqBEtpS-xi2k8$F%8-vmwn=WGLXPX@)NIP9WM+bPb-ngYN zVRq5Hsabd|;}G;X08uU`&4kohuhV7Xb8AeZgbKMATJ`p(U4QY%d>V%uJQu@tLo$V9 z@K#@FF~{b;h!fLAr69hCUPmvQUZ!TW>ZfS8{ftQ)Ni?Xx!kfY>Oz8Pbb=yV?$92TT zml@KK#34Vx1_kBDcD2F82Xc=mO>{P^Ym>$d{_cqB0;&4(tms*K0$HyTLS{v93*JQUEg5+fTE^9Y`crjkSl`*6rqnm93z 
z@bv_VNq1BM?CN<&<*Bn@{MgoOqvL4*hhf)xqOVb-r%0Vf21 z1d7bYAhG(`pt#r2in;O^f`GHe2$3m<#d%g|~=;{$Cyvvk1>N&n2f%M|!l62|NC87QM8Gw44nrLr7%l3g!1<8`R!ELCyhni&I%kT%B3h@K#*|t%lr&?0;}JZD zGe6_?;4ilCDVp|AOz@$JJ2v}UTL|ajF<_zZpU`QbW|B}bbJkp9&Kf-Vscae3O@vtz z{n3dO%xKnt8T0EV9uTLQekAY#t2L=~tPvD}+Jtcnd_rPAVb);n5A0<`6JzOT@a8bn zE`WweQVAG1hd$Ijxd5dJEQ2)szo0*ZUor_VDewax{#5*eMnajYuu{C^TrHst)utpEp$ycVHTsy=&AGNp%N)gJ}XXjD%)j%JJY-lF9l@7*c5R z1U00e1NZX;&rY(SBMKMmFwb8>;V0YP4bk^>T^Z?4%CS-QCJ$v6)Ps=N$KxJ#2E&*+ z9KbcPn2I%iF^@;MrqDx#>s4ID<7(J31Sz2eB*7^F$;fs9B5(~|j!2S&c$S%W16Imd zKQfjH*=YxZyrG~Yz-UI`@u)OfO(denP`Qi7k-tgBHpLx?c{DU!!|EC11nKyOwM$;WE(_ffKPm4w~K_P=z7}a(B zG1ftJn#7CafEy1qV`P$^f}iZ`mg$Kis^iiM#{|*Fw$r#(>&SR&?_mdrU2{7%CcHnw zgI*DZN)!^Xu!;Rb7s)l8iBWsgd-{J%C=HvY21jD3v?l5CZDhnh7kcSFUi2CCIzEkF zvb~HH%fb1bMzOVfqgd2eL^BFj0fz|JZJgYjaIuxh7X;#mr)h{x(l$oiAXHew3EfaW z2+4x5Wz|h=%`@^PN@%)78A26CmwASCY5o|vRA!7EiCZsWi}+hZiIODCCdnY7OJLwA zOe4f02$Ra})LyzYT_Z_YMC7ih8icV#+89%3q>0f@(HcDUS#*i%E1ouFp--cYQF{Ly zbU6?R5M^eE3rtSSb5^lu>(@r3gHvgAVvH`yf<&Fg#NhNj(&hgwEhOi--SK)@@-(Xy zqRbu&Y0=;8Ic^=h8l#54HflUPl^Qj#ziD^cb!_K4JB2Xk?vXGfU2RkjOsN3EZQu(EZ$||v3QRKJ$n5<3+hQdU@=S>$k|cW zy@tZY;XR6?U=`*I>0+)hSIptxOfgeT7iWuk{9BOg`Qm)BP+X9wsDGkg5@+XecvM{& zW9*h$xI^48E73mB#bd!@CC@X;kyT53hL=#(+#jFE%GRvEZ+*D8^eWu1H=6ImUpf35 z7d6Tfv96X`&;7G!&ppK>k^~S=QNyltY|om{ zV3_GO-t{5$h5(NapQ%{lS%t+#6#h}JU4@ata0b!t*w_u1n#}Mr*)5L~vqn~6CgE@# z2z!>tI9(nx-4sr2vlUcGAD=hUn5}x9Z)D~{!3s=$)AY%V;eDS2FhGR?#9pA@yr;2E ze?A%;@!-+G;r$y5W1--b!y7lY2&^IQwx!;GSyJ4HV=eXa*PjV?%xU7-q!D#zJcRgy7=Q3|>lUd2J!4D~fCv%avKhG)*beQG| z7K6#73=mai!32kQaPFsd-M!(+qK=<^``Vt`7QcU$j;;NPN4WqMR}!?_V;5c zAdm{j9_qgfSFp)bwzEB8BT5CDKGzRwBMd14*-uHApJcKy*)PL;ntXyGu2Bmy<_8F7E9IIM?4c6^HT1 z&lw&AE6qgQHiETSt6enYzL+4FeH>JdMQ3#nGj1SFG(lxrJk1_Q-ZC_d-@y$`J=}+u zu{3KMIHDi!8d@^!0abR5ywqGKz}X1n}A{GBCp5Z%=+^P@$Gdtx+^X5>V8tg}n1NQ4?(hW;$G!QI@U*?4a1Ys5-Tw;eHCU`6WUfcXQA z?cMOD0EX8O4$QjbyFPDV?qEzi;TU00pPTv`0cN`4iUEdw$8zf*;>LU7O92e8A_LsgUQdaT1UR z5Q`~;FaTat%EkuM!rYT(4NxHT+eYuf6~uGH6l`=);u_wY+522x+1OcxdxgFCWJ@OR 
zEyz6~_~M4>=lW*D{socw1yEC)9?PUhuKU8%Pn|m}XIHOXx^_ugSKnU0Ci(0yKKJZ1 zY#^rS4CD`QQKb@ki$Lqfb8h_?L2hxQ5^dgNbf0p42{9$N;xZud~lWZKzvj57QQx*1{KAv^sD zH@?R=k!1=A&FcqdrP3w&#&mRObi&5OJ&6|xKQVwugb9TPVFl@ePX;;TQ0{(=b8Ml+ zD(J}AMGAQ5Ps3rua2$$%^Xe2h`1nYdLpc5zH+~lO3){f!!O7rj%GK}-*s}RfPd*~T z{!b4pK@X1E`h8$V=LwiW`o>{PXu9{+`JJ&(en(-{e}V=hQ;tyli*U9?F@7xfjp9%7 zu0Id^1yk|5@;kGTgw+%^t_08yn3#o$Ud>Gp)uqu!tJo>g z(gc6lC!jX_e7V$8EriXbh$5m7XG+n>ECAD+tw3xCFZbB_HtdOvQO<)Rgyk)ZO~ zrb&~m@$xJSe#&I_5%XgY*%U81CO^~JyoqnwB#sgXTzp~i`^3i)Ga&+A%~34Z@$H5% zcs1kQYIZMf37tlGBoYfh_k!X07eQ3zZJ;=T#U-wAmhkU{{9VMi69w*2Naw9w;xMkb lTVXa?S}djUuYe;{nkg-p{nOJ z1!%<_`=!one@K2ne?g!67xpzz{R@8TcNPQ*QM7A2FQtg(*@MO6T)uNzHRtB)|9<|@ z@Bcne^a~X}7U)e}$yWwZfUiLzQDl&7P*ftfM3G6ZNs&daMFy`ZQCOy^LT)9Sn-o?l zs*zjE<`#uB6xGSCXY(?JvlQ9n+S$B9;T%QtxG@R&o6dNwQz6y_Ll{gnq$UR+)e#XS4cIh?+1P_ z6|&8m<)0FWO0oy*sZe=(0Y3GWDcL zdT}=qs=|KiC#sQiyhGs!LMC#Vx8dRW_gkOr{xtz__wLp=es>oJ{@hRf-LK=eAMWmm zsJGke^w#(KoiJGM9?GlHK|9%nx4V82kAGp1-J|57)5v_dm_V!Dz5hK$3PaK`^uR+_-$w zeWoh!i?n^HX8i;vPrrEB_d{jv#ld5^mgQEp+*Okr%(7v<1OXa~CRQdE1~L|m!MT%q z6xOz!imG{D*N=qfsk-MyanKK$wmpyEpWV^Mlc%wzT;pMEKTWX_Wdj34GGS}mhF!5u z&1+*i`iz2Y6RbA~wxFg1uR=(o+fH3Avv!53*P)lAgnd)RwS_DB0mJQ4NW&d~3{o%L zbbA!s2)`j0w+%`$>PUqP_u;|{2P+d*N-ZW1%puq`-fFGss-DH0U!v-^V-4P$qQ=G$ zZW}j34z}dxh7R^&`Wu0em1V1L<4QRGiy-83x9yZ#1N&H_FjVPN36_gz0d{H`CT zo$f(yTdg7;V( zlH9ruXn6+1pnjRfzWn=*KRBRMl$tizWv47>dCfSBa=w^T0fwPOQSNG9XWVwW!0FGI!HCV}fvLDBnu3p*m z<)y7_mu`LBpTVBst%38ep9kfBy1#xMembt4tYej`h^}t6vg$ESQ?;mdVCN&<>may> zC&zkUdZFIMXGYqy=8O)l{ZAmWjN2L2LvVB}rDMo=s`D(eYF4(<7qPW4n$$shhB1cL z(=8YBF7iJthCHpGH~;tZ+L$vfRoRN;2OTk}!gQa(bc3Za%KtRvsp_x+Du#fnL?S(m z17+R)c6%p7j+I%PYl~)RP$AvvfLLcPgGyQGd;JM$DRgkC)Er^SM_80_XpvzXD=5~D z(nT)W=U-@Z{aDMXPA1lIW&R^}lkf2`hP0-1O#TKpINo)~g&MtVTec;ci!OWNFim@C zJRS+D`!uj>M`BOYZ7dmmI5anLC5*Y3S2nf8x}b&a3VlL2R-d_aS-3PxypwN> zO2zY$l}t>^xU^4G68(6cRlIM|=Rd4xokxDe0H_(!=tn@{tHT5w#Kl`T9yaJ6 zIGhwlZ@`|qG;QMd75p%`c_jUwm%B6JZZ|(XjT?*!+}*!E^P!wt2dk2i_GFLbCSI=FG7t)SWO 
zhp95#VWPJvgfvI9I{jjWzrp>4YXBds8b-6cV#phqjqt-h{QpAehm+c5XY~nmX3T<} zN2MpvCK{QXT3-d3DSCQcaWZh`iKZ&~BqJH0@WC%*m|B`{|HaHBV*xwlb$4Ka7yE{MpXTpX^+I~++F1qg7oXN#>W1?>9 z_S9V{EjqoZC-3p>C4g*_?&sn034C`|(OGmQGi?3uf2rbFD6$<7F&~yvjE@ zsNE(07uNB*r%Qy6Lj1!ajQ4P+Q~}4rp_=Y}c1fyZ&D-;e@&X literal 0 HcmV?d00001 diff --git a/python/tornado/test/asyncio_test.py b/python/tornado/test/asyncio_test.py new file mode 100644 index 000000000..b50b2048e --- /dev/null +++ b/python/tornado/test/asyncio_test.py @@ -0,0 +1,113 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from __future__ import absolute_import, division, print_function, with_statement + +from tornado import gen +from tornado.testing import AsyncTestCase, gen_test +from tornado.test.util import unittest, skipBefore33, skipBefore35, exec_test + +try: + from tornado.platform.asyncio import asyncio +except ImportError: + asyncio = None +else: + from tornado.platform.asyncio import AsyncIOLoop, to_asyncio_future + # This is used in dynamically-evaluated code, so silence pyflakes. + to_asyncio_future + + +@unittest.skipIf(asyncio is None, "asyncio module not present") +class AsyncIOLoopTest(AsyncTestCase): + def get_new_ioloop(self): + io_loop = AsyncIOLoop() + asyncio.set_event_loop(io_loop.asyncio_loop) + return io_loop + + def test_asyncio_callback(self): + # Basic test that the asyncio loop is set up correctly. 
+ asyncio.get_event_loop().call_soon(self.stop) + self.wait() + + @gen_test + def test_asyncio_future(self): + # Test that we can yield an asyncio future from a tornado coroutine. + # Without 'yield from', we must wrap coroutines in asyncio.async. + x = yield asyncio.async( + asyncio.get_event_loop().run_in_executor(None, lambda: 42)) + self.assertEqual(x, 42) + + @skipBefore33 + @gen_test + def test_asyncio_yield_from(self): + # Test that we can use asyncio coroutines with 'yield from' + # instead of asyncio.async(). This requires python 3.3 syntax. + namespace = exec_test(globals(), locals(), """ + @gen.coroutine + def f(): + event_loop = asyncio.get_event_loop() + x = yield from event_loop.run_in_executor(None, lambda: 42) + return x + """) + result = yield namespace['f']() + self.assertEqual(result, 42) + + @skipBefore35 + def test_asyncio_adapter(self): + # This test demonstrates that when using the asyncio coroutine + # runner (i.e. run_until_complete), the to_asyncio_future + # adapter is needed. No adapter is needed in the other direction, + # as demonstrated by other tests in the package. + @gen.coroutine + def tornado_coroutine(): + yield gen.Task(self.io_loop.add_callback) + raise gen.Return(42) + native_coroutine_without_adapter = exec_test(globals(), locals(), """ + async def native_coroutine_without_adapter(): + return await tornado_coroutine() + """)["native_coroutine_without_adapter"] + + native_coroutine_with_adapter = exec_test(globals(), locals(), """ + async def native_coroutine_with_adapter(): + return await to_asyncio_future(tornado_coroutine()) + """)["native_coroutine_with_adapter"] + + # Use the adapter, but two degrees from the tornado coroutine. 
+ native_coroutine_with_adapter2 = exec_test(globals(), locals(), """ + async def native_coroutine_with_adapter2(): + return await to_asyncio_future(native_coroutine_without_adapter()) + """)["native_coroutine_with_adapter2"] + + # Tornado supports native coroutines both with and without adapters + self.assertEqual( + self.io_loop.run_sync(native_coroutine_without_adapter), + 42) + self.assertEqual( + self.io_loop.run_sync(native_coroutine_with_adapter), + 42) + self.assertEqual( + self.io_loop.run_sync(native_coroutine_with_adapter2), + 42) + + # Asyncio only supports coroutines that yield asyncio-compatible + # Futures. + with self.assertRaises(RuntimeError): + asyncio.get_event_loop().run_until_complete( + native_coroutine_without_adapter()) + self.assertEqual( + asyncio.get_event_loop().run_until_complete( + native_coroutine_with_adapter()), + 42) + self.assertEqual( + asyncio.get_event_loop().run_until_complete( + native_coroutine_with_adapter2()), + 42) diff --git a/python/tornado/test/auth_test.py b/python/tornado/test/auth_test.py new file mode 100644 index 000000000..59c96b232 --- /dev/null +++ b/python/tornado/test/auth_test.py @@ -0,0 +1,545 @@ +# These tests do not currently do much to verify the correct implementation +# of the openid/oauth protocols, they just exercise the major code paths +# and ensure that it doesn't blow up (e.g. 
with unicode/bytes issues in +# python 3) + + +from __future__ import absolute_import, division, print_function, with_statement +from tornado.auth import OpenIdMixin, OAuthMixin, OAuth2Mixin, TwitterMixin, AuthError, GoogleOAuth2Mixin, FacebookGraphMixin +from tornado.concurrent import Future +from tornado.escape import json_decode +from tornado import gen +from tornado.httputil import url_concat +from tornado.log import gen_log +from tornado.testing import AsyncHTTPTestCase, ExpectLog +from tornado.util import u +from tornado.web import RequestHandler, Application, asynchronous, HTTPError + + +class OpenIdClientLoginHandler(RequestHandler, OpenIdMixin): + def initialize(self, test): + self._OPENID_ENDPOINT = test.get_url('/openid/server/authenticate') + + @asynchronous + def get(self): + if self.get_argument('openid.mode', None): + self.get_authenticated_user( + self.on_user, http_client=self.settings['http_client']) + return + res = self.authenticate_redirect() + assert isinstance(res, Future) + assert res.done() + + def on_user(self, user): + if user is None: + raise Exception("user is None") + self.finish(user) + + +class OpenIdServerAuthenticateHandler(RequestHandler): + def post(self): + if self.get_argument('openid.mode') != 'check_authentication': + raise Exception("incorrect openid.mode %r") + self.write('is_valid:true') + + +class OAuth1ClientLoginHandler(RequestHandler, OAuthMixin): + def initialize(self, test, version): + self._OAUTH_VERSION = version + self._OAUTH_REQUEST_TOKEN_URL = test.get_url('/oauth1/server/request_token') + self._OAUTH_AUTHORIZE_URL = test.get_url('/oauth1/server/authorize') + self._OAUTH_ACCESS_TOKEN_URL = test.get_url('/oauth1/server/access_token') + + def _oauth_consumer_token(self): + return dict(key='asdf', secret='qwer') + + @asynchronous + def get(self): + if self.get_argument('oauth_token', None): + self.get_authenticated_user( + self.on_user, http_client=self.settings['http_client']) + return + res = 
self.authorize_redirect(http_client=self.settings['http_client']) + assert isinstance(res, Future) + + def on_user(self, user): + if user is None: + raise Exception("user is None") + self.finish(user) + + def _oauth_get_user(self, access_token, callback): + if self.get_argument('fail_in_get_user', None): + raise Exception("failing in get_user") + if access_token != dict(key='uiop', secret='5678'): + raise Exception("incorrect access token %r" % access_token) + callback(dict(email='foo@example.com')) + + +class OAuth1ClientLoginCoroutineHandler(OAuth1ClientLoginHandler): + """Replaces OAuth1ClientLoginCoroutineHandler's get() with a coroutine.""" + @gen.coroutine + def get(self): + if self.get_argument('oauth_token', None): + # Ensure that any exceptions are set on the returned Future, + # not simply thrown into the surrounding StackContext. + try: + yield self.get_authenticated_user() + except Exception as e: + self.set_status(503) + self.write("got exception: %s" % e) + else: + yield self.authorize_redirect() + + +class OAuth1ClientRequestParametersHandler(RequestHandler, OAuthMixin): + def initialize(self, version): + self._OAUTH_VERSION = version + + def _oauth_consumer_token(self): + return dict(key='asdf', secret='qwer') + + def get(self): + params = self._oauth_request_parameters( + 'http://www.example.com/api/asdf', + dict(key='uiop', secret='5678'), + parameters=dict(foo='bar')) + self.write(params) + + +class OAuth1ServerRequestTokenHandler(RequestHandler): + def get(self): + self.write('oauth_token=zxcv&oauth_token_secret=1234') + + +class OAuth1ServerAccessTokenHandler(RequestHandler): + def get(self): + self.write('oauth_token=uiop&oauth_token_secret=5678') + + +class OAuth2ClientLoginHandler(RequestHandler, OAuth2Mixin): + def initialize(self, test): + self._OAUTH_AUTHORIZE_URL = test.get_url('/oauth2/server/authorize') + + def get(self): + res = self.authorize_redirect() + assert isinstance(res, Future) + assert res.done() + + +class 
FacebookClientLoginHandler(RequestHandler, FacebookGraphMixin): + def initialize(self, test): + self._OAUTH_AUTHORIZE_URL = test.get_url('/facebook/server/authorize') + self._OAUTH_ACCESS_TOKEN_URL = test.get_url('/facebook/server/access_token') + self._FACEBOOK_BASE_URL = test.get_url('/facebook/server') + + @gen.coroutine + def get(self): + if self.get_argument("code", None): + user = yield self.get_authenticated_user( + redirect_uri=self.request.full_url(), + client_id=self.settings["facebook_api_key"], + client_secret=self.settings["facebook_secret"], + code=self.get_argument("code")) + self.write(user) + else: + yield self.authorize_redirect( + redirect_uri=self.request.full_url(), + client_id=self.settings["facebook_api_key"], + extra_params={"scope": "read_stream,offline_access"}) + + +class FacebookServerAccessTokenHandler(RequestHandler): + def get(self): + self.write('access_token=asdf') + + +class FacebookServerMeHandler(RequestHandler): + def get(self): + self.write('{}') + + +class TwitterClientHandler(RequestHandler, TwitterMixin): + def initialize(self, test): + self._OAUTH_REQUEST_TOKEN_URL = test.get_url('/oauth1/server/request_token') + self._OAUTH_ACCESS_TOKEN_URL = test.get_url('/twitter/server/access_token') + self._OAUTH_AUTHORIZE_URL = test.get_url('/oauth1/server/authorize') + self._TWITTER_BASE_URL = test.get_url('/twitter/api') + + def get_auth_http_client(self): + return self.settings['http_client'] + + +class TwitterClientLoginHandler(TwitterClientHandler): + @asynchronous + def get(self): + if self.get_argument("oauth_token", None): + self.get_authenticated_user(self.on_user) + return + self.authorize_redirect() + + def on_user(self, user): + if user is None: + raise Exception("user is None") + self.finish(user) + + +class TwitterClientLoginGenEngineHandler(TwitterClientHandler): + @asynchronous + @gen.engine + def get(self): + if self.get_argument("oauth_token", None): + user = yield self.get_authenticated_user() + self.finish(user) + 
else: + # Old style: with @gen.engine we can ignore the Future from + # authorize_redirect. + self.authorize_redirect() + + +class TwitterClientLoginGenCoroutineHandler(TwitterClientHandler): + @gen.coroutine + def get(self): + if self.get_argument("oauth_token", None): + user = yield self.get_authenticated_user() + self.finish(user) + else: + # New style: with @gen.coroutine the result must be yielded + # or else the request will be auto-finished too soon. + yield self.authorize_redirect() + + +class TwitterClientShowUserHandler(TwitterClientHandler): + @asynchronous + @gen.engine + def get(self): + # TODO: would be nice to go through the login flow instead of + # cheating with a hard-coded access token. + response = yield gen.Task(self.twitter_request, + '/users/show/%s' % self.get_argument('name'), + access_token=dict(key='hjkl', secret='vbnm')) + if response is None: + self.set_status(500) + self.finish('error from twitter request') + else: + self.finish(response) + + +class TwitterClientShowUserFutureHandler(TwitterClientHandler): + @asynchronous + @gen.engine + def get(self): + try: + response = yield self.twitter_request( + '/users/show/%s' % self.get_argument('name'), + access_token=dict(key='hjkl', secret='vbnm')) + except AuthError as e: + self.set_status(500) + self.finish(str(e)) + return + assert response is not None + self.finish(response) + + +class TwitterServerAccessTokenHandler(RequestHandler): + def get(self): + self.write('oauth_token=hjkl&oauth_token_secret=vbnm&screen_name=foo') + + +class TwitterServerShowUserHandler(RequestHandler): + def get(self, screen_name): + if screen_name == 'error': + raise HTTPError(500) + assert 'oauth_nonce' in self.request.arguments + assert 'oauth_timestamp' in self.request.arguments + assert 'oauth_signature' in self.request.arguments + assert self.get_argument('oauth_consumer_key') == 'test_twitter_consumer_key' + assert self.get_argument('oauth_signature_method') == 'HMAC-SHA1' + assert 
self.get_argument('oauth_version') == '1.0' + assert self.get_argument('oauth_token') == 'hjkl' + self.write(dict(screen_name=screen_name, name=screen_name.capitalize())) + + +class TwitterServerVerifyCredentialsHandler(RequestHandler): + def get(self): + assert 'oauth_nonce' in self.request.arguments + assert 'oauth_timestamp' in self.request.arguments + assert 'oauth_signature' in self.request.arguments + assert self.get_argument('oauth_consumer_key') == 'test_twitter_consumer_key' + assert self.get_argument('oauth_signature_method') == 'HMAC-SHA1' + assert self.get_argument('oauth_version') == '1.0' + assert self.get_argument('oauth_token') == 'hjkl' + self.write(dict(screen_name='foo', name='Foo')) + + +class AuthTest(AsyncHTTPTestCase): + def get_app(self): + return Application( + [ + # test endpoints + ('/openid/client/login', OpenIdClientLoginHandler, dict(test=self)), + ('/oauth10/client/login', OAuth1ClientLoginHandler, + dict(test=self, version='1.0')), + ('/oauth10/client/request_params', + OAuth1ClientRequestParametersHandler, + dict(version='1.0')), + ('/oauth10a/client/login', OAuth1ClientLoginHandler, + dict(test=self, version='1.0a')), + ('/oauth10a/client/login_coroutine', + OAuth1ClientLoginCoroutineHandler, + dict(test=self, version='1.0a')), + ('/oauth10a/client/request_params', + OAuth1ClientRequestParametersHandler, + dict(version='1.0a')), + ('/oauth2/client/login', OAuth2ClientLoginHandler, dict(test=self)), + + ('/facebook/client/login', FacebookClientLoginHandler, dict(test=self)), + + ('/twitter/client/login', TwitterClientLoginHandler, dict(test=self)), + ('/twitter/client/login_gen_engine', TwitterClientLoginGenEngineHandler, dict(test=self)), + ('/twitter/client/login_gen_coroutine', TwitterClientLoginGenCoroutineHandler, dict(test=self)), + ('/twitter/client/show_user', TwitterClientShowUserHandler, dict(test=self)), + ('/twitter/client/show_user_future', TwitterClientShowUserFutureHandler, dict(test=self)), + + # simulated servers + 
('/openid/server/authenticate', OpenIdServerAuthenticateHandler), + ('/oauth1/server/request_token', OAuth1ServerRequestTokenHandler), + ('/oauth1/server/access_token', OAuth1ServerAccessTokenHandler), + + ('/facebook/server/access_token', FacebookServerAccessTokenHandler), + ('/facebook/server/me', FacebookServerMeHandler), + ('/twitter/server/access_token', TwitterServerAccessTokenHandler), + (r'/twitter/api/users/show/(.*)\.json', TwitterServerShowUserHandler), + (r'/twitter/api/account/verify_credentials\.json', TwitterServerVerifyCredentialsHandler), + ], + http_client=self.http_client, + twitter_consumer_key='test_twitter_consumer_key', + twitter_consumer_secret='test_twitter_consumer_secret', + facebook_api_key='test_facebook_api_key', + facebook_secret='test_facebook_secret') + + def test_openid_redirect(self): + response = self.fetch('/openid/client/login', follow_redirects=False) + self.assertEqual(response.code, 302) + self.assertTrue( + '/openid/server/authenticate?' in response.headers['Location']) + + def test_openid_get_user(self): + response = self.fetch('/openid/client/login?openid.mode=blah&openid.ns.ax=http://openid.net/srv/ax/1.0&openid.ax.type.email=http://axschema.org/contact/email&openid.ax.value.email=foo@example.com') + response.rethrow() + parsed = json_decode(response.body) + self.assertEqual(parsed["email"], "foo@example.com") + + def test_oauth10_redirect(self): + response = self.fetch('/oauth10/client/login', follow_redirects=False) + self.assertEqual(response.code, 302) + self.assertTrue(response.headers['Location'].endswith( + '/oauth1/server/authorize?oauth_token=zxcv')) + # the cookie is base64('zxcv')|base64('1234') + self.assertTrue( + '_oauth_request_token="enhjdg==|MTIzNA=="' in response.headers['Set-Cookie'], + response.headers['Set-Cookie']) + + def test_oauth10_get_user(self): + response = self.fetch( + '/oauth10/client/login?oauth_token=zxcv', + headers={'Cookie': '_oauth_request_token=enhjdg==|MTIzNA=='}) + 
response.rethrow() + parsed = json_decode(response.body) + self.assertEqual(parsed['email'], 'foo@example.com') + self.assertEqual(parsed['access_token'], dict(key='uiop', secret='5678')) + + def test_oauth10_request_parameters(self): + response = self.fetch('/oauth10/client/request_params') + response.rethrow() + parsed = json_decode(response.body) + self.assertEqual(parsed['oauth_consumer_key'], 'asdf') + self.assertEqual(parsed['oauth_token'], 'uiop') + self.assertTrue('oauth_nonce' in parsed) + self.assertTrue('oauth_signature' in parsed) + + def test_oauth10a_redirect(self): + response = self.fetch('/oauth10a/client/login', follow_redirects=False) + self.assertEqual(response.code, 302) + self.assertTrue(response.headers['Location'].endswith( + '/oauth1/server/authorize?oauth_token=zxcv')) + # the cookie is base64('zxcv')|base64('1234') + self.assertTrue( + '_oauth_request_token="enhjdg==|MTIzNA=="' in response.headers['Set-Cookie'], + response.headers['Set-Cookie']) + + def test_oauth10a_get_user(self): + response = self.fetch( + '/oauth10a/client/login?oauth_token=zxcv', + headers={'Cookie': '_oauth_request_token=enhjdg==|MTIzNA=='}) + response.rethrow() + parsed = json_decode(response.body) + self.assertEqual(parsed['email'], 'foo@example.com') + self.assertEqual(parsed['access_token'], dict(key='uiop', secret='5678')) + + def test_oauth10a_request_parameters(self): + response = self.fetch('/oauth10a/client/request_params') + response.rethrow() + parsed = json_decode(response.body) + self.assertEqual(parsed['oauth_consumer_key'], 'asdf') + self.assertEqual(parsed['oauth_token'], 'uiop') + self.assertTrue('oauth_nonce' in parsed) + self.assertTrue('oauth_signature' in parsed) + + def test_oauth10a_get_user_coroutine_exception(self): + response = self.fetch( + '/oauth10a/client/login_coroutine?oauth_token=zxcv&fail_in_get_user=true', + headers={'Cookie': '_oauth_request_token=enhjdg==|MTIzNA=='}) + self.assertEqual(response.code, 503) + + def 
test_oauth2_redirect(self): + response = self.fetch('/oauth2/client/login', follow_redirects=False) + self.assertEqual(response.code, 302) + self.assertTrue('/oauth2/server/authorize?' in response.headers['Location']) + + def test_facebook_login(self): + response = self.fetch('/facebook/client/login', follow_redirects=False) + self.assertEqual(response.code, 302) + self.assertTrue('/facebook/server/authorize?' in response.headers['Location']) + response = self.fetch('/facebook/client/login?code=1234', follow_redirects=False) + self.assertEqual(response.code, 200) + + def base_twitter_redirect(self, url): + # Same as test_oauth10a_redirect + response = self.fetch(url, follow_redirects=False) + self.assertEqual(response.code, 302) + self.assertTrue(response.headers['Location'].endswith( + '/oauth1/server/authorize?oauth_token=zxcv')) + # the cookie is base64('zxcv')|base64('1234') + self.assertTrue( + '_oauth_request_token="enhjdg==|MTIzNA=="' in response.headers['Set-Cookie'], + response.headers['Set-Cookie']) + + def test_twitter_redirect(self): + self.base_twitter_redirect('/twitter/client/login') + + def test_twitter_redirect_gen_engine(self): + self.base_twitter_redirect('/twitter/client/login_gen_engine') + + def test_twitter_redirect_gen_coroutine(self): + self.base_twitter_redirect('/twitter/client/login_gen_coroutine') + + def test_twitter_get_user(self): + response = self.fetch( + '/twitter/client/login?oauth_token=zxcv', + headers={'Cookie': '_oauth_request_token=enhjdg==|MTIzNA=='}) + response.rethrow() + parsed = json_decode(response.body) + self.assertEqual(parsed, + {u('access_token'): {u('key'): u('hjkl'), + u('screen_name'): u('foo'), + u('secret'): u('vbnm')}, + u('name'): u('Foo'), + u('screen_name'): u('foo'), + u('username'): u('foo')}) + + def test_twitter_show_user(self): + response = self.fetch('/twitter/client/show_user?name=somebody') + response.rethrow() + self.assertEqual(json_decode(response.body), + {'name': 'Somebody', 'screen_name': 
'somebody'}) + + def test_twitter_show_user_error(self): + with ExpectLog(gen_log, 'Error response HTTP 500'): + response = self.fetch('/twitter/client/show_user?name=error') + self.assertEqual(response.code, 500) + self.assertEqual(response.body, b'error from twitter request') + + def test_twitter_show_user_future(self): + response = self.fetch('/twitter/client/show_user_future?name=somebody') + response.rethrow() + self.assertEqual(json_decode(response.body), + {'name': 'Somebody', 'screen_name': 'somebody'}) + + def test_twitter_show_user_future_error(self): + response = self.fetch('/twitter/client/show_user_future?name=error') + self.assertEqual(response.code, 500) + self.assertIn(b'Error response HTTP 500', response.body) + + +class GoogleLoginHandler(RequestHandler, GoogleOAuth2Mixin): + def initialize(self, test): + self.test = test + self._OAUTH_REDIRECT_URI = test.get_url('/client/login') + self._OAUTH_AUTHORIZE_URL = test.get_url('/google/oauth2/authorize') + self._OAUTH_ACCESS_TOKEN_URL = test.get_url('/google/oauth2/token') + + @gen.coroutine + def get(self): + code = self.get_argument('code', None) + if code is not None: + # retrieve authenticate google user + access = yield self.get_authenticated_user(self._OAUTH_REDIRECT_URI, + code) + user = yield self.oauth2_request( + self.test.get_url("/google/oauth2/userinfo"), + access_token=access["access_token"]) + # return the user and access token as json + user["access_token"] = access["access_token"] + self.write(user) + else: + yield self.authorize_redirect( + redirect_uri=self._OAUTH_REDIRECT_URI, + client_id=self.settings['google_oauth']['key'], + client_secret=self.settings['google_oauth']['secret'], + scope=['profile', 'email'], + response_type='code', + extra_params={'prompt': 'select_account'}) + + +class GoogleOAuth2AuthorizeHandler(RequestHandler): + def get(self): + # issue a fake auth code and redirect to redirect_uri + code = 'fake-authorization-code' + 
self.redirect(url_concat(self.get_argument('redirect_uri'), + dict(code=code))) + + +class GoogleOAuth2TokenHandler(RequestHandler): + def post(self): + assert self.get_argument('code') == 'fake-authorization-code' + # issue a fake token + self.finish({ + 'access_token': 'fake-access-token', + 'expires_in': 'never-expires' + }) + + +class GoogleOAuth2UserinfoHandler(RequestHandler): + def get(self): + assert self.get_argument('access_token') == 'fake-access-token' + # return a fake user + self.finish({ + 'name': 'Foo', + 'email': 'foo@example.com' + }) + + +class GoogleOAuth2Test(AsyncHTTPTestCase): + def get_app(self): + return Application( + [ + # test endpoints + ('/client/login', GoogleLoginHandler, dict(test=self)), + + # simulated google authorization server endpoints + ('/google/oauth2/authorize', GoogleOAuth2AuthorizeHandler), + ('/google/oauth2/token', GoogleOAuth2TokenHandler), + ('/google/oauth2/userinfo', GoogleOAuth2UserinfoHandler), + ], + google_oauth={ + "key": 'fake_google_client_id', + "secret": 'fake_google_client_secret' + }) + + def test_google_login(self): + response = self.fetch('/client/login') + self.assertDictEqual({ + u('name'): u('Foo'), + u('email'): u('foo@example.com'), + u('access_token'): u('fake-access-token'), + }, json_decode(response.body)) diff --git a/python/tornado/test/concurrent_test.py b/python/tornado/test/concurrent_test.py new file mode 100644 index 000000000..bf90ad0ec --- /dev/null +++ b/python/tornado/test/concurrent_test.py @@ -0,0 +1,415 @@ +#!/usr/bin/env python +# +# Copyright 2012 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +from __future__ import absolute_import, division, print_function, with_statement + +import logging +import re +import socket +import sys +import traceback + +from tornado.concurrent import Future, return_future, ReturnValueIgnoredError, run_on_executor +from tornado.escape import utf8, to_unicode +from tornado import gen +from tornado.iostream import IOStream +from tornado import stack_context +from tornado.tcpserver import TCPServer +from tornado.testing import AsyncTestCase, LogTrapTestCase, bind_unused_port, gen_test +from tornado.test.util import unittest + + +try: + from concurrent import futures +except ImportError: + futures = None + + +class ReturnFutureTest(AsyncTestCase): + @return_future + def sync_future(self, callback): + callback(42) + + @return_future + def async_future(self, callback): + self.io_loop.add_callback(callback, 42) + + @return_future + def immediate_failure(self, callback): + 1 / 0 + + @return_future + def delayed_failure(self, callback): + self.io_loop.add_callback(lambda: 1 / 0) + + @return_future + def return_value(self, callback): + # Note that the result of both running the callback and returning + # a value (or raising an exception) is unspecified; with current + # implementations the last event prior to callback resolution wins. + return 42 + + @return_future + def no_result_future(self, callback): + callback() + + def test_immediate_failure(self): + with self.assertRaises(ZeroDivisionError): + # The caller sees the error just like a normal function. 
+ self.immediate_failure(callback=self.stop) + # The callback is not run because the function failed synchronously. + self.io_loop.add_timeout(self.io_loop.time() + 0.05, self.stop) + result = self.wait() + self.assertIs(result, None) + + def test_return_value(self): + with self.assertRaises(ReturnValueIgnoredError): + self.return_value(callback=self.stop) + + def test_callback_kw(self): + future = self.sync_future(callback=self.stop) + result = self.wait() + self.assertEqual(result, 42) + self.assertEqual(future.result(), 42) + + def test_callback_positional(self): + # When the callback is passed in positionally, future_wrap shouldn't + # add another callback in the kwargs. + future = self.sync_future(self.stop) + result = self.wait() + self.assertEqual(result, 42) + self.assertEqual(future.result(), 42) + + def test_no_callback(self): + future = self.sync_future() + self.assertEqual(future.result(), 42) + + def test_none_callback_kw(self): + # explicitly pass None as callback + future = self.sync_future(callback=None) + self.assertEqual(future.result(), 42) + + def test_none_callback_pos(self): + future = self.sync_future(None) + self.assertEqual(future.result(), 42) + + def test_async_future(self): + future = self.async_future() + self.assertFalse(future.done()) + self.io_loop.add_future(future, self.stop) + future2 = self.wait() + self.assertIs(future, future2) + self.assertEqual(future.result(), 42) + + @gen_test + def test_async_future_gen(self): + result = yield self.async_future() + self.assertEqual(result, 42) + + def test_delayed_failure(self): + future = self.delayed_failure() + self.io_loop.add_future(future, self.stop) + future2 = self.wait() + self.assertIs(future, future2) + with self.assertRaises(ZeroDivisionError): + future.result() + + def test_kw_only_callback(self): + @return_future + def f(**kwargs): + kwargs['callback'](42) + future = f() + self.assertEqual(future.result(), 42) + + def test_error_in_callback(self): + 
self.sync_future(callback=lambda future: 1 / 0) + # The exception gets caught by our StackContext and will be re-raised + # when we wait. + self.assertRaises(ZeroDivisionError, self.wait) + + def test_no_result_future(self): + future = self.no_result_future(self.stop) + result = self.wait() + self.assertIs(result, None) + # result of this future is undefined, but not an error + future.result() + + def test_no_result_future_callback(self): + future = self.no_result_future(callback=lambda: self.stop()) + result = self.wait() + self.assertIs(result, None) + future.result() + + @gen_test + def test_future_traceback(self): + @return_future + @gen.engine + def f(callback): + yield gen.Task(self.io_loop.add_callback) + try: + 1 / 0 + except ZeroDivisionError: + self.expected_frame = traceback.extract_tb( + sys.exc_info()[2], limit=1)[0] + raise + try: + yield f() + self.fail("didn't get expected exception") + except ZeroDivisionError: + tb = traceback.extract_tb(sys.exc_info()[2]) + self.assertIn(self.expected_frame, tb) + +# The following series of classes demonstrate and test various styles +# of use, with and without generators and futures. 
+ + +class CapServer(TCPServer): + def handle_stream(self, stream, address): + logging.info("handle_stream") + self.stream = stream + self.stream.read_until(b"\n", self.handle_read) + + def handle_read(self, data): + logging.info("handle_read") + data = to_unicode(data) + if data == data.upper(): + self.stream.write(b"error\talready capitalized\n") + else: + # data already has \n + self.stream.write(utf8("ok\t%s" % data.upper())) + self.stream.close() + + +class CapError(Exception): + pass + + +class BaseCapClient(object): + def __init__(self, port, io_loop): + self.port = port + self.io_loop = io_loop + + def process_response(self, data): + status, message = re.match('(.*)\t(.*)\n', to_unicode(data)).groups() + if status == 'ok': + return message + else: + raise CapError(message) + + +class ManualCapClient(BaseCapClient): + def capitalize(self, request_data, callback=None): + logging.info("capitalize") + self.request_data = request_data + self.stream = IOStream(socket.socket(), io_loop=self.io_loop) + self.stream.connect(('127.0.0.1', self.port), + callback=self.handle_connect) + self.future = Future() + if callback is not None: + self.future.add_done_callback( + stack_context.wrap(lambda future: callback(future.result()))) + return self.future + + def handle_connect(self): + logging.info("handle_connect") + self.stream.write(utf8(self.request_data + "\n")) + self.stream.read_until(b'\n', callback=self.handle_read) + + def handle_read(self, data): + logging.info("handle_read") + self.stream.close() + try: + self.future.set_result(self.process_response(data)) + except CapError as e: + self.future.set_exception(e) + + +class DecoratorCapClient(BaseCapClient): + @return_future + def capitalize(self, request_data, callback): + logging.info("capitalize") + self.request_data = request_data + self.stream = IOStream(socket.socket(), io_loop=self.io_loop) + self.stream.connect(('127.0.0.1', self.port), + callback=self.handle_connect) + self.callback = callback + + def 
handle_connect(self): + logging.info("handle_connect") + self.stream.write(utf8(self.request_data + "\n")) + self.stream.read_until(b'\n', callback=self.handle_read) + + def handle_read(self, data): + logging.info("handle_read") + self.stream.close() + self.callback(self.process_response(data)) + + +class GeneratorCapClient(BaseCapClient): + @return_future + @gen.engine + def capitalize(self, request_data, callback): + logging.info('capitalize') + stream = IOStream(socket.socket(), io_loop=self.io_loop) + logging.info('connecting') + yield gen.Task(stream.connect, ('127.0.0.1', self.port)) + stream.write(utf8(request_data + '\n')) + logging.info('reading') + data = yield gen.Task(stream.read_until, b'\n') + logging.info('returning') + stream.close() + callback(self.process_response(data)) + + +class ClientTestMixin(object): + def setUp(self): + super(ClientTestMixin, self).setUp() + self.server = CapServer(io_loop=self.io_loop) + sock, port = bind_unused_port() + self.server.add_sockets([sock]) + self.client = self.client_class(io_loop=self.io_loop, port=port) + + def tearDown(self): + self.server.stop() + super(ClientTestMixin, self).tearDown() + + def test_callback(self): + self.client.capitalize("hello", callback=self.stop) + result = self.wait() + self.assertEqual(result, "HELLO") + + def test_callback_error(self): + self.client.capitalize("HELLO", callback=self.stop) + self.assertRaisesRegexp(CapError, "already capitalized", self.wait) + + def test_future(self): + future = self.client.capitalize("hello") + self.io_loop.add_future(future, self.stop) + self.wait() + self.assertEqual(future.result(), "HELLO") + + def test_future_error(self): + future = self.client.capitalize("HELLO") + self.io_loop.add_future(future, self.stop) + self.wait() + self.assertRaisesRegexp(CapError, "already capitalized", future.result) + + def test_generator(self): + @gen.engine + def f(): + result = yield self.client.capitalize("hello") + self.assertEqual(result, "HELLO") + 
self.stop() + f() + self.wait() + + def test_generator_error(self): + @gen.engine + def f(): + with self.assertRaisesRegexp(CapError, "already capitalized"): + yield self.client.capitalize("HELLO") + self.stop() + f() + self.wait() + + +class ManualClientTest(ClientTestMixin, AsyncTestCase, LogTrapTestCase): + client_class = ManualCapClient + + +class DecoratorClientTest(ClientTestMixin, AsyncTestCase, LogTrapTestCase): + client_class = DecoratorCapClient + + +class GeneratorClientTest(ClientTestMixin, AsyncTestCase, LogTrapTestCase): + client_class = GeneratorCapClient + + +@unittest.skipIf(futures is None, "concurrent.futures module not present") +class RunOnExecutorTest(AsyncTestCase): + @gen_test + def test_no_calling(self): + class Object(object): + def __init__(self, io_loop): + self.io_loop = io_loop + self.executor = futures.thread.ThreadPoolExecutor(1) + + @run_on_executor + def f(self): + return 42 + + o = Object(io_loop=self.io_loop) + answer = yield o.f() + self.assertEqual(answer, 42) + + @gen_test + def test_call_with_no_args(self): + class Object(object): + def __init__(self, io_loop): + self.io_loop = io_loop + self.executor = futures.thread.ThreadPoolExecutor(1) + + @run_on_executor() + def f(self): + return 42 + + o = Object(io_loop=self.io_loop) + answer = yield o.f() + self.assertEqual(answer, 42) + + @gen_test + def test_call_with_io_loop(self): + class Object(object): + def __init__(self, io_loop): + self._io_loop = io_loop + self.executor = futures.thread.ThreadPoolExecutor(1) + + @run_on_executor(io_loop='_io_loop') + def f(self): + return 42 + + o = Object(io_loop=self.io_loop) + answer = yield o.f() + self.assertEqual(answer, 42) + + @gen_test + def test_call_with_executor(self): + class Object(object): + def __init__(self, io_loop): + self.io_loop = io_loop + self.__executor = futures.thread.ThreadPoolExecutor(1) + + @run_on_executor(executor='_Object__executor') + def f(self): + return 42 + + o = Object(io_loop=self.io_loop) + answer = 
yield o.f() + self.assertEqual(answer, 42) + + @gen_test + def test_call_with_both(self): + class Object(object): + def __init__(self, io_loop): + self._io_loop = io_loop + self.__executor = futures.thread.ThreadPoolExecutor(1) + + @run_on_executor(io_loop='_io_loop', executor='_Object__executor') + def f(self): + return 42 + + o = Object(io_loop=self.io_loop) + answer = yield o.f() + self.assertEqual(answer, 42) diff --git a/python/tornado/test/csv_translations/fr_FR.csv b/python/tornado/test/csv_translations/fr_FR.csv new file mode 100644 index 000000000..6321b6e7c --- /dev/null +++ b/python/tornado/test/csv_translations/fr_FR.csv @@ -0,0 +1 @@ +"school","école" diff --git a/python/tornado/test/curl_httpclient_test.py b/python/tornado/test/curl_httpclient_test.py new file mode 100644 index 000000000..d06a7bd2a --- /dev/null +++ b/python/tornado/test/curl_httpclient_test.py @@ -0,0 +1,124 @@ +from __future__ import absolute_import, division, print_function, with_statement + +from hashlib import md5 + +from tornado.escape import utf8 +from tornado.httpclient import HTTPRequest +from tornado.stack_context import ExceptionStackContext +from tornado.testing import AsyncHTTPTestCase +from tornado.test import httpclient_test +from tornado.test.util import unittest +from tornado.web import Application, RequestHandler + + +try: + import pycurl +except ImportError: + pycurl = None + +if pycurl is not None: + from tornado.curl_httpclient import CurlAsyncHTTPClient + + +@unittest.skipIf(pycurl is None, "pycurl module not present") +class CurlHTTPClientCommonTestCase(httpclient_test.HTTPClientCommonTestCase): + def get_http_client(self): + client = CurlAsyncHTTPClient(io_loop=self.io_loop, + defaults=dict(allow_ipv6=False)) + # make sure AsyncHTTPClient magic doesn't give us the wrong class + self.assertTrue(isinstance(client, CurlAsyncHTTPClient)) + return client + + +class DigestAuthHandler(RequestHandler): + def get(self): + realm = 'test' + opaque = 'asdf' + # Real 
implementations would use a random nonce. + nonce = "1234" + username = 'foo' + password = 'bar' + + auth_header = self.request.headers.get('Authorization', None) + if auth_header is not None: + auth_mode, params = auth_header.split(' ', 1) + assert auth_mode == 'Digest' + param_dict = {} + for pair in params.split(','): + k, v = pair.strip().split('=', 1) + if v[0] == '"' and v[-1] == '"': + v = v[1:-1] + param_dict[k] = v + assert param_dict['realm'] == realm + assert param_dict['opaque'] == opaque + assert param_dict['nonce'] == nonce + assert param_dict['username'] == username + assert param_dict['uri'] == self.request.path + h1 = md5(utf8('%s:%s:%s' % (username, realm, password))).hexdigest() + h2 = md5(utf8('%s:%s' % (self.request.method, + self.request.path))).hexdigest() + digest = md5(utf8('%s:%s:%s' % (h1, nonce, h2))).hexdigest() + if digest == param_dict['response']: + self.write('ok') + else: + self.write('fail') + else: + self.set_status(401) + self.set_header('WWW-Authenticate', + 'Digest realm="%s", nonce="%s", opaque="%s"' % + (realm, nonce, opaque)) + + +class CustomReasonHandler(RequestHandler): + def get(self): + self.set_status(200, "Custom reason") + + +class CustomFailReasonHandler(RequestHandler): + def get(self): + self.set_status(400, "Custom reason") + + +@unittest.skipIf(pycurl is None, "pycurl module not present") +class CurlHTTPClientTestCase(AsyncHTTPTestCase): + def setUp(self): + super(CurlHTTPClientTestCase, self).setUp() + self.http_client = CurlAsyncHTTPClient(self.io_loop, + defaults=dict(allow_ipv6=False)) + + def get_app(self): + return Application([ + ('/digest', DigestAuthHandler), + ('/custom_reason', CustomReasonHandler), + ('/custom_fail_reason', CustomFailReasonHandler), + ]) + + def test_prepare_curl_callback_stack_context(self): + exc_info = [] + + def error_handler(typ, value, tb): + exc_info.append((typ, value, tb)) + self.stop() + return True + + with ExceptionStackContext(error_handler): + request = 
HTTPRequest(self.get_url('/'), + prepare_curl_callback=lambda curl: 1 / 0) + self.http_client.fetch(request, callback=self.stop) + self.wait() + self.assertEqual(1, len(exc_info)) + self.assertIs(exc_info[0][0], ZeroDivisionError) + + def test_digest_auth(self): + response = self.fetch('/digest', auth_mode='digest', + auth_username='foo', auth_password='bar') + self.assertEqual(response.body, b'ok') + + def test_custom_reason(self): + response = self.fetch('/custom_reason') + self.assertEqual(response.reason, "Custom reason") + + def test_fail_custom_reason(self): + response = self.fetch('/custom_fail_reason') + self.assertEqual(str(response.error), "HTTP 400: Custom reason") + diff --git a/python/tornado/test/escape_test.py b/python/tornado/test/escape_test.py new file mode 100644 index 000000000..65765b68a --- /dev/null +++ b/python/tornado/test/escape_test.py @@ -0,0 +1,245 @@ +#!/usr/bin/env python + + +from __future__ import absolute_import, division, print_function, with_statement +import tornado.escape + +from tornado.escape import utf8, xhtml_escape, xhtml_unescape, url_escape, url_unescape, to_unicode, json_decode, json_encode, squeeze, recursive_unicode +from tornado.util import u, unicode_type +from tornado.test.util import unittest + +linkify_tests = [ + # (input, linkify_kwargs, expected_output) + + ("hello http://world.com/!", {}, + u('hello http://world.com/!')), + + ("hello http://world.com/with?param=true&stuff=yes", {}, + u('hello http://world.com/with?param=true&stuff=yes')), + + # an opened paren followed by many chars killed Gruber's regex + ("http://url.com/w(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", {}, + u('http://url.com/w(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')), + + # as did too many dots at the end + ("http://url.com/withmany.......................................", {}, + u('http://url.com/withmany.......................................')), + + ("http://url.com/withmany((((((((((((((((((((((((((((((((((a)", {}, + 
u('http://url.com/withmany((((((((((((((((((((((((((((((((((a)')), + + # some examples from http://daringfireball.net/2009/11/liberal_regex_for_matching_urls + # plus a fex extras (such as multiple parentheses). + ("http://foo.com/blah_blah", {}, + u('http://foo.com/blah_blah')), + + ("http://foo.com/blah_blah/", {}, + u('http://foo.com/blah_blah/')), + + ("(Something like http://foo.com/blah_blah)", {}, + u('(Something like http://foo.com/blah_blah)')), + + ("http://foo.com/blah_blah_(wikipedia)", {}, + u('http://foo.com/blah_blah_(wikipedia)')), + + ("http://foo.com/blah_(blah)_(wikipedia)_blah", {}, + u('http://foo.com/blah_(blah)_(wikipedia)_blah')), + + ("(Something like http://foo.com/blah_blah_(wikipedia))", {}, + u('(Something like http://foo.com/blah_blah_(wikipedia))')), + + ("http://foo.com/blah_blah.", {}, + u('http://foo.com/blah_blah.')), + + ("http://foo.com/blah_blah/.", {}, + u('http://foo.com/blah_blah/.')), + + ("", {}, + u('<http://foo.com/blah_blah>')), + + ("", {}, + u('<http://foo.com/blah_blah/>')), + + ("http://foo.com/blah_blah,", {}, + u('http://foo.com/blah_blah,')), + + ("http://www.example.com/wpstyle/?p=364.", {}, + u('http://www.example.com/wpstyle/?p=364.')), + + ("rdar://1234", + {"permitted_protocols": ["http", "rdar"]}, + u('rdar://1234')), + + ("rdar:/1234", + {"permitted_protocols": ["rdar"]}, + u('rdar:/1234')), + + ("http://userid:password@example.com:8080", {}, + u('http://userid:password@example.com:8080')), + + ("http://userid@example.com", {}, + u('http://userid@example.com')), + + ("http://userid@example.com:8080", {}, + u('http://userid@example.com:8080')), + + ("http://userid:password@example.com", {}, + u('http://userid:password@example.com')), + + ("message://%3c330e7f8409726r6a4ba78dkf1fd71420c1bf6ff@mail.gmail.com%3e", + {"permitted_protocols": ["http", "message"]}, + u('message://%3c330e7f8409726r6a4ba78dkf1fd71420c1bf6ff@mail.gmail.com%3e')), + + (u("http://\u27a1.ws/\u4a39"), {}, + u('http://\u27a1.ws/\u4a39')), 
+ + ("http://example.com", {}, + u('<tag>http://example.com</tag>')), + + ("Just a www.example.com link.", {}, + u('Just a www.example.com link.')), + + ("Just a www.example.com link.", + {"require_protocol": True}, + u('Just a www.example.com link.')), + + ("A http://reallylong.com/link/that/exceedsthelenglimit.html", + {"require_protocol": True, "shorten": True}, + u('A http://reallylong.com/link...')), + + ("A http://reallylongdomainnamethatwillbetoolong.com/hi!", + {"shorten": True}, + u('A http://reallylongdomainnametha...!')), + + ("A file:///passwords.txt and http://web.com link", {}, + u('A file:///passwords.txt and http://web.com link')), + + ("A file:///passwords.txt and http://web.com link", + {"permitted_protocols": ["file"]}, + u('A file:///passwords.txt and http://web.com link')), + + ("www.external-link.com", + {"extra_params": 'rel="nofollow" class="external"'}, + u('www.external-link.com')), + + ("www.external-link.com and www.internal-link.com/blogs extra", + {"extra_params": lambda href: 'class="internal"' if href.startswith("http://www.internal-link.com") else 'rel="nofollow" class="external"'}, + u('www.external-link.com and www.internal-link.com/blogs extra')), + + ("www.external-link.com", + {"extra_params": lambda href: ' rel="nofollow" class="external" '}, + u('www.external-link.com')), +] + + +class EscapeTestCase(unittest.TestCase): + def test_linkify(self): + for text, kwargs, html in linkify_tests: + linked = tornado.escape.linkify(text, **kwargs) + self.assertEqual(linked, html) + + def test_xhtml_escape(self): + tests = [ + ("", "<foo>"), + (u(""), u("<foo>")), + (b"", b"<foo>"), + + ("<>&\"'", "<>&"'"), + ("&", "&amp;"), + + (u("<\u00e9>"), u("<\u00e9>")), + (b"<\xc3\xa9>", b"<\xc3\xa9>"), + ] + for unescaped, escaped in tests: + self.assertEqual(utf8(xhtml_escape(unescaped)), utf8(escaped)) + self.assertEqual(utf8(unescaped), utf8(xhtml_unescape(escaped))) + + def test_xhtml_unescape_numeric(self): + tests = [ + ('foo bar', 'foo 
bar'), + ('foo bar', 'foo bar'), + ('foo bar', 'foo bar'), + ('foo઼bar', u('foo\u0abcbar')), + ('foo&#xyz;bar', 'foo&#xyz;bar'), # invalid encoding + ('foo&#;bar', 'foo&#;bar'), # invalid encoding + ('foo&#x;bar', 'foo&#x;bar'), # invalid encoding + ] + for escaped, unescaped in tests: + self.assertEqual(unescaped, xhtml_unescape(escaped)) + + def test_url_escape_unicode(self): + tests = [ + # byte strings are passed through as-is + (u('\u00e9').encode('utf8'), '%C3%A9'), + (u('\u00e9').encode('latin1'), '%E9'), + + # unicode strings become utf8 + (u('\u00e9'), '%C3%A9'), + ] + for unescaped, escaped in tests: + self.assertEqual(url_escape(unescaped), escaped) + + def test_url_unescape_unicode(self): + tests = [ + ('%C3%A9', u('\u00e9'), 'utf8'), + ('%C3%A9', u('\u00c3\u00a9'), 'latin1'), + ('%C3%A9', utf8(u('\u00e9')), None), + ] + for escaped, unescaped, encoding in tests: + # input strings to url_unescape should only contain ascii + # characters, but make sure the function accepts both byte + # and unicode strings. 
+ self.assertEqual(url_unescape(to_unicode(escaped), encoding), unescaped) + self.assertEqual(url_unescape(utf8(escaped), encoding), unescaped) + + def test_url_escape_quote_plus(self): + unescaped = '+ #%' + plus_escaped = '%2B+%23%25' + escaped = '%2B%20%23%25' + self.assertEqual(url_escape(unescaped), plus_escaped) + self.assertEqual(url_escape(unescaped, plus=False), escaped) + self.assertEqual(url_unescape(plus_escaped), unescaped) + self.assertEqual(url_unescape(escaped, plus=False), unescaped) + self.assertEqual(url_unescape(plus_escaped, encoding=None), + utf8(unescaped)) + self.assertEqual(url_unescape(escaped, encoding=None, plus=False), + utf8(unescaped)) + + def test_escape_return_types(self): + # On python2 the escape methods should generally return the same + # type as their argument + self.assertEqual(type(xhtml_escape("foo")), str) + self.assertEqual(type(xhtml_escape(u("foo"))), unicode_type) + + def test_json_decode(self): + # json_decode accepts both bytes and unicode, but strings it returns + # are always unicode. + self.assertEqual(json_decode(b'"foo"'), u("foo")) + self.assertEqual(json_decode(u('"foo"')), u("foo")) + + # Non-ascii bytes are interpreted as utf8 + self.assertEqual(json_decode(utf8(u('"\u00e9"'))), u("\u00e9")) + + def test_json_encode(self): + # json deals with strings, not bytes. On python 2 byte strings will + # convert automatically if they are utf8; on python 3 byte strings + # are not allowed. 
+ self.assertEqual(json_decode(json_encode(u("\u00e9"))), u("\u00e9")) + if bytes is str: + self.assertEqual(json_decode(json_encode(utf8(u("\u00e9")))), u("\u00e9")) + self.assertRaises(UnicodeDecodeError, json_encode, b"\xe9") + + def test_squeeze(self): + self.assertEqual(squeeze(u('sequences of whitespace chars')), u('sequences of whitespace chars')) + + def test_recursive_unicode(self): + tests = { + 'dict': {b"foo": b"bar"}, + 'list': [b"foo", b"bar"], + 'tuple': (b"foo", b"bar"), + 'bytes': b"foo" + } + self.assertEqual(recursive_unicode(tests['dict']), {u("foo"): u("bar")}) + self.assertEqual(recursive_unicode(tests['list']), [u("foo"), u("bar")]) + self.assertEqual(recursive_unicode(tests['tuple']), (u("foo"), u("bar"))) + self.assertEqual(recursive_unicode(tests['bytes']), u("foo")) diff --git a/python/tornado/test/gen_test.py b/python/tornado/test/gen_test.py new file mode 100644 index 000000000..1b118f948 --- /dev/null +++ b/python/tornado/test/gen_test.py @@ -0,0 +1,1359 @@ +from __future__ import absolute_import, division, print_function, with_statement + +import contextlib +import datetime +import functools +import sys +import textwrap +import time +import weakref + +from tornado.concurrent import return_future, Future +from tornado.escape import url_escape +from tornado.httpclient import AsyncHTTPClient +from tornado.ioloop import IOLoop +from tornado.log import app_log +from tornado import stack_context +from tornado.testing import AsyncHTTPTestCase, AsyncTestCase, ExpectLog, gen_test +from tornado.test.util import unittest, skipOnTravis, skipBefore33, skipBefore35, skipNotCPython, exec_test +from tornado.web import Application, RequestHandler, asynchronous, HTTPError + +from tornado import gen + +try: + from concurrent import futures +except ImportError: + futures = None + + +class GenEngineTest(AsyncTestCase): + def setUp(self): + super(GenEngineTest, self).setUp() + self.named_contexts = [] + + def named_context(self, name): + 
@contextlib.contextmanager + def context(): + self.named_contexts.append(name) + try: + yield + finally: + self.assertEqual(self.named_contexts.pop(), name) + return context + + def run_gen(self, f): + f() + return self.wait() + + def delay_callback(self, iterations, callback, arg): + """Runs callback(arg) after a number of IOLoop iterations.""" + if iterations == 0: + callback(arg) + else: + self.io_loop.add_callback(functools.partial( + self.delay_callback, iterations - 1, callback, arg)) + + @return_future + def async_future(self, result, callback): + self.io_loop.add_callback(callback, result) + + @gen.coroutine + def async_exception(self, e): + yield gen.moment + raise e + + def test_no_yield(self): + @gen.engine + def f(): + self.stop() + self.run_gen(f) + + def test_inline_cb(self): + @gen.engine + def f(): + (yield gen.Callback("k1"))() + res = yield gen.Wait("k1") + self.assertTrue(res is None) + self.stop() + self.run_gen(f) + + def test_ioloop_cb(self): + @gen.engine + def f(): + self.io_loop.add_callback((yield gen.Callback("k1"))) + yield gen.Wait("k1") + self.stop() + self.run_gen(f) + + def test_exception_phase1(self): + @gen.engine + def f(): + 1 / 0 + self.assertRaises(ZeroDivisionError, self.run_gen, f) + + def test_exception_phase2(self): + @gen.engine + def f(): + self.io_loop.add_callback((yield gen.Callback("k1"))) + yield gen.Wait("k1") + 1 / 0 + self.assertRaises(ZeroDivisionError, self.run_gen, f) + + def test_exception_in_task_phase1(self): + def fail_task(callback): + 1 / 0 + + @gen.engine + def f(): + try: + yield gen.Task(fail_task) + raise Exception("did not get expected exception") + except ZeroDivisionError: + self.stop() + self.run_gen(f) + + def test_exception_in_task_phase2(self): + # This is the case that requires the use of stack_context in gen.engine + def fail_task(callback): + self.io_loop.add_callback(lambda: 1 / 0) + + @gen.engine + def f(): + try: + yield gen.Task(fail_task) + raise Exception("did not get expected 
exception") + except ZeroDivisionError: + self.stop() + self.run_gen(f) + + def test_with_arg(self): + @gen.engine + def f(): + (yield gen.Callback("k1"))(42) + res = yield gen.Wait("k1") + self.assertEqual(42, res) + self.stop() + self.run_gen(f) + + def test_with_arg_tuple(self): + @gen.engine + def f(): + (yield gen.Callback((1, 2)))((3, 4)) + res = yield gen.Wait((1, 2)) + self.assertEqual((3, 4), res) + self.stop() + self.run_gen(f) + + def test_key_reuse(self): + @gen.engine + def f(): + yield gen.Callback("k1") + yield gen.Callback("k1") + self.stop() + self.assertRaises(gen.KeyReuseError, self.run_gen, f) + + def test_key_reuse_tuple(self): + @gen.engine + def f(): + yield gen.Callback((1, 2)) + yield gen.Callback((1, 2)) + self.stop() + self.assertRaises(gen.KeyReuseError, self.run_gen, f) + + def test_key_mismatch(self): + @gen.engine + def f(): + yield gen.Callback("k1") + yield gen.Wait("k2") + self.stop() + self.assertRaises(gen.UnknownKeyError, self.run_gen, f) + + def test_key_mismatch_tuple(self): + @gen.engine + def f(): + yield gen.Callback((1, 2)) + yield gen.Wait((2, 3)) + self.stop() + self.assertRaises(gen.UnknownKeyError, self.run_gen, f) + + def test_leaked_callback(self): + @gen.engine + def f(): + yield gen.Callback("k1") + self.stop() + self.assertRaises(gen.LeakedCallbackError, self.run_gen, f) + + def test_leaked_callback_tuple(self): + @gen.engine + def f(): + yield gen.Callback((1, 2)) + self.stop() + self.assertRaises(gen.LeakedCallbackError, self.run_gen, f) + + def test_parallel_callback(self): + @gen.engine + def f(): + for k in range(3): + self.io_loop.add_callback((yield gen.Callback(k))) + yield gen.Wait(1) + self.io_loop.add_callback((yield gen.Callback(3))) + yield gen.Wait(0) + yield gen.Wait(3) + yield gen.Wait(2) + self.stop() + self.run_gen(f) + + def test_bogus_yield(self): + @gen.engine + def f(): + yield 42 + self.assertRaises(gen.BadYieldError, self.run_gen, f) + + def test_bogus_yield_tuple(self): + @gen.engine + def 
f(): + yield (1, 2) + self.assertRaises(gen.BadYieldError, self.run_gen, f) + + def test_reuse(self): + @gen.engine + def f(): + self.io_loop.add_callback((yield gen.Callback(0))) + yield gen.Wait(0) + self.stop() + self.run_gen(f) + self.run_gen(f) + + def test_task(self): + @gen.engine + def f(): + yield gen.Task(self.io_loop.add_callback) + self.stop() + self.run_gen(f) + + def test_wait_all(self): + @gen.engine + def f(): + (yield gen.Callback("k1"))("v1") + (yield gen.Callback("k2"))("v2") + results = yield gen.WaitAll(["k1", "k2"]) + self.assertEqual(results, ["v1", "v2"]) + self.stop() + self.run_gen(f) + + def test_exception_in_yield(self): + @gen.engine + def f(): + try: + yield gen.Wait("k1") + raise Exception("did not get expected exception") + except gen.UnknownKeyError: + pass + self.stop() + self.run_gen(f) + + def test_resume_after_exception_in_yield(self): + @gen.engine + def f(): + try: + yield gen.Wait("k1") + raise Exception("did not get expected exception") + except gen.UnknownKeyError: + pass + (yield gen.Callback("k2"))("v2") + self.assertEqual((yield gen.Wait("k2")), "v2") + self.stop() + self.run_gen(f) + + def test_orphaned_callback(self): + @gen.engine + def f(): + self.orphaned_callback = yield gen.Callback(1) + try: + self.run_gen(f) + raise Exception("did not get expected exception") + except gen.LeakedCallbackError: + pass + self.orphaned_callback() + + def test_multi(self): + @gen.engine + def f(): + (yield gen.Callback("k1"))("v1") + (yield gen.Callback("k2"))("v2") + results = yield [gen.Wait("k1"), gen.Wait("k2")] + self.assertEqual(results, ["v1", "v2"]) + self.stop() + self.run_gen(f) + + def test_multi_dict(self): + @gen.engine + def f(): + (yield gen.Callback("k1"))("v1") + (yield gen.Callback("k2"))("v2") + results = yield dict(foo=gen.Wait("k1"), bar=gen.Wait("k2")) + self.assertEqual(results, dict(foo="v1", bar="v2")) + self.stop() + self.run_gen(f) + + # The following tests explicitly run with both gen.Multi + # and 
gen.multi_future (Task returns a Future, so it can be used + # with either). + def test_multi_yieldpoint_delayed(self): + @gen.engine + def f(): + # callbacks run at different times + responses = yield gen.Multi([ + gen.Task(self.delay_callback, 3, arg="v1"), + gen.Task(self.delay_callback, 1, arg="v2"), + ]) + self.assertEqual(responses, ["v1", "v2"]) + self.stop() + self.run_gen(f) + + def test_multi_yieldpoint_dict_delayed(self): + @gen.engine + def f(): + # callbacks run at different times + responses = yield gen.Multi(dict( + foo=gen.Task(self.delay_callback, 3, arg="v1"), + bar=gen.Task(self.delay_callback, 1, arg="v2"), + )) + self.assertEqual(responses, dict(foo="v1", bar="v2")) + self.stop() + self.run_gen(f) + + def test_multi_future_delayed(self): + @gen.engine + def f(): + # callbacks run at different times + responses = yield gen.multi_future([ + gen.Task(self.delay_callback, 3, arg="v1"), + gen.Task(self.delay_callback, 1, arg="v2"), + ]) + self.assertEqual(responses, ["v1", "v2"]) + self.stop() + self.run_gen(f) + + def test_multi_future_dict_delayed(self): + @gen.engine + def f(): + # callbacks run at different times + responses = yield gen.multi_future(dict( + foo=gen.Task(self.delay_callback, 3, arg="v1"), + bar=gen.Task(self.delay_callback, 1, arg="v2"), + )) + self.assertEqual(responses, dict(foo="v1", bar="v2")) + self.stop() + self.run_gen(f) + + @skipOnTravis + @gen_test + def test_multi_performance(self): + # Yielding a list used to have quadratic performance; make + # sure a large list stays reasonable. On my laptop a list of + # 2000 used to take 1.8s, now it takes 0.12. + start = time.time() + yield [gen.Task(self.io_loop.add_callback) for i in range(2000)] + end = time.time() + self.assertLess(end - start, 1.0) + + @gen_test + def test_multi_empty(self): + # Empty lists or dicts should return the same type. 
+ x = yield [] + self.assertTrue(isinstance(x, list)) + y = yield {} + self.assertTrue(isinstance(y, dict)) + + @gen_test + def test_multi_mixed_types(self): + # A YieldPoint (Wait) and Future (Task) can be combined + # (and use the YieldPoint codepath) + (yield gen.Callback("k1"))("v1") + responses = yield [gen.Wait("k1"), + gen.Task(self.delay_callback, 3, arg="v2")] + self.assertEqual(responses, ["v1", "v2"]) + + @gen_test + def test_future(self): + result = yield self.async_future(1) + self.assertEqual(result, 1) + + @gen_test + def test_multi_future(self): + results = yield [self.async_future(1), self.async_future(2)] + self.assertEqual(results, [1, 2]) + + @gen_test + def test_multi_future_duplicate(self): + f = self.async_future(2) + results = yield [self.async_future(1), f, self.async_future(3), f] + self.assertEqual(results, [1, 2, 3, 2]) + + @gen_test + def test_multi_dict_future(self): + results = yield dict(foo=self.async_future(1), bar=self.async_future(2)) + self.assertEqual(results, dict(foo=1, bar=2)) + + @gen_test + def test_multi_exceptions(self): + with ExpectLog(app_log, "Multiple exceptions in yield list"): + with self.assertRaises(RuntimeError) as cm: + yield gen.Multi([self.async_exception(RuntimeError("error 1")), + self.async_exception(RuntimeError("error 2"))]) + self.assertEqual(str(cm.exception), "error 1") + + # With only one exception, no error is logged. + with self.assertRaises(RuntimeError): + yield gen.Multi([self.async_exception(RuntimeError("error 1")), + self.async_future(2)]) + + # Exception logging may be explicitly quieted. 
+ with self.assertRaises(RuntimeError): + yield gen.Multi([self.async_exception(RuntimeError("error 1")), + self.async_exception(RuntimeError("error 2"))], + quiet_exceptions=RuntimeError) + + @gen_test + def test_multi_future_exceptions(self): + with ExpectLog(app_log, "Multiple exceptions in yield list"): + with self.assertRaises(RuntimeError) as cm: + yield [self.async_exception(RuntimeError("error 1")), + self.async_exception(RuntimeError("error 2"))] + self.assertEqual(str(cm.exception), "error 1") + + # With only one exception, no error is logged. + with self.assertRaises(RuntimeError): + yield [self.async_exception(RuntimeError("error 1")), + self.async_future(2)] + + # Exception logging may be explicitly quieted. + with self.assertRaises(RuntimeError): + yield gen.multi_future( + [self.async_exception(RuntimeError("error 1")), + self.async_exception(RuntimeError("error 2"))], + quiet_exceptions=RuntimeError) + + def test_arguments(self): + @gen.engine + def f(): + (yield gen.Callback("noargs"))() + self.assertEqual((yield gen.Wait("noargs")), None) + (yield gen.Callback("1arg"))(42) + self.assertEqual((yield gen.Wait("1arg")), 42) + + (yield gen.Callback("kwargs"))(value=42) + result = yield gen.Wait("kwargs") + self.assertTrue(isinstance(result, gen.Arguments)) + self.assertEqual(((), dict(value=42)), result) + self.assertEqual(dict(value=42), result.kwargs) + + (yield gen.Callback("2args"))(42, 43) + result = yield gen.Wait("2args") + self.assertTrue(isinstance(result, gen.Arguments)) + self.assertEqual(((42, 43), {}), result) + self.assertEqual((42, 43), result.args) + + def task_func(callback): + callback(None, error="foo") + result = yield gen.Task(task_func) + self.assertTrue(isinstance(result, gen.Arguments)) + self.assertEqual(((None,), dict(error="foo")), result) + + self.stop() + self.run_gen(f) + + def test_stack_context_leak(self): + # regression test: repeated invocations of a gen-based + # function should not result in accumulated 
stack_contexts + def _stack_depth(): + head = stack_context._state.contexts[1] + length = 0 + + while head is not None: + length += 1 + head = head.old_contexts[1] + + return length + + @gen.engine + def inner(callback): + yield gen.Task(self.io_loop.add_callback) + callback() + + @gen.engine + def outer(): + for i in range(10): + yield gen.Task(inner) + + stack_increase = _stack_depth() - initial_stack_depth + self.assertTrue(stack_increase <= 2) + self.stop() + initial_stack_depth = _stack_depth() + self.run_gen(outer) + + def test_stack_context_leak_exception(self): + # same as previous, but with a function that exits with an exception + @gen.engine + def inner(callback): + yield gen.Task(self.io_loop.add_callback) + 1 / 0 + + @gen.engine + def outer(): + for i in range(10): + try: + yield gen.Task(inner) + except ZeroDivisionError: + pass + stack_increase = len(stack_context._state.contexts) - initial_stack_depth + self.assertTrue(stack_increase <= 2) + self.stop() + initial_stack_depth = len(stack_context._state.contexts) + self.run_gen(outer) + + def function_with_stack_context(self, callback): + # Technically this function should stack_context.wrap its callback + # upon entry. However, it is very common for this step to be + # omitted. + def step2(): + self.assertEqual(self.named_contexts, ['a']) + self.io_loop.add_callback(callback) + + with stack_context.StackContext(self.named_context('a')): + self.io_loop.add_callback(step2) + + @gen_test + def test_wait_transfer_stack_context(self): + # Wait should not pick up contexts from where callback was invoked, + # even if that function improperly fails to wrap its callback. 
+ cb = yield gen.Callback('k1') + self.function_with_stack_context(cb) + self.assertEqual(self.named_contexts, []) + yield gen.Wait('k1') + self.assertEqual(self.named_contexts, []) + + @gen_test + def test_task_transfer_stack_context(self): + yield gen.Task(self.function_with_stack_context) + self.assertEqual(self.named_contexts, []) + + def test_raise_after_stop(self): + # This pattern will be used in the following tests so make sure + # the exception propagates as expected. + @gen.engine + def f(): + self.stop() + 1 / 0 + + with self.assertRaises(ZeroDivisionError): + self.run_gen(f) + + def test_sync_raise_return(self): + # gen.Return is allowed in @gen.engine, but it may not be used + # to return a value. + @gen.engine + def f(): + self.stop(42) + raise gen.Return() + + result = self.run_gen(f) + self.assertEqual(result, 42) + + def test_async_raise_return(self): + @gen.engine + def f(): + yield gen.Task(self.io_loop.add_callback) + self.stop(42) + raise gen.Return() + + result = self.run_gen(f) + self.assertEqual(result, 42) + + def test_sync_raise_return_value(self): + @gen.engine + def f(): + raise gen.Return(42) + + with self.assertRaises(gen.ReturnValueIgnoredError): + self.run_gen(f) + + def test_sync_raise_return_value_tuple(self): + @gen.engine + def f(): + raise gen.Return((1, 2)) + + with self.assertRaises(gen.ReturnValueIgnoredError): + self.run_gen(f) + + def test_async_raise_return_value(self): + @gen.engine + def f(): + yield gen.Task(self.io_loop.add_callback) + raise gen.Return(42) + + with self.assertRaises(gen.ReturnValueIgnoredError): + self.run_gen(f) + + def test_async_raise_return_value_tuple(self): + @gen.engine + def f(): + yield gen.Task(self.io_loop.add_callback) + raise gen.Return((1, 2)) + + with self.assertRaises(gen.ReturnValueIgnoredError): + self.run_gen(f) + + def test_return_value(self): + # It is an error to apply @gen.engine to a function that returns + # a value. 
+ @gen.engine + def f(): + return 42 + + with self.assertRaises(gen.ReturnValueIgnoredError): + self.run_gen(f) + + def test_return_value_tuple(self): + # It is an error to apply @gen.engine to a function that returns + # a value. + @gen.engine + def f(): + return (1, 2) + + with self.assertRaises(gen.ReturnValueIgnoredError): + self.run_gen(f) + + @skipNotCPython + def test_task_refcounting(self): + # On CPython, tasks and their arguments should be released immediately + # without waiting for garbage collection. + @gen.engine + def f(): + class Foo(object): + pass + arg = Foo() + self.arg_ref = weakref.ref(arg) + task = gen.Task(self.io_loop.add_callback, arg=arg) + self.task_ref = weakref.ref(task) + yield task + self.stop() + + self.run_gen(f) + self.assertIs(self.arg_ref(), None) + self.assertIs(self.task_ref(), None) + + +class GenCoroutineTest(AsyncTestCase): + def setUp(self): + # Stray StopIteration exceptions can lead to tests exiting prematurely, + # so we need explicit checks here to make sure the tests run all + # the way through. 
+ self.finished = False + super(GenCoroutineTest, self).setUp() + + def tearDown(self): + super(GenCoroutineTest, self).tearDown() + assert self.finished + + @gen_test + def test_sync_gen_return(self): + @gen.coroutine + def f(): + raise gen.Return(42) + result = yield f() + self.assertEqual(result, 42) + self.finished = True + + @gen_test + def test_async_gen_return(self): + @gen.coroutine + def f(): + yield gen.Task(self.io_loop.add_callback) + raise gen.Return(42) + result = yield f() + self.assertEqual(result, 42) + self.finished = True + + @gen_test + def test_sync_return(self): + @gen.coroutine + def f(): + return 42 + result = yield f() + self.assertEqual(result, 42) + self.finished = True + + @skipBefore33 + @gen_test + def test_async_return(self): + namespace = exec_test(globals(), locals(), """ + @gen.coroutine + def f(): + yield gen.Task(self.io_loop.add_callback) + return 42 + """) + result = yield namespace['f']() + self.assertEqual(result, 42) + self.finished = True + + @skipBefore33 + @gen_test + def test_async_early_return(self): + # A yield statement exists but is not executed, which means + # this function "returns" via an exception. This exception + # doesn't happen before the exception handling is set up. + namespace = exec_test(globals(), locals(), """ + @gen.coroutine + def f(): + if True: + return 42 + yield gen.Task(self.io_loop.add_callback) + """) + result = yield namespace['f']() + self.assertEqual(result, 42) + self.finished = True + + @skipBefore35 + @gen_test + def test_async_await(self): + # This test verifies that an async function can await a + # yield-based gen.coroutine, and that a gen.coroutine + # (the test method itself) can yield an async function. 
+ namespace = exec_test(globals(), locals(), """ + async def f(): + await gen.Task(self.io_loop.add_callback) + return 42 + """) + result = yield namespace['f']() + self.assertEqual(result, 42) + self.finished = True + + @skipBefore35 + @gen_test + def test_async_await_mixed_multi_native_future(self): + namespace = exec_test(globals(), locals(), """ + async def f1(): + await gen.Task(self.io_loop.add_callback) + return 42 + """) + + @gen.coroutine + def f2(): + yield gen.Task(self.io_loop.add_callback) + raise gen.Return(43) + + results = yield [namespace['f1'](), f2()] + self.assertEqual(results, [42, 43]) + self.finished = True + + @skipBefore35 + @gen_test + def test_async_await_mixed_multi_native_yieldpoint(self): + namespace = exec_test(globals(), locals(), """ + async def f1(): + await gen.Task(self.io_loop.add_callback) + return 42 + """) + + @gen.coroutine + def f2(): + yield gen.Task(self.io_loop.add_callback) + raise gen.Return(43) + + f2(callback=(yield gen.Callback('cb'))) + results = yield [namespace['f1'](), gen.Wait('cb')] + self.assertEqual(results, [42, 43]) + self.finished = True + + @gen_test + def test_sync_return_no_value(self): + @gen.coroutine + def f(): + return + result = yield f() + self.assertEqual(result, None) + self.finished = True + + @gen_test + def test_async_return_no_value(self): + # Without a return value we don't need python 3.3. + @gen.coroutine + def f(): + yield gen.Task(self.io_loop.add_callback) + return + result = yield f() + self.assertEqual(result, None) + self.finished = True + + @gen_test + def test_sync_raise(self): + @gen.coroutine + def f(): + 1 / 0 + # The exception is raised when the future is yielded + # (or equivalently when its result method is called), + # not when the function itself is called). 
+ future = f() + with self.assertRaises(ZeroDivisionError): + yield future + self.finished = True + + @gen_test + def test_async_raise(self): + @gen.coroutine + def f(): + yield gen.Task(self.io_loop.add_callback) + 1 / 0 + future = f() + with self.assertRaises(ZeroDivisionError): + yield future + self.finished = True + + @gen_test + def test_pass_callback(self): + @gen.coroutine + def f(): + raise gen.Return(42) + result = yield gen.Task(f) + self.assertEqual(result, 42) + self.finished = True + + @gen_test + def test_replace_yieldpoint_exception(self): + # Test exception handling: a coroutine can catch one exception + # raised by a yield point and raise a different one. + @gen.coroutine + def f1(): + 1 / 0 + + @gen.coroutine + def f2(): + try: + yield f1() + except ZeroDivisionError: + raise KeyError() + + future = f2() + with self.assertRaises(KeyError): + yield future + self.finished = True + + @gen_test + def test_swallow_yieldpoint_exception(self): + # Test exception handling: a coroutine can catch an exception + # raised by a yield point and not raise a different one. + @gen.coroutine + def f1(): + 1 / 0 + + @gen.coroutine + def f2(): + try: + yield f1() + except ZeroDivisionError: + raise gen.Return(42) + + result = yield f2() + self.assertEqual(result, 42) + self.finished = True + + @gen_test + def test_replace_context_exception(self): + # Test exception handling: exceptions thrown into the stack context + # can be caught and replaced. + # Note that this test and the following are for behavior that is + # not really supported any more: coroutines no longer create a + # stack context automatically; but one is created after the first + # YieldPoint (i.e. not a Future). 
+ @gen.coroutine + def f2(): + (yield gen.Callback(1))() + yield gen.Wait(1) + self.io_loop.add_callback(lambda: 1 / 0) + try: + yield gen.Task(self.io_loop.add_timeout, + self.io_loop.time() + 10) + except ZeroDivisionError: + raise KeyError() + + future = f2() + with self.assertRaises(KeyError): + yield future + self.finished = True + + @gen_test + def test_swallow_context_exception(self): + # Test exception handling: exceptions thrown into the stack context + # can be caught and ignored. + @gen.coroutine + def f2(): + (yield gen.Callback(1))() + yield gen.Wait(1) + self.io_loop.add_callback(lambda: 1 / 0) + try: + yield gen.Task(self.io_loop.add_timeout, + self.io_loop.time() + 10) + except ZeroDivisionError: + raise gen.Return(42) + + result = yield f2() + self.assertEqual(result, 42) + self.finished = True + + @gen_test + def test_moment(self): + calls = [] + + @gen.coroutine + def f(name, yieldable): + for i in range(5): + calls.append(name) + yield yieldable + # First, confirm the behavior without moment: each coroutine + # monopolizes the event loop until it finishes. + immediate = Future() + immediate.set_result(None) + yield [f('a', immediate), f('b', immediate)] + self.assertEqual(''.join(calls), 'aaaaabbbbb') + + # With moment, they take turns. 
+ calls = [] + yield [f('a', gen.moment), f('b', gen.moment)] + self.assertEqual(''.join(calls), 'ababababab') + self.finished = True + + calls = [] + yield [f('a', gen.moment), f('b', immediate)] + self.assertEqual(''.join(calls), 'abbbbbaaaa') + + @gen_test + def test_sleep(self): + yield gen.sleep(0.01) + self.finished = True + + @skipBefore33 + @gen_test + def test_py3_leak_exception_context(self): + class LeakedException(Exception): + pass + + @gen.coroutine + def inner(iteration): + raise LeakedException(iteration) + + try: + yield inner(1) + except LeakedException as e: + self.assertEqual(str(e), "1") + self.assertIsNone(e.__context__) + + try: + yield inner(2) + except LeakedException as e: + self.assertEqual(str(e), "2") + self.assertIsNone(e.__context__) + + self.finished = True + + +class GenSequenceHandler(RequestHandler): + @asynchronous + @gen.engine + def get(self): + self.io_loop = self.request.connection.stream.io_loop + self.io_loop.add_callback((yield gen.Callback("k1"))) + yield gen.Wait("k1") + self.write("1") + self.io_loop.add_callback((yield gen.Callback("k2"))) + yield gen.Wait("k2") + self.write("2") + # reuse an old key + self.io_loop.add_callback((yield gen.Callback("k1"))) + yield gen.Wait("k1") + self.finish("3") + + +class GenCoroutineSequenceHandler(RequestHandler): + @gen.coroutine + def get(self): + self.io_loop = self.request.connection.stream.io_loop + self.io_loop.add_callback((yield gen.Callback("k1"))) + yield gen.Wait("k1") + self.write("1") + self.io_loop.add_callback((yield gen.Callback("k2"))) + yield gen.Wait("k2") + self.write("2") + # reuse an old key + self.io_loop.add_callback((yield gen.Callback("k1"))) + yield gen.Wait("k1") + self.finish("3") + + +class GenCoroutineUnfinishedSequenceHandler(RequestHandler): + @asynchronous + @gen.coroutine + def get(self): + self.io_loop = self.request.connection.stream.io_loop + self.io_loop.add_callback((yield gen.Callback("k1"))) + yield gen.Wait("k1") + self.write("1") + 
self.io_loop.add_callback((yield gen.Callback("k2"))) + yield gen.Wait("k2") + self.write("2") + # reuse an old key + self.io_loop.add_callback((yield gen.Callback("k1"))) + yield gen.Wait("k1") + # just write, don't finish + self.write("3") + + +class GenTaskHandler(RequestHandler): + @asynchronous + @gen.engine + def get(self): + io_loop = self.request.connection.stream.io_loop + client = AsyncHTTPClient(io_loop=io_loop) + response = yield gen.Task(client.fetch, self.get_argument('url')) + response.rethrow() + self.finish(b"got response: " + response.body) + + +class GenExceptionHandler(RequestHandler): + @asynchronous + @gen.engine + def get(self): + # This test depends on the order of the two decorators. + io_loop = self.request.connection.stream.io_loop + yield gen.Task(io_loop.add_callback) + raise Exception("oops") + + +class GenCoroutineExceptionHandler(RequestHandler): + @gen.coroutine + def get(self): + # This test depends on the order of the two decorators. + io_loop = self.request.connection.stream.io_loop + yield gen.Task(io_loop.add_callback) + raise Exception("oops") + + +class GenYieldExceptionHandler(RequestHandler): + @asynchronous + @gen.engine + def get(self): + io_loop = self.request.connection.stream.io_loop + # Test the interaction of the two stack_contexts. + + def fail_task(callback): + io_loop.add_callback(lambda: 1 / 0) + try: + yield gen.Task(fail_task) + raise Exception("did not get expected exception") + except ZeroDivisionError: + self.finish('ok') + + +# "Undecorated" here refers to the absence of @asynchronous. 
+class UndecoratedCoroutinesHandler(RequestHandler): + @gen.coroutine + def prepare(self): + self.chunks = [] + yield gen.Task(IOLoop.current().add_callback) + self.chunks.append('1') + + @gen.coroutine + def get(self): + self.chunks.append('2') + yield gen.Task(IOLoop.current().add_callback) + self.chunks.append('3') + yield gen.Task(IOLoop.current().add_callback) + self.write(''.join(self.chunks)) + + +class AsyncPrepareErrorHandler(RequestHandler): + @gen.coroutine + def prepare(self): + yield gen.Task(IOLoop.current().add_callback) + raise HTTPError(403) + + def get(self): + self.finish('ok') + + +class NativeCoroutineHandler(RequestHandler): + if sys.version_info > (3, 5): + exec(textwrap.dedent(""" + async def get(self): + await gen.Task(IOLoop.current().add_callback) + self.write("ok") + """)) + + +class GenWebTest(AsyncHTTPTestCase): + def get_app(self): + return Application([ + ('/sequence', GenSequenceHandler), + ('/coroutine_sequence', GenCoroutineSequenceHandler), + ('/coroutine_unfinished_sequence', + GenCoroutineUnfinishedSequenceHandler), + ('/task', GenTaskHandler), + ('/exception', GenExceptionHandler), + ('/coroutine_exception', GenCoroutineExceptionHandler), + ('/yield_exception', GenYieldExceptionHandler), + ('/undecorated_coroutine', UndecoratedCoroutinesHandler), + ('/async_prepare_error', AsyncPrepareErrorHandler), + ('/native_coroutine', NativeCoroutineHandler), + ]) + + def test_sequence_handler(self): + response = self.fetch('/sequence') + self.assertEqual(response.body, b"123") + + def test_coroutine_sequence_handler(self): + response = self.fetch('/coroutine_sequence') + self.assertEqual(response.body, b"123") + + def test_coroutine_unfinished_sequence_handler(self): + response = self.fetch('/coroutine_unfinished_sequence') + self.assertEqual(response.body, b"123") + + def test_task_handler(self): + response = self.fetch('/task?url=%s' % url_escape(self.get_url('/sequence'))) + self.assertEqual(response.body, b"got response: 123") + + 
def test_exception_handler(self): + # Make sure we get an error and not a timeout + with ExpectLog(app_log, "Uncaught exception GET /exception"): + response = self.fetch('/exception') + self.assertEqual(500, response.code) + + def test_coroutine_exception_handler(self): + # Make sure we get an error and not a timeout + with ExpectLog(app_log, "Uncaught exception GET /coroutine_exception"): + response = self.fetch('/coroutine_exception') + self.assertEqual(500, response.code) + + def test_yield_exception_handler(self): + response = self.fetch('/yield_exception') + self.assertEqual(response.body, b'ok') + + def test_undecorated_coroutines(self): + response = self.fetch('/undecorated_coroutine') + self.assertEqual(response.body, b'123') + + def test_async_prepare_error_handler(self): + response = self.fetch('/async_prepare_error') + self.assertEqual(response.code, 403) + + @skipBefore35 + def test_native_coroutine_handler(self): + response = self.fetch('/native_coroutine') + self.assertEqual(response.code, 200) + self.assertEqual(response.body, b'ok') + + +class WithTimeoutTest(AsyncTestCase): + @gen_test + def test_timeout(self): + with self.assertRaises(gen.TimeoutError): + yield gen.with_timeout(datetime.timedelta(seconds=0.1), + Future()) + + @gen_test + def test_completes_before_timeout(self): + future = Future() + self.io_loop.add_timeout(datetime.timedelta(seconds=0.1), + lambda: future.set_result('asdf')) + result = yield gen.with_timeout(datetime.timedelta(seconds=3600), + future, io_loop=self.io_loop) + self.assertEqual(result, 'asdf') + + @gen_test + def test_fails_before_timeout(self): + future = Future() + self.io_loop.add_timeout( + datetime.timedelta(seconds=0.1), + lambda: future.set_exception(ZeroDivisionError())) + with self.assertRaises(ZeroDivisionError): + yield gen.with_timeout(datetime.timedelta(seconds=3600), + future, io_loop=self.io_loop) + + @gen_test + def test_already_resolved(self): + future = Future() + future.set_result('asdf') + result 
= yield gen.with_timeout(datetime.timedelta(seconds=3600), + future, io_loop=self.io_loop) + self.assertEqual(result, 'asdf') + + @unittest.skipIf(futures is None, 'futures module not present') + @gen_test + def test_timeout_concurrent_future(self): + with futures.ThreadPoolExecutor(1) as executor: + with self.assertRaises(gen.TimeoutError): + yield gen.with_timeout(self.io_loop.time(), + executor.submit(time.sleep, 0.1)) + + @unittest.skipIf(futures is None, 'futures module not present') + @gen_test + def test_completed_concurrent_future(self): + with futures.ThreadPoolExecutor(1) as executor: + yield gen.with_timeout(datetime.timedelta(seconds=3600), + executor.submit(lambda: None)) + + +class WaitIteratorTest(AsyncTestCase): + @gen_test + def test_empty_iterator(self): + g = gen.WaitIterator() + self.assertTrue(g.done(), 'empty generator iterated') + + with self.assertRaises(ValueError): + g = gen.WaitIterator(False, bar=False) + + self.assertEqual(g.current_index, None, "bad nil current index") + self.assertEqual(g.current_future, None, "bad nil current future") + + @gen_test + def test_already_done(self): + f1 = Future() + f2 = Future() + f3 = Future() + f1.set_result(24) + f2.set_result(42) + f3.set_result(84) + + g = gen.WaitIterator(f1, f2, f3) + i = 0 + while not g.done(): + r = yield g.next() + # Order is not guaranteed, but the current implementation + # preserves ordering of already-done Futures. 
+ if i == 0: + self.assertEqual(g.current_index, 0) + self.assertIs(g.current_future, f1) + self.assertEqual(r, 24) + elif i == 1: + self.assertEqual(g.current_index, 1) + self.assertIs(g.current_future, f2) + self.assertEqual(r, 42) + elif i == 2: + self.assertEqual(g.current_index, 2) + self.assertIs(g.current_future, f3) + self.assertEqual(r, 84) + i += 1 + + self.assertEqual(g.current_index, None, "bad nil current index") + self.assertEqual(g.current_future, None, "bad nil current future") + + dg = gen.WaitIterator(f1=f1, f2=f2) + + while not dg.done(): + dr = yield dg.next() + if dg.current_index == "f1": + self.assertTrue(dg.current_future == f1 and dr == 24, + "WaitIterator dict status incorrect") + elif dg.current_index == "f2": + self.assertTrue(dg.current_future == f2 and dr == 42, + "WaitIterator dict status incorrect") + else: + self.fail("got bad WaitIterator index {}".format( + dg.current_index)) + + i += 1 + + self.assertEqual(dg.current_index, None, "bad nil current index") + self.assertEqual(dg.current_future, None, "bad nil current future") + + def finish_coroutines(self, iteration, futures): + if iteration == 3: + futures[2].set_result(24) + elif iteration == 5: + futures[0].set_exception(ZeroDivisionError()) + elif iteration == 8: + futures[1].set_result(42) + futures[3].set_result(84) + + if iteration < 8: + self.io_loop.add_callback(self.finish_coroutines, iteration + 1, futures) + + @gen_test + def test_iterator(self): + futures = [Future(), Future(), Future(), Future()] + + self.finish_coroutines(0, futures) + + g = gen.WaitIterator(*futures) + + i = 0 + while not g.done(): + try: + r = yield g.next() + except ZeroDivisionError: + self.assertIs(g.current_future, futures[0], + 'exception future invalid') + else: + if i == 0: + self.assertEqual(r, 24, 'iterator value incorrect') + self.assertEqual(g.current_index, 2, 'wrong index') + elif i == 2: + self.assertEqual(r, 42, 'iterator value incorrect') + self.assertEqual(g.current_index, 1, 
'wrong index') + elif i == 3: + self.assertEqual(r, 84, 'iterator value incorrect') + self.assertEqual(g.current_index, 3, 'wrong index') + i += 1 + + @skipBefore35 + @gen_test + def test_iterator_async_await(self): + # Recreate the previous test with py35 syntax. It's a little clunky + # because of the way the previous test handles an exception on + # a single iteration. + futures = [Future(), Future(), Future(), Future()] + self.finish_coroutines(0, futures) + self.finished = False + + namespace = exec_test(globals(), locals(), """ + async def f(): + i = 0 + g = gen.WaitIterator(*futures) + try: + async for r in g: + if i == 0: + self.assertEqual(r, 24, 'iterator value incorrect') + self.assertEqual(g.current_index, 2, 'wrong index') + else: + raise Exception("expected exception on iteration 1") + i += 1 + except ZeroDivisionError: + i += 1 + async for r in g: + if i == 2: + self.assertEqual(r, 42, 'iterator value incorrect') + self.assertEqual(g.current_index, 1, 'wrong index') + elif i == 3: + self.assertEqual(r, 84, 'iterator value incorrect') + self.assertEqual(g.current_index, 3, 'wrong index') + else: + raise Exception("didn't expect iteration %d" % i) + i += 1 + self.finished = True + """) + yield namespace['f']() + self.assertTrue(self.finished) + + @gen_test + def test_no_ref(self): + # In this usage, there is no direct hard reference to the + # WaitIterator itself, only the Future it returns. Since + # WaitIterator uses weak references internally to improve GC + # performance, this used to cause problems. 
+ yield gen.with_timeout(datetime.timedelta(seconds=0.1), + gen.WaitIterator(gen.sleep(0)).next()) + + +if __name__ == '__main__': + unittest.main() diff --git a/python/tornado/test/gettext_translations/fr_FR/LC_MESSAGES/tornado_test.mo b/python/tornado/test/gettext_translations/fr_FR/LC_MESSAGES/tornado_test.mo new file mode 100644 index 0000000000000000000000000000000000000000..a97bf9c57460ecfc27761accf90d712ea5cebb44 GIT binary patch literal 665 zcmYk3PjAyO7{(2hf8hd1hzkcFL6b*PG%=;?w5-jxh@`2~Zqm59iI;kKc4a#Z&~AJN zz5pk7=gei_VK?{)ddc_y6;;R;R4AB&o;|(*{A@;`klngX$w<1GoS%|xSutEHQbYJ5j z2>p#U|CR4UkQD4acQ0S&j^n5xSx$x#KFGr?S$mt0VlSn}lBuTB2x^rM@!nyY;!%{v zcq`7LB;ARI!y=wcwjnC(hSrQs89fVe8jbc3-N;*Mx+C~H{DoBpM$M8eUVUG%?t23z zEt9a_#|6x7*$4Y_At;wUT+XRB%=R05LN-@9H`WQ$B$lPBxU56GIpfwFi$+sH_LM#| zA(o5w*7UnQ{MYuMOT3MP7d;ONhG(2fr, YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2015-01-27 11:05+0300\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"Language: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n > 1);\n" + +#: extract_me.py:11 +msgid "school" +msgstr "école" + +#: extract_me.py:12 +msgctxt "law" +msgid "right" +msgstr "le droit" + +#: extract_me.py:13 +msgctxt "good" +msgid "right" +msgstr "le bien" + +#: extract_me.py:14 +msgctxt "organization" +msgid "club" +msgid_plural "clubs" +msgstr[0] "le club" +msgstr[1] "les clubs" + +#: extract_me.py:15 +msgctxt "stick" +msgid "club" +msgid_plural "clubs" +msgstr[0] "le bâton" +msgstr[1] "les bâtons" diff --git a/python/tornado/test/httpclient_test.py b/python/tornado/test/httpclient_test.py new file mode 100644 index 000000000..e7551c93b --- /dev/null +++ b/python/tornado/test/httpclient_test.py @@ -0,0 +1,661 @@ +#!/usr/bin/env python + +from __future__ import absolute_import, division, 
print_function, with_statement + +import base64 +import binascii +from contextlib import closing +import copy +import functools +import sys +import threading +import datetime +from io import BytesIO + +from tornado.escape import utf8 +from tornado import gen +from tornado.httpclient import HTTPRequest, HTTPResponse, _RequestProxy, HTTPError, HTTPClient +from tornado.httpserver import HTTPServer +from tornado.ioloop import IOLoop +from tornado.iostream import IOStream +from tornado.log import gen_log +from tornado import netutil +from tornado.stack_context import ExceptionStackContext, NullContext +from tornado.testing import AsyncHTTPTestCase, bind_unused_port, gen_test, ExpectLog +from tornado.test.util import unittest, skipOnTravis +from tornado.util import u +from tornado.web import Application, RequestHandler, url +from tornado.httputil import format_timestamp, HTTPHeaders + + +class HelloWorldHandler(RequestHandler): + def get(self): + name = self.get_argument("name", "world") + self.set_header("Content-Type", "text/plain") + self.finish("Hello %s!" % name) + + +class PostHandler(RequestHandler): + def post(self): + self.finish("Post arg1: %s, arg2: %s" % ( + self.get_argument("arg1"), self.get_argument("arg2"))) + + +class PutHandler(RequestHandler): + def put(self): + self.write("Put body: ") + self.write(self.request.body) + + +class RedirectHandler(RequestHandler): + def prepare(self): + self.write('redirects can have bodies too') + self.redirect(self.get_argument("url"), + status=int(self.get_argument("status", "302"))) + + +class ChunkHandler(RequestHandler): + @gen.coroutine + def get(self): + self.write("asdf") + self.flush() + # Wait a bit to ensure the chunks are sent and received separately. 
+ yield gen.sleep(0.01) + self.write("qwer") + + +class AuthHandler(RequestHandler): + def get(self): + self.finish(self.request.headers["Authorization"]) + + +class CountdownHandler(RequestHandler): + def get(self, count): + count = int(count) + if count > 0: + self.redirect(self.reverse_url("countdown", count - 1)) + else: + self.write("Zero") + + +class EchoPostHandler(RequestHandler): + def post(self): + self.write(self.request.body) + + +class UserAgentHandler(RequestHandler): + def get(self): + self.write(self.request.headers.get('User-Agent', 'User agent not set')) + + +class ContentLength304Handler(RequestHandler): + def get(self): + self.set_status(304) + self.set_header('Content-Length', 42) + + def _clear_headers_for_304(self): + # Tornado strips content-length from 304 responses, but here we + # want to simulate servers that include the headers anyway. + pass + + +class PatchHandler(RequestHandler): + + def patch(self): + "Return the request payload - so we can check it is being kept" + self.write(self.request.body) + + +class AllMethodsHandler(RequestHandler): + SUPPORTED_METHODS = RequestHandler.SUPPORTED_METHODS + ('OTHER',) + + def method(self): + self.write(self.request.method) + + get = post = put = delete = options = patch = other = method + +# These tests end up getting run redundantly: once here with the default +# HTTPClient implementation, and then again in each implementation's own +# test suite. 
+ + +class HTTPClientCommonTestCase(AsyncHTTPTestCase): + def get_app(self): + return Application([ + url("/hello", HelloWorldHandler), + url("/post", PostHandler), + url("/put", PutHandler), + url("/redirect", RedirectHandler), + url("/chunk", ChunkHandler), + url("/auth", AuthHandler), + url("/countdown/([0-9]+)", CountdownHandler, name="countdown"), + url("/echopost", EchoPostHandler), + url("/user_agent", UserAgentHandler), + url("/304_with_content_length", ContentLength304Handler), + url("/all_methods", AllMethodsHandler), + url('/patch', PatchHandler), + ], gzip=True) + + def test_patch_receives_payload(self): + body = b"some patch data" + response = self.fetch("/patch", method='PATCH', body=body) + self.assertEqual(response.code, 200) + self.assertEqual(response.body, body) + + @skipOnTravis + def test_hello_world(self): + response = self.fetch("/hello") + self.assertEqual(response.code, 200) + self.assertEqual(response.headers["Content-Type"], "text/plain") + self.assertEqual(response.body, b"Hello world!") + self.assertEqual(int(response.request_time), 0) + + response = self.fetch("/hello?name=Ben") + self.assertEqual(response.body, b"Hello Ben!") + + def test_streaming_callback(self): + # streaming_callback is also tested in test_chunked + chunks = [] + response = self.fetch("/hello", + streaming_callback=chunks.append) + # with streaming_callback, data goes to the callback and not response.body + self.assertEqual(chunks, [b"Hello world!"]) + self.assertFalse(response.body) + + def test_post(self): + response = self.fetch("/post", method="POST", + body="arg1=foo&arg2=bar") + self.assertEqual(response.code, 200) + self.assertEqual(response.body, b"Post arg1: foo, arg2: bar") + + def test_chunked(self): + response = self.fetch("/chunk") + self.assertEqual(response.body, b"asdfqwer") + + chunks = [] + response = self.fetch("/chunk", + streaming_callback=chunks.append) + self.assertEqual(chunks, [b"asdf", b"qwer"]) + self.assertFalse(response.body) + + def 
test_chunked_close(self): + # test case in which chunks spread read-callback processing + # over several ioloop iterations, but the connection is already closed. + sock, port = bind_unused_port() + with closing(sock): + def write_response(stream, request_data): + if b"HTTP/1." not in request_data: + self.skipTest("requires HTTP/1.x") + stream.write(b"""\ +HTTP/1.1 200 OK +Transfer-Encoding: chunked + +1 +1 +1 +2 +0 + +""".replace(b"\n", b"\r\n"), callback=stream.close) + + def accept_callback(conn, address): + # fake an HTTP server using chunked encoding where the final chunks + # and connection close all happen at once + stream = IOStream(conn, io_loop=self.io_loop) + stream.read_until(b"\r\n\r\n", + functools.partial(write_response, stream)) + netutil.add_accept_handler(sock, accept_callback, self.io_loop) + self.http_client.fetch("http://127.0.0.1:%d/" % port, self.stop) + resp = self.wait() + resp.rethrow() + self.assertEqual(resp.body, b"12") + self.io_loop.remove_handler(sock.fileno()) + + def test_streaming_stack_context(self): + chunks = [] + exc_info = [] + + def error_handler(typ, value, tb): + exc_info.append((typ, value, tb)) + return True + + def streaming_cb(chunk): + chunks.append(chunk) + if chunk == b'qwer': + 1 / 0 + + with ExceptionStackContext(error_handler): + self.fetch('/chunk', streaming_callback=streaming_cb) + + self.assertEqual(chunks, [b'asdf', b'qwer']) + self.assertEqual(1, len(exc_info)) + self.assertIs(exc_info[0][0], ZeroDivisionError) + + def test_basic_auth(self): + self.assertEqual(self.fetch("/auth", auth_username="Aladdin", + auth_password="open sesame").body, + b"Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==") + + def test_basic_auth_explicit_mode(self): + self.assertEqual(self.fetch("/auth", auth_username="Aladdin", + auth_password="open sesame", + auth_mode="basic").body, + b"Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==") + + def test_unsupported_auth_mode(self): + # curl and simple clients handle errors a bit differently; the + # important 
thing is that they don't fall back to basic auth + # on an unknown mode. + with ExpectLog(gen_log, "uncaught exception", required=False): + with self.assertRaises((ValueError, HTTPError)): + response = self.fetch("/auth", auth_username="Aladdin", + auth_password="open sesame", + auth_mode="asdf") + response.rethrow() + + def test_follow_redirect(self): + response = self.fetch("/countdown/2", follow_redirects=False) + self.assertEqual(302, response.code) + self.assertTrue(response.headers["Location"].endswith("/countdown/1")) + + response = self.fetch("/countdown/2") + self.assertEqual(200, response.code) + self.assertTrue(response.effective_url.endswith("/countdown/0")) + self.assertEqual(b"Zero", response.body) + + def test_credentials_in_url(self): + url = self.get_url("/auth").replace("http://", "http://me:secret@") + self.http_client.fetch(url, self.stop) + response = self.wait() + self.assertEqual(b"Basic " + base64.b64encode(b"me:secret"), + response.body) + + def test_body_encoding(self): + unicode_body = u("\xe9") + byte_body = binascii.a2b_hex(b"e9") + + # unicode string in body gets converted to utf8 + response = self.fetch("/echopost", method="POST", body=unicode_body, + headers={"Content-Type": "application/blah"}) + self.assertEqual(response.headers["Content-Length"], "2") + self.assertEqual(response.body, utf8(unicode_body)) + + # byte strings pass through directly + response = self.fetch("/echopost", method="POST", + body=byte_body, + headers={"Content-Type": "application/blah"}) + self.assertEqual(response.headers["Content-Length"], "1") + self.assertEqual(response.body, byte_body) + + # Mixing unicode in headers and byte string bodies shouldn't + # break anything + response = self.fetch("/echopost", method="POST", body=byte_body, + headers={"Content-Type": "application/blah"}, + user_agent=u("foo")) + self.assertEqual(response.headers["Content-Length"], "1") + self.assertEqual(response.body, byte_body) + + def test_types(self): + response = 
self.fetch("/hello") + self.assertEqual(type(response.body), bytes) + self.assertEqual(type(response.headers["Content-Type"]), str) + self.assertEqual(type(response.code), int) + self.assertEqual(type(response.effective_url), str) + + def test_header_callback(self): + first_line = [] + headers = {} + chunks = [] + + def header_callback(header_line): + if header_line.startswith('HTTP/1.1 101'): + # Upgrading to HTTP/2 + pass + elif header_line.startswith('HTTP/'): + first_line.append(header_line) + elif header_line != '\r\n': + k, v = header_line.split(':', 1) + headers[k.lower()] = v.strip() + + def streaming_callback(chunk): + # All header callbacks are run before any streaming callbacks, + # so the header data is available to process the data as it + # comes in. + self.assertEqual(headers['content-type'], 'text/html; charset=UTF-8') + chunks.append(chunk) + + self.fetch('/chunk', header_callback=header_callback, + streaming_callback=streaming_callback) + self.assertEqual(len(first_line), 1, first_line) + self.assertRegexpMatches(first_line[0], 'HTTP/[0-9]\\.[0-9] 200.*\r\n') + self.assertEqual(chunks, [b'asdf', b'qwer']) + + def test_header_callback_stack_context(self): + exc_info = [] + + def error_handler(typ, value, tb): + exc_info.append((typ, value, tb)) + return True + + def header_callback(header_line): + if header_line.lower().startswith('content-type:'): + 1 / 0 + + with ExceptionStackContext(error_handler): + self.fetch('/chunk', header_callback=header_callback) + self.assertEqual(len(exc_info), 1) + self.assertIs(exc_info[0][0], ZeroDivisionError) + + def test_configure_defaults(self): + defaults = dict(user_agent='TestDefaultUserAgent', allow_ipv6=False) + # Construct a new instance of the configured client class + client = self.http_client.__class__(self.io_loop, force_instance=True, + defaults=defaults) + try: + client.fetch(self.get_url('/user_agent'), callback=self.stop) + response = self.wait() + self.assertEqual(response.body, 
b'TestDefaultUserAgent') + finally: + client.close() + + def test_header_types(self): + # Header values may be passed as character or utf8 byte strings, + # in a plain dictionary or an HTTPHeaders object. + # Keys must always be the native str type. + # All combinations should have the same results on the wire. + for value in [u("MyUserAgent"), b"MyUserAgent"]: + for container in [dict, HTTPHeaders]: + headers = container() + headers['User-Agent'] = value + resp = self.fetch('/user_agent', headers=headers) + self.assertEqual( + resp.body, b"MyUserAgent", + "response=%r, value=%r, container=%r" % + (resp.body, value, container)) + + def test_multi_line_headers(self): + # Multi-line http headers are rare but rfc-allowed + # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2 + sock, port = bind_unused_port() + with closing(sock): + def write_response(stream, request_data): + if b"HTTP/1." not in request_data: + self.skipTest("requires HTTP/1.x") + stream.write(b"""\ +HTTP/1.1 200 OK +X-XSS-Protection: 1; +\tmode=block + +""".replace(b"\n", b"\r\n"), callback=stream.close) + + def accept_callback(conn, address): + stream = IOStream(conn, io_loop=self.io_loop) + stream.read_until(b"\r\n\r\n", + functools.partial(write_response, stream)) + netutil.add_accept_handler(sock, accept_callback, self.io_loop) + self.http_client.fetch("http://127.0.0.1:%d/" % port, self.stop) + resp = self.wait() + resp.rethrow() + self.assertEqual(resp.headers['X-XSS-Protection'], "1; mode=block") + self.io_loop.remove_handler(sock.fileno()) + + def test_304_with_content_length(self): + # According to the spec 304 responses SHOULD NOT include + # Content-Length or other entity headers, but some servers do it + # anyway. 
+ # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5 + response = self.fetch('/304_with_content_length') + self.assertEqual(response.code, 304) + self.assertEqual(response.headers['Content-Length'], '42') + + def test_final_callback_stack_context(self): + # The final callback should be run outside of the httpclient's + # stack_context. We want to ensure that there is not stack_context + # between the user's callback and the IOLoop, so monkey-patch + # IOLoop.handle_callback_exception and disable the test harness's + # context with a NullContext. + # Note that this does not apply to secondary callbacks (header + # and streaming_callback), as errors there must be seen as errors + # by the http client so it can clean up the connection. + exc_info = [] + + def handle_callback_exception(callback): + exc_info.append(sys.exc_info()) + self.stop() + self.io_loop.handle_callback_exception = handle_callback_exception + with NullContext(): + self.http_client.fetch(self.get_url('/hello'), + lambda response: 1 / 0) + self.wait() + self.assertEqual(exc_info[0][0], ZeroDivisionError) + + @gen_test + def test_future_interface(self): + response = yield self.http_client.fetch(self.get_url('/hello')) + self.assertEqual(response.body, b'Hello world!') + + @gen_test + def test_future_http_error(self): + with self.assertRaises(HTTPError) as context: + yield self.http_client.fetch(self.get_url('/notfound')) + self.assertEqual(context.exception.code, 404) + self.assertEqual(context.exception.response.code, 404) + + @gen_test + def test_future_http_error_no_raise(self): + response = yield self.http_client.fetch(self.get_url('/notfound'), raise_error=False) + self.assertEqual(response.code, 404) + + @gen_test + def test_reuse_request_from_response(self): + # The response.request attribute should be an HTTPRequest, not + # a _RequestProxy. + # This test uses self.http_client.fetch because self.fetch calls + # self.get_url on the input unconditionally. 
+ url = self.get_url('/hello') + response = yield self.http_client.fetch(url) + self.assertEqual(response.request.url, url) + self.assertTrue(isinstance(response.request, HTTPRequest)) + response2 = yield self.http_client.fetch(response.request) + self.assertEqual(response2.body, b'Hello world!') + + def test_all_methods(self): + for method in ['GET', 'DELETE', 'OPTIONS']: + response = self.fetch('/all_methods', method=method) + self.assertEqual(response.body, utf8(method)) + for method in ['POST', 'PUT', 'PATCH']: + response = self.fetch('/all_methods', method=method, body=b'') + self.assertEqual(response.body, utf8(method)) + response = self.fetch('/all_methods', method='HEAD') + self.assertEqual(response.body, b'') + response = self.fetch('/all_methods', method='OTHER', + allow_nonstandard_methods=True) + self.assertEqual(response.body, b'OTHER') + + def test_body_sanity_checks(self): + # These methods require a body. + for method in ('POST', 'PUT', 'PATCH'): + with self.assertRaises(ValueError) as context: + resp = self.fetch('/all_methods', method=method) + resp.rethrow() + self.assertIn('must not be None', str(context.exception)) + + resp = self.fetch('/all_methods', method=method, + allow_nonstandard_methods=True) + self.assertEqual(resp.code, 200) + + # These methods don't allow a body. + for method in ('GET', 'DELETE', 'OPTIONS'): + with self.assertRaises(ValueError) as context: + resp = self.fetch('/all_methods', method=method, body=b'asdf') + resp.rethrow() + self.assertIn('must be None', str(context.exception)) + + # In most cases this can be overridden, but curl_httpclient + # does not allow body with a GET at all. 
+ if method != 'GET': + resp = self.fetch('/all_methods', method=method, body=b'asdf', + allow_nonstandard_methods=True) + resp.rethrow() + self.assertEqual(resp.code, 200) + + # This test causes odd failures with the combination of + # curl_httpclient (at least with the version of libcurl available + # on ubuntu 12.04), TwistedIOLoop, and epoll. For POST (but not PUT), + # curl decides the response came back too soon and closes the connection + # to start again. It does this *before* telling the socket callback to + # unregister the FD. Some IOLoop implementations have special kernel + # integration to discover this immediately. Tornado's IOLoops + # ignore errors on remove_handler to accommodate this behavior, but + # Twisted's reactor does not. The removeReader call fails and so + # do all future removeAll calls (which our tests do at cleanup). + # + # def test_post_307(self): + # response = self.fetch("/redirect?status=307&url=/post", + # method="POST", body=b"arg1=foo&arg2=bar") + # self.assertEqual(response.body, b"Post arg1: foo, arg2: bar") + + def test_put_307(self): + response = self.fetch("/redirect?status=307&url=/put", + method="PUT", body=b"hello") + response.rethrow() + self.assertEqual(response.body, b"Put body: hello") + + +class RequestProxyTest(unittest.TestCase): + def test_request_set(self): + proxy = _RequestProxy(HTTPRequest('http://example.com/', + user_agent='foo'), + dict()) + self.assertEqual(proxy.user_agent, 'foo') + + def test_default_set(self): + proxy = _RequestProxy(HTTPRequest('http://example.com/'), + dict(network_interface='foo')) + self.assertEqual(proxy.network_interface, 'foo') + + def test_both_set(self): + proxy = _RequestProxy(HTTPRequest('http://example.com/', + proxy_host='foo'), + dict(proxy_host='bar')) + self.assertEqual(proxy.proxy_host, 'foo') + + def test_neither_set(self): + proxy = _RequestProxy(HTTPRequest('http://example.com/'), + dict()) + self.assertIs(proxy.auth_username, None) + + def 
test_bad_attribute(self): + proxy = _RequestProxy(HTTPRequest('http://example.com/'), + dict()) + with self.assertRaises(AttributeError): + proxy.foo + + def test_defaults_none(self): + proxy = _RequestProxy(HTTPRequest('http://example.com/'), None) + self.assertIs(proxy.auth_username, None) + + +class HTTPResponseTestCase(unittest.TestCase): + def test_str(self): + response = HTTPResponse(HTTPRequest('http://example.com'), + 200, headers={}, buffer=BytesIO()) + s = str(response) + self.assertTrue(s.startswith('HTTPResponse(')) + self.assertIn('code=200', s) + + +class SyncHTTPClientTest(unittest.TestCase): + def setUp(self): + if IOLoop.configured_class().__name__ in ('TwistedIOLoop', + 'AsyncIOMainLoop'): + # TwistedIOLoop only supports the global reactor, so we can't have + # separate IOLoops for client and server threads. + # AsyncIOMainLoop doesn't work with the default policy + # (although it could with some tweaks to this test and a + # policy that created loops for non-main threads). + raise unittest.SkipTest( + 'Sync HTTPClient not compatible with TwistedIOLoop or ' + 'AsyncIOMainLoop') + self.server_ioloop = IOLoop() + + sock, self.port = bind_unused_port() + app = Application([('/', HelloWorldHandler)]) + self.server = HTTPServer(app, io_loop=self.server_ioloop) + self.server.add_socket(sock) + + self.server_thread = threading.Thread(target=self.server_ioloop.start) + self.server_thread.start() + + self.http_client = HTTPClient() + + def tearDown(self): + def stop_server(): + self.server.stop() + # Delay the shutdown of the IOLoop by one iteration because + # the server may still have some cleanup work left when + # the client finishes with the response (this is noticable + # with http/2, which leaves a Future with an unexamined + # StreamClosedError on the loop). 
+ self.server_ioloop.add_callback(self.server_ioloop.stop) + self.server_ioloop.add_callback(stop_server) + self.server_thread.join() + self.http_client.close() + self.server_ioloop.close(all_fds=True) + + def get_url(self, path): + return 'http://127.0.0.1:%d%s' % (self.port, path) + + def test_sync_client(self): + response = self.http_client.fetch(self.get_url('/')) + self.assertEqual(b'Hello world!', response.body) + + def test_sync_client_error(self): + # Synchronous HTTPClient raises errors directly; no need for + # response.rethrow() + with self.assertRaises(HTTPError) as assertion: + self.http_client.fetch(self.get_url('/notfound')) + self.assertEqual(assertion.exception.code, 404) + + +class HTTPRequestTestCase(unittest.TestCase): + def test_headers(self): + request = HTTPRequest('http://example.com', headers={'foo': 'bar'}) + self.assertEqual(request.headers, {'foo': 'bar'}) + + def test_headers_setter(self): + request = HTTPRequest('http://example.com') + request.headers = {'bar': 'baz'} + self.assertEqual(request.headers, {'bar': 'baz'}) + + def test_null_headers_setter(self): + request = HTTPRequest('http://example.com') + request.headers = None + self.assertEqual(request.headers, {}) + + def test_body(self): + request = HTTPRequest('http://example.com', body='foo') + self.assertEqual(request.body, utf8('foo')) + + def test_body_setter(self): + request = HTTPRequest('http://example.com') + request.body = 'foo' + self.assertEqual(request.body, utf8('foo')) + + def test_if_modified_since(self): + http_date = datetime.datetime.utcnow() + request = HTTPRequest('http://example.com', if_modified_since=http_date) + self.assertEqual(request.headers, + {'If-Modified-Since': format_timestamp(http_date)}) + + +class HTTPErrorTestCase(unittest.TestCase): + def test_copy(self): + e = HTTPError(403) + e2 = copy.copy(e) + self.assertIsNot(e, e2) + self.assertEqual(e.code, e2.code) + + def test_str(self): + e = HTTPError(403) + self.assertEqual(str(e), "HTTP 403: 
Forbidden") diff --git a/python/tornado/test/httpserver_test.py b/python/tornado/test/httpserver_test.py new file mode 100644 index 000000000..065f5b1fa --- /dev/null +++ b/python/tornado/test/httpserver_test.py @@ -0,0 +1,1094 @@ +#!/usr/bin/env python + + +from __future__ import absolute_import, division, print_function, with_statement +from tornado import netutil +from tornado.escape import json_decode, json_encode, utf8, _unicode, recursive_unicode, native_str +from tornado import gen +from tornado.http1connection import HTTP1Connection +from tornado.httpserver import HTTPServer +from tornado.httputil import HTTPHeaders, HTTPMessageDelegate, HTTPServerConnectionDelegate, ResponseStartLine +from tornado.iostream import IOStream +from tornado.log import gen_log +from tornado.netutil import ssl_options_to_context +from tornado.simple_httpclient import SimpleAsyncHTTPClient +from tornado.testing import AsyncHTTPTestCase, AsyncHTTPSTestCase, AsyncTestCase, ExpectLog, gen_test +from tornado.test.util import unittest, skipOnTravis +from tornado.util import u +from tornado.web import Application, RequestHandler, asynchronous, stream_request_body +from contextlib import closing +import datetime +import gzip +import os +import shutil +import socket +import ssl +import sys +import tempfile +from io import BytesIO + + +def read_stream_body(stream, callback): + """Reads an HTTP response from `stream` and runs callback with its + headers and body.""" + chunks = [] + + class Delegate(HTTPMessageDelegate): + def headers_received(self, start_line, headers): + self.headers = headers + + def data_received(self, chunk): + chunks.append(chunk) + + def finish(self): + callback((self.headers, b''.join(chunks))) + conn = HTTP1Connection(stream, True) + conn.read_response(Delegate()) + + +class HandlerBaseTestCase(AsyncHTTPTestCase): + def get_app(self): + return Application([('/', self.__class__.Handler)]) + + def fetch_json(self, *args, **kwargs): + response = self.fetch(*args, 
**kwargs) + response.rethrow() + return json_decode(response.body) + + +class HelloWorldRequestHandler(RequestHandler): + def initialize(self, protocol="http"): + self.expected_protocol = protocol + + def get(self): + if self.request.protocol != self.expected_protocol: + raise Exception("unexpected protocol") + self.finish("Hello world") + + def post(self): + self.finish("Got %d bytes in POST" % len(self.request.body)) + + +# In pre-1.0 versions of openssl, SSLv23 clients always send SSLv2 +# ClientHello messages, which are rejected by SSLv3 and TLSv1 +# servers. Note that while the OPENSSL_VERSION_INFO was formally +# introduced in python3.2, it was present but undocumented in +# python 2.7 +skipIfOldSSL = unittest.skipIf( + getattr(ssl, 'OPENSSL_VERSION_INFO', (0, 0)) < (1, 0), + "old version of ssl module and/or openssl") + + +class BaseSSLTest(AsyncHTTPSTestCase): + def get_app(self): + return Application([('/', HelloWorldRequestHandler, + dict(protocol="https"))]) + + +class SSLTestMixin(object): + def get_ssl_options(self): + return dict(ssl_version=self.get_ssl_version(), + **AsyncHTTPSTestCase.get_ssl_options()) + + def get_ssl_version(self): + raise NotImplementedError() + + def test_ssl(self): + response = self.fetch('/') + self.assertEqual(response.body, b"Hello world") + + def test_large_post(self): + response = self.fetch('/', + method='POST', + body='A' * 5000) + self.assertEqual(response.body, b"Got 5000 bytes in POST") + + def test_non_ssl_request(self): + # Make sure the server closes the connection when it gets a non-ssl + # connection, rather than waiting for a timeout or otherwise + # misbehaving. 
+ with ExpectLog(gen_log, '(SSL Error|uncaught exception)'): + with ExpectLog(gen_log, 'Uncaught exception', required=False): + self.http_client.fetch( + self.get_url("/").replace('https:', 'http:'), + self.stop, + request_timeout=3600, + connect_timeout=3600) + response = self.wait() + self.assertEqual(response.code, 599) + + def test_error_logging(self): + # No stack traces are logged for SSL errors. + with ExpectLog(gen_log, 'SSL Error') as expect_log: + self.http_client.fetch( + self.get_url("/").replace("https:", "http:"), + self.stop) + response = self.wait() + self.assertEqual(response.code, 599) + self.assertFalse(expect_log.logged_stack) + +# Python's SSL implementation differs significantly between versions. +# For example, SSLv3 and TLSv1 throw an exception if you try to read +# from the socket before the handshake is complete, but the default +# of SSLv23 allows it. + + +class SSLv23Test(BaseSSLTest, SSLTestMixin): + def get_ssl_version(self): + return ssl.PROTOCOL_SSLv23 + + +@skipIfOldSSL +class SSLv3Test(BaseSSLTest, SSLTestMixin): + def get_ssl_version(self): + return ssl.PROTOCOL_SSLv3 + + +@skipIfOldSSL +class TLSv1Test(BaseSSLTest, SSLTestMixin): + def get_ssl_version(self): + return ssl.PROTOCOL_TLSv1 + + +@unittest.skipIf(not hasattr(ssl, 'SSLContext'), 'ssl.SSLContext not present') +class SSLContextTest(BaseSSLTest, SSLTestMixin): + def get_ssl_options(self): + context = ssl_options_to_context( + AsyncHTTPSTestCase.get_ssl_options(self)) + assert isinstance(context, ssl.SSLContext) + return context + + +class BadSSLOptionsTest(unittest.TestCase): + def test_missing_arguments(self): + application = Application() + self.assertRaises(KeyError, HTTPServer, application, ssl_options={ + "keyfile": "/__missing__.crt", + }) + + def test_missing_key(self): + """A missing SSL key should cause an immediate exception.""" + + application = Application() + module_dir = os.path.dirname(__file__) + existing_certificate = os.path.join(module_dir, 'test.crt') + 
existing_key = os.path.join(module_dir, 'test.key') + + self.assertRaises((ValueError, IOError), + HTTPServer, application, ssl_options={ + "certfile": "/__mising__.crt", + }) + self.assertRaises((ValueError, IOError), + HTTPServer, application, ssl_options={ + "certfile": existing_certificate, + "keyfile": "/__missing__.key" + }) + + # This actually works because both files exist + HTTPServer(application, ssl_options={ + "certfile": existing_certificate, + "keyfile": existing_key, + }) + + +class MultipartTestHandler(RequestHandler): + def post(self): + self.finish({"header": self.request.headers["X-Header-Encoding-Test"], + "argument": self.get_argument("argument"), + "filename": self.request.files["files"][0].filename, + "filebody": _unicode(self.request.files["files"][0]["body"]), + }) + + +# This test is also called from wsgi_test +class HTTPConnectionTest(AsyncHTTPTestCase): + def get_handlers(self): + return [("/multipart", MultipartTestHandler), + ("/hello", HelloWorldRequestHandler)] + + def get_app(self): + return Application(self.get_handlers()) + + def raw_fetch(self, headers, body, newline=b"\r\n"): + with closing(IOStream(socket.socket())) as stream: + stream.connect(('127.0.0.1', self.get_http_port()), self.stop) + self.wait() + stream.write( + newline.join(headers + + [utf8("Content-Length: %d" % len(body))]) + + newline + newline + body) + read_stream_body(stream, self.stop) + headers, body = self.wait() + return body + + def test_multipart_form(self): + # Encodings here are tricky: Headers are latin1, bodies can be + # anything (we use utf8 by default). 
+ response = self.raw_fetch([ + b"POST /multipart HTTP/1.0", + b"Content-Type: multipart/form-data; boundary=1234567890", + b"X-Header-encoding-test: \xe9", + ], + b"\r\n".join([ + b"Content-Disposition: form-data; name=argument", + b"", + u("\u00e1").encode("utf-8"), + b"--1234567890", + u('Content-Disposition: form-data; name="files"; filename="\u00f3"').encode("utf8"), + b"", + u("\u00fa").encode("utf-8"), + b"--1234567890--", + b"", + ])) + data = json_decode(response) + self.assertEqual(u("\u00e9"), data["header"]) + self.assertEqual(u("\u00e1"), data["argument"]) + self.assertEqual(u("\u00f3"), data["filename"]) + self.assertEqual(u("\u00fa"), data["filebody"]) + + def test_newlines(self): + # We support both CRLF and bare LF as line separators. + for newline in (b"\r\n", b"\n"): + response = self.raw_fetch([b"GET /hello HTTP/1.0"], b"", + newline=newline) + self.assertEqual(response, b'Hello world') + + def test_100_continue(self): + # Run through a 100-continue interaction by hand: + # When given Expect: 100-continue, we get a 100 response after the + # headers, and then the real response after the body. 
+ stream = IOStream(socket.socket(), io_loop=self.io_loop) + stream.connect(("127.0.0.1", self.get_http_port()), callback=self.stop) + self.wait() + stream.write(b"\r\n".join([b"POST /hello HTTP/1.1", + b"Content-Length: 1024", + b"Expect: 100-continue", + b"Connection: close", + b"\r\n"]), callback=self.stop) + self.wait() + stream.read_until(b"\r\n\r\n", self.stop) + data = self.wait() + self.assertTrue(data.startswith(b"HTTP/1.1 100 "), data) + stream.write(b"a" * 1024) + stream.read_until(b"\r\n", self.stop) + first_line = self.wait() + self.assertTrue(first_line.startswith(b"HTTP/1.1 200"), first_line) + stream.read_until(b"\r\n\r\n", self.stop) + header_data = self.wait() + headers = HTTPHeaders.parse(native_str(header_data.decode('latin1'))) + stream.read_bytes(int(headers["Content-Length"]), self.stop) + body = self.wait() + self.assertEqual(body, b"Got 1024 bytes in POST") + stream.close() + + +class EchoHandler(RequestHandler): + def get(self): + self.write(recursive_unicode(self.request.arguments)) + + def post(self): + self.write(recursive_unicode(self.request.arguments)) + + +class TypeCheckHandler(RequestHandler): + def prepare(self): + self.errors = {} + fields = [ + ('method', str), + ('uri', str), + ('version', str), + ('remote_ip', str), + ('protocol', str), + ('host', str), + ('path', str), + ('query', str), + ] + for field, expected_type in fields: + self.check_type(field, getattr(self.request, field), expected_type) + + self.check_type('header_key', list(self.request.headers.keys())[0], str) + self.check_type('header_value', list(self.request.headers.values())[0], str) + + self.check_type('cookie_key', list(self.request.cookies.keys())[0], str) + self.check_type('cookie_value', list(self.request.cookies.values())[0].value, str) + # secure cookies + + self.check_type('arg_key', list(self.request.arguments.keys())[0], str) + self.check_type('arg_value', list(self.request.arguments.values())[0][0], bytes) + + def post(self): + 
self.check_type('body', self.request.body, bytes) + self.write(self.errors) + + def get(self): + self.write(self.errors) + + def check_type(self, name, obj, expected_type): + actual_type = type(obj) + if expected_type != actual_type: + self.errors[name] = "expected %s, got %s" % (expected_type, + actual_type) + + +class HTTPServerTest(AsyncHTTPTestCase): + def get_app(self): + return Application([("/echo", EchoHandler), + ("/typecheck", TypeCheckHandler), + ("//doubleslash", EchoHandler), + ]) + + def test_query_string_encoding(self): + response = self.fetch("/echo?foo=%C3%A9") + data = json_decode(response.body) + self.assertEqual(data, {u("foo"): [u("\u00e9")]}) + + def test_empty_query_string(self): + response = self.fetch("/echo?foo=&foo=") + data = json_decode(response.body) + self.assertEqual(data, {u("foo"): [u(""), u("")]}) + + def test_empty_post_parameters(self): + response = self.fetch("/echo", method="POST", body="foo=&bar=") + data = json_decode(response.body) + self.assertEqual(data, {u("foo"): [u("")], u("bar"): [u("")]}) + + def test_types(self): + headers = {"Cookie": "foo=bar"} + response = self.fetch("/typecheck?foo=bar", headers=headers) + data = json_decode(response.body) + self.assertEqual(data, {}) + + response = self.fetch("/typecheck", method="POST", body="foo=bar", headers=headers) + data = json_decode(response.body) + self.assertEqual(data, {}) + + def test_double_slash(self): + # urlparse.urlsplit (which tornado.httpserver used to use + # incorrectly) would parse paths beginning with "//" as + # protocol-relative urls. + response = self.fetch("//doubleslash") + self.assertEqual(200, response.code) + self.assertEqual(json_decode(response.body), {}) + + def test_malformed_body(self): + # parse_qs is pretty forgiving, but it will fail on python 3 + # if the data is not utf8. On python 2 parse_qs will work, + # but then the recursive_unicode call in EchoHandler will + # fail. 
+ if str is bytes: + return + with ExpectLog(gen_log, 'Invalid x-www-form-urlencoded body'): + response = self.fetch( + '/echo', method="POST", + headers={'Content-Type': 'application/x-www-form-urlencoded'}, + body=b'\xe9') + self.assertEqual(200, response.code) + self.assertEqual(b'{}', response.body) + + +class HTTPServerRawTest(AsyncHTTPTestCase): + def get_app(self): + return Application([ + ('/echo', EchoHandler), + ]) + + def setUp(self): + super(HTTPServerRawTest, self).setUp() + self.stream = IOStream(socket.socket()) + self.stream.connect(('127.0.0.1', self.get_http_port()), self.stop) + self.wait() + + def tearDown(self): + self.stream.close() + super(HTTPServerRawTest, self).tearDown() + + def test_empty_request(self): + self.stream.close() + self.io_loop.add_timeout(datetime.timedelta(seconds=0.001), self.stop) + self.wait() + + def test_malformed_first_line(self): + with ExpectLog(gen_log, '.*Malformed HTTP request line'): + self.stream.write(b'asdf\r\n\r\n') + # TODO: need an async version of ExpectLog so we don't need + # hard-coded timeouts here. + self.io_loop.add_timeout(datetime.timedelta(seconds=0.01), + self.stop) + self.wait() + + def test_malformed_headers(self): + with ExpectLog(gen_log, '.*Malformed HTTP headers'): + self.stream.write(b'GET / HTTP/1.0\r\nasdf\r\n\r\n') + self.io_loop.add_timeout(datetime.timedelta(seconds=0.01), + self.stop) + self.wait() + + def test_chunked_request_body(self): + # Chunked requests are not widely supported and we don't have a way + # to generate them in AsyncHTTPClient, but HTTPServer will read them. 
+ self.stream.write(b"""\ +POST /echo HTTP/1.1 +Transfer-Encoding: chunked +Content-Type: application/x-www-form-urlencoded + +4 +foo= +3 +bar +0 + +""".replace(b"\n", b"\r\n")) + read_stream_body(self.stream, self.stop) + headers, response = self.wait() + self.assertEqual(json_decode(response), {u('foo'): [u('bar')]}) + + +class XHeaderTest(HandlerBaseTestCase): + class Handler(RequestHandler): + def get(self): + self.write(dict(remote_ip=self.request.remote_ip, + remote_protocol=self.request.protocol)) + + def get_httpserver_options(self): + return dict(xheaders=True) + + def test_ip_headers(self): + self.assertEqual(self.fetch_json("/")["remote_ip"], "127.0.0.1") + + valid_ipv4 = {"X-Real-IP": "4.4.4.4"} + self.assertEqual( + self.fetch_json("/", headers=valid_ipv4)["remote_ip"], + "4.4.4.4") + + valid_ipv4_list = {"X-Forwarded-For": "127.0.0.1, 4.4.4.4"} + self.assertEqual( + self.fetch_json("/", headers=valid_ipv4_list)["remote_ip"], + "4.4.4.4") + + valid_ipv6 = {"X-Real-IP": "2620:0:1cfe:face:b00c::3"} + self.assertEqual( + self.fetch_json("/", headers=valid_ipv6)["remote_ip"], + "2620:0:1cfe:face:b00c::3") + + valid_ipv6_list = {"X-Forwarded-For": "::1, 2620:0:1cfe:face:b00c::3"} + self.assertEqual( + self.fetch_json("/", headers=valid_ipv6_list)["remote_ip"], + "2620:0:1cfe:face:b00c::3") + + invalid_chars = {"X-Real-IP": "4.4.4.4 + +' + for p in paths) + sloc = html.rindex(b'') + html = html[:sloc] + utf8(js) + b'\n' + html[sloc:] + if js_embed: + js = b'' + sloc = html.rindex(b'') + html = html[:sloc] + js + b'\n' + html[sloc:] + if css_files: + paths = [] + unique_paths = set() + for path in css_files: + if not is_absolute(path): + path = self.static_url(path) + if path not in unique_paths: + paths.append(path) + unique_paths.add(path) + css = ''.join('' + for p in paths) + hloc = html.index(b'') + html = html[:hloc] + utf8(css) + b'\n' + html[hloc:] + if css_embed: + css = b'' + hloc = html.index(b'') + html = html[:hloc] + css + b'\n' + html[hloc:] + 
if html_heads: + hloc = html.index(b'') + html = html[:hloc] + b''.join(html_heads) + b'\n' + html[hloc:] + if html_bodies: + hloc = html.index(b'') + html = html[:hloc] + b''.join(html_bodies) + b'\n' + html[hloc:] + self.finish(html) + + def render_string(self, template_name, **kwargs): + """Generate the given template with the given arguments. + + We return the generated byte string (in utf8). To generate and + write a template as a response, use render() above. + """ + # If no template_path is specified, use the path of the calling file + template_path = self.get_template_path() + if not template_path: + frame = sys._getframe(0) + web_file = frame.f_code.co_filename + while frame.f_code.co_filename == web_file: + frame = frame.f_back + template_path = os.path.dirname(frame.f_code.co_filename) + with RequestHandler._template_loader_lock: + if template_path not in RequestHandler._template_loaders: + loader = self.create_template_loader(template_path) + RequestHandler._template_loaders[template_path] = loader + else: + loader = RequestHandler._template_loaders[template_path] + t = loader.load(template_name) + namespace = self.get_template_namespace() + namespace.update(kwargs) + return t.generate(**namespace) + + def get_template_namespace(self): + """Returns a dictionary to be used as the default template namespace. + + May be overridden by subclasses to add or modify values. + + The results of this method will be combined with additional + defaults in the `tornado.template` module and keyword arguments + to `render` or `render_string`. 
+ """ + namespace = dict( + handler=self, + request=self.request, + current_user=self.current_user, + locale=self.locale, + _=self.locale.translate, + pgettext=self.locale.pgettext, + static_url=self.static_url, + xsrf_form_html=self.xsrf_form_html, + reverse_url=self.reverse_url + ) + namespace.update(self.ui) + return namespace + + def create_template_loader(self, template_path): + """Returns a new template loader for the given path. + + May be overridden by subclasses. By default returns a + directory-based loader on the given path, using the + ``autoescape`` and ``template_whitespace`` application + settings. If a ``template_loader`` application setting is + supplied, uses that instead. + """ + settings = self.application.settings + if "template_loader" in settings: + return settings["template_loader"] + kwargs = {} + if "autoescape" in settings: + # autoescape=None means "no escaping", so we have to be sure + # to only pass this kwarg if the user asked for it. + kwargs["autoescape"] = settings["autoescape"] + if "template_whitespace" in settings: + kwargs["whitespace"] = settings["template_whitespace"] + return template.Loader(template_path, **kwargs) + + def flush(self, include_footers=False, callback=None): + """Flushes the current output buffer to the network. + + The ``callback`` argument, if given, can be used for flow control: + it will be run when all flushed data has been written to the socket. + Note that only one flush callback can be outstanding at a time; + if another flush occurs before the previous flush's callback + has been run, the previous callback will be discarded. + + .. versionchanged:: 4.0 + Now returns a `.Future` if no callback is given. 
+ """ + chunk = b"".join(self._write_buffer) + self._write_buffer = [] + if not self._headers_written: + self._headers_written = True + for transform in self._transforms: + self._status_code, self._headers, chunk = \ + transform.transform_first_chunk( + self._status_code, self._headers, + chunk, include_footers) + # Ignore the chunk and only write the headers for HEAD requests + if self.request.method == "HEAD": + chunk = None + + # Finalize the cookie headers (which have been stored in a side + # object so an outgoing cookie could be overwritten before it + # is sent). + if hasattr(self, "_new_cookie"): + for cookie in self._new_cookie.values(): + self.add_header("Set-Cookie", cookie.OutputString(None)) + + start_line = httputil.ResponseStartLine('', + self._status_code, + self._reason) + return self.request.connection.write_headers( + start_line, self._headers, chunk, callback=callback) + else: + for transform in self._transforms: + chunk = transform.transform_chunk(chunk, include_footers) + # Ignore the chunk and only write the headers for HEAD requests + if self.request.method != "HEAD": + return self.request.connection.write(chunk, callback=callback) + else: + future = Future() + future.set_result(None) + return future + + def finish(self, chunk=None): + """Finishes this response, ending the HTTP request.""" + if self._finished: + raise RuntimeError("finish() called twice") + + if chunk is not None: + self.write(chunk) + + # Automatically support ETags and add the Content-Length header if + # we have not flushed any content yet. 
+ if not self._headers_written: + if (self._status_code == 200 and + self.request.method in ("GET", "HEAD") and + "Etag" not in self._headers): + self.set_etag_header() + if self.check_etag_header(): + self._write_buffer = [] + self.set_status(304) + if self._status_code == 304: + assert not self._write_buffer, "Cannot send body with 304" + self._clear_headers_for_304() + elif "Content-Length" not in self._headers: + content_length = sum(len(part) for part in self._write_buffer) + self.set_header("Content-Length", content_length) + + if hasattr(self.request, "connection"): + # Now that the request is finished, clear the callback we + # set on the HTTPConnection (which would otherwise prevent the + # garbage collection of the RequestHandler when there + # are keepalive connections) + self.request.connection.set_close_callback(None) + + self.flush(include_footers=True) + self.request.finish() + self._log() + self._finished = True + self.on_finish() + # Break up a reference cycle between this handler and the + # _ui_module closures to allow for faster GC on CPython. + self.ui = None + + def send_error(self, status_code=500, **kwargs): + """Sends the given HTTP error code to the browser. + + If `flush()` has already been called, it is not possible to send + an error, so this method will simply terminate the response. + If output has been written but not yet flushed, it will be discarded + and replaced with the error page. + + Override `write_error()` to customize the error page that is returned. + Additional keyword arguments are passed through to `write_error`. + """ + if self._headers_written: + gen_log.error("Cannot send error response after headers written") + if not self._finished: + # If we get an error between writing headers and finishing, + # we are unlikely to be able to finish due to a + # Content-Length mismatch. Try anyway to release the + # socket. 
+ try: + self.finish() + except Exception: + gen_log.error("Failed to flush partial response", + exc_info=True) + return + self.clear() + + reason = kwargs.get('reason') + if 'exc_info' in kwargs: + exception = kwargs['exc_info'][1] + if isinstance(exception, HTTPError) and exception.reason: + reason = exception.reason + self.set_status(status_code, reason=reason) + try: + self.write_error(status_code, **kwargs) + except Exception: + app_log.error("Uncaught exception in write_error", exc_info=True) + if not self._finished: + self.finish() + + def write_error(self, status_code, **kwargs): + """Override to implement custom error pages. + + ``write_error`` may call `write`, `render`, `set_header`, etc + to produce output as usual. + + If this error was caused by an uncaught exception (including + HTTPError), an ``exc_info`` triple will be available as + ``kwargs["exc_info"]``. Note that this exception may not be + the "current" exception for purposes of methods like + ``sys.exc_info()`` or ``traceback.format_exc``. + """ + if self.settings.get("serve_traceback") and "exc_info" in kwargs: + # in debug mode, try to send a traceback + self.set_header('Content-Type', 'text/plain') + for line in traceback.format_exception(*kwargs["exc_info"]): + self.write(line) + self.finish() + else: + self.finish("%(code)d: %(message)s" + "%(code)d: %(message)s" % { + "code": status_code, + "message": self._reason, + }) + + @property + def locale(self): + """The locale for the current session. + + Determined by either `get_user_locale`, which you can override to + set the locale based on, e.g., a user preference stored in a + database, or `get_browser_locale`, which uses the ``Accept-Language`` + header. + + .. versionchanged: 4.1 + Added a property setter. 
+ """ + if not hasattr(self, "_locale"): + self._locale = self.get_user_locale() + if not self._locale: + self._locale = self.get_browser_locale() + assert self._locale + return self._locale + + @locale.setter + def locale(self, value): + self._locale = value + + def get_user_locale(self): + """Override to determine the locale from the authenticated user. + + If None is returned, we fall back to `get_browser_locale()`. + + This method should return a `tornado.locale.Locale` object, + most likely obtained via a call like ``tornado.locale.get("en")`` + """ + return None + + def get_browser_locale(self, default="en_US"): + """Determines the user's locale from ``Accept-Language`` header. + + See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.4 + """ + if "Accept-Language" in self.request.headers: + languages = self.request.headers["Accept-Language"].split(",") + locales = [] + for language in languages: + parts = language.strip().split(";") + if len(parts) > 1 and parts[1].startswith("q="): + try: + score = float(parts[1][2:]) + except (ValueError, TypeError): + score = 0.0 + else: + score = 1.0 + locales.append((parts[0], score)) + if locales: + locales.sort(key=lambda pair: pair[1], reverse=True) + codes = [l[0] for l in locales] + return locale.get(*codes) + return locale.get(default) + + @property + def current_user(self): + """The authenticated user for this request. + + This is set in one of two ways: + + * A subclass may override `get_current_user()`, which will be called + automatically the first time ``self.current_user`` is accessed. 
+ `get_current_user()` will only be called once per request, + and is cached for future access:: + + def get_current_user(self): + user_cookie = self.get_secure_cookie("user") + if user_cookie: + return json.loads(user_cookie) + return None + + * It may be set as a normal variable, typically from an overridden + `prepare()`:: + + @gen.coroutine + def prepare(self): + user_id_cookie = self.get_secure_cookie("user_id") + if user_id_cookie: + self.current_user = yield load_user(user_id_cookie) + + Note that `prepare()` may be a coroutine while `get_current_user()` + may not, so the latter form is necessary if loading the user requires + asynchronous operations. + + The user object may any type of the application's choosing. + """ + if not hasattr(self, "_current_user"): + self._current_user = self.get_current_user() + return self._current_user + + @current_user.setter + def current_user(self, value): + self._current_user = value + + def get_current_user(self): + """Override to determine the current user from, e.g., a cookie. + + This method may not be a coroutine. + """ + return None + + def get_login_url(self): + """Override to customize the login URL based on the request. + + By default, we use the ``login_url`` application setting. + """ + self.require_setting("login_url", "@tornado.web.authenticated") + return self.application.settings["login_url"] + + def get_template_path(self): + """Override to customize template path for each handler. + + By default, we use the ``template_path`` application setting. + Return None to load templates relative to the calling file. + """ + return self.application.settings.get("template_path") + + @property + def xsrf_token(self): + """The XSRF-prevention token for the current user/session. + + To prevent cross-site request forgery, we set an '_xsrf' cookie + and include the same '_xsrf' value as an argument with all POST + requests. If the two do not match, we reject the form submission + as a potential forgery. 
+ + See http://en.wikipedia.org/wiki/Cross-site_request_forgery + + .. versionchanged:: 3.2.2 + The xsrf token will now be have a random mask applied in every + request, which makes it safe to include the token in pages + that are compressed. See http://breachattack.com for more + information on the issue fixed by this change. Old (version 1) + cookies will be converted to version 2 when this method is called + unless the ``xsrf_cookie_version`` `Application` setting is + set to 1. + + .. versionchanged:: 4.3 + The ``xsrf_cookie_kwargs`` `Application` setting may be + used to supply additional cookie options (which will be + passed directly to `set_cookie`). For example, + ``xsrf_cookie_kwargs=dict(httponly=True, secure=True)`` + will set the ``secure`` and ``httponly`` flags on the + ``_xsrf`` cookie. + """ + if not hasattr(self, "_xsrf_token"): + version, token, timestamp = self._get_raw_xsrf_token() + output_version = self.settings.get("xsrf_cookie_version", 2) + cookie_kwargs = self.settings.get("xsrf_cookie_kwargs", {}) + if output_version == 1: + self._xsrf_token = binascii.b2a_hex(token) + elif output_version == 2: + mask = os.urandom(4) + self._xsrf_token = b"|".join([ + b"2", + binascii.b2a_hex(mask), + binascii.b2a_hex(_websocket_mask(mask, token)), + utf8(str(int(timestamp)))]) + else: + raise ValueError("unknown xsrf cookie version %d", + output_version) + if version is None: + expires_days = 30 if self.current_user else None + self.set_cookie("_xsrf", self._xsrf_token, + expires_days=expires_days, + **cookie_kwargs) + return self._xsrf_token + + def _get_raw_xsrf_token(self): + """Read or generate the xsrf token in its raw form. + + The raw_xsrf_token is a tuple containing: + + * version: the version of the cookie from which this token was read, + or None if we generated a new token in this request. + * token: the raw token data; random (non-ascii) bytes. 
+ * timestamp: the time this token was generated (will not be accurate + for version 1 cookies) + """ + if not hasattr(self, '_raw_xsrf_token'): + cookie = self.get_cookie("_xsrf") + if cookie: + version, token, timestamp = self._decode_xsrf_token(cookie) + else: + version, token, timestamp = None, None, None + if token is None: + version = None + token = os.urandom(16) + timestamp = time.time() + self._raw_xsrf_token = (version, token, timestamp) + return self._raw_xsrf_token + + def _decode_xsrf_token(self, cookie): + """Convert a cookie string into a the tuple form returned by + _get_raw_xsrf_token. + """ + + try: + m = _signed_value_version_re.match(utf8(cookie)) + + if m: + version = int(m.group(1)) + if version == 2: + _, mask, masked_token, timestamp = cookie.split("|") + + mask = binascii.a2b_hex(utf8(mask)) + token = _websocket_mask( + mask, binascii.a2b_hex(utf8(masked_token))) + timestamp = int(timestamp) + return version, token, timestamp + else: + # Treat unknown versions as not present instead of failing. + raise Exception("Unknown xsrf cookie version") + else: + version = 1 + try: + token = binascii.a2b_hex(utf8(cookie)) + except (binascii.Error, TypeError): + token = utf8(cookie) + # We don't have a usable timestamp in older versions. + timestamp = int(time.time()) + return (version, token, timestamp) + except Exception: + # Catch exceptions and return nothing instead of failing. + gen_log.debug("Uncaught exception in _decode_xsrf_token", + exc_info=True) + return None, None, None + + def check_xsrf_cookie(self): + """Verifies that the ``_xsrf`` cookie matches the ``_xsrf`` argument. + + To prevent cross-site request forgery, we set an ``_xsrf`` + cookie and include the same value as a non-cookie + field with all ``POST`` requests. If the two do not match, we + reject the form submission as a potential forgery. 
+ + The ``_xsrf`` value may be set as either a form field named ``_xsrf`` + or in a custom HTTP header named ``X-XSRFToken`` or ``X-CSRFToken`` + (the latter is accepted for compatibility with Django). + + See http://en.wikipedia.org/wiki/Cross-site_request_forgery + + Prior to release 1.1.1, this check was ignored if the HTTP header + ``X-Requested-With: XMLHTTPRequest`` was present. This exception + has been shown to be insecure and has been removed. For more + information please see + http://www.djangoproject.com/weblog/2011/feb/08/security/ + http://weblog.rubyonrails.org/2011/2/8/csrf-protection-bypass-in-ruby-on-rails + + .. versionchanged:: 3.2.2 + Added support for cookie version 2. Both versions 1 and 2 are + supported. + """ + token = (self.get_argument("_xsrf", None) or + self.request.headers.get("X-Xsrftoken") or + self.request.headers.get("X-Csrftoken")) + if not token: + raise HTTPError(403, "'_xsrf' argument missing from POST") + _, token, _ = self._decode_xsrf_token(token) + _, expected_token, _ = self._get_raw_xsrf_token() + if not _time_independent_equals(utf8(token), utf8(expected_token)): + raise HTTPError(403, "XSRF cookie does not match POST argument") + + def xsrf_form_html(self): + """An HTML ```` element to be included with all POST forms. + + It defines the ``_xsrf`` input value, which we check on all POST + requests to prevent cross-site request forgery. If you have set + the ``xsrf_cookies`` application setting, you must include this + HTML within all of your HTML forms. + + In a template, this method should be called with ``{% module + xsrf_form_html() %}`` + + See `check_xsrf_cookie()` above for more information. + """ + return '' + + def static_url(self, path, include_host=None, **kwargs): + """Returns a static URL for the given relative static file path. + + This method requires you set the ``static_path`` setting in your + application (which specifies the root directory of your static + files). 
+ + This method returns a versioned url (by default appending + ``?v=``), which allows the static files to be + cached indefinitely. This can be disabled by passing + ``include_version=False`` (in the default implementation; + other static file implementations are not required to support + this, but they may support other options). + + By default this method returns URLs relative to the current + host, but if ``include_host`` is true the URL returned will be + absolute. If this handler has an ``include_host`` attribute, + that value will be used as the default for all `static_url` + calls that do not pass ``include_host`` as a keyword argument. + + """ + self.require_setting("static_path", "static_url") + get_url = self.settings.get("static_handler_class", + StaticFileHandler).make_static_url + + if include_host is None: + include_host = getattr(self, "include_host", False) + + if include_host: + base = self.request.protocol + "://" + self.request.host + else: + base = "" + + return base + get_url(self.settings, path, **kwargs) + + def require_setting(self, name, feature="this feature"): + """Raises an exception if the given app setting is not defined.""" + if not self.application.settings.get(name): + raise Exception("You must define the '%s' setting in your " + "application to use %s" % (name, feature)) + + def reverse_url(self, name, *args): + """Alias for `Application.reverse_url`.""" + return self.application.reverse_url(name, *args) + + def compute_etag(self): + """Computes the etag header to be used for this request. + + By default uses a hash of the content written so far. + + May be overridden to provide custom etag implementations, + or may return None to disable tornado's default etag support. + """ + hasher = hashlib.sha1() + for part in self._write_buffer: + hasher.update(part) + return '"%s"' % hasher.hexdigest() + + def set_etag_header(self): + """Sets the response's Etag header using ``self.compute_etag()``. 
+ + Note: no header will be set if ``compute_etag()`` returns ``None``. + + This method is called automatically when the request is finished. + """ + etag = self.compute_etag() + if etag is not None: + self.set_header("Etag", etag) + + def check_etag_header(self): + """Checks the ``Etag`` header against requests's ``If-None-Match``. + + Returns ``True`` if the request's Etag matches and a 304 should be + returned. For example:: + + self.set_etag_header() + if self.check_etag_header(): + self.set_status(304) + return + + This method is called automatically when the request is finished, + but may be called earlier for applications that override + `compute_etag` and want to do an early check for ``If-None-Match`` + before completing the request. The ``Etag`` header should be set + (perhaps with `set_etag_header`) before calling this method. + """ + computed_etag = utf8(self._headers.get("Etag", "")) + # Find all weak and strong etag values from If-None-Match header + # because RFC 7232 allows multiple etag values in a single header. + etags = re.findall( + br'\*|(?:W/)?"[^"]*"', + utf8(self.request.headers.get("If-None-Match", "")) + ) + if not computed_etag or not etags: + return False + + match = False + if etags[0] == b'*': + match = True + else: + # Use a weak comparison when comparing entity-tags. 
+ val = lambda x: x[2:] if x.startswith(b'W/') else x + for etag in etags: + if val(etag) == val(computed_etag): + match = True + break + return match + + def _stack_context_handle_exception(self, type, value, traceback): + try: + # For historical reasons _handle_request_exception only takes + # the exception value instead of the full triple, + # so re-raise the exception to ensure that it's in + # sys.exc_info() + raise_exc_info((type, value, traceback)) + except Exception: + self._handle_request_exception(value) + return True + + @gen.coroutine + def _execute(self, transforms, *args, **kwargs): + """Executes this request with the given output transforms.""" + self._transforms = transforms + try: + if self.request.method not in self.SUPPORTED_METHODS: + raise HTTPError(405) + self.path_args = [self.decode_argument(arg) for arg in args] + self.path_kwargs = dict((k, self.decode_argument(v, name=k)) + for (k, v) in kwargs.items()) + # If XSRF cookies are turned on, reject form submissions without + # the proper cookie + if self.request.method not in ("GET", "HEAD", "OPTIONS") and \ + self.application.settings.get("xsrf_cookies"): + self.check_xsrf_cookie() + + result = self.prepare() + if result is not None: + result = yield result + if self._prepared_future is not None: + # Tell the Application we've finished with prepare() + # and are ready for the body to arrive. + self._prepared_future.set_result(None) + if self._finished: + return + + if _has_stream_request_body(self.__class__): + # In streaming mode request.body is a Future that signals + # the body has been completely received. The Future has no + # result; the data has been passed to self.data_received + # instead. 
+ try: + yield self.request.body + except iostream.StreamClosedError: + return + + method = getattr(self, self.request.method.lower()) + result = method(*self.path_args, **self.path_kwargs) + if result is not None: + result = yield result + if self._auto_finish and not self._finished: + self.finish() + except Exception as e: + try: + self._handle_request_exception(e) + except Exception: + app_log.error("Exception in exception handler", exc_info=True) + if (self._prepared_future is not None and + not self._prepared_future.done()): + # In case we failed before setting _prepared_future, do it + # now (to unblock the HTTP server). Note that this is not + # in a finally block to avoid GC issues prior to Python 3.4. + self._prepared_future.set_result(None) + + def data_received(self, chunk): + """Implement this method to handle streamed request data. + + Requires the `.stream_request_body` decorator. + """ + raise NotImplementedError() + + def _log(self): + """Logs the current request. + + Sort of deprecated since this functionality was moved to the + Application, but left in place for the benefit of existing apps + that have overridden this method. + """ + self.application.log_request(self) + + def _request_summary(self): + return "%s %s (%s)" % (self.request.method, self.request.uri, + self.request.remote_ip) + + def _handle_request_exception(self, e): + if isinstance(e, Finish): + # Not an error; just finish the request without logging. + if not self._finished: + self.finish(*e.args) + return + try: + self.log_exception(*sys.exc_info()) + except Exception: + # An error here should still get a best-effort send_error() + # to avoid leaking the connection. + app_log.error("Error in exception logger", exc_info=True) + if self._finished: + # Extra errors after the request has been finished should + # be logged, but there is no reason to continue to try and + # send a response. 
+ return + if isinstance(e, HTTPError): + if e.status_code not in httputil.responses and not e.reason: + gen_log.error("Bad HTTP status code: %d", e.status_code) + self.send_error(500, exc_info=sys.exc_info()) + else: + self.send_error(e.status_code, exc_info=sys.exc_info()) + else: + self.send_error(500, exc_info=sys.exc_info()) + + def log_exception(self, typ, value, tb): + """Override to customize logging of uncaught exceptions. + + By default logs instances of `HTTPError` as warnings without + stack traces (on the ``tornado.general`` logger), and all + other exceptions as errors with stack traces (on the + ``tornado.application`` logger). + + .. versionadded:: 3.1 + """ + if isinstance(value, HTTPError): + if value.log_message: + format = "%d %s: " + value.log_message + args = ([value.status_code, self._request_summary()] + + list(value.args)) + gen_log.warning(format, *args) + else: + app_log.error("Uncaught exception %s\n%r", self._request_summary(), + self.request, exc_info=(typ, value, tb)) + + def _ui_module(self, name, module): + def render(*args, **kwargs): + if not hasattr(self, "_active_modules"): + self._active_modules = {} + if name not in self._active_modules: + self._active_modules[name] = module(self) + rendered = self._active_modules[name].render(*args, **kwargs) + return rendered + return render + + def _ui_method(self, method): + return lambda *args, **kwargs: method(self, *args, **kwargs) + + def _clear_headers_for_304(self): + # 304 responses should not contain entity headers (defined in + # http://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.1) + # not explicitly allowed by + # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5 + headers = ["Allow", "Content-Encoding", "Content-Language", + "Content-Length", "Content-MD5", "Content-Range", + "Content-Type", "Last-Modified"] + for h in headers: + self.clear_header(h) + + +def asynchronous(method): + """Wrap request handler methods with this if they are asynchronous. 
+ + This decorator is for callback-style asynchronous methods; for + coroutines, use the ``@gen.coroutine`` decorator without + ``@asynchronous``. (It is legal for legacy reasons to use the two + decorators together provided ``@asynchronous`` is first, but + ``@asynchronous`` will be ignored in this case) + + This decorator should only be applied to the :ref:`HTTP verb + methods `; its behavior is undefined for any other method. + This decorator does not *make* a method asynchronous; it tells + the framework that the method *is* asynchronous. For this decorator + to be useful the method must (at least sometimes) do something + asynchronous. + + If this decorator is given, the response is not finished when the + method returns. It is up to the request handler to call + `self.finish() ` to finish the HTTP + request. Without this decorator, the request is automatically + finished when the ``get()`` or ``post()`` method returns. Example: + + .. testcode:: + + class MyRequestHandler(RequestHandler): + @asynchronous + def get(self): + http = httpclient.AsyncHTTPClient() + http.fetch("http://friendfeed.com/", self._on_download) + + def _on_download(self, response): + self.write("Downloaded!") + self.finish() + + .. testoutput:: + :hide: + + .. versionchanged:: 3.1 + The ability to use ``@gen.coroutine`` without ``@asynchronous``. + + .. versionchanged:: 4.3 Returning anything but ``None`` or a + yieldable object from a method decorated with ``@asynchronous`` + is an error. Such return values were previously ignored silently. + """ + # Delay the IOLoop import because it's not available on app engine. 
+ from tornado.ioloop import IOLoop + + @functools.wraps(method) + def wrapper(self, *args, **kwargs): + self._auto_finish = False + with stack_context.ExceptionStackContext( + self._stack_context_handle_exception): + result = method(self, *args, **kwargs) + if result is not None: + result = gen.convert_yielded(result) + # If @asynchronous is used with @gen.coroutine, (but + # not @gen.engine), we can automatically finish the + # request when the future resolves. Additionally, + # the Future will swallow any exceptions so we need + # to throw them back out to the stack context to finish + # the request. + def future_complete(f): + f.result() + if not self._finished: + self.finish() + IOLoop.current().add_future(result, future_complete) + # Once we have done this, hide the Future from our + # caller (i.e. RequestHandler._when_complete), which + # would otherwise set up its own callback and + # exception handler (resulting in exceptions being + # logged twice). + return None + return result + return wrapper + + +def stream_request_body(cls): + """Apply to `RequestHandler` subclasses to enable streaming body support. + + This decorator implies the following changes: + + * `.HTTPServerRequest.body` is undefined, and body arguments will not + be included in `RequestHandler.get_argument`. + * `RequestHandler.prepare` is called when the request headers have been + read instead of after the entire body has been read. + * The subclass must define a method ``data_received(self, data):``, which + will be called zero or more times as data is available. Note that + if the request has an empty body, ``data_received`` may not be called. + * ``prepare`` and ``data_received`` may return Futures (such as via + ``@gen.coroutine``, in which case the next method will not be called + until those futures have completed. + * The regular HTTP method (``post``, ``put``, etc) will be called after + the entire body has been read. 
+ + There is a subtle interaction between ``data_received`` and asynchronous + ``prepare``: The first call to ``data_received`` may occur at any point + after the call to ``prepare`` has returned *or yielded*. + """ + if not issubclass(cls, RequestHandler): + raise TypeError("expected subclass of RequestHandler, got %r", cls) + cls._stream_request_body = True + return cls + + +def _has_stream_request_body(cls): + if not issubclass(cls, RequestHandler): + raise TypeError("expected subclass of RequestHandler, got %r", cls) + return getattr(cls, '_stream_request_body', False) + + +def removeslash(method): + """Use this decorator to remove trailing slashes from the request path. + + For example, a request to ``/foo/`` would redirect to ``/foo`` with this + decorator. Your request handler mapping should use a regular expression + like ``r'/foo/*'`` in conjunction with using the decorator. + """ + @functools.wraps(method) + def wrapper(self, *args, **kwargs): + if self.request.path.endswith("/"): + if self.request.method in ("GET", "HEAD"): + uri = self.request.path.rstrip("/") + if uri: # don't try to redirect '/' to '' + if self.request.query: + uri += "?" + self.request.query + self.redirect(uri, permanent=True) + return + else: + raise HTTPError(404) + return method(self, *args, **kwargs) + return wrapper + + +def addslash(method): + """Use this decorator to add a missing trailing slash to the request path. + + For example, a request to ``/foo`` would redirect to ``/foo/`` with this + decorator. Your request handler mapping should use a regular expression + like ``r'/foo/?'`` in conjunction with using the decorator. + """ + @functools.wraps(method) + def wrapper(self, *args, **kwargs): + if not self.request.path.endswith("/"): + if self.request.method in ("GET", "HEAD"): + uri = self.request.path + "/" + if self.request.query: + uri += "?" 
+ self.request.query + self.redirect(uri, permanent=True) + return + raise HTTPError(404) + return method(self, *args, **kwargs) + return wrapper + + +class Application(httputil.HTTPServerConnectionDelegate): + """A collection of request handlers that make up a web application. + + Instances of this class are callable and can be passed directly to + HTTPServer to serve the application:: + + application = web.Application([ + (r"/", MainPageHandler), + ]) + http_server = httpserver.HTTPServer(application) + http_server.listen(8080) + ioloop.IOLoop.current().start() + + The constructor for this class takes in a list of `URLSpec` objects + or (regexp, request_class) tuples. When we receive requests, we + iterate over the list in order and instantiate an instance of the + first request class whose regexp matches the request path. + The request class can be specified as either a class object or a + (fully-qualified) name. + + Each tuple can contain additional elements, which correspond to the + arguments to the `URLSpec` constructor. (Prior to Tornado 3.2, + only tuples of two or three elements were allowed). + + A dictionary may be passed as the third element of the tuple, + which will be used as keyword arguments to the handler's + constructor and `~RequestHandler.initialize` method. This pattern + is used for the `StaticFileHandler` in this example (note that a + `StaticFileHandler` can be installed automatically with the + static_path setting described below):: + + application = web.Application([ + (r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}), + ]) + + We support virtual hosts with the `add_handlers` method, which takes in + a host regular expression as the first argument:: + + application.add_handlers(r"www\.myhost\.com", [ + (r"/article/([0-9]+)", ArticleHandler), + ]) + + You can serve static files by sending the ``static_path`` setting + as a keyword argument. 
We will serve those files from the + ``/static/`` URI (this is configurable with the + ``static_url_prefix`` setting), and we will serve ``/favicon.ico`` + and ``/robots.txt`` from the same directory. A custom subclass of + `StaticFileHandler` can be specified with the + ``static_handler_class`` setting. + + """ + def __init__(self, handlers=None, default_host="", transforms=None, + **settings): + if transforms is None: + self.transforms = [] + if settings.get("compress_response") or settings.get("gzip"): + self.transforms.append(GZipContentEncoding) + else: + self.transforms = transforms + self.handlers = [] + self.named_handlers = {} + self.default_host = default_host + self.settings = settings + self.ui_modules = {'linkify': _linkify, + 'xsrf_form_html': _xsrf_form_html, + 'Template': TemplateModule, + } + self.ui_methods = {} + self._load_ui_modules(settings.get("ui_modules", {})) + self._load_ui_methods(settings.get("ui_methods", {})) + if self.settings.get("static_path"): + path = self.settings["static_path"] + handlers = list(handlers or []) + static_url_prefix = settings.get("static_url_prefix", + "/static/") + static_handler_class = settings.get("static_handler_class", + StaticFileHandler) + static_handler_args = settings.get("static_handler_args", {}) + static_handler_args['path'] = path + for pattern in [re.escape(static_url_prefix) + r"(.*)", + r"/(favicon\.ico)", r"/(robots\.txt)"]: + handlers.insert(0, (pattern, static_handler_class, + static_handler_args)) + if handlers: + self.add_handlers(".*$", handlers) + + if self.settings.get('debug'): + self.settings.setdefault('autoreload', True) + self.settings.setdefault('compiled_template_cache', False) + self.settings.setdefault('static_hash_cache', False) + self.settings.setdefault('serve_traceback', True) + + # Automatically reload modified modules + if self.settings.get('autoreload'): + from tornado import autoreload + autoreload.start() + + def listen(self, port, address="", **kwargs): + """Starts an 
HTTP server for this application on the given port. + + This is a convenience alias for creating an `.HTTPServer` + object and calling its listen method. Keyword arguments not + supported by `HTTPServer.listen <.TCPServer.listen>` are passed to the + `.HTTPServer` constructor. For advanced uses + (e.g. multi-process mode), do not use this method; create an + `.HTTPServer` and call its + `.TCPServer.bind`/`.TCPServer.start` methods directly. + + Note that after calling this method you still need to call + ``IOLoop.current().start()`` to start the server. + + Returns the `.HTTPServer` object. + + .. versionchanged:: 4.3 + Now returns the `.HTTPServer` object. + """ + # import is here rather than top level because HTTPServer + # is not importable on appengine + from tornado.httpserver import HTTPServer + server = HTTPServer(self, **kwargs) + server.listen(port, address) + return server + + def add_handlers(self, host_pattern, host_handlers): + """Appends the given handlers to our handler list. + + Host patterns are processed sequentially in the order they were + added. All matching patterns will be considered. + """ + if not host_pattern.endswith("$"): + host_pattern += "$" + handlers = [] + # The handlers with the wildcard host_pattern are a special + # case - they're added in the constructor but should have lower + # precedence than the more-precise handlers added later. + # If a wildcard handler group exists, it should always be last + # in the list, so insert new groups just before it. 
+ if self.handlers and self.handlers[-1][0].pattern == '.*$': + self.handlers.insert(-1, (re.compile(host_pattern), handlers)) + else: + self.handlers.append((re.compile(host_pattern), handlers)) + + for spec in host_handlers: + if isinstance(spec, (tuple, list)): + assert len(spec) in (2, 3, 4) + spec = URLSpec(*spec) + handlers.append(spec) + if spec.name: + if spec.name in self.named_handlers: + app_log.warning( + "Multiple handlers named %s; replacing previous value", + spec.name) + self.named_handlers[spec.name] = spec + + def add_transform(self, transform_class): + self.transforms.append(transform_class) + + def _get_host_handlers(self, request): + host = split_host_and_port(request.host.lower())[0] + matches = [] + for pattern, handlers in self.handlers: + if pattern.match(host): + matches.extend(handlers) + # Look for default host if not behind load balancer (for debugging) + if not matches and "X-Real-Ip" not in request.headers: + for pattern, handlers in self.handlers: + if pattern.match(self.default_host): + matches.extend(handlers) + return matches or None + + def _load_ui_methods(self, methods): + if isinstance(methods, types.ModuleType): + self._load_ui_methods(dict((n, getattr(methods, n)) + for n in dir(methods))) + elif isinstance(methods, list): + for m in methods: + self._load_ui_methods(m) + else: + for name, fn in methods.items(): + if not name.startswith("_") and hasattr(fn, "__call__") \ + and name[0].lower() == name[0]: + self.ui_methods[name] = fn + + def _load_ui_modules(self, modules): + if isinstance(modules, types.ModuleType): + self._load_ui_modules(dict((n, getattr(modules, n)) + for n in dir(modules))) + elif isinstance(modules, list): + for m in modules: + self._load_ui_modules(m) + else: + assert isinstance(modules, dict) + for name, cls in modules.items(): + try: + if issubclass(cls, UIModule): + self.ui_modules[name] = cls + except TypeError: + pass + + def start_request(self, server_conn, request_conn): + # Modern HTTPServer 
interface + return _RequestDispatcher(self, request_conn) + + def __call__(self, request): + # Legacy HTTPServer interface + dispatcher = _RequestDispatcher(self, None) + dispatcher.set_request(request) + return dispatcher.execute() + + def reverse_url(self, name, *args): + """Returns a URL path for handler named ``name`` + + The handler must be added to the application as a named `URLSpec`. + + Args will be substituted for capturing groups in the `URLSpec` regex. + They will be converted to strings if necessary, encoded as utf8, + and url-escaped. + """ + if name in self.named_handlers: + return self.named_handlers[name].reverse(*args) + raise KeyError("%s not found in named urls" % name) + + def log_request(self, handler): + """Writes a completed HTTP request to the logs. + + By default writes to the python root logger. To change + this behavior either subclass Application and override this method, + or pass a function in the application settings dictionary as + ``log_function``. + """ + if "log_function" in self.settings: + self.settings["log_function"](handler) + return + if handler.get_status() < 400: + log_method = access_log.info + elif handler.get_status() < 500: + log_method = access_log.warning + else: + log_method = access_log.error + request_time = 1000.0 * handler.request.request_time() + log_method("%d %s %.2fms", handler.get_status(), + handler._request_summary(), request_time) + + +class _RequestDispatcher(httputil.HTTPMessageDelegate): + def __init__(self, application, connection): + self.application = application + self.connection = connection + self.request = None + self.chunks = [] + self.handler_class = None + self.handler_kwargs = None + self.path_args = [] + self.path_kwargs = {} + + def headers_received(self, start_line, headers): + self.set_request(httputil.HTTPServerRequest( + connection=self.connection, start_line=start_line, + headers=headers)) + if self.stream_request_body: + self.request.body = Future() + return self.execute() + + def 
set_request(self, request): + self.request = request + self._find_handler() + self.stream_request_body = _has_stream_request_body(self.handler_class) + + def _find_handler(self): + # Identify the handler to use as soon as we have the request. + # Save url path arguments for later. + app = self.application + handlers = app._get_host_handlers(self.request) + if not handlers: + self.handler_class = RedirectHandler + self.handler_kwargs = dict(url="%s://%s/" + % (self.request.protocol, + app.default_host)) + return + for spec in handlers: + match = spec.regex.match(self.request.path) + if match: + self.handler_class = spec.handler_class + self.handler_kwargs = spec.kwargs + if spec.regex.groups: + # Pass matched groups to the handler. Since + # match.groups() includes both named and + # unnamed groups, we want to use either groups + # or groupdict but not both. + if spec.regex.groupindex: + self.path_kwargs = dict( + (str(k), _unquote_or_none(v)) + for (k, v) in match.groupdict().items()) + else: + self.path_args = [_unquote_or_none(s) + for s in match.groups()] + return + if app.settings.get('default_handler_class'): + self.handler_class = app.settings['default_handler_class'] + self.handler_kwargs = app.settings.get( + 'default_handler_args', {}) + else: + self.handler_class = ErrorHandler + self.handler_kwargs = dict(status_code=404) + + def data_received(self, data): + if self.stream_request_body: + return self.handler.data_received(data) + else: + self.chunks.append(data) + + def finish(self): + if self.stream_request_body: + self.request.body.set_result(None) + else: + self.request.body = b''.join(self.chunks) + self.request._parse_body() + self.execute() + + def on_connection_close(self): + if self.stream_request_body: + self.handler.on_connection_close() + else: + self.chunks = None + + def execute(self): + # If template cache is disabled (usually in the debug mode), + # re-compile templates and reload static files on every + # request so you don't need to 
restart to see changes + if not self.application.settings.get("compiled_template_cache", True): + with RequestHandler._template_loader_lock: + for loader in RequestHandler._template_loaders.values(): + loader.reset() + if not self.application.settings.get('static_hash_cache', True): + StaticFileHandler.reset() + + self.handler = self.handler_class(self.application, self.request, + **self.handler_kwargs) + transforms = [t(self.request) for t in self.application.transforms] + + if self.stream_request_body: + self.handler._prepared_future = Future() + # Note that if an exception escapes handler._execute it will be + # trapped in the Future it returns (which we are ignoring here, + # leaving it to be logged when the Future is GC'd). + # However, that shouldn't happen because _execute has a blanket + # except handler, and we cannot easily access the IOLoop here to + # call add_future (because of the requirement to remain compatible + # with WSGI) + self.handler._execute(transforms, *self.path_args, + **self.path_kwargs) + # If we are streaming the request body, then execute() is finished + # when the handler has prepared to receive the body. If not, + # it doesn't matter when execute() finishes (so we return None) + return self.handler._prepared_future + + +class HTTPError(Exception): + """An exception that will turn into an HTTP error response. + + Raising an `HTTPError` is a convenient alternative to calling + `RequestHandler.send_error` since it automatically ends the + current function. + + To customize the response sent with an `HTTPError`, override + `RequestHandler.write_error`. + + :arg int status_code: HTTP status code. Must be listed in + `httplib.responses ` unless the ``reason`` + keyword argument is given. + :arg string log_message: Message to be written to the log for this error + (will not be shown to the user unless the `Application` is in debug + mode). 
May contain ``%s``-style placeholders, which will be filled + in with remaining positional parameters. + :arg string reason: Keyword-only argument. The HTTP "reason" phrase + to pass in the status line along with ``status_code``. Normally + determined automatically from ``status_code``, but can be used + to use a non-standard numeric code. + """ + def __init__(self, status_code=500, log_message=None, *args, **kwargs): + self.status_code = status_code + self.log_message = log_message + self.args = args + self.reason = kwargs.get('reason', None) + if log_message and not args: + self.log_message = log_message.replace('%', '%%') + + def __str__(self): + message = "HTTP %d: %s" % ( + self.status_code, + self.reason or httputil.responses.get(self.status_code, 'Unknown')) + if self.log_message: + return message + " (" + (self.log_message % self.args) + ")" + else: + return message + + +class Finish(Exception): + """An exception that ends the request without producing an error response. + + When `Finish` is raised in a `RequestHandler`, the request will + end (calling `RequestHandler.finish` if it hasn't already been + called), but the error-handling methods (including + `RequestHandler.write_error`) will not be called. + + If `Finish()` was created with no arguments, the pending response + will be sent as-is. If `Finish()` was given an argument, that + argument will be passed to `RequestHandler.finish()`. + + This can be a more convenient way to implement custom error pages + than overriding ``write_error`` (especially in library code):: + + if self.current_user is None: + self.set_status(401) + self.set_header('WWW-Authenticate', 'Basic realm="something"') + raise Finish() + + .. versionchanged:: 4.3 + Arguments passed to ``Finish()`` will be passed on to + `RequestHandler.finish`. + """ + pass + + +class MissingArgumentError(HTTPError): + """Exception raised by `RequestHandler.get_argument`. 
+ + This is a subclass of `HTTPError`, so if it is uncaught a 400 response + code will be used instead of 500 (and a stack trace will not be logged). + + .. versionadded:: 3.1 + """ + def __init__(self, arg_name): + super(MissingArgumentError, self).__init__( + 400, 'Missing argument %s' % arg_name) + self.arg_name = arg_name + + +class ErrorHandler(RequestHandler): + """Generates an error response with ``status_code`` for all requests.""" + def initialize(self, status_code): + self.set_status(status_code) + + def prepare(self): + raise HTTPError(self._status_code) + + def check_xsrf_cookie(self): + # POSTs to an ErrorHandler don't actually have side effects, + # so we don't need to check the xsrf token. This allows POSTs + # to the wrong url to return a 404 instead of 403. + pass + + +class RedirectHandler(RequestHandler): + """Redirects the client to the given URL for all GET requests. + + You should provide the keyword argument ``url`` to the handler, e.g.:: + + application = web.Application([ + (r"/oldpath", web.RedirectHandler, {"url": "/newpath"}), + ]) + """ + def initialize(self, url, permanent=True): + self._url = url + self._permanent = permanent + + def get(self): + self.redirect(self._url, permanent=self._permanent) + + +class StaticFileHandler(RequestHandler): + """A simple handler that can serve static content from a directory. + + A `StaticFileHandler` is configured automatically if you pass the + ``static_path`` keyword argument to `Application`. This handler + can be customized with the ``static_url_prefix``, ``static_handler_class``, + and ``static_handler_args`` settings. + + To map an additional path to this handler for a static data directory + you would add a line to your application like:: + + application = web.Application([ + (r"/content/(.*)", web.StaticFileHandler, {"path": "/var/www"}), + ]) + + The handler constructor requires a ``path`` argument, which specifies the + local root directory of the content to be served. 
+ + Note that a capture group in the regex is required to parse the value for + the ``path`` argument to the get() method (different than the constructor + argument above); see `URLSpec` for details. + + To serve a file like ``index.html`` automatically when a directory is + requested, set ``static_handler_args=dict(default_filename="index.html")`` + in your application settings, or add ``default_filename`` as an initializer + argument for your ``StaticFileHandler``. + + To maximize the effectiveness of browser caching, this class supports + versioned urls (by default using the argument ``?v=``). If a version + is given, we instruct the browser to cache this file indefinitely. + `make_static_url` (also available as `RequestHandler.static_url`) can + be used to construct a versioned url. + + This handler is intended primarily for use in development and light-duty + file serving; for heavy traffic it will be more efficient to use + a dedicated static file server (such as nginx or Apache). We support + the HTTP ``Accept-Ranges`` mechanism to return partial content (because + some browsers require this functionality to be present to seek in + HTML5 audio or video). + + **Subclassing notes** + + This class is designed to be extensible by subclassing, but because + of the way static urls are generated with class methods rather than + instance methods, the inheritance patterns are somewhat unusual. + Be sure to use the ``@classmethod`` decorator when overriding a + class method. Instance methods may use the attributes ``self.path`` + ``self.absolute_path``, and ``self.modified``. + + Subclasses should only override methods discussed in this section; + overriding other methods is error-prone. Overriding + ``StaticFileHandler.get`` is particularly problematic due to the + tight coupling with ``compute_etag`` and other methods. + + To change the way static urls are generated (e.g. 
to match the behavior + of another server or CDN), override `make_static_url`, `parse_url_path`, + `get_cache_time`, and/or `get_version`. + + To replace all interaction with the filesystem (e.g. to serve + static content from a database), override `get_content`, + `get_content_size`, `get_modified_time`, `get_absolute_path`, and + `validate_absolute_path`. + + .. versionchanged:: 3.1 + Many of the methods for subclasses were added in Tornado 3.1. + """ + CACHE_MAX_AGE = 86400 * 365 * 10 # 10 years + + _static_hashes = {} + _lock = threading.Lock() # protects _static_hashes + + def initialize(self, path, default_filename=None): + self.root = path + self.default_filename = default_filename + + @classmethod + def reset(cls): + with cls._lock: + cls._static_hashes = {} + + def head(self, path): + return self.get(path, include_body=False) + + @gen.coroutine + def get(self, path, include_body=True): + # Set up our path instance variables. + self.path = self.parse_url_path(path) + del path # make sure we don't refer to path instead of self.path again + absolute_path = self.get_absolute_path(self.root, self.path) + self.absolute_path = self.validate_absolute_path( + self.root, absolute_path) + if self.absolute_path is None: + return + + self.modified = self.get_modified_time() + self.set_headers() + + if self.should_return_304(): + self.set_status(304) + return + + request_range = None + range_header = self.request.headers.get("Range") + if range_header: + # As per RFC 2616 14.16, if an invalid Range header is specified, + # the request will be treated as if the header didn't exist. 
+ request_range = httputil._parse_request_range(range_header) + + size = self.get_content_size() + if request_range: + start, end = request_range + if (start is not None and start >= size) or end == 0: + # As per RFC 2616 14.35.1, a range is not satisfiable only: if + # the first requested byte is equal to or greater than the + # content, or when a suffix with length 0 is specified + self.set_status(416) # Range Not Satisfiable + self.set_header("Content-Type", "text/plain") + self.set_header("Content-Range", "bytes */%s" % (size, )) + return + if start is not None and start < 0: + start += size + if end is not None and end > size: + # Clients sometimes blindly use a large range to limit their + # download size; cap the endpoint at the actual file size. + end = size + # Note: only return HTTP 206 if less than the entire range has been + # requested. Not only is this semantically correct, but Chrome + # refuses to play audio if it gets an HTTP 206 in response to + # ``Range: bytes=0-``. + if size != (end or size) - (start or 0): + self.set_status(206) # Partial Content + self.set_header("Content-Range", + httputil._get_content_range(start, end, size)) + else: + start = end = None + + if start is not None and end is not None: + content_length = end - start + elif end is not None: + content_length = end + elif start is not None: + content_length = size - start + else: + content_length = size + self.set_header("Content-Length", content_length) + + if include_body: + content = self.get_content(self.absolute_path, start, end) + if isinstance(content, bytes): + content = [content] + for chunk in content: + try: + self.write(chunk) + yield self.flush() + except iostream.StreamClosedError: + return + else: + assert self.request.method == "HEAD" + + def compute_etag(self): + """Sets the ``Etag`` header based on static url version. + + This allows efficient ``If-None-Match`` checks against cached + versions, and sends the correct ``Etag`` for a partial response + (i.e. 
the same ``Etag`` as the full file). + + .. versionadded:: 3.1 + """ + version_hash = self._get_cached_version(self.absolute_path) + if not version_hash: + return None + return '"%s"' % (version_hash, ) + + def set_headers(self): + """Sets the content and caching headers on the response. + + .. versionadded:: 3.1 + """ + self.set_header("Accept-Ranges", "bytes") + self.set_etag_header() + + if self.modified is not None: + self.set_header("Last-Modified", self.modified) + + content_type = self.get_content_type() + if content_type: + self.set_header("Content-Type", content_type) + + cache_time = self.get_cache_time(self.path, self.modified, + content_type) + if cache_time > 0: + self.set_header("Expires", datetime.datetime.utcnow() + + datetime.timedelta(seconds=cache_time)) + self.set_header("Cache-Control", "max-age=" + str(cache_time)) + + self.set_extra_headers(self.path) + + def should_return_304(self): + """Returns True if the headers indicate that we should return 304. + + .. versionadded:: 3.1 + """ + if self.check_etag_header(): + return True + + # Check the If-Modified-Since, and don't send the result if the + # content has not been modified + ims_value = self.request.headers.get("If-Modified-Since") + if ims_value is not None: + date_tuple = email.utils.parsedate(ims_value) + if date_tuple is not None: + if_since = datetime.datetime(*date_tuple[:6]) + if if_since >= self.modified: + return True + + return False + + @classmethod + def get_absolute_path(cls, root, path): + """Returns the absolute location of ``path`` relative to ``root``. + + ``root`` is the path configured for this `StaticFileHandler` + (in most cases the ``static_path`` `Application` setting). + + This class method may be overridden in subclasses. By default + it returns a filesystem path, but other strings may be used + as long as they are unique and understood by the subclass's + overridden `get_content`. + + .. 
versionadded:: 3.1 + """ + abspath = os.path.abspath(os.path.join(root, path)) + return abspath + + def validate_absolute_path(self, root, absolute_path): + """Validate and return the absolute path. + + ``root`` is the configured path for the `StaticFileHandler`, + and ``path`` is the result of `get_absolute_path` + + This is an instance method called during request processing, + so it may raise `HTTPError` or use methods like + `RequestHandler.redirect` (return None after redirecting to + halt further processing). This is where 404 errors for missing files + are generated. + + This method may modify the path before returning it, but note that + any such modifications will not be understood by `make_static_url`. + + In instance methods, this method's result is available as + ``self.absolute_path``. + + .. versionadded:: 3.1 + """ + # os.path.abspath strips a trailing /. + # We must add it back to `root` so that we only match files + # in a directory named `root` instead of files starting with + # that prefix. + root = os.path.abspath(root) + if not root.endswith(os.path.sep): + # abspath always removes a trailing slash, except when + # root is '/'. This is an unusual case, but several projects + # have independently discovered this technique to disable + # Tornado's path validation and (hopefully) do their own, + # so we need to support it. + root += os.path.sep + # The trailing slash also needs to be temporarily added back + # the requested path so a request to root/ will match. 
+ if not (absolute_path + os.path.sep).startswith(root): + raise HTTPError(403, "%s is not in root static directory", + self.path) + if (os.path.isdir(absolute_path) and + self.default_filename is not None): + # need to look at the request.path here for when path is empty + # but there is some prefix to the path that was already + # trimmed by the routing + if not self.request.path.endswith("/"): + self.redirect(self.request.path + "/", permanent=True) + return + absolute_path = os.path.join(absolute_path, self.default_filename) + if not os.path.exists(absolute_path): + raise HTTPError(404) + if not os.path.isfile(absolute_path): + raise HTTPError(403, "%s is not a file", self.path) + return absolute_path + + @classmethod + def get_content(cls, abspath, start=None, end=None): + """Retrieve the content of the requested resource which is located + at the given absolute path. + + This class method may be overridden by subclasses. Note that its + signature is different from other overridable class methods + (no ``settings`` argument); this is deliberate to ensure that + ``abspath`` is able to stand on its own as a cache key. + + This method should either return a byte string or an iterator + of byte strings. The latter is preferred for large files + as it helps reduce memory fragmentation. + + .. versionadded:: 3.1 + """ + with open(abspath, "rb") as file: + if start is not None: + file.seek(start) + if end is not None: + remaining = end - (start or 0) + else: + remaining = None + while True: + chunk_size = 64 * 1024 + if remaining is not None and remaining < chunk_size: + chunk_size = remaining + chunk = file.read(chunk_size) + if chunk: + if remaining is not None: + remaining -= len(chunk) + yield chunk + else: + if remaining is not None: + assert remaining == 0 + return + + @classmethod + def get_content_version(cls, abspath): + """Returns a version string for the resource at the given path. + + This class method may be overridden by subclasses. 
The + default implementation is a hash of the file's contents. + + .. versionadded:: 3.1 + """ + data = cls.get_content(abspath) + hasher = hashlib.md5() + if isinstance(data, bytes): + hasher.update(data) + else: + for chunk in data: + hasher.update(chunk) + return hasher.hexdigest() + + def _stat(self): + if not hasattr(self, '_stat_result'): + self._stat_result = os.stat(self.absolute_path) + return self._stat_result + + def get_content_size(self): + """Retrieve the total size of the resource at the given path. + + This method may be overridden by subclasses. + + .. versionadded:: 3.1 + + .. versionchanged:: 4.0 + This method is now always called, instead of only when + partial results are requested. + """ + stat_result = self._stat() + return stat_result[stat.ST_SIZE] + + def get_modified_time(self): + """Returns the time that ``self.absolute_path`` was last modified. + + May be overridden in subclasses. Should return a `~datetime.datetime` + object or None. + + .. versionadded:: 3.1 + """ + stat_result = self._stat() + modified = datetime.datetime.utcfromtimestamp( + stat_result[stat.ST_MTIME]) + return modified + + def get_content_type(self): + """Returns the ``Content-Type`` header to be used for this request. + + .. versionadded:: 3.1 + """ + mime_type, encoding = mimetypes.guess_type(self.absolute_path) + # per RFC 6713, use the appropriate type for a gzip compressed file + if encoding == "gzip": + return "application/gzip" + # As of 2015-07-21 there is no bzip2 encoding defined at + # http://www.iana.org/assignments/media-types/media-types.xhtml + # So for that (and any other encoding), use octet-stream. 
+ elif encoding is not None: + return "application/octet-stream" + elif mime_type is not None: + return mime_type + # if mime_type not detected, use application/octet-stream + else: + return "application/octet-stream" + + def set_extra_headers(self, path): + """For subclass to add extra headers to the response""" + pass + + def get_cache_time(self, path, modified, mime_type): + """Override to customize cache control behavior. + + Return a positive number of seconds to make the result + cacheable for that amount of time or 0 to mark resource as + cacheable for an unspecified amount of time (subject to + browser heuristics). + + By default returns cache expiry of 10 years for resources requested + with ``v`` argument. + """ + return self.CACHE_MAX_AGE if "v" in self.request.arguments else 0 + + @classmethod + def make_static_url(cls, settings, path, include_version=True): + """Constructs a versioned url for the given path. + + This method may be overridden in subclasses (but note that it + is a class method rather than an instance method). Subclasses + are only required to implement the signature + ``make_static_url(cls, settings, path)``; other keyword + arguments may be passed through `~RequestHandler.static_url` + but are not standard. + + ``settings`` is the `Application.settings` dictionary. ``path`` + is the static path being requested. The url returned should be + relative to the current host. + + ``include_version`` determines whether the generated URL should + include the query string containing the version hash of the + file corresponding to the given ``path``. + + """ + url = settings.get('static_url_prefix', '/static/') + path + if not include_version: + return url + + version_hash = cls.get_version(settings, path) + if not version_hash: + return url + + return '%s?v=%s' % (url, version_hash) + + def parse_url_path(self, url_path): + """Converts a static URL path into a filesystem path. 
+ + ``url_path`` is the path component of the URL with + ``static_url_prefix`` removed. The return value should be + filesystem path relative to ``static_path``. + + This is the inverse of `make_static_url`. + """ + if os.path.sep != "/": + url_path = url_path.replace("/", os.path.sep) + return url_path + + @classmethod + def get_version(cls, settings, path): + """Generate the version string to be used in static URLs. + + ``settings`` is the `Application.settings` dictionary and ``path`` + is the relative location of the requested asset on the filesystem. + The returned value should be a string, or ``None`` if no version + could be determined. + + .. versionchanged:: 3.1 + This method was previously recommended for subclasses to override; + `get_content_version` is now preferred as it allows the base + class to handle caching of the result. + """ + abs_path = cls.get_absolute_path(settings['static_path'], path) + return cls._get_cached_version(abs_path) + + @classmethod + def _get_cached_version(cls, abs_path): + with cls._lock: + hashes = cls._static_hashes + if abs_path not in hashes: + try: + hashes[abs_path] = cls.get_content_version(abs_path) + except Exception: + gen_log.error("Could not open static file %r", abs_path) + hashes[abs_path] = None + hsh = hashes.get(abs_path) + if hsh: + return hsh + return None + + +class FallbackHandler(RequestHandler): + """A `RequestHandler` that wraps another HTTP server callback. + + The fallback is a callable object that accepts an + `~.httputil.HTTPServerRequest`, such as an `Application` or + `tornado.wsgi.WSGIContainer`. This is most useful to use both + Tornado ``RequestHandlers`` and WSGI in the same server. 
Typical + usage:: + + wsgi_app = tornado.wsgi.WSGIContainer( + django.core.handlers.wsgi.WSGIHandler()) + application = tornado.web.Application([ + (r"/foo", FooHandler), + (r".*", FallbackHandler, dict(fallback=wsgi_app), + ]) + """ + def initialize(self, fallback): + self.fallback = fallback + + def prepare(self): + self.fallback(self.request) + self._finished = True + + +class OutputTransform(object): + """A transform modifies the result of an HTTP request (e.g., GZip encoding) + + Applications are not expected to create their own OutputTransforms + or interact with them directly; the framework chooses which transforms + (if any) to apply. + """ + def __init__(self, request): + pass + + def transform_first_chunk(self, status_code, headers, chunk, finishing): + return status_code, headers, chunk + + def transform_chunk(self, chunk, finishing): + return chunk + + +class GZipContentEncoding(OutputTransform): + """Applies the gzip content encoding to the response. + + See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11 + + .. versionchanged:: 4.0 + Now compresses all mime types beginning with ``text/``, instead + of just a whitelist. (the whitelist is still used for certain + non-text mime types). + """ + # Whitelist of compressible mime types (in addition to any types + # beginning with "text/"). + CONTENT_TYPES = set(["application/javascript", "application/x-javascript", + "application/xml", "application/atom+xml", + "application/json", "application/xhtml+xml"]) + # Python's GzipFile defaults to level 9, while most other gzip + # tools (including gzip itself) default to 6, which is probably a + # better CPU/size tradeoff. + GZIP_LEVEL = 6 + # Responses that are too short are unlikely to benefit from gzipping + # after considering the "Content-Encoding: gzip" header and the header + # inside the gzip encoding. + # Note that responses written in multiple chunks will be compressed + # regardless of size. 
+ MIN_LENGTH = 1024 + + def __init__(self, request): + self._gzipping = "gzip" in request.headers.get("Accept-Encoding", "") + + def _compressible_type(self, ctype): + return ctype.startswith('text/') or ctype in self.CONTENT_TYPES + + def transform_first_chunk(self, status_code, headers, chunk, finishing): + if 'Vary' in headers: + headers['Vary'] += b', Accept-Encoding' + else: + headers['Vary'] = b'Accept-Encoding' + if self._gzipping: + ctype = _unicode(headers.get("Content-Type", "")).split(";")[0] + self._gzipping = self._compressible_type(ctype) and \ + (not finishing or len(chunk) >= self.MIN_LENGTH) and \ + ("Content-Encoding" not in headers) + if self._gzipping: + headers["Content-Encoding"] = "gzip" + self._gzip_value = BytesIO() + self._gzip_file = gzip.GzipFile(mode="w", fileobj=self._gzip_value, + compresslevel=self.GZIP_LEVEL) + chunk = self.transform_chunk(chunk, finishing) + if "Content-Length" in headers: + # The original content length is no longer correct. + # If this is the last (and only) chunk, we can set the new + # content-length; otherwise we remove it and fall back to + # chunked encoding. + if finishing: + headers["Content-Length"] = str(len(chunk)) + else: + del headers["Content-Length"] + return status_code, headers, chunk + + def transform_chunk(self, chunk, finishing): + if self._gzipping: + self._gzip_file.write(chunk) + if finishing: + self._gzip_file.close() + else: + self._gzip_file.flush() + chunk = self._gzip_value.getvalue() + self._gzip_value.truncate(0) + self._gzip_value.seek(0) + return chunk + + +def authenticated(method): + """Decorate methods with this to require that the user be logged in. + + If the user is not logged in, they will be redirected to the configured + `login url `. + + If you configure a login url with a query parameter, Tornado will + assume you know what you're doing and use it as-is. If not, it + will add a `next` parameter so the login page knows where to send + you once you're logged in. 
+ """ + @functools.wraps(method) + def wrapper(self, *args, **kwargs): + if not self.current_user: + if self.request.method in ("GET", "HEAD"): + url = self.get_login_url() + if "?" not in url: + if urlparse.urlsplit(url).scheme: + # if login url is absolute, make next absolute too + next_url = self.request.full_url() + else: + next_url = self.request.uri + url += "?" + urlencode(dict(next=next_url)) + self.redirect(url) + return + raise HTTPError(403) + return method(self, *args, **kwargs) + return wrapper + + +class UIModule(object): + """A re-usable, modular UI unit on a page. + + UI modules often execute additional queries, and they can include + additional CSS and JavaScript that will be included in the output + page, which is automatically inserted on page render. + + Subclasses of UIModule must override the `render` method. + """ + def __init__(self, handler): + self.handler = handler + self.request = handler.request + self.ui = handler.ui + self.locale = handler.locale + + @property + def current_user(self): + return self.handler.current_user + + def render(self, *args, **kwargs): + """Override in subclasses to return this module's output.""" + raise NotImplementedError() + + def embedded_javascript(self): + """Override to return a JavaScript string + to be embedded in the page.""" + return None + + def javascript_files(self): + """Override to return a list of JavaScript files needed by this module. + + If the return values are relative paths, they will be passed to + `RequestHandler.static_url`; otherwise they will be used as-is. + """ + return None + + def embedded_css(self): + """Override to return a CSS string + that will be embedded in the page.""" + return None + + def css_files(self): + """Override to returns a list of CSS files required by this module. + + If the return values are relative paths, they will be passed to + `RequestHandler.static_url`; otherwise they will be used as-is. 
+ """ + return None + + def html_head(self): + """Override to return an HTML string that will be put in the + element. + """ + return None + + def html_body(self): + """Override to return an HTML string that will be put at the end of + the element. + """ + return None + + def render_string(self, path, **kwargs): + """Renders a template and returns it as a string.""" + return self.handler.render_string(path, **kwargs) + + +class _linkify(UIModule): + def render(self, text, **kwargs): + return escape.linkify(text, **kwargs) + + +class _xsrf_form_html(UIModule): + def render(self): + return self.handler.xsrf_form_html() + + +class TemplateModule(UIModule): + """UIModule that simply renders the given template. + + {% module Template("foo.html") %} is similar to {% include "foo.html" %}, + but the module version gets its own namespace (with kwargs passed to + Template()) instead of inheriting the outer template's namespace. + + Templates rendered through this module also get access to UIModule's + automatic javascript/css features. Simply call set_resources + inside the template and give it keyword arguments corresponding to + the methods on UIModule: {{ set_resources(js_files=static_url("my.js")) }} + Note that these resources are output once per template file, not once + per instantiation of the template, so they must not depend on + any arguments to the template. 
+ """ + def __init__(self, handler): + super(TemplateModule, self).__init__(handler) + # keep resources in both a list and a dict to preserve order + self._resource_list = [] + self._resource_dict = {} + + def render(self, path, **kwargs): + def set_resources(**kwargs): + if path not in self._resource_dict: + self._resource_list.append(kwargs) + self._resource_dict[path] = kwargs + else: + if self._resource_dict[path] != kwargs: + raise ValueError("set_resources called with different " + "resources for the same template") + return "" + return self.render_string(path, set_resources=set_resources, + **kwargs) + + def _get_resources(self, key): + return (r[key] for r in self._resource_list if key in r) + + def embedded_javascript(self): + return "\n".join(self._get_resources("embedded_javascript")) + + def javascript_files(self): + result = [] + for f in self._get_resources("javascript_files"): + if isinstance(f, (unicode_type, bytes)): + result.append(f) + else: + result.extend(f) + return result + + def embedded_css(self): + return "\n".join(self._get_resources("embedded_css")) + + def css_files(self): + result = [] + for f in self._get_resources("css_files"): + if isinstance(f, (unicode_type, bytes)): + result.append(f) + else: + result.extend(f) + return result + + def html_head(self): + return "".join(self._get_resources("html_head")) + + def html_body(self): + return "".join(self._get_resources("html_body")) + + +class _UIModuleNamespace(object): + """Lazy namespace which creates UIModule proxies bound to a handler.""" + def __init__(self, handler, ui_modules): + self.handler = handler + self.ui_modules = ui_modules + + def __getitem__(self, key): + return self.handler._ui_module(key, self.ui_modules[key]) + + def __getattr__(self, key): + try: + return self[key] + except KeyError as e: + raise AttributeError(str(e)) + + +class URLSpec(object): + """Specifies mappings between URLs and handlers.""" + def __init__(self, pattern, handler, kwargs=None, name=None): + 
"""Parameters: + + * ``pattern``: Regular expression to be matched. Any groups + in the regex will be passed in to the handler's get/post/etc + methods as arguments. + + * ``handler``: `RequestHandler` subclass to be invoked. + + * ``kwargs`` (optional): A dictionary of additional arguments + to be passed to the handler's constructor. + + * ``name`` (optional): A name for this handler. Used by + `Application.reverse_url`. + """ + if not pattern.endswith('$'): + pattern += '$' + self.regex = re.compile(pattern) + assert len(self.regex.groupindex) in (0, self.regex.groups), \ + ("groups in url regexes must either be all named or all " + "positional: %r" % self.regex.pattern) + + if isinstance(handler, str): + # import the Module and instantiate the class + # Must be a fully qualified name (module.ClassName) + handler = import_object(handler) + + self.handler_class = handler + self.kwargs = kwargs or {} + self.name = name + self._path, self._group_count = self._find_groups() + + def __repr__(self): + return '%s(%r, %s, kwargs=%r, name=%r)' % \ + (self.__class__.__name__, self.regex.pattern, + self.handler_class, self.kwargs, self.name) + + def _find_groups(self): + """Returns a tuple (reverse string, group count) for a url. + + For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method + would return ('/%s/%s/', 2). + """ + pattern = self.regex.pattern + if pattern.startswith('^'): + pattern = pattern[1:] + if pattern.endswith('$'): + pattern = pattern[:-1] + + if self.regex.groups != pattern.count('('): + # The pattern is too complicated for our simplistic matching, + # so we can't support reversing it. 
+ return (None, None) + + pieces = [] + for fragment in pattern.split('('): + if ')' in fragment: + paren_loc = fragment.index(')') + if paren_loc >= 0: + pieces.append('%s' + fragment[paren_loc + 1:]) + else: + pieces.append(fragment) + + return (''.join(pieces), self.regex.groups) + + def reverse(self, *args): + assert self._path is not None, \ + "Cannot reverse url regex " + self.regex.pattern + assert len(args) == self._group_count, "required number of arguments "\ + "not found" + if not len(args): + return self._path + converted_args = [] + for a in args: + if not isinstance(a, (unicode_type, bytes)): + a = str(a) + converted_args.append(escape.url_escape(utf8(a), plus=False)) + return self._path % tuple(converted_args) + +url = URLSpec + + +if hasattr(hmac, 'compare_digest'): # python 3.3 + _time_independent_equals = hmac.compare_digest +else: + def _time_independent_equals(a, b): + if len(a) != len(b): + return False + result = 0 + if isinstance(a[0], int): # python3 byte strings + for x, y in zip(a, b): + result |= x ^ y + else: # python2 + for x, y in zip(a, b): + result |= ord(x) ^ ord(y) + return result == 0 + + +def create_signed_value(secret, name, value, version=None, clock=None, + key_version=None): + if version is None: + version = DEFAULT_SIGNED_VALUE_VERSION + if clock is None: + clock = time.time + + timestamp = utf8(str(int(clock()))) + value = base64.b64encode(utf8(value)) + if version == 1: + signature = _create_signature_v1(secret, name, value, timestamp) + value = b"|".join([value, timestamp, signature]) + return value + elif version == 2: + # The v2 format consists of a version number and a series of + # length-prefixed fields "%d:%s", the last of which is a + # signature, all separated by pipes. All numbers are in + # decimal format with no leading zeros. The signature is an + # HMAC-SHA256 of the whole string up to that point, including + # the final pipe. + # + # The fields are: + # - format version (i.e. 
2; no length prefix) + # - key version (integer, default is 0) + # - timestamp (integer seconds since epoch) + # - name (not encoded; assumed to be ~alphanumeric) + # - value (base64-encoded) + # - signature (hex-encoded; no length prefix) + def format_field(s): + return utf8("%d:" % len(s)) + utf8(s) + to_sign = b"|".join([ + b"2", + format_field(str(key_version or 0)), + format_field(timestamp), + format_field(name), + format_field(value), + b'']) + + if isinstance(secret, dict): + assert key_version is not None, 'Key version must be set when sign key dict is used' + assert version >= 2, 'Version must be at least 2 for key version support' + secret = secret[key_version] + + signature = _create_signature_v2(secret, to_sign) + return to_sign + signature + else: + raise ValueError("Unsupported version %d" % version) + +# A leading version number in decimal +# with no leading zeros, followed by a pipe. +_signed_value_version_re = re.compile(br"^([1-9][0-9]*)\|(.*)$") + + +def _get_version(value): + # Figures out what version value is. Version 1 did not include an + # explicit version field and started with arbitrary base64 data, + # which makes this tricky. + m = _signed_value_version_re.match(value) + if m is None: + version = 1 + else: + try: + version = int(m.group(1)) + if version > 999: + # Certain payloads from the version-less v1 format may + # be parsed as valid integers. Due to base64 padding + # restrictions, this can only happen for numbers whose + # length is a multiple of 4, so we can treat all + # numbers up to 999 as versions, and for the rest we + # fall back to v1 format. 
+ version = 1 + except ValueError: + version = 1 + return version + + +def decode_signed_value(secret, name, value, max_age_days=31, + clock=None, min_version=None): + if clock is None: + clock = time.time + if min_version is None: + min_version = DEFAULT_SIGNED_VALUE_MIN_VERSION + if min_version > 2: + raise ValueError("Unsupported min_version %d" % min_version) + if not value: + return None + + value = utf8(value) + version = _get_version(value) + + if version < min_version: + return None + if version == 1: + return _decode_signed_value_v1(secret, name, value, + max_age_days, clock) + elif version == 2: + return _decode_signed_value_v2(secret, name, value, + max_age_days, clock) + else: + return None + + +def _decode_signed_value_v1(secret, name, value, max_age_days, clock): + parts = utf8(value).split(b"|") + if len(parts) != 3: + return None + signature = _create_signature_v1(secret, name, parts[0], parts[1]) + if not _time_independent_equals(parts[2], signature): + gen_log.warning("Invalid cookie signature %r", value) + return None + timestamp = int(parts[1]) + if timestamp < clock() - max_age_days * 86400: + gen_log.warning("Expired cookie %r", value) + return None + if timestamp > clock() + 31 * 86400: + # _cookie_signature does not hash a delimiter between the + # parts of the cookie, so an attacker could transfer trailing + # digits from the payload to the timestamp without altering the + # signature. For backwards compatibility, sanity-check timestamp + # here instead of modifying _cookie_signature. 
+ gen_log.warning("Cookie timestamp in future; possible tampering %r", + value) + return None + if parts[1].startswith(b"0"): + gen_log.warning("Tampered cookie %r", value) + return None + try: + return base64.b64decode(parts[0]) + except Exception: + return None + + +def _decode_fields_v2(value): + def _consume_field(s): + length, _, rest = s.partition(b':') + n = int(length) + field_value = rest[:n] + # In python 3, indexing bytes returns small integers; we must + # use a slice to get a byte string as in python 2. + if rest[n:n + 1] != b'|': + raise ValueError("malformed v2 signed value field") + rest = rest[n + 1:] + return field_value, rest + + rest = value[2:] # remove version number + key_version, rest = _consume_field(rest) + timestamp, rest = _consume_field(rest) + name_field, rest = _consume_field(rest) + value_field, passed_sig = _consume_field(rest) + return int(key_version), timestamp, name_field, value_field, passed_sig + + +def _decode_signed_value_v2(secret, name, value, max_age_days, clock): + try: + key_version, timestamp, name_field, value_field, passed_sig = _decode_fields_v2(value) + except ValueError: + return None + signed_string = value[:-len(passed_sig)] + + if isinstance(secret, dict): + try: + secret = secret[key_version] + except KeyError: + return None + + expected_sig = _create_signature_v2(secret, signed_string) + if not _time_independent_equals(passed_sig, expected_sig): + return None + if name_field != utf8(name): + return None + timestamp = int(timestamp) + if timestamp < clock() - max_age_days * 86400: + # The signature has expired. 
+ return None + try: + return base64.b64decode(value_field) + except Exception: + return None + + +def get_signature_key_version(value): + value = utf8(value) + version = _get_version(value) + if version < 2: + return None + try: + key_version, _, _, _, _ = _decode_fields_v2(value) + except ValueError: + return None + + return key_version + + +def _create_signature_v1(secret, *parts): + hash = hmac.new(utf8(secret), digestmod=hashlib.sha1) + for part in parts: + hash.update(utf8(part)) + return utf8(hash.hexdigest()) + + +def _create_signature_v2(secret, s): + hash = hmac.new(utf8(secret), digestmod=hashlib.sha256) + hash.update(utf8(s)) + return utf8(hash.hexdigest()) + + +def _unquote_or_none(s): + """None-safe wrapper around url_unescape to handle unamteched optional + groups correctly. + + Note that args are passed as bytes so the handler can decide what + encoding to use. + """ + if s is None: + return s + return escape.url_unescape(s, encoding=None, plus=False) diff --git a/python/tornado/websocket.py b/python/tornado/websocket.py new file mode 100644 index 000000000..11e526687 --- /dev/null +++ b/python/tornado/websocket.py @@ -0,0 +1,1063 @@ +"""Implementation of the WebSocket protocol. + +`WebSockets `_ allow for bidirectional +communication between the browser and server. + +WebSockets are supported in the current versions of all major browsers, +although older versions that do not support WebSockets are still in use +(refer to http://caniuse.com/websockets for details). + +This module implements the final version of the WebSocket protocol as +defined in `RFC 6455 `_. Certain +browser versions (notably Safari 5.x) implemented an earlier draft of +the protocol (known as "draft 76") and are not compatible with this module. + +.. versionchanged:: 4.0 + Removed support for the draft 76 protocol version. 
+""" + +from __future__ import absolute_import, division, print_function, with_statement +# Author: Jacob Kristhammar, 2010 + +import base64 +import collections +import hashlib +import os +import struct +import tornado.escape +import tornado.web +import zlib + +from tornado.concurrent import TracebackFuture +from tornado.escape import utf8, native_str, to_unicode +from tornado import httpclient, httputil +from tornado.ioloop import IOLoop +from tornado.iostream import StreamClosedError +from tornado.log import gen_log, app_log +from tornado import simple_httpclient +from tornado.tcpclient import TCPClient +from tornado.util import _websocket_mask + +try: + from urllib.parse import urlparse # py2 +except ImportError: + from urlparse import urlparse # py3 + +try: + xrange # py2 +except NameError: + xrange = range # py3 + + +class WebSocketError(Exception): + pass + + +class WebSocketClosedError(WebSocketError): + """Raised by operations on a closed connection. + + .. versionadded:: 3.2 + """ + pass + + +class WebSocketHandler(tornado.web.RequestHandler): + """Subclass this class to create a basic WebSocket handler. + + Override `on_message` to handle incoming messages, and use + `write_message` to send messages to the client. You can also + override `open` and `on_close` to handle opened and closed + connections. + + See http://dev.w3.org/html5/websockets/ for details on the + JavaScript interface. The protocol is specified at + http://tools.ietf.org/html/rfc6455. + + Here is an example WebSocket handler that echos back all received messages + back to the client: + + .. testcode:: + + class EchoWebSocket(tornado.websocket.WebSocketHandler): + def open(self): + print("WebSocket opened") + + def on_message(self, message): + self.write_message(u"You said: " + message) + + def on_close(self): + print("WebSocket closed") + + .. testoutput:: + :hide: + + WebSockets are not standard HTTP connections. 
The "handshake" is + HTTP, but after the handshake, the protocol is + message-based. Consequently, most of the Tornado HTTP facilities + are not available in handlers of this type. The only communication + methods available to you are `write_message()`, `ping()`, and + `close()`. Likewise, your request handler class should implement + `open()` method rather than ``get()`` or ``post()``. + + If you map the handler above to ``/websocket`` in your application, you can + invoke it in JavaScript with:: + + var ws = new WebSocket("ws://localhost:8888/websocket"); + ws.onopen = function() { + ws.send("Hello, world"); + }; + ws.onmessage = function (evt) { + alert(evt.data); + }; + + This script pops up an alert box that says "You said: Hello, world". + + Web browsers allow any site to open a websocket connection to any other, + instead of using the same-origin policy that governs other network + access from javascript. This can be surprising and is a potential + security hole, so since Tornado 4.0 `WebSocketHandler` requires + applications that wish to receive cross-origin websockets to opt in + by overriding the `~WebSocketHandler.check_origin` method (see that + method's docs for details). Failure to do so is the most likely + cause of 403 errors when making a websocket connection. + + When using a secure websocket connection (``wss://``) with a self-signed + certificate, the connection from a browser may fail because it wants + to show the "accept this certificate" dialog but has nowhere to show it. + You must first visit a regular HTML page using the same certificate + to accept it before the websocket connection will succeed. 
+ """ + def __init__(self, application, request, **kwargs): + tornado.web.RequestHandler.__init__(self, application, request, + **kwargs) + self.ws_connection = None + self.close_code = None + self.close_reason = None + self.stream = None + self._on_close_called = False + + @tornado.web.asynchronous + def get(self, *args, **kwargs): + self.open_args = args + self.open_kwargs = kwargs + + # Upgrade header should be present and should be equal to WebSocket + if self.request.headers.get("Upgrade", "").lower() != 'websocket': + self.set_status(400) + log_msg = "Can \"Upgrade\" only to \"WebSocket\"." + self.finish(log_msg) + gen_log.debug(log_msg) + return + + # Connection header should be upgrade. + # Some proxy servers/load balancers + # might mess with it. + headers = self.request.headers + connection = map(lambda s: s.strip().lower(), + headers.get("Connection", "").split(",")) + if 'upgrade' not in connection: + self.set_status(400) + log_msg = "\"Connection\" must be \"Upgrade\"." + self.finish(log_msg) + gen_log.debug(log_msg) + return + + # Handle WebSocket Origin naming convention differences + # The difference between version 8 and 13 is that in 8 the + # client sends a "Sec-Websocket-Origin" header and in 13 it's + # simply "Origin". + if "Origin" in self.request.headers: + origin = self.request.headers.get("Origin") + else: + origin = self.request.headers.get("Sec-Websocket-Origin", None) + + # If there was an origin header, check to make sure it matches + # according to check_origin. When the origin is None, we assume it + # did not come from a browser and that it can be passed on. 
+ if origin is not None and not self.check_origin(origin): + self.set_status(403) + log_msg = "Cross origin websockets not allowed" + self.finish(log_msg) + gen_log.debug(log_msg) + return + + self.stream = self.request.connection.detach() + self.stream.set_close_callback(self.on_connection_close) + + self.ws_connection = self.get_websocket_protocol() + if self.ws_connection: + self.ws_connection.accept_connection() + else: + if not self.stream.closed(): + self.stream.write(tornado.escape.utf8( + "HTTP/1.1 426 Upgrade Required\r\n" + "Sec-WebSocket-Version: 7, 8, 13\r\n\r\n")) + self.stream.close() + + def write_message(self, message, binary=False): + """Sends the given message to the client of this Web Socket. + + The message may be either a string or a dict (which will be + encoded as json). If the ``binary`` argument is false, the + message will be sent as utf8; in binary mode any byte string + is allowed. + + If the connection is already closed, raises `WebSocketClosedError`. + + .. versionchanged:: 3.2 + `WebSocketClosedError` was added (previously a closed connection + would raise an `AttributeError`) + + .. versionchanged:: 4.3 + Returns a `.Future` which can be used for flow control. + """ + if self.ws_connection is None: + raise WebSocketClosedError() + if isinstance(message, dict): + message = tornado.escape.json_encode(message) + return self.ws_connection.write_message(message, binary=binary) + + def select_subprotocol(self, subprotocols): + """Invoked when a new WebSocket requests specific subprotocols. + + ``subprotocols`` is a list of strings identifying the + subprotocols proposed by the client. This method may be + overridden to return one of those strings to select it, or + ``None`` to not select a subprotocol. Failure to select a + subprotocol does not automatically abort the connection, + although clients may close the connection if none of their + proposed subprotocols was selected. 
+ """ + return None + + def get_compression_options(self): + """Override to return compression options for the connection. + + If this method returns None (the default), compression will + be disabled. If it returns a dict (even an empty one), it + will be enabled. The contents of the dict may be used to + control the memory and CPU usage of the compression, + but no such options are currently implemented. + + .. versionadded:: 4.1 + """ + return None + + def open(self, *args, **kwargs): + """Invoked when a new WebSocket is opened. + + The arguments to `open` are extracted from the `tornado.web.URLSpec` + regular expression, just like the arguments to + `tornado.web.RequestHandler.get`. + """ + pass + + def on_message(self, message): + """Handle incoming messages on the WebSocket + + This method must be overridden. + """ + raise NotImplementedError + + def ping(self, data): + """Send ping frame to the remote end.""" + if self.ws_connection is None: + raise WebSocketClosedError() + self.ws_connection.write_ping(data) + + def on_pong(self, data): + """Invoked when the response to a ping frame is received.""" + pass + + def on_close(self): + """Invoked when the WebSocket is closed. + + If the connection was closed cleanly and a status code or reason + phrase was supplied, these values will be available as the attributes + ``self.close_code`` and ``self.close_reason``. + + .. versionchanged:: 4.0 + + Added ``close_code`` and ``close_reason`` attributes. + """ + pass + + def close(self, code=None, reason=None): + """Closes this Web Socket. + + Once the close handshake is successful the socket will be closed. + + ``code`` may be a numeric status code, taken from the values + defined in `RFC 6455 section 7.4.1 + `_. + ``reason`` may be a textual message about why the connection is + closing. These values are made available to the client, but are + not otherwise interpreted by the websocket protocol. + + .. 
versionchanged:: 4.0 + + Added the ``code`` and ``reason`` arguments. + """ + if self.ws_connection: + self.ws_connection.close(code, reason) + self.ws_connection = None + + def check_origin(self, origin): + """Override to enable support for allowing alternate origins. + + The ``origin`` argument is the value of the ``Origin`` HTTP + header, the url responsible for initiating this request. This + method is not called for clients that do not send this header; + such requests are always allowed (because all browsers that + implement WebSockets support this header, and non-browser + clients do not have the same cross-site security concerns). + + Should return True to accept the request or False to reject it. + By default, rejects all requests with an origin on a host other + than this one. + + This is a security protection against cross site scripting attacks on + browsers, since WebSockets are allowed to bypass the usual same-origin + policies and don't use CORS headers. + + To accept all cross-origin traffic (which was the default prior to + Tornado 4.0), simply override this method to always return true:: + + def check_origin(self, origin): + return True + + To allow connections from any subdomain of your site, you might + do something like:: + + def check_origin(self, origin): + parsed_origin = urllib.parse.urlparse(origin) + return parsed_origin.netloc.endswith(".mydomain.com") + + .. versionadded:: 4.0 + """ + parsed_origin = urlparse(origin) + origin = parsed_origin.netloc + origin = origin.lower() + + host = self.request.headers.get("Host") + + # Check to see that origin matches host directly, including ports + return origin == host + + def set_nodelay(self, value): + """Set the no-delay flag for this stream. + + By default, small messages may be delayed and/or combined to minimize + the number of packets sent. This can sometimes cause 200-500ms delays + due to the interaction between Nagle's algorithm and TCP delayed + ACKs. 
To reduce this delay (at the expense of possibly increasing + bandwidth usage), call ``self.set_nodelay(True)`` once the websocket + connection is established. + + See `.BaseIOStream.set_nodelay` for additional details. + + .. versionadded:: 3.1 + """ + self.stream.set_nodelay(value) + + def on_connection_close(self): + if self.ws_connection: + self.ws_connection.on_connection_close() + self.ws_connection = None + if not self._on_close_called: + self._on_close_called = True + self.on_close() + + def send_error(self, *args, **kwargs): + if self.stream is None: + super(WebSocketHandler, self).send_error(*args, **kwargs) + else: + # If we get an uncaught exception during the handshake, + # we have no choice but to abruptly close the connection. + # TODO: for uncaught exceptions after the handshake, + # we can close the connection more gracefully. + self.stream.close() + + def get_websocket_protocol(self): + websocket_version = self.request.headers.get("Sec-WebSocket-Version") + if websocket_version in ("7", "8", "13"): + return WebSocketProtocol13( + self, compression_options=self.get_compression_options()) + + +def _wrap_method(method): + def _disallow_for_websocket(self, *args, **kwargs): + if self.stream is None: + method(self, *args, **kwargs) + else: + raise RuntimeError("Method not supported for Web Sockets") + return _disallow_for_websocket +for method in ["write", "redirect", "set_header", "set_cookie", + "set_status", "flush", "finish"]: + setattr(WebSocketHandler, method, + _wrap_method(getattr(WebSocketHandler, method))) + + +class WebSocketProtocol(object): + """Base class for WebSocket protocol versions. + """ + def __init__(self, handler): + self.handler = handler + self.request = handler.request + self.stream = handler.stream + self.client_terminated = False + self.server_terminated = False + + def _run_callback(self, callback, *args, **kwargs): + """Runs the given callback with exception handling. 
+ + On error, aborts the websocket connection and returns False. + """ + try: + callback(*args, **kwargs) + except Exception: + app_log.error("Uncaught exception in %s", + self.request.path, exc_info=True) + self._abort() + + def on_connection_close(self): + self._abort() + + def _abort(self): + """Instantly aborts the WebSocket connection by closing the socket""" + self.client_terminated = True + self.server_terminated = True + self.stream.close() # forcibly tear down the connection + self.close() # let the subclass cleanup + + +class _PerMessageDeflateCompressor(object): + def __init__(self, persistent, max_wbits): + if max_wbits is None: + max_wbits = zlib.MAX_WBITS + # There is no symbolic constant for the minimum wbits value. + if not (8 <= max_wbits <= zlib.MAX_WBITS): + raise ValueError("Invalid max_wbits value %r; allowed range 8-%d", + max_wbits, zlib.MAX_WBITS) + self._max_wbits = max_wbits + if persistent: + self._compressor = self._create_compressor() + else: + self._compressor = None + + def _create_compressor(self): + return zlib.compressobj(tornado.web.GZipContentEncoding.GZIP_LEVEL, + zlib.DEFLATED, -self._max_wbits) + + def compress(self, data): + compressor = self._compressor or self._create_compressor() + data = (compressor.compress(data) + + compressor.flush(zlib.Z_SYNC_FLUSH)) + assert data.endswith(b'\x00\x00\xff\xff') + return data[:-4] + + +class _PerMessageDeflateDecompressor(object): + def __init__(self, persistent, max_wbits): + if max_wbits is None: + max_wbits = zlib.MAX_WBITS + if not (8 <= max_wbits <= zlib.MAX_WBITS): + raise ValueError("Invalid max_wbits value %r; allowed range 8-%d", + max_wbits, zlib.MAX_WBITS) + self._max_wbits = max_wbits + if persistent: + self._decompressor = self._create_decompressor() + else: + self._decompressor = None + + def _create_decompressor(self): + return zlib.decompressobj(-self._max_wbits) + + def decompress(self, data): + decompressor = self._decompressor or self._create_decompressor() + return 
decompressor.decompress(data + b'\x00\x00\xff\xff') + + +class WebSocketProtocol13(WebSocketProtocol): + """Implementation of the WebSocket protocol from RFC 6455. + + This class supports versions 7 and 8 of the protocol in addition to the + final version 13. + """ + # Bit masks for the first byte of a frame. + FIN = 0x80 + RSV1 = 0x40 + RSV2 = 0x20 + RSV3 = 0x10 + RSV_MASK = RSV1 | RSV2 | RSV3 + OPCODE_MASK = 0x0f + + def __init__(self, handler, mask_outgoing=False, + compression_options=None): + WebSocketProtocol.__init__(self, handler) + self.mask_outgoing = mask_outgoing + self._final_frame = False + self._frame_opcode = None + self._masked_frame = None + self._frame_mask = None + self._frame_length = None + self._fragmented_message_buffer = None + self._fragmented_message_opcode = None + self._waiting = None + self._compression_options = compression_options + self._decompressor = None + self._compressor = None + self._frame_compressed = None + # The total uncompressed size of all messages received or sent. + # Unicode messages are encoded to utf8. + # Only for testing; subject to change. + self._message_bytes_in = 0 + self._message_bytes_out = 0 + # The total size of all packets received or sent. Includes + # the effect of compression, frame overhead, and control frames. 
+ self._wire_bytes_in = 0 + self._wire_bytes_out = 0 + + def accept_connection(self): + try: + self._handle_websocket_headers() + self._accept_connection() + except ValueError: + gen_log.debug("Malformed WebSocket request received", + exc_info=True) + self._abort() + return + + def _handle_websocket_headers(self): + """Verifies all invariant- and required headers + + If a header is missing or have an incorrect value ValueError will be + raised + """ + fields = ("Host", "Sec-Websocket-Key", "Sec-Websocket-Version") + if not all(map(lambda f: self.request.headers.get(f), fields)): + raise ValueError("Missing/Invalid WebSocket headers") + + @staticmethod + def compute_accept_value(key): + """Computes the value for the Sec-WebSocket-Accept header, + given the value for Sec-WebSocket-Key. + """ + sha1 = hashlib.sha1() + sha1.update(utf8(key)) + sha1.update(b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11") # Magic value + return native_str(base64.b64encode(sha1.digest())) + + def _challenge_response(self): + return WebSocketProtocol13.compute_accept_value( + self.request.headers.get("Sec-Websocket-Key")) + + def _accept_connection(self): + subprotocol_header = '' + subprotocols = self.request.headers.get("Sec-WebSocket-Protocol", '') + subprotocols = [s.strip() for s in subprotocols.split(',')] + if subprotocols: + selected = self.handler.select_subprotocol(subprotocols) + if selected: + assert selected in subprotocols + subprotocol_header = ("Sec-WebSocket-Protocol: %s\r\n" + % selected) + + extension_header = '' + extensions = self._parse_extensions_header(self.request.headers) + for ext in extensions: + if (ext[0] == 'permessage-deflate' and + self._compression_options is not None): + # TODO: negotiate parameters if compression_options + # specifies limits. + self._create_compressors('server', ext[1]) + if ('client_max_window_bits' in ext[1] and + ext[1]['client_max_window_bits'] is None): + # Don't echo an offered client_max_window_bits + # parameter with no value. 
+ del ext[1]['client_max_window_bits'] + extension_header = ('Sec-WebSocket-Extensions: %s\r\n' % + httputil._encode_header( + 'permessage-deflate', ext[1])) + break + + if self.stream.closed(): + self._abort() + return + self.stream.write(tornado.escape.utf8( + "HTTP/1.1 101 Switching Protocols\r\n" + "Upgrade: websocket\r\n" + "Connection: Upgrade\r\n" + "Sec-WebSocket-Accept: %s\r\n" + "%s%s" + "\r\n" % (self._challenge_response(), + subprotocol_header, extension_header))) + + self._run_callback(self.handler.open, *self.handler.open_args, + **self.handler.open_kwargs) + self._receive_frame() + + def _parse_extensions_header(self, headers): + extensions = headers.get("Sec-WebSocket-Extensions", '') + if extensions: + return [httputil._parse_header(e.strip()) + for e in extensions.split(',')] + return [] + + def _process_server_headers(self, key, headers): + """Process the headers sent by the server to this client connection. + + 'key' is the websocket handshake challenge/response key. + """ + assert headers['Upgrade'].lower() == 'websocket' + assert headers['Connection'].lower() == 'upgrade' + accept = self.compute_accept_value(key) + assert headers['Sec-Websocket-Accept'] == accept + + extensions = self._parse_extensions_header(headers) + for ext in extensions: + if (ext[0] == 'permessage-deflate' and + self._compression_options is not None): + self._create_compressors('client', ext[1]) + else: + raise ValueError("unsupported extension %r", ext) + + def _get_compressor_options(self, side, agreed_parameters): + """Converts a websocket agreed_parameters set to keyword arguments + for our compressor objects. 
+ """ + options = dict( + persistent=(side + '_no_context_takeover') not in agreed_parameters) + wbits_header = agreed_parameters.get(side + '_max_window_bits', None) + if wbits_header is None: + options['max_wbits'] = zlib.MAX_WBITS + else: + options['max_wbits'] = int(wbits_header) + return options + + def _create_compressors(self, side, agreed_parameters): + # TODO: handle invalid parameters gracefully + allowed_keys = set(['server_no_context_takeover', + 'client_no_context_takeover', + 'server_max_window_bits', + 'client_max_window_bits']) + for key in agreed_parameters: + if key not in allowed_keys: + raise ValueError("unsupported compression parameter %r" % key) + other_side = 'client' if (side == 'server') else 'server' + self._compressor = _PerMessageDeflateCompressor( + **self._get_compressor_options(side, agreed_parameters)) + self._decompressor = _PerMessageDeflateDecompressor( + **self._get_compressor_options(other_side, agreed_parameters)) + + def _write_frame(self, fin, opcode, data, flags=0): + if fin: + finbit = self.FIN + else: + finbit = 0 + frame = struct.pack("B", finbit | opcode | flags) + l = len(data) + if self.mask_outgoing: + mask_bit = 0x80 + else: + mask_bit = 0 + if l < 126: + frame += struct.pack("B", l | mask_bit) + elif l <= 0xFFFF: + frame += struct.pack("!BH", 126 | mask_bit, l) + else: + frame += struct.pack("!BQ", 127 | mask_bit, l) + if self.mask_outgoing: + mask = os.urandom(4) + data = mask + _websocket_mask(mask, data) + frame += data + self._wire_bytes_out += len(frame) + try: + return self.stream.write(frame) + except StreamClosedError: + self._abort() + + def write_message(self, message, binary=False): + """Sends the given message to the client of this Web Socket.""" + if binary: + opcode = 0x2 + else: + opcode = 0x1 + message = tornado.escape.utf8(message) + assert isinstance(message, bytes) + self._message_bytes_out += len(message) + flags = 0 + if self._compressor: + message = self._compressor.compress(message) + flags 
|= self.RSV1 + return self._write_frame(True, opcode, message, flags=flags) + + def write_ping(self, data): + """Send ping frame.""" + assert isinstance(data, bytes) + self._write_frame(True, 0x9, data) + + def _receive_frame(self): + try: + self.stream.read_bytes(2, self._on_frame_start) + except StreamClosedError: + self._abort() + + def _on_frame_start(self, data): + self._wire_bytes_in += len(data) + header, payloadlen = struct.unpack("BB", data) + self._final_frame = header & self.FIN + reserved_bits = header & self.RSV_MASK + self._frame_opcode = header & self.OPCODE_MASK + self._frame_opcode_is_control = self._frame_opcode & 0x8 + if self._decompressor is not None and self._frame_opcode != 0: + self._frame_compressed = bool(reserved_bits & self.RSV1) + reserved_bits &= ~self.RSV1 + if reserved_bits: + # client is using as-yet-undefined extensions; abort + self._abort() + return + self._masked_frame = bool(payloadlen & 0x80) + payloadlen = payloadlen & 0x7f + if self._frame_opcode_is_control and payloadlen >= 126: + # control frames must have payload < 126 + self._abort() + return + try: + if payloadlen < 126: + self._frame_length = payloadlen + if self._masked_frame: + self.stream.read_bytes(4, self._on_masking_key) + else: + self.stream.read_bytes(self._frame_length, + self._on_frame_data) + elif payloadlen == 126: + self.stream.read_bytes(2, self._on_frame_length_16) + elif payloadlen == 127: + self.stream.read_bytes(8, self._on_frame_length_64) + except StreamClosedError: + self._abort() + + def _on_frame_length_16(self, data): + self._wire_bytes_in += len(data) + self._frame_length = struct.unpack("!H", data)[0] + try: + if self._masked_frame: + self.stream.read_bytes(4, self._on_masking_key) + else: + self.stream.read_bytes(self._frame_length, self._on_frame_data) + except StreamClosedError: + self._abort() + + def _on_frame_length_64(self, data): + self._wire_bytes_in += len(data) + self._frame_length = struct.unpack("!Q", data)[0] + try: + if 
self._masked_frame: + self.stream.read_bytes(4, self._on_masking_key) + else: + self.stream.read_bytes(self._frame_length, self._on_frame_data) + except StreamClosedError: + self._abort() + + def _on_masking_key(self, data): + self._wire_bytes_in += len(data) + self._frame_mask = data + try: + self.stream.read_bytes(self._frame_length, + self._on_masked_frame_data) + except StreamClosedError: + self._abort() + + def _on_masked_frame_data(self, data): + # Don't touch _wire_bytes_in; we'll do it in _on_frame_data. + self._on_frame_data(_websocket_mask(self._frame_mask, data)) + + def _on_frame_data(self, data): + self._wire_bytes_in += len(data) + if self._frame_opcode_is_control: + # control frames may be interleaved with a series of fragmented + # data frames, so control frames must not interact with + # self._fragmented_* + if not self._final_frame: + # control frames must not be fragmented + self._abort() + return + opcode = self._frame_opcode + elif self._frame_opcode == 0: # continuation frame + if self._fragmented_message_buffer is None: + # nothing to continue + self._abort() + return + self._fragmented_message_buffer += data + if self._final_frame: + opcode = self._fragmented_message_opcode + data = self._fragmented_message_buffer + self._fragmented_message_buffer = None + else: # start of new data message + if self._fragmented_message_buffer is not None: + # can't start new message until the old one is finished + self._abort() + return + if self._final_frame: + opcode = self._frame_opcode + else: + self._fragmented_message_opcode = self._frame_opcode + self._fragmented_message_buffer = data + + if self._final_frame: + self._handle_message(opcode, data) + + if not self.client_terminated: + self._receive_frame() + + def _handle_message(self, opcode, data): + if self.client_terminated: + return + + if self._frame_compressed: + data = self._decompressor.decompress(data) + + if opcode == 0x1: + # UTF-8 data + self._message_bytes_in += len(data) + try: + decoded 
= data.decode("utf-8") + except UnicodeDecodeError: + self._abort() + return + self._run_callback(self.handler.on_message, decoded) + elif opcode == 0x2: + # Binary data + self._message_bytes_in += len(data) + self._run_callback(self.handler.on_message, data) + elif opcode == 0x8: + # Close + self.client_terminated = True + if len(data) >= 2: + self.handler.close_code = struct.unpack('>H', data[:2])[0] + if len(data) > 2: + self.handler.close_reason = to_unicode(data[2:]) + # Echo the received close code, if any (RFC 6455 section 5.5.1). + self.close(self.handler.close_code) + elif opcode == 0x9: + # Ping + self._write_frame(True, 0xA, data) + elif opcode == 0xA: + # Pong + self._run_callback(self.handler.on_pong, data) + else: + self._abort() + + def close(self, code=None, reason=None): + """Closes the WebSocket connection.""" + if not self.server_terminated: + if not self.stream.closed(): + if code is None and reason is not None: + code = 1000 # "normal closure" status code + if code is None: + close_data = b'' + else: + close_data = struct.pack('>H', code) + if reason is not None: + close_data += utf8(reason) + self._write_frame(True, 0x8, close_data) + self.server_terminated = True + if self.client_terminated: + if self._waiting is not None: + self.stream.io_loop.remove_timeout(self._waiting) + self._waiting = None + self.stream.close() + elif self._waiting is None: + # Give the client a few seconds to complete a clean shutdown, + # otherwise just close the connection. + self._waiting = self.stream.io_loop.add_timeout( + self.stream.io_loop.time() + 5, self._abort) + + +class WebSocketClientConnection(simple_httpclient._HTTPConnection): + """WebSocket client connection. + + This class should not be instantiated directly; use the + `websocket_connect` function instead. 
+ """ + def __init__(self, io_loop, request, on_message_callback=None, + compression_options=None): + self.compression_options = compression_options + self.connect_future = TracebackFuture() + self.protocol = None + self.read_future = None + self.read_queue = collections.deque() + self.key = base64.b64encode(os.urandom(16)) + self._on_message_callback = on_message_callback + self.close_code = self.close_reason = None + + scheme, sep, rest = request.url.partition(':') + scheme = {'ws': 'http', 'wss': 'https'}[scheme] + request.url = scheme + sep + rest + request.headers.update({ + 'Upgrade': 'websocket', + 'Connection': 'Upgrade', + 'Sec-WebSocket-Key': self.key, + 'Sec-WebSocket-Version': '13', + }) + if self.compression_options is not None: + # Always offer to let the server set our max_wbits (and even though + # we don't offer it, we will accept a client_no_context_takeover + # from the server). + # TODO: set server parameters for deflate extension + # if requested in self.compression_options. + request.headers['Sec-WebSocket-Extensions'] = ( + 'permessage-deflate; client_max_window_bits') + + self.tcp_client = TCPClient(io_loop=io_loop) + super(WebSocketClientConnection, self).__init__( + io_loop, None, request, lambda: None, self._on_http_response, + 104857600, self.tcp_client, 65536, 104857600) + + def close(self, code=None, reason=None): + """Closes the websocket connection. + + ``code`` and ``reason`` are documented under + `WebSocketHandler.close`. + + .. versionadded:: 3.2 + + .. versionchanged:: 4.0 + + Added the ``code`` and ``reason`` arguments. 
+ """ + if self.protocol is not None: + self.protocol.close(code, reason) + self.protocol = None + + def on_connection_close(self): + if not self.connect_future.done(): + self.connect_future.set_exception(StreamClosedError()) + self.on_message(None) + self.tcp_client.close() + super(WebSocketClientConnection, self).on_connection_close() + + def _on_http_response(self, response): + if not self.connect_future.done(): + if response.error: + self.connect_future.set_exception(response.error) + else: + self.connect_future.set_exception(WebSocketError( + "Non-websocket response")) + + def headers_received(self, start_line, headers): + if start_line.code != 101: + return super(WebSocketClientConnection, self).headers_received( + start_line, headers) + + self.headers = headers + self.protocol = self.get_websocket_protocol() + self.protocol._process_server_headers(self.key, self.headers) + self.protocol._receive_frame() + + if self._timeout is not None: + self.io_loop.remove_timeout(self._timeout) + self._timeout = None + + self.stream = self.connection.detach() + self.stream.set_close_callback(self.on_connection_close) + # Once we've taken over the connection, clear the final callback + # we set on the http request. This deactivates the error handling + # in simple_httpclient that would otherwise interfere with our + # ability to see exceptions. + self.final_callback = None + + self.connect_future.set_result(self) + + def write_message(self, message, binary=False): + """Sends a message to the WebSocket server.""" + return self.protocol.write_message(message, binary) + + def read_message(self, callback=None): + """Reads a message from the WebSocket server. + + If on_message_callback was specified at WebSocket + initialization, this function will never return messages + + Returns a future whose result is the message, or None + if the connection is closed. If a callback argument + is given it will be called with the future when it is + ready. 
+ """ + assert self.read_future is None + future = TracebackFuture() + if self.read_queue: + future.set_result(self.read_queue.popleft()) + else: + self.read_future = future + if callback is not None: + self.io_loop.add_future(future, callback) + return future + + def on_message(self, message): + if self._on_message_callback: + self._on_message_callback(message) + elif self.read_future is not None: + self.read_future.set_result(message) + self.read_future = None + else: + self.read_queue.append(message) + + def on_pong(self, data): + pass + + def get_websocket_protocol(self): + return WebSocketProtocol13(self, mask_outgoing=True, + compression_options=self.compression_options) + + +def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None, + on_message_callback=None, compression_options=None): + """Client-side websocket support. + + Takes a url and returns a Future whose result is a + `WebSocketClientConnection`. + + ``compression_options`` is interpreted in the same way as the + return value of `.WebSocketHandler.get_compression_options`. + + The connection supports two styles of operation. In the coroutine + style, the application typically calls + `~.WebSocketClientConnection.read_message` in a loop:: + + conn = yield websocket_connect(url) + while True: + msg = yield conn.read_message() + if msg is None: break + # Do something with msg + + In the callback style, pass an ``on_message_callback`` to + ``websocket_connect``. In both styles, a message of ``None`` + indicates that the connection has been closed. + + .. versionchanged:: 3.2 + Also accepts ``HTTPRequest`` objects in place of urls. + + .. versionchanged:: 4.1 + Added ``compression_options`` and ``on_message_callback``. + The ``io_loop`` argument is deprecated. 
+ """ + if io_loop is None: + io_loop = IOLoop.current() + if isinstance(url, httpclient.HTTPRequest): + assert connect_timeout is None + request = url + # Copy and convert the headers dict/object (see comments in + # AsyncHTTPClient.fetch) + request.headers = httputil.HTTPHeaders(request.headers) + else: + request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout) + request = httpclient._RequestProxy( + request, httpclient.HTTPRequest._DEFAULTS) + conn = WebSocketClientConnection(io_loop, request, + on_message_callback=on_message_callback, + compression_options=compression_options) + if callback is not None: + io_loop.add_future(conn.connect_future, callback) + return conn.connect_future diff --git a/python/tornado/wsgi.py b/python/tornado/wsgi.py new file mode 100644 index 000000000..59e6c559f --- /dev/null +++ b/python/tornado/wsgi.py @@ -0,0 +1,358 @@ +#!/usr/bin/env python +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""WSGI support for the Tornado web framework. + +WSGI is the Python standard for web servers, and allows for interoperability +between Tornado and other Python web frameworks and servers. This module +provides WSGI support in two ways: + +* `WSGIAdapter` converts a `tornado.web.Application` to the WSGI application + interface. This is useful for running a Tornado app on another + HTTP server, such as Google App Engine. See the `WSGIAdapter` class + documentation for limitations that apply. 
+* `WSGIContainer` lets you run other WSGI applications and frameworks on the + Tornado HTTP server. For example, with this class you can mix Django + and Tornado handlers in a single server. +""" + +from __future__ import absolute_import, division, print_function, with_statement + +import sys +from io import BytesIO +import tornado + +from tornado.concurrent import Future +from tornado import escape +from tornado import httputil +from tornado.log import access_log +from tornado import web +from tornado.escape import native_str +from tornado.util import unicode_type + + +try: + import urllib.parse as urllib_parse # py3 +except ImportError: + import urllib as urllib_parse + +# PEP 3333 specifies that WSGI on python 3 generally deals with byte strings +# that are smuggled inside objects of type unicode (via the latin1 encoding). +# These functions are like those in the tornado.escape module, but defined +# here to minimize the temptation to use them in non-wsgi contexts. +if str is unicode_type: + def to_wsgi_str(s): + assert isinstance(s, bytes) + return s.decode('latin1') + + def from_wsgi_str(s): + assert isinstance(s, str) + return s.encode('latin1') +else: + def to_wsgi_str(s): + assert isinstance(s, bytes) + return s + + def from_wsgi_str(s): + assert isinstance(s, str) + return s + + +class WSGIApplication(web.Application): + """A WSGI equivalent of `tornado.web.Application`. + + .. deprecated:: 4.0 + + Use a regular `.Application` and wrap it in `WSGIAdapter` instead. + """ + def __call__(self, environ, start_response): + return WSGIAdapter(self)(environ, start_response) + + +# WSGI has no facilities for flow control, so just return an already-done +# Future when the interface requires it. 
+_dummy_future = Future() +_dummy_future.set_result(None) + + +class _WSGIConnection(httputil.HTTPConnection): + def __init__(self, method, start_response, context): + self.method = method + self.start_response = start_response + self.context = context + self._write_buffer = [] + self._finished = False + self._expected_content_remaining = None + self._error = None + + def set_close_callback(self, callback): + # WSGI has no facility for detecting a closed connection mid-request, + # so we can simply ignore the callback. + pass + + def write_headers(self, start_line, headers, chunk=None, callback=None): + if self.method == 'HEAD': + self._expected_content_remaining = 0 + elif 'Content-Length' in headers: + self._expected_content_remaining = int(headers['Content-Length']) + else: + self._expected_content_remaining = None + self.start_response( + '%s %s' % (start_line.code, start_line.reason), + [(native_str(k), native_str(v)) for (k, v) in headers.get_all()]) + if chunk is not None: + self.write(chunk, callback) + elif callback is not None: + callback() + return _dummy_future + + def write(self, chunk, callback=None): + if self._expected_content_remaining is not None: + self._expected_content_remaining -= len(chunk) + if self._expected_content_remaining < 0: + self._error = httputil.HTTPOutputError( + "Tried to write more data than Content-Length") + raise self._error + self._write_buffer.append(chunk) + if callback is not None: + callback() + return _dummy_future + + def finish(self): + if (self._expected_content_remaining is not None and + self._expected_content_remaining != 0): + self._error = httputil.HTTPOutputError( + "Tried to write %d bytes less than Content-Length" % + self._expected_content_remaining) + raise self._error + self._finished = True + + +class _WSGIRequestContext(object): + def __init__(self, remote_ip, protocol): + self.remote_ip = remote_ip + self.protocol = protocol + + def __str__(self): + return self.remote_ip + + +class WSGIAdapter(object): 
+ """Converts a `tornado.web.Application` instance into a WSGI application. + + Example usage:: + + import tornado.web + import tornado.wsgi + import wsgiref.simple_server + + class MainHandler(tornado.web.RequestHandler): + def get(self): + self.write("Hello, world") + + if __name__ == "__main__": + application = tornado.web.Application([ + (r"/", MainHandler), + ]) + wsgi_app = tornado.wsgi.WSGIAdapter(application) + server = wsgiref.simple_server.make_server('', 8888, wsgi_app) + server.serve_forever() + + See the `appengine demo + `_ + for an example of using this module to run a Tornado app on Google + App Engine. + + In WSGI mode asynchronous methods are not supported. This means + that it is not possible to use `.AsyncHTTPClient`, or the + `tornado.auth` or `tornado.websocket` modules. + + .. versionadded:: 4.0 + """ + def __init__(self, application): + if isinstance(application, WSGIApplication): + self.application = lambda request: web.Application.__call__( + application, request) + else: + self.application = application + + def __call__(self, environ, start_response): + method = environ["REQUEST_METHOD"] + uri = urllib_parse.quote(from_wsgi_str(environ.get("SCRIPT_NAME", ""))) + uri += urllib_parse.quote(from_wsgi_str(environ.get("PATH_INFO", ""))) + if environ.get("QUERY_STRING"): + uri += "?" 
+ environ["QUERY_STRING"] + headers = httputil.HTTPHeaders() + if environ.get("CONTENT_TYPE"): + headers["Content-Type"] = environ["CONTENT_TYPE"] + if environ.get("CONTENT_LENGTH"): + headers["Content-Length"] = environ["CONTENT_LENGTH"] + for key in environ: + if key.startswith("HTTP_"): + headers[key[5:].replace("_", "-")] = environ[key] + if headers.get("Content-Length"): + body = environ["wsgi.input"].read( + int(headers["Content-Length"])) + else: + body = b"" + protocol = environ["wsgi.url_scheme"] + remote_ip = environ.get("REMOTE_ADDR", "") + if environ.get("HTTP_HOST"): + host = environ["HTTP_HOST"] + else: + host = environ["SERVER_NAME"] + connection = _WSGIConnection(method, start_response, + _WSGIRequestContext(remote_ip, protocol)) + request = httputil.HTTPServerRequest( + method, uri, "HTTP/1.1", headers=headers, body=body, + host=host, connection=connection) + request._parse_body() + self.application(request) + if connection._error: + raise connection._error + if not connection._finished: + raise Exception("request did not finish synchronously") + return connection._write_buffer + + +class WSGIContainer(object): + r"""Makes a WSGI-compatible function runnable on Tornado's HTTP server. + + .. warning:: + + WSGI is a *synchronous* interface, while Tornado's concurrency model + is based on single-threaded asynchronous execution. This means that + running a WSGI app with Tornado's `WSGIContainer` is *less scalable* + than running the same app in a multi-threaded WSGI server like + ``gunicorn`` or ``uwsgi``. Use `WSGIContainer` only when there are + benefits to combining Tornado and WSGI in the same process that + outweigh the reduced scalability. + + Wrap a WSGI function in a `WSGIContainer` and pass it to `.HTTPServer` to + run it. 
For example:: + + def simple_app(environ, start_response): + status = "200 OK" + response_headers = [("Content-type", "text/plain")] + start_response(status, response_headers) + return ["Hello world!\n"] + + container = tornado.wsgi.WSGIContainer(simple_app) + http_server = tornado.httpserver.HTTPServer(container) + http_server.listen(8888) + tornado.ioloop.IOLoop.current().start() + + This class is intended to let other frameworks (Django, web.py, etc) + run on the Tornado HTTP server and I/O loop. + + The `tornado.web.FallbackHandler` class is often useful for mixing + Tornado and WSGI apps in the same server. See + https://github.com/bdarnell/django-tornado-demo for a complete example. + """ + def __init__(self, wsgi_application): + self.wsgi_application = wsgi_application + + def __call__(self, request): + data = {} + response = [] + + def start_response(status, response_headers, exc_info=None): + data["status"] = status + data["headers"] = response_headers + return response.append + app_response = self.wsgi_application( + WSGIContainer.environ(request), start_response) + try: + response.extend(app_response) + body = b"".join(response) + finally: + if hasattr(app_response, "close"): + app_response.close() + if not data: + raise Exception("WSGI app did not call start_response") + + status_code, reason = data["status"].split(' ', 1) + status_code = int(status_code) + headers = data["headers"] + header_set = set(k.lower() for (k, v) in headers) + body = escape.utf8(body) + if status_code != 304: + if "content-length" not in header_set: + headers.append(("Content-Length", str(len(body)))) + if "content-type" not in header_set: + headers.append(("Content-Type", "text/html; charset=UTF-8")) + if "server" not in header_set: + headers.append(("Server", "TornadoServer/%s" % tornado.version)) + + start_line = httputil.ResponseStartLine("HTTP/1.1", status_code, reason) + header_obj = httputil.HTTPHeaders() + for key, value in headers: + header_obj.add(key, value) + 
request.connection.write_headers(start_line, header_obj, chunk=body) + request.connection.finish() + self._log(status_code, request) + + @staticmethod + def environ(request): + """Converts a `tornado.httputil.HTTPServerRequest` to a WSGI environment. + """ + hostport = request.host.split(":") + if len(hostport) == 2: + host = hostport[0] + port = int(hostport[1]) + else: + host = request.host + port = 443 if request.protocol == "https" else 80 + environ = { + "REQUEST_METHOD": request.method, + "SCRIPT_NAME": "", + "PATH_INFO": to_wsgi_str(escape.url_unescape( + request.path, encoding=None, plus=False)), + "QUERY_STRING": request.query, + "REMOTE_ADDR": request.remote_ip, + "SERVER_NAME": host, + "SERVER_PORT": str(port), + "SERVER_PROTOCOL": request.version, + "wsgi.version": (1, 0), + "wsgi.url_scheme": request.protocol, + "wsgi.input": BytesIO(escape.utf8(request.body)), + "wsgi.errors": sys.stderr, + "wsgi.multithread": False, + "wsgi.multiprocess": True, + "wsgi.run_once": False, + } + if "Content-Type" in request.headers: + environ["CONTENT_TYPE"] = request.headers.pop("Content-Type") + if "Content-Length" in request.headers: + environ["CONTENT_LENGTH"] = request.headers.pop("Content-Length") + for key, value in request.headers.items(): + environ["HTTP_" + key.replace("-", "_").upper()] = value + return environ + + def _log(self, status_code, request): + if status_code < 400: + log_method = access_log.info + elif status_code < 500: + log_method = access_log.warning + else: + log_method = access_log.error + request_time = 1000.0 * request.request_time() + summary = request.method + " " + request.uri + " (" + \ + request.remote_ip + ")" + log_method("%d %s %.2fms", status_code, summary, request_time) + + +HTTPRequest = httputil.HTTPServerRequest diff --git a/server/input_methods/chewing/chewing_config.py b/server/input_methods/chewing/chewing_config.py index 68c7762fb..2abc59d84 100644 --- a/server/input_methods/chewing/chewing_config.py +++ 
b/server/input_methods/chewing/chewing_config.py @@ -4,6 +4,9 @@ DEF_FONT_SIZE = 16 +# from libchewing/include/internal/userphrase-private.h +DB_NAME = "chewing.sqlite3" + selKeys=( "1234567890", "asdfghjkl;", @@ -48,6 +51,9 @@ def getConfigDir(self): def getConfigFile(self): return os.path.join(self.getConfigDir(), "config.json") + def getUserPhrase(self): + return os.path.join(cfg.getConfigDir(), DB_NAME) + def getSelKeys(self): return selKeys[self.selKeyType] diff --git a/server/input_methods/chewing/chewing_ime.py b/server/input_methods/chewing/chewing_ime.py index ae3bd3057..222b66045 100644 --- a/server/input_methods/chewing/chewing_ime.py +++ b/server/input_methods/chewing/chewing_ime.py @@ -11,9 +11,6 @@ FULLSHAPE_MODE = 1 HALFSHAPE_MODE = 0 -# from libchewing/include/internal/userphrase-private.h -DB_NAME = "chewing.sqlite3" - keyNames = { VK_ESCAPE: "Esc", VK_RETURN: "Enter", @@ -53,7 +50,7 @@ def onActivate(self): TextService.onActivate(self) # load libchewing context datadir = self.datadir.encode("UTF-8") - user_phrase = os.path.join(cfg.getConfigDir(), DB_NAME).encode("UTF-8") + user_phrase = cfg.getUserPhrase().encode("UTF-8") ctx = ChewingContext(syspath = datadir, userpath = user_phrase) self.ctx = ctx