diff --git a/.editorconfig b/.editorconfig
index 22fb1f90..77f690c9 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -10,5 +10,9 @@ insert_final_newline = true
 charset = utf-8
 end_of_line = lf
 
+[{*.yml,*.yaml}]
+indent_style = space
+indent_size = 2
+
 [Makefile]
 indent_style = tab
diff --git a/.github/workflows/gh-pages.yml b/.github/workflows/deploy-docs.yml
similarity index 66%
rename from .github/workflows/gh-pages.yml
rename to .github/workflows/deploy-docs.yml
index 68f1475b..7876651e 100644
--- a/.github/workflows/gh-pages.yml
+++ b/.github/workflows/deploy-docs.yml
@@ -8,28 +8,34 @@ on:
   release:
     types: [created]
     branches:
-      - 'master'
+      - master
 
 jobs:
   build:
-    name: "Build docs"
+    name: Build docs
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/setup-python@v4
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         with:
           fetch-depth: 0  # otherwise, you will failed to push refs to dest repo
-      - name: "Install runtime dependencies in order to get package metadata"
-        run: "scripts/install"
-      - name: "Install deps and build with Sphinx"
-        run: make docs
-      - name: "Upload artifacts"
-        uses: actions/upload-pages-artifact@v1
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.12'
+
+      - name: Install dependencies
+        run: pip install -r requirements.txt
+
+      - name: Build docs
+        run: scripts/build-docs.sh
+
+      - name: Upload artifacts
+        uses: actions/upload-pages-artifact@v3
         with:
           # Upload built docs
-          path: "./Documentation"
+          path: "./site"
 
   deploy:
-    name: "Deploy docs"
+    name: Deploy docs
     if: github.event_name == 'release' && github.event.action == 'published'
     needs: build
     runs-on: ubuntu-latest
@@ -42,6 +48,6 @@ jobs:
       name: github-pages
       url: ${{ steps.deployment.outputs.page_url }}
     steps:
-      - uses: actions/deploy-pages@v1
+      - uses: actions/deploy-pages@v4
         id: deployment
         name: "Deploy to GitHub Pages"
diff --git a/.github/workflows/dist.yml b/.github/workflows/dist.yml
deleted file mode 100644
index 52fe3ba1..00000000
--- a/.github/workflows/dist.yml
+++ /dev/null
@@ -1,47 +0,0 @@
-# vim:ts=2:sw=2:et:ai:sts=2
-name: 'Build distribution'
-
-on:
-  # Only run when release is created in the master branch
-  release:
-    types: [created]
-    branches:
-      - 'master'
-
-jobs:
-  build:
-    name: 'Build distributable files'
-    runs-on: 'ubuntu-latest'
-    steps:
-      - uses: actions/checkout@v3
-        name: 'Checkout source repository'
-        with:
-          fetch-depth: 0
-
-      - uses: actions/setup-python@v4
-
-      - name: 'Build sdist and wheel'
-        run: python3 setup.py sdist bdist_wheel
-
-      - uses: actions/upload-artifact@v2
-        name: 'Upload build artifacts'
-        with:
-          path: 'dist/*'
-
-  upload_pypi:
-    name: 'Upload packages'
-    needs: ['build']
-    runs-on: 'ubuntu-latest'
-    if: github.event_name == 'release' && github.event.action == 'created'
-    steps:
-      - uses: actions/download-artifact@v3
-        name: 'Download artifacts'
-        with:
-          name: 'artifact'
-          path: 'dist'
-
-      - uses: pypa/gh-action-pypi-publish@release/v1
-        name: "Publish package to PyPI"
-        with:
-          user: '__token__'
-          password: '${{ secrets.PYPI_API_TOKEN }}'
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
new file mode 100644
index 00000000..54021005
--- /dev/null
+++ b/.github/workflows/publish.yml
@@ -0,0 +1,49 @@
+name: Publish Package
+
+on:
+  # Only run when release is created in the master branch
+  release:
+    types: [created]
+    branches:
+      - 'master'
+
+jobs:
+  build:
+    name: Build distributable files
+    runs-on: 'ubuntu-latest'
+    steps:
+      - name: 'Checkout source repository'
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - uses: actions/setup-python@v5
+
+      - name: Install build dependencies
+        run: pip install build twine
+
+      - name: 'Build package'
+        run: scripts/build.sh
+
+      - name: Upload build artifacts
+        uses: actions/upload-artifact@v4
+        with:
+          path: 'dist/*'
+
+  upload_pypi:
+    name: Upload packages
+    needs: ['build']
+    runs-on: 'ubuntu-latest'
+    if: github.event_name == 'release' && github.event.action == 'created'
+    steps:
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
+        with:
+          name: artifact
+          path: dist
+
+      - name: Publish package to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          user: '__token__'
+          password: '${{ secrets.PYPI_API_TOKEN }}'
diff --git a/.github/workflows/python-package.yml b/.github/workflows/tests.yml
similarity index 63%
rename from .github/workflows/python-package.yml
rename to .github/workflows/tests.yml
index fbeb126d..0a5bda48 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/tests.yml
@@ -1,7 +1,7 @@
 # This workflow will install Python dependencies, run tests and lint with a variety of Python versions
-# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
+# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
 
-name: Run Python tests
+name: Python package
 
 on:
   push:
@@ -31,24 +31,30 @@ jobs:
         experimental: true
 
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         with:
           fetch-depth: 0
-      - uses: "actions/setup-python@v4"
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v5
         with:
-          python-version: "${{ matrix.python-version }}"
-          cache: "pip"
+          python-version: ${{ matrix.python-version }}
+          cache: pip
           cache-dependency-path: |
-            requirements/*.txt
-            requirements/**/*.txt
+            requirements-*.txt
+            pyproject.toml
+
+      - name: Install dependencies
+        run: pip install -r requirements.txt
+
+      - name: Run linting checks
+        run: scripts/lint.sh
+
+      - name: Run tests
+        run: scripts/tests.sh
+
+      - name: Enforce coverage
+        uses: codecov/codecov-action@v4
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 15d6d113..42c8326d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -9,7 +9,10 @@ repos:
       - id: check-yaml
       - id: check-toml
      - id: check-added-large-files
-
+  - repo: https://github.com/commitizen-tools/commitizen
+    rev: v3.18.0
+    hooks:
+      - id: commitizen
   - repo: https://github.com/charliermarsh/ruff-pre-commit
     rev: v0.3.0
     hooks:
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 00000000..b80da178
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,4 @@
+
+## v0.2.0 (2024-03-02)
+
+## 0.1.0 (2023-01-10)
diff --git a/CODE_OF_CONDUCT.rst b/CODE_OF_CONDUCT.md
similarity index 98%
rename from CODE_OF_CONDUCT.rst
rename to CODE_OF_CONDUCT.md
index 7141c712..b5d27c3a 100644
--- a/CODE_OF_CONDUCT.rst
+++ b/CODE_OF_CONDUCT.md
@@ -1,5 +1,4 @@
-Code of Conduct
-===============
+# Code of Conduct
 
 Everyone interacting in the project's codebases, issue trackers, chat rooms,
 and mailing lists is expected to follow the Mode Code of Conduct.
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 00000000..f1b7210c
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,131 @@
+# Contributing
+
+Contributions to mode are very welcome; feel free to open an issue to propose your ideas.
+
+To make a contribution, please create a pull request.
+
+## Developing
+
+First, it is recommended to fork this repository into your personal GitHub account and then clone your fork locally.
+
+### Setup environment
+
+Here are some guidelines to set up your environment:
+
+```sh
+$ cd mode/
+$ python -m venv env  # Create a Python virtual environment in `env/`
+$ source env/bin/activate  # Activate the environment
+(venv) $ which pip  # Ensure everything is well configured
+/some/directory/mode/env/bin/pip
+```
+
+### Install project dependencies
+
+```sh
+(venv) $ pip install -r requirements.txt
+Obtaining file:///some/directory/mode
+  Installing build dependencies ... done
+  Checking if build backend supports build_editable ... done
+  Getting requirements to build editable ... done
+  Installing backend dependencies ... done
+  Preparing editable metadata (pyproject.toml) ... done
+Ignoring pre-commit: markers 'python_version < "3.9"' don't match your environment
+...
+```
+
+This project applies quality rules to code and commits. To enforce them at commit time, install the [pre-commit](https://pre-commit.com/) hook:
+
+```sh
+(venv) $ pre-commit install
+pre-commit installed at .git/hooks/pre-commit
+```
+
+### Format & lint the code
+
+You can run the format script to make your changes compliant:
+
+```sh
+(venv) $ ./scripts/format.sh
++ ruff format mode tests
+79 files left unchanged
++ ruff check mode tests --fix
+```
+
+_The script uses [ruff](https://github.com/astral-sh/ruff) & [mypy](https://mypy-lang.org/)._
+
+### Run tests
+
+A script is also available to run the tests:
+
+```sh
+(venv) $ ./scripts/tests.sh
++ pytest tests --cov=mode
+Test session starts (platform: linux, Python 3.12.2, pytest 8.1.1, pytest-sugar 1.0.0)
+...
+```
+
+_The script uses [pytest](https://docs.pytest.org/en/8.0.x/contents.html)._
+
+### Commit format
+
+Commits should be formatted following [Conventional Commits 1.0.0](https://www.conventionalcommits.org/en/v1.0.0/).
+
+You can write the commit message manually and respect the convention, or you can use the CLI to help you format it correctly:
+
+```sh
+(venv) $ cz commit
+? Select the type of change you are committing docs: Documentation only changes
+? What is the scope of this change? (class or file name): (press [enter] to skip)
+ README
+? Write a short and imperative summary of the code changes: (lower case and no period)
+ correct spelling of README
+? Provide additional contextual information about the code changes: (press [enter] to skip)
+
+? Is this a BREAKING CHANGE? Correlates with MAJOR in SemVer No
+? Footer. Information about Breaking Changes and reference issues that this commit closes: (press [enter] to skip)
+
+
+docs(README): correct spelling of README
+```
+
+## Documentation
+
+To run the documentation locally, set up your environment and install the dependencies; if you have not done so already, see the first two parts of the Developing section.
+
+```sh
+(venv) $ mkdocs serve
+INFO - Building documentation...
+INFO - Cleaning site directory
+INFO - Documentation built in 1.78 seconds
+INFO - [19:38:48] Watching paths for changes: 'docs', 'mkdocs.yml'
+INFO - [19:38:48] Serving on http://127.0.0.1:8000/
+```
+
+Then, you can browse the documentation at http://127.0.0.1:8000.
+
+## Maintainers
+
+### Publish a new release
+
+1. First, create a new tag and update the changelog:
+
+```sh
+(venv) $ ./scripts/bump.sh
++ cz bump --changelog
+bump: version 0.2.0 → 0.2.1
+tag to create: 0.2.1
+increment detected: PATCH
+
+[master b35722f] bump: version 0.2.0 → 0.2.1
+ 2 files changed, 2 insertions(+), 1 deletion(-)
+
+...
+
+Done!
+```
+
+!!! note
+    If this passes, it will automatically push the commit and tags to the remote server; you may want to run `cz bump --changelog --dry-run` first to check the generated changes.
+
+2. Then, after ensuring the GitHub checks pass, you can publish the release via the GitHub interface. (An action will be triggered that publishes the package to PyPI and the documentation to GitHub Pages.)
diff --git a/Changelog b/Changelog
deleted file mode 100644
index 234e0da0..00000000
--- a/Changelog
+++ /dev/null
@@ -1,35 +0,0 @@
-.. _changelog:
-
-================
- Change history
-================
-
-.. version-0.2.0:
-
-0.2.0
-=====
-:release-date: 2021-10-14
-:release-by: Taybin Rutkin (:github_user:`taybin`)
-
-- Support python-3.10
-
-- format with black and isort
-
-- add crontab timer from Faust (:github_user:`lqhuang`)
-
-.. version-0.1.0:
-
-0.1.0
-=====
-:release-date: 2020-12-17 14:00 P.M CET
-:release-by: Thomas Sarboni (:github_user:`max-k`)
-
-- Friendly fork of ask/mode : Initial release
-
-- Move to new travis-ci.com domain
-
-- Add tests on Python 3.8.1-3.8.6
-
-- Fix broken tests
-
-- Add Python 3.9 support
diff --git a/Makefile b/Makefile
deleted file mode 100644
index 58801857..00000000
--- a/Makefile
+++ /dev/null
@@ -1,147 +0,0 @@
-PROJ ?= mode
-PGPIDENT ?= "Faust Security Team"
-PYTHON ?= python
-PYTEST ?= py.test
-PIP ?= pip
-GIT ?= git
-TOX ?= tox
-NOSETESTS ?= nosetests
-ICONV ?= iconv
-MYPY ?= mypy
-
-TESTDIR ?= t
-README ?= README.rst
-README_SRC ?= "docs/templates/readme.txt"
-CONTRIBUTING ?= CONTRIBUTING.rst
-CONTRIBUTING_SRC ?= "docs/contributing.rst"
-COC ?= CODE_OF_CONDUCT.rst
-COC_SRC ?= "docs/includes/code-of-conduct.txt"
-DOCUMENTATION=Documentation
-
-all: help
-
-help:
-	@echo "docs - Build documentation."
-	@echo "test-all - Run tests for all supported python versions."
-	@echo "develop - Install all dependencies into current virtualenv."
-	@echo "distcheck ---------- - Check distribution for problems."
-	@echo " test - Run unittests using current python."
-	@echo " lint ------------ - Check codebase for problems."
-	@echo " apicheck - Check API reference coverage."
-	@echo " readmecheck - Check README.rst encoding."
-	@echo " contribcheck - Check CONTRIBUTING.rst encoding"
-	@echo " ruff - Check code for syntax and style errors."
-	@echo "readme - Regenerate README.rst file."
-	@echo "contrib - Regenerate CONTRIBUTING.rst file"
-	@echo "coc - Regenerate CODE_OF_CONDUCT.rst file"
-	@echo "clean-dist --------- - Clean all distribution build artifacts."
-	@echo " clean-git-force - Remove all uncomitted files."
-	@echo " clean ------------ - Non-destructive clean"
-	@echo " clean-pyc - Remove .pyc/__pycache__ files"
-	@echo " clean-docs - Remove documentation build artifacts."
-	@echo " clean-build - Remove setup artifacts."
-	@echo "release - Make PyPI release."
-
-clean: clean-docs clean-pyc clean-build
-
-clean-dist: clean clean-git-force
-
-release:
-	$(PYTHON) register sdist bdist_wheel upload --sign --identity="$(PGPIDENT)"
-
-. PHONY: deps-default
-deps-default:
-	$(PIP) install -U -e "."
-
-. PHONY: deps-docs
-deps-docs:
-	$(PIP) install -U -r requirements-docs.txt
-
-. PHONY: deps-test
-deps-test:
-	$(PIP) install -U -r requirements-test.txt
-
-. PHONY: deps-extras
-deps-extras:
-	$(PIP) install -U -r requirements/extras/eventlet.txt
-	$(PIP) install -U -r requirements/extras/uvloop.txt
-
-. PHONY: develop
-develop: deps-default deps-dist deps-docs deps-test deps-extras
-	$(PYTHON) develop
-
-. PHONY: Documentation
-Documentation:
-	mkdocs build
-
-. PHONY: docs
-docs: Documentation
-
-. PHONE: serve-docs
-serve-docs:
-	mkdocs serve
-
-clean-docs:
-	-rm -rf "$(SPHINX_BUILDDIR)"
-
-ruff:
-	ruff check . --fix
-
-lint: ruff apicheck readmecheck
-
-clean-readme:
-	-rm -f $(README)
-
-readmecheck:
-	$(ICONV) -f ascii -t ascii $(README) >/dev/null
-
-readme: clean-readme $(README) readmecheck
-
-clean-contrib:
-	-rm -f "$(CONTRIBUTING)"
-
-contrib: clean-contrib $(CONTRIBUTING)
-
-clean-coc:
-	-rm -f "$(COC)"
-
-coc: clean-coc $(COC)
-
-clean-pyc:
-	-find . -type f -a \( -name "*.pyc" -o -name "*$$py.class" \) | xargs rm
-	-find . -type d -name "__pycache__" | xargs rm -r
-
-removepyc: clean-pyc
-
-clean-build:
-	rm -rf build/ dist/ .eggs/ *.egg-info/ .tox/ .coverage cover/
-
-clean-git:
-	$(GIT) clean -xdn
-
-clean-git-force:
-	$(GIT) clean -xdf
-
-test-all: clean-pyc
-	$(TOX)
-
-test:
-	$(PYTEST) .
-
-cov:
-	$(PYTEST) -x --cov="$(PROJ)" --cov-report=html
-
-build:
-	$(PYTHON) sdist bdist_wheel
-
-distcheck: lint test clean
-
-dist: readme contrib clean-dist build
-
-typecheck:
-	$(PYTHON) -m $(MYPY) -p $(PROJ)
-
-.PHONY: requirements
-requirements:
-	$(PIP) install --upgrade pip;\
-	$(PIP) install -r requirements.txt
diff --git a/README.md b/README.md
new file mode 100644
index 00000000..f50b41a7
--- /dev/null
+++ b/README.md
@@ -0,0 +1,492 @@
+# AsyncIO Services Fork
+
+---
+
+**Documentation**: https://faust-streaming.github.io/mode/
+
+**Source Code**: https://github.com/faust-streaming/mode
+
+---
+
+## Why the fork
+
+We have decided to fork the original *Mode* project because of critical problems
+with its process for releasing new versions, which caused uncertainty in the
+community. Everybody is welcome to contribute to this *fork*, and you can be
+added as a maintainer.
+
+We want to:
+
+- Ensure continuous releases
+- Maintain code quality
+- Support the latest Python versions
+- Keep the documentation up to date
+
+and more...
+
+## What is Mode?
+
+Mode is a very minimal Python library built on top of asyncio that makes
+it much easier to use.
+
+In Mode your program is built out of services that you can start, stop,
+restart and supervise.
+
+A service is just a class:
+
+```python
+class PageViewCache(Service):
+    redis: Redis = None
+
+    async def on_start(self) -> None:
+        self.redis = connect_to_redis()
+
+    async def update(self, url: str, n: int = 1) -> int:
+        return await self.redis.incr(url, n)
+
+    async def get(self, url: str) -> int:
+        return await self.redis.get(url)
+```
+
+Services are started, stopped and restarted and have
+callbacks for those actions.
+
+A service can start other services:
+
+```python
+class App(Service):
+    page_view_cache: PageViewCache = None
+
+    async def on_start(self) -> None:
+        await self.add_runtime_dependency(self.page_view_cache)
+
+    @cached_property
+    def page_view_cache(self) -> PageViewCache:
+        return PageViewCache()
+```
+
+A service can also include background tasks:
+
+```python
+class PageViewCache(Service):
+
+    @Service.timer(1.0)
+    async def _update_cache(self) -> None:
+        self.data = await cache.get('key')
+```
+
+Services that depend on other services form a graph
+that you can visualize.
+
+### Worker
+
+Mode optionally provides a worker that you can use to start the program,
+with support for logging, blocking detection, remote debugging and more.
+
+To start a worker, add this to your program:
+
+```python
+if __name__ == '__main__':
+    from mode import Worker
+    Worker(Service(), loglevel="info").execute_from_commandline()
+```
+
+Then execute your program to start the worker:
+
+```log
+$ python examples/tutorial.py
+[2018-03-27 15:47:12,159: INFO]: [^Worker]: Starting...
+[2018-03-27 15:47:12,160: INFO]: [^-AppService]: Starting...
+[2018-03-27 15:47:12,160: INFO]: [^--Websockets]: Starting...
+STARTING WEBSOCKET SERVER
+[2018-03-27 15:47:12,161: INFO]: [^--UserCache]: Starting...
+[2018-03-27 15:47:12,161: INFO]: [^--Webserver]: Starting...
+[2018-03-27 15:47:12,164: INFO]: [^--Webserver]: Serving on port 8000
+REMOVING EXPIRED USERS
+REMOVING EXPIRED USERS
+```
+
+To stop it, hit `Control-c`:
+
+```log
+[2018-03-27 15:55:08,084: INFO]: [^Worker]: Stopping on signal received...
+[2018-03-27 15:55:08,084: INFO]: [^Worker]: Stopping...
+[2018-03-27 15:55:08,084: INFO]: [^-AppService]: Stopping...
+[2018-03-27 15:55:08,084: INFO]: [^--UserCache]: Stopping...
+REMOVING EXPIRED USERS
+[2018-03-27 15:55:08,085: INFO]: [^Worker]: Gathering service tasks...
+[2018-03-27 15:55:08,085: INFO]: [^--UserCache]: -Stopped!
+[2018-03-27 15:55:08,085: INFO]: [^--Webserver]: Stopping...
+[2018-03-27 15:55:08,085: INFO]: [^Worker]: Gathering all futures...
+[2018-03-27 15:55:08,085: INFO]: [^--Webserver]: Closing server
+[2018-03-27 15:55:08,086: INFO]: [^--Webserver]: Waiting for server to close handle
+[2018-03-27 15:55:08,086: INFO]: [^--Webserver]: Shutting down web application
+[2018-03-27 15:55:08,086: INFO]: [^--Webserver]: Waiting for handler to shut down
+[2018-03-27 15:55:08,086: INFO]: [^--Webserver]: Cleanup
+[2018-03-27 15:55:08,086: INFO]: [^--Webserver]: -Stopped!
+[2018-03-27 15:55:08,086: INFO]: [^--Websockets]: Stopping...
+[2018-03-27 15:55:08,086: INFO]: [^--Websockets]: -Stopped!
+[2018-03-27 15:55:08,087: INFO]: [^-AppService]: -Stopped!
+[2018-03-27 15:55:08,087: INFO]: [^Worker]: -Stopped!
+```
+
+### Beacons
+
+The `beacon` object that we pass to services keeps track of the services
+in a graph.
+
+They are not strictly required, but can be used to visualize a running
+system; for example, we can render it as a pretty graph.
+
+This requires you to have the `pydot` library and GraphViz
+installed:
+
+```sh
+$ pip install pydot
+```
+
+Let's change the app service class to dump the graph to an image
+at startup:
+
+```python
+class AppService(Service):
+
+    async def on_start(self) -> None:
+        print('APP STARTING')
+        import pydot
+        import io
+        o = io.StringIO()
+        beacon = self.app.beacon.root or self.app.beacon
+        beacon.as_graph().to_dot(o)
+        graph, = pydot.graph_from_dot_data(o.getvalue())
+        print('WRITING GRAPH TO image.png')
+        with open('image.png', 'wb') as fh:
+            fh.write(graph.create_png())
+```
+
+## Creating a Service
+
+To define a service, simply subclass `Service` and fill in the callback
+methods you need as the service is started, stopped, etc.:
+
+```python
+class MyService(Service):
+
+    async def on_start(self) -> None:
+        print('Im starting now')
+
+    async def on_started(self) -> None:
+        print('Im ready')
+
+    async def on_stop(self) -> None:
+        print('Im stopping now')
+```
+
+To start the service, call `await service.start()`:
+
+```python
+await service.start()
+```
+
+Or you can use `mode.Worker` (or a subclass of this) to start your
+services-based asyncio program from the console:
+
+```python
+if __name__ == '__main__':
+    import mode
+    worker = mode.Worker(
+        MyService(),
+        loglevel='INFO',
+        logfile=None,
+        daemon=False,
+    )
+    worker.execute_from_commandline()
+```
+
+## It's a Graph!
+
+Services can start other services, coroutines, and background tasks;
+a sketch that combines these patterns follows the list below.
+
+1) Starting other services using `add_dependency`:
+
+```python
+class MyService(Service):
+    def __post_init__(self) -> None:
+        self.add_dependency(OtherService(loop=self.loop))
+```
+
+2) Start a list of services using `on_init_dependencies`:
+
+```python
+class MyService(Service):
+
+    def on_init_dependencies(self) -> None:
+        return [
+            ServiceA(loop=self.loop),
+            ServiceB(loop=self.loop),
+            ServiceC(loop=self.loop),
+        ]
+```
+
+3) Start a future/coroutine (that will be waited on to complete on stop):
+
+```python
+class MyService(Service):
+
+    async def on_start(self) -> None:
+        self.add_future(self.my_coro())
+
+    async def my_coro(self) -> None:
+        print('Executing coroutine')
+```
+
+4) Start a background task:
+
+```python
+class MyService(Service):
+
+    @Service.task
+    async def _my_coro(self) -> None:
+        print('Executing coroutine')
+```
+
+5) Start a background task that keeps running:
+
+```python
+class MyService(Service):
+
+    @Service.task
+    async def _my_coro(self) -> None:
+        while not self.should_stop:
+            # NOTE: self.sleep will wait for one second, or
+            # until the service stops or crashes.
+            await self.sleep(1.0)
+            print('Background thread waking up')
+```
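+
+Putting a few of these patterns together, here is a minimal sketch of a
+complete program (the service names and the `_tick` task are illustrative,
+not part of the Mode API):
+
+```python
+import mode
+
+
+class Cache(mode.Service):
+    """Child service, started and stopped together with its parent."""
+
+    async def on_start(self) -> None:
+        print('Cache starting')
+
+
+class App(mode.Service):
+    """Parent service: declares a dependency and runs a background task."""
+
+    def on_init_dependencies(self):
+        # Services returned here are supervised as children of this service.
+        return [Cache()]
+
+    @mode.Service.task
+    async def _tick(self) -> None:
+        while not self.should_stop:
+            await self.sleep(1.0)  # wakes up early if the service is stopped
+            print('tick')
+
+
+if __name__ == '__main__':
+    mode.Worker(App(), loglevel='info').execute_from_commandline()
+```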
+
+## Installation
+
+You can install Mode either via the Python Package Index (PyPI)
+or from source.
+
+To install using `pip`:
+
+```sh
+$ pip install -U mode-streaming
+```
+
+To download and install from source, get the latest tarball from
+http://pypi.org/project/mode-streaming and do the following:
+
+```sh
+$ tar xvfz mode-streaming-0.2.1.tar.gz
+$ cd mode-streaming-0.2.1
+$ python -m build .
+# pip install dist/*.whl
+```
+
+The last command must be executed as a privileged user if
+you are not currently using a virtualenv.
+
+Using the development version:
+
+With pip:
+
+You can install the latest snapshot of Mode directly from the Git repository
+using the following pip command:
+
+```sh
+$ pip install git+https://github.com/faust-streaming/mode.git
+```
+
+## Developing
+
+The contribution guidelines and associated information are in [CONTRIBUTING.md](./CONTRIBUTING.md).
+
+## FAQ
+
+#### Can I use Mode with Django/Flask/etc.?
+
+Yes! Use gevent/eventlet as a bridge to integrate with asyncio.
+
+Using `gevent`:
+
+This works with any blocking Python library that can work with gevent.
+
+Using gevent requires you to install the `aiogevent` module,
+and you can install this as a bundle with Mode:
+
+```sh
+$ pip install -U mode-streaming[gevent]
+```
+
+Then, to actually use gevent as the event loop, you have to
+execute the following in your entrypoint module (usually where you
+start the worker), before any other third-party libraries are imported:
+
+```python
+#!/usr/bin/env python3
+import mode.loop
+mode.loop.use('gevent')
+# execute program
+```
+
+REMEMBER: This must be located at the very top of the module,
+in such a way that it executes before you import other libraries.
+
+Using `eventlet`:
+
+This works with any blocking Python library that can work with eventlet.
+
+Using eventlet requires you to install the `aioeventlet` module,
+and you can install this as a bundle with Mode:
+
+```sh
+$ pip install -U mode-streaming[eventlet]
+```
+
+Then, to actually use eventlet as the event loop, you have to
+execute the following in your entrypoint module (usually where you
+start the worker), before any other third-party libraries are imported:
+
+```python
+#!/usr/bin/env python3
+import mode.loop
+mode.loop.use('eventlet')
+# execute program
+```
+
+REMEMBER: It's very important that this is at the very top of the module,
+and that it executes before you import other libraries.
+
+#### Can I use Mode with Tornado?
+
+Yes! Use the `tornado.platform.asyncio` bridge: http://www.tornadoweb.org/en/stable/asyncio.html
+
+#### Can I use Mode with Twisted?
+
+Yes! Use the asyncio reactor implementation:
+https://twistedmatrix.com/documents/17.1.0/api/twisted.internet.asyncioreactor.html
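+
+For reference, the wiring for these bridges usually amounts to a couple of
+lines at the very top of your entrypoint module. This is only a sketch under
+assumed library versions; check the Tornado/Twisted documentation for the
+versions you actually use:
+
+```python
+# Tornado: on Tornado 5+ the IOLoop already runs on asyncio, so Mode services
+# and Tornado handlers can share the same event loop with no extra setup.
+# On older Tornado versions the bridge had to be installed explicitly:
+#
+#     from tornado.platform.asyncio import AsyncIOMainLoop
+#     AsyncIOMainLoop().install()
+
+# Twisted: install the asyncio reactor before twisted.internet.reactor is
+# imported anywhere else in the program.
+from twisted.internet import asyncioreactor
+asyncioreactor.install()
+```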
+
+#### Will you support Python 3.5 or earlier?
+
+There are no immediate plans to support Python 3.5, but you are welcome to
+contribute to the project.
+
+Here are some of the steps required to accomplish this:
+
+- Source code transformation to rewrite variable annotations to comments. For example, the code:
+
+```python
+class Point:
+    x: int = 0
+    y: int = 0
+```
+
+must be rewritten into:
+
+```python
+class Point:
+    x = 0  # type: int
+    y = 0  # type: int
+```
+
+- Source code transformation to rewrite async functions. For example, the code:
+
+```python
+async def foo():
+    await asyncio.sleep(1.0)
+```
+
+must be rewritten into:
+
+```python
+@coroutine
+def foo():
+    yield from asyncio.sleep(1.0)
+```
+
+#### Will you support Python 2?
+
+There are no plans to support Python 2, but you are welcome to contribute to
+the project (the details in the question above are also relevant for Python 2).
+
+#### At shutdown I get lots of warnings, what is this about?
+
+If you get warnings such as this at shutdown:
+
+```log
+Task was destroyed but it is pending!
+task: