make context variable name short #46

Merged: 1 commit, Nov 9, 2024
2 changes: 1 addition & 1 deletion httpout/__init__.py
@@ -1,6 +1,6 @@
 # Copyright (c) 2024 nggit
 
-__version__ = '0.0.50'
+__version__ = '0.0.51'
 __all__ = ('HTTPOut',)
 
 from .httpout import HTTPOut  # noqa: E402
60 changes: 29 additions & 31 deletions httpout/httpout.py
@@ -30,14 +30,14 @@ def __init__(self, app):
         app.add_middleware(self._on_close, 'close')
 
     async def _on_worker_start(self, **worker):
-        worker_ctx = worker['context']
         loop = worker['loop']
         logger = worker['logger']
-        thread_pool_size = worker_ctx.options.get('thread_pool_size', 5)
+        g = worker['globals']
+        thread_pool_size = g.options.get('thread_pool_size', 5)
         document_root = os.path.abspath(
-            worker_ctx.options.get('document_root', os.getcwd())
+            g.options.get('document_root', os.getcwd())
         )
-        worker_ctx.options['document_root'] = document_root
+        g.options['document_root'] = document_root
 
         logger.info('entering directory: %s', document_root)
         os.chdir(document_root)
@@ -148,27 +148,29 @@ def ho_import(name, globals=None, locals=None, fromlist=(), level=0):
 
         builtins.__import__ = ho_import
 
-        worker_ctx.wait = wait
-        worker_ctx.caches = {}
-        worker_ctx.executor = MultiThreadExecutor(thread_pool_size)
-        worker_ctx.executor.start()
+        g.wait = wait
+        g.caches = {}
+        g.executor = MultiThreadExecutor(thread_pool_size)
+        g.executor.start()
 
         if worker['__globals__']:
-            exec_module(worker['__globals__'])
-
             builtins.__globals__ = worker['__globals__']
+            exec_module(worker['__globals__'])
         else:
             builtins.__globals__ = ModuleType('__globals__')
 
     async def _on_worker_stop(self, **worker):
-        await worker['context'].executor.shutdown()
+        g = worker['globals']
+
+        await g.executor.shutdown()
 
     async def _on_request(self, **server):
         request = server['request']
         response = server['response']
         logger = server['logger']
-        worker_ctx = server['globals']
-        document_root = worker_ctx.options['document_root']
+        ctx = server['context']
+        g = server['globals']
+        document_root = g.options['document_root']
 
         if not request.is_valid:
             raise BadRequest
@@ -249,26 +251,24 @@ async def _on_request(self, **server):
             module.__server__ = server
             module.print = server['response'].print
             module.run = server['response'].run_coroutine
-            module.wait = worker_ctx.wait
-            code = worker_ctx.caches.get(module_path, None)
+            module.wait = g.wait
+            code = g.caches.get(module_path, None)
 
             if code:
                 logger.info('%s: using cache', path)
 
             try:
                 # execute module in another thread
-                result = await worker_ctx.executor.submit(
-                    exec_module, module, code
-                )
+                result = await g.executor.submit(exec_module, module, code)
                 await server['response'].join()
 
                 if result:
-                    worker_ctx.caches[module_path] = result
+                    g.caches[module_path] = result
                     logger.info('%s: cached', path)
                 else:
                     # cache is going to be deleted on @app.on_close
                     # but it can be delayed on a Keep-Alive request
-                    server['context'].module_path = module_path
+                    ctx.module_path = module_path
             except BaseException as exc:
                 await server['response'].join()
 
@@ -298,12 +298,12 @@ async def _on_request(self, **server):
             else:
                 request.protocol.print_exception(exc)
             finally:
-                await worker_ctx.executor.submit(
+                await g.executor.submit(
                     cleanup_modules, server['modules'], (module.print,
                                                          module.run,
                                                          module.wait,
-                                                         worker_ctx,
-                                                         server['context'],
+                                                         g,
+                                                         ctx,
                                                          server['response'])
                 )
                 await server['response'].join()
@@ -317,18 +317,16 @@ async def _on_request(self, **server):
 
         logger.info('%s -> %s: %s', path, mime_types[ext], module_path)
         await response.sendfile(
-            module_path,
-            content_type=mime_types[ext],
-            executor=worker_ctx.executor
+            module_path, content_type=mime_types[ext], executor=g.executor
         )
         # exit middleware without closing the connection
         return True
 
     async def _on_close(self, **server):
-        request_ctx = server['context']
-        worker_ctx = server['globals']
         logger = server['logger']
+        ctx = server['context']
+        g = server['globals']
 
-        if 'module_path' in request_ctx:
-            worker_ctx.caches[request_ctx.module_path] = None
-            logger.info('cache deleted: %s', request_ctx.module_path)
+        if 'module_path' in ctx:
+            g.caches[ctx.module_path] = None
+            logger.info('cache deleted: %s', ctx.module_path)
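
For readers skimming the diff: the rename replaces worker_ctx / request_ctx with g (worker['globals'] / server['globals'], the state shared by every request in a worker process) and ctx (server['context'], the per-connection state). The standalone sketch below illustrates that convention only; it is not repository code. The handler names, the SimpleNamespace stand-ins, and the '/hello.py' path are assumptions made for the sketch, while the 'context' / 'globals' keys and the short names themselves come from the diff above.

from types import SimpleNamespace


async def on_request(**server):
    # per-connection state, invalidated when the connection closes
    ctx = server['context']
    # worker-wide state: options, caches, executor shared by all requests
    g = server['globals']

    document_root = g.options['document_root']
    print('serving from', document_root)

    # remember which cache entry belongs to this connection so on_close can
    # invalidate it (mirrors "ctx.module_path = module_path" in the diff)
    ctx.module_path = '/hello.py'  # hypothetical path for the sketch


async def on_close(**server):
    ctx = server['context']
    g = server['globals']

    # the real code uses "'module_path' in ctx"; hasattr() plays the same
    # role for the SimpleNamespace stand-in used here
    if hasattr(ctx, 'module_path'):
        g.caches[ctx.module_path] = None  # drop the stale cache entry


if __name__ == '__main__':
    import asyncio

    g = SimpleNamespace(options={'document_root': '/tmp'}, caches={})
    ctx = SimpleNamespace()
    asyncio.run(on_request(context=ctx, globals=g))
    asyncio.run(on_close(context=ctx, globals=g))
    print('caches:', g.caches)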