From bac9d4e3fa6a4cd385db7eb571750c4efd4d927f Mon Sep 17 00:00:00 2001 From: Kuan Butts Date: Wed, 27 Dec 2017 10:12:21 -0500 Subject: [PATCH 01/12] update makefile to make graph-tool nb --- Makefile | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index df09bbd..8d6228b 100644 --- a/Makefile +++ b/Makefile @@ -2,4 +2,10 @@ test: PYTHONPATH=. MPLBACKEND="agg" coverage run --source peartree -m py.test --verbose performance: - PYTHONPATH=. MPLBACKEND="agg" pytest profiler/test_graph_assembly.py -s + PYTHONPATH=. MPLBACKEND="agg" pytest profiler/test_graph_assembly.py -s + +notebook: + docker pull tiagopeixoto/graph-tool + # Will need to then run this: jupyter notebook --ip 0.0.0.0 + docker run -p 8888:8888 -p 6006:6006 -it -u user -w /home/user tiagopeixoto/graph-tool bash + From c23ac2ba1301553a33efd3561039679027e286cb Mon Sep 17 00:00:00 2001 From: Kuan Butts Date: Wed, 27 Dec 2017 12:19:24 -0500 Subject: [PATCH 02/12] udpate fiona version --- Makefile | 3 +-- setup.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 8d6228b..a81fa9e 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -test: +it test: PYTHONPATH=. MPLBACKEND="agg" coverage run --source peartree -m py.test --verbose performance: @@ -8,4 +8,3 @@ notebook: docker pull tiagopeixoto/graph-tool # Will need to then run this: jupyter notebook --ip 0.0.0.0 docker run -p 8888:8888 -p 6006:6006 -it -u user -w /home/user tiagopeixoto/graph-tool bash - diff --git a/setup.py b/setup.py index 6248f1b..283afc9 100644 --- a/setup.py +++ b/setup.py @@ -13,7 +13,7 @@ exec(f.read(), about) requirements = [ - 'fiona==1.6.1', + 'fiona>=1.6.1', 'networkx>=2.0', 'osmnx==0.6', 'partridge==0.3.0' From c1b1b0123c48a2ee52f1591464891928eeba3671 Mon Sep 17 00:00:00 2001 From: Kuan Butts Date: Wed, 27 Dec 2017 12:51:01 -0500 Subject: [PATCH 03/12] jupyter notebook configuration --- Dockerfile | 8 + docker-compose.yml | 16 + jupyter_notebook_config.py | 671 +++++++++++++++++++++++++++++++++++++ 3 files changed, 695 insertions(+) create mode 100644 Dockerfile create mode 100644 docker-compose.yml create mode 100644 jupyter_notebook_config.py diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..1c1dd4e --- /dev/null +++ b/Dockerfile @@ -0,0 +1,8 @@ +FROM calthorpeanalytics/python3-geo:3.6.3-1.0.0 + +RUN mkdir -p /provisioning/peartree +COPY . /provisioning/peartree + +# can now install Peartree via repo +RUN cd /provisioning/peartree && \ + pip install . \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..5f0def3 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,16 @@ + +version: "1" + +services: + + # Jupyter Notebook + notebook: + build: + context: . + env_file: .env + command: jupyter notebook --config jupyter_notebook_config.py + volumes: + - .:/code + - /tmp:/tmp + ports: + - "9898:9898" diff --git a/jupyter_notebook_config.py b/jupyter_notebook_config.py new file mode 100644 index 0000000..54f9fda --- /dev/null +++ b/jupyter_notebook_config.py @@ -0,0 +1,671 @@ +# flake8: noqa + +# Configuration file for jupyter-notebook. + +# ------------------------------------------------------------------------------ +# Application(SingletonConfigurable) configuration +# ------------------------------------------------------------------------------ + +# This is an application. 
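#
# (A config file with these stock defaults can be regenerated at any time with
# `jupyter notebook --generate-config`; the handful of uncommented
# c.NotebookApp.* assignments further down are the only values customized here
# for the docker-compose notebook service defined above.)
#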
+ +# The date format used by logging formatters for %(asctime)s +# c.Application.log_datefmt = '%Y-%m-%d %H:%M:%S' + +# The Logging format template +# c.Application.log_format = '[%(name)s]%(highlevel)s %(message)s' + +# Set the log level by value or name. +# c.Application.log_level = 30 + +# ------------------------------------------------------------------------------ +# JupyterApp(Application) configuration +# ------------------------------------------------------------------------------ + +# Base class for Jupyter applications + +# Answer yes to any prompts. +# c.JupyterApp.answer_yes = False + +# Full path of a config file. +# c.JupyterApp.config_file = '' + +# Specify a config file to load. +# c.JupyterApp.config_file_name = '' + +# Generate default config file. +# c.JupyterApp.generate_config = False + +# ------------------------------------------------------------------------------ +# NotebookApp(JupyterApp) configuration +# ------------------------------------------------------------------------------ + +# Set the Access-Control-Allow-Credentials: true header +# c.NotebookApp.allow_credentials = False + +# Set the Access-Control-Allow-Origin header +# +# Use '*' to allow any origin to access your server. +# +# Takes precedence over allow_origin_pat. +c.NotebookApp.allow_origin = '*' + +# Use a regular expression for the Access-Control-Allow-Origin header +# +# Requests from an origin matching the expression will get replies with: +# +# Access-Control-Allow-Origin: origin +# +# where `origin` is the origin of the request. +# +# Ignored if allow_origin is set. +# c.NotebookApp.allow_origin_pat = '' + +# Whether to allow the user to run the notebook as root. +c.NotebookApp.allow_root = True + +# DEPRECATED use base_url +# c.NotebookApp.base_project_url = '/' + +# The base URL for the notebook server. +# +# Leading and trailing slashes can be omitted, and will automatically be added. +# c.NotebookApp.base_url = '/' + +# Specify what command to use to invoke a web browser when opening the notebook. +# If not specified, the default browser will be determined by the `webbrowser` +# standard library module, which allows setting of the BROWSER environment +# variable to override it. +# c.NotebookApp.browser = '' + +# The full path to an SSL/TLS certificate file. +# c.NotebookApp.certfile = '' + +# The full path to a certificate authority certificate for SSL/TLS client +# authentication. +# c.NotebookApp.client_ca = '' + +# The config manager class to use +# c.NotebookApp.config_manager_class = 'notebook.services.config.manager.ConfigManager' + +# The notebook manager class to use. +# c.NotebookApp.contents_manager_class = 'notebook.services.contents.largefilemanager.LargeFileManager' + +# Extra keyword arguments to pass to `set_secure_cookie`. See tornado's +# set_secure_cookie docs for details. +# c.NotebookApp.cookie_options = {} + +# The random bytes used to secure cookies. By default this is a new random +# number every time you start the Notebook. Set it to a value in a config file +# to enable logins to persist across server sessions. +# +# Note: Cookie secrets should be kept private, do not share config files with +# cookie_secret stored in plaintext (you can read the value from a file). +# c.NotebookApp.cookie_secret = b'' + +# The file where the cookie secret is stored. 
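#
# For example (an illustrative path, not part of this configuration), pointing
# the secret file at the bind-mounted /code volume would let logins persist
# across container restarts:
#
#     c.NotebookApp.cookie_secret_file = '/code/.jupyter/cookie_secret'
#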
+# c.NotebookApp.cookie_secret_file = '' + +# The default URL to redirect to from `/` +# c.NotebookApp.default_url = '/tree' + +# Disable cross-site-request-forgery protection +# +# Jupyter notebook 4.3.1 introduces protection from cross-site request +# forgeries, requiring API requests to either: +# +# - originate from pages served by this server (validated with XSRF cookie and +# token), or - authenticate with a token +# +# Some anonymous compute resources still desire the ability to run code, +# completely without authentication. These services can disable all +# authentication and security checks, with the full knowledge of what that +# implies. +c.NotebookApp.disable_check_xsrf = True + +# Whether to enable MathJax for typesetting math/TeX +# +# MathJax is the javascript library Jupyter uses to render math/LaTeX. It is +# very large, so you may want to disable it if you have a slow internet +# connection, or for offline use of the notebook. +# +# When disabled, equations etc. will appear as their untransformed TeX source. +# c.NotebookApp.enable_mathjax = True + +# extra paths to look for Javascript notebook extensions +# c.NotebookApp.extra_nbextensions_path = [] + +# Extra paths to search for serving static files. +# +# This allows adding javascript/css to be available from the notebook server +# machine, or overriding individual files in the IPython +# c.NotebookApp.extra_static_paths = [] + +# Extra paths to search for serving jinja templates. +# +# Can be used to override templates from notebook.templates. +# c.NotebookApp.extra_template_paths = [] + +# +# c.NotebookApp.file_to_run = '' + +# Deprecated: Use minified JS file or not, mainly use during dev to avoid JS +# recompilation +# c.NotebookApp.ignore_minified_js = False + +# (bytes/sec) Maximum rate at which stream output can be sent on iopub before +# they are limited. +# c.NotebookApp.iopub_data_rate_limit = 1000000 + +# (msgs/sec) Maximum rate at which messages can be sent on iopub before they are +# limited. +# c.NotebookApp.iopub_msg_rate_limit = 1000 + +# The IP address the notebook server will listen on. +c.NotebookApp.ip = '0.0.0.0' + +# Supply extra arguments that will be passed to Jinja environment. +# c.NotebookApp.jinja_environment_options = {} + +# Extra variables to supply to jinja templates when rendering. +# c.NotebookApp.jinja_template_vars = {} + +# The kernel manager class to use. +# c.NotebookApp.kernel_manager_class = 'notebook.services.kernels.kernelmanager.MappingKernelManager' + +# The kernel spec manager class to use. Should be a subclass of +# `jupyter_client.kernelspec.KernelSpecManager`. +# +# The Api of KernelSpecManager is provisional and might change without warning +# between this version of Jupyter and the next stable one. +# c.NotebookApp.kernel_spec_manager_class = 'jupyter_client.kernelspec.KernelSpecManager' + +# The full path to a private key file for usage with SSL/TLS. +# c.NotebookApp.keyfile = '' + +# The login handler class to use. +# c.NotebookApp.login_handler_class = 'notebook.auth.login.LoginHandler' + +# The logout handler class to use. +# c.NotebookApp.logout_handler_class = 'notebook.auth.logout.LogoutHandler' + +# The MathJax.js configuration file that is to be used. +# c.NotebookApp.mathjax_config = 'TeX-AMS-MML_HTMLorMML-full,Safe' + +# A custom url for MathJax.js. 
Should be in the form of a case-sensitive url to +# MathJax, for example: /static/components/MathJax/MathJax.js +# c.NotebookApp.mathjax_url = '' + +# Dict of Python modules to load as notebook server extensions.Entry values can +# be used to enable and disable the loading ofthe extensions. The extensions +# will be loaded in alphabetical order. +# c.NotebookApp.nbserver_extensions = {} + +# The directory to use for notebooks and kernels. +c.NotebookApp.notebook_dir = '/code/notebooks' + +# Whether to open in a browser after starting. The specific browser used is +# platform dependent and determined by the python standard library `webbrowser` +# module, unless it is overridden using the --browser (NotebookApp.browser) +# configuration option. +c.NotebookApp.open_browser = False + +# Hashed password to use for web authentication. +# +# To generate, type in a python/IPython shell: +# +# from notebook.auth import passwd; passwd() +# +# The string should be of the form type:salt:hashed-password. +# c.NotebookApp.password = '' + +# Forces users to use a password for the Notebook server. This is useful in a +# multi user environment, for instance when everybody in the LAN can access each +# other's machine through ssh. +# +# In such a case, server the notebook server on localhost is not secure since +# any user can connect to the notebook server via ssh. +# c.NotebookApp.password_required = False + +# The port the notebook server will listen on. +c.NotebookApp.port = 9898 + +# The number of additional ports to try if the specified port is not available. +# c.NotebookApp.port_retries = 50 + +# DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. +# c.NotebookApp.pylab = 'disabled' + +# (sec) Time window used to check the message and data rate limits. +# c.NotebookApp.rate_limit_window = 3 + +# Reraise exceptions encountered loading server extensions? +# c.NotebookApp.reraise_server_extension_failures = False + +# DEPRECATED use the nbserver_extensions dict instead +# c.NotebookApp.server_extensions = [] + +# The session manager class to use. +# c.NotebookApp.session_manager_class = 'notebook.services.sessions.sessionmanager.SessionManager' + +# Supply SSL options for the tornado HTTPServer. See the tornado docs for +# details. +# c.NotebookApp.ssl_options = {} + +# Supply overrides for terminado. Currently only supports "shell_command". +# c.NotebookApp.terminado_settings = {} + +# Token used for authenticating first-time connections to the server. +# +# When no password is enabled, the default is to generate a new, random token. +# +# Setting to an empty string disables authentication altogether, which is NOT +# RECOMMENDED. +# c.NotebookApp.token = '' + +# Supply overrides for the tornado.web.Application that the Jupyter notebook +# uses. +# c.NotebookApp.tornado_settings = {} + +# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded- +# For headerssent by the upstream reverse proxy. Necessary if the proxy handles +# SSL +# c.NotebookApp.trust_xheaders = False + +# DEPRECATED, use tornado_settings +# c.NotebookApp.webapp_settings = {} + +# Specify Where to open the notebook on startup. This is the +# `new` argument passed to the standard library method `webbrowser.open`. +# The behaviour is not guaranteed, but depends on browser support. Valid +# values are: +# 2 opens a new tab, +# 1 opens a new window, +# 0 opens in an existing window. +# See the `webbrowser.open` documentation for details. 
+# c.NotebookApp.webbrowser_open_new = 2 + +# Set the tornado compression options for websocket connections. +# +# This value will be returned from +# :meth:`WebSocketHandler.get_compression_options`. None (default) will disable +# compression. A dict (even an empty one) will enable compression. +# +# See the tornado docs for WebSocketHandler.get_compression_options for details. +# c.NotebookApp.websocket_compression_options = None + +# The base URL for websockets, if it differs from the HTTP server (hint: it +# almost certainly doesn't). +# +# Should be in the form of an HTTP origin: ws[s]://hostname[:port] +# c.NotebookApp.websocket_url = '' + +# ------------------------------------------------------------------------------ +# ConnectionFileMixin(LoggingConfigurable) configuration +# ------------------------------------------------------------------------------ + +# Mixin for configurable classes that work with connection files + +# JSON file in which to store connection info [default: kernel-.json] +# +# This file will contain the IP, ports, and authentication key needed to connect +# clients to this kernel. By default, this file will be created in the security +# dir of the current profile, but can be specified by absolute path. +# c.ConnectionFileMixin.connection_file = '' + +# set the control (ROUTER) port [default: random] +# c.ConnectionFileMixin.control_port = 0 + +# set the heartbeat port [default: random] +# c.ConnectionFileMixin.hb_port = 0 + +# set the iopub (PUB) port [default: random] +# c.ConnectionFileMixin.iopub_port = 0 + +# Set the kernel's IP address [default localhost]. If the IP address is +# something other than localhost, then Consoles on other machines will be able +# to connect to the Kernel, so be careful! +# c.ConnectionFileMixin.ip = '' + +# set the shell (ROUTER) port [default: random] +# c.ConnectionFileMixin.shell_port = 0 + +# set the stdin (ROUTER) port [default: random] +# c.ConnectionFileMixin.stdin_port = 0 + +# +# c.ConnectionFileMixin.transport = 'tcp' + +# ------------------------------------------------------------------------------ +# KernelManager(ConnectionFileMixin) configuration +# ------------------------------------------------------------------------------ + +# Manages a single kernel in a subprocess on this host. +# +# This version starts kernels with Popen. + +# Should we autorestart the kernel if it dies. +# c.KernelManager.autorestart = True + +# DEPRECATED: Use kernel_name instead. +# +# The Popen Command to launch the kernel. Override this if you have a custom +# kernel. If kernel_cmd is specified in a configuration file, Jupyter does not +# pass any arguments to the kernel, because it cannot make any assumptions about +# the arguments that the kernel understands. In particular, this means that the +# kernel does not receive the option --debug if it given on the Jupyter command +# line. +# c.KernelManager.kernel_cmd = [] + +# Time to wait for a kernel to terminate before killing it, in seconds. +# c.KernelManager.shutdown_wait_time = 5.0 + +# ------------------------------------------------------------------------------ +# Session(Configurable) configuration +# ------------------------------------------------------------------------------ + +# Object for handling serialization and sending of messages. +# +# The Session object handles building messages and sending them with ZMQ sockets +# or ZMQStream objects. 
Objects can communicate with each other over the +# network via Session objects, and only need to work with the dict-based IPython +# message spec. The Session will handle serialization/deserialization, security, +# and metadata. +# +# Sessions support configurable serialization via packer/unpacker traits, and +# signing with HMAC digests via the key/keyfile traits. +# +# Parameters ---------- +# +# debug : bool +# whether to trigger extra debugging statements +# packer/unpacker : str : 'json', 'pickle' or import_string +# importstrings for methods to serialize message parts. If just +# 'json' or 'pickle', predefined JSON and pickle packers will be used. +# Otherwise, the entire importstring must be used. +# +# The functions must accept at least valid JSON input, and output *bytes*. +# +# For example, to use msgpack: +# packer = 'msgpack.packb', unpacker='msgpack.unpackb' +# pack/unpack : callables +# You can also set the pack/unpack callables for serialization directly. +# session : bytes +# the ID of this Session object. The default is to generate a new UUID. +# username : unicode +# username added to message headers. The default is to ask the OS. +# key : bytes +# The key used to initialize an HMAC signature. If unset, messages +# will not be signed or checked. +# keyfile : filepath +# The file containing a key. If this is set, `key` will be initialized +# to the contents of the file. + +# Threshold (in bytes) beyond which an object's buffer should be extracted to +# avoid pickling. +# c.Session.buffer_threshold = 1024 + +# Whether to check PID to protect against calls after fork. +# +# This check can be disabled if fork-safety is handled elsewhere. +# c.Session.check_pid = True + +# Threshold (in bytes) beyond which a buffer should be sent without copying. +# c.Session.copy_threshold = 65536 + +# Debug output in the Session +# c.Session.debug = False + +# The maximum number of digests to remember. +# +# The digest history will be culled when it exceeds this value. +# c.Session.digest_history_size = 65536 + +# The maximum number of items for a container to be introspected for custom +# serialization. Containers larger than this are pickled outright. +# c.Session.item_threshold = 64 + +# execution key, for signing messages. +# c.Session.key = b'' + +# path to file containing execution key. +# c.Session.keyfile = '' + +# Metadata dictionary, which serves as the default top-level metadata dict for +# each message. +# c.Session.metadata = {} + +# The name of the packer for serializing messages. Should be one of 'json', +# 'pickle', or an import name for a custom callable serializer. +# c.Session.packer = 'json' + +# The UUID identifying this session. +# c.Session.session = '' + +# The digest scheme used to construct the message signatures. Must have the form +# 'hmac-HASH'. +# c.Session.signature_scheme = 'hmac-sha256' + +# The name of the unpacker for unserializing messages. Only used with custom +# functions for `packer`. +# c.Session.unpacker = 'json' + +# Username for the Session. Default is your system username. +# c.Session.username = 'username' + +# ------------------------------------------------------------------------------ +# MultiKernelManager(LoggingConfigurable) configuration +# ------------------------------------------------------------------------------ + +# A class for managing multiple kernels. + +# The name of the default kernel to start +# c.MultiKernelManager.default_kernel_name = 'python3' + +# The kernel manager class. 
This is configurable to allow subclassing of the +# KernelManager for customized behavior. +# c.MultiKernelManager.kernel_manager_class = 'jupyter_client.ioloop.IOLoopKernelManager' + +# ------------------------------------------------------------------------------ +# MappingKernelManager(MultiKernelManager) configuration +# ------------------------------------------------------------------------------ + +# A KernelManager that handles notebook mapping and HTTP error handling + +# Whether messages from kernels whose frontends have disconnected should be +# buffered in-memory. +# +# When True (default), messages are buffered and replayed on reconnect, avoiding +# lost messages due to interrupted connectivity. +# +# Disable if long-running kernels will produce too much output while no +# frontends are connected. +# c.MappingKernelManager.buffer_offline_messages = True + +# Whether to consider culling kernels which are busy. Only effective if +# cull_idle_timeout is not 0. +# c.MappingKernelManager.cull_busy = False + +# Whether to consider culling kernels which have one or more connections. Only +# effective if cull_idle_timeout is not 0. +# c.MappingKernelManager.cull_connected = False + +# Timeout (in seconds) after which a kernel is considered idle and ready to be +# culled. Values of 0 or lower disable culling. The minimum timeout is 300 +# seconds (5 minutes). Positive values less than the minimum value will be set +# to the minimum. +# c.MappingKernelManager.cull_idle_timeout = 0 + +# The interval (in seconds) on which to check for idle kernels exceeding the +# cull timeout value. +# c.MappingKernelManager.cull_interval = 300 + +# +# c.MappingKernelManager.root_dir = '' + +# ------------------------------------------------------------------------------ +# ContentsManager(LoggingConfigurable) configuration +# ------------------------------------------------------------------------------ + +# Base class for serving files and directories. +# +# This serves any text or binary file, as well as directories, with special +# handling for JSON notebook documents. +# +# Most APIs take a path argument, which is always an API-style unicode path, and +# always refers to a directory. +# +# - unicode, not url-escaped +# - '/'-separated +# - leading and trailing '/' will be stripped +# - if unspecified, path defaults to '', +# indicating the root path. + +# +# c.ContentsManager.checkpoints = None + +# +# c.ContentsManager.checkpoints_class = 'notebook.services.contents.checkpoints.Checkpoints' + +# +# c.ContentsManager.checkpoints_kwargs = {} + +# handler class to use when serving raw file requests. +# +# Default is a fallback that talks to the ContentsManager API, which may be +# inefficient, especially for large files. +# +# Local files-based ContentsManagers can use a StaticFileHandler subclass, which +# will be much more efficient. +# +# Access to these files should be Authenticated. +# c.ContentsManager.files_handler_class = 'notebook.files.handlers.FilesHandler' + +# Extra parameters to pass to files_handler_class. +# +# For example, StaticFileHandlers generally expect a `path` argument specifying +# the root directory from which to serve files. +# c.ContentsManager.files_handler_params = {} + +# Glob patterns to hide in file and directory listings. +# c.ContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dylib', '*~'] + +# Python callable or importstring thereof +# +# To be called on a contents model prior to save. 
+# +# This can be used to process the structure, such as removing notebook outputs +# or other side effects that should not be saved. +# +# It will be called as (all arguments passed by keyword):: +# +# hook(path=path, model=model, contents_manager=self) +# +# - model: the model to be saved. Includes file contents. +# Modifying this dict will affect the file that is stored. +# - path: the API path of the save destination +# - contents_manager: this ContentsManager instance +# c.ContentsManager.pre_save_hook = None + +# +# c.ContentsManager.root_dir = '/' + +# The base name used when creating untitled directories. +# c.ContentsManager.untitled_directory = 'Untitled Folder' + +# The base name used when creating untitled files. +# c.ContentsManager.untitled_file = 'untitled' + +# The base name used when creating untitled notebooks. +# c.ContentsManager.untitled_notebook = 'Untitled' + +# ------------------------------------------------------------------------------ +# FileManagerMixin(Configurable) configuration +# ------------------------------------------------------------------------------ + +# Mixin for ContentsAPI classes that interact with the filesystem. +# +# Provides facilities for reading, writing, and copying both notebooks and +# generic files. +# +# Shared by FileContentsManager and FileCheckpoints. +# +# Note ---- Classes using this mixin must provide the following attributes: +# +# root_dir : unicode +# A directory against against which API-style paths are to be resolved. +# +# log : logging.Logger + +# By default notebooks are saved on disk on a temporary file and then if +# succefully written, it replaces the old ones. This procedure, namely +# 'atomic_writing', causes some bugs on file system whitout operation order +# enforcement (like some networked fs). If set to False, the new notebook is +# written directly on the old one which could fail (eg: full filesystem or quota +# ) +# c.FileManagerMixin.use_atomic_writing = True + +# ------------------------------------------------------------------------------ +# FileContentsManager(FileManagerMixin,ContentsManager) configuration +# ------------------------------------------------------------------------------ + +# Python callable or importstring thereof +# +# to be called on the path of a file just saved. +# +# This can be used to process the file on disk, such as converting the notebook +# to a script or HTML via nbconvert. +# +# It will be called as (all arguments passed by keyword):: +# +# hook(os_path=os_path, model=model, contents_manager=instance) +# +# - path: the filesystem path to the file just written - model: the model +# representing the file - contents_manager: this ContentsManager instance +# c.FileContentsManager.post_save_hook = None + +# +# c.FileContentsManager.root_dir = '' + +# DEPRECATED, use post_save_hook. Will be removed in Notebook 5.0 +# c.FileContentsManager.save_script = False + +# ------------------------------------------------------------------------------ +# NotebookNotary(LoggingConfigurable) configuration +# ------------------------------------------------------------------------------ + +# A class for computing and verifying notebook signatures. + +# The hashing algorithm used to sign notebooks. +# c.NotebookNotary.algorithm = 'sha256' + +# The sqlite file in which to store notebook signatures. By default, this will +# be in your Jupyter data directory. You can set it to ':memory:' to disable +# sqlite writing to the filesystem. 
+# c.NotebookNotary.db_file = '' + +# The secret key with which notebooks are signed. +# c.NotebookNotary.secret = b'' + +# The file where the secret key is stored. +# c.NotebookNotary.secret_file = '' + +# A callable returning the storage backend for notebook signatures. The default +# uses an SQLite database. +# c.NotebookNotary.store_factory = traitlets.Undefined + +# ------------------------------------------------------------------------------ +# KernelSpecManager(LoggingConfigurable) configuration +# ------------------------------------------------------------------------------ + +# If there is no Python kernelspec registered and the IPython kernel is +# available, ensure it is added to the spec list. +# c.KernelSpecManager.ensure_native_kernel = True + +# The kernel spec class. This is configurable to allow subclassing of the +# KernelSpecManager for customized behavior. +# c.KernelSpecManager.kernel_spec_class = 'jupyter_client.kernelspec.KernelSpec' + +# Whitelist of allowed kernel names. +# +# By default, all installed kernels are allowed. +# c.KernelSpecManager.whitelist = set() From 5d9f12031f99d658612261b78c36644468e8b15b Mon Sep 17 00:00:00 2001 From: Kuan Butts Date: Wed, 27 Dec 2017 13:46:35 -0500 Subject: [PATCH 04/12] update the docker configuration for jupyter --- .gitignore | 1 + Dockerfile | 14 +++++++++----- Makefile | 6 +++--- docker-compose.yml | 2 +- requirements_dev.txt | 4 ++++ 5 files changed, 18 insertions(+), 9 deletions(-) diff --git a/.gitignore b/.gitignore index ab6a0db..3705964 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ .DS_Store +.env *.pyc __pycache__/ ptenv/ diff --git a/Dockerfile b/Dockerfile index 1c1dd4e..8d491cd 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,8 +1,12 @@ FROM calthorpeanalytics/python3-geo:3.6.3-1.0.0 -RUN mkdir -p /provisioning/peartree -COPY . /provisioning/peartree +RUN mkdir /code +WORKDIR /code -# can now install Peartree via repo -RUN cd /provisioning/peartree && \ - pip install . \ No newline at end of file +RUN pip install numpy==1.12.1 --src /usr/local/src + +COPY requirements.txt /code/ +RUN pip install -r requirements.txt --src /usr/local/src --exists-action=w + +COPY requirements_dev.txt /code/ +RUN pip install -r requirements_dev.txt --src /usr/local/src --exists-action=w \ No newline at end of file diff --git a/Makefile b/Makefile index a81fa9e..93de250 100644 --- a/Makefile +++ b/Makefile @@ -5,6 +5,6 @@ performance: PYTHONPATH=. 
MPLBACKEND="agg" pytest profiler/test_graph_assembly.py -s notebook: - docker pull tiagopeixoto/graph-tool - # Will need to then run this: jupyter notebook --ip 0.0.0.0 - docker run -p 8888:8888 -p 6006:6006 -it -u user -w /home/user tiagopeixoto/graph-tool bash + docker-compose build + mkdir ./notebooks + docker-compose up notebook diff --git a/docker-compose.yml b/docker-compose.yml index 5f0def3..3bbe57f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,5 +1,5 @@ -version: "1" +version: "3" services: diff --git a/requirements_dev.txt b/requirements_dev.txt index 6a41080..dbf9386 100644 --- a/requirements_dev.txt +++ b/requirements_dev.txt @@ -6,3 +6,7 @@ flake8 coverage pytest pytest-runner +jupyter==1.0.0 +jupyter-client==5.1.0 +jupyter-console==5.2.0 +jupyter-core==4.4.0 From dab26209799b9393b1b6981d4cf851dcc6cbd315 Mon Sep 17 00:00:00 2001 From: Kuan Butts Date: Sat, 6 Jan 2018 08:03:34 -0800 Subject: [PATCH 05/12] docker clean step --- Makefile | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/Makefile b/Makefile index 93de250..b2a5c22 100644 --- a/Makefile +++ b/Makefile @@ -8,3 +8,8 @@ notebook: docker-compose build mkdir ./notebooks docker-compose up notebook + +docker-clean: + docker network prune --force + docker volume prune --force + docker image prune --force From 447013713fe47ed8fbac480131b702eda131bbe8 Mon Sep 17 00:00:00 2001 From: Kuan Butts Date: Sat, 6 Jan 2018 08:04:05 -0800 Subject: [PATCH 06/12] update port configuation for notebook --- docker-compose.yml | 2 +- jupyter_notebook_config.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 3bbe57f..0a6361a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -13,4 +13,4 @@ services: - .:/code - /tmp:/tmp ports: - - "9898:9898" + - "9797:9797" diff --git a/jupyter_notebook_config.py b/jupyter_notebook_config.py index 54f9fda..fc55344 100644 --- a/jupyter_notebook_config.py +++ b/jupyter_notebook_config.py @@ -227,7 +227,7 @@ # c.NotebookApp.password_required = False # The port the notebook server will listen on. -c.NotebookApp.port = 9898 +c.NotebookApp.port = 9797 # The number of additional ports to try if the specified port is not available. # c.NotebookApp.port_retries = 50 From c9878030be3534ebb5492bacdc32f67da6d191c8 Mon Sep 17 00:00:00 2001 From: Kuan Butts Date: Sat, 6 Jan 2018 09:50:36 -0800 Subject: [PATCH 07/12] whole build script for the Docker image --- Dockerfile | 71 +++++++++++++++++++++++++++++++++++++++++++++++++++--- Makefile | 4 +-- 2 files changed, 69 insertions(+), 6 deletions(-) diff --git a/Dockerfile b/Dockerfile index 8d491cd..419371c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,9 +1,72 @@ -FROM calthorpeanalytics/python3-geo:3.6.3-1.0.0 +FROM python:3.6-stretch -RUN mkdir /code -WORKDIR /code +RUN mkdir -p /provisioning +WORKDIR /provisioning + +# Install OS dependencies +RUN apt-get update && apt-get install -y \ + build-essential \ + dialog \ + curl \ + less \ + nano \ + unzip \ + vim \ + gcc \ + libgeos-dev \ + zlib1g-dev && \ + rm -rf /var/lib/apt/lists/* + +RUN echo "Installing Spatial Index library..." 
&& \ + mkdir -p /provisioning/spatialindex && \ + cd /provisioning/spatialindex && \ + curl -# -O http://download.osgeo.org/libspatialindex/spatialindex-src-1.8.5.tar.gz && \ + tar -xzf spatialindex-src-1.8.5.tar.gz && \ + cd spatialindex-src-1.8.5 && \ + ./configure --prefix=/usr/local && \ + make -j$(python -c 'import multiprocessing; print(multiprocessing.cpu_count())') && \ + make install && \ + ldconfig && \ + rm -rf /provisioning/spatialindex* + +RUN echo "Installing GEOS library..." && \ + mkdir -p /provisioning/geos && \ + cd /provisioning/geos && \ + curl -# -O http://download.osgeo.org/geos/geos-3.5.1.tar.bz2 && \ + tar -xjf geos-3.5.1.tar.bz2 && \ + cd geos-3.5.1 && \ + ./configure && \ + make -j$(python -c 'import multiprocessing; print(multiprocessing.cpu_count())') && \ + make install && \ + ldconfig -v && \ + rm -rf /provisioning/geos* -RUN pip install numpy==1.12.1 --src /usr/local/src +RUN echo "Installing Proj4 library..." && \ + mkdir -p /provisioning/proj4 && \ + cd /provisioning/proj4 && \ + curl -# -O http://download.osgeo.org/proj/proj-4.9.3.tar.gz && \ + tar -xzf proj-4.9.3.tar.gz && \ + cd proj-4.9.3 && \ + ./configure && \ + make -j$(python -c 'import multiprocessing; print(multiprocessing.cpu_count())') && \ + make install && \ + ldconfig -v && \ + rm -rf /provisioning/proj4 + +# basemap (incorrectly) requires numpy to be installed *before* installing it +RUN pip install --upgrade numpy && \ + echo "Installing Basemap plotting library..." && \ + mkdir -p /provisioning/matplotlib-basemap && \ + cd /provisioning/matplotlib-basemap && \ + curl -# -o basemap-1.0.7rel.tar.gz https://codeload.github.com/matplotlib/basemap/tar.gz/v1.0.7rel && \ + tar -xzf basemap-1.0.7rel.tar.gz && \ + cd basemap-1.0.7rel && \ + python setup.py install && \ + rm -rf /provisioning/matplotlib-basemap + +RUN mkdir /code && \ + pip install numpy==1.12.1 --src /usr/local/src +WORKDIR /code COPY requirements.txt /code/ RUN pip install -r requirements.txt --src /usr/local/src --exists-action=w diff --git a/Makefile b/Makefile index b2a5c22..03ce70b 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -it test: +test: PYTHONPATH=. MPLBACKEND="agg" coverage run --source peartree -m py.test --verbose performance: @@ -6,7 +6,7 @@ performance: notebook: docker-compose build - mkdir ./notebooks + mkdir -p ./notebooks docker-compose up notebook docker-clean: From 25ba10da862fbbcae38dc9ab7ea661490e5f4b09 Mon Sep 17 00:00:00 2001 From: Kuan Butts Date: Sun, 7 Jan 2018 09:27:55 -0800 Subject: [PATCH 08/12] add jupyter reqs, separate out base from rest of Dockerfile --- Dockerfile | 73 +++---------------------------------- docker/Dockerfile | 92 +++++++++++++++++++++++++++++++++++++++++++++++ requirements.txt | 2 +- 3 files changed, 98 insertions(+), 69 deletions(-) create mode 100644 docker/Dockerfile diff --git a/Dockerfile b/Dockerfile index 419371c..1838e52 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,75 +1,12 @@ FROM python:3.6-stretch -RUN mkdir -p /provisioning -WORKDIR /provisioning - -# Install OS dependencies -RUN apt-get update && apt-get install -y \ - build-essential \ - dialog \ - curl \ - less \ - nano \ - unzip \ - vim \ - gcc \ - libgeos-dev \ - zlib1g-dev && \ - rm -rf /var/lib/apt/lists/* - -RUN echo "Installing Spatial Index library..." 
&& \ - mkdir -p /provisioning/spatialindex && \ - cd /provisioning/spatialindex && \ - curl -# -O http://download.osgeo.org/libspatialindex/spatialindex-src-1.8.5.tar.gz && \ - tar -xzf spatialindex-src-1.8.5.tar.gz && \ - cd spatialindex-src-1.8.5 && \ - ./configure --prefix=/usr/local && \ - make -j$(python -c 'import multiprocessing; print(multiprocessing.cpu_count())') && \ - make install && \ - ldconfig && \ - rm -rf /provisioning/spatialindex* - -RUN echo "Installing GEOS library..." && \ - mkdir -p /provisioning/geos && \ - cd /provisioning/geos && \ - curl -# -O http://download.osgeo.org/geos/geos-3.5.1.tar.bz2 && \ - tar -xjf geos-3.5.1.tar.bz2 && \ - cd geos-3.5.1 && \ - ./configure && \ - make -j$(python -c 'import multiprocessing; print(multiprocessing.cpu_count())') && \ - make install && \ - ldconfig -v && \ - rm -rf /provisioning/geos* - -RUN echo "Installing Proj4 library..." && \ - mkdir -p /provisioning/proj4 && \ - cd /provisioning/proj4 && \ - curl -# -O http://download.osgeo.org/proj/proj-4.9.3.tar.gz && \ - tar -xzf proj-4.9.3.tar.gz && \ - cd proj-4.9.3 && \ - ./configure && \ - make -j$(python -c 'import multiprocessing; print(multiprocessing.cpu_count())') && \ - make install && \ - ldconfig -v && \ - rm -rf /provisioning/proj4 - -# basemap (incorrectly) requires numpy to be installed *before* installing it -RUN pip install --upgrade numpy && \ - echo "Installing Basemap plotting library..." && \ - mkdir -p /provisioning/matplotlib-basemap && \ - cd /provisioning/matplotlib-basemap && \ - curl -# -o basemap-1.0.7rel.tar.gz https://codeload.github.com/matplotlib/basemap/tar.gz/v1.0.7rel && \ - tar -xzf basemap-1.0.7rel.tar.gz && \ - cd basemap-1.0.7rel && \ - python setup.py install && \ - rm -rf /provisioning/matplotlib-basemap - RUN mkdir /code && \ pip install numpy==1.12.1 --src /usr/local/src -WORKDIR /code -COPY requirements.txt /code/ -RUN pip install -r requirements.txt --src /usr/local/src --exists-action=w +WORKDIR /code COPY requirements_dev.txt /code/ -RUN pip install -r requirements_dev.txt --src /usr/local/src --exists-action=w \ No newline at end of file +RUN pip install -r requirements_dev.txt + +COPY requirements.txt /code/ +RUN pip install -r requirements.txt \ No newline at end of file diff --git a/docker/Dockerfile b/docker/Dockerfile new file mode 100644 index 0000000..66479ac --- /dev/null +++ b/docker/Dockerfile @@ -0,0 +1,92 @@ +FROM python:3.6-stretch + +RUN mkdir -p /provisioning +WORKDIR /provisioning + +# Install OS dependencies +RUN apt-get update && apt-get install -y \ + build-essential \ + dialog \ + curl \ + less \ + nano \ + unzip \ + vim \ + gcc \ + libgeos-dev \ + zlib1g-dev && \ + rm -rf /var/lib/apt/lists/* + +# Get libgeos for Python Shapely package +# https://trac.osgeo.org/geos/ +ARG GEOS_VERSION=3.6.2 +RUN echo "Installing GEOS libraries..." && \ + mkdir -p /provisioning/geos && \ + cd /provisioning/geos && \ + curl -# -O http://download.osgeo.org/geos/geos-${GEOS_VERSION}.tar.bz2 && \ + tar -xjf geos-${GEOS_VERSION}.tar.bz2 && \ + cd geos-${GEOS_VERSION} && \ + ./configure && \ + make -j$(python -c 'import multiprocessing; print(multiprocessing.cpu_count())') && \ + make install && \ + ldconfig -v && \ + rm -rf /provisioning/geos* + +# Get ESRI FileGDB libraries for Fiona/Geopandas Python packages +# http://appsforms.esri.com/products/download/ +ARG FILEGDB_VERSION=1_4 +RUN echo "Installing ESRI FileGDB libraries..." 
&& \ + mkdir -p /provisioning/filegdb && \ + curl -# -o filegdb_api_${FILEGDB_VERSION}-64.tar.gz https://www.dropbox.com/s/dti2x6ydibyfs68/filegdb_api_1_2-64.tar.gz?dl=1 && \ + tar -zxvf filegdb_api_${FILEGDB_VERSION}-64.tar.gz && \ + cp -r FileGDB_API-64/lib/* /usr/local/lib && \ + cp -r FileGDB_API-64/include/* /usr/local/include && \ + ldconfig -v && \ + rm -rf /provisioning/filegdb* /provisioning/FileGDB* + +# Compile GDAL with FileGDB support for Fiona/Geopandas Python packages +RUN echo "Installing GDAL libraries..." && \ + mkdir -p /provisioning/gdal && \ + cd /provisioning/gdal && \ + curl -# -o gdal-2.2.1.tar.gz http://download.osgeo.org/gdal/2.2.1/gdal-2.2.1.tar.gz && \ + tar -zxvf gdal-2.2.1.tar.gz && \ + cd /provisioning/gdal/gdal-2.2.1 && \ + ./configure --prefix=/usr/ --with-python --with-fgdb && \ + make -j$(python -c 'import multiprocessing; print(multiprocessing.cpu_count())') && \ + make install && \ + rm -rf /provisioning/gdal* + +RUN echo "Installing Spatial Index library..." && \ + mkdir -p /provisioning/spatialindex && \ + cd /provisioning/spatialindex && \ + curl -# -O http://download.osgeo.org/libspatialindex/spatialindex-src-1.8.5.tar.gz && \ + tar -xzf spatialindex-src-1.8.5.tar.gz && \ + cd spatialindex-src-1.8.5 && \ + ./configure --prefix=/usr/local && \ + make -j$(python -c 'import multiprocessing; print(multiprocessing.cpu_count())') && \ + make install && \ + ldconfig && \ + rm -rf /provisioning/spatialindex* + +RUN echo "Installing Proj4 library..." && \ + mkdir -p /provisioning/proj4 && \ + cd /provisioning/proj4 && \ + curl -# -O http://download.osgeo.org/proj/proj-4.9.3.tar.gz && \ + tar -xzf proj-4.9.3.tar.gz && \ + cd proj-4.9.3 && \ + ./configure && \ + make -j$(python -c 'import multiprocessing; print(multiprocessing.cpu_count())') && \ + make install && \ + ldconfig -v && \ + rm -rf /provisioning/proj4 + +# basemap (incorrectly) requires numpy to be installed *before* installing it +RUN pip install --upgrade numpy && \ + echo "Installing Basemap plotting library..." && \ + mkdir -p /provisioning/matplotlib-basemap && \ + cd /provisioning/matplotlib-basemap && \ + curl -# -o basemap-1.0.7rel.tar.gz https://codeload.github.com/matplotlib/basemap/tar.gz/v1.0.7rel && \ + tar -xzf basemap-1.0.7rel.tar.gz && \ + cd basemap-1.0.7rel && \ + python setup.py install && \ + rm -rf /provisioning/matplotlib-basemap \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index f086808..1fe940a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ fiona==1.6.1 networkx>=2.0 osmnx==0.6 -partridge==0.3.0 +partridge>=0.3.0 From 2245e73e6308ffd3b7d59988e1b1309ea18ca9a3 Mon Sep 17 00:00:00 2001 From: Kuan Butts Date: Tue, 9 Jan 2018 15:19:21 -0800 Subject: [PATCH 09/12] update gcc51 on edsri gdb to address gdal compile issues --- docker/Dockerfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 66479ac..7dcf5b6 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -34,13 +34,13 @@ RUN echo "Installing GEOS libraries..." && \ # Get ESRI FileGDB libraries for Fiona/Geopandas Python packages # http://appsforms.esri.com/products/download/ -ARG FILEGDB_VERSION=1_4 +ARG FILEGDB_VERSION=1_5 RUN echo "Installing ESRI FileGDB libraries..." 
&& \ mkdir -p /provisioning/filegdb && \ - curl -# -o filegdb_api_${FILEGDB_VERSION}-64.tar.gz https://www.dropbox.com/s/dti2x6ydibyfs68/filegdb_api_1_2-64.tar.gz?dl=1 && \ + curl -L -o filegdb_api_${FILEGDB_VERSION}-64.tar.gz https://www.dropbox.com/s/xi11vshwt9uojsy/FileGDB_API_1_5_64gcc51.tar.gz?dl=1 && \ tar -zxvf filegdb_api_${FILEGDB_VERSION}-64.tar.gz && \ - cp -r FileGDB_API-64/lib/* /usr/local/lib && \ - cp -r FileGDB_API-64/include/* /usr/local/include && \ + cp -r FileGDB_API-64gcc51/lib/* /usr/local/lib && \ + cp -r FileGDB_API-64gcc51/include/* /usr/local/include && \ ldconfig -v && \ rm -rf /provisioning/filegdb* /provisioning/FileGDB* From ea45bdd0a045c1bb7769f88801b5dab85c3f9d46 Mon Sep 17 00:00:00 2001 From: Kuan Butts Date: Tue, 9 Jan 2018 17:26:26 -0800 Subject: [PATCH 10/12] now refer to new Docker base image --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 1838e52..ae52036 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.6-stretch +FROM kuanb/peartree RUN mkdir /code && \ pip install numpy==1.12.1 --src /usr/local/src From 9a4c0ef5fd876e38fa90e3a56b706bf537651ffe Mon Sep 17 00:00:00 2001 From: Kuan Butts Date: Tue, 9 Jan 2018 19:08:51 -0800 Subject: [PATCH 11/12] get dollar sign in --- Makefile | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/Makefile b/Makefile index 03ce70b..39e1a81 100644 --- a/Makefile +++ b/Makefile @@ -13,3 +13,10 @@ docker-clean: docker network prune --force docker volume prune --force docker image prune --force + +install-graph-tool: + sed -i -e '$$a\ + deb http://downloads.skewed.de/apt/stretch stretch main\ + deb-src http://downloads.skewed.de/apt/stretch stretch main' /etc/apt/sources.list && \ + apt-get update && \ + apt-get install python3-graph-tool From a202d216abb7b1dab77d84602862dcf02bd91a65 Mon Sep 17 00:00:00 2001 From: Kuan Butts Date: Tue, 9 Jan 2018 19:09:02 -0800 Subject: [PATCH 12/12] update deps --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index ae52036..38b7dca 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,7 @@ FROM kuanb/peartree RUN mkdir /code && \ - pip install numpy==1.12.1 --src /usr/local/src + pip install numpy==1.14.0 scipy==1.0.0 WORKDIR /code
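
Taken together, the patches above amount to roughly the following workflow (a sketch assembled from the Makefile, docker-compose.yml, and jupyter_notebook_config.py changes; the `docker-compose exec` invocation is an assumption about how the install-graph-tool target is meant to be run, not something spelled out in the patches):

    # Build the image, create ./notebooks if needed, and start the Jupyter service
    make notebook

    # The server listens on 0.0.0.0:9797 with open_browser disabled and the port
    # published by docker-compose, so browse to http://localhost:9797 on the host
    # (using the token printed in the container log, since no password is set);
    # notebooks land in ./notebooks via the .:/code bind mount.

    # Optionally add graph-tool inside the running container (PATCH 11)
    docker-compose exec notebook make install-graph-tool

    # Reclaim unused Docker networks, volumes, and images afterwards
    make docker-clean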