black as pre-commit hook
maxpumperla committed Nov 29, 2019
1 parent 5ee7120 commit ca2a323
Showing 18 changed files with 286 additions and 213 deletions.
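The diff below is almost entirely mechanical reformatting by black: single quotes become double quotes, long call signatures are split one argument per line with trailing commas, long literals and comprehensions are broken across lines, and files gain a trailing newline. The only new files are python/.pre-commit-config.yaml and the python/tests/config.json fixture.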
5 changes: 5 additions & 0 deletions python/.pre-commit-config.yaml
@@ -0,0 +1,5 @@
+repos:
+- repo: https://github.com/ambv/black
+  rev: stable
+  hooks:
+  - id: black
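With this file in place, a contributor would typically enable the hook once via pre-commit (for example pip install pre-commit followed by pre-commit install in the python/ directory), after which black reformats staged Python files on every commit; pre-commit run --all-files applies it to the whole tree. The exact bootstrap commands are not part of this commit.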
6 changes: 4 additions & 2 deletions python/konduit/__init__.py
@@ -1,13 +1,15 @@
 import os
-jar = os.getenv('KONDUIT_JAR_PATH', 'konduit.jar')
+
+jar = os.getenv("KONDUIT_JAR_PATH", "konduit.jar")
 
 try:
     import pydl4j
+
     pydl4j.add_classpath(jar)
 except Exception as e:
     print("VM already running from previous test")
     print(e)
 
 from .inference import *
 from .server import *
-from .client import *
\ No newline at end of file
+from .client import *
1 change: 1 addition & 0 deletions python/tests/config.json
@@ -0,0 +1 @@
{"@type": "InferenceConfiguration", "steps": [{"@type": "ModelStep", "inputNames": ["IteratorGetNext:0", "IteratorGetNext:1", "IteratorGetNext:4"], "outputNames": ["loss/Softmax"], "modelConfig": {"@type": "TensorFlowConfig", "tensorDataTypesConfig": {"@type": "TensorDataTypesConfig", "inputDataTypes": {"IteratorGetNext:0": "INT32", "IteratorGetNext:1": "INT32", "IteratorGetNext:4": "INT32"}}, "modelConfigType": {"@type": "ModelConfigType", "modelType": "TENSORFLOW", "modelLoadingPath": "bert_mrpc_frozen.pb"}}, "parallelInferenceConfig": {"@type": "ParallelInferenceConfig", "workers": 1}}], "servingConfig": {"@type": "ServingConfig", "httpPort": 5337, "inputDataFormat": "NUMPY", "outputDataFormat": "NUMPY", "predictionType": "RAW", "logTimings": true}}
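This fixture appears to be the serialized form of the same BERT setup that test_bert_serving.py (below) builds programmatically: a TensorFlow ModelStep around bert_mrpc_frozen.pb with a single inference worker and NUMPY input/output, here pinned to port 5337.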
2 changes: 1 addition & 1 deletion python/tests/simple.py
@@ -1,2 +1,2 @@
 first += 2
-second = first
\ No newline at end of file
+second = first
64 changes: 38 additions & 26 deletions python/tests/test_bert_serving.py
@@ -1,7 +1,12 @@
 import random
 
 import numpy as np
-from konduit import ParallelInferenceConfig, ServingConfig, TensorFlowConfig, ModelConfigType
+from konduit import (
+    ParallelInferenceConfig,
+    ServingConfig,
+    TensorFlowConfig,
+    ModelConfigType,
+)
 from konduit import TensorDataTypesConfig, ModelStep, InferenceConfiguration
 from konduit.client import Client
 from konduit.server import Server
@@ -19,40 +24,47 @@ def test_server_start():
     output_names = ["loss/Softmax"]
     port = random.randint(1000, 65535)
     parallel_inference_config = ParallelInferenceConfig(workers=1)
-    serving_config = ServingConfig(http_port=port,
-                                   input_data_format='NUMPY',
-                                   output_data_format='NUMPY',
-                                   log_timings=True)
+    serving_config = ServingConfig(
+        http_port=port,
+        input_data_format="NUMPY",
+        output_data_format="NUMPY",
+        log_timings=True,
+    )
 
     tensorflow_config = TensorFlowConfig(
-        model_config_type=ModelConfigType(model_type='TENSORFLOW',
-                                          model_loading_path='bert_mrpc_frozen.pb'),
+        model_config_type=ModelConfigType(
+            model_type="TENSORFLOW", model_loading_path="bert_mrpc_frozen.pb"
+        ),
         tensor_data_types_config=TensorDataTypesConfig(
-            input_data_types={'IteratorGetNext:0': 'INT32',
-                              'IteratorGetNext:1': 'INT32',
-                              'IteratorGetNext:4': 'INT32'
-                              }))
+            input_data_types={
+                "IteratorGetNext:0": "INT32",
+                "IteratorGetNext:1": "INT32",
+                "IteratorGetNext:4": "INT32",
+            }
+        ),
+    )
 
-    model_pipeline_step = ModelStep(model_config=tensorflow_config,
-                                    parallel_inference_config=parallel_inference_config,
-                                    input_names=input_names,
-                                    output_names=output_names)
+    model_pipeline_step = ModelStep(
+        model_config=tensorflow_config,
+        parallel_inference_config=parallel_inference_config,
+        input_names=input_names,
+        output_names=output_names,
+    )
 
-    inference = InferenceConfiguration(serving_config=serving_config,
-                                       steps=[model_pipeline_step])
+    inference = InferenceConfiguration(
+        serving_config=serving_config, steps=[model_pipeline_step]
+    )
 
-    server = Server(inference_config=inference,
-                    extra_start_args='-Xmx8g',
-                    jar_path='konduit.jar')
+    server = Server(
+        inference_config=inference, extra_start_args="-Xmx8g", jar_path="konduit.jar"
+    )
     server.start()
-    client = Client(input_data_format='NUMPY',
-                    output_data_format='NUMPY',
-                    port=port)
+    client = Client(input_data_format="NUMPY", output_data_format="NUMPY", port=port)
 
     data_input = {
-        'IteratorGetNext:0': np.load('../data/input-0.npy'),
-        'IteratorGetNext:1': np.load('../data/input-1.npy'),
-        'IteratorGetNext:4': np.load('../data/input-4.npy')
+        "IteratorGetNext:0": np.load("../data/input-0.npy"),
+        "IteratorGetNext:1": np.load("../data/input-1.npy"),
+        "IteratorGetNext:4": np.load("../data/input-4.npy"),
     }
 
     assert is_port_in_use(port)
15 changes: 6 additions & 9 deletions python/tests/test_client.py
@@ -10,9 +10,9 @@ def test_client_from_server():
     serving_config = ServingConfig(http_port=port)
 
     python_config = PythonConfig(
-        python_code='first += 2',
-        python_inputs={'first': 'NDARRAY'},
-        python_outputs={'first': 'NDARRAY'},
+        python_code="first += 2",
+        python_inputs={"first": "NDARRAY"},
+        python_outputs={"first": "NDARRAY"},
     )
 
     step = PythonStep().step(python_config)
@@ -27,16 +27,13 @@ def test_client_from_server():
 
 @pytest.mark.unit
 def test_multipart_regex():
-    client = Client(input_names=['partname'], output_names=['nobody_cares'])
+    client = Client(input_names=["partname"], output_names=["nobody_cares"])
 
-    test_data = {
-        'partname[0]': 'foo',
-        "partname[1]": 'bar'
-    }
+    test_data = {"partname[0]": "foo", "partname[1]": "bar"}
 
     client._validate_multi_part(test_data)
 
-    test_data['foo'] = 'baz'
+    test_data["foo"] = "baz"
 
     with pytest.raises(Exception):
         client._validate_multi_part(test_data)
26 changes: 13 additions & 13 deletions python/tests/test_client_serde.py
@@ -10,15 +10,14 @@
 
 @pytest.mark.unit
 def test_multipart_encode():
-    input_names = ["IteratorGetNext:0",
-                   "IteratorGetNext:1", "IteratorGetNext:4"]
+    input_names = ["IteratorGetNext:0", "IteratorGetNext:1", "IteratorGetNext:4"]
     output_names = ["loss/Softmax"]
     port = random.randint(1000, 65535)
     client = Client(input_names=input_names, output_names=output_names, port=port)
 
     input_data = {
-        'input1': Client._convert_numpy_to_binary(np.ones(1)),
-        'input2': Client._convert_numpy_to_binary(np.ones(2))
+        "input1": Client._convert_numpy_to_binary(np.ones(1)),
+        "input2": Client._convert_numpy_to_binary(np.ones(2)),
     }
 
     converted = Client._convert_multi_part_inputs(input_data)
@@ -30,19 +29,20 @@ def test_multipart_encode():
 @pytest.mark.unit
 def test_python_serde():
     python_configuration = PythonConfig(
-        python_code='first += 2',
-        python_inputs=['first'],
-        python_outputs=['first']
+        python_code="first += 2", python_inputs=["first"], python_outputs=["first"]
     )
 
     port = random.randint(1000, 65535)
-    serving_config = ServingConfig(http_port=port,
-                                   input_data_format='NUMPY',
-                                   output_data_format='NUMPY',
-                                   log_timings=True)
+    serving_config = ServingConfig(
+        http_port=port,
+        input_data_format="NUMPY",
+        output_data_format="NUMPY",
+        log_timings=True,
+    )
 
     python_pipeline_step = PythonStep().step(python_configuration)
-    inference_config = InferenceConfiguration(serving_config=serving_config,
-                                              steps=[python_pipeline_step])
+    inference_config = InferenceConfiguration(
+        serving_config=serving_config, steps=[python_pipeline_step]
+    )
 
     json.dumps(config_to_dict_with_type(inference_config))
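The final json.dumps call is a smoke test that the configuration survives serialization; the resulting dictionary presumably uses the same @type-tagged layout as the tests/config.json fixture above.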
82 changes: 48 additions & 34 deletions python/tests/test_docs.py
@@ -4,17 +4,17 @@
 import pytest
 
 
-def markdown_blocks(file_path, language='python'):
+def markdown_blocks(file_path, language="python"):
     """Get language-specific markdown code blocks.
     :param file_path: path to markdown file
     :param language: 'python', 'java' (or literally any valid code fence language)
     :return: list of code as string
     """
     code_blocks = []
-    code_regex = r'^```.*'
-    code_block_open_re = r'^```(`*)({0})$'.format(language)
+    code_regex = r"^```.*"
+    code_block_open_re = r"^```(`*)({0})$".format(language)
 
-    with open(file_path, 'r') as f:
+    with open(file_path, "r") as f:
         block = []
         python = True
         in_code_block = False
@@ -25,7 +25,7 @@ def markdown_blocks(file_path, language='python'):
             if in_code_block:
                 if code_block_delimiter:
                     if python:
-                        code_blocks.append(''.join(block))
+                        code_blocks.append("".join(block))
                     block = []
                     python = True
                     in_code_block = False
@@ -40,13 +40,18 @@
 
 def is_markdown(f):
     """Does this file have a markdown extension?"""
-    markdown_extensions = ['.markdown', '.mdown', '.mkdn', '.mkd', '.md']
+    markdown_extensions = [".markdown", ".mdown", ".mkdn", ".mkd", ".md"]
     return os.path.splitext(f)[1] in markdown_extensions
 
 
 def get_files(input_dir):
     """Get all markdown files in a directory recursively."""
-    return [os.path.join(dp, f) for dp, dn, file_names in os.walk(input_dir) for f in file_names if is_markdown(f)]
+    return [
+        os.path.join(dp, f)
+        for dp, dn, file_names in os.walk(input_dir)
+        for f in file_names
+        if is_markdown(f)
+    ]
 
 
 def exec_python_code(code_blocks):
@@ -66,13 +71,13 @@ def exec_python_code(code_blocks):
 
 def make_folders():
     """Make base folders for Java application logic."""
-    if not os.path.isdir('./ai/konduit/serving'):
-        os.makedirs('./ai/konduit/serving')
+    if not os.path.isdir("./ai/konduit/serving"):
+        os.makedirs("./ai/konduit/serving")
 
 
 def is_self_contained_example(block):
     """Basic sanity check if markdown block contains a class and a main method."""
-    return 'public static void main' in block and 'public class' in block
+    return "public static void main" in block and "public class" in block
 
 
 def write_self_contained_example(block):
@@ -81,14 +86,14 @@
     :param block: code block as string
     """
-    class_regex = r'public\s+class\s+(\w+)'
+    class_regex = r"public\s+class\s+(\w+)"
     class_name = re.search(class_regex, block)
     if class_name:
         class_name = class_name.group(1)
-        with open('ai/konduit/serving/' + class_name + '.java', 'w') as f:
+        with open("ai/konduit/serving/" + class_name + ".java", "w") as f:
             f.write(block)
     else:
-        raise Exception('Could not determine proper class name')
+        raise Exception("Could not determine proper class name")
 
 
 def write_example_from_snippet(block, markdown_root, i):
@@ -97,17 +102,26 @@ def write_example_from_snippet(block, markdown_root, i):
     :param markdown_root: name of the file this snippet comes from (without extension)
     :param i: this block is the i-th example in the current markdown file
     """
-    lines = block.split('\n')
-    import_lines = [l for l in lines if l.startswith('import ')]
-    code_lines = [l for l in lines if not l.startswith('import ')]
-    class_name = 'BasicsTest' + markdown_root + str(i)
-
-    code = "package ai.konduit.serving;\n\n" + '\n'.join(import_lines) \
-           + "\n\npublic class " + class_name + " {\n" \
-           + "\tpublic " + class_name + " () {}\n" \
-           + "\tpublic void main() {\n\t\t" + '\n\t\t'.join(code_lines) + "\n\t}\n}"
-
-    with open('ai/konduit/serving/' + class_name + '.java', 'w') as f:
+    lines = block.split("\n")
+    import_lines = [l for l in lines if l.startswith("import ")]
+    code_lines = [l for l in lines if not l.startswith("import ")]
+    class_name = "BasicsTest" + markdown_root + str(i)
+
+    code = (
+        "package ai.konduit.serving;\n\n"
+        + "\n".join(import_lines)
+        + "\n\npublic class "
+        + class_name
+        + " {\n"
+        + "\tpublic "
+        + class_name
+        + " () {}\n"
+        + "\tpublic void main() {\n\t\t"
+        + "\n\t\t".join(code_lines)
+        + "\n\t}\n}"
+    )
+
+    with open("ai/konduit/serving/" + class_name + ".java", "w") as f:
        f.write(code)
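For reference, a sketch of what this helper would emit for a hypothetical two-line snippet (one import plus one statement, names invented for illustration) taken from a file Quickstart.md as block index 0: import lines are hoisted above the generated class, the remaining lines become the body of a non-static main(), and the result is written to ai/konduit/serving/BasicsTestQuickstart0.java.

    package ai.konduit.serving;

    import ai.konduit.serving.InferenceConfiguration;

    public class BasicsTestQuickstart0 {
        public BasicsTestQuickstart0 () {}
        public void main() {
            InferenceConfiguration config = new InferenceConfiguration();
        }
    }

exec_java_code below then instantiates such a class via autoclass and calls main() on the instance.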


@@ -147,21 +161,21 @@ def exec_java_code(code_blocks, file_path):
 
     for i, block in enumerate(code_blocks):
         if is_self_contained_example(block):
-            class_regex = r'public\s+class\s+(\w+)'
+            class_regex = r"public\s+class\s+(\w+)"
             class_name = re.search(class_regex, block).group(1)
         else:
-            class_name = 'BasicsTest{}{}'.format(markdown_root, str(i))
-        basic_test_class = autoclass('ai.konduit.serving.' + class_name)
+            class_name = "BasicsTest{}{}".format(markdown_root, str(i))
+        basic_test_class = autoclass("ai.konduit.serving." + class_name)
         basic_test = basic_test_class()
         if is_self_contained_example(block):
            basic_test.main([])
         else:
            basic_test.main()
 
 
-def clean_java_files(ext=''):
+def clean_java_files(ext=""):
     """Clean Java source files and classes"""
-    for file_name in glob.glob('./ai/konduit/serving/*' + ext):
+    for file_name in glob.glob("./ai/konduit/serving/*" + ext):
         try:
             os.remove(file_name)
         except FileNotFoundError:
@@ -172,21 +186,21 @@ def clean_java_files(ext=''):
 def test_docs():
     """This is the main unit test for testing documentation code snippets
     contained in markdown files."""
-    files = get_files('../docs')
+    files = get_files("../docs")
     for file_path in files:
-        python_blocks = markdown_blocks(file_path, 'python')
+        python_blocks = markdown_blocks(file_path, "python")
         exec_python_code(python_blocks)
 
-        java_blocks = markdown_blocks(file_path, 'java')
+        java_blocks = markdown_blocks(file_path, "java")
         exec_java_code(java_blocks, file_path)
         clean_java_files()
 
 
 def _prepare_docs_jar():
     """This helper will be called from the `prepare_doc_tests.sh` script."""
-    files = get_files('../docs')
+    files = get_files("../docs")
     for file_path in files:
-        java_blocks = markdown_blocks(file_path, 'java')
+        java_blocks = markdown_blocks(file_path, "java")
         write_java_files(java_blocks, file_path)


