Commit

Fix: apply formatting
RollerKnobster committed Jun 24, 2024
1 parent 695c592 commit 9a75758
Showing 15 changed files with 70 additions and 53 deletions.
9 changes: 7 additions & 2 deletions docs/_static/architecture_diagram.py
@@ -9,9 +9,14 @@
from diagrams.k8s.storage import PV
from diagrams.custom import Custom

directory=os.path.dirname(__file__)
directory = os.path.dirname(__file__)

with Diagram("Gordo flow", filename=os.path.join(directory, "architecture_diagram"), outformat="png", show=False) as diag:
with Diagram(
"Gordo flow",
filename=os.path.join(directory, "architecture_diagram"),
outformat="png",
show=False,
) as diag:
with Cluster("K8s"):
gordo = CRD("Gordo")
api = API("")
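
Both hunks above are pure reformatting with no behaviour change: the assignment to directory gains spaces around the equals sign, and the overlong Diagram(...) call is exploded into one argument per line with a trailing comma. The whole commit matches the output of an automatic formatter such as Black (an assumption; the commit message says only "Fix: apply formatting"). A minimal, self-contained sketch of the long-call rule, using only the standard library instead of the diagrams package:

import os

# Stand-in example (not from the gordo code base): a call whose arguments would
# exceed the line length is split one argument per line, ends with a trailing
# comma, and puts the closing parenthesis on its own line.
directory = os.path.dirname(os.path.abspath(__file__))

output_path = os.path.join(
    directory,
    "architecture_diagram",
    "gordo_flow.png",
)
print(output_path)
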
15 changes: 11 additions & 4 deletions docs/conf.py
@@ -26,7 +26,11 @@
author = "Equinor ASA"
version = gordo.__version__
_parsed_version = parse_version(version)
commit = f"{version}" if type(_parsed_version) is GordoRelease and not _parsed_version.suffix else "HEAD"
commit = (
f"{version}"
if type(_parsed_version) is GordoRelease and not _parsed_version.suffix
else "HEAD"
)

# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
@@ -44,7 +48,7 @@
"IPython.sphinxext.ipython_console_highlighting",
"sphinx_copybutton",
"sphinx_click",
"nbsphinx"
"nbsphinx",
]

root_doc = "index"
Expand All @@ -59,8 +63,11 @@
_ignore_linkcode_infos = [
# caused "OSError: could not find class definition"
{"module": "gordo_core.utils", "fullname": "PredictionResult"},
{'module': 'gordo.workflow.config_elements.schemas', 'fullname': 'Model.Config.extra'},
{'module': 'gordo.reporters.postgres', 'fullname': 'Machine.DoesNotExist'}
{
"module": "gordo.workflow.config_elements.schemas",
"fullname": "Model.Config.extra",
},
{"module": "gordo.reporters.postgres", "fullname": "Machine.DoesNotExist"},
]


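
The conf.py hunks follow the same conventions: the long conditional expression that picks the commit value is wrapped in parentheses and split over three lines, the extensions list gains a trailing comma after "nbsphinx", and the _ignore_linkcode_infos entries switch to double quotes with one key per line. A small sketch of the parenthesised-conditional layout, with invented version strings:

# Hypothetical stand-ins: the version string and the release check are invented for illustration.
version = "1.2.3"
is_plain_release = all(part.isdigit() for part in version.split("."))

# The right-hand side is parenthesised so the condition and both branches
# can each sit on their own line instead of one overlong line.
commit = (
    f"{version}"
    if is_plain_release and not version.endswith(("rc", "dev", "post"))
    else "HEAD"
)
print(commit)
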
36 changes: 18 additions & 18 deletions gordo/machine/model/anomaly/diff.py
@@ -95,13 +95,13 @@ def get_metadata(self):
if hasattr(self, "aggregate_threshold_"):
metadata["aggregate-threshold"] = self.aggregate_threshold_
if hasattr(self, "feature_thresholds_per_fold_"):
metadata[
"feature-thresholds-per-fold"
] = self.feature_thresholds_per_fold_.to_dict()
metadata["feature-thresholds-per-fold"] = (
self.feature_thresholds_per_fold_.to_dict()
)
if hasattr(self, "aggregate_thresholds_per_fold_"):
metadata[
"aggregate-thresholds-per-fold"
] = self.aggregate_thresholds_per_fold_
metadata["aggregate-thresholds-per-fold"] = (
self.aggregate_thresholds_per_fold_
)
# Window threshold metadata
if hasattr(self, "window"):
metadata["window"] = self.window
@@ -111,23 +111,23 @@ def get_metadata(self):
hasattr(self, "smooth_feature_thresholds_")
and self.smooth_aggregate_threshold_ is not None
):
metadata[
"smooth-feature-thresholds"
] = self.smooth_feature_thresholds_.tolist()
metadata["smooth-feature-thresholds"] = (
self.smooth_feature_thresholds_.tolist()
)
if (
hasattr(self, "smooth_aggregate_threshold_")
and self.smooth_aggregate_threshold_ is not None
):
metadata["smooth-aggregate-threshold"] = self.smooth_aggregate_threshold_

if hasattr(self, "smooth_feature_thresholds_per_fold_"):
metadata[
"smooth-feature-thresholds-per-fold"
] = self.smooth_feature_thresholds_per_fold_.to_dict()
metadata["smooth-feature-thresholds-per-fold"] = (
self.smooth_feature_thresholds_per_fold_.to_dict()
)
if hasattr(self, "smooth_aggregate_thresholds_per_fold_"):
metadata[
"smooth-aggregate-thresholds-per-fold"
] = self.smooth_aggregate_thresholds_per_fold_
metadata["smooth-aggregate-thresholds-per-fold"] = (
self.smooth_aggregate_thresholds_per_fold_
)

if isinstance(self.base_estimator, GordoBase):
metadata.update(self.base_estimator.get_metadata())
@@ -241,9 +241,9 @@ def cross_validate(
smooth_aggregate_threshold_fold = (
scaled_mse.rolling(self.window).min().max()
)
self.smooth_aggregate_thresholds_per_fold_[
f"fold-{i}"
] = smooth_aggregate_threshold_fold
self.smooth_aggregate_thresholds_per_fold_[f"fold-{i}"] = (
smooth_aggregate_threshold_fold
)

smooth_tag_thresholds_fold = mae.rolling(self.window).min().max()
smooth_tag_thresholds_fold.name = f"fold-{i}"
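
Every hunk in diff.py is the same mechanical rewrite: instead of splitting the subscript key across lines (metadata[ / "long-key" / ] = value), the key stays on one line and the right-hand side is wrapped in parentheses. Newer Black releases prefer splitting the right-hand side of assignments this way; that this is the tool at work is an assumption. A toy reproduction with made-up names:

# Invented helper and data, with long names so the assignment will not fit on one line.
def smooth_feature_thresholds_per_fold(thresholds: dict, window: int) -> dict:
    """Pretend smoothing: scale every per-fold threshold (illustration only)."""
    return {fold: value / window for fold, value in thresholds.items()}


feature_thresholds_per_fold = {"fold-0": 1.5, "fold-1": 1.7, "fold-2": 1.9}
metadata: dict = {}

# The subscript stays intact; the overlong right-hand side gets the parentheses.
metadata["smooth-feature-thresholds-per-fold"] = (
    smooth_feature_thresholds_per_fold(feature_thresholds_per_fold, window=144)
)
print(metadata)
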
1 change: 0 additions & 1 deletion gordo/machine/model/factories/lstm_autoencoder.py
@@ -188,7 +188,6 @@ def lstm_hourglass(
compile_kwargs: Dict[str, Any] = dict(),
**kwargs,
) -> tensorflow.keras.models.Sequential:

"""
Builds an hourglass shaped neural network, with decreasing number of neurons
1 change: 0 additions & 1 deletion gordo/machine/model/models.py
@@ -541,7 +541,6 @@ def _validate_and_fix_size_of_X(self, X):
def fit( # type: ignore
self, X: np.ndarray, y: np.ndarray, **kwargs
) -> "KerasLSTMForecast":

"""
This fits a one step forecast LSTM architecture.
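
This change and the one in lstm_autoencoder.py just above it are identical: the single blank line between a def signature and its docstring is removed, so the docstring starts immediately after the signature. A hypothetical function showing the resulting shape:

import numpy as np


def fit(X: np.ndarray, y: np.ndarray) -> str:
    """The docstring now sits directly under the signature, with no blank line between."""
    return f"fitted on {len(X)} samples"


print(fit(np.zeros((10, 3)), np.zeros(10)))
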
16 changes: 10 additions & 6 deletions gordo/machine/model/transformers/imputer.py
@@ -71,14 +71,18 @@ def fit(self, X: Union[pd.DataFrame, np.ndarray], y=None):

# Calculate a 1d arrays of fill values for each feature
self._posinf_fill_values = _posinf_fill_values.apply(
lambda val: val + self.delta
if max_allowable_value - self.delta > val
else max_allowable_value
lambda val: (
val + self.delta
if max_allowable_value - self.delta > val
else max_allowable_value
)
)
self._neginf_fill_values = _neginf_fill_values.apply(
lambda val: val - self.delta
if min_allowable_value + self.delta < val
else min_allowable_value
lambda val: (
val - self.delta
if min_allowable_value + self.delta < val
else min_allowable_value
)
)

return self
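
In the imputer only the two lambdas change shape: their conditional-expression bodies are wrapped in parentheses so they can span several lines, while the clamping logic itself is untouched. A self-contained sketch of what one of those lambdas does, with simplified names (assuming pandas and numpy are available):

import numpy as np
import pandas as pd

delta = 1.0
max_allowable_value = np.finfo(np.float64).max

# Per-feature maxima observed during fit (invented values).
observed_maxima = pd.Series([10.0, 250.0, max_allowable_value])

# Same layout as the reformatted code: the lambda's conditional expression is
# parenthesised, one clause per line.
posinf_fill_values = observed_maxima.apply(
    lambda val: (
        val + delta
        if max_allowable_value - delta > val
        else max_allowable_value
    )
)
print(posinf_fill_values.tolist())
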
10 changes: 6 additions & 4 deletions gordo/machine/model/utils.py
@@ -111,9 +111,11 @@ def make_base_dataframe(

# Calculate the end times if possible, or also all 'None's
end_series = start_series.map(
lambda start: (start + frequency).isoformat()
if isinstance(start, datetime) and frequency is not None
else None
lambda start: (
(start + frequency).isoformat()
if isinstance(start, datetime) and frequency is not None
else None
)
)

# Convert to isoformatted string for JSON serialization.
@@ -134,7 +136,7 @@
# the multiindex column dataframe, and naming their second level labels as needed.
name: str
values: np.ndarray
for (name, values) in filter(lambda nv: nv[1] is not None, names_n_values):
for name, values in filter(lambda nv: nv[1] is not None, names_n_values):

_tags = tags if name == "model-input" else target_tag_list

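
Besides another parenthesised lambda, the loop header here drops the redundant parentheses around the unpacking target: for (name, values) in ... becomes for name, values in ..., with identical behaviour. Illustrated with invented data:

import numpy as np

names_n_values = [
    ("model-input", np.array([1.0, 2.0])),
    ("model-output", None),
]

# Unpacking works the same with or without parentheses; the formatter removes them.
for name, values in filter(lambda nv: nv[1] is not None, names_n_values):
    print(name, values.tolist())
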
6 changes: 3 additions & 3 deletions gordo/serializer/from_definition.py
@@ -174,9 +174,9 @@ def _build_step(
import_str = list(step.keys())[0]

try:
StepClass: Union[
None, FeatureUnion, Pipeline, BaseEstimator
] = import_location(import_str)
StepClass: Union[None, FeatureUnion, Pipeline, BaseEstimator] = (
import_location(import_str)
)
except (ImportError, ValueError):
StepClass = None

8 changes: 5 additions & 3 deletions gordo/serializer/into_definition.py
@@ -172,9 +172,9 @@ def load_definition_from_params(params: dict, tuples_to_list: bool = True) -> di
# TODO: Make this more robust, probably via another function to parse the iterable recursively
# TODO: b/c it _could_, in theory, be a dict of {str: BaseEstimator} or similar.
definition[param] = [
_decompose_node(leaf[1], tuples_to_list=tuples_to_list)
if isinstance(leaf, tuple)
else leaf
(
_decompose_node(leaf[1], tuples_to_list=tuples_to_list)
if isinstance(leaf, tuple)
else leaf
)
for leaf in param_val
]

8 changes: 5 additions & 3 deletions gordo/server/utils.py
@@ -131,9 +131,11 @@ def dataframe_to_dict(df: pd.DataFrame) -> dict:
data.index = data.index.astype(str)
if isinstance(df.columns, pd.MultiIndex):
return {
col: data[col].to_dict()
if isinstance(data[col], pd.DataFrame)
else pd.DataFrame(data[col]).to_dict()
col: (
data[col].to_dict()
if isinstance(data[col], pd.DataFrame)
else pd.DataFrame(data[col]).to_dict()
)
for col in data.columns.get_level_values(0)
}
else:
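
dataframe_to_dict gets the same treatment inside a dict comprehension: the conditional value expression is parenthesised so it can wrap cleanly. A compact sketch of that comprehension on toy MultiIndex-column data:

import pandas as pd

# Toy frame with two-level columns, mimicking a model-input / model-output layout.
df = pd.DataFrame(
    [[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]],
    columns=pd.MultiIndex.from_tuples(
        [("model-input", "tag-a"), ("model-input", "tag-b"), ("model-output", "tag-a")]
    ),
)
data = df.copy()
data.index = data.index.astype(str)

# The comprehension's value expression is wrapped in parentheses so the
# conditional can span several lines.
result = {
    col: (
        data[col].to_dict()
        if isinstance(data[col], pd.DataFrame)
        else pd.DataFrame(data[col]).to_dict()
    )
    for col in data.columns.get_level_values(0)
}
print(sorted(result))
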
3 changes: 1 addition & 2 deletions gordo/util/version.py
@@ -8,8 +8,7 @@

class Version(metaclass=ABCMeta):
@abstractmethod
def get_version(self):
...
def get_version(self): ...


class Special(Enum):
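
In version.py the formatter collapses the abstract method's placeholder body onto the signature line: def get_version(self): ... is kept as a one-line dummy implementation. A runnable sketch (the Release subclass is invented for illustration):

from abc import ABCMeta, abstractmethod


class Version(metaclass=ABCMeta):
    @abstractmethod
    def get_version(self): ...


class Release(Version):
    """Hypothetical concrete version, not part of the gordo source."""

    def get_version(self):
        return "1.2.3"


print(Release().get_version())
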
6 changes: 2 additions & 4 deletions gordo/workflow/config_elements/normalized_config.py
@@ -119,10 +119,8 @@ def __init__(
if gordo_version is None:
gordo_version = __version__
default_globals = self.get_default_globals(gordo_version)
default_globals["runtime"]["influx"][ # type: ignore
"resources"
] = _calculate_influx_resources( # type: ignore
len(config["machines"])
default_globals["runtime"]["influx"]["resources"] = ( # type: ignore
_calculate_influx_resources(len(config["machines"])) # type: ignore
)

passed_globals = load_globals_config(
2 changes: 1 addition & 1 deletion setup.py
@@ -49,7 +49,7 @@ def requirements(fp: str):
],
description="Train and build models for Argo / Kubernetes",
long_description=long_description,
long_description_content_type='text/markdown',
long_description_content_type="text/markdown",
entry_points={"console_scripts": ["gordo=gordo.cli:gordo"]},
install_requires=install_requires,
license="AGPLv3",
1 change: 1 addition & 0 deletions tests/gordo/client/test_client.py
@@ -1,4 +1,5 @@
"""Tests for gordo-client."""

# TODO: Move those tests to gordo-client project.

import json
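
The only change to this test module is an added blank line: the formatter requires exactly one empty line between the module docstring and whatever follows it, here the TODO comment. A tiny sketch of the rule:

"""Docstring of a hypothetical test module (illustration only)."""

# The comment (or first import) is now separated from the module docstring by one blank line.
import json

print(json.dumps({"ok": True}))
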
1 change: 0 additions & 1 deletion tests/gordo/util/test_sensor_tag.py
@@ -1,2 +1 @@

