From 89343ce75c887cad0334df495c9029d5716ef663 Mon Sep 17 00:00:00 2001
From: "Arun S. Maiya"
Date: Sat, 27 Jan 2024 14:32:10 -0500
Subject: [PATCH] black

---
 .pre-commit-config.yaml            |  2 +-
 ktrain/imports.py                  |  6 +++---
 ktrain/lroptimize/optimization.py  | 16 +++++++++-------
 ktrain/text/ner/anago/callbacks.py |  1 +
 ktrain/text/ner/anago/models.py    |  1 +
 ktrain/text/ner/anago/tagger.py    |  1 +
 ktrain/text/ner/anago/utils.py     |  1 -
 ktrain/text/qa/qa_finetuner.py     |  8 +++++---
 8 files changed, 21 insertions(+), 15 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ddbe97311..6d1551fae 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
 - repo: https://github.com/ambv/black
-  rev: 23.1.0
+  rev: 24.1.0
   hooks:
   - id: black
     #language_version: python3.6
diff --git a/ktrain/imports.py b/ktrain/imports.py
index 3ab644c44..321307f3b 100644
--- a/ktrain/imports.py
+++ b/ktrain/imports.py
@@ -10,9 +10,9 @@
 
 from packaging import version
 
-os.environ[
-    "NUMEXPR_MAX_THREADS"
-] = "8"  # suppress warning from NumExpr on machines with many CPUs
+os.environ["NUMEXPR_MAX_THREADS"] = (
+    "8"  # suppress warning from NumExpr on machines with many CPUs
+)
 
 # TensorFlow
 SUPPRESS_DEP_WARNINGS = strtobool(os.environ.get("SUPPRESS_DEP_WARNINGS", "1"))
diff --git a/ktrain/lroptimize/optimization.py b/ktrain/lroptimize/optimization.py
index 1c97d3115..fcd4db2c6 100644
--- a/ktrain/lroptimize/optimization.py
+++ b/ktrain/lroptimize/optimization.py
@@ -346,14 +346,16 @@ def __call__(self, gradients):
             _ = self.step  # Create the step variable.
             self._gradients.extend(
                 [
-                    tf.Variable(
-                        tf.zeros_like(gradient),
-                        trainable=False,
-                        synchronization=tf.VariableSynchronization.ON_READ,
-                        aggregation=tf.VariableAggregation.ONLY_FIRST_REPLICA,
+                    (
+                        tf.Variable(
+                            tf.zeros_like(gradient),
+                            trainable=False,
+                            synchronization=tf.VariableSynchronization.ON_READ,
+                            aggregation=tf.VariableAggregation.ONLY_FIRST_REPLICA,
+                        )
+                        if gradient is not None
+                        else gradient
                     )
-                    if gradient is not None
-                    else gradient
                     for gradient in gradients
                 ]
             )
diff --git a/ktrain/text/ner/anago/callbacks.py b/ktrain/text/ner/anago/callbacks.py
index 0b6a8cfb7..4fbf3d9c0 100644
--- a/ktrain/text/ner/anago/callbacks.py
+++ b/ktrain/text/ner/anago/callbacks.py
@@ -1,6 +1,7 @@
 """
 Custom callbacks.
 """
+
 from ....imports import *
 
 from .. import metrics
diff --git a/ktrain/text/ner/anago/models.py b/ktrain/text/ner/anago/models.py
index 7a66f9e47..62f511adb 100644
--- a/ktrain/text/ner/anago/models.py
+++ b/ktrain/text/ner/anago/models.py
@@ -1,6 +1,7 @@
 """
 Model definition.
 """
+
 from .... import utils as U
 from ....imports import *
 
diff --git a/ktrain/text/ner/anago/tagger.py b/ktrain/text/ner/anago/tagger.py
index e738f4aee..726e0ce37 100644
--- a/ktrain/text/ner/anago/tagger.py
+++ b/ktrain/text/ner/anago/tagger.py
@@ -1,6 +1,7 @@
 """
 Model API.
 """
+
 from ....imports import *
 
 from .. import metrics
diff --git a/ktrain/text/ner/anago/utils.py b/ktrain/text/ner/anago/utils.py
index 4acf3bdcb..20343dec0 100644
--- a/ktrain/text/ner/anago/utils.py
+++ b/ktrain/text/ner/anago/utils.py
@@ -2,7 +2,6 @@
 Utility functions.
 
 """
-
 
 from ....imports import *
 
diff --git a/ktrain/text/qa/qa_finetuner.py b/ktrain/text/qa/qa_finetuner.py
index 35b157dae..2835d2ad4 100644
--- a/ktrain/text/qa/qa_finetuner.py
+++ b/ktrain/text/qa/qa_finetuner.py
@@ -69,9 +69,11 @@ def convert_dataset_for_tensorflow(
 
     def densify_ragged_batch(features, label=None):
         features = {
-            feature: ragged_tensor.to_tensor(shape=batch_shape[feature])
-            if feature in tensor_keys
-            else ragged_tensor
+            feature: (
+                ragged_tensor.to_tensor(shape=batch_shape[feature])
+                if feature in tensor_keys
+                else ragged_tensor
+            )
             for feature, ragged_tensor in features.items()
         }
         if label is None: