From c87cd581963d53ed80d00074bdd4e4fb502690dd Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Edgar=20Ram=C3=ADrez=20Mondrag=C3=B3n?=
Date: Thu, 30 Nov 2023 23:06:44 -0600
Subject: [PATCH] chore: Enable Ruff checks in helper private modules

---
 pyproject.toml                    |  8 +-------
 singer_sdk/helpers/_batch.py      | 10 +++++-----
 singer_sdk/helpers/_flattening.py | 14 +++++++++-----
 singer_sdk/helpers/_secrets.py    |  2 +-
 singer_sdk/helpers/_state.py      | 14 +++++++-------
 singer_sdk/helpers/_typing.py     | 14 +++++++-------
 6 files changed, 30 insertions(+), 32 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index f59eecec20..ab873b7c4d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -151,9 +151,6 @@ xdoctest = ">=1.1.1"
 [tool.poetry.group.benchmark.dependencies]
 pytest-codspeed = ">=2.2.0"
 
-[tool.black]
-exclude = ".*simpleeval.*"
-
 [tool.pytest.ini_options]
 addopts = '--ignore=singer_sdk/helpers/_simpleeval.py -m "not external"'
 markers = [
@@ -247,8 +244,7 @@ target-version = "py37"
 [tool.ruff.lint]
 exclude = [
     "cookiecutter/*",
-    "singer_sdk/helpers/_simpleeval.py",
-    "tests/core/test_simpleeval.py",
+    "*simpleeval*",
 ]
 ignore = [
     "ANN101",  # Missing type annotation for `self` in method
@@ -318,8 +314,6 @@ unfixable = [
 "tests/*" = ["ANN", "D1", "D2", "FBT001", "FBT003", "PLR2004", "S101"]
 # Disabled some checks in samples code
 "samples/*" = ["ANN", "D"]
-# Don't require docstrings conventions or type annotations in private modules
-"singer_sdk/helpers/_*.py" = ["ANN", "D105"]
 # Templates support a generic resource of type Any.
 "singer_sdk/testing/*.py" = ["S101"]
 "singer_sdk/testing/templates.py" = ["ANN401"]
diff --git a/singer_sdk/helpers/_batch.py b/singer_sdk/helpers/_batch.py
index 0d30d05407..490e2ef8ba 100644
--- a/singer_sdk/helpers/_batch.py
+++ b/singer_sdk/helpers/_batch.py
@@ -92,7 +92,7 @@ class SDKBatchMessage(Message):
     manifest: list[str] = field(default_factory=list)
     """The manifest of files in the batch."""
 
-    def __post_init__(self):
+    def __post_init__(self) -> None:
         if isinstance(self.encoding, dict):
             self.encoding = BaseBatchFileEncoding.from_dict(self.encoding)
 
@@ -112,7 +112,7 @@ class StorageTarget:
     params: dict = field(default_factory=dict)
     """"The storage parameters."""
 
-    def asdict(self):
+    def asdict(self) -> dict[str, t.Any]:
         """Return a dictionary representation of the message.
 
         Returns:
@@ -144,7 +144,7 @@ def split_url(url: str) -> tuple[str, str]:
         """
         if platform.system() == "Windows" and "\\" in url:
             # Original code from pyFileSystem split
-            # Augemnted slitly to properly Windows paths
+            # Augmented slightly to properly handle Windows paths
             split = url.rsplit("\\", 1)
             return (split[0] or "\\", split[1])
 
@@ -224,7 +224,7 @@ class BatchConfig:
     batch_size: int = DEFAULT_BATCH_SIZE
     """The max number of records in a batch."""
 
-    def __post_init__(self):
+    def __post_init__(self) -> None:
         if isinstance(self.encoding, dict):
             self.encoding = BaseBatchFileEncoding.from_dict(self.encoding)
 
@@ -234,7 +234,7 @@ def __post_init__(self):
         if self.batch_size is None:
             self.batch_size = DEFAULT_BATCH_SIZE
 
-    def asdict(self):
+    def asdict(self) -> dict[str, t.Any]:
         """Return a dictionary representation of the message.
 
         Returns:
diff --git a/singer_sdk/helpers/_flattening.py b/singer_sdk/helpers/_flattening.py
index eeb244277d..3cf1f172e2 100644
--- a/singer_sdk/helpers/_flattening.py
+++ b/singer_sdk/helpers/_flattening.py
@@ -1,4 +1,4 @@
-"""Internal helper library for record flatteting functions."""
+"""Internal helper library for record flattening functions."""
 
 from __future__ import annotations
 
@@ -70,7 +70,7 @@ def flatten_key(key_name: str, parent_keys: list[str], separator: str = "__") ->
             inflection.camelize(inflected_key[reducer_index]),
         )
         inflected_key[reducer_index] = (
-            reduced_key if len(reduced_key) > 1 else inflected_key[reducer_index][0:3]
+            reduced_key if len(reduced_key) > 1 else inflected_key[reducer_index][:3]
         ).lower()
         reducer_index += 1
 
@@ -358,8 +358,8 @@ def _flatten_schema(  # noqa: C901, PLR0912
             items.append((new_key, next(iter(field_schema.values()))[0]))
 
     # Sort and check for duplicates
-    def _key_func(item):
-        return item[0]  # first item is tuple is the key name.
+    def _key_func(item: tuple[str, dict]) -> str:
+        return item[0]  # first item in tuple is the key name.
 
     sorted_items = sorted(items, key=_key_func)
     for field_name, g in itertools.groupby(sorted_items, key=_key_func):
@@ -451,7 +451,11 @@ def _flatten_record(
     return dict(items)
 
 
-def _should_jsondump_value(key: str, value: t.Any, flattened_schema=None) -> bool:
+def _should_jsondump_value(
+    key: str,
+    value: t.Any,  # noqa: ANN401
+    flattened_schema: dict[str, t.Any] | None = None,
+) -> bool:
     """Return True if json.dump() should be used to serialize the value.
 
     Args:
diff --git a/singer_sdk/helpers/_secrets.py b/singer_sdk/helpers/_secrets.py
index ad7d050320..bbababa70a 100644
--- a/singer_sdk/helpers/_secrets.py
+++ b/singer_sdk/helpers/_secrets.py
@@ -28,7 +28,7 @@ def is_common_secret_key(key_name: str) -> bool:
 class SecretString(str):
     """For now, this class wraps a sensitive string to be identified as such later."""
 
-    def __init__(self, contents):
+    def __init__(self, contents: str) -> None:
         """Initialize secret string."""
         self.contents = contents
 
diff --git a/singer_sdk/helpers/_state.py b/singer_sdk/helpers/_state.py
index 1c188852cf..ba5121bbac 100644
--- a/singer_sdk/helpers/_state.py
+++ b/singer_sdk/helpers/_state.py
@@ -23,7 +23,7 @@ def get_state_if_exists(
     tap_stream_id: str,
     state_partition_context: dict | None = None,
     key: str | None = None,
-) -> t.Any | None:
+) -> t.Any | None:  # noqa: ANN401
     """Return the stream or partition state, creating a new one if it does not exist.
 
     Args:
@@ -135,10 +135,10 @@ def get_writeable_state_dict(
 
 
 def write_stream_state(
-    tap_state,
+    tap_state: dict,
     tap_stream_id: str,
-    key,
-    val,
+    key: str,
+    val: t.Any,  # noqa: ANN401
     *,
     state_partition_context: dict | None = None,
 ) -> None:
@@ -165,7 +165,7 @@ def reset_state_progress_markers(stream_or_partition_state: dict) -> dict | None
 
 def write_replication_key_signpost(
     stream_or_partition_state: dict,
-    new_signpost_value: t.Any,
+    new_signpost_value: t.Any,  # noqa: ANN401
 ) -> None:
     """Write signpost value."""
     stream_or_partition_state[SIGNPOST_MARKER] = to_json_compatible(new_signpost_value)
@@ -173,13 +173,13 @@ def write_replication_key_signpost(
 
 def write_starting_replication_value(
     stream_or_partition_state: dict,
-    initial_value: t.Any,
+    initial_value: t.Any,  # noqa: ANN401
 ) -> None:
     """Write initial replication value to state."""
     stream_or_partition_state[STARTING_MARKER] = to_json_compatible(initial_value)
 
 
-def get_starting_replication_value(stream_or_partition_state: dict):
+def get_starting_replication_value(stream_or_partition_state: dict) -> t.Any | None:  # noqa: ANN401
     """Retrieve initial replication marker value from state."""
     if not stream_or_partition_state:
         return None
diff --git a/singer_sdk/helpers/_typing.py b/singer_sdk/helpers/_typing.py
index 3a87ab4b92..81ffcc333f 100644
--- a/singer_sdk/helpers/_typing.py
+++ b/singer_sdk/helpers/_typing.py
@@ -39,7 +39,7 @@ def __init__(self, *args: object) -> None:
         super().__init__(msg, *args)
 
 
-def to_json_compatible(val: t.Any) -> t.Any:
+def to_json_compatible(val: t.Any) -> t.Any:  # noqa: ANN401
     """Return as string if datetime. JSON does not support proper datetime types.
 
     If given a naive datetime object, pendulum automatically makes it utc
@@ -185,7 +185,7 @@ def get_datelike_property_type(property_schema: dict) -> str | None:
     return None
 
 
-def _is_string_with_format(type_dict):
+def _is_string_with_format(type_dict: dict[str, t.Any]) -> bool | None:
     if "string" in type_dict.get("type", []) and type_dict.get("format") in {
         "date-time",
         "time",
@@ -196,14 +196,14 @@ def handle_invalid_timestamp_in_record(
-    record,  # noqa: ARG001
+    record: dict[str, t.Any],  # noqa: ARG001
     key_breadcrumb: list[str],
     invalid_value: str,
     datelike_typename: str,
     ex: Exception,
     treatment: DatetimeErrorTreatmentEnum | None,
     logger: logging.Logger,
-) -> t.Any:
+) -> t.Any:  # noqa: ANN401
     """Apply treatment or raise an error for invalid time values."""
     treatment = treatment or DatetimeErrorTreatmentEnum.ERROR
     msg = (
@@ -331,7 +331,7 @@ def _warn_unmapped_properties(
     stream_name: str,
     property_names: tuple[str],
     logger: logging.Logger,
-):
+) -> None:
     logger.warning(
         "Properties %s were present in the '%s' stream but "
         "not found in catalog schema. Ignoring.",
@@ -470,9 +470,9 @@ def _conform_record_data_types(  # noqa: PLR0912
 
 
 def _conform_primitive_property(  # noqa: PLR0911
-    elem: t.Any,
+    elem: t.Any,  # noqa: ANN401
     property_schema: dict,
-) -> t.Any:
+) -> t.Any:  # noqa: ANN401
     """Converts a primitive (i.e. not object or array) to a json compatible type."""
     if isinstance(elem, (datetime.datetime, pendulum.DateTime)):
         return to_json_compatible(elem)
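
Note on the pattern applied above: with the blanket per-file ignore for "singer_sdk/helpers/_*.py" = ["ANN", "D105"] removed from pyproject.toml, the private helper modules are linted under the same annotation and docstring rules as the rest of the package. The hunks therefore add explicit parameter and return annotations and suppress Ruff's flake8-annotations rule ANN401 (which flags typing.Any in signatures) only on the specific lines where Any is intentional. A minimal sketch of that style follows; the helper names and state shape are hypothetical, not taken from singer-sdk.

# Illustrative only -- hypothetical helpers, not part of the singer-sdk codebase.
from __future__ import annotations

import typing as t


def write_marker(
    state: dict,
    key: str,
    # Any is deliberate here: marker values may be str, int, float, bool, or None.
    value: t.Any,  # noqa: ANN401
) -> None:
    """Store a marker value in a state dictionary."""
    state[key] = value


def read_marker(state: dict, key: str) -> t.Any | None:  # noqa: ANN401
    """Return a previously stored marker value, or None if absent."""
    return state.get(key)


if __name__ == "__main__":
    demo_state: dict = {}
    write_marker(demo_state, "replication_key_value", "2023-11-30T23:06:44-06:00")
    print(read_marker(demo_state, "replication_key_value"))

With this style, running "ruff check singer_sdk/helpers/" under the updated configuration applies the ANN and D105 rules to these modules, so each remaining use of Any is a visible, per-line decision rather than a directory-wide exemption.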