
Fix flake8 complaints
SDESK-7484
eos87 committed Feb 5, 2025
1 parent d40b884 commit 9d6be3a
Showing 2 changed files with 4 additions and 4 deletions.
2 changes: 1 addition & 1 deletion superdesk/core/resources/utils.py
@@ -150,4 +150,4 @@ def gen_url_for_related_resource(resource_name: str, item_id: str) -> str:
return join_url_parts(url_prefix, api_version, resource_url, item_id)


-from .model import ResourceModel  # noqa: F401
+from .model import ResourceModel  # noqa: E402
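
A quick note on the noqa change, for context: flake8's F401 means "imported but unused", while E402 means "module level import not at top of file". Because the ResourceModel import sits at the bottom of utils.py (a placement commonly used to avoid a circular import), E402 is presumably the rule flake8 was actually reporting, so the suppression is switched to match it. A minimal sketch of the pattern, with illustrative comments only:

    def gen_url_for_related_resource(resource_name: str, item_id: str) -> str:
        ...  # builds the URL for a related resource (body elided)

    # Deliberately imported at the end of the module, e.g. to break a circular
    # import; this is what triggers E402, so that is the code to silence.
    from .model import ResourceModel  # noqa: E402

To double-check locally, flake8 can be limited to the relevant rules with --select, e.g. flake8 --select=E402,F401 superdesk/core/resources/utils.py.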
6 changes: 3 additions & 3 deletions superdesk/tests/steps.py
@@ -111,7 +111,7 @@ async def fail_and_print_body_async(response, message):

async def expect_json_async(response, expected_json, path=None):
"""
-checks if json response equals some json,
+Checks if json response equals some json,
Note: Copied from wooper.expect.expect_json to support Quart ``await response.get_data()``
@@ -135,7 +135,7 @@ async def assert_and_print_body_async(response, assert_function, first, second,

async def expect_json_contains_async(response, expected_json, path=None, reverse_expectation=False):
"""
-checks if json response contains some json subset,
+Checks if json response contains some json subset,
Note: Copied from wooper.expect.expect_json_contains_async to support Quart ``await response.get_data()``
@@ -173,7 +173,7 @@ async def expect_json_contains_async(response, expected_json, path=None, reverse_expectation=False):

async def expect_json_not_contains_async(response, expected_json, path=None):
"""
-checks if json response not contains some json subset,
+Checks if json response not contains some json subset,
Note: Copied from wooper.expect.expect_json_not_contains to support Quart ``await response.get_data()``
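
Beyond the capitalization fixes, the note in these docstrings is the useful part: the helpers were copied from wooper's expect_json family and adapted because Quart exposes the response body through the awaitable get_data(). A rough sketch of what such an adapted helper can look like, assuming this shape for names and comparison semantics (the real implementations in steps.py may differ, e.g. in how the path argument and failure reporting are handled):

    import json

    async def expect_json_async(response, expected_json, path=None):
        """Checks if json response equals some json."""
        body = await response.get_data()   # Quart: reading the body must be awaited
        received = json.loads(body)
        if path:                           # optional drill-down, e.g. "_items.0"
            for key in path.split("."):
                received = received[int(key)] if key.isdigit() else received[key]
        assert received == expected_json, f"expected {expected_json!r}, got {received!r}"

The same awaited-body pattern carries over to expect_json_contains_async and expect_json_not_contains_async.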

