diff --git a/.flake8 b/.flake8
deleted file mode 100644
index 0831b268e..000000000
--- a/.flake8
+++ /dev/null
@@ -1,4 +0,0 @@
-[flake8]
-max-line-length = 120
-max-complexity = 20
-ignore = E203,W503
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 3e18dacd4..9b56c42be 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -71,7 +71,7 @@ jobs:
           name: pytest artifacts
           path: artifacts.tar
 
-      - name: Lint
+      - name: Lint Rust
        uses: actions-rs/cargo@v1
        with:
          command: clippy
@@ -83,11 +83,11 @@
          command: fmt
          args: --all -- --check
 
+      - name: Lint Python
+        run: python -m ruff check
+
       - name: Check Python formatting
-        run: |
-          export DIRECTORIES="scripts tests benchmarks crates/pyhq/python"
-          python -m black --check $DIRECTORIES
-          python -m flake8 $DIRECTORIES
+        run: python -m ruff format --check
 
       - name: Build docs
         run: |
diff --git a/benchmarks/src/benchmark/runner.py b/benchmarks/src/benchmark/runner.py
index 2dc7930b9..3cd53ab3d 100644
--- a/benchmarks/src/benchmark/runner.py
+++ b/benchmarks/src/benchmark/runner.py
@@ -84,8 +84,10 @@ def _handle_result(self, identifier: BenchmarkIdentifier, result: BenchmarkResul
         if isinstance(result, Failure):
             logging.error(f"Benchmark {key} has failed: {result.traceback}")
             if self.exit_on_error:
-                raise Exception(f"""Benchmark {identifier} has failed: {result}
-You can find details in {identifier.workdir}""")
+                raise Exception(
+                    f"""Benchmark {identifier} has failed: {result}
+You can find details in {identifier.workdir}"""
+                )
         elif isinstance(result, Timeout):
             logging.info(f"Benchmark {key} has timeouted after {result.timeout}s")
         elif isinstance(result, Success):
diff --git a/benchmarks/src/postprocessing/monitor.py b/benchmarks/src/postprocessing/monitor.py
index 609ff08e3..8c110e8ec 100644
--- a/benchmarks/src/postprocessing/monitor.py
+++ b/benchmarks/src/postprocessing/monitor.py
@@ -623,12 +623,14 @@ def render_process(pid: int) -> Optional[Model]:
     ]
     cpu_time_figures = [render_process_cpu_time(process_data, key, time, label) for (label, key) in cpu_times]
 
-    summary = PreText(text=f"""
+    summary = PreText(
+        text=f"""
 PID: {pid}
 Key: {process.key}
 Max. RSS: {humanize.naturalsize(max_rss, binary=True)}
 Avg. CPU: {avg_cpu:.02f} %
-""".strip())
+""".strip()
+    )
 
     right_col = Column(children=cpu_time_figures)
     left_col = Column(children=[summary, mem_figure, cpu_figure])
diff --git a/benchmarks/src/utils/process.py b/benchmarks/src/utils/process.py
index 21eccc00c..edd3ac182 100644
--- a/benchmarks/src/utils/process.py
+++ b/benchmarks/src/utils/process.py
@@ -24,8 +24,10 @@ def execute_process(
     if result.returncode != 0:
         with open(stdout) as stdout_file:
             with open(stderr) as stderr_file:
-                raise Exception(f"""The process {args} has exited with error code {result.returncode}
+                raise Exception(
+                    f"""The process {args} has exited with error code {result.returncode}
 Stdout: {stdout_file.read()}
 Stderr: {stderr_file.read()}
-""".strip())
+""".strip()
+                )
     return result
diff --git a/crates/pyhq/python/hyperqueue/client.py b/crates/pyhq/python/hyperqueue/client.py
index 3a9aff9e1..619154ef9 100644
--- a/crates/pyhq/python/hyperqueue/client.py
+++ b/crates/pyhq/python/hyperqueue/client.py
@@ -83,9 +83,7 @@ def submit(self, job: Job) -> SubmittedJob:
             raise Exception("Submitted job must have at least a single task")
 
         job_id = self.connection.submit_job(job_desc)
-        logging.info(
-            f"Submitted job {job_id} with {task_count} {pluralize('task', task_count)}"
-        )
+        logging.info(f"Submitted job {job_id} with {task_count} {pluralize('task', task_count)}")
         return SubmittedJob(job=job, id=job_id)
 
     def wait_for_jobs(self, jobs: Sequence[SubmittedJob], raise_on_error=True) -> bool:
@@ -95,9 +93,7 @@ def wait_for_jobs(self, jobs: Sequence[SubmittedJob], raise_on_error=True) -> bo
         job_ids_str = ",".join(str(id) for id in job_ids)
         if len(jobs) > 1:
             job_ids_str = "{" + job_ids_str + "}"
-        logging.info(
-            f"Waiting for {pluralize('job', len(jobs))} {job_ids_str} to finish"
-        )
+        logging.info(f"Waiting for {pluralize('job', len(jobs))} {job_ids_str} to finish")
 
         callback = create_progress_callback()
diff --git a/crates/pyhq/python/hyperqueue/ffi/protocol.py b/crates/pyhq/python/hyperqueue/ffi/protocol.py
index b1bf6760c..da3e324e5 100644
--- a/crates/pyhq/python/hyperqueue/ffi/protocol.py
+++ b/crates/pyhq/python/hyperqueue/ffi/protocol.py
@@ -7,9 +7,7 @@ class ResourceRequest:
     n_nodes: int = 0
-    resources: Dict[str, Union[int, float, str]] = dataclasses.field(
-        default_factory=dict
-    )
+    resources: Dict[str, Union[int, float, str]] = dataclasses.field(default_factory=dict)
     min_time: Optional[float] = None
 
     def __init__(
diff --git a/crates/pyhq/python/hyperqueue/job.py b/crates/pyhq/python/hyperqueue/job.py
index ef9f89994..de29f7be5 100644
--- a/crates/pyhq/python/hyperqueue/job.py
+++ b/crates/pyhq/python/hyperqueue/job.py
@@ -31,11 +31,7 @@ def __init__(
         self.tasks: List[Task] = []
         self.task_map: Dict[TaskId, Task] = {}
         self.max_fails = max_fails
-        self.default_workdir = (
-            Path(default_workdir).resolve()
-            if default_workdir is not None
-            else default_workdir
-        )
+        self.default_workdir = Path(default_workdir).resolve() if default_workdir is not None else default_workdir
         self.default_env = default_env or {}
 
     def task_by_id(self, id: TaskId) -> Optional[Task]:
diff --git a/crates/pyhq/python/hyperqueue/output.py b/crates/pyhq/python/hyperqueue/output.py
index 4b1265f13..ffa8910ac 100644
--- a/crates/pyhq/python/hyperqueue/output.py
+++ b/crates/pyhq/python/hyperqueue/output.py
@@ -40,13 +40,9 @@ def default_stderr() -> str:
 
 # TODO: how to resolve TASK_ID in the context of some other task?
 class Output:
-    def __init__(
-        self, name: str, filepath: Optional[str] = None, extension: Optional[str] = None
-    ):
+    def __init__(self, name: str, filepath: Optional[str] = None, extension: Optional[str] = None):
         if filepath and extension:
-            raise ValidationException(
-                "Parameters `filepath` and `extension` are mutually exclusive"
-            )
+            raise ValidationException("Parameters `filepath` and `extension` are mutually exclusive")
 
         self.name = name
         self.filepath = filepath
diff --git a/crates/pyhq/python/hyperqueue/task/function/wrapper.py b/crates/pyhq/python/hyperqueue/task/function/wrapper.py
index 5919c6bd9..e99b26c56 100644
--- a/crates/pyhq/python/hyperqueue/task/function/wrapper.py
+++ b/crates/pyhq/python/hyperqueue/task/function/wrapper.py
@@ -9,9 +9,7 @@ class CloudWrapper:
     Wraps a callable so that cloudpickle is used to pickle it, caching the pickle.
     """
 
-    def __init__(
-        self, fn, pickled_fn=None, cache=True, protocol=cloudpickle.DEFAULT_PROTOCOL
-    ):
+    def __init__(self, fn, pickled_fn=None, cache=True, protocol=cloudpickle.DEFAULT_PROTOCOL):
         if fn is None:
             if pickled_fn is None:
                 raise ValueError("Pass at least one of `fn` and `pickled_fn`")
@@ -27,9 +25,7 @@ def __init__(
         self.pickled_fn = pickled_fn
         self.cache = cache
         self.protocol = protocol
-        self.__doc__ = "CloudWrapper for {!r}. Original doc:\n\n{}".format(
-            self.fn, self.fn.__doc__
-        )
+        self.__doc__ = "CloudWrapper for {!r}. Original doc:\n\n{}".format(self.fn, self.fn.__doc__)
 
         if hasattr(self.fn, "__name__"):
             self.__name__ = self.fn.__name__
@@ -56,9 +52,7 @@ def _get_pickled_fn(self):
         return pfn
 
     def __call__(self, *args, **kwargs):
-        logging.debug(
-            f"Running function {self.fn} using args {args} and kwargs {kwargs}"
-        )
+        logging.debug(f"Running function {self.fn} using args {args} and kwargs {kwargs}")
         return self.fn(*args, **kwargs)
 
     def __reduce__(self):
diff --git a/crates/pyhq/python/hyperqueue/task/program.py b/crates/pyhq/python/hyperqueue/task/program.py
index 8a8aa22f9..48b869bb0 100644
--- a/crates/pyhq/python/hyperqueue/task/program.py
+++ b/crates/pyhq/python/hyperqueue/task/program.py
@@ -95,8 +95,6 @@ def get_task_outputs(task: ExternalProgram) -> Dict[str, Output]:
     outputs = gather_outputs(task.args) + gather_outputs(task.env)
     for output in outputs:
         if output.name in output_map:
-            raise ValidationException(
-                f"Output `{output.name}` has been defined multiple times"
-            )
+            raise ValidationException(f"Output `{output.name}` has been defined multiple times")
         output_map[output.name] = output
     return output_map
diff --git a/pyproject.toml b/pyproject.toml
deleted file mode 100644
index 23bac3964..000000000
--- a/pyproject.toml
+++ /dev/null
@@ -1,3 +0,0 @@
-[tool.black]
-line-length = 120
-preview = true
diff --git a/ruff.toml b/ruff.toml
new file mode 100644
index 000000000..e4b3eddbf
--- /dev/null
+++ b/ruff.toml
@@ -0,0 +1,32 @@
+exclude = [
+    ".git",
+    ".git-rewrite",
+    ".ipynb_checkpoints",
+    ".pytest_cache",
+    ".ruff_cache",
+    "__pypackages__",
+    "_build",
+    "buck-out",
+    "build",
+    "dist",
+    "node_modules",
+    "site-packages",
+    "venv",
+    "target",
+]
+include = [
+    "benchmarks/**/*.py",
+    "crates/pyhq/python/**/*.py",
+    "crates/pyhq/pyproject.toml",
+    "scripts/**/*.py",
+    "tests/**/*.py",
+]
+
+line-length = 120
+indent-width = 4
+
+target-version = "py37"
+
+[lint]
+select = ["E4", "E7", "E9", "F"]
+ignore = ["E203"]
diff --git a/scripts/check.sh b/scripts/check.sh
index 1bee66c7c..df946f9bc 100755
--- a/scripts/check.sh
+++ b/scripts/check.sh
@@ -8,11 +8,10 @@ cd `dirname $0`/..
 cargo fmt --all
 
 # Format Python code
-isort --profile black scripts tests benchmarks crates/pyhq/python
-black scripts tests benchmarks crates/pyhq/python
+ruff format
 
 # Lint Python code
-flake8 scripts tests benchmarks crates/pyhq/python
+ruff check
 
 # Test Rust code
 cargo test
diff --git a/tests/requirements.txt b/tests/requirements.txt
index d8205e8b1..d9a51e4e3 100644
--- a/tests/requirements.txt
+++ b/tests/requirements.txt
@@ -1,8 +1,5 @@
 pytest==7.1.2
 pytest-xdist==2.5.0
-flake8==4.0.1
-black==23.7.0
-isort==5.10.1
 iso8601==1.0.2
 schema==0.7.5
 maturin==1.3.0
@@ -11,3 +8,4 @@ jinja2==3.0.3
 requests==2.31.0
 aiohttp==3.9.0
 inline-snapshot==0.2.1
+ruff==0.1.9
diff --git a/tests/test_job.py b/tests/test_job.py
index 8d7e74eda..106e04397 100644
--- a/tests/test_job.py
+++ b/tests/test_job.py
@@ -454,7 +454,8 @@ def test_cancel_send_sigint(hq_env: HqEnv):
         [
             "submit",
             "--",
-            *python("""
+            *python(
+                """
 import sys
 import time
 import signal
@@ -467,7 +468,8 @@ def signal_handler(sig, frame):
 
 print("ready", flush=True)
 time.sleep(3600)
-"""),
+"""
+            ),
         ]
     )
     wait_for_job_state(hq_env, 1, "RUNNING")
@@ -487,7 +489,8 @@ def test_cancel_kill_if_sigint_fails(hq_env: HqEnv):
         [
             "submit",
             "--",
-            *python("""
+            *python(
+                """
 import os
 import sys
 import time
@@ -501,7 +504,8 @@ def signal_handler(sig, frame):
 
 print("ready", flush=True)
 time.sleep(3600)
-"""),
+"""
+            ),
         ]
     )
     wait_for_job_state(hq_env, 1, "RUNNING")
@@ -890,8 +894,7 @@ def test_job_submit_program_not_found(hq_env: HqEnv):
     table = hq_env.command(["task", "list", "1", "-v"], as_table=True)
     assert (
         'Error: Cannot execute "foo --bar --baz=5": No such file or directory (os error 2)\n'
-        "The program that you have tried to execute (`foo`) was not found."
-        == table.get_column_value("Error")[0]
+        "The program that you have tried to execute (`foo`) was not found." == table.get_column_value("Error")[0]
     )
@@ -963,9 +966,11 @@ def test_job_shell_script_fail_not_executable(hq_env: HqEnv):
 def test_job_shell_script_read_interpreter(hq_env: HqEnv):
     hq_env.start_server()
     hq_env.start_worker()
-    Path("test.sh").write_text("""#!/bin/bash
+    Path("test.sh").write_text(
+        """#!/bin/bash
 echo 'Hello' > out.txt
-""")
+"""
+    )
     for job_id, path in enumerate(("test.sh", "./test.sh", os.path.realpath("test.sh"))):
         hq_env.command(["submit", path])
         wait_for_job_state(hq_env, job_id + 1, "FINISHED")
@@ -1060,7 +1065,9 @@ def test_job_cat_header(hq_env: HqEnv):
 
     output = hq_env.command(["job", "cat", "1", "stdout", "--print-task-header"])
     print(output)
-    assert output == """
+    assert (
+        output
+        == """
 # Task 1
 1
 out1
@@ -1074,9 +1081,12 @@ def test_job_cat_header(hq_env: HqEnv):
 out1
 out2
 """.lstrip()
+    )
 
     output = hq_env.command(["job", "cat", "1", "stderr", "--print-task-header"])
-    assert output == """
+    assert (
+        output
+        == """
 # Task 1
 1
 err1
@@ -1087,6 +1097,7 @@ def test_job_cat_header(hq_env: HqEnv):
 3
 err1
 """.lstrip()
+    )
 
 
 def test_job_cat_status(hq_env: HqEnv):
@@ -1113,7 +1124,9 @@ def test_job_cat_status(hq_env: HqEnv):
     wait_for_job_state(hq_env, 1, "FAILED")
 
     output = hq_env.command(["job", "cat", "--task-status=finished", "1", "stdout", "--print-task-header"])
-    assert output == """
+    assert (
+        output
+        == """
 # Task 3
 3
 out
@@ -1124,9 +1137,12 @@ def test_job_cat_status(hq_env: HqEnv):
 9
 out
 """.lstrip()
+    )
 
     output = hq_env.command(["job", "cat", "--task-status=failed", "--tasks", "3-7", "1", "stdout"])
-    assert output == """
+    assert (
+        output
+        == """
 4
 out
 5
@@ -1134,10 +1150,14 @@ def test_job_cat_status(hq_env: HqEnv):
 6
 out
 """.lstrip()
+    )
 
     output_selected = hq_env.command(["job", "cat", "--task-status", "finished,failed", "1", "stdout"])
     output_default = hq_env.command(["job", "cat", "1", "stdout"])
-    assert output_selected == output_default == """
+    assert (
+        output_selected
+        == output_default
+        == """
 3
 out
 4
@@ -1153,6 +1173,7 @@ def test_job_cat_status(hq_env: HqEnv):
 9
 out
 """.lstrip()
+    )
 
 
 def test_job_cat_last(hq_env: HqEnv):
@@ -1320,13 +1341,15 @@ def test_kill_task_when_worker_dies(hq_env: HqEnv):
         [
             "submit",
             "--",
-            *python("""
+            *python(
+                """
 import os
 import time
 
 print(os.getpid(), flush=True)
 time.sleep(3600)
-"""),
+"""
+            ),
         ]
     )
     wait_for_job_state(hq_env, 1, "RUNNING")
@@ -1399,7 +1422,8 @@ def check_child_process_exited(hq_env: HqEnv, stop_fn: Callable[[subprocess.Pope
         [
             "submit",
             "--",
-            *python("""
+            *python(
+                """
 import os
 import sys
 import time
@@ -1408,7 +1432,8 @@ def check_child_process_exited(hq_env: HqEnv, stop_fn: Callable[[subprocess.Pope
 if pid > 0:
     print(pid, flush=True)
     time.sleep(3600)
-"""),
+"""
+            ),
         ]
     )
     wait_for_job_state(hq_env, 1, "RUNNING")
diff --git a/tests/test_jobfile.py b/tests/test_jobfile.py
index 569dfddbf..ce9defd84 100644
--- a/tests/test_jobfile.py
+++ b/tests/test_jobfile.py
@@ -9,10 +9,12 @@ def test_job_file_submit_minimal(hq_env: HqEnv, tmp_path):
     hq_env.start_server()
     hq_env.start_worker()
-    tmp_path.joinpath("job.toml").write_text("""
+    tmp_path.joinpath("job.toml").write_text(
+        """
 [[task]]
 command = ["sleep", "0"]
-    """)
+    """
+    )
     hq_env.command(["job", "submit-file", "job.toml"])
     wait_for_job_state(hq_env, 1, "FINISHED")
@@ -20,7 +22,8 @@ def test_job_file_submit_minimal(hq_env: HqEnv, tmp_path):
 def test_job_file_submit_maximal(hq_env: HqEnv, tmp_path):
     hq_env.start_server()
     hq_env.start_workers(3, cpus=4, args=["--resource", "gpus=[0,1]"])
-    tmp_path.joinpath("job.toml").write_text("""
+    tmp_path.joinpath("job.toml").write_text(
+        """
 name = "test-job"
 stream_log = "output.log"
 max_fails = 11
@@ -59,7 +62,8 @@ def test_job_file_submit_maximal(hq_env: HqEnv, tmp_path):
 command = ["sleep", "0"]
 [[task.request]]
 resources = { "gpus" = 1.1 }
-""")
+"""
+    )
 
     hq_env.command(["job", "submit-file", "job.toml"])
     wait_for_job_state(hq_env, 1, "FINISHED")
@@ -106,7 +110,8 @@ def test_job_file_resource_variants1(hq_env: HqEnv, tmp_path):
     hq_env.start_worker(cpus=2, args=["--resource", "gpus=[0,1]"])
     hq_env.start_workers(2, cpus=4)
 
-    tmp_path.joinpath("job.toml").write_text("""
+    tmp_path.joinpath("job.toml").write_text(
+        """
 [[task]]
 id = 0
 command = ["sleep", "1"]
@@ -116,7 +121,8 @@ def test_job_file_resource_variants1(hq_env: HqEnv, tmp_path):
 
 [[task.request]]
 resources = { "cpus" = "1", "gpus" = "1" }
-""")
+"""
+    )
     hq_env.command(["job", "submit-file", "job.toml"])
 
     wait_for_job_state(hq_env, 1, "RUNNING")
@@ -135,7 +141,8 @@ def test_job_file_resource_variants2(hq_env: HqEnv, tmp_path):
     hq_env.start_workers(1, cpus=4, args=["--resource", "x=[0,1]"])
     hq_env.start_workers(2, cpus=2, args=["--resource", "x=[0]", "--resource", "y=[0]"])
 
-    tmp_path.joinpath("job.toml").write_text("""
+    tmp_path.joinpath("job.toml").write_text(
+        """
 [[task]]
 id = 0
 command = ["/bin/bash",
@@ -149,7 +156,8 @@ def test_job_file_resource_variants2(hq_env: HqEnv, tmp_path):
 
 [[task.request]]
 resources = { "cpus" = "4", "x" = "1" }
-    """)
+    """
+    )
     hq_env.command(["job", "submit-file", "job.toml"])
     wait_for_job_state(hq_env, 1, "FINISHED")
     table = hq_env.command(["task", "info", "1", "0"], as_table=True)
@@ -162,7 +170,10 @@ def test_job_file_resource_variants3(hq_env: HqEnv, tmp_path):
     hq_env.start_server()
     hq_env.start_worker(cpus=16, args=["--resource", "x=[0,1]"])
 
-    tmp_path.joinpath("job.toml").write_text("\n".join([f"""
+    tmp_path.joinpath("job.toml").write_text(
+        "\n".join(
+            [
+                f"""
 [[task]]
 id = {x}
 command = ["/bin/bash",
@@ -172,7 +183,11 @@ def test_job_file_resource_variants3(hq_env: HqEnv, tmp_path):
 resources = {{ "cpus" = "1", "x"=1 }}
 [[task.request]]
 resources = {{ "cpus" = "4" }}
-    """ for x in range(5)]))
+    """
+                for x in range(5)
+            ]
+        )
+    )
     hq_env.command(["job", "submit-file", "job.toml"])
     wait_for_job_state(hq_env, 1, "FINISHED")
@@ -185,7 +200,8 @@ def test_job_file_auto_id(hq_env: HqEnv, tmp_path):
     hq_env.start_server()
     hq_env.start_worker()
-    tmp_path.joinpath("job.toml").write_text("""
+    tmp_path.joinpath("job.toml").write_text(
+        """
 [[task]]
 command = ["sleep", "0"]
@@ -202,7 +218,8 @@ def test_job_file_auto_id(hq_env: HqEnv, tmp_path):
 
 [[task]]
 command = ["sleep", "0"]
-    """)
+    """
+    )
     hq_env.command(["job", "submit-file", "job.toml"])
     wait_for_job_state(hq_env, 1, "FINISHED")
     r = hq_env.command(["--output-mode=json", "job", "info", "1"], as_json=True)
@@ -212,11 +229,13 @@ def test_job_file_auto_id(hq_env: HqEnv, tmp_path):
 
 def test_job_file_array(hq_env: HqEnv, tmp_path):
     hq_env.start_server()
-    tmp_path.joinpath("job.toml").write_text("""
+    tmp_path.joinpath("job.toml").write_text(
+        """
 [array]
 ids = "2, 10-14, 120"
 command = ["sleep", "0"]
-    """)
+    """
+    )
     hq_env.command(["job", "submit-file", "job.toml"])
     r = hq_env.command(["job", "info", "1"], as_table=True)
     r.check_row_value("Tasks", "7; Ids: 2,10-14,120")
@@ -224,7 +243,8 @@ def test_job_file_array(hq_env: HqEnv, tmp_path):
 
 def test_job_file_fail_mixing_array_and_tasks(hq_env: HqEnv, tmp_path):
     hq_env.start_server()
-    tmp_path.joinpath("job.toml").write_text("""
+    tmp_path.joinpath("job.toml").write_text(
+        """
 [array]
 ids = "2"
 command = ["sleep", "0"]
@@ -232,7 +252,8 @@ def test_job_file_fail_mixing_array_and_tasks(hq_env: HqEnv, tmp_path):
 [[task]]
 id = 1
 command = ["sleep", "0"]
-    """)
+    """
+    )
     hq_env.command(
         ["job", "submit-file", "job.toml"],
         expect_fail="Definition of array job and individual task cannot be mixed",
     )
@@ -242,11 +263,13 @@ def test_job_file_array_entries_without_ids(hq_env: HqEnv, tmp_path):
     hq_env.start_server()
     hq_env.start_worker()
-    tmp_path.joinpath("job.toml").write_text("""
+    tmp_path.joinpath("job.toml").write_text(
+        """
 [array]
 entries = ["a", "bb", "ccc"]
 command = ["/bin/bash", "-c", "echo $HQ_ENTRY"]
-    """)
+    """
+    )
 
     expected = {0: "a", 1: "bb", 2: "ccc"}
 
@@ -261,12 +284,14 @@ def test_job_file_array_entries_with_ids(hq_env: HqEnv, tmp_path):
     hq_env.start_server()
     hq_env.start_worker()
-    tmp_path.joinpath("job.toml").write_text("""
+    tmp_path.joinpath("job.toml").write_text(
+        """
 [array]
 ids = "2,10-12"
 entries = ["a", "bb", "ccc", "x"]
 command = ["/bin/bash", "-c", "echo $HQ_ENTRY"]
-    """)
+    """
+    )
 
     expected = {2: "a", 10: "bb", 11: "ccc", 12: "x"}
 
@@ -281,7 +306,8 @@ def test_job_file_dependencies(hq_env: HqEnv, tmp_path):
     hq_env.start_server()
     hq_env.start_worker()
-    tmp_path.joinpath("job.toml").write_text("""
+    tmp_path.joinpath("job.toml").write_text(
+        """
 [[task]]
 id = 1
 command = ["sleep", "0"]
@@ -294,7 +320,8 @@ def test_job_file_dependencies(hq_env: HqEnv, tmp_path):
 id = 5
 command = ["sleep", "0"]
 deps = [1, 3]
-    """)
+    """
+    )
     hq_env.command(["job", "submit-file", "job.toml"])
     table = hq_env.command(["task", "info", "1", "5"], as_table=True)
     table.check_row_value("Dependencies", "1,3")
diff --git a/tests/test_resources.py b/tests/test_resources.py
index 609c7e0b2..43b1cdde9 100644
--- a/tests/test_resources.py
+++ b/tests/test_resources.py
@@ -423,12 +423,14 @@ def test_resource_name_ensure_normalization(hq_env: HqEnv):
             "--resource",
             f"{res_name}=1",
             "--",
-            *python("""
+            *python(
+                """
 import os
 import sys
 print(os.environ["HQ_RESOURCE_REQUEST_gpus_amd"], flush=True)
 print(os.environ["HQ_RESOURCE_VALUES_gpus_amd"], flush=True)
-"""),
+"""
+            ),
         ]
     )
     hq_env.start_worker(args=["--resource", f"{res_name}=[0]"])
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 313bc6f60..c6b550b32 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -2,13 +2,15 @@
 
 
 def test_parse_table_horizontal():
-    table = parse_table("""+---------+--------------+
+    table = parse_table(
+        """+---------+--------------+
 | Id      | Name         |
 +---------+--------------+
 | a       | b            |
 | c       | d            |
 +---------+--------------+
-""")
+"""
+    )
     assert table.header == ["Id", "Name"]
     assert table.rows == [
         ["a", "b"],
@@ -17,21 +19,25 @@ def test_parse_table_horizontal():
         ["c", "d"],
     ]
 
 
 def test_parse_table_horizontal_empty():
-    table = parse_table("""+---------+--------------+
+    table = parse_table(
+        """+---------+--------------+
 | Id      | Name         |
 +---------+--------------+
-""")
+"""
+    )
     assert table.header == ["Id", "Name"]
     assert table.rows == []
 
 
 def test_parse_table_vertical():
-    table = parse_table("""+---------+--------------+
+    table = parse_table(
+        """+---------+--------------+
 | Id      | 1            |
 | Name    | 2            |
 | Value   | c            |
 +---------+--------------+
-""")
+"""
+    )
     assert table.header is None
     assert table.rows == [
         ["Id", "1"],
@@ -41,14 +47,16 @@ def test_parse_table_vertical():
 
 
 def test_parse_table_multiline_value():
-    table = parse_table("""+---------+--------------+
+    table = parse_table(
+        """+---------+--------------+
 | Id      | 1            |
 | Name    | line1        |
 |         | line2        |
 |         | line3        |
 | Value   | c            |
 +---------+--------------+
-""")
+"""
+    )
     assert table.header is None
     assert table.rows == [
         ["Id", "1"],
@@ -58,28 +66,33 @@ def test_parse_table_multiline_value():
 
 
 def test_parse_table_empty():
-    table = parse_table("""
+    table = parse_table(
+        """
 +---------+--------------+
 +---------+--------------+
-    """)
+    """
+    )
     assert table is None
 
 
 def test_parse_table_ignore_suffix():
-    table, remainder = parse_table("""
+    table, remainder = parse_table(
+        """
 +---------+--------------+
 |a|b|
 +---------+--------------+
 |c|d|
 +---------+--------------+
 hello world
-    """)
+    """
+    )
     assert table.header == ["a", "b"]
     assert table.rows == [["c", "d"]]
 
 
 def test_parse_tables_horizontal():
-    tables = parse_tables("""+---------+--------------+
+    tables = parse_tables(
+        """+---------+--------------+
 | Id      | Name         |
 +---------+--------------+
 | a       | b            |
@@ -91,7 +104,8 @@ def test_parse_tables_horizontal():
 | e       | f            |
 | g       | h            |
 +---------+--------------+
-""")
+"""
+    )
     print("Found tables:", len(tables))
     assert tables[0].header == ["Id", "Name"]
     assert tables[0].rows == [
@@ -106,7 +120,8 @@ def test_parse_tables_horizontal():
 
 
 def test_parse_tables_vertical():
-    tables = parse_tables("""+---------+--------------+
+    tables = parse_tables(
+        """+---------+--------------+
 | Id      | 1            |
 | Name    | 2            |
 | Value   | a            |
@@ -116,7 +131,8 @@ def test_parse_tables_vertical():
 | Name    | 4            |
 | Value   | b            |
 +---------+--------------+
-""")
+"""
+    )
     assert tables[0].header is None
     assert tables[0].rows == [
         ["Id", "1"],
diff --git a/tests/test_worker.py b/tests/test_worker.py
index 890b77815..64f06a8c4 100644
--- a/tests/test_worker.py
+++ b/tests/test_worker.py
@@ -105,9 +105,9 @@ def test_worker_stop_all(hq_env: HqEnv):
     hq_env.start_server()
     processes = [hq_env.start_worker() for _ in range(4)]
 
-    wait_for_worker_state(hq_env, [1, 2, 3, 4], ["RUNNING" for _ in range(4)]),
+    wait_for_worker_state(hq_env, [1, 2, 3, 4], ["RUNNING" for _ in range(4)])
     hq_env.command(["worker", "stop", "all"])
-    wait_for_worker_state(hq_env, [1, 2, 3, 4], ["STOPPED" for _ in range(4)]),
+    wait_for_worker_state(hq_env, [1, 2, 3, 4], ["STOPPED" for _ in range(4)])
 
     for process in processes:
         hq_env.check_process_exited(process)
@@ -117,9 +117,9 @@ def test_worker_stop_last(hq_env: HqEnv):
     hq_env.start_server()
    processes = [hq_env.start_worker() for _ in range(4)]
 
-    wait_for_worker_state(hq_env, [1, 2, 3, 4], ["RUNNING" for _ in range(4)]),
+    wait_for_worker_state(hq_env, [1, 2, 3, 4], ["RUNNING" for _ in range(4)])
     hq_env.command(["worker", "stop", "last"])
-    wait_for_worker_state(hq_env, [4], ["STOPPED" for _ in range(4)]),
+    wait_for_worker_state(hq_env, [4], ["STOPPED" for _ in range(4)])
 
     hq_env.check_process_exited(processes[3])
diff --git a/tests/utils/table.py b/tests/utils/table.py
index fa3c84349..182bcbfa8 100644
--- a/tests/utils/table.py
+++ b/tests/utils/table.py
@@ -78,9 +78,9 @@ def __repr__(self):
 
 
 def parse_table(table_info):
-    if type(table_info) == str:
+    if isinstance(table_info, str):
         lines = table_info.strip().split("\n")
-    elif type(table_info) == list:
+    elif isinstance(table_info, list):
         lines = table_info
 
     rows = []