diff --git a/tests/MCPServer/test_gearman.py b/tests/MCPServer/test_gearman.py
index ca8fab26f8..92abcbf11f 100644
--- a/tests/MCPServer/test_gearman.py
+++ b/tests/MCPServer/test_gearman.py
@@ -1,5 +1,6 @@
 import math
 import uuid
+from unittest import mock

 import gearman
 import pytest
@@ -18,8 +19,8 @@ def run(self, *args, **kwargs):


 @pytest.fixture
-def simple_job(request, mocker):
-    return MockJob(mocker.Mock(), mocker.Mock(), mocker.Mock(), name="test_job_name")
+def simple_job(request):
+    return MockJob(mock.Mock(), mock.Mock(), mock.Mock(), name="test_job_name")


 @pytest.fixture
@@ -57,12 +58,10 @@ def format_gearman_response(task_results):
     return response


-def test_gearman_task_submission(simple_job, simple_task, mocker):
-    # Mock to avoid db writes
-    mocker.patch("server.tasks.backends.gearman_backend.Task.bulk_log")
-    mocker.patch.object(GearmanTaskBackend, "TASK_BATCH_SIZE", 1)
-    mock_client = mocker.patch("server.tasks.backends.gearman_backend.MCPGearmanClient")
-
+@mock.patch("server.tasks.backends.gearman_backend.MCPGearmanClient")
+@mock.patch("server.tasks.GearmanTaskBackend.TASK_BATCH_SIZE", 1)
+@mock.patch("server.tasks.backends.gearman_backend.Task.bulk_log")
+def test_gearman_task_submission(bulk_log, mock_client, simple_job, simple_task):
     backend = GearmanTaskBackend()

     backend.submit_task(simple_job, simple_task)
@@ -81,14 +80,12 @@ def test_gearman_task_submission(simple_job, simple_task, mocker):
     assert submit_job_kwargs["max_retries"] == GearmanTaskBackend.MAX_RETRIES


-def test_gearman_task_result_success(simple_job, simple_task, mocker):
-    # Mock to avoid db writes
-    mocker.patch("server.tasks.backends.gearman_backend.Task.bulk_log")
-
-    mock_client = mocker.patch("server.tasks.backends.gearman_backend.MCPGearmanClient")
+@mock.patch("server.tasks.backends.gearman_backend.MCPGearmanClient")
+@mock.patch("server.tasks.backends.gearman_backend.Task.bulk_log")
+def test_gearman_task_result_success(bulk_log, mock_client, simple_job, simple_task):
     backend = GearmanTaskBackend()

-    mock_gearman_job = mocker.Mock()
+    mock_gearman_job = mock.Mock()
     job_request = gearman.job.GearmanJobRequest(
         mock_gearman_job, background=True, max_attempts=0
     )
@@ -130,14 +127,12 @@ def mock_jobs_completed(*args):
     assert task_result.done is True


-def test_gearman_task_result_error(simple_job, simple_task, mocker):
-    # Mock to avoid db writes
-    mocker.patch("server.tasks.backends.gearman_backend.Task.bulk_log")
-
-    mock_client = mocker.patch("server.tasks.backends.gearman_backend.MCPGearmanClient")
+@mock.patch("server.tasks.backends.gearman_backend.MCPGearmanClient")
+@mock.patch("server.tasks.backends.gearman_backend.Task.bulk_log")
+def test_gearman_task_result_error(bulk_log, mock_client, simple_job, simple_task):
     backend = GearmanTaskBackend()

-    mock_gearman_job = mocker.Mock()
+    mock_gearman_job = mock.Mock()
     job_request = gearman.job.GearmanJobRequest(
         mock_gearman_job, background=True, max_attempts=0
     )
@@ -169,14 +164,12 @@ def mock_jobs_completed(*args):
 @pytest.mark.parametrize(
     "reverse_result_order", (False, True), ids=["regular", "reversed"]
 )
+@mock.patch("server.tasks.backends.gearman_backend.MCPGearmanClient")
+@mock.patch.object(GearmanTaskBackend, "TASK_BATCH_SIZE", 2)
+@mock.patch("server.tasks.backends.gearman_backend.Task.bulk_log")
 def test_gearman_multiple_batches(
-    simple_job, simple_task, mocker, reverse_result_order
+    bulk_log, mock_client, simple_job, simple_task, reverse_result_order
 ):
-    # Mock to avoid db writes
mocker.patch("server.tasks.backends.gearman_backend.Task.bulk_log") - mocker.patch.object(GearmanTaskBackend, "TASK_BATCH_SIZE", 2) - mock_client = mocker.patch("server.tasks.backends.gearman_backend.MCPGearmanClient") - tasks = [] for i in range(5): task = Task( @@ -192,7 +185,7 @@ def test_gearman_multiple_batches( job_requests = [] for _ in range(3): - mock_gearman_job = mocker.Mock() + mock_gearman_job = mock.Mock() job_request = gearman.job.GearmanJobRequest( mock_gearman_job, background=True, max_attempts=0 ) diff --git a/tests/MCPServer/test_integration.py b/tests/MCPServer/test_integration.py index b5ea3e5b5c..468bba4739 100644 --- a/tests/MCPServer/test_integration.py +++ b/tests/MCPServer/test_integration.py @@ -3,6 +3,7 @@ import threading import uuid from io import StringIO +from unittest import mock import pytest from django.utils import timezone @@ -93,8 +94,13 @@ def dummy_file_replacements(request): @pytest.mark.django_db(transaction=True) +@mock.patch("server.jobs.decisions.load_processing_xml") +@mock.patch("server.jobs.decisions.load_preconfigured_choice") +@mock.patch("server.jobs.client.get_task_backend") def test_workflow_integration( - mocker, + mock_get_task_backend, + mock_load_preconfigured_choice, + mock_load_processing_xml, settings, tmp_path, workflow, @@ -106,162 +112,161 @@ def test_workflow_integration( echo_backend = EchoBackend() settings.SHARED_DIRECTORY = str(tmp_path) settings.PROCESSING_DIRECTORY = str(tmp_path / "processing") - mocker.patch.dict( + mock_get_task_backend.return_value = echo_backend + + with mock.patch.dict( "server.packages.BASE_REPLACEMENTS", {r"%processingDirectory%": settings.PROCESSING_DIRECTORY}, - ) - - mock_get_task_backend = mocker.patch( - "server.jobs.client.get_task_backend", return_value=echo_backend - ) - mock_load_preconfigured_choice = mocker.patch( - "server.jobs.decisions.load_preconfigured_choice" - ) - mock_load_processing_xml = mocker.patch("server.jobs.decisions.load_processing_xml") - mocker.patch.object(transfer, "files", return_value=dummy_file_replacements) - - # Schedule the first job - first_workflow_chain = workflow.get_chains()["3816f689-65a8-4ad0-ac27-74292a70b093"] - first_job_chain = JobChain(transfer, first_workflow_chain, workflow) - job = next(first_job_chain) - package_queue.schedule_job(job) - - assert package_queue.job_queue.qsize() == 1 - assert len(package_queue.active_packages) == 1 - assert transfer.uuid in package_queue.active_packages - - # Process the first job (DirectoryClientScriptJob) - future = package_queue.process_one_job(timeout=1.0) - concurrent.futures.wait([future], timeout=1.0) - - mock_get_task_backend.assert_called_once() - task = echo_backend.tasks[job.uuid][0] - - assert isinstance(job, DirectoryClientScriptJob) - assert job.exit_code == 0 - assert task.arguments == f'"{settings.PROCESSING_DIRECTORY}" "{transfer.uuid}"' - - # Next job in chain should be queued - assert package_queue.job_queue.qsize() == 1 - job = future.result() - - # Process the second job (FilesClientScriptJob) - future = package_queue.process_one_job(timeout=1.0) - concurrent.futures.wait([future], timeout=1.0) - - tasks = echo_backend.tasks[job.uuid] - - assert isinstance(job, FilesClientScriptJob) - assert job.exit_code == 0 - assert len(tasks) == len(dummy_file_replacements) - for task, replacement in zip(tasks, dummy_file_replacements): - assert task.arguments == '"{}"'.format(replacement[r"%fileUUID%"]) - - # Next job in chain should be queued - assert package_queue.job_queue.qsize() == 1 - job = 
-    job = future.result()
-
-    # Process the third job (OutputClientScriptJob)
-    future = package_queue.process_one_job(timeout=1.0)
-    concurrent.futures.wait([future], timeout=1.0)
-
-    assert isinstance(job, OutputClientScriptJob)
-    assert job.exit_code == 0
-    assert job.job_chain.generated_choices == {
-        "default": {"description": "Default Location", "uri": DEFAULT_STORAGE_LOCATION}
-    }
+    ), mock.patch.object(transfer, "files", return_value=dummy_file_replacements):
+        # Schedule the first job
+        first_workflow_chain = workflow.get_chains()[
+            "3816f689-65a8-4ad0-ac27-74292a70b093"
+        ]
+        first_job_chain = JobChain(transfer, first_workflow_chain, workflow)
+        job = next(first_job_chain)
+        package_queue.schedule_job(job)
+
+        assert package_queue.job_queue.qsize() == 1
+        assert len(package_queue.active_packages) == 1
+        assert transfer.uuid in package_queue.active_packages
+
+        # Process the first job (DirectoryClientScriptJob)
+        future = package_queue.process_one_job(timeout=1.0)
+        concurrent.futures.wait([future], timeout=1.0)
+
+        mock_get_task_backend.assert_called_once()
+        task = echo_backend.tasks[job.uuid][0]
+
+        assert isinstance(job, DirectoryClientScriptJob)
+        assert job.exit_code == 0
+        assert task.arguments == f'"{settings.PROCESSING_DIRECTORY}" "{transfer.uuid}"'
+
+        # Next job in chain should be queued
+        assert package_queue.job_queue.qsize() == 1
+        job = future.result()
+
+        # Process the second job (FilesClientScriptJob)
+        future = package_queue.process_one_job(timeout=1.0)
+        concurrent.futures.wait([future], timeout=1.0)
+
+        tasks = echo_backend.tasks[job.uuid]
+
+        assert isinstance(job, FilesClientScriptJob)
+        assert job.exit_code == 0
+        assert len(tasks) == len(dummy_file_replacements)
+        for task, replacement in zip(tasks, dummy_file_replacements):
+            assert task.arguments == '"{}"'.format(replacement[r"%fileUUID%"])
+
+        # Next job in chain should be queued
+        assert package_queue.job_queue.qsize() == 1
+        job = future.result()
+
+        # Process the third job (OutputClientScriptJob)
+        future = package_queue.process_one_job(timeout=1.0)
+        concurrent.futures.wait([future], timeout=1.0)
+
+        assert isinstance(job, OutputClientScriptJob)
+        assert job.exit_code == 0
+        assert job.job_chain.generated_choices == {
+            "default": {
+                "description": "Default Location",
+                "uri": DEFAULT_STORAGE_LOCATION,
+            }
+        }

-    # Next job in chain should be queued
-    assert package_queue.job_queue.qsize() == 1
-    job = future.result()
+        # Next job in chain should be queued
+        assert package_queue.job_queue.qsize() == 1
+        job = future.result()

-    # Setup preconfigured choice for next job
-    mock_load_preconfigured_choice.return_value = DEFAULT_STORAGE_LOCATION
+        # Setup preconfigured choice for next job
+        mock_load_preconfigured_choice.return_value = DEFAULT_STORAGE_LOCATION

-    # Process the fourth job (OutputDecisionJob)
-    future = package_queue.process_one_job(timeout=1.0)
-    concurrent.futures.wait([future], timeout=1.0)
+        # Process the fourth job (OutputDecisionJob)
+        future = package_queue.process_one_job(timeout=1.0)
+        concurrent.futures.wait([future], timeout=1.0)

-    assert isinstance(job, OutputDecisionJob)
-    assert job.exit_code == 0
-    assert job.job_chain.context[r"%AIPsStore%"] == DEFAULT_STORAGE_LOCATION
+        assert isinstance(job, OutputDecisionJob)
+        assert job.exit_code == 0
+        assert job.job_chain.context[r"%AIPsStore%"] == DEFAULT_STORAGE_LOCATION

-    # Next job in chain should be queued
-    assert package_queue.job_queue.qsize() == 1
-    job = future.result()
+        # Next job in chain should be queued
+        assert package_queue.job_queue.qsize() == 1
+        job = future.result()

-    # Setup preconfigured choice for next job
-    mock_load_preconfigured_choice.return_value = "7b814362-c679-43c4-a2e2-1ba59957cd18"
+        # Setup preconfigured choice for next job
+        mock_load_preconfigured_choice.return_value = (
+            "7b814362-c679-43c4-a2e2-1ba59957cd18"
+        )

-    # Process the fifth job (NextChainDecisionJob)
-    future = package_queue.process_one_job(timeout=1.0)
-    concurrent.futures.wait([future], timeout=1.0)
+        # Process the fifth job (NextChainDecisionJob)
+        future = package_queue.process_one_job(timeout=1.0)
+        concurrent.futures.wait([future], timeout=1.0)

-    assert isinstance(job, NextChainDecisionJob)
-    assert job.exit_code == 0
+        assert isinstance(job, NextChainDecisionJob)
+        assert job.exit_code == 0

-    # Next job in chain should be queued
-    assert package_queue.job_queue.qsize() == 1
-    job = future.result()
+        # Next job in chain should be queued
+        assert package_queue.job_queue.qsize() == 1
+        job = future.result()

-    # We should be on chain 2 now
-    assert job.job_chain is not first_job_chain
-    assert job.job_chain.chain.id == "7b814362-c679-43c4-a2e2-1ba59957cd18"
+        # We should be on chain 2 now
+        assert job.job_chain is not first_job_chain
+        assert job.job_chain.chain.id == "7b814362-c679-43c4-a2e2-1ba59957cd18"

-    # Setup preconfigured choice for next job
-    mock_load_processing_xml.return_value = TEST_PROCESSING_CONFIG
+        # Setup preconfigured choice for next job
+        mock_load_processing_xml.return_value = TEST_PROCESSING_CONFIG

-    # Process the sixth job (UpdateContextDecisionJob)
-    future = package_queue.process_one_job(timeout=1.0)
-    concurrent.futures.wait([future], timeout=1.0)
+        # Process the sixth job (UpdateContextDecisionJob)
+        future = package_queue.process_one_job(timeout=1.0)
+        concurrent.futures.wait([future], timeout=1.0)

-    assert isinstance(job, UpdateContextDecisionJob)
-    assert job.exit_code == 0
-    assert job.job_chain.context[r"%TestValue%"] == "7"
+        assert isinstance(job, UpdateContextDecisionJob)
+        assert job.exit_code == 0
+        assert job.job_chain.context[r"%TestValue%"] == "7"

-    # Next job in chain should be queued
-    assert package_queue.job_queue.qsize() == 1
-    job = future.result()
+        # Next job in chain should be queued
+        assert package_queue.job_queue.qsize() == 1
+        job = future.result()

-    # Process the seventh job (SetUnitVarLinkJob)
-    future = package_queue.process_one_job(timeout=1.0)
-    concurrent.futures.wait([future], timeout=1.0)
+        # Process the seventh job (SetUnitVarLinkJob)
+        future = package_queue.process_one_job(timeout=1.0)
+        concurrent.futures.wait([future], timeout=1.0)

-    assert isinstance(job, SetUnitVarLinkJob)
-    assert job.exit_code == 0
+        assert isinstance(job, SetUnitVarLinkJob)
+        assert job.exit_code == 0

-    unit_var = models.UnitVariable.objects.get(
-        unittype=transfer.UNIT_VARIABLE_TYPE,
-        unituuid=transfer.uuid,
-        variable="test_unit_variable",
-        variablevalue="",
-        microservicechainlink="f8e4c1ee-3e43-4caa-a664-f6b6bd8f156e",
-    )
-    assert unit_var is not None
+        unit_var = models.UnitVariable.objects.get(
+            unittype=transfer.UNIT_VARIABLE_TYPE,
+            unituuid=transfer.uuid,
+            variable="test_unit_variable",
+            variablevalue="",
+            microservicechainlink="f8e4c1ee-3e43-4caa-a664-f6b6bd8f156e",
+        )
+        assert unit_var is not None

-    # Next job in chain should be queued
-    assert package_queue.job_queue.qsize() == 1
-    job = future.result()
+        # Next job in chain should be queued
+        assert package_queue.job_queue.qsize() == 1
+        job = future.result()
-    # Process the eighth job (GetUnitVarLinkJob)
-    future = package_queue.process_one_job(timeout=1.0)
-    concurrent.futures.wait([future], timeout=1.0)
+        # Process the eighth job (GetUnitVarLinkJob)
+        future = package_queue.process_one_job(timeout=1.0)
+        concurrent.futures.wait([future], timeout=1.0)

-    assert isinstance(job, GetUnitVarLinkJob)
-    assert job.exit_code == 0
+        assert isinstance(job, GetUnitVarLinkJob)
+        assert job.exit_code == 0

-    # Out job chain should have been redirected to the final link
-    assert job.job_chain.current_link.id == "f8e4c1ee-3e43-4caa-a664-f6b6bd8f156e"
+        # Our job chain should have been redirected to the final link
+        assert job.job_chain.current_link.id == "f8e4c1ee-3e43-4caa-a664-f6b6bd8f156e"

-    # Next job in chain should be queued
-    assert package_queue.job_queue.qsize() == 1
-    job = future.result()
+        # Next job in chain should be queued
+        assert package_queue.job_queue.qsize() == 1
+        job = future.result()

-    # Process the last job (DirectoryClientScriptJob)
-    future = package_queue.process_one_job(timeout=1.0)
-    concurrent.futures.wait([future], timeout=1.0)
+        # Process the last job (DirectoryClientScriptJob)
+        future = package_queue.process_one_job(timeout=1.0)
+        concurrent.futures.wait([future], timeout=1.0)

-    assert job.exit_code == 0
+        assert job.exit_code == 0

-    # Workflow is over; we're done
-    assert package_queue.job_queue.qsize() == 0
+        # Workflow is over; we're done
+        assert package_queue.job_queue.qsize() == 0
diff --git a/tests/MCPServer/test_mcp.py b/tests/MCPServer/test_mcp.py
index a7ce22a791..5c07d8affc 100644
--- a/tests/MCPServer/test_mcp.py
+++ b/tests/MCPServer/test_mcp.py
@@ -1,5 +1,6 @@
 import threading
 import uuid
+from unittest import mock

 import pytest
 from server.mcp import main
@@ -7,7 +8,14 @@


 @pytest.mark.django_db(transaction=True)
-def test_watched_dir_handler_creates_transfer_if_it_does_not_exist(mocker, tmpdir):
+@mock.patch("server.packages.models.Transfer.objects.create")
+@mock.patch("server.packages.uuid4")
+@mock.patch(
+    "server.mcp.JobChain", mock.MagicMock(return_value=iter(["some_chain_link"]))
+)
+def test_watched_dir_handler_creates_transfer_if_it_does_not_exist(
+    uuid4, create_mock, tmpdir
+):
     """Test that a models.Transfer object exists for an unknown path.

     This for example simulates the case when a user drops a directory
@@ -20,22 +28,17 @@ def test_watched_dir_handler_creates_transfer_if_it_does_not_exi
     """
     # We're not interested in the package queue or the link chaining logics here,
     # so we mock very limited implementations for those.
-    job_chain_mock = mocker.MagicMock()
-    job_chain_mock.return_value = iter(["some_chain_link"])
-    mocker.patch("server.mcp.JobChain", job_chain_mock)
-    package_queue = mocker.Mock()
-    watched_dir = mocker.MagicMock(unit_type="Transfer")
+    package_queue = mock.Mock()
+    watched_dir = mock.MagicMock(unit_type="Transfer")

     # Mock a known UUID for the new transfer.
     transfer_uuid = uuid.uuid4()
-    mocker.patch("server.packages.uuid4", return_value=transfer_uuid)
+    uuid4.return_value = transfer_uuid

     # Mock the Django manager for the Transfer model. This is mocked from the
     # `server.packages` module since its path from the Dashboard is not available.
-    transfer_mock = mocker.Mock(uuid=transfer_uuid)
-    create_mock = mocker.patch(
-        "server.packages.models.Transfer.objects.create", return_value=transfer_mock
-    )
+    transfer_mock = mock.Mock(uuid=transfer_uuid)
+    create_mock.return_value = transfer_mock

     # The new/unknown path for creating the transfer.
     path = tmpdir.mkdir("some_transfer")
@@ -48,26 +51,26 @@


 @pytest.mark.django_db(transaction=True)
-def test_watched_dir_handler_creates_transfer_for_file(mocker, tmpdir):
+@mock.patch("server.packages.models.Transfer.objects.create")
+@mock.patch("server.packages.uuid4")
+@mock.patch(
+    "server.mcp.JobChain", mock.MagicMock(return_value=iter(["some_chain_link"]))
+)
+def test_watched_dir_handler_creates_transfer_for_file(uuid4, create_mock, tmpdir):
     """Test that a models.Transfer object exists for a file path."""
     # We're not interested in the package queue or the link chaining logics here,
     # so we mock very limited implementations for those.
-    job_chain_mock = mocker.MagicMock()
-    job_chain_mock.return_value = iter(["some_chain_link"])
-    mocker.patch("server.mcp.JobChain", job_chain_mock)
-    package_queue = mocker.Mock()
-    watched_dir = mocker.MagicMock(unit_type="Transfer")
+    package_queue = mock.Mock()
+    watched_dir = mock.MagicMock(unit_type="Transfer")

     # Mock a known UUID for the new transfer.
     transfer_uuid = uuid.uuid4()
-    mocker.patch("server.packages.uuid4", return_value=transfer_uuid)
+    uuid4.return_value = transfer_uuid

     # Mock the Django manager for the Transfer model. This is mocked from the
     # `server.packages` module since its path from the Dashboard is not available.
-    transfer_mock = mocker.Mock(uuid=transfer_uuid)
-    create_mock = mocker.patch(
-        "server.packages.models.Transfer.objects.create", return_value=transfer_mock
-    )
+    transfer_mock = mock.Mock(uuid=transfer_uuid)
+    create_mock.return_value = transfer_mock

     # The new/unknown path of a file for creating the transfer.
     path = tmpdir.join("file.txt")
@@ -80,7 +83,21 @@ def test_watched_dir_handler_creates_transfer_for_file(mocker, tmpdir):
     create_mock.assert_called_once_with(uuid=transfer_uuid, currentlocation=str(path))


-def test_mcp_main(mocker, settings):
+@mock.patch("server.mcp.metrics")
+@mock.patch("server.mcp.Task")
+@mock.patch("server.mcp.Job")
+@mock.patch("server.mcp.Package")
+@mock.patch("server.mcp.shared_dirs")
+@mock.patch("server.mcp.load_workflow")
+def test_mcp_main(
+    mock_load_workflow,
+    mock_shared_dirs,
+    mock_package,
+    mock_job,
+    mock_task,
+    mock_metrics,
+    settings,
+):
     """Test spin up with immediate shutdown.

     This test has limited utility because everything is mocked, but it should
@@ -91,13 +108,6 @@ def test_mcp_main(mocker, settings):
     settings.WORKER_THREADS = 1
     settings.PROMETHEUS_ENABLED = True

-    mock_load_workflow = mocker.patch("server.mcp.load_workflow")
-    mock_shared_dirs = mocker.patch("server.mcp.shared_dirs")
-    mock_package = mocker.patch("server.mcp.Package")
-    mock_job = mocker.patch("server.mcp.Job")
-    mock_task = mocker.patch("server.mcp.Task")
-    mock_metrics = mocker.patch("server.mcp.metrics")
-
     shutdown_event = threading.Event()
     shutdown_event.set()
diff --git a/tests/MCPServer/test_package.py b/tests/MCPServer/test_package.py
index fad43d7e62..919ed8b92a 100644
--- a/tests/MCPServer/test_package.py
+++ b/tests/MCPServer/test_package.py
@@ -1,6 +1,7 @@
 import uuid
 from concurrent.futures import ThreadPoolExecutor
 from pathlib import Path
+from unittest import mock

 import pytest
 from django.core.exceptions import ValidationError
@@ -396,10 +397,10 @@ def test_package_statuses(tmp_path, package_class, model_class):


 @pytest.mark.django_db(transaction=True)
-def test_create_package(mocker, tmp_path, admin_user, settings):
-    package_queue = mocker.Mock(spec=PackageQueue)
-    executor = mocker.Mock(spec=ThreadPoolExecutor)
-    workflow = mocker.Mock(spec=Workflow)
+def test_create_package(tmp_path, admin_user, settings):
+    package_queue = mock.Mock(spec=PackageQueue)
+    executor = mock.Mock(spec=ThreadPoolExecutor)
+    workflow = mock.Mock(spec=Workflow)

     d = tmp_path / "sub"
     d.mkdir()
diff --git a/tests/MCPServer/test_processing_config.py b/tests/MCPServer/test_processing_config.py
index 83e355614f..80d39f9b67 100644
--- a/tests/MCPServer/test_processing_config.py
+++ b/tests/MCPServer/test_processing_config.py
@@ -1,5 +1,6 @@
 import os
 import pathlib
+from unittest import mock

 import pytest
 from server.processing_config import ChainChoicesField
@@ -23,15 +24,30 @@ def _workflow():
         return load(fp)


-def test_get_processing_fields(mocker, _workflow):
-    mocker.patch("storageService.get_location", return_value=[])
-
+@mock.patch("storageService.get_location", return_value=[])
+def test_get_processing_fields(get_location, _workflow):
     fields = get_processing_fields(_workflow)

     assert len(fields) == len(processing_fields)


-def test_storage_location_field(mocker, _workflow):
+@mock.patch(
+    "server.processing_config.processing_fields",
+    new=[
+        StorageLocationField(
+            link_id="b320ce81-9982-408a-9502-097d0daa48fa",
+            name="store_aip_location",
+            purpose="AS",
+        ),
+        StorageLocationField(
+            link_id="cd844b6e-ab3c-4bc6-b34f-7103f88715de",
+            name="store_dip_location",
+            purpose="DS",
+        ),
+    ],
+)
+@mock.patch("storageService.get_location")
+def test_storage_location_field(get_location, _workflow):
     def mocked_get_location(purpose):
         return [
             {
@@ -41,23 +57,7 @@ def mocked_get_location(purpose):
             }
         ]

-    mocker.patch("storageService.get_location", side_effect=mocked_get_location)
-
-    mocker.patch(
-        "server.processing_config.processing_fields",
-        new=[
-            StorageLocationField(
-                link_id="b320ce81-9982-408a-9502-097d0daa48fa",
-                name="store_aip_location",
-                purpose="AS",
-            ),
-            StorageLocationField(
-                link_id="cd844b6e-ab3c-4bc6-b34f-7103f88715de",
-                name="store_dip_location",
-                purpose="DS",
-            ),
-        ],
-    )
+    get_location.side_effect = mocked_get_location

     assert get_processing_fields(_workflow) == [
         {
@@ -121,20 +121,19 @@ def mocked_get_location(purpose):
     ]


-def test_replace_dict_field(mocker, _workflow):
-    mocker.patch(
-        "server.processing_config.processing_fields",
-        new=[
-            ReplaceDictField(
link_id="f09847c2-ee51-429a-9478-a860477f6b8d", - name="select_format_id_tool_transfer", - ), - ReplaceDictField( - link_id="f19926dd-8fb5-4c79-8ade-c83f61f55b40", name="delete_packages" - ), - ], - ) - +@mock.patch( + "server.processing_config.processing_fields", + new=[ + ReplaceDictField( + link_id="f09847c2-ee51-429a-9478-a860477f6b8d", + name="select_format_id_tool_transfer", + ), + ReplaceDictField( + link_id="f19926dd-8fb5-4c79-8ade-c83f61f55b40", name="delete_packages" + ), + ], +) +def test_replace_dict_field(_workflow): assert get_processing_fields(_workflow) == [ { "choices": [ @@ -197,22 +196,21 @@ def test_replace_dict_field(mocker, _workflow): ] -def test_chain_choices_field(mocker, _workflow): - mocker.patch( - "server.processing_config.processing_fields", - new=[ - ChainChoicesField( - link_id="eeb23509-57e2-4529-8857-9d62525db048", name="reminder" - ), - ChainChoicesField( - link_id="cb8e5706-e73f-472f-ad9b-d1236af8095f", - name="normalize", - ignored_choices=["Reject SIP"], - find_duplicates="Normalize", - ), - ], - ) - +@mock.patch( + "server.processing_config.processing_fields", + new=[ + ChainChoicesField( + link_id="eeb23509-57e2-4529-8857-9d62525db048", name="reminder" + ), + ChainChoicesField( + link_id="cb8e5706-e73f-472f-ad9b-d1236af8095f", + name="normalize", + ignored_choices=["Reject SIP"], + find_duplicates="Normalize", + ), + ], +) +def test_chain_choices_field(_workflow): assert get_processing_fields(_workflow) == [ { "choices": [ @@ -318,23 +316,22 @@ def test_chain_choices_field(mocker, _workflow): ] -def test_shared_choices_field(mocker, _workflow): - mocker.patch( - "server.processing_config.processing_fields", - new=[ - SharedChainChoicesField( - link_id="856d2d65-cd25-49fa-8da9-cabb78292894", - name="virus_scanning", - related_links=[ - "1dad74a2-95df-4825-bbba-dca8b91d2371", - "7e81f94e-6441-4430-a12d-76df09181b66", - "390d6507-5029-4dae-bcd4-ce7178c9b560", - "97a5ddc0-d4e0-43ac-a571-9722405a0a9b", - ], - ) - ], - ) - +@mock.patch( + "server.processing_config.processing_fields", + new=[ + SharedChainChoicesField( + link_id="856d2d65-cd25-49fa-8da9-cabb78292894", + name="virus_scanning", + related_links=[ + "1dad74a2-95df-4825-bbba-dca8b91d2371", + "7e81f94e-6441-4430-a12d-76df09181b66", + "390d6507-5029-4dae-bcd4-ce7178c9b560", + "97a5ddc0-d4e0-43ac-a571-9722405a0a9b", + ], + ) + ], +) +def test_shared_choices_field(_workflow): assert get_processing_fields(_workflow) == [ { "id": "856d2d65-cd25-49fa-8da9-cabb78292894", @@ -412,14 +409,14 @@ def test_processing_configuration_file_exists_with_None(): assert not processing_configuration_file_exists(None) -def test_processing_configuration_file_exists_with_existent_file(mocker): - mocker.patch("os.path.isfile", return_value=True) +@mock.patch("os.path.isfile", return_value=True) +def test_processing_configuration_file_exists_with_existent_file(isfile): assert processing_configuration_file_exists("defaultProcessingMCP.xml") -def test_processing_configuration_file_exists_with_nonexistent_file(mocker): - mocker.patch("os.path.isfile", return_value=False) - logger = mocker.patch("server.processing_config.logger") +@mock.patch("server.processing_config.logger") +@mock.patch("os.path.isfile", return_value=False) +def test_processing_configuration_file_exists_with_nonexistent_file(isfile, logger): assert not processing_configuration_file_exists("bogus.xml") logger.debug.assert_called_once_with( "Processing configuration file for %s does not exist", "bogus.xml" diff --git a/tests/MCPServer/test_queues.py 
index debce6b4b5..6f67e84779 100644
--- a/tests/MCPServer/test_queues.py
+++ b/tests/MCPServer/test_queues.py
@@ -3,6 +3,7 @@
 import threading
 import time
 import uuid
+from unittest import mock

 import pytest
 from server.jobs import DecisionJob
@@ -120,8 +121,8 @@ def dip(request, tmp_path):
 dip_2 = dip


-def test_schedule_job(package_queue, transfer, workflow_link, mocker):
-    test_job = MockJob(mocker.Mock(), workflow_link, transfer)
+def test_schedule_job(package_queue, transfer, workflow_link):
+    test_job = MockJob(mock.Mock(), workflow_link, transfer)

     package_queue.schedule_job(test_job)

@@ -140,9 +141,9 @@ def test_schedule_job(package_queue, transfer, workflow_link):
     assert package_queue.dip_queue.qsize() == 0


-def test_active_transfer_limit(package_queue, transfer, sip, workflow_link, mocker):
-    test_job1 = MockJob(mocker.Mock(), workflow_link, transfer)
-    test_job2 = MockJob(mocker.Mock(), workflow_link, sip)
+def test_active_transfer_limit(package_queue, transfer, sip, workflow_link):
+    test_job1 = MockJob(mock.Mock(), workflow_link, transfer)
+    test_job2 = MockJob(mock.Mock(), workflow_link, sip)

     package_queue.schedule_job(test_job1)

@@ -176,11 +177,9 @@ def test_activate_and_deactivate_package(package_queue, transfer):
     assert transfer.uuid not in package_queue.active_packages


-def test_queue_next_job_raises_full(
-    package_queue, transfer, sip, workflow_link, mocker
-):
-    test_job1 = MockJob(mocker.Mock(), workflow_link, transfer)
-    test_job2 = MockJob(mocker.Mock(), workflow_link, sip)
+def test_queue_next_job_raises_full(package_queue, transfer, sip, workflow_link):
+    test_job1 = MockJob(mock.Mock(), workflow_link, transfer)
+    test_job2 = MockJob(mock.Mock(), workflow_link, sip)

     package_queue.schedule_job(test_job1)
     package_queue.schedule_job(test_job2)
@@ -191,8 +190,8 @@ def test_queue_next_job_raises_full(
         package_queue.queue_next_job()


-def test_await_job_decision(package_queue, transfer, workflow_link, mocker):
-    test_job = MockDecisionJob(mocker.Mock(), workflow_link, transfer)
+def test_await_job_decision(package_queue, transfer, workflow_link):
+    test_job = MockDecisionJob(mock.Mock(), workflow_link, transfer)
     package_queue.await_decision(test_job)

     assert package_queue.job_queue.qsize() == 0
@@ -203,10 +202,10 @@ def test_await_job_decision(package_queue, transfer, workflow_link):


 def test_decision_job_moved_to_awaiting_decision(
-    package_queue, transfer, sip, workflow_link, mocker
+    package_queue, transfer, sip, workflow_link
 ):
-    test_job1 = MockDecisionJob(mocker.Mock(), workflow_link, transfer)
-    test_job2 = MockJob(mocker.Mock(), workflow_link, sip)
+    test_job1 = MockDecisionJob(mock.Mock(), workflow_link, transfer)
+    test_job2 = MockJob(mock.Mock(), workflow_link, sip)

     package_queue.schedule_job(test_job1)

@@ -229,12 +228,12 @@ def test_decision_job_moved_to_awaiting_decision(


 def test_all_scheduled_decisions_are_processed(
-    package_queue_regular, dip_1, dip_2, workflow_link, mocker
+    package_queue_regular, dip_1, dip_2, workflow_link
 ):
     package_queue = package_queue_regular

-    test_job1 = MockDecisionJob(mocker.Mock(), workflow_link, dip_1)
-    test_job2 = MockDecisionJob(mocker.Mock(), workflow_link, dip_2)
+    test_job1 = MockDecisionJob(mock.Mock(), workflow_link, dip_1)
+    test_job2 = MockDecisionJob(mock.Mock(), workflow_link, dip_2)

     # Schedule two jobs simultaneously.
     # We want to confirm that both are eventually processed.
@@ -271,7 +270,7 @@ def test_all_scheduled_decisions_are_processed(

 @pytest.mark.django_db(transaction=True)
 def test_all_scheduled_jobs_are_processed(
-    package_queue_regular, dip_1, dip_2, workflow_link, mocker
+    package_queue_regular, dip_1, dip_2, workflow_link
 ):
     package_queue = package_queue_regular

@@ -279,8 +278,8 @@ def test_all_scheduled_jobs_are_processed(
     # It causes the queue manager to hit the database.
     workflow_link._src["end"] = True

-    test_job1 = MockJob(mocker.Mock(), workflow_link, dip_1)
-    test_job2 = MockJob(mocker.Mock(), workflow_link, dip_2)
+    test_job1 = MockJob(mock.Mock(), workflow_link, dip_1)
+    test_job2 = MockJob(mock.Mock(), workflow_link, dip_2)

     # Schedule two jobs simultaneously.
     # We want to confirm that both are eventually processed.
diff --git a/tests/MCPServer/test_rpc_server.py b/tests/MCPServer/test_rpc_server.py
index 333611940b..bd586f437b 100644
--- a/tests/MCPServer/test_rpc_server.py
+++ b/tests/MCPServer/test_rpc_server.py
@@ -1,6 +1,7 @@
 import pathlib
 import threading
 import uuid
+from unittest import mock

 import pytest
 from django.utils import timezone
@@ -14,7 +15,7 @@


 @pytest.mark.django_db
-def test_approve_partial_reingest_handler(mocker):
+def test_approve_partial_reingest_handler():
     sip = models.SIP.objects.create(uuid=str(uuid.uuid4()))
     models.Job.objects.create(
         sipuuid=sip.pk,
@@ -22,7 +23,7 @@ def test_approve_partial_reingest_handler():
         createdtime=timezone.now(),
         currentstep=models.Job.STATUS_AWAITING_DECISION,
     )
-    package_queue = mocker.MagicMock()
+    package_queue = mock.MagicMock()
     with open(ASSETS_DIR / "workflow.json") as fp:
         wf = workflow.load(fp)
     shutdown_event = threading.Event()
diff --git a/tests/MCPServer/test_translation.py b/tests/MCPServer/test_translation.py
index 0ff10e46fd..565cd4d9ff 100644
--- a/tests/MCPServer/test_translation.py
+++ b/tests/MCPServer/test_translation.py
@@ -1,9 +1,11 @@
+from unittest import mock
+
 from server.translation import UNKNOWN_TRANSLATION_LABEL
 from server.translation import TranslationLabel


-def test_translation_label(mocker):
-    mocker.patch("server.translation.FALLBACK_LANG", "en")
+@mock.patch("server.translation.FALLBACK_LANG", "en")
+def test_translation_label():
     tr = TranslationLabel({"en": "cat", "es": "gato"})
     assert str(tr) == "cat"
     assert tr["es"] == "gato"
@@ -11,12 +13,12 @@ def test_translation_label():
     assert tr.get_label(lang="es") == "gato"
     assert tr.get_label(lang="is", fallback_label="köttur") == "köttur"
     assert tr.get_label(lang="??") == "cat"
-    mocker.patch("server.translation.FALLBACK_LANG", "xx")
-    assert tr.get_label(lang="yy") == UNKNOWN_TRANSLATION_LABEL
+    with mock.patch("server.translation.FALLBACK_LANG", "xx"):
+        assert tr.get_label(lang="yy") == UNKNOWN_TRANSLATION_LABEL


-def test_translation_label_with_prepared_codes(mocker):
-    mocker.patch("server.translation.FALLBACK_LANG", "en")
+@mock.patch("server.translation.FALLBACK_LANG", "en")
+def test_translation_label_with_prepared_codes():
     tr = TranslationLabel({"en": "dog", "pt_BR": "cão"})
     assert tr.get_label(lang="en") == "dog"
     assert tr.get_label(lang="pt-br") == "cão"
diff --git a/tests/MCPServer/test_workflow.py b/tests/MCPServer/test_workflow.py
index 23b3021875..d8b318854b 100644
--- a/tests/MCPServer/test_workflow.py
+++ b/tests/MCPServer/test_workflow.py
@@ -1,6 +1,7 @@
 import os
 import pathlib
 from io import StringIO
+from unittest import mock

 import pytest
 from django.utils.translation import gettext_lazy
@@ -13,15 +14,15 @@
 FIXTURES_DIR = pathlib.Path(__file__).parent / "fixtures"

-def test_invert_job_statuses(mocker):
-    mocker.patch(
-        "server.jobs.Job.STATUSES",
-        (
-            (1, gettext_lazy("Uno")),
-            (2, gettext_lazy("Dos")),
-            (3, gettext_lazy("Tres")),
-        ),
-    )
+@mock.patch(
+    "server.jobs.Job.STATUSES",
+    (
+        (1, gettext_lazy("Uno")),
+        (2, gettext_lazy("Dos")),
+        (3, gettext_lazy("Tres")),
+    ),
+)
+def test_invert_job_statuses():
     ret = workflow._invert_job_statuses()

     assert ret == {"Uno": 1, "Dos": 2, "Tres": 3}
@@ -98,7 +99,7 @@ def test_load_valid_document(path):
     assert first_link.get_label("foobar") is None


-def test_link_browse_methods(mocker):
+def test_link_browse_methods():
     with open(os.path.join(ASSETS_DIR, "workflow.json")) as fp:
         wf = workflow.load(fp)
     ln = wf.get_link("1ba589db-88d1-48cf-bb1a-a5f9d2b17378")
@@ -113,7 +114,7 @@ def test_get_schema():
     assert schema["$id"] == "https://www.archivematica.org/labs/workflow/schema/v1.json"


-def test_get_schema_not_found(mocker):
-    mocker.patch("server.workflow._LATEST_SCHEMA", "non-existen-schema")
+@mock.patch("server.workflow._LATEST_SCHEMA", "non-existen-schema")
+def test_get_schema_not_found():
     with pytest.raises(IOError):
         workflow._get_schema()