diff --git a/.github/workflows/ci-code.yml b/.github/workflows/ci-code.yml index a8d0cb9a08..57e85bb004 100644 --- a/.github/workflows/ci-code.yml +++ b/.github/workflows/ci-code.yml @@ -104,7 +104,7 @@ jobs: AIIDA_WARN_v3: 1 # Python 3.12 has a performance regression when running with code coverage # so run code coverage only for python 3.9. - run: pytest --db-backend psql -m 'not nightly' tests/ ${{ matrix.python-version == '3.9' && '--cov aiida' || '' }} + run: pytest -n auto --db-backend psql -m 'not nightly' tests/ ${{ matrix.python-version == '3.9' && '--cov aiida' || '' }} - name: Upload coverage report if: matrix.python-version == 3.9 && github.repository == 'aiidateam/aiida-core' @@ -139,7 +139,7 @@ jobs: - name: Run test suite env: AIIDA_WARN_v3: 0 - run: pytest -m 'presto' tests/ + run: pytest -n auto -m 'presto' tests/ verdi: diff --git a/.github/workflows/test-install.yml b/.github/workflows/test-install.yml index d315a50691..3abeb4a2ba 100644 --- a/.github/workflows/test-install.yml +++ b/.github/workflows/test-install.yml @@ -229,7 +229,7 @@ jobs: env: AIIDA_TEST_PROFILE: test_aiida AIIDA_WARN_v3: 1 - run: pytest --db-backend psql tests -m 'not nightly' tests/ + run: pytest -n auto --db-backend psql -m 'not nightly' tests/ - name: Freeze test environment run: pip freeze | sed '1d' | tee requirements-py-${{ matrix.python-version }}.txt diff --git a/pyproject.toml b/pyproject.toml index 35f2f23b7f..7641b7ed72 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -241,6 +241,7 @@ tests = [ 'pytest-rerunfailures~=12.0', 'pytest-benchmark~=4.0', 'pytest-regressions~=2.2', + 'pytest-xdist~=3.6', 'pympler~=1.0', 'coverage~=7.0', 'sphinx~=7.2.0', diff --git a/requirements/requirements-py-3.10.txt b/requirements/requirements-py-3.10.txt index b2408e8087..23e3b24c45 100644 --- a/requirements/requirements-py-3.10.txt +++ b/requirements/requirements-py-3.10.txt @@ -149,6 +149,7 @@ pytest-datadir==1.4.1 pytest-regressions==2.4.2 pytest-rerunfailures==12.0 
pytest-timeout==2.2.0 +pytest-xdist==3.6.1 python-dateutil==2.8.2 python-json-logger==2.0.7 python-memcached==1.59 diff --git a/requirements/requirements-py-3.11.txt b/requirements/requirements-py-3.11.txt index 24acc25a6b..a0f1aa7a75 100644 --- a/requirements/requirements-py-3.11.txt +++ b/requirements/requirements-py-3.11.txt @@ -148,6 +148,7 @@ pytest-datadir==1.4.1 pytest-regressions==2.4.2 pytest-rerunfailures==12.0 pytest-timeout==2.2.0 +pytest-xdist==3.6.1 python-dateutil==2.8.2 python-json-logger==2.0.7 python-memcached==1.59 diff --git a/requirements/requirements-py-3.12.txt b/requirements/requirements-py-3.12.txt index 3f5d72ebb6..8047763fea 100644 --- a/requirements/requirements-py-3.12.txt +++ b/requirements/requirements-py-3.12.txt @@ -148,6 +148,7 @@ pytest-datadir==1.5.0 pytest-regressions==2.5.0 pytest-rerunfailures==12.0 pytest-timeout==2.2.0 +pytest-xdist==3.6.1 python-dateutil==2.8.2 python-json-logger==2.0.7 python-memcached==1.59 diff --git a/requirements/requirements-py-3.9.txt b/requirements/requirements-py-3.9.txt index 3087e62844..7f6c5bbc6f 100644 --- a/requirements/requirements-py-3.9.txt +++ b/requirements/requirements-py-3.9.txt @@ -151,6 +151,7 @@ pytest-datadir==1.4.1 pytest-regressions==2.4.2 pytest-rerunfailures==12.0 pytest-timeout==2.2.0 +pytest-xdist==3.6.1 python-dateutil==2.8.2 python-json-logger==2.0.7 python-memcached==1.59 diff --git a/tests/orm/test_querybuilder.py b/tests/orm/test_querybuilder.py index c434c93411..6d5d7cefc4 100644 --- a/tests/orm/test_querybuilder.py +++ b/tests/orm/test_querybuilder.py @@ -372,6 +372,7 @@ def test_dict_multiple_projections(self): assert dictionary['*'].pk == node.pk assert dictionary['id'] == node.pk + @pytest.mark.usefixtures('aiida_profile_clean') def test_operators_eq_lt_gt(self): nodes = [orm.Data() for _ in range(8)] @@ -394,6 +395,7 @@ def test_operators_eq_lt_gt(self): assert orm.QueryBuilder().append(orm.Node, filters={'attributes.fa': {'>': 1.02}}).count() == 4 assert 
orm.QueryBuilder().append(orm.Node, filters={'attributes.fa': {'>=': 1.02}}).count() == 5 + @pytest.mark.usefixtures('aiida_profile_clean') def test_subclassing(self): s = orm.StructureData() s.base.attributes.set('cat', 'miau') @@ -514,6 +516,7 @@ def test_append_validation(self): # So this should work now: qb.append(orm.StructureData, tag='s').limit(2).dict() + @pytest.mark.usefixtures('aiida_profile_clean') def test_tuples(self): """Test appending ``cls`` tuples.""" orm.Group(label='helloworld').store() @@ -696,6 +699,7 @@ def test_query_links(self): class TestMultipleProjections: """Unit tests for the QueryBuilder ORM class.""" + @pytest.mark.usefixtures('aiida_profile_clean') def test_first_multiple_projections(self): """Test `first()` returns correct types and numbers for multiple projections.""" orm.Data().store() diff --git a/tests/restapi/conftest.py b/tests/restapi/conftest.py index 8abe8f9296..442814a808 100644 --- a/tests/restapi/conftest.py +++ b/tests/restapi/conftest.py @@ -8,18 +8,27 @@ ########################################################################### """pytest fixtures for use with the aiida.restapi tests""" +from typing import Optional + import pytest @pytest.fixture(scope='function') def restapi_server(): """Make REST API server""" + import socket + from werkzeug.serving import make_server from aiida.restapi.common.config import CLI_DEFAULTS from aiida.restapi.run_api import configure_api def _restapi_server(restapi=None): + # Dynamically find a free port + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: + sock.bind(('', 0)) # Bind to a free port provided by the OS + _, port = sock.getsockname() # Get the dynamically assigned port + if restapi is None: flask_restapi = configure_api() else: @@ -27,7 +36,7 @@ def _restapi_server(restapi=None): return make_server( host=CLI_DEFAULTS['HOST_NAME'], - port=int(CLI_DEFAULTS['PORT']), + port=port, app=flask_restapi.app, threaded=True, processes=1, @@ -44,7 +53,10 @@ def 
_restapi_server(restapi=None): def server_url(): from aiida.restapi.common.config import API_CONFIG, CLI_DEFAULTS - return f"http://{CLI_DEFAULTS['HOST_NAME']}:{CLI_DEFAULTS['PORT']}{API_CONFIG['PREFIX']}" + def _server_url(hostname: Optional[str] = None, port: Optional[int] = None): + return f"http://{hostname or CLI_DEFAULTS['HOST_NAME']}:{port or CLI_DEFAULTS['PORT']}{API_CONFIG['PREFIX']}" + + return _server_url @pytest.fixture diff --git a/tests/restapi/test_identifiers.py b/tests/restapi/test_identifiers.py index 8e3f245f6b..0c593e433e 100644 --- a/tests/restapi/test_identifiers.py +++ b/tests/restapi/test_identifiers.py @@ -105,9 +105,11 @@ def test_full_type_unregistered(process_class, restapi_server, server_url): server = restapi_server() server_thread = Thread(target=server.serve_forever) + _server_url = server_url(port=server.server_port) + try: server_thread.start() - type_count_response = requests.get(f'{server_url}/nodes/full_types', timeout=10) + type_count_response = requests.get(f'{_server_url}/nodes/full_types', timeout=10) finally: server.shutdown() @@ -189,9 +191,11 @@ def test_full_type_backwards_compatibility(node_class, restapi_server, server_ur server = restapi_server() server_thread = Thread(target=server.serve_forever) + _server_url = server_url(port=server.server_port) + try: server_thread.start() - type_count_response = requests.get(f'{server_url}/nodes/full_types', timeout=10) + type_count_response = requests.get(f'{_server_url}/nodes/full_types', timeout=10) finally: server.shutdown() diff --git a/tests/restapi/test_statistics.py b/tests/restapi/test_statistics.py index e059de70ac..f125ccff1d 100644 --- a/tests/restapi/test_statistics.py +++ b/tests/restapi/test_statistics.py @@ -42,10 +42,12 @@ def test_count_consistency(restapi_server, server_url): server = restapi_server() server_thread = Thread(target=server.serve_forever) + _server_url = server_url(port=server.server_port) + try: server_thread.start() - type_count_response = 
requests.get(f'{server_url}/nodes/full_types_count', timeout=10) - statistics_response = requests.get(f'{server_url}/nodes/statistics', timeout=10) + type_count_response = requests.get(f'{_server_url}/nodes/full_types_count', timeout=10) + statistics_response = requests.get(f'{_server_url}/nodes/statistics', timeout=10) finally: server.shutdown() diff --git a/tests/restapi/test_threaded_restapi.py b/tests/restapi/test_threaded_restapi.py index bad1a8a76f..40a994776b 100644 --- a/tests/restapi/test_threaded_restapi.py +++ b/tests/restapi/test_threaded_restapi.py @@ -36,17 +36,19 @@ def test_run_threaded_server(restapi_server, server_url, aiida_localhost): This test will fail, if database connections are not being properly closed by the end-point calls. """ server = restapi_server() - computer_id = aiida_localhost.uuid # Create a thread that will contain the running server, # since we do not wish to block the main thread server_thread = Thread(target=server.serve_forever) + _server_url = server_url(port=server.server_port) + + computer_id = aiida_localhost.uuid try: server_thread.start() for _ in range(NO_OF_REQUESTS): - response = requests.get(f'{server_url}/computers/{computer_id}', timeout=10) + response = requests.get(f'{_server_url}/computers/{computer_id}', timeout=10) assert response.status_code == 200 diff --git a/tests/tools/archive/orm/test_authinfo.py b/tests/tools/archive/orm/test_authinfo.py index 650724a514..e2bb653cbb 100644 --- a/tests/tools/archive/orm/test_authinfo.py +++ b/tests/tools/archive/orm/test_authinfo.py @@ -16,6 +16,7 @@ @pytest.mark.usefixtures('aiida_localhost') +@pytest.mark.usefixtures('aiida_profile_clean') def test_create_all_no_authinfo(tmp_path): """Test archive creation that does not include authinfo.""" filename1 = tmp_path / 'export1.aiida' @@ -25,6 +26,7 @@ def test_create_all_no_authinfo(tmp_path): @pytest.mark.usefixtures('aiida_localhost') +@pytest.mark.usefixtures('aiida_profile_clean') def 
test_create_all_with_authinfo(tmp_path): """Test archive creation that does include authinfo.""" filename1 = tmp_path / 'export1.aiida' @@ -33,6 +35,7 @@ def test_create_all_with_authinfo(tmp_path): assert archive.querybuilder().append(orm.AuthInfo).count() == 1 +@pytest.mark.usefixtures('aiida_profile_clean') def test_create_comp_with_authinfo(tmp_path, aiida_localhost): """Test archive creation that does include authinfo.""" filename1 = tmp_path / 'export1.aiida' @@ -41,6 +44,7 @@ def test_create_comp_with_authinfo(tmp_path, aiida_localhost): assert archive.querybuilder().append(orm.AuthInfo).count() == 1 +@pytest.mark.usefixtures('aiida_profile_clean') def test_import_authinfo(aiida_profile, tmp_path, aiida_localhost): """Test archive import, including authinfo""" filename1 = tmp_path / 'export1.aiida' diff --git a/tests/tools/archive/orm/test_groups.py b/tests/tools/archive/orm/test_groups.py index e40e1ba4be..556d1617b3 100644 --- a/tests/tools/archive/orm/test_groups.py +++ b/tests/tools/archive/orm/test_groups.py @@ -18,7 +18,7 @@ from aiida.tools.archive import create_archive, import_archive -def test_nodes_in_group(aiida_profile, tmp_path, aiida_localhost): +def test_nodes_in_group(aiida_profile_clean, tmp_path, aiida_localhost): """This test checks that nodes that belong to a specific group are correctly imported and exported. """ @@ -52,7 +52,7 @@ def test_nodes_in_group(aiida_profile, tmp_path, aiida_localhost): filename1 = tmp_path / 'export1.aiida' create_archive([sd1, jc1, gr1], filename=filename1) n_uuids = [sd1.uuid, jc1.uuid] - aiida_profile.reset_storage() + aiida_profile_clean.reset_storage() import_archive(filename1) # Check that the imported nodes are correctly imported and that @@ -66,7 +66,7 @@ def test_nodes_in_group(aiida_profile, tmp_path, aiida_localhost): assert builder.count() == 1, 'The group was not found.' 
-def test_group_export(tmp_path, aiida_profile): +def test_group_export(tmp_path, aiida_profile_clean): """Exporting a group includes its extras and nodes.""" # Create a new user new_email = uuid.uuid4().hex @@ -90,7 +90,7 @@ def test_group_export(tmp_path, aiida_profile): filename = tmp_path / 'export.aiida' create_archive([group], filename=filename) n_uuids = [sd1.uuid] - aiida_profile.reset_storage() + aiida_profile_clean.reset_storage() import_archive(filename) # Check that the imported nodes are correctly imported and that @@ -106,7 +106,7 @@ def test_group_export(tmp_path, aiida_profile): assert imported_group.base.extras.get('test') == 1, 'Extra missing on imported group' -def test_group_import_existing(tmp_path, aiida_profile): +def test_group_import_existing(tmp_path, aiida_profile_clean): """Testing what happens when I try to import a group that already exists in the database. This should raise an appropriate exception """ @@ -131,7 +131,7 @@ def test_group_import_existing(tmp_path, aiida_profile): # At this point we export the generated data filename = tmp_path / 'export1.aiida' create_archive([group], filename=filename) - aiida_profile.reset_storage() + aiida_profile_clean.reset_storage() # Creating a group of the same name group = orm.Group(label='node_group_existing') @@ -155,7 +155,7 @@ def test_group_import_existing(tmp_path, aiida_profile): assert builder.count() == 2 -def test_import_to_group(tmp_path, aiida_profile): +def test_import_to_group(tmp_path, aiida_profile_clean): """Test `group` parameter Make sure an unstored Group is stored by the import function, forwarding the Group object. Make sure the Group is correctly handled and used for imported nodes. 
@@ -168,7 +168,7 @@ def test_import_to_group(tmp_path, aiida_profile): # Export Nodes filename = tmp_path / 'export.aiida' create_archive([data1, data2], filename=filename) - aiida_profile.reset_storage() + aiida_profile_clean.reset_storage() # Create Group, do not store group_label = 'import_madness' diff --git a/tests/tools/archive/orm/test_logs.py b/tests/tools/archive/orm/test_logs.py index a6c80edb6b..b355ca8ddb 100644 --- a/tests/tools/archive/orm/test_logs.py +++ b/tests/tools/archive/orm/test_logs.py @@ -12,7 +12,7 @@ from aiida.tools.archive import create_archive, import_archive -def test_critical_log_msg_and_metadata(tmp_path, aiida_profile): +def test_critical_log_msg_and_metadata(tmp_path, aiida_profile_clean): """Testing logging of critical message""" message = 'Testing logging of critical failure' calc = orm.CalculationNode() @@ -33,7 +33,7 @@ def test_critical_log_msg_and_metadata(tmp_path, aiida_profile): export_file = tmp_path.joinpath('export.aiida') create_archive([calc], filename=export_file) - aiida_profile.reset_storage() + aiida_profile_clean.reset_storage() import_archive(export_file) @@ -45,7 +45,7 @@ def test_critical_log_msg_and_metadata(tmp_path, aiida_profile): assert logs[0].metadata == log_metadata -def test_exclude_logs_flag(tmp_path, aiida_profile): +def test_exclude_logs_flag(tmp_path, aiida_profile_clean): """Test that the `include_logs` argument for `export` works.""" log_msg = 'Testing logging of critical failure' @@ -65,7 +65,7 @@ def test_exclude_logs_flag(tmp_path, aiida_profile): create_archive([calc], filename=export_file, include_logs=False) # Clean database and reimport exported data - aiida_profile.reset_storage() + aiida_profile_clean.reset_storage() import_archive(export_file) # Finding all the log messages @@ -80,7 +80,7 @@ def test_exclude_logs_flag(tmp_path, aiida_profile): assert str(import_calcs[0][0]) == calc_uuid -def test_export_of_imported_logs(tmp_path, aiida_profile): +def 
test_export_of_imported_logs(tmp_path, aiida_profile_clean): """Test export of imported Log""" log_msg = 'Testing export of imported log' @@ -102,7 +102,7 @@ def test_export_of_imported_logs(tmp_path, aiida_profile): create_archive([calc], filename=export_file) # Clean database and reimport exported data - aiida_profile.reset_storage() + aiida_profile_clean.reset_storage() import_archive(export_file) # Finding all the log messages @@ -123,7 +123,7 @@ def test_export_of_imported_logs(tmp_path, aiida_profile): create_archive([calc], filename=re_export_file) # Clean database and reimport exported data - aiida_profile.reset_storage() + aiida_profile_clean.reset_storage() import_archive(re_export_file) # Finding all the log messages diff --git a/tests/tools/archive/test_simple.py b/tests/tools/archive/test_simple.py index 2f7377eb22..ac6209ab95 100644 --- a/tests/tools/archive/test_simple.py +++ b/tests/tools/archive/test_simple.py @@ -21,7 +21,7 @@ @pytest.mark.parametrize('entities', ['all', 'specific']) -def test_base_data_nodes(aiida_profile, tmp_path, entities): +def test_base_data_nodes(aiida_profile_clean, tmp_path, entities): """Test ex-/import of Base Data nodes""" # producing values for each base type values = ('Hello', 6, -1.2399834e12, False) @@ -46,7 +46,7 @@ def test_base_data_nodes(aiida_profile, tmp_path, entities): # actually export now create(filename=filename) # cleaning: - aiida_profile.reset_storage() + aiida_profile_clean.reset_storage() # Importing back the data: import_archive(filename) # Checking whether values are preserved: diff --git a/tests/tools/groups/test_paths.py b/tests/tools/groups/test_paths.py index 1cad363d46..6ff2459650 100644 --- a/tests/tools/groups/test_paths.py +++ b/tests/tools/groups/test_paths.py @@ -125,6 +125,7 @@ def test_walk_with_invalid_path(): assert [c.path for c in sorted(group_path.walk())] == expected +@pytest.mark.usefixtures('aiida_profile_clean') def test_walk_nodes(): """Test the ``GroupPath.walk_nodes()`` 
function.""" group, _ = orm.Group.collection.get_or_create('a') diff --git a/tests/tools/visualization/test_graph.py b/tests/tools/visualization/test_graph.py index db082017a9..276c000044 100644 --- a/tests/tools/visualization/test_graph.py +++ b/tests/tools/visualization/test_graph.py @@ -354,6 +354,7 @@ def test_graph_node_identifiers(self, node_id_type, monkeypatch, file_regression # The order of certain output lines can be randomly ordered so we split the file in lines, sort, and then join # them into a single string again. The node identifiers generated by the engine are of the form ``N{pk}`` and # they also clearly vary, so they are replaced with the ``NODE`` placeholder. - string = '\n'.join(sorted(graph.graphviz.source.strip().split('\n'))) + string = graph.graphviz.source string = re.sub(r'N\d+', 'NODE', string) + string = '\n'.join(sorted(string.strip().split('\n'))) file_regression.check(string)