From 91e88ab6af0e69b87eb9ceb12585869bf846d7c7 Mon Sep 17 00:00:00 2001 From: Alexey Snigir Date: Fri, 17 Jan 2025 11:29:53 +0100 Subject: [PATCH] refactor tests structure and naming --- ... => test-e2e-dashboard-score-vizro-ai.yml} | 26 ++++++++-------- ...izro-ai.yml => test-e2e-plot-vizro-ai.yml} | 20 ++++++------- ...e-vizro-ai-ui.yml => test-vizro-ai-ui.yml} | 13 ++++---- tools/tests/e2e_common_asserts.py | 19 ++++++++++++ .../tests/e2e_common_waiters.py | 3 +- .../tests}/wait-for-it.sh | 0 vizro-ai/hatch.toml | 17 +++++------ vizro-ai/pyproject.toml | 2 +- vizro-ai/tests/e2e/test_dashboard.py | 2 +- vizro-ai/tests/tests_utils/__init__.py | 0 vizro-ai/tests/tests_utils/e2e_asserts.py | 30 ------------------- vizro-ai/tests/tests_utils/e2e_constants.py | 11 ------- vizro-ai/tests/vizro_ai_ui/conftest.py | 2 +- .../fake_data_generator.py} | 0 .../tests/vizro_ai_ui/test_vizro_ai_ui.py | 8 ++--- vizro-core/hatch.toml | 5 ---- 16 files changed, 64 insertions(+), 94 deletions(-) rename .github/workflows/{test-score-vizro-ai.yml => test-e2e-dashboard-score-vizro-ai.yml} (80%) rename .github/workflows/{test-integration-vizro-ai.yml => test-e2e-plot-vizro-ai.yml} (85%) rename .github/workflows/{test-e2e-vizro-ai-ui.yml => test-vizro-ai-ui.yml} (83%) create mode 100644 tools/tests/e2e_common_asserts.py rename vizro-ai/tests/tests_utils/e2e_waiters.py => tools/tests/e2e_common_waiters.py (97%) rename {vizro-ai/tests/tests_utils => tools/tests}/wait-for-it.sh (100%) delete mode 100644 vizro-ai/tests/tests_utils/__init__.py delete mode 100644 vizro-ai/tests/tests_utils/e2e_asserts.py delete mode 100644 vizro-ai/tests/tests_utils/e2e_constants.py rename vizro-ai/tests/{tests_utils/e2e_fake_data_generator.py => vizro_ai_ui/fake_data_generator.py} (100%) diff --git a/.github/workflows/test-score-vizro-ai.yml b/.github/workflows/test-e2e-dashboard-score-vizro-ai.yml similarity index 80% rename from .github/workflows/test-score-vizro-ai.yml rename to 
.github/workflows/test-e2e-dashboard-score-vizro-ai.yml index 813ecba8c..b71b0efd0 100644 --- a/.github/workflows/test-score-vizro-ai.yml +++ b/.github/workflows/test-e2e-dashboard-score-vizro-ai.yml @@ -1,4 +1,4 @@ -name: Score tests for VizroAI +name: e2e dashboard score tests for VizroAI defaults: run: @@ -12,9 +12,9 @@ env: FORCE_COLOR: 1 jobs: - test-score-vizro-ai-fork: + test-e2e-dashboard-score-vizro-ai-fork: if: ${{ github.event.pull_request.head.repo.fork }} - name: test-score-vizro-ai on Py${{ matrix.config.python-version }} ${{ matrix.config.label }} + name: test-e2e-dashboard-score-vizro-ai on Py${{ matrix.config.python-version }} ${{ matrix.config.label }} runs-on: ubuntu-latest strategy: fail-fast: false @@ -38,9 +38,9 @@ jobs: - name: Passed fork step run: echo "Success!" - test-score-vizro-ai: + test-e2e-dashboard-score-vizro-ai: if: ${{ ! github.event.pull_request.head.repo.fork }} - name: test-score-vizro-ai on Py${{ matrix.config.python-version }} ${{ matrix.config.label }} + name: test-e2e-dashboard-score-vizro-ai on Py${{ matrix.config.python-version }} ${{ matrix.config.label }} runs-on: ubuntu-latest strategy: fail-fast: false @@ -72,8 +72,8 @@ jobs: - name: Show dependency tree run: hatch run ${{ matrix.config.hatch-env }}:pip tree - - name: Run vizro-ai score tests with PyPI vizro - run: hatch run ${{ matrix.config.hatch-env }}:test-score + - name: Run vizro-ai e2e dashboard score tests with PyPI vizro + run: hatch run ${{ matrix.config.hatch-env }}:test-e2e-dashboard-score env: OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} OPENAI_API_BASE: ${{ secrets.OPENAI_API_BASE }} @@ -81,10 +81,10 @@ jobs: BRANCH: ${{ github.head_ref }} PYTHON_VERSION: ${{ matrix.config.python-version }} - - name: Run vizro-ai score tests with local vizro + - name: Run vizro-ai e2e dashboard score tests with local vizro run: | hatch run ${{ matrix.config.hatch-env }}:pip install ../vizro-core - hatch run ${{ matrix.config.hatch-env }}:test-score + hatch run ${{ 
matrix.config.hatch-env }}:test-e2e-dashboard-score env: OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} OPENAI_API_BASE: ${{ secrets.OPENAI_API_BASE }} @@ -99,7 +99,7 @@ jobs: with: payload: | { - "text": "Vizro-ai ${{ matrix.config.hatch-env }} score tests build result: ${{ job.status }}\nBranch: ${{ github.head_ref }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" + "text": "Vizro-ai ${{ matrix.config.hatch-env }} e2e dashboard score tests build result: ${{ job.status }}\nBranch: ${{ github.head_ref }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" } env: SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} @@ -111,10 +111,10 @@ jobs: with: name: Report-${{ matrix.config.python-version }}-${{ matrix.config.label }} path: | - /home/runner/work/vizro/vizro/vizro-ai/tests/score/reports/report*.csv + /home/runner/work/vizro/vizro/vizro-ai/tests/e2e/reports/report*.csv - test-score-vizro-ai-report: - needs: test-score-vizro-ai + test-e2e-dashboard-score-vizro-ai-report: + needs: test-e2e-dashboard-score-vizro-ai runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/test-integration-vizro-ai.yml b/.github/workflows/test-e2e-plot-vizro-ai.yml similarity index 85% rename from .github/workflows/test-integration-vizro-ai.yml rename to .github/workflows/test-e2e-plot-vizro-ai.yml index 95eb14531..5b3e37e16 100644 --- a/.github/workflows/test-integration-vizro-ai.yml +++ b/.github/workflows/test-e2e-plot-vizro-ai.yml @@ -1,4 +1,4 @@ -name: Integration tests for VizroAI +name: e2e plot tests for VizroAI defaults: run: @@ -20,9 +20,9 @@ env: FORCE_COLOR: 1 jobs: - test-integration-vizro-ai-fork: + test-e2e-plot-vizro-ai-fork: if: ${{ github.event.pull_request.head.repo.fork }} - name: test-integration-vizro-ai on Py${{ matrix.config.python-version }} ${{ matrix.config.label }} + name: test-e2e-plot-vizro-ai on Py${{ matrix.config.python-version }} ${{ 
matrix.config.label }} runs-on: ${{ matrix.config.os }} strategy: fail-fast: false @@ -69,9 +69,9 @@ jobs: - name: Passed fork step run: echo "Success!" - test-integration-vizro-ai: + test-e2e-plot-vizro-ai: if: ${{ ! github.event.pull_request.head.repo.fork }} - name: test-integration-vizro-ai on Py${{ matrix.config.python-version }} ${{ matrix.config.label }} + name: test-e2e-plot-vizro-ai on Py${{ matrix.config.python-version }} ${{ matrix.config.label }} runs-on: ${{ matrix.config.os }} strategy: fail-fast: false @@ -126,17 +126,17 @@ jobs: - name: Show dependency tree run: hatch run ${{ matrix.config.hatch-env }}:pip tree - - name: Run vizro-ai integration tests with PyPI vizro - run: hatch run ${{ matrix.config.hatch-env }}:test-integration + - name: Run vizro-ai e2e plot tests with PyPI vizro + run: hatch run ${{ matrix.config.hatch-env }}:test-e2e-plot env: OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} OPENAI_API_BASE: ${{ secrets.OPENAI_API_BASE }} VIZRO_TYPE: pypi - - name: Run vizro-ai integration tests with local vizro + - name: Run vizro-ai e2e plot tests with local vizro run: | hatch run ${{ matrix.config.hatch-env }}:pip install ../vizro-core - hatch run ${{ matrix.config.hatch-env }}:test-integration + hatch run ${{ matrix.config.hatch-env }}:test-e2e-plot env: OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} OPENAI_API_BASE: ${{ secrets.OPENAI_API_BASE }} @@ -149,7 +149,7 @@ jobs: with: payload: | { - "text": "Vizro-ai ${{ matrix.config.hatch-env }} integration tests build result: ${{ job.status }}\nBranch: ${{ github.head_ref }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" + "text": "Vizro-ai ${{ matrix.config.hatch-env }} e2e plot tests build result: ${{ job.status }}\nBranch: ${{ github.head_ref }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" } env: SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/test-e2e-vizro-ai-ui.yml 
b/.github/workflows/test-vizro-ai-ui.yml similarity index 83% rename from .github/workflows/test-e2e-vizro-ai-ui.yml rename to .github/workflows/test-vizro-ai-ui.yml index f6109e100..9c009d5e1 100644 --- a/.github/workflows/test-e2e-vizro-ai-ui.yml +++ b/.github/workflows/test-vizro-ai-ui.yml @@ -1,4 +1,4 @@ -name: e2e tests for VizroAI UI +name: tests for VizroAI UI defaults: run: @@ -20,7 +20,7 @@ env: PYTHON_VERSION: "3.12" jobs: - test-e2e-vizro-ai-ui-fork: + test-vizro-ai-ui-fork: if: ${{ github.event.pull_request.head.repo.fork }} runs-on: ubuntu-latest @@ -30,7 +30,7 @@ jobs: - name: Passed fork step run: echo "Success!" - test-e2e-vizro-ai-ui: + test-vizro-ai-ui: if: ${{ ! github.event.pull_request.head.repo.fork }} runs-on: ubuntu-latest @@ -48,11 +48,10 @@ jobs: - name: Show dependency tree run: hatch run pip tree - - name: Run e2e VizroAI UI tests + - name: Run VizroAI UI tests run: | hatch run vizro-ai-ui - tests/tests_utils/wait-for-it.sh 127.0.0.1:8050 -t 30 - hatch run test-e2e-vizro-ai-ui + hatch run test-vizro-ai-ui env: OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} OPENAI_API_BASE: ${{ secrets.OPENAI_API_BASE }} @@ -61,6 +60,6 @@ jobs: if: failure() uses: ./.github/actions/failed-artifacts-and-slack-notifications env: - TESTS_NAME: e2e VizroAI UI tests + TESTS_NAME: VizroAI UI tests SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} PROJECT_PATH: /home/runner/work/vizro/vizro/vizro-ai/ diff --git a/tools/tests/e2e_common_asserts.py b/tools/tests/e2e_common_asserts.py new file mode 100644 index 000000000..113ee349f --- /dev/null +++ b/tools/tests/e2e_common_asserts.py @@ -0,0 +1,19 @@ +from hamcrest import any_of, assert_that, contains_string + + +def browser_console_warnings_checker(log_level, log_levels): + assert_that( + log_level["message"], + any_of( + contains_string("Invalid prop `persisted_props[0]` of value `on` supplied to `t`"), + contains_string("React does not recognize the `%s` prop on a DOM element"), + 
contains_string("_scrollZoom"), + contains_string("unstable_flushDiscreteUpdates: Cannot flush updates when React is already rendering"), + contains_string("React state update on an unmounted component"), + contains_string("componentWillMount has been renamed"), + contains_string("componentWillReceiveProps has been renamed"), + contains_string("GPU stall due to ReadPixels"), + contains_string("WebGL"), # https://issues.chromium.org/issues/40277080 + ), + reason=f"Error output: {log_levels}", + ) diff --git a/vizro-ai/tests/tests_utils/e2e_waiters.py b/tools/tests/e2e_common_waiters.py similarity index 97% rename from vizro-ai/tests/tests_utils/e2e_waiters.py rename to tools/tests/e2e_common_waiters.py index f75b79cc2..d2716640a 100644 --- a/vizro-ai/tests/tests_utils/e2e_waiters.py +++ b/tools/tests/e2e_common_waiters.py @@ -1,6 +1,5 @@ import time -from e2e_constants import TIMEOUT from selenium.common.exceptions import ( StaleElementReferenceException, ) @@ -8,6 +7,8 @@ from selenium.webdriver.support import expected_conditions from selenium.webdriver.support.wait import WebDriverWait +TIMEOUT = 30 + def wait_for(condition_function, *args): """Function wait for any condition to be True.""" diff --git a/vizro-ai/tests/tests_utils/wait-for-it.sh b/tools/tests/wait-for-it.sh similarity index 100% rename from vizro-ai/tests/tests_utils/wait-for-it.sh rename to tools/tests/wait-for-it.sh diff --git a/vizro-ai/hatch.toml b/vizro-ai/hatch.toml index 6b267093b..07f94a594 100644 --- a/vizro-ai/hatch.toml +++ b/vizro-ai/hatch.toml @@ -50,16 +50,19 @@ prep-release = [ ] pypath = "hatch run python -c 'import sys; print(sys.executable)'" test = "pytest tests {args}" -test-e2e-vizro-ai-ui = "pytest -vs --reruns 1 tests/e2e/test_vizro_ai_ui.py --headless {args}" -test-integration = "pytest -vs --reruns 1 tests/integration --headless {args}" -test-score = "pytest -vs --reruns 1 tests/score --headless {args}" +test-e2e-dashboard-score = "pytest -vs tests/e2e/test_dashboard.py 
--headless {args}" +test-e2e-plot = "pytest -vs --reruns 1 tests/e2e/test_plot.py --headless {args}" test-unit = "pytest tests/unit {args}" test-unit-coverage = [ "coverage run -m pytest tests/unit {args}", "- coverage combine", "coverage report" ] -vizro-ai-ui = "python examples/dashboard_ui/app.py &" +test-vizro-ai-ui = "pytest -vs --reruns 1 tests/vizro_ai_ui/test_vizro_ai_ui.py --headless {args}" +vizro-ai-ui = [ + "python examples/dashboard_ui/app.py &", + "../tools/tests/wait-for-it.sh 127.0.0.1:8051 -t 30" +] [envs.docs] dependencies = [ @@ -79,12 +82,6 @@ build = "mkdocs build --strict" # throwing 403 errors, but these are not real errors. link-check = "linkchecker site --check-extern --no-warnings --ignore=404.html --ignore-url=127.0.0.1 --ignore-url=https://vizro.readthedocs.io/ --ignore-url=https://platform.openai.com/docs/models --ignore-url=openai.com --ignore-url=https://openai.com/" pip = '"{env:HATCH_UV}" pip {args}' -serve = "mkdocs serve --open" - -[envs.hatch-uv] -dependencies = [ - "uv<0.5.10" # https://github.com/astral-sh/uv/issues/10039 -] [envs.lower-bounds] extra-dependencies = ["pydantic==1.10.16"] diff --git a/vizro-ai/pyproject.toml b/vizro-ai/pyproject.toml index c1d3262b9..34197f720 100644 --- a/vizro-ai/pyproject.toml +++ b/vizro-ai/pyproject.toml @@ -70,7 +70,7 @@ filterwarnings = [ # Ignore deprecation warning until this is solved: https://github.com/plotly/dash/issues/2590: "ignore:HTTPResponse.getheader():DeprecationWarning" ] -pythonpath = ["tests/tests_utils", "../tools/tests"] +pythonpath = ["../tools/tests"] [tool.ruff] extend = "../pyproject.toml" diff --git a/vizro-ai/tests/e2e/test_dashboard.py b/vizro-ai/tests/e2e/test_dashboard.py index 53d2e9033..53786f451 100644 --- a/vizro-ai/tests/e2e/test_dashboard.py +++ b/vizro-ai/tests/e2e/test_dashboard.py @@ -55,7 +55,7 @@ def logic( # noqa: PLR0912, PLR0915 config: json config of the expected dashboard """ - report_dir = "tests/score/reports" + report_dir = "tests/e2e/reports" 
os.makedirs(report_dir, exist_ok=True) app = Vizro().build(dashboard).dash diff --git a/vizro-ai/tests/tests_utils/__init__.py b/vizro-ai/tests/tests_utils/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/vizro-ai/tests/tests_utils/e2e_asserts.py b/vizro-ai/tests/tests_utils/e2e_asserts.py deleted file mode 100644 index 7896479e7..000000000 --- a/vizro-ai/tests/tests_utils/e2e_asserts.py +++ /dev/null @@ -1,30 +0,0 @@ -from e2e_constants import ( - INVALID_PROP_ERROR, - REACT_NOT_RECOGNIZE_ERROR, - REACT_RENDERING_ERROR, - READPIXELS_WARNING, - SCROLL_ZOOM_ERROR, - UNMOUNT_COMPONENTS_ERROR, - WEBGL_WARNING, - WILLMOUNT_RENAMED_WARNING, - WILLRECEIVEPROPS_RENAMED_WARNING, -) -from hamcrest import any_of, assert_that, contains_string - - -def browser_console_warnings_checker(log_level, log_levels): - assert_that( - log_level["message"], - any_of( - contains_string(INVALID_PROP_ERROR), - contains_string(REACT_NOT_RECOGNIZE_ERROR), - contains_string(SCROLL_ZOOM_ERROR), - contains_string(REACT_RENDERING_ERROR), - contains_string(UNMOUNT_COMPONENTS_ERROR), - contains_string(WILLMOUNT_RENAMED_WARNING), - contains_string(WILLRECEIVEPROPS_RENAMED_WARNING), - contains_string(READPIXELS_WARNING), - contains_string(WEBGL_WARNING), - ), - reason=f"Error outoput: {log_levels}", - ) diff --git a/vizro-ai/tests/tests_utils/e2e_constants.py b/vizro-ai/tests/tests_utils/e2e_constants.py deleted file mode 100644 index 211535976..000000000 --- a/vizro-ai/tests/tests_utils/e2e_constants.py +++ /dev/null @@ -1,11 +0,0 @@ -INVALID_PROP_ERROR = "Invalid prop `persisted_props[0]` of value `on` supplied to `t`" -REACT_NOT_RECOGNIZE_ERROR = "React does not recognize the `%s` prop on a DOM element" -SCROLL_ZOOM_ERROR = "_scrollZoom" -REACT_RENDERING_ERROR = "unstable_flushDiscreteUpdates: Cannot flush updates when React is already rendering" -UNMOUNT_COMPONENTS_ERROR = "React state update on an unmounted component" -WILLMOUNT_RENAMED_WARNING = "componentWillMount 
has been renamed" -WILLRECEIVEPROPS_RENAMED_WARNING = "componentWillReceiveProps has been renamed" -READPIXELS_WARNING = "GPU stall due to ReadPixels" -WEBGL_WARNING = "WebGL" # https://issues.chromium.org/issues/40277080 - -TIMEOUT = 30 diff --git a/vizro-ai/tests/vizro_ai_ui/conftest.py b/vizro-ai/tests/vizro_ai_ui/conftest.py index 5675e704d..b5afa7295 100644 --- a/vizro-ai/tests/vizro_ai_ui/conftest.py +++ b/vizro-ai/tests/vizro_ai_ui/conftest.py @@ -1,7 +1,7 @@ from datetime import datetime import pytest -from e2e_asserts import browser_console_warnings_checker +from e2e_common_asserts import browser_console_warnings_checker from selenium import webdriver from selenium.webdriver.chrome.options import Options diff --git a/vizro-ai/tests/tests_utils/e2e_fake_data_generator.py b/vizro-ai/tests/vizro_ai_ui/fake_data_generator.py similarity index 100% rename from vizro-ai/tests/tests_utils/e2e_fake_data_generator.py rename to vizro-ai/tests/vizro_ai_ui/fake_data_generator.py diff --git a/vizro-ai/tests/vizro_ai_ui/test_vizro_ai_ui.py b/vizro-ai/tests/vizro_ai_ui/test_vizro_ai_ui.py index e3a3eb6b2..fca260e5f 100644 --- a/vizro-ai/tests/vizro_ai_ui/test_vizro_ai_ui.py +++ b/vizro-ai/tests/vizro_ai_ui/test_vizro_ai_ui.py @@ -1,13 +1,13 @@ import os import pytest -from e2e_fake_data_generator import create_genre_popularity_by_country -from e2e_waiters import ( +from e2e_common_waiters import ( wait_for, webdriver_click_waiter, webdriver_waiter, webdriver_waiter_css, ) +from fake_data_generator import create_genre_popularity_by_country from selenium.common import InvalidSelectorException, TimeoutException @@ -20,7 +20,7 @@ def test_chart_ui(chromedriver): # Create test dataset popularity_dataset = create_genre_popularity_by_country(start_year=1980, end_year=2023, records_per_year=10) # Save to a CSV file - popularity_dataset.to_csv("tests/tests_utils/genre_popularity_by_country.csv", index=False) + 
popularity_dataset.to_csv("tests/vizro_ai_ui/genre_popularity_by_country.csv", index=False) # fill in values api_key = webdriver_waiter(chromedriver, '//*[@id="settings-api-key"]') @@ -33,7 +33,7 @@ def test_chart_ui(chromedriver): # upload file file_input = webdriver_waiter_css(chromedriver, 'input[type="file"]') - file_input.send_keys(os.path.abspath("tests/tests_utils/genre_popularity_by_country.csv")) + file_input.send_keys(os.path.abspath("tests/vizro_ai_ui/genre_popularity_by_country.csv")) webdriver_click_waiter(chromedriver, '//*[@id="data-upload"]') # enter prompt diff --git a/vizro-core/hatch.toml b/vizro-core/hatch.toml index de9860689..c18cc24fd 100644 --- a/vizro-core/hatch.toml +++ b/vizro-core/hatch.toml @@ -111,11 +111,6 @@ template = "examples" DASH_DEBUG = "true" VIZRO_LOG_LEVEL = "DEBUG" -[envs.hatch-uv] -dependencies = [ - "uv<0.5.10" # https://github.com/astral-sh/uv/issues/10039 -] - [envs.lower-bounds] extra-dependencies = [ "pydantic==1.10.16",