diff --git a/.github/actions/breeze/action.yml b/.github/actions/breeze/action.yml index dee5f48eb1610..cc6005f39554a 100644 --- a/.github/actions/breeze/action.yml +++ b/.github/actions/breeze/action.yml @@ -24,7 +24,7 @@ inputs: default: "3.10" uv-version: description: 'uv version to use' - default: "0.11.1" # Keep this comment to allow automatic replacement of uv version + default: "0.11.3" # Keep this comment to allow automatic replacement of uv version outputs: host-python-version: description: Python version used in host diff --git a/.github/actions/install-prek/action.yml b/.github/actions/install-prek/action.yml index 4f2cbc47e1b6d..7e43c3245f44a 100644 --- a/.github/actions/install-prek/action.yml +++ b/.github/actions/install-prek/action.yml @@ -24,7 +24,7 @@ inputs: default: "3.10" uv-version: description: 'uv version to use' - default: "0.11.1" # Keep this comment to allow automatic replacement of uv version + default: "0.11.3" # Keep this comment to allow automatic replacement of uv version prek-version: description: 'prek version to use' default: "0.3.8" # Keep this comment to allow automatic replacement of prek version diff --git a/.github/actions/prepare_breeze_and_image/action.yml b/.github/actions/prepare_breeze_and_image/action.yml index 753cfd0378231..2498caad2b80d 100644 --- a/.github/actions/prepare_breeze_and_image/action.yml +++ b/.github/actions/prepare_breeze_and_image/action.yml @@ -45,6 +45,9 @@ runs: shell: bash run: ./scripts/ci/make_mnt_writeable.sh if: inputs.make-mnt-writeable-and-cleanup == 'true' + - name: "Free up disk space" + shell: bash + run: ./scripts/tools/free_up_disk_space.sh - name: "Install Breeze" uses: ./.github/actions/breeze id: breeze diff --git a/.github/instructions/code-review.instructions.md b/.github/instructions/code-review.instructions.md index 0d4ce8a87913a..cd480bdcaf706 100644 --- a/.github/instructions/code-review.instructions.md +++ b/.github/instructions/code-review.instructions.md @@ -11,7 +11,7 @@ 
Use these rules when reviewing pull requests to the Apache Airflow repository. - **Scheduler must never run user code.** It only processes serialized Dags. Flag any scheduler-path code that deserializes or executes Dag/task code. - **Flag any task execution code that accesses the metadata DB directly** instead of through the Execution API (`/execution` endpoints). -- **Flag any code in Dag Processor or Triggerer that breaks process isolation** — these components run user code in isolated processes. +- **Flag any code in Dag Processor or Triggerer that breaks process isolation** — these components run user code in separate processes from the Scheduler and API Server, but note that they potentially have direct metadata database access and potentially bypass JWT authentication via in-process Execution API transport. This is an intentional design choice documented in the security model, not a security vulnerability. - **Flag any provider importing core internals** like `SUPERVISOR_COMMS` or task-runner plumbing. Providers interact through the public SDK and execution API only. 
## Database and Query Correctness diff --git a/.github/workflows/additional-ci-image-checks.yml b/.github/workflows/additional-ci-image-checks.yml index 6a33cea24a937..e39fcbd43b546 100644 --- a/.github/workflows/additional-ci-image-checks.yml +++ b/.github/workflows/additional-ci-image-checks.yml @@ -116,8 +116,10 @@ jobs: (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') # Check that after earlier cache push, breeze command will build quickly + # This build is a bit slow for from-the-scratch builds, so we should run it only in + # regular PRs check-that-image-builds-quickly: - timeout-minutes: 17 + timeout-minutes: 25 name: Check that image builds quickly runs-on: ${{ fromJSON(inputs.runners) }} env: @@ -141,4 +143,6 @@ jobs: - name: "Install Breeze" uses: ./.github/actions/breeze - name: "Check that image builds quickly" - run: breeze shell --max-time 900 --platform "${PLATFORM}" + # Synchronize to be a little bit shorter than above timeout-minutes to make sure that + # if the build takes too long the job will fail with logs. 22 minutes * 60 s = 1320 seconds + run: breeze shell --max-time 1320 --platform "${PLATFORM}" diff --git a/.github/workflows/asf-allowlist-check.yml b/.github/workflows/asf-allowlist-check.yml new file mode 100644 index 0000000000000..a91ab62975390 --- /dev/null +++ b/.github/workflows/asf-allowlist-check.yml @@ -0,0 +1,34 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +--- +name: "ASF Allowlist Check" +"on": + pull_request: + paths: [".github/**"] + push: + branches: [main, v*-test] + paths: [".github/**"] +permissions: + contents: read +jobs: + asf-allowlist-check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + - uses: apache/infrastructure-actions/allowlist-check@4e9c961f587f72b170874b6f5cd4ac15f7f26eb8 # main diff --git a/.github/workflows/basic-tests.yml b/.github/workflows/basic-tests.yml index 87909137c1a60..7b71ae5c6e6c3 100644 --- a/.github/workflows/basic-tests.yml +++ b/.github/workflows/basic-tests.yml @@ -74,7 +74,7 @@ on: # yamllint disable-line rule:truthy type: string uv-version: description: 'uv version to use' - default: "0.11.1" # Keep this comment to allow automatic replacement of uv version + default: "0.11.3" # Keep this comment to allow automatic replacement of uv version type: string platform: description: 'Platform for the build - linux/amd64 or linux/arm64' diff --git a/.github/workflows/check-newsfragment-pr-number.yml b/.github/workflows/check-newsfragment-pr-number.yml index 3e161ed7b2343..be31eae726e85 100644 --- a/.github/workflows/check-newsfragment-pr-number.yml +++ b/.github/workflows/check-newsfragment-pr-number.yml @@ -21,9 +21,6 @@ on: # yamllint disable-line rule:truthy pull_request: branches: - main - - v[0-9]+-[0-9]+-test - - v[0-9]+-[0-9]+-stable - - providers-[a-z]+-?[a-z]*/v[0-9]+-[0-9]+ types: [opened, reopened, synchronize, labeled, unlabeled] permissions: @@ -32,7 +29,7 @@ 
permissions: concurrency: group: check-newsfragment-${{ github.event.pull_request.number }} - cancel-in-progress: true + cancel-in-progress: false jobs: check-newsfragment-pr-number: diff --git a/.github/workflows/ci-amd-arm.yml b/.github/workflows/ci-amd-arm.yml index 53f0a09764a93..c094a6ee82565 100644 --- a/.github/workflows/ci-amd-arm.yml +++ b/.github/workflows/ci-amd-arm.yml @@ -40,7 +40,7 @@ env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} - UV_VERSION: "0.11.1" # Keep this comment to allow automatic replacement of uv version + UV_VERSION: "0.11.3" # Keep this comment to allow automatic replacement of uv version VERBOSE: "true" concurrency: @@ -91,7 +91,6 @@ jobs: kubernetes-versions-list-as-string: >- ${{ steps.selective-checks.outputs.kubernetes-versions-list-as-string }} latest-versions-only: ${{ steps.selective-checks.outputs.latest-versions-only }} - mypy-checks: ${{ steps.selective-checks.outputs.mypy-checks }} mysql-exclude: ${{ steps.selective-checks.outputs.mysql-exclude }} mysql-versions: ${{ steps.selective-checks.outputs.mysql-versions }} platform: ${{ steps.selective-checks.outputs.platform }} @@ -115,7 +114,7 @@ jobs: run-go-sdk-tests: ${{ steps.selective-checks.outputs.run-go-sdk-tests }} run-helm-tests: ${{ steps.selective-checks.outputs.run-helm-tests }} run-kubernetes-tests: ${{ steps.selective-checks.outputs.run-kubernetes-tests }} - run-mypy: ${{ steps.selective-checks.outputs.run-mypy }} + run-mypy-providers: ${{ steps.selective-checks.outputs.run-mypy-providers }} run-remote-logging-elasticsearch-e2e-tests: ${{ steps.selective-checks.outputs.run-remote-logging-elasticsearch-e2e-tests }} run-remote-logging-s3-e2e-tests: ${{ steps.selective-checks.outputs.run-remote-logging-s3-e2e-tests }} run-system-tests: ${{ steps.selective-checks.outputs.run-system-tests }} @@ -307,8 +306,6 @@ jobs: with: runners: ${{ needs.build-info.outputs.runner-type }} platform: 
${{ needs.build-info.outputs.platform }} - run-mypy: ${{ needs.build-info.outputs.run-mypy }} - mypy-checks: ${{ needs.build-info.outputs.mypy-checks }} python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }} branch: ${{ needs.build-info.outputs.default-branch }} canary-run: ${{ needs.build-info.outputs.canary-run }} @@ -333,6 +330,48 @@ jobs: DOCS_AWS_SECRET_ACCESS_KEY: ${{ secrets.DOCS_AWS_SECRET_ACCESS_KEY }} SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + mypy-providers: + timeout-minutes: 45 + name: "MyPy providers checks" + needs: [build-info, build-ci-images] + runs-on: ${{ fromJSON(needs.build-info.outputs.runner-type) }} + if: needs.build-info.outputs.run-mypy-providers == 'true' + env: + PYTHON_MAJOR_MINOR_VERSION: "${{ needs.build-info.outputs.default-python-version }}" + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - name: "Cleanup repo" + shell: bash + run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + - name: "Prepare breeze & CI image: ${{ needs.build-info.outputs.default-python-version }}" + uses: ./.github/actions/prepare_breeze_and_image + with: + platform: ${{ needs.build-info.outputs.platform }} + python: "${{ needs.build-info.outputs.default-python-version }}" + use-uv: ${{ needs.build-info.outputs.use-uv }} + make-mnt-writeable-and-cleanup: true + id: breeze + - name: "Install prek" + uses: ./.github/actions/install-prek + id: prek + with: + python-version: ${{steps.breeze.outputs.host-python-version}} + platform: ${{ needs.build-info.outputs.platform }} + save-cache: false + - name: "MyPy checks for providers" + run: prek --color always --verbose --stage manual mypy-providers --all-files + env: + VERBOSE: "false" + COLUMNS: "202" + SKIP_GROUP_OUTPUT: "true" + DEFAULT_BRANCH: ${{ 
needs.build-info.outputs.default-branch }} + RUFF_FORMAT: "github" + INCLUDE_MYPY_VOLUME: "false" + providers: name: "provider distributions tests" uses: ./.github/workflows/test-providers.yml @@ -843,7 +882,7 @@ jobs: persist-credentials: false # keep this in sync with go.mod in go-sdk/ - name: Setup Go - uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 + uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0 with: go-version: 1.24 cache-dependency-path: go-sdk/go.sum @@ -895,6 +934,7 @@ jobs: - build-prod-images - ci-image-checks - generate-constraints + - mypy-providers - providers - tests-helm - tests-integration-system diff --git a/.github/workflows/ci-image-checks.yml b/.github/workflows/ci-image-checks.yml index 9d21ad2f92c40..1f25943204179 100644 --- a/.github/workflows/ci-image-checks.yml +++ b/.github/workflows/ci-image-checks.yml @@ -28,14 +28,6 @@ on: # yamllint disable-line rule:truthy description: "Platform for the build - 'linux/amd64' or 'linux/arm64'" required: true type: string - run-mypy: - description: "Whether to run mypy checks (true/false)" - required: true - type: string - mypy-checks: - description: "List of folders to run mypy checks on" - required: false - type: string python-versions-list-as-string: description: "The list of python versions as string separated by spaces" required: true @@ -169,55 +161,6 @@ jobs: run: cat ~/.cache/prek/prek.log || true if: failure() - mypy: - timeout-minutes: 45 - name: "MyPy checks" - runs-on: ${{ fromJSON(inputs.runners) }} - if: inputs.run-mypy == 'true' - strategy: - fail-fast: false - matrix: - mypy-check: ${{ fromJSON(inputs.mypy-checks) }} - env: - PYTHON_MAJOR_MINOR_VERSION: "${{inputs.default-python-version}}" - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - steps: - - name: "Cleanup repo" - shell: bash - run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - 
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - with: - persist-credentials: false - - name: "Free up disk space" - shell: bash - run: ./scripts/tools/free_up_disk_space.sh - - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" - uses: ./.github/actions/prepare_breeze_and_image - with: - platform: ${{ inputs.platform }} - python: "${{ inputs.default-python-version }}" - use-uv: ${{ inputs.use-uv }} - make-mnt-writeable-and-cleanup: true - id: breeze - - name: "Install prek" - uses: ./.github/actions/install-prek - id: prek - with: - python-version: ${{steps.breeze.outputs.host-python-version}} - platform: ${{ inputs.platform }} - save-cache: false - - name: "MyPy checks for ${{ matrix.mypy-check }}" - run: prek --color always --verbose --stage manual "$MYPY_CHECK" --all-files - env: - VERBOSE: "false" - COLUMNS: "202" - SKIP_GROUP_OUTPUT: "true" - DEFAULT_BRANCH: ${{ inputs.branch }} - RUFF_FORMAT: "github" - INCLUDE_MYPY_VOLUME: "false" - MYPY_CHECK: ${{ matrix.mypy-check }} - build-docs: timeout-minutes: 150 name: "Build documentation" diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 0c21c6e43d652..a6ae94fd3fa65 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -52,15 +52,15 @@ jobs: persist-credentials: false - name: Initialize CodeQL - uses: github/codeql-action/init@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6 + uses: github/codeql-action/init@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1 with: languages: ${{ matrix.language }} - name: Autobuild - uses: github/codeql-action/autobuild@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6 + uses: github/codeql-action/autobuild@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1 - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6 + uses: 
github/codeql-action/analyze@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1 with: # Provide more context to the SARIF output (shows up in run.automationDetails.id field) category: "/language:${{matrix.language}}" diff --git a/.github/workflows/k8s-tests.yml b/.github/workflows/k8s-tests.yml index ac86ac2ee4497..dd45336d5e79f 100644 --- a/.github/workflows/k8s-tests.yml +++ b/.github/workflows/k8s-tests.yml @@ -84,9 +84,6 @@ jobs: uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: persist-credentials: false - - name: "Free up disk space" - shell: bash - run: ./scripts/tools/free_up_disk_space.sh # env.PYTHON_MAJOR_MINOR_VERSION, env.KUBERNETES_VERSION are set in the previous # step id: prepare-versions - name: "Prepare breeze & PROD image: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}" diff --git a/.github/workflows/registry-backfill.yml b/.github/workflows/registry-backfill.yml index 62483ca8c3d6b..a421ee4680a3d 100644 --- a/.github/workflows/registry-backfill.yml +++ b/.github/workflows/registry-backfill.yml @@ -118,7 +118,7 @@ jobs: done - name: "Install uv" - uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7.6.0 + uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0 - name: "Install Breeze" uses: ./.github/actions/breeze diff --git a/.github/workflows/registry-tests.yml b/.github/workflows/registry-tests.yml index 38143ab8a6bf3..2c38e724b8658 100644 --- a/.github/workflows/registry-tests.yml +++ b/.github/workflows/registry-tests.yml @@ -50,7 +50,7 @@ jobs: persist-credentials: false - name: "Install uv" - uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7.6.0 + uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0 with: python-version: "3.12" diff --git a/.github/workflows/release_dockerhub_image.yml b/.github/workflows/release_dockerhub_image.yml index 67750688f293c..3614f4c7eefb3 100644 --- a/.github/workflows/release_dockerhub_image.yml +++ 
b/.github/workflows/release_dockerhub_image.yml @@ -58,7 +58,7 @@ jobs: AIRFLOW_VERSION: ${{ github.event.inputs.airflowVersion }} AMD_ONLY: ${{ github.event.inputs.amdOnly }} LIMIT_PYTHON_VERSIONS: ${{ github.event.inputs.limitPythonVersions }} - UV_VERSION: "0.11.1" # Keep this comment to allow automatic replacement of uv version + UV_VERSION: "0.11.3" # Keep this comment to allow automatic replacement of uv version if: contains(fromJSON('[ "ashb", "bugraoz93", diff --git a/.github/workflows/test-providers.yml b/.github/workflows/test-providers.yml index d6c268db849e0..db1f31f2453aa 100644 --- a/.github/workflows/test-providers.yml +++ b/.github/workflows/test-providers.yml @@ -92,9 +92,6 @@ jobs: uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: persist-credentials: false - - name: "Free up disk space" - shell: bash - run: ./scripts/tools/free_up_disk_space.sh - name: "Install prek" uses: ./.github/actions/install-prek id: prek @@ -201,9 +198,6 @@ jobs: uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: persist-credentials: false - - name: "Free up disk space" - shell: bash - run: ./scripts/tools/free_up_disk_space.sh - name: "Install prek" uses: ./.github/actions/install-prek id: prek diff --git a/.github/workflows/update-constraints-on-push.yml b/.github/workflows/update-constraints-on-push.yml index 3dcd806c3aea1..182ad7374e4bc 100644 --- a/.github/workflows/update-constraints-on-push.yml +++ b/.github/workflows/update-constraints-on-push.yml @@ -26,6 +26,10 @@ on: # yamllint disable-line rule:truthy - 'uv.lock' permissions: contents: read + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true env: GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c94bd75e18ac6..c8c3f701c5545 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -256,7 +256,7 @@ 
repos: (?x) ^\.pre-commit-config\.yaml$| ^\.github/\.pre-commit-config\.yaml$| - ^scripts/ci/prek/update_installers_and_prek\.py$ + ^scripts/ci/prek/upgrade_important_versions\.py$ pass_filenames: false require_serial: true - repo: https://github.com/adamchainz/blacken-docs @@ -448,7 +448,7 @@ repos: types_or: [python, pyi] args: [--fix] require_serial: true - additional_dependencies: ['ruff==0.15.7'] + additional_dependencies: ['ruff==0.15.9'] exclude: ^airflow-core/tests/unit/dags/test_imports\.py$|^performance/tests/test_.*\.py$ - id: ruff-format name: Run 'ruff format' @@ -1013,39 +1013,23 @@ repos: ^uv\.lock$ pass_filenames: false require_serial: true - ## ADD MOST PREK HOOK ABOVE THAT LINE - # The below prek hooks are those requiring CI image to be built - ## ONLY ADD PREK HOOKS HERE THAT REQUIRE CI IMAGE - id: mypy-dev - stages: ['pre-push'] name: Run mypy for dev language: python - entry: ./scripts/ci/prek/mypy.py - files: ^dev/.*\.py$|^scripts/.*\.py$ - require_serial: true - - id: mypy-dev - stages: ['manual'] - name: Run mypy for dev (manual) - language: python - entry: ./scripts/ci/prek/mypy_folder.py dev scripts + entry: ./scripts/ci/prek/mypy_local_folder.py dev scripts pass_filenames: false files: ^.*\.py$ require_serial: true - id: mypy-devel-common - stages: ['pre-push'] name: Run mypy for devel-common language: python - entry: ./scripts/ci/prek/mypy.py - files: ^devel-common/.*\.py$ - require_serial: true - - id: mypy-devel-common - stages: ['manual'] - name: Run mypy for devel-common (manual) - language: python - entry: ./scripts/ci/prek/mypy_folder.py devel-common + entry: ./scripts/ci/prek/mypy_local_folder.py devel-common pass_filenames: false files: ^.*\.py$ require_serial: true + ## ADD MOST PREK HOOK ABOVE THAT LINE + # The below prek hooks are those requiring CI image to be built + ## ONLY ADD PREK HOOKS HERE THAT REQUIRE CI IMAGE - id: check-template-fields-valid name: Check templated fields mapped in operators/sensors language: 
python @@ -1085,17 +1069,3 @@ repos: language: python files: .*test.*\.py$ pass_filenames: true - # This is a manual hook, run by `breeze ci upgrade` - upgrading all dependencies inside the - # Breeze CI image - which allows checking all dependencies for all providers. - # ALWAYS keep it at the end so that it can take into account all the other hook's changes. - - id: update-uv-lock - stages: ['manual'] - name: Update uv.lock (manual) - entry: breeze run uv lock --upgrade - language: system - files: > - (?x) - (^|/)pyproject\.toml$| - ^uv\.lock$ - pass_filenames: false - require_serial: true diff --git a/AGENTS.md b/AGENTS.md index f01a011273325..36af179f50b9d 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -30,7 +30,8 @@ - **Run other suites of tests** `breeze testing <group>` (test groups: `airflow-ctl-tests`, `docker-compose-tests`, `task-sdk-tests`) - **Run scripts tests:** `uv run --project scripts pytest scripts/tests/ -xvs` - **Run Airflow CLI:** `breeze run airflow dags list` -- **Type-check:** `breeze run mypy path/to/code` +- **Type-check (non-providers):** `uv run --project <project> --with "apache-airflow-devel-common[mypy]" mypy path/to/code` +- **Type-check (providers):** `breeze run mypy path/to/code` - **Lint with ruff only:** `prek run ruff --from-ref <ref>` - **Format with ruff only:** `prek run ruff-format --from-ref <ref>` - **Run regular (fast) static checks:** `prek run --from-ref <ref> --stage pre-commit` @@ -66,15 +67,38 @@ UV workspace monorepo. Key paths: ## Architecture Boundaries 1. Users author Dags with the Task SDK (`airflow.sdk`). -2. Dag File Processor parses Dag files in separate processes and stores serialized Dags in the metadata DB. +2. Dag File Processor parses Dag files in separate processes and stores serialized Dags in the metadata DB. Software guards prevent individual parsing processes from accessing the database directly and enforce use of the Execution API, but these guards do not protect against intentional bypassing by malicious or misconfigured code. 3. 
Scheduler reads serialized Dags — **never runs user code** — and creates Dag runs / task instances. -4. Workers execute tasks via Task SDK and communicate with the API server through the Execution API — **never access the metadata DB directly**. +4. Workers execute tasks via Task SDK and communicate with the API server through the Execution API — **never access the metadata DB directly**. Each task receives a short-lived JWT token scoped to its task instance ID. 5. API Server serves the React UI and handles all client-database interactions. -6. Triggerer evaluates deferred tasks/sensors in isolated processes. +6. Triggerer evaluates deferred tasks/sensors in separate processes. Like the Dag File Processor, software guards steer it through the Execution API rather than direct database access, but these guards do not protect against intentional bypassing by malicious or misconfigured code. 7. Shared libraries that are symbolically linked to different Python distributions are in `shared` folder. 8. Airflow uses `uv workspace` feature to keep all the distributions sharing dependencies and venv 9. Each of the distributions should declare other needed distributions: `uv --project <project> sync` command acts on the selected project in the monorepo with only dependencies that it has +## Security Model + +When reviewing code, writing security documentation, or performing security research, keep in +mind the following aspects of Airflow's security model. The authoritative reference is +[`airflow-core/docs/security/security_model.rst`](airflow-core/docs/security/security_model.rst) +and [`airflow-core/docs/security/jwt_token_authentication.rst`](airflow-core/docs/security/jwt_token_authentication.rst). + +Particularly, the intentional design choices that are not security vulnerabilities and should not be +reported as such are described in the "What is NOT considered a security vulnerability" chapter of the security model. + +**When flagging security concerns, distinguish between:** + +1. 
**Actual vulnerabilities** — code that violates the documented security model (e.g., a worker + gaining database access it shouldn't have, a Scheduler executing user code, an unauthenticated + user accessing protected endpoints). +2. **Known limitations** — documented gaps where the current implementation doesn't provide full + isolation (e.g., DFP/Triggerer database access, shared Execution API resources, multi-team + not enforcing task-level isolation). These are tracked for improvement in future versions and + should not be reported as new findings. +3. **Deployment hardening opportunities** — measures a Deployment Manager can take to improve + isolation beyond what Airflow enforces natively (e.g., per-component configuration, asymmetric + JWT keys, network policies). These belong in deployment guidance, not as code-level issues. + # Shared libraries - shared libraries provide implementation of some common utilities like logging, configuration where the code should be reused in different distributions (potentially in different versions) @@ -146,7 +170,7 @@ code review checklist in [`.github/instructions/code-review.instructions.md`](.g 3. Confirm the code follows the project's coding standards and architecture boundaries described in this file. 4. Run regular (fast) static checks (`prek run --from-ref <ref> --stage pre-commit`) - and fix any failures. + and fix any failures. This includes mypy checks for non-provider projects (airflow-core, task-sdk, airflow-ctl, dev, scripts, devel-common). 5. Run manual (slower) checks (`prek run --from-ref <ref> --stage manual`) and fix any failures. 6. Run relevant individual tests and confirm they pass. 7. Find which tests to run for the changes with selective-checks and run those tests in parallel to confirm they pass and check for CI-specific issues. 
diff --git a/Dockerfile b/Dockerfile index 99da53498994e..d56e1fb165f8d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -73,7 +73,7 @@ ARG PYTHON_LTO="true" # Also use `force pip` label on your PR to swap all places we use `uv` to `pip` ARG AIRFLOW_PIP_VERSION=26.0.1 # ARG AIRFLOW_PIP_VERSION="git+https://github.com/pypa/pip.git@main" -ARG AIRFLOW_UV_VERSION=0.11.1 +ARG AIRFLOW_UV_VERSION=0.11.3 ARG AIRFLOW_USE_UV="false" ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow" ARG AIRFLOW_IMAGE_README_URL="https://raw.githubusercontent.com/apache/airflow/main/docs/docker-stack/README.md" @@ -122,6 +122,8 @@ fi AIRFLOW_PYTHON_VERSION=${AIRFLOW_PYTHON_VERSION:-3.10.18} PYTHON_LTO=${PYTHON_LTO:-true} GOLANG_MAJOR_MINOR_VERSION=${GOLANG_MAJOR_MINOR_VERSION:-1.24.4} +RUSTUP_DEFAULT_TOOLCHAIN=${RUSTUP_DEFAULT_TOOLCHAIN:-stable} +RUSTUP_VERSION=${RUSTUP_VERSION:-1.29.0} COSIGN_VERSION=${COSIGN_VERSION:-3.0.5} if [[ "${1}" == "runtime" ]]; then @@ -493,6 +495,33 @@ function install_golang() { rm -rf /usr/local/go && tar -C /usr/local -xzf go"${GOLANG_MAJOR_MINOR_VERSION}".linux.tar.gz } +function install_rustup() { + local arch + arch="$(dpkg --print-architecture)" + declare -A rustup_targets=( + [amd64]="x86_64-unknown-linux-gnu" + [arm64]="aarch64-unknown-linux-gnu" + ) + declare -A rustup_sha256s=( + # https://static.rust-lang.org/rustup/archive/${RUSTUP_VERSION}/{target}/rustup-init.sha256 + [amd64]="4acc9acc76d5079515b46346a485974457b5a79893cfb01112423c89aeb5aa10" + [arm64]="9732d6c5e2a098d3521fca8145d826ae0aaa067ef2385ead08e6feac88fa5792" + ) + local target="${rustup_targets[${arch}]}" + local rustup_sha256="${rustup_sha256s[${arch}]}" + if [[ -z "${target}" ]]; then + echo "Unsupported architecture for rustup: ${arch}" + exit 1 + fi + curl --proto '=https' --tlsv1.2 -sSf \ + "https://static.rust-lang.org/rustup/archive/${RUSTUP_VERSION}/${target}/rustup-init" \ + -o /tmp/rustup-init + echo "${rustup_sha256} /tmp/rustup-init" | sha256sum --check + chmod 
+x /tmp/rustup-init + /tmp/rustup-init -y --default-toolchain "${RUSTUP_DEFAULT_TOOLCHAIN}" + rm -f /tmp/rustup-init +} + function apt_clean() { apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false rm -rf /var/lib/apt/lists/* /var/log/* @@ -508,6 +537,7 @@ else install_debian_dev_dependencies install_python install_additional_dev_dependencies + install_rustup if [[ "${INSTALLATION_TYPE}" == "CI" ]]; then install_golang fi @@ -1417,6 +1447,14 @@ EOF COPY <<"EOF" /entrypoint_prod.sh #!/usr/bin/env bash AIRFLOW_COMMAND="${1:-}" +AIRFLOW_COMMAND_TO_RUN="${AIRFLOW_COMMAND}" +if [[ "${AIRFLOW_COMMAND}" == "airflow" ]]; then + AIRFLOW_COMMAND_TO_RUN="${2:-}" +elif [[ "${AIRFLOW_COMMAND}" =~ ^(bash|sh)$ ]] \ + && [[ "${2:-}" == "-c" ]] \ + && [[ "${3:-}" =~ (^|[[:space:]])(exec[[:space:]]+)?airflow[[:space:]]+(scheduler|dag-processor|triggerer|api-server)([[:space:]]|$) ]]; then + AIRFLOW_COMMAND_TO_RUN="${BASH_REMATCH[3]}" +fi set -euo pipefail @@ -1668,7 +1706,8 @@ readonly CONNECTION_CHECK_SLEEP_TIME create_system_user_if_missing set_pythonpath_for_root_user -if [[ "${CONNECTION_CHECK_MAX_COUNT}" -gt "0" ]]; then +if [[ "${CONNECTION_CHECK_MAX_COUNT}" -gt "0" ]] \ + && [[ ${AIRFLOW_COMMAND_TO_RUN} =~ ^(scheduler|dag-processor|triggerer|api-server)$ ]]; then wait_for_airflow_db fi @@ -1834,6 +1873,10 @@ ENV DEV_APT_DEPS=${DEV_APT_DEPS} \ ARG PYTHON_LTO +ENV RUSTUP_HOME="/usr/local/rustup" +ENV CARGO_HOME="/usr/local/cargo" +ENV PATH="${CARGO_HOME}/bin:${PATH}" + COPY --from=scripts install_os_dependencies.sh /scripts/docker/ RUN PYTHON_LTO=${PYTHON_LTO} bash /scripts/docker/install_os_dependencies.sh dev diff --git a/Dockerfile.ci b/Dockerfile.ci index 1c51fa875060f..7aaf2814e3e38 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -62,6 +62,8 @@ fi AIRFLOW_PYTHON_VERSION=${AIRFLOW_PYTHON_VERSION:-3.10.18} PYTHON_LTO=${PYTHON_LTO:-true} GOLANG_MAJOR_MINOR_VERSION=${GOLANG_MAJOR_MINOR_VERSION:-1.24.4} 
+RUSTUP_DEFAULT_TOOLCHAIN=${RUSTUP_DEFAULT_TOOLCHAIN:-stable} +RUSTUP_VERSION=${RUSTUP_VERSION:-1.29.0} COSIGN_VERSION=${COSIGN_VERSION:-3.0.5} if [[ "${1}" == "runtime" ]]; then @@ -433,6 +435,33 @@ function install_golang() { rm -rf /usr/local/go && tar -C /usr/local -xzf go"${GOLANG_MAJOR_MINOR_VERSION}".linux.tar.gz } +function install_rustup() { + local arch + arch="$(dpkg --print-architecture)" + declare -A rustup_targets=( + [amd64]="x86_64-unknown-linux-gnu" + [arm64]="aarch64-unknown-linux-gnu" + ) + declare -A rustup_sha256s=( + # https://static.rust-lang.org/rustup/archive/${RUSTUP_VERSION}/{target}/rustup-init.sha256 + [amd64]="4acc9acc76d5079515b46346a485974457b5a79893cfb01112423c89aeb5aa10" + [arm64]="9732d6c5e2a098d3521fca8145d826ae0aaa067ef2385ead08e6feac88fa5792" + ) + local target="${rustup_targets[${arch}]}" + local rustup_sha256="${rustup_sha256s[${arch}]}" + if [[ -z "${target}" ]]; then + echo "Unsupported architecture for rustup: ${arch}" + exit 1 + fi + curl --proto '=https' --tlsv1.2 -sSf \ + "https://static.rust-lang.org/rustup/archive/${RUSTUP_VERSION}/${target}/rustup-init" \ + -o /tmp/rustup-init + echo "${rustup_sha256} /tmp/rustup-init" | sha256sum --check + chmod +x /tmp/rustup-init + /tmp/rustup-init -y --default-toolchain "${RUSTUP_DEFAULT_TOOLCHAIN}" + rm -f /tmp/rustup-init +} + function apt_clean() { apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false rm -rf /var/lib/apt/lists/* /var/log/* @@ -448,6 +477,7 @@ else install_debian_dev_dependencies install_python install_additional_dev_dependencies + install_rustup if [[ "${INSTALLATION_TYPE}" == "CI" ]]; then install_golang fi @@ -1474,8 +1504,26 @@ function check_force_lowest_dependencies() { # --no-binary is needed in order to avoid libxml and xmlsec using different version of libxml2 # (binary lxml embeds its own libxml2, while xmlsec uses system one). 
# See https://bugs.launchpad.net/lxml/+bug/2110068 - uv sync --resolution lowest-direct --no-binary-package lxml --no-binary-package xmlsec --all-extras \ - --no-python-downloads --no-managed-python + + local sync_successful="false" + for attempt in 1 2 3; do + echo "Attempt ${attempt} of syncing to lowest dependencies" + set -x + if UV_LOCK_TIMEOUT=200 uv sync --resolution lowest-direct --no-binary-package lxml --no-binary-package xmlsec --all-extras \ + --no-python-downloads --no-managed-python; then + set +x + sync_successful="true" + break + fi + set +x + echo "Sleeping 30s" + sleep 30 + echo "Attempt ${attempt} failed. Retrying..." + done + if [[ "${sync_successful}" != "true" ]]; then + echo "${COLOR_RED}Failed to sync lowest dependencies after 3 attempts.${COLOR_RESET}" + exit 1 + fi else echo echo "${COLOR_BLUE}Forcing dependencies to lowest versions for Airflow.${COLOR_RESET}" @@ -1628,6 +1676,9 @@ ENV DEV_APT_COMMAND=${DEV_APT_COMMAND} \ ARG AIRFLOW_PYTHON_VERSION="3.12.13" ENV AIRFLOW_PYTHON_VERSION=${AIRFLOW_PYTHON_VERSION} ENV GOLANG_MAJOR_MINOR_VERSION="1.26.1" +ENV RUSTUP_HOME="/usr/local/rustup" +ENV CARGO_HOME="/usr/local/cargo" +ENV PATH="${CARGO_HOME}/bin:${PATH}" ARG PYTHON_LTO @@ -1777,7 +1828,7 @@ COPY --from=scripts common.sh install_packaging_tools.sh install_additional_depe # Also use `force pip` label on your PR to swap all places we use `uv` to `pip` ARG AIRFLOW_PIP_VERSION=26.0.1 # ARG AIRFLOW_PIP_VERSION="git+https://github.com/pypa/pip.git@main" -ARG AIRFLOW_UV_VERSION=0.11.1 +ARG AIRFLOW_UV_VERSION=0.11.3 ARG AIRFLOW_PREK_VERSION="0.3.8" # UV_LINK_MODE=copy is needed since we are using cache mounted from the host @@ -1787,7 +1838,7 @@ ENV AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION} \ AIRFLOW_PREK_VERSION=${AIRFLOW_PREK_VERSION} # The PATH is needed for python to find installed and cargo to build the wheels -ENV PATH="/usr/python/bin:/root/.local/bin:/root/.cargo/bin:${PATH}" +ENV PATH="/usr/python/bin:/root/.local/bin:${PATH}" # 
Useful for creating a cache id based on the underlying architecture, preventing the use of cached python packages from # an incorrect architecture. ARG TARGETARCH diff --git a/airflow-core/.pre-commit-config.yaml b/airflow-core/.pre-commit-config.yaml index 7573eec4e6533..3fb564eceb78d 100644 --- a/airflow-core/.pre-commit-config.yaml +++ b/airflow-core/.pre-commit-config.yaml @@ -221,23 +221,15 @@ repos: additional_dependencies: ['pnpm@10.25.0'] pass_filenames: true require_serial: true - ## ADD MOST PREK HOOK ABOVE THAT LINE - # The below prek hooks are those requiring CI image to be built - id: mypy-airflow-core - stages: ['pre-push'] name: Run mypy for airflow-core language: python - entry: ../scripts/ci/prek/mypy.py - files: ^.*\.py$ - require_serial: true - - id: mypy-airflow-core - stages: ['manual'] - name: Run mypy for airflow-core (manual) - language: python - entry: ../scripts/ci/prek/mypy_folder.py airflow-core + entry: ../scripts/ci/prek/mypy_local_folder.py airflow-core pass_filenames: false files: ^.*\.py$ require_serial: true + ## ADD MOST PREK HOOK ABOVE THAT LINE + # The below prek hooks are those requiring CI image to be built - id: generate-openapi-spec name: Generate the FastAPI API spec language: python @@ -271,6 +263,16 @@ repos: require_serial: true pass_filenames: false files: ^src/airflow/config_templates/config\.yml$ + - id: check-security-doc-constants + name: Check security docs match config.yml constants + entry: ../scripts/ci/prek/check_security_doc_constants.py + language: python + pass_filenames: false + files: > + (?x) + ^src/airflow/config_templates/config\.yml$| + ^docs/security/jwt_token_authentication\.rst$| + ^docs/security/security_model\.rst$ - id: check-airflow-version-checks-in-core language: pygrep name: No AIRFLOW_V_* imports in airflow-core @@ -313,6 +315,7 @@ repos: ^src/airflow/api_fastapi/core_api/services/ui/task_group.py$| ^src/airflow/api_fastapi/execution_api/routes/hitl\.py$| 
^src/airflow/api_fastapi/execution_api/routes/task_instances\.py$| + ^src/airflow/api_fastapi/execution_api/versions/v2026_04_06\.py$| ^src/airflow/api_fastapi/logging/decorators\.py$| ^src/airflow/assets/evaluation\.py$| ^src/airflow/assets/manager\.py$| diff --git a/airflow-core/docs/administration-and-deployment/production-deployment.rst b/airflow-core/docs/administration-and-deployment/production-deployment.rst index e69d436488713..e88b94d94ba8b 100644 --- a/airflow-core/docs/administration-and-deployment/production-deployment.rst +++ b/airflow-core/docs/administration-and-deployment/production-deployment.rst @@ -62,9 +62,12 @@ the :doc:`Celery executor `. Once you have configured the executor, it is necessary to make sure that every node in the cluster contains -the same configuration and Dags. Airflow sends simple instructions such as "execute task X of Dag Y", but -does not send any Dag files or configuration. You can use a simple cronjob or any other mechanism to sync -Dags and configs across your nodes, e.g., checkout Dags from git repo every 5 minutes on all nodes. +the Dags and configuration appropriate for its role. Airflow sends simple instructions such as +"execute task X of Dag Y", but does not send any Dag files or configuration. For synchronization of Dags +we recommend the Dag Bundle mechanism (including ``GitDagBundle``), which allows you to make use of +DAG versioning. For security-sensitive deployments, restrict sensitive configuration (JWT signing keys, +database credentials, Fernet keys) to only the components that need them rather than sharing all +configuration across all nodes — see :doc:`/security/security_model` for guidance. Logging diff --git a/airflow-core/docs/best-practices.rst b/airflow-core/docs/best-practices.rst index cd0f102d7efaa..b0b75b0086aff 100644 --- a/airflow-core/docs/best-practices.rst +++ b/airflow-core/docs/best-practices.rst @@ -319,7 +319,7 @@ Installing and Using ruff .. 
code-block:: bash - pip install "ruff>=0.15.7" + pip install "ruff>=0.15.9" 2. **Running ruff**: Execute ``ruff`` to check your Dags for potential issues: @@ -1098,8 +1098,10 @@ The benefits of using those operators are: environment is optimized for the case where you have multiple similar, but different environments. * The dependencies can be pre-vetted by the admins and your security team, no unexpected, new code will be added dynamically. This is good for both, security and stability. -* Complete isolation between tasks. They cannot influence one another in other ways than using standard - Airflow XCom mechanisms. +* Strong process-level isolation between tasks. Tasks run in separate containers/pods and cannot + influence one another at the process or filesystem level. They can still interact through standard + Airflow mechanisms (XComs, connections, variables) via the Execution API. See + :doc:`/security/security_model` for the full isolation model. The drawbacks: diff --git a/airflow-core/docs/configurations-ref.rst b/airflow-core/docs/configurations-ref.rst index 83c5d8a8ed51a..1afe00f1e2c1f 100644 --- a/airflow-core/docs/configurations-ref.rst +++ b/airflow-core/docs/configurations-ref.rst @@ -22,15 +22,22 @@ Configuration Reference This page contains the list of all the available Airflow configurations that you can set in ``airflow.cfg`` file or using environment variables. -Use the same configuration across all the Airflow components. While each component -does not require all, some configurations need to be same otherwise they would not -work as expected. A good example for that is :ref:`secret_key` which -should be same on the Webserver and Worker to allow Webserver to fetch logs from Worker. - -The webserver key is also used to authorize requests to Celery workers when logs are retrieved. 
The token -generated using the secret key has a short expiry time though - make sure that time on ALL the machines -that you run Airflow components on is synchronized (for example using ntpd) otherwise you might get -"forbidden" errors when the logs are accessed. +Different Airflow components may require different configuration parameters, and for +improved security, you should restrict sensitive configuration to only the components that +need it. Some configuration values must be shared across specific components to work +correctly — for example, the JWT signing key (``[api_auth] jwt_secret`` or +``[api_auth] jwt_private_key_path``) must be consistent across all components that generate +or validate JWT tokens (Scheduler, API Server). However, other sensitive parameters such as +database connection strings or Fernet keys should only be provided to components that need them. + +For security-sensitive deployments, pass configuration values via environment variables +scoped to individual components rather than sharing a single configuration file across all +components. See :doc:`/security/security_model` for details on which configuration +parameters should be restricted to which components. + +Make sure that time on ALL the machines that you run Airflow components on is synchronized +(for example using ntpd) otherwise you might get "forbidden" errors when the logs are +accessed or API calls are made. .. note:: For more information see :doc:`/howto/set-config`. diff --git a/airflow-core/docs/core-concepts/executor/index.rst b/airflow-core/docs/core-concepts/executor/index.rst index 0600c6dd5a7f1..9420c55d84e26 100644 --- a/airflow-core/docs/core-concepts/executor/index.rst +++ b/airflow-core/docs/core-concepts/executor/index.rst @@ -312,6 +312,7 @@ The following methods must be overridden at minimum to have your executor suppor * ``sync``: Sync will get called periodically during executor heartbeats. 
Implement this method to update the state of the tasks which the executor knows about. Optionally, attempting to execute queued tasks that have been received from the scheduler. * ``execute_async``: Executes a *workload* asynchronously. This method is called (after a few layers) during executor heartbeat which is run periodically by the scheduler. In practice, this method often just enqueues tasks into an internal or external queue of tasks to be run (e.g. ``KubernetesExecutor``). But can also execute the tasks directly as well (e.g. ``LocalExecutor``). This will depend on the executor. +* ``_process_workloads``: Processes a list of workloads that have been queued via ``queue_workload``. This method is called during executor heartbeat and defines how the executor handles the execution of workloads (e.g., queuing them to workers, submitting to external systems, etc.). Optional Interface Methods to Implement diff --git a/airflow-core/docs/core-concepts/multi-team.rst b/airflow-core/docs/core-concepts/multi-team.rst index 6beccc249b1cf..609a79cdf1888 100644 --- a/airflow-core/docs/core-concepts/multi-team.rst +++ b/airflow-core/docs/core-concepts/multi-team.rst @@ -38,7 +38,7 @@ Multi-Team mode is designed for medium to large organizations that typically hav **Use Multi-Team mode when:** - You have many teams that need to share Airflow infrastructure -- You need resource isolation (Variables, Connections, Secrets, etc) between teams +- You need resource isolation (Variables, Connections, Secrets, etc) between teams at the UI and API level (see :doc:`/security/security_model` for task-level isolation limitations) - You want separate execution environments per team - You want separate views per team in the Airflow UI - You want to minimize operational overhead or cost by sharing a single Airflow deployment diff --git a/airflow-core/docs/howto/customize-ui.rst b/airflow-core/docs/howto/customize-ui.rst index 3d696f52969b7..b9d03cdf94da5 100644 --- 
a/airflow-core/docs/howto/customize-ui.rst +++ b/airflow-core/docs/howto/customize-ui.rst @@ -71,6 +71,7 @@ We can provide a JSON configuration to customize the UI. .. important:: - You can customize the ``brand``, ``gray``, ``black``, and ``white`` color tokens, ``globalCss``, and the navigation icon via ``icon`` (and ``icon_dark_mode``). + - All top-level fields (``tokens``, ``globalCss``, ``icon``, ``icon_dark_mode``) are **optional** — you can supply any combination, including an empty ``{}`` to restore OSS defaults. - All color tokens are **optional** — you can override any subset without supplying the others. - ``brand`` and ``gray`` each accept an 11-shade scale with keys ``50``–``950``. - ``black`` and ``white`` each accept a single color: ``{ "value": "oklch(...)" }``. diff --git a/airflow-core/docs/howto/set-config.rst b/airflow-core/docs/howto/set-config.rst index 30d29c924c689..c35df0f4c894b 100644 --- a/airflow-core/docs/howto/set-config.rst +++ b/airflow-core/docs/howto/set-config.rst @@ -157,15 +157,20 @@ the example below. See :doc:`/administration-and-deployment/modules_management` for details on how Python and Airflow manage modules. .. note:: - Use the same configuration across all the Airflow components. While each component - does not require all, some configurations need to be same otherwise they would not - work as expected. A good example for that is :ref:`secret_key` which - should be same on the Webserver and Worker to allow Webserver to fetch logs from Worker. - - The webserver key is also used to authorize requests to Celery workers when logs are retrieved. The token - generated using the secret key has a short expiry time though - make sure that time on ALL the machines - that you run Airflow components on is synchronized (for example using ntpd) otherwise you might get - "forbidden" errors when the logs are accessed. + Different Airflow components may require different configuration parameters. 
For improved + security, restrict sensitive configuration to only the components that need it rather than + sharing all configuration across all components. Some values must be consistent across specific + components — for example, the JWT signing key must match between components that generate and + validate tokens. However, sensitive parameters such as database connection strings, Fernet keys, + and secrets backend credentials should only be provided to components that actually need them. + + For security-sensitive deployments, pass configuration values via environment variables scoped + to individual components. See :doc:`/security/security_model` for detailed guidance on + restricting configuration parameters. + + Make sure that time on ALL the machines that you run Airflow components on is synchronized + (for example using ntpd) otherwise you might get "forbidden" errors when the logs are + accessed or API calls are made. .. _set-config:configuring-local-settings: diff --git a/airflow-core/docs/installation/upgrading_to_airflow3.rst b/airflow-core/docs/installation/upgrading_to_airflow3.rst index 2d9c878390db8..ad0b5507b629e 100644 --- a/airflow-core/docs/installation/upgrading_to_airflow3.rst +++ b/airflow-core/docs/installation/upgrading_to_airflow3.rst @@ -54,7 +54,7 @@ In Airflow 3, direct metadata database access from task code is now restricted. - **No Direct Database Access**: Task code can no longer directly import and use Airflow database sessions or models. - **API-Based Resource Access**: All runtime interactions (state transitions, heartbeats, XComs, and resource fetching) are handled through a dedicated Task Execution API. -- **Enhanced Security**: This ensures isolation and security by preventing malicious task code from accessing or modifying the Airflow metadata database. +- **Enhanced Security**: This improves isolation and security by preventing worker task code from directly accessing or modifying the Airflow metadata database. 
Note that Dag author code potentially still executes with direct database access in the Dag File Processor and Triggerer — see :doc:`/security/security_model` for details. - **Stable Interface**: The Task SDK provides a stable, forward-compatible interface for accessing Airflow resources without direct database dependencies. Step 1: Take care of prerequisites diff --git a/airflow-core/docs/public-airflow-interface.rst b/airflow-core/docs/public-airflow-interface.rst index c768c36a7b170..4f4c09d66d173 100644 --- a/airflow-core/docs/public-airflow-interface.rst +++ b/airflow-core/docs/public-airflow-interface.rst @@ -548,9 +548,10 @@ but in Airflow they are not parts of the Public Interface and might change any t internal implementation detail and you should not assume they will be maintained in a backwards-compatible way. -**Direct metadata database access from task code is no longer allowed**. -Task code cannot directly access the metadata database to query Dag state, task history, -or Dag runs. Instead, use one of the following alternatives: +**Direct metadata database access from code authored by Dag Authors is no longer allowed**. +The code authored by Dag Authors cannot directly access the metadata database to query Dag state, task history, +or Dag runs — workers communicate exclusively through the Execution API. Instead, use one +of the following alternatives: * **Task Context**: Use :func:`~airflow.sdk.get_current_context` to access task instance information and methods like :meth:`~airflow.sdk.types.RuntimeTaskInstanceProtocol.get_dr_count`, diff --git a/airflow-core/docs/security/jwt_token_authentication.rst b/airflow-core/docs/security/jwt_token_authentication.rst new file mode 100644 index 0000000000000..7aa85bba9a381 --- /dev/null +++ b/airflow-core/docs/security/jwt_token_authentication.rst @@ -0,0 +1,398 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. 
See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +JWT Token Authentication +======================== + +This document describes how JWT (JSON Web Token) authentication works in Apache Airflow +for both the public REST API (Core API) and the internal Execution API used by workers. + +.. contents:: + :local: + :depth: 2 + +Overview +-------- + +Airflow uses JWT tokens as the primary authentication mechanism for its APIs. There are two +distinct JWT authentication flows: + +1. **REST API (Core API)** — used by UI users, CLI tools, and external clients to interact + with the Airflow public API. +2. **Execution API** — used internally by workers, the Dag File Processor, and the Triggerer + to communicate task state and retrieve runtime data (connections, variables, XComs). + +Both flows share the same underlying JWT infrastructure (``JWTGenerator`` and ``JWTValidator`` +classes in ``airflow.api_fastapi.auth.tokens``) but differ in audience, token lifetime, subject +claims, and scope semantics. + + +Signing and Cryptography +------------------------ + +Airflow supports two mutually exclusive signing modes: + +**Symmetric (shared secret)** + Uses a pre-shared secret key (``[api_auth] jwt_secret``) with the **HS512** algorithm. + All components that generate or validate tokens must share the same secret. 
If no secret + is configured, Airflow auto-generates a random 16-byte key at startup — but this key is + ephemeral and different across processes, which will cause authentication failures in + multi-component deployments. Deployment Managers must explicitly configure this value. + +**Asymmetric (public/private key pair)** + Uses a PEM-encoded private key (``[api_auth] jwt_private_key_path``) for signing and + the corresponding public key for validation. Supported algorithms: **RS256** (``RSA``) and + **EdDSA** (``Ed25519``). The algorithm is auto-detected from the key type when + ``[api_auth] jwt_algorithm`` is set to ``GUESS`` (the default). + + Validation can use either: + + - A JWKS (JSON Web Key Set) endpoint configured via ``[api_auth] trusted_jwks_url`` + (local file or remote HTTP/HTTPS URL, polled periodically for updates). + - The public key derived from the configured private key (automatic fallback when + ``trusted_jwks_url`` is not set). + +REST API Authentication Flow +----------------------------- + +Token acquisition +^^^^^^^^^^^^^^^^^ + +1. A client sends a ``POST`` request to ``/auth/token`` with credentials (e.g., username + and password in JSON body). +2. The auth manager validates the credentials and creates a user object. +3. The auth manager serializes the user into JWT claims and calls ``JWTGenerator.generate()``. +4. The generated token is returned in the response as ``access_token``. + +For UI-based authentication, the token is stored in a secure, HTTP-only cookie (``_token``) +with ``SameSite=Lax``. + +The CLI uses a separate endpoint (``/auth/token/cli``) with a different (shorter) expiration +time. + +Token structure (REST API) +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. list-table:: + :header-rows: 1 + :widths: 15 85 + + * - Claim + - Description + * - ``jti`` + - Unique token identifier (UUID4 hex). Used for token revocation. + * - ``iss`` + - Issuer (from ``[api_auth] jwt_issuer``). + * - ``aud`` + - Audience (from ``[api_auth] jwt_audience``). 
+   * - ``sub``
+     - User identifier (serialized by the auth manager).
+   * - ``iat``
+     - Issued-at timestamp (Unix epoch seconds).
+   * - ``nbf``
+     - Not-before timestamp (same as ``iat``).
+   * - ``exp``
+     - Expiration timestamp (``iat + jwt_expiration_time``).
+
+Token validation (REST API)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+On each API request, the token is extracted in this order of precedence:
+
+1. ``Authorization: Bearer <token>`` header.
+2. OAuth2 query parameter.
+3. ``_token`` cookie.
+
+The ``JWTValidator`` verifies the signature, expiry (``exp``), not-before (``nbf``),
+issued-at (``iat``), audience, and issuer claims. A configurable leeway
+(``[api_auth] jwt_leeway``, default 10 seconds) accounts for clock skew.
+
+Token revocation (REST API only)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Token revocation applies only to REST API and UI tokens — it is **not** used for Execution API
+tokens issued to workers.
+
+Revoked tokens are tracked in the ``revoked_token`` database table by their ``jti`` claim.
+On logout or explicit revocation, the token's ``jti`` and ``exp`` are inserted into this
+table. Expired entries are automatically cleaned up at a cadence of ``2× jwt_expiration_time``.
+
+Token refresh (REST API)
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+The ``JWTRefreshMiddleware`` runs on UI requests. When the middleware detects that the
+current token's ``_token`` cookie is approaching expiry, it calls
+``auth_manager.refresh_user()`` to generate a new token and sets it as the updated cookie.
+
+Default timings (REST API)
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. list-table::
+   :header-rows: 1
+   :widths: 50 50
+
+   * - Setting
+     - Default
+   * - ``[api_auth] jwt_expiration_time``
+     - 86400 seconds (24 hours)
+   * - ``[api_auth] jwt_cli_expiration_time``
+     - 3600 seconds (1 hour)
+   * - ``[api_auth] jwt_leeway``
+     - 10 seconds
+
+
+Execution API Authentication Flow
+----------------------------------
+
+The Execution API is an internal API intended for use by Airflow itself (not third-party
+callers) to report and set task state transitions, send heartbeats, retrieve connections,
+variables, and XComs at task runtime, and to support trigger execution and Dag parsing.
+
+Token generation (Execution API)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+1. The **Scheduler** generates a JWT for each task instance before
+   dispatching it (via the executor) to a worker. The executor's
+   ``jwt_generator`` property creates a ``JWTGenerator`` configured with the ``[execution_api]`` settings.
+2. The token's ``sub`` (subject) claim is set to the **task instance UUID**.
+3. The token is embedded in the workload JSON payload (``BaseWorkloadSchema.token`` field)
+   that is sent to the worker process.
+
+Token structure (Execution API)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. list-table::
+   :header-rows: 1
+   :widths: 15 85
+
+   * - Claim
+     - Description
+   * - ``jti``
+     - Unique token identifier (UUID4 hex).
+   * - ``iss``
+     - Issuer (from ``[api_auth] jwt_issuer``).
+   * - ``aud``
+     - Audience (from ``[execution_api] jwt_audience``, default: ``urn:airflow.apache.org:task``).
+   * - ``sub``
+     - Task instance UUID — the identity of the workload.
+   * - ``scope``
+     - Token scope: ``"execution"`` or ``"workload"``.
+   * - ``iat``
+     - Issued-at timestamp.
+   * - ``nbf``
+     - Not-before timestamp.
+   * - ``exp``
+     - Expiration timestamp (``iat + [execution_api] jwt_expiration_time``).
+
+Token scopes (Execution API)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The Execution API defines two token scopes:
+
+**workload**
+   A restricted scope accepted only on endpoints that explicitly opt in via
+   ``Security(require_auth, scopes=["token:workload"])``. Used for endpoints that
+   manage task state transitions.
+
+**execution**
+   Accepted by all Execution API endpoints. This is the standard scope for worker
+   communication and allows access to all Execution API operations.
+
+Tokens without a ``scope`` claim default to ``"execution"`` for backwards compatibility.
+
+Token delivery to workers
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The token flows through the execution stack as follows:
+
+1. **Scheduler** generates the token and embeds it in the workload JSON payload that it passes to
+   **Executor**.
+2. The workload JSON is passed to the worker process (via the executor-specific mechanism:
+   Celery message, Kubernetes Pod spec, local subprocess arguments, etc.).
+3. The worker's ``execute_workload()`` function reads the workload JSON and extracts the token.
+4. The ``supervise()`` function receives the token and creates an ``httpx.Client`` instance
+   with ``BearerAuth(token)`` for all Execution API HTTP requests.
+5. The token is included in the ``Authorization: Bearer <token>`` header of every request.
+
+Token validation (Execution API)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The ``JWTBearer`` security dependency validates the token once per request:
+
+1. Extracts the token from the ``Authorization: Bearer`` header.
+2. Performs cryptographic signature validation via ``JWTValidator``.
+3. Verifies standard claims (``exp``, ``iat``, ``aud`` — ``nbf`` and ``iss`` if configured).
+4. Defaults the ``scope`` claim to ``"execution"`` if absent.
+5. Creates a ``TIToken`` object with the task instance ID and claims.
+6. Caches the validated token on the ASGI request scope for the duration of the request.
+ +Route-level enforcement is handled by ``require_auth``: + +- Checks the token's ``scope`` against the route's ``allowed_token_types`` (precomputed + by ``ExecutionAPIRoute`` from ``token:*`` Security scopes at route registration time). +- Enforces ``ti:self`` scope — verifies that the token's ``sub`` claim matches the + ``{task_instance_id}`` path parameter, preventing a worker from accessing another task's + endpoints. + +Token refresh (Execution API) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The ``JWTReissueMiddleware`` automatically refreshes valid tokens that are approaching expiry: + +1. After each response, the middleware checks the token's remaining validity. +2. If less than **20%** of the total validity remains (minimum 30 seconds), the server + generates a new token preserving all original claims (including ``scope`` and ``sub``). +3. The refreshed token is returned in the ``Refreshed-API-Token`` response header. +4. The client's ``_update_auth()`` hook detects this header and transparently updates + the ``BearerAuth`` instance for subsequent requests. + +This mechanism ensures long-running tasks do not lose API access due to token expiry, +without requiring the worker to re-authenticate. + +No token revocation (Execution API) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Execution API tokens are not subject to revocation. They are short-lived (default 10 minutes) +and automatically refreshed by the ``JWTReissueMiddleware``, so revocation is not part of the +Execution API security model. Once an Execution API token is issued to a worker, it remains +valid until it expires. + + + +Default timings (Execution API) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. 
list-table::
+   :header-rows: 1
+   :widths: 50 50
+
+   * - Setting
+     - Default
+   * - ``[execution_api] jwt_expiration_time``
+     - 600 seconds (10 minutes)
+   * - ``[execution_api] jwt_audience``
+     - ``urn:airflow.apache.org:task``
+   * - Token refresh threshold
+     - 20% of validity remaining (minimum 30 seconds, i.e., at ~120 seconds before expiry
+       with the default 600-second token lifetime)
+
+
+Dag File Processor and Triggerer
+---------------------------------
+
+The **Dag File Processor** and **Triggerer** are internal Airflow components that also
+interact with the Execution API, but they do so via an **in-process** transport
+(``InProcessExecutionAPI``) rather than over the network. This in-process API:
+
+- Runs the Execution API application directly within the same process, using an ASGI/WSGI
+  bridge.
+- **Potentially bypasses JWT authentication** — the JWT bearer dependency is overridden to
+  always return a synthetic ``TIToken`` with the ``"execution"`` scope, effectively bypassing
+  token validation.
+- Also potentially bypasses per-resource access controls (connection, variable, and XCom access
+  checks are overridden to always allow).
+
+Airflow implements software guards that prevent accidental direct database access from Dag
+author code in these components. However, because the child processes that parse Dag files and
+execute trigger code run as the **same Unix user** as their parent processes, these guards do
+not protect against intentional access. A deliberately malicious Dag author can potentially
+retrieve the parent process's database credentials (via ``/proc/<pid>/environ``, configuration
+files, or secrets manager access) and gain full read/write access to the metadata database and
+all Execution API operations — without needing a valid JWT token.
+
+This is in contrast to workers/task execution, where the isolation is implemented at the
+deployment level — sensitive configuration such as database credentials is not available to
+Airflow worker processes because it is not set in their deployment configuration at all, and
+workers communicate exclusively through the Execution API.
+
+In the default deployment, a **single Dag File Processor instance** parses Dag files for all
+teams and a **single Triggerer instance** handles all triggers across all teams. This means
+that Dag author code from different teams executes within the same process, with potentially
+shared access to the in-process Execution API and the metadata database.
+
+For multi-team deployments that require isolation, Deployment Managers must run **separate
+Dag File Processor and Triggerer instances per team** as a deployment-level measure — Airflow
+does not provide built-in support for per-team DFP or Triggerer instances. Even with separate
+instances, each retains the same Unix user as the parent process. To prevent credential
+retrieval, Deployment Managers must implement Unix user-level isolation (running child
+processes as a different, low-privilege user) or network-level restrictions.
+
+See :doc:`/security/security_model` for the full security implications, deployment hardening
+guidance, and the planned strategic and tactical improvements.
+
+
+Workload Isolation and Current Limitations
+------------------------------------------
+
+For a detailed discussion of workload isolation protections, current limitations, and planned
+improvements, see :ref:`workload-isolation`.
+
+
+Configuration Reference
+------------------------
+
+All JWT-related configuration parameters:
+
+.. list-table::
+   :header-rows: 1
+   :widths: 40 15 45
+
+   * - Parameter
+     - Default
+     - Description
+   * - ``[api_auth] jwt_secret``
+     - Auto-generated if missing
+     - Symmetric secret key for signing tokens. Must be the same across all components. 
Mutually exclusive with ``jwt_private_key_path``. + * - ``[api_auth] jwt_private_key_path`` + - None + - Path to PEM-encoded private key (``RSA`` or ``Ed25519``). Mutually exclusive with ``jwt_secret``. + * - ``[api_auth] jwt_algorithm`` + - ``GUESS`` + - Signing algorithm. Auto-detected from key type: ``HS512`` for symmetric, ``RS256`` for ``RSA``, ``EdDSA`` for ``Ed25519``. + * - ``[api_auth] jwt_kid`` + - Auto (``RFC 7638`` thumbprint) + - Key ID placed in token header. Ignored for symmetric keys. + * - ``[api_auth] jwt_issuer`` + - None + - Issuer claim (``iss``). Recommended to be unique per deployment. + * - ``[api_auth] jwt_audience`` + - None + - Audience claim (``aud``) for REST API tokens. + * - ``[api_auth] jwt_expiration_time`` + - 86400 (24h) + - REST API token lifetime in seconds. + * - ``[api_auth] jwt_cli_expiration_time`` + - 3600 (1h) + - CLI token lifetime in seconds. + * - ``[api_auth] jwt_leeway`` + - 10 + - Clock skew tolerance in seconds for token validation. + * - ``[api_auth] trusted_jwks_url`` + - None + - JWKS endpoint URL or local file path for token validation. Mutually exclusive with ``jwt_secret``. + * - ``[execution_api] jwt_expiration_time`` + - 600 (10 min) + - Execution API token lifetime in seconds. + * - ``[execution_api] jwt_audience`` + - ``urn:airflow.apache.org:task`` + - Audience claim for Execution API tokens. + +.. important:: + + Time synchronization across all Airflow components is critical. Use NTP (e.g., ``ntpd`` or + ``chrony``) to keep clocks in sync. Clock skew beyond the configured ``jwt_leeway`` will cause + authentication failures. diff --git a/airflow-core/docs/security/security_model.rst b/airflow-core/docs/security/security_model.rst index 15b59b250904c..96f6f66783b14 100644 --- a/airflow-core/docs/security/security_model.rst +++ b/airflow-core/docs/security/security_model.rst @@ -62,11 +62,24 @@ Dag authors ........... They can create, modify, and delete Dag files. 
The -code in Dag files is executed on workers and in the Dag Processor. -Therefore, Dag authors can create and change code executed on workers -and the Dag Processor and potentially access the credentials that the Dag -code uses to access external systems. Dag authors have full access -to the metadata database. +code in Dag files is executed on workers, in the Dag File Processor, +and in the Triggerer. +Therefore, Dag authors can create and change code executed on workers, +the Dag File Processor, and the Triggerer, and potentially access the credentials that the Dag +code uses to access external systems. + +In Airflow 3, the level of database isolation depends on the component: + +* **Workers**: Task code on workers communicates with the API server exclusively through the + Execution API. Workers do not receive database credentials and genuinely cannot access the + metadata database directly. +* **Dag File Processor and Triggerer**: Airflow implements software guards that prevent + accidental direct database access from Dag author code. However, because Dag parsing and + trigger execution processes run as the same Unix user as their parent processes (which do + have database credentials), a deliberately malicious Dag author can potentially retrieve + credentials from the parent process and gain direct database access. See + :ref:`jwt-authentication-and-workload-isolation` for details on the specific mechanisms and + deployment hardening measures. Authenticated UI users ....................... @@ -115,6 +128,8 @@ The primary difference between an operator and admin is the ability to manage an to other users, and access audit logs - only admins are able to do this. Otherwise assume they have the same access as an admin. +.. _connection-configuration-users: + Connection configuration users .............................. @@ -170,6 +185,8 @@ Viewers also do not have permission to access audit logs. 
For more information on the capabilities of authenticated UI users, see :doc:`apache-airflow-providers-fab:auth-manager/access-control`. +.. _capabilities-of-dag-authors: + Capabilities of Dag authors --------------------------- @@ -193,15 +210,21 @@ not open new security vulnerabilities. Limiting Dag Author access to subset of Dags -------------------------------------------- -Airflow does not have multi-tenancy or multi-team features to provide isolation between different groups of users when -it comes to task execution. While, in Airflow 3.0 and later, Dag Authors cannot directly access database and cannot run -arbitrary queries on the database, they still have access to all Dags in the Airflow installation and they can +Airflow does not yet provide full task-level isolation between different groups of users when +it comes to task execution. While, in Airflow 3.0 and later, worker task code cannot directly access the +metadata database (it communicates through the Execution API), Dag author code that runs in the Dag File +Processor and Triggerer potentially still has direct database access. Regardless of execution context, Dag authors +have access to all Dags in the Airflow installation and they can modify any of those Dags - no matter which Dag the task code is executed for. This means that Dag authors can modify state of any task instance of any Dag, and there are no finer-grained access controls to limit that access. -There is a work in progress on multi-team feature in Airflow that will allow to have some isolation between different -groups of users and potentially limit access of Dag authors to only a subset of Dags, but currently there is no -such feature in Airflow and you can assume that all Dag authors have access to all Dags and can modify their state. +There is an **experimental** multi-team feature in Airflow (``[core] multi_team``) that provides UI-level and +REST API-level RBAC isolation between teams. 
However, this feature **does not yet guarantee task-level isolation**. +At the task execution level, workloads from different teams still share the same Execution API, signing keys, +connections, and variables. A task from one team can access the same shared resources as a task from another team. +The multi-team feature is a work in progress — task-level isolation and Execution API enforcement of team +boundaries will be improved in future versions of Airflow. Until then, you should assume that all Dag authors +have access to all Dags and shared resources, and can modify their state regardless of team assignment. Security contexts for Dag author submitted code @@ -239,8 +262,15 @@ Triggerer In case of Triggerer, Dag authors can execute arbitrary code in Triggerer. Currently there are no enforcement mechanisms that would allow to isolate tasks that are using deferrable functionality from -each other and arbitrary code from various tasks can be executed in the same process/machine. Deployment -Manager must trust that Dag authors will not abuse this capability. +each other and arbitrary code from various tasks can be executed in the same process/machine. The default +deployment runs a single Triggerer instance that handles triggers from all teams — there is no built-in +support for per-team Triggerer instances. Additionally, the Triggerer uses an in-process Execution API +transport that potentially bypasses JWT authentication and potentially has direct access to the metadata +database. For multi-team deployments, Deployment Managers must run separate Triggerer instances per team +as a deployment-level measure, but even then each instance potentially retains direct database access +and a Dag author +whose trigger code runs there can potentially access the database directly — including data belonging +to other teams. Deployment Manager must trust that Dag authors will not abuse this capability. 
Dag files not needed for Scheduler and API Server ................................................. @@ -282,6 +312,292 @@ Access to all Dags All Dag authors have access to all Dags in the Airflow deployment. This means that they can view, modify, and update any Dag without restrictions at any time. +.. _jwt-authentication-and-workload-isolation: + +JWT authentication and workload isolation +----------------------------------------- + +Airflow uses JWT (JSON Web Token) authentication for both its public REST API and its internal +Execution API. For a detailed description of the JWT authentication flows, token structure, and +configuration, see :doc:`/security/jwt_token_authentication`. For the current state of workload +isolation protections and their limitations, see :ref:`workload-isolation`. + +Current isolation limitations +............................. + +While Airflow 3 significantly improved the security model by preventing worker task code from +directly accessing the metadata database (workers now communicate exclusively through the +Execution API), **perfect isolation between Dag authors is not yet achieved**. Dag author code +potentially still executes with direct database access in the Dag File Processor and Triggerer. + +**Software guards vs. intentional access** + Airflow implements software-level guards that prevent **accidental and unintentional** direct database + access from Dag author code. The Dag File Processor removes the database session and connection + information before forking child processes that parse Dag files, and worker tasks use the Execution + API exclusively. + + However, these software guards **do not protect against intentional, malicious access**. The child + processes that parse Dag files and execute trigger code run as the **same Unix user** as their parent + processes (the Dag File Processor manager and the Triggerer respectively). 
Because of how POSIX + process isolation works, a child process running as the same user can retrieve the parent's + credentials through several mechanisms: + + * **Environment variables**: By default, on Linux, any process can read ``/proc/<pid>/environ`` of another + process running as the same user — so database credentials passed via environment variables + (e.g., ``AIRFLOW__DATABASE__SQL_ALCHEMY_CONN``) can be read from the parent process. This can be + prevented by clearing the dumpable flag of the process, which is implemented in the task supervisor. + * **Configuration files**: If configuration is stored in files, those files must be readable by the + parent process and are therefore also readable by the child process running as the same user. + * **Command-based secrets** (``_CMD`` suffix options): The child process can execute the same + commands to retrieve secrets. + * **Secrets manager access**: If the parent uses a secrets backend, the child can access the same + secrets manager using credentials available in the process environment or filesystem. + + This means that a deliberately malicious Dag author can retrieve database credentials and gain + **full read/write access to the metadata database** — including the ability to modify any Dag, + task instance, connection, or variable. The software guards address accidental access (e.g., a Dag + author importing ``airflow.settings.Session`` out of habit from Airflow 2) but do not prevent a + determined actor from circumventing them. + + On workers, the isolation can be stronger when the Deployment Manager configures worker processes to + not receive database credentials at all (neither via environment variables nor configuration). + Workers should communicate exclusively through the Execution API using short-lived JWT tokens. + A task running on a worker genuinely should not access the metadata database directly — + when it is configured to not have any credentials accessible to it. 
+ +**Dag File Processor and Triggerer run user code and have only soft protection against bypassing JWT authentication** + The Dag File Processor and Triggerer processes that run user code + use an in-process transport to access the Execution API, which bypasses JWT authentication. + Since these components execute user-submitted code (Dag files and trigger code respectively), + a Dag author whose code runs in these components + has unrestricted access to all Execution API operations if they bypass the soft protections + — including the ability to read any connection, variable, or XCom — without needing a valid JWT token. + + Furthermore, the Dag File Processor has direct access to the metadata database (it needs this to + store serialized Dags). As described above, Dag author code executing in the Dag File Processor + context could potentially retrieve the database credentials from the parent process and access + the database directly, including the JWT signing key configuration if it is available in the + process environment. If a Dag author obtains the JWT signing key, they could forge arbitrary tokens. + +**Dag File Processor and Triggerer are shared across teams** + In the default deployment, a **single Dag File Processor instance** parses all Dag files and a + **single Triggerer instance** handles all triggers — regardless of team assignment. There is no + built-in support for running per-team Dag File Processor or Triggerer instances. This means that + Dag author code from different teams executes within the same process, potentially sharing the + in-process Execution API and direct database access. + + For multi-team deployments that require separation, Deployment Managers must run **separate + Dag File Processor and Triggerer instances per team** as a deployment-level measure (for example, + by configuring each instance to only process bundles belonging to a specific team). 
However, even + with separate instances, each Dag File Processor and Triggerer potentially retains direct access + to the metadata database — a Dag author whose code runs in these components can potentially + retrieve credentials from the parent process and access the database directly, including reading + or modifying data belonging to other teams, unless the Deployment Manager implements Unix + user-level isolation (see :ref:`deployment-hardening-for-improved-isolation`). + +**No cross-workload isolation in the Execution API** + All worker workloads authenticate to the same Execution API with tokens signed by the same key and + sharing the same audience. While the ``ti:self`` scope enforcement prevents a worker from accessing + another task's specific endpoints (heartbeat, state transitions), shared resources such as connections, + variables, and XComs are accessible to all tasks. There is no isolation between tasks belonging to + different teams or Dag authors at the Execution API level. + +**Token signing key might be a shared secret** + In symmetric key mode (``[api_auth] jwt_secret``), the same secret key is used to both generate and + validate tokens. Any component that has access to this secret can forge tokens with arbitrary claims, + including tokens for other task instances or with elevated scopes. This does not impact the security + of the system though if the secret is only available to api-server and scheduler via deployment + configuration. + +**Sensitive configuration values can be leaked through logs** + Dag authors can write code that prints environment variables or configuration values to task logs + (e.g., ``print(os.environ)``). Airflow masks known sensitive values in logs, but masking depends on + recognizing the value patterns. Dag authors who intentionally or accidentally log raw environment + variables may expose database credentials, JWT signing keys, Fernet keys, or other secrets in task + logs. 
Deployment Managers should restrict access to task logs and ensure that sensitive configuration + is only provided to components where it is needed (see the sensitive variables tables below). + +.. _deployment-hardening-for-improved-isolation: + +Deployment hardening for improved isolation +........................................... + +Deployment Managers who require stronger isolation between Dag authors and teams can take the following +measures. Note that these are deployment-specific actions that go beyond Airflow's built-in security +model — Airflow does not enforce these natively. + +**Mandatory code review of Dag files** + Implement a review process for all Dag submissions to Dag bundles. This can include: + + * Requiring pull request reviews before Dag files are deployed. + * Static analysis of Dag code to detect suspicious patterns (e.g., direct database access attempts, + reading environment variables, importing configuration modules). + * Automated linting rules that flag potentially dangerous code. + +**Restrict sensitive configuration to components that need them** + Do not share all configuration parameters across all components. In particular: + + * The JWT signing key (``[api_auth] jwt_secret`` or ``[api_auth] jwt_private_key_path``) should only + be available to components that need to generate tokens (Scheduler/Executor, API Server) and + components that need to validate tokens (API Server). Workers should not have access to the signing + key — they only need the tokens provided to them. + * Connection credentials for external systems (via Secrets Managers) should only be available to the API Server + (which serves them to workers via the Execution API), not to the Scheduler, Dag File Processor, + or Triggerer processes directly. This however limits some of the features of Airflow - such as Deadline + Alerts or triggers that need to authenticate with the external systems. 
+ * - Database connection strings should only be available to components that need direct database access + (API Server, Scheduler, Dag File Processor, Triggerer), not to workers. + +**Pass configuration via environment variables** + For higher security, pass sensitive configuration values via environment variables rather than + configuration files. Environment variables are inherently safer than configuration files in + Airflow's worker processes because of a built-in protection: on Linux, the supervisor process + calls ``prctl(PR_SET_DUMPABLE, 0)`` before forking the task process, and this flag is inherited + by the forked child. This marks both processes as non-dumpable, which prevents same-UID sibling + processes from reading ``/proc/<pid>/environ``, ``/proc/<pid>/mem``, or attaching via + ``ptrace``. In contrast, configuration files on disk are readable by any process running as + the same Unix user. Environment variables can also be scoped to individual processes or + containers, making it easier to restrict which components have access to which secrets. + + The following tables list all security-sensitive configuration variables (marked ``sensitive: true`` + in Airflow's configuration). Deployment Managers should review each variable and ensure it is only + provided to the components that need it. The "Needed by" column indicates which components + typically require the variable — but actual needs depend on the specific deployment topology and + features in use. + + .. START AUTOGENERATED CORE SENSITIVE VARS + + **Core Airflow sensitive configuration variables:** + + .. 
list-table:: + :header-rows: 1 + :widths: 40 30 30 + + * - Environment variable + - Description + - Needed by + * - ``AIRFLOW__API_AUTH__JWT_SECRET`` + - JWT signing key (symmetric mode) + - API Server, Scheduler + * - ``AIRFLOW__API__SECRET_KEY`` + - API secret key for log token signing + - API Server, Scheduler, Workers, Triggerer + * - ``AIRFLOW__CORE__ASSET_MANAGER_KWARGS`` + - Asset manager credentials + - Dag File Processor + * - ``AIRFLOW__CORE__FERNET_KEY`` + - Fernet encryption key for connections/variables at rest + - API Server, Scheduler, Workers, Dag File Processor, Triggerer + * - ``AIRFLOW__DATABASE__SQL_ALCHEMY_CONN`` + - Metadata database connection string + - API Server, Scheduler, Dag File Processor, Triggerer + * - ``AIRFLOW__DATABASE__SQL_ALCHEMY_CONN_ASYNC`` + - Async metadata database connection string + - API Server, Scheduler, Dag File Processor, Triggerer + * - ``AIRFLOW__DATABASE__SQL_ALCHEMY_ENGINE_ARGS`` + - SQLAlchemy engine parameters (may contain credentials) + - API Server, Scheduler, Dag File Processor, Triggerer + * - ``AIRFLOW__LOGGING__REMOTE_TASK_HANDLER_KWARGS`` + - Remote logging handler credentials + - Scheduler, Workers, Triggerer + * - ``AIRFLOW__SECRETS__BACKEND_KWARGS`` + - Secrets backend credentials (non-worker mode) + - Scheduler, Dag File Processor, Triggerer + * - ``AIRFLOW__SENTRY__SENTRY_DSN`` + - Sentry error reporting endpoint + - Scheduler, Triggerer + * - ``AIRFLOW__WORKERS__SECRETS_BACKEND_KWARGS`` + - Worker-specific secrets backend credentials + - Workers + + .. END AUTOGENERATED CORE SENSITIVE VARS + + Note that ``AIRFLOW__API_AUTH__JWT_PRIVATE_KEY_PATH`` (path to the JWT private key for asymmetric + signing) is not marked as ``sensitive`` in config.yml because it is a file path, not a secret + value itself. However, access to the file it points to should be restricted to the Scheduler + (which generates tokens) and the API Server (which validates them). + + .. 
START AUTOGENERATED PROVIDER SENSITIVE VARS + + **Provider-specific sensitive configuration variables:** + + The following variables are defined by Airflow providers and should only be set on components where + the corresponding provider functionality is needed. The decision of which components require these + variables depends on the Deployment Manager's choices about which providers and features are + enabled in each component. + + .. list-table:: + :header-rows: 1 + :widths: 40 30 30 + + * - Environment variable + - Provider + - Description + * - ``AIRFLOW__CELERY_BROKER_TRANSPORT_OPTIONS__SENTINEL_KWARGS`` + - celery + - Sentinel kwargs + * - ``AIRFLOW__CELERY_RESULT_BACKEND_TRANSPORT_OPTIONS__SENTINEL_KWARGS`` + - celery + - Sentinel kwargs + * - ``AIRFLOW__CELERY__BROKER_URL`` + - celery + - Broker url + * - ``AIRFLOW__CELERY__FLOWER_BASIC_AUTH`` + - celery + - Flower basic auth + * - ``AIRFLOW__CELERY__RESULT_BACKEND`` + - celery + - Result backend + * - ``AIRFLOW__KEYCLOAK_AUTH_MANAGER__CLIENT_SECRET`` + - keycloak + - Client secret + * - ``AIRFLOW__OPENSEARCH__PASSWORD`` + - opensearch + - Password + * - ``AIRFLOW__OPENSEARCH__USERNAME`` + - opensearch + - Username + + .. END AUTOGENERATED PROVIDER SENSITIVE VARS + + Deployment Managers should review the full configuration reference and identify any additional + parameters that contain credentials or secrets relevant to their specific deployment. + +**Use asymmetric keys for JWT signing** + Using asymmetric keys (``[api_auth] jwt_private_key_path`` with a JWKS endpoint) provides better + security than symmetric keys because: + + * The private key (used for signing) can be restricted to the Scheduler/Executor. + * The API Server only needs the public key (via JWKS) for validation. + * Workers cannot forge tokens even if they could access the JWKS endpoint, since they would + not have the private key. 
+ +**Network-level isolation** + Use network policies, VPCs, or similar mechanisms to restrict which components can communicate + with each other. For example, workers should only be able to reach the Execution API endpoint, + not the metadata database or internal services directly. The Dag File Processor and Triggerer + child processes should ideally not have network access to the metadata database either, if + Unix user-level isolation is implemented. + +**Other measures and future improvements** + Deployment Managers may need to implement additional measures depending on their security + requirements. These may include monitoring and auditing of Execution API access patterns, + runtime sandboxing of Dag code, or dedicated infrastructure per team. + + Future versions of Airflow plan to address these limitations through two approaches: + + * **Strategic (longer-term)**: Move the Dag File Processor and Triggerer to communicate with + the metadata database exclusively through the API server (similar to how workers use the + Execution API today). This would eliminate the need for these components to have database + credentials at all, providing security by design rather than relying on deployment-level + measures. + * **Tactical (shorter-term)**: Native support for Unix user impersonation in the Dag File + Processor and Triggerer child processes, so that Dag author code runs as a different, low- + privilege user that cannot access the parent's credentials or the database. + + The Airflow community is actively working on these improvements. + + Custom RBAC limitations ----------------------- @@ -309,6 +625,8 @@ you trust them not to abuse the capabilities they have. You should also make sur properly configured the Airflow installation to prevent Dag authors from executing arbitrary code in the Scheduler and API Server processes. +.. 
_deploying-and-protecting-airflow-installation: + Deploying and protecting Airflow installation ............................................. @@ -354,13 +672,150 @@ Examples of fine-grained access control include (but are not limited to): * Access restrictions to views or Dags: Controlling user access to certain views or specific Dags, ensuring that users can only view or interact with authorized components. -Future: multi-tenancy isolation -............................... +Future: multi-team isolation +............................ These examples showcase ways in which Deployment Managers can refine and limit user privileges within Airflow, providing tighter control and ensuring that users have access only to the necessary components and functionalities based on their roles and responsibilities. However, fine-grained access control does not -provide full isolation and separation of access to allow isolation of different user groups in a -multi-tenant fashion yet. In future versions of Airflow, some fine-grained access control features could -become part of the Airflow security model, as the Airflow community is working on a multi-tenant model -currently. +yet provide full isolation and separation of access between different groups of users. + +The experimental multi-team feature (``[core] multi_team``) is a step towards cross-team isolation, but it +currently only enforces team-based isolation at the UI and REST API level. **Task-level isolation is not yet +guaranteed** — workloads from different teams share the same Execution API, JWT signing keys, and access to +connections, variables, and XComs. In deployments where additional hardening measures (described in +:ref:`deployment-hardening-for-improved-isolation`) are not implemented, a task belonging to one team can +potentially access shared resources available to tasks from other teams. 
Deployment Managers who enable the +multi-team feature should not rely on it alone for security-critical isolation between teams at the task +execution layer — a deep understanding of configuration and deployment security is required by Deployment +Managers to configure it in a way that can guarantee separation between teams. + +Future versions of Airflow will improve task-level isolation, including team-scoped Execution API enforcement, +finer-grained JWT token scopes, and better sandboxing of user-submitted code. The Airflow community is +actively working on these improvements. + + +What is NOT considered a security vulnerability +----------------------------------------------- + +The following scenarios are **not** considered security vulnerabilities in Airflow. They are either +intentional design choices, consequences of the trust model described above, or issues that fall +outside Airflow's threat model. Security researchers (and AI agents performing security analysis) +should review this section before reporting issues to the Airflow security team. + +For full details on reporting policies, see +`Airflow's Security Policy <https://github.com/apache/airflow/security/policy>`_. + +Dag authors executing arbitrary code +..................................... + +Dag authors can execute arbitrary code on workers, the Dag File Processor, and the Triggerer. This +includes accessing credentials, environment variables, and (in the case of the Dag File Processor +and Triggerer) potentially the metadata database directly. This is the intended behavior as described in +:ref:`capabilities-of-dag-authors` — Dag authors are trusted users. Reports that a Dag author can +"achieve RCE" or "access the database" by writing Dag code are restating a documented capability, +not discovering a vulnerability. + +Dag author code passing unsanitized input to operators and hooks +................................................................ 
+ +When a Dag author writes code that passes unsanitized UI user input (such as Dag run parameters, +variables, or connection configuration values) to operators, hooks, or third-party libraries, the +responsibility lies with the Dag author. Airflow's hooks and operators are low-level interfaces — +Dag authors are Python programmers who must sanitize inputs before passing them to these interfaces. + +SQL injection or command injection is only considered a vulnerability if it can be triggered by a +**non-Dag-author** user role (e.g., an authenticated UI user) **without** the Dag author deliberately +writing code that passes that input unsafely. If the only way to exploit the injection requires writing +or modifying a Dag file, it is not a vulnerability — the Dag author already has the ability to execute +arbitrary code. See also :doc:`/security/sql`. + +An exception exists when official Airflow documentation explicitly recommends a pattern that leads to +injection — in that case, the documentation guidance itself is the issue and may warrant an advisory. + +Dag File Processor and Triggerer potentially having database access +................................................................... + +The Dag File Processor potentially has direct database access to store serialized Dags. The Triggerer +potentially has direct database access to manage trigger state. Both components execute user-submitted +code (Dag files and trigger code respectively) and potentially bypass JWT authentication via an +in-process Execution API transport. These are intentional architectural choices, not vulnerabilities. +They are documented in :ref:`jwt-authentication-and-workload-isolation`. + +Workers accessing shared Execution API resources +................................................. + +Worker tasks can access connections, variables, and XComs via the Execution API using their JWT token. 
+While the ``ti:self`` scope prevents cross-task state manipulation, shared resources are accessible to +all tasks. This is the current design — not a vulnerability. Reports that "a task can read another +team's connection" are describing a known limitation of the current isolation model, documented in +:ref:`jwt-authentication-and-workload-isolation`. + +Execution API tokens not being revocable +........................................ + +Execution API tokens issued to workers are short-lived (default 10 minutes) with automatic refresh +and are intentionally not subject to revocation. This is a design choice documented in +:doc:`/security/jwt_token_authentication`, not a missing security control. + +Connection configuration capabilities +...................................... + +Users with the **Connection configuration** role can configure connections with arbitrary credentials +and connection parameters. When the ``test connection`` feature is enabled, these users can potentially +trigger RCE, arbitrary file reads, or Denial of Service through connection parameters. This is by +design — connection configuration users are highly privileged and must be trusted not to abuse these +capabilities. The ``test connection`` feature is disabled by default since Airflow 2.7.0, and enabling +it is an explicit Deployment Manager decision that acknowledges these risks. See +:ref:`connection-configuration-users` for details. + +Denial of Service by authenticated users +........................................ + +Airflow is not designed to be exposed to untrusted users on the public internet. All users who can +access the Airflow UI and API are authenticated and known. Denial of Service scenarios triggered by +authenticated users (such as creating very large Dag runs, submitting expensive queries, or flooding +the API) are not considered security vulnerabilities. 
They are operational concerns that Deployment
+Managers should address through rate limiting, resource quotas, and monitoring — standard measures
+for any internal application. See :ref:`deploying-and-protecting-airflow-installation`.
+
+Self-XSS by authenticated users
+................................
+
+Cross-site scripting (XSS) scenarios where the only victim is the user who injected the payload
+(self-XSS) are not considered security vulnerabilities. Airflow's users are authenticated and
+known, and self-XSS does not allow an attacker to compromise other users. If you discover an XSS
+scenario where a lower-privileged user can inject a payload that executes in a higher-privileged
+user's session without that user's action, that is a valid vulnerability and should be reported.
+
+Simple Auth Manager
+...................
+
+The Simple Auth Manager is intended for development and testing only. This is clearly documented and
+a prominent warning banner is displayed on the login page. Security issues specific to the Simple
+Auth Manager (such as weak password handling, lack of rate limiting, or missing CSRF protections) are
+not considered production security vulnerabilities. Production deployments must use a production-grade
+auth manager.
+
+Third-party dependency vulnerabilities in Docker images
+.......................................................
+
+Airflow's reference Docker images are built with the latest available dependencies at release time.
+Vulnerabilities found by scanning these images against CVE databases are expected to appear over time
+as new CVEs are published. These should **not** be reported to the Airflow security team. Instead,
+users should build their own images with updated dependencies as described in the
+`Docker image documentation <https://airflow.apache.org/docs/docker-stack/build.html>`_.
+ +If you discover that a third-party dependency vulnerability is **actually exploitable** in Airflow +(with a proof-of-concept demonstrating the exploitation in Airflow's context), that is a valid +report and should be submitted following the security policy. + +Automated scanning results without human verification +..................................................... + +Automated security scanner reports that list findings without human verification against Airflow's +security model are not considered valid vulnerability reports. Airflow's trust model differs +significantly from typical web applications — many scanner findings (such as "admin user can execute +code" or "database credentials accessible in configuration") are expected behavior. Reports must +include a proof-of-concept that demonstrates how the finding violates the security model described +in this document, including identifying the specific user role involved and the attack scenario. diff --git a/airflow-core/docs/security/workload.rst b/airflow-core/docs/security/workload.rst index 31714aa21fbb2..0496cddc7f54a 100644 --- a/airflow-core/docs/security/workload.rst +++ b/airflow-core/docs/security/workload.rst @@ -50,3 +50,86 @@ not set. [core] default_impersonation = airflow + +.. _workload-isolation: + +Workload Isolation and Current Limitations +------------------------------------------ + +This section describes the current state of workload isolation in Apache Airflow, +including the protections that are in place, the known limitations, and planned improvements. + +For the full security model and deployment hardening guidance, see :doc:`/security/security_model`. +For details on the JWT authentication flows used by workers and internal components, see +:doc:`/security/jwt_token_authentication`. 
+
+Worker process memory protection (Linux)
+''''''''''''''''''''''''''''''''''''''''
+
+On Linux, the supervisor process calls ``prctl(PR_SET_DUMPABLE, 0)`` at the start of
+``supervise()`` before forking the task process. This flag is inherited by the forked
+child. Marking processes as non-dumpable prevents same-UID sibling processes from reading
+``/proc/<pid>/mem``, ``/proc/<pid>/environ``, or ``/proc/<pid>/maps``, and blocks
+``ptrace(PTRACE_ATTACH)``. This is critical because each supervisor holds a distinct JWT
+token in memory — without this protection, a malicious task process running as the same
+Unix user could steal tokens from sibling supervisor processes.
+
+This protection is one of the reasons that passing sensitive configuration via environment
+variables is safer than via configuration files: environment variables are only readable
+by the process itself (and root), whereas configuration files on disk are readable by any
+process with filesystem access running as the same user.
+
+.. note::
+
+   This protection is Linux-specific. On non-Linux platforms, the
+   ``_make_process_nondumpable()`` call is a no-op. Deployment Managers running Airflow
+   on non-Linux platforms should implement alternative isolation measures.
+
+No cross-workload isolation
+'''''''''''''''''''''''''''
+
+All worker workloads authenticate to the same Execution API with tokens that share the
+same signing key, audience, and issuer. While the ``ti:self`` scope enforcement prevents
+a worker from accessing *another task instance's* specific endpoints (e.g., heartbeat,
+state transitions), the token grants access to shared resources such as connections,
+variables, and XComs that are not scoped to individual tasks.
+ +No team-level isolation in Execution API (experimental multi-team feature) +'''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''' + +The experimental multi-team feature (``[core] multi_team``) provides UI-level and REST +API-level RBAC isolation between teams, but **does not yet guarantee task-level isolation**. +At the Execution API level, there is no enforcement of team-based access boundaries. +A task from one team can access the same connections, variables, and XComs as a task from +another team. All workloads share the same JWT signing keys and audience regardless of team +assignment. + +In deployments where additional hardening measures are not implemented at the deployment +level, a task from one team can potentially access resources belonging to another team +(see :doc:`/security/security_model`). A deep understanding of configuration and deployment +security is required by Deployment Managers to configure it in a way that can guarantee +separation between teams. Task-level team isolation will be improved in future versions +of Airflow. + +Dag File Processor and Triggerer potentially bypass JWT and access the database +''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''' + +As described in :doc:`/security/jwt_token_authentication`, the default deployment runs a +single Dag File Processor and a single Triggerer for all teams. Both potentially bypass +JWT authentication via in-process transport. For multi-team isolation, Deployment Managers +must run separate instances per team, but even then, each instance potentially retains +direct database access. A Dag author whose code runs in these components can potentially +access the database directly — including data belonging to other teams or the JWT signing +key configuration — unless the Deployment Manager restricts the database credentials and +configuration available to each instance. 
+ +Planned improvements +'''''''''''''''''''' + +Future versions of Airflow will address these limitations with: + +- Finer-grained token scopes tied to specific resources (connections, variables) and teams. +- Enforcement of team-based isolation in the Execution API. +- Built-in support for per-team Dag File Processor and Triggerer instances. +- Improved sandboxing of user-submitted code in the Dag File Processor and Triggerer. +- Full task-level isolation for the multi-team feature. diff --git a/airflow-core/newsfragments/64067.bugfix.rst b/airflow-core/newsfragments/64067.bugfix.rst new file mode 100644 index 0000000000000..8ae9a97f5441b --- /dev/null +++ b/airflow-core/newsfragments/64067.bugfix.rst @@ -0,0 +1 @@ +Restore live stdout logging for Elasticsearch in Airflow 3 by correctly configuring the handler in ``airflow_local_settings.py`` and forwarding task logs to stdout in ``LocalExecutor``. diff --git a/airflow-core/newsfragments/64552.improvement.rst b/airflow-core/newsfragments/64552.improvement.rst new file mode 100644 index 0000000000000..ae70554cd22ee --- /dev/null +++ b/airflow-core/newsfragments/64552.improvement.rst @@ -0,0 +1 @@ +Allow UI theme config with only CSS overrides, icon only, or empty ``{}`` to restore OSS defaults. The ``tokens`` field is now optional in the theme configuration. 
diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/package.json b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/package.json index c4f3bc0cf41b1..85217c6ad39d1 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/package.json +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/package.json @@ -75,7 +75,22 @@ "minimatch@<10.2.3": ">=10.2.3", "ajv@<6.14.0": ">=6.14.0", "rollup@>=4.0.0 <4.59.0": ">=4.59.0", - "flatted@<3.4.0": ">=3.4.0" + "flatted@<=3.4.1": ">=3.4.2", + "happy-dom@>=15.10.0 <=20.8.7": ">=20.8.8", + "picomatch@>=4.0.0 <4.0.4": ">=4.0.4", + "brace-expansion@>=4.0.0 <5.0.5": ">=5.0.5", + "lodash-es@>=4.0.0 <=4.17.23": ">=4.18.0", + "lodash@>=4.0.0 <=4.17.23": ">=4.18.0", + "lodash-es@<=4.17.23": ">=4.18.0", + "lodash@<=4.17.23": ">=4.18.0", + "defu@<=6.1.4": ">=6.1.5", + "handlebars@>=4.0.0 <=4.7.8": ">=4.7.9", + "handlebars@>=4.0.0 <4.7.9": ">=4.7.9", + "yaml@>=1.0.0 <1.10.3": ">=1.10.3", + "handlebars@>=4.6.0 <=4.7.8": ">=4.7.9", + "happy-dom@<20.8.9": ">=20.8.9", + "vite@>=7.0.0 <=7.3.1": ">=7.3.2", + "vite@>=7.1.0 <=7.3.1": ">=7.3.2" } } } diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/pnpm-lock.yaml b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/pnpm-lock.yaml index 2b18466c3b8f7..c0b82ea335ae6 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/pnpm-lock.yaml +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui/pnpm-lock.yaml @@ -10,7 +10,22 @@ overrides: minimatch@<10.2.3: '>=10.2.3' ajv@<6.14.0: '>=6.14.0' rollup@>=4.0.0 <4.59.0: '>=4.59.0' - flatted@<3.4.0: '>=3.4.0' + flatted@<=3.4.1: '>=3.4.2' + happy-dom@>=15.10.0 <=20.8.7: '>=20.8.8' + picomatch@>=4.0.0 <4.0.4: '>=4.0.4' + brace-expansion@>=4.0.0 <5.0.5: '>=5.0.5' + lodash-es@>=4.0.0 <=4.17.23: '>=4.18.0' + lodash@>=4.0.0 <=4.17.23: '>=4.18.0' + lodash-es@<=4.17.23: '>=4.18.0' + lodash@<=4.17.23: '>=4.18.0' + defu@<=6.1.4: '>=6.1.5' + 
handlebars@>=4.0.0 <=4.7.8: '>=4.7.9' + handlebars@>=4.0.0 <4.7.9: '>=4.7.9' + yaml@>=1.0.0 <1.10.3: '>=1.10.3' + handlebars@>=4.6.0 <=4.7.8: '>=4.7.9' + happy-dom@<20.8.9: '>=20.8.9' + vite@>=7.0.0 <=7.3.1: '>=7.3.2' + vite@>=7.1.0 <=7.3.1: '>=7.3.2' importers: @@ -88,10 +103,10 @@ importers: version: 8.57.0(eslint@10.0.3(jiti@2.6.1))(typescript@5.9.3) '@vitejs/plugin-react-swc': specifier: ^4.3.0 - version: 4.3.0(@swc/helpers@0.5.19)(vite@7.3.1(@types/node@25.3.5)(jiti@2.6.1)) + version: 4.3.0(@swc/helpers@0.5.19)(vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@25.3.5)(esbuild@0.27.2)(jiti@2.6.1)(yaml@2.8.3)) '@vitest/coverage-v8': specifier: ^4.1.0 - version: 4.1.0(vitest@4.1.0(@types/node@25.3.5)(happy-dom@20.8.3)(vite@7.3.1(@types/node@25.3.5)(jiti@2.6.1))) + version: 4.1.0(vitest@4.1.0(@types/node@25.3.5)(happy-dom@20.8.9)(vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@25.3.5)(esbuild@0.27.2)(jiti@2.6.1)(yaml@2.8.3))) eslint: specifier: ^10.0.3 version: 10.0.3(jiti@2.6.1) @@ -120,8 +135,8 @@ importers: specifier: ^63.0.0 version: 63.0.0(eslint@10.0.3(jiti@2.6.1)) happy-dom: - specifier: ^20.8.3 - version: 20.8.3 + specifier: '>=20.8.9' + version: 20.8.9 prettier: specifier: ^3.8.1 version: 3.8.1 @@ -135,14 +150,14 @@ importers: specifier: ^8.57.0 version: 8.57.0(eslint@10.0.3(jiti@2.6.1))(typescript@5.9.3) vite: - specifier: ^7.3.1 - version: 7.3.1(@types/node@25.3.5)(jiti@2.6.1) + specifier: '>=7.3.2' + version: 8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@25.3.5)(esbuild@0.27.2)(jiti@2.6.1)(yaml@2.8.3) vite-plugin-css-injected-by-js: specifier: ^4.0.1 - version: 4.0.1(vite@7.3.1(@types/node@25.3.5)(jiti@2.6.1)) + version: 4.0.1(vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@25.3.5)(esbuild@0.27.2)(jiti@2.6.1)(yaml@2.8.3)) vitest: specifier: ^4.1.0 - version: 4.1.0(@types/node@25.3.5)(happy-dom@20.8.3)(vite@7.3.1(@types/node@25.3.5)(jiti@2.6.1)) + version: 
4.1.0(@types/node@25.3.5)(happy-dom@20.8.9)(vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@25.3.5)(esbuild@0.27.2)(jiti@2.6.1)(yaml@2.8.3)) packages: @@ -288,6 +303,15 @@ packages: react: '>=18' react-dom: '>=18' + '@emnapi/core@1.9.2': + resolution: {integrity: sha512-UC+ZhH3XtczQYfOlu3lNEkdW/p4dsJ1r/bP7H8+rhao3TTTMO1ATq/4DdIi23XuGoFY+Cz0JmCbdVl0hz9jZcA==} + + '@emnapi/runtime@1.9.2': + resolution: {integrity: sha512-3U4+MIWHImeyu1wnmVygh5WlgfYDtyf0k8AbLhMFxOipihf6nrWC4syIm/SwEeec0mNSafiiNnMJwbza/Is6Lw==} + + '@emnapi/wasi-threads@1.2.1': + resolution: {integrity: sha512-uTII7OYF+/Mes/MrcIOYp5yOtSMLBWSIoLPpcgwipoiKbli6k322tcoFsxoIIxPDqW01SQGAgko4EzZi2BNv2w==} + '@emotion/babel-plugin@11.13.5': resolution: {integrity: sha512-pxHCpT2ex+0q+HH91/zsdHkw/lXd468DIN2zvfvLtPKLLMo6gQj7oLObq8PhkrxOZb/gGCq03S3Z7PDhS8pduQ==} @@ -636,6 +660,15 @@ packages: '@jsdevtools/ono@7.1.3': resolution: {integrity: sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==} + '@napi-rs/wasm-runtime@1.1.2': + resolution: {integrity: sha512-sNXv5oLJ7ob93xkZ1XnxisYhGYXfaG9f65/ZgYuAu3qt7b3NadcOEhLvx28hv31PgX8SZJRYrAIPQilQmFpLVw==} + peerDependencies: + '@emnapi/core': ^1.7.1 + '@emnapi/runtime': ^1.7.1 + + '@oxc-project/types@0.122.0': + resolution: {integrity: sha512-oLAl5kBpV4w69UtFZ9xqcmTi+GENWOcPF7FCrczTiBbmC0ibXxCwyvZGbO39rCVEuLGAZM84DH0pUIyyv/YJzA==} + '@pandacss/is-valid-prop@1.9.0': resolution: {integrity: sha512-AZvpXWGyjbHc8TC+YVloQ31Z2c4j2xMvYj6UfVxuZdB5w4c9+4N8wy5R7I/XswNh8e4cfUlkvsEGDXjhJRgypw==} @@ -643,133 +676,106 @@ packages: resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} - '@rolldown/pluginutils@1.0.0-rc.7': - resolution: {integrity: sha512-qujRfC8sFVInYSPPMLQByRh7zhwkGFS4+tyMQ83srV1qrxL4g8E2tyxVVyxd0+8QeBM1mIk9KbWxkegRr76XzA==} - - '@rollup/rollup-android-arm-eabi@4.59.0': - resolution: 
{integrity: sha512-upnNBkA6ZH2VKGcBj9Fyl9IGNPULcjXRlg0LLeaioQWueH30p6IXtJEbKAgvyv+mJaMxSm1l6xwDXYjpEMiLMg==} - cpu: [arm] - os: [android] - - '@rollup/rollup-android-arm64@4.59.0': - resolution: {integrity: sha512-hZ+Zxj3SySm4A/DylsDKZAeVg0mvi++0PYVceVyX7hemkw7OreKdCvW2oQ3T1FMZvCaQXqOTHb8qmBShoqk69Q==} + '@rolldown/binding-android-arm64@1.0.0-rc.12': + resolution: {integrity: sha512-pv1y2Fv0JybcykuiiD3qBOBdz6RteYojRFY1d+b95WVuzx211CRh+ytI/+9iVyWQ6koTh5dawe4S/yRfOFjgaA==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [android] - '@rollup/rollup-darwin-arm64@4.59.0': - resolution: {integrity: sha512-W2Psnbh1J8ZJw0xKAd8zdNgF9HRLkdWwwdWqubSVk0pUuQkoHnv7rx4GiF9rT4t5DIZGAsConRE3AxCdJ4m8rg==} + '@rolldown/binding-darwin-arm64@1.0.0-rc.12': + resolution: {integrity: sha512-cFYr6zTG/3PXXF3pUO+umXxt1wkRK/0AYT8lDwuqvRC+LuKYWSAQAQZjCWDQpAH172ZV6ieYrNnFzVVcnSflAg==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [darwin] - '@rollup/rollup-darwin-x64@4.59.0': - resolution: {integrity: sha512-ZW2KkwlS4lwTv7ZVsYDiARfFCnSGhzYPdiOU4IM2fDbL+QGlyAbjgSFuqNRbSthybLbIJ915UtZBtmuLrQAT/w==} + '@rolldown/binding-darwin-x64@1.0.0-rc.12': + resolution: {integrity: sha512-ZCsYknnHzeXYps0lGBz8JrF37GpE9bFVefrlmDrAQhOEi4IOIlcoU1+FwHEtyXGx2VkYAvhu7dyBf75EJQffBw==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [darwin] - '@rollup/rollup-freebsd-arm64@4.59.0': - resolution: {integrity: sha512-EsKaJ5ytAu9jI3lonzn3BgG8iRBjV4LxZexygcQbpiU0wU0ATxhNVEpXKfUa0pS05gTcSDMKpn3Sx+QB9RlTTA==} - cpu: [arm64] - os: [freebsd] - - '@rollup/rollup-freebsd-x64@4.59.0': - resolution: {integrity: sha512-d3DuZi2KzTMjImrxoHIAODUZYoUUMsuUiY4SRRcJy6NJoZ6iIqWnJu9IScV9jXysyGMVuW+KNzZvBLOcpdl3Vg==} + '@rolldown/binding-freebsd-x64@1.0.0-rc.12': + resolution: {integrity: sha512-dMLeprcVsyJsKolRXyoTH3NL6qtsT0Y2xeuEA8WQJquWFXkEC4bcu1rLZZSnZRMtAqwtrF/Ib9Ddtpa/Gkge9Q==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [freebsd] - '@rollup/rollup-linux-arm-gnueabihf@4.59.0': - resolution: 
{integrity: sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw==} - cpu: [arm] - os: [linux] - - '@rollup/rollup-linux-arm-musleabihf@4.59.0': - resolution: {integrity: sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA==} + '@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.12': + resolution: {integrity: sha512-YqWjAgGC/9M1lz3GR1r1rP79nMgo3mQiiA+Hfo+pvKFK1fAJ1bCi0ZQVh8noOqNacuY1qIcfyVfP6HoyBRZ85Q==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm64-gnu@4.59.0': - resolution: {integrity: sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA==} + '@rolldown/binding-linux-arm64-gnu@1.0.0-rc.12': + resolution: {integrity: sha512-/I5AS4cIroLpslsmzXfwbe5OmWvSsrFuEw3mwvbQ1kDxJ822hFHIx+vsN/TAzNVyepI/j/GSzrtCIwQPeKCLIg==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [linux] + libc: [glibc] - '@rollup/rollup-linux-arm64-musl@4.59.0': - resolution: {integrity: sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA==} + '@rolldown/binding-linux-arm64-musl@1.0.0-rc.12': + resolution: {integrity: sha512-V6/wZztnBqlx5hJQqNWwFdxIKN0m38p8Jas+VoSfgH54HSj9tKTt1dZvG6JRHcjh6D7TvrJPWFGaY9UBVOaWPw==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [linux] + libc: [musl] - '@rollup/rollup-linux-loong64-gnu@4.59.0': - resolution: {integrity: sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg==} - cpu: [loong64] - os: [linux] - - '@rollup/rollup-linux-loong64-musl@4.59.0': - resolution: {integrity: sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q==} - cpu: [loong64] - os: [linux] - - '@rollup/rollup-linux-ppc64-gnu@4.59.0': - resolution: {integrity: sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA==} - cpu: [ppc64] - os: [linux] - - 
'@rollup/rollup-linux-ppc64-musl@4.59.0': - resolution: {integrity: sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA==} + '@rolldown/binding-linux-ppc64-gnu@1.0.0-rc.12': + resolution: {integrity: sha512-AP3E9BpcUYliZCxa3w5Kwj9OtEVDYK6sVoUzy4vTOJsjPOgdaJZKFmN4oOlX0Wp0RPV2ETfmIra9x1xuayFB7g==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [ppc64] os: [linux] + libc: [glibc] - '@rollup/rollup-linux-riscv64-gnu@4.59.0': - resolution: {integrity: sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg==} - cpu: [riscv64] - os: [linux] - - '@rollup/rollup-linux-riscv64-musl@4.59.0': - resolution: {integrity: sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg==} - cpu: [riscv64] - os: [linux] - - '@rollup/rollup-linux-s390x-gnu@4.59.0': - resolution: {integrity: sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w==} + '@rolldown/binding-linux-s390x-gnu@1.0.0-rc.12': + resolution: {integrity: sha512-nWwpvUSPkoFmZo0kQazZYOrT7J5DGOJ/+QHHzjvNlooDZED8oH82Yg67HvehPPLAg5fUff7TfWFHQS8IV1n3og==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [s390x] os: [linux] + libc: [glibc] - '@rollup/rollup-linux-x64-gnu@4.59.0': - resolution: {integrity: sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg==} + '@rolldown/binding-linux-x64-gnu@1.0.0-rc.12': + resolution: {integrity: sha512-RNrafz5bcwRy+O9e6P8Z/OCAJW/A+qtBczIqVYwTs14pf4iV1/+eKEjdOUta93q2TsT/FI0XYDP3TCky38LMAg==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [linux] + libc: [glibc] - '@rollup/rollup-linux-x64-musl@4.59.0': - resolution: {integrity: sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg==} + '@rolldown/binding-linux-x64-musl@1.0.0-rc.12': + resolution: {integrity: sha512-Jpw/0iwoKWx3LJ2rc1yjFrj+T7iHZn2JDg1Yny1ma0luviFS4mhAIcd1LFNxK3EYu3DHWCps0ydXQ5i/rrJ2ig==} 
+ engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [linux] + libc: [musl] - '@rollup/rollup-openbsd-x64@4.59.0': - resolution: {integrity: sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ==} - cpu: [x64] - os: [openbsd] - - '@rollup/rollup-openharmony-arm64@4.59.0': - resolution: {integrity: sha512-tt9KBJqaqp5i5HUZzoafHZX8b5Q2Fe7UjYERADll83O4fGqJ49O1FsL6LpdzVFQcpwvnyd0i+K/VSwu/o/nWlA==} + '@rolldown/binding-openharmony-arm64@1.0.0-rc.12': + resolution: {integrity: sha512-vRugONE4yMfVn0+7lUKdKvN4D5YusEiPilaoO2sgUWpCvrncvWgPMzK00ZFFJuiPgLwgFNP5eSiUlv2tfc+lpA==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [openharmony] - '@rollup/rollup-win32-arm64-msvc@4.59.0': - resolution: {integrity: sha512-V5B6mG7OrGTwnxaNUzZTDTjDS7F75PO1ae6MJYdiMu60sq0CqN5CVeVsbhPxalupvTX8gXVSU9gq+Rx1/hvu6A==} - cpu: [arm64] - os: [win32] + '@rolldown/binding-wasm32-wasi@1.0.0-rc.12': + resolution: {integrity: sha512-ykGiLr/6kkiHc0XnBfmFJuCjr5ZYKKofkx+chJWDjitX+KsJuAmrzWhwyOMSHzPhzOHOy7u9HlFoa5MoAOJ/Zg==} + engines: {node: '>=14.0.0'} + cpu: [wasm32] - '@rollup/rollup-win32-ia32-msvc@4.59.0': - resolution: {integrity: sha512-UKFMHPuM9R0iBegwzKF4y0C4J9u8C6MEJgFuXTBerMk7EJ92GFVFYBfOZaSGLu6COf7FxpQNqhNS4c4icUPqxA==} - cpu: [ia32] + '@rolldown/binding-win32-arm64-msvc@1.0.0-rc.12': + resolution: {integrity: sha512-5eOND4duWkwx1AzCxadcOrNeighiLwMInEADT0YM7xeEOOFcovWZCq8dadXgcRHSf3Ulh1kFo/qvzoFiCLOL1Q==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] os: [win32] - '@rollup/rollup-win32-x64-gnu@4.59.0': - resolution: {integrity: sha512-laBkYlSS1n2L8fSo1thDNGrCTQMmxjYY5G0WFWjFFYZkKPjsMBsgJfGf4TLxXrF6RyhI60L8TMOjBMvXiTcxeA==} + '@rolldown/binding-win32-x64-msvc@1.0.0-rc.12': + resolution: {integrity: sha512-PyqoipaswDLAZtot351MLhrlrh6lcZPo2LSYE+VDxbVk24LVKAGOuE4hb8xZQmrPAuEtTZW8E6D2zc5EUZX4Lw==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [win32] - '@rollup/rollup-win32-x64-msvc@4.59.0': - resolution: {integrity: 
sha512-2HRCml6OztYXyJXAvdDXPKcawukWY2GpR5/nxKp4iBgiO3wcoEGkAaqctIbZcNB6KlUQBIqt8VYkNSj2397EfA==} - cpu: [x64] - os: [win32] + '@rolldown/pluginutils@1.0.0-rc.12': + resolution: {integrity: sha512-HHMwmarRKvoFsJorqYlFeFRzXZqCt2ETQlEDOb9aqssrnVBB1/+xgTGtuTrIk5vzLNX1MjMtTf7W9z3tsSbrxw==} + + '@rolldown/pluginutils@1.0.0-rc.7': + resolution: {integrity: sha512-qujRfC8sFVInYSPPMLQByRh7zhwkGFS4+tyMQ83srV1qrxL4g8E2tyxVVyxd0+8QeBM1mIk9KbWxkegRr76XzA==} '@standard-schema/spec@1.1.0': resolution: {integrity: sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==} @@ -803,24 +809,28 @@ packages: engines: {node: '>=10'} cpu: [arm64] os: [linux] + libc: [glibc] '@swc/core-linux-arm64-musl@1.15.18': resolution: {integrity: sha512-0a+Lix+FSSHBSBOA0XznCcHo5/1nA6oLLjcnocvzXeqtdjnPb+SvchItHI+lfeiuj1sClYPDvPMLSLyXFaiIKw==} engines: {node: '>=10'} cpu: [arm64] os: [linux] + libc: [musl] '@swc/core-linux-x64-gnu@1.15.18': resolution: {integrity: sha512-wG9J8vReUlpaHz4KOD/5UE1AUgirimU4UFT9oZmupUDEofxJKYb1mTA/DrMj0s78bkBiNI+7Fo2EgPuvOJfuAA==} engines: {node: '>=10'} cpu: [x64] os: [linux] + libc: [glibc] '@swc/core-linux-x64-musl@1.15.18': resolution: {integrity: sha512-4nwbVvCphKzicwNWRmvD5iBaZj8JYsRGa4xOxJmOyHlMDpsvvJ2OR2cODlvWyGFH6BYL1MfIAK3qph3hp0Az6g==} engines: {node: '>=10'} cpu: [x64] os: [linux] + libc: [musl] '@swc/core-win32-arm64-msvc@1.15.18': resolution: {integrity: sha512-zk0RYO+LjiBCat2RTMHzAWaMky0cra9loH4oRrLKLLNuL+jarxKLFDA8xTZWEkCPLjUTwlRN7d28eDLLMgtUcQ==} @@ -911,6 +921,9 @@ packages: '@ts-morph/common@0.28.1': resolution: {integrity: sha512-W74iWf7ILp1ZKNYXY5qbddNaml7e9Sedv5lvU1V8lftlitkc9Pq1A+jlH23ltDgWYeZFFEqGCD1Ies9hqu3O+g==} + '@tybys/wasm-util@0.10.1': + resolution: {integrity: sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==} + '@types/aria-query@5.0.4': resolution: {integrity: sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==} 
@@ -1015,7 +1028,7 @@ packages: resolution: {integrity: sha512-mOkXCII839dHyAt/gpoSlm28JIVDwhZ6tnG6wJxUy2bmOx7UaPjvOyIDf3SFv5s7Eo7HVaq6kRcu6YMEzt5Z7w==} engines: {node: ^20.19.0 || >=22.12.0} peerDependencies: - vite: ^4 || ^5 || ^6 || ^7 || ^8 + vite: '>=7.3.2' '@vitest/coverage-v8@4.1.0': resolution: {integrity: sha512-nDWulKeik2bL2Va/Wl4x7DLuTKAXa906iRFooIRPR+huHkcvp9QDkPQ2RJdmjOFrqOqvNfoSQLF68deE3xC3CQ==} @@ -1033,7 +1046,7 @@ packages: resolution: {integrity: sha512-evxREh+Hork43+Y4IOhTo+h5lGmVRyjqI739Rz4RlUPqwrkFFDF6EMvOOYjTx4E8Tl6gyCLRL8Mu7Ry12a13Tw==} peerDependencies: msw: ^2.4.9 - vite: ^6.0.0 || ^7.0.0 || ^8.0.0-0 + vite: '>=7.3.2' peerDependenciesMeta: msw: optional: true @@ -1395,8 +1408,8 @@ packages: resolution: {integrity: sha512-ipDqC8FrAl/76p2SSWKSI+H9tFwm7vYqXQrItCuiVPt26Km0jS+NzSsBWAaBusvSbQcfJG+JitdMm+wZAgTYqg==} hasBin: true - brace-expansion@5.0.4: - resolution: {integrity: sha512-h+DEnpVvxmfVefa4jFbCf5HdH5YMDXRsmKflpf1pILZWRFlTbJpxeU55nJl4Smt5HQaGzg1o6RHFPJaOqnmBDg==} + brace-expansion@5.0.5: + resolution: {integrity: sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==} engines: {node: 18 || 20 || >=22} browserslist@4.28.1: @@ -1598,8 +1611,8 @@ packages: resolution: {integrity: sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==} engines: {node: '>= 0.4'} - defu@6.1.4: - resolution: {integrity: sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==} + defu@6.1.6: + resolution: {integrity: sha512-f8mefEW4WIVg4LckePx3mALjQSPQgFlg9U8yaPdlsbdYcHQyj9n2zL2LJEA52smeYxOvmd/nB7TpMtHGMTHcug==} delayed-stream@1.0.0: resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} @@ -1612,6 +1625,10 @@ packages: destr@2.0.5: resolution: {integrity: sha512-ugFTXCtDZunbzasqBxrK93Ik/DRYsO6S/fedkWEMKqt04xZ4csmnmwGDBAb07QWNaGMAmnTIemsYZCksjATwsA==} + detect-libc@2.1.2: + 
resolution: {integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==} + engines: {node: '>=8'} + doctrine@2.1.0: resolution: {integrity: sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==} engines: {node: '>=0.10.0'} @@ -1830,7 +1847,7 @@ packages: resolution: {integrity: sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==} engines: {node: '>=12.0.0'} peerDependencies: - picomatch: ^3 || ^4 + picomatch: '>=4.0.4' peerDependenciesMeta: picomatch: optional: true @@ -1854,8 +1871,8 @@ packages: resolution: {integrity: sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==} engines: {node: '>=16'} - flatted@3.4.1: - resolution: {integrity: sha512-IxfVbRFVlV8V/yRaGzk0UVIcsKKHMSfYw66T/u4nTwlWteQePsxe//LjudR1AMX4tZW3WFCh3Zqa/sjlqpbURQ==} + flatted@3.4.2: + resolution: {integrity: sha512-PjDse7RzhcPkIJwy5t7KPWQSZ9cAbzQXcafsetQoD7sOJRQlGikNbx7yZp2OotDnJyrDcbyRq3Ttb18iYOqkxA==} follow-redirects@1.15.11: resolution: {integrity: sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==} @@ -1929,13 +1946,13 @@ packages: resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==} engines: {node: '>= 0.4'} - handlebars@4.7.8: - resolution: {integrity: sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==} + handlebars@4.7.9: + resolution: {integrity: sha512-4E71E0rpOaQuJR2A3xDZ+GM1HyWYv1clR58tC8emQNeQe3RH7MAzSbat+V0wG78LQBo6m6bzSG/L4pBuCsgnUQ==} engines: {node: '>=0.4.7'} hasBin: true - happy-dom@20.8.3: - resolution: {integrity: sha512-lMHQRRwIPyJ70HV0kkFT7jH/gXzSI7yDkQFe07E2flwmNDFoWUTRMKpW2sglsnpeA7b6S2TJPp98EbQxai8eaQ==} + happy-dom@20.8.9: + resolution: {integrity: 
sha512-Tz23LR9T9jOGVZm2x1EPdXqwA37G/owYMxRwU0E4miurAtFsPMQ1d2Jc2okUaSjZqAFz2oEn3FLXC5a0a+siyA==} engines: {node: '>=20.0.0'} has-bigints@1.1.0: @@ -2201,6 +2218,80 @@ packages: resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} engines: {node: '>= 0.8.0'} + lightningcss-android-arm64@1.32.0: + resolution: {integrity: sha512-YK7/ClTt4kAK0vo6w3X+Pnm0D2cf2vPHbhOXdoNti1Ga0al1P4TBZhwjATvjNwLEBCnKvjJc2jQgHXH0NEwlAg==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [android] + + lightningcss-darwin-arm64@1.32.0: + resolution: {integrity: sha512-RzeG9Ju5bag2Bv1/lwlVJvBE3q6TtXskdZLLCyfg5pt+HLz9BqlICO7LZM7VHNTTn/5PRhHFBSjk5lc4cmscPQ==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [darwin] + + lightningcss-darwin-x64@1.32.0: + resolution: {integrity: sha512-U+QsBp2m/s2wqpUYT/6wnlagdZbtZdndSmut/NJqlCcMLTWp5muCrID+K5UJ6jqD2BFshejCYXniPDbNh73V8w==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [darwin] + + lightningcss-freebsd-x64@1.32.0: + resolution: {integrity: sha512-JCTigedEksZk3tHTTthnMdVfGf61Fky8Ji2E4YjUTEQX14xiy/lTzXnu1vwiZe3bYe0q+SpsSH/CTeDXK6WHig==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [freebsd] + + lightningcss-linux-arm-gnueabihf@1.32.0: + resolution: {integrity: sha512-x6rnnpRa2GL0zQOkt6rts3YDPzduLpWvwAF6EMhXFVZXD4tPrBkEFqzGowzCsIWsPjqSK+tyNEODUBXeeVHSkw==} + engines: {node: '>= 12.0.0'} + cpu: [arm] + os: [linux] + + lightningcss-linux-arm64-gnu@1.32.0: + resolution: {integrity: sha512-0nnMyoyOLRJXfbMOilaSRcLH3Jw5z9HDNGfT/gwCPgaDjnx0i8w7vBzFLFR1f6CMLKF8gVbebmkUN3fa/kQJpQ==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [linux] + libc: [glibc] + + lightningcss-linux-arm64-musl@1.32.0: + resolution: {integrity: sha512-UpQkoenr4UJEzgVIYpI80lDFvRmPVg6oqboNHfoH4CQIfNA+HOrZ7Mo7KZP02dC6LjghPQJeBsvXhJod/wnIBg==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [linux] + libc: [musl] + + lightningcss-linux-x64-gnu@1.32.0: + resolution: {integrity: 
sha512-V7Qr52IhZmdKPVr+Vtw8o+WLsQJYCTd8loIfpDaMRWGUZfBOYEJeyJIkqGIDMZPwPx24pUMfwSxxI8phr/MbOA==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [linux] + libc: [glibc] + + lightningcss-linux-x64-musl@1.32.0: + resolution: {integrity: sha512-bYcLp+Vb0awsiXg/80uCRezCYHNg1/l3mt0gzHnWV9XP1W5sKa5/TCdGWaR/zBM2PeF/HbsQv/j2URNOiVuxWg==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [linux] + libc: [musl] + + lightningcss-win32-arm64-msvc@1.32.0: + resolution: {integrity: sha512-8SbC8BR40pS6baCM8sbtYDSwEVQd4JlFTOlaD3gWGHfThTcABnNDBda6eTZeqbofalIJhFx0qKzgHJmcPTnGdw==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [win32] + + lightningcss-win32-x64-msvc@1.32.0: + resolution: {integrity: sha512-Amq9B/SoZYdDi1kFrojnoqPLxYhQ4Wo5XiL8EVJrVsB8ARoC1PWW6VGtT0WKCemjy8aC+louJnjS7U18x3b06Q==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [win32] + + lightningcss@1.32.0: + resolution: {integrity: sha512-NXYBzinNrblfraPGyrbPoD19C1h9lfI/1mzgWYvXUTe414Gz/X1FD2XBZSZM7rRTrMA8JL3OtAaGifrIKhQ5yQ==} + engines: {node: '>= 12.0.0'} + lines-and-columns@1.2.4: resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} @@ -2208,8 +2299,8 @@ packages: resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} engines: {node: '>=10'} - lodash-es@4.17.23: - resolution: {integrity: sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg==} + lodash-es@4.18.1: + resolution: {integrity: sha512-J8xewKD/Gk22OZbhpOVSwcs60zhd95ESDwezOFuA3/099925PdHJ7OFHNTGtajL3AlZkykD32HykiMo+BIBI8A==} loose-envify@1.4.0: resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} @@ -2416,8 +2507,8 @@ packages: picocolors@1.1.1: resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} - picomatch@4.0.3: - resolution: 
{integrity: sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==} + picomatch@4.0.4: + resolution: {integrity: sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==} engines: {node: '>=12'} pkg-types@1.3.1: @@ -2434,8 +2525,8 @@ packages: resolution: {integrity: sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==} engines: {node: '>= 0.4'} - postcss@8.5.6: - resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==} + postcss@8.5.8: + resolution: {integrity: sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==} engines: {node: ^10 || ^12 || >=14} powershell-utils@0.1.0: @@ -2562,9 +2653,9 @@ packages: resolution: {integrity: sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA==} hasBin: true - rollup@4.59.0: - resolution: {integrity: sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==} - engines: {node: '>=18.0.0', npm: '>=8.0.0'} + rolldown@1.0.0-rc.12: + resolution: {integrity: sha512-yP4USLIMYrwpPHEFB5JGH1uxhcslv6/hL0OyvTuY+3qlOSJvZ7ntYnoWpehBxufkgN0cvXxppuTu5hHa/zPh+A==} + engines: {node: ^20.19.0 || >=22.12.0} hasBin: true run-applescript@7.1.0: @@ -2805,17 +2896,18 @@ packages: vite-plugin-css-injected-by-js@4.0.1: resolution: {integrity: sha512-WfyRojojQyAO/KzWf+efcXpTPv6zJPXaRmr9EYq5a4v5I3PWCR7kR01hiri2lW6+rHm3a57kpwsf+iahIJi1Qw==} peerDependencies: - vite: '>2.0.0-0' + vite: '>=7.3.2' - vite@7.3.1: - resolution: {integrity: sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==} + vite@8.0.5: + resolution: {integrity: sha512-nmu43Qvq9UopTRfMx2jOYW5l16pb3iDC1JH6yMuPkpVbzK0k+L7dfsEDH4jRgYFmsg0sTAqkojoZgzLMlwHsCQ==} engines: {node: ^20.19.0 || >=22.12.0} hasBin: true peerDependencies: '@types/node': 
^20.19.0 || >=22.12.0 + '@vitejs/devtools': ^0.1.0 + esbuild: ^0.27.0 || ^0.28.0 jiti: '>=1.21.0' less: ^4.0.0 - lightningcss: ^1.21.0 sass: ^1.70.0 sass-embedded: ^1.70.0 stylus: '>=0.54.8' @@ -2826,12 +2918,14 @@ packages: peerDependenciesMeta: '@types/node': optional: true + '@vitejs/devtools': + optional: true + esbuild: + optional: true jiti: optional: true less: optional: true - lightningcss: - optional: true sass: optional: true sass-embedded: @@ -2859,9 +2953,9 @@ packages: '@vitest/browser-preview': 4.1.0 '@vitest/browser-webdriverio': 4.1.0 '@vitest/ui': 4.1.0 - happy-dom: '*' + happy-dom: '>=20.8.9' jsdom: '*' - vite: ^6.0.0 || ^7.0.0 || ^8.0.0-0 + vite: '>=7.3.2' peerDependenciesMeta: '@edge-runtime/vm': optional: true @@ -2942,9 +3036,10 @@ packages: resolution: {integrity: sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==} engines: {node: '>=18'} - yaml@1.10.2: - resolution: {integrity: sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==} - engines: {node: '>= 6'} + yaml@2.8.3: + resolution: {integrity: sha512-AvbaCLOO2Otw/lW5bmh9d/WEdcDFdQp2Z2ZUH3pX9U2ihyUY0nvLv7J6TrWowklRGPYbB/IuIMfYgxaCPg5Bpg==} + engines: {node: '>= 14.6'} + hasBin: true yocto-queue@0.1.0: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} @@ -3225,6 +3320,22 @@ snapshots: react: 19.2.4 react-dom: 19.2.4(react@19.2.4) + '@emnapi/core@1.9.2': + dependencies: + '@emnapi/wasi-threads': 1.2.1 + tslib: 2.8.1 + optional: true + + '@emnapi/runtime@1.9.2': + dependencies: + tslib: 2.8.1 + optional: true + + '@emnapi/wasi-threads@1.2.1': + dependencies: + tslib: 2.8.1 + optional: true + '@emotion/babel-plugin@11.13.5': dependencies: '@babel/helper-module-imports': 7.28.6 @@ -3450,7 +3561,7 @@ snapshots: '@apidevtools/json-schema-ref-parser': 11.7.0 c12: 2.0.1(magicast@0.3.5) commander: 12.1.0 - handlebars: 4.7.8 + handlebars: 4.7.9 
typescript: 5.9.3 transitivePeerDependencies: - magicast @@ -3529,86 +3640,72 @@ snapshots: '@jsdevtools/ono@7.1.3': {} - '@pandacss/is-valid-prop@1.9.0': {} - - '@pkgr/core@0.2.9': {} - - '@rolldown/pluginutils@1.0.0-rc.7': {} - - '@rollup/rollup-android-arm-eabi@4.59.0': - optional: true - - '@rollup/rollup-android-arm64@4.59.0': - optional: true - - '@rollup/rollup-darwin-arm64@4.59.0': - optional: true - - '@rollup/rollup-darwin-x64@4.59.0': - optional: true - - '@rollup/rollup-freebsd-arm64@4.59.0': + '@napi-rs/wasm-runtime@1.1.2(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)': + dependencies: + '@emnapi/core': 1.9.2 + '@emnapi/runtime': 1.9.2 + '@tybys/wasm-util': 0.10.1 optional: true - '@rollup/rollup-freebsd-x64@4.59.0': - optional: true + '@oxc-project/types@0.122.0': {} - '@rollup/rollup-linux-arm-gnueabihf@4.59.0': - optional: true + '@pandacss/is-valid-prop@1.9.0': {} - '@rollup/rollup-linux-arm-musleabihf@4.59.0': - optional: true + '@pkgr/core@0.2.9': {} - '@rollup/rollup-linux-arm64-gnu@4.59.0': + '@rolldown/binding-android-arm64@1.0.0-rc.12': optional: true - '@rollup/rollup-linux-arm64-musl@4.59.0': + '@rolldown/binding-darwin-arm64@1.0.0-rc.12': optional: true - '@rollup/rollup-linux-loong64-gnu@4.59.0': + '@rolldown/binding-darwin-x64@1.0.0-rc.12': optional: true - '@rollup/rollup-linux-loong64-musl@4.59.0': + '@rolldown/binding-freebsd-x64@1.0.0-rc.12': optional: true - '@rollup/rollup-linux-ppc64-gnu@4.59.0': + '@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.12': optional: true - '@rollup/rollup-linux-ppc64-musl@4.59.0': + '@rolldown/binding-linux-arm64-gnu@1.0.0-rc.12': optional: true - '@rollup/rollup-linux-riscv64-gnu@4.59.0': + '@rolldown/binding-linux-arm64-musl@1.0.0-rc.12': optional: true - '@rollup/rollup-linux-riscv64-musl@4.59.0': + '@rolldown/binding-linux-ppc64-gnu@1.0.0-rc.12': optional: true - '@rollup/rollup-linux-s390x-gnu@4.59.0': + '@rolldown/binding-linux-s390x-gnu@1.0.0-rc.12': optional: true - 
'@rollup/rollup-linux-x64-gnu@4.59.0': + '@rolldown/binding-linux-x64-gnu@1.0.0-rc.12': optional: true - '@rollup/rollup-linux-x64-musl@4.59.0': + '@rolldown/binding-linux-x64-musl@1.0.0-rc.12': optional: true - '@rollup/rollup-openbsd-x64@4.59.0': + '@rolldown/binding-openharmony-arm64@1.0.0-rc.12': optional: true - '@rollup/rollup-openharmony-arm64@4.59.0': + '@rolldown/binding-wasm32-wasi@1.0.0-rc.12(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)': + dependencies: + '@napi-rs/wasm-runtime': 1.1.2(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2) + transitivePeerDependencies: + - '@emnapi/core' + - '@emnapi/runtime' optional: true - '@rollup/rollup-win32-arm64-msvc@4.59.0': + '@rolldown/binding-win32-arm64-msvc@1.0.0-rc.12': optional: true - '@rollup/rollup-win32-ia32-msvc@4.59.0': + '@rolldown/binding-win32-x64-msvc@1.0.0-rc.12': optional: true - '@rollup/rollup-win32-x64-gnu@4.59.0': - optional: true + '@rolldown/pluginutils@1.0.0-rc.12': {} - '@rollup/rollup-win32-x64-msvc@4.59.0': - optional: true + '@rolldown/pluginutils@1.0.0-rc.7': {} '@standard-schema/spec@1.1.0': {} @@ -3620,7 +3717,7 @@ snapshots: eslint-visitor-keys: 4.2.1 espree: 10.4.0 estraverse: 5.3.0 - picomatch: 4.0.3 + picomatch: 4.0.4 '@swc/core-darwin-arm64@1.15.18': optional: true @@ -3723,7 +3820,7 @@ snapshots: '@babel/traverse': 7.28.6 '@babel/types': 7.28.6 javascript-natural-sort: 0.7.1 - lodash-es: 4.17.23 + lodash-es: 4.18.1 minimatch: 10.2.4 parse-imports-exports: 0.2.4 prettier: 3.8.1 @@ -3736,6 +3833,11 @@ snapshots: path-browserify: 1.0.1 tinyglobby: 0.2.15 + '@tybys/wasm-util@0.10.1': + dependencies: + tslib: 2.8.1 + optional: true + '@types/aria-query@5.0.4': {} '@types/chai@5.2.3': @@ -3867,15 +3969,15 @@ snapshots: '@typescript-eslint/types': 8.57.0 eslint-visitor-keys: 5.0.1 - '@vitejs/plugin-react-swc@4.3.0(@swc/helpers@0.5.19)(vite@7.3.1(@types/node@25.3.5)(jiti@2.6.1))': + 
'@vitejs/plugin-react-swc@4.3.0(@swc/helpers@0.5.19)(vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@25.3.5)(esbuild@0.27.2)(jiti@2.6.1)(yaml@2.8.3))': dependencies: '@rolldown/pluginutils': 1.0.0-rc.7 '@swc/core': 1.15.18(@swc/helpers@0.5.19) - vite: 7.3.1(@types/node@25.3.5)(jiti@2.6.1) + vite: 8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@25.3.5)(esbuild@0.27.2)(jiti@2.6.1)(yaml@2.8.3) transitivePeerDependencies: - '@swc/helpers' - '@vitest/coverage-v8@4.1.0(vitest@4.1.0(@types/node@25.3.5)(happy-dom@20.8.3)(vite@7.3.1(@types/node@25.3.5)(jiti@2.6.1)))': + '@vitest/coverage-v8@4.1.0(vitest@4.1.0(@types/node@25.3.5)(happy-dom@20.8.9)(vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@25.3.5)(esbuild@0.27.2)(jiti@2.6.1)(yaml@2.8.3)))': dependencies: '@bcoe/v8-coverage': 1.0.2 '@vitest/utils': 4.1.0 @@ -3887,7 +3989,7 @@ snapshots: obug: 2.1.1 std-env: 4.0.0 tinyrainbow: 3.1.0 - vitest: 4.1.0(@types/node@25.3.5)(happy-dom@20.8.3)(vite@7.3.1(@types/node@25.3.5)(jiti@2.6.1)) + vitest: 4.1.0(@types/node@25.3.5)(happy-dom@20.8.9)(vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@25.3.5)(esbuild@0.27.2)(jiti@2.6.1)(yaml@2.8.3)) '@vitest/expect@4.1.0': dependencies: @@ -3898,13 +4000,13 @@ snapshots: chai: 6.2.2 tinyrainbow: 3.1.0 - '@vitest/mocker@4.1.0(vite@7.3.1(@types/node@25.3.5)(jiti@2.6.1))': + '@vitest/mocker@4.1.0(vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@25.3.5)(esbuild@0.27.2)(jiti@2.6.1)(yaml@2.8.3))': dependencies: '@vitest/spy': 4.1.0 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: - vite: 7.3.1(@types/node@25.3.5)(jiti@2.6.1) + vite: 8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@25.3.5)(esbuild@0.27.2)(jiti@2.6.1)(yaml@2.8.3) '@vitest/pretty-format@4.1.0': dependencies: @@ -4612,7 +4714,7 @@ snapshots: baseline-browser-mapping@2.9.19: {} - brace-expansion@5.0.4: + brace-expansion@5.0.5: dependencies: balanced-match: 4.0.4 @@ -4634,7 
+4736,7 @@ snapshots: dependencies: chokidar: 4.0.3 confbox: 0.1.8 - defu: 6.1.4 + defu: 6.1.6 dotenv: 16.6.1 giget: 1.2.5 jiti: 2.6.1 @@ -4651,7 +4753,7 @@ snapshots: dependencies: chokidar: 5.0.0 confbox: 0.2.4 - defu: 6.1.4 + defu: 6.1.6 dotenv: 17.3.1 exsolve: 1.0.8 giget: 2.0.0 @@ -4758,7 +4860,7 @@ snapshots: import-fresh: 3.3.1 parse-json: 5.2.0 path-type: 4.0.0 - yaml: 1.10.2 + yaml: 2.8.3 cross-spawn@7.0.6: dependencies: @@ -4817,7 +4919,7 @@ snapshots: has-property-descriptors: 1.0.2 object-keys: 1.1.1 - defu@6.1.4: {} + defu@6.1.6: {} delayed-stream@1.0.0: {} @@ -4825,6 +4927,8 @@ snapshots: destr@2.0.5: {} + detect-libc@2.1.2: {} + doctrine@2.1.0: dependencies: esutils: 2.0.3 @@ -4981,6 +5085,7 @@ snapshots: '@esbuild/win32-arm64': 0.27.2 '@esbuild/win32-ia32': 0.27.2 '@esbuild/win32-x64': 0.27.2 + optional: true escalade@3.2.0: {} @@ -5176,9 +5281,9 @@ snapshots: fast-levenshtein@2.0.6: {} - fdir@6.5.0(picomatch@4.0.3): + fdir@6.5.0(picomatch@4.0.4): optionalDependencies: - picomatch: 4.0.3 + picomatch: 4.0.4 file-entry-cache@8.0.0: dependencies: @@ -5195,10 +5300,10 @@ snapshots: flat-cache@4.0.1: dependencies: - flatted: 3.4.1 + flatted: 3.4.2 keyv: 4.5.4 - flatted@3.4.1: {} + flatted@3.4.2: {} follow-redirects@1.15.11: {} @@ -5260,7 +5365,7 @@ snapshots: dependencies: citty: 0.1.6 consola: 3.4.2 - defu: 6.1.4 + defu: 6.1.6 node-fetch-native: 1.6.7 nypm: 0.5.4 pathe: 2.0.3 @@ -5270,7 +5375,7 @@ snapshots: dependencies: citty: 0.1.6 consola: 3.4.2 - defu: 6.1.4 + defu: 6.1.6 node-fetch-native: 1.6.7 nypm: 0.6.5 pathe: 2.0.3 @@ -5288,7 +5393,7 @@ snapshots: gopd@1.2.0: {} - handlebars@4.7.8: + handlebars@4.7.9: dependencies: minimist: 1.2.8 neo-async: 2.6.2 @@ -5297,7 +5402,7 @@ snapshots: optionalDependencies: uglify-js: 3.19.3 - happy-dom@20.8.3: + happy-dom@20.8.9: dependencies: '@types/node': 25.3.5 '@types/whatwg-mimetype': 3.0.2 @@ -5557,13 +5662,62 @@ snapshots: prelude-ls: 1.2.1 type-check: 0.4.0 + lightningcss-android-arm64@1.32.0: + optional: 
true + + lightningcss-darwin-arm64@1.32.0: + optional: true + + lightningcss-darwin-x64@1.32.0: + optional: true + + lightningcss-freebsd-x64@1.32.0: + optional: true + + lightningcss-linux-arm-gnueabihf@1.32.0: + optional: true + + lightningcss-linux-arm64-gnu@1.32.0: + optional: true + + lightningcss-linux-arm64-musl@1.32.0: + optional: true + + lightningcss-linux-x64-gnu@1.32.0: + optional: true + + lightningcss-linux-x64-musl@1.32.0: + optional: true + + lightningcss-win32-arm64-msvc@1.32.0: + optional: true + + lightningcss-win32-x64-msvc@1.32.0: + optional: true + + lightningcss@1.32.0: + dependencies: + detect-libc: 2.1.2 + optionalDependencies: + lightningcss-android-arm64: 1.32.0 + lightningcss-darwin-arm64: 1.32.0 + lightningcss-darwin-x64: 1.32.0 + lightningcss-freebsd-x64: 1.32.0 + lightningcss-linux-arm-gnueabihf: 1.32.0 + lightningcss-linux-arm64-gnu: 1.32.0 + lightningcss-linux-arm64-musl: 1.32.0 + lightningcss-linux-x64-gnu: 1.32.0 + lightningcss-linux-x64-musl: 1.32.0 + lightningcss-win32-arm64-msvc: 1.32.0 + lightningcss-win32-x64-msvc: 1.32.0 + lines-and-columns@1.2.4: {} locate-path@6.0.0: dependencies: p-locate: 5.0.0 - lodash-es@4.17.23: {} + lodash-es@4.18.1: {} loose-envify@1.4.0: dependencies: @@ -5608,7 +5762,7 @@ snapshots: minimatch@10.2.4: dependencies: - brace-expansion: 5.0.4 + brace-expansion: 5.0.5 minimist@1.2.8: {} @@ -5772,7 +5926,7 @@ snapshots: picocolors@1.1.1: {} - picomatch@4.0.3: {} + picomatch@4.0.4: {} pkg-types@1.3.1: dependencies: @@ -5790,7 +5944,7 @@ snapshots: possible-typed-array-names@1.1.0: {} - postcss@8.5.6: + postcss@8.5.8: dependencies: nanoid: 3.3.11 picocolors: 1.1.1 @@ -5830,7 +5984,7 @@ snapshots: rc9@2.1.2: dependencies: - defu: 6.1.4 + defu: 6.1.6 destr: 2.0.5 react-cookie@8.0.1(react@19.2.4): @@ -5918,36 +6072,29 @@ snapshots: path-parse: 1.0.7 supports-preserve-symlinks-flag: 1.0.0 - rollup@4.59.0: + rolldown@1.0.0-rc.12(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2): dependencies: - '@types/estree': 1.0.8 
+ '@oxc-project/types': 0.122.0 + '@rolldown/pluginutils': 1.0.0-rc.12 optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.59.0 - '@rollup/rollup-android-arm64': 4.59.0 - '@rollup/rollup-darwin-arm64': 4.59.0 - '@rollup/rollup-darwin-x64': 4.59.0 - '@rollup/rollup-freebsd-arm64': 4.59.0 - '@rollup/rollup-freebsd-x64': 4.59.0 - '@rollup/rollup-linux-arm-gnueabihf': 4.59.0 - '@rollup/rollup-linux-arm-musleabihf': 4.59.0 - '@rollup/rollup-linux-arm64-gnu': 4.59.0 - '@rollup/rollup-linux-arm64-musl': 4.59.0 - '@rollup/rollup-linux-loong64-gnu': 4.59.0 - '@rollup/rollup-linux-loong64-musl': 4.59.0 - '@rollup/rollup-linux-ppc64-gnu': 4.59.0 - '@rollup/rollup-linux-ppc64-musl': 4.59.0 - '@rollup/rollup-linux-riscv64-gnu': 4.59.0 - '@rollup/rollup-linux-riscv64-musl': 4.59.0 - '@rollup/rollup-linux-s390x-gnu': 4.59.0 - '@rollup/rollup-linux-x64-gnu': 4.59.0 - '@rollup/rollup-linux-x64-musl': 4.59.0 - '@rollup/rollup-openbsd-x64': 4.59.0 - '@rollup/rollup-openharmony-arm64': 4.59.0 - '@rollup/rollup-win32-arm64-msvc': 4.59.0 - '@rollup/rollup-win32-ia32-msvc': 4.59.0 - '@rollup/rollup-win32-x64-gnu': 4.59.0 - '@rollup/rollup-win32-x64-msvc': 4.59.0 - fsevents: 2.3.3 + '@rolldown/binding-android-arm64': 1.0.0-rc.12 + '@rolldown/binding-darwin-arm64': 1.0.0-rc.12 + '@rolldown/binding-darwin-x64': 1.0.0-rc.12 + '@rolldown/binding-freebsd-x64': 1.0.0-rc.12 + '@rolldown/binding-linux-arm-gnueabihf': 1.0.0-rc.12 + '@rolldown/binding-linux-arm64-gnu': 1.0.0-rc.12 + '@rolldown/binding-linux-arm64-musl': 1.0.0-rc.12 + '@rolldown/binding-linux-ppc64-gnu': 1.0.0-rc.12 + '@rolldown/binding-linux-s390x-gnu': 1.0.0-rc.12 + '@rolldown/binding-linux-x64-gnu': 1.0.0-rc.12 + '@rolldown/binding-linux-x64-musl': 1.0.0-rc.12 + '@rolldown/binding-openharmony-arm64': 1.0.0-rc.12 + '@rolldown/binding-wasm32-wasi': 1.0.0-rc.12(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2) + '@rolldown/binding-win32-arm64-msvc': 1.0.0-rc.12 + '@rolldown/binding-win32-x64-msvc': 1.0.0-rc.12 + 
transitivePeerDependencies: + - '@emnapi/core' + - '@emnapi/runtime' run-applescript@7.1.0: {} @@ -6132,8 +6279,8 @@ snapshots: tinyglobby@0.2.15: dependencies: - fdir: 6.5.0(picomatch@4.0.3) - picomatch: 4.0.3 + fdir: 6.5.0(picomatch@4.0.4) + picomatch: 4.0.4 tinyrainbow@3.1.0: {} @@ -6228,27 +6375,31 @@ snapshots: dependencies: punycode: 2.3.1 - vite-plugin-css-injected-by-js@4.0.1(vite@7.3.1(@types/node@25.3.5)(jiti@2.6.1)): + vite-plugin-css-injected-by-js@4.0.1(vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@25.3.5)(esbuild@0.27.2)(jiti@2.6.1)(yaml@2.8.3)): dependencies: - vite: 7.3.1(@types/node@25.3.5)(jiti@2.6.1) + vite: 8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@25.3.5)(esbuild@0.27.2)(jiti@2.6.1)(yaml@2.8.3) - vite@7.3.1(@types/node@25.3.5)(jiti@2.6.1): + vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@25.3.5)(esbuild@0.27.2)(jiti@2.6.1)(yaml@2.8.3): dependencies: - esbuild: 0.27.2 - fdir: 6.5.0(picomatch@4.0.3) - picomatch: 4.0.3 - postcss: 8.5.6 - rollup: 4.59.0 + lightningcss: 1.32.0 + picomatch: 4.0.4 + postcss: 8.5.8 + rolldown: 1.0.0-rc.12(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2) tinyglobby: 0.2.15 optionalDependencies: '@types/node': 25.3.5 + esbuild: 0.27.2 fsevents: 2.3.3 jiti: 2.6.1 + yaml: 2.8.3 + transitivePeerDependencies: + - '@emnapi/core' + - '@emnapi/runtime' - vitest@4.1.0(@types/node@25.3.5)(happy-dom@20.8.3)(vite@7.3.1(@types/node@25.3.5)(jiti@2.6.1)): + vitest@4.1.0(@types/node@25.3.5)(happy-dom@20.8.9)(vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@25.3.5)(esbuild@0.27.2)(jiti@2.6.1)(yaml@2.8.3)): dependencies: '@vitest/expect': 4.1.0 - '@vitest/mocker': 4.1.0(vite@7.3.1(@types/node@25.3.5)(jiti@2.6.1)) + '@vitest/mocker': 4.1.0(vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@25.3.5)(esbuild@0.27.2)(jiti@2.6.1)(yaml@2.8.3)) '@vitest/pretty-format': 4.1.0 '@vitest/runner': 4.1.0 '@vitest/snapshot': 4.1.0 @@ -6259,17 +6410,17 @@ snapshots: 
magic-string: 0.30.21 obug: 2.1.1 pathe: 2.0.3 - picomatch: 4.0.3 + picomatch: 4.0.4 std-env: 4.0.0 tinybench: 2.9.0 tinyexec: 1.0.4 tinyglobby: 0.2.15 tinyrainbow: 3.1.0 - vite: 7.3.1(@types/node@25.3.5)(jiti@2.6.1) + vite: 8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@25.3.5)(esbuild@0.27.2)(jiti@2.6.1)(yaml@2.8.3) why-is-node-running: 2.3.0 optionalDependencies: '@types/node': 25.3.5 - happy-dom: 20.8.3 + happy-dom: 20.8.9 transitivePeerDependencies: - msw @@ -6340,7 +6491,7 @@ snapshots: yallist@5.0.0: {} - yaml@1.10.2: {} + yaml@2.8.3: {} yocto-queue@0.1.0: {} diff --git a/airflow-core/src/airflow/api_fastapi/common/types.py b/airflow-core/src/airflow/api_fastapi/common/types.py index bd4176a9fd927..7d2a944c82228 100644 --- a/airflow-core/src/airflow/api_fastapi/common/types.py +++ b/airflow-core/src/airflow/api_fastapi/common/types.py @@ -20,7 +20,7 @@ from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Annotated, Any, Literal +from typing import Annotated, Literal from pydantic import ( AfterValidator, @@ -208,15 +208,11 @@ def check_at_least_one_color(self) -> ThemeColors: raise ValueError("At least one color token must be provided: brand, gray, black, or white") return self - @model_serializer(mode="wrap") - def serialize_model(self, handler: Any) -> dict: - return {k: v for k, v in handler(self).items() if v is not None} - class Theme(BaseModel): """JSON to modify Chakra's theme.""" - tokens: dict[Literal["colors"], ThemeColors] + tokens: dict[Literal["colors"], ThemeColors] | None = None globalCss: dict[str, dict] | None = None icon: ThemeIconType = None icon_dark_mode: ThemeIconType = None diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/config.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/config.py index 96cd4aaad266a..a511b31142b22 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/config.py +++ 
b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/config.py @@ -16,6 +16,8 @@ # under the License. from __future__ import annotations +from pydantic import ConfigDict, field_serializer + from airflow.api_fastapi.common.types import Theme, UIAlert from airflow.api_fastapi.core_api.base import BaseModel @@ -23,6 +25,8 @@ class ConfigResponse(BaseModel): """configuration serializer.""" + model_config = ConfigDict(json_schema_mode_override="validation") + fallback_page_limit: int auto_refresh_interval: int hide_paused_dags_by_default: bool @@ -36,3 +40,9 @@ class ConfigResponse(BaseModel): external_log_name: str | None = None theme: Theme | None multi_team: bool + + @field_serializer("theme") + def serialize_theme(self, theme: Theme | None) -> dict | None: + if theme is None: + return None + return theme.model_dump(exclude_none=True) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/openapi/_private_ui.yaml b/airflow-core/src/airflow/api_fastapi/core_api/openapi/_private_ui.yaml index 915e4d5430052..706cf64b492aa 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/openapi/_private_ui.yaml +++ b/airflow-core/src/airflow/api_fastapi/core_api/openapi/_private_ui.yaml @@ -3272,11 +3272,13 @@ components: Theme: properties: tokens: - additionalProperties: - $ref: '#/components/schemas/ThemeColors' - propertyNames: - const: colors - type: object + anyOf: + - additionalProperties: + $ref: '#/components/schemas/ThemeColors' + propertyNames: + const: colors + type: object + - type: 'null' title: Tokens globalCss: anyOf: @@ -3297,13 +3299,80 @@ components: - type: 'null' title: Icon Dark Mode type: object - required: - - tokens title: Theme description: JSON to modify Chakra's theme. 
ThemeColors: - additionalProperties: true + properties: + brand: + anyOf: + - additionalProperties: + additionalProperties: + $ref: '#/components/schemas/OklchColor' + propertyNames: + const: value + type: object + propertyNames: + enum: + - '50' + - '100' + - '200' + - '300' + - '400' + - '500' + - '600' + - '700' + - '800' + - '900' + - '950' + type: object + - type: 'null' + title: Brand + gray: + anyOf: + - additionalProperties: + additionalProperties: + $ref: '#/components/schemas/OklchColor' + propertyNames: + const: value + type: object + propertyNames: + enum: + - '50' + - '100' + - '200' + - '300' + - '400' + - '500' + - '600' + - '700' + - '800' + - '900' + - '950' + type: object + - type: 'null' + title: Gray + black: + anyOf: + - additionalProperties: + $ref: '#/components/schemas/OklchColor' + propertyNames: + const: value + type: object + - type: 'null' + title: Black + white: + anyOf: + - additionalProperties: + $ref: '#/components/schemas/OklchColor' + propertyNames: + const: value + type: object + - type: 'null' + title: White type: object + title: ThemeColors + description: Color tokens for the UI theme. All fields are optional; at least + one must be provided. 
TokenType: type: string enum: diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/tasks.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/tasks.py index c548989835d83..2df27b682ea3f 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/tasks.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/tasks.py @@ -17,7 +17,6 @@ from __future__ import annotations -from operator import attrgetter from typing import cast from fastapi import Depends, HTTPException, status @@ -33,6 +32,29 @@ tasks_router = AirflowRouter(tags=["Task"], prefix="/dags/{dag_id}/tasks") +_SORTABLE_TASK_FIELDS = { + "task_id", + "task_display_name", + "owner", + "start_date", + "end_date", + "trigger_rule", + "depends_on_past", + "wait_for_downstream", + "retries", + "queue", + "pool", + "pool_slots", + "execution_timeout", + "retry_delay", + "retry_exponential_backoff", + "priority_weight", + "weight_rule", + "ui_color", + "ui_fgcolor", + "operator_name", +} + @tasks_router.get( "", @@ -52,10 +74,18 @@ def get_tasks( ) -> TaskCollectionResponse: """Get tasks for DAG.""" dag = get_latest_version_of_dag(dag_bag, dag_id, session) - try: - tasks = sorted(dag.tasks, key=attrgetter(order_by.lstrip("-")), reverse=(order_by[0:1] == "-")) - except AttributeError as err: - raise HTTPException(status.HTTP_400_BAD_REQUEST, str(err)) + lstripped_order_by = order_by.lstrip("-") + if lstripped_order_by not in _SORTABLE_TASK_FIELDS: + raise HTTPException( + status.HTTP_400_BAD_REQUEST, + f"Ordering with '{lstripped_order_by}' is disallowed or " + f"the attribute does not exist on the model", + ) + tasks = sorted( + dag.tasks, + key=lambda task: (getattr(task, lstripped_order_by) is None, getattr(task, lstripped_order_by)), + reverse=(order_by[0:1] == "-"), + ) return TaskCollectionResponse( tasks=cast("list[TaskResponse]", tasks), total_entries=len(tasks), diff --git a/airflow-core/src/airflow/api_fastapi/core_api/services/ui/connections.py 
b/airflow-core/src/airflow/api_fastapi/core_api/services/ui/connections.py index 96480555c4000..1cf45b7f7e1d0 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/services/ui/connections.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/services/ui/connections.py @@ -27,10 +27,11 @@ ConnectionHookMetaData, StandardHookFields, ) +from airflow.providers_manager import HookInfo, ProvidersManager from airflow.serialization.definitions.param import SerializedParam if TYPE_CHECKING: - from airflow.providers_manager import ConnectionFormWidgetInfo, HookInfo + from airflow.providers_manager import ConnectionFormWidgetInfo log = logging.getLogger(__name__) @@ -125,8 +126,6 @@ def _get_hooks_with_mocked_fab() -> tuple[ """Get hooks with all details w/o FAB needing to be installed.""" from unittest import mock - from airflow.providers_manager import ProvidersManager - def mock_lazy_gettext(txt: str) -> str: """Mock for flask_babel.lazy_gettext.""" return txt @@ -225,19 +224,16 @@ def _convert_extra_fields(form_widgets: dict[str, ConnectionFormWidgetInfo]) -> @staticmethod @cache def hook_meta_data() -> list[ConnectionHookMetaData]: - hooks, connection_form_widgets, field_behaviours = HookMetaService._get_hooks_with_mocked_fab() - result: list[ConnectionHookMetaData] = [] - widgets = HookMetaService._convert_extra_fields(connection_form_widgets) - for hook_key, hook_info in hooks.items(): - if not hook_info: - continue - hook_meta = ConnectionHookMetaData( - connection_type=hook_key, - hook_class_name=hook_info.hook_class_name, - default_conn_name=None, # TODO: later - hook_name=hook_info.hook_name, - standard_fields=HookMetaService._make_standard_fields(field_behaviours.get(hook_key)), - extra_fields=widgets.get(hook_key), + pm = ProvidersManager() + widgets = HookMetaService._convert_extra_fields(pm._connection_form_widgets_from_metadata) + return [ + ConnectionHookMetaData( + connection_type=meta.connection_type, + hook_class_name=meta.hook_class_name, + 
default_conn_name=None, + hook_name=meta.hook_name, + standard_fields=HookMetaService._make_standard_fields(meta.field_behaviour), + extra_fields=widgets.get(meta.connection_type), ) - result.append(hook_meta) - return result + for meta in pm.iter_connection_type_hook_ui_metadata() + ] diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/task_instances.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/task_instances.py index 5f5073c916b68..e1687206d5547 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/task_instances.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/task_instances.py @@ -70,6 +70,7 @@ from airflow.models.dagrun import DagRun as DR from airflow.models.log import Log from airflow.models.taskinstance import TaskInstance as TI, _stop_remaining_tasks +from airflow.models.taskinstancehistory import TaskInstanceHistory as TIH from airflow.models.taskreschedule import TaskReschedule from airflow.models.trigger import Trigger from airflow.models.xcom import XComModel @@ -679,6 +680,9 @@ def ti_skip_downstream( status.HTTP_409_CONFLICT: { "description": "The TI attempting to heartbeat should be terminated for the given reason" }, + status.HTTP_410_GONE: { + "description": "Task Instance not found in the TI table but exists in the Task Instance History table" + }, HTTP_422_UNPROCESSABLE_CONTENT: {"description": "Invalid payload for the state transition"}, }, ) @@ -702,6 +706,24 @@ def ti_heartbeat( "Retrieved current task state", state=previous_state, current_hostname=hostname, current_pid=pid ) except NoResultFound: + # Check if the TI exists in the Task Instance History table. + # If it does, it was likely cleared while running, so return 410 Gone + # instead of 404 Not Found to give the client a more specific signal. 
+ tih_exists = session.scalar( + select(func.count(TIH.task_instance_id)).where(TIH.task_instance_id == task_instance_id) + ) + if tih_exists: + log.error( + "TaskInstance was previously cleared and archived in history, heartbeat skipped", + ti_id=str(task_instance_id), + ) + raise HTTPException( + status_code=status.HTTP_410_GONE, + detail={ + "reason": "not_found", + "message": "Task Instance not found, it may have been moved to the Task Instance History table", + }, + ) log.error("Task Instance not found") raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, @@ -892,15 +914,13 @@ def get_task_instance_count( query = query.where(TI.run_id.in_(run_ids)) if task_group_id: - group_tasks = _get_group_tasks(dag_id, task_group_id, session, dag_bag, logical_dates, run_ids) + group_tasks = _get_group_tasks( + dag_id, task_group_id, session, dag_bag, logical_dates, run_ids, map_index + ) # Get unique (task_id, map_index) pairs - task_map_pairs = [(ti.task_id, ti.map_index) for ti in group_tasks] - if map_index is not None: - task_map_pairs = [(ti.task_id, ti.map_index) for ti in group_tasks if ti.map_index == map_index] - if not task_map_pairs: # If no task group tasks found, default to checking the task group ID itself # This matches the behavior in _get_external_task_group_task_ids @@ -1000,15 +1020,18 @@ def get_task_instance_states( if run_ids: query = query.where(TI.run_id.in_(run_ids)) + if map_index is not None: + query = query.where(TI.map_index == map_index) + results = session.scalars(query).all() if task_group_id: - group_tasks = _get_group_tasks(dag_id, task_group_id, session, dag_bag, logical_dates, run_ids) + group_tasks = _get_group_tasks( + dag_id, task_group_id, session, dag_bag, logical_dates, run_ids, map_index + ) results = results + group_tasks if task_ids else group_tasks - if map_index is not None: - results = [task for task in results if task.map_index == map_index] [ run_id_task_state_map[task.run_id].update( {task.task_id: task.state} @@ 
-1049,7 +1072,13 @@ def _is_eligible_to_retry(state: str, try_number: int, max_tries: int) -> bool: def _get_group_tasks( - dag_id: str, task_group_id: str, session: SessionDep, dag_bag: DagBagDep, logical_dates=None, run_ids=None + dag_id: str, + task_group_id: str, + session: SessionDep, + dag_bag: DagBagDep, + logical_dates=None, + run_ids=None, + map_index: int | None = None, ): # Get all tasks in the task group dag = get_latest_version_of_dag(dag_bag, dag_id, session, include_reason=True) @@ -1070,6 +1099,7 @@ def _get_group_tasks( TI.task_id.in_(task.task_id for task in task_group.iter_tasks()), *([TI.logical_date.in_(logical_dates)] if logical_dates else []), *([TI.run_id.in_(run_ids)] if run_ids else []), + *([TI.map_index == map_index] if map_index is not None else []), ) ).all() diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/versions/v2026_04_06.py b/airflow-core/src/airflow/api_fastapi/execution_api/versions/v2026_04_06.py index 85ec1f2a60899..59e671f0a24a2 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/versions/v2026_04_06.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/versions/v2026_04_06.py @@ -118,6 +118,34 @@ class ModifyDeferredTaskKwargsToJsonValue(VersionChange): schema(TIDeferredStatePayload).field("next_kwargs").had(type=dict[str, Any]), ) + @convert_response_to_previous_version_for(TIRunContext) # type: ignore[arg-type] + def convert_next_kwargs_to_base_serialization(response: ResponseInfo) -> None: # type: ignore[misc] + """ + Convert next_kwargs from SDK serde format to BaseSerialization format for old workers. + + Old workers (task-sdk < 1.2) only know BaseSerialization.deserialize(), which requires + dicts wrapped as {"__type": "dict", "__var": {...}}. SDK serde produces plain dicts that + BaseSerialization cannot parse, causing KeyError on __var. 
+ + We must deserialize SDK serde first to recover native Python objects (datetime, + timedelta, etc.), then re-serialize with BaseSerialization so old workers get + proper typed values instead of raw {"__classname__": ...} dicts. + """ + next_kwargs = response.body.get("next_kwargs") + if next_kwargs is None: + return + + from airflow.sdk.serde import deserialize + from airflow.serialization.serialized_objects import BaseSerialization + + try: + plain = deserialize(next_kwargs) + except (ImportError, KeyError, AttributeError, TypeError): + # Already in BaseSerialization format (rolling upgrade, old data in DB) + return + + response.body["next_kwargs"] = BaseSerialization.serialize(plain) + class RemoveUpstreamMapIndexesField(VersionChange): """Remove upstream_map_indexes field from TIRunContext - now computed by Task SDK.""" diff --git a/airflow-core/src/airflow/assets/manager.py b/airflow-core/src/airflow/assets/manager.py index dca3db9b181f9..b5ead262d0c2d 100644 --- a/airflow-core/src/airflow/assets/manager.py +++ b/airflow-core/src/airflow/assets/manager.py @@ -575,6 +575,8 @@ def resolve_asset_manager() -> AssetManager: key="asset_manager_kwargs", fallback={}, ) + if TYPE_CHECKING: + assert isinstance(_asset_manager_kwargs, dict) return _asset_manager_class(**_asset_manager_kwargs) diff --git a/airflow-core/src/airflow/config_templates/airflow_local_settings.py b/airflow-core/src/airflow/config_templates/airflow_local_settings.py index 06639e0e85545..48f14b0f9a9ee 100644 --- a/airflow-core/src/airflow/config_templates/airflow_local_settings.py +++ b/airflow-core/src/airflow/config_templates/airflow_local_settings.py @@ -20,7 +20,7 @@ from __future__ import annotations import os -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, cast from urllib.parse import urlsplit from airflow.configuration import conf @@ -159,6 +159,7 @@ def _default_conn_name_from(mod_path, hook_name): "logging/remote_task_handler_kwargs must be a JSON object 
(a python dict), we got " f"{type(remote_task_handler_kwargs)}" ) + _handler_kwargs = cast("dict[str, Any]", remote_task_handler_kwargs) delete_local_copy = conf.getboolean("logging", "delete_local_logs") if remote_base_log_folder.startswith("s3://"): @@ -166,16 +167,17 @@ def _default_conn_name_from(mod_path, hook_name): _default_conn_name_from("airflow.providers.amazon.aws.hooks.s3", "S3Hook") REMOTE_TASK_LOG = S3RemoteLogIO( - **( + **cast( + "dict[str, Any]", { "base_log_folder": BASE_LOG_FOLDER, "remote_base": remote_base_log_folder, "delete_local_copy": delete_local_copy, } - | remote_task_handler_kwargs + | _handler_kwargs, ) ) - remote_task_handler_kwargs = {} + _handler_kwargs = {} elif remote_base_log_folder.startswith("cloudwatch://"): from airflow.providers.amazon.aws.log.cloudwatch_task_handler import CloudWatchRemoteLogIO @@ -183,17 +185,18 @@ def _default_conn_name_from(mod_path, hook_name): _default_conn_name_from("airflow.providers.amazon.aws.hooks.logs", "AwsLogsHook") url_parts = urlsplit(remote_base_log_folder) REMOTE_TASK_LOG = CloudWatchRemoteLogIO( - **( + **cast( + "dict[str, Any]", { "base_log_folder": BASE_LOG_FOLDER, "remote_base": remote_base_log_folder, "delete_local_copy": delete_local_copy, "log_group_arn": url_parts.netloc + url_parts.path, } - | remote_task_handler_kwargs + | _handler_kwargs, ) ) - remote_task_handler_kwargs = {} + _handler_kwargs = {} elif remote_base_log_folder.startswith("gs://"): from airflow.providers.google.cloud.log.gcs_task_handler import GCSRemoteLogIO @@ -201,17 +204,18 @@ def _default_conn_name_from(mod_path, hook_name): key_path = conf.get_mandatory_value("logging", "google_key_path", fallback=None) REMOTE_TASK_LOG = GCSRemoteLogIO( - **( + **cast( + "dict[str, Any]", { "base_log_folder": BASE_LOG_FOLDER, "remote_base": remote_base_log_folder, "delete_local_copy": delete_local_copy, "gcp_key_path": key_path, } - | remote_task_handler_kwargs + | _handler_kwargs, ) ) - remote_task_handler_kwargs = {} + 
_handler_kwargs = {} elif remote_base_log_folder.startswith("wasb"): from airflow.providers.microsoft.azure.log.wasb_task_handler import WasbRemoteLogIO @@ -224,17 +228,18 @@ def _default_conn_name_from(mod_path, hook_name): wasb_remote_base = remote_base_log_folder.removeprefix("wasb://") REMOTE_TASK_LOG = WasbRemoteLogIO( - **( + **cast( + "dict[str, Any]", { "base_log_folder": BASE_LOG_FOLDER, "remote_base": wasb_remote_base, "delete_local_copy": delete_local_copy, "wasb_container": wasb_log_container, } - | remote_task_handler_kwargs + | _handler_kwargs, ) ) - remote_task_handler_kwargs = {} + _handler_kwargs = {} elif remote_base_log_folder.startswith("stackdriver://"): key_path = conf.get_mandatory_value("logging", "GOOGLE_KEY_PATH", fallback=None) # stackdriver:///airflow-tasks => airflow-tasks @@ -255,32 +260,34 @@ def _default_conn_name_from(mod_path, hook_name): _default_conn_name_from("airflow.providers.alibaba.cloud.hooks.oss", "OSSHook") REMOTE_TASK_LOG = OSSRemoteLogIO( - **( + **cast( + "dict[str, Any]", { "base_log_folder": BASE_LOG_FOLDER, "remote_base": remote_base_log_folder, "delete_local_copy": delete_local_copy, } - | remote_task_handler_kwargs + | _handler_kwargs, ) ) - remote_task_handler_kwargs = {} + _handler_kwargs = {} elif remote_base_log_folder.startswith("hdfs://"): from airflow.providers.apache.hdfs.log.hdfs_task_handler import HdfsRemoteLogIO _default_conn_name_from("airflow.providers.apache.hdfs.hooks.webhdfs", "WebHDFSHook") REMOTE_TASK_LOG = HdfsRemoteLogIO( - **( + **cast( + "dict[str, Any]", { "base_log_folder": BASE_LOG_FOLDER, "remote_base": urlsplit(remote_base_log_folder).path, "delete_local_copy": delete_local_copy, } - | remote_task_handler_kwargs + | _handler_kwargs, ) ) - remote_task_handler_kwargs = {} + _handler_kwargs = {} elif ELASTICSEARCH_HOST: from airflow.providers.elasticsearch.log.es_task_handler import ElasticsearchRemoteLogIO @@ -291,6 +298,27 @@ def _default_conn_name_from(mod_path, hook_name): 
ELASTICSEARCH_HOST_FIELD: str = conf.get_mandatory_value("elasticsearch", "HOST_FIELD") ELASTICSEARCH_OFFSET_FIELD: str = conf.get_mandatory_value("elasticsearch", "OFFSET_FIELD") ELASTICSEARCH_LOG_ID_TEMPLATE: str = conf.get_mandatory_value("elasticsearch", "LOG_ID_TEMPLATE") + ELASTICSEARCH_END_OF_LOG_MARK: str = conf.get_mandatory_value("elasticsearch", "END_OF_LOG_MARK") + ELASTICSEARCH_FRONTEND: str = conf.get_mandatory_value("elasticsearch", "FRONTEND") + ELASTICSEARCH_JSON_FIELDS: str = conf.get_mandatory_value("elasticsearch", "JSON_FIELDS") + + ELASTICSEARCH_REMOTE_HANDLERS: dict[str, dict[str, str | bool | None]] = { + "task": { + "class": "airflow.providers.elasticsearch.log.es_task_handler.ElasticsearchTaskHandler", + "formatter": "airflow", + "base_log_folder": BASE_LOG_FOLDER, + "end_of_log_mark": ELASTICSEARCH_END_OF_LOG_MARK, + "host": ELASTICSEARCH_HOST, + "frontend": ELASTICSEARCH_FRONTEND, + "write_stdout": ELASTICSEARCH_WRITE_STDOUT, + "write_to_es": ELASTICSEARCH_WRITE_TO_ES, + "json_format": ELASTICSEARCH_JSON_FORMAT, + "json_fields": ELASTICSEARCH_JSON_FIELDS, + "host_field": ELASTICSEARCH_HOST_FIELD, + "offset_field": ELASTICSEARCH_OFFSET_FIELD, + }, + } + DEFAULT_LOGGING_CONFIG["handlers"].update(ELASTICSEARCH_REMOTE_HANDLERS) REMOTE_TASK_LOG = ElasticsearchRemoteLogIO( host=ELASTICSEARCH_HOST, diff --git a/airflow-core/src/airflow/config_templates/config.yml b/airflow-core/src/airflow/config_templates/config.yml index e1f1c228a618c..c83d8b629ec03 100644 --- a/airflow-core/src/airflow/config_templates/config.yml +++ b/airflow-core/src/airflow/config_templates/config.yml @@ -1977,8 +1977,14 @@ api_auth: description: | Secret key used to encode and decode JWTs to authenticate to public and private APIs. - It should be as random as possible. However, when running more than 1 instances of API services, - make sure all of them use the same ``jwt_secret`` otherwise calls will fail on authentication. + It should be as random as possible. 
This key must be consistent across all components that + generate or validate JWT tokens (Scheduler, API Server). For improved security, consider + using asymmetric keys (``jwt_private_key_path``) instead, which allow you to restrict the + signing key to only the components that need to generate tokens. + + For security-sensitive deployments, pass this value via environment variable + (``AIRFLOW__API_AUTH__JWT_SECRET``) rather than storing it in a configuration file, and + restrict it to only the components that need it. Mutually exclusive with ``jwt_private_key_path``. version_added: 3.0.0 diff --git a/airflow-core/src/airflow/dag_processing/collection.py b/airflow-core/src/airflow/dag_processing/collection.py index 1d60c32020fc3..96f3c89f8623a 100644 --- a/airflow-core/src/airflow/dag_processing/collection.py +++ b/airflow-core/src/airflow/dag_processing/collection.py @@ -1101,7 +1101,8 @@ def add_asset_trigger_references( asset_model.watchers = [ watcher for watcher in asset_model.watchers - if BaseEventTrigger.hash(watcher.trigger.classpath, watcher.trigger.kwargs) + if watcher.trigger is not None + and BaseEventTrigger.hash(watcher.trigger.classpath, watcher.trigger.kwargs) not in trigger_hashes ] diff --git a/airflow-core/src/airflow/example_dags/example_inlet_event_extra.py b/airflow-core/src/airflow/example_dags/example_inlet_event_extra.py index ead4b442b782e..eb61ed443f6b2 100644 --- a/airflow-core/src/airflow/example_dags/example_inlet_event_extra.py +++ b/airflow-core/src/airflow/example_dags/example_inlet_event_extra.py @@ -33,7 +33,7 @@ with DAG( dag_id="read_asset_event", catchup=False, - start_date=datetime.datetime.min, + start_date=datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc), schedule="@daily", tags=["consumes"], ): @@ -48,7 +48,7 @@ def read_asset_event(*, inlet_events=None): with DAG( dag_id="read_asset_event_from_classic", catchup=False, - start_date=datetime.datetime.min, + start_date=datetime.datetime(1970, 1, 1, 
tzinfo=datetime.timezone.utc), schedule="@daily", tags=["consumes"], ): diff --git a/airflow-core/src/airflow/example_dags/example_outlet_event_extra.py b/airflow-core/src/airflow/example_dags/example_outlet_event_extra.py index 04e88554d16d3..7baab90625ded 100644 --- a/airflow-core/src/airflow/example_dags/example_outlet_event_extra.py +++ b/airflow-core/src/airflow/example_dags/example_outlet_event_extra.py @@ -33,7 +33,7 @@ with DAG( dag_id="asset_with_extra_by_yield", catchup=False, - start_date=datetime.datetime.min, + start_date=datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc), schedule="@daily", tags=["produces"], ): @@ -47,7 +47,7 @@ def asset_with_extra_by_yield(): with DAG( dag_id="asset_with_extra_by_context", catchup=False, - start_date=datetime.datetime.min, + start_date=datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc), schedule="@daily", tags=["produces"], ): @@ -61,7 +61,7 @@ def asset_with_extra_by_context(*, outlet_events=None): with DAG( dag_id="asset_with_extra_from_classic_operator", catchup=False, - start_date=datetime.datetime.min, + start_date=datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc), schedule="@daily", tags=["produces"], ): diff --git a/airflow-core/src/airflow/executors/local_executor.py b/airflow-core/src/airflow/executors/local_executor.py index 9b5939a0bd2e7..5703936e1d49d 100644 --- a/airflow-core/src/airflow/executors/local_executor.py +++ b/airflow-core/src/airflow/executors/local_executor.py @@ -149,6 +149,7 @@ def _execute_work(log: Logger, workload: workloads.ExecuteTask, team_conf) -> No token=workload.token, server=team_conf.get("core", "execution_api_server_url", fallback=default_execution_api_server), log_path=workload.log_path, + subprocess_logs_to_stdout=True, ) diff --git a/airflow-core/src/airflow/models/dag.py b/airflow-core/src/airflow/models/dag.py index 677fbc26048e3..ad67721067c66 100644 --- a/airflow-core/src/airflow/models/dag.py +++ b/airflow-core/src/airflow/models/dag.py @@ 
-630,6 +630,10 @@ def dags_needing_dagruns(cls, session: Session) -> tuple[Any, dict[str, datetime you should ensure that any scheduling decisions are made in a single transaction -- as soon as the transaction is committed it will be unlocked. + For asset-triggered scheduling, Dags that have ``AssetDagRunQueue`` rows but no matching + ``SerializedDagModel`` row are omitted from ``triggered_date_by_dag`` until serialization exists; + ADRQs are **not** deleted here so the scheduler can re-evaluate on a later run. + :meta private: """ from airflow.models.serialized_dag import SerializedDagModel @@ -676,6 +680,16 @@ def dag_ready(dag_id: str, cond: SerializedAssetBase, statuses: dict[UKey, bool] for dag_id, adrqs in adrq_by_dag.items() } ser_dags = SerializedDagModel.get_latest_serialized_dags(dag_ids=list(dag_statuses), session=session) + ser_dag_ids = {ser_dag.dag_id for ser_dag in ser_dags} + if missing_from_serialized := set(adrq_by_dag.keys()) - ser_dag_ids: + log.info( + "Dags have queued asset events (ADRQ), but are not found in the serialized_dag table." 
+ " — skipping Dag run creation: %s", + sorted(missing_from_serialized), + ) + for dag_id in missing_from_serialized: + del adrq_by_dag[dag_id] + del dag_statuses[dag_id] for ser_dag in ser_dags: dag_id = ser_dag.dag_id statuses = dag_statuses[dag_id] diff --git a/airflow-core/src/airflow/models/taskinstance.py b/airflow-core/src/airflow/models/taskinstance.py index e212ca68504f3..a0b95d8a3eac5 100644 --- a/airflow-core/src/airflow/models/taskinstance.py +++ b/airflow-core/src/airflow/models/taskinstance.py @@ -88,6 +88,7 @@ from airflow.models.taskreschedule import TaskReschedule from airflow.models.xcom import XCOM_RETURN_KEY, LazyXComSelectSequence, XComModel from airflow.settings import task_instance_mutation_hook +from airflow.task.priority_strategy import validate_and_load_priority_weight_strategy from airflow.ti_deps.dep_context import DepContext from airflow.ti_deps.dependencies_deps import REQUEUEABLE_DEPS, RUNNING_DEPS from airflow.ti_deps.deps.ready_to_reschedule import ReadyToRescheduleDep @@ -691,7 +692,10 @@ def insert_mapping( :meta private: """ - priority_weight = task.weight_rule.get_weight( + weight_rule = task.weight_rule + if not hasattr(weight_rule, "get_weight"): + weight_rule = validate_and_load_priority_weight_strategy(weight_rule) + priority_weight = weight_rule.get_weight( TaskInstance(task=task, run_id=run_id, map_index=map_index, dag_version_id=dag_version_id) ) context_carrier = new_task_run_carrier(dag_run.context_carrier) @@ -872,7 +876,10 @@ def refresh_from_task(self, task: Operator, pool_override: str | None = None) -> self.queue = task.queue self.pool = pool_override or task.pool self.pool_slots = task.pool_slots - self.priority_weight = self.task.weight_rule.get_weight(self) + weight_rule = self.task.weight_rule + if not hasattr(weight_rule, "get_weight"): + weight_rule = validate_and_load_priority_weight_strategy(weight_rule) + self.priority_weight = weight_rule.get_weight(self) self.run_as_user = task.run_as_user # Do not set 
max_tries to task.retries here because max_tries is a cumulative # value that needs to be stored in the db. diff --git a/airflow-core/src/airflow/models/trigger.py b/airflow-core/src/airflow/models/trigger.py index da78eede343dd..d17af8532e067 100644 --- a/airflow-core/src/airflow/models/trigger.py +++ b/airflow-core/src/airflow/models/trigger.py @@ -32,7 +32,6 @@ from airflow._shared.timezones import timezone from airflow.assets.manager import AssetManager from airflow.configuration import conf -from airflow.models import Callback from airflow.models.asset import AssetWatcherModel from airflow.models.base import Base from airflow.models.taskinstance import TaskInstance @@ -210,6 +209,8 @@ def bulk_fetch(cls, ids: Iterable[int], session: Session = NEW_SESSION) -> dict[ @provide_session def fetch_trigger_ids_with_non_task_associations(cls, session: Session = NEW_SESSION) -> set[str]: """Fetch all trigger IDs actively associated with non-task entities like assets and callbacks.""" + from airflow.models.callback import Callback + query = select(AssetWatcherModel.trigger_id).union_all( select(Callback.trigger_id).where(Callback.trigger_id.is_not(None)) ) @@ -408,6 +409,8 @@ def get_sorted_triggers( :param queues: The optional set of trigger queues to filter triggers by. :param session: The database session. """ + from airflow.models.callback import Callback + result: list[Row[Any]] = [] # Add triggers associated to callbacks first, then tasks, then assets @@ -477,13 +480,15 @@ def handle_event_submit(event: TriggerEvent, *, task_instance: TaskInstance, ses next_kwargs = BaseSerialization.deserialize(next_kwargs_raw) - # Add event to the plain dict, then serialize everything together. This ensures that the event is properly - # nested inside __var__ in the final serde serialized structure. + # Add event to the plain dict, then serialize everything together so nested + # non-primitive values get proper serde encoding. 
if TYPE_CHECKING: assert isinstance(next_kwargs, dict) next_kwargs["event"] = event.payload - # re-serialize the entire dict using serde to ensure consistent structure + # Re-serialize using serde. The Execution API version converter + # (ModifyDeferredTaskKwargsToJsonValue) handles converting this to + # BaseSerialization format when serving old workers. task_instance.next_kwargs = serialize(next_kwargs) # Remove ourselves as its trigger diff --git a/airflow-core/src/airflow/models/variable.py b/airflow-core/src/airflow/models/variable.py index 5435326de5417..48fa014879245 100644 --- a/airflow-core/src/airflow/models/variable.py +++ b/airflow-core/src/airflow/models/variable.py @@ -480,14 +480,14 @@ def check_for_write_conflict(key: str) -> None: _backend_name, _backend_name, ) - return + return None except Exception: log.exception( "Unable to retrieve variable from secrets backend (%s). " "Checking subsequent secrets backend.", type(secrets_backend).__name__, ) - return None + return None @staticmethod def get_variable_from_secrets(key: str, team_name: str | None = None) -> str | None: diff --git a/airflow-core/src/airflow/provider.yaml.schema.json b/airflow-core/src/airflow/provider.yaml.schema.json index ac6b05f30c87b..5714b8db658c5 100644 --- a/airflow-core/src/airflow/provider.yaml.schema.json +++ b/airflow-core/src/airflow/provider.yaml.schema.json @@ -378,6 +378,10 @@ "description": "Hook class name that implements the connection type", "type": "string" }, + "hook-name": { + "description": "Display name for the connection type in the UI (e.g. 
'File (path)', 'Slack')", + "type": "string" + }, "ui-field-behaviour": { "description": "Customizations for standard connection form fields", "type": "object", diff --git a/airflow-core/src/airflow/provider_info.schema.json b/airflow-core/src/airflow/provider_info.schema.json index 7c3eea12591dd..86fc726a05168 100644 --- a/airflow-core/src/airflow/provider_info.schema.json +++ b/airflow-core/src/airflow/provider_info.schema.json @@ -298,6 +298,10 @@ "hook-class-name": { "description": "Hook class name that implements the connection type", "type": "string" + }, + "hook-name": { + "description": "Display name for the connection type in the UI", + "type": "string" } }, "required": [ diff --git a/airflow-core/src/airflow/providers_manager.py b/airflow-core/src/airflow/providers_manager.py index b8d48a31b9c34..6fefcbc39b06d 100644 --- a/airflow-core/src/airflow/providers_manager.py +++ b/airflow-core/src/airflow/providers_manager.py @@ -26,7 +26,7 @@ import logging import traceback import warnings -from collections.abc import Callable, MutableMapping +from collections.abc import Callable, Iterator, MutableMapping from dataclasses import dataclass from functools import wraps from importlib.resources import files as resource_files @@ -243,6 +243,15 @@ class HookInfo(NamedTuple): dialects: list[str] = [] +class ConnectionTypeHookUIMetadata(NamedTuple): + """Hook metadata for one connection type (connection UI); ``field_behaviour`` is standard fields.""" + + connection_type: str + hook_name: str + hook_class_name: str | None + field_behaviour: dict | None + + class ConnectionFormWidgetInfo(NamedTuple): """Connection Form Widget information.""" @@ -413,6 +422,8 @@ def __init__(self): self._dialect_provider_dict: dict[str, DialectInfo] = {} # Keeps dict of hooks keyed by connection type. 
They are lazy evaluated at access time self._hooks_lazy_dict: LazyDictWithCache[str, HookInfo | Callable] = LazyDictWithCache() + # Keeps hook display names read from provider.yaml (hook-name field) + self._hook_name_dict: dict[str, str] = {} # Keeps methods that should be used to add custom widgets tuple of keyed by name of the extra field self._connection_form_widgets: dict[str, ConnectionFormWidgetInfo] = {} # Customizations for javascript fields are kept here @@ -979,6 +990,9 @@ def _load_ui_metadata(self) -> None: if not connection_type or not hook_class_name: continue + if hook_name := conn_config.get("hook-name"): + self._hook_name_dict[connection_type] = hook_name + if conn_fields := conn_config.get("conn-fields"): self._add_widgets(package_name, hook_class_name, connection_type, conn_fields) @@ -1349,6 +1363,45 @@ def hooks(self) -> MutableMapping[str, HookInfo | None]: # When we return hooks here it will only be used to retrieve hook information return self._hooks_lazy_dict + def iter_connection_type_hook_ui_metadata(self) -> Iterator[ConnectionTypeHookUIMetadata]: + """ + Yield hook metadata per connection type for the connection UI. + + Does not import hook classes. 
+ """ + self.initialize_providers_hooks() + all_types = frozenset(self._hooks_lazy_dict) | frozenset(self._hook_provider_dict) + for conn_type in sorted(all_types): + raw_entry = self._hooks_lazy_dict._raw_dict.get(conn_type) + provider_entry = self._hook_provider_dict.get(conn_type) + if isinstance(raw_entry, HookInfo): + hook_name = raw_entry.hook_name + hook_class_name = raw_entry.hook_class_name + elif provider_entry: + hook_name = self._hook_name_dict.get(conn_type, conn_type) + hook_class_name = provider_entry.hook_class_name + else: + hook_name = self._hook_name_dict.get(conn_type, conn_type) + hook_class_name = None + yield ConnectionTypeHookUIMetadata( + connection_type=conn_type, + hook_name=hook_name, + hook_class_name=hook_class_name, + field_behaviour=self._field_behaviours.get(conn_type), + ) + + @property + def _connection_form_widgets_from_metadata(self) -> dict[str, ConnectionFormWidgetInfo]: + """Return connection form widgets from metadata without importing every hook.""" + self.initialize_providers_hooks() + return self._connection_form_widgets + + @property + def _field_behaviours_from_metadata(self) -> dict[str, dict]: + """Return field behaviour dicts from metadata without importing every hook.""" + self.initialize_providers_hooks() + return self._field_behaviours + @property def dialects(self) -> MutableMapping[str, DialectInfo]: """Return dictionary of connection_type-to-dialect mapping.""" diff --git a/airflow-core/src/airflow/serialization/encoders.py b/airflow-core/src/airflow/serialization/encoders.py index dcb064dcde06b..2f30511a1e5fb 100644 --- a/airflow-core/src/airflow/serialization/encoders.py +++ b/airflow-core/src/airflow/serialization/encoders.py @@ -162,6 +162,14 @@ def _ensure_serialized(d): if isinstance(trigger, dict): classpath = trigger["classpath"] kwargs = trigger["kwargs"] + # unwrap any kwargs that are themselves serialized objects, to avoid double-serialization in the trigger's own serialize() method. 
+ unwrapped = {} + for k, v in kwargs.items(): + if isinstance(v, dict) and Encoding.TYPE in v: + unwrapped[k] = BaseSerialization.deserialize(v) + else: + unwrapped[k] = v + kwargs = unwrapped else: classpath, kwargs = trigger.serialize() return { diff --git a/airflow-core/src/airflow/ti_deps/deps/trigger_rule_dep.py b/airflow-core/src/airflow/ti_deps/deps/trigger_rule_dep.py index 80d83db679aa5..4943913d3283a 100644 --- a/airflow-core/src/airflow/ti_deps/deps/trigger_rule_dep.py +++ b/airflow-core/src/airflow/ti_deps/deps/trigger_rule_dep.py @@ -637,6 +637,9 @@ def _evaluate_teardown_scope() -> Iterator[TIDepStatus]: setup_obj = task.dag.get_task(setup_id) in_scope_ids.update(indirect_upstream_ids & setup_obj.get_flat_relative_ids(upstream=False)) + if not in_scope_ids: + return + in_scope_tasks = {tid: task.dag.get_task(tid) for tid in in_scope_ids} done = sum( @@ -678,7 +681,9 @@ def _evaluate_teardown_scope() -> Iterator[TIDepStatus]: return yield from _evaluate_direct_relatives() else: - statuses = list(_evaluate_direct_relatives()) - yield from statuses - if not statuses: + has_status = False + for status in _evaluate_direct_relatives(): + has_status = True + yield status + if not has_status: yield from _evaluate_teardown_scope() diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts index a6cabf8c9d3f1..356802aab9fcc 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -8895,13 +8895,20 @@ export const $TeamResponse = { export const $Theme = { properties: { tokens: { - additionalProperties: { - '$ref': '#/components/schemas/ThemeColors' - }, - propertyNames: { - const: 'colors' - }, - type: 'object', + anyOf: [ + { + additionalProperties: { + '$ref': '#/components/schemas/ThemeColors' + }, + propertyNames: { + const: 'colors' + }, + type: 'object' + }, + { + type: 'null' + } 
+ ], title: 'Tokens' }, globalCss: { @@ -8943,14 +8950,96 @@ export const $Theme = { } }, type: 'object', - required: ['tokens'], title: 'Theme', description: "JSON to modify Chakra's theme." } as const; export const $ThemeColors = { - additionalProperties: true, - type: 'object' + properties: { + brand: { + anyOf: [ + { + additionalProperties: { + additionalProperties: { + '$ref': '#/components/schemas/OklchColor' + }, + propertyNames: { + const: 'value' + }, + type: 'object' + }, + propertyNames: { + enum: ['50', '100', '200', '300', '400', '500', '600', '700', '800', '900', '950'] + }, + type: 'object' + }, + { + type: 'null' + } + ], + title: 'Brand' + }, + gray: { + anyOf: [ + { + additionalProperties: { + additionalProperties: { + '$ref': '#/components/schemas/OklchColor' + }, + propertyNames: { + const: 'value' + }, + type: 'object' + }, + propertyNames: { + enum: ['50', '100', '200', '300', '400', '500', '600', '700', '800', '900', '950'] + }, + type: 'object' + }, + { + type: 'null' + } + ], + title: 'Gray' + }, + black: { + anyOf: [ + { + additionalProperties: { + '$ref': '#/components/schemas/OklchColor' + }, + propertyNames: { + const: 'value' + }, + type: 'object' + }, + { + type: 'null' + } + ], + title: 'Black' + }, + white: { + anyOf: [ + { + additionalProperties: { + '$ref': '#/components/schemas/OklchColor' + }, + propertyNames: { + const: 'value' + }, + type: 'object' + }, + { + type: 'null' + } + ], + title: 'White' + } + }, + type: 'object', + title: 'ThemeColors', + description: 'Color tokens for the UI theme. All fields are optional; at least one must be provided.' 
} as const; export const $TokenType = { diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts index 14a88fd0ffaf9..1601e54b01a33 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts @@ -2199,9 +2199,9 @@ export type TeamResponse = { * JSON to modify Chakra's theme. */ export type Theme = { - tokens: { - [key: string]: ThemeColors; - }; + tokens?: { + [key: string]: ThemeColors; +} | null; globalCss?: { [key: string]: { [key: string]: unknown; @@ -2211,8 +2211,26 @@ export type Theme = { icon_dark_mode?: string | null; }; +/** + * Color tokens for the UI theme. All fields are optional; at least one must be provided. + */ export type ThemeColors = { - [key: string]: unknown; + brand?: { + [key: string]: { + [key: string]: OklchColor; + }; +} | null; + gray?: { + [key: string]: { + [key: string]: OklchColor; + }; +} | null; + black?: { + [key: string]: OklchColor; +} | null; + white?: { + [key: string]: OklchColor; +} | null; }; /** diff --git a/airflow-core/src/airflow/ui/package.json b/airflow-core/src/airflow/ui/package.json index 23e1fe4fd65a3..b93b0d918a8fb 100644 --- a/airflow-core/src/airflow/ui/package.json +++ b/airflow-core/src/airflow/ui/package.json @@ -31,68 +31,68 @@ "@guanmingchiu/sqlparser-ts": "^0.61.1", "@lezer/highlight": "^1.2.3", "@monaco-editor/react": "^4.7.0", - "@tanstack/react-query": "^5.90.21", + "@tanstack/react-query": "^5.96.1", "@tanstack/react-table": "^8.21.3", - "@tanstack/react-virtual": "^3.13.21", + "@tanstack/react-virtual": "^3.13.23", "@visx/group": "^3.12.0", "@visx/shape": "^3.12.0", - "@xyflow/react": "^12.10.1", + "@xyflow/react": "^12.10.2", "anser": "^2.3.5", - "axios": "^1.13.6", + "axios": "^1.14.0", "chakra-react-select": "^6.1.1", "chart.js": "^4.5.1", "chartjs-adapter-dayjs-4": "^1.0.4", "chartjs-plugin-annotation": "^3.1.0", - "dayjs": 
"^1.11.19", + "dayjs": "^1.11.20", "elkjs": "^0.11.1", "html-to-image": "^1.11.13", - "i18next": "^25.8.16", + "i18next": "^25.10.10", "i18next-browser-languagedetector": "^8.2.1", - "i18next-http-backend": "^3.0.2", + "i18next-http-backend": "^3.0.4", "next-themes": "^0.4.6", "react": "^19.2.4", "react-chartjs-2": "^5.3.1", "react-dom": "^19.2.4", - "react-hook-form": "^7.71.2", - "react-hotkeys-hook": "^4.6.1", - "react-i18next": "^15.5.1", + "react-hook-form": "^7.72.0", + "react-hotkeys-hook": "^4.6.2", + "react-i18next": "^15.7.4", "react-icons": "^5.6.0", "react-innertext": "^1.1.5", "react-markdown": "^9.1.0", "react-resizable-panels": "^3.0.6", - "react-router-dom": "^7.13.1", - "react-syntax-highlighter": "^15.6.1", + "react-router-dom": "^7.14.0", + "react-syntax-highlighter": "^15.6.6", "remark-gfm": "^4.0.1", - "use-debounce": "^10.1.0", + "use-debounce": "^10.1.1", "usehooks-ts": "^3.1.1", "yaml": "^2.8.2", - "zustand": "^5.0.11" + "zustand": "^5.0.12" }, "devDependencies": { "@7nohe/openapi-react-query-codegen": "^1.6.2", - "@eslint/compat": "^1.2.9", - "@eslint/js": "^9.39.1", - "@playwright/test": "^1.58.2", + "@eslint/compat": "^1.4.1", + "@eslint/js": "^9.39.4", + "@playwright/test": "^1.59.1", "@stylistic/eslint-plugin": "^2.13.0", - "@tanstack/eslint-plugin-query": "^5.91.4", + "@tanstack/eslint-plugin-query": "^5.96.1", "@testing-library/jest-dom": "^6.9.1", "@testing-library/react": "^16.3.2", "@trivago/prettier-plugin-sort-imports": "^4.3.0", - "@types/node": "^24.10.1", + "@types/node": "^24.12.0", "@types/react": "^19.2.14", "@types/react-dom": "^19.2.3", "@types/react-syntax-highlighter": "^15.5.13", - "@typescript-eslint/eslint-plugin": "^8.56.1", - "@typescript-eslint/parser": "^8.56.1", - "@typescript-eslint/utils": "^8.56.1", - "@vitejs/plugin-react": "^5.1.4", - "@vitejs/plugin-react-swc": "^4.2.3", + "@typescript-eslint/eslint-plugin": "^8.58.0", + "@typescript-eslint/parser": "^8.58.0", + "@typescript-eslint/utils": "^8.58.0", + 
"@vitejs/plugin-react": "^5.2.0", + "@vitejs/plugin-react-swc": "^4.3.0", "@vitest/coverage-v8": "^3.2.4", "babel-plugin-react-compiler": "^1.0.0", - "eslint": "^9.39.1", + "eslint": "^9.39.4", "eslint-config-prettier": "^10.1.8", "eslint-plugin-i18next": "^6.1.3", - "eslint-plugin-jsonc": "^2.21.0", + "eslint-plugin-jsonc": "^2.21.1", "eslint-plugin-jsx-a11y": "^6.10.2", "eslint-plugin-perfectionist": "^4.12.3", "eslint-plugin-prettier": "^5.5.5", @@ -102,13 +102,13 @@ "eslint-plugin-unicorn": "^55.0.0", "globals": "^15.15.0", "happy-dom": "^20.8.3", - "jsonc-eslint-parser": "^2.4.0", - "msw": "^2.12.10", + "jsonc-eslint-parser": "^2.4.2", + "msw": "^2.12.14", "openapi-merge-cli": "^1.3.2", "prettier": "^3.8.1", "ts-morph": "^27.0.2", "typescript": "^5.9.3", - "typescript-eslint": "^8.56.1", + "typescript-eslint": "^8.58.0", "vite": "^7.3.1", "vite-plugin-css-injected-by-js": "^3.5.2", "vitest": "^3.2.4", @@ -137,7 +137,12 @@ "picomatch@<2.3.2": ">=2.3.2", "picomatch@>=4.0.0 <4.0.4": ">=4.0.4", "yaml@>=1.0.0 <1.10.3": ">=1.10.3", - "yaml@>=2.0.0 <2.8.3": ">=2.8.3" + "yaml@>=2.0.0 <2.8.3": ">=2.8.3", + "lodash@>=4.0.0 <=4.17.23": ">=4.18.0", + "lodash@<=4.17.23": ">=4.18.0", + "defu@<=6.1.4": ">=6.1.5", + "vite@>=7.0.0 <=7.3.1": ">=7.3.2", + "vite@>=7.1.0 <=7.3.1": ">=7.3.2" } } } diff --git a/airflow-core/src/airflow/ui/playwright.config.ts b/airflow-core/src/airflow/ui/playwright.config.ts index cbd1e7ea728cb..f6a7ee7180f69 100644 --- a/airflow-core/src/airflow/ui/playwright.config.ts +++ b/airflow-core/src/airflow/ui/playwright.config.ts @@ -106,7 +106,7 @@ export default defineConfig({ process.env.CI !== undefined && process.env.CI !== "" ? ["github"] : ["list"], ], - retries: process.env.CI !== undefined && process.env.CI !== "" ? 2 : 0, + retries: process.env.CI !== undefined && process.env.CI !== "" ? 
4 : 0, testDir: "./tests/e2e/specs", // TODO: Temporarily ignore flaky specs until stabilized diff --git a/airflow-core/src/airflow/ui/pnpm-lock.yaml b/airflow-core/src/airflow/ui/pnpm-lock.yaml index b92bce9c8b254..fc1bb57af35a4 100644 --- a/airflow-core/src/airflow/ui/pnpm-lock.yaml +++ b/airflow-core/src/airflow/ui/pnpm-lock.yaml @@ -21,6 +21,11 @@ overrides: picomatch@>=4.0.0 <4.0.4: '>=4.0.4' yaml@>=1.0.0 <1.10.3: '>=1.10.3' yaml@>=2.0.0 <2.8.3: '>=2.8.3' + lodash@>=4.0.0 <=4.17.23: '>=4.18.0' + lodash@<=4.17.23: '>=4.18.0' + defu@<=6.1.4: '>=6.1.5' + vite@>=7.0.0 <=7.3.1: '>=7.3.2' + vite@>=7.1.0 <=7.3.1: '>=7.3.2' importers: @@ -45,14 +50,14 @@ importers: specifier: ^4.7.0 version: 4.7.0(monaco-editor@0.53.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@tanstack/react-query': - specifier: ^5.90.21 - version: 5.90.21(react@19.2.4) + specifier: ^5.96.1 + version: 5.96.1(react@19.2.4) '@tanstack/react-table': specifier: ^8.21.3 version: 8.21.3(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@tanstack/react-virtual': - specifier: ^3.13.21 - version: 3.13.21(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + specifier: ^3.13.23 + version: 3.13.23(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@visx/group': specifier: ^3.12.0 version: 3.12.0(react@19.2.4) @@ -60,14 +65,14 @@ importers: specifier: ^3.12.0 version: 3.12.0(react@19.2.4) '@xyflow/react': - specifier: ^12.10.1 - version: 12.10.1(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + specifier: ^12.10.2 + version: 12.10.2(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) anser: specifier: ^2.3.5 version: 2.3.5 axios: - specifier: ^1.13.6 - version: 1.13.6 + specifier: ^1.14.0 + version: 1.14.0 chakra-react-select: specifier: ^6.1.1 version: 
6.1.1(@chakra-ui/react@3.34.0(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(@types/react@19.2.14)(next-themes@0.4.6(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4) @@ -76,13 +81,13 @@ importers: version: 4.5.1 chartjs-adapter-dayjs-4: specifier: ^1.0.4 - version: 1.0.4(chart.js@4.5.1)(dayjs@1.11.19) + version: 1.0.4(chart.js@4.5.1)(dayjs@1.11.20) chartjs-plugin-annotation: specifier: ^3.1.0 version: 3.1.0(chart.js@4.5.1) dayjs: - specifier: ^1.11.19 - version: 1.11.19 + specifier: ^1.11.20 + version: 1.11.20 elkjs: specifier: ^0.11.1 version: 0.11.1 @@ -90,14 +95,14 @@ importers: specifier: ^1.11.13 version: 1.11.13 i18next: - specifier: ^25.8.16 - version: 25.8.16(typescript@5.9.3) + specifier: ^25.10.10 + version: 25.10.10(typescript@5.9.3) i18next-browser-languagedetector: specifier: ^8.2.1 version: 8.2.1 i18next-http-backend: - specifier: ^3.0.2 - version: 3.0.2 + specifier: ^3.0.4 + version: 3.0.4 next-themes: specifier: ^0.4.6 version: 0.4.6(react-dom@19.2.4(react@19.2.4))(react@19.2.4) @@ -111,14 +116,14 @@ importers: specifier: ^19.2.4 version: 19.2.4(react@19.2.4) react-hook-form: - specifier: ^7.71.2 - version: 7.71.2(react@19.2.4) + specifier: ^7.72.0 + version: 7.72.0(react@19.2.4) react-hotkeys-hook: - specifier: ^4.6.1 - version: 4.6.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + specifier: ^4.6.2 + version: 4.6.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4) react-i18next: - specifier: ^15.5.1 - version: 15.5.1(i18next@25.8.16(typescript@5.9.3))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@5.9.3) + specifier: ^15.7.4 + version: 15.7.4(i18next@25.10.10(typescript@5.9.3))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@5.9.3) react-icons: specifier: ^5.6.0 version: 5.6.0(react@19.2.4) @@ -132,17 +137,17 @@ importers: specifier: ^3.0.6 version: 3.0.6(react-dom@19.2.4(react@19.2.4))(react@19.2.4) react-router-dom: - specifier: 
^7.13.1 - version: 7.13.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + specifier: ^7.14.0 + version: 7.14.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4) react-syntax-highlighter: - specifier: ^15.6.1 - version: 15.6.1(react@19.2.4) + specifier: ^15.6.6 + version: 15.6.6(react@19.2.4) remark-gfm: specifier: ^4.0.1 version: 4.0.1 use-debounce: - specifier: ^10.1.0 - version: 10.1.0(react@19.2.4) + specifier: ^10.1.1 + version: 10.1.1(react@19.2.4) usehooks-ts: specifier: ^3.1.1 version: 3.1.1(react@19.2.4) @@ -150,27 +155,27 @@ importers: specifier: '>=2.8.3' version: 2.8.3 zustand: - specifier: ^5.0.11 - version: 5.0.11(@types/react@19.2.14)(react@19.2.4)(use-sync-external-store@1.6.0(react@19.2.4)) + specifier: ^5.0.12 + version: 5.0.12(@types/react@19.2.14)(react@19.2.4)(use-sync-external-store@1.6.0(react@19.2.4)) devDependencies: '@7nohe/openapi-react-query-codegen': specifier: ^1.6.2 version: 1.6.2(commander@12.1.0)(glob@11.1.0)(magicast@0.3.5)(ts-morph@27.0.2)(typescript@5.9.3) '@eslint/compat': - specifier: ^1.2.9 - version: 1.2.9(eslint@9.39.1(jiti@1.21.7)) + specifier: ^1.4.1 + version: 1.4.1(eslint@9.39.4(jiti@1.21.7)) '@eslint/js': - specifier: ^9.39.1 - version: 9.39.1 + specifier: ^9.39.4 + version: 9.39.4 '@playwright/test': - specifier: ^1.58.2 - version: 1.58.2 + specifier: ^1.59.1 + version: 1.59.1 '@stylistic/eslint-plugin': specifier: ^2.13.0 - version: 2.13.0(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3) + version: 2.13.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3) '@tanstack/eslint-plugin-query': - specifier: ^5.91.4 - version: 5.91.4(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3) + specifier: ^5.96.1 + version: 5.96.1(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3) '@testing-library/jest-dom': specifier: ^6.9.1 version: 6.9.1 @@ -181,8 +186,8 @@ importers: specifier: ^4.3.0 version: 4.3.0(prettier@3.8.1) '@types/node': - specifier: ^24.10.1 - version: 24.10.3 + specifier: ^24.12.0 + version: 24.12.0 '@types/react': specifier: ^19.2.14 
version: 19.2.14 @@ -193,59 +198,59 @@ importers: specifier: ^15.5.13 version: 15.5.13 '@typescript-eslint/eslint-plugin': - specifier: ^8.56.1 - version: 8.56.1(@typescript-eslint/parser@8.56.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3) + specifier: ^8.58.0 + version: 8.58.0(@typescript-eslint/parser@8.58.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3) '@typescript-eslint/parser': - specifier: ^8.56.1 - version: 8.56.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3) + specifier: ^8.58.0 + version: 8.58.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3) '@typescript-eslint/utils': - specifier: ^8.56.1 - version: 8.56.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3) + specifier: ^8.58.0 + version: 8.58.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3) '@vitejs/plugin-react': - specifier: ^5.1.4 - version: 5.1.4(vite@7.3.1(@types/node@24.10.3)(jiti@1.21.7)(yaml@2.8.3)) + specifier: ^5.2.0 + version: 5.2.0(vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@24.12.0)(esbuild@0.27.3)(jiti@1.21.7)(yaml@2.8.3)) '@vitejs/plugin-react-swc': - specifier: ^4.2.3 - version: 4.2.3(@swc/helpers@0.5.19)(vite@7.3.1(@types/node@24.10.3)(jiti@1.21.7)(yaml@2.8.3)) + specifier: ^4.3.0 + version: 4.3.0(@swc/helpers@0.5.19)(vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@24.12.0)(esbuild@0.27.3)(jiti@1.21.7)(yaml@2.8.3)) '@vitest/coverage-v8': specifier: ^3.2.4 - version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.10.3)(happy-dom@20.8.9)(jiti@1.21.7)(msw@2.12.10(@types/node@24.10.3)(typescript@5.9.3))(yaml@2.8.3)) + version: 3.2.4(vitest@3.2.4(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/debug@4.1.12)(@types/node@24.12.0)(esbuild@0.27.3)(happy-dom@20.8.9)(jiti@1.21.7)(msw@2.12.14(@types/node@24.12.0)(typescript@5.9.3))(yaml@2.8.3)) babel-plugin-react-compiler: specifier: ^1.0.0 version: 1.0.0 eslint: - specifier: ^9.39.1 - version: 9.39.1(jiti@1.21.7) 
+ specifier: ^9.39.4 + version: 9.39.4(jiti@1.21.7) eslint-config-prettier: specifier: ^10.1.8 - version: 10.1.8(eslint@9.39.1(jiti@1.21.7)) + version: 10.1.8(eslint@9.39.4(jiti@1.21.7)) eslint-plugin-i18next: specifier: ^6.1.3 version: 6.1.3 eslint-plugin-jsonc: - specifier: ^2.21.0 - version: 2.21.0(eslint@9.39.1(jiti@1.21.7)) + specifier: ^2.21.1 + version: 2.21.1(eslint@9.39.4(jiti@1.21.7)) eslint-plugin-jsx-a11y: specifier: ^6.10.2 - version: 6.10.2(eslint@9.39.1(jiti@1.21.7)) + version: 6.10.2(eslint@9.39.4(jiti@1.21.7)) eslint-plugin-perfectionist: specifier: ^4.12.3 - version: 4.15.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3) + version: 4.15.1(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3) eslint-plugin-prettier: specifier: ^5.5.5 - version: 5.5.5(eslint-config-prettier@10.1.8(eslint@9.39.1(jiti@1.21.7)))(eslint@9.39.1(jiti@1.21.7))(prettier@3.8.1) + version: 5.5.5(eslint-config-prettier@10.1.8(eslint@9.39.4(jiti@1.21.7)))(eslint@9.39.4(jiti@1.21.7))(prettier@3.8.1) eslint-plugin-react: specifier: ^7.37.5 - version: 7.37.5(eslint@9.39.1(jiti@1.21.7)) + version: 7.37.5(eslint@9.39.4(jiti@1.21.7)) eslint-plugin-react-hooks: specifier: ^7.0.1 - version: 7.0.1(eslint@9.39.1(jiti@1.21.7)) + version: 7.0.1(eslint@9.39.4(jiti@1.21.7)) eslint-plugin-react-refresh: specifier: ^0.5.2 - version: 0.5.2(eslint@9.39.1(jiti@1.21.7)) + version: 0.5.2(eslint@9.39.4(jiti@1.21.7)) eslint-plugin-unicorn: specifier: ^55.0.0 - version: 55.0.0(eslint@9.39.1(jiti@1.21.7)) + version: 55.0.0(eslint@9.39.4(jiti@1.21.7)) globals: specifier: ^15.15.0 version: 15.15.0 @@ -253,11 +258,11 @@ importers: specifier: '>=20.8.8' version: 20.8.9 jsonc-eslint-parser: - specifier: ^2.4.0 - version: 2.4.1 + specifier: ^2.4.2 + version: 2.4.2 msw: - specifier: ^2.12.10 - version: 2.12.10(@types/node@24.10.3)(typescript@5.9.3) + specifier: ^2.12.14 + version: 2.12.14(@types/node@24.12.0)(typescript@5.9.3) openapi-merge-cli: specifier: ^1.3.2 version: 1.3.2 @@ -271,17 +276,17 @@ importers: 
specifier: ^5.9.3 version: 5.9.3 typescript-eslint: - specifier: ^8.56.1 - version: 8.56.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3) + specifier: ^8.58.0 + version: 8.58.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3) vite: - specifier: ^7.3.1 - version: 7.3.1(@types/node@24.10.3)(jiti@1.21.7)(yaml@2.8.3) + specifier: '>=7.3.2' + version: 8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@24.12.0)(esbuild@0.27.3)(jiti@1.21.7)(yaml@2.8.3) vite-plugin-css-injected-by-js: specifier: ^3.5.2 - version: 3.5.2(vite@7.3.1(@types/node@24.10.3)(jiti@1.21.7)(yaml@2.8.3)) + version: 3.5.2(vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@24.12.0)(esbuild@0.27.3)(jiti@1.21.7)(yaml@2.8.3)) vitest: specifier: ^3.2.4 - version: 3.2.4(@types/debug@4.1.12)(@types/node@24.10.3)(happy-dom@20.8.9)(jiti@1.21.7)(msw@2.12.10(@types/node@24.10.3)(typescript@5.9.3))(yaml@2.8.3) + version: 3.2.4(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/debug@4.1.12)(@types/node@24.12.0)(esbuild@0.27.3)(happy-dom@20.8.9)(jiti@1.21.7)(msw@2.12.14(@types/node@24.12.0)(typescript@5.9.3))(yaml@2.8.3) web-worker: specifier: ^1.5.0 version: 1.5.0 @@ -443,8 +448,8 @@ packages: resolution: {integrity: sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==} engines: {node: '>=6.9.0'} - '@babel/helpers@7.28.6': - resolution: {integrity: sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==} + '@babel/helpers@7.29.2': + resolution: {integrity: sha512-HoGuUs4sCZNezVEKdVcwqmZN8GoHirLUcLaYVNBK2J0DadGtdcqgr3BCbvH8+XUo4NGjNl3VOtSjEKNzqfFgKw==} engines: {node: '>=6.9.0'} '@babel/parser@7.26.10': @@ -457,8 +462,8 @@ packages: engines: {node: '>=6.0.0'} hasBin: true - '@babel/parser@7.29.0': - resolution: {integrity: sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==} + '@babel/parser@7.29.2': + resolution: {integrity: 
sha512-4GgRzy/+fsBa72/RZVJmGKPmZu9Byn8o4MoLpmNe1m8ZfYnz5emHLQz3U4gLud6Zwl0RZIcgiLD7Uq7ySFuDLA==} engines: {node: '>=6.0.0'} hasBin: true @@ -482,6 +487,10 @@ packages: resolution: {integrity: sha512-05WQkdpL9COIMz4LjTxGpPNCdlpyimKppYNoJ5Di5EUObifl8t4tuLuUBBZEpoLYOmfvIWrsp9fCl0HoPRVTdA==} engines: {node: '>=6.9.0'} + '@babel/runtime@7.29.2': + resolution: {integrity: sha512-JiDShH45zKHWyGe4ZNVRrCjBz8Nh9TMmZG1kh4QTK8hCBTWBi8Da+i7s1fJw7/lYpM4ccepSNfqzZ/QvABBi5g==} + engines: {node: '>=6.9.0'} + '@babel/template@7.27.2': resolution: {integrity: sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==} engines: {node: '>=6.9.0'} @@ -532,6 +541,15 @@ packages: react: '>=18' react-dom: '>=18' + '@emnapi/core@1.9.2': + resolution: {integrity: sha512-UC+ZhH3XtczQYfOlu3lNEkdW/p4dsJ1r/bP7H8+rhao3TTTMO1ATq/4DdIi23XuGoFY+Cz0JmCbdVl0hz9jZcA==} + + '@emnapi/runtime@1.9.2': + resolution: {integrity: sha512-3U4+MIWHImeyu1wnmVygh5WlgfYDtyf0k8AbLhMFxOipihf6nrWC4syIm/SwEeec0mNSafiiNnMJwbza/Is6Lw==} + + '@emnapi/wasi-threads@1.2.1': + resolution: {integrity: sha512-uTII7OYF+/Mes/MrcIOYp5yOtSMLBWSIoLPpcgwipoiKbli6k322tcoFsxoIIxPDqW01SQGAgko4EzZi2BNv2w==} + '@emotion/babel-plugin@11.13.5': resolution: {integrity: sha512-pxHCpT2ex+0q+HH91/zsdHkw/lXd468DIN2zvfvLtPKLLMo6gQj7oLObq8PhkrxOZb/gGCq03S3Z7PDhS8pduQ==} @@ -738,37 +756,27 @@ packages: peerDependencies: eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 - '@eslint-community/eslint-utils@4.9.0': - resolution: {integrity: sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - peerDependencies: - eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 - '@eslint-community/eslint-utils@4.9.1': resolution: {integrity: sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} peerDependencies: eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 - 
'@eslint-community/regexpp@4.12.1': - resolution: {integrity: sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==} - engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} - '@eslint-community/regexpp@4.12.2': resolution: {integrity: sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==} engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} - '@eslint/compat@1.2.9': - resolution: {integrity: sha512-gCdSY54n7k+driCadyMNv8JSPzYLeDVM/ikZRtvtROBpRdFSkS8W9A82MqsaY7lZuwL0wiapgD0NT1xT0hyJsA==} + '@eslint/compat@1.4.1': + resolution: {integrity: sha512-cfO82V9zxxGBxcQDr1lfaYB7wykTa0b00mGa36FrJl7iTFd0Z2cHfEYuxcBRP/iNijCsWsEkA+jzT8hGYmv33w==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - eslint: ^9.10.0 + eslint: ^8.40 || 9 peerDependenciesMeta: eslint: optional: true - '@eslint/config-array@0.21.1': - resolution: {integrity: sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==} + '@eslint/config-array@0.21.2': + resolution: {integrity: sha512-nJl2KGTlrf9GjLimgIru+V/mzgSK0ABCDQRvxw5BjURL7WfH5uoWmizbH7QB6MmnMBd8cIC9uceWnezL1VZWWw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@eslint/config-helpers@0.4.2': @@ -779,12 +787,12 @@ packages: resolution: {integrity: sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/eslintrc@3.3.1': - resolution: {integrity: sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==} + '@eslint/eslintrc@3.3.5': + resolution: {integrity: sha512-4IlJx0X0qftVsN5E+/vGujTRIFtwuLbNsVUe7TO6zYPDR1O6nFwvwhIKEKSrl6dZchmYBITazxKoUYOjdtjlRg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/js@9.39.1': - resolution: {integrity: sha512-S26Stp4zCy88tH94QbBv3XCuzRQiZ9yXofEILmglYTh/Ug/a9/umqvgFtYBAo3Lp0nsI/5/qH1CCrbdK3AP1Tw==} + '@eslint/js@9.39.4': + resolution: 
{integrity: sha512-nE7DEIchvtiFTwBw4Lfbu59PG+kCofhjsKaCWzxTpt4lfRjRMqG6uMBzKXuEcyXhOHoUp9riAm7/aWYGhXZ9cw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@eslint/object-schema@2.1.7': @@ -828,20 +836,16 @@ packages: resolution: {integrity: sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==} engines: {node: '>=18.18.0'} - '@humanfs/node@0.16.6': - resolution: {integrity: sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==} + '@humanfs/node@0.16.7': + resolution: {integrity: sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==} engines: {node: '>=18.18.0'} '@humanwhocodes/module-importer@1.0.1': resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==} engines: {node: '>=12.22'} - '@humanwhocodes/retry@0.3.1': - resolution: {integrity: sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==} - engines: {node: '>=18.18'} - - '@humanwhocodes/retry@0.4.2': - resolution: {integrity: sha512-xeO57FpIu4p1Ri3Jq/EXq4ClRm86dVF2z/+kvFnyqVYRavTZmaFaUBbWCOuuTh0o/g7DSsk6kc2vrS4Vl5oPOQ==} + '@humanwhocodes/retry@0.4.3': + resolution: {integrity: sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==} engines: {node: '>=18.18'} '@inquirer/ansi@1.0.2': @@ -957,6 +961,12 @@ packages: resolution: {integrity: sha512-cXu86tF4VQVfwz8W1SPbhoRyHJkti6mjH/XJIxp40jhO4j2k1m4KYrEykxqWPkFF3vrK4rgQppBh//AwyGSXPA==} engines: {node: '>=18'} + '@napi-rs/wasm-runtime@1.1.2': + resolution: {integrity: sha512-sNXv5oLJ7ob93xkZ1XnxisYhGYXfaG9f65/ZgYuAu3qt7b3NadcOEhLvx28hv31PgX8SZJRYrAIPQilQmFpLVw==} + peerDependencies: + '@emnapi/core': ^1.7.1 + '@emnapi/runtime': ^1.7.1 + '@open-draft/deferred-promise@2.2.0': resolution: {integrity: sha512-CecwLWx3rhxVQF6V4bAgPS5t+So2sTbPgAzafKkVizyi7tlwpcFpdFqq+wqF2OwNBmqFuu6tOyouTuxgpMfzmA==} 
@@ -966,6 +976,9 @@ packages: '@open-draft/until@2.1.0': resolution: {integrity: sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg==} + '@oxc-project/types@0.122.0': + resolution: {integrity: sha512-oLAl5kBpV4w69UtFZ9xqcmTi+GENWOcPF7FCrczTiBbmC0ibXxCwyvZGbO39rCVEuLGAZM84DH0pUIyyv/YJzA==} + '@pandacss/is-valid-prop@1.9.0': resolution: {integrity: sha512-AZvpXWGyjbHc8TC+YVloQ31Z2c4j2xMvYj6UfVxuZdB5w4c9+4N8wy5R7I/XswNh8e4cfUlkvsEGDXjhJRgypw==} @@ -973,149 +986,112 @@ packages: resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} - '@pkgr/core@0.2.4': - resolution: {integrity: sha512-ROFF39F6ZrnzSUEmQQZUar0Jt4xVoP9WnDRdWwF4NNcXs3xBTLgBUDoOwW141y1jP+S8nahIbdxbFC7IShw9Iw==} - engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} - '@pkgr/core@0.2.9': resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} - '@playwright/test@1.58.2': - resolution: {integrity: sha512-akea+6bHYBBfA9uQqSYmlJXn61cTa+jbO87xVLCWbTqbWadRVmhxlXATaOjOgcBaWU4ePo0wB41KMFv3o35IXA==} + '@playwright/test@1.59.1': + resolution: {integrity: sha512-PG6q63nQg5c9rIi4/Z5lR5IVF7yU5MqmKaPOe0HSc0O2cX1fPi96sUQu5j7eo4gKCkB2AnNGoWt7y4/Xx3Kcqg==} engines: {node: '>=18'} hasBin: true - '@rolldown/pluginutils@1.0.0-rc.2': - resolution: {integrity: sha512-izyXV/v+cHiRfozX62W9htOAvwMo4/bXKDrQ+vom1L1qRuexPock/7VZDAhnpHCLNejd3NJ6hiab+tO0D44Rgw==} - - '@rolldown/pluginutils@1.0.0-rc.3': - resolution: {integrity: sha512-eybk3TjzzzV97Dlj5c+XrBFW57eTNhzod66y9HrBlzJ6NsCrWCp/2kaPS3K9wJmurBC0Tdw4yPjXKZqlznim3Q==} - - '@rollup/rollup-android-arm-eabi@4.59.0': - resolution: {integrity: sha512-upnNBkA6ZH2VKGcBj9Fyl9IGNPULcjXRlg0LLeaioQWueH30p6IXtJEbKAgvyv+mJaMxSm1l6xwDXYjpEMiLMg==} - cpu: [arm] - os: [android] - - '@rollup/rollup-android-arm64@4.59.0': - resolution: {integrity: 
sha512-hZ+Zxj3SySm4A/DylsDKZAeVg0mvi++0PYVceVyX7hemkw7OreKdCvW2oQ3T1FMZvCaQXqOTHb8qmBShoqk69Q==} + '@rolldown/binding-android-arm64@1.0.0-rc.12': + resolution: {integrity: sha512-pv1y2Fv0JybcykuiiD3qBOBdz6RteYojRFY1d+b95WVuzx211CRh+ytI/+9iVyWQ6koTh5dawe4S/yRfOFjgaA==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [android] - '@rollup/rollup-darwin-arm64@4.59.0': - resolution: {integrity: sha512-W2Psnbh1J8ZJw0xKAd8zdNgF9HRLkdWwwdWqubSVk0pUuQkoHnv7rx4GiF9rT4t5DIZGAsConRE3AxCdJ4m8rg==} + '@rolldown/binding-darwin-arm64@1.0.0-rc.12': + resolution: {integrity: sha512-cFYr6zTG/3PXXF3pUO+umXxt1wkRK/0AYT8lDwuqvRC+LuKYWSAQAQZjCWDQpAH172ZV6ieYrNnFzVVcnSflAg==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [darwin] - '@rollup/rollup-darwin-x64@4.59.0': - resolution: {integrity: sha512-ZW2KkwlS4lwTv7ZVsYDiARfFCnSGhzYPdiOU4IM2fDbL+QGlyAbjgSFuqNRbSthybLbIJ915UtZBtmuLrQAT/w==} + '@rolldown/binding-darwin-x64@1.0.0-rc.12': + resolution: {integrity: sha512-ZCsYknnHzeXYps0lGBz8JrF37GpE9bFVefrlmDrAQhOEi4IOIlcoU1+FwHEtyXGx2VkYAvhu7dyBf75EJQffBw==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [darwin] - '@rollup/rollup-freebsd-arm64@4.59.0': - resolution: {integrity: sha512-EsKaJ5ytAu9jI3lonzn3BgG8iRBjV4LxZexygcQbpiU0wU0ATxhNVEpXKfUa0pS05gTcSDMKpn3Sx+QB9RlTTA==} - cpu: [arm64] - os: [freebsd] - - '@rollup/rollup-freebsd-x64@4.59.0': - resolution: {integrity: sha512-d3DuZi2KzTMjImrxoHIAODUZYoUUMsuUiY4SRRcJy6NJoZ6iIqWnJu9IScV9jXysyGMVuW+KNzZvBLOcpdl3Vg==} + '@rolldown/binding-freebsd-x64@1.0.0-rc.12': + resolution: {integrity: sha512-dMLeprcVsyJsKolRXyoTH3NL6qtsT0Y2xeuEA8WQJquWFXkEC4bcu1rLZZSnZRMtAqwtrF/Ib9Ddtpa/Gkge9Q==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [freebsd] - '@rollup/rollup-linux-arm-gnueabihf@4.59.0': - resolution: {integrity: sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw==} - cpu: [arm] - os: [linux] - - '@rollup/rollup-linux-arm-musleabihf@4.59.0': - resolution: 
{integrity: sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA==} + '@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.12': + resolution: {integrity: sha512-YqWjAgGC/9M1lz3GR1r1rP79nMgo3mQiiA+Hfo+pvKFK1fAJ1bCi0ZQVh8noOqNacuY1qIcfyVfP6HoyBRZ85Q==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm64-gnu@4.59.0': - resolution: {integrity: sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA==} + '@rolldown/binding-linux-arm64-gnu@1.0.0-rc.12': + resolution: {integrity: sha512-/I5AS4cIroLpslsmzXfwbe5OmWvSsrFuEw3mwvbQ1kDxJ822hFHIx+vsN/TAzNVyepI/j/GSzrtCIwQPeKCLIg==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-arm64-musl@4.59.0': - resolution: {integrity: sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA==} + '@rolldown/binding-linux-arm64-musl@1.0.0-rc.12': + resolution: {integrity: sha512-V6/wZztnBqlx5hJQqNWwFdxIKN0m38p8Jas+VoSfgH54HSj9tKTt1dZvG6JRHcjh6D7TvrJPWFGaY9UBVOaWPw==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-loong64-gnu@4.59.0': - resolution: {integrity: sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg==} - cpu: [loong64] - os: [linux] - - '@rollup/rollup-linux-loong64-musl@4.59.0': - resolution: {integrity: sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q==} - cpu: [loong64] - os: [linux] - - '@rollup/rollup-linux-ppc64-gnu@4.59.0': - resolution: {integrity: sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA==} - cpu: [ppc64] - os: [linux] - - '@rollup/rollup-linux-ppc64-musl@4.59.0': - resolution: {integrity: sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA==} + '@rolldown/binding-linux-ppc64-gnu@1.0.0-rc.12': + resolution: 
{integrity: sha512-AP3E9BpcUYliZCxa3w5Kwj9OtEVDYK6sVoUzy4vTOJsjPOgdaJZKFmN4oOlX0Wp0RPV2ETfmIra9x1xuayFB7g==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [ppc64] os: [linux] - '@rollup/rollup-linux-riscv64-gnu@4.59.0': - resolution: {integrity: sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg==} - cpu: [riscv64] - os: [linux] - - '@rollup/rollup-linux-riscv64-musl@4.59.0': - resolution: {integrity: sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg==} - cpu: [riscv64] - os: [linux] - - '@rollup/rollup-linux-s390x-gnu@4.59.0': - resolution: {integrity: sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w==} + '@rolldown/binding-linux-s390x-gnu@1.0.0-rc.12': + resolution: {integrity: sha512-nWwpvUSPkoFmZo0kQazZYOrT7J5DGOJ/+QHHzjvNlooDZED8oH82Yg67HvehPPLAg5fUff7TfWFHQS8IV1n3og==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [s390x] os: [linux] - '@rollup/rollup-linux-x64-gnu@4.59.0': - resolution: {integrity: sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg==} + '@rolldown/binding-linux-x64-gnu@1.0.0-rc.12': + resolution: {integrity: sha512-RNrafz5bcwRy+O9e6P8Z/OCAJW/A+qtBczIqVYwTs14pf4iV1/+eKEjdOUta93q2TsT/FI0XYDP3TCky38LMAg==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [linux] - '@rollup/rollup-linux-x64-musl@4.59.0': - resolution: {integrity: sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg==} + '@rolldown/binding-linux-x64-musl@1.0.0-rc.12': + resolution: {integrity: sha512-Jpw/0iwoKWx3LJ2rc1yjFrj+T7iHZn2JDg1Yny1ma0luviFS4mhAIcd1LFNxK3EYu3DHWCps0ydXQ5i/rrJ2ig==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [linux] - '@rollup/rollup-openbsd-x64@4.59.0': - resolution: {integrity: sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ==} - cpu: [x64] - os: [openbsd] - - 
'@rollup/rollup-openharmony-arm64@4.59.0': - resolution: {integrity: sha512-tt9KBJqaqp5i5HUZzoafHZX8b5Q2Fe7UjYERADll83O4fGqJ49O1FsL6LpdzVFQcpwvnyd0i+K/VSwu/o/nWlA==} + '@rolldown/binding-openharmony-arm64@1.0.0-rc.12': + resolution: {integrity: sha512-vRugONE4yMfVn0+7lUKdKvN4D5YusEiPilaoO2sgUWpCvrncvWgPMzK00ZFFJuiPgLwgFNP5eSiUlv2tfc+lpA==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [openharmony] - '@rollup/rollup-win32-arm64-msvc@4.59.0': - resolution: {integrity: sha512-V5B6mG7OrGTwnxaNUzZTDTjDS7F75PO1ae6MJYdiMu60sq0CqN5CVeVsbhPxalupvTX8gXVSU9gq+Rx1/hvu6A==} - cpu: [arm64] - os: [win32] + '@rolldown/binding-wasm32-wasi@1.0.0-rc.12': + resolution: {integrity: sha512-ykGiLr/6kkiHc0XnBfmFJuCjr5ZYKKofkx+chJWDjitX+KsJuAmrzWhwyOMSHzPhzOHOy7u9HlFoa5MoAOJ/Zg==} + engines: {node: '>=14.0.0'} + cpu: [wasm32] - '@rollup/rollup-win32-ia32-msvc@4.59.0': - resolution: {integrity: sha512-UKFMHPuM9R0iBegwzKF4y0C4J9u8C6MEJgFuXTBerMk7EJ92GFVFYBfOZaSGLu6COf7FxpQNqhNS4c4icUPqxA==} - cpu: [ia32] + '@rolldown/binding-win32-arm64-msvc@1.0.0-rc.12': + resolution: {integrity: sha512-5eOND4duWkwx1AzCxadcOrNeighiLwMInEADT0YM7xeEOOFcovWZCq8dadXgcRHSf3Ulh1kFo/qvzoFiCLOL1Q==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] os: [win32] - '@rollup/rollup-win32-x64-gnu@4.59.0': - resolution: {integrity: sha512-laBkYlSS1n2L8fSo1thDNGrCTQMmxjYY5G0WFWjFFYZkKPjsMBsgJfGf4TLxXrF6RyhI60L8TMOjBMvXiTcxeA==} + '@rolldown/binding-win32-x64-msvc@1.0.0-rc.12': + resolution: {integrity: sha512-PyqoipaswDLAZtot351MLhrlrh6lcZPo2LSYE+VDxbVk24LVKAGOuE4hb8xZQmrPAuEtTZW8E6D2zc5EUZX4Lw==} + engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [win32] - '@rollup/rollup-win32-x64-msvc@4.59.0': - resolution: {integrity: sha512-2HRCml6OztYXyJXAvdDXPKcawukWY2GpR5/nxKp4iBgiO3wcoEGkAaqctIbZcNB6KlUQBIqt8VYkNSj2397EfA==} - cpu: [x64] - os: [win32] + '@rolldown/pluginutils@1.0.0-rc.12': + resolution: {integrity: 
sha512-HHMwmarRKvoFsJorqYlFeFRzXZqCt2ETQlEDOb9aqssrnVBB1/+xgTGtuTrIk5vzLNX1MjMtTf7W9z3tsSbrxw==} + + '@rolldown/pluginutils@1.0.0-rc.3': + resolution: {integrity: sha512-eybk3TjzzzV97Dlj5c+XrBFW57eTNhzod66y9HrBlzJ6NsCrWCp/2kaPS3K9wJmurBC0Tdw4yPjXKZqlznim3Q==} + + '@rolldown/pluginutils@1.0.0-rc.7': + resolution: {integrity: sha512-qujRfC8sFVInYSPPMLQByRh7zhwkGFS4+tyMQ83srV1qrxL4g8E2tyxVVyxd0+8QeBM1mIk9KbWxkegRr76XzA==} '@stylistic/eslint-plugin@2.13.0': resolution: {integrity: sha512-RnO1SaiCFHn666wNz2QfZEFxvmiNRqhzaMXHXxXXKt+MEP7aajlPxUSMIQpKAaJfverpovEYqjBOXDq6dDcaOQ==} @@ -1123,68 +1099,80 @@ packages: peerDependencies: eslint: '>=8.40.0' - '@swc/core-darwin-arm64@1.15.18': - resolution: {integrity: sha512-+mIv7uBuSaywN3C9LNuWaX1jJJ3SKfiJuE6Lr3bd+/1Iv8oMU7oLBjYMluX1UrEPzwN2qCdY6Io0yVicABoCwQ==} + '@swc/core-darwin-arm64@1.15.24': + resolution: {integrity: sha512-uM5ZGfFXjtvtJ+fe448PVBEbn/CSxS3UAyLj3O9xOqKIWy3S6hPTXSPbszxkSsGDYKi+YFhzAsR4r/eXLxEQ0g==} engines: {node: '>=10'} cpu: [arm64] os: [darwin] - '@swc/core-darwin-x64@1.15.18': - resolution: {integrity: sha512-wZle0eaQhnzxWX5V/2kEOI6Z9vl/lTFEC6V4EWcn+5pDjhemCpQv9e/TDJ0GIoiClX8EDWRvuZwh+Z3dhL1NAg==} + '@swc/core-darwin-x64@1.15.24': + resolution: {integrity: sha512-fMIb/Zfn929pw25VMBhV7Ji2Dl+lCWtUPNdYJQYOke+00E5fcQ9ynxtP8+qhUo/HZc+mYQb1gJxwHM9vty+lXg==} engines: {node: '>=10'} cpu: [x64] os: [darwin] - '@swc/core-linux-arm-gnueabihf@1.15.18': - resolution: {integrity: sha512-ao61HGXVqrJFHAcPtF4/DegmwEkVCo4HApnotLU8ognfmU8x589z7+tcf3hU+qBiU1WOXV5fQX6W9Nzs6hjxDw==} + '@swc/core-linux-arm-gnueabihf@1.15.24': + resolution: {integrity: sha512-vOkjsyjjxnoYx3hMEWcGxQrMgnNrRm6WAegBXrN8foHtDAR+zpdhpGF5a4lj1bNPgXAvmysjui8cM1ov/Clkaw==} engines: {node: '>=10'} cpu: [arm] os: [linux] - '@swc/core-linux-arm64-gnu@1.15.18': - resolution: {integrity: sha512-3xnctOBLIq3kj8PxOCgPrGjBLP/kNOddr6f5gukYt/1IZxsITQaU9TDyjeX6jG+FiCIHjCuWuffsyQDL5Ew1bg==} + '@swc/core-linux-arm64-gnu@1.15.24': + resolution: {integrity: 
sha512-h/oNu+upkXJ6Cicnq7YGVj9PkdfarLCdQa8l/FlHYvfv8CEiMaeeTnpLU7gSBH/rGxosM6Qkfa/J9mThGF9CLA==} engines: {node: '>=10'} cpu: [arm64] os: [linux] - '@swc/core-linux-arm64-musl@1.15.18': - resolution: {integrity: sha512-0a+Lix+FSSHBSBOA0XznCcHo5/1nA6oLLjcnocvzXeqtdjnPb+SvchItHI+lfeiuj1sClYPDvPMLSLyXFaiIKw==} + '@swc/core-linux-arm64-musl@1.15.24': + resolution: {integrity: sha512-ZpF/pRe1guk6sKzQI9D1jAORtjTdNlyeXn9GDz8ophof/w2WhojRblvSDJaGe7rJjcPN8AaOkhwdRUh7q8oYIg==} engines: {node: '>=10'} cpu: [arm64] os: [linux] - '@swc/core-linux-x64-gnu@1.15.18': - resolution: {integrity: sha512-wG9J8vReUlpaHz4KOD/5UE1AUgirimU4UFT9oZmupUDEofxJKYb1mTA/DrMj0s78bkBiNI+7Fo2EgPuvOJfuAA==} + '@swc/core-linux-ppc64-gnu@1.15.24': + resolution: {integrity: sha512-QZEsZfisHTSJlmyChgDFNmKPb3W6Lhbfo/O76HhIngfEdnQNmukS38/VSe1feho+xkV5A5hETyCbx3sALBZKAQ==} + engines: {node: '>=10'} + cpu: [ppc64] + os: [linux] + + '@swc/core-linux-s390x-gnu@1.15.24': + resolution: {integrity: sha512-DLdJKVsJgglqQrJBuoUYNmzm3leI7kUZhLbZGHv42onfKsGf6JDS3+bzCUQfte/XOqDjh/tmmn1DR/CF/tCJFw==} + engines: {node: '>=10'} + cpu: [s390x] + os: [linux] + + '@swc/core-linux-x64-gnu@1.15.24': + resolution: {integrity: sha512-IpLYfposPA/XLxYOKpRfeccl1p5dDa3+okZDHHTchBkXEaVCnq5MADPmIWwIYj1tudt7hORsEHccG5no6IUQRw==} engines: {node: '>=10'} cpu: [x64] os: [linux] - '@swc/core-linux-x64-musl@1.15.18': - resolution: {integrity: sha512-4nwbVvCphKzicwNWRmvD5iBaZj8JYsRGa4xOxJmOyHlMDpsvvJ2OR2cODlvWyGFH6BYL1MfIAK3qph3hp0Az6g==} + '@swc/core-linux-x64-musl@1.15.24': + resolution: {integrity: sha512-JHy3fMSc0t/EPWgo74+OK5TGr51aElnzqfUPaiRf2qJ/BfX5CUCfMiWVBuhI7qmVMBnk1jTRnL/xZnOSHDPLYg==} engines: {node: '>=10'} cpu: [x64] os: [linux] - '@swc/core-win32-arm64-msvc@1.15.18': - resolution: {integrity: sha512-zk0RYO+LjiBCat2RTMHzAWaMky0cra9loH4oRrLKLLNuL+jarxKLFDA8xTZWEkCPLjUTwlRN7d28eDLLMgtUcQ==} + '@swc/core-win32-arm64-msvc@1.15.24': + resolution: {integrity: 
sha512-Txj+qUH1z2bUd1P3JvwByfjKFti3cptlAxhWgmunBUUxy/IW3CXLZ6l6Gk4liANadKkU71nIU1X30Z5vpMT3BA==} engines: {node: '>=10'} cpu: [arm64] os: [win32] - '@swc/core-win32-ia32-msvc@1.15.18': - resolution: {integrity: sha512-yVuTrZ0RccD5+PEkpcLOBAuPbYBXS6rslENvIXfvJGXSdX5QGi1ehC4BjAMl5FkKLiam4kJECUI0l7Hq7T1vwg==} + '@swc/core-win32-ia32-msvc@1.15.24': + resolution: {integrity: sha512-15D/nl3XwrhFpMv+MADFOiVwv3FvH9j8c6Rf8EXBT3Q5LoMh8YnDnSgPYqw1JzPnksvsBX6QPXLiPqmcR/Z4qQ==} engines: {node: '>=10'} cpu: [ia32] os: [win32] - '@swc/core-win32-x64-msvc@1.15.18': - resolution: {integrity: sha512-7NRmE4hmUQNCbYU3Hn9Tz57mK9Qq4c97ZS+YlamlK6qG9Fb5g/BB3gPDe0iLlJkns/sYv2VWSkm8c3NmbEGjbg==} + '@swc/core-win32-x64-msvc@1.15.24': + resolution: {integrity: sha512-PR0PlTlPra2JbaDphrOAzm6s0v9rA0F17YzB+XbWD95B4g2cWcZY9LAeTa4xll70VLw9Jr7xBrlohqlQmelMFQ==} engines: {node: '>=10'} cpu: [x64] os: [win32] - '@swc/core@1.15.18': - resolution: {integrity: sha512-z87aF9GphWp//fnkRsqvtY+inMVPgYW3zSlXH1kJFvRT5H/wiAn+G32qW5l3oEk63KSF1x3Ov0BfHCObAmT8RA==} + '@swc/core@1.15.24': + resolution: {integrity: sha512-5Hj8aNasue7yusUt8LGCUe/AjM7RMAce8ZoyDyiFwx7Al+GbYKL+yE7g4sJk8vEr1dKIkTRARkNIJENc4CjkBQ==} engines: {node: '>=10'} peerDependencies: '@swc/helpers': '>=0.5.17' @@ -1198,23 +1186,23 @@ packages: '@swc/helpers@0.5.19': resolution: {integrity: sha512-QamiFeIK3txNjgUTNppE6MiG3p7TdninpZu0E0PbqVh1a9FNLT2FRhisaa4NcaX52XVhA5l7Pk58Ft7Sqi/2sA==} - '@swc/types@0.1.25': - resolution: {integrity: sha512-iAoY/qRhNH8a/hBvm3zKj9qQ4oc2+3w1unPJa2XvTK3XjeLXtzcCingVPw/9e5mn1+0yPqxcBGp9Jf0pkfMb1g==} + '@swc/types@0.1.26': + resolution: {integrity: sha512-lyMwd7WGgG79RS7EERZV3T8wMdmPq3xwyg+1nmAM64kIhx5yl+juO2PYIHb7vTiPgPCj8LYjsNV2T5wiQHUEaw==} - '@tanstack/eslint-plugin-query@5.91.4': - resolution: {integrity: sha512-8a+GAeR7oxJ5laNyYBQ6miPK09Hi18o5Oie/jx8zioXODv/AUFLZQecKabPdpQSLmuDXEBPKFh+W5DKbWlahjQ==} + '@tanstack/eslint-plugin-query@5.96.1': + resolution: {integrity: 
sha512-BDJU+Q+zESjarSSFmbzpCBh+1wDxwW+DyQlvwIukF24MHYOoRPH4ouJRTlDdbp3BnIkeylZaHHSgIvxY9lgI/g==} peerDependencies: eslint: ^8.57.0 || ^9.0.0 - typescript: ^5.0.0 + typescript: ^5.4.0 peerDependenciesMeta: typescript: optional: true - '@tanstack/query-core@5.90.20': - resolution: {integrity: sha512-OMD2HLpNouXEfZJWcKeVKUgQ5n+n3A2JFmBaScpNDUqSrQSjiveC7dKMe53uJUg1nDG16ttFPz2xfilz6i2uVg==} + '@tanstack/query-core@5.96.1': + resolution: {integrity: sha512-u1yBgtavSy+N8wgtW3PiER6UpxcplMje65yXnnVgiHTqiMwLlxiw4WvQDrXyn+UD6lnn8kHaxmerJUzQcV/MMg==} - '@tanstack/react-query@5.90.21': - resolution: {integrity: sha512-0Lu6y5t+tvlTJMTO7oh5NSpJfpg/5D41LlThfepTixPYkJ0sE2Jj0m0f6yYqujBwIXlId87e234+MxG3D3g7kg==} + '@tanstack/react-query@5.96.1': + resolution: {integrity: sha512-2X7KYK5KKWUKGeWCVcqxXAkYefJtrKB7tSKWgeG++b0H6BRHxQaLSSi8AxcgjmUnnosHuh9WsFZqvE16P1WCzA==} peerDependencies: react: ^18 || ^19 @@ -1225,8 +1213,8 @@ packages: react: '>=16.8' react-dom: '>=16.8' - '@tanstack/react-virtual@3.13.21': - resolution: {integrity: sha512-SYXFrmrbPgXBvf+HsOsKhFgqSe4M6B29VHOsX9Jih9TlNkNkDWx0hWMiMLUghMEzyUz772ndzdEeCEBx+3GIZw==} + '@tanstack/react-virtual@3.13.23': + resolution: {integrity: sha512-XnMRnHQ23piOVj2bzJqHrRrLg4r+F86fuBcwteKfbIjJrtGxb4z7tIvPVAe4B+4UVwo9G4Giuz5fmapcrnZ0OQ==} peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 @@ -1235,8 +1223,8 @@ packages: resolution: {integrity: sha512-ldZXEhOBb8Is7xLs01fR3YEc3DERiz5silj8tnGkFZytt1abEvl/GhUmCE0PMLaMPTa3Jk4HbKmRlHmu+gCftg==} engines: {node: '>=12'} - '@tanstack/virtual-core@3.13.21': - resolution: {integrity: sha512-ww+fmLHyCbPSf7JNbWZP3g7wl6SdNo3ah5Aiw+0e9FDErkVHLKprYUrwTm7dF646FtEkN/KkAKPYezxpmvOjxw==} + '@tanstack/virtual-core@3.13.23': + resolution: {integrity: sha512-zSz2Z2HNyLjCplANTDyl3BcdQJc2k1+yyFoKhNRmCr7V7dY8o8q5m8uFTI1/Pg1kL+Hgrz6u3Xo6eFUB7l66cg==} '@testing-library/dom@10.4.0': resolution: {integrity: 
sha512-pemlzrSESWbdAloYml3bAJMEfNh1Z7EduzqPKprCH5S341frlpYnUEW0H72dLxa6IsYr+mPno20GiSm+h9dEdQ==} @@ -1273,6 +1261,9 @@ packages: '@ts-morph/common@0.28.1': resolution: {integrity: sha512-W74iWf7ILp1ZKNYXY5qbddNaml7e9Sedv5lvU1V8lftlitkc9Pq1A+jlH23ltDgWYeZFFEqGCD1Ies9hqu3O+g==} + '@tybys/wasm-util@0.10.1': + resolution: {integrity: sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==} + '@types/aria-query@5.0.4': resolution: {integrity: sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==} @@ -1351,9 +1342,6 @@ packages: '@types/estree-jsx@1.0.5': resolution: {integrity: sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==} - '@types/estree@1.0.6': - resolution: {integrity: sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==} - '@types/estree@1.0.8': resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} @@ -1378,8 +1366,8 @@ packages: '@types/ms@2.1.0': resolution: {integrity: sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==} - '@types/node@24.10.3': - resolution: {integrity: sha512-gqkrWUsS8hcm0r44yn7/xZeV1ERva/nLgrLxFRUGb7aoNMIJfZJ3AC261zDQuOAKC7MiXai1WCpYc48jAHoShQ==} + '@types/node@24.12.0': + resolution: {integrity: sha512-GYDxsZi3ChgmckRT9HPU0WEhKLP08ev/Yfcq2AstjrDASOYCSXeyjDsHg4v5t4jOj7cyDX3vmprafKlWIG9MXQ==} '@types/normalize-package-data@2.4.4': resolution: {integrity: sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==} @@ -1421,67 +1409,67 @@ packages: '@types/ws@8.18.1': resolution: {integrity: sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==} - '@typescript-eslint/eslint-plugin@8.56.1': - resolution: {integrity: 
sha512-Jz9ZztpB37dNC+HU2HI28Bs9QXpzCz+y/twHOwhyrIRdbuVDxSytJNDl6z/aAKlaRIwC7y8wJdkBv7FxYGgi0A==} + '@typescript-eslint/eslint-plugin@8.58.0': + resolution: {integrity: sha512-RLkVSiNuUP1C2ROIWfqX+YcUfLaSnxGE/8M+Y57lopVwg9VTYYfhuz15Yf1IzCKgZj6/rIbYTmJCUSqr76r0Wg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - '@typescript-eslint/parser': ^8.56.1 + '@typescript-eslint/parser': ^8.58.0 eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 - typescript: '>=4.8.4 <6.0.0' + typescript: '>=4.8.4 <6.1.0' - '@typescript-eslint/parser@8.56.1': - resolution: {integrity: sha512-klQbnPAAiGYFyI02+znpBRLyjL4/BrBd0nyWkdC0s/6xFLkXYQ8OoRrSkqacS1ddVxf/LDyODIKbQ5TgKAf/Fg==} + '@typescript-eslint/parser@8.58.0': + resolution: {integrity: sha512-rLoGZIf9afaRBYsPUMtvkDWykwXwUPL60HebR4JgTI8mxfFe2cQTu3AGitANp4b9B2QlVru6WzjgB2IzJKiCSA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 - typescript: '>=4.8.4 <6.0.0' + typescript: '>=4.8.4 <6.1.0' - '@typescript-eslint/project-service@8.56.1': - resolution: {integrity: sha512-TAdqQTzHNNvlVFfR+hu2PDJrURiwKsUvxFn1M0h95BB8ah5jejas08jUWG4dBA68jDMI988IvtfdAI53JzEHOQ==} + '@typescript-eslint/project-service@8.58.0': + resolution: {integrity: sha512-8Q/wBPWLQP1j16NxoPNIKpDZFMaxl7yWIoqXWYeWO+Bbd2mjgvoF0dxP2jKZg5+x49rgKdf7Ck473M8PC3V9lg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - typescript: '>=4.8.4 <6.0.0' + typescript: '>=4.8.4 <6.1.0' - '@typescript-eslint/scope-manager@8.56.1': - resolution: {integrity: sha512-YAi4VDKcIZp0O4tz/haYKhmIDZFEUPOreKbfdAN3SzUDMcPhJ8QI99xQXqX+HoUVq8cs85eRKnD+rne2UAnj2w==} + '@typescript-eslint/scope-manager@8.58.0': + resolution: {integrity: sha512-W1Lur1oF50FxSnNdGp3Vs6P+yBRSmZiw4IIjEeYxd8UQJwhUF0gDgDD/W/Tgmh73mxgEU3qX0Bzdl/NGuSPEpQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/tsconfig-utils@8.56.1': - resolution: {integrity: 
sha512-qOtCYzKEeyr3aR9f28mPJqBty7+DBqsdd63eO0yyDwc6vgThj2UjWfJIcsFeSucYydqcuudMOprZ+x1SpF3ZuQ==} + '@typescript-eslint/tsconfig-utils@8.58.0': + resolution: {integrity: sha512-doNSZEVJsWEu4htiVC+PR6NpM+pa+a4ClH9INRWOWCUzMst/VA9c4gXq92F8GUD1rwhNvRLkgjfYtFXegXQF7A==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - typescript: '>=4.8.4 <6.0.0' + typescript: '>=4.8.4 <6.1.0' - '@typescript-eslint/type-utils@8.56.1': - resolution: {integrity: sha512-yB/7dxi7MgTtGhZdaHCemf7PuwrHMenHjmzgUW1aJpO+bBU43OycnM3Wn+DdvDO/8zzA9HlhaJ0AUGuvri4oGg==} + '@typescript-eslint/type-utils@8.58.0': + resolution: {integrity: sha512-aGsCQImkDIqMyx1u4PrVlbi/krmDsQUs4zAcCV6M7yPcPev+RqVlndsJy9kJ8TLihW9TZ0kbDAzctpLn5o+lOg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 - typescript: '>=4.8.4 <6.0.0' + typescript: '>=4.8.4 <6.1.0' '@typescript-eslint/types@8.48.1': resolution: {integrity: sha512-+fZ3LZNeiELGmimrujsDCT4CRIbq5oXdHe7chLiW8qzqyPMnn1puNstCrMNVAqwcl2FdIxkuJ4tOs/RFDBVc/Q==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/types@8.56.1': - resolution: {integrity: sha512-dbMkdIUkIkchgGDIv7KLUpa0Mda4IYjo4IAMJUZ+3xNoUXxMsk9YtKpTHSChRS85o+H9ftm51gsK1dZReY9CVw==} + '@typescript-eslint/types@8.58.0': + resolution: {integrity: sha512-O9CjxypDT89fbHxRfETNoAnHj/i6IpRK0CvbVN3qibxlLdo5p5hcLmUuCCrHMpxiWSwKyI8mCP7qRNYuOJ0Uww==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/typescript-estree@8.56.1': - resolution: {integrity: sha512-qzUL1qgalIvKWAf9C1HpvBjif+Vm6rcT5wZd4VoMb9+Km3iS3Cv9DY6dMRMDtPnwRAFyAi7YXJpTIEXLvdfPxg==} + '@typescript-eslint/typescript-estree@8.58.0': + resolution: {integrity: sha512-7vv5UWbHqew/dvs+D3e1RvLv1v2eeZ9txRHPnEEBUgSNLx5ghdzjHa0sgLWYVKssH+lYmV0JaWdoubo0ncGYLA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - typescript: '>=4.8.4 <6.0.0' + typescript: '>=4.8.4 <6.1.0' - '@typescript-eslint/utils@8.56.1': - resolution: 
{integrity: sha512-HPAVNIME3tABJ61siYlHzSWCGtOoeP2RTIaHXFMPqjrQKCGB9OgUVdiNgH7TJS2JNIQ5qQ4RsAUDuGaGme/KOA==} + '@typescript-eslint/utils@8.58.0': + resolution: {integrity: sha512-RfeSqcFeHMHlAWzt4TBjWOAtoW9lnsAGiP3GbaX9uVgTYYrMbVnGONEfUCiSss+xMHFl+eHZiipmA8WkQ7FuNA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 - typescript: '>=4.8.4 <6.0.0' + typescript: '>=4.8.4 <6.1.0' - '@typescript-eslint/visitor-keys@8.56.1': - resolution: {integrity: sha512-KiROIzYdEV85YygXw6BI/Dx4fnBlFQu6Mq4QE4MOH9fFnhohw6wX/OAvDY2/C+ut0I3RSPKenvZJIVYqJNkhEw==} + '@typescript-eslint/visitor-keys@8.58.0': + resolution: {integrity: sha512-XJ9UD9+bbDo4a4epraTwG3TsNPeiB9aShrUneAVXy8q4LuwowN+qu89/6ByLMINqvIMeI9H9hOHQtg/ijrYXzQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@ungap/structured-clone@1.3.0': @@ -1506,17 +1494,17 @@ packages: '@visx/vendor@3.12.0': resolution: {integrity: sha512-SVO+G0xtnL9dsNpGDcjCgoiCnlB3iLSM9KLz1sLbSrV7RaVXwY3/BTm2X9OWN1jH2a9M+eHt6DJ6sE6CXm4cUg==} - '@vitejs/plugin-react-swc@4.2.3': - resolution: {integrity: sha512-QIluDil2prhY1gdA3GGwxZzTAmLdi8cQ2CcuMW4PB/Wu4e/1pzqrwhYWVd09LInCRlDUidQjd0B70QWbjWtLxA==} + '@vitejs/plugin-react-swc@4.3.0': + resolution: {integrity: sha512-mOkXCII839dHyAt/gpoSlm28JIVDwhZ6tnG6wJxUy2bmOx7UaPjvOyIDf3SFv5s7Eo7HVaq6kRcu6YMEzt5Z7w==} engines: {node: ^20.19.0 || >=22.12.0} peerDependencies: - vite: ^4 || ^5 || ^6 || ^7 + vite: '>=7.3.2' - '@vitejs/plugin-react@5.1.4': - resolution: {integrity: sha512-VIcFLdRi/VYRU8OL/puL7QXMYafHmqOnwTZY50U1JPlCNj30PxCMx65c494b1K9be9hX83KVt0+gTEwTWLqToA==} + '@vitejs/plugin-react@5.2.0': + resolution: {integrity: sha512-YmKkfhOAi3wsB1PhJq5Scj3GXMn3WvtQ/JC0xoopuHoXSdmtdStOpFrYaT1kie2YgFBcIe64ROzMYRjCrYOdYw==} engines: {node: ^20.19.0 || >=22.12.0} peerDependencies: - vite: ^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 + vite: '>=7.3.2' '@vitest/coverage-v8@3.2.4': resolution: {integrity: 
sha512-EyF9SXU6kS5Ku/U82E259WSnvg6c8KTjppUncuNdm5QHpe17mwREHnjDzozC8x9MZ0xfBUFSaLkRv4TMA75ALQ==} @@ -1534,7 +1522,7 @@ packages: resolution: {integrity: sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==} peerDependencies: msw: ^2.4.9 - vite: ^5.0.0 || ^6.0.0 || ^7.0.0-0 + vite: '>=7.3.2' peerDependenciesMeta: msw: optional: true @@ -1556,14 +1544,14 @@ packages: '@vitest/utils@3.2.4': resolution: {integrity: sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==} - '@xyflow/react@12.10.1': - resolution: {integrity: sha512-5eSWtIK/+rkldOuFbOOz44CRgQRjtS9v5nufk77DV+XBnfCGL9HAQ8PG00o2ZYKqkEU/Ak6wrKC95Tu+2zuK3Q==} + '@xyflow/react@12.10.2': + resolution: {integrity: sha512-CgIi6HwlcHXwlkTpr0fxLv/0sRVNZ8IdwKLzzeCscaYBwpvfcH1QFOCeaTCuEn1FQEs/B8CjnTSjhs8udgmBgQ==} peerDependencies: react: '>=17' react-dom: '>=17' - '@xyflow/system@0.0.75': - resolution: {integrity: sha512-iXs+AGFLi8w/VlAoc/iSxk+CxfT6o64Uw/k0CKASOPqjqz6E0rb5jFZgJtXGZCpfQI6OQpu5EnumP5fGxQheaQ==} + '@xyflow/system@0.0.76': + resolution: {integrity: sha512-hvwvnRS1B3REwVDlWexsq7YQaPZeG3/mKo1jv38UmnpWmxihp14bW6VtEOuHEwJX2FvzFw8k77LyKSk/wiZVNA==} '@zag-js/accordion@1.35.3': resolution: {integrity: sha512-wmw6yo5Zr6ShiKGTc5ICEOJCurWAOSGubIpGISiHi3cZ4tlxKF/vpATIUT3eq8xzdB56YK57yKCujs/WmwqqoA==} @@ -1804,8 +1792,8 @@ packages: engines: {node: '>=0.4.0'} hasBin: true - acorn@8.15.0: - resolution: {integrity: sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==} + acorn@8.16.0: + resolution: {integrity: sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==} engines: {node: '>=0.4.0'} hasBin: true @@ -1909,8 +1897,8 @@ packages: resolution: {integrity: sha512-Xm7bpRXnDSX2YE2YFfBk2FnF0ep6tmG7xPh8iHee8MIcrgq762Nkce856dYtJYLkuIoYZvGfTs/PbZhideTcEg==} engines: {node: '>=4'} - axios@1.13.6: - resolution: {integrity: 
sha512-ChTCHMouEe2kn713WHbQGcuYrr6fXTBiu460OTwWrWob16g1bXn4vtz07Ope7ewMozJAnEquLk5lWQWtBig9DQ==} + axios@1.14.0: + resolution: {integrity: sha512-3Y8yrqLSwjuzpXuZ0oIYZ/XGgLwUIBU3uLvbcpb0pidD9ctpShJd43KSlEEkVQg6DS0G9NKyzOvBfUtDKEyHvQ==} axobject-query@4.1.0: resolution: {integrity: sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==} @@ -1933,8 +1921,8 @@ packages: resolution: {integrity: sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==} engines: {node: 18 || 20 || >=22} - baseline-browser-mapping@2.10.0: - resolution: {integrity: sha512-lIyg0szRfYbiy67j9KN8IyeD7q7hcmqnJ1ddWmNt19ItGpNN64mnllmxUNFIOdOm6by97jlL6wfpTTJrmnjWAA==} + baseline-browser-mapping@2.10.16: + resolution: {integrity: sha512-Lyf3aK28zpsD1yQMiiHD4RvVb6UdMoo8xzG2XzFIfR9luPzOpcBlAsT/qfB1XWS1bxWT+UtE4WmQgsp297FYOA==} engines: {node: '>=6.0.0'} hasBin: true @@ -1942,14 +1930,14 @@ packages: resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} engines: {node: '>=8'} - brace-expansion@1.1.12: - resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==} + brace-expansion@1.1.13: + resolution: {integrity: sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==} - brace-expansion@2.0.2: - resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} + brace-expansion@2.0.3: + resolution: {integrity: sha512-MCV/fYJEbqx68aE58kv2cA/kiky1G8vux3OR6/jbS+jIMe/6fJWa0DTzJU7dqijOWYwHi1t29FlfYI9uytqlpA==} - brace-expansion@5.0.4: - resolution: {integrity: sha512-h+DEnpVvxmfVefa4jFbCf5HdH5YMDXRsmKflpf1pILZWRFlTbJpxeU55nJl4Smt5HQaGzg1o6RHFPJaOqnmBDg==} + brace-expansion@5.0.5: + resolution: {integrity: sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==} engines: 
{node: 18 || 20 || >=22} braces@3.0.3: @@ -1961,8 +1949,8 @@ packages: engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} hasBin: true - browserslist@4.28.1: - resolution: {integrity: sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==} + browserslist@4.28.2: + resolution: {integrity: sha512-48xSriZYYg+8qXna9kwqjIVzuQxi+KYWp2+5nCYnYKPTr0LvD89Jqk2Or5ogxz0NUMfIjhh2lIUX/LyX9B4oIg==} engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} hasBin: true @@ -2005,8 +1993,8 @@ packages: caniuse-lite@1.0.30001707: resolution: {integrity: sha512-3qtRjw/HQSMlDWf+X79N206fepf4SOOU6SQLMaq/0KkZLmSjPxAkBOQQ+FxbHKfHmYLZFfdWsO3KA90ceHPSnw==} - caniuse-lite@1.0.30001777: - resolution: {integrity: sha512-tmN+fJxroPndC74efCdp12j+0rk0RHwV5Jwa1zWaFVyw2ZxAuPeG8ZgWC3Wz7uSjT3qMRQ5XHZ4COgQmsCMJAQ==} + caniuse-lite@1.0.30001786: + resolution: {integrity: sha512-4oxTZEvqmLLrERwxO76yfKM7acZo310U+v4kqexI2TL1DkkUEMT8UijrxxcnVdxR3qkVf5awGRX+4Z6aPHVKrA==} ccount@2.0.1: resolution: {integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==} @@ -2155,8 +2143,8 @@ packages: resolution: {integrity: sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA==} engines: {node: '>=10'} - cross-fetch@4.0.0: - resolution: {integrity: sha512-e4a5N8lVvuLgAWgnCrLr2PP0YyDOTHa9H/Rj54dirp61qXnNq46m82bRhNqIA5VccJtWBvPTFRV3TtvHUKPB1g==} + cross-fetch@4.1.0: + resolution: {integrity: sha512-uKm5PU+MHTootlWEY+mZ4vvXoCn4fLQxT9dSc1sXVMSFkINTJVN8cAQROpwcKm8bJ/c7rgZVIBWzH5T78sNZZw==} cross-spawn@7.0.6: resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} @@ -2255,17 +2243,8 @@ packages: resolution: {integrity: sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==} engines: {node: '>= 0.4'} - dayjs@1.11.19: - resolution: {integrity: 
sha512-t5EcLVS6QPBNqM2z8fakk/NKel+Xzshgt8FFKAn+qwlD1pzZWxh0nVCrvFK7ZDb6XucZeF9z8C7CBWTRIVApAw==} - - debug@4.4.0: - resolution: {integrity: sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==} - engines: {node: '>=6.0'} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true + dayjs@1.11.20: + resolution: {integrity: sha512-YbwwqR/uYpeoP4pu043q+LTDLFBLApUP6VxRihdfNTqu4ubqMlGDLd6ErXhEgsyvY0K6nCs7nggYumAN+9uEuQ==} debug@4.4.1: resolution: {integrity: sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==} @@ -2303,8 +2282,8 @@ packages: resolution: {integrity: sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==} engines: {node: '>= 0.4'} - defu@6.1.4: - resolution: {integrity: sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==} + defu@6.1.6: + resolution: {integrity: sha512-f8mefEW4WIVg4LckePx3mALjQSPQgFlg9U8yaPdlsbdYcHQyj9n2zL2LJEA52smeYxOvmd/nB7TpMtHGMTHcug==} delaunator@5.0.1: resolution: {integrity: sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==} @@ -2320,6 +2299,10 @@ packages: destr@2.0.5: resolution: {integrity: sha512-ugFTXCtDZunbzasqBxrK93Ik/DRYsO6S/fedkWEMKqt04xZ4csmnmwGDBAb07QWNaGMAmnTIemsYZCksjATwsA==} + detect-libc@2.1.2: + resolution: {integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==} + engines: {node: '>=8'} + devlop@1.1.0: resolution: {integrity: sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==} @@ -2354,8 +2337,8 @@ packages: electron-to-chromium@1.5.123: resolution: {integrity: sha512-refir3NlutEZqlKaBLK0tzlVLe5P2wDKS7UQt/3SpibizgsRAPOsqQC3ffw1nlv3ze5gjRQZYHoPymgVZkplFA==} - electron-to-chromium@1.5.307: - resolution: {integrity: 
sha512-5z3uFKBWjiNR44nFcYdkcXjKMbg5KXNdciu7mhTPo9tB7NbqSNP2sSnGR+fqknZSCwKkBN+oxiiajWs4dT6ORg==} + electron-to-chromium@1.5.331: + resolution: {integrity: sha512-IbxXrsTlD3hRodkLnbxAPP4OuJYdWCeM3IOdT+CpcMoIwIoDfCmRpEtSPfwBXxVkg9xmBeY7Lz2Eo2TDn/HC3Q==} elkjs@0.11.1: resolution: {integrity: sha512-zxxR9k+rx5ktMwT/FwyLdPCrq7xN6e4VGGHH8hA01vVYKjTFik7nHOxBnAYtrgYUB1RpAiLvA1/U2YraWxyKKg==} @@ -2444,13 +2427,13 @@ packages: peerDependencies: eslint: '>=7.0.0' - eslint-json-compat-utils@0.2.1: - resolution: {integrity: sha512-YzEodbDyW8DX8bImKhAcCeu/L31Dd/70Bidx2Qex9OFUtgzXLqtfWL4Hr5fM/aCCB8QUZLuJur0S9k6UfgFkfg==} + eslint-json-compat-utils@0.2.3: + resolution: {integrity: sha512-RbBmDFyu7FqnjE8F0ZxPNzx5UaptdeS9Uu50r7A+D7s/+FCX+ybiyViYEgFUaFIFqSWJgZRTpL5d8Kanxxl2lQ==} engines: {node: '>=12'} peerDependencies: '@eslint/json': '*' eslint: '*' - jsonc-eslint-parser: ^2.4.0 + jsonc-eslint-parser: ^2.4.0 || ^3.0.0 peerDependenciesMeta: '@eslint/json': optional: true @@ -2459,8 +2442,8 @@ packages: resolution: {integrity: sha512-z/h4oBRd9wI1ET60HqcLSU6XPeAh/EPOrBBTyCdkWeMoYrWAaUVA+DOQkWTiNIyCltG4NTmy62SQisVXxoXurw==} engines: {node: '>=18.10.0'} - eslint-plugin-jsonc@2.21.0: - resolution: {integrity: sha512-HttlxdNG5ly3YjP1cFMP62R4qKLxJURfBZo2gnMY+yQojZxkLyOpY1H1KRTKBmvQeSG9pIpSGEhDjE17vvYosg==} + eslint-plugin-jsonc@2.21.1: + resolution: {integrity: sha512-dbNR5iEnQeORwsK2WZzr3QaMtFCY3kKJVMRHPzUpKzMhmVy2zIpVgFDpX8MNoIdoqz6KCpCfOJavhfiSbZbN+w==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} peerDependencies: eslint: '>=6.0.0' @@ -2534,8 +2517,8 @@ packages: resolution: {integrity: sha512-tD40eHxA35h0PEIZNeIjkHoDR4YjjJp34biM0mDvplBe//mB+IHCqHDGV7pxF+7MklTvighcCPPZC7ynWyjdTA==} engines: {node: ^20.19.0 || ^22.13.0 || >=24} - eslint@9.39.1: - resolution: {integrity: sha512-BhHmn2yNOFA9H9JmmIVKJmd288g9hrVRDkdoIgRCRuSySRUHH7r/DI6aAXW9T1WwUuY3DFgrcaqB+deURBLR5g==} + eslint@9.39.4: + resolution: {integrity: 
sha512-XoMjdBOwe/esVgEvLmNsD3IRHkm7fbKIUGvrleloJXUZgDHig2IPWNniv+GwjyJXzuNqVjlr5+4yVUZjycJwfQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} hasBin: true peerDependencies: @@ -2565,6 +2548,10 @@ packages: resolution: {integrity: sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==} engines: {node: '>=0.10'} + esquery@1.7.0: + resolution: {integrity: sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==} + engines: {node: '>=0.10'} + esrecurse@4.3.0: resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} engines: {node: '>=4.0'} @@ -2751,8 +2738,8 @@ packages: graphemer@1.4.0: resolution: {integrity: sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==} - graphql@16.13.1: - resolution: {integrity: sha512-gGgrVCoDKlIZ8fIqXBBb0pPKqDgki0Z/FSKNiQzSGj2uEYHr1tq5wmBegGwJx6QB5S5cM0khSBpi/JFHMCvsmQ==} + graphql@16.13.2: + resolution: {integrity: sha512-5bJ+nf/UCpAjHM8i06fl7eLyVC9iuNAjm9qzkiu2ZGhM0VscSvS6WDPfAwkdkBuoXGM9FJSbKl6wylMwP9Ktig==} engines: {node: ^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0} handlebars@4.7.9: @@ -2839,13 +2826,13 @@ packages: i18next-browser-languagedetector@8.2.1: resolution: {integrity: sha512-bZg8+4bdmaOiApD7N7BPT9W8MLZG+nPTOFlLiJiT8uzKXFjhxw4v2ierCXOwB5sFDMtuA5G4kgYZ0AznZxQ/cw==} - i18next-http-backend@3.0.2: - resolution: {integrity: sha512-PdlvPnvIp4E1sYi46Ik4tBYh/v/NbYfFFgTjkwFl0is8A18s7/bx9aXqsrOax9WUbeNS6mD2oix7Z0yGGf6m5g==} + i18next-http-backend@3.0.4: + resolution: {integrity: sha512-udwrBIE6cNpqn1gRAqRULq3+7MzIIuaiKRWrz++dVz5SqWW2VwXmPJtAgkI0JtMLFaADC9qNmnZAxWAhsxXx2g==} - i18next@25.8.16: - resolution: {integrity: sha512-/4Xvgm8RiJNcB+sZwplylrFNJ27DVvubGX7y6uXn7hh7aSvbmXVSRIyIGx08fEn05SYwaSYWt753mIpJuPKo+Q==} + i18next@25.10.10: + resolution: {integrity: 
sha512-cqUW2Z3EkRx7NqSyywjkgCLK7KLCL6IFVFcONG7nVYIJ3ekZ1/N5jUsihHV6Bq37NfhgtczxJcxduELtjTwkuQ==} peerDependencies: - typescript: ^5 + typescript: ^5 || ^6 peerDependenciesMeta: typescript: optional: true @@ -3111,8 +3098,8 @@ packages: engines: {node: '>=6'} hasBin: true - jsonc-eslint-parser@2.4.1: - resolution: {integrity: sha512-uuPNLJkKN8NXAlZlQ6kmUF9qO+T6Kyd7oV4+/7yy8Jz6+MZNyhPq8EdLpdfnPVzUC8qSf1b4j1azKaGnFsjmsw==} + jsonc-eslint-parser@2.4.2: + resolution: {integrity: sha512-1e4qoRgnn448pRuMvKGsFFymUCquZV0mpGgOyIKNgD3JVDTsVJyRBGH/Fm0tBb8WsWGgmB1mDe6/yJMQM37DUA==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} jsonpointer@5.0.1: @@ -3137,6 +3124,76 @@ packages: resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} engines: {node: '>= 0.8.0'} + lightningcss-android-arm64@1.32.0: + resolution: {integrity: sha512-YK7/ClTt4kAK0vo6w3X+Pnm0D2cf2vPHbhOXdoNti1Ga0al1P4TBZhwjATvjNwLEBCnKvjJc2jQgHXH0NEwlAg==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [android] + + lightningcss-darwin-arm64@1.32.0: + resolution: {integrity: sha512-RzeG9Ju5bag2Bv1/lwlVJvBE3q6TtXskdZLLCyfg5pt+HLz9BqlICO7LZM7VHNTTn/5PRhHFBSjk5lc4cmscPQ==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [darwin] + + lightningcss-darwin-x64@1.32.0: + resolution: {integrity: sha512-U+QsBp2m/s2wqpUYT/6wnlagdZbtZdndSmut/NJqlCcMLTWp5muCrID+K5UJ6jqD2BFshejCYXniPDbNh73V8w==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [darwin] + + lightningcss-freebsd-x64@1.32.0: + resolution: {integrity: sha512-JCTigedEksZk3tHTTthnMdVfGf61Fky8Ji2E4YjUTEQX14xiy/lTzXnu1vwiZe3bYe0q+SpsSH/CTeDXK6WHig==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [freebsd] + + lightningcss-linux-arm-gnueabihf@1.32.0: + resolution: {integrity: sha512-x6rnnpRa2GL0zQOkt6rts3YDPzduLpWvwAF6EMhXFVZXD4tPrBkEFqzGowzCsIWsPjqSK+tyNEODUBXeeVHSkw==} + engines: {node: '>= 12.0.0'} + cpu: [arm] + os: [linux] + + lightningcss-linux-arm64-gnu@1.32.0: + resolution: 
{integrity: sha512-0nnMyoyOLRJXfbMOilaSRcLH3Jw5z9HDNGfT/gwCPgaDjnx0i8w7vBzFLFR1f6CMLKF8gVbebmkUN3fa/kQJpQ==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [linux] + + lightningcss-linux-arm64-musl@1.32.0: + resolution: {integrity: sha512-UpQkoenr4UJEzgVIYpI80lDFvRmPVg6oqboNHfoH4CQIfNA+HOrZ7Mo7KZP02dC6LjghPQJeBsvXhJod/wnIBg==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [linux] + + lightningcss-linux-x64-gnu@1.32.0: + resolution: {integrity: sha512-V7Qr52IhZmdKPVr+Vtw8o+WLsQJYCTd8loIfpDaMRWGUZfBOYEJeyJIkqGIDMZPwPx24pUMfwSxxI8phr/MbOA==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [linux] + + lightningcss-linux-x64-musl@1.32.0: + resolution: {integrity: sha512-bYcLp+Vb0awsiXg/80uCRezCYHNg1/l3mt0gzHnWV9XP1W5sKa5/TCdGWaR/zBM2PeF/HbsQv/j2URNOiVuxWg==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [linux] + + lightningcss-win32-arm64-msvc@1.32.0: + resolution: {integrity: sha512-8SbC8BR40pS6baCM8sbtYDSwEVQd4JlFTOlaD3gWGHfThTcABnNDBda6eTZeqbofalIJhFx0qKzgHJmcPTnGdw==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [win32] + + lightningcss-win32-x64-msvc@1.32.0: + resolution: {integrity: sha512-Amq9B/SoZYdDi1kFrojnoqPLxYhQ4Wo5XiL8EVJrVsB8ARoC1PWW6VGtT0WKCemjy8aC+louJnjS7U18x3b06Q==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [win32] + + lightningcss@1.32.0: + resolution: {integrity: sha512-NXYBzinNrblfraPGyrbPoD19C1h9lfI/1mzgWYvXUTe414Gz/X1FD2XBZSZM7rRTrMA8JL3OtAaGifrIKhQ5yQ==} + engines: {node: '>= 12.0.0'} + lines-and-columns@1.2.4: resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} @@ -3154,8 +3211,8 @@ packages: lodash.merge@4.6.2: resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} - lodash@4.17.23: - resolution: {integrity: sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==} + lodash@4.18.1: + resolution: {integrity: 
sha512-dMInicTPVE8d1e5otfwmmjlxkZoUpiVLwyeTdUsi/Caj/gfzzblBcCE5sRHV/AsjuCmxWrte2TNGSYuCeCq+0Q==} longest-streak@3.1.0: resolution: {integrity: sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==} @@ -3173,8 +3230,8 @@ packages: lru-cache@10.4.3: resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==} - lru-cache@11.2.7: - resolution: {integrity: sha512-aY/R+aEsRelme17KGQa/1ZSIpLpNYYrhcrepKTZgE+W3WM16YMCaPwOHLHsmopZHELU0Ojin1lPVxKR0MihncA==} + lru-cache@11.3.2: + resolution: {integrity: sha512-wgWa6FWQ3QRRJbIjbsldRJZxdxYngT/dO0I5Ynmlnin8qy7tC6xYzbcJjtN4wHLXtkbVwHzk0C+OejVw1XM+DQ==} engines: {node: 20 || >=22} lru-cache@5.1.1: @@ -3345,8 +3402,8 @@ packages: resolution: {integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==} engines: {node: '>=4'} - minimatch@10.2.4: - resolution: {integrity: sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==} + minimatch@10.2.5: + resolution: {integrity: sha512-MULkVLfKGYDFYejP07QOurDLLQpcjk7Fw+7jXS2R2czRQzR56yHRveU5NDJEOviH+hETZKSkIk5c+T23GjFUMg==} engines: {node: 18 || 20 || >=22} minimatch@3.1.5: @@ -3380,8 +3437,8 @@ packages: ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} - msw@2.12.10: - resolution: {integrity: sha512-G3VUymSE0/iegFnuipujpwyTM2GuZAKXNeerUSrG2+Eg391wW63xFs5ixWsK9MWzr1AGoSkYGmyAzNgbR3+urw==} + msw@2.12.14: + resolution: {integrity: sha512-4KXa4nVBIBjbDbd7vfQNuQ25eFxug0aropCQFoI0JdOBuJWamkT1yLVIWReFI8SiTRc+H1hKzaNk+cLk2N9rtQ==} engines: {node: '>=18'} hasBin: true peerDependencies: @@ -3430,8 +3487,8 @@ packages: node-releases@2.0.19: resolution: {integrity: sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==} - node-releases@2.0.36: - resolution: {integrity: 
sha512-TdC8FSgHz8Mwtw9g5L4gR/Sh9XhSP/0DEkQxfEFXOpiul5IiHgHan2VhYYb6agDSfp4KuvltmGApc8HMgUrIkA==} + node-releases@2.0.37: + resolution: {integrity: sha512-1h5gKZCF+pO/o3Iqt5Jp7wc9rH3eJJ0+nh/CIoiRwjRxde/hAHyLPXYN4V3CqKAbiZPSeJFSWHmJsbkicta0Eg==} normalize-package-data@2.5.0: resolution: {integrity: sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==} @@ -3590,13 +3647,13 @@ packages: pkg-types@1.3.1: resolution: {integrity: sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==} - playwright-core@1.58.2: - resolution: {integrity: sha512-yZkEtftgwS8CsfYo7nm0KE8jsvm6i/PTgVtB8DL726wNf6H2IMsDuxCpJj59KDaxCtSnrWan2AeDqM7JBaultg==} + playwright-core@1.59.1: + resolution: {integrity: sha512-HBV/RJg81z5BiiZ9yPzIiClYV/QMsDCKUyogwH9p3MCP6IYjUFu/MActgYAvK0oWyV9NlwM3GLBjADyWgydVyg==} engines: {node: '>=18'} hasBin: true - playwright@1.58.2: - resolution: {integrity: sha512-vA30H8Nvkq/cPBnNw4Q8TWz1EJyqgpuinBcHET0YVJVFldr8JDNiU9LaWAE1KqSkRYazuaBhTpB5ZzShOezQ6A==} + playwright@1.59.1: + resolution: {integrity: sha512-C8oWjPR3F81yljW9o5OxcWzfh6avkVwDD2VYdwIGqTkl+OGFISgypqzfu7dOe4QNLL2aqcWBmI3PMtLIK233lw==} engines: {node: '>=18'} hasBin: true @@ -3645,8 +3702,9 @@ packages: proxy-compare@3.0.1: resolution: {integrity: sha512-V9plBAt3qjMlS1+nC8771KNf6oJ12gExvaxnNzN/9yVRLdTv/lc+oJlnSzrdYDAvBfTStPCoiaCOTmTs0adv7Q==} - proxy-from-env@1.1.0: - resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==} + proxy-from-env@2.1.0: + resolution: {integrity: sha512-cJ+oHTW1VAEa8cJslgmUZrc+sjRKgAKl3Zyse6+PV38hZe/V6Z14TbCuXcan9F9ghlz4QrFr2c92TNF82UkYHA==} + engines: {node: '>=10'} proxy-memoize@3.0.1: resolution: {integrity: sha512-VDdG/VYtOgdGkWJx7y0o7p+zArSf2383Isci8C+BP3YXgMYDoPd3cCBjw0JdWb6YBb9sFiOPbAADDVTPJnh+9g==} @@ -3669,22 +3727,22 @@ packages: peerDependencies: react: ^19.2.4 - react-hook-form@7.71.2: - resolution: {integrity: 
sha512-1CHvcDYzuRUNOflt4MOq3ZM46AronNJtQ1S7tnX6YN4y72qhgiUItpacZUAQ0TyWYci3yz1X+rXaSxiuEm86PA==} + react-hook-form@7.72.0: + resolution: {integrity: sha512-V4v6jubaf6JAurEaVnT9aUPKFbNtDgohj5CIgVGyPHvT9wRx5OZHVjz31GsxnPNI278XMu+ruFz+wGOscHaLKw==} engines: {node: '>=18.0.0'} peerDependencies: react: ^16.8.0 || ^17 || ^18 || ^19 - react-hotkeys-hook@4.6.1: - resolution: {integrity: sha512-XlZpbKUj9tkfgPgT9gA+1p7Ey6vFIZHttUjPqpTdyT5nqQ8mHL7elxvSbaC+dpSiHUSmr21Ya1mDxBZG3aje4Q==} + react-hotkeys-hook@4.6.2: + resolution: {integrity: sha512-FmP+ZriY3EG59Ug/lxNfrObCnW9xQShgk7Nb83+CkpfkcCpfS95ydv+E9JuXA5cp8KtskU7LGlIARpkc92X22Q==} peerDependencies: react: '>=16.8.1' react-dom: '>=16.8.1' - react-i18next@15.5.1: - resolution: {integrity: sha512-C8RZ7N7H0L+flitiX6ASjq9p5puVJU1Z8VyL3OgM/QOMRf40BMZX+5TkpxzZVcTmOLPX5zlti4InEX5pFyiVeA==} + react-i18next@15.7.4: + resolution: {integrity: sha512-nyU8iKNrI5uDJch0z9+Y5XEr34b0wkyYj3Rp+tfbahxtlswxSCjcUL9H0nqXo9IR3/t5Y5PKIA3fx3MfUyR9Xw==} peerDependencies: - i18next: '>= 23.2.3' + i18next: '>= 23.4.0' react: '>= 16.8.0' react-dom: '*' react-native: '*' @@ -3730,15 +3788,15 @@ packages: react: ^16.14.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc react-dom: ^16.14.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc - react-router-dom@7.13.1: - resolution: {integrity: sha512-UJnV3Rxc5TgUPJt2KJpo1Jpy0OKQr0AjgbZzBFjaPJcFOb2Y8jA5H3LT8HUJAiRLlWrEXWHbF1Z4SCZaQjWDHw==} + react-router-dom@7.14.0: + resolution: {integrity: sha512-2G3ajSVSZMEtmTjIklRWlNvo8wICEpLihfD/0YMDxbWK2UyP5EGfnoIn9AIQGnF3G/FX0MRbHXdFcD+rL1ZreQ==} engines: {node: '>=20.0.0'} peerDependencies: react: '>=18' react-dom: '>=18' - react-router@7.13.1: - resolution: {integrity: sha512-td+xP4X2/6BJvZoX6xw++A2DdEi++YypA69bJUV5oVvqf6/9/9nNlD70YO1e9d3MyamJEBQFEzk6mbfDYbqrSA==} + react-router@7.14.0: + resolution: {integrity: sha512-m/xR9N4LQLmAS0ZhkY2nkPA1N7gQ5TUVa5n8TgANuDTARbn1gt+zLPXEm7W0XDTbrQ2AJSJKhoa6yx1D8BcpxQ==} engines: {node: '>=20.0.0'} peerDependencies: react: '>=18' @@ 
-3753,8 +3811,8 @@ packages: react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 - react-syntax-highlighter@15.6.1: - resolution: {integrity: sha512-OqJ2/vL7lEeV5zTJyG7kmARppUjiB9h9udl4qHQjjgEos66z00Ia0OckwYfRxCSFrW8RJIBnsBwQsHZbVPspqg==} + react-syntax-highlighter@15.6.6: + resolution: {integrity: sha512-DgXrc+AZF47+HvAPEmn7Ua/1p10jNoVZVI/LoPiYdtY+OM+/nG5yefLHKJwdKqY1adMuHFbeyBaG9j64ML7vTw==} peerDependencies: react: '>= 0.14.0' @@ -3845,9 +3903,9 @@ packages: robust-predicates@3.0.2: resolution: {integrity: sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==} - rollup@4.59.0: - resolution: {integrity: sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==} - engines: {node: '>=18.0.0', npm: '>=8.0.0'} + rolldown@1.0.0-rc.12: + resolution: {integrity: sha512-yP4USLIMYrwpPHEFB5JGH1uxhcslv6/hL0OyvTuY+3qlOSJvZ7ntYnoWpehBxufkgN0cvXxppuTu5hHa/zPh+A==} + engines: {node: ^20.19.0 || >=22.12.0} hasBin: true safe-array-concat@1.1.3: @@ -4052,10 +4110,6 @@ packages: resolution: {integrity: sha512-Bh7QjT8/SuKUIfObSXNHNSK6WHo6J1tHCqJsuaFDP7gP0fkzSfTxI8y85JrppZ0h8l0maIgc2tfuZQ6/t3GtnQ==} engines: {node: ^14.18.0 || >=16.0.0} - synckit@0.11.8: - resolution: {integrity: sha512-+XZ+r1XGIJGeQk3VvXhT6xx/VpbHsRzsTkGgF6E5RX9TTXD0118l87puaEBZ566FhqblC6U0d4XnubznJDm30A==} - engines: {node: ^14.18.0 || >=16.0.0} - tagged-tag@1.0.0: resolution: {integrity: sha512-yEFYrVhod+hdNyx7g5Bnkkb0G6si8HJurOoOEgC8B/O0uXLHlaey/65KRv6cuWBNhBgHKAROVpc7QyYqE5gFng==} engines: {node: '>=20'} @@ -4090,11 +4144,11 @@ packages: resolution: {integrity: sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==} engines: {node: '>=14.0.0'} - tldts-core@7.0.25: - resolution: {integrity: sha512-ZjCZK0rppSBu7rjHYDYsEaMOIbbT+nWF57hKkv4IUmZWBNrBWBOjIElc0mKRgLM8bm7x/BBlof6t2gi/Oq/Asw==} + tldts-core@7.0.28: + resolution: {integrity: 
sha512-7W5Efjhsc3chVdFhqtaU0KtK32J37Zcr9RKtID54nG+tIpcY79CQK/veYPODxtD/LJ4Lue66jvrQzIX2Z2/pUQ==} - tldts@7.0.25: - resolution: {integrity: sha512-keinCnPbwXEUG3ilrWQZU+CqcTTzHq9m2HhoUP2l7Xmi8l1LuijAXLpAJ5zRW+ifKTNscs4NdCkfkDCBYm352w==} + tldts@7.0.28: + resolution: {integrity: sha512-+Zg3vWhRUv8B1maGSTFdev9mjoo8Etn2Ayfs4cnjlD3CsGkxXX4QyW3j2WJ0wdjYcYmy7Lx2RDsZMhgCWafKIw==} hasBin: true to-fast-properties@2.0.0: @@ -4105,8 +4159,8 @@ packages: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} - tough-cookie@6.0.0: - resolution: {integrity: sha512-kXuRi1mtaKMrsLUxz3sQYvVl37B0Ns6MzfrtV5DvJceE9bPyspOqk9xxv7XbZWcfLWbFmm997vl83qUWVJA64w==} + tough-cookie@6.0.1: + resolution: {integrity: sha512-LktZQb3IeoUWB9lqR5EWTHgW/VTITCXg4D21M+lvybRVdylLrRMnqaIONLVb5mav8vM19m44HIcGq4qASeu2Qw==} engines: {node: '>=16'} tr46@0.0.3: @@ -4118,8 +4172,8 @@ packages: trough@2.2.0: resolution: {integrity: sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==} - ts-api-utils@2.4.0: - resolution: {integrity: sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==} + ts-api-utils@2.5.0: + resolution: {integrity: sha512-OJ/ibxhPlqrMM0UiNHJ/0CKQkoKF243/AEmplt3qpRgkW8VG7IfOS41h7V8TjITqdByHzrjcS/2si+y4lIh8NA==} engines: {node: '>=18.12'} peerDependencies: typescript: '>=4.8.4' @@ -4145,8 +4199,8 @@ packages: resolution: {integrity: sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==} engines: {node: '>=8'} - type-fest@5.4.4: - resolution: {integrity: sha512-JnTrzGu+zPV3aXIUhnyWJj4z/wigMsdYajGLIYakqyOW1nPllzXEJee0QQbHj+CTIQtXGlAjuK0UY+2xTyjVAw==} + type-fest@5.5.0: + resolution: {integrity: sha512-PlBfpQwiUvGViBNX84Yxwjsdhd1TUlXr6zjX7eoirtCPIr08NAmxwa+fcYBTeRQxHo9YC9wwF3m9i700sHma8g==} engines: {node: '>=20'} typed-array-buffer@1.0.3: @@ -4165,12 +4219,12 @@ packages: resolution: 
{integrity: sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==} engines: {node: '>= 0.4'} - typescript-eslint@8.56.1: - resolution: {integrity: sha512-U4lM6pjmBX7J5wk4szltF7I1cGBHXZopnAXCMXb3+fZ3B/0Z3hq3wS/CCUB2NZBNAExK92mCU2tEohWuwVMsDQ==} + typescript-eslint@8.58.0: + resolution: {integrity: sha512-e2TQzKfaI85fO+F3QywtX+tCTsu/D3WW5LVU6nz8hTFKFZ8yBJ6mSYRpXqdR3mFjPWmO0eWsTa5f+UpAOe/FMA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 - typescript: '>=4.8.4 <6.0.0' + typescript: '>=4.8.4 <6.1.0' typescript@5.9.3: resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==} @@ -4234,8 +4288,8 @@ packages: urijs@1.19.11: resolution: {integrity: sha512-HXgFDgDommxn5/bIv0cnQZsPhHDA90NPHD6+c/v21U5+Sx5hoP8+dP9IZXBU1gIfvdRfhG8cel9QNPeionfcCQ==} - use-debounce@10.1.0: - resolution: {integrity: sha512-lu87Za35V3n/MyMoEpD5zJv0k7hCn0p+V/fK2kWD+3k2u3kOCwO593UArbczg1fhfs2rqPEnHpULJ3KmGdDzvg==} + use-debounce@10.1.1: + resolution: {integrity: sha512-kvds8BHR2k28cFsxW8k3nc/tRga2rs1RHYCqmmGqb90MEeE++oALwzh2COiuBLO1/QXiOuShXoSN2ZpWnMmvuQ==} engines: {node: '>= 16.0.0'} peerDependencies: react: '*' @@ -4277,17 +4331,18 @@ packages: vite-plugin-css-injected-by-js@3.5.2: resolution: {integrity: sha512-2MpU/Y+SCZyWUB6ua3HbJCrgnF0KACAsmzOQt1UvRVJCGF6S8xdA3ZUhWcWdM9ivG4I5az8PnQmwwrkC2CAQrQ==} peerDependencies: - vite: '>2.0.0-0' + vite: '>=7.3.2' - vite@7.3.1: - resolution: {integrity: sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==} + vite@8.0.5: + resolution: {integrity: sha512-nmu43Qvq9UopTRfMx2jOYW5l16pb3iDC1JH6yMuPkpVbzK0k+L7dfsEDH4jRgYFmsg0sTAqkojoZgzLMlwHsCQ==} engines: {node: ^20.19.0 || >=22.12.0} hasBin: true peerDependencies: '@types/node': ^20.19.0 || >=22.12.0 + '@vitejs/devtools': ^0.1.0 + esbuild: ^0.27.0 || ^0.28.0 jiti: '>=1.21.0' less: ^4.0.0 - lightningcss: 
^1.21.0 sass: ^1.70.0 sass-embedded: ^1.70.0 stylus: '>=0.54.8' @@ -4298,12 +4353,14 @@ packages: peerDependenciesMeta: '@types/node': optional: true + '@vitejs/devtools': + optional: true + esbuild: + optional: true jiti: optional: true less: optional: true - lightningcss: - optional: true sass: optional: true sass-embedded: @@ -4484,8 +4541,8 @@ packages: react: optional: true - zustand@5.0.11: - resolution: {integrity: sha512-fdZY+dk7zn/vbWNCYmzZULHRrss0jx5pPFiOuMZ/5HJN6Yv3u+1Wswy/4MpZEkEGhtNH+pwxZB8OKgUBPzYAGg==} + zustand@5.0.12: + resolution: {integrity: sha512-i77ae3aZq4dhMlRhJVCYgMLKuSiZAaUPAct2AksxQ+gOtimhGMdXljRT21P5BNpeT4kXlLIckvkPM029OljD7g==} engines: {node: '>=12.20.0'} peerDependencies: '@types/react': '>=18.0.0' @@ -4635,7 +4692,7 @@ snapshots: '@babel/types': 7.28.5 '@jridgewell/remapping': 2.3.5 convert-source-map: 2.0.0 - debug: 4.4.1 + debug: 4.4.3 gensync: 1.0.0-beta.2 json5: 2.2.3 semver: 6.3.1 @@ -4648,8 +4705,8 @@ snapshots: '@babel/generator': 7.29.1 '@babel/helper-compilation-targets': 7.28.6 '@babel/helper-module-transforms': 7.28.6(@babel/core@7.29.0) - '@babel/helpers': 7.28.6 - '@babel/parser': 7.29.0 + '@babel/helpers': 7.29.2 + '@babel/parser': 7.29.2 '@babel/template': 7.28.6 '@babel/traverse': 7.29.0 '@babel/types': 7.29.0 @@ -4686,7 +4743,7 @@ snapshots: '@babel/generator@7.29.1': dependencies: - '@babel/parser': 7.29.0 + '@babel/parser': 7.29.2 '@babel/types': 7.29.0 '@jridgewell/gen-mapping': 0.3.13 '@jridgewell/trace-mapping': 0.3.31 @@ -4704,7 +4761,7 @@ snapshots: dependencies: '@babel/compat-data': 7.29.0 '@babel/helper-validator-option': 7.27.1 - browserslist: 4.28.1 + browserslist: 4.28.2 lru-cache: 5.1.1 semver: 6.3.1 @@ -4785,7 +4842,7 @@ snapshots: '@babel/template': 7.27.2 '@babel/types': 7.28.5 - '@babel/helpers@7.28.6': + '@babel/helpers@7.29.2': dependencies: '@babel/template': 7.28.6 '@babel/types': 7.29.0 @@ -4798,7 +4855,7 @@ snapshots: dependencies: '@babel/types': 7.28.5 - '@babel/parser@7.29.0': + 
'@babel/parser@7.29.2': dependencies: '@babel/types': 7.29.0 @@ -4818,6 +4875,8 @@ snapshots: '@babel/runtime@7.28.6': {} + '@babel/runtime@7.29.2': {} + '@babel/template@7.27.2': dependencies: '@babel/code-frame': 7.27.1 @@ -4827,7 +4886,7 @@ snapshots: '@babel/template@7.28.6': dependencies: '@babel/code-frame': 7.29.0 - '@babel/parser': 7.29.0 + '@babel/parser': 7.29.2 '@babel/types': 7.29.0 '@babel/traverse@7.23.2': @@ -4840,7 +4899,7 @@ snapshots: '@babel/helper-split-export-declaration': 7.24.7 '@babel/parser': 7.26.10 '@babel/types': 7.26.10 - debug: 4.4.0 + debug: 4.4.3 globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -4853,7 +4912,7 @@ snapshots: '@babel/parser': 7.28.5 '@babel/template': 7.27.2 '@babel/types': 7.28.5 - debug: 4.4.1 + debug: 4.4.3 transitivePeerDependencies: - supports-color @@ -4862,7 +4921,7 @@ snapshots: '@babel/code-frame': 7.29.0 '@babel/generator': 7.29.1 '@babel/helper-globals': 7.28.0 - '@babel/parser': 7.29.0 + '@babel/parser': 7.29.2 '@babel/template': 7.28.6 '@babel/types': 7.29.0 debug: 4.4.3 @@ -4906,6 +4965,22 @@ snapshots: react: 19.2.4 react-dom: 19.2.4(react@19.2.4) + '@emnapi/core@1.9.2': + dependencies: + '@emnapi/wasi-threads': 1.2.1 + tslib: 2.8.1 + optional: true + + '@emnapi/runtime@1.9.2': + dependencies: + tslib: 2.8.1 + optional: true + + '@emnapi/wasi-threads@1.2.1': + dependencies: + tslib: 2.8.1 + optional: true + '@emotion/babel-plugin@11.13.5': dependencies: '@babel/helper-module-imports': 7.25.9 @@ -5052,33 +5127,28 @@ snapshots: '@esbuild/win32-x64@0.27.3': optional: true - '@eslint-community/eslint-utils@4.5.1(eslint@9.39.1(jiti@1.21.7))': - dependencies: - eslint: 9.39.1(jiti@1.21.7) - eslint-visitor-keys: 3.4.3 - - '@eslint-community/eslint-utils@4.9.0(eslint@9.39.1(jiti@1.21.7))': + '@eslint-community/eslint-utils@4.5.1(eslint@9.39.4(jiti@1.21.7))': dependencies: - eslint: 9.39.1(jiti@1.21.7) + eslint: 9.39.4(jiti@1.21.7) eslint-visitor-keys: 3.4.3 - 
'@eslint-community/eslint-utils@4.9.1(eslint@9.39.1(jiti@1.21.7))': + '@eslint-community/eslint-utils@4.9.1(eslint@9.39.4(jiti@1.21.7))': dependencies: - eslint: 9.39.1(jiti@1.21.7) + eslint: 9.39.4(jiti@1.21.7) eslint-visitor-keys: 3.4.3 - '@eslint-community/regexpp@4.12.1': {} - '@eslint-community/regexpp@4.12.2': {} - '@eslint/compat@1.2.9(eslint@9.39.1(jiti@1.21.7))': + '@eslint/compat@1.4.1(eslint@9.39.4(jiti@1.21.7))': + dependencies: + '@eslint/core': 0.17.0 optionalDependencies: - eslint: 9.39.1(jiti@1.21.7) + eslint: 9.39.4(jiti@1.21.7) - '@eslint/config-array@0.21.1': + '@eslint/config-array@0.21.2': dependencies: '@eslint/object-schema': 2.1.7 - debug: 4.4.1 + debug: 4.4.3 minimatch: 3.1.5 transitivePeerDependencies: - supports-color @@ -5091,10 +5161,10 @@ snapshots: dependencies: '@types/json-schema': 7.0.15 - '@eslint/eslintrc@3.3.1': + '@eslint/eslintrc@3.3.5': dependencies: ajv: 6.14.0 - debug: 4.4.1 + debug: 4.4.3 espree: 10.4.0 globals: 14.0.0 ignore: 5.3.2 @@ -5105,7 +5175,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@eslint/js@9.39.1': {} + '@eslint/js@9.39.4': {} '@eslint/object-schema@2.1.7': {} @@ -5151,44 +5221,42 @@ snapshots: '@humanfs/core@0.19.1': {} - '@humanfs/node@0.16.6': + '@humanfs/node@0.16.7': dependencies: '@humanfs/core': 0.19.1 - '@humanwhocodes/retry': 0.3.1 + '@humanwhocodes/retry': 0.4.3 '@humanwhocodes/module-importer@1.0.1': {} - '@humanwhocodes/retry@0.3.1': {} - - '@humanwhocodes/retry@0.4.2': {} + '@humanwhocodes/retry@0.4.3': {} '@inquirer/ansi@1.0.2': {} - '@inquirer/confirm@5.1.21(@types/node@24.10.3)': + '@inquirer/confirm@5.1.21(@types/node@24.12.0)': dependencies: - '@inquirer/core': 10.3.2(@types/node@24.10.3) - '@inquirer/type': 3.0.10(@types/node@24.10.3) + '@inquirer/core': 10.3.2(@types/node@24.12.0) + '@inquirer/type': 3.0.10(@types/node@24.12.0) optionalDependencies: - '@types/node': 24.10.3 + '@types/node': 24.12.0 - '@inquirer/core@10.3.2(@types/node@24.10.3)': + 
'@inquirer/core@10.3.2(@types/node@24.12.0)': dependencies: '@inquirer/ansi': 1.0.2 '@inquirer/figures': 1.0.15 - '@inquirer/type': 3.0.10(@types/node@24.10.3) + '@inquirer/type': 3.0.10(@types/node@24.12.0) cli-width: 4.1.0 mute-stream: 2.0.0 signal-exit: 4.1.0 wrap-ansi: 6.2.0 yoctocolors-cjs: 2.1.3 optionalDependencies: - '@types/node': 24.10.3 + '@types/node': 24.12.0 '@inquirer/figures@1.0.15': {} - '@inquirer/type@3.0.10(@types/node@24.10.3)': + '@inquirer/type@3.0.10(@types/node@24.12.0)': optionalDependencies: - '@types/node': 24.10.3 + '@types/node': 24.12.0 '@internationalized/date@3.11.0': dependencies: @@ -5279,6 +5347,13 @@ snapshots: outvariant: 1.4.3 strict-event-emitter: 0.5.1 + '@napi-rs/wasm-runtime@1.1.2(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)': + dependencies: + '@emnapi/core': 1.9.2 + '@emnapi/runtime': 1.9.2 + '@tybys/wasm-util': 0.10.1 + optional: true + '@open-draft/deferred-promise@2.2.0': {} '@open-draft/logger@0.3.0': @@ -5288,102 +5363,79 @@ snapshots: '@open-draft/until@2.1.0': {} + '@oxc-project/types@0.122.0': {} + '@pandacss/is-valid-prop@1.9.0': {} '@pkgjs/parseargs@0.11.0': optional: true - '@pkgr/core@0.2.4': {} - '@pkgr/core@0.2.9': {} - '@playwright/test@1.58.2': + '@playwright/test@1.59.1': dependencies: - playwright: 1.58.2 - - '@rolldown/pluginutils@1.0.0-rc.2': {} - - '@rolldown/pluginutils@1.0.0-rc.3': {} - - '@rollup/rollup-android-arm-eabi@4.59.0': - optional: true - - '@rollup/rollup-android-arm64@4.59.0': - optional: true - - '@rollup/rollup-darwin-arm64@4.59.0': - optional: true - - '@rollup/rollup-darwin-x64@4.59.0': - optional: true + playwright: 1.59.1 - '@rollup/rollup-freebsd-arm64@4.59.0': + '@rolldown/binding-android-arm64@1.0.0-rc.12': optional: true - '@rollup/rollup-freebsd-x64@4.59.0': + '@rolldown/binding-darwin-arm64@1.0.0-rc.12': optional: true - '@rollup/rollup-linux-arm-gnueabihf@4.59.0': + '@rolldown/binding-darwin-x64@1.0.0-rc.12': optional: true - '@rollup/rollup-linux-arm-musleabihf@4.59.0': + 
'@rolldown/binding-freebsd-x64@1.0.0-rc.12': optional: true - '@rollup/rollup-linux-arm64-gnu@4.59.0': + '@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.12': optional: true - '@rollup/rollup-linux-arm64-musl@4.59.0': + '@rolldown/binding-linux-arm64-gnu@1.0.0-rc.12': optional: true - '@rollup/rollup-linux-loong64-gnu@4.59.0': + '@rolldown/binding-linux-arm64-musl@1.0.0-rc.12': optional: true - '@rollup/rollup-linux-loong64-musl@4.59.0': + '@rolldown/binding-linux-ppc64-gnu@1.0.0-rc.12': optional: true - '@rollup/rollup-linux-ppc64-gnu@4.59.0': + '@rolldown/binding-linux-s390x-gnu@1.0.0-rc.12': optional: true - '@rollup/rollup-linux-ppc64-musl@4.59.0': + '@rolldown/binding-linux-x64-gnu@1.0.0-rc.12': optional: true - '@rollup/rollup-linux-riscv64-gnu@4.59.0': + '@rolldown/binding-linux-x64-musl@1.0.0-rc.12': optional: true - '@rollup/rollup-linux-riscv64-musl@4.59.0': + '@rolldown/binding-openharmony-arm64@1.0.0-rc.12': optional: true - '@rollup/rollup-linux-s390x-gnu@4.59.0': - optional: true - - '@rollup/rollup-linux-x64-gnu@4.59.0': - optional: true - - '@rollup/rollup-linux-x64-musl@4.59.0': - optional: true - - '@rollup/rollup-openbsd-x64@4.59.0': + '@rolldown/binding-wasm32-wasi@1.0.0-rc.12(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)': + dependencies: + '@napi-rs/wasm-runtime': 1.1.2(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2) + transitivePeerDependencies: + - '@emnapi/core' + - '@emnapi/runtime' optional: true - '@rollup/rollup-openharmony-arm64@4.59.0': + '@rolldown/binding-win32-arm64-msvc@1.0.0-rc.12': optional: true - '@rollup/rollup-win32-arm64-msvc@4.59.0': + '@rolldown/binding-win32-x64-msvc@1.0.0-rc.12': optional: true - '@rollup/rollup-win32-ia32-msvc@4.59.0': - optional: true + '@rolldown/pluginutils@1.0.0-rc.12': {} - '@rollup/rollup-win32-x64-gnu@4.59.0': - optional: true + '@rolldown/pluginutils@1.0.0-rc.3': {} - '@rollup/rollup-win32-x64-msvc@4.59.0': - optional: true + '@rolldown/pluginutils@1.0.0-rc.7': {} - 
'@stylistic/eslint-plugin@2.13.0(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3)': + '@stylistic/eslint-plugin@2.13.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3)': dependencies: - '@typescript-eslint/utils': 8.56.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3) - eslint: 9.39.1(jiti@1.21.7) + '@typescript-eslint/utils': 8.58.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3) + eslint: 9.39.4(jiti@1.21.7) eslint-visitor-keys: 4.2.0 espree: 10.3.0 estraverse: 5.3.0 @@ -5392,51 +5444,59 @@ snapshots: - supports-color - typescript - '@swc/core-darwin-arm64@1.15.18': + '@swc/core-darwin-arm64@1.15.24': + optional: true + + '@swc/core-darwin-x64@1.15.24': + optional: true + + '@swc/core-linux-arm-gnueabihf@1.15.24': optional: true - '@swc/core-darwin-x64@1.15.18': + '@swc/core-linux-arm64-gnu@1.15.24': optional: true - '@swc/core-linux-arm-gnueabihf@1.15.18': + '@swc/core-linux-arm64-musl@1.15.24': optional: true - '@swc/core-linux-arm64-gnu@1.15.18': + '@swc/core-linux-ppc64-gnu@1.15.24': optional: true - '@swc/core-linux-arm64-musl@1.15.18': + '@swc/core-linux-s390x-gnu@1.15.24': optional: true - '@swc/core-linux-x64-gnu@1.15.18': + '@swc/core-linux-x64-gnu@1.15.24': optional: true - '@swc/core-linux-x64-musl@1.15.18': + '@swc/core-linux-x64-musl@1.15.24': optional: true - '@swc/core-win32-arm64-msvc@1.15.18': + '@swc/core-win32-arm64-msvc@1.15.24': optional: true - '@swc/core-win32-ia32-msvc@1.15.18': + '@swc/core-win32-ia32-msvc@1.15.24': optional: true - '@swc/core-win32-x64-msvc@1.15.18': + '@swc/core-win32-x64-msvc@1.15.24': optional: true - '@swc/core@1.15.18(@swc/helpers@0.5.19)': + '@swc/core@1.15.24(@swc/helpers@0.5.19)': dependencies: '@swc/counter': 0.1.3 - '@swc/types': 0.1.25 + '@swc/types': 0.1.26 optionalDependencies: - '@swc/core-darwin-arm64': 1.15.18 - '@swc/core-darwin-x64': 1.15.18 - '@swc/core-linux-arm-gnueabihf': 1.15.18 - '@swc/core-linux-arm64-gnu': 1.15.18 - '@swc/core-linux-arm64-musl': 1.15.18 - '@swc/core-linux-x64-gnu': 1.15.18 - 
'@swc/core-linux-x64-musl': 1.15.18 - '@swc/core-win32-arm64-msvc': 1.15.18 - '@swc/core-win32-ia32-msvc': 1.15.18 - '@swc/core-win32-x64-msvc': 1.15.18 + '@swc/core-darwin-arm64': 1.15.24 + '@swc/core-darwin-x64': 1.15.24 + '@swc/core-linux-arm-gnueabihf': 1.15.24 + '@swc/core-linux-arm64-gnu': 1.15.24 + '@swc/core-linux-arm64-musl': 1.15.24 + '@swc/core-linux-ppc64-gnu': 1.15.24 + '@swc/core-linux-s390x-gnu': 1.15.24 + '@swc/core-linux-x64-gnu': 1.15.24 + '@swc/core-linux-x64-musl': 1.15.24 + '@swc/core-win32-arm64-msvc': 1.15.24 + '@swc/core-win32-ia32-msvc': 1.15.24 + '@swc/core-win32-x64-msvc': 1.15.24 '@swc/helpers': 0.5.19 '@swc/counter@0.1.3': {} @@ -5445,24 +5505,24 @@ snapshots: dependencies: tslib: 2.8.1 - '@swc/types@0.1.25': + '@swc/types@0.1.26': dependencies: '@swc/counter': 0.1.3 - '@tanstack/eslint-plugin-query@5.91.4(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3)': + '@tanstack/eslint-plugin-query@5.96.1(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3)': dependencies: - '@typescript-eslint/utils': 8.56.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3) - eslint: 9.39.1(jiti@1.21.7) + '@typescript-eslint/utils': 8.58.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3) + eslint: 9.39.4(jiti@1.21.7) optionalDependencies: typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@tanstack/query-core@5.90.20': {} + '@tanstack/query-core@5.96.1': {} - '@tanstack/react-query@5.90.21(react@19.2.4)': + '@tanstack/react-query@5.96.1(react@19.2.4)': dependencies: - '@tanstack/query-core': 5.90.20 + '@tanstack/query-core': 5.96.1 react: 19.2.4 '@tanstack/react-table@8.21.3(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': @@ -5471,20 +5531,20 @@ snapshots: react: 19.2.4 react-dom: 19.2.4(react@19.2.4) - '@tanstack/react-virtual@3.13.21(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + '@tanstack/react-virtual@3.13.23(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@tanstack/virtual-core': 3.13.21 + '@tanstack/virtual-core': 3.13.23 react: 19.2.4 
react-dom: 19.2.4(react@19.2.4) '@tanstack/table-core@8.21.3': {} - '@tanstack/virtual-core@3.13.21': {} + '@tanstack/virtual-core@3.13.23': {} '@testing-library/dom@10.4.0': dependencies: '@babel/code-frame': 7.29.0 - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@types/aria-query': 5.0.4 aria-query: 5.3.0 chalk: 4.1.2 @@ -5518,22 +5578,27 @@ snapshots: '@babel/traverse': 7.23.2 '@babel/types': 7.17.0 javascript-natural-sort: 0.7.1 - lodash: 4.17.23 + lodash: 4.18.1 prettier: 3.8.1 transitivePeerDependencies: - supports-color '@ts-morph/common@0.28.1': dependencies: - minimatch: 10.2.4 + minimatch: 10.2.5 path-browserify: 1.0.1 tinyglobby: 0.2.15 + '@tybys/wasm-util@0.10.1': + dependencies: + tslib: 2.8.1 + optional: true + '@types/aria-query@5.0.4': {} '@types/babel__core@7.20.5': dependencies: - '@babel/parser': 7.29.0 + '@babel/parser': 7.29.2 '@babel/types': 7.29.0 '@types/babel__generator': 7.27.0 '@types/babel__template': 7.4.4 @@ -5545,7 +5610,7 @@ snapshots: '@types/babel__template@7.4.4': dependencies: - '@babel/parser': 7.29.0 + '@babel/parser': 7.29.2 '@babel/types': 7.29.0 '@types/babel__traverse@7.28.0': @@ -5615,9 +5680,7 @@ snapshots: '@types/estree-jsx@1.0.5': dependencies: - '@types/estree': 1.0.6 - - '@types/estree@1.0.6': {} + '@types/estree': 1.0.8 '@types/estree@1.0.8': {} @@ -5641,7 +5704,7 @@ snapshots: '@types/ms@2.1.0': {} - '@types/node@24.10.3': + '@types/node@24.12.0': dependencies: undici-types: 7.16.0 @@ -5677,99 +5740,99 @@ snapshots: '@types/ws@8.18.1': dependencies: - '@types/node': 24.10.3 + '@types/node': 24.12.0 - '@typescript-eslint/eslint-plugin@8.56.1(@typescript-eslint/parser@8.56.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3)': + '@typescript-eslint/eslint-plugin@8.58.0(@typescript-eslint/parser@8.58.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3)': dependencies: '@eslint-community/regexpp': 4.12.2 - 
'@typescript-eslint/parser': 8.56.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/scope-manager': 8.56.1 - '@typescript-eslint/type-utils': 8.56.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/utils': 8.56.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/visitor-keys': 8.56.1 - eslint: 9.39.1(jiti@1.21.7) + '@typescript-eslint/parser': 8.58.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.58.0 + '@typescript-eslint/type-utils': 8.58.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/utils': 8.58.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.58.0 + eslint: 9.39.4(jiti@1.21.7) ignore: 7.0.5 natural-compare: 1.4.0 - ts-api-utils: 2.4.0(typescript@5.9.3) + ts-api-utils: 2.5.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@8.56.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3)': + '@typescript-eslint/parser@8.58.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3)': dependencies: - '@typescript-eslint/scope-manager': 8.56.1 - '@typescript-eslint/types': 8.56.1 - '@typescript-eslint/typescript-estree': 8.56.1(typescript@5.9.3) - '@typescript-eslint/visitor-keys': 8.56.1 + '@typescript-eslint/scope-manager': 8.58.0 + '@typescript-eslint/types': 8.58.0 + '@typescript-eslint/typescript-estree': 8.58.0(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.58.0 debug: 4.4.3 - eslint: 9.39.1(jiti@1.21.7) + eslint: 9.39.4(jiti@1.21.7) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/project-service@8.56.1(typescript@5.9.3)': + '@typescript-eslint/project-service@8.58.0(typescript@5.9.3)': dependencies: - '@typescript-eslint/tsconfig-utils': 8.56.1(typescript@5.9.3) - '@typescript-eslint/types': 8.56.1 + '@typescript-eslint/tsconfig-utils': 8.58.0(typescript@5.9.3) + '@typescript-eslint/types': 8.58.0 debug: 4.4.3 
typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/scope-manager@8.56.1': + '@typescript-eslint/scope-manager@8.58.0': dependencies: - '@typescript-eslint/types': 8.56.1 - '@typescript-eslint/visitor-keys': 8.56.1 + '@typescript-eslint/types': 8.58.0 + '@typescript-eslint/visitor-keys': 8.58.0 - '@typescript-eslint/tsconfig-utils@8.56.1(typescript@5.9.3)': + '@typescript-eslint/tsconfig-utils@8.58.0(typescript@5.9.3)': dependencies: typescript: 5.9.3 - '@typescript-eslint/type-utils@8.56.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3)': + '@typescript-eslint/type-utils@8.58.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3)': dependencies: - '@typescript-eslint/types': 8.56.1 - '@typescript-eslint/typescript-estree': 8.56.1(typescript@5.9.3) - '@typescript-eslint/utils': 8.56.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/types': 8.58.0 + '@typescript-eslint/typescript-estree': 8.58.0(typescript@5.9.3) + '@typescript-eslint/utils': 8.58.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3) debug: 4.4.3 - eslint: 9.39.1(jiti@1.21.7) - ts-api-utils: 2.4.0(typescript@5.9.3) + eslint: 9.39.4(jiti@1.21.7) + ts-api-utils: 2.5.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: - supports-color '@typescript-eslint/types@8.48.1': {} - '@typescript-eslint/types@8.56.1': {} + '@typescript-eslint/types@8.58.0': {} - '@typescript-eslint/typescript-estree@8.56.1(typescript@5.9.3)': + '@typescript-eslint/typescript-estree@8.58.0(typescript@5.9.3)': dependencies: - '@typescript-eslint/project-service': 8.56.1(typescript@5.9.3) - '@typescript-eslint/tsconfig-utils': 8.56.1(typescript@5.9.3) - '@typescript-eslint/types': 8.56.1 - '@typescript-eslint/visitor-keys': 8.56.1 + '@typescript-eslint/project-service': 8.58.0(typescript@5.9.3) + '@typescript-eslint/tsconfig-utils': 8.58.0(typescript@5.9.3) + '@typescript-eslint/types': 8.58.0 + '@typescript-eslint/visitor-keys': 8.58.0 debug: 4.4.3 - minimatch: 10.2.4 + 
minimatch: 10.2.5 semver: 7.7.4 tinyglobby: 0.2.15 - ts-api-utils: 2.4.0(typescript@5.9.3) + ts-api-utils: 2.5.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@8.56.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3)': + '@typescript-eslint/utils@8.58.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3)': dependencies: - '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.1(jiti@1.21.7)) - '@typescript-eslint/scope-manager': 8.56.1 - '@typescript-eslint/types': 8.56.1 - '@typescript-eslint/typescript-estree': 8.56.1(typescript@5.9.3) - eslint: 9.39.1(jiti@1.21.7) + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.4(jiti@1.21.7)) + '@typescript-eslint/scope-manager': 8.58.0 + '@typescript-eslint/types': 8.58.0 + '@typescript-eslint/typescript-estree': 8.58.0(typescript@5.9.3) + eslint: 9.39.4(jiti@1.21.7) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/visitor-keys@8.56.1': + '@typescript-eslint/visitor-keys@8.58.0': dependencies: - '@typescript-eslint/types': 8.56.1 + '@typescript-eslint/types': 8.58.0 eslint-visitor-keys: 5.0.1 '@ungap/structured-clone@1.3.0': {} @@ -5802,7 +5865,7 @@ snapshots: classnames: 2.5.1 d3-path: 1.0.9 d3-shape: 1.3.7 - lodash: 4.17.23 + lodash: 4.18.1 prop-types: 15.8.1 react: 19.2.4 @@ -5828,15 +5891,15 @@ snapshots: d3-time-format: 4.1.0 internmap: 2.0.3 - '@vitejs/plugin-react-swc@4.2.3(@swc/helpers@0.5.19)(vite@7.3.1(@types/node@24.10.3)(jiti@1.21.7)(yaml@2.8.3))': + '@vitejs/plugin-react-swc@4.3.0(@swc/helpers@0.5.19)(vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@24.12.0)(esbuild@0.27.3)(jiti@1.21.7)(yaml@2.8.3))': dependencies: - '@rolldown/pluginutils': 1.0.0-rc.2 - '@swc/core': 1.15.18(@swc/helpers@0.5.19) - vite: 7.3.1(@types/node@24.10.3)(jiti@1.21.7)(yaml@2.8.3) + '@rolldown/pluginutils': 1.0.0-rc.7 + '@swc/core': 1.15.24(@swc/helpers@0.5.19) + vite: 
8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@24.12.0)(esbuild@0.27.3)(jiti@1.21.7)(yaml@2.8.3) transitivePeerDependencies: - '@swc/helpers' - '@vitejs/plugin-react@5.1.4(vite@7.3.1(@types/node@24.10.3)(jiti@1.21.7)(yaml@2.8.3))': + '@vitejs/plugin-react@5.2.0(vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@24.12.0)(esbuild@0.27.3)(jiti@1.21.7)(yaml@2.8.3))': dependencies: '@babel/core': 7.29.0 '@babel/plugin-transform-react-jsx-self': 7.27.1(@babel/core@7.29.0) @@ -5844,11 +5907,11 @@ snapshots: '@rolldown/pluginutils': 1.0.0-rc.3 '@types/babel__core': 7.20.5 react-refresh: 0.18.0 - vite: 7.3.1(@types/node@24.10.3)(jiti@1.21.7)(yaml@2.8.3) + vite: 8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@24.12.0)(esbuild@0.27.3)(jiti@1.21.7)(yaml@2.8.3) transitivePeerDependencies: - supports-color - '@vitest/coverage-v8@3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.10.3)(happy-dom@20.8.9)(jiti@1.21.7)(msw@2.12.10(@types/node@24.10.3)(typescript@5.9.3))(yaml@2.8.3))': + '@vitest/coverage-v8@3.2.4(vitest@3.2.4(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/debug@4.1.12)(@types/node@24.12.0)(esbuild@0.27.3)(happy-dom@20.8.9)(jiti@1.21.7)(msw@2.12.14(@types/node@24.12.0)(typescript@5.9.3))(yaml@2.8.3))': dependencies: '@ampproject/remapping': 2.3.0 '@bcoe/v8-coverage': 1.0.2 @@ -5863,7 +5926,7 @@ snapshots: std-env: 3.9.0 test-exclude: 7.0.1 tinyrainbow: 2.0.0 - vitest: 3.2.4(@types/debug@4.1.12)(@types/node@24.10.3)(happy-dom@20.8.9)(jiti@1.21.7)(msw@2.12.10(@types/node@24.10.3)(typescript@5.9.3))(yaml@2.8.3) + vitest: 3.2.4(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/debug@4.1.12)(@types/node@24.12.0)(esbuild@0.27.3)(happy-dom@20.8.9)(jiti@1.21.7)(msw@2.12.14(@types/node@24.12.0)(typescript@5.9.3))(yaml@2.8.3) transitivePeerDependencies: - supports-color @@ -5875,14 +5938,14 @@ snapshots: chai: 5.3.3 tinyrainbow: 2.0.0 - 
'@vitest/mocker@3.2.4(msw@2.12.10(@types/node@24.10.3)(typescript@5.9.3))(vite@7.3.1(@types/node@24.10.3)(jiti@1.21.7)(yaml@2.8.3))': + '@vitest/mocker@3.2.4(msw@2.12.14(@types/node@24.12.0)(typescript@5.9.3))(vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@24.12.0)(esbuild@0.27.3)(jiti@1.21.7)(yaml@2.8.3))': dependencies: '@vitest/spy': 3.2.4 estree-walker: 3.0.3 magic-string: 0.30.19 optionalDependencies: - msw: 2.12.10(@types/node@24.10.3)(typescript@5.9.3) - vite: 7.3.1(@types/node@24.10.3)(jiti@1.21.7)(yaml@2.8.3) + msw: 2.12.14(@types/node@24.12.0)(typescript@5.9.3) + vite: 8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@24.12.0)(esbuild@0.27.3)(jiti@1.21.7)(yaml@2.8.3) '@vitest/pretty-format@3.2.4': dependencies: @@ -5910,9 +5973,9 @@ snapshots: loupe: 3.2.1 tinyrainbow: 2.0.0 - '@xyflow/react@12.10.1(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + '@xyflow/react@12.10.2(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@xyflow/system': 0.0.75 + '@xyflow/system': 0.0.76 classcat: 5.0.5 react: 19.2.4 react-dom: 19.2.4(react@19.2.4) @@ -5921,7 +5984,7 @@ snapshots: - '@types/react' - immer - '@xyflow/system@0.0.75': + '@xyflow/system@0.0.76': dependencies: '@types/d3-drag': 3.0.7 '@types/d3-interpolate': 3.0.4 @@ -6493,13 +6556,13 @@ snapshots: dependencies: acorn: 8.14.1 - acorn-jsx@5.3.2(acorn@8.15.0): + acorn-jsx@5.3.2(acorn@8.16.0): dependencies: - acorn: 8.15.0 + acorn: 8.16.0 acorn@8.14.1: {} - acorn@8.15.0: {} + acorn@8.16.0: {} ajv@6.14.0: dependencies: @@ -6619,11 +6682,11 @@ snapshots: axe-core@4.10.3: {} - axios@1.13.6: + axios@1.14.0: dependencies: follow-redirects: 1.15.11 form-data: 4.0.5 - proxy-from-env: 1.1.0 + proxy-from-env: 2.1.0 transitivePeerDependencies: - debug @@ -6645,20 +6708,20 @@ snapshots: balanced-match@4.0.4: {} - baseline-browser-mapping@2.10.0: {} + baseline-browser-mapping@2.10.16: {} binary-extensions@2.3.0: {} - 
brace-expansion@1.1.12: + brace-expansion@1.1.13: dependencies: balanced-match: 1.0.2 concat-map: 0.0.1 - brace-expansion@2.0.2: + brace-expansion@2.0.3: dependencies: balanced-match: 1.0.2 - brace-expansion@5.0.4: + brace-expansion@5.0.5: dependencies: balanced-match: 4.0.4 @@ -6673,13 +6736,13 @@ snapshots: node-releases: 2.0.19 update-browserslist-db: 1.1.3(browserslist@4.24.4) - browserslist@4.28.1: + browserslist@4.28.2: dependencies: - baseline-browser-mapping: 2.10.0 - caniuse-lite: 1.0.30001777 - electron-to-chromium: 1.5.307 - node-releases: 2.0.36 - update-browserslist-db: 1.2.3(browserslist@4.28.1) + baseline-browser-mapping: 2.10.16 + caniuse-lite: 1.0.30001786 + electron-to-chromium: 1.5.331 + node-releases: 2.0.37 + update-browserslist-db: 1.2.3(browserslist@4.28.2) builtin-modules@3.3.0: {} @@ -6687,7 +6750,7 @@ snapshots: dependencies: chokidar: 3.6.0 confbox: 0.1.8 - defu: 6.1.4 + defu: 6.1.6 dotenv: 16.6.1 giget: 1.2.5 jiti: 1.21.7 @@ -6725,7 +6788,7 @@ snapshots: caniuse-lite@1.0.30001707: {} - caniuse-lite@1.0.30001777: {} + caniuse-lite@1.0.30001786: {} ccount@2.0.1: {} @@ -6771,10 +6834,10 @@ snapshots: dependencies: '@kurkle/color': 0.3.4 - chartjs-adapter-dayjs-4@1.0.4(chart.js@4.5.1)(dayjs@1.11.19): + chartjs-adapter-dayjs-4@1.0.4(chart.js@4.5.1)(dayjs@1.11.20): dependencies: chart.js: 4.5.1 - dayjs: 1.11.19 + dayjs: 1.11.20 chartjs-plugin-annotation@3.1.0(chart.js@4.5.1): dependencies: @@ -6862,7 +6925,7 @@ snapshots: path-type: 4.0.0 yaml: 2.8.3 - cross-fetch@4.0.0: + cross-fetch@4.1.0: dependencies: node-fetch: 2.7.0 transitivePeerDependencies: @@ -6970,11 +7033,7 @@ snapshots: es-errors: 1.3.0 is-data-view: 1.0.2 - dayjs@1.11.19: {} - - debug@4.4.0: - dependencies: - ms: 2.1.3 + dayjs@1.11.20: {} debug@4.4.1: dependencies: @@ -7004,7 +7063,7 @@ snapshots: has-property-descriptors: 1.0.2 object-keys: 1.1.1 - defu@6.1.4: {} + defu@6.1.6: {} delaunator@5.0.1: dependencies: @@ -7016,6 +7075,8 @@ snapshots: destr@2.0.5: {} + 
detect-libc@2.1.2: {} + devlop@1.1.0: dependencies: dequal: 2.0.3 @@ -7032,7 +7093,7 @@ snapshots: dom-helpers@5.2.1: dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 csstype: 3.2.3 dotenv@16.6.1: {} @@ -7047,7 +7108,7 @@ snapshots: electron-to-chromium@1.5.123: {} - electron-to-chromium@1.5.307: {} + electron-to-chromium@1.5.331: {} elkjs@0.11.1: {} @@ -7191,6 +7252,7 @@ snapshots: '@esbuild/win32-arm64': 0.27.3 '@esbuild/win32-ia32': 0.27.3 '@esbuild/win32-x64': 0.27.3 + optional: true escalade@3.2.0: {} @@ -7200,42 +7262,42 @@ snapshots: escape-string-regexp@5.0.0: {} - eslint-compat-utils@0.6.5(eslint@9.39.1(jiti@1.21.7)): + eslint-compat-utils@0.6.5(eslint@9.39.4(jiti@1.21.7)): dependencies: - eslint: 9.39.1(jiti@1.21.7) - semver: 7.7.1 + eslint: 9.39.4(jiti@1.21.7) + semver: 7.7.4 - eslint-config-prettier@10.1.8(eslint@9.39.1(jiti@1.21.7)): + eslint-config-prettier@10.1.8(eslint@9.39.4(jiti@1.21.7)): dependencies: - eslint: 9.39.1(jiti@1.21.7) + eslint: 9.39.4(jiti@1.21.7) - eslint-json-compat-utils@0.2.1(eslint@9.39.1(jiti@1.21.7))(jsonc-eslint-parser@2.4.1): + eslint-json-compat-utils@0.2.3(eslint@9.39.4(jiti@1.21.7))(jsonc-eslint-parser@2.4.2): dependencies: - eslint: 9.39.1(jiti@1.21.7) - esquery: 1.6.0 - jsonc-eslint-parser: 2.4.1 + eslint: 9.39.4(jiti@1.21.7) + esquery: 1.7.0 + jsonc-eslint-parser: 2.4.2 eslint-plugin-i18next@6.1.3: dependencies: - lodash: 4.17.23 + lodash: 4.18.1 requireindex: 1.1.0 - eslint-plugin-jsonc@2.21.0(eslint@9.39.1(jiti@1.21.7)): + eslint-plugin-jsonc@2.21.1(eslint@9.39.4(jiti@1.21.7)): dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.1(jiti@1.21.7)) + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.4(jiti@1.21.7)) diff-sequences: 27.5.1 - eslint: 9.39.1(jiti@1.21.7) - eslint-compat-utils: 0.6.5(eslint@9.39.1(jiti@1.21.7)) - eslint-json-compat-utils: 0.2.1(eslint@9.39.1(jiti@1.21.7))(jsonc-eslint-parser@2.4.1) + eslint: 9.39.4(jiti@1.21.7) + eslint-compat-utils: 
0.6.5(eslint@9.39.4(jiti@1.21.7)) + eslint-json-compat-utils: 0.2.3(eslint@9.39.4(jiti@1.21.7))(jsonc-eslint-parser@2.4.2) espree: 10.4.0 graphemer: 1.4.0 - jsonc-eslint-parser: 2.4.1 + jsonc-eslint-parser: 2.4.2 natural-compare: 1.4.0 - synckit: 0.11.8 + synckit: 0.11.12 transitivePeerDependencies: - '@eslint/json' - eslint-plugin-jsx-a11y@6.10.2(eslint@9.39.1(jiti@1.21.7)): + eslint-plugin-jsx-a11y@6.10.2(eslint@9.39.4(jiti@1.21.7)): dependencies: aria-query: 5.3.2 array-includes: 3.1.8 @@ -7245,7 +7307,7 @@ snapshots: axobject-query: 4.1.0 damerau-levenshtein: 1.0.8 emoji-regex: 9.2.2 - eslint: 9.39.1(jiti@1.21.7) + eslint: 9.39.4(jiti@1.21.7) hasown: 2.0.2 jsx-ast-utils: 3.3.5 language-tags: 1.0.9 @@ -7254,41 +7316,41 @@ snapshots: safe-regex-test: 1.1.0 string.prototype.includes: 2.0.1 - eslint-plugin-perfectionist@4.15.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3): + eslint-plugin-perfectionist@4.15.1(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3): dependencies: '@typescript-eslint/types': 8.48.1 - '@typescript-eslint/utils': 8.56.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3) - eslint: 9.39.1(jiti@1.21.7) + '@typescript-eslint/utils': 8.58.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3) + eslint: 9.39.4(jiti@1.21.7) natural-orderby: 5.0.0 transitivePeerDependencies: - supports-color - typescript - eslint-plugin-prettier@5.5.5(eslint-config-prettier@10.1.8(eslint@9.39.1(jiti@1.21.7)))(eslint@9.39.1(jiti@1.21.7))(prettier@3.8.1): + eslint-plugin-prettier@5.5.5(eslint-config-prettier@10.1.8(eslint@9.39.4(jiti@1.21.7)))(eslint@9.39.4(jiti@1.21.7))(prettier@3.8.1): dependencies: - eslint: 9.39.1(jiti@1.21.7) + eslint: 9.39.4(jiti@1.21.7) prettier: 3.8.1 prettier-linter-helpers: 1.0.1 synckit: 0.11.12 optionalDependencies: - eslint-config-prettier: 10.1.8(eslint@9.39.1(jiti@1.21.7)) + eslint-config-prettier: 10.1.8(eslint@9.39.4(jiti@1.21.7)) - eslint-plugin-react-hooks@7.0.1(eslint@9.39.1(jiti@1.21.7)): + 
eslint-plugin-react-hooks@7.0.1(eslint@9.39.4(jiti@1.21.7)): dependencies: '@babel/core': 7.28.5 '@babel/parser': 7.28.5 - eslint: 9.39.1(jiti@1.21.7) + eslint: 9.39.4(jiti@1.21.7) hermes-parser: 0.25.1 zod: 4.2.1 zod-validation-error: 4.0.2(zod@4.2.1) transitivePeerDependencies: - supports-color - eslint-plugin-react-refresh@0.5.2(eslint@9.39.1(jiti@1.21.7)): + eslint-plugin-react-refresh@0.5.2(eslint@9.39.4(jiti@1.21.7)): dependencies: - eslint: 9.39.1(jiti@1.21.7) + eslint: 9.39.4(jiti@1.21.7) - eslint-plugin-react@7.37.5(eslint@9.39.1(jiti@1.21.7)): + eslint-plugin-react@7.37.5(eslint@9.39.4(jiti@1.21.7)): dependencies: array-includes: 3.1.8 array.prototype.findlast: 1.2.5 @@ -7296,7 +7358,7 @@ snapshots: array.prototype.tosorted: 1.1.4 doctrine: 2.1.0 es-iterator-helpers: 1.2.1 - eslint: 9.39.1(jiti@1.21.7) + eslint: 9.39.4(jiti@1.21.7) estraverse: 5.3.0 hasown: 2.0.2 jsx-ast-utils: 3.3.5 @@ -7310,14 +7372,14 @@ snapshots: string.prototype.matchall: 4.0.12 string.prototype.repeat: 1.0.0 - eslint-plugin-unicorn@55.0.0(eslint@9.39.1(jiti@1.21.7)): + eslint-plugin-unicorn@55.0.0(eslint@9.39.4(jiti@1.21.7)): dependencies: '@babel/helper-validator-identifier': 7.25.9 - '@eslint-community/eslint-utils': 4.5.1(eslint@9.39.1(jiti@1.21.7)) + '@eslint-community/eslint-utils': 4.5.1(eslint@9.39.4(jiti@1.21.7)) ci-info: 4.2.0 clean-regexp: 1.0.0 core-js-compat: 3.41.0 - eslint: 9.39.1(jiti@1.21.7) + eslint: 9.39.4(jiti@1.21.7) esquery: 1.6.0 globals: 15.15.0 indent-string: 4.0.0 @@ -7343,29 +7405,29 @@ snapshots: eslint-visitor-keys@5.0.1: {} - eslint@9.39.1(jiti@1.21.7): + eslint@9.39.4(jiti@1.21.7): dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.1(jiti@1.21.7)) - '@eslint-community/regexpp': 4.12.1 - '@eslint/config-array': 0.21.1 + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.4(jiti@1.21.7)) + '@eslint-community/regexpp': 4.12.2 + '@eslint/config-array': 0.21.2 '@eslint/config-helpers': 0.4.2 '@eslint/core': 0.17.0 - '@eslint/eslintrc': 
3.3.1 - '@eslint/js': 9.39.1 + '@eslint/eslintrc': 3.3.5 + '@eslint/js': 9.39.4 '@eslint/plugin-kit': 0.4.1 - '@humanfs/node': 0.16.6 + '@humanfs/node': 0.16.7 '@humanwhocodes/module-importer': 1.0.1 - '@humanwhocodes/retry': 0.4.2 + '@humanwhocodes/retry': 0.4.3 '@types/estree': 1.0.8 ajv: 6.14.0 chalk: 4.1.2 cross-spawn: 7.0.6 - debug: 4.4.1 + debug: 4.4.3 escape-string-regexp: 4.0.0 eslint-scope: 8.4.0 eslint-visitor-keys: 4.2.1 espree: 10.4.0 - esquery: 1.6.0 + esquery: 1.7.0 esutils: 2.0.3 fast-deep-equal: 3.1.3 file-entry-cache: 8.0.0 @@ -7392,14 +7454,14 @@ snapshots: espree@10.4.0: dependencies: - acorn: 8.15.0 - acorn-jsx: 5.3.2(acorn@8.15.0) + acorn: 8.16.0 + acorn-jsx: 5.3.2(acorn@8.16.0) eslint-visitor-keys: 4.2.1 espree@9.6.1: dependencies: - acorn: 8.15.0 - acorn-jsx: 5.3.2(acorn@8.15.0) + acorn: 8.16.0 + acorn-jsx: 5.3.2(acorn@8.16.0) eslint-visitor-keys: 3.4.3 esprima@4.0.1: {} @@ -7408,6 +7470,10 @@ snapshots: dependencies: estraverse: 5.3.0 + esquery@1.7.0: + dependencies: + estraverse: 5.3.0 + esrecurse@4.3.0: dependencies: estraverse: 5.3.0 @@ -7541,7 +7607,7 @@ snapshots: dependencies: citty: 0.1.6 consola: 3.4.2 - defu: 6.1.4 + defu: 6.1.6 node-fetch-native: 1.6.7 nypm: 0.5.4 pathe: 2.0.3 @@ -7568,7 +7634,7 @@ snapshots: dependencies: foreground-child: 3.3.1 jackspeak: 4.2.3 - minimatch: 10.2.4 + minimatch: 10.2.5 minipass: 7.1.3 package-json-from-dist: 1.0.1 path-scurry: 2.0.2 @@ -7588,7 +7654,7 @@ snapshots: graphemer@1.4.0: {} - graphql@16.13.1: {} + graphql@16.13.2: {} handlebars@4.7.9: dependencies: @@ -7601,7 +7667,7 @@ snapshots: happy-dom@20.8.9: dependencies: - '@types/node': 24.10.3 + '@types/node': 24.12.0 '@types/whatwg-mimetype': 3.0.2 '@types/ws': 8.18.1 entities: 7.0.1 @@ -7637,7 +7703,7 @@ snapshots: hast-util-to-jsx-runtime@2.3.6: dependencies: - '@types/estree': 1.0.6 + '@types/estree': 1.0.8 '@types/hast': 3.0.4 '@types/unist': 3.0.3 comma-separated-tokens: 2.0.3 @@ -7699,15 +7765,15 @@ snapshots: dependencies: 
'@babel/runtime': 7.28.6 - i18next-http-backend@3.0.2: + i18next-http-backend@3.0.4: dependencies: - cross-fetch: 4.0.0 + cross-fetch: 4.1.0 transitivePeerDependencies: - encoding - i18next@25.8.16(typescript@5.9.3): + i18next@25.10.10(typescript@5.9.3): dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 optionalDependencies: typescript: 5.9.3 @@ -7961,12 +8027,12 @@ snapshots: json5@2.2.3: {} - jsonc-eslint-parser@2.4.1: + jsonc-eslint-parser@2.4.2: dependencies: - acorn: 8.15.0 + acorn: 8.16.0 eslint-visitor-keys: 3.4.3 espree: 9.6.1 - semver: 7.7.1 + semver: 7.7.4 jsonpointer@5.0.1: {} @@ -7992,6 +8058,55 @@ snapshots: prelude-ls: 1.2.1 type-check: 0.4.0 + lightningcss-android-arm64@1.32.0: + optional: true + + lightningcss-darwin-arm64@1.32.0: + optional: true + + lightningcss-darwin-x64@1.32.0: + optional: true + + lightningcss-freebsd-x64@1.32.0: + optional: true + + lightningcss-linux-arm-gnueabihf@1.32.0: + optional: true + + lightningcss-linux-arm64-gnu@1.32.0: + optional: true + + lightningcss-linux-arm64-musl@1.32.0: + optional: true + + lightningcss-linux-x64-gnu@1.32.0: + optional: true + + lightningcss-linux-x64-musl@1.32.0: + optional: true + + lightningcss-win32-arm64-msvc@1.32.0: + optional: true + + lightningcss-win32-x64-msvc@1.32.0: + optional: true + + lightningcss@1.32.0: + dependencies: + detect-libc: 2.1.2 + optionalDependencies: + lightningcss-android-arm64: 1.32.0 + lightningcss-darwin-arm64: 1.32.0 + lightningcss-darwin-x64: 1.32.0 + lightningcss-freebsd-x64: 1.32.0 + lightningcss-linux-arm-gnueabihf: 1.32.0 + lightningcss-linux-arm64-gnu: 1.32.0 + lightningcss-linux-arm64-musl: 1.32.0 + lightningcss-linux-x64-gnu: 1.32.0 + lightningcss-linux-x64-musl: 1.32.0 + lightningcss-win32-arm64-msvc: 1.32.0 + lightningcss-win32-x64-msvc: 1.32.0 + lines-and-columns@1.2.4: {} locate-path@5.0.0: @@ -8006,7 +8121,7 @@ snapshots: lodash.merge@4.6.2: {} - lodash@4.17.23: {} + lodash@4.18.1: {} longest-streak@3.1.0: {} @@ -8023,7 +8138,7 
@@ snapshots: lru-cache@10.4.3: {} - lru-cache@11.2.7: {} + lru-cache@11.3.2: {} lru-cache@5.1.1: dependencies: @@ -8403,17 +8518,17 @@ snapshots: min-indent@1.0.1: {} - minimatch@10.2.4: + minimatch@10.2.5: dependencies: - brace-expansion: 5.0.4 + brace-expansion: 5.0.5 minimatch@3.1.5: dependencies: - brace-expansion: 1.1.12 + brace-expansion: 1.1.13 minimatch@9.0.9: dependencies: - brace-expansion: 2.0.2 + brace-expansion: 2.0.3 minimist@1.2.8: {} @@ -8427,7 +8542,7 @@ snapshots: mlly@1.8.0: dependencies: - acorn: 8.15.0 + acorn: 8.16.0 pathe: 2.0.3 pkg-types: 1.3.1 ufo: 1.6.1 @@ -8438,14 +8553,14 @@ snapshots: ms@2.1.3: {} - msw@2.12.10(@types/node@24.10.3)(typescript@5.9.3): + msw@2.12.14(@types/node@24.12.0)(typescript@5.9.3): dependencies: - '@inquirer/confirm': 5.1.21(@types/node@24.10.3) + '@inquirer/confirm': 5.1.21(@types/node@24.12.0) '@mswjs/interceptors': 0.41.3 '@open-draft/deferred-promise': 2.2.0 '@types/statuses': 2.0.6 cookie: 1.1.1 - graphql: 16.13.1 + graphql: 16.13.2 headers-polyfill: 4.0.3 is-node-process: 1.2.0 outvariant: 1.4.3 @@ -8454,8 +8569,8 @@ snapshots: rettime: 0.10.1 statuses: 2.0.2 strict-event-emitter: 0.5.1 - tough-cookie: 6.0.0 - type-fest: 5.4.4 + tough-cookie: 6.0.1 + type-fest: 5.5.0 until-async: 3.0.2 yargs: 17.7.2 optionalDependencies: @@ -8486,7 +8601,7 @@ snapshots: node-releases@2.0.19: {} - node-releases@2.0.36: {} + node-releases@2.0.37: {} normalize-package-data@2.5.0: dependencies: @@ -8558,7 +8673,7 @@ snapshots: openapi-merge@1.3.3: dependencies: atlassian-openapi: 1.0.21 - lodash: 4.17.23 + lodash: 4.18.1 ts-is-present: 1.2.2 optionator@0.9.4: @@ -8643,7 +8758,7 @@ snapshots: path-scurry@2.0.2: dependencies: - lru-cache: 11.2.7 + lru-cache: 11.3.2 minipass: 7.1.3 path-to-regexp@6.3.0: {} @@ -8672,11 +8787,11 @@ snapshots: mlly: 1.8.0 pathe: 2.0.3 - playwright-core@1.58.2: {} + playwright-core@1.59.1: {} - playwright@1.58.2: + playwright@1.59.1: dependencies: - playwright-core: 1.58.2 + playwright-core: 1.59.1 
optionalDependencies: fsevents: 2.3.2 @@ -8720,7 +8835,7 @@ snapshots: proxy-compare@3.0.1: {} - proxy-from-env@1.1.0: {} + proxy-from-env@2.1.0: {} proxy-memoize@3.0.1: dependencies: @@ -8730,7 +8845,7 @@ snapshots: rc9@2.1.2: dependencies: - defu: 6.1.4 + defu: 6.1.6 destr: 2.0.5 react-chartjs-2@5.3.1(chart.js@4.5.1)(react@19.2.4): @@ -8743,20 +8858,20 @@ snapshots: react: 19.2.4 scheduler: 0.27.0 - react-hook-form@7.71.2(react@19.2.4): + react-hook-form@7.72.0(react@19.2.4): dependencies: react: 19.2.4 - react-hotkeys-hook@4.6.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + react-hotkeys-hook@4.6.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4): dependencies: react: 19.2.4 react-dom: 19.2.4(react@19.2.4) - react-i18next@15.5.1(i18next@25.8.16(typescript@5.9.3))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@5.9.3): + react-i18next@15.7.4(i18next@25.10.10(typescript@5.9.3))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@5.9.3): dependencies: - '@babel/runtime': 7.26.10 + '@babel/runtime': 7.29.2 html-parse-stringify: 3.0.1 - i18next: 25.8.16(typescript@5.9.3) + i18next: 25.10.10(typescript@5.9.3) react: 19.2.4 optionalDependencies: react-dom: 19.2.4(react@19.2.4) @@ -8800,13 +8915,13 @@ snapshots: react: 19.2.4 react-dom: 19.2.4(react@19.2.4) - react-router-dom@7.13.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + react-router-dom@7.14.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4): dependencies: react: 19.2.4 react-dom: 19.2.4(react@19.2.4) - react-router: 7.13.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react-router: 7.14.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4) - react-router@7.13.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + react-router@7.14.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4): dependencies: cookie: 1.1.1 react: 19.2.4 @@ -8816,7 +8931,7 @@ snapshots: react-select@5.10.1(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4): dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 
7.29.2 '@emotion/cache': 11.14.0 '@emotion/react': 11.14.0(@types/react@19.2.14)(react@19.2.4) '@floating-ui/dom': 1.7.1 @@ -8831,9 +8946,9 @@ snapshots: - '@types/react' - supports-color - react-syntax-highlighter@15.6.1(react@19.2.4): + react-syntax-highlighter@15.6.6(react@19.2.4): dependencies: - '@babel/runtime': 7.26.10 + '@babel/runtime': 7.29.2 highlight.js: 10.7.3 highlightjs-vue: 1.0.0 lowlight: 1.20.0 @@ -8843,7 +8958,7 @@ snapshots: react-transition-group@4.4.5(react-dom@19.2.4(react@19.2.4))(react@19.2.4): dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 dom-helpers: 5.2.1 loose-envify: 1.4.0 prop-types: 15.8.1 @@ -8964,36 +9079,29 @@ snapshots: robust-predicates@3.0.2: {} - rollup@4.59.0: + rolldown@1.0.0-rc.12(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2): dependencies: - '@types/estree': 1.0.8 + '@oxc-project/types': 0.122.0 + '@rolldown/pluginutils': 1.0.0-rc.12 optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.59.0 - '@rollup/rollup-android-arm64': 4.59.0 - '@rollup/rollup-darwin-arm64': 4.59.0 - '@rollup/rollup-darwin-x64': 4.59.0 - '@rollup/rollup-freebsd-arm64': 4.59.0 - '@rollup/rollup-freebsd-x64': 4.59.0 - '@rollup/rollup-linux-arm-gnueabihf': 4.59.0 - '@rollup/rollup-linux-arm-musleabihf': 4.59.0 - '@rollup/rollup-linux-arm64-gnu': 4.59.0 - '@rollup/rollup-linux-arm64-musl': 4.59.0 - '@rollup/rollup-linux-loong64-gnu': 4.59.0 - '@rollup/rollup-linux-loong64-musl': 4.59.0 - '@rollup/rollup-linux-ppc64-gnu': 4.59.0 - '@rollup/rollup-linux-ppc64-musl': 4.59.0 - '@rollup/rollup-linux-riscv64-gnu': 4.59.0 - '@rollup/rollup-linux-riscv64-musl': 4.59.0 - '@rollup/rollup-linux-s390x-gnu': 4.59.0 - '@rollup/rollup-linux-x64-gnu': 4.59.0 - '@rollup/rollup-linux-x64-musl': 4.59.0 - '@rollup/rollup-openbsd-x64': 4.59.0 - '@rollup/rollup-openharmony-arm64': 4.59.0 - '@rollup/rollup-win32-arm64-msvc': 4.59.0 - '@rollup/rollup-win32-ia32-msvc': 4.59.0 - '@rollup/rollup-win32-x64-gnu': 4.59.0 - '@rollup/rollup-win32-x64-msvc': 
4.59.0 - fsevents: 2.3.3 + '@rolldown/binding-android-arm64': 1.0.0-rc.12 + '@rolldown/binding-darwin-arm64': 1.0.0-rc.12 + '@rolldown/binding-darwin-x64': 1.0.0-rc.12 + '@rolldown/binding-freebsd-x64': 1.0.0-rc.12 + '@rolldown/binding-linux-arm-gnueabihf': 1.0.0-rc.12 + '@rolldown/binding-linux-arm64-gnu': 1.0.0-rc.12 + '@rolldown/binding-linux-arm64-musl': 1.0.0-rc.12 + '@rolldown/binding-linux-ppc64-gnu': 1.0.0-rc.12 + '@rolldown/binding-linux-s390x-gnu': 1.0.0-rc.12 + '@rolldown/binding-linux-x64-gnu': 1.0.0-rc.12 + '@rolldown/binding-linux-x64-musl': 1.0.0-rc.12 + '@rolldown/binding-openharmony-arm64': 1.0.0-rc.12 + '@rolldown/binding-wasm32-wasi': 1.0.0-rc.12(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2) + '@rolldown/binding-win32-arm64-msvc': 1.0.0-rc.12 + '@rolldown/binding-win32-x64-msvc': 1.0.0-rc.12 + transitivePeerDependencies: + - '@emnapi/core' + - '@emnapi/runtime' safe-array-concat@1.1.3: dependencies: @@ -9227,10 +9335,6 @@ snapshots: dependencies: '@pkgr/core': 0.2.9 - synckit@0.11.8: - dependencies: - '@pkgr/core': 0.2.4 - tagged-tag@1.0.0: {} tar@7.5.11: @@ -9262,11 +9366,11 @@ snapshots: tinyspy@4.0.3: {} - tldts-core@7.0.25: {} + tldts-core@7.0.28: {} - tldts@7.0.25: + tldts@7.0.28: dependencies: - tldts-core: 7.0.25 + tldts-core: 7.0.28 to-fast-properties@2.0.0: {} @@ -9274,9 +9378,9 @@ snapshots: dependencies: is-number: 7.0.0 - tough-cookie@6.0.0: + tough-cookie@6.0.1: dependencies: - tldts: 7.0.25 + tldts: 7.0.28 tr46@0.0.3: {} @@ -9284,7 +9388,7 @@ snapshots: trough@2.2.0: {} - ts-api-utils@2.4.0(typescript@5.9.3): + ts-api-utils@2.5.0(typescript@5.9.3): dependencies: typescript: 5.9.3 @@ -9305,7 +9409,7 @@ snapshots: type-fest@0.8.1: {} - type-fest@5.4.4: + type-fest@5.5.0: dependencies: tagged-tag: 1.0.0 @@ -9342,13 +9446,13 @@ snapshots: possible-typed-array-names: 1.1.0 reflect.getprototypeof: 1.0.10 - typescript-eslint@8.56.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3): + 
typescript-eslint@8.58.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3): dependencies: - '@typescript-eslint/eslint-plugin': 8.56.1(@typescript-eslint/parser@8.56.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/parser': 8.56.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/typescript-estree': 8.56.1(typescript@5.9.3) - '@typescript-eslint/utils': 8.56.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3) - eslint: 9.39.1(jiti@1.21.7) + '@typescript-eslint/eslint-plugin': 8.58.0(@typescript-eslint/parser@8.58.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/parser': 8.58.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/typescript-estree': 8.58.0(typescript@5.9.3) + '@typescript-eslint/utils': 8.58.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3) + eslint: 9.39.4(jiti@1.21.7) typescript: 5.9.3 transitivePeerDependencies: - supports-color @@ -9410,9 +9514,9 @@ snapshots: escalade: 3.2.0 picocolors: 1.1.1 - update-browserslist-db@1.2.3(browserslist@4.28.1): + update-browserslist-db@1.2.3(browserslist@4.28.2): dependencies: - browserslist: 4.28.1 + browserslist: 4.28.2 escalade: 3.2.0 picocolors: 1.1.1 @@ -9424,7 +9528,7 @@ snapshots: urijs@1.19.11: {} - use-debounce@10.1.0(react@19.2.4): + use-debounce@10.1.1(react@19.2.4): dependencies: react: 19.2.4 @@ -9458,18 +9562,21 @@ snapshots: '@types/unist': 3.0.3 vfile-message: 4.0.2 - vite-node@3.2.4(@types/node@24.10.3)(jiti@1.21.7)(yaml@2.8.3): + vite-node@3.2.4(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@24.12.0)(esbuild@0.27.3)(jiti@1.21.7)(yaml@2.8.3): dependencies: cac: 6.7.14 debug: 4.4.1 es-module-lexer: 1.7.0 pathe: 2.0.3 - vite: 7.3.1(@types/node@24.10.3)(jiti@1.21.7)(yaml@2.8.3) + vite: 8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@24.12.0)(esbuild@0.27.3)(jiti@1.21.7)(yaml@2.8.3) transitivePeerDependencies: + - 
'@emnapi/core' + - '@emnapi/runtime' - '@types/node' + - '@vitejs/devtools' + - esbuild - jiti - less - - lightningcss - sass - sass-embedded - stylus @@ -9479,29 +9586,32 @@ snapshots: - tsx - yaml - vite-plugin-css-injected-by-js@3.5.2(vite@7.3.1(@types/node@24.10.3)(jiti@1.21.7)(yaml@2.8.3)): + vite-plugin-css-injected-by-js@3.5.2(vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@24.12.0)(esbuild@0.27.3)(jiti@1.21.7)(yaml@2.8.3)): dependencies: - vite: 7.3.1(@types/node@24.10.3)(jiti@1.21.7)(yaml@2.8.3) + vite: 8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@24.12.0)(esbuild@0.27.3)(jiti@1.21.7)(yaml@2.8.3) - vite@7.3.1(@types/node@24.10.3)(jiti@1.21.7)(yaml@2.8.3): + vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@24.12.0)(esbuild@0.27.3)(jiti@1.21.7)(yaml@2.8.3): dependencies: - esbuild: 0.27.3 - fdir: 6.5.0(picomatch@4.0.4) + lightningcss: 1.32.0 picomatch: 4.0.4 postcss: 8.5.8 - rollup: 4.59.0 + rolldown: 1.0.0-rc.12(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2) tinyglobby: 0.2.15 optionalDependencies: - '@types/node': 24.10.3 + '@types/node': 24.12.0 + esbuild: 0.27.3 fsevents: 2.3.3 jiti: 1.21.7 yaml: 2.8.3 + transitivePeerDependencies: + - '@emnapi/core' + - '@emnapi/runtime' - vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.10.3)(happy-dom@20.8.9)(jiti@1.21.7)(msw@2.12.10(@types/node@24.10.3)(typescript@5.9.3))(yaml@2.8.3): + vitest@3.2.4(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/debug@4.1.12)(@types/node@24.12.0)(esbuild@0.27.3)(happy-dom@20.8.9)(jiti@1.21.7)(msw@2.12.14(@types/node@24.12.0)(typescript@5.9.3))(yaml@2.8.3): dependencies: '@types/chai': 5.2.2 '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(msw@2.12.10(@types/node@24.10.3)(typescript@5.9.3))(vite@7.3.1(@types/node@24.10.3)(jiti@1.21.7)(yaml@2.8.3)) + '@vitest/mocker': 
3.2.4(msw@2.12.14(@types/node@24.12.0)(typescript@5.9.3))(vite@8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@24.12.0)(esbuild@0.27.3)(jiti@1.21.7)(yaml@2.8.3)) '@vitest/pretty-format': 3.2.4 '@vitest/runner': 3.2.4 '@vitest/snapshot': 3.2.4 @@ -9519,17 +9629,20 @@ snapshots: tinyglobby: 0.2.15 tinypool: 1.1.1 tinyrainbow: 2.0.0 - vite: 7.3.1(@types/node@24.10.3)(jiti@1.21.7)(yaml@2.8.3) - vite-node: 3.2.4(@types/node@24.10.3)(jiti@1.21.7)(yaml@2.8.3) + vite: 8.0.5(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@24.12.0)(esbuild@0.27.3)(jiti@1.21.7)(yaml@2.8.3) + vite-node: 3.2.4(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@24.12.0)(esbuild@0.27.3)(jiti@1.21.7)(yaml@2.8.3) why-is-node-running: 2.3.0 optionalDependencies: '@types/debug': 4.1.12 - '@types/node': 24.10.3 + '@types/node': 24.12.0 happy-dom: 20.8.9 transitivePeerDependencies: + - '@emnapi/core' + - '@emnapi/runtime' + - '@vitejs/devtools' + - esbuild - jiti - less - - lightningcss - msw - sass - sass-embedded @@ -9668,7 +9781,7 @@ snapshots: '@types/react': 19.2.14 react: 19.2.4 - zustand@5.0.11(@types/react@19.2.14)(react@19.2.4)(use-sync-external-store@1.6.0(react@19.2.4)): + zustand@5.0.12(@types/react@19.2.14)(react@19.2.4)(use-sync-external-store@1.6.0(react@19.2.4)): optionalDependencies: '@types/react': 19.2.14 react: 19.2.4 diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/admin.json b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/admin.json index f7d17f2bb9230..22b38661efb7f 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/admin.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/admin.json @@ -120,7 +120,8 @@ "includeDeferred": "包含延後任務", "nameMaxLength": "名稱最多只能包含 256 個字元", "nameRequired": "名稱是必填的", - "slots": "配額" + "slots": "配額", + "slotsHelperText": "使用 -1 表示無限制配額。" }, "noPoolsFound": "找不到資源池", "pool_one": "資源池", diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/assets.json 
b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/assets.json index 3e3bedd805547..843fed1452273 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/assets.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/assets.json @@ -29,7 +29,7 @@ "group": "群組", "lastAssetEvent": "最後資源事件", "name": "名稱", - "producingTasks": "生產任務", + "producingTasks": "生產者任務", "scheduledDags": "已排程的 Dags", "scheduling": "排程", "searchPlaceholder": "搜尋資源", diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/browse.json b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/browse.json index e98d66cfb959e..f798eb6b875b0 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/browse.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/browse.json @@ -26,11 +26,11 @@ }, "delete": { "error": "刪除 XCom 失敗", - "errorTitle": "Error", + "errorTitle": "錯誤", "success": "成功刪除 XCom", "successTitle": "已刪除 XCom", "title": "刪除 XCom", - "warning": "你確定要刪除這個 XCom 嗎?此動作無法復原。" + "warning": "您確定要刪除這個 XCom 嗎?此動作無法復原。" }, "edit": { "error": "更新 XCom 失敗", diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/common.json b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/common.json index efaa36625d231..4562f9cb980d7 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/common.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/common.json @@ -103,6 +103,12 @@ "notFound": "找不到頁面", "title": "錯誤" }, + "errors": { + "forbidden": { + "description": "您沒有執行此操作的權限。", + "title": "存取被拒" + } + }, "expand": { "collapse": "收合", "expand": "展開", @@ -128,6 +134,7 @@ "selectDateRange": "選擇日期範圍", "startTime": "起始時間" }, + "generateToken": "產生權杖", "logicalDate": "邏輯日期", "logout": "登出", "logoutConfirmation": "確定要登出嗎?", @@ -171,6 +178,7 @@ "reset": "重置", "runId": "執行 ID", "runTypes": { + "asset_materialization": "資源實體化", "asset_triggered": "資源觸發", "backfill": "回填", "manual": "手動觸發", @@ -327,6 +335,18 @@ } } }, + 
"tokenGeneration": { + "apiToken": "API 權杖", + "cliToken": "CLI 權杖", + "errorDescription": "產生權杖時發生錯誤,請再試一次。", + "errorTitle": "權杖產生失敗", + "generate": "產生", + "selectType": "選擇要產生的權杖類型。", + "title": "產生權杖", + "tokenExpiresIn": "此權杖將在 {{duration}} 後過期。", + "tokenGenerated": "您的權杖已產生。", + "tokenShownOnce": "此權杖僅會顯示一次,請立即複製。" + }, "total": "總計 {{state}}", "triggered": "已觸發", "tryNumber": "嘗試次數", diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/components.json b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/components.json index 32994e4f3099b..7e9e22e38141c 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/components.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/components.json @@ -99,7 +99,7 @@ "limitedList.allTags_other": "所有標籤 ({{count}})", "limitedList.clickToInteract": "點擊標籤以篩選 Dags", "limitedList.clickToOpenFull": "點擊 \"+{{count}} 更多\" 以開啟完整檢視", - "limitedList.copyPasteText": "你可以複製並貼上上方文字", + "limitedList.copyPasteText": "您可以複製並貼上上方文字", "limitedList.showingItems_one": "顯示 1 個項目", "limitedList.showingItems_other": "顯示 {{count}} 個項目", "logs": { @@ -144,7 +144,7 @@ "title": "已觸發 Dag 執行" } }, - "triggerAgainWithConfig": "使用此配置再次觸發", + "triggerAgainWithConfig": "使用此設定再次觸發", "unpause": "觸發時取消暫停 {{dagDisplayName}}" }, "trimText": { diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/dag.json b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/dag.json index 9c6dced4c52fe..9702718cfff52 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/dag.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/dag.json @@ -45,7 +45,8 @@ "buttons": { "resetToLatest": "重設為最新", "toggleGroup": "切換群組狀態" - } + }, + "runTypeLegend": "執行類型圖例" }, "header": { "buttons": { diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/dags.json b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/dags.json index c55fe8367b0f7..473fecdda8262 100644 --- 
a/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/dags.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/dags.json @@ -65,7 +65,7 @@ "future": "未來", "onlyFailed": "只清除失敗任務", "past": "過去", - "preventRunningTasks": "如果任務正在運行,則阻止重新運行", + "preventRunningTasks": "若任務正在執行中,則防止重新執行", "queueNew": "排隊新任務", "runOnLatestVersion": "執行最新套件包版本", "upstream": "上游" diff --git a/airflow-core/src/airflow/ui/src/components/DagVersionSelect.test.tsx b/airflow-core/src/airflow/ui/src/components/DagVersionSelect.test.tsx new file mode 100644 index 0000000000000..38b0332252aeb --- /dev/null +++ b/airflow-core/src/airflow/ui/src/components/DagVersionSelect.test.tsx @@ -0,0 +1,120 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { render } from "@testing-library/react"; +import { describe, expect, it, vi } from "vitest"; + +import { Wrapper } from "src/utils/Wrapper"; + +import { DagVersionSelect } from "./DagVersionSelect"; + +const dagVersionV1 = { + bundle_name: "dags-folder", + bundle_version: null, + created_at: "2025-01-01T00:00:00Z", + dag_id: "test_dag", + version_number: 1, +}; +const dagVersionV2 = { + bundle_name: "dags-folder", + bundle_version: null, + created_at: "2025-01-02T00:00:00Z", + dag_id: "test_dag", + version_number: 2, +}; +const dagVersionV3 = { + bundle_name: "dags-folder", + bundle_version: null, + created_at: "2025-01-03T00:00:00Z", + dag_id: "test_dag", + version_number: 3, +}; + +const allVersions = [dagVersionV3, dagVersionV2, dagVersionV1]; + +let mockParams: Record = { dagId: "test_dag" }; + +vi.mock("react-router-dom", async () => { + const actual = await vi.importActual("react-router-dom"); + + return { + ...actual, + useParams: () => mockParams, + }; +}); + +vi.mock("openapi/queries", () => ({ + useDagRunServiceGetDagRun: vi.fn(() => ({ + data: undefined, + isLoading: false, + })), + useDagVersionServiceGetDagVersions: vi.fn(() => ({ + data: { dag_versions: allVersions, total_entries: 3 }, + isLoading: false, + })), +})); + +vi.mock("src/hooks/useSelectedVersion", () => ({ + default: vi.fn(() => undefined), +})); + +const { useDagRunServiceGetDagRun } = await import("openapi/queries"); + +const mockRunData = { + bundle_version: null, + conf: null, + dag_display_name: "test_dag", + dag_id: "test_dag", + dag_versions: [dagVersionV1, dagVersionV2], + end_date: null, + has_missed_deadline: false, + logical_date: null, + note: null, + partition_key: null, + queued_at: null, + run_after: "2025-01-01T00:00:00Z", + run_id: "run_1", + run_type: "manual" as const, + start_date: null, + state: "success" as const, + triggered_by: "ui" as const, + triggering_user_name: null, +}; + +const getItems = (container: HTMLElement) => 
container.querySelectorAll(".chakra-select__item"); + +describe("DagVersionSelect", () => { + it("shows all versions when no DagRun is selected", () => { + mockParams = { dagId: "test_dag" }; + const { container } = render(, { wrapper: Wrapper }); + + expect(getItems(container)).toHaveLength(3); + }); + + it("shows only the selected run's versions when a DagRun is selected", () => { + mockParams = { dagId: "test_dag", runId: "run_1" }; + vi.mocked(useDagRunServiceGetDagRun).mockReturnValue({ + data: mockRunData, + isLoading: false, + } as ReturnType); + + const { container } = render(, { wrapper: Wrapper }); + + expect(getItems(container)).toHaveLength(2); + }); +}); diff --git a/airflow-core/src/airflow/ui/src/components/DagVersionSelect.tsx b/airflow-core/src/airflow/ui/src/components/DagVersionSelect.tsx index 95d9b8870fdc4..5ec0f97eb4890 100644 --- a/airflow-core/src/airflow/ui/src/components/DagVersionSelect.tsx +++ b/airflow-core/src/airflow/ui/src/components/DagVersionSelect.tsx @@ -20,7 +20,7 @@ import { createListCollection, Flex, Select, type SelectValueChangeDetails, Text import { useTranslation } from "react-i18next"; import { useParams, useSearchParams } from "react-router-dom"; -import { useDagVersionServiceGetDagVersions } from "openapi/queries"; +import { useDagRunServiceGetDagRun, useDagVersionServiceGetDagVersions } from "openapi/queries"; import type { DagVersionResponse } from "openapi/requests/types.gen"; import { SearchParamsKeys } from "src/constants/searchParams"; import useSelectedVersion from "src/hooks/useSelectedVersion"; @@ -34,14 +34,27 @@ type VersionSelected = { export const DagVersionSelect = ({ showLabel = true }: { readonly showLabel?: boolean }) => { const { t: translate } = useTranslation("components"); - const { dagId = "" } = useParams(); + const { dagId = "", runId } = useParams(); const { data, isLoading } = useDagVersionServiceGetDagVersions({ dagId, orderBy: ["-version_number"] }); + const { data: runData } = 
useDagRunServiceGetDagRun({ dagId, dagRunId: runId ?? "" }, undefined, { + enabled: Boolean(runId), + }); const [searchParams, setSearchParams] = useSearchParams(); const selectedVersionNumber = useSelectedVersion(); - const selectedVersion = data?.dag_versions.find((dv) => dv.version_number === selectedVersionNumber); + + // When a DagRun is selected, show only that run's versions. Otherwise, show all versions. + const allVersions = data?.dag_versions ?? []; + const versions: Array = + runId !== undefined && runData + ? [...runData.dag_versions].sort( + (versionA, versionB) => versionB.version_number - versionA.version_number, + ) + : allVersions; + + const selectedVersion = versions.find((dv) => dv.version_number === selectedVersionNumber); const versionOptions = createListCollection({ - items: (data?.dag_versions ?? []).map((dv) => ({ value: dv.version_number, version: dv })), + items: versions.map((dv) => ({ value: dv.version_number, version: dv })), }); const handleStateChange = ({ items }: SelectValueChangeDetails) => { @@ -55,7 +68,7 @@ export const DagVersionSelect = ({ showLabel = true }: { readonly showLabel?: bo { diff --git a/airflow-core/src/airflow/ui/src/layouts/Details/Gantt/Gantt.tsx b/airflow-core/src/airflow/ui/src/layouts/Details/Gantt/Gantt.tsx index 71d14ccfbe78b..62de32cdb98bb 100644 --- a/airflow-core/src/airflow/ui/src/layouts/Details/Gantt/Gantt.tsx +++ b/airflow-core/src/airflow/ui/src/layouts/Details/Gantt/Gantt.tsx @@ -71,6 +71,8 @@ ChartJS.register( type Props = { readonly dagRunState?: DagRunState | undefined; readonly limit: number; + readonly runAfterGte?: string | undefined; + readonly runAfterLte?: string | undefined; readonly runType?: DagRunType | undefined; readonly triggeringUser?: string | undefined; }; @@ -79,7 +81,7 @@ const CHART_PADDING = 36; const CHART_ROW_HEIGHT = 20; const MIN_BAR_WIDTH = 10; -export const Gantt = ({ dagRunState, limit, runType, triggeringUser }: Props) => { +export const Gantt = ({ dagRunState, 
limit, runAfterGte, runAfterLte, runType, triggeringUser }: Props) => { const { dagId = "", groupId: selectedGroupId, runId = "", taskId: selectedTaskId } = useParams(); const [searchParams] = useSearchParams(); const { openGroupIds } = useOpenGroups(); @@ -114,6 +116,8 @@ export const Gantt = ({ dagRunState, limit, runType, triggeringUser }: Props) => const { data: gridRuns, isLoading: runsLoading } = useGridRuns({ dagRunState, limit, + runAfterGte, + runAfterLte, runType, triggeringUser, }); @@ -208,7 +212,7 @@ export const Gantt = ({ dagRunState, limit, runType, triggeringUser }: Props) => translate, }); - if (runId === "") { + if (runId === "" || (!isLoading && !selectedRun)) { return undefined; } diff --git a/airflow-core/src/airflow/ui/src/layouts/Details/Gantt/utils.test.ts b/airflow-core/src/airflow/ui/src/layouts/Details/Gantt/utils.test.ts new file mode 100644 index 0000000000000..b9fa6a491be54 --- /dev/null +++ b/airflow-core/src/airflow/ui/src/layouts/Details/Gantt/utils.test.ts @@ -0,0 +1,261 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import type { ChartEvent, ActiveElement } from "chart.js"; +import dayjs from "dayjs"; +import type { TFunction } from "i18next"; +import { describe, it, expect } from "vitest"; + +import type { GanttDataItem } from "./utils"; +import { createChartOptions, transformGanttData } from "./utils"; + +// eslint-disable-next-line no-empty-function, @typescript-eslint/no-empty-function +const noop = () => {}; + +const defaultChartParams = { + gridColor: "#ccc", + handleBarClick: noop as (event: ChartEvent, elements: Array) => void, + handleBarHover: noop as (event: ChartEvent, elements: Array) => void, + hoveredId: undefined, + hoveredItemColor: "#eee", + labels: ["task_1", "task_2"], + selectedId: undefined, + selectedItemColor: "#ddd", + selectedTimezone: "UTC", + translate: ((key: string) => key) as unknown as TFunction, +}; + +describe("createChartOptions", () => { + describe("x-axis scale min/max with ISO date strings", () => { + it("should compute valid min/max for completed tasks with ISO dates", () => { + const data: Array = [ + { + state: "success", + taskId: "task_1", + x: ["2024-03-14T10:00:00.000Z", "2024-03-14T10:05:00.000Z"], + y: "task_1", + }, + { + state: "success", + taskId: "task_2", + x: ["2024-03-14T10:03:00.000Z", "2024-03-14T10:10:00.000Z"], + y: "task_2", + }, + ]; + + const options = createChartOptions({ + ...defaultChartParams, + data, + selectedRun: { + dag_id: "test_dag", + duration: 600, + end_date: "2024-03-14T10:10:00+00:00", + has_missed_deadline: false, + queued_at: "2024-03-14T09:59:00+00:00", + run_after: "2024-03-14T10:00:00+00:00", + run_id: "run_1", + run_type: "manual", + start_date: "2024-03-14T10:00:00+00:00", + state: "success", + }, + }); + + const xScale = options.scales.x; + + expect(xScale.min).toBeTypeOf("number"); + expect(xScale.max).toBeTypeOf("number"); + expect(Number.isNaN(xScale.min)).toBe(false); + expect(Number.isNaN(xScale.max)).toBe(false); + // max should be slightly beyond the latest end date (5% padding) + 
expect(xScale.max).toBeGreaterThan(new Date("2024-03-14T10:10:00.000Z").getTime()); + }); + + it("should compute valid min/max for running tasks", () => { + const now = dayjs().toISOString(); + const data: Array = [ + { + state: "success", + taskId: "task_1", + x: ["2024-03-14T10:00:00.000Z", "2024-03-14T10:05:00.000Z"], + y: "task_1", + }, + { + state: "running", + taskId: "task_2", + x: ["2024-03-14T10:05:00.000Z", now], + y: "task_2", + }, + ]; + + const options = createChartOptions({ + ...defaultChartParams, + data, + selectedRun: { + dag_id: "test_dag", + duration: 0, + // eslint-disable-next-line unicorn/no-null + end_date: null, + has_missed_deadline: false, + queued_at: "2024-03-14T09:59:00+00:00", + run_after: "2024-03-14T10:00:00+00:00", + run_id: "run_1", + run_type: "manual", + start_date: "2024-03-14T10:00:00+00:00", + state: "running", + }, + }); + + const xScale = options.scales.x; + + expect(xScale.min).toBeTypeOf("number"); + expect(xScale.max).toBeTypeOf("number"); + expect(Number.isNaN(xScale.min)).toBe(false); + expect(Number.isNaN(xScale.max)).toBe(false); + }); + + it("should handle empty data with running DagRun (fallback to formatted dates)", () => { + const options = createChartOptions({ + ...defaultChartParams, + data: [], + labels: [], + selectedRun: { + dag_id: "test_dag", + duration: 0, + // eslint-disable-next-line unicorn/no-null + end_date: null, + has_missed_deadline: false, + queued_at: "2024-03-14T09:59:00+00:00", + run_after: "2024-03-14T10:00:00+00:00", + run_id: "run_1", + run_type: "manual", + start_date: "2024-03-14T10:00:00+00:00", + state: "running", + }, + }); + + const xScale = options.scales.x; + + // With empty data, min/max are formatted date strings (fallback branch) + expect(xScale.min).toBeTypeOf("string"); + expect(xScale.max).toBeTypeOf("string"); + }); + }); +}); + +describe("transformGanttData", () => { + it("should skip tasks with null start_date", () => { + const result = transformGanttData({ + allTries: [ + { 
+ // eslint-disable-next-line unicorn/no-null + end_date: null, + is_mapped: false, + // eslint-disable-next-line unicorn/no-null + start_date: null, + // eslint-disable-next-line unicorn/no-null + state: null, + task_display_name: "task_1", + task_id: "task_1", + try_number: 1, + }, + ], + flatNodes: [{ depth: 0, id: "task_1", is_mapped: false, label: "task_1" }], + gridSummaries: [], + }); + + expect(result).toHaveLength(0); + }); + + it("should include running tasks with valid start_date and use current time as end", () => { + const before = dayjs(); + const result = transformGanttData({ + allTries: [ + { + // eslint-disable-next-line unicorn/no-null + end_date: null, + is_mapped: false, + start_date: "2024-03-14T10:00:00+00:00", + state: "running", + task_display_name: "task_1", + task_id: "task_1", + try_number: 1, + }, + ], + flatNodes: [{ depth: 0, id: "task_1", is_mapped: false, label: "task_1" }], + gridSummaries: [], + }); + + expect(result).toHaveLength(1); + expect(result[0]?.state).toBe("running"); + // End time should be approximately now (ISO string) + const endTime = dayjs(result[0]?.x[1]); + + expect(endTime.valueOf()).toBeGreaterThanOrEqual(before.valueOf()); + }); + + it("should skip groups with null min_start_date or max_end_date", () => { + const result = transformGanttData({ + allTries: [], + flatNodes: [{ depth: 0, id: "group_1", is_mapped: false, isGroup: true, label: "group_1" }], + gridSummaries: [ + { + // eslint-disable-next-line unicorn/no-null + child_states: null, + // eslint-disable-next-line unicorn/no-null + max_end_date: null, + // eslint-disable-next-line unicorn/no-null + min_start_date: null, + // eslint-disable-next-line unicorn/no-null + state: null, + task_display_name: "group_1", + task_id: "group_1", + }, + ], + }); + + expect(result).toHaveLength(0); + }); + + it("should produce ISO date strings parseable by dayjs", () => { + const result = transformGanttData({ + allTries: [ + { + end_date: "2024-03-14T10:05:00+00:00", + 
is_mapped: false, + start_date: "2024-03-14T10:00:00+00:00", + state: "success", + task_display_name: "task_1", + task_id: "task_1", + try_number: 1, + }, + ], + flatNodes: [{ depth: 0, id: "task_1", is_mapped: false, label: "task_1" }], + gridSummaries: [], + }); + + expect(result).toHaveLength(1); + // x values should be valid ISO strings that dayjs can parse without NaN + const start = dayjs(result[0]?.x[0]); + const end = dayjs(result[0]?.x[1]); + + expect(start.isValid()).toBe(true); + expect(end.isValid()).toBe(true); + expect(Number.isNaN(start.valueOf())).toBe(false); + expect(Number.isNaN(end.valueOf())).toBe(false); + }); +}); diff --git a/airflow-core/src/airflow/ui/src/layouts/Details/Gantt/utils.ts b/airflow-core/src/airflow/ui/src/layouts/Details/Gantt/utils.ts index 22df4eb28cffc..fab1d1bcf773a 100644 --- a/airflow-core/src/airflow/ui/src/layouts/Details/Gantt/utils.ts +++ b/airflow-core/src/airflow/ui/src/layouts/Details/Gantt/utils.ts @@ -347,8 +347,8 @@ export const createChartOptions = ({ max: data.length > 0 ? (() => { - const maxTime = Math.max(...data.map((item) => new Date(item.x[1] ?? "").getTime())); - const minTime = Math.min(...data.map((item) => new Date(item.x[0] ?? "").getTime())); + const maxTime = Math.max(...data.map((item) => dayjs(item.x[1]).valueOf())); + const minTime = Math.min(...data.map((item) => dayjs(item.x[0]).valueOf())); const totalDuration = maxTime - minTime; // add 5% to the max time to avoid the last tick being cut off @@ -358,8 +358,8 @@ export const createChartOptions = ({ min: data.length > 0 ? (() => { - const maxTime = Math.max(...data.map((item) => new Date(item.x[1] ?? "").getTime())); - const minTime = Math.min(...data.map((item) => new Date(item.x[0] ?? 
"").getTime())); + const maxTime = Math.max(...data.map((item) => dayjs(item.x[1]).valueOf())); + const minTime = Math.min(...data.map((item) => dayjs(item.x[0]).valueOf())); const totalDuration = maxTime - minTime; // subtract 2% from min time so background color shows before data diff --git a/airflow-core/src/airflow/ui/src/pages/Connections/NothingFoundInfo.test.tsx b/airflow-core/src/airflow/ui/src/pages/Connections/NothingFoundInfo.test.tsx new file mode 100644 index 0000000000000..05197e0499e9c --- /dev/null +++ b/airflow-core/src/airflow/ui/src/pages/Connections/NothingFoundInfo.test.tsx @@ -0,0 +1,36 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import "@testing-library/jest-dom"; +import { render, screen } from "@testing-library/react"; +import { describe, it, expect } from "vitest"; + +import { Wrapper } from "src/utils/Wrapper"; + +import { NothingFoundInfo } from "./NothingFoundInfo"; + +describe("NothingFoundInfo", () => { + it("should have correct external link attributes", () => { + render(, { wrapper: Wrapper }); + + const link = screen.getByRole("link"); + + expect(link).toHaveAttribute("target", "_blank"); + expect(link).toHaveAttribute("rel", "noopener noreferrer"); + }); +}); diff --git a/airflow-core/src/airflow/ui/src/pages/Connections/NothingFoundInfo.tsx b/airflow-core/src/airflow/ui/src/pages/Connections/NothingFoundInfo.tsx index 4d1388c827413..88897e3131908 100644 --- a/airflow-core/src/airflow/ui/src/pages/Connections/NothingFoundInfo.tsx +++ b/airflow-core/src/airflow/ui/src/pages/Connections/NothingFoundInfo.tsx @@ -35,7 +35,7 @@ export const NothingFoundInfo = () => { {translate("connections.nothingFound.description")} {translate("connections.nothingFound.learnMore")}{" "} - + {translate("connections.nothingFound.documentationLink")} diff --git a/airflow-core/src/airflow/ui/src/pages/HITLTaskInstances/HITLResponseForm.test.tsx b/airflow-core/src/airflow/ui/src/pages/HITLTaskInstances/HITLResponseForm.test.tsx new file mode 100644 index 0000000000000..ef670430f4844 --- /dev/null +++ b/airflow-core/src/airflow/ui/src/pages/HITLTaskInstances/HITLResponseForm.test.tsx @@ -0,0 +1,144 @@ +/* eslint-disable unicorn/no-null */ + +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +/// +import "@testing-library/jest-dom/vitest"; +import { render, screen } from "@testing-library/react"; +import { describe, expect, it, vi } from "vitest"; + +import type { HITLDetailHistory, TaskInstanceHistoryResponse } from "openapi/requests/types.gen"; +import { Wrapper } from "src/utils/Wrapper"; + +import { HITLResponseForm } from "./HITLResponseForm"; + +// --------------------------------------------------------------------------- +// Mocks +// --------------------------------------------------------------------------- +vi.mock("react-i18next", () => ({ + useTranslation: () => ({ + // eslint-disable-next-line id-length + t: (key: string) => key, + }), +})); + +vi.mock("src/queries/useUpdateHITLDetail", () => ({ + useUpdateHITLDetail: () => ({ updateHITLResponse: vi.fn() }), +})); + +// --------------------------------------------------------------------------- +// Fixtures +// --------------------------------------------------------------------------- +const MOCK_TASK_INSTANCE = { + dag_id: "test_dag", + dag_run_id: "run_1", + map_index: -1, + state: "deferred", + task_id: "test_task", +} as TaskInstanceHistoryResponse; + +const makeHITLDetail = ( + options: Array, + overrides: Partial = {}, +): { task_instance: TaskInstanceHistoryResponse } & Omit => ({ + assigned_users: [], + body: "Please pick one.", + chosen_options: [], + created_at: new Date().toISOString(), + defaults: null, + multiple: false, + options, + params: {}, + params_input: {}, + responded_at: null, + responded_by_user: null, + response_received: false, + subject: 
"Test subject", + task_instance: MOCK_TASK_INSTANCE, + ...overrides, +}); + +const renderForm = (options: Array, overrides?: Partial) => + render(, { + wrapper: Wrapper, + }); + +// --------------------------------------------------------------------------- +// Tests — option-button rendering boundary +// +// HITLResponseForm renders options one of two ways: +// shouldRenderOptionButton=true → one Button per option (data-testid="hitl-option-") +// shouldRenderOptionButton=false → a generic "Respond" button, no per-option buttons +// +// Bug (#64413): condition was `options.length < 4`, so with exactly 4 options +// shouldRenderOptionButton was false and the footer rendered nothing at all. +// Fix: change to `options.length <= 4`. +// --------------------------------------------------------------------------- +describe("HITLResponseForm – option button rendering boundary", () => { + it("renders per-option buttons for 1 option", () => { + renderForm(["Only"]); + expect(screen.getByTestId("hitl-option-Only")).toBeInTheDocument(); + }); + + it("renders per-option buttons for 2 options", () => { + renderForm(["Yes", "No"]); + expect(screen.getByTestId("hitl-option-Yes")).toBeInTheDocument(); + expect(screen.getByTestId("hitl-option-No")).toBeInTheDocument(); + }); + + it("renders per-option buttons for 3 options", () => { + const opts = ["Creator", "Explorer", "Viewer"]; + + renderForm(opts); + for (const opt of opts) { + expect(screen.getByTestId(`hitl-option-${opt}`)).toBeInTheDocument(); + } + }); + + // Regression test for #64413 — exactly 4 options previously rendered nothing. 
+ it("renders per-option buttons for exactly 4 options", () => { + const opts = ["Creator", "Explorer", "ExplorerCanPublish", "Viewer"]; + + renderForm(opts); + for (const opt of opts) { + expect(screen.getByTestId(`hitl-option-${opt}`)).toBeInTheDocument(); + } + }); + + it("does NOT render per-option buttons for 5 options", () => { + const opts = ["A", "B", "C", "D", "E"]; + + renderForm(opts); + for (const opt of opts) { + expect(screen.queryByTestId(`hitl-option-${opt}`)).not.toBeInTheDocument(); + } + }); + + it("does NOT render per-option buttons when multiple=true", () => { + renderForm(["A", "B"], { multiple: true }); + expect(screen.queryByTestId("hitl-option-A")).not.toBeInTheDocument(); + expect(screen.queryByTestId("hitl-option-B")).not.toBeInTheDocument(); + }); + + it("renders Approve and Reject buttons for a 2-option approval task", () => { + renderForm(["Approve", "Reject"]); + expect(screen.getByTestId("hitl-option-Approve")).toBeInTheDocument(); + expect(screen.getByTestId("hitl-option-Reject")).toBeInTheDocument(); + }); +}); diff --git a/airflow-core/src/airflow/ui/src/pages/HITLTaskInstances/HITLResponseForm.tsx b/airflow-core/src/airflow/ui/src/pages/HITLTaskInstances/HITLResponseForm.tsx index 057cb032abe46..b12f78a4080ae 100644 --- a/airflow-core/src/airflow/ui/src/pages/HITLTaskInstances/HITLResponseForm.tsx +++ b/airflow-core/src/airflow/ui/src/pages/HITLTaskInstances/HITLResponseForm.tsx @@ -68,7 +68,7 @@ export const HITLResponseForm = ({ hitlDetail }: HITLResponseFormProps) => { hitlDetail.options.length === 2; const shouldRenderOptionButton = - hitlDetail.options.length < 4 && !hitlDetail.multiple && preloadedHITLOptions.length === 0; + hitlDetail.options.length <= 4 && !hitlDetail.multiple && preloadedHITLOptions.length === 0; const isPending = hitlDetail.task_instance.state === "deferred"; diff --git a/airflow-core/src/airflow/ui/src/pages/Security.tsx b/airflow-core/src/airflow/ui/src/pages/Security.tsx index 
c3b0fb89309d4..9a56996dcbcba 100644 --- a/airflow-core/src/airflow/ui/src/pages/Security.tsx +++ b/airflow-core/src/airflow/ui/src/pages/Security.tsx @@ -17,6 +17,7 @@ * under the License. */ import { Box } from "@chakra-ui/react"; +import { useRef } from "react"; import { useNavigate, useParams } from "react-router-dom"; import { useAuthLinksServiceGetAuthMenus } from "openapi/queries"; @@ -38,14 +39,25 @@ export const Security = () => { const link = authLinks?.extra_menu_items.find((mi) => mi.text.toLowerCase().replace(" ", "-") === page); const navigate = useNavigate(); + // Track when we are already redirecting so that setting iframe.src = "about:blank" + // (which fires another onLoad event) does not trigger a second navigate call. + const isRedirecting = useRef(false); const onLoad = () => { + if (isRedirecting.current) { + return; + } + const iframe: HTMLIFrameElement | null = document.querySelector("#security-iframe"); if (iframe?.contentWindow) { const base = new URL(document.baseURI).pathname.replace(/\/$/u, ""); // Remove trailing slash if exists if (!iframe.contentWindow.location.pathname.startsWith(`${base}/auth/`)) { + // Clear the iframe immediately so that the React app does not render its own + // navigation sidebar inside the iframe, which would produce a duplicate nav bar. 
+ isRedirecting.current = true; + iframe.src = "about:blank"; void navigate("/"); } } diff --git a/airflow-core/src/airflow/ui/src/pages/TaskInstance/Logs/Logs.tsx b/airflow-core/src/airflow/ui/src/pages/TaskInstance/Logs/Logs.tsx index 681a92e68e3d7..c37b1e0b0525f 100644 --- a/airflow-core/src/airflow/ui/src/pages/TaskInstance/Logs/Logs.tsx +++ b/airflow-core/src/airflow/ui/src/pages/TaskInstance/Logs/Logs.tsx @@ -116,7 +116,10 @@ export const Logs = () => { ); }; - const getLogString = () => getParsedLogs().join("\n"); + const getLogString = () => + getParsedLogs() + .filter((line) => line !== "") + .join("\n"); const downloadLogs = () => { const logContent = getLogString(); diff --git a/airflow-core/src/airflow/ui/src/pages/TaskInstance/Logs/logDownloadContent.test.ts b/airflow-core/src/airflow/ui/src/pages/TaskInstance/Logs/logDownloadContent.test.ts new file mode 100644 index 0000000000000..84ccfbeaddbf0 --- /dev/null +++ b/airflow-core/src/airflow/ui/src/pages/TaskInstance/Logs/logDownloadContent.test.ts @@ -0,0 +1,131 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import type { TFunction } from "i18next"; +import { describe, expect, it } from "vitest"; + +import type { TaskInstancesLogResponse } from "openapi/requests/types.gen"; +import { renderStructuredLog } from "src/components/renderStructuredLog"; +import { parseStreamingLogContent } from "src/utils/logs"; + +/** Same construction as Logs.tsx getLogString (download path). */ +const logStringForDownload = ( + fetchedData: TaskInstancesLogResponse | undefined, + logLevelFilters: Array, + translate: TFunction, +) => + parseStreamingLogContent(fetchedData) + .map((line) => + renderStructuredLog({ + index: 0, + logLevelFilters, + logLink: "", + logMessage: line, + renderingMode: "text", + showSource: false, + showTimestamp: true, + sourceFilters: [], + translate, + }), + ) + .filter((line) => line !== "") + .join("\n"); + +describe("Task log download content (log level filter)", () => { + const translate = ((key: string) => key) as unknown as TFunction; + + it("is empty when every structured line is excluded by the level filter", () => { + const fetchedData: TaskInstancesLogResponse = { + content: [ + { + event: "hello", + level: "info", + logger: "task.stdout", + timestamp: "2025-09-11T17:44:52.597476Z", + }, + ], + continuation_token: null, + }; + + const text = logStringForDownload(fetchedData, ["error"], translate); + + expect(text).toBe(""); + }); + + it("is empty when structured lines have no level and any log level filter is set", () => { + const fetchedData: TaskInstancesLogResponse = { + content: [ + { + event: "[timestamp] {file.py:1} INFO - legacy line without level field", + timestamp: "2025-02-28T10:49:09.679000+05:30", + }, + ], + continuation_token: null, + }; + + const text = logStringForDownload(fetchedData, ["info"], translate); + + expect(text).toBe(""); + }); + + it("does not prefix the download with newlines when earlier lines are filtered out", () => { + const fetchedData: TaskInstancesLogResponse = { + content: [ + { + event: 
"hidden-group-marker", + level: "debug", + logger: "task.stdout", + timestamp: "2025-09-11T17:44:52.597476Z", + }, + { + event: "visible-line", + level: "info", + logger: "task.stdout", + timestamp: "2025-09-11T17:44:52.597500Z", + }, + ], + continuation_token: null, + }; + + const text = logStringForDownload(fetchedData, ["info"], translate); + + expect(text.startsWith("\n")).toBe(false); + expect(text).toContain("visible-line"); + expect(text).not.toContain("hidden-group-marker"); + }); + + it("includes matching structured lines when the filter matches level", () => { + const fetchedData: TaskInstancesLogResponse = { + content: [ + { + event: "hello", + level: "info", + logger: "task.stdout", + timestamp: "2025-09-11T17:44:52.597476Z", + }, + ], + continuation_token: null, + }; + + const text = logStringForDownload(fetchedData, ["info"], translate); + + expect(text.length).toBeGreaterThan(0); + expect(text).toContain("hello"); + expect(text).toContain("INFO"); + }); +}); diff --git a/airflow-core/src/airflow/ui/src/theme.ts b/airflow-core/src/airflow/ui/src/theme.ts index fc9c07a99b892..15b34dda2c9dc 100644 --- a/airflow-core/src/airflow/ui/src/theme.ts +++ b/airflow-core/src/airflow/ui/src/theme.ts @@ -406,16 +406,20 @@ const defaultAirflowTheme = { export const createTheme = (userTheme?: Theme) => { const defaultAirflowConfig = defineConfig({ theme: defaultAirflowTheme }); - const userConfig = defineConfig( - userTheme - ? { - theme: { tokens: userTheme.tokens }, + const userConfig = userTheme + ? defineConfig({ + ...(userTheme.tokens !== undefined && { + theme: { tokens: userTheme.tokens as Record }, + }), + ...(userTheme.globalCss !== undefined && { globalCss: userTheme.globalCss as Record, - } - : {}, - ); + }), + }) + : undefined; - const mergedConfig = mergeConfigs(defaultConfig, defaultAirflowConfig, userConfig); + const mergedConfig = userConfig + ? 
mergeConfigs(defaultConfig, defaultAirflowConfig, userConfig) + : mergeConfigs(defaultConfig, defaultAirflowConfig); return createSystem(mergedConfig); }; diff --git a/airflow-core/src/airflow/utils/log/non_caching_file_handler.py b/airflow-core/src/airflow/utils/log/non_caching_file_handler.py index aa0ca9864e2ea..ad3c0dbe27974 100644 --- a/airflow-core/src/airflow/utils/log/non_caching_file_handler.py +++ b/airflow-core/src/airflow/utils/log/non_caching_file_handler.py @@ -25,7 +25,7 @@ def make_file_io_non_caching(io: IO[str]) -> IO[str]: try: fd = io.fileno() - os.posix_fadvise(fd, 0, 0, os.POSIX_FADV_DONTNEED) + os.posix_fadvise(fd, 0, 0, os.POSIX_FADV_DONTNEED) # type: ignore[attr-defined] except Exception: # in case either file descriptor cannot be retrieved or fadvise is not available # we should simply return the wrapper retrieved by FileHandler's open method diff --git a/airflow-core/tests/unit/always/test_providers_manager.py b/airflow-core/tests/unit/always/test_providers_manager.py index 580676d18b3bf..afa473e80a4f0 100644 --- a/airflow-core/tests/unit/always/test_providers_manager.py +++ b/airflow-core/tests/unit/always/test_providers_manager.py @@ -428,6 +428,14 @@ def test_load_ui_for_http_provider(self): assert "relabeling" in behaviour assert "placeholders" in behaviour + def test_iter_connection_type_hook_ui_metadata_matches_field_behaviours(self): + """iter_connection_type_hook_ui_metadata should expose the same standard-field behaviour dict.""" + pm = ProvidersManager() + pm.initialize_providers_hooks() + by_type = {m.connection_type: m for m in pm.iter_connection_type_hook_ui_metadata()} + assert "http" in by_type + assert by_type["http"].field_behaviour == pm._field_behaviours["http"] + def test_ui_metadata_loading_without_hook_import(self): """Test that UI metadata loads from provider info without importing hook classes.""" with patch("airflow.providers_manager.import_string") as mock_import: diff --git 
a/airflow-core/tests/unit/api_fastapi/common/test_types.py b/airflow-core/tests/unit/api_fastapi/common/test_types.py index 3476f6a529394..da17ca505cc76 100644 --- a/airflow-core/tests/unit/api_fastapi/common/test_types.py +++ b/airflow-core/tests/unit/api_fastapi/common/test_types.py @@ -147,7 +147,7 @@ def test_invalid_shade_key_rejected(self): def test_serialization_excludes_none_fields(self): colors = ThemeColors.model_validate({"brand": _BRAND_SCALE}) - dumped = colors.model_dump() + dumped = colors.model_dump(exclude_none=True) assert "brand" in dumped assert "gray" not in dumped assert "black" not in dumped @@ -200,10 +200,37 @@ def test_empty_colors_rejected(self): def test_serialization_round_trip(self): """Verify None color fields are excluded and OklchColor values are serialized as strings.""" theme = Theme.model_validate({"tokens": {"colors": {"brand": _BRAND_SCALE}}}) - dumped = theme.model_dump() + dumped = theme.model_dump(exclude_none=True) colors = dumped["tokens"]["colors"] assert "brand" in colors assert "gray" not in colors assert "black" not in colors assert "white" not in colors assert colors["brand"]["50"]["value"] == "oklch(0.975 0.007 298.0)" + + def test_globalcss_only_theme(self): + """tokens is optional; globalCss alone is sufficient.""" + theme = Theme.model_validate({"globalCss": {"button": {"text-transform": "uppercase"}}}) + assert theme.tokens is None + assert theme.globalCss == {"button": {"text-transform": "uppercase"}} + + def test_icon_only_theme(self): + """tokens is optional; an icon URL alone is sufficient.""" + theme = Theme.model_validate({"icon": "https://example.com/logo.svg"}) + assert theme.tokens is None + assert theme.icon == "https://example.com/logo.svg" + + def test_empty_theme(self): + """An empty theme object is valid — it means 'use OSS defaults'.""" + theme = Theme.model_validate({}) + assert theme.tokens is None + assert theme.globalCss is None + assert theme.icon is None + assert theme.icon_dark_mode is None 
+ + def test_theme_serialization_excludes_none_tokens(self): + """When tokens is None it must not appear in the serialized output.""" + theme = Theme.model_validate({"globalCss": {"a": {"color": "red"}}}) + dumped = theme.model_dump(exclude_none=True) + assert "tokens" not in dumped + assert dumped == {"globalCss": {"a": {"color": "red"}}} diff --git a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_task_instances.py b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_task_instances.py index ba8f06a742459..76767a96094b1 100644 --- a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_task_instances.py +++ b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_task_instances.py @@ -21,7 +21,7 @@ import itertools import os from datetime import timedelta -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from unittest import mock import pendulum @@ -146,7 +146,7 @@ def create_task_instances( assert dag_version for mi in map_indexes: - kwargs = self.ti_init | {"map_index": mi} + kwargs: dict[str, Any] = self.ti_init | {"map_index": mi} ti = TaskInstance(task=tasks[i], **kwargs, dag_version_id=dag_version.id) session.add(ti) ti.dag_run = dr diff --git a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_tasks.py b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_tasks.py index dd85ad1c325dd..0dacd19397eae 100644 --- a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_tasks.py +++ b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_tasks.py @@ -543,10 +543,22 @@ def test_should_raise_400_for_invalid_order_by_name(self, test_client): f"{self.api_prefix}/{self.dag_id}/tasks?order_by=invalid_task_colume_name", ) assert response.status_code == 400 - assert ( - response.json()["detail"] == "'EmptyOperator' object has no attribute 'invalid_task_colume_name'" + assert response.json()["detail"] == ( + "Ordering with 'invalid_task_colume_name' is 
disallowed or " + "the attribute does not exist on the model" ) + def test_should_respond_200_order_by_start_date_with_none(self, test_client): + """Sorting by a nullable field should not raise TypeError (issue #63927).""" + response = test_client.get( + f"{self.api_prefix}/{self.unscheduled_dag_id}/tasks?order_by=start_date", + ) + assert response.status_code == 200 + tasks = response.json()["tasks"] + assert len(tasks) == 2 + # All start_dates are None for unscheduled tasks; verify they sort without error + assert all(t["start_date"] is None for t in tasks) + def test_should_respond_404(self, test_client): dag_id = "xxxx_not_existing" response = test_client.get(f"{self.api_prefix}/{dag_id}/tasks") diff --git a/airflow-core/tests/unit/api_fastapi/core_api/routes/ui/test_config.py b/airflow-core/tests/unit/api_fastapi/core_api/routes/ui/test_config.py index dbc3c0eb64937..8b9982fc47b91 100644 --- a/airflow-core/tests/unit/api_fastapi/core_api/routes/ui/test_config.py +++ b/airflow-core/tests/unit/api_fastapi/core_api/routes/ui/test_config.py @@ -136,6 +136,19 @@ def mock_config_data_all_colors(): yield +THEME_CSS_ONLY = { + "globalCss": { + "button": {"text-transform": "uppercase"}, + } +} + + +@pytest.fixture +def mock_config_data_css_only(): + with conf_vars(_theme_conf_vars(THEME_CSS_ONLY)): + yield + + class TestGetConfig: def test_should_response_200(self, mock_config_data, test_client): """ @@ -170,3 +183,14 @@ def test_should_response_200_with_all_color_tokens(self, mock_config_data_all_co assert "white" in colors assert colors["black"] == {"value": "oklch(0.22 0.025 288.6)"} assert colors["white"] == {"value": "oklch(0.985 0.002 264.0)"} + + def test_should_response_200_with_css_only_theme(self, mock_config_data_css_only, test_client): + """Theme with only globalCss (no tokens) is valid and round-trips correctly.""" + response = test_client.get("/config") + + assert response.status_code == 200 + theme = response.json()["theme"] + assert "tokens" not in 
theme + assert theme["globalCss"] == {"button": {"text-transform": "uppercase"}} + assert "icon" not in theme + assert "icon_dark_mode" not in theme diff --git a/airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_task_instances.py b/airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_task_instances.py index 7f766ede71e4d..c6135711be9bf 100644 --- a/airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_task_instances.py +++ b/airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_task_instances.py @@ -1832,6 +1832,40 @@ def test_ti_heartbeat_non_existent_task(self, client, session, create_task_insta "message": "Task Instance not found", } + def test_ti_heartbeat_cleared_task_returns_410(self, client, session, create_task_instance): + """Test that a 410 error is returned when a TI was cleared and moved to TIH.""" + ti = create_task_instance( + task_id="test_ti_heartbeat_cleared", + state=State.RUNNING, + hostname="random-hostname", + pid=1547, + session=session, + ) + session.commit() + old_ti_id = ti.id + + # Simulate task being cleared: this archives the current try to TIH + # and assigns a new UUID to the TI, mirroring prepare_db_for_next_try(). 
+ ti.prepare_db_for_next_try(session) + session.commit() + + assert session.get(TaskInstance, old_ti_id) is None + tih = session.scalar( + select(TaskInstanceHistory).where(TaskInstanceHistory.task_instance_id == old_ti_id) + ) + assert tih is not None + + response = client.put( + f"/execution/task-instances/{old_ti_id}/heartbeat", + json={"hostname": "random-hostname", "pid": 1547}, + ) + + assert response.status_code == 410 + assert response.json()["detail"] == { + "reason": "not_found", + "message": "Task Instance not found, it may have been moved to the Task Instance History table", + } + @pytest.mark.parametrize( "ti_state", [State.SUCCESS, State.FAILED], @@ -2336,6 +2370,7 @@ def add_one(x): ("map_index", "dynamic_task_args", "task_ids", "task_group_name", "expected_count"), ( pytest.param(None, [1, 2, 3], None, None, 5, id="use-default-map-index-None"), + pytest.param(0, [1, 2, 3], None, None, 1, id="with-map-index-0-no-task-group"), pytest.param(-1, [1, 2, 3], ["task1"], None, 1, id="with-task-ids-and-map-index-(-1)"), pytest.param(None, [1, 2, 3], None, "group1", 4, id="with-task-group-id-and-map-index-None"), pytest.param(0, [1, 2, 3], None, "group1", 1, id="with-task-group-id-and-map-index-0"), @@ -2853,6 +2888,15 @@ def add_one(x): }, id="with-default-map-index-None", ), + pytest.param( + 0, + [1, 2, 3], + None, + None, + {"-1": State.SUCCESS, "0": State.FAILED, "1": State.SUCCESS, "2": State.SUCCESS}, + {"group1.add_one_0": "failed"}, + id="with-map-index-0-no-task-group", + ), pytest.param( -1, [1, 2, 3], diff --git a/airflow-core/tests/unit/api_fastapi/execution_api/versions/v2026_04_06/test_task_instances.py b/airflow-core/tests/unit/api_fastapi/execution_api/versions/v2026_04_06/test_task_instances.py index 8117ac6b69c61..a914ac6c6e563 100644 --- a/airflow-core/tests/unit/api_fastapi/execution_api/versions/v2026_04_06/test_task_instances.py +++ b/airflow-core/tests/unit/api_fastapi/execution_api/versions/v2026_04_06/test_task_instances.py @@ -20,6 
+20,7 @@ import pytest from airflow._shared.timezones import timezone +from airflow.serialization.serialized_objects import BaseSerialization from airflow.utils.state import DagRunState, State from tests_common.test_utils.db import clear_db_runs @@ -125,3 +126,129 @@ def test_old_version_preserves_real_start_date( assert response.status_code == 200 assert dag_run["start_date"] is not None, "start_date should not be None when DagRun has started" assert dag_run["start_date"] == TIMESTAMP.isoformat().replace("+00:00", "Z") + + +class TestNextKwargsBackwardCompat: + """Old workers only know BaseSerialization.deserialize -- SDK serde plain dicts cause KeyError.""" + + @pytest.fixture(autouse=True) + def _freeze_time(self, time_machine): + time_machine.move_to(TIMESTAMP_STR, tick=False) + + def setup_method(self): + clear_db_runs() + + def teardown_method(self): + clear_db_runs() + + def test_old_version_gets_base_serialization_format(self, old_ver_client, session, create_task_instance): + """Old API version receives next_kwargs wrapped in __type/__var so BaseSerialization can parse it.""" + ti = create_task_instance( + task_id="test_next_kwargs_compat", + state=State.QUEUED, + session=session, + start_date=TIMESTAMP, + ) + # Store SDK serde format (plain dict) in DB -- this is what trigger.py handle_event_submit produces + ti.next_method = "execute_complete" + ti.next_kwargs = {"cheesecake": True, "event": "payload"} + session.commit() + + response = old_ver_client.patch(f"/execution/task-instances/{ti.id}/run", json=RUN_PATCH_BODY) + + assert response.status_code == 200 + next_kwargs = response.json()["next_kwargs"] + # Old workers call BaseSerialization.deserialize on this -- verify it works + result = BaseSerialization.deserialize(next_kwargs) + assert result == {"cheesecake": True, "event": "payload"} + + def test_old_version_deserializes_complex_types(self, old_ver_client, session, create_task_instance): + """Non-primitive values (datetime) must round-trip through 
serde -> BaseSerialization correctly.""" + from airflow.sdk.serde import serialize as serde_serialize + + original = {"event": TIMESTAMP, "simple": True} + # Store SDK serde format with a datetime -- this is what handle_event_submit produces + # when the trigger payload contains a datetime (e.g. DateTimeSensorAsync) + serde_encoded = serde_serialize(original) + + ti = create_task_instance( + task_id="test_next_kwargs_datetime", + state=State.QUEUED, + session=session, + start_date=TIMESTAMP, + ) + ti.next_method = "execute_complete" + ti.next_kwargs = serde_encoded + session.commit() + + response = old_ver_client.patch(f"/execution/task-instances/{ti.id}/run", json=RUN_PATCH_BODY) + + assert response.status_code == 200 + next_kwargs = response.json()["next_kwargs"] + result = BaseSerialization.deserialize(next_kwargs) + assert result["simple"] is True + # datetime must come back as a datetime, not a {"__classname__": ...} dict + assert result["event"] == TIMESTAMP + + def test_old_version_handles_already_base_serialization_in_db( + self, old_ver_client, session, create_task_instance + ): + """Rolling upgrade: DB still has BaseSerialization format from old handle_event_submit.""" + ti = create_task_instance( + task_id="test_next_kwargs_already_base", + state=State.QUEUED, + session=session, + start_date=TIMESTAMP, + ) + ti.next_method = "execute_complete" + # Pre-upgrade data: BaseSerialization format already in DB + ti.next_kwargs = BaseSerialization.serialize({"cheesecake": True, "event": "payload"}) + session.commit() + + response = old_ver_client.patch(f"/execution/task-instances/{ti.id}/run", json=RUN_PATCH_BODY) + + assert response.status_code == 200 + next_kwargs = response.json()["next_kwargs"] + # Should still be parseable by old workers + result = BaseSerialization.deserialize(next_kwargs) + assert result == {"cheesecake": True, "event": "payload"} + + def test_old_version_handles_submit_failure_plain_dict( + self, old_ver_client, session, 
create_task_instance + ): + """submit_failure and scheduler timeout write raw plain dicts -- converter must handle those too.""" + ti = create_task_instance( + task_id="test_next_kwargs_failure", + state=State.QUEUED, + session=session, + start_date=TIMESTAMP, + ) + ti.next_method = "__fail__" + # This is what submit_failure / scheduler timeout writes -- plain dict, no wrapping + ti.next_kwargs = {"error": "Trigger timeout"} + session.commit() + + response = old_ver_client.patch(f"/execution/task-instances/{ti.id}/run", json=RUN_PATCH_BODY) + + assert response.status_code == 200 + next_kwargs = response.json()["next_kwargs"] + result = BaseSerialization.deserialize(next_kwargs) + assert result == {"error": "Trigger timeout"} + + def test_head_version_returns_raw_serde_format(self, client, session, create_task_instance): + """Head API version returns next_kwargs as-is (SDK serde format).""" + ti = create_task_instance( + task_id="test_next_kwargs_head", + state=State.QUEUED, + session=session, + start_date=TIMESTAMP, + ) + ti.next_method = "execute_complete" + ti.next_kwargs = {"cheesecake": True, "event": "payload"} + session.commit() + + response = client.patch(f"/execution/task-instances/{ti.id}/run", json=RUN_PATCH_BODY) + + assert response.status_code == 200 + # Head version gets the plain dict directly -- no BaseSerialization wrapping + assert response.json()["next_kwargs"] == {"cheesecake": True, "event": "payload"} diff --git a/airflow-core/tests/unit/dag_processing/test_collection.py b/airflow-core/tests/unit/dag_processing/test_collection.py index 77dc1318b0510..6a0aef00eaa8e 100644 --- a/airflow-core/tests/unit/dag_processing/test_collection.py +++ b/airflow-core/tests/unit/dag_processing/test_collection.py @@ -182,6 +182,94 @@ def test_add_asset_trigger_references( asset_model = session.scalars(select(AssetModel)).one() assert len(asset_model.triggers) == expected_num_triggers + @pytest.mark.usefixtures("testing_dag_bundle") + def 
test_add_asset_trigger_references_hash_consistency(self, dag_maker, session): + """Trigger hash from the DAG-parsed path must equal the hash computed + from the DB-stored Trigger row. A mismatch causes the scheduler to + recreate trigger rows on every heartbeat. + """ + from airflow.models.trigger import Trigger + from airflow.serialization.encoders import encode_trigger + from airflow.triggers.base import BaseEventTrigger + + trigger = FileDeleteTrigger(filepath="/tmp/test.txt", poke_interval=5.0) + asset = Asset( + "test_hash_consistency_asset", + watchers=[AssetWatcher(name="file_watcher", trigger=trigger)], + ) + + with dag_maker(dag_id="test_hash_consistency_dag", schedule=[asset]) as dag: + EmptyOperator(task_id="mytask") + + dags = {dag.dag_id: LazyDeserializedDAG.from_dag(dag)} + orm_dags = DagModelOperation(dags, "testing", None).add_dags(session=session) + orm_dags[dag.dag_id].is_paused = False + + asset_op = AssetModelOperation.collect(dags) + orm_assets = asset_op.sync_assets(session=session) + session.flush() + + asset_op.add_dag_asset_references(orm_dags, orm_assets, session=session) + asset_op.activate_assets_if_possible(orm_assets.values(), session=session) + asset_op.add_asset_trigger_references(orm_assets, session=session) + session.flush() + + # DAG-side hash (same computation as add_asset_trigger_references line 1025) + encoded = encode_trigger(trigger) + dag_hash = BaseEventTrigger.hash(encoded["classpath"], encoded["kwargs"]) + + # DB-side: expire and re-load the Trigger row to force a real DB read + asset_model = session.scalars(select(AssetModel)).one() + assert len(asset_model.triggers) == 1 + orm_trigger = asset_model.triggers[0] + trigger_id = orm_trigger.id + session.expire(orm_trigger) + reloaded = session.get(Trigger, trigger_id) + + # DB-side hash (same computation as add_asset_trigger_references line 1033) + db_hash = BaseEventTrigger.hash(reloaded.classpath, reloaded.kwargs) + + assert dag_hash == db_hash + + 
@pytest.mark.usefixtures("testing_dag_bundle") + def test_add_asset_trigger_references_idempotent(self, dag_maker, session): + """Calling add_asset_trigger_references twice with the same trigger + must not create duplicate rows. + """ + from airflow.models.trigger import Trigger + + trigger = FileDeleteTrigger(filepath="/tmp/test.txt", poke_interval=5.0) + asset = Asset( + "test_idempotent_asset", + watchers=[AssetWatcher(name="file_watcher", trigger=trigger)], + ) + + with dag_maker(dag_id="test_idempotent_dag", schedule=[asset]) as dag: + EmptyOperator(task_id="mytask") + + dags = {dag.dag_id: LazyDeserializedDAG.from_dag(dag)} + orm_dags = DagModelOperation(dags, "testing", None).add_dags(session=session) + orm_dags[dag.dag_id].is_paused = False + + asset_op = AssetModelOperation.collect(dags) + orm_assets = asset_op.sync_assets(session=session) + session.flush() + + asset_op.add_dag_asset_references(orm_dags, orm_assets, session=session) + asset_op.activate_assets_if_possible(orm_assets.values(), session=session) + + # First call — creates the trigger + asset_op.add_asset_trigger_references(orm_assets, session=session) + session.flush() + count_after_first = session.scalar(select(func.count(Trigger.id))) + + # Second call — should be a no-op (hashes match, no diff) + asset_op.add_asset_trigger_references(orm_assets, session=session) + session.flush() + count_after_second = session.scalar(select(func.count(Trigger.id))) + + assert count_after_first == count_after_second + @pytest.mark.parametrize( ("schedule", "model", "columns", "expected"), [ diff --git a/airflow-core/tests/unit/executors/test_local_executor.py b/airflow-core/tests/unit/executors/test_local_executor.py index 59afffe6833fe..af6507d26420c 100644 --- a/airflow-core/tests/unit/executors/test_local_executor.py +++ b/airflow-core/tests/unit/executors/test_local_executor.py @@ -268,7 +268,7 @@ def test_execution_api_server_url_config(self, mock_supervise, conf_values, expe with 
conf_vars(conf_values): team_conf = ExecutorConf(team_name=None) - _execute_work(log=mock.ANY, workload=mock.MagicMock(), team_conf=team_conf) + _execute_work(log=mock.MagicMock(), workload=mock.MagicMock(), team_conf=team_conf) mock_supervise.assert_called_with( ti=mock.ANY, @@ -277,6 +277,7 @@ def test_execution_api_server_url_config(self, mock_supervise, conf_values, expe token=mock.ANY, server=expected_server, log_path=mock.ANY, + subprocess_logs_to_stdout=True, ) @mock.patch("airflow.sdk.execution_time.supervisor.supervise") @@ -303,7 +304,7 @@ def test_team_and_global_config_isolation(self, mock_supervise): with conf_vars(config_overrides): # Test team-specific config team_conf = ExecutorConf(team_name=team_name) - _execute_work(log=mock.ANY, workload=mock.MagicMock(), team_conf=team_conf) + _execute_work(log=mock.MagicMock(), workload=mock.MagicMock(), team_conf=team_conf) # Verify team-specific server URL was used assert mock_supervise.call_count == 1 @@ -314,7 +315,7 @@ def test_team_and_global_config_isolation(self, mock_supervise): # Test global config (no team) global_conf = ExecutorConf(team_name=None) - _execute_work(log=mock.ANY, workload=mock.MagicMock(), team_conf=global_conf) + _execute_work(log=mock.MagicMock(), workload=mock.MagicMock(), team_conf=global_conf) # Verify default server URL was used assert mock_supervise.call_count == 1 diff --git a/airflow-core/tests/unit/models/test_dag.py b/airflow-core/tests/unit/models/test_dag.py index 046c85ea79901..00a0f1a1ef2da 100644 --- a/airflow-core/tests/unit/models/test_dag.py +++ b/airflow-core/tests/unit/models/test_dag.py @@ -33,7 +33,7 @@ import pendulum import pytest import time_machine -from sqlalchemy import delete, inspect, select, update +from sqlalchemy import delete, func, inspect, select, update from airflow import settings from airflow._shared.module_loading import qualname @@ -2047,6 +2047,134 @@ def test_dags_needing_dagruns_assets(self, dag_maker, session): dag_models = query.all() 
assert dag_models == [dag_model] + def test_dags_needing_dagruns_skips_adrq_when_serialized_dag_missing( + self, session, caplog, testing_dag_bundle + ): + """ADRQ rows for a Dag without SerializedDagModel must be skipped (no triggered_date_by_dag). + + Rows must remain in ``asset_dag_run_queue`` so the scheduler can re-evaluate on a later run once + ``SerializedDagModel`` exists (``dags_needing_dagruns`` only drops them from the in-memory + candidate set, it does not delete ORM rows). + """ + orphan_dag_id = "adrq_no_serialized_dag" + orphan_uri = "test://asset_for_orphan_adrq" + session.add(AssetModel(uri=orphan_uri)) + session.flush() + asset_id = session.scalar(select(AssetModel.id).where(AssetModel.uri == orphan_uri)) + + dag_model = DagModel( + dag_id=orphan_dag_id, + bundle_name="testing", + max_active_tasks=1, + has_task_concurrency_limits=False, + max_consecutive_failed_dag_runs=0, + next_dagrun=timezone.datetime(2038, 1, 1), + next_dagrun_create_after=timezone.datetime(2038, 1, 2), + is_stale=False, + has_import_errors=False, + is_paused=False, + asset_expression={"any": [{"uri": orphan_uri}]}, + ) + session.add(dag_model) + session.flush() + + session.add(AssetDagRunQueue(asset_id=asset_id, target_dag_id=orphan_dag_id)) + session.flush() + + with caplog.at_level(logging.DEBUG, logger="airflow.models.dag"): + _query, triggered_date_by_dag = DagModel.dags_needing_dagruns(session) + + assert orphan_dag_id not in triggered_date_by_dag + assert ( + "Dags have queued asset events (ADRQ), but are not found in the serialized_dag table." 
+ in caplog.text + ) + assert orphan_dag_id in caplog.text + assert ( + session.scalar( + select(func.count()) + .select_from(AssetDagRunQueue) + .where(AssetDagRunQueue.target_dag_id == orphan_dag_id) + ) + == 1 + ) + + def test_dags_needing_dagruns_missing_serialized_debug_lists_sorted_dag_ids( + self, session, caplog, testing_dag_bundle + ): + """When multiple dags lack SerializedDagModel, the debug log lists dag_ids sorted.""" + session.add_all( + [ + AssetModel(uri="test://ds_ghost_z"), + AssetModel(uri="test://ds_ghost_a"), + ] + ) + session.flush() + id_z = session.scalar(select(AssetModel.id).where(AssetModel.uri == "test://ds_ghost_z")) + id_a = session.scalar(select(AssetModel.id).where(AssetModel.uri == "test://ds_ghost_a")) + far = timezone.datetime(2038, 1, 1) + far_after = timezone.datetime(2038, 1, 2) + session.add_all( + [ + DagModel( + dag_id="ghost_z", + bundle_name="testing", + max_active_tasks=1, + has_task_concurrency_limits=False, + max_consecutive_failed_dag_runs=0, + next_dagrun=far, + next_dagrun_create_after=far_after, + is_stale=False, + has_import_errors=False, + is_paused=False, + asset_expression={"any": [{"uri": "test://ds_ghost_z"}]}, + ), + DagModel( + dag_id="ghost_a", + bundle_name="testing", + max_active_tasks=1, + has_task_concurrency_limits=False, + max_consecutive_failed_dag_runs=0, + next_dagrun=far, + next_dagrun_create_after=far_after, + is_stale=False, + has_import_errors=False, + is_paused=False, + asset_expression={"any": [{"uri": "test://ds_ghost_a"}]}, + ), + ] + ) + session.flush() + + session.add_all( + [ + AssetDagRunQueue(asset_id=id_z, target_dag_id="ghost_z"), + AssetDagRunQueue(asset_id=id_a, target_dag_id="ghost_a"), + ] + ) + session.flush() + + with caplog.at_level(logging.DEBUG, logger="airflow.models.dag"): + _query, triggered_date_by_dag = DagModel.dags_needing_dagruns(session) + + assert "ghost_a" not in triggered_date_by_dag + assert "ghost_z" not in triggered_date_by_dag + msg = next( + r.message + for 
r in caplog.records + if "Dags have queued asset events (ADRQ), but are not found in the serialized_dag table." + in r.message + ) + assert msg.index("ghost_a") < msg.index("ghost_z") + assert ( + session.scalar( + select(func.count()) + .select_from(AssetDagRunQueue) + .where(AssetDagRunQueue.target_dag_id.in_(("ghost_a", "ghost_z"))) + ) + == 2 + ) + def test_dags_needing_dagruns_query_count(self, dag_maker, session): """Test that dags_needing_dagruns avoids N+1 on adrq.asset access.""" num_assets = 10 diff --git a/airflow-core/tests/unit/models/test_taskinstance.py b/airflow-core/tests/unit/models/test_taskinstance.py index bb058d1a7376f..07dbde0ec9b73 100644 --- a/airflow-core/tests/unit/models/test_taskinstance.py +++ b/airflow-core/tests/unit/models/test_taskinstance.py @@ -2653,6 +2653,26 @@ def mock_policy(task_instance: TaskInstance): assert ti.max_tries == expected_max_tries +@pytest.mark.parametrize( + ("weight_rule", "expected_weight"), + [ + pytest.param("downstream", 10 + 5, id="downstream-sums-descendants"), + pytest.param("upstream", 10, id="upstream-no-ancestors"), + pytest.param("absolute", 10, id="absolute-self-only"), + ], +) +def test_refresh_from_task_with_non_serialized_operator(weight_rule, expected_weight): + """Regression: TaskInstance must work with non-serialized operators whose weight_rule is a WeightRule enum.""" + with DAG(dag_id="test_dag"): + root = EmptyOperator(task_id="root", priority_weight=10, weight_rule=weight_rule) + child = EmptyOperator(task_id="child", priority_weight=5) + root >> child + + ti = TI(root, run_id=None, dag_version_id=mock.MagicMock()) + + assert ti.priority_weight == expected_weight + + class TestTaskInstanceRecordTaskMapXComPush: """Test TI.xcom_push() correctly records return values for task-mapping.""" diff --git a/airflow-core/tests/unit/serialization/test_encoders.py b/airflow-core/tests/unit/serialization/test_encoders.py new file mode 100644 index 0000000000000..479af95ae88b9 --- /dev/null +++ 
b/airflow-core/tests/unit/serialization/test_encoders.py @@ -0,0 +1,181 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import pytest +from sqlalchemy import delete + +from airflow.models.trigger import Trigger +from airflow.providers.standard.triggers.file import FileDeleteTrigger +from airflow.serialization.encoders import encode_trigger +from airflow.serialization.enums import DagAttributeTypes as DAT, Encoding +from airflow.triggers.base import BaseEventTrigger + +pytest.importorskip("airflow.providers.apache.kafka") +from airflow.providers.apache.kafka.triggers.await_message import AwaitMessageTrigger + +# Trigger fixtures covering primitive-only kwargs (FileDeleteTrigger) and +# non-primitive kwargs like tuple/dict (AwaitMessageTrigger). 
+_TRIGGER_PARAMS = [ + pytest.param( + FileDeleteTrigger(filepath="/tmp/test.txt", poke_interval=5.0), + id="primitive_kwargs_only", + ), + pytest.param(AwaitMessageTrigger(topics=()), id="empty_tuple"), + pytest.param( + AwaitMessageTrigger(topics=("fizz_buzz",), poll_timeout=1.0, commit_offset=True), + id="single_topic_tuple", + ), + pytest.param( + AwaitMessageTrigger( + topics=["t1", "t2"], + apply_function="my.module.func", + apply_function_args=["a", "b"], + apply_function_kwargs={"key": "value"}, + kafka_config_id="my_kafka", + poll_interval=2, + poll_timeout=3, + ), + id="all_non_primitive_kwargs", + ), +] + + +class TestEncodeTrigger: + """Tests for encode_trigger round-trip correctness. + + When a serialized DAG with asset-watcher triggers is re-serialized + (e.g. in ``add_asset_trigger_references``), ``encode_trigger`` receives + a dict whose kwargs already contain wrapped values like + ``{__type: tuple, __var: [...]}``. The fix ensures these are unwrapped + before re-serialization to prevent double-wrapping. 
+ """ + + def test_encode_from_trigger_object(self): + """Non-primitive kwargs are properly serialized from a trigger object.""" + trigger = AwaitMessageTrigger(topics=()) + result = encode_trigger(trigger) + + assert ( + result["classpath"] == "airflow.providers.apache.kafka.triggers.await_message.AwaitMessageTrigger" + ) + # tuple kwarg is wrapped by BaseSerialization + assert result["kwargs"]["topics"] == {Encoding.TYPE: DAT.TUPLE, Encoding.VAR: []} + # Primitives pass through as-is + assert result["kwargs"]["poll_timeout"] == 1 + assert result["kwargs"]["commit_offset"] is True + + def test_encode_file_delete_trigger(self): + """Primitive-only kwargs pass through without wrapping.""" + trigger = FileDeleteTrigger(filepath="/tmp/test.txt", poke_interval=10.0) + result = encode_trigger(trigger) + + assert result["classpath"] == "airflow.providers.standard.triggers.file.FileDeleteTrigger" + assert result["kwargs"]["filepath"] == "/tmp/test.txt" + assert result["kwargs"]["poke_interval"] == 10.0 + + @pytest.mark.parametrize("trigger", _TRIGGER_PARAMS) + def test_re_encode_is_idempotent(self, trigger): + """Encoding the output of encode_trigger again must not double-wrap kwargs.""" + first = encode_trigger(trigger) + second = encode_trigger(first) + + assert first == second + + @pytest.mark.parametrize("trigger", _TRIGGER_PARAMS) + def test_multiple_round_trips_are_stable(self, trigger): + """Encoding the same trigger dict many times remains idempotent.""" + result = encode_trigger(trigger) + for _ in range(5): + result = encode_trigger(result) + + assert result == encode_trigger(trigger) + + +@pytest.mark.db_test +class TestTriggerHashConsistency: + """Verify ``BaseEventTrigger.hash`` produces the same value for kwargs + from the DAG-parsed path and kwargs read back from the database. 
+ + This mirrors the comparison in + ``AssetModelOperation.add_asset_trigger_references`` + (``airflow-core/src/airflow/dag_processing/collection.py``), where: + + * **DAG side** — ``BaseEventTrigger.hash(classpath, encode_trigger(watcher.trigger)["kwargs"])`` + * **DB side** — ``BaseEventTrigger.hash(trigger.classpath, trigger.kwargs)`` + where the ``Trigger`` row was persisted with ``encrypt_kwargs`` and + read back via ``_decrypt_kwargs``. + + If the hashes diverge, the scheduler sees phantom diffs and keeps + recreating trigger rows on every heartbeat. + """ + + @pytest.fixture(autouse=True) + def _clean_triggers(self, session): + session.execute(delete(Trigger)) + session.commit() + yield + session.execute(delete(Trigger)) + session.commit() + + @pytest.mark.parametrize("trigger", _TRIGGER_PARAMS) + def test_hash_matches_after_db_round_trip(self, trigger, session): + """Hash from DAG-parsed kwargs equals hash from a DB-persisted Trigger.""" + encoded = encode_trigger(trigger) + classpath = encoded["classpath"] + dag_kwargs = encoded["kwargs"] + + # DAG side hash — what add_asset_trigger_references computes + dag_hash = BaseEventTrigger.hash(classpath, dag_kwargs) + + # Persist to DB (same as add_asset_trigger_references lines 1073-1074) + trigger_row = Trigger(classpath=classpath, kwargs=dag_kwargs) + session.add(trigger_row) + session.flush() + + # Force a real DB read — expire the instance and re-select + trigger_id = trigger_row.id + session.expire(trigger_row) + reloaded = session.get(Trigger, trigger_id) + + # DB side hash — what add_asset_trigger_references computes from ORM + db_hash = BaseEventTrigger.hash(reloaded.classpath, reloaded.kwargs) + + assert dag_hash == db_hash + + @pytest.mark.parametrize("trigger", _TRIGGER_PARAMS) + def test_hash_matches_after_re_encode_and_db_round_trip(self, trigger, session): + """Hash stays consistent when encode_trigger output is re-encoded + (deserialized-DAG re-serialization path) before DB storage. 
+ """ + re_encoded = encode_trigger(encode_trigger(trigger)) + classpath = re_encoded["classpath"] + dag_kwargs = re_encoded["kwargs"] + + dag_hash = BaseEventTrigger.hash(classpath, dag_kwargs) + + trigger_row = Trigger(classpath=classpath, kwargs=dag_kwargs) + session.add(trigger_row) + session.flush() + + trigger_id = trigger_row.id + session.expire(trigger_row) + reloaded = session.get(Trigger, trigger_id) + + db_hash = BaseEventTrigger.hash(reloaded.classpath, reloaded.kwargs) + + assert dag_hash == db_hash diff --git a/airflow-core/tests/unit/ti_deps/deps/test_trigger_rule_dep.py b/airflow-core/tests/unit/ti_deps/deps/test_trigger_rule_dep.py index d43e3ef25b4a3..f0820eb5b4673 100644 --- a/airflow-core/tests/unit/ti_deps/deps/test_trigger_rule_dep.py +++ b/airflow-core/tests/unit/ti_deps/deps/test_trigger_rule_dep.py @@ -967,7 +967,83 @@ def test_teardown_waits_for_multiple_cleared_in_scope_tasks( ) assert len(dep_statuses) == 1 assert not dep_statuses[0].passed - assert "2 in-scope" in dep_statuses[0].reason + + @pytest.mark.parametrize("flag_upstream_failed", [True, False]) + def test_teardown_waits_for_parallel_branches(self, session, dag_maker, flag_upstream_failed): + """ + Teardown should wait when parallel branches have incomplete tasks. + + Reproduces the DAG shape from https://github.com/apache/airflow/issues/29332: + setup >> [t_fail, t_slow] >> downstream >> teardown + where t_slow is still running when teardown is evaluated. 
+ """ + with dag_maker(session=session): + setup = EmptyOperator(task_id="setup").as_setup() + t_fail = EmptyOperator(task_id="t_fail") + t_slow = EmptyOperator(task_id="t_slow") + downstream = EmptyOperator(task_id="downstream") + teardown_task = EmptyOperator(task_id="teardown").as_teardown(setups=setup) + setup >> [t_fail, t_slow] >> downstream >> teardown_task + + dr = dag_maker.create_dagrun() + tis = {ti.task_id: ti for ti in dr.get_task_instances(session=session)} + + tis["setup"].state = SUCCESS + tis["t_fail"].state = FAILED + # t_slow is still running (state=None) + session.merge(tis["setup"]) + session.merge(tis["t_fail"]) + session.flush() + + teardown_ti = tis["teardown"] + teardown_ti.task = dag_maker.dag.get_task("teardown") + + dep_statuses = tuple( + TriggerRuleDep()._evaluate_trigger_rule( + ti=teardown_ti, + dep_context=DepContext(flag_upstream_failed=flag_upstream_failed), + session=session, + ) + ) + assert len(dep_statuses) == 1 + assert not dep_statuses[0].passed + + @pytest.mark.parametrize("flag_upstream_failed", [True, False]) + def test_teardown_runs_when_in_scope_tasks_failed(self, session, dag_maker, flag_upstream_failed): + """ + Teardown should run when all in-scope tasks are done, even if some FAILED. + + Teardowns must run regardless of upstream failure state to clean up resources. 
+ """ + with dag_maker(session=session): + setup = EmptyOperator(task_id="setup").as_setup() + t1 = EmptyOperator(task_id="t1") + t2 = EmptyOperator(task_id="t2") + teardown_task = EmptyOperator(task_id="teardown").as_teardown(setups=setup) + setup >> t1 >> t2 >> teardown_task + + dr = dag_maker.create_dagrun() + tis = {ti.task_id: ti for ti in dr.get_task_instances(session=session)} + + tis["setup"].state = SUCCESS + tis["t1"].state = FAILED + tis["t2"].state = UPSTREAM_FAILED + for tid in ("setup", "t1", "t2"): + session.merge(tis[tid]) + session.flush() + + teardown_ti = tis["teardown"] + teardown_ti.task = dag_maker.dag.get_task("teardown") + + dep_statuses = tuple( + TriggerRuleDep()._evaluate_trigger_rule( + ti=teardown_ti, + dep_context=DepContext(flag_upstream_failed=flag_upstream_failed), + session=session, + ) + ) + # All in-scope tasks are in terminal states, teardown should proceed + assert not dep_statuses @pytest.mark.parametrize(("flag_upstream_failed", "expected_ti_state"), [(True, SKIPPED), (False, None)]) def test_all_skipped_tr_failure( diff --git a/airflow-ctl/.pre-commit-config.yaml b/airflow-ctl/.pre-commit-config.yaml index e63268b077ef8..c45a1985ec129 100644 --- a/airflow-ctl/.pre-commit-config.yaml +++ b/airflow-ctl/.pre-commit-config.yaml @@ -25,21 +25,9 @@ repos: - repo: local hooks: - id: mypy-airflow-ctl - stages: ['pre-push'] name: Run mypy for airflow-ctl language: python - entry: ../scripts/ci/prek/mypy.py - files: - (?x) - ^src/airflowctl/.*\.py$| - ^tests/.*\.py$ - exclude: .*generated.py - require_serial: true - - id: mypy-airflow-ctl - stages: ['manual'] - name: Run mypy for airflow-ctl (manual) - language: python - entry: ../scripts/ci/prek/mypy_folder.py airflow-ctl + entry: ../scripts/ci/prek/mypy_local_folder.py airflow-ctl pass_filenames: false files: ^.*\.py$ require_serial: true diff --git a/airflow-ctl/src/airflowctl/ctl/commands/connection_command.py b/airflow-ctl/src/airflowctl/ctl/commands/connection_command.py index 
b1a8a820998ac..5dcb63ce23277 100644 --- a/airflow-ctl/src/airflowctl/ctl/commands/connection_command.py +++ b/airflow-ctl/src/airflowctl/ctl/commands/connection_command.py @@ -67,8 +67,8 @@ def import_(args, api_client=NEW_API_CLIENT) -> None: response = api_client.connections.bulk(BulkBodyConnectionBody(actions=[connection_create_action])) if response.create.errors: rich.print(f"[red]Failed to import connections: {response.create.errors}[/red]") - raise SystemExit + raise SystemExit(1) rich.print(f"[green]Successfully imported {response.create.success} connection(s)[/green]") except Exception as e: rich.print(f"[red]Failed to import connections: {e}[/red]") - raise SystemExit + raise SystemExit(1) diff --git a/airflow-ctl/src/airflowctl/ctl/commands/variable_command.py b/airflow-ctl/src/airflowctl/ctl/commands/variable_command.py index 466be2ccac70d..88bf33a0f0197 100644 --- a/airflow-ctl/src/airflowctl/ctl/commands/variable_command.py +++ b/airflow-ctl/src/airflowctl/ctl/commands/variable_command.py @@ -51,7 +51,7 @@ def import_(args, api_client=NEW_API_CLIENT) -> list[str]: vars_to_update = [] for k, v in var_json.items(): value, description = v, None - if isinstance(v, dict) and v.get("value"): + if isinstance(v, dict) and "value" in v: value, description = v["value"], v.get("description") vars_to_update.append( diff --git a/airflow-ctl/tests/airflow_ctl/ctl/commands/test_connections_command.py b/airflow-ctl/tests/airflow_ctl/ctl/commands/test_connections_command.py index f944e66ab1f24..bdfb759d0a91d 100644 --- a/airflow-ctl/tests/airflow_ctl/ctl/commands/test_connections_command.py +++ b/airflow-ctl/tests/airflow_ctl/ctl/commands/test_connections_command.py @@ -124,11 +124,12 @@ def test_import_error(self, api_client_maker, tmp_path, monkeypatch): } expected_json_path.write_text(json.dumps(connection_file)) - with pytest.raises(SystemExit): + with pytest.raises(SystemExit) as exc_info: connection_command.import_( self.parser.parse_args(["connections", 
"import", expected_json_path.as_posix()]), api_client=api_client, ) + assert exc_info.value.code == 1 def test_import_without_extra_field(self, api_client_maker, tmp_path, monkeypatch): """Import succeeds when JSON omits the ``extra`` field (#62653). diff --git a/airflow-ctl/tests/airflow_ctl/ctl/commands/test_variable_command.py b/airflow-ctl/tests/airflow_ctl/ctl/commands/test_variable_command.py index 9703c8f866b90..a0598d03459f8 100644 --- a/airflow-ctl/tests/airflow_ctl/ctl/commands/test_variable_command.py +++ b/airflow-ctl/tests/airflow_ctl/ctl/commands/test_variable_command.py @@ -83,6 +83,37 @@ def test_import_success(self, api_client_maker, tmp_path, monkeypatch): ) assert response == [self.key] + @pytest.mark.parametrize( + "falsy_value", + [ + "", + 0, + False, + ], + ids=["empty_string", "zero", "false"], + ) + def test_import_falsy_values(self, api_client_maker, tmp_path, monkeypatch, falsy_value): + """Test that falsy values (empty string, 0, False) are correctly imported.""" + api_client = api_client_maker( + path="/api/v2/variables", + response_json=self.bulk_response_success.model_dump(), + expected_http_status_code=200, + kind=ClientKind.CLI, + ) + + monkeypatch.chdir(tmp_path) + expected_json_path = tmp_path / self.export_file_name + variable_file = { + self.key: {"value": falsy_value, "description": "test falsy value"}, + } + + expected_json_path.write_text(json.dumps(variable_file)) + response = variable_command.import_( + self.parser.parse_args(["variables", "import", expected_json_path.as_posix()]), + api_client=api_client, + ) + assert response == [self.key] + def test_import_error(self, api_client_maker, tmp_path, monkeypatch): api_client = api_client_maker( path="/api/v2/variables", diff --git a/contributing-docs/07_local_virtualenv.rst b/contributing-docs/07_local_virtualenv.rst index e6a0409ac9d80..b75db8d4dbd9e 100644 --- a/contributing-docs/07_local_virtualenv.rst +++ b/contributing-docs/07_local_virtualenv.rst @@ -267,57 +267,52 @@ 
for the provider is as simple as running: uv run pytest -Installing "golden" version of dependencies -------------------------------------------- +Locked versions of dependencies +------------------------------- -Whatever virtualenv solution you use, when you want to make sure you are using the same -version of dependencies as in main, you can install recommended version of the dependencies by using pip: -constraint-python.txt files as ``constraint`` file. This might be useful -to avoid "works-for-me" syndrome, where you use different version of dependencies than the ones -that are used in main, CI tests and by other contributors. +The ``uv.lock`` file is committed to the Airflow repository and is used by ``uv sync`` to ensure +consistent dependency versions across all developers. When you run ``uv sync``, it uses the lock file +to install exact dependency versions, so you don't need to pass constraint files manually. -There are different constraint files for different python versions. For example this command will install -all basic devel requirements and requirements of google provider as last successfully tested for Python 3.10: - -.. code:: bash - - uv pip install -e ".[devel,google]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.10.txt" +The ``uv sync`` command prefers the locked versions of dependencies from ``uv.lock``. It will only +attempt to resolve new dependencies when ``pyproject.toml`` files change (e.g. when a new dependency +is added or version bounds are modified). This means that day-to-day ``uv sync`` is fast and +deterministic — it simply installs what the lock file specifies without re-resolving the dependency +tree. 
+If you want to make sure that ``uv sync`` does not update your lock file at all (for example in CI +or when running tests), you can pass the ``--frozen`` flag: -In the future we will utilise ``uv.lock`` to manage dependencies and constraints, but for the moment we do not -commit ``uv.lock`` file to Airflow repository because we need to figure out automation of updating the ``uv.lock`` -very frequently (few times a day sometimes). With Airflow's 700+ dependencies it's all but guaranteed that we -will have 3-4 changes a day and currently automated constraints generation mechanism in ``canary`` build keeps -constraints updated, but for ASF policy reasons we cannot update ``uv.lock`` in the same way - but work is in -progress to fix it. - -Make sure to use latest main for such installation, those constraints are "development constraints" and they -are refreshed several times a day to make sure they are up to date with the latest changes in the main branch. +.. code:: bash -Note that this might not always work as expected, because the constraints are not always updated -immediately after the dependencies are updated, sometimes there is a very recent change (few hours, rarely more -than a day) which still runs in ``canary`` build and constraints will not be updated until the canary build -succeeds. Usually what works in this case is running your install command without constraints. + uv sync --frozen -You can upgrade just airflow, without paying attention to provider's dependencies by using -the 'constraints-no-providers' constraint files. This allows you to keep installed provider dependencies -and install to latest supported ones by pure Airflow core. +This will fail if the lock file is out of date with respect to ``pyproject.toml``, rather than +silently updating it. This is useful when you want to guarantee fully reproducible environments. -.. code:: bash +Cooldown via ``exclude-newer`` +.............................. 
- uv pip install -e ".[devel]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.10.txt" +The ``[tool.uv]`` section in the top-level ``pyproject.toml`` sets ``exclude-newer = "4 days"``. +This acts as a cooldown period — when ``uv`` resolves new dependencies, it ignores package versions +released in the last 4 days. This protects against broken or yanked releases that might otherwise +immediately break the dependency resolution for all developers. When ``uv`` writes the lock file, it +records the resolved ``exclude-newer`` timestamp so that subsequent ``uv sync`` calls use the same +cutoff, ensuring consistency across machines. -These are examples of the development options available with the local virtualenv in your IDE: +Constraints generated from the lock file +......................................... -* local debugging; -* Airflow source view; -* auto-completion; -* documentation support; -* unit tests. +Airflow also publishes traditional ``pip``-style constraint files (see +`Airflow dependencies and extras <13_airflow_dependencies_and_extras.rst>`_ for details). When +installing Airflow from sources, these constraint files are generated directly from ``uv.lock`` using +``uv export --frozen``, which converts the lock file into a flat list of pinned versions suitable for +``pip install --constraint``. This ensures that both the ``uv sync`` workflow and the ``pip`` constraint +workflow install the same dependency versions. -This document describes minimum requirements and instructions for using a standalone version of the local virtualenv. +The lock file is updated regularly — whenever dependencies are changed via any ``pyproject.toml`` and +when ``breeze ci upgrade`` is run. Make sure to use the latest main branch to get the most +up-to-date ``uv.lock``. 
Running Tests ------------- diff --git a/contributing-docs/08_static_code_checks.rst b/contributing-docs/08_static_code_checks.rst index 2750cbafe5dc7..23256c6c0e026 100644 --- a/contributing-docs/08_static_code_checks.rst +++ b/contributing-docs/08_static_code_checks.rst @@ -173,17 +173,18 @@ But you can run prek hooks manually as needed. prek - Run only mypy check on your staged airflow and dev files by specifying the - ``mypy-airflow-core`` and ``mypy-dev`` prek hooks (more hooks can be specified): + ``mypy-airflow-core`` and ``mypy-dev`` prek hooks (more hooks can be specified). + For non-provider projects, mypy runs locally via ``uv`` (no breeze image needed): .. code-block:: bash - prek mypy-airflow-core mypy-dev --stage pre-push + prek mypy-airflow-core mypy-dev - Run only mypy airflow checks on all "airflow-core" files by using: .. code-block:: bash - prek mypy-airflow-core --all-files --stage pre-push + prek mypy-airflow-core --all-files - Run all pre-commit stage hooks on all files by using: @@ -276,60 +277,43 @@ Most of the checks we run are configured to run automatically when you commit th there are some checks that are not run automatically and you need to run them manually. You can run them manually by running ``prek --stage manual ``. -Special pin-versions prek -------------------------- - -There is a separate prek ``pin-versions`` prek hook which is used to pin versions of -GitHub Actions in the CI workflows. - -This action requires ``GITHUB_TOKEN`` to be set, otherwise you might hit the rate limits with GitHub API, it -It is not run automatically when you commit the code but in runs as a separate job in the CI. -However, you can run it manually by running: - -.. 
code-block:: bash - - export GITHUB_TOKEN=YOUR_GITHUB_TOKEN - prek --all-files --stage manual --verbose pin-versions - - Mypy checks ----------- -When we run mypy checks locally when pushing a change to PR, the ``mypy-*`` checks is run, ``mypy-airflow``, -``mypy-dev``, ``mypy-providers``, ``mypy-airflow-ctl``, depending on the files you are changing. The mypy checks -are run by passing those changed files to mypy. This is way faster than running checks for all files (even -if mypy cache is used - especially when you change a file in Airflow core that is imported and used by many -files). You also need to have ``breeze ci-image build --python 3.10`` built locally to run the mypy checks. +When we run mypy checks locally, the ``mypy-*`` checks run depending on the files you are changing: +``mypy-airflow-core``, ``mypy-dev``, ``mypy-providers``, ``mypy-task-sdk``, ``mypy-airflow-ctl``, etc. -However, in some cases, it produces different results than when running checks for the whole set -of files, because ``mypy`` does not even know that some types are defined in other files and it might not -be able to follow imports properly if they are dynamic. Therefore in CI we run ``mypy`` check for whole -directories (``airflow`` - excluding providers, ``providers``, ``dev`` and ``docs``) to make sure -that we catch all ``mypy`` errors - so you can experience different results when running mypy locally and -in CI. If you want to run mypy checks for all files locally, you can do it by running the following -command (example for ``airflow`` files): +For **non-provider projects** (airflow-core, task-sdk, airflow-ctl, dev, scripts, devel-common), mypy +runs locally using the ``uv`` virtualenv — no breeze CI image is needed. These checks run as regular +prek hooks in the ``pre-commit`` stage, checking whole directories at once. This means they run both +as part of local commits and as part of regular static checks in CI (not as separate mypy CI jobs). +You can also run mypy directly. 
Use ``--frozen`` to avoid updating ``uv.lock``: .. code-block:: bash - prek --stage manual mypy- --all-files + uv run --frozen --project --with "apache-airflow-devel-common[mypy]" mypy path/to/code -For example: +To run the prek hook for a specific project (example for ``airflow-core`` files): .. code-block:: bash - prek --stage manual mypy-airflow --all-files + prek mypy-airflow-core --all-files To show unused mypy ignores for any providers/airflow etc, eg: run below command: .. code-block:: bash + export SHOW_UNUSED_MYPY_WARNINGS=true - prek --stage manual mypy-airflow --all-files + prek mypy-airflow-core --all-files + +For non-provider projects, the local mypy cache is stored in ``.mypy_cache`` at the repo root. + +For **providers**, mypy still runs via breeze (``breeze run mypy``) as a separate CI job and requires +``breeze ci-image build --python 3.10`` to be built locally. Providers use a separate docker-volume +(called ``mypy-cache-volume``) that keeps the cache of last MyPy execution. -MyPy uses a separate docker-volume (called ``mypy-cache-volume``) that keeps the cache of last MyPy -execution in order to speed MyPy checks up (sometimes by order of magnitude). While in most cases MyPy -will handle refreshing the cache when and if needed, there are some cases when it won't (cache invalidation -is the hard problem in computer science). This might happen for example when we upgrade MyPY. In such -cases you might need to manually remove the cache volume by running ``breeze down --cleanup-mypy-cache``. +To clear all mypy caches (both local ``.mypy_cache`` and the Docker volume), run +``breeze down --cleanup-mypy-cache``. 
----------- diff --git a/contributing-docs/13_airflow_dependencies_and_extras.rst b/contributing-docs/13_airflow_dependencies_and_extras.rst index cd43e486ffbdf..7c59ce25d17b1 100644 --- a/contributing-docs/13_airflow_dependencies_and_extras.rst +++ b/contributing-docs/13_airflow_dependencies_and_extras.rst @@ -318,17 +318,17 @@ example ``pip install apache-airflow==1.10.2 Werkzeug<1.0.0``) There are several sets of constraints we keep: -* 'constraints' - these are constraints generated by matching the current Airflow version from sources +* ``constraints`` - these are constraints generated by matching the current Airflow version from sources and providers that are installed from PyPI. Those are constraints used by the users who want to install Airflow with pip, they are named ``constraints-.txt``. -* "constraints-source-providers" - these are constraints generated by using providers installed from +* ``constraints-source-providers`` - these are constraints generated by using providers installed from current sources. While adding new providers their dependencies might change, so this set of providers is the current set of the constraints for Airflow and providers from the current main sources. Those providers are used by CI system to keep "stable" set of constraints. They are named ``constraints-source-providers-.txt`` -* "constraints-no-providers" - these are constraints generated from only Apache Airflow, without any +* ``constraints-no-providers`` - these are constraints generated from only Apache Airflow, without any providers. If you want to manage Airflow separately and then add providers individually, you can use them. Those constraints are named ``constraints-no-providers-.txt``. @@ -375,6 +375,11 @@ using ``constraints-no-providers`` constraint files as well. 
--constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.10.txt" +These constraint files are generated from the ``uv.lock`` file committed in the repository, using +``uv export --frozen`` to convert the lock file into a flat list of pinned versions suitable for +``pip install --constraint``. This means the constraint files always reflect the same dependency +versions that ``uv sync`` installs for developers. + The ``constraints-.txt`` and ``constraints-no-providers-.txt`` will be automatically regenerated by CI job every time after the ``pyproject.toml`` is updated and pushed if the tests are successful. diff --git a/dev/AGENTS.md b/dev/AGENTS.md new file mode 100644 index 0000000000000..947e0ac9e8e42 --- /dev/null +++ b/dev/AGENTS.md @@ -0,0 +1,44 @@ + + + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [dev/ directory guidelines](#dev-directory-guidelines) + - [Scripts](#scripts) + + + +# dev/ directory guidelines + +## Scripts + +New scripts in `dev/` must be standalone Python scripts (not bash). Each script must include +[inline script metadata](https://packaging.python.org/en/latest/specifications/inline-script-metadata/) +placed **after** the Apache License header, so that `uv run` can execute it without any prior +installation: + +```python +#!/usr/bin/env python3 +# Licensed to the Apache Software Foundation (ASF) ... +# http://www.apache.org/licenses/LICENSE-2.0 +# ... +# /// script +# requires-python = ">=3.9" +# dependencies = [ +# "some-package", +# ] +# /// +``` + +If the script only uses the standard library, omit the `dependencies` key but keep the +`requires-python` line. + +Run scripts with: + +```shell +uv run dev/my_script.py [args...] +``` + +Document `uv run` (not `python`) as the invocation method in READMEs and instructions. 
diff --git a/dev/README_AIRFLOW3_DEV.md b/dev/README_AIRFLOW3_DEV.md index c97431bf042e8..3c19a28cf6c75 100644 --- a/dev/README_AIRFLOW3_DEV.md +++ b/dev/README_AIRFLOW3_DEV.md @@ -64,6 +64,17 @@ If you want to have a fix backported to 3.1.x please add (or request to add) "ba When preparing a new 3.1.x release, the release manager will sync the `v3-1-test` branch to `v3-1-stable` and cut the release from the stable branch. PRs should **never** target `v3-1-stable` directly unless explicitly instructed by the release manager. +> [!TIP] +> **Shortcut for first RC candidates:** When preparing the first RC candidate for a new minor release +> (e.g., 3.2.0rc1), it is unlikely to be approved on the first attempt — bugs are typically found during +> RC testing. In this case, the release manager can prepare the RC directly from the `v3-X-test` branch +> without opening a PR to `v3-X-stable`. This saves the overhead of creating and managing a PR that will +> likely need additional changes before GA. However, when using this shortcut, the release manager **must** +> verify that the `v3-X-test` push CI action ("Tests" workflow) has succeeded before cutting the RC. You can +> check this at: +> https://github.com/apache/airflow/actions/workflows/ci-amd-arm.yml?query=event%3Apush+branch%3Av3-2-test +> (adjust the branch filter for the relevant `v3-X-test` branch). + ## Developing for Airflow 3 PRs should target `main` branch. 
diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md index ce6dd6864dd45..0e5a9bf336657 100644 --- a/dev/README_RELEASE_AIRFLOW.md +++ b/dev/README_RELEASE_AIRFLOW.md @@ -500,10 +500,21 @@ uv tool install -e ./dev/breeze - PR from the 'test' branch to the 'stable' branch -- When the PR is approved, install `dev/breeze` in a virtualenv: +> [!TIP] +> **Shortcut for first RC candidates:** When preparing the first RC candidate for a new minor release +> (e.g., 3.2.0rc1), it is unlikely to be approved on the first attempt — bugs are typically found during +> RC testing. In this case, the release manager can prepare the RC directly from the `v3-X-test` branch +> without opening a PR to `v3-X-stable`. This saves the overhead of creating and managing a PR that will +> likely need additional changes before GA. However, when using this shortcut, the release manager **must** +> verify that the `v3-X-test` push CI action ("Tests" workflow) has succeeded before cutting the RC. You can +> check this at: +> https://github.com/apache/airflow/actions/workflows/ci-amd-arm.yml?query=event%3Apush+branch%3Av3-2-test +> (adjust the branch filter for the relevant `v3-X-test` branch). + +- When the PR is approved (or when using the shortcut above), install `dev/breeze` in a virtualenv: ```shell script - pip install -e ./dev/breeze + uv pip install -e ./dev/breeze ``` - Set `GITHUB_TOKEN` environment variable. Needed in patch release for generating issue for testing of the RC. @@ -886,13 +897,13 @@ Optionally you can use the `breeze release-management check-release-files` comma present in SVN. This command may also help with verifying installation of the packages. 
```shell script -breeze release-management check-release-files airflow --version ${VERSION_RC} +breeze release-management check-release-files airflow --version ${VERSION_RC} --path-to-airflow-svn=${PATH_TO_AIRFLOW_SVN} ``` You will see commands that you can execute to check installation of the distributions in containers. ```shell script -breeze release-management check-release-files task-sdk --version ${TASK_SDK_VERSION_RC} +breeze release-management check-release-files task-sdk --version ${TASK_SDK_VERSION_RC} --path-to-airflow-svn=${PATH_TO_AIRFLOW_SVN} ``` You will see commands that you can execute to check installation of the distributions in containers. diff --git a/dev/README_RELEASE_AIRFLOWCTL.md b/dev/README_RELEASE_AIRFLOWCTL.md index 6c7cdcd205bfc..2d373e3f7b9f6 100644 --- a/dev/README_RELEASE_AIRFLOWCTL.md +++ b/dev/README_RELEASE_AIRFLOWCTL.md @@ -536,7 +536,7 @@ You can use the `breeze release-management check-release-files` command to verif present in SVN. This command may also help with verifying installation of the packages. ```shell script -breeze release-management check-release-files airflow-ctl --version ${VERSION_RC} +breeze release-management check-release-files airflow-ctl --version ${VERSION_RC} --path-to-airflow-svn=${PATH_TO_AIRFLOW_SVN} ``` You will see commands that you can execute to check installation of the distributions in containers. 
diff --git a/dev/README_RELEASE_PROVIDERS.md b/dev/README_RELEASE_PROVIDERS.md index 745ab206554db..74d806416b0ca 100644 --- a/dev/README_RELEASE_PROVIDERS.md +++ b/dev/README_RELEASE_PROVIDERS.md @@ -30,6 +30,7 @@ - [Perform review of security issues that are marked for the release](#perform-review-of-security-issues-that-are-marked-for-the-release) - [Convert commits to changelog entries and bump provider versions](#convert-commits-to-changelog-entries-and-bump-provider-versions) - [Update versions of dependent providers to the next version](#update-versions-of-dependent-providers-to-the-next-version) + - [Create a PR with the changes](#create-a-pr-with-the-changes) - [Apply incremental changes and merge the PR](#apply-incremental-changes-and-merge-the-pr) - [(Optional) Apply template updates](#optional-apply-template-updates) - [Build Provider distributions for SVN apache upload](#build-provider-distributions-for-svn-apache-upload) @@ -234,6 +235,22 @@ removed. breeze release-management update-providers-next-version ``` +## Create a PR with the changes + +Make sure to set labels: `allow provider dependency bump` and `skip common compat check` to the PR, +so that the PR is not blocked by selective checks. + +You can do it for example this way: + +```shell script +gh pr create \ + --title "Prepare providers release ${RELEASE_DATE}" \ + --label "allow provider dependency bump" \ + --label "skip common compat check" \ + --body "Prepare providers release ${RELEASE_DATE}" \ + --web +``` + ## Apply incremental changes and merge the PR When those changes are generated, you should commit the changes, create a PR and get it reviewed. diff --git a/dev/README_RELEASE_PYTHON_CLIENT.md b/dev/README_RELEASE_PYTHON_CLIENT.md index dff1d511bc886..68d6e57d044ad 100644 --- a/dev/README_RELEASE_PYTHON_CLIENT.md +++ b/dev/README_RELEASE_PYTHON_CLIENT.md @@ -448,7 +448,7 @@ You can use the `breeze release-management check-release-files` command to verif present in SVN. 
This command may also help with verifying installation of the packages. ```shell script -breeze release-management check-release-files python-client --version ${VERSION_RC} +breeze release-management check-release-files python-client --version ${VERSION_RC} --path-to-airflow-svn=${PATH_TO_AIRFLOW_SVN} ``` You will see commands that you can execute to check installation of the distributions in containers. diff --git a/dev/breeze/doc/ci/02_images.md b/dev/breeze/doc/ci/02_images.md index acabc76c32af1..4a72605d7d52d 100644 --- a/dev/breeze/doc/ci/02_images.md +++ b/dev/breeze/doc/ci/02_images.md @@ -443,7 +443,7 @@ can be used for CI images: | `ADDITIONAL_DEV_APT_DEPS` | | Additional apt dev dependencies installed in the first part of the image | | `ADDITIONAL_DEV_APT_ENV` | | Additional env variables defined when installing dev deps | | `AIRFLOW_PIP_VERSION` | `26.0.1` | `pip` version used. | -| `AIRFLOW_UV_VERSION` | `0.11.1` | `uv` version used. | +| `AIRFLOW_UV_VERSION` | `0.11.3` | `uv` version used. | | `AIRFLOW_PREK_VERSION` | `0.3.8` | `prek` version used. | | `AIRFLOW_USE_UV` | `true` | Whether to use UV for installation. | | `PIP_PROGRESS_BAR` | `on` | Progress bar for PIP installation | diff --git a/dev/breeze/doc/images/output_ci_upgrade.svg b/dev/breeze/doc/images/output_ci_upgrade.svg index 930f22c68e246..5370d25bff3fc 100644 --- a/dev/breeze/doc/images/output_ci_upgrade.svg +++ b/dev/breeze/doc/images/output_ci_upgrade.svg @@ -1,4 +1,4 @@ - +